# Copyright (C) 2025 CodeLogic Inc.
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at https://mozilla.org/MPL/2.0/.
"""
Handler for the codelogic-ci tool.
"""
import os
import sys
import re
from collections import Counter
from typing import Optional, Dict, List, Tuple
import mcp.types as types
def analyze_build_logs(successful_log: Optional[str], failed_log: Optional[str]) -> Dict:
"""
Analyze build logs to identify low-value patterns that should be filtered out.
Uses the provided log examples to identify:
- Repetitive lines (likely progress indicators or noise)
- Very short lines (likely formatting or separators)
- Common verbose prefixes (repeated command output)
- Empty lines and separator lines
Returns a dictionary with filtering configuration including:
- patterns_to_filter: List of regex patterns to filter out
- exact_lines_to_filter: List of exact lines to filter (repetitive lines)
- verbose_prefixes: Common prefixes that indicate verbose output
- min_line_length: Minimum line length to keep
- max_repetition: Maximum times a line can repeat before filtering
"""
all_logs = []
if successful_log:
all_logs.append(("successful", successful_log))
if failed_log:
all_logs.append(("failed", failed_log))
if not all_logs:
return {}
# Base patterns that are typically low-value (empty lines, separators, etc.)
base_noise_patterns = [
r'^\s*$', # Empty lines
r'^\s*[-=]+\s*$', # Separator lines (dashes, equals)
r'^\s*\.\.\.\s*$', # Ellipsis-only lines
r'^\s*[*]+\s*$', # Asterisk-only lines
]
# Analyze log content to identify noise patterns
lines_by_type = {"successful": [], "failed": []}
line_frequencies = Counter()
all_lines = []
for log_type, log_content in all_logs:
if not log_content:
continue
lines = log_content.split('\n')
lines_by_type[log_type] = lines
all_lines.extend(lines)
line_frequencies.update(lines)
total_lines = len(all_lines)
if total_lines == 0:
return {}
# Identify highly repetitive lines (likely noise - progress indicators, etc.)
# A line is treated as noise if it repeats at least max(2, 5% of total lines) times
repetition_threshold = max(2, int(total_lines * 0.05))
repetitive_lines = [
line.strip() for line, count in line_frequencies.items()
if count >= repetition_threshold and len(line.strip()) > 0
]
# Identify very short lines that appear frequently (likely formatting noise)
short_line_frequencies = Counter()
for line in all_lines:
stripped = line.strip()
if len(stripped) > 0 and len(stripped) < 15:
short_line_frequencies[stripped] += 1
# Short lines that repeat at least as often as the repetition threshold are likely noise
short_noise_lines = [
line for line, count in short_line_frequencies.items()
if count >= repetition_threshold  # lines in this counter are already < 15 chars
]
# Identify common prefixes in verbose output (e.g., "Downloading", "Installing", etc.)
prefix_patterns = Counter()
for line in all_lines[:2000]:  # Sample the first 2000 lines to bound processing time
stripped = line.strip()
if len(stripped) > 10 and ' ' in stripped:
# Get first word as prefix
prefix = stripped.split()[0]
# Only consider prefixes that are reasonable length and appear frequently
if 3 <= len(prefix) <= 30:
prefix_patterns[prefix] += 1
# Prefixes that repeat at least as often as the repetition threshold likely indicate verbose output
verbose_prefixes = [
prefix for prefix, count in prefix_patterns.items()
if count >= repetition_threshold
]
# Identify common patterns in repetitive lines (e.g., "Downloading...", "Building...")
pattern_candidates = []
for line in repetitive_lines[:50]: # Analyze top 50 repetitive lines
stripped = line.strip()
if len(stripped) > 5:
# Look for "Verb..." progress lines and lines that are only a bracketed token (e.g., "[INFO]")
if re.match(r'^[A-Za-z]+\.\.\.\s*$', stripped):
pattern_candidates.append(r'^[A-Za-z]+\.\.\.\s*$')
elif re.match(r'^\[.*?\]\s*$', stripped):
pattern_candidates.append(r'^\[.*?\]\s*$')
# Build filtering configuration - focus only on identifying noise
filtering_config = {
"patterns_to_filter": base_noise_patterns + list(set(pattern_candidates)),
"exact_lines_to_filter": repetitive_lines[:100], # Top 100 repetitive exact lines
"short_lines_to_filter": short_noise_lines[:100], # Top 100 short noise lines
"verbose_prefixes": verbose_prefixes[:30], # Top 30 verbose prefixes
"min_line_length": 3, # Filter lines shorter than this
"max_repetition": repetition_threshold, # Filter if line repeats more than this
"summary": {
"total_lines_analyzed": total_lines,
"repetitive_lines_found": len(repetitive_lines),
"short_noise_lines_found": len(short_noise_lines),
"verbose_prefixes_found": len(verbose_prefixes)
}
}
return filtering_config
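# Illustrative usage sketch (not invoked by the handler): exercising analyze_build_logs
# with two tiny, hypothetical log snippets. Real callers pass the full build log text
# supplied through the tool arguments.
def _example_analyze_build_logs() -> Dict:  # pragma: no cover - documentation aid
    ok_log = "Restoring packages\nRestoring packages\nBuild succeeded\n"
    fail_log = "Restoring packages\nerror CS1002: ; expected\n"
    config = analyze_build_logs(ok_log, fail_log)
    # "Restoring packages" repeats often enough to land in exact_lines_to_filter,
    # while the one-off error line is left untouched for downstream reporting.
    return config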
def generate_log_filter_script(filtering_config: Dict, platform: str) -> str:
"""
Generate a log filtering script based on the filtering configuration.
Filters out identified low-value patterns from the log examples.
Returns a bash filter_log() shell function; the platform argument is currently unused.
"""
if not filtering_config:
return ""
patterns = filtering_config.get("patterns_to_filter", [])
exact_lines = filtering_config.get("exact_lines_to_filter", [])
short_lines = filtering_config.get("short_lines_to_filter", [])
verbose_prefixes = filtering_config.get("verbose_prefixes", [])
min_line_length = filtering_config.get("min_line_length", 3)
max_repetition = filtering_config.get("max_repetition", 3)
# Generate bash/shell filtering script
filter_script = f"""# Log filtering script to reduce verbosity
# This script filters out low-value log content identified from your build log examples
filter_log() {{
local input_file="$1"
local output_file="$2"
# Create temporary file
local temp_file=$(mktemp)
# Process each line
while IFS= read -r line || [ -n "$line" ]; do
skip_line=false
# Filter empty lines
if [ -z "${{line// }}" ]; then
skip_line=true
fi
# Filter very short lines
if [ ${{#line}} -lt {min_line_length} ]; then
skip_line=true
fi
# Filter known noise patterns
"""
# Add pattern filtering
for pattern in patterns[:15]: # Limit to top 15 patterns
escaped_pattern = pattern.replace("'", "'\\''")  # only single quotes need escaping inside the single-quoted grep pattern
filter_script += f""" if echo "$line" | grep -qE '{escaped_pattern}'; then
skip_line=true
fi
"""
# Add exact line filtering (repetitive lines)
for exact_line in exact_lines[:50]: # Limit to top 50 exact lines
escaped_line = exact_line.replace('\\', '\\\\').replace('"', '\\"').replace('$', '\\$').replace('`', '\\`')  # escape backslashes first for the double-quoted comparison
filter_script += f""" if [ "$line" = "{escaped_line}" ]; then
skip_line=true
fi
"""
# Add short line filtering
for short_line in short_lines[:50]: # Limit to top 50 short lines
escaped_short = short_line.replace('\\', '\\\\').replace('"', '\\"').replace('$', '\\$').replace('`', '\\`')  # escape backslashes first for the double-quoted comparison
filter_script += f""" if [ "$line" = "{escaped_short}" ]; then
skip_line=true
fi
"""
# Add verbose prefix filtering
if verbose_prefixes:
filter_script += """ # Filter lines starting with verbose prefixes (if prefix appears too frequently)
"""
for prefix in verbose_prefixes[:20]: # Limit to top 20 prefixes
escaped_prefix = re.escape(prefix).replace("'", "'\\''")  # regex-escape the prefix and guard single quotes
filter_script += f""" if echo "$line" | grep -qE '^{escaped_prefix}'; then
prefix_count=$(grep -c '^{escaped_prefix}' "$input_file" 2>/dev/null || echo "0")
if [ "$prefix_count" -gt {max_repetition} ]; then
skip_line=true
fi
fi
"""
filter_script += """ # Output line if not filtered
if [ "$skip_line" = false ]; then
echo "$line"
fi
done < "$input_file" > "$temp_file"
# Remove duplicate lines (keep the first occurrence of each)
awk '!seen[$0]++' "$temp_file" > "$output_file"
rm -f "$temp_file"
}
"""
return filter_script
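# Illustrative usage sketch (not invoked by the handler): persisting the generated shell
# snippet so a pipeline step can source it. The output filename below is a hypothetical
# example, not a path the tool requires.
def _example_write_filter_script(filtering_config: Dict, path: str = "codelogic_log_filter.sh") -> None:  # pragma: no cover
    script = generate_log_filter_script(filtering_config, platform="generic")
    if script:
        with open(path, "w", encoding="utf-8") as handle:
            handle.write(script)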
def generate_log_filtering_instructions(filtering_config: Optional[Dict], platform: str, agent_type: str = "dotnet") -> str:
"""
Generate instructions for integrating log filtering into CI/CD pipelines.
"""
if not filtering_config:
return ""
summary = filtering_config.get("summary", {})
filter_script = generate_log_filter_script(filtering_config, platform)
instructions = f"""
## 📊 Log Filtering Configuration
Based on analysis of your build logs, the following filtering has been configured to reduce verbosity:
### Analysis Summary
- **Total lines analyzed**: {summary.get('total_lines_analyzed', 0)}
- **Repetitive lines found**: {summary.get('repetitive_lines_found', 0)} (lines that repeat frequently)
- **Short noise lines found**: {summary.get('short_noise_lines_found', 0)} (very short lines that appear often)
- **Verbose prefixes identified**: {summary.get('verbose_prefixes_found', 0)} (common prefixes indicating verbose output)
### Filtering Strategy
The log filtering will:
1. **Remove identified noise patterns**: Based on analysis of your provided log examples, the following low-value content will be filtered:
- Repetitive lines (exact lines that appear many times)
- Very short lines (identified from your logs)
- Verbose prefixes (common prefixes that indicate repetitive verbose output)
- Empty lines and separator lines
2. **Keep everything else**: All other log content is preserved - we only filter out the specific noise patterns identified from your examples
3. **Reduce verbosity**: Focus on removing known noise patterns from your specific build output without trying to predict what's valuable
### Integration Instructions
**IMPORTANT**: Apply log filtering BEFORE sending logs to CodeLogic. This ensures only valuable information is sent.
"""
if platform == "jenkins":
instructions += f"""
#### For Jenkins:
Add this filtering step in your post block before sending build info:
```groovy
// Add log filtering function
def filterLog(inputFile, outputFile) {{
sh '''
{filter_script}
# Apply filtering
filter_log "${{inputFile}}" "${{outputFile}}"
'''
}}
// In your post block, before send_build_info:
post {{
always {{
script {{
// Filter logs before sending
filterLog("${{WORKSPACE}}/logs/codelogic-build.log", "${{WORKSPACE}}/logs/codelogic-build-filtered.log")
// Use filtered log for CodeLogic
sh '''
docker run \\
--pull always \\
--rm \\
--env CODELOGIC_HOST="${{CODELOGIC_HOST}}" \\
--env AGENT_UUID="${{AGENT_UUID}}" \\
--env AGENT_PASSWORD="${{AGENT_PASSWORD}}" \\
--volume "${{WORKSPACE}}:/scan" \\
--volume "${{WORKSPACE}}/logs:/log_file_path" \\
${{CODELOGIC_HOST}}/codelogic_{agent_type}:latest send_build_info \\
--agent-uuid="${{AGENT_UUID}}" \\
--agent-password="${{AGENT_PASSWORD}}" \\
--server="${{CODELOGIC_HOST}}" \\
--job-name="${{JOB_NAME}}" \\
--build-number="${{BUILD_NUMBER}}" \\
--build-status="${{currentBuild.result}}" \\
--pipeline-system="Jenkins" \\
--log-file="/log_file_path/codelogic-build-filtered.log" \\
--log-lines=1000 \\
--timeout=60 \\
--verbose
'''
}}
}}
}}
```
"""
elif platform == "github-actions":
instructions += f"""
#### For GitHub Actions:
Add this filtering step before sending build info:
```yaml
- name: Filter build logs
if: always()
run: |
{filter_script}
# Apply filtering
filter_log logs/build.log logs/build-filtered.log
- name: Send Build Info
if: always()
run: |
docker run \\
--pull always \\
--rm \\
--env CODELOGIC_HOST="${{{{ secrets.CODELOGIC_HOST }}}}" \\
--env AGENT_UUID="${{{{ secrets.AGENT_UUID }}}}" \\
--env AGENT_PASSWORD="${{{{ secrets.AGENT_PASSWORD }}}}" \\
--volume "${{{{ github.workspace }}}}:/scan" \\
--volume "${{{{ github.workspace }}}}/logs:/log_file_path" \\
${{{{ secrets.CODELOGIC_HOST }}}}/codelogic_{agent_type}:latest send_build_info \\
--agent-uuid="${{{{ secrets.AGENT_UUID }}}}" \\
--agent-password="${{{{ secrets.AGENT_PASSWORD }}}}" \\
--server="${{{{ secrets.CODELOGIC_HOST }}}}" \\
--job-name="${{{{ github.repository }}}}" \\
--build-number="${{{{ github.run_number }}}}" \\
--build-status="${{{{ job.status }}}}" \\
--pipeline-system="GitHub Actions" \\
--log-file="/log_file_path/build-filtered.log" \\
--log-lines=1000 \\
--timeout=60 \\
--verbose
continue-on-error: true
```
"""
elif platform == "azure-devops":
instructions += f"""
#### For Azure DevOps:
Add this filtering step before sending build info:
```yaml
- task: Bash@3
displayName: 'Filter build logs'
condition: always()
inputs:
targetType: 'inline'
script: |
{filter_script}
# Apply filtering
filter_log logs/build.log logs/build-filtered.log
- task: Docker@2
displayName: 'Send Build Info'
condition: always()
inputs:
command: 'run'
arguments: |
--pull always \\
--rm \\
--env CODELOGIC_HOST="$(codelogicHost)" \\
--env AGENT_UUID="$(agentUuid)" \\
--env AGENT_PASSWORD="$(agentPassword)" \\
--volume "$(Build.SourcesDirectory):/scan" \\
--volume "$(Build.SourcesDirectory)/logs:/log_file_path" \\
$(codelogicHost)/codelogic_{agent_type}:latest send_build_info \\
--agent-uuid="$(agentUuid)" \\
--agent-password="$(agentPassword)" \\
--server="$(codelogicHost)" \\
--job-name="$(Build.DefinitionName)" \\
--build-number="$(Build.BuildNumber)" \\
--build-status="$(Agent.JobStatus)" \\
--pipeline-system="Azure DevOps" \\
--log-file="/log_file_path/build-filtered.log" \\
--log-lines=1000 \\
--timeout=60 \\
--verbose
continueOnError: true
```
"""
elif platform == "gitlab":
instructions += f"""
#### For GitLab CI/CD:
Add this filtering step before sending build info:
```yaml
filter_logs:
stage: build-info
image: alpine:latest
script:
- |
{filter_script}
# Apply filtering
filter_log logs/build.log logs/build-filtered.log
artifacts:
paths:
- logs/build-filtered.log
expire_in: 1 hour
send_build_info:
stage: build-info
image: docker:latest
services:
- docker:dind
script:
- |
docker run \\
--pull always \\
--rm \\
--env CODELOGIC_HOST="$CODELOGIC_HOST" \\
--env AGENT_UUID="$AGENT_UUID" \\
--env AGENT_PASSWORD="$AGENT_PASSWORD" \\
--volume "$CI_PROJECT_DIR:/scan" \\
--volume "$CI_PROJECT_DIR/logs:/log_file_path" \\
$CODELOGIC_HOST/codelogic_{agent_type}:latest send_build_info \\
--agent-uuid="$AGENT_UUID" \\
--agent-password="$AGENT_PASSWORD" \\
--server="$CODELOGIC_HOST" \\
--job-name="$CI_PROJECT_NAME" \\
--build-number="$CI_PIPELINE_ID" \\
--build-status="$CI_JOB_STATUS" \\
--pipeline-system="GitLab CI/CD" \\
--log-file="/log_file_path/build-filtered.log" \\
--log-lines=1000 \\
--timeout=60 \\
--verbose
dependencies:
- filter_logs
allow_failure: true
```
"""
instructions += """
### Customization
You can further customize the filtering by:
1. Adjusting the `min_line_length` threshold in the filtering script
2. Adding or removing specific patterns in the filtering script
3. Modifying the exact lines or prefixes to filter based on your needs
### Testing
After implementing log filtering:
1. Run a test build and verify the filtered log contains the information you need
2. Check that verbose noise patterns identified from your examples have been reduced
3. Verify that important information (errors, failures, etc.) is still present
4. Adjust filtering rules in the script if needed - add more patterns or remove overly aggressive filters
"""
return instructions
async def handle_ci(arguments: dict | None) -> list[types.TextContent]:
"""Handle the codelogic-ci tool for unified CI/CD configuration (analyze + build-info)"""
if not arguments:
sys.stderr.write("Missing arguments\n")
raise ValueError("Missing arguments")
agent_type = arguments.get("agent_type")
scan_path = arguments.get("scan_path")
application_name = arguments.get("application_name")
ci_platform = arguments.get("ci_platform", "generic")
successful_build_log = arguments.get("successful_build_log")
failed_build_log = arguments.get("failed_build_log")
# Validate required parameters
if not agent_type or not scan_path or not application_name:
sys.stderr.write("Agent type, scan path, and application name are required\n")
raise ValueError("Agent type, scan path, and application name are required")
# Validate agent type
valid_agent_types = ["dotnet", "java", "sql", "javascript"]
if agent_type not in valid_agent_types:
sys.stderr.write(f"Invalid agent type: {agent_type}. Must be one of: {', '.join(valid_agent_types)}\n")
raise ValueError(f"Invalid agent type: {agent_type}. Must be one of: {', '.join(valid_agent_types)}")
# Validate CI platform
valid_ci_platforms = ["jenkins", "github-actions", "azure-devops", "gitlab", "generic"]
if ci_platform not in valid_ci_platforms:
sys.stderr.write(f"Invalid CI platform: {ci_platform}. Must be one of: {', '.join(valid_ci_platforms)}\n")
raise ValueError(f"Invalid CI platform: {ci_platform}. Must be one of: {', '.join(valid_ci_platforms)}")
# Get server configuration
server_host = os.getenv("CODELOGIC_SERVER_HOST")
# Analyze logs if provided
log_filtering_config = None
if successful_build_log or failed_build_log:
log_filtering_config = analyze_build_logs(successful_build_log, failed_build_log)
# Generate Docker agent configuration based on agent type
agent_config = generate_docker_agent_config(
agent_type, scan_path, application_name,
ci_platform, server_host, log_filtering_config
)
return [
types.TextContent(
type="text",
text=agent_config
)
]
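# Illustrative invocation sketch (not part of the MCP tool wiring): handle_ci is a
# coroutine, so a standalone caller would need an event loop. The argument values
# below are placeholders.
async def _example_handle_ci() -> None:  # pragma: no cover - documentation aid
    contents = await handle_ci({
        "agent_type": "java",
        "scan_path": "/workspace/target",
        "application_name": "demo-app",
        "ci_platform": "github-actions",
    })
    print(contents[0].text[:200])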
def generate_docker_agent_config(agent_type, scan_path, application_name, ci_platform, server_host, log_filtering_config: Optional[Dict] = None):
"""Generate Docker agent configuration with AI-actionable prompts for CI/CD file modification"""
# Agent type mappings
agent_images = {
"dotnet": "codelogic_dotnet",
"java": "codelogic_java",
"sql": "codelogic_sql",
"javascript": "codelogic_javascript"
}
agent_image = agent_images.get(agent_type, "codelogic_dotnet")
# Generate structured data for AI models to directly modify CI/CD files
structured_config = {
"target_files": get_target_files(ci_platform),
"environment_variables": {
"CODELOGIC_HOST": server_host,
"AGENT_UUID": "your-agent-uuid",
"AGENT_PASSWORD": "your-agent-password"
},
"docker_command": generate_docker_command(agent_type, scan_path, application_name, server_host, agent_image),
"file_modifications": generate_file_modifications(ci_platform, agent_type, scan_path, application_name, server_host, agent_image),
"setup_instructions": generate_setup_instructions(ci_platform),
"validation_checks": generate_validation_checks(ci_platform)
}
# Add note about log filtering if configured
log_filtering_note = ""
if log_filtering_config:
log_filtering_note = """
> **📊 Log Filtering Enabled**: Custom log filtering has been configured based on your build log examples.
> See the "Log Filtering Configuration" section below for integration instructions.
"""
config = f"""# CodeLogic CI Integration - Unified CI/CD Guide
{log_filtering_note}## 🎯 AI Model Instructions
**Use the structured data below to directly modify CI/CD files in the repository.**
### Target Files to Modify
{format_target_files(structured_config['target_files'])}
### Environment Variables to Add
{format_environment_variables(structured_config['environment_variables'])}
### Docker Command Template
```bash
{structured_config['docker_command']}
```
### Specific File Modifications
{format_file_modifications(structured_config['file_modifications'])}
### Setup Instructions
{format_setup_instructions(structured_config['setup_instructions'])}
### Validation Checks
{format_validation_checks(structured_config['validation_checks'])}
## 🔧 Environment Variable Usage Guide
### For CodeLogic Test Error Reporting Operations:
- **Required**: `CODELOGIC_HOST`, `AGENT_UUID`, `AGENT_PASSWORD`
- **Purpose**: Send test error reporting metadata and context to CodeLogic
### Send Test Error Reporting Command Syntax:
- **Use explicit parameters**: `--agent-uuid`, `--agent-password`, `--server`
- **Include pipeline system**: `--pipeline-system="Jenkins"`, `"GitHub Actions"`, `"Azure DevOps"`, `"GitLab CI/CD"`
#### **GitHub Actions:**
- `--job-name="${{{{ github.repository }}}}"`
- `--build-number="${{{{ github.run_number }}}}"`
- `--build-status="${{{{ job.status }}}}"`
- `--pipeline-system="GitHub Actions"`
#### **Azure DevOps:**
- `--job-name="$(Build.DefinitionName)"`
- `--build-number="$(Build.BuildNumber)"`
- `--build-status="$(Agent.JobStatus)"`
- `--pipeline-system="Azure DevOps"`
#### **GitLab CI/CD:**
- `--job-name="${{CI_PROJECT_NAME}}"`
- `--build-number="${{CI_PIPELINE_ID}}"`
- `--build-status="${{CI_JOB_STATUS}}"`
- `--pipeline-system="GitLab CI/CD"`
"""
# Add platform-specific configurations
if ci_platform == "jenkins":
config += generate_jenkins_config(agent_type, scan_path, application_name, server_host)
elif ci_platform == "github-actions":
config += generate_github_actions_config(agent_type, scan_path, application_name, server_host)
elif ci_platform == "azure-devops":
config += generate_azure_devops_config(agent_type, scan_path, application_name, server_host)
elif ci_platform == "gitlab":
config += generate_gitlab_config(agent_type, scan_path, application_name, server_host)
else:
config += generate_generic_config(agent_type, scan_path, application_name, server_host)
# Add build info section
config += f"""
## Build Information Integration
To send build information to CodeLogic, add this step after your scan:
```bash
# Send build information
docker run --rm \\
--env CODELOGIC_HOST="{server_host}" \\
--env AGENT_UUID="${{AGENT_UUID}}" \\
--env AGENT_PASSWORD="${{AGENT_PASSWORD}}" \\
--volume "{scan_path}:/scan" \\
--volume "${{PWD}}/logs:/log_file_path" \\
{server_host}/{agent_image}:latest send_build_info \\
--log-file="/log_file_path/build.log"
```
"""
# Add log filtering instructions if log analysis was performed
if log_filtering_config:
config += generate_log_filtering_instructions(log_filtering_config, ci_platform, agent_type)
config += """
## Best Practices
1. **Security**: Store credentials as environment variables or CI secrets, never in code
2. **Performance**: Use `--pull always` to ensure the latest agent version
3. **Logging**: Mount log directories so build and test output can be collected for error reporting
"""
# Append unified pipeline and best-practices guidance
config += """
## Pipeline Overview
- CI Platforms: Jenkins, GitHub Actions, Azure DevOps, GitLab CI
- Agent Types: dotnet, java, sql, javascript
- Core Utilities: analyze (code scanning) and send_build_info (build/test log reporting)
## Build and Test Error Reporting (Two-step requirement)
1. CAPTURE logs to a file (e.g., logs/build.log)
2. SEND with send_build_info, mounting the logs folder and specifying --log-file
### Platform Flags for send_build_info
- Jenkins: --job-name="${JOB_NAME}" --build-number="${BUILD_NUMBER}" --build-status="${currentBuild.result}" --pipeline-system="Jenkins"
- GitHub Actions: --job-name="${{ github.repository }}" --build-number="${{ github.run_number }}" --build-status="${{ job.status }}" --pipeline-system="GitHub Actions"
- Azure DevOps: --job-name="$(Build.DefinitionName)" --build-number="$(Build.BuildNumber)" --build-status="$(Agent.JobStatus)" --pipeline-system="Azure DevOps"
- GitLab CI/CD: --job-name="${CI_PROJECT_NAME}" --build-number="${CI_PIPELINE_ID}" --build-status="${CI_JOB_STATUS}" --pipeline-system="GitLab CI/CD"
### Common Mistakes to Avoid
- WRONG: Sending build info without capturing logs first
- WRONG: Missing --log-file parameter
- WRONG: Not mounting logs volume (e.g., --volume "$PWD/logs:/log_file_path")
- WRONG: Jenkins step as a normal stage (should use post block)
- WRONG: Not using always() / condition: always() so failures are missed
## DevOps Best Practices
### Scan Space Management
- Choose a naming strategy (environment-, branch-, team-, or project-based)
- Replace YOUR_SCAN_SPACE_NAME consistently across pipelines
### Security Configuration (store as secrets)
```bash
CODELOGIC_HOST="https://your-instance.app.codelogic.com"
AGENT_UUID="your-agent-uuid"
AGENT_PASSWORD="your-agent-password"
SCAN_SPACE_PREFIX="your-team" # optional
```
### Error Handling Strategy
1. Scan failures: continue pipeline but mark unstable/allow_failure
2. Build info failures: log warning; do not fail pipeline
3. Network issues: retry with exponential backoff
4. Credential issues: fail fast with clear errors
### Performance Optimization
1. Parallel scans when using multiple agent types
2. Incremental scans with --rescan
3. Set Docker memory limits appropriately
4. Use Docker layer caching
"""
return config
def get_target_files(ci_platform):
"""Get target files for each CI/CD platform"""
platform_files = {
"jenkins": ["Jenkinsfile", ".jenkins/pipeline.groovy"],
"github-actions": [".github/workflows/*.yml"],
"azure-devops": ["azure-pipelines.yml", ".azure-pipelines/*.yml"],
"gitlab": [".gitlab-ci.yml"],
"generic": ["*.yml", "*.yaml", "Jenkinsfile", "Dockerfile"]
}
return platform_files.get(ci_platform, platform_files["generic"])
def generate_docker_command(agent_type, scan_path, application_name, server_host, agent_image):
"""Generate the Docker command template with proper environment variable handling"""
return f"""# CodeLogic Scan Operation - Docker Command
## Required Environment Variables (Scan Operation)
- `CODELOGIC_HOST`: {server_host}
- `AGENT_UUID`: your-agent-uuid
- `AGENT_PASSWORD`: your-agent-password
## Docker Command
```bash
docker run --pull always --rm --interactive \\
--env CODELOGIC_HOST="${{CODELOGIC_HOST}}" \\
--env AGENT_UUID="${{AGENT_UUID}}" \\
--env AGENT_PASSWORD="${{AGENT_PASSWORD}}" \\
--volume "{scan_path}:/scan" \\
{server_host}/{agent_image}:latest analyze \\
--application "{application_name}" \\
--path /scan \\
--scan-space-name "YOUR_SCAN_SPACE_NAME" \\
--rescan \\
--expunge-scan-sessions
```
## ⚠️ CRITICAL: Scan Target Must Be Built Artifacts, NOT Source Code
**CodeLogic scans must target BUILT ARTIFACTS (compiled binaries, assemblies, JARs, etc.), NOT source code.**
**Why built artifacts?**
- CodeLogic analyzes compiled code to understand actual runtime behavior
- Source code analysis doesn't capture compiled dependencies, optimizations, or actual execution paths
- Built artifacts contain the actual code that will run in production
**Common built artifact directories:**
- **.NET**: `installdir/`, `bin/Release/`, `publish/`
- **Java**: `target/`, `build/libs/`, `dist/`
- **JavaScript/Node.js**: `dist/`, `build/`, `out/`
- **Python**: `dist/`, `build/`, `.venv/lib/` (for packaged distributions)
**Determine the correct path:**
1. Look at your build stage output - where are artifacts published/installed?
2. Check for directories like `installdir`, `dist`, `target`, `build`, `publish`
3. The scan path should point to the directory containing compiled binaries, not source `.cs`, `.java`, `.js` files
## Important Notes
- **Only 3 environment variables are needed for the analyze operation**
- **Do NOT include JOB_NAME, BUILD_NUMBER, GIT_COMMIT, or GIT_BRANCH for scan**
- **These additional variables are only used for test error reporting operations**
- **ALWAYS scan built artifacts, never source code**
## Send Build Info Command (Separate Operation)
For sending build information, use the proper `send_build_info` command:
```bash
# Standardized send_build_info command
docker run \\
--pull always \\
--rm \\
--env CODELOGIC_HOST="${{CODELOGIC_HOST}}" \\
--env AGENT_UUID="${{AGENT_UUID}}" \\
--env AGENT_PASSWORD="${{AGENT_PASSWORD}}" \\
--volume "${{WORKSPACE}}:/scan" \\
--volume "${{WORKSPACE}}/logs:/log_file_path" \\
${{CODELOGIC_HOST}}/codelogic_{agent_type}:latest send_build_info \\
--agent-uuid="${{AGENT_UUID}}" \\
--agent-password="${{AGENT_PASSWORD}}" \\
--server="${{CODELOGIC_HOST}}" \\
--job-name="${{JOB_NAME}}" \\
--build-number="${{BUILD_NUMBER}}" \\
--build-status="${{currentBuild.result}}" \\
--pipeline-system="Jenkins" \\
--log-file="/log_file_path/build.log" \\
--log-lines=1000 \\
--timeout=60 \\
--verbose
```
### Send Build Info Options:
- `--agent-uuid`: Required authentication
- `--agent-password`: Required authentication
- `--server`: CodeLogic server URL
- `--job-name`: CI/CD job name (use platform-specific variables)
- `--build-number`: Build number (use platform-specific variables)
- `--build-status`: SUCCESS, FAILURE, UNSTABLE, etc. (use platform-specific variables)
- `--pipeline-system`: Jenkins, GitHub Actions, Azure DevOps, GitLab CI/CD
- `--log-file`: Path to build log file
- `--log-lines`: Number of log lines to send (default: 1000)
- `--timeout`: Network timeout in seconds (default: 60)
- `--verbose`: Extra logging"""
def generate_file_modifications(ci_platform, agent_type, scan_path, application_name, server_host, agent_image):
"""Generate specific file modifications for each platform"""
_dq3 = '"""' # triple double-quote for embedding in f-string (avoids closing the f-string in Jenkins sh blocks)
modifications = {
"jenkins": {
"file": "Jenkinsfile",
"modifications": [
{
"type": "add_environment",
"location": "environment block",
"content": f"""environment {{
CODELOGIC_HOST = '{server_host}'
AGENT_UUID = credentials('codelogic-agent-uuid')
AGENT_PASSWORD = credentials('codelogic-agent-password')
}}"""
},
{
"type": "add_stage",
"location": "after build stages",
"content": f"""stage('CodeLogic Scan') {{
when {{
anyOf {{
branch 'main'
branch 'develop'
branch 'feature/*'
}}
}}
steps {{
catchError(buildResult: 'SUCCESS', stageResult: 'FAILURE') {{
script {{
// ⚠️ CRITICAL: CodeLogic scans must target BUILT ARTIFACTS, not source code
// Determine the artifact path from your build stage output
// Examples:
// .NET: "${{WORKSPACE}}/NetCape/installdir" or "${{WORKSPACE}}/bin/Release"
// Java: "${{WORKSPACE}}/target" or "${{WORKSPACE}}/build/libs"
// JavaScript: "${{WORKSPACE}}/dist" or "${{WORKSPACE}}/build"
def artifactPath = "{scan_path}" // Replace with your actual artifact directory
echo "Scanning BUILT ARTIFACTS at: ${{artifactPath}}"
echo "NOT scanning source code - CodeLogic requires compiled binaries"
}}
// CodeLogic analyze operation - only needs basic auth environment variables
// Do NOT include JOB_NAME, BUILD_NUMBER, GIT_COMMIT, or GIT_BRANCH for analyze
sh '''
# Use artifact path (built artifacts, not source code)
ARTIFACT_PATH="{scan_path}" # Replace with your actual artifact directory
docker run --pull always --rm --interactive \\
--env CODELOGIC_HOST="${{CODELOGIC_HOST}}" \\
--env AGENT_UUID="${{AGENT_UUID}}" \\
--env AGENT_PASSWORD="${{AGENT_PASSWORD}}" \\
--volume "${{WORKSPACE}}:/workspace" \\
${{CODELOGIC_HOST}}/codelogic_{agent_type}:latest analyze \\
--application "{application_name}" \\
--path "/workspace/$ARTIFACT_PATH" \\
--scan-space-name "YOUR_SCAN_SPACE_NAME" \\
--rescan \\
--expunge-scan-sessions
'''
}}
}}
}}
// ❌ DEPRECATED: Stage-based build info (DO NOT USE)
// This approach is INCORRECT because:
// - Won't run if earlier stages fail
// - Can't reliably capture final build status
// - Misses console output from failed builds
//
// Use the post block approach below instead!
// RECOMMENDED: Use post block for build info
// This ensures build info is sent even on failures and captures final status
post {{
always {{
script {{
// Only send build info for main/develop/feature branches
if (env.BRANCH_NAME ==~ /(main|develop|feature\\/.*)/) {{
catchError(buildResult: 'SUCCESS', stageResult: 'FAILURE') {{
// STEP 1: Unstash logs from individual stages (if they were stashed)
// Each stage should stash its logs in a post block:
// post {{
// always {{
// stash includes: 'logs/**', name: 'stage-logs', allowEmpty: true
// }}
// }}
try {{
unstash 'build-logs'
}} catch (Exception e) {{
echo "Warning: Could not unstash build logs: ${{e.message}}"
}}
try {{
unstash 'test-logs'
}} catch (Exception e) {{
echo "Warning: Could not unstash test logs: ${{e.message}}"
}}
// STEP 2: Ensure git repository is in correct state (not detached HEAD)
// This is critical to ensure git branch is properly detected
def branchName = env.BRANCH_NAME ?: sh(script: 'git rev-parse --abbrev-ref HEAD', returnStdout: true).trim()
// Ensure git repository exists - re-checkout if missing (workspace may have been cleaned)
def gitRepoWasMissing = false
if (!fileExists("${{WORKSPACE}}/.git")) {{
echo "Git repository not found in workspace, re-checking out..."
checkout scm
gitRepoWasMissing = true
}}
// Ensure git branch is set correctly (not detached HEAD)
sh {_dq3}
cd ${{WORKSPACE}}
git checkout -b ${{branchName}} 2>/dev/null || git checkout ${{branchName}} 2>/dev/null || true
git symbolic-ref HEAD refs/heads/${{branchName}} 2>/dev/null || true
{_dq3}
// STEP 3: Consolidate all log files into a single file for CodeLogic
// Get build status before shell operations
def buildStatus = currentBuild.result ?: 'SUCCESS'
// Re-unstash logs after checkout if they were removed
if (gitRepoWasMissing) {{
try {{
unstash 'build-logs'
}} catch (Exception e) {{
echo "Warning: Could not unstash build logs after checkout: ${{e.message}}"
}}
try {{
unstash 'test-logs'
}} catch (Exception e) {{
echo "Warning: Could not unstash test logs after checkout: ${{e.message}}"
}}
}}
sh {_dq3}
# Ensure logs directory exists
mkdir -p ${{WORKSPACE}}/logs
# Create consolidated log file with build information
{{
echo "=== Build Information ==="
echo "Build Date: \\$(date)"
echo "Job Name: ${{JOB_NAME}}"
echo "Build Number: ${{BUILD_NUMBER}}"
echo "Branch: ${{branchName}}"
echo "Git Commit: ${{GIT_COMMIT}}"
echo "Build Result: ${{buildStatus}}"
echo ""
echo "=== ALL BUILD AND TEST LOGS ==="
echo ""
# Include all log files from logs directory
if [ -d "${{WORKSPACE}}/logs" ]; then
for logfile in \\$(find "${{WORKSPACE}}/logs" -type f -name "*.log" ! -name "codelogic-build.log" | sort); do
echo ""
echo "=== \\$(basename "\\$logfile") (FULL LOG) ==="
# Convert Windows line endings (CRLF) to Unix (LF) to ensure text-only output
sed 's/\\\\r//g' "\\$logfile" 2>/dev/null || cat "\\$logfile"
echo ""
done
fi
}} | sed 's/\\\\r//g' | tr -d '\\\\000' | LC_ALL=C tr -cd '\\\\011\\\\012\\\\040-\\\\176' > ${{WORKSPACE}}/logs/codelogic-build.log
{_dq3}
// STEP 4: Send build info with consolidated logs to CodeLogic
echo "Sending build info with status: ${{buildStatus}} for branch: ${{branchName}}"
// Verify git repository exists before Docker command
sh {_dq3}
if [ ! -d "${{WORKSPACE}}/.git" ]; then
echo "ERROR: Git repository not found at ${{WORKSPACE}}/.git" >&2
exit 1
fi
{_dq3}
sh '''
docker run \\
--pull always \\
--rm \\
--env CODELOGIC_HOST="${{CODELOGIC_HOST}}" \\
--env AGENT_UUID="${{AGENT_UUID}}" \\
--env AGENT_PASSWORD="${{AGENT_PASSWORD}}" \\
--volume "${{WORKSPACE}}:/scan" \\
--volume "${{WORKSPACE}}/logs:/log_file_path" \\
${{CODELOGIC_HOST}}/codelogic_{agent_type}:latest send_build_info \\
--agent-uuid="${{AGENT_UUID}}" \\
--agent-password="${{AGENT_PASSWORD}}" \\
--server="${{CODELOGIC_HOST}}" \\
--job-name="${{JOB_NAME}}" \\
--build-number="${{BUILD_NUMBER}}" \\
--build-status="${{buildStatus}}" \\
--pipeline-system="Jenkins" \\
--log-file="/log_file_path/codelogic-build.log" \\
--log-lines=1000 \\
--timeout=60 \\
--verbose
'''
}}
}}
}}
}}
}}
// WHY USE POST BLOCK?
// ✅ Runs after all stages complete (captures final status)
// ✅ Always executes (runs even if build fails - critical for error reporting!)
// ✅ Consolidates log files from individual stages (secure - no console log pulling)
// ✅ Proper build status (currentBuild.result is accurate here)
// ❌ Stage-based approach: Won't run if earlier stages fail, can't capture final status
//
// SECURITY NOTE: This approach does NOT pull console logs from Jenkins (which is a security risk).
// Instead, each stage logs its output to files using tee/Tee-Object, and those files are consolidated here."""
}
]
},
"github-actions": {
"file": ".github/workflows/codelogic-scan.yml",
"modifications": [
{
"type": "create_file",
"content": f"""name: CodeLogic Scan
on:
push:
branches: [ main, develop, feature/* ]
pull_request:
branches: [ main ]
jobs:
codelogic-scan:
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
fetch-depth: 0
- name: CodeLogic Scan
run: |
docker run --pull always --rm \\
--env CODELOGIC_HOST="${{{{ secrets.CODELOGIC_HOST }}}}" \\
--env AGENT_UUID="${{{{ secrets.AGENT_UUID }}}}" \\
--env AGENT_PASSWORD="${{{{ secrets.AGENT_PASSWORD }}}}" \\
--volume "${{{{ github.workspace }}}}:/scan" \\
${{{{ secrets.CODELOGIC_HOST }}}}/codelogic_{agent_type}:latest analyze \\
--application "{application_name}" \\
--path /scan \\
--scan-space-name "YOUR_SCAN_SPACE_NAME" \\
--rescan \\
--expunge-scan-sessions
continue-on-error: true
- name: Send Build Info
if: always()
run: |
# Create logs directory
mkdir -p logs
# Capture build information
echo "Build completed at: $(date)" > logs/build.log
echo "Repository: ${{{{ github.repository }}}}" >> logs/build.log
echo "Workflow: ${{{{ github.workflow }}}}" >> logs/build.log
echo "Run Number: ${{{{ github.run_number }}}}" >> logs/build.log
echo "Commit: ${{{{ github.sha }}}}" >> logs/build.log
echo "Branch: ${{{{ github.ref_name }}}}" >> logs/build.log
echo "Build Status: ${{{{ job.status }}}}" >> logs/build.log
# Send build info with proper command syntax
docker run \\
--pull always \\
--rm \\
--env CODELOGIC_HOST="${{{{ secrets.CODELOGIC_HOST }}}}" \\
--env AGENT_UUID="${{{{ secrets.AGENT_UUID }}}}" \\
--env AGENT_PASSWORD="${{{{ secrets.AGENT_PASSWORD }}}}" \\
--volume "${{{{ github.workspace }}}}:/scan" \\
--volume "${{{{ github.workspace }}}}/logs:/log_file_path" \\
${{{{ secrets.CODELOGIC_HOST }}}}/codelogic_{agent_type}:latest send_build_info \\
--agent-uuid="${{{{ secrets.AGENT_UUID }}}}" \\
--agent-password="${{{{ secrets.AGENT_PASSWORD }}}}" \\
--server="${{{{ secrets.CODELOGIC_HOST }}}}" \\
--job-name="${{{{ github.repository }}}}" \\
--build-number="${{{{ github.run_number }}}}" \\
--build-status="${{{{ job.status }}}}" \\
--pipeline-system="GitHub Actions" \\
--log-file="/log_file_path/build.log" \\
--log-lines=1000 \\
--timeout=60 \\
--verbose
continue-on-error: true"""
}
]
},
"azure-devops": {
"file": "azure-pipelines.yml",
"modifications": [
{
"type": "create_file",
"content": f"""trigger:
- main
- develop
pool:
vmImage: 'ubuntu-latest'
variables:
codelogicHost: '{server_host}'
agentUuid: $(codelogicAgentUuid)
agentPassword: $(codelogicAgentPassword)
stages:
- stage: CodeLogicScan
displayName: 'CodeLogic Scan'
jobs:
- job: Scan
displayName: 'Run CodeLogic Scan'
steps:
- task: Docker@2
displayName: 'CodeLogic Scan'
inputs:
command: 'run'
arguments: |
--pull always --rm \\
--env CODELOGIC_HOST="$(codelogicHost)" \\
--env AGENT_UUID="$(agentUuid)" \\
--env AGENT_PASSWORD="$(agentPassword)" \\
--volume "$(Build.SourcesDirectory):/scan" \\
$(codelogicHost)/codelogic_{agent_type}:latest analyze \\
--application "{application_name}" \\
--path /scan \\
--scan-space-name "YOUR_SCAN_SPACE_NAME" \\
--rescan \\
--expunge-scan-sessions
continueOnError: true
- task: Docker@2
displayName: 'Send Build Info'
condition: always()
inputs:
command: 'run'
arguments: |
--pull always \\
--rm \\
--env CODELOGIC_HOST="$(codelogicHost)" \\
--env AGENT_UUID="$(agentUuid)" \\
--env AGENT_PASSWORD="$(agentPassword)" \\
--volume "$(Build.SourcesDirectory):/scan" \\
--volume "$(Build.SourcesDirectory)/logs:/log_file_path" \\
$(codelogicHost)/codelogic_{agent_type}:latest send_build_info \\
--agent-uuid="$(agentUuid)" \\
--agent-password="$(agentPassword)" \\
--server="$(codelogicHost)" \\
--job-name="$(Build.DefinitionName)" \\
--build-number="$(Build.BuildNumber)" \\
--build-status="$(Agent.JobStatus)" \\
--pipeline-system="Azure DevOps" \\
--log-file="/log_file_path/build.log" \\
--log-lines=1000 \\
--timeout=60 \\
--verbose
continueOnError: true
- task: PublishBuildArtifacts@1
displayName: 'Publish Build Logs'
inputs:
pathToPublish: 'logs'
artifactName: 'build-logs'
condition: always()"""
}
]
},
"gitlab": {
"file": ".gitlab-ci.yml",
"modifications": [
{
"type": "create_file",
"content": f"""stages:
- scan
- build-info
variables:
CODELOGIC_HOST: "{server_host}"
DOCKER_DRIVER: overlay2
codelogic_scan:
stage: scan
image: docker:latest
services:
- docker:dind
before_script:
- docker info
script:
- |
docker run --pull always --rm \\
--env CODELOGIC_HOST="$CODELOGIC_HOST" \\
--env AGENT_UUID="$AGENT_UUID" \\
--env AGENT_PASSWORD="$AGENT_PASSWORD" \\
--volume "$CI_PROJECT_DIR:/scan" \\
$CODELOGIC_HOST/codelogic_{agent_type}:latest analyze \\
--application "{application_name}" \\
--path /scan \\
--scan-space-name "YOUR_SCAN_SPACE_NAME" \\
--rescan \\
--expunge-scan-sessions
rules:
- if: $CI_COMMIT_BRANCH == "main"
- if: $CI_COMMIT_BRANCH == "develop"
- if: $CI_COMMIT_BRANCH =~ /^feature\\/.*$/
allow_failure: true
send_build_info:
stage: build-info
image: docker:latest
services:
- docker:dind
script:
- |
# Create logs directory
mkdir -p logs
# Capture build information
echo "Build completed at: $(date)" > logs/build.log
echo "Project: $CI_PROJECT_NAME" >> logs/build.log
echo "Pipeline: $CI_PIPELINE_ID" >> logs/build.log
echo "Job: $CI_JOB_NAME" >> logs/build.log
echo "Commit: $CI_COMMIT_SHA" >> logs/build.log
echo "Branch: $CI_COMMIT_REF_NAME" >> logs/build.log
echo "Build Status: $CI_JOB_STATUS" >> logs/build.log
# Send build info with proper command syntax
docker run \\
--pull always \\
--rm \\
--env CODELOGIC_HOST="$CODELOGIC_HOST" \\
--env AGENT_UUID="$AGENT_UUID" \\
--env AGENT_PASSWORD="$AGENT_PASSWORD" \\
--volume "$CI_PROJECT_DIR:/scan" \\
--volume "$CI_PROJECT_DIR/logs:/log_file_path" \\
$CODELOGIC_HOST/codelogic_{agent_type}:latest send_build_info \\
--agent-uuid="$AGENT_UUID" \\
--agent-password="$AGENT_PASSWORD" \\
--server="$CODELOGIC_HOST" \\
--job-name="$CI_PROJECT_NAME" \\
--build-number="$CI_PIPELINE_ID" \\
--build-status="$CI_JOB_STATUS" \\
--pipeline-system="GitLab CI/CD" \\
--log-file="/log_file_path/build.log" \\
--log-lines=1000 \\
--timeout=60 \\
--verbose
rules:
- if: $CI_COMMIT_BRANCH == "main"
- if: $CI_COMMIT_BRANCH == "develop"
allow_failure: true
artifacts:
paths:
- logs/
expire_in: 30 days"""
}
]
}
}
return modifications.get(ci_platform, {})
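# Illustrative usage sketch (not invoked by the handler): pulling the per-platform
# modification plan and rendering it with format_file_modifications (defined below).
# All argument values are placeholders.
def _example_render_modifications() -> str:  # pragma: no cover - documentation aid
    mods = generate_file_modifications(
        "github-actions", "java", "/workspace/target", "demo-app",
        "https://example.codelogic.local", "codelogic_java",
    )
    return format_file_modifications(mods)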
def generate_setup_instructions(ci_platform):
"""Generate setup instructions for each platform"""
instructions = {
"jenkins": [
"1. Go to Jenkins → Manage Jenkins → Manage Credentials",
"2. Add Secret Text credentials: codelogic-agent-uuid, codelogic-agent-password",
"3. Install Docker Pipeline Plugin if not already installed",
"4. Configure build triggers for main, develop, and feature branches",
"5. Test the pipeline with a sample build"
],
"github-actions": [
"1. Go to repository Settings → Secrets and variables → Actions",
"2. Add repository secrets: CODELOGIC_HOST, AGENT_UUID, AGENT_PASSWORD",
"3. Ensure Docker is available in runner (default for ubuntu-latest)",
"4. Configure branch triggers for main, develop, and feature branches",
"5. Test the workflow with a sample commit"
],
"azure-devops": [
"1. Go to pipeline variables and add: codelogicAgentUuid, codelogicAgentPassword",
"2. Mark variables as secret",
"3. Ensure Docker task is available",
"4. Configure build triggers for main, develop, and feature branches",
"5. Test the pipeline with a sample build"
],
"gitlab": [
"1. Go to Settings → CI/CD → Variables",
"2. Add variables: AGENT_UUID, AGENT_PASSWORD",
"3. Mark as protected and masked",
"4. Ensure Docker-in-Docker is enabled",
"5. Configure branch rules for main, develop, and feature branches",
"6. Test the pipeline with a sample commit"
]
}
return instructions.get(ci_platform, [])
def generate_validation_checks(ci_platform):
"""Generate validation checks for each platform"""
checks = {
"jenkins": [
"Verify credentials are properly configured",
"Test Docker command manually",
"Check Jenkins agent has Docker access"
],
"github-actions": [
"Verify secrets are set correctly",
"Test workflow runs without errors",
"Check Docker is available in runner"
],
"azure-devops": [
"Verify variables are marked as secret",
"Test Docker task execution",
"Check pipeline permissions"
],
"gitlab": [
"Verify variables are protected and masked",
"Test Docker-in-Docker functionality",
"Check pipeline permissions"
]
}
return checks.get(ci_platform, [])
def format_target_files(target_files):
"""Format target files for display"""
if isinstance(target_files, list):
return "\n".join([f"- `{file}`" for file in target_files])
return f"- `{target_files}`"
def format_environment_variables(env_vars):
"""Format environment variables for display"""
return "\n".join([f"- `{key}`: {value}" for key, value in env_vars.items()])
def format_file_modifications(modifications):
"""Format file modifications for display"""
if not modifications:
return "No specific modifications required."
result = []
for mod in modifications.get('modifications', []):
result.append(f"**{mod['type'].replace('_', ' ').title()}**: {mod.get('location', 'N/A')}")
result.append(f"```\n{mod['content']}\n```")
return "\n".join(result)
def format_setup_instructions(instructions):
"""Format setup instructions for display"""
return "\n".join([f"{i+1}. {instruction}" for i, instruction in enumerate(instructions)])
def format_validation_checks(checks):
"""Format validation checks for display"""
return "\n".join([f"- {check}" for check in checks])
def generate_jenkins_config(agent_type, scan_path, application_name, server_host):
"""Generate Jenkins-specific configuration with AI modification prompts"""
# Technology-specific guidance based on agent type
tech_guidance = {
'dotnet': {
'build_command': 'dotnet build',
'test_command': 'dotnet test',
'env_info': 'dotnet --version && dotnet --info',
'artifacts': '*.dll, *.exe, *.so',
'test_results': 'TestResults/**/*.trx'
},
'java': {
'build_command': 'mvn clean compile',
'test_command': 'mvn test',
'env_info': 'java -version && mvn -version',
'artifacts': '*.jar, *.war, *.ear',
'test_results': 'target/surefire-reports/**/*.xml'
},
'javascript': {
'build_command': 'npm run build',
'test_command': 'npm test',
'env_info': 'node --version && npm --version',
'artifacts': 'dist/**, build/**, *.js',
'test_results': 'coverage/**, test-results/**'
}
}
tech_info = tech_guidance.get(agent_type, tech_guidance['java']) # Default to Java
return f"""
### 🎯 Jenkins File Modification Guide
**AI Prompt: "Modify the Jenkinsfile to add CodeLogic scanning for {agent_type.upper()} applications"**
**CRITICAL INSTRUCTIONS FOR AI:**
1. **CodeLogic scans MUST target BUILT ARTIFACTS, NOT source code**
- Scans should target compiled binaries (`.dll`, `.jar`, bundled `.js`), not source files (`.cs`, `.java`, `.ts`)
- Determine the artifact path from the build stage output (e.g., `installdir/`, `target/`, `dist/`, `build/`)
- The scan stage must run AFTER the build stage completes
2. **You MUST modify existing build and test stages** to capture logs to files
3. **Choose the appropriate log capture method based on the CI platform and OS**:
- For Linux/Unix agents: Use `tee`, redirection, or shell logging
- For Windows agents: Use PowerShell Tee-Object, redirection, or similar
- For cross-platform: Consider the agent's actual OS or provide platform-specific examples
4. **Do NOT just add new stages** - modify the existing ones to include log capture
5. **The log capture is REQUIRED, not optional** - CodeLogic needs these logs for analysis
6. **Use BEFORE/AFTER examples** to show exactly what to change
7. **NEVER pull console logs from Jenkins** - This is a security vulnerability (see Security Best Practices below)
## ⚠️ IMPORTANT: Scan Space Name Required
**Before implementing CodeLogic integration, you MUST determine the scan space name.**
### Check if scan space is already defined:
Look for existing scan space configuration in your CI file:
- Search for `SCAN_SPACE`, `scan_space_name`, or similar variables
- Look for CodeLogic-related environment variables
- Check if there are existing CodeLogic integration patterns
### If scan space is NOT explicitly defined:
**You MUST ask the user to provide a scan space name before proceeding.**
**Ask the user: "What scan space name should be used for CodeLogic scans? (e.g., 'Development', 'Production', 'Team-Frontend')"**
**Do NOT proceed with CodeLogic integration until you have a scan space name from the user.**
## 🔒 Security Best Practices for Jenkins Logging
### ⚠️ Security Risk: Console Log Pulling
**NEVER use `currentBuild.rawBuild.getLog()` or similar methods to pull console logs from Jenkins.**
**Why this is a security risk:**
- Console logs may contain sensitive information (credentials, tokens, secrets)
- Pulling console logs programmatically can expose data that should remain in Jenkins
- This violates security best practices for CI/CD pipelines
### ✅ Secure Alternative: Per-Step Logging
**Instead of pulling console logs, use per-step logging:**
1. **Each stage creates log files early** - Before running commands, create log files to capture output
2. **Each command uses `tee` (Linux) or `Tee-Object` (Windows)** - This both displays output AND saves it to a file
3. **Stash logs in post blocks** - Each stage's post block stashes its logs for later consolidation
4. **Consolidate in main post block** - The main post block unstashes and consolidates all log files
**Benefits of per-step logging:**
- ✅ **Secure** - No console log pulling, sensitive data stays in Jenkins
- ✅ **Reliable** - Logs are captured even if stages fail
- ✅ **Complete** - All output is captured, not just console logs
- ✅ **Organized** - Each stage has its own log file, making debugging easier
**Example pattern:**
```groovy
stage('Build') {{
steps {{
sh '''
mkdir -p logs
echo "=== Build Information ===" > logs/build.log
dotnet build 2>&1 | tee -a logs/build.log
'''
}}
post {{
always {{
stash includes: 'logs/**', name: 'build-logs', allowEmpty: true
}}
}}
}}
post {{
always {{
script {{
unstash 'build-logs'
// Consolidate all log files into codelogic-build.log
// Send to CodeLogic using send_build_info
}}
}}
}}
```
#### Step 1: Add Environment Variables
Add this to the `environment` block in your Jenkinsfile:
```groovy
environment {{
CODELOGIC_HOST = '{server_host}'
CODELOGIC_IMAGE = "${{CODELOGIC_HOST}}/codelogic_{agent_type}:latest"  // double quotes so Groovy interpolates CODELOGIC_HOST
AGENT_UUID = credentials('codelogic-agent-uuid')
AGENT_PASSWORD = credentials('codelogic-agent-password')
}}
```
#### Step 2: **REQUIRED** - Modify Existing Build Stages to Capture Logs
**CRITICAL**: You MUST modify your existing build stages to capture build and test logs. This is not optional - CodeLogic needs these logs for proper analysis.
**Find your existing build stages** (like `stage('Build')`, `stage('Unit Test')`, etc.) and modify them to capture logs:
**Platform-Specific Log Capture Guidance:**
- **Linux/Unix agents**: Use `tee` command (e.g., `{tech_info['build_command']} 2>&1 | tee logs/build.log`)
- **Windows agents**: Use PowerShell (e.g., `{tech_info['build_command']} 2>&1 | Tee-Object -FilePath logs/build.log`)
- **Cross-platform**: Detect the OS and use appropriate method, or use redirection (e.g., `{tech_info['build_command']} > logs/build.log 2>&1`)
```groovy
// BEFORE: Your existing build stage
stage('Build') {{
steps {{
sh '{tech_info['build_command']}'
}}
}}
// AFTER: Modified to capture logs (Linux/Unix example with tee)
stage('Build') {{
steps {{
sh '''
# Create logs directory FIRST - before any other operations
mkdir -p logs
# Create log file early to capture all output
echo "=== Build Information ===" > logs/build.log
echo "Build Time: $(date)" >> logs/build.log
echo "Branch: ${{BRANCH_NAME}}" >> logs/build.log
echo "Commit: ${{GIT_COMMIT}}" >> logs/build.log
echo "=== Build Output ===" >> logs/build.log
# Capture build output AND continue with normal build
# Use tee to both display output AND save to log file
# Choose appropriate log capture based on your CI agent OS:
# Linux/Unix: use tee command
{tech_info['build_command']} 2>&1 | tee -a logs/build.log
# Capture environment info for CodeLogic
echo "=== Environment Information ===" >> logs/build.log
{tech_info['env_info']} >> logs/build.log
'''
}}
post {{
always {{
// Stash logs before cleaning workspace (for CodeLogic integration)
stash includes: 'logs/**', name: 'build-logs', allowEmpty: true
}}
}}
}}
// ALTERNATIVE: For Windows PowerShell agents
stage('Build') {{
steps {{
powershell '''
# Create logs directory FIRST - before any other operations
New-Item -ItemType Directory -Force -Path logs
# Create log file early to capture all output
"=== Build Information ===" | Out-File logs/build.log
"Build Time: $(Get-Date)" | Out-File -Append logs/build.log
"Branch: ${{env:BRANCH_NAME}}" | Out-File -Append logs/build.log
"Commit: ${{env:GIT_COMMIT}}" | Out-File -Append logs/build.log
"=== Build Output ===" | Out-File -Append logs/build.log
# Capture build output using PowerShell Tee-Object
# Tee-Object both displays output AND saves to log file
{tech_info['build_command']} 2>&1 | Tee-Object -FilePath logs/build.log -Append
# Capture environment info for CodeLogic
"=== Environment Information ===" | Out-File -Append logs/build.log
{tech_info['env_info']} | Out-File -Append logs/build.log
'''
}}
post {{
always {{
// Stash logs before cleaning workspace (for CodeLogic integration)
stash includes: 'logs/**', name: 'build-logs', allowEmpty: true
}}
}}
}}
```
#### Step 3: **REQUIRED** - Modify Existing Test Stages to Capture Logs
**CRITICAL**: You MUST modify your existing test stages to capture test logs and results.
```groovy
// BEFORE: Your existing test stage
stage('Unit Test') {{
steps {{
sh '{tech_info['test_command']}'
}}
}}
// AFTER: Modified to capture logs (Linux/Unix example)
stage('Unit Test') {{
steps {{
sh '''
# Create logs directory if it doesn't exist
mkdir -p logs
# Create test log file early to capture all output
echo "=== Test Information ===" > logs/test.log
echo "Test Time: $(date)" >> logs/test.log
echo "Branch: ${{BRANCH_NAME}}" >> logs/test.log
echo "=== Test Output ===" >> logs/test.log
# Capture test output AND continue with normal tests
# Use tee to both display output AND save to log file
# Choose appropriate log capture based on your CI agent OS:
# Linux/Unix: use tee command
{tech_info['test_command']} 2>&1 | tee -a logs/test.log
'''
// Archive test results for CodeLogic (Jenkins step - runs outside the sh block)
archiveArtifacts artifacts: '{tech_info['test_results']}', allowEmptyArchive: true
}}
post {{
always {{
// Stash logs before cleaning workspace (for CodeLogic integration)
stash includes: 'logs/**', name: 'test-logs', allowEmpty: true
}}
}}
}}
// ALTERNATIVE: For Windows PowerShell agents
stage('Unit Test') {{
steps {{
powershell '''
# Create logs directory if it doesn't exist
New-Item -ItemType Directory -Force -Path logs
# Create test log file early to capture all output
"=== Test Information ===" | Out-File logs/test.log
"Test Time: $(Get-Date)" | Out-File -Append logs/test.log
"Branch: ${{env:BRANCH_NAME}}" | Out-File -Append logs/test.log
"=== Test Output ===" | Out-File -Append logs/test.log
# Capture test output using PowerShell Tee-Object
# Tee-Object both displays output AND saves to log file
{tech_info['test_command']} 2>&1 | Tee-Object -FilePath logs/test.log -Append
'''
// Archive test results for CodeLogic (Jenkins step - runs outside the powershell block)
archiveArtifacts artifacts: '{tech_info['test_results']}', allowEmptyArchive: true
}}
post {{
always {{
// Stash logs before cleaning workspace (for CodeLogic integration)
stash includes: 'logs/**', name: 'test-logs', allowEmpty: true
}}
}}
}}
```
**IMPORTANT**: Log capture methods (tee, Tee-Object, redirection) will:
- ✅ **Continue your normal build/test process** (output goes to console)
- ✅ **Save a copy to log files** (for CodeLogic analysis)
- ✅ **Not break your existing pipeline** (if build fails, pipeline still fails)
- ⚠️ **Choose the right method for your CI agent OS** (Linux/Unix vs Windows)
#### Example for .NET Projects:
If you have existing .NET build stages like this:
```groovy
stage('Build netCape') {{
steps {{
sh '''
dotnet restore
dotnet publish -c Release -p:Version=$MAVEN_PUBLISH_VERSION
'''
}}
}}
```
**MODIFY them to this (Linux/Unix example):**
```groovy
stage('Build netCape') {{
steps {{
sh '''
# Create logs directory FIRST - before any other operations
mkdir -p logs
# Create log file early to capture all output
echo "=== Build Information ===" > logs/build.log
echo "Build Time: $(date)" >> logs/build.log
echo "Branch: ${{BRANCH_NAME}}" >> logs/build.log
echo "Commit: ${{GIT_COMMIT}}" >> logs/build.log
echo "MAVEN_PUBLISH_VERSION: $MAVEN_PUBLISH_VERSION" >> logs/build.log
echo "=== Build Output ===" >> logs/build.log
dotnet restore 2>&1 | tee -a logs/build.log
# Use tee for Linux/Unix agents to both display AND save output
dotnet publish -c Release -p:Version=$MAVEN_PUBLISH_VERSION 2>&1 | tee -a logs/build.log
# Capture environment info for CodeLogic
echo "=== Environment Information ===" >> logs/build.log
dotnet --version >> logs/build.log
dotnet --info >> logs/build.log
'''
}}
post {{
always {{
// Stash logs before cleaning workspace (for CodeLogic integration)
stash includes: 'logs/**', name: 'build-logs', allowEmpty: true
}}
}}
}}
```
**ALTERNATIVE for Windows agents:**
```groovy
stage('Build netCape') {{
steps {{
powershell '''
# Create logs directory FIRST - before any other operations
New-Item -ItemType Directory -Force -Path logs
# Create log file early to capture all output
"=== Build Information ===" | Out-File logs/build.log
"Build Time: $(Get-Date)" | Out-File -Append logs/build.log
"Branch: ${{env:BRANCH_NAME}}" | Out-File -Append logs/build.log
"Commit: ${{env:GIT_COMMIT}}" | Out-File -Append logs/build.log
"MAVEN_PUBLISH_VERSION: ${{env:MAVEN_PUBLISH_VERSION}}" | Out-File -Append logs/build.log
"=== Build Output ===" | Out-File -Append logs/build.log
dotnet restore 2>&1 | Tee-Object -FilePath logs/build.log -Append
# Use Tee-Object for Windows PowerShell agents to both display AND save output
dotnet publish -c Release -p:Version=${{env:MAVEN_PUBLISH_VERSION}} 2>&1 | Tee-Object -FilePath logs/build.log -Append
# Capture environment info for CodeLogic
"=== Environment Information ===" | Out-File -Append logs/build.log
dotnet --version | Out-File -Append logs/build.log
dotnet --info | Out-File -Append logs/build.log
'''
}}
post {{
always {{
// Stash logs before cleaning workspace (for CodeLogic integration)
stash includes: 'logs/**', name: 'build-logs', allowEmpty: true
}}
}}
}}
```
#### Step 4: Add CodeLogic Build Info Collection Stage
Insert this stage after your build/test stages:
```groovy
stage('CodeLogic Build Info Collection') {{
when {{
anyOf {{
branch 'main'
branch 'develop'
branch 'feature/*'
}}
}}
steps {{
catchError(buildResult: 'SUCCESS', stageResult: 'FAILURE') {{
script {{
// Expose the current build result to the shell step (sh ''' blocks do not interpolate Groovy)
env.BUILD_STATUS = currentBuild.currentResult
}}
sh '''
mkdir -p logs
# Collect comprehensive build information
echo "=== Build Information ===" > logs/codelogic-build.log
echo "Job: ${{JOB_NAME}}" >> logs/codelogic-build.log
echo "Build: ${{BUILD_NUMBER}}" >> logs/codelogic-build.log
echo "Branch: ${{BRANCH_NAME}}" >> logs/codelogic-build.log
echo "Commit: ${{GIT_COMMIT}}" >> logs/codelogic-build.log
echo "" >> logs/codelogic-build.log
# Append build logs if they exist
[ -f logs/build.log ] && echo "=== Build Log ===" >> logs/codelogic-build.log && cat logs/build.log >> logs/codelogic-build.log
[ -f logs/test.log ] && echo "=== Test Log ===" >> logs/codelogic-build.log && cat logs/test.log >> logs/codelogic-build.log
# Send to CodeLogic
docker run --rm \\
--env CODELOGIC_HOST="${{CODELOGIC_HOST}}" \\
--env AGENT_UUID="${{AGENT_UUID}}" \\
--env AGENT_PASSWORD="${{AGENT_PASSWORD}}" \\
--volume "${{WORKSPACE}}:/scan" \\
--volume "${{WORKSPACE}}/logs:/log_file_path" \\
${{CODELOGIC_IMAGE}} send_build_info \\
--agent-uuid="${{AGENT_UUID}}" \\
--agent-password="${{AGENT_PASSWORD}}" \\
--server="${{CODELOGIC_HOST}}" \\
--job-name="${{JOB_NAME}}" \\
--build-number="${{BUILD_NUMBER}}" \\
--build-status="${{currentBuild.result}}" \\
--pipeline-system="Jenkins" \\
--log-file="/log_file_path/codelogic-build.log" \\
--log-lines=1000 \\
--timeout=60 \\
--verbose
'''
}}
}}
}}
```
#### Step 5: Add CodeLogic Scan Stage
Insert this stage after your build/test stages (scans must run AFTER artifacts are built):
**⚠️ CRITICAL: This stage must scan BUILT ARTIFACTS, not source code.**
**Determine the artifact path:**
- **.NET**: Look for `installdir/`, `bin/Release/`, or `publish/` directories created by your build
- **Java**: Look for `target/`, `build/libs/`, or `dist/` directories
- **JavaScript**: Look for `dist/`, `build/`, or `out/` directories
- The path should contain compiled binaries (`.dll`, `.jar`, `.js` bundles), NOT source files (`.cs`, `.java`, `.ts`); a quick local check is sketched below
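As a quick local sanity check before wiring the path into the pipeline (a sketch, assuming a Unix shell and that your build has already produced output; `CANDIDATE_PATH` is a placeholder):
```bash
# CANDIDATE_PATH is a placeholder - point it at the directory your build produces
CANDIDATE_PATH="bin/Release"
# Built artifacts (what CodeLogic should scan) - expect hits here
find "$CANDIDATE_PATH" -name "*.dll" -o -name "*.jar" | head -5
# Source files - if these show up, the path points at source code, not build output
find "$CANDIDATE_PATH" -name "*.cs" -o -name "*.java" -o -name "*.ts" | head -5
```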
```groovy
stage('CodeLogic Scan') {{
when {{
anyOf {{
branch 'main'
branch 'develop'
branch 'feature/*'
}}
}}
steps {{
catchError(buildResult: 'SUCCESS', stageResult: 'FAILURE') {{
script {{
// Determine scan space name based on branch
def scanSpaceName = env.SCAN_SPACE_NAME ?: "YOUR_SCAN_SPACE_NAME-${{BRANCH_NAME}}"
// Determine artifact path - THIS MUST BE BUILT ARTIFACTS, NOT SOURCE CODE
// Examples:
// .NET: "${{WORKSPACE}}/NetCape/installdir" or "${{WORKSPACE}}/bin/Release"
// Java: "${{WORKSPACE}}/target" or "${{WORKSPACE}}/build/libs"
// JavaScript: "${{WORKSPACE}}/dist" or "${{WORKSPACE}}/build"
def artifactPath = "{scan_path}" // Replace with your actual artifact directory
// Expose the value to the shell step below (sh ''' blocks do not interpolate Groovy variables)
env.ARTIFACT_PATH = artifactPath
echo "Starting CodeLogic {agent_type} scan..."
echo "Application: {application_name}"
echo "Scan Space: ${{scanSpaceName}}"
echo "Target Path: ${{artifactPath}} (BUILT ARTIFACTS)"
// Verify artifact path exists and contains built artifacts
sh '''
if [ ! -d "${{ARTIFACT_PATH}}" ]; then
echo "ERROR: Artifact path does not exist: ${{ARTIFACT_PATH}}"
echo "Make sure the build stage completed successfully and artifacts were created."
exit 1
fi
# Check if path contains source files (this is wrong!)
if find "${{ARTIFACT_PATH}}" -name "*.cs" -o -name "*.java" -o -name "*.ts" | head -1 | grep -q .; then
echo "WARNING: Artifact path appears to contain source code files!"
echo "CodeLogic should scan BUILT ARTIFACTS (binaries), not source code."
echo "Please verify the artifact path points to compiled output."
fi
'''
}}
sh '''
# Determine scan space name based on branch
if [[ "${{BRANCH_NAME}}" =~ ^(main|develop|master)$ ]]; then
SCAN_SPACE="YOUR_SCAN_SPACE_NAME-${{BRANCH_NAME}}"
else
SCAN_SPACE="YOUR_SCAN_SPACE_NAME-${{BRANCH_NAME}}-${{BUILD_NUMBER}}"
fi
# Use the artifact path (built artifacts, not source code)
ARTIFACT_PATH="{scan_path}" # Replace with your actual artifact directory
echo "Starting CodeLogic {agent_type} scan..."
echo "Application: {application_name}"
echo "Scan Space: $SCAN_SPACE"
echo "Target Path: $ARTIFACT_PATH (BUILT ARTIFACTS)"
docker run --pull always --rm --interactive \\
--env CODELOGIC_HOST="${{CODELOGIC_HOST}}" \\
--env AGENT_UUID="${{AGENT_UUID}}" \\
--env AGENT_PASSWORD="${{AGENT_PASSWORD}}" \\
--volume "${{WORKSPACE}}:/workspace" \\
${{CODELOGIC_IMAGE}} analyze \\
--application "{application_name}" \\
--path "/workspace/$ARTIFACT_PATH" \\
--scan-space-name "$SCAN_SPACE" \\
--rescan \\
--expunge-scan-sessions
'''
}}
}}
}}
```
### 🔧 Jenkins Setup Instructions
**AI Prompt: "Set up Jenkins credentials for CodeLogic"**
1. **Add Credentials**:
- Go to Jenkins → Manage Jenkins → Manage Credentials
- Add a Secret Text credential:
- ID: `codelogic-agent-uuid`
- Secret: Your CodeLogic agent UUID
- Add another Secret Text credential:
- ID: `codelogic-agent-password`
- Secret: Your CodeLogic agent password
2. **Install Required Plugins**:
- Docker Pipeline Plugin
- Credentials Plugin
### 📋 Complete Jenkinsfile Template
**AI Prompt: "Create a complete Jenkinsfile with CodeLogic integration"**
```groovy
pipeline {{
agent any
environment {{
CODELOGIC_HOST = '{server_host}'
AGENT_UUID = credentials('codelogic-agent-uuid')
AGENT_PASSWORD = credentials('codelogic-agent-password')
}}
stages {{
stage('Build') {{
steps {{
// Your existing build steps
echo 'Building application...'
}}
}}
stage('Test') {{
steps {{
// Your existing test steps
echo 'Running tests...'
}}
}}
stage('CodeLogic Scan') {{
steps {{
catchError(buildResult: 'SUCCESS', stageResult: 'FAILURE') {{
script {{
// ⚠️ CRITICAL: Determine the artifact path (BUILT ARTIFACTS, not source code)
// Examples:
// .NET: "${{WORKSPACE}}/NetCape/installdir" or "${{WORKSPACE}}/bin/Release"
// Java: "${{WORKSPACE}}/target" or "${{WORKSPACE}}/build/libs"
// JavaScript: "${{WORKSPACE}}/dist" or "${{WORKSPACE}}/build"
def artifactPath = "{scan_path}" // Replace with your actual artifact directory
echo "Scanning BUILT ARTIFACTS at: ${{artifactPath}}"
echo "NOT scanning source code - CodeLogic requires compiled binaries"
}}
sh '''
# Use artifact path (built artifacts, not source code)
ARTIFACT_PATH="{scan_path}" # Replace with your actual artifact directory
docker run --pull always --rm --interactive \\
--env CODELOGIC_HOST="${{CODELOGIC_HOST}}" \\
--env AGENT_UUID="${{AGENT_UUID}}" \\
--env AGENT_PASSWORD="${{AGENT_PASSWORD}}" \\
--volume "${{WORKSPACE}}:/workspace" \\
${{CODELOGIC_HOST}}/codelogic_{agent_type}:latest analyze \\
--application "{application_name}" \\
--path "/workspace/$ARTIFACT_PATH" \\
--scan-space-name "YOUR_SCAN_SPACE_NAME" \\
--rescan \\
--expunge-scan-sessions
'''
}}
}}
}}
stage('Send Build Info') {{
steps {{
sh '''
docker run --rm \\
--env CODELOGIC_HOST="${{CODELOGIC_HOST}}" \\
--env AGENT_UUID="${{AGENT_UUID}}" \\
--env AGENT_PASSWORD="${{AGENT_PASSWORD}}" \\
--volume "${{WORKSPACE}}/logs:/log_file_path" \\
${{CODELOGIC_HOST}}/codelogic_{agent_type}:latest send_build_info \\
--log-file="/log_file_path/build.log"
'''
}}
}}
}}
post {{
always {{
archiveArtifacts artifacts: 'logs/**', allowEmptyArchive: true
}}
success {{
echo 'Pipeline completed successfully'
}}
failure {{
echo 'Pipeline failed'
}}
}}
}}
```
"""
def generate_github_actions_config(agent_type, scan_path, application_name, server_host):
"""Generate GitHub Actions configuration with AI modification prompts"""
return f"""
### 🎯 GitHub Actions File Modification Guide
**AI Prompt: "Modify GitHub Actions workflow to add CodeLogic scanning"**
#### Step 1: Add Secrets to Repository
**AI Prompt: "Add CodeLogic secrets to GitHub repository"**
1. Go to repository Settings → Secrets and variables → Actions
2. Add these repository secrets (or add them from the command line, as sketched below):
- `CODELOGIC_HOST`: {server_host}
- `AGENT_UUID`: Your CodeLogic agent UUID
- `AGENT_PASSWORD`: Your CodeLogic agent password
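If you prefer the command line, the same secrets can be added with the GitHub CLI (a sketch, assuming `gh` is installed and authenticated against this repository; the UUID and password values are placeholders):
```bash
# Add the CodeLogic secrets from a terminal instead of the web UI
gh secret set CODELOGIC_HOST --body "{server_host}"
gh secret set AGENT_UUID --body "your-agent-uuid"
gh secret set AGENT_PASSWORD --body "your-agent-password"
```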
#### Step 2: Create or Modify Workflow File
**AI Prompt: "Create .github/workflows/codelogic-scan.yml with CodeLogic integration"**
Create `.github/workflows/codelogic-scan.yml`:
```yaml
name: CodeLogic Scan
on:
push:
branches: [ main, develop, feature/* ]
pull_request:
branches: [ main ]
jobs:
codelogic-scan:
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v4
with:
fetch-depth: 0
- name: CodeLogic Scan
run: |
# ⚠️ CRITICAL: CodeLogic scans must target BUILT ARTIFACTS, not source code
# Determine the artifact path from your build step output
# Examples:
# .NET: "bin/Release" or "publish"
# Java: "target" or "build/libs"
# JavaScript: "dist" or "build"
ARTIFACT_PATH="{scan_path}" # Replace with your actual artifact directory
echo "Scanning BUILT ARTIFACTS at: $ARTIFACT_PATH"
echo "NOT scanning source code - CodeLogic requires compiled binaries"
docker run --pull always --rm \\
--env CODELOGIC_HOST="${{{{ secrets.CODELOGIC_HOST }}}}" \\
--env AGENT_UUID="${{{{ secrets.AGENT_UUID }}}}" \\
--env AGENT_PASSWORD="${{{{ secrets.AGENT_PASSWORD }}}}" \\
--volume "${{{{ github.workspace }}}}:/workspace" \\
${{{{ secrets.CODELOGIC_HOST }}}}/codelogic_{agent_type}:latest analyze \\
--application "{application_name}" \\
--path "/workspace/$ARTIFACT_PATH" \\
--scan-space-name "YOUR_SCAN_SPACE_NAME" \\
--rescan \\
--expunge-scan-sessions
continue-on-error: true
- name: Send Build Info
if: always()
run: |
docker run \\
--pull always \\
--rm \\
--env CODELOGIC_HOST="${{{{ secrets.CODELOGIC_HOST }}}}" \\
--env AGENT_UUID="${{{{ secrets.AGENT_UUID }}}}" \\
--env AGENT_PASSWORD="${{{{ secrets.AGENT_PASSWORD }}}}" \\
--volume "${{{{ github.workspace }}}}:/scan" \\
--volume "${{{{ github.workspace }}}}/logs:/log_file_path" \\
${{{{ secrets.CODELOGIC_HOST }}}}/codelogic_{agent_type}:latest send_build_info \\
--agent-uuid="${{{{ secrets.AGENT_UUID }}}}" \\
--agent-password="${{{{ secrets.AGENT_PASSWORD }}}}" \\
--server="${{{{ secrets.CODELOGIC_HOST }}}}" \\
--job-name="${{{{ github.repository }}}}" \\
--build-number="${{{{ github.run_number }}}}" \\
--build-status="${{{{ job.status }}}}" \\
--pipeline-system="GitHub Actions" \\
--log-file="/log_file_path/build.log" \\
--log-lines=1000 \\
--timeout=60 \\
--verbose
continue-on-error: true
- name: Upload build logs
uses: actions/upload-artifact@v4
if: always()
with:
name: build-logs
path: logs/
retention-days: 30
```
#### Step 3: Modify Existing Workflow
**AI Prompt: "Add CodeLogic scanning to existing GitHub Actions workflow"**
If you have an existing workflow, add these steps:
```yaml
# Add to your existing workflow
- name: CodeLogic Scan
run: |
# Scan BUILT ARTIFACTS, not source code (see the artifact path guidance above)
ARTIFACT_PATH="{scan_path}" # Replace with your actual artifact directory
docker run --pull always --rm \\
--env CODELOGIC_HOST="${{{{ secrets.CODELOGIC_HOST }}}}" \\
--env AGENT_UUID="${{{{ secrets.AGENT_UUID }}}}" \\
--env AGENT_PASSWORD="${{{{ secrets.AGENT_PASSWORD }}}}" \\
--volume "${{{{ github.workspace }}}}:/workspace" \\
${{{{ secrets.CODELOGIC_HOST }}}}/codelogic_{agent_type}:latest analyze \\
--application "{application_name}" \\
--path "/workspace/$ARTIFACT_PATH" \\
--scan-space-name "YOUR_SCAN_SPACE_NAME" \\
--rescan \\
--expunge-scan-sessions
continue-on-error: true
```
### 🔧 GitHub Actions Setup Instructions
**AI Prompt: "Set up GitHub Actions for CodeLogic integration"**
1. **Repository Secrets**:
- Go to repository Settings → Secrets and variables → Actions
- Add repository secrets (not environment secrets)
- Secret values are encrypted and automatically masked in workflow logs
2. **Workflow Permissions**:
- Ensure workflow has necessary permissions
- Add `permissions: contents: read` if needed
3. **Docker Support**:
- GitHub Actions runners include Docker by default
- No additional setup required
### 📋 Complete Workflow Template
**AI Prompt: "Create a complete GitHub Actions workflow with CodeLogic integration"**
```yaml
name: CI/CD Pipeline with CodeLogic
on:
push:
branches: [ main, develop, feature/* ]
pull_request:
branches: [ main ]
jobs:
build-and-test:
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v4
- name: Setup .NET
uses: actions/setup-dotnet@v3
with:
dotnet-version: '6.0'
- name: Restore dependencies
run: mkdir -p logs && dotnet restore 2>&1 | tee -a logs/build.log
- name: Build
run: dotnet build --no-restore 2>&1 | tee -a logs/build.log
- name: Test
run: dotnet test --no-build --verbosity normal 2>&1 | tee -a logs/build.log
- name: CodeLogic Scan
run: |
# ⚠️ CRITICAL: CodeLogic scans must target BUILT ARTIFACTS, not source code
# Determine the artifact path from your build step output
# Examples:
# .NET: "bin/Release" or "publish"
# Java: "target" or "build/libs"
# JavaScript: "dist" or "build"
ARTIFACT_PATH="{scan_path}" # Replace with your actual artifact directory
echo "Scanning BUILT ARTIFACTS at: $ARTIFACT_PATH"
echo "NOT scanning source code - CodeLogic requires compiled binaries"
docker run --pull always --rm \\
--env CODELOGIC_HOST="${{{{ secrets.CODELOGIC_HOST }}}}" \\
--env AGENT_UUID="${{{{ secrets.AGENT_UUID }}}}" \\
--env AGENT_PASSWORD="${{{{ secrets.AGENT_PASSWORD }}}}" \\
--volume "${{{{ github.workspace }}}}:/workspace" \\
${{{{ secrets.CODELOGIC_HOST }}}}/codelogic_{agent_type}:latest analyze \\
--application "{application_name}" \\
--path "/workspace/$ARTIFACT_PATH" \\
--scan-space-name "YOUR_SCAN_SPACE_NAME" \\
--rescan \\
--expunge-scan-sessions
continue-on-error: true
- name: Send Build Info
if: github.ref == 'refs/heads/main' || github.ref == 'refs/heads/develop'
run: |
docker run \\
--pull always \\
--rm \\
--env CODELOGIC_HOST="${{{{ secrets.CODELOGIC_HOST }}}}" \\
--env AGENT_UUID="${{{{ secrets.AGENT_UUID }}}}" \\
--env AGENT_PASSWORD="${{{{ secrets.AGENT_PASSWORD }}}}" \\
--volume "${{{{ github.workspace }}}}:/scan" \\
--volume "${{{{ github.workspace }}}}/logs:/log_file_path" \\
${{{{ secrets.CODELOGIC_HOST }}}}/codelogic_{agent_type}:latest send_build_info \\
--agent-uuid="${{{{ secrets.AGENT_UUID }}}}" \\
--agent-password="${{{{ secrets.AGENT_PASSWORD }}}}" \\
--server="${{{{ secrets.CODELOGIC_HOST }}}}" \\
--job-name="${{{{ github.repository }}}}" \\
--build-number="${{{{ github.run_number }}}}" \\
--build-status="${{{{ job.status }}}}" \\
--pipeline-system="GitHub Actions" \\
--log-file="/log_file_path/build.log" \\
--log-lines=1000 \\
--timeout=60 \\
--verbose
continue-on-error: true
- name: Upload build logs
uses: actions/upload-artifact@v4
if: always()
with:
name: build-logs
path: logs/
retention-days: 30
```
"""
def generate_azure_devops_config(agent_type, scan_path, application_name, server_host):
"""Generate Azure DevOps configuration"""
return f"""
### Azure DevOps Pipeline
Create `azure-pipelines.yml`:
```yaml
trigger:
- main
- develop
pool:
vmImage: 'ubuntu-latest'
variables:
codelogicHost: '{server_host}'
agentUuid: $(codelogicAgentUuid)
agentPassword: $(codelogicAgentPassword)
stages:
- stage: CodeLogicScan
displayName: 'CodeLogic Scan'
jobs:
- job: Scan
displayName: 'Run CodeLogic Scan'
steps:
# ⚠️ CRITICAL: CodeLogic scans must target BUILT ARTIFACTS, not source code.
# Determine the artifact path from your build step output. Examples:
#   .NET: "bin/Release" or "publish"
#   Java: "target" or "build/libs"
#   JavaScript: "dist" or "build"
- task: Docker@2
displayName: 'CodeLogic Scan'
inputs:
command: 'run'
arguments: |
--pull always --rm \\
--env CODELOGIC_HOST="$(codelogicHost)" \\
--env AGENT_UUID="$(agentUuid)" \\
--env AGENT_PASSWORD="$(agentPassword)" \\
--volume "$(Build.SourcesDirectory):/workspace" \\
$(codelogicHost)/codelogic_{agent_type}:latest analyze \\
--application "{application_name}" \\
--path "/workspace/{scan_path}" \\
--scan-space-name "YOUR_SCAN_SPACE_NAME" \\
--rescan \\
--expunge-scan-sessions
continueOnError: true
- task: Docker@2
displayName: 'Send Build Info'
condition: always()
inputs:
command: 'run'
arguments: |
--pull always \\
--rm \\
--env CODELOGIC_HOST="$(codelogicHost)" \\
--env AGENT_UUID="$(agentUuid)" \\
--env AGENT_PASSWORD="$(agentPassword)" \\
--volume "$(Build.SourcesDirectory):/scan" \\
--volume "$(Build.SourcesDirectory)/logs:/log_file_path" \\
$(codelogicHost)/codelogic_{agent_type}:latest send_build_info \\
--agent-uuid="$(agentUuid)" \\
--agent-password="$(agentPassword)" \\
--server="$(codelogicHost)" \\
--job-name="$(Build.DefinitionName)" \\
--build-number="$(Build.BuildNumber)" \\
--build-status="$(Agent.JobStatus)" \\
--pipeline-system="Azure DevOps" \\
--log-file="/log_file_path/build.log" \\
--log-lines=1000 \\
--timeout=60 \\
--verbose
continueOnError: true
- task: PublishBuildArtifacts@1
displayName: 'Publish Build Logs'
inputs:
pathToPublish: 'logs'
artifactName: 'build-logs'
condition: always()
```
### Azure DevOps Variables
Add these variables to your pipeline (a CLI alternative is sketched below):
- `codelogicAgentUuid`: Your agent UUID (mark as secret)
- `codelogicAgentPassword`: Your agent password (mark as secret)
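If you prefer the command line, the same variables can be created with the Azure DevOps CLI (a sketch, assuming the `azure-devops` extension for `az` is installed and `az devops configure` points at your organization and project; the pipeline name and values are placeholders):
```bash
# Create secret pipeline variables from a terminal instead of the web UI
az pipelines variable create --pipeline-name "YOUR_PIPELINE_NAME" --name codelogicAgentUuid --secret true --value "your-agent-uuid"
az pipelines variable create --pipeline-name "YOUR_PIPELINE_NAME" --name codelogicAgentPassword --secret true --value "your-agent-password"
```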
"""
def generate_gitlab_config(agent_type, scan_path, application_name, server_host):
"""Generate GitLab CI configuration"""
return f"""
### GitLab CI Configuration
Create `.gitlab-ci.yml`:
```yaml
stages:
- scan
- build-info
variables:
CODELOGIC_HOST: "{server_host}"
DOCKER_DRIVER: overlay2
codelogic_scan:
stage: scan
image: docker:latest
services:
- docker:dind
before_script:
- docker info
script:
- |
# ⚠️ CRITICAL: CodeLogic scans must target BUILT ARTIFACTS, not source code
# Determine the artifact path from your build step output
# Examples:
# .NET: "bin/Release" or "publish"
# Java: "target" or "build/libs"
# JavaScript: "dist" or "build"
ARTIFACT_PATH="{scan_path}" # Replace with your actual artifact directory
echo "Scanning BUILT ARTIFACTS at: $ARTIFACT_PATH"
echo "NOT scanning source code - CodeLogic requires compiled binaries"
docker run --pull always --rm \\
--env CODELOGIC_HOST="$CODELOGIC_HOST" \\
--env AGENT_UUID="$AGENT_UUID" \\
--env AGENT_PASSWORD="$AGENT_PASSWORD" \\
--volume "$CI_PROJECT_DIR:/workspace" \\
$CODELOGIC_HOST/codelogic_{agent_type}:latest analyze \\
--application "{application_name}" \\
--path "/workspace/$ARTIFACT_PATH" \\
--scan-space-name "YOUR_SCAN_SPACE_NAME" \\
--rescan \\
--expunge-scan-sessions
rules:
- if: $CI_COMMIT_BRANCH == "main"
- if: $CI_COMMIT_BRANCH == "develop"
- if: $CI_COMMIT_BRANCH =~ /^feature\\/.*$/
allow_failure: true
send_build_info:
stage: build-info
image: docker:latest
services:
- docker:dind
script:
- |
docker run \\
--pull always \\
--rm \\
--env CODELOGIC_HOST="$CODELOGIC_HOST" \\
--env AGENT_UUID="$AGENT_UUID" \\
--env AGENT_PASSWORD="$AGENT_PASSWORD" \\
--volume "$CI_PROJECT_DIR:/scan" \\
--volume "$CI_PROJECT_DIR/logs:/log_file_path" \\
$CODELOGIC_HOST/codelogic_{agent_type}:latest send_build_info \\
--agent-uuid="$AGENT_UUID" \\
--agent-password="$AGENT_PASSWORD" \\
--server="$CODELOGIC_HOST" \\
--job-name="$CI_PROJECT_NAME" \\
--build-number="$CI_PIPELINE_ID" \\
--build-status="$CI_JOB_STATUS" \\
--pipeline-system="GitLab CI/CD" \\
--log-file="/log_file_path/build.log" \\
--log-lines=1000 \\
--timeout=60 \\
--verbose
rules:
- if: $CI_COMMIT_BRANCH == "main"
- if: $CI_COMMIT_BRANCH == "develop"
allow_failure: true
```
### GitLab Variables
Add these variables to your project (an API-based alternative is sketched below):
- `AGENT_UUID`: Your agent UUID (mark as protected and masked)
- `AGENT_PASSWORD`: Your agent password (mark as protected and masked)
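If you prefer the command line, the same variables can be created through the GitLab project variables API (a sketch, assuming a personal access token with `api` scope; the host, project ID, token, and values are placeholders):
```bash
# Create protected, masked CI/CD variables via the GitLab API
curl --request POST --header "PRIVATE-TOKEN: YOUR_TOKEN" \\
  "https://YOUR_GITLAB_HOST/api/v4/projects/PROJECT_ID/variables" \\
  --form "key=AGENT_UUID" --form "value=your-agent-uuid" \\
  --form "protected=true" --form "masked=true"
curl --request POST --header "PRIVATE-TOKEN: YOUR_TOKEN" \\
  "https://YOUR_GITLAB_HOST/api/v4/projects/PROJECT_ID/variables" \\
  --form "key=AGENT_PASSWORD" --form "value=your-agent-password" \\
  --form "protected=true" --form "masked=true"
```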
"""
def generate_generic_config(agent_type, scan_path, application_name, server_host):
"""Generate generic configuration for any CI/CD platform"""
return f"""
### Generic CI/CD Configuration
For any CI/CD platform, use these environment variables:
```bash
export CODELOGIC_HOST="{server_host}"
export AGENT_UUID="your-agent-uuid"
export AGENT_PASSWORD="your-agent-password"
```
### Shell Script Example
Create `codelogic-scan.sh`:
```bash
#!/bin/bash
set -e
# Configuration
CODELOGIC_HOST="${{CODELOGIC_HOST:-{server_host}}}"
AGENT_UUID="${{AGENT_UUID}}"
AGENT_PASSWORD="${{AGENT_PASSWORD}}"
SCAN_PATH="${{SCAN_PATH:-{scan_path}}}"
APPLICATION_NAME="${{APPLICATION_NAME:-{application_name}}}"
SCAN_SPACE="${{SCAN_SPACE:-YOUR_SCAN_SPACE_NAME}}"
# Run CodeLogic scan
echo "Starting CodeLogic {agent_type} scan..."
docker run --pull always --rm --interactive \\
--env CODELOGIC_HOST="$CODELOGIC_HOST" \\
--env AGENT_UUID="$AGENT_UUID" \\
--env AGENT_PASSWORD="$AGENT_PASSWORD" \\
--volume "$SCAN_PATH:/scan" \\
$CODELOGIC_HOST/codelogic_{agent_type}:latest analyze \\
--application "$APPLICATION_NAME" \\
--path /scan \\
--scan-space-name "$SCAN_SPACE" \\
--rescan \\
--expunge-scan-sessions
echo "CodeLogic scan completed successfully"
"""