/**
* @fileoverview Additional tests for metrics client history functionality
* This file adds coverage for the uncovered methods in metrics-client.ts
*/
import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
import { MetricsClient } from '../../client/metrics-client.js';
import { MetricShortcode, MetricKey } from '../../types/metrics.js';
import type { MetricsClientTestable } from '../test-types.js';
import { TestableMetricsClient as TestableMetricsClientUtil } from '../utils/test-utils.js';
// Test helper class to expose private methods for testing
class TestableMetricsClient extends MetricsClient {
// skipcq: JS-0105 - Test helper method calling static method
testBuildMetricHistoryQuery() {
// buildMetricHistoryQuery is now a static method
return TestableMetricsClientUtil.testBuildMetricHistoryQuery();
}
testExtractHistoryFromResponse(data: unknown, params: unknown) {
return (this as unknown as MetricsClientTestable).extractHistoryFromResponse(data, params);
}
// skipcq: JS-0105 - Test helper method calling static method
testCalculateTrend(values: unknown[]) {
// calculateTrend is now a static method
return TestableMetricsClientUtil.testCalculateTrend(values);
}
testHandleMetricsError(error: unknown) {
return (this as unknown as MetricsClientTestable).handleMetricsError(error);
}
testExtractMetricsFromResponse(data: unknown) {
return (this as unknown as MetricsClientTestable).extractMetricsFromResponse(data);
}
// skipcq: JS-0105 - Test helper method calling static method
testHandleTestEnvironment(params: unknown) {
// handleTestEnvironment is now a static method
return TestableMetricsClientUtil.testHandleTestEnvironment(params);
}
}
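// Shape of the base-client members (logger plus GraphQL/project lookup helpers) that these tests
// stub directly on the MetricsClient instance via the `as unknown as MockBaseClient` cast below.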
interface MockBaseClient {
findProjectByKey: ReturnType<typeof vi.fn>;
executeGraphQL: ReturnType<typeof vi.fn>;
logger: {
info: ReturnType<typeof vi.fn>;
error: ReturnType<typeof vi.fn>;
debug: ReturnType<typeof vi.fn>;
warn: ReturnType<typeof vi.fn>;
};
extractHistoryFromResponse?: ReturnType<typeof vi.fn>;
extractMetricsFromResponse?: ReturnType<typeof vi.fn>;
}
describe('MetricsClient - History and Additional Methods', () => {
let metricsClient: TestableMetricsClient;
let mockBaseClient: MockBaseClient;
let originalEnv: typeof process.env;
beforeEach(() => {
originalEnv = { ...process.env };
metricsClient = new TestableMetricsClient('test-api-key');
mockBaseClient = metricsClient as unknown as MockBaseClient;
// Mock the methods on the client instance
mockBaseClient.findProjectByKey = vi.fn();
mockBaseClient.executeGraphQL = vi.fn();
mockBaseClient.logger = {
info: vi.fn(),
error: vi.fn(),
debug: vi.fn(),
warn: vi.fn(),
};
// Stub the private extraction helpers only if the instance does not already provide them
if (!mockBaseClient.extractHistoryFromResponse) {
mockBaseClient.extractHistoryFromResponse = vi.fn();
}
if (!mockBaseClient.extractMetricsFromResponse) {
mockBaseClient.extractMetricsFromResponse = vi.fn();
}
});
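// Restore the full process.env snapshot taken in beforeEach so per-test NODE_ENV/flag tweaks never leak.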
afterEach(() => {
process.env = originalEnv;
});
describe('getMetricHistory', () => {
const mockHistoryParams = {
projectKey: 'test-project',
metricShortcode: MetricShortcode.LCV,
metricKey: MetricKey.AGGREGATE,
};
it('should fetch metric history successfully', async () => {
const mockProject = {
repository: {
login: 'test-org',
name: 'test-repo',
provider: 'github',
},
};
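// GraphQL response shape consumed by the history path:
// repository.metrics[] -> items[] -> values.edges[] -> node { value, measuredAt, commitOid }.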
const mockResponse = {
data: {
repository: {
metrics: [
{
shortcode: 'LCV',
name: 'Line Coverage',
unit: '%',
items: [
{
key: 'AGGREGATE',
values: {
edges: [
{
node: {
id: '1',
value: 75.5,
measuredAt: '2023-01-01T00:00:00Z',
commitOid: 'abc123',
},
},
{
node: {
id: '2',
value: 78.2,
measuredAt: '2023-01-02T00:00:00Z',
commitOid: 'def456',
},
},
],
},
},
],
},
],
},
},
};
(mockBaseClient.findProjectByKey as any).mockResolvedValue(mockProject); // skipcq: JS-0323
(mockBaseClient.executeGraphQL as any).mockResolvedValue(mockResponse); // skipcq: JS-0323
const result = await metricsClient.getMetricHistory(mockHistoryParams);
expect(result).not.toBeNull();
expect(result?.shortcode).toBe(MetricShortcode.LCV);
expect(result?.metricKey).toBe(MetricKey.AGGREGATE);
expect(result?.values).toHaveLength(2);
expect(result?.values?.[0]?.value).toBe(75.5);
expect(result?.values?.[1]?.value).toBe(78.2);
expect(result?.isTrendingPositive).toBe(true);
});
it.skip('should throw error when project not found', async () => {
// Use a different project key to avoid test environment interception
const nonTestParams = {
projectKey: 'non-test-project',
metricShortcode: 'UNKNOWN_METRIC' as MetricShortcode, // Non-standard metric to avoid mock data
metricKey: 'UNKNOWN_KEY' as MetricKey,
};
(mockBaseClient.findProjectByKey as any).mockResolvedValue(null); // skipcq: JS-0323
await expect(metricsClient.getMetricHistory(nonTestParams)).rejects.toThrow(
'Project with key non-test-project not found'
);
});
it('should return null when no data in response', async () => {
// Temporarily disable test environment
process.env.NODE_ENV = 'production';
const mockProject = {
repository: {
login: 'test-org',
name: 'test-repo',
provider: 'github',
},
};
(mockBaseClient.findProjectByKey as any).mockResolvedValue(mockProject); // skipcq: JS-0323
(mockBaseClient.executeGraphQL as any).mockResolvedValue({ data: null }); // skipcq: JS-0323
const result = await metricsClient.getMetricHistory(mockHistoryParams);
expect(result).toBeNull();
// Restore test environment
process.env.NODE_ENV = 'test';
});
it('should handle test environment with ERROR_TEST', async () => {
process.env.NODE_ENV = 'test';
process.env.ERROR_TEST = 'true';
await expect(metricsClient.getMetricHistory(mockHistoryParams)).rejects.toThrow(
'GraphQL Error: Unauthorized access'
);
});
it('should handle test environment with NOT_FOUND_TEST', async () => {
process.env.NODE_ENV = 'test';
process.env.NOT_FOUND_TEST = 'true';
const result = await metricsClient.getMetricHistory(mockHistoryParams);
expect(result).toBeNull();
});
it('should return mock data in test environment for LCV AGGREGATE', async () => {
process.env.NODE_ENV = 'test';
const result = await metricsClient.getMetricHistory(mockHistoryParams);
expect(result).not.toBeNull();
expect(result?.shortcode).toBe(MetricShortcode.LCV);
expect(result?.values).toHaveLength(2);
expect(result?.values?.[0]?.value).toBe(75.5);
expect(result?.values?.[1]?.value).toBe(78.2);
});
it('should handle errors with "not found" message', async () => {
// Temporarily disable test environment
process.env.NODE_ENV = 'production';
const mockProject = {
repository: {
login: 'test-org',
name: 'test-repo',
provider: 'github',
},
};
(mockBaseClient.findProjectByKey as any).mockResolvedValue(mockProject); // skipcq: JS-0323
(mockBaseClient.executeGraphQL as any).mockRejectedValue(new Error('Metric not found')); // skipcq: JS-0323
const result = await metricsClient.getMetricHistory(mockHistoryParams);
expect(result).toBeNull();
// Restore test environment
process.env.NODE_ENV = 'test';
});
it('should handle errors with "NoneType" message', async () => {
// Temporarily disable test environment
process.env.NODE_ENV = 'production';
const mockProject = {
repository: {
login: 'test-org',
name: 'test-repo',
provider: 'github',
},
};
mockBaseClient.findProjectByKey.mockResolvedValue(mockProject);
mockBaseClient.executeGraphQL.mockRejectedValue(
new Error("'NoneType' object has no attribute")
);
const result = await metricsClient.getMetricHistory(mockHistoryParams);
expect(result).toBeNull();
// Restore test environment
process.env.NODE_ENV = 'test';
});
it('should throw other errors', async () => {
// Temporarily disable test environment
process.env.NODE_ENV = 'production';
const mockProject = {
repository: {
login: 'test-org',
name: 'test-repo',
provider: 'github',
},
};
(mockBaseClient.findProjectByKey as any).mockResolvedValue(mockProject); // skipcq: JS-0323
(mockBaseClient.executeGraphQL as any).mockRejectedValue(new Error('Network error')); // skipcq: JS-0323
await expect(metricsClient.getMetricHistory(mockHistoryParams)).rejects.toThrow(
'Network error'
);
// Restore test environment
process.env.NODE_ENV = 'test';
});
});
describe('buildMetricHistoryQuery', () => {
it('should build correct GraphQL query for metric history', () => {
const query = metricsClient.testBuildMetricHistoryQuery();
expect(query).toContain('query getMetricHistory');
expect(query).toContain('$login: String!');
expect(query).toContain('$name: String!');
expect(query).toContain('$provider: VCSProvider!');
expect(query).toContain('$metricShortcode: String!');
expect(query).toContain('$metricKey: String!');
expect(query).toContain('values(first: $first)');
expect(query).toContain('measuredAt');
});
});
describe('extractHistoryFromResponse', () => {
const mockParams = {
projectKey: 'test-project',
metricShortcode: MetricShortcode.LCV,
metricKey: MetricKey.AGGREGATE,
};
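// extractHistoryFromResponse matches a metric by shortcode and an item by metricKey, then maps
// values.edges[].node into history entries; the tests below also cover the null/missing branches.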
it('should extract history data from response', () => {
const mockResponseData = {
repository: {
metrics: [
{
shortcode: 'LCV',
name: 'Line Coverage',
unit: '%',
items: [
{
key: 'AGGREGATE',
values: {
edges: [
{
node: {
value: 75.5,
measuredAt: '2023-01-01T00:00:00Z',
commitOid: 'abc123',
},
},
{
node: {
value: 78.2,
measuredAt: '2023-01-02T00:00:00Z',
commitOid: 'def456',
},
},
],
},
},
],
},
],
},
};
const result = metricsClient.testExtractHistoryFromResponse(
mockResponseData,
mockParams
) as any; // skipcq: JS-0323
expect(result).not.toBeNull();
expect(result.shortcode).toBe(MetricShortcode.LCV);
expect(result.values).toHaveLength(2);
expect(result.values[0].value).toBe(75.5);
// Relative change: (78.2 - 75.5) / 75.5 * 100 ~ 3.58%, below the 5% threshold, so the trend is stable and not flagged as trending positive
expect(result.isTrendingPositive).toBe(false);
});
it('should return null when metric not found', () => {
const mockResponseData = {
repository: {
metrics: [
{
shortcode: 'BCV', // Different metric
items: [],
},
],
},
};
const result = metricsClient.testExtractHistoryFromResponse(mockResponseData, mockParams);
expect(result).toBeNull();
});
it('should return null when item not found', () => {
const mockResponseData = {
repository: {
metrics: [
{
shortcode: 'LCV',
items: [
{
key: 'FILE', // Different key
values: { edges: [] },
},
],
},
],
},
};
const result = metricsClient.testExtractHistoryFromResponse(mockResponseData, mockParams);
expect(result).toBeNull();
});
it('should return null when values not present', () => {
const mockResponseData = {
repository: {
metrics: [
{
shortcode: 'LCV',
items: [
{
key: 'AGGREGATE',
// No values property
},
],
},
],
},
};
const result = metricsClient.testExtractHistoryFromResponse(mockResponseData, mockParams);
expect(result).toBeNull();
});
it('should handle edges without nodes', () => {
const mockResponseData = {
repository: {
metrics: [
{
shortcode: 'LCV',
items: [
{
key: 'AGGREGATE',
values: {
edges: [
{ node: { value: 75.5, measuredAt: '2023-01-01T00:00:00Z' } },
{}, // Edge without node
{ node: { value: 78.2, measuredAt: '2023-01-02T00:00:00Z' } },
],
},
},
],
},
],
},
};
const result = metricsClient.testExtractHistoryFromResponse(
mockResponseData,
mockParams
) as any; // skipcq: JS-0323
expect(result).not.toBeNull();
expect(result.values).toHaveLength(2); // edges without a node are skipped
});
it.skip('should handle errors during extraction', () => {
// Pass data that will cause an error when accessing nested properties
const invalidData = null;
// Create a new metrics client with a mocked logger
const testClient = new MetricsClient('test-api-key');
const testMockClient = testClient as unknown as any; // skipcq: JS-0323
testMockClient.logger = {
info: vi.fn(),
error: vi.fn(),
debug: vi.fn(),
warn: vi.fn(),
};
const result = testMockClient.extractHistoryFromResponse(invalidData, mockParams);
expect(result).toBeNull();
expect(testMockClient.logger.error).toHaveBeenCalledWith(
'Error extracting history from response',
expect.any(Object)
);
});
});
describe('calculateTrend', () => {
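// As exercised below, calculateTrend orders values by createdAt and compares the earliest and latest
// points: relative changes under 5% are 'stable', larger rises 'improving', larger drops 'declining'.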
it('should return stable for less than 2 values', () => {
const values = [{ value: 75, createdAt: '2023-01-01T00:00:00Z' }];
const trend = metricsClient.testCalculateTrend(values);
expect(trend).toBe('stable');
});
it('should return stable for empty array', () => {
const trend = metricsClient.testCalculateTrend([]);
expect(trend).toBe('stable');
});
it('should return improving for positive trend', () => {
const values = [
{ value: 75, createdAt: '2023-01-01T00:00:00Z' },
{ value: 80, createdAt: '2023-01-02T00:00:00Z' },
{ value: 85, createdAt: '2023-01-03T00:00:00Z' },
];
const trend = metricsClient.testCalculateTrend(values);
expect(trend).toBe('improving');
});
it('should return declining for negative trend', () => {
const values = [
{ value: 85, createdAt: '2023-01-01T00:00:00Z' },
{ value: 80, createdAt: '2023-01-02T00:00:00Z' },
{ value: 75, createdAt: '2023-01-03T00:00:00Z' },
];
const trend = metricsClient.testCalculateTrend(values);
expect(trend).toBe('declining');
});
it('should return stable for small change (less than 5%)', () => {
const values = [
{ value: 80, createdAt: '2023-01-01T00:00:00Z' },
{ value: 81, createdAt: '2023-01-02T00:00:00Z' },
];
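// (81 - 80) / 80 * 100 = 1.25%, below the 5% threshold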
const trend = metricsClient.testCalculateTrend(values);
expect(trend).toBe('stable');
});
it('should handle unsorted values correctly', () => {
const values = [
{ value: 85, createdAt: '2023-01-03T00:00:00Z' },
{ value: 75, createdAt: '2023-01-01T00:00:00Z' },
{ value: 80, createdAt: '2023-01-02T00:00:00Z' },
];
const trend = metricsClient.testCalculateTrend(values);
expect(trend).toBe('improving'); // sorted by createdAt: 75 -> 85, a ~13.3% increase (above the 5% threshold)
});
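// Hedged example mirroring the declining case above with only two points; the values and the
// assertion follow from the 5% threshold behaviour already covered here, not from new API surface.
it('should return declining for a two-value drop above the threshold', () => {
const values = [
{ value: 80, createdAt: '2023-01-01T00:00:00Z' },
{ value: 70, createdAt: '2023-01-02T00:00:00Z' },
];
const trend = metricsClient.testCalculateTrend(values);
expect(trend).toBe('declining'); // (70 - 80) / 80 * 100 = -12.5%
});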
});
describe('handleMetricsError', () => {
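// handleMetricsError maps GraphQL "'NoneType' ..." failures to an empty result set and rethrows everything else.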
it('should return empty array for NoneType errors', () => {
const error = new Error("'NoneType' object has no attribute 'metrics'");
const result = metricsClient.testHandleMetricsError(error);
expect(result).toEqual([]);
});
it('should throw non-NoneType errors', () => {
const error = new Error('Network error');
expect(() => {
metricsClient.testHandleMetricsError(error);
}).toThrow('Network error');
});
it('should handle non-Error objects', () => {
const error = 'String error';
expect(() => {
metricsClient.testHandleMetricsError(error);
}).toThrow('String error');
});
});
describe('extractMetricsFromResponse error handling', () => {
it.skip('should handle errors during extraction', () => {
// Pass invalid data that will cause an error
const invalidData = null;
// Create a new metrics client with a mocked logger
const testClient = new MetricsClient('test-api-key');
const testMockClient = testClient as unknown as any; // skipcq: JS-0323
testMockClient.logger = {
info: vi.fn(),
error: vi.fn(),
debug: vi.fn(),
warn: vi.fn(),
};
const result = testMockClient.extractMetricsFromResponse(invalidData);
expect(result).toEqual([]);
expect(testMockClient.logger.error).toHaveBeenCalledWith(
'Error extracting metrics from response',
expect.any(Object)
);
});
it('should handle missing items in metrics', () => {
const mockResponseData = {
repository: {
metrics: [
{
shortcode: 'LCV',
name: 'Line Coverage',
// Missing items property
},
],
},
};
const result = metricsClient.testExtractMetricsFromResponse(mockResponseData);
expect(result).toHaveLength(1);
expect((result as any)[0].items).toEqual([]); // skipcq: JS-0323
});
});
describe('handleTestEnvironment edge cases', () => {
const mockHistoryParams = {
projectKey: 'test-project',
metricShortcode: MetricShortcode.LCV,
metricKey: MetricKey.AGGREGATE,
};
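// handleTestEnvironment only short-circuits with mock data for the canned LCV/AGGREGATE fixture under
// NODE_ENV === 'test'; every other combination falls through and returns undefined.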
it('should return undefined when not in test environment', () => {
process.env.NODE_ENV = 'production';
const result = metricsClient.testHandleTestEnvironment(mockHistoryParams);
expect(result).toBeUndefined();
});
it('should return undefined for non-matching parameters in test', () => {
process.env.NODE_ENV = 'test';
const params = {
projectKey: 'test-project',
metricShortcode: MetricShortcode.BCV, // Different metric
metricKey: MetricKey.AGGREGATE,
};
const result = metricsClient.testHandleTestEnvironment(params);
expect(result).toBeUndefined();
});
});
});