
Commit 8ebd147

feat(core): enhance E2E testing with golden test monitor and Slack notifications
1 parent 9d97679 commit 8ebd147


2 files changed: +367 -2 lines


.github/workflows/ci.yml

Lines changed: 12 additions & 2 deletions
@@ -95,17 +95,27 @@ jobs:
           pnpm nx run-many -t check-imports check-commit check-lock-files check-codeowners --parallel=1 --no-dte &
           pids+=($!)
 
-          pnpm nx affected --targets=lint,test,build,format-native,lint-native &
+          pnpm nx affected --targets=lint,test,build,e2e,format-native,lint-native &
           pids+=($!)
 
-          pnpm nx run-many -t e2e,e2e-ci &
+          pnpm nx run-many -t e2e-ci -- --json --outputFile=test-results.json &
           pids+=($!)
 
           for pid in "${pids[@]}"; do
             wait "$pid"
           done
         timeout-minutes: 100
 
+      - name: Install dependencies for golden test monitor
+        run: pnpm install axios
+
+      - name: Golden Test Monitor - Final E2E CI Check
+        env:
+          SLACK_WEBHOOK_URL: ${{ secrets.ACTION_MONITORING_SLACK }}
+        run: |
+          echo "🎯 Running Golden Test Monitor"
+          node ./scripts/golden-test.config.js
+
   main-macos:
     runs-on: macos-latest
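
For reference, the `--json --outputFile=test-results.json` flags are meant to leave a Jest-style JSON report next to each e2e project, which the monitor script added below reads from `e2e/<project>/test-results.json`. A minimal sketch of such a file, trimmed to just the fields the monitor looks at; all values and the suite path are illustrative, and the exact shape on disk depends on the Jest/Nx reporter in use:

// Illustrative e2e/<project>/test-results.json contents - only the fields the
// monitor reads (success, numFailedTestSuites, numFailedTests, testResults[]).
const exampleResultsFile = {
  success: false,
  numFailedTestSuites: 1,
  numFailedTests: 2,
  testResults: [
    {
      name: 'e2e/react/src/react.test.ts', // hypothetical suite path
      status: 'failed',
      numPassingTests: 10,
      numFailingTests: 2,
      failureMessage: '...',
      perfStats: { start: 1700000000000, end: 1700000120000 },
    },
  ],
};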

scripts/golden-test.config.js

Lines changed: 355 additions & 0 deletions
@@ -0,0 +1,355 @@
const fs = require('fs');
const axios = require('axios');

const goldenTestConfig = {
  angular: ['*'],
  cypress: ['*'],
  esbuild: ['*'],
  eslint: ['*'],
  gradle: ['*'],
  jest: ['*'],
  js: ['*'],
  'learn-smoke-test': ['*'],
  next: ['*'],
  node: ['*'],
  nuxt: ['*'],
  nx: ['*'],
  'nx-init': ['*'],
  playwright: ['*'],
  plugin: ['*'],
  react: ['*'],
  release: ['*'],
  rollup: ['*'],
  rspack: ['*'],
  storybook: ['*'],
  vite: ['*'],
  vue: ['*'],
  web: ['*'],
  webpack: ['*'],
  'workspace-create': ['*'],
};

const SLACK_WEBHOOK_URL = process.env.SLACK_WEBHOOK_URL;
// Channel label used in log output and the Slack payload; assumed to come from the
// environment (incoming webhooks typically post to their pre-configured channel anyway).
const SLACK_CHANNEL = process.env.SLACK_CHANNEL || '(webhook default)';

function parseTestResults() {
  const testResults = [];

  console.log('🔍 Looking for test results in project folders...');

  for (const projectName of Object.keys(goldenTestConfig)) {
    const testResultsPath = `e2e/${projectName}/test-results.json`;

    if (fs.existsSync(testResultsPath)) {
      try {
        const rawResults = JSON.parse(fs.readFileSync(testResultsPath, 'utf8'));
        console.log(
          `✅ Found test results for ${projectName}: ${testResultsPath}`
        );

        const projectResults = parseProjectResults(rawResults, projectName);
        testResults.push(...projectResults);
      } catch (error) {
        console.warn(`⚠️ Failed to parse ${testResultsPath}:`, error.message);
      }
    } else {
      console.log(
        `ℹ️ No test results found for ${projectName} (${testResultsPath}) - skipping`
      );
    }
  }

  return testResults;
}

function parseProjectResults(nxResults, projectName) {
  const testResults = [];

  console.log(`📊 Processing results for project: ${projectName}`);

  // Handle Jest test result format
  if (nxResults.testResults && Array.isArray(nxResults.testResults)) {
    console.log(
      `📋 Found Jest format results with ${nxResults.testResults.length} test suites`
    );

    nxResults.testResults.forEach((testSuite) => {
      const parsed = parseJestTestSuite(testSuite, projectName, nxResults);
      if (parsed) testResults.push(parsed);
    });
  } else {
    console.warn(
      `⚠️ Unexpected test result format for ${projectName}, expected Jest format with testResults array`
    );
  }

  console.log(`📈 Found ${testResults.length} test results for ${projectName}`);
  return testResults;
}

// Parse Jest test suite result
function parseJestTestSuite(testSuite, projectName, overallResults) {
  // Extract test file name from the test suite
  const testFile =
    testSuite.name || testSuite.testFilePath || `${projectName} test suite`;

  // Determine if this test suite passed or failed
  const hasFailed =
    testSuite.status === 'failed' ||
    testSuite.numFailingTests > 0 ||
    testSuite.failureMessage;

  const status = hasFailed ? 'failed' : 'passed';

  return {
    project: projectName,
    testFile: testFile,
    status: status,
    failures: testSuite.numFailingTests || 0,
    successes: testSuite.numPassingTests || 0,
    total: (testSuite.numPassingTests || 0) + (testSuite.numFailingTests || 0),
    duration: testSuite.perfStats
      ? testSuite.perfStats.end - testSuite.perfStats.start
      : 0,
    startTime: testSuite.perfStats?.start,
    endTime: testSuite.perfStats?.end,
    failureMessage: testSuite.failureMessage,
    source: `e2e/${projectName}/test-results.json`,
    // Include overall test run info
    overallSuccess: overallResults.success,
    totalFailedSuites: overallResults.numFailedTestSuites,
    totalFailedTests: overallResults.numFailedTests,
  };
}

// Check if a test is golden (since all tests in our config are '*', they all are)
function isGoldenTest(project, testFile) {
  const projectConfig = goldenTestConfig[project];
  if (!projectConfig) return false;

  // Handle wildcard '*' - all tests in project are golden
  if (projectConfig.includes('*')) return true;

  return projectConfig.some((pattern) => {
    if (pattern.includes('*')) {
      const regex = new RegExp(pattern.replace(/\*/g, '.*'));
      return regex.test(testFile);
    }
    return testFile.includes(pattern);
  });
}

function analyzeGoldenTestFailures(testResults) {
  const goldenFailures = [];
  const summary = {
    totalTests: testResults.length,
    totalFailures: 0,
    goldenFailures: 0,
    nonGoldenFailures: 0,
    projects: new Set(),
  };

  for (const result of testResults) {
    summary.projects.add(result.project);

    if (result.status === 'failed' || result.failures > 0) {
      summary.totalFailures++;

      if (isGoldenTest(result.project, result.testFile)) {
        summary.goldenFailures++;
        goldenFailures.push(result);
      } else {
        summary.nonGoldenFailures++;
      }
    }
  }

  return {
    goldenFailures,
    summary: {
      ...summary,
      projects: Array.from(summary.projects),
    },
  };
}

// Send Slack alert for golden test failures
async function sendSlackAlert(goldenFailures, summary) {
  if (!SLACK_WEBHOOK_URL) {
    console.log(
      '❌ No Slack webhook configured (SLACK_WEBHOOK_URL), skipping notification'
    );
    return false;
  }

  if (goldenFailures.length === 0) {
    console.log('✅ No golden test failures detected, no alert needed');
    return false;
  }

  // Group failures by project
  const failuresByProject = {};
  for (const failure of goldenFailures) {
    if (!failuresByProject[failure.project]) {
      failuresByProject[failure.project] = [];
    }
    failuresByProject[failure.project].push(failure);
  }

  // Build Slack message blocks
  const blocks = [
    {
      type: 'header',
      text: {
        type: 'plain_text',
        text: `🚨 Golden E2E Test Failures`,
        emoji: true,
      },
    },
    {
      type: 'section',
      text: {
        type: 'mrkdwn',
        text: `*${goldenFailures.length}* golden tests are failing and require immediate attention!`,
      },
    },
    {
      type: 'section',
      fields: [
        {
          type: 'mrkdwn',
          text: `*Total Tests:* ${summary.totalTests}`,
        },
        {
          type: 'mrkdwn',
          text: `*Golden Failures:* ${summary.goldenFailures}`,
        },
        {
          type: 'mrkdwn',
          text: `*Affected Projects:* ${Object.keys(failuresByProject).length}`,
        },
      ],
    },
  ];

  // Add details for each project with failures
  for (const [project, failures] of Object.entries(failuresByProject)) {
    const failureList = failures
      .slice(0, 3) // Limit to 3 failures per project for readability
      .map((f) => {
        const fileName = f.testFile.split('/').pop() || f.testFile;
        const failureCount = f.failures ? ` (${f.failures} failed tests)` : '';
        return `• \`${fileName}\`${failureCount}`;
      })
      .join('\n');

    const moreFailures =
      failures.length > 3
        ? `\n_...and ${failures.length - 3} more test suites_`
        : '';

    blocks.push({
      type: 'section',
      text: {
        type: 'mrkdwn',
        text: `*${project}* - FAILED:\n${failureList}${moreFailures}`,
      },
    });
  }

  // Add context information
  blocks.push({
    type: 'context',
    elements: [
      {
        type: 'mrkdwn',
        text: `Branch: \`${
          process.env.GITHUB_REF_NAME ||
          process.env.GITHUB_HEAD_REF ||
          'unknown'
        }\` | Commit: \`${(process.env.GITHUB_SHA || 'unknown').substring(
          0,
          8
        )}\` | Run: <${process.env.GITHUB_SERVER_URL}/${
          process.env.GITHUB_REPOSITORY
        }/actions/runs/${process.env.GITHUB_RUN_ID}|#${
          process.env.GITHUB_RUN_NUMBER || 'local'
        }>`,
      },
    ],
  });

  const message = {
    channel: SLACK_CHANNEL,
    username: 'Golden Test Monitor',
    icon_emoji: ':rotating_light:',
    blocks: blocks,
  };

  try {
    await axios.post(SLACK_WEBHOOK_URL, message);
    console.log(`✅ Slack alert sent successfully to #${SLACK_CHANNEL}`);
    return true;
  } catch (error) {
    console.error(
      '❌ Failed to send Slack alert:',
      error.response?.data || error.message
    );
    return false;
  }
}

// Main execution function
async function main() {
  console.log('🔍 Starting Golden Test Monitor...');
  console.log(`📋 Monitoring ${Object.keys(goldenTestConfig).length} projects`);
  console.log(`📢 Slack alerts will be sent to #${SLACK_CHANNEL}\n`);

  // Parse test results
  const testResults = parseTestResults();
  if (testResults.length === 0) {
    console.log(
      'ℹ️ No test results found - this may be normal if no e2e tests ran'
    );
    console.log('✅ Exiting gracefully - no golden tests to check');
    process.exit(0);
  }

  console.log(`📊 Found ${testResults.length} test results\n`);

  // Analyze for golden test failures
  const { goldenFailures, summary } = analyzeGoldenTestFailures(testResults);

  // Display summary
  console.log('📈 Analysis Summary:');
  console.log(`   Total Tests: ${summary.totalTests}`);
  console.log(`   Total Failures: ${summary.totalFailures}`);
  console.log(`   Golden Failures: ${summary.goldenFailures} ⚠️`);
  console.log(`   Non-Golden Failures: ${summary.nonGoldenFailures}`);
  console.log(`   Affected Projects: ${summary.projects.join(', ')}\n`);

  // Send Slack alert if needed
  await sendSlackAlert(goldenFailures, summary);

  // Exit with appropriate code
  if (summary.goldenFailures > 0) {
    console.log('❌ Golden tests are failing - immediate attention required!');
    process.exit(1);
  } else {
    console.log('✅ All golden tests are passing!');
    process.exit(0);
  }
}

if (require.main === module) {
  main().catch((error) => {
    console.error('💥 Golden Test Monitor failed:', error);
    process.exit(1);
  });
}

module.exports = {
  goldenTestConfig,
  parseTestResults,
  analyzeGoldenTestFailures,
  sendSlackAlert,
  main,
};
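
Since the script exports its helpers, the golden-failure classification can be exercised on its own, for example from a scratch Node script at the repo root (axios must be installed, since the module requires it at load time). A minimal sketch with a hand-built result object shaped like the output of parseJestTestSuite; the suite path is hypothetical:

// Minimal sketch: classify one synthetic failing suite with the exported helper.
const { analyzeGoldenTestFailures } = require('./scripts/golden-test.config.js');

const syntheticResults = [
  {
    project: 'react', // listed in goldenTestConfig with ['*'], so every suite is golden
    testFile: 'e2e/react/src/react.test.ts', // hypothetical path
    status: 'failed',
    failures: 2,
    successes: 10,
  },
];

const { goldenFailures, summary } = analyzeGoldenTestFailures(syntheticResults);
console.log(summary.goldenFailures); // 1
console.log(goldenFailures[0].project); // 'react'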
