Commit 798837c

feat(core): enhance E2E testing with golden test monitor and Slack notifications
1 parent 9d97679 commit 798837c

2 files changed: +375 −3 lines changed

.github/workflows/ci.yml

Lines changed: 17 additions & 3 deletions
@@ -95,17 +95,31 @@ jobs:
          pnpm nx run-many -t check-imports check-commit check-lock-files check-codeowners --parallel=1 --no-dte &
          pids+=($!)

-         pnpm nx affected --targets=lint,test,build,format-native,lint-native &
+         pnpm nx affected --targets=lint,test,build,e2e,format-native,lint-native &
          pids+=($!)

-         pnpm nx run-many -t e2e,e2e-ci &
-         pids+=($!)
+         pnpm nx run-many -t e2e-ci -- --json --outputFile=test-results.json &
+         e2e_pid=$!

+         # Wait for all non-e2e processes first
          for pid in "${pids[@]}"; do
            wait "$pid"
          done
+
+         # Wait for e2e-ci but don't fail if it fails
+         wait "$e2e_pid" || echo "⚠️ E2E tests failed, but continuing to golden test monitor..."
       timeout-minutes: 100

+     - name: Install dependencies for golden test monitor
+       run: pnpm install axios
+
+     - name: Golden Test Monitor - Final E2E CI Check
+       env:
+         SLACK_WEBHOOK_URL: ${{ secrets.ACTION_MONITORING_SLACK }}
+       run: |
+         echo "🎯 Running Golden Test Monitor - this determines the final e2e-ci status..."
+         node ./e2e/golden-test-config.js
   main-macos:
     runs-on: macos-latest

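For illustration only (not part of the commit): the final step above amounts to running the monitor script with Node once the e2e targets have finished, so it can also be exercised locally against whatever per-project test-results.json files already exist. A minimal sketch, assuming the script path used in the workflow step:

const monitor = require('./e2e/golden-test-config.js'); // path as invoked by the workflow step

// Without SLACK_WEBHOOK_URL in the environment, the script logs that no webhook
// is configured, skips the notification, and still exits non-zero when a
// golden test failed.
monitor.main().catch((error) => {
  console.error('Golden Test Monitor failed:', error);
  process.exit(1);
});
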
e2e/golden-test.config.js

Lines changed: 358 additions & 0 deletions
@@ -0,0 +1,358 @@
const fs = require('fs');
const axios = require('axios');

const goldenTestConfig = {
  angular: ['*'],
  cypress: ['*'],
  esbuild: ['*'],
  eslint: ['*'],
  gradle: ['*'],
  jest: ['*'],
  js: ['*'],
  'learn-smoke-test': ['*'],
  next: ['*'],
  node: ['*'],
  nuxt: ['*'],
  nx: ['*'],
  'nx-init': ['*'],
  playwright: ['*'],
  plugin: ['*'],
  react: ['*'],
  release: ['*'],
  rollup: ['*'],
  rspack: ['*'],
  storybook: ['*'],
  vite: ['*'],
  vue: ['*'],
  web: ['*'],
  webpack: ['*'],
  'workspace-create': ['*'],
};

const SLACK_WEBHOOK_URL = process.env.SLACK_WEBHOOK_URL;
// Channel label used in logs and the Slack payload (assumed to come from the
// environment; incoming webhooks are already bound to a channel).
const SLACK_CHANNEL = process.env.SLACK_CHANNEL || 'unknown';

// Parse test results from project folders
function parseTestResults() {
  const testResults = [];

  console.log('🔍 Looking for test results in project folders...');

  for (const projectName of Object.keys(goldenTestConfig)) {
    const testResultsPath = `e2e/${projectName}/test-results.json`;

    if (fs.existsSync(testResultsPath)) {
      try {
        const rawResults = JSON.parse(fs.readFileSync(testResultsPath, 'utf8'));
        console.log(
          `✅ Found test results for ${projectName}: ${testResultsPath}`
        );

        const projectResults = parseProjectResults(rawResults, projectName);
        testResults.push(...projectResults);
      } catch (error) {
        console.warn(`⚠️ Failed to parse ${testResultsPath}:`, error.message);
      }
    } else {
      console.log(
        `ℹ️ No test results found for ${projectName} (${testResultsPath}) - skipping`
      );
    }
  }

  return testResults;
}

function parseProjectResults(nxResults, projectName) {
  const testResults = [];

  console.log(`📊 Processing results for project: ${projectName}`);

  // Handle Jest test result format
  if (nxResults.testResults && Array.isArray(nxResults.testResults)) {
    console.log(
      `📋 Found Jest format results with ${nxResults.testResults.length} test suites`
    );

    nxResults.testResults.forEach((testSuite) => {
      const parsed = parseJestTestSuite(testSuite, projectName, nxResults);
      if (parsed) testResults.push(parsed);
    });
  } else {
    console.warn(
      `⚠️ Unexpected test result format for ${projectName}, expected Jest format with testResults array`
    );
  }

  console.log(`📈 Found ${testResults.length} test results for ${projectName}`);
  return testResults;
}

// Parse Jest test suite result
function parseJestTestSuite(testSuite, projectName, overallResults) {
  // Extract test file name from the test suite
  const testFile =
    testSuite.name || testSuite.testFilePath || `${projectName} test suite`;

  // Determine if this test suite passed or failed
  const hasFailed =
    testSuite.status === 'failed' ||
    testSuite.numFailingTests > 0 ||
    testSuite.failureMessage;

  const status = hasFailed ? 'failed' : 'passed';

  return {
    project: projectName,
    testFile: testFile,
    status: status,
    failures: testSuite.numFailingTests || 0,
    successes: testSuite.numPassingTests || 0,
    total: testSuite.numPassingTests + testSuite.numFailingTests || 0,
    duration: testSuite.perfStats
      ? testSuite.perfStats.end - testSuite.perfStats.start
      : 0,
    startTime: testSuite.perfStats?.start,
    endTime: testSuite.perfStats?.end,
    failureMessage: testSuite.failureMessage,
    source: `e2e/${projectName}/test-results.json`,
    // Include overall test run info
    overallSuccess: overallResults.success,
    totalFailedSuites: overallResults.numFailedTestSuites,
    totalFailedTests: overallResults.numFailedTests,
  };
}

// Check if a test is golden (since all tests in our config are '*', they all are)
function isGoldenTest(project, testFile) {
  const projectConfig = goldenTestConfig[project];
  if (!projectConfig) return false;

  // Handle wildcard '*' - all tests in project are golden
  if (projectConfig.includes('*')) return true;

  // Check specific test files
  return projectConfig.some((pattern) => {
    if (pattern.includes('*')) {
      const regex = new RegExp(pattern.replace(/\*/g, '.*'));
      return regex.test(testFile);
    }
    return testFile.includes(pattern);
  });
}

// Analyze test results for golden test failures
function analyzeGoldenTestFailures(testResults) {
  const goldenFailures = [];
  const summary = {
    totalTests: testResults.length,
    totalFailures: 0,
    goldenFailures: 0,
    nonGoldenFailures: 0,
    projects: new Set(),
  };

  for (const result of testResults) {
    summary.projects.add(result.project);

    if (result.status === 'failed' || result.failures > 0) {
      summary.totalFailures++;

      if (isGoldenTest(result.project, result.testFile)) {
        summary.goldenFailures++;
        goldenFailures.push(result);
      } else {
        summary.nonGoldenFailures++;
      }
    }
  }

  return {
    goldenFailures,
    summary: {
      ...summary,
      projects: Array.from(summary.projects),
    },
  };
}

// Send Slack alert for golden test failures
async function sendSlackAlert(goldenFailures, summary) {
  if (!SLACK_WEBHOOK_URL) {
    console.log(
      '❌ No Slack webhook configured (SLACK_WEBHOOK_URL), skipping notification'
    );
    return false;
  }

  if (goldenFailures.length === 0) {
    console.log('✅ No golden test failures detected, no alert needed');
    return false;
  }

  // Group failures by project
  const failuresByProject = {};
  for (const failure of goldenFailures) {
    if (!failuresByProject[failure.project]) {
      failuresByProject[failure.project] = [];
    }
    failuresByProject[failure.project].push(failure);
  }

  // Build Slack message blocks
  const blocks = [
    {
      type: 'header',
      text: {
        type: 'plain_text',
        text: `🚨 Golden E2E Test Failures`,
        emoji: true,
      },
    },
    {
      type: 'section',
      text: {
        type: 'mrkdwn',
        text: `*${goldenFailures.length}* golden tests are failing and require immediate attention!`,
      },
    },
    {
      type: 'section',
      fields: [
        {
          type: 'mrkdwn',
          text: `*Total Tests:* ${summary.totalTests}`,
        },
        {
          type: 'mrkdwn',
          text: `*Golden Failures:* ${summary.goldenFailures}`,
        },
        {
          type: 'mrkdwn',
          text: `*Affected Projects:* ${Object.keys(failuresByProject).length}`,
        },
      ],
    },
  ];

  // Add details for each project with failures
  for (const [project, failures] of Object.entries(failuresByProject)) {
    const failureList = failures
      .slice(0, 3) // Limit to 3 failures per project for readability
      .map((f) => {
        const fileName = f.testFile.split('/').pop() || f.testFile;
        const failureCount = f.failures ? ` (${f.failures} failed tests)` : '';
        return `• \`${fileName}\`${failureCount}`;
      })
      .join('\n');

    const moreFailures =
      failures.length > 3
        ? `\n_...and ${failures.length - 3} more test suites_`
        : '';

    blocks.push({
      type: 'section',
      text: {
        type: 'mrkdwn',
        text: `*${project}* - FAILED:\n${failureList}${moreFailures}`,
      },
    });
  }

  // Add context information
  blocks.push({
    type: 'context',
    elements: [
      {
        type: 'mrkdwn',
        text: `Branch: \`${
          process.env.GITHUB_REF_NAME ||
          process.env.GITHUB_HEAD_REF ||
          'unknown'
        }\` | Commit: \`${(process.env.GITHUB_SHA || 'unknown').substring(
          0,
          8
        )}\` | Run: <${process.env.GITHUB_SERVER_URL}/${
          process.env.GITHUB_REPOSITORY
        }/actions/runs/${process.env.GITHUB_RUN_ID}|#${
          process.env.GITHUB_RUN_NUMBER || 'local'
        }>`,
      },
    ],
  });

  const message = {
    channel: SLACK_CHANNEL,
    username: 'Golden Test Monitor',
    icon_emoji: ':rotating_light:',
    blocks: blocks,
  };

  try {
    await axios.post(SLACK_WEBHOOK_URL, message);
    console.log(`✅ Slack alert sent successfully to #${SLACK_CHANNEL}`);
    return true;
  } catch (error) {
    console.error(
      '❌ Failed to send Slack alert:',
      error.response?.data || error.message
    );
    return false;
  }
}

// Main execution function
async function main() {
  console.log('🔍 Starting Golden Test Monitor...');
  console.log(`📋 Monitoring ${Object.keys(goldenTestConfig).length} projects`);
  console.log(`📢 Slack alerts will be sent to #${SLACK_CHANNEL}\n`);

  // Parse test results
  const testResults = parseTestResults();
  if (testResults.length === 0) {
    console.log(
      'ℹ️ No test results found - this may be normal if no e2e tests ran'
    );
    console.log('✅ Exiting gracefully - no golden tests to check');
    process.exit(0);
  }

  console.log(`📊 Found ${testResults.length} test results\n`);

  // Analyze for golden test failures
  const { goldenFailures, summary } = analyzeGoldenTestFailures(testResults);

  // Display summary
  console.log('📈 Analysis Summary:');
  console.log(`   Total Tests: ${summary.totalTests}`);
  console.log(`   Total Failures: ${summary.totalFailures}`);
  console.log(`   Golden Failures: ${summary.goldenFailures} ⚠️`);
  console.log(`   Non-Golden Failures: ${summary.nonGoldenFailures}`);
  console.log(`   Affected Projects: ${summary.projects.join(', ')}\n`);

  // Send Slack alert if needed
  await sendSlackAlert(goldenFailures, summary);

  // Exit with appropriate code
  if (summary.goldenFailures > 0) {
    console.log('❌ Golden tests are failing - immediate attention required!');
    process.exit(1);
  } else {
    console.log('✅ All golden tests are passing!');
    process.exit(0);
  }
}

if (require.main === module) {
  main().catch((error) => {
    console.error('💥 Golden Test Monitor failed:', error);
    process.exit(1);
  });
}

module.exports = {
  goldenTestConfig,
  parseTestResults,
  analyzeGoldenTestFailures,
  sendSlackAlert,
  main,
};

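For illustration only (not part of the commit), the exported helpers can be driven directly with a hand-built entry shaped like the objects parseJestTestSuite produces; the file path and counts below are made up, and the require path assumes the file name shown above:

const { analyzeGoldenTestFailures } = require('./e2e/golden-test.config.js');

// One parsed result, using the fields analyzeGoldenTestFailures reads.
const results = [
  {
    project: 'react',
    testFile: 'e2e/react/src/react.test.ts',
    status: 'failed',
    failures: 2,
    successes: 10,
  },
];

const { goldenFailures, summary } = analyzeGoldenTestFailures(results);
console.log(summary.goldenFailures); // 1 - 'react' is covered by the '*' wildcard, so its failure is golden
console.log(goldenFailures.length); // 1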