
Commit 8b12a79

feat(cache): elongate cache time at calling (#284)
Because
- the cache TTL needs to be elongated when a file is used for an in-context query

This commit
- implements the logic to elongate the cache TTL
- improves integration test robustness
- cleans up the codebase
1 parent a17b3c4 commit 8b12a79

36 files changed: +3,013 −1,353 lines
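The diffs shown below are the integration-test side of the change; the cache-TTL logic itself lives in the backend and is not reproduced on this page. As a rough illustration of the behaviour the commit message describes, elongating a cache entry's TTL whenever the cached file is used for an in-context query, here is a minimal hypothetical sketch in JavaScript. The Map-based cache, the TTL constants, and the function names are illustrative assumptions, not the commit's actual implementation.

// Hypothetical sketch - not the backend code from this commit.
// The idea: every time a cached file is read for an in-context query,
// push its expiry further out so actively used files stay cached.
const DEFAULT_TTL_MS = 5 * 60 * 1000;    // assumed base TTL
const ELONGATED_TTL_MS = 30 * 60 * 1000; // assumed extended TTL on use

const cache = new Map(); // key -> { value, expiresAt }

function putFile(key, value) {
  cache.set(key, { value, expiresAt: Date.now() + DEFAULT_TTL_MS });
}

function getFileForInContextQuery(key) {
  const entry = cache.get(key);
  if (!entry || entry.expiresAt < Date.now()) {
    cache.delete(key);
    return null; // caller re-fetches the file and calls putFile() again
  }
  // Elongate the TTL because the file is actively being used in a query.
  entry.expiresAt = Date.now() + ELONGATED_TTL_MS;
  return entry.value;
}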

integration-test/const.js

Lines changed: 1 addition & 1 deletion
@@ -30,7 +30,7 @@ export const mgmtVersion = 'v1beta';
 
 export const namespace = "users/admin"
 export const defaultUsername = "admin"
-export const defaultPassword = "password"
+export const defaultPassword = "qazwsxedc"
 
 export const params = {
   headers: {

integration-test/grpc-kb-update.js

Lines changed: 39 additions & 18 deletions
@@ -1807,13 +1807,13 @@ function TestCC02_DeletingFilesDuringSwap(client, data) {
 
   // Process files
 
-  // Wait for processing (using helper function)
+  // Wait for processing (using helper function) - increased timeout for CI
   const resultCC2 = helper.waitForMultipleFilesProcessingComplete(
     data.expectedOwner.id,
     knowledgeBaseIdCC2,
     [fileUid1, fileUid2],
     data.header,
-    600
+    900
   );
 
   check(resultCC2, {
@@ -1967,13 +1967,13 @@ function TestCC03_RapidOperations(client, data) {
 
   // Process files
 
-  // Wait for processing (using helper function)
+  // Wait for processing (using helper function) - increased timeout for CI
   const resultCC3 = helper.waitForMultipleFilesProcessingComplete(
     data.expectedOwner.id,
     knowledgeBaseIdCC3,
     [fileUid1, fileUid2],
     data.header,
-    600
+    900
   );
 
   check(resultCC3, {
@@ -2087,14 +2087,14 @@ function TestCC03_RapidOperations(client, data) {
     console.warn(`CC3: Failed to re-query files: ${e}, using original UIDs`);
   }
 
-  // Wait for new files to complete processing
+  // Wait for new files to complete processing - increased timeout for CI
   console.log("CC3: Waiting for new files to complete processing...");
   const newFilesResult = helper.waitForMultipleFilesProcessingComplete(
     data.expectedOwner.id,
     knowledgeBaseIdCC3,
     [swappedFileUid1, swappedFileUid2, swappedFileUid3],
     data.header,
-    600
+    900
   );
 
   check(newFilesResult, {
@@ -2197,13 +2197,13 @@ function TestCC04_RaceConditions(client, data) {
 
   // Process initial file
   // Auto-trigger: Processing starts automatically on upload
-  // Wait for processing (using helper function)
+  // Wait for processing (using helper function) - increased timeout for CI
   const resultCC4 = helper.waitForFileProcessingComplete(
     data.expectedOwner.id,
     knowledgeBaseIdCC4,
     fileUidCC4,
     data.header,
-    600
+    900
   );
 
   check(resultCC4, {
@@ -2338,7 +2338,7 @@ function TestCC04_RaceConditions(client, data) {
     knowledgeBaseIdCC4,
     raceFileUidToCheck,
     data.header,
-    600
+    900
   );
 
   check(raceFileResult, {
@@ -2698,13 +2698,13 @@ function TestCC06_DeletingFilesAfterSwap(client, data) {
 
   // Process files
   // Auto-trigger: Processing starts automatically on upload
-  // Wait for processing (using helper function)
+  // Wait for processing (using helper function) - increased timeout for CI
   const resultCC6 = helper.waitForMultipleFilesProcessingComplete(
     data.expectedOwner.id,
     knowledgeBaseIdCC6,
     [fileUid1CC6, fileUid2CC6],
     data.header,
-    600
+    900
   );
 
   check(resultCC6, {
@@ -2771,27 +2771,48 @@ function TestCC06_DeletingFilesAfterSwap(client, data) {
 
   console.log(`CC6: Retention period active - Production UID: ${prodKBUIDCC6}, Rollback UID: ${rollbackKBUIDCC6}`);
 
-  // List files to get the file UID after swap
+  // Add extra wait time for async operations to settle in CI environments
+  console.log("CC6: Waiting for post-update operations to settle...");
+  sleep(5);
+
+  // List files with retry logic for CI environments (files may not be immediately available)
+  let fileToDeleteUID = null;
+  let listAttempts = 0;
+  const MAX_LIST_ATTEMPTS = 5;
+
+  while (!fileToDeleteUID && listAttempts < MAX_LIST_ATTEMPTS) {
+    listAttempts++;
+
     const listFilesRes = http.request(
       "GET",
       `${constant.artifactRESTPublicHost}/v1alpha/namespaces/${data.expectedOwner.id}/knowledge-bases/${knowledgeBaseIdCC6}/files`,
       null,
       data.header
     );
 
-  let fileToDeleteUID = null;
     if (listFilesRes.status === 200) {
       const files = listFilesRes.json().files || [];
+      console.log(`CC6: List attempt ${listAttempts}/${MAX_LIST_ATTEMPTS} - Found ${files.length} files`);
+
       for (const file of files) {
         if (file.filename === file2NameCC6) {
           fileToDeleteUID = file.fileUid;
           break;
         }
+      }
+
+      if (!fileToDeleteUID && listAttempts < MAX_LIST_ATTEMPTS) {
+        console.log(`CC6: File not found yet, waiting 3s before retry...`);
+        sleep(3);
+      }
+    } else {
+      console.warn(`CC6: List files returned status ${listFilesRes.status}, retrying in 3s...`);
+      sleep(3);
    }
  }
 
  if (!fileToDeleteUID) {
-    console.error("CC6: Could not find file to delete");
+    console.error(`CC6: Could not find file to delete after ${MAX_LIST_ATTEMPTS} attempts`);
    http.request("DELETE", `${constant.artifactRESTPublicHost}/v1alpha/namespaces/${data.expectedOwner.id}/knowledge-bases/${knowledgeBaseIdCC6}`, null, data.header);
    return;
  }
@@ -2913,13 +2934,13 @@ function TestCC07_MultipleOperations(client, data) {
 
   // Process all files
 
-  // Wait for processing (using helper function)
+  // Wait for processing (using helper function) - increased timeout for CI
   const resultCC7 = helper.waitForMultipleFilesProcessingComplete(
     data.expectedOwner.id,
     knowledgeBaseIdCC7,
     [fileUid1CC7, fileUid2CC7, fileUid3CC7],
     data.header,
-    600
+    900
   );
 
   check(resultCC7, {
@@ -3014,14 +3035,14 @@ function TestCC07_MultipleOperations(client, data) {
     client.invoke("artifact.artifact.v1alpha.ArtifactPrivateService/DeleteFileAdmin", { file_id: prodFileUid2CC7 }, data.metadata);
   }
 
-  // CRITICAL: Wait for new files to complete processing before validation
+  // CRITICAL: Wait for new files to complete processing before validation - increased timeout for CI
   console.log("CC7: Waiting for 3 new files to complete sequential dual processing...");
   const newFilesResult = helper.waitForMultipleFilesProcessingComplete(
     data.expectedOwner.id,
     knowledgeBaseIdCC7,
     newFileUids,
     data.header,
-    600
+    900
   );
 
   check(newFilesResult, {

integration-test/grpc-system-config-update.js

Lines changed: 40 additions & 18 deletions
@@ -409,30 +409,30 @@ export default function (data) {
   console.log(`Waiting for ${allInitialFiles.length} initial files to process...`);
   console.log(`File UIDs: ${allInitialFiles.map(f => f.uid).join(', ')}`);
 
-  // Wait for files in KB1
+  // Wait for files in KB1 (increased timeout for AI-intensive operations in CI)
   if (initialFiles1.length > 0) {
     console.log(`Waiting for ${initialFiles1.length} files in KB1...`);
     const result1 = helper.waitForMultipleFilesProcessingComplete(
       data.expectedOwner.id,
       knowledgeBaseId1,
       initialFiles1.map(f => f.uid),
       data.header,
-      360 // Max 360 seconds
+      900 // 15 minutes for AI conversion with rate limiting
     );
     if (!result1.completed) {
       console.log(`KB1 files incomplete: ${result1.status}, processed ${result1.processedCount}/${initialFiles1.length}`);
     }
   }
 
-  // Wait for files in KB2
+  // Wait for files in KB2 (increased timeout for AI-intensive operations in CI)
   if (initialFiles2.length > 0) {
     console.log(`Waiting for ${initialFiles2.length} files in KB2...`);
     const result2 = helper.waitForMultipleFilesProcessingComplete(
       data.expectedOwner.id,
       knowledgeBaseId2,
       initialFiles2.map(f => f.uid),
       data.header,
-      360 // Max 360 seconds
+      900 // 15 minutes for AI conversion with rate limiting
     );
     if (!result2.completed) {
       console.log(`KB2 files incomplete: ${result2.status}, processed ${result2.processedCount}/${initialFiles2.length}`);
@@ -1075,48 +1075,63 @@ export default function (data) {
   // Wait for production retention files using robust helper
   console.log(`Waiting for ${retentionFileUids.length} production retention files...`);
 
-  // Wait for KB1 retention files
+  // Wait for KB1 retention files (increased timeout for AI-intensive operations in CI)
+  let kb1Result = { completed: true, processedCount: 0 };
   if (retentionFiles1.length > 0) {
-    const result1 = helper.waitForMultipleFilesProcessingComplete(
+    kb1Result = helper.waitForMultipleFilesProcessingComplete(
       data.expectedOwner.id,
       data.kb1_initial.knowledgeBaseId,
       retentionFiles1.map(f => f.uid),
       data.header,
-      360
+      900 // 15 minutes for AI conversion with rate limiting
     );
-    if (!result1.completed) {
-      console.log(`KB1 retention files incomplete: ${result1.status}`);
+    if (!kb1Result.completed) {
+      console.error(`✗ KB1 retention files incomplete: ${kb1Result.status}, processed ${kb1Result.processedCount}/${retentionFiles1.length}`);
     }
   }
 
-  // Wait for KB2 retention files
+  // Wait for KB2 retention files (increased timeout for AI-intensive operations in CI)
+  let kb2Result = { completed: true, processedCount: 0 };
   if (retentionFiles2.length > 0) {
-    const result2 = helper.waitForMultipleFilesProcessingComplete(
+    kb2Result = helper.waitForMultipleFilesProcessingComplete(
      data.expectedOwner.id,
      data.kb2_initial.knowledgeBaseId,
      retentionFiles2.map(f => f.uid),
      data.header,
-      360
+      900 // 15 minutes for AI conversion with rate limiting
    );
-    if (!result2.completed) {
-      console.log(`KB2 retention files incomplete: ${result2.status}`);
+    if (!kb2Result.completed) {
+      console.error(`✗ KB2 retention files incomplete: ${kb2Result.status}, processed ${kb2Result.processedCount}/${retentionFiles2.length}`);
    }
  }
 
-  const productionCompleted = true;
-  console.log(`All ${retentionFileUids.length} production retention files processed`);
+  const productionCompleted = kb1Result.completed && kb2Result.completed;
+
+  if (productionCompleted) {
+    console.log(`✓ All ${retentionFileUids.length} production retention files processed`);
+  } else {
+    console.error(`✗ Production retention files failed: KB1=${kb1Result.completed}, KB2=${kb2Result.completed}`);
+  }
 
   check({ productionCompleted }, {
     "Phase 5: Production retention files processed": () => productionCompleted === true,
   });
 
+  // FAIL EARLY: If production files didn't complete, don't proceed with rollback verification
+  if (!productionCompleted) {
+    console.error("Phase 5: Aborting - production retention files did not complete in time");
+    console.error("This prevents invalid test results in subsequent phases");
+    return;
+  }
+
   // CRITICAL: Also wait for rollback KB files to complete
   // Sequential dual-processing triggers rollback files after production completes
   // Rollback workflow will block if any files are NOTSTARTED in rollback KB
+  // Increased timeout for CI environments with AI rate limiting (900s = 15 minutes)
   console.log("Waiting for retention file processing in rollback KBs (sequential dual-processing)...");
   let rollbackCompleted = false;
   if (data.kb1_rollback && data.kb2_rollback) {
-    for (let i = 0; i < 360; i++) {
+    for (let i = 0; i < 1800; i++) { // 1800 * 0.5s = 900s (15 minutes)
       let kb1Count = 0;
       let kb2Count = 0;
 
@@ -1165,9 +1180,16 @@ export default function (data) {
     "Phase 5: Rollback KB files processed (dual-processing)": () => rollbackCompleted === true,
   });
 
+  // FAIL EARLY: If rollback files didn't complete, don't proceed with subsequent phases
   if (!rollbackCompleted) {
-    console.warn("Rollback KB files did not complete in time - rollback may be blocked");
+    console.error("✗ Phase 5: Rollback KB files did not complete in time");
+    console.error(`  Final state: KB1=${kb1Count}/2, KB2=${kb2Count}/2`);
+    console.error("  Sequential dual-processing may be blocked or AI service overloaded");
+    console.error("Phase 5: Aborting - cannot verify rollback behavior without complete data");
+    return;
   }
+
+  console.log("✓ All rollback KB files processed successfully");
  }
 
  // ====================================================================

integration-test/helper.js

Lines changed: 7 additions & 5 deletions
@@ -1383,7 +1383,7 @@ export function waitForFileProcessingComplete(namespaceId, knowledgeBaseId, file
   let consecutiveNotStarted = 0;
   let consecutiveErrors = 0;
   const NOTSTARTED_THRESHOLD = notStartedThreshold; // If file stays NOTSTARTED for this long, workflow likely never started
-  const MAX_CONSECUTIVE_ERRORS = 5; // Tolerate up to 5 consecutive API errors before giving up
+  const MAX_CONSECUTIVE_ERRORS = 10; // Tolerate up to 10 consecutive API errors before giving up (increased for CI)
 
   // Adaptive polling: Start with faster polls, then back off
   // This reduces load on resource-constrained systems while maintaining responsiveness
@@ -1404,7 +1404,7 @@ export function waitForFileProcessingComplete(namespaceId, knowledgeBaseId, file
       console.error(`✗ File or knowledge base not found (404) - knowledge base/file may have been deleted`);
       return { completed: false, status: "NOT_FOUND", error: "Knowledge base or file not found" };
     } else if (statusRes.status >= 500) {
-      // 5xx errors might be transient on resource-constrained systems - tolerate a few
+      // 5xx errors might be transient on resource-constrained systems - tolerate more in CI
       consecutiveErrors++;
       console.warn(`⚠ API error ${statusRes.status} while checking file status (${consecutiveErrors}/${MAX_CONSECUTIVE_ERRORS})`);
 
@@ -1413,9 +1413,11 @@ export function waitForFileProcessingComplete(namespaceId, knowledgeBaseId, file
        return { completed: false, status: "API_ERROR", error: `HTTP ${statusRes.status} - ${MAX_CONSECUTIVE_ERRORS} consecutive failures` };
      }
 
-      // Back off more aggressively on errors
-      sleep(Math.min(pollInterval * 2, 5));
-      elapsed += Math.min(pollInterval * 2, 5);
+      // Exponential backoff on errors - more aggressive for CI environments
+      const backoff = Math.min(Math.pow(2, consecutiveErrors - 1), 10);
+      console.log(`  Backing off for ${backoff}s due to API error...`);
+      sleep(backoff);
+      elapsed += backoff;
      continue;
    } else if (statusRes.status >= 400) {
      console.error(`✗ Client API error ${statusRes.status} while checking file status`);
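For reference, a standalone sketch of the retry schedule produced by the backoff formula above, Math.min(Math.pow(2, consecutiveErrors - 1), 10), assuming the MAX_CONSECUTIVE_ERRORS = 10 cap set earlier in this diff:

// Illustration only: the per-attempt backoff (in seconds) for consecutive 5xx errors.
const MAX_CONSECUTIVE_ERRORS = 10;
const schedule = [];
for (let consecutiveErrors = 1; consecutiveErrors <= MAX_CONSECUTIVE_ERRORS; consecutiveErrors++) {
  schedule.push(Math.min(Math.pow(2, consecutiveErrors - 1), 10));
}
console.log(schedule.join(", ")); // 1, 2, 4, 8, 10, 10, 10, 10, 10, 10

The wait per error now grows from 1s up to a 10s cap, instead of the previous flat Math.min(pollInterval * 2, 5) seconds.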
