feat(llm): decrease the throttle on the chunking tests lol

This commit is contained in:
perf3ct 2025-06-08 21:47:53 +00:00
parent 93cf868dcf
commit b28387bada
No known key found for this signature in database
GPG Key ID: 569C4EEC436F5232

View File

@ -388,9 +388,11 @@ describe('Provider Streaming Integration Tests', () => {
describe('Performance and Scalability', () => {
it('should handle high-frequency chunk delivery', async () => {
const highFrequencyChunks = Array.from({ length: 1000 }, (_, i) => ({
// Reduced count for CI stability while still testing high frequency
const chunkCount = 500; // Reduced from 1000
const highFrequencyChunks = Array.from({ length: chunkCount }, (_, i) => ({
message: { content: `chunk${i}` },
done: i === 999
done: i === (chunkCount - 1)
}));
const mockIterator = {
@ -409,10 +411,10 @@ describe('Provider Streaming Integration Tests', () => {
);
const endTime = Date.now();
expect(result.chunkCount).toBe(1000);
expect(result.completeText).toContain('chunk999');
expect(endTime - startTime).toBeLessThan(5000); // Should complete in under 5s
});
expect(result.chunkCount).toBe(chunkCount);
expect(result.completeText).toContain(`chunk${chunkCount - 1}`);
expect(endTime - startTime).toBeLessThan(3000); // Should complete in under 3s
}, 15000); // Add 15 second timeout
it('should handle large individual chunks', async () => {
const largeContent = 'x'.repeat(100000); // 100KB chunk
@ -471,16 +473,18 @@ describe('Provider Streaming Integration Tests', () => {
describe('Memory Management', () => {
it('should not leak memory during long streaming sessions', async () => {
// Reduced chunk count for CI stability - still tests memory management
const chunkCount = 1000; // Reduced from 10000
const longSessionIterator = {
async *[Symbol.asyncIterator]() {
for (let i = 0; i < 10000; i++) {
for (let i = 0; i < chunkCount; i++) {
yield {
message: { content: `Chunk ${i} with some additional content to increase memory usage` },
done: i === 9999
done: i === (chunkCount - 1)
};
// Periodic yield to event loop
if (i % 100 === 0) {
// Periodic yield to event loop to prevent blocking
if (i % 50 === 0) { // More frequent yields for shorter test
await new Promise(resolve => setImmediate(resolve));
}
}
@ -496,12 +500,12 @@ describe('Provider Streaming Integration Tests', () => {
const finalMemory = process.memoryUsage();
expect(result.chunkCount).toBe(10000);
expect(result.chunkCount).toBe(chunkCount);
// Memory increase should be reasonable (less than 50MB)
// Memory increase should be reasonable (less than 20MB for smaller test)
const memoryIncrease = finalMemory.heapUsed - initialMemory.heapUsed;
expect(memoryIncrease).toBeLessThan(50 * 1024 * 1024);
});
expect(memoryIncrease).toBeLessThan(20 * 1024 * 1024);
}, 30000); // Add 30 second timeout for this test
it('should clean up resources on stream completion', async () => {
const resourceTracker = {