
Commit 646ae1b

add file streaming test
1 parent 2426006 commit 646ae1b

File tree:
  tests/e2e/session-state-isolation-workflow.test.ts
  tests/e2e/streaming-operations-workflow.test.ts

2 files changed: +64 -5 lines changed

tests/e2e/session-state-isolation-workflow.test.ts

Lines changed: 1 addition & 1 deletion
@@ -37,7 +37,7 @@ describe('Session State Isolation Workflow', () => {
     const sandbox = await getSharedSandbox();
     workerUrl = sandbox.workerUrl;
     sandboxId = sandbox.sandboxId;
-    baseHeaders = createTestHeaders(createUniqueSession());
+    baseHeaders = createTestHeaders(sandboxId, createUniqueSession());
 
     // Initialize the sandbox
     await fetch(`${workerUrl}/api/execute`, {
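
The only change in this file threads the shared sandboxId into the header helper alongside the unique session id. As a hypothetical sketch of what the updated signature implies (the diff confirms only the argument order; the header names below are assumptions, not taken from the repository):

// Hypothetical sketch of the updated test helper. The real createTestHeaders
// lives in the repo's e2e utilities and its header names may differ.
function createTestHeaders(sandboxId: string, sessionId: string): Record<string, string> {
  return {
    'Content-Type': 'application/json',
    // Assumed header names -- the diff only shows that sandboxId is now
    // passed in addition to the unique session id.
    'X-Sandbox-Id': sandboxId,
    'X-Session-Id': sessionId
  };
}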

tests/e2e/streaming-operations-workflow.test.ts

Lines changed: 63 additions & 4 deletions
@@ -9,15 +9,14 @@ import type { ExecEvent } from '@repo/shared';
 /**
  * Streaming Operations Edge Case Tests
  *
- * Tests error handling and long-running edge cases for streaming.
+ * Tests error handling and edge cases for streaming.
  * Basic streaming tests are in comprehensive-workflow.test.ts.
  *
  * This file focuses on:
  * - Command failures with non-zero exit codes
  * - Nonexistent commands (exit code 127)
- * - Long-running commands (15s+, 60s+)
- * - High-volume streaming
- * - Intermittent output gaps
+ * - Chunked output delivery over time
+ * - File content streaming
  */
 describe('Streaming Operations Edge Cases', () => {
   let workerUrl: string;
@@ -136,4 +135,64 @@ describe('Streaming Operations Edge Cases', () => {
     expect(completeEvent).toBeDefined();
     expect(completeEvent?.exitCode).toBe(0);
   }, 15000);
+
+  test('should stream file contents', async () => {
+    // Create a test file first
+    const testPath = `/workspace/stream-test-${Date.now()}.txt`;
+    const testContent =
+      'Line 1\nLine 2\nLine 3\nThis is streaming file content.';
+
+    await fetch(`${workerUrl}/api/file/write`, {
+      method: 'POST',
+      headers,
+      body: JSON.stringify({ path: testPath, content: testContent })
+    });
+
+    // Stream the file back
+    const streamResponse = await fetch(`${workerUrl}/api/read/stream`, {
+      method: 'POST',
+      headers,
+      body: JSON.stringify({ path: testPath })
+    });
+
+    expect(streamResponse.status).toBe(200);
+    expect(streamResponse.headers.get('Content-Type')).toBe(
+      'text/event-stream'
+    );
+
+    // Collect streamed content
+    const reader = streamResponse.body?.getReader();
+    expect(reader).toBeDefined();
+
+    const decoder = new TextDecoder();
+    let rawContent = '';
+    while (true) {
+      const { done, value } = await reader!.read();
+      if (done) break;
+      rawContent += decoder.decode(value, { stream: true });
+    }
+
+    // Parse SSE JSON events
+    const lines = rawContent.split('\n').filter((l) => l.startsWith('data: '));
+    const events = lines.map((l) => JSON.parse(l.slice(6)));
+
+    // Should have metadata, chunk(s), and complete events
+    const metadata = events.find((e) => e.type === 'metadata');
+    const chunk = events.find((e) => e.type === 'chunk');
+    const complete = events.find((e) => e.type === 'complete');
+
+    expect(metadata).toBeDefined();
+    expect(metadata.mimeType).toBe('text/plain');
+    expect(chunk).toBeDefined();
+    expect(chunk.data).toBe(testContent);
+    expect(complete).toBeDefined();
+    expect(complete.bytesRead).toBe(testContent.length);
+
+    // Cleanup
+    await fetch(`${workerUrl}/api/file/delete`, {
+      method: 'POST',
+      headers,
+      body: JSON.stringify({ path: testPath })
+    });
+  }, 30000);
 });
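
The new test implies a simple wire contract for /api/read/stream: a text/event-stream whose `data:` lines carry JSON events of type metadata, chunk, and complete. Below is a minimal sketch of that contract and a reusable collector, inferred only from the assertions above; any event fields beyond those asserted here are not confirmed by the diff and are omitted, and the helper name is illustrative only.

// Event shapes inferred from the test's assertions (sketch, not the shared types).
type FileStreamEvent =
  | { type: 'metadata'; mimeType: string }
  | { type: 'chunk'; data: string }
  | { type: 'complete'; bytesRead: number };

// Minimal SSE collector mirroring the read loop in the test; assumes each
// event arrives as a single `data: <json>` line terminated by '\n'.
async function collectFileStreamEvents(
  body: ReadableStream<Uint8Array>
): Promise<FileStreamEvent[]> {
  const reader = body.getReader();
  const decoder = new TextDecoder();
  let raw = '';
  while (true) {
    const { done, value } = await reader.read();
    if (done || value === undefined) break;
    raw += decoder.decode(value, { stream: true });
  }
  return raw
    .split('\n')
    .filter((line) => line.startsWith('data: '))
    .map((line) => JSON.parse(line.slice(6)) as FileStreamEvent);
}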

0 commit comments
