feat(tests): Add comprehensive tests for Docker image export and streaming functionality
Binary file not shown.

test-stream.js (new file, 40 lines added)
@@ -0,0 +1,40 @@
+const { SmartRequest } = require('@push.rocks/smartrequest');
+
+async function test() {
+  try {
+    const response = await SmartRequest.create()
+      .url('http://unix:/run/user/1000/docker.sock:/images/hello-world:latest/get')
+      .header('Host', 'docker.sock')
+      .get();
+
+    console.log('Response status:', response.status);
+    console.log('Response type:', typeof response);
+
+    const stream = response.streamNode();
+    console.log('Stream type:', typeof stream);
+    console.log('Has on method:', typeof stream.on);
+
+    if (stream) {
+      let chunks = 0;
+      stream.on('data', (chunk) => {
+        chunks++;
+        if (chunks <= 3) console.log('Got chunk', chunks, chunk.length);
+      });
+      stream.on('end', () => {
+        console.log('Stream ended, total chunks:', chunks);
+        process.exit(0);
+      });
+      stream.on('error', (err) => {
+        console.error('Stream error:', err);
+        process.exit(1);
+      });
+    } else {
+      console.log('No stream available');
+    }
+  } catch (error) {
+    console.error('Error:', error);
+    process.exit(1);
+  }
+}
+
+test();
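
The script above probes smartrequest's unix-socket URL form (http://unix:<socket-path>:<request-path>) against the Docker Engine image-export endpoint and checks that streamNode() yields a usable Node.js Readable. A minimal follow-on sketch, assuming the same rootless socket path, shows the more typical use of that stream: piping the exported image tarball to disk. The exportHelloWorld helper and the hello-world.tar output path are illustrative, not part of this commit.

// Sketch only: same request as test-stream.js, but piping the exported tarball
// to a file. Assumes the rootless Docker socket at /run/user/1000/docker.sock;
// a system daemon usually listens at /var/run/docker.sock instead.
import { createWriteStream } from 'fs';
import { pipeline } from 'stream/promises';
import { SmartRequest } from '@push.rocks/smartrequest';

async function exportHelloWorld() {
  const response = await SmartRequest.create()
    .url('http://unix:/run/user/1000/docker.sock:/images/hello-world:latest/get')
    .header('Host', 'docker.sock')
    .get();

  // streamNode() returns a Node.js Readable, as exercised by the test above.
  const tarStream = response.streamNode();
  await pipeline(tarStream, createWriteStream('./hello-world.tar'));
  console.log('Exported image written to ./hello-world.tar');
}

exportHelloWorld().catch((err) => {
  console.error(err);
  process.exit(1);
});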
test-stream.mjs (new file, 46 lines added)
@@ -0,0 +1,46 @@
+import { SmartRequest } from '@push.rocks/smartrequest';
+
+async function test() {
+  try {
+    const response = await SmartRequest.create()
+      .url('http://unix:/run/user/1000/docker.sock:/images/hello-world:latest/get')
+      .header('Host', 'docker.sock')
+      .get();
+
+    console.log('Response status:', response.status);
+    console.log('Response type:', typeof response);
+
+    const stream = response.streamNode();
+    console.log('Stream type:', typeof stream);
+    console.log('Has on method:', typeof stream.on);
+
+    if (stream) {
+      let chunks = 0;
+      stream.on('data', (chunk) => {
+        chunks++;
+        if (chunks <= 3) console.log('Got chunk', chunks, chunk.length);
+      });
+      stream.on('end', () => {
+        console.log('Stream ended, total chunks:', chunks);
+        process.exit(0);
+      });
+      stream.on('error', (err) => {
+        console.error('Stream error:', err);
+        process.exit(1);
+      });
+
+      // Set a timeout in case stream doesn't end
+      setTimeout(() => {
+        console.log('Timeout after 5 seconds');
+        process.exit(1);
+      }, 5000);
+    } else {
+      console.log('No stream available');
+    }
+  } catch (error) {
+    console.error('Error:', error);
+    process.exit(1);
+  }
+}
+
+test();
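
test-stream.mjs is the ESM twin of the CommonJS script above; its one addition is a 5-second timeout so a stream that never emits 'end' fails the run instead of hanging. A hedged alternative for the same guard, using standard Node utilities instead of process.exit() inside callbacks (drainWithDeadline is a made-up name for illustration):

// Sketch: drain the stream with a hard deadline instead of exit() in callbacks.
// `stream` is assumed to be the Node.js Readable returned by response.streamNode().
import { once } from 'events';
import { setTimeout as sleep } from 'timers/promises';
import type { Readable } from 'stream';

async function drainWithDeadline(stream: Readable, ms = 5000): Promise<number> {
  let chunks = 0;
  stream.on('data', () => chunks++);
  const outcome = await Promise.race([
    once(stream, 'end').then(() => 'ended' as const),
    sleep(ms).then(() => 'timeout' as const),
  ]);
  if (outcome === 'timeout') {
    throw new Error(`Stream did not end within ${ms}ms`);
  }
  return chunks;
}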
@@ -139,17 +139,17 @@ tap.test('should export images', async (toolsArg) => {
   await done.promise;
 });
 
-tap.test('should import images', async (toolsArg) => {
-  const done = toolsArg.defer();
+tap.test('should import images', async () => {
   const fsReadStream = plugins.smartfile.fsStream.createReadStream(
     plugins.path.join(paths.nogitDir, 'testimage.tar')
   );
-  await docker.DockerImage.createFromTarStream(testDockerHost, {
+  const importedImage = await docker.DockerImage.createFromTarStream(testDockerHost, {
     tarStream: fsReadStream,
     creationObject: {
       imageUrl: 'code.foss.global/host.today/ht-docker-node:latest',
     }
-  })
+  });
+  expect(importedImage).toBeInstanceOf(docker.DockerImage);
 });
 
 tap.test('should expose a working DockerImageStore', async () => {
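
The rewritten import test now keeps the value returned by createFromTarStream and asserts that a DockerImage instance comes back. Combined with the export path this commit touches, the round trip looks roughly like the sketch below; docker, testDockerHost, and the imageUrl mirror the test file, while existingImage and the tar path are illustrative stand-ins.

// Round-trip sketch: export an image to a tarball, then import it again.
import { createReadStream, createWriteStream } from 'fs';
import { pipeline } from 'stream/promises';

async function roundTrip(existingImage: docker.DockerImage) {
  const tarPath = './.nogit/testimage.tar'; // illustrative path

  // Export: exportToTarStream() resolves to a Node.js Readable of the image tar.
  const exportStream = await existingImage.exportToTarStream();
  await pipeline(exportStream, createWriteStream(tarPath));

  // Import: feed the tarball back and tag it under the given image URL.
  const importedImage = await docker.DockerImage.createFromTarStream(testDockerHost, {
    tarStream: createReadStream(tarPath),
    creationObject: {
      imageUrl: 'code.foss.global/host.today/ht-docker-node:latest',
    },
  });
  return importedImage; // a DockerImage instance, as the new assertion expects
}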
@@ -262,12 +262,19 @@ export class DockerHost {
     // Parse the response body based on content type
     let body;
     const contentType = response.headers['content-type'] || '';
-    if (contentType.includes('application/json')) {
+
+    // Docker's streaming endpoints (like /images/create) return newline-delimited JSON
+    // which can't be parsed as a single JSON object
+    const isStreamingEndpoint = routeArg.includes('/images/create') ||
+                                routeArg.includes('/images/load') ||
+                                routeArg.includes('/build');
+
+    if (contentType.includes('application/json') && !isStreamingEndpoint) {
       body = await response.json();
     } else {
       body = await response.text();
-      // Try to parse as JSON if it looks like JSON
-      if (body && (body.startsWith('{') || body.startsWith('['))) {
+      // Try to parse as JSON if it looks like JSON and is not a streaming response
+      if (!isStreamingEndpoint && body && (body.startsWith('{') || body.startsWith('['))) {
         try {
           body = JSON.parse(body);
         } catch {

@@ -299,7 +306,8 @@ export class DockerHost {
       .header('Content-Type', 'application/json')
       .header('X-Registry-Auth', this.registryToken)
       .header('Host', 'docker.sock')
-      .options({ keepAlive: false });
+      .timeout(600000) // Set 10 minute timeout for streaming operations
+      .options({ keepAlive: false, autoDrain: false }); // Disable auto-drain for streaming
 
     // If we have a readStream, use the new stream method with logging
     if (readStream) {
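
The first DockerHost hunk stops the response parser from attempting a single JSON.parse on the bodies of Docker's streaming endpoints (/images/create, /images/load, /build), which emit newline-delimited JSON: one progress object per line. The second hunk raises the request timeout to ten minutes and disables auto-drain so the body stays consumable as a stream. A sketch of how a caller might read such a progress stream line by line, assuming the body is available as a Node.js Readable (the function name is illustrative):

// Parse Docker's newline-delimited JSON progress output one record at a time.
import { createInterface } from 'readline';
import type { Readable } from 'stream';

async function readDockerProgress(body: Readable): Promise<void> {
  const lines = createInterface({ input: body, crlfDelay: Infinity });
  for await (const line of lines) {
    if (!line.trim()) continue;
    // Each line is a standalone JSON object, e.g.
    // {"status":"Downloading","progressDetail":{...},"id":"..."}
    const event = JSON.parse(line);
    console.log(event.status ?? event.stream ?? line);
  }
}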
@@ -250,6 +250,12 @@ export class DockerImage {
   public async exportToTarStream(): Promise<plugins.smartstream.stream.Readable> {
     logger.log('info', `Exporting image ${this.RepoTags[0]} to tar stream.`);
     const response = await this.dockerHost.requestStreaming('GET', `/images/${encodeURIComponent(this.RepoTags[0])}/get`);
+
+    // Check if response is a Node.js stream
+    if (!response || typeof response.on !== 'function') {
+      throw new Error('Failed to get streaming response for image export');
+    }
+
     let counter = 0;
     const webduplexStream = new plugins.smartstream.SmartDuplex({
       writeFunction: async (chunk, tools) => {

@@ -259,17 +265,25 @@ export class DockerImage {
         return chunk;
       }
     });
+
     response.on('data', (chunk) => {
       if (!webduplexStream.write(chunk)) {
         response.pause();
         webduplexStream.once('drain', () => {
           response.resume();
-        })
-      };
         });
+      }
+    });
+
     response.on('end', () => {
       webduplexStream.end();
-    })
+    });
+
+    response.on('error', (error) => {
+      logger.log('error', `Error during image export: ${error.message}`);
+      webduplexStream.destroy(error);
+    });
+
     return webduplexStream;
   }
 }
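
The second DockerImage hunk repairs the manual backpressure wiring (pause the source when write() returns false, resume on 'drain') and now propagates source errors into the duplex via destroy(error). For comparison, an alternative sketch (not what the committed code does) lets Node's stream.pipeline manage backpressure and error propagation in one call:

// Alternative sketch: rely on stream.pipeline for backpressure and error handling.
import { pipeline } from 'stream';
import type { Readable, Writable } from 'stream';

function forwardToDuplex(response: Readable, webduplexStream: Writable): void {
  pipeline(response, webduplexStream, (err) => {
    if (err) {
      // Mirrors the logging added in the hunk above.
      console.error(`Error during image export: ${err.message}`);
    }
  });
}

Both forms apply backpressure; pipeline additionally ends the destination on source 'end' and destroys it on error, which is what the explicit handlers in the commit do by hand.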
@@ -89,6 +89,11 @@ export class DockerService {
     }> = [];
 
     for (const network of serviceCreationDescriptor.networks) {
+      // Skip null networks (can happen if network creation fails)
+      if (!network) {
+        logger.log('warn', 'Skipping null network in service creation');
+        continue;
+      }
       networkArray.push({
         Target: network.Name,
         Aliases: [serviceCreationDescriptor.networkAlias],
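
The DockerService guard skips null entries in serviceCreationDescriptor.networks so a failed network creation does not crash service creation, logging a warning per skipped entry. A more declarative equivalent (a sketch, not the committed code) filters up front, at the cost of the per-entry warning:

// Alternative sketch: drop null/undefined networks before building networkArray.
const usableNetworks = serviceCreationDescriptor.networks.filter((network) => !!network);
for (const network of usableNetworks) {
  networkArray.push({
    Target: network.Name,
    Aliases: [serviceCreationDescriptor.networkAlias],
    // ...remaining network options as in the original loop
  });
}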