因此,我尝试将我之前在临时文件夹中获得的块中的文件合并起来,并且我正在使用管道,但这就是发生的情况,循环在第一次迭代中运行良好,但不执行对于迭代的其余部分。
这是代码
import { pipeline } from "stream/promises";
import { promisify } from "util";
import fs from "fs";
import path from "path";
//// Document Buffer construction function
async function combineChunks(
tempFolder,
numberedFiles,
outputPath,
combinedFileSize
) {
/*
the tempFolder is a string arg of where the chunks are
the numberedFiles is a string array of all the chunks sorted in order
the outputPath is where the constructed file is supposed to be saved
the combinedFileSize is the expected file size
*/
const writeStream = fs.createWriteStream(outputPath, { flags: "a" }); // Append mode
try {
for (const file of numberedFiles) {
const filePath = path.join(tempFolder, file);
// Validate file path to prevent directory traversal attacks
if (!filePath.startsWith(tempFolder)) {
throw new Error("Invalid file path detected"); // Reject file path if it's not within the expected directory
}
// Read the file chunk and write it to the output file
await pipeline(
fs.createReadStream(filePath), // Read stream for the chunk file
writeStream // Write stream for the output file
);
// Delete the chunk file after writing
await fs.promises.unlink(filePath);
}
// Close the write stream after all chunks are written
writeStream.end();
// Validate the combined file (file size and file type)
const stats = fs.statSync(outputPath);
const fileSize = stats.size;
const allowedDocumentExtensions =
/\.(jpg|jpeg|png|gif|bmp|svg|webp|txt|doc|docx|odt|xls|xlsx|ods|ppt|pptx|odp|pdf)$/i;
if (fileSize !== combinedFileSize) {
// Delete the file in the server
await fs.promises.unlink(outputPath);
throw new Error("File size mismatch");
}
if (!allowedDocumentExtensions.test(outputPath)) {
// Delete the file in the server
await fs.promises.unlink(outputPath);
throw new Error("Inappropriate file type detected");
}
return true;
} catch (error) {
console.error("Error combining files:", error);
return false;
}
}
现在正如我之前解释的,第一次迭代工作得很好,事实上,如果只有一个块(即 numberedFiles 只有一个字符串),则文档会按预期构建,当有大量块时,问题就会出现,等等有趣的是,它不会在控制台上抛出错误,因此调试起来有点困难 我尝试尽可能多地搜索,似乎无法找到到底出了什么问题的解决方案。 我将不胜感激提供的任何帮助!
我尝试在网上到处寻找解决方案,找不到,没有得到任何好的解决方案。
await pipeline( fs.createReadStream(filePath), // Read stream for the chunk file writeStream // Write stream for the output file );
这将关闭流,但您可以通过选项来阻止它
for (...) {
// ...
await pipeline(readStream, writeStream, { end: false })
// ...
}
// writeStream.close()
// EDIT: It appears that .close() is old way of doing and causes
// problem in some cases, nodejs team has plans to deprecate it in future.
// .end() or .destroy() should be used instead.
writeStream.end()
但我更喜欢使用
writeFile
而不是 pipeline
,因为它非常适合这种情况。
const writeFD = await fs.promises.open(outputPath, 'a+') // opens a file and returns it's descriptor
for (const srcFile of srcFiles) {
const rs = fs.createReadStream(filePath)
await fs.promises.writeFile(writeFD, rs) // write to the descriptor
}
await writeFD.close() // close the descriptor