目前,除图像以外,已经成功解析了 multipart/form-data。
正在使用的技术:
带有Azure Function 2.0的NodeJS(尝试使用二进制和流数据类型)
Sendgrid入站解析器(https://sendgrid.com/docs/for-developers/parsing-email/setting-up-the-inbound-parse-webhook/#example-default-payload)
Azure Blob
我能够使用 Buffer 解析出图像,得到如下格式的附件:
{ filename: 'cb.jpg',
name: 'cb.jpg',
type: 'image/jpeg',
content:
<Buffer 0d 0a ff d8 ff e0 00 10 4a 46 49 46 00 01 01 01 00 48 00 48 00 00 ff e2 0c 58 49 43 43 5f 50 52 4f 46 49 4c 45 00 01 01 00 00 0c 48 4c 69 6e 6f 02 10 ... > },
我尝试像这样把缓冲区上传到 Azure Blob:
const blockBlobURL = BlockBlobURL.fromContainerURL(containerURL, recordId + path.sep + attachment.filename);
var bufferStream = new stream.PassThrough();
bufferStream.end(Buffer.from(attachment.content.toString('base64')));
const aborter = Aborter.timeout(NINE_MINUTES);
return uploadStreamToBlockBlob(aborter, bufferStream, blockBlobURL, EIGHT_MEGABYTES, 5);
我试过带与不带 toString/base64 的各种组合,但都没有成功。文件确实上传了,但当我在存储 Blob 中查看该文件时,内容格式已经损坏。请问我缺少了哪个转换步骤?
根据我的测试,我们可以使用以下代码:
const fs = require('fs')
const {
Aborter,
BlobURL,
BlockBlobURL,
ContainerURL,
ServiceURL,
StorageURL,
SharedKeyCredential,
uploadStreamToBlockBlob,
} = require('@azure/storage-blob');
const stream = require('stream');
// Transfer tuning: stream in 4 MB chunks with up to 20 buffers in flight.
const ONE_MEGABYTE = 1024 * 1024;
const uploadOptions = { bufferSize: 4 * ONE_MEGABYTE, maxBuffers: 20 };
// Abort the whole operation if it has not completed within 30 minutes.
const ONE_MINUTE = 60 * 1000;
const aborter = Aborter.timeout(30 * ONE_MINUTE);
// Storage account settings — fill in your own account key before running.
const accountname = "blobstorage0516";
const key = "";
const containerName = "test";
const sharedKeyCredential = new SharedKeyCredential(
    accountname,
    key);
const pipeline = StorageURL.newPipeline(sharedKeyCredential);
const serviceURL = new ServiceURL(
    `https://${accountname}.blob.core.windows.net`,
    pipeline
);
/**
 * Uploads D:\test.jpg to the "test.jpg" block blob, then downloads it back
 * to D:\test1.jpg to verify the round-trip.
 *
 * The download is wrapped in an awaited Promise so upload() does not resolve
 * (and the caller does not log success) before the file has been written,
 * and so a stream error rejects instead of being thrown from a floating
 * event handler.
 *
 * @returns {Promise<void>} resolves once the downloaded copy is on disk
 */
async function upload() {
    console.log("----")
    // readFileSync with no encoding returns a raw binary Buffer — do NOT
    // re-encode it (e.g. via base64), which would corrupt the image.
    const imageBuffer = fs.readFileSync("D:\\test.jpg");
    // --- upload ---
    const containerURL = ContainerURL.fromServiceURL(serviceURL, containerName);
    const blobURL = BlobURL.fromContainerURL(containerURL, "test.jpg");
    const blockBlobURL = BlockBlobURL.fromBlobURL(blobURL);
    const bufferStream = new stream.PassThrough();
    bufferStream.end(imageBuffer);
    await uploadStreamToBlockBlob(aborter, bufferStream,
        blockBlobURL, uploadOptions.bufferSize, uploadOptions.maxBuffers);
    // --- download (round-trip check) ---
    const baseLineImage = await blockBlobURL.download(aborter, 0);
    const readableStream = baseLineImage.readableStreamBody;
    // Collect every chunk and concatenate at the end; a single read() on a
    // PassThrough may return only the currently-buffered portion.
    await new Promise((resolve, reject) => {
        const chunks = [];
        readableStream.on("data", (data) => {
            chunks.push(data);
        });
        readableStream.on("end", () => {
            fs.writeFileSync('D:\\test1.jpg', Buffer.concat(chunks));
            console.log("download successfully");
            resolve();
        });
        readableStream.on("error", reject);
    });
}
// Kick off the sample; report success or the failure message.
(async () => {
    try {
        await upload();
        console.log("Successfully executed sample.");
    } catch (err) {
        console.log(err.message);
    }
})();