I searched for a good way to save images directly to Google Cloud Storage and finally found one. The approach below streams the image entirely in memory — no local copy is ever written to disk — and uploads it straight to the desired bucket. The npm packages needed are axios, sharp, and, of course, @google-cloud/storage.
/**
 * Streams an image from `imageUrl`, resizes it to 300px wide and converts it
 * to JPEG with sharp, then pipes the result straight into a Google Cloud
 * Storage object — no temporary file is written locally.
 *
 * @param {string} imageUrl   - URL of the source image.
 * @param {string} folderName - Destination "folder" (object-name prefix) in the bucket.
 * @param {string} imageName  - Destination object file name, e.g. 'image1.jpg'.
 * @returns {Promise<boolean>} Resolves with `true` when the upload finishes;
 *                             rejects with an Error on any stream or transfer failure.
 */
const processImage = (imageUrl, folderName, imageName) => {
  return new Promise((resolve, reject) => {
    // Ask axios for a readable stream of the image instead of a buffer.
    axios({
      method: 'get',
      url: imageUrl,
      responseType: 'stream'
    })
      .then((response) => {
        // Image manipulation pipeline: resize to 300px wide, re-encode as JPEG.
        const transformer = sharp()
          .resize(300)
          .jpeg();
        // `const` added — the original assignment leaked an implicit global.
        const gcFile = storage.bucket(bucketName).file(folderName + '/' + imageName);
        const gcStream = gcFile.createWriteStream({
          resumable: false,
          validation: false,
          public: true,
          contentType: "auto",
          metadata: {
            'Cache-Control': 'public, max-age=31536000'
          }
        });
        // Attach an error handler to EVERY stream in the pipeline. `.pipe()`
        // does not forward errors, so the original (handler on the last pipe
        // only) left the promise hanging when the download or the sharp
        // transform failed.
        response.data.on('error', reject);
        transformer.on('error', reject);
        gcStream
          .on('error', reject)
          // 'finish' on the GCS write stream means the upload completed.
          .on('finish', () => {
            resolve(true);
          });
        response.data.pipe(transformer).pipe(gcStream);
      })
      .catch((err) => {
        // `reject` takes a single reason — the original passed two arguments
        // and silently discarded `err`. Wrap it so the cause is preserved.
        reject(new Error('Image transfer error: ' + err.message, { cause: err }));
      });
  });
};
Usage example:
// Resize and upload a remote image, then report the outcome.
(async () => {
  try {
    const res = await processImage(myUrl, myFolder, 'image1.jpg');
    console.log("Complete.", res);
  } catch (err) {
    console.log("Error", err);
  }
})();