Handling large file uploads can be challenging, especially when dealing with files over 100MB. In this guide, we'll explore how to implement efficient file uploads in NestJS using Cloudinary, with a focus on handling large media files through chunked uploads and streams.
Setting Up the Project
bash
npm install @nestjs/platform-express cloudinary streamifier multer @types/multer @nestjs/websockets @nestjs/platform-socket.io socket.io
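The Cloudinary configuration in the next section reads its credentials from environment variables. As a sketch (values are placeholders; the variable names match the config file below), they can be exported in the shell or placed in a .env file:
bash
export CLOUDINARY_CLOUD_NAME=your-cloud-name
export CLOUDINARY_API_KEY=your-api-key
export CLOUDINARY_API_SECRET=your-api-secret
export CLOUDINARY_UPLOAD_PRESET=your-upload-preset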
Configuring Cloudinary Module
typescript
// cloudinary/cloudinary.config.ts
import { v2 as cloudinary } from 'cloudinary';

export const CloudinaryConfig = {
  cloud_name: process.env.CLOUDINARY_CLOUD_NAME,
  api_key: process.env.CLOUDINARY_API_KEY,
  api_secret: process.env.CLOUDINARY_API_SECRET,
  upload_preset: process.env.CLOUDINARY_UPLOAD_PRESET,
};

cloudinary.config(CloudinaryConfig);

// cloudinary/cloudinary.module.ts
import { Module } from '@nestjs/common';
import { CloudinaryService } from './cloudinary.service';

@Module({
  providers: [CloudinaryService],
  exports: [CloudinaryService],
})
export class CloudinaryModule {}
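For these pieces to be reachable, they have to be registered in the application's root module. A minimal sketch, assuming the default AppModule and the UploadModule defined later in this guide:
typescript
// app.module.ts (assumed root module)
import { Module } from '@nestjs/common';
import { UploadModule } from './upload/upload.module';

@Module({
  imports: [UploadModule],
})
export class AppModule {}
Since UploadModule already imports CloudinaryModule, nothing else needs to be registered here.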
Implementing the Upload Service
typescript
// cloudinary/cloudinary.service.ts
import { Injectable } from '@nestjs/common';
import { v2 as cloudinary } from 'cloudinary';
import { createReadStream } from 'fs';
import { Readable } from 'stream';
import './cloudinary.config'; // runs cloudinary.config() as a side effect

@Injectable()
export class CloudinaryService {
  async uploadLargeFile(file: Express.Multer.File, options: { folder: string }) {
    return new Promise((resolve, reject) => {
      const uploadStream = cloudinary.uploader.upload_stream(
        {
          folder: options.folder,
          resource_type: 'auto',
          chunk_size: 6000000, // 6MB chunks
          upload_preset: process.env.CLOUDINARY_UPLOAD_PRESET,
        },
        (error, result) => {
          if (error) return reject(error);
          resolve(result);
        },
      );

      // Stream from disk when Multer used diskStorage; otherwise fall back
      // to the in-memory buffer provided by memoryStorage.
      const source = file.path
        ? createReadStream(file.path)
        : Readable.from(file.buffer);
      source.pipe(uploadStream);
    });
  }
  async uploadLargeFileWithProgress(file: Express.Multer.File, options: {
    folder: string;
    onProgress?: (progress: number) => void;
  }) {
    const totalSize = file.size;
    let uploadedBytes = 0;

    return new Promise((resolve, reject) => {
      const uploadStream = cloudinary.uploader.upload_stream(
        {
          folder: options.folder,
          resource_type: 'auto',
          chunk_size: 6000000,
          upload_preset: process.env.CLOUDINARY_UPLOAD_PRESET,
        },
        (error, result) => {
          if (error) return reject(error);
          resolve(result);
        },
      );

      const source = file.path
        ? createReadStream(file.path)
        : Readable.from(file.buffer);

      // Track progress on the readable side: Cloudinary's upload stream is
      // writable, so it never emits 'data' events itself. This measures bytes
      // read locally, not bytes acknowledged by Cloudinary, and a single
      // in-memory buffer arrives as one chunk.
      source.on('data', (data: Buffer) => {
        uploadedBytes += data.length;
        const progress = Math.round((uploadedBytes / totalSize) * 100);
        options.onProgress?.(progress);
      });

      source.pipe(uploadStream);
    });
  }
  async uploadChunkedFile(chunks: Buffer[], options: { folder: string }) {
    const uploadPromises = chunks.map((chunk, index) => {
      return this.uploadChunk(chunk, {
        folder: options.folder,
        public_id: `chunk_${index}`,
      });
    });

    const uploadedChunks = await Promise.all(uploadPromises);
    return this.mergeChunks(uploadedChunks, options.folder);
  }
  private async uploadChunk(chunk: Buffer, options: { folder: string; public_id: string }) {
    return new Promise((resolve, reject) => {
      const uploadStream = cloudinary.uploader.upload_stream(
        {
          folder: options.folder,
          public_id: options.public_id,
          resource_type: 'raw',
        },
        (error, result) => {
          if (error) return reject(error);
          resolve(result);
        },
      );

      const stream = Readable.from(chunk);
      stream.pipe(uploadStream);
    });
  }
  private async mergeChunks(chunks: any[], folder: string) {
    // Implement chunk merging logic based on your requirements.
    // This could involve downloading chunks and concatenating them,
    // or using Cloudinary's API to merge videos/images.
    return chunks;
  }
}
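uploadChunkedFile expects the file to be split into buffers already. A minimal helper for that might look like the following; the splitIntoChunks name is an assumption for illustration, and the default size mirrors the 6MB chunk_size used above:
typescript
// Hypothetical helper: split a large buffer into fixed-size chunks
// before passing them to uploadChunkedFile.
export function splitIntoChunks(buffer: Buffer, chunkSize = 6000000): Buffer[] {
  const chunks: Buffer[] = [];
  for (let offset = 0; offset < buffer.length; offset += chunkSize) {
    chunks.push(buffer.subarray(offset, offset + chunkSize));
  }
  return chunks;
}
A caller would then pass splitIntoChunks(buffer) straight into uploadChunkedFile along with the target folder.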
Implementing the Upload Controller
typescript
// upload/upload.controller.ts
import { Controller, Post, UploadedFile, UseInterceptors, MaxFileSizeValidator, ParseFilePipe } from '@nestjs/common';
import { FileInterceptor } from '@nestjs/platform-express';
import { CloudinaryService } from '../cloudinary/cloudinary.service';

@Controller('upload')
export class UploadController {
  constructor(private readonly cloudinaryService: CloudinaryService) {}

  @Post('large-file')
  @UseInterceptors(FileInterceptor('file'))
  async uploadLargeFile(
    @UploadedFile(
      new ParseFilePipe({
        validators: [
          new MaxFileSizeValidator({ maxSize: 1024 * 1024 * 1024 }), // 1GB max
        ],
      }),
    )
    file: Express.Multer.File,
  ) {
    return this.cloudinaryService.uploadLargeFile(file, {
      folder: 'large-files',
    });
  }
  @Post('chunked')
  @UseInterceptors(FileInterceptor('chunk'))
  async uploadChunk(@UploadedFile() chunk: Express.Multer.File) {
    // Store the chunk temporarily; once all chunks have been received,
    // merge them and upload the complete file.
    return { message: 'Chunk received' };
  }

  @Post('complete-chunked')
  async completeChunkedUpload() {
    // Retrieve all stored chunks, merge them, then upload the complete file.
    return { message: 'Upload completed' };
  }
}
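The two chunked endpoints above are intentionally left as stubs. One way to back them is a small bookkeeping service; the ChunkStoreService below is a hypothetical, in-memory sketch (a production setup would more likely use Redis or disk so chunks survive restarts and multiple instances):
typescript
// upload/chunk-store.service.ts (hypothetical helper, in-memory only)
import { Injectable } from '@nestjs/common';

@Injectable()
export class ChunkStoreService {
  private readonly store = new Map<string, Buffer[]>();

  // Record a chunk at its index for a given upload id.
  addChunk(uploadId: string, index: number, chunk: Buffer) {
    const chunks = this.store.get(uploadId) ?? [];
    chunks[index] = chunk;
    this.store.set(uploadId, chunks);
  }

  // Hand back all chunks (and forget them) once the client signals completion.
  takeChunks(uploadId: string): Buffer[] {
    const chunks = this.store.get(uploadId) ?? [];
    this.store.delete(uploadId);
    return chunks;
  }
}
The chunked endpoint would call addChunk with an upload id and chunk index supplied by the client, and complete-chunked would pass takeChunks(uploadId) to uploadChunkedFile.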
Configuring Upload Middleware
typescript
// upload/upload.module.ts
import { Module } from '@nestjs/common';
import { MulterModule } from '@nestjs/platform-express';
import { diskStorage } from 'multer';
import { UploadController } from './upload.controller';
import { CloudinaryModule } from '../cloudinary/cloudinary.module';

@Module({
  imports: [
    MulterModule.register({
      // diskStorage keeps large uploads off the heap; the service streams
      // them to Cloudinary from file.path.
      storage: diskStorage({
        destination: './uploads',
        filename: (req, file, cb) => {
          const uniqueSuffix = Date.now() + '-' + Math.round(Math.random() * 1E9);
          cb(null, file.fieldname + '-' + uniqueSuffix);
        },
      }),
      limits: {
        fileSize: 1024 * 1024 * 1024, // 1GB
      },
    }),
    CloudinaryModule,
  ],
  controllers: [UploadController],
})
export class UploadModule {}
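Because diskStorage writes every incoming file to ./uploads, those temporary files should be removed once the Cloudinary upload has finished, whether it succeeded or failed. A minimal sketch (the cleanupTempFile name is an assumption; it could be called from a finally block in the controller):
typescript
// upload/cleanup.util.ts (hypothetical helper)
import { promises as fs } from 'fs';

export async function cleanupTempFile(path?: string) {
  if (!path) return; // memoryStorage uploads have no temp file on disk
  try {
    await fs.unlink(path);
  } catch {
    // Ignore errors such as the file already having been removed.
  }
}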
Implementing Progress Tracking
typescript
// upload/upload.gateway.ts
import { WebSocketGateway, WebSocketServer } from '@nestjs/websockets';
import { Server } from 'socket.io';

@WebSocketGateway()
export class UploadGateway {
  @WebSocketServer()
  server: Server;

  // Emit progress updates to the Socket.IO room named after the uploading user.
  handleProgress(userId: string, progress: number) {
    this.server.to(userId).emit('uploadProgress', { progress });
  }
}
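The gateway still needs to be connected to the upload service. A hypothetical controller variant that forwards the service's onProgress callback to the gateway could look like this (it assumes UploadGateway is registered as a provider in UploadModule and that the client has joined a Socket.IO room named after its user id):
typescript
// upload/upload-progress.controller.ts (hypothetical wiring example)
import { Controller, Param, Post, UploadedFile, UseInterceptors } from '@nestjs/common';
import { FileInterceptor } from '@nestjs/platform-express';
import { CloudinaryService } from '../cloudinary/cloudinary.service';
import { UploadGateway } from './upload.gateway';

@Controller('upload')
export class UploadProgressController {
  constructor(
    private readonly cloudinaryService: CloudinaryService,
    private readonly uploadGateway: UploadGateway,
  ) {}

  // Forward per-chunk progress from the service to the user's Socket.IO room.
  @Post('large-file/:userId')
  @UseInterceptors(FileInterceptor('file'))
  async uploadWithProgress(
    @Param('userId') userId: string,
    @UploadedFile() file: Express.Multer.File,
  ) {
    return this.cloudinaryService.uploadLargeFileWithProgress(file, {
      folder: 'large-files',
      onProgress: (progress) => this.uploadGateway.handleProgress(userId, progress),
    });
  }
}
The @nestjs/websockets, @nestjs/platform-socket.io, and socket.io packages added to the install command earlier are required for the gateway to run.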
Best Practices and Optimization
- Use chunked uploads for files larger than 100MB
- Implement proper error handling and cleanup
- Add upload progress tracking
- Use appropriate content-type validation
- Implement retry mechanisms for failed chunks (see the sketch after this list)
- Clean up temporary files after upload
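As an illustration of the retry point, a generic wrapper (hypothetical helper; the backoff values are arbitrary) could wrap each uploadChunk call:
typescript
// Hypothetical retry helper for failed chunk uploads.
export async function withRetries<T>(task: () => Promise<T>, attempts = 3): Promise<T> {
  let lastError: unknown;
  for (let attempt = 1; attempt <= attempts; attempt++) {
    try {
      return await task();
    } catch (error) {
      lastError = error;
      // Back off 1s, 2s, 4s, ... before the next attempt.
      await new Promise((resolve) => setTimeout(resolve, 2 ** (attempt - 1) * 1000));
    }
  }
  throw lastError;
}
Inside uploadChunkedFile, each call would then become withRetries(() => this.uploadChunk(chunk, options)).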
By following these patterns and implementing proper error handling, you can create a robust file upload system that handles large files efficiently while providing a great user experience.