Koa's core does not include file-upload functionality; it must be added through middleware. The most commonly used file-upload middleware packages are koa-body and koa-multer, both of which provide powerful file upload handling.
1. Using koa-body for file upload:
Install:
npm install koa-body
Basic configuration:
// koa-body parses multipart/form-data requests for Koa; formidable does
// the actual file parsing under the hood.
const koaBody = require('koa-body');

// Upload options, extracted into a named constant for readability.
const uploadOptions = {
  multipart: true, // Enable file upload
  formidable: {
    maxFileSize: 100 * 1024 * 1024, // Max file size 100MB
    keepExtensions: true,           // Keep file extension
    uploadDir: './uploads',         // Upload directory
    multiples: true                 // Support multiple file upload
  }
};

app.use(koaBody(uploadOptions));
Single file upload:
// Single-file upload handler for koa-body.
// Responds 400 when no file was uploaded; otherwise echoes file metadata.
app.use(async (ctx) => {
  // FIX: guard ctx.request.files itself — it is undefined when the request
  // carries no multipart body, and the original code threw a TypeError
  // instead of the intended 400 response.
  const files = ctx.request.files;
  const file = files && files.file;
  if (!file) {
    ctx.throw(400, 'No file uploaded');
  }

  // Metadata recorded by formidable (v1 field names: name/path/type).
  const fileInfo = {
    name: file.name,
    size: file.size,
    path: file.path,
    type: file.type,
    lastModifiedDate: file.lastModifiedDate
  };

  ctx.body = { message: 'File uploaded successfully', file: fileInfo };
});
Multiple file upload:
// Multiple-file upload handler for koa-body.
// Accepts one or many files under the "files" field and echoes their metadata.
app.use(async (ctx) => {
  // FIX: guard ctx.request.files itself — it is undefined when the request
  // carries no multipart body, and the original code threw a TypeError
  // instead of the intended 400 response.
  const uploaded = ctx.request.files;
  const files = uploaded && uploaded.files;
  if (!files) {
    ctx.throw(400, 'No files uploaded');
  }

  // formidable yields a single object for one file, an array for several;
  // normalize to an array so both cases are handled uniformly.
  const fileList = Array.isArray(files) ? files : [files];
  const uploadedFiles = fileList.map(file => ({
    name: file.name,
    size: file.size,
    path: file.path,
    type: file.type
  }));

  ctx.body = {
    message: `${uploadedFiles.length} files uploaded`,
    files: uploadedFiles
  };
});
2. Using koa-multer for file upload (note: koa-multer is deprecated; new projects should use its maintained successor @koa/multer, which has the same API):
Install:
npm install koa-multer
Basic configuration:
const multer = require('koa-multer');
// FIX: the original snippet called path.extname without requiring "path".
const path = require('path');

// Disk storage configuration: controls where files land and what they
// are named on disk.
const storage = multer.diskStorage({
  destination: function (req, file, cb) {
    cb(null, './uploads/');
  },
  filename: function (req, file, cb) {
    // Timestamp + random suffix avoids collisions between files that
    // share the same original name. (Math.random is fine here — this is
    // a uniqueness aid, not a security token.)
    const uniqueSuffix = Date.now() + '-' + Math.round(Math.random() * 1E9);
    cb(null, file.fieldname + '-' + uniqueSuffix + path.extname(file.originalname));
  }
});

const upload = multer({
  storage: storage,
  limits: {
    fileSize: 100 * 1024 * 1024 // 100MB
  },
  fileFilter: function (req, file, cb) {
    // File type filtering: only accept common image MIME types.
    const allowedTypes = ['image/jpeg', 'image/png', 'image/gif'];
    if (allowedTypes.includes(file.mimetype)) {
      cb(null, true);
    } else {
      cb(new Error('Invalid file type'), false);
    }
  }
});
Single file upload:
// Single-file upload via multer; the parsed file lives on the raw Node
// request (ctx.req), not on ctx.request.
app.use(upload.single('file'));

app.use(async (ctx) => {
  const uploadedFile = ctx.req.file;

  // Echo back the metadata multer recorded on disk.
  const fileSummary = {
    originalname: uploadedFile.originalname,
    filename: uploadedFile.filename,
    path: uploadedFile.path,
    size: uploadedFile.size,
    mimetype: uploadedFile.mimetype
  };

  ctx.body = { message: 'File uploaded successfully', file: fileSummary };
});
Multiple file upload:
// Accept up to 10 files under the "files" field.
app.use(upload.array('files', 10));

app.use(async (ctx) => {
  const uploadedFiles = ctx.req.files;

  // Summarize each stored file for the response payload.
  const summaries = uploadedFiles.map(f => ({
    originalname: f.originalname,
    filename: f.filename,
    path: f.path,
    size: f.size,
    mimetype: f.mimetype
  }));

  ctx.body = {
    message: `${uploadedFiles.length} files uploaded`,
    files: summaries
  };
});
Mixed upload (files + fields):
// Mixed upload: named file fields plus ordinary form fields.
app.use(upload.fields([
  { name: 'avatar', maxCount: 1 },
  { name: 'documents', maxCount: 5 }
]));

app.use(async (ctx) => {
  // FIX: multer omits a field from ctx.req.files entirely when the client
  // did not submit it, so the original `files.avatar[0]` threw a TypeError
  // for requests without an avatar. Guard each field before indexing.
  const files = ctx.req.files || {};
  const body = ctx.req.body;

  ctx.body = {
    message: 'Files uploaded successfully',
    avatar: files.avatar ? files.avatar[0] : null,
    documents: files.documents || [],
    data: body
  };
});
3. File upload security measures:
const path = require('path');
const fs = require('fs');

// koa-body configured with server-side limits plus a formidable filter
// that rejects disallowed MIME types during parsing.
app.use(koaBody({
  multipart: true,
  formidable: {
    maxFileSize: 10 * 1024 * 1024, // Limit file size
    keepExtensions: true,
    uploadDir: './uploads',
    filter: function ({ name, originalFilename, mimetype }) {
      // File type validation
      const allowedTypes = [
        'image/jpeg',
        'image/png',
        'image/gif',
        'application/pdf'
      ];
      return allowedTypes.includes(mimetype);
    }
  }
}));

// Defense-in-depth validation middleware: re-checks size, MIME type and
// extension after parsing (client-supplied MIME types can lie, so the
// extension check is an additional — still not authoritative — signal).
async function validateFile(ctx, next) {
  // FIX: guard ctx.request.files itself — it is undefined when the request
  // carries no multipart body, and the original code threw a TypeError
  // instead of the intended 400 response.
  const files = ctx.request.files;
  const file = files && files.file;
  if (!file) {
    ctx.throw(400, 'No file uploaded');
  }

  // Validate file size; remove the already-written temp file on failure
  // so rejected uploads don't accumulate on disk.
  const maxSize = 10 * 1024 * 1024; // 10MB
  if (file.size > maxSize) {
    fs.unlinkSync(file.path);
    ctx.throw(400, 'File size exceeds limit');
  }

  // Validate file type (as reported by the client).
  const allowedTypes = ['image/jpeg', 'image/png', 'image/gif'];
  if (!allowedTypes.includes(file.type)) {
    fs.unlinkSync(file.path);
    ctx.throw(400, 'Invalid file type');
  }

  // Validate file extension.
  const ext = path.extname(file.name).toLowerCase();
  const allowedExts = ['.jpg', '.jpeg', '.png', '.gif'];
  if (!allowedExts.includes(ext)) {
    fs.unlinkSync(file.path);
    ctx.throw(400, 'Invalid file extension');
  }

  await next();
}

app.use(validateFile);
4. Image processing:
Use sharp library to process uploaded images.
npm install sharp
const sharp = require('sharp');

// Generates a 200x200 cover-cropped thumbnail and a quality-80 JPEG copy
// of an uploaded image.
app.use(async (ctx) => {
  const file = ctx.request.files.file;
  if (!file) {
    ctx.throw(400, 'No file uploaded');
  }

  // Generate thumbnail (keeps the source format, so keeping the original
  // extension is correct here).
  const thumbnailPath = file.path.replace(/(\.[\w\d]+)$/, '_thumb$1');
  await sharp(file.path)
    .resize(200, 200, { fit: 'cover', position: 'center' })
    .toFile(thumbnailPath);

  // Compress image.
  // FIX: .jpeg() re-encodes the output as JPEG regardless of input format,
  // but the original code kept the source extension (e.g. "photo.png" ->
  // "photo_compressed.png" containing JPEG data). Name the output .jpg so
  // the extension matches the actual encoding.
  const compressedPath = file.path.replace(/(\.[\w\d]+)$/, '_compressed.jpg');
  await sharp(file.path)
    .jpeg({ quality: 80 })
    .toFile(compressedPath);

  ctx.body = {
    message: 'Image processed successfully',
    original: file.path,
    thumbnail: thumbnailPath,
    compressed: compressedPath
  };
});
5. Chunked upload:
For large files, implement chunked upload functionality.
const fs = require('fs');
const path = require('path');

// Chunked upload endpoint: each request carries one chunk plus its index,
// the total chunk count, and a client-chosen fileId; when all chunks have
// arrived they are merged in order into the final file.
// NOTE(review): fileId comes from the client and is used to build paths —
// it should be validated (e.g. reject "..", path separators) to prevent
// path traversal before this goes to production.
app.use(async (ctx) => {
  const { chunkIndex, totalChunks, fileId } = ctx.request.body;
  const file = ctx.request.files.chunk;

  const chunkDir = path.join('./uploads', fileId);
  const chunkPath = path.join(chunkDir, `chunk_${chunkIndex}`);

  // Create chunk directory
  if (!fs.existsSync(chunkDir)) {
    fs.mkdirSync(chunkDir, { recursive: true });
  }

  // Save chunk.
  // FIX: the original used reader.pipe(writer) without awaiting completion,
  // so the chunk count below could run before the chunk finished writing —
  // a race that could trigger the merge on truncated data. copyFileSync
  // completes before execution continues.
  fs.copyFileSync(file.path, chunkPath);

  // Check if all chunks are uploaded
  const total = parseInt(totalChunks, 10);
  const uploadedChunks = fs.readdirSync(chunkDir).length;

  if (uploadedChunks === total) {
    // Merge chunks in index order so the reassembled file is correct
    // regardless of upload order.
    const finalPath = path.join('./uploads', `${fileId}${path.extname(file.name)}`);
    const writeStream = fs.createWriteStream(finalPath);
    for (let i = 0; i < total; i++) {
      const partPath = path.join(chunkDir, `chunk_${i}`);
      writeStream.write(fs.readFileSync(partPath));
      fs.unlinkSync(partPath);
    }
    writeStream.end();
    fs.rmdirSync(chunkDir);

    ctx.body = { message: 'File upload completed', path: finalPath };
  } else {
    ctx.body = {
      message: `Chunk ${chunkIndex} uploaded`,
      progress: `${uploadedChunks}/${totalChunks}`
    };
  }
});
6. File upload best practices:
-
Security measures:
- Limit file size
- Validate file type
- Validate file extension
- Use random file names
- Store in non-web accessible directories
-
Performance optimization:
- Use streaming for large files
- Implement chunked upload
- Use CDN for file storage
- Process files asynchronously
-
User experience:
- Provide upload progress
- Support resumable upload
- Show upload status
- Provide preview functionality
-
Error handling:
- Catch upload errors
- Clean up failed files
- Provide friendly error messages
- Log upload events