How to Implement Efficient Large File Uploads with Chunking in Vue and Node.js

This guide explains how to overcome large‑file upload limits by splitting files into chunks with Blob.slice, uploading them concurrently from a Vue front‑end, and merging the pieces on a Node.js back‑end using streams, while providing progress tracking and handling Nginx size restrictions.


In the Qilin big‑data system, uploading large files often hits Nginx’s default 1 MB limit, causing timeouts and poor user experience.

Simply increasing client_max_body_size only moves the ceiling and raises per-request server load, so a chunked upload strategy is needed instead.
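For reference, the limit in question is a single Nginx directive; the fragment below is only a sketch of where it lives, not our actual deployment config:

http {
    server {
        # Default is 1m; larger request bodies are rejected with HTTP 413.
        # Raising this globally is exactly what chunked uploads let us avoid.
        client_max_body_size 1m;
    }
}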

Front‑end implementation

The front‑end uses Vue and Element‑Plus. Files are sliced with Blob.prototype.slice into 10 MB chunks; each chunk is wrapped in FormData and sent through a request helper whose onUploadProgress callback (backed by XMLHttpRequest) reports progress.

function uploadRequest(params) {
  const file = params.file;
  const form = new FormData();
  form.append('file', file);
  const config = {
    headers: { 'Content-Type': 'multipart/form-data' },
    // Attach a percent field so the UI can render per-file progress.
    onUploadProgress: e => {
      if (e.total > 0) {
        e.percent = (e.loaded / e.total) * 100;
      }
      params.onProgress(e);
    },
    timeout: 0 // disable the client-side timeout for long-running uploads
  };
  request({ url: 'XXXX', method: 'post', data: form, ...config }) // endpoint elided in the original
    .then(res => params.onSuccess(res))
    .catch(err => params.onError(err));
}
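The request helper itself is not shown in the original article. A minimal axios-based sketch that matches the object-style calls used throughout (the wrapper shape is an assumption, not the original code):

import axios from 'axios';

// Hypothetical thin wrapper: POST by default, all other axios options pass through.
function request({ url, method = 'post', data, headers = {}, ...rest }) {
  return axios({ url, method, data, headers, ...rest });
}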

Vue component template:

<template>
  <div class="upload-container">
    <input type="file" @change="handleFileChange" />
    <el-button @click="handleUpload">upload</el-button>
  </div>
</template>

<script setup>
import { ref } from 'vue';

const container = ref({ file: null });
// One entry per chunk: { chunk, hash, size, percentage }.
const data = ref([]);

function handleFileChange(e) {
  const [file] = e.target.files;
  if (!file) return;
  container.value.file = file;
}

async function handleUpload() {
  if (!container.value.file) return;
  const fileChunkList = createFileChunk(container.value.file);
  data.value = fileChunkList.map((c, i) => ({
    chunk: c.file, // the Blob slice itself, not its wrapper object
    hash: `${container.value.file.name}-${i}`, // filename plus chunk index
    size: c.file.size,
    percentage: 0
  }));
  await uploadChunks();
}
</script>

Chunk creation:

const SIZE = 10 * 1024 * 1024; // 10 MB
function createFileChunk(file, size = SIZE) {
  const chunks = [];
  let cur = 0;
  while (cur < file.size) {
    chunks.push({ file: file.slice(cur, cur + size) });
    cur += size;
  }
  return chunks;
}
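For instance, a 25 MB file yields three chunks; Blob.slice clamps the end offset, so the last chunk is simply the 5 MB remainder:

const chunks = createFileChunk(container.value.file); // 25 MB file assumed
console.log(chunks.length);        // 3
console.log(chunks[2].file.size);  // 5 * 1024 * 1024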

Uploading all chunks concurrently:

async function uploadChunks() {
  const requestList = data.value
    .map(({ chunk, hash }) => {
      const formData = new FormData();
      formData.append('chunk', chunk);
      formData.append('hash', hash);
      formData.append('filename', container.value.file.name);
      return { formData };
    })
    .map(({ formData }) =>
      request({ url: 'http://localhost:8888', data: formData })
    );
  await Promise.all(requestList); // all chunk uploads run concurrently
  await mergeRequest(); // only ask the server to merge once every chunk is in
}

After all chunks are sent, a merge request is issued (the call at the end of uploadChunks). It carries the chunk size so the server can compute each chunk's byte offset when reassembling:

async function mergeRequest() {
  await request({
    url: 'http://localhost:8888/mergeFile',
    headers: { 'content-type': 'application/json' },
    data: JSON.stringify({ size: SIZE, filename: container.value.file.name })
  });
}

Back‑end implementation (Node.js)

A simple HTTP server handles CORS, receives chunk uploads with multiparty, stores them in a temporary directory, and merges them when the client calls /mergeFile.

const http = require('http');
const path = require('path');
const fse = require('fs-extra');
const multiparty = require('multiparty');

const UPLOAD_DIR = path.resolve(__dirname, '..', 'target'); // chunks and merged files live here

const server = http.createServer(async (req, res) => {
  res.setHeader('Access-Control-Allow-Origin', '*');
  res.setHeader('Access-Control-Allow-Headers', '*');
  if (req.method === 'OPTIONS') {
    res.statusCode = 200; // answer the CORS preflight with the headers above
    res.end();
    return;
  }

  if (req.url === '/mergeFile') {
    const data = await resolvePost(req);
    const { filename, size } = data;
    const filePath = path.resolve(UPLOAD_DIR, `${filename}`);
    await mergeFileChunk(filePath, filename, size);
    res.end(JSON.stringify({ code: 0, message: 'file merged success' }));
    return;
  }

  // Anything else is treated as a chunk upload.
  const form = new multiparty.Form();
  form.parse(req, async (err, fields, files) => {
    if (err) {
      res.statusCode = 500;
      res.end('process file chunk failed');
      return;
    }
    const [chunk] = files.chunk;
    const [hash] = fields.hash;
    const [filename] = fields.filename;
    // One temporary directory per file keeps its chunks together.
    const chunkDir = path.resolve(UPLOAD_DIR, 'chunkDir' + filename);
    if (!fse.existsSync(chunkDir)) await fse.mkdirs(chunkDir);
    // multiparty already wrote the chunk to a temp file; just move it into place.
    await fse.move(chunk.path, `${chunkDir}/${hash}`);
    res.end('received file chunks');
  });
});

function resolvePost(req) {
  return new Promise(resolve => {
    let body = '';
    req.on('data', data => (body += data));
    req.on('end', () => resolve(JSON.parse(body)));
  });
}

async function mergeFileChunk(filePath, filename, size) {
  const chunkDir = path.resolve(UPLOAD_DIR, 'chunkDir' + filename);
  const chunkPaths = await fse.readdir(chunkDir);
  // Sort by the trailing chunk index; the filename itself may contain '-'.
  chunkPaths.sort((a, b) => a.split('-').pop() - b.split('-').pop());
  await Promise.all(
    chunkPaths.map((p, i) =>
      pipeStream(
        path.resolve(chunkDir, p),
        // Each chunk gets a write stream opened at its own byte offset,
        // so all chunks can be written in parallel.
        fse.createWriteStream(filePath, { start: i * size })
      )
    )
  );
  fse.rmdirSync(chunkDir); // the chunk files were deleted as each stream finished
}

function pipeStream(src, writeStream) {
  return new Promise(resolve => {
    const readStream = fse.createReadStream(src);
    readStream.on('end', () => {
      fse.unlinkSync(src); // remove the chunk file once its bytes are copied
      resolve();
    });
    readStream.pipe(writeStream);
  });
}

server.listen(8888, () => console.log('listening port 8888'));
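To sanity-check the merge endpoint on its own, one can replay the same JSON body the front-end sends (Node 18+ global fetch assumed; the filename here is illustrative):

const res = await fetch('http://localhost:8888/mergeFile', {
  method: 'POST',
  headers: { 'content-type': 'application/json' },
  body: JSON.stringify({ filename: 'demo.mp4', size: 10 * 1024 * 1024 })
});
console.log(await res.json()); // { code: 0, message: 'file merged success' }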

Progress tracking

The front‑end computes the overall percentage from each chunk's size-weighted progress (each entry in data carries the size and percentage fields set up in handleUpload):

import { computed } from 'vue';

const uploadPercentage = computed(() => {
  if (!container.value.file || !data.value.length) return 0;
  // Weight each chunk's progress by its size so large chunks count proportionally.
  const loaded = data.value
    .map(item => item.size * item.percentage)
    .reduce((acc, cur) => acc + cur, 0);
  return parseInt((loaded / container.value.file.size).toFixed(2));
});
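Each item's percentage has to be fed by that chunk's upload progress events. A minimal sketch of such a handler (the name is illustrative; wire it into each chunk request's onUploadProgress):

// Hypothetical helper: returns a progress callback bound to one chunk entry.
function createProgressHandler(item) {
  return e => {
    if (e.total > 0) {
      item.percentage = parseInt(String((e.loaded / e.total) * 100));
    }
  };
}

Passing createProgressHandler(data.value[index]) as the onUploadProgress option when issuing each chunk request keeps the computed percentage live as bytes go out.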

After implementing chunked uploading, the UI shows a smooth progress bar and dramatically reduces waiting time for users.

Summary

Front‑end slices large files with Blob.prototype.slice and uploads chunks concurrently.

Back‑end receives chunks, stores them, and merges them into the final file using streams.

Upload progress is tracked per chunk and aggregated with a Vue computed property.

Chunked uploading avoids Nginx size limits and improves user experience.
