
How do you implement multipart (chunked) upload in Node.js? The following article introduces a way to implement chunked uploads with Node.js. I hope it will be helpful to you!

How to implement multipart upload in Node.js? Method introduction

Uploading a large file takes a long time, and the upload may fail partway through. Solving this requires the front end and the back end to cooperate.

Solution steps:

  • Split the file into chunks to reduce the time each request takes; if a request fails, only that chunk needs to be re-uploaded instead of starting over

  • Notify the server to merge the chunks once they have all been uploaded

  • Control the number of concurrent requests to avoid exhausting browser memory

  • When a request fails because of the network or other reasons, resend it

File chunking and merging

In JavaScript, the File object is a subclass of Blob, which provides an important method, slice(), that lets us split a binary file into chunks.
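As a minimal sketch of the slice() call itself (assuming a file input like the one in the full example below), slice(start, end) returns a new Blob covering that byte range; the 50KB chunk size used throughout this article is just an illustrative value:

    // slice(start, end) returns a new Blob containing that byte range of the file
    const file = document.getElementById('fileInput').files[0];
    const firstChunk = file.slice(0, 1024 * 50); // the first 50KB of the file
    console.log(firstChunk instanceof Blob);     // true

The complete page and upload logic look like this: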

<!DOCTYPE html>
<html>
<head>
    <meta charset="UTF-8">
    <meta name="viewport" content="width=device-width, initial-scale=1.0">
    <title>Document</title>
    <!-- axios is used below for the upload requests -->
    <script src="https://unpkg.com/axios/dist/axios.min.js"></script>
</head>
<body>
    <input type="file" id="fileInput">
    <button onclick="SliceUpload()">Upload</button>
    <script>
        async function SliceUpload() {
            const file = document.getElementById('fileInput').files[0]
            if (!file) return

            // Split the file into chunks
            let size = 1024 * 50; // 50KB chunk size
            let fileChunks = [];
            let index = 0;        // chunk number
            for (let cur = 0; cur < file.size; cur += size) {
                fileChunks.push({
                    hash: index++,
                    chunk: file.slice(cur, cur + size),
                });
            }

            // Upload the chunks
            const uploadList = fileChunks.map((item, index) => {
                let formData = new FormData();
                formData.append("filename", file.name);
                formData.append("hash", item.hash);
                formData.append("chunk", item.chunk);
                return axios({
                    method: "post",
                    url: "/upload",
                    data: formData,
                });
            });
            await Promise.all(uploadList);

            // All chunks are uploaded; notify the server to merge them
            await axios({
                method: "get",
                url: "/merge",
                params: {
                    filename: file.name,
                },
            });
            console.log("Upload complete");
        }
    </script>
</body>
</html>

Concurrency Control

If the file is large, slicing it this way produces many chunks, and the browser would fire a large number of requests in a short time, which can exhaust memory. So we need concurrency control.

Here we use the Promise.race() method to cap the number of in-flight requests and avoid exhausting browser memory.

// Add concurrency control
async function SliceUpload() {
    const file = document.getElementById('fileInput').files[0]
    if (!file) return

    // Split the file into chunks
    let size = 1024 * 50; // 50KB chunk size
    let fileChunks = [];
    let index = 0;        // chunk number
    for (let cur = 0; cur < file.size; cur += size) {
        fileChunks.push({
            hash: index++,
            chunk: file.slice(cur, cur + size),
        });
    }

    let pool = [];  // concurrency pool
    let max = 3;    // maximum number of concurrent requests
    for (let i = 0; i < fileChunks.length; i++) {
        let item = fileChunks[i];
        let formData = new FormData();
        formData.append("filename", file.name);
        formData.append("hash", item.hash);
        formData.append("chunk", item.chunk);

        let task = axios({
            method: "post",
            url: "/upload",
            data: formData,
        }).then(() => {
            // Remove the finished request from the concurrency pool
            let index = pool.findIndex((t) => t === task);
            pool.splice(index, 1);
        });

        // Put the request into the pool; once the pool is full,
        // wait for the fastest request to finish before continuing
        pool.push(task);
        if (pool.length === max) {
            await Promise.race(pool);
        }
    }

    // All chunks are uploaded; notify the server to merge them
    await axios({
        method: "get",
        url: "/merge",
        params: {
            filename: file.name,
        },
    });
    console.log("Upload complete");
}

Make code reusable

async function SliceUpload() {
    const file = document.getElementById('fileInput').files[0]
    if (!file) return

    // Split the file into chunks
    let size = 1024 * 50; // chunk size
    let fileChunks = [];
    let index = 0;        // chunk number
    for (let cur = 0; cur < file.size; cur += size) {
        fileChunks.push({
            hash: index++,
            chunk: file.slice(cur, cur + size),
        });
    }

    // Upload a list of chunks with concurrency control and retry of failed chunks
    const uploadFileChunks = async function (list) {
        if (list.length === 0) {
            // All chunks are uploaded; notify the server to merge them
            await axios({
                method: 'get',
                url: '/merge',
                params: { filename: file.name }
            });
            console.log('Upload complete')
            return
        }

        let pool = []      // concurrency pool
        let max = 3        // maximum number of concurrent requests
        let finish = 0     // number of settled requests
        let failList = []  // failed chunks, re-uploaded on the next round
        for (let i = 0; i < list.length; i++) {
            let item = list[i]
            let formData = new FormData()
            formData.append('filename', file.name)
            formData.append('hash', item.hash)
            formData.append('chunk', item.chunk)
            let task = axios({
                method: 'post',
                url: '/upload',
                data: formData
            }).then(() => {
                // Remove the finished request from the concurrency pool
                let index = pool.findIndex(t => t === task)
                pool.splice(index, 1)
            }).catch(() => {
                failList.push(item)
            }).finally(() => {
                finish++
                // Once every request has settled, re-upload the failed chunks
                if (finish === list.length) {
                    uploadFileChunks(failList)
                }
            })
            pool.push(task)
            if (pool.length === max) {
                await Promise.race(pool)
            }
        }
    }

    uploadFileChunks(fileChunks)
}

Server interface implementation

const express = require('express')
const multiparty = require('multiparty')
const fs = require('fs')
const path = require('path')
const { Buffer } = require('buffer')
// Directory where merged files are stored
const STATIC_FILES = path.join(__dirname, './static/files')
// Temporary path to upload files
const STATIC_TEMPORARY = path.join(__dirname, './static/temporary')
const server = express()
// Static file hosting
server.use(express.static(path.join(__dirname, './dist')))
// Interface for uploading slices
server.post('/upload', (req, res) => {
    const form = new multiparty.Form();
    form.parse(req, function(err, fields, files) {
        let filename = fields.filename[0]
        let hash = fields.hash[0]
        let chunk = files.chunk[0]
        let dir = `${STATIC_TEMPORARY}/${filename}`
        // console.log(filename, hash, chunk)
        try {
            if (!fs.existsSync(dir)) fs.mkdirSync(dir, { recursive: true })
            const buffer = fs.readFileSync(chunk.path)
            // Write the chunk into the temporary directory, named by its index
            const ws = fs.createWriteStream(`${dir}/${hash}`)
            ws.end(buffer)
            res.send(`${filename}-${hash} chunk uploaded successfully`)
        } catch (error) {
            console.error(error)
            res.status(500).send(`${filename}-${hash} chunk upload failed`)
        }
    })
})

// Interface for merging the chunks
server.get('/merge', async (req, res) => {
    const { filename } = req.query
    try {
        let len = 0
        // Chunks are named 0, 1, 2, ... so they can be read back in index order
        const bufferList = fs.readdirSync(`${STATIC_TEMPORARY}/${filename}`).map((hash, index) => {
            const buffer = fs.readFileSync(`${STATIC_TEMPORARY}/${filename}/${index}`)
            len += buffer.length
            return buffer
        });
        // Merge the chunks into a single file
        const buffer = Buffer.concat(bufferList, len);
        const ws = fs.createWriteStream(`${STATIC_FILES}/${filename}`)
        ws.end(buffer);
        res.send(`Chunk merge completed`);
    } catch (error) {
        console.error(error);
        res.status(500).send(`Chunk merge failed`);
    }
})

server.listen(3000, _ => {
    console.log('http://localhost:3000/')
})
