Javascript Promises with FileReader()

前端 未结 6 1800
滥情空心
滥情空心 2020-12-05 00:25

I have the following HTML Code:


And here's my JS code:

var inputFiles = document.getElementsByTagName("input")[0];


        
相关标签:
6条回答
  • 2020-12-05 01:03

    I upgraded Jens Lincke's answer by adding a working example and introducing async/await syntax

    function readFile(file) {
      // Promise adapter for FileReader: resolves with the file's data: URL.
      return new Promise((resolve, reject) => {
        let fr = new FileReader();
        fr.onload = () => resolve(fr.result);
        // Without an error handler a failed read leaves the promise pending forever.
        fr.onerror = () => reject(fr.error);
        fr.readAsDataURL(file) // or readAsText(file) to get raw content
    })}
    

    function readFile(file) {
      // Promise adapter for FileReader: resolves with the file's data: URL.
      return new Promise((resolve, reject) => {
        let fr = new FileReader();
        fr.onload = () => resolve(fr.result);
        // Without an error handler a failed read leaves the promise pending forever.
        fr.onerror = () => reject(fr.error);
        fr.readAsDataURL(file) // or readAsText(file) to get raw content
    })}
    
    // Read every selected file in order, appending its name and contents
    // to the #msg element as each read completes.
    async function load(e) {
      let index = 0;
      for (const file of e.target.files) {
        msg.innerHTML += `<h1>File ${index}: ${file.name}</h1>`;
        const pre = document.createElement("pre");
        pre.innerText += await readFile(file);
        msg.appendChild(pre);
        index += 1;
      }
    }
    <input type="file" onchange="load(event)" multiple />
    <div id="msg"></div>

    0 讨论(0)
  • 2020-12-05 01:11

    We modified Mido's answer to get it to work as follows:

    function readFile(file){
      // Promise wrapper around FileReader.readAsText.
      // NOTE(review): this reads `file.blob`, not `file` itself — the caller's
      // objects apparently wrap the Blob; confirm against the call site.
      return new Promise((resolve, reject) => {
        var fr = new FileReader();
        fr.onload = () => {
          resolve(fr.result )
        };
        // Reject on read failure so the promise always settles.
        fr.onerror = () => reject(fr.error);
        fr.readAsText(file.blob);
      });
    }
    
    0 讨论(0)
  • 2020-12-05 01:21

    If you want to do it sequentially( not synchronously) using Promises, you could do something like:

    var inputFiles = document.getElementsByTagName("input")[0];
    inputFiles.onchange = function(){
      // FileList has no .map(), so spread it into a real array first.
      // Build one chain where each read starts only after the previous one
      // finishes; the original attached every .then to the same base promise,
      // so the reads ran unordered and "all done" fired before any completed.
      var done = [...inputFiles.files].reduce(
        (chain, file) => chain.then(() => pFileReader(file)),
        Promise.resolve()
      );
      done.then(() => console.log('all done...'));
    }
    
    function pFileReader(file){
      // Promise adapter for FileReader.readAsDataURL.
      return new Promise((resolve, reject) => {
        var fr = new FileReader();  
        fr.onload = resolve;  // CHANGE to whatever function you want which would eventually call resolve
        // Reject on failure so a bad read doesn't hang the chain forever.
        fr.onerror = () => reject(fr.error);
        fr.readAsDataURL(file);
      });
    }
    
    0 讨论(0)
  • 2020-12-05 01:25

    Here is another modification to Jens' answer (piggybacking off Mido's answer) to additionally check the file size:

    function readFileBase64(file, max_size){
        // Read `file` as a data: URL, rejecting when it exceeds `max_size` (MB).
        // `const` keeps these local — the original leaked implicit globals
        // `max_size_bytes` and `data`.
        const max_size_bytes = max_size * 1048576;
        return new Promise((resolve, reject) => {
            if (file.size > max_size_bytes) {
                console.log("file is too big at " + (file.size / 1048576) + "MB");
                reject("file exceeds max size of " + max_size + "MB");
                return; // don't fall through and start a read anyway
            }
            const fr = new FileReader();
            fr.onload = () => {
                resolve(fr.result);
            };
            // Reject on read failure so the promise always settles.
            fr.onerror = () => reject(fr.error);
            fr.readAsDataURL(file);
        });
    }
    
    0 讨论(0)
  • 2020-12-05 01:27

    The nature of FileReader is that you cannot make its operation synchronous.

    I suspect you don't really need or want it to be synchronous, just that you want to get the resulting URLs correctly. The person suggesting using promises was probably right, but not because promises make the process synchronous (they don't), but because they provide standardized semantics for dealing with asynchronous operations (whether in parallel or in series):

    Using promises, you'd start with a promise wrapper for readAsDataURL (I'm using ES2015+ here, but you can convert it to ES5 with a promise library instead):

    function readAsDataURL(file) {
        // Promise adapter for FileReader.readAsDataURL: resolves with the
        // data: URL, rejects with the error event on failure.
        return new Promise((resolve, reject) => {
            const reader = new FileReader();
            reader.onerror = reject;
            reader.onload = () => {
                resolve(reader.result);
            };
            reader.readAsDataURL(file);
        });
    }
    

    Then you'd use the promise-based operations I describe in this answer to read those in parallel:

    // Start every read at once; resolves when all finish, rejects on the first failure.
    Promise.all([...inputFiles.files].map((file) => readAsDataURL(file)))
    .then(urls => {
        // ...use `urls` (an array) here...
    })
    .catch(error => {
        // ...handle/report error...
    });
    

    ...or in series:

    // Chain the reads so each starts only after the previous one resolves.
    [...inputFiles.files].reduce(
        (chain, file) => chain.then(() => readAsDataURL(file).then(url => {
            // ...use `url` here...
        })),
        Promise.resolve()
    )
    .catch(error => {
        // ...handle/report error...
    });
    

    Inside an ES2017 async function, you could use await. It doesn't change the parallel version much:

    // Inside an `async` function
    try {
        // Await all reads at once; the first rejection lands in the catch.
        const urls = await Promise.all(
            [...inputFiles.files].map((file) => readAsDataURL(file))
        );
    } catch (error) {
        // ...handle/report error...
    }
    

    ...but it makes the series version much simpler and clearer:

    // Inside an `async` function
    try {
        // FileList is iterable, so for...of reads the files one at a time.
        for (const file of inputFiles.files) {
            const url = await readAsDataURL(file);
            // ...use `url` here...
        }
    } catch (error) {
        // ...handle/report error...
    }
    

    Without promises, you'd do this by keeping track of how many outstanding operations you have so you know when you're done:

    var inputFiles = document.getElementsByTagName("input")[0];
    inputFiles.onchange = function () {
        var results = [];   // one data: URL per file, same index as in `files`
        var remaining = 0;  // reads still outstanding

        // Kick off every read up front. The onload callbacks can only run
        // after this synchronous loop has finished, so `remaining` already
        // holds the full count by the time the first one fires.
        [...inputFiles.files].forEach(function (file, index) {
            var fr = new FileReader();
            fr.onload = function () {
                results[index] = fr.result;
                --remaining;
                if (remaining == 0) {
                    // All requests are complete, you're done
                }
            };
            fr.readAsDataURL(file);
            ++remaining;
        });
    };
    

    Or if you want for some reason to read the files sequentially (but still asynchronously), you can do that by scheduling the next call only when the previous one is complete:

    var inputFiles = document.getElementsByTagName("input")[0];
    inputFiles.onchange = function(){
        var index = 0;

        // Guard the empty selection up front — the original assumed at
        // least one file and would have passed `undefined` to the reader.
        if (inputFiles.files.length > 0) {
            readNext();
        }

        // Read one file, then schedule the next read from its onload
        // callback, so the files are processed sequentially (still async).
        function readNext() {
            var file = inputFiles.files[index++];
            var fr = new FileReader();
            fr.onload = function() {
                // use fr.result here
                if (index < inputFiles.files.length) {
                    // More to do, start loading the next one
                    readNext();
                }
            }
            fr.readAsDataURL(file);
        }
    }
    
    0 讨论(0)
  • 2020-12-05 01:27

    Promisified FileReader

    /**
     * Promisified FileReader.
     * More info https://developer.mozilla.org/en-US/docs/Web/API/FileReader
     * Resolves with the reader itself — read `.result` on it.
     * @param {*} file
     * @param {*} method: readAsArrayBuffer, readAsBinaryString, readAsDataURL, readAsText
     */
    export const readFile = (file = {}, method = 'readAsText') => {
      const reader = new FileReader()
      return new Promise((resolve, reject) => {
        // Attach the handlers before starting the read, and reject with the
        // DOMException in reader.error rather than the raw error event.
        reader.onload = () => {
          resolve(reader)
        }
        reader.onerror = () => reject(reader.error)
        reader[method](file)
      })
    }
    

    Usage

    // Build a tiny in-memory file to demonstrate both read modes.
    const file = new File(["foo"], "foo.txt", {
      type: "text/plain",
    });

    // Text
    const textReader = await readFile(file)
    console.log(textReader.result)

    // DataURL
    const dataUrlReader = await readFile(file, 'readAsDataURL')
    console.log(dataUrlReader.result)
    
    0 讨论(0)
提交回复
热议问题