Read chunked binary response with Fetch API


Question


How do I read a binary chunked response using the Fetch API? I'm using the following code, which works insofar as it reads the chunked response from the server. However, the data seems to be encoded/decoded somehow, causing getFloat32 to sometimes fail. I've tried reading the response with curl, and that works fine, which leads me to believe I need to do something to make the Fetch API treat the chunks as binary. The Content-Type of the response is properly set to "application/octet-stream".

const consume = responseReader => {
    return responseReader.read().then(result => {
        if (result.done) { return; }
        const dv = new DataView(result.value.buffer, 0, result.value.byteLength);
        dv.getFloat32(i, true);  // <-- sometimes this is garbled up
        return consume(responseReader);
    });
}

fetch('/binary').then(response => {
    return consume(response.body.getReader());
})
.catch(console.error);

Use the following Express server to reproduce it. Note that any client-side JS code that can handle the server below is fine.

const express = require('express');
const app = express();

app.get('/binary', function (req, res) {
  res.header("Content-Type", "application/octet-stream");
  res.header('Content-Transfer-Encoding', 'binary');
  const repro = new Uint32Array([0x417055b8, 0x4177d16f, 0x4179e9da, 0x418660e1, 0x41770476, 0x4183e05e]);
  setInterval(function () {
    res.write(Buffer.from(repro.buffer), 'binary');
  }, 2000);
});

app.listen(3000, () => console.log('Listening on port 3000!'));

Using the above Node server, -13614102509256704 is logged to the console, but it should be ~16.48. How can I retrieve the original binary floats that were written?


Answer 1:


As you pointed out, your issue was:

The getFloat32 function takes a byte offset, as clearly documented.
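For reference, a minimal sketch of that fix (my own illustration, not code from the question or the answer below): pass a byte offset to getFloat32 and step through the DataView in 4-byte increments. It assumes each chunk happens to contain whole 32-bit floats, as the repro server sends.

const consume = responseReader => {
    return responseReader.read().then(result => {
        if (result.done) { return; }
        // result.value is a Uint8Array; wrap exactly the bytes it covers
        const dv = new DataView(result.value.buffer, result.value.byteOffset, result.value.byteLength);
        // getFloat32 takes a *byte* offset, so advance 4 bytes per float
        for (let offset = 0; offset + 4 <= dv.byteLength; offset += 4) {
            console.log(dv.getFloat32(offset, true)); // prints the original floats (~15.0 to ~16.8 for the repro data)
        }
        return consume(responseReader);
    });
};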

But there is another side to this as well, so I will add that here.

Fetch streams are not supported by default in all browsers (Chrome supports them natively, but Firefox did not at the time), so I updated the code to work with streams in both.

const express = require('express');
const app = express();

app.get('/', function (req, res) {
    res.send(`
    <html>
    <body>
        <h1>Chrome reader</h1>
        <button onclick="dothis()">Start reading</button>
        <script>
            function dothis() {
                var chunkedUrl = '/binary';
                fetch(chunkedUrl)
                    .then(processChunkedResponse)
                    .then(onChunkedResponseComplete)
                    .catch(onChunkedResponseError);

                function onChunkedResponseComplete(result) {
                    console.log('all done!', result);
                }

                function onChunkedResponseError(err) {
                    console.error(err);
                }

                function processChunkedResponse(response) {
                    var reader = response.body.getReader();

                    return readChunk();

                    function readChunk() {
                        return reader.read().then(appendChunks);
                    }

                    function appendChunks(result) {
                        if (result.done) {
                            console.log('returning');
                            return 'done';
                        }
                        var chunk = new Uint32Array(result.value.buffer);
                        console.log('got chunk of', chunk.length, 'elements');
                        console.log(chunk);
                        console.log('recursing');
                        return readChunk();
                    }
                }
            }
        </script>
    </body>
    </html>
    `);
});

app.get('/firefox', function (req, res) {
    res.send(`
    <html>
    <head>
        <script src="./fetch-readablestream.js"></script>
        <script src="./polyfill.js"></script>
    </head>
    <body>
        <h1>Firefox reader</h1>
        <button onclick="dothis()">Start reading</button>
        <script>
            function readAllChunks(readableStream) {
                const reader = readableStream.getReader();
                const chunks = [];

                function pump() {
                    return reader.read().then(({ value, done }) => {
                        if (done) {
                            console.log("completed");
                            return chunks;
                        }
                        try {
                            console.log(new Int32Array(value.buffer));
                        } catch (err) {
                            console.log("error occurred - " + err);
                        }
                        return pump();
                    });
                }

                return pump();
            }

            function dothis() {
                fetchStream('/binary', { stream: true })
                    .then(response => readAllChunks(response.body))
                    .then(chunks => console.dir(chunks))
                    .catch(err => console.log(err));
            }
        </script>
    </body>
    </html>
    `);
});

app.get('/binary', function (req, res) {
    res.header("Content-Type", "application/octet-stream");
    res.header('Content-Transfer-Encoding', 'binary');
    const repro = new Uint32Array([0x417055b8, 0x4177d16f, 0x4179e9da, 0x418660e1, 0x41770476, 0x4183e05e]);
    let i = 0;
    setTimeout(function abc() {
        res.write(Buffer.from(repro.buffer), 'binary');
        i++;
        if (i < 100) {
            setTimeout(abc, 100);
        } else {
            res.end();
        }
    }, 100)


    // I'm actually using spawn('command').pipe(res) here... So chunked response is required.
});
app.use(express.static('./node_modules/fetch-readablestream/dist/'))
app.use(express.static('./node_modules/web-streams-polyfill/dist/'))
app.listen(3000, () => console.log('Listening on port 3000!'));

And now it works in Firefox as well as in Chrome.
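
One caveat worth noting (my own addition, not part of the original answer): a single read() is not guaranteed to end on a 4-byte boundary, especially when the server pipes output from a child process, which is why the Firefox example above wraps the typed-array construction in a try/catch. A minimal sketch of carrying leftover bytes across reads:

function readFloats(readableStream, onFloat) {
    const reader = readableStream.getReader();
    let leftover = new Uint8Array(0); // tail bytes that did not form a complete float yet

    function pump() {
        return reader.read().then(({ value, done }) => {
            if (done) { return; }
            // Prepend leftover bytes from the previous read to the new chunk
            const bytes = new Uint8Array(leftover.length + value.length);
            bytes.set(leftover, 0);
            bytes.set(value, leftover.length);

            const whole = bytes.length - (bytes.length % 4); // bytes that form complete 32-bit floats
            const dv = new DataView(bytes.buffer, 0, whole);
            for (let offset = 0; offset < whole; offset += 4) {
                onFloat(dv.getFloat32(offset, true));
            }
            leftover = bytes.slice(whole); // keep the remainder for the next read
            return pump();
        });
    }

    return pump();
}

// Usage: fetch('/binary').then(res => readFloats(res.body, f => console.log(f)));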

You need to use

https://www.npmjs.com/package/fetch-readablestream

I also used the ReadableStream polyfill in Firefox:

https://www.npmjs.com/package/web-streams-polyfill

But you can enable native support for this by changing the Firefox profile preferences.
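
If you would rather not keep two separate pages, a feature-detection sketch (my own suggestion, not from the original answer) can pick native streams when they exist and fall back to fetchStream otherwise, assuming fetch-readablestream and the polyfill are loaded as above:

function fetchBinaryStream(url) {
    // Chrome exposes response.body natively; Firefox without the preference flipped does not
    if (typeof Response !== 'undefined' && 'body' in Response.prototype) {
        return fetch(url).then(response => response.body);
    }
    return fetchStream(url, { stream: true }).then(response => response.body);
}

// Usage: fetchBinaryStream('/binary').then(readAllChunks).then(chunks => console.dir(chunks));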

Added here so it may help you or someone else in the future.



Source: https://stackoverflow.com/questions/49828310/read-chunked-binary-response-with-fetch-api
