AFrame: How to render a camera to a texture


Question


I'm trying to add a second camera that shows a "from the sky" view of an A-Frame scene. I've learned how to do it by rendering to a 2D canvas, following this example: https://wirewhiz.com/how-to-use-a-cameras-output-as-a-texture-in-aframe/

But I wonder whether this could be done without an external <div>, for example by rendering directly to some asset, or maybe directly to a texture...

My current code is:

<html>
  <head>
    <script src="//aframe.io/releases/0.8.2/aframe.min.js"></script>

    <script>
// Original code:
//   https://wirewhiz.com/how-to-use-a-cameras-output-as-a-texture-in-aframe/
//
AFRAME.registerComponent('view',{
    'schema': {
       canvas: {
            type: 'string',
            default: ''
       },
       // desired FPS
       fps: {
            type: 'number',
            default: 90.0
       }
    },
    'init': function() {
        var targetEl = document.querySelector(this.data.canvas);
        this.counter = 0;
        this.renderer = new THREE.WebGLRenderer( { antialias: true } );
        this.renderer.setPixelRatio( window.devicePixelRatio );
        this.renderer.setSize( targetEl.offsetWidth, targetEl.offsetHeight );
        // creates spectator canvas
        targetEl.appendChild(this.renderer.domElement);
        this.renderer.domElement.id = "canvas";
        this.renderer.domElement.crossOrigin = "anonymous";
        this.renderer.domElement.height=300;
        this.renderer.domElement.width=400;
        this.el.removeAttribute('look-controls');
        this.el.removeAttribute('wasd-controls');
        console.log(this.renderer.domElement);
        console.log(document.querySelector('a-scene'))
    },
    'tick': function(time, timeDelta) {
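        // Skip frames so the spectator view renders at roughly the desired FPS,
        // not on every headset frame.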
        var loopFPS = 1000.0 / timeDelta;
        var hmdIsXFasterThanDesiredFPS = loopFPS / this.data.fps;
        var renderEveryNthFrame = Math.round(hmdIsXFasterThanDesiredFPS);
        if(this.counter % renderEveryNthFrame === 0){
            this.render(timeDelta);
            }
        this.counter += 1;
    },
    'render': function(){
        this.renderer.render( this.el.sceneEl.object3D , this.el.object3DMap.camera );
    }
});
    </script>
  </head>
  <body>
    <a-scene physics="debug: true">

      <a-plane static-body position="0 0 -4" rotation="-90 0 0" width="30" height="40"
               color="yellow"></a-plane>

      <a-box color="red" position="0 2 0" depth="8" width="8"></a-box>

      <a-entity id="secondaryCamera" position="0 40 0" rotation="-90 0 0">
        <a-camera view="canvas:#spectatorDiv;" active="false">
        </a-camera>
      </a-entity>

      <a-entity position="0 0 10" look-controls>
        <a-entity camera position="0 1.6 0" wasd-controls>
          <a-entity geometry="primitive:plane; width:.2; height:.2" material="src:#canvas; opacity: .6"
                    position="0.2 -0.3 -0.7" rotation="0 -10 0"></a-entity>
          <a-cylinder radius="2" color="green"></a-cylinder>
        </a-entity>
      </a-entity>

    </a-scene>

    <div style="height:300px; width:400px;" id='spectatorDiv'></div>

  </body>
</html>

Answer 1:


Check out the screenshot component implementation. Render to a render target, and then do whatever you want with the pixel data (use it as a texture inside WebGL, copy to a canvas...)
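
For reference, a minimal sketch of that approach could look like the component below. This is not the screenshot component itself: the component name render-to-target, its schema, and the screen selector are made up for illustration, and it assumes a three.js build where renderer.setRenderTarget() selects the target (older builds, such as the one bundled with A-Frame 0.8.x, took the render target as a third argument to renderer.render()).

AFRAME.registerComponent('render-to-target', {
    schema: {
        // Entity whose material will receive the rendered texture (hypothetical name).
        screen: {type: 'selector'},
        width:  {type: 'number', default: 512},
        height: {type: 'number', default: 512}
    },
    init: function () {
        // Offscreen buffer the secondary camera renders into.
        this.renderTarget = new THREE.WebGLRenderTarget(this.data.width, this.data.height);
    },
    tick: function () {
        var sceneEl = this.el.sceneEl;
        var renderer = sceneEl.renderer;
        var camera = this.el.getObject3D('camera');
        var screenMesh = this.data.screen && this.data.screen.getObject3D('mesh');
        if (!renderer || !camera || !screenMesh) { return; }
        // Render the whole scene from this entity's camera into the offscreen target...
        renderer.setRenderTarget(this.renderTarget);
        renderer.render(sceneEl.object3D, camera);
        renderer.setRenderTarget(null);
        // ...and use the target's texture as the map of the receiving material.
        if (screenMesh.material.map !== this.renderTarget.texture) {
            screenMesh.material.map = this.renderTarget.texture;
            screenMesh.material.needsUpdate = true;
        }
    }
});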




Answer 2:


After some tinkering, and considering all the suggestions, I came up with the code below, which gives me what I wanted. In short, I built a component (camrender) that renders the camera's output to a canvas placed inside the a-assets element. This lets any material reference that canvas as its source (in the code below, see the plane attached to the main camera). To ensure the material gets updated whenever the rendering changes, you also need to add another component (canvas-updater) to the entity acting as the screen.

Therefore, the camera renderer can be referenced by any material in any component, with no extra hacks.

<html>
  <head>
    <script src="//aframe.io/releases/0.8.2/aframe.min.js"></script>
    <script>
AFRAME.registerComponent('camrender',{
    'schema': {
       // desired FPS
       fps: {
            type: 'number',
            default: 90.0
       },
       // Id of the canvas element used for rendering the camera
       cid: {
            type: 'string',
            default: 'camRenderer'
       },
       // Height of the renderer element
       height: {
            type: 'number',
            default: 300
       },
       // Width of the renderer element
       width: {
            type: 'number',
            default: 400
       }
    },
    'update': function(oldData) {
        var data = this.data
        if (oldData.cid !== data.cid) {
            // Find canvas element to be used for rendering
            var canvasEl = document.getElementById(this.data.cid);
            // Create renderer
            this.renderer = new THREE.WebGLRenderer({
                antialias: true,
                canvas: canvasEl
            });
            // Set properties for renderer DOM element
            this.renderer.setPixelRatio( window.devicePixelRatio );
            this.renderer.domElement.crossOrigin = "anonymous";
        }
        if (oldData.width !== data.width || oldData.height !== data.height) {
            // Set size of canvas renderer
            this.renderer.setSize(data.width, data.height);
            this.renderer.domElement.height = data.height;
            this.renderer.domElement.width = data.width;
        }
        if (oldData.fps !== data.fps) {
            // Set how often to call tick
            this.tick = AFRAME.utils.throttleTick(this.tick, 1000 / data.fps , this);
        }
    },
    'tick': function(time, timeDelta) {
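        // Render the whole scene from this entity's camera into the canvas given by cid.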
        this.renderer.render( this.el.sceneEl.object3D , this.el.object3DMap.camera );
    }
});

AFRAME.registerComponent('canvas-updater', {
    dependencies: ['geometry', 'material'],

    tick: function () {
        var el = this.el;
        var material;

        material = el.getObject3D('mesh').material;
        if (!material.map) { return; }
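        // Flag the canvas-backed texture as dirty so three.js re-uploads it every frame.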
        material.map.needsUpdate = true;
    }
});
</script>
  </head>
  <body>
    <a-scene>

      <a-assets>
          <canvas id="cam2"></canvas>
      </a-assets>
      <a-plane position="0 0 -4" rotation="-90 0 0" width="30" height="40"
               color="yellow"></a-plane>

      <a-box color="red" position="0 2 0" depth="8" width="16"></a-box>
      <a-box color="blue" position="0 2 6" depth="2" width="6"></a-box>

      <a-entity position="0 40 0" rotation="-90 0 0">
        <a-camera camrender="cid: cam2" active="false">
        </a-camera>
      </a-entity>

      <a-entity position="0 0 10" look-controls>
        <a-entity camera position="0 1.6 0" wasd-controls>
          <a-entity geometry="primitive:plane; width:.2; height:.2"
                    material="src:#cam2; opacity: .6" canvas-updater
                    position="0.2 -0.3 -0.7" rotation="0 -10 0"></a-entity>
          <a-cylinder radius="2" color="green"></a-cylinder>
        </a-entity>
      </a-entity>

    </a-scene>

  </body>
</html>

Notes:

  • Thanks to Diego Marcos, who put me on track.

  • Thanks to Piotr Adam Milewski, who suggested using AFRAME.utils.throttleTick, which simplifies the code a lot (a minimal comparison with the question's frame-counting approach is sketched after this list).

  • This version includes a fix for a problem found with the current master branch (it also works with 0.8.2).

  • I've created an npm package with these components: A-Frame Playground Components. You can just use it instead of the script above.
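
For comparison with the manual frame counting in the question, here is a minimal sketch of throttling a component's tick with AFRAME.utils.throttleTick (the component name is made up for illustration; wrapping the handler in init is equivalent to the wrapping done in update above):

AFRAME.registerComponent('throttled-tick-example', {
    init: function () {
        // Call the real tick body at most once every 100 ms (~10 FPS) instead of on every frame.
        this.tick = AFRAME.utils.throttleTick(this.tick, 100, this);
    },
    tick: function (time, timeDelta) {
        console.log('throttled tick at', time);
    }
});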



Source: https://stackoverflow.com/questions/53905525/aframe-how-to-render-a-camera-to-a-texture
