Android Camera will not work. startPreview fails

Posted by 老子叫甜甜 on 2019-11-26 03:25:15

Question


I'm getting these errors from LogCat:

10-30 00:31:51.494: D/CameraHal(1205): CameraHal setOverlay/1/00000000/00000000
10-30 00:31:51.494: E/CameraHal(1205): Trying to set overlay, but overlay is null!, line:3472
10-30 00:31:51.494: W/CameraService(1205): Overlay create failed - retrying
...
10-30 00:31:52.526: E/CameraService(1205): Overlay Creation Failed!
...
10-30 00:31:52.588: E/AndroidRuntime(5040): FATAL EXCEPTION: main
10-30 00:31:52.588: E/AndroidRuntime(5040): java.lang.RuntimeException: startPreview failed
10-30 00:31:52.588: E/AndroidRuntime(5040):     at android.hardware.Camera.startPreview(Native Method)
10-30 00:31:52.588: E/AndroidRuntime(5040):     at com.matthewmitchell.nightcam.CameraSurfaceView.surfaceCreated(CameraSurfaceView.java:47)
10-30 00:31:52.588: E/AndroidRuntime(5040):     at android.view.SurfaceView.updateWindow(SurfaceView.java:544)
10-30 00:31:52.588: E/AndroidRuntime(5040):     at android.view.SurfaceView.dispatchDraw(SurfaceView.java:341)
10-30 00:31:52.588: E/AndroidRuntime(5040):     at android.view.ViewGroup.drawChild(ViewGroup.java:1638)
10-30 00:31:52.588: E/AndroidRuntime(5040):     at android.view.ViewGroup.dispatchDraw(ViewGroup.java:1367)
10-30 00:31:52.588: E/AndroidRuntime(5040):     at android.view.View.draw(View.java:6743)
10-30 00:31:52.588: E/AndroidRuntime(5040):     at android.widget.FrameLayout.draw(FrameLayout.java:352)
10-30 00:31:52.588: E/AndroidRuntime(5040):     at android.view.ViewGroup.drawChild(ViewGroup.java:1640)
10-30 00:31:52.588: E/AndroidRuntime(5040):     at android.view.ViewGroup.dispatchDraw(ViewGroup.java:1367)
10-30 00:31:52.588: E/AndroidRuntime(5040):     at android.view.ViewGroup.drawChild(ViewGroup.java:1638)
10-30 00:31:52.588: E/AndroidRuntime(5040):     at android.view.ViewGroup.dispatchDraw(ViewGroup.java:1367)
10-30 00:31:52.588: E/AndroidRuntime(5040):     at android.view.View.draw(View.java:6743)
10-30 00:31:52.588: E/AndroidRuntime(5040):     at android.widget.FrameLayout.draw(FrameLayout.java:352)
10-30 00:31:52.588: E/AndroidRuntime(5040):     at com.android.internal.policy.impl.PhoneWindow$DecorView.draw(PhoneWindow.java:1876)
10-30 00:31:52.588: E/AndroidRuntime(5040):     at android.view.ViewRoot.draw(ViewRoot.java:1407)
10-30 00:31:52.588: E/AndroidRuntime(5040):     at android.view.ViewRoot.performTraversals(ViewRoot.java:1163)
10-30 00:31:52.588: E/AndroidRuntime(5040):     at android.view.ViewRoot.handleMessage(ViewRoot.java:1727)
10-30 00:31:52.588: E/AndroidRuntime(5040):     at android.os.Handler.dispatchMessage(Handler.java:99)
10-30 00:31:52.588: E/AndroidRuntime(5040):     at android.os.Looper.loop(Looper.java:123)
10-30 00:31:52.588: E/AndroidRuntime(5040):     at android.app.ActivityThread.main(ActivityThread.java:4627)
10-30 00:31:52.588: E/AndroidRuntime(5040):     at java.lang.reflect.Method.invokeNative(Native Method)
10-30 00:31:52.588: E/AndroidRuntime(5040):     at java.lang.reflect.Method.invoke(Method.java:521)
10-30 00:31:52.588: E/AndroidRuntime(5040):     at com.android.internal.os.ZygoteInit$MethodAndArgsCaller.run(ZygoteInit.java:868)
10-30 00:31:52.588: E/AndroidRuntime(5040):     at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:626)
10-30 00:31:52.588: E/AndroidRuntime(5040):     at dalvik.system.NativeStart.main(Native Method)

Here is the Activity class:

public class NightCamActivity extends Activity {
    private GLSurfaceView mGLView;
    CameraSurfaceView surface_view;

    @Override
    public void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        // Create a GLSurfaceView instance and set it
        // as the ContentView for this Activity
        Debug.out(\"Welcome\");
        surface_view = new CameraSurfaceView(this);
        mGLView = new MySurfaceView(this);
        setContentView(mGLView);
        addContentView(surface_view, new LayoutParams(LayoutParams.FILL_PARENT, LayoutParams.FILL_PARENT));
    }

    @Override
    protected void onPause() {
        super.onPause();
        // The following call pauses the rendering thread.
        // If your OpenGL application is memory intensive,
        // you should consider de-allocating objects that
        // consume significant memory here.
        mGLView.onPause();
    }

    @Override
    protected void onResume() {
        super.onResume();
        // The following call resumes a paused rendering thread.
        // If you de-allocated graphic objects for onPause()
        // this is a good place to re-allocate them.
        mGLView.onResume();
    }
}

MySurfaceView class:

class MySurfaceView extends GLSurfaceView{

    public MySurfaceView(NightCamActivity context){
        super(context);
        // Create an OpenGL ES 2.0 context.
        Debug.out(\"Mysurfaceview welcome\");
        setEGLContextClientVersion(2);
        // Set the Renderer for drawing on the GLSurfaceView
        MyRenderer renderer = new MyRenderer();
        renderer.takeContext(context);
        context.surface_view.renderer = renderer;
        setRenderer(renderer);
    }
}

CameraSurfaceView class:

public class CameraSurfaceView extends SurfaceView implements SurfaceHolder.Callback, PreviewCallback  {

    private Camera camera;
    Camera.Size use_size;
    MyRenderer renderer;

    public CameraSurfaceView(Context context) {
        super(context);
        SurfaceHolder holder = getHolder();
        holder.addCallback(this);
        Debug.out(\"Init CSV\");
        camera = Camera.open();
    }

    public void surfaceCreated(SurfaceHolder holder) {
        Debug.out(\"SC\");
        try {
            camera.setPreviewDisplay(holder);
        } catch (IOException e) {
            Debug.out(\"Could not set preview display for camera.\");
        }
        camera.setPreviewCallback(this);
    }

    public void surfaceDestroyed(SurfaceHolder holder) {
        // Surface will be destroyed when we return, so stop the preview.
        // Because the CameraDevice object is not a shared resource, it's very
        // important to release it when the activity is paused.
        try {
            if (camera != null) {
                camera.stopPreview();  
                camera.release();
            }
        } catch (Exception e) {
            Debug.out(\"Camera release failure.\");
        }
    }

    public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) {
        Camera.Parameters parameters = camera.getParameters();
        List<Camera.Size> supportedPreviewSizes = parameters.getSupportedPreviewSizes();
        Camera.Size optimalPreviewSize = getOptimalPreviewSize(supportedPreviewSizes, w, h);
        if (optimalPreviewSize != null) {
            parameters.setPreviewSize(optimalPreviewSize.width, optimalPreviewSize.height);
            camera.setParameters(parameters);
            camera.startPreview();
        }
    }
    static Camera.Size getOptimalPreviewSize(List<Camera.Size> sizes, int w, int h) {
        final double ASPECT_TOLERANCE = 0.1;
        final double MAX_DOWNSIZE = 1.5;

        double targetRatio = (double) w / h;
        if (sizes == null) return null;

        Camera.Size optimalSize = null;
        double minDiff = Double.MAX_VALUE;

        int targetHeight = h;

        // Try to find a size that matches both the target aspect ratio and size
        for (Camera.Size size : sizes) {
            double ratio = (double) size.width / size.height;
            double downsize = (double) size.width / w;
            if (downsize > MAX_DOWNSIZE) {
                //if the preview is a lot larger than our display surface ignore it
                //reason - on some phones there is not enough heap available to show the larger preview sizes 
                continue;
            }
            if (Math.abs(ratio - targetRatio) > ASPECT_TOLERANCE) continue;
            if (Math.abs(size.height - targetHeight) < minDiff) {
                optimalSize = size;
                minDiff = Math.abs(size.height - targetHeight);
            }
        }

        // Cannot find one that matches the aspect ratio; ignore that requirement
        // but keep the max_downsize requirement
        if (optimalSize == null) {
            minDiff = Double.MAX_VALUE;
            for (Camera.Size size : sizes) {
                double downsize = (double) size.width / w;
                if (downsize > MAX_DOWNSIZE) {
                    continue;
                }
                if (Math.abs(size.height - targetHeight) < minDiff) {
                    optimalSize = size;
                    minDiff = Math.abs(size.height - targetHeight);
                }
            }
        }
        //everything else failed, just take the closest match
        if (optimalSize == null) {
            minDiff = Double.MAX_VALUE;
            for (Camera.Size size : sizes) {
                if (Math.abs(size.height - targetHeight) < minDiff) {
                    optimalSize = size;
                    minDiff = Math.abs(size.height - targetHeight);
                }
            }
        }

        return optimalSize;
    }

    public void onPreviewFrame(byte[] data, Camera arg1) {
        Debug.out(\"PREVIEW FRAME:\");
        byte[] pixels = new byte[use_size.width * use_size.height * 3];
        decodeYUV420SP(pixels, data, use_size.width,  use_size.height); 
        renderer.bindCameraTexture(pixels, use_size.width,  use_size.height);
    }

    void decodeYUV420SP(byte[] rgb, byte[] yuv420sp, int width, int height) {  

        final int frameSize = width * height;  

        for (int j = 0, yp = 0; j < height; j++) {       
            int uvp = frameSize + (j >> 1) * width, u = 0, v = 0;  
            for (int i = 0; i < width; i++, yp++) {  
                int y = (0xff & ((int) yuv420sp[yp])) - 16;  
                if (y < 0){  
                    y = 0; 
                }
                if ((i & 1) == 0) {  
                    v = (0xff & yuv420sp[uvp++]) - 128;  
                    u = (0xff & yuv420sp[uvp++]) - 128;  
                }  

                int y1192 = 1192 * y;  
                int r = (y1192 + 1634 * v);  
                int g = (y1192 - 833 * v - 400 * u);  
                int b = (y1192 + 2066 * u);  

                if (r < 0){
                    r = 0;               
                }else if (r > 262143){  
                    r = 262143; 
                }
                if (g < 0){                  
                    g = 0;               
                }else if (g > 262143){
                    g = 262143; 
                }
                if (b < 0){                  
                    b = 0;               
                }else if (b > 262143){
                    b = 262143; 
                }
                rgb[yp*3] = (byte) (b << 6);
                rgb[yp*3 + 1] = (byte) (b >> 2);
                rgb[yp*3 + 2] = (byte) (b >> 10);
            }  
        }  
    }  

}

Finally, the MyRenderer class:

public class MyRenderer implements GLSurfaceView.Renderer{
    private FloatBuffer vertices;
    private FloatBuffer texcoords;
    private int mProgram;
    private int maPositionHandle;
    private int gvTexCoordHandle;
    private int gvSamplerHandle;
    private static Context context;
    int[] camera_texture;
    public void onSurfaceCreated(GL10 unused, EGLConfig config) {
        initShapes();
        GLES20.glClearColor(0.0f, 1.0f, 0.2f, 1.0f);
        Debug.out(\"Hello init.\");
        //Shaders
        int vertexShader = 0;
        int fragmentShader = 0;
        try {
            vertexShader = loadShader(GLES20.GL_VERTEX_SHADER, readFile("vertex.vsh"));
            fragmentShader = loadShader(GLES20.GL_FRAGMENT_SHADER, readFile("fragment.fsh"));
        } catch (IOException e) {
            Debug.out(\"The shaders could not be found.\");
            e.printStackTrace();
        }
        mProgram = GLES20.glCreateProgram();             // create empty OpenGL Program
        GLES20.glAttachShader(mProgram, vertexShader);   // add the vertex shader to program
        GLES20.glAttachShader(mProgram, fragmentShader); // add the fragment shader to program
        GLES20.glLinkProgram(mProgram);                  // creates OpenGL program executables
        // get handles
        maPositionHandle = GLES20.glGetAttribLocation(mProgram, "vPosition");
        gvTexCoordHandle = GLES20.glGetAttribLocation(mProgram, "a_texCoord");
        gvSamplerHandle = GLES20.glGetAttribLocation(mProgram, "s_texture");
        GLES20.glPixelStorei(GLES20.GL_UNPACK_ALIGNMENT, 1);
        camera_texture = null;
    }


    private void initShapes(){
        float triangleCoords[] = {
            // X, Y, Z
            -1.0f, -1.0f, 0.0f,
             1.0f, -1.0f, 0.0f,
             -1.0f, 1.0f, 0.0f,
             1.0f,  1.0f, 0.0f,
        }; 
        float texcoordf[] = {
            // X, Y, Z
            -1.0f,-1.0f,
            1.0f,-1.0f,
            -1.0f,1.0f,
            1.0f,1.0f,
        };

        // initialize vertex Buffer for vertices
        ByteBuffer vbb = ByteBuffer.allocateDirect(triangleCoords.length * 4); 
        vbb.order(ByteOrder.nativeOrder());// use the device hardware's native byte order
        vertices = vbb.asFloatBuffer();  // create a floating point buffer from the ByteBuffer
        vertices.put(triangleCoords);    // add the coordinates to the FloatBuffer
        vertices.position(0);            // set the buffer to read the first coordinate
        // initialize vertex Buffer for texcoords 
        vbb = ByteBuffer.allocateDirect(texcoordf.length * 4); 
        vbb.order(ByteOrder.nativeOrder());// use the device hardware's native byte order
        texcoords = vbb.asFloatBuffer();  // create a floating point buffer from the ByteBuffer
        texcoords.put(texcoordf);    // add the coordinates to the FloatBuffer
        texcoords.position(0);            // set the buffer to read the first coordinate
    }

    private static String readFile(String path) throws IOException {
        AssetManager assetManager = context.getAssets();
        InputStream stream = assetManager.open(path);
        try {
            return new Scanner(stream).useDelimiter("\\A").next();
        }
        finally {
            stream.close();
        }
    }

    private int loadShader(int type, String shaderCode){
        // create a vertex shader type (GLES20.GL_VERTEX_SHADER)
        // or a fragment shader type (GLES20.GL_FRAGMENT_SHADER)
        int shader = GLES20.glCreateShader(type); 
        // add the source code to the shader and compile it
        GLES20.glShaderSource(shader, shaderCode);
        GLES20.glCompileShader(shader);
        return shader;
    }

    public void onDrawFrame(GL10 unused) {
        GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT | GLES20.GL_DEPTH_BUFFER_BIT);
        if(camera_texture == null){
            return;
        }
        // Add program to OpenGL environment
        GLES20.glUseProgram(mProgram);
        // Prepare the triangle data
        GLES20.glVertexAttribPointer(maPositionHandle, 3, GLES20.GL_FLOAT, false, 0, vertices);
        GLES20.glVertexAttribPointer(gvTexCoordHandle, 2, GLES20.GL_FLOAT, false, 0, texcoords);
        GLES20.glEnableVertexAttribArray(maPositionHandle);
        GLES20.glEnableVertexAttribArray(gvTexCoordHandle);
        GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
        GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, camera_texture[0]);
        GLES20.glUniform1i(gvSamplerHandle, 0);
        // Draw the triangle
        GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
        GLES20.glDisableVertexAttribArray(maPositionHandle);
        GLES20.glDisableVertexAttribArray(gvTexCoordHandle);
    }

    public void onSurfaceChanged(GL10 unused, int width, int height) {
        GLES20.glViewport(0, 0, width, height);
    }

    public void takeContext(Context ocontext) {
        Debug.out(\"Take context\");
        context = ocontext;
    }

    void bindCameraTexture(byte[] data,int w,int h) {
        byte[] pixels = new byte[256*256*3];
        for(int x = 0;x < 256;x++){
            for(int y = 0;x < 256;x++){
                pixels[x*256+y] = data[x*w+y];
            }
        }
        if (camera_texture==null){
            camera_texture=new int[1];
        }else{
            GLES20.glDeleteTextures(1, camera_texture, 0);
        }   
        GLES20.glGenTextures(1, camera_texture, 0);
        int tex = camera_texture[0];
        GLES20.glBindTexture(GL10.GL_TEXTURE_2D, tex);
        GLES20.glTexImage2D(GL10.GL_TEXTURE_2D, 0, GL10.GL_RGB, 256, 256, 0, GL10.GL_RGB, GL10.GL_UNSIGNED_BYTE, ByteBuffer.wrap(pixels));
        GLES20.glTexParameterf(GL10.GL_TEXTURE_2D, GL10.GL_TEXTURE_MIN_FILTER, GL10.GL_LINEAR);
    }
}

Answer 1:


I took your code and got the same error as you. However, on debugging, it appears that the preview might be failing because the width and height dimensions were the wrong way round; it's not just a case of switching them around, though, as I think the orientation also plays a part.

Anyway, I've substituted your CameraSurfaceView with my own (see below) and I think it works now. There is no exception, but the screen is completely bright green (I think this might be because I don't have the vertex.vsh or fragment.fsh files).

package stackOverflow.test;

import java.io.IOException;
import java.util.List;

import android.content.Context;
import android.hardware.Camera;
import android.hardware.Camera.Size;
import android.util.AttributeSet;
import android.util.Log;
import android.view.Display;
import android.view.Surface;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.View;
import android.view.ViewGroup;
import android.view.WindowManager;

public class CameraSurfaceView extends ViewGroup implements SurfaceHolder.Callback
{

private Size mPreviewSize;
private List<Size> mSupportedPreviewSizes;        
private Context mContext;
private SurfaceView mSurfaceView;
private SurfaceHolder mHolder;
private final String TAG = "CameraSurfaceView";
private Camera mCamera;
private List<String> mSupportedFlashModes;

public CameraSurfaceView(Context context)
{
    super(context);
    mContext = context;
    mCamera = Camera.open();        
    setCamera(mCamera);

    mSurfaceView = new SurfaceView(context);
    addView(mSurfaceView, 0);        
    mHolder = mSurfaceView.getHolder();
    mHolder.addCallback(this);
    mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
    mHolder.setKeepScreenOn(true);
}

public CameraSurfaceView(Context context, AttributeSet attrs)
{
    super(context, attrs);
    mContext = context;            
}

public void setSupportedPreviewSizes(List<Size> supportedPreviewSizes)
{
    mSupportedPreviewSizes = supportedPreviewSizes;
}

public Size getPreviewSize()
{
    return mPreviewSize;
}

public void setCamera(Camera camera)
{
    mCamera = camera;
    if (mCamera != null)
    {
        mSupportedPreviewSizes = mCamera.getParameters().getSupportedPreviewSizes();                
        mSupportedFlashModes = mCamera.getParameters().getSupportedFlashModes();
        // Set the camera to Auto Flash mode.
        if (mSupportedFlashModes != null && mSupportedFlashModes.contains(Camera.Parameters.FLASH_MODE_AUTO))
        {
            Camera.Parameters parameters = mCamera.getParameters();
            parameters.setFlashMode(Camera.Parameters.FLASH_MODE_AUTO);             
            mCamera.setParameters(parameters);
        }                   
    }
    requestLayout();
}

@Override
public void surfaceDestroyed(SurfaceHolder holder)
{
    // Surface will be destroyed when we return, so stop the preview.
    if (mCamera != null)
    {
        mCamera.stopPreview();
    }
}

@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height)
{
    // Now that the size is known, set up the camera parameters and begin
    // the preview.
    if (mCamera != null)
    {
        Camera.Parameters parameters = mCamera.getParameters();        
        Size previewSize = getPreviewSize();
        parameters.setPreviewSize(previewSize.width, previewSize.height);                

        mCamera.setParameters(parameters);
        mCamera.startPreview();
    }

}

@Override
public void surfaceCreated(SurfaceHolder holder)
{
    // The Surface has been created, acquire the camera and tell it where
    // to draw.
    try
    {
        if (mCamera != null)
        {
            mCamera.setPreviewDisplay(holder);
        }
    }
    catch (IOException exception)
    {
        Log.e(TAG, "IOException caused by setPreviewDisplay()", exception);
    }
}

@Override
protected void onMeasure(int widthMeasureSpec, int heightMeasureSpec)
{        
    final int width = resolveSize(getSuggestedMinimumWidth(), widthMeasureSpec);
    final int height = resolveSize(getSuggestedMinimumHeight(), heightMeasureSpec);
    setMeasuredDimension(width, height);

    if (mSupportedPreviewSizes != null)
    {
        mPreviewSize = getOptimalPreviewSize(mSupportedPreviewSizes, width, height);
    }
}

@Override
protected void onLayout(boolean changed, int left, int top, int right, int bottom)
{
    if (changed)
    {                            
        final View cameraView = getChildAt(0);          

        final int width = right - left;
        final int height = bottom - top;

        int previewWidth = width;
        int previewHeight = height;
        if (mPreviewSize != null)
        {
            Display display = ((WindowManager)mContext.getSystemService(Context.WINDOW_SERVICE)).getDefaultDisplay();

            switch (display.getRotation())
            {
                case Surface.ROTATION_0:
                    previewWidth = mPreviewSize.height;
                    previewHeight = mPreviewSize.width;
                    mCamera.setDisplayOrientation(90);
                    break;
                case Surface.ROTATION_90:
                    previewWidth = mPreviewSize.width;
                    previewHeight = mPreviewSize.height;
                    break;
                case Surface.ROTATION_180:
                    previewWidth = mPreviewSize.height;
                    previewHeight = mPreviewSize.width;
                    break;
                case Surface.ROTATION_270:
                    previewWidth = mPreviewSize.width;
                    previewHeight = mPreviewSize.height;
                    mCamera.setDisplayOrientation(180);
                    break;
            }                                    
        }

        final int scaledChildHeight = previewHeight * width / previewWidth;

        cameraView.layout(0, height - scaledChildHeight, width, height);

    }
}


private Size getOptimalPreviewSize(List<Size> sizes, int width, int height)
{           
    Size optimalSize = null;                                

    final double ASPECT_TOLERANCE = 0.1;
    double targetRatio = (double) height / width;

    // Try to find a size match which suits the whole screen minus the menu on the left.
    for (Size size : sizes)
    {
        if (size.height != width) continue;
        double ratio = (double) size.width / size.height;
        if (ratio <= targetRatio + ASPECT_TOLERANCE && ratio >= targetRatio - ASPECT_TOLERANCE)
        {
            optimalSize = size;
        }               
    }

    // If we cannot find the one that matches the aspect ratio, ignore the requirement.
    if (optimalSize == null)
    {
        // TODO : Backup in case we don't get a size.
    }

    return optimalSize;
}

public void previewCamera()
{        
    try 
    {           
        mCamera.setPreviewDisplay(mHolder);         
        mCamera.startPreview();                 
    }
    catch(Exception e)
    {
        Log.d(TAG, "Cannot start preview.", e);    
    }
}


/*public void onPreviewFrame(byte[] data, Camera arg1) { 
    Log.d("CameraSurfaceView", "PREVIEW FRAME:"); 
    byte[] pixels = new byte[use_size.width * use_size.height * 3]; ; 
    decodeYUV420SP(pixels, data, use_size.width,  use_size.height);  
    renderer.bindCameraTexture(pixels, use_size.width,  use_size.height); 
}*/ 

void decodeYUV420SP(byte[] rgb, byte[] yuv420sp, int width, int height) {   

    final int frameSize = width * height;   

    for (int j = 0, yp = 0; j < height; j++) {        
        int uvp = frameSize + (j >> 1) * width, u = 0, v = 0;   
        for (int i = 0; i < width; i++, yp++) {   
            int y = (0xff & ((int) yuv420sp[yp])) - 16;   
            if (y < 0){   
                y = 0;  
            } 
            if ((i & 1) == 0) {   
                v = (0xff & yuv420sp[uvp++]) - 128;   
                u = (0xff & yuv420sp[uvp++]) - 128;   
            }   

            int y1192 = 1192 * y;   
            int r = (y1192 + 1634 * v);   
            int g = (y1192 - 833 * v - 400 * u);   
            int b = (y1192 + 2066 * u);   

            if (r < 0){ 
                r = 0;                
            }else if (r > 262143){   
                r = 262143;  
            } 
            if (g < 0){                   
                g = 0;                
            }else if (g > 262143){ 
                g = 262143;  
            } 
            if (b < 0){                   
                b = 0;                
            }else if (b > 262143){ 
                b = 262143;  
            } 
            rgb[yp*3] = (byte) (b << 6); 
            rgb[yp*3 + 1] = (byte) (b >> 2); 
            rgb[yp*3 + 2] = (byte) (b >> 10); 
        }   
    }   
  }   
}

You'll notice I commented out your onPreviewFrame() method, as well as the line context.surface_view.renderer = renderer, just to get it running.

I'm not familiar with the OpenGL library, but perhaps this is enough to get you going again.




Answer 2:


I had the same problem, and removing or playing with the preview size setting didn't work for me. I fixed it with the following line of code:

mHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);

Now it works fine for me!




Answer 3:


Check your logcat for something similar to this, right before your exception:

11-02 09:25:44.305: ERROR/QualcommCameraHardware(56): failed to construct master heap for pmem pool /dev/pmem_adsp
11-02 09:25:44.305: ERROR/QualcommCameraHardware(56): initPreview X: could not initialize preview heap.
11-02 09:25:44.305: ERROR/QualcommCameraHardware(56): startPreview X initPreview failed.  Not starting preview.

This happens on my phone in landscape orientation when I try to set a preview size of 1024x768. The getSupportedPreviewSizes() method says this size is supported; however, the OS appears to fail to allocate enough memory for a preview that large. Setting a smaller size does work.

Also, try the code below. You should call startPreview() from the surfaceChanged() method; at that point the dimensions of your SurfaceView are known. Calling it from surfaceCreated() is too early.

Use this to determine the optimal preview size for a given size of your SurfaceView (adapted from the Google CameraPreview sample):

static Size getOptimalPreviewSize(List<Size> sizes, int w, int h) {
    final double ASPECT_TOLERANCE = 0.1;
    final double MAX_DOWNSIZE = 1.5;

    double targetRatio = (double) w / h;
    if (sizes == null) return null;

    Size optimalSize = null;
    double minDiff = Double.MAX_VALUE;

    int targetHeight = h;

    // Try to find a size that matches both the target aspect ratio and size
    for (Size size : sizes) {
        double ratio = (double) size.width / size.height;
        double downsize = (double) size.width / w;
        if (downsize > MAX_DOWNSIZE) {
            //if the preview is a lot larger than our display surface ignore it
            //reason - on some phones there is not enough heap available to show the larger preview sizes 
            continue;
        }
        if (Math.abs(ratio - targetRatio) > ASPECT_TOLERANCE) continue;
        if (Math.abs(size.height - targetHeight) < minDiff) {
            optimalSize = size;
            minDiff = Math.abs(size.height - targetHeight);
        }
    }

    // Cannot find one that matches the aspect ratio; ignore that requirement
    // but keep the max_downsize requirement
    if (optimalSize == null) {
        minDiff = Double.MAX_VALUE;
        for (Size size : sizes) {
            double downsize = (double) size.width / w;
            if (downsize > MAX_DOWNSIZE) {
                continue;
            }
            if (Math.abs(size.height - targetHeight) < minDiff) {
                optimalSize = size;
                minDiff = Math.abs(size.height - targetHeight);
            }
        }
    }
    //everything else failed, just take the closest match
    if (optimalSize == null) {
        minDiff = Double.MAX_VALUE;
        for (Size size : sizes) {
            if (Math.abs(size.height - targetHeight) < minDiff) {
                optimalSize = size;
                minDiff = Math.abs(size.height - targetHeight);
            }
        }
    }

    return optimalSize;
}

You would call it from your surfaceChanged() method, like this:

public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) {
    Camera.Parameters parameters = camera.getParameters();
    List<Size> supportedPreviewSizes = parameters.getSupportedPreviewSizes();
    Size optimalPreviewSize = getOptimalPreviewSize(supportedPreviewSizes, w, h);
    if (optimalPreviewSize != null) {
        parameters.setPreviewSize(optimalPreviewSize.width, optimalPreviewSize.height);
        camera.setParameters(parameters);
        camera.startPreview();
    }
}



Answer 4:


Try setting the type of the Surface in initCamera():

private void initCamera() {
    mCamSV = (SurfaceView) findViewById(R.id.surface_camera);
    mCamSH = mCamSV.getHolder();
    mCamSH.addCallback(this);
    mCamSH.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS); // <-- add this line
}

EDIT 1

I am copying here all the files that worked for me with the Android 2.2 SDK.

Activity

package com.stack.camera;


import java.io.IOException;

import android.app.Activity;
import android.hardware.Camera;
import android.os.Bundle;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.WindowManager;
import android.widget.FrameLayout;

public class CameraStackActivity extends Activity implements SurfaceHolder.Callback {
    private Camera mCam;
    private SurfaceView mCamSV;
    private SurfaceHolder mCamSH;

/** Called when the activity is first created. */
@Override
public void onCreate(Bundle savedInstanceState) {
    super.onCreate(savedInstanceState);

    getWindow().setFlags(WindowManager.LayoutParams.FLAG_FULLSCREEN,
            WindowManager.LayoutParams.FLAG_FULLSCREEN);
    setContentView(R.layout.main);
    initCamera();
}

@Override
public void onDestroy() {
    super.onDestroy();
    stopCamera();
}

public void surfaceChanged(SurfaceHolder holder, int format, int width,
        int height) {

    startCamera(holder, width, height);
}

public void surfaceCreated(SurfaceHolder holder) {
    // TODO Auto-generated method stub
    mCam = Camera.open();
    try {
        mCam.setPreviewDisplay(holder);
    } catch (IOException e) {
        // TODO Auto-generated catch block
        e.printStackTrace();
    }
}

public void surfaceDestroyed(SurfaceHolder holder) {
    // TODO Auto-generated method stub

}

private void initCamera() {
    mCamSV = (SurfaceView)findViewById(R.id.surface_camera);
    mCamSH = mCamSV.getHolder();
    mCamSH.addCallback(this);
    mCamSH.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);

}


private void startCamera(SurfaceHolder sh, int width, int height) {
    Camera.Parameters p = mCam.getParameters();
    // Camera.Size s = p.getSupportedPreviewSizes().get(0);
    p.setPreviewSize(width, height);

    mCam.setParameters(p);

    try {
        mCam.setPreviewDisplay(sh);
    } catch (Exception e) {
    }

    mCam.startPreview();
}

private void stopCamera() {
    mCamSH.removeCallback(this);

    mCam.stopPreview();
    mCam.release();
}
}

Layout

<?xml version="1.0" encoding="utf-8"?>
<FrameLayout xmlns:android="http://schemas.android.com/apk/res/android"
    android:orientation="vertical" android:layout_width="fill_parent"
    android:layout_height="fill_parent">
    <SurfaceView android:id="@+id/surface_camera"
        android:layout_width="fill_parent" android:layout_height="fill_parent" />
</FrameLayout>

Manifest File

<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
      package="com.stack.camera"
      android:versionCode="1"
      android:versionName="1.0">

<uses-permission android:name="android.permission.CAMERA" />
<uses-feature android:name="android.hardware.camera" />
<uses-feature android:name="android.hardware.camera.autofocus" />

    <application android:icon="@drawable/icon" android:label="@string/app_name">
        <activity android:name="CameraStackActivity"
                  android:label="@string/app_name">
            <intent-filter>
                <action android:name="android.intent.action.MAIN" />
                <category android:name="android.intent.category.LAUNCHER" />
            </intent-filter>
        </activity>

    </application>
</manifest>



Answer 5:


Not really related to the OP question, but nevertheless...

I have seen the following gotcha giving me java.io.IOException: setPreviewDisplay failed:

If you do both video and photo capture, there are two relevant methods, camera.unlock() and camera.reconnect(). You must call camera.unlock() before recording a video and camera.reconnect() before taking a photo afterwards.
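
For context, here is a minimal sketch of that sequence using the legacy android.hardware.Camera and MediaRecorder APIs; the camera, holder and outputPath parameters are placeholders for illustration, not code from the question:

// Sketch only: requires android.hardware.Camera, android.media.MediaRecorder,
// android.media.CamcorderProfile and android.view.SurfaceHolder.
private void recordVideoThenTakePhoto(Camera camera, SurfaceHolder holder, String outputPath)
        throws IOException {
    camera.unlock();                                 // hand ownership of the camera to MediaRecorder
    MediaRecorder recorder = new MediaRecorder();
    recorder.setCamera(camera);
    recorder.setAudioSource(MediaRecorder.AudioSource.CAMCORDER);
    recorder.setVideoSource(MediaRecorder.VideoSource.CAMERA);
    recorder.setProfile(CamcorderProfile.get(CamcorderProfile.QUALITY_LOW));
    recorder.setOutputFile(outputPath);
    recorder.setPreviewDisplay(holder.getSurface());
    recorder.prepare();
    recorder.start();
    // ... stop recording at some later point ...
    recorder.stop();
    recorder.release();
    camera.reconnect();                              // take the camera back before using it again
    camera.takePicture(null, null, new Camera.PictureCallback() {
        public void onPictureTaken(byte[] data, Camera cam) {
            // handle the JPEG bytes here
        }
    });
}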




Answer 6:


Simple solution: in your CameraSurfaceView class, add this line:

holder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);

below these lines:

SurfaceHolder holder = getHolder();
holder.addCallback(this);
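
Putting the two snippets together, the constructor from the question would then look roughly like this (just a sketch; note that SURFACE_TYPE_PUSH_BUFFERS is deprecated and ignored from API 11 onwards, but it is needed on older devices):

public CameraSurfaceView(Context context) {
    super(context);
    SurfaceHolder holder = getHolder();
    holder.addCallback(this);
    // Needed on pre-API 11 devices so the camera can push preview frames to this surface;
    // setType() is a no-op on newer platform versions.
    holder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
    Debug.out("Init CSV");
    camera = Camera.open();
}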



Answer 7:


I am a bit late to this game, but I was running Cordova and had two plugins trying to register the camera at the same time.

Not sure if this will ever be of use for anyone except me.



Source: https://stackoverflow.com/questions/7942378/android-camera-will-not-work-startpreview-fails
