Android Camera - app passed NULL surface

Submitted by Anonymous (unverified) on 2019-12-03 08:36:05

Question:

Whenever I run the Cam_thread I get the error "app passed NULL surface". This code supposedly works on the HTC Incredible 1. I've reconfigured it slightly to run on a Droid X, but I still get this error.

public class Android_Activity extends Activity
{
    Main_thread simulator;
    ToggleButton togglebutton;
    EditText ip_text;
    SensorManager sm = null;
    SurfaceView view;
    Sensors_thread the_sensors = null;
    String IP_address;
    Android_Activity the_app;

    @Override
    protected void onCreate(Bundle savedInstanceState)
    {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.main);
        view = new SurfaceView(this);
        sm = (SensorManager) getSystemService(SENSOR_SERVICE);
        ip_text = (EditText) findViewById(R.id.IP_edit_txt);
        togglebutton = (ToggleButton) findViewById(R.id.CameraButton);
        togglebutton.setOnClickListener(new btn_listener());
        the_app = this;
    }

    @Override
    protected void onResume()
    {
        super.onResume();
    }

    protected void onStop()
    {
        super.onStop();
        simulator.stop_simu();
        this.finish();
    }

    private class btn_listener implements OnClickListener
    {
        public void onClick(View v)
        {
            // Perform action on clicks
            if (togglebutton.isChecked())
            {
                IP_address = ip_text.getText().toString();
                simulator = new Main_thread(the_app, view, sm, IP_address);
                the_sensors = simulator.the_sensors;
                sm.registerListener(the_sensors,
                        SensorManager.SENSOR_ORIENTATION | SensorManager.SENSOR_ACCELEROMETER,
                        SensorManager.SENSOR_DELAY_UI);
                simulator.start();
                Toast.makeText(Android_Activity.this, "Start streaming" + IP_address, Toast.LENGTH_SHORT).show();
            } else
            {
                simulator.stop_simu();
                sm.unregisterListener(the_sensors);
                Toast.makeText(Android_Activity.this, "Stop streaming", Toast.LENGTH_SHORT).show();
            }
        }
    }
}

Main Thread

package carl.IOIO_car;

import android.hardware.SensorManager;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;

public class Main_thread extends Thread
{
    SurfaceView parent_context;
    SensorManager mSensorManager = null;
    Cam_thread the_cam;
    Sensors_thread the_sensors;
    IOIO_Thread ioio_thread_;
    String ip_address;
    Android_Activity the_app;

    public Main_thread(Android_Activity app, SurfaceView v, SensorManager m, String ip)
    {
        super();
        parent_context = v;
        mSensorManager = m;
        ip_address = ip;
        the_app = app;
        Log.e("Debug Main", "IP is " + ip_address);
        the_cam = new Cam_thread(parent_context, ip_address);
        the_sensors = new Sensors_thread(mSensorManager, ip_address);
        ioio_thread_ = new IOIO_Thread(the_app, ip_address);
    }

    public void run()
    {
        //ioio_thread_.start();
        the_cam.start_thread();
    }

    public void stop_simu()
    {
        the_cam.stop_thread();
        the_sensors.stop_thread();
        //ioio_thread_.abort();
    }
}

Cam_Thread

package carl.IOIO_car;

import java.io.ByteArrayOutputStream;
import java.net.DatagramPacket;
import java.net.DatagramSocket;
import java.net.InetAddress;
import java.util.List;

import android.graphics.Bitmap;
import android.hardware.Camera;
import android.hardware.Camera.PreviewCallback;
import android.util.Log;
import android.view.SurfaceView;

public class Cam_thread
{
    Camera mCamera;

    public static int HEADER_SIZE = 5;
    public static int DATAGRAM_MAX_SIZE = 1450 - HEADER_SIZE;

    int frame_nb = 0;
    int size_packet_sent = 0;

    InetAddress serverAddr;
    DatagramSocket socket;

    Bitmap mBitmap;
    int[] mRGBData;
    int width_ima, height_ima;
    private static final String TAG = "IP_cam";

    SurfaceView parent_context;

    private boolean STOP_THREAD;
    String ip_address;

    public Cam_thread(SurfaceView context, String ip)
    {
        parent_context = context;
        ip_address = ip;
    }

    private void init()
    {
        try
        {
            serverAddr = InetAddress.getByName(ip_address);
            socket = new DatagramSocket();
            if (mCamera != null) {
                Log.e(TAG, "Nulling camera");
                mCamera.stopPreview();
                mCamera.setPreviewCallback(null);
                mCamera.release();
                mCamera = null;
            }
            if (mCamera == null) {
                mCamera = Camera.open();
                Log.e(TAG, "Setting up camera");
                Camera.Parameters parameters = mCamera.getParameters();

                // get a list of supported preview sizes and assign one
                List<Camera.Size> previewSizes = parameters.getSupportedPreviewSizes();
                Camera.Size previewSize = previewSizes.get(0);
                parameters.setPreviewSize(previewSize.width, previewSize.height);
                // Set frame rate
                parameters.setPreviewFrameRate(30);
                // Set scene mode
                List<String> modes = parameters.getSupportedSceneModes();
                parameters.setSceneMode(modes.get(0));
                // Set focus mode
                List<String> focus = parameters.getSupportedFocusModes();
                parameters.setFocusMode(focus.get(0));
                // Apply parameters to camera object
                mCamera.setParameters(parameters);
                // Provide a surface
                if (parent_context.getHolder() == null)
                    Log.e(TAG, "Its a null holder");
                Log.e("Debug", "Before");
                mCamera.setPreviewDisplay(parent_context.getHolder());
                Log.e("Debug", "After");
                // Set a callback for when preview data is available
                mCamera.setPreviewCallback(new cam_PreviewCallback());
                Log.e(TAG, "Camera configured");
                // Start the preview
                Log.e(TAG, "Starting preview");
                mCamera.startPreview();
                /*
                parameters.setPreviewSize(320, 240);
                parameters.setPreviewFrameRate(30);
                parameters.setSceneMode(Camera.Parameters.SCENE_MODE_SPORTS);
                parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_AUTO);
                parameters.setColorEffect(Camera.Parameters.EFFECT_NONE);
                mCamera.setParameters(parameters);
                mCamera.setPreviewDisplay(parent_context.getHolder());
                mCamera.setPreviewCallback(new cam_PreviewCallback());
                Log.e(TAG, "Starting preview");
                mCamera.startPreview();
                */
            }
        }
        catch (Exception exception)
        {
            Log.e(TAG, "Error: ", exception);
        }
    }

    public void start_thread()
    {
        Log.e("Cam", "Started the Cam thread");
        init();
    }

    public void stop_thread()
    {
        STOP_THREAD = true;
        if (mCamera != null) {
            mCamera.stopPreview();
            mCamera.release();
            mCamera = null;
        }
        socket.close();
    }

    public void send_data_UDP()
    {
        Log.e(TAG, "Started sending cam data");
        if (mBitmap != null)
        {
            int size_p = 0, i;
            ByteArrayOutputStream byteStream = new ByteArrayOutputStream();
            mBitmap.compress(Bitmap.CompressFormat.JPEG, 50, byteStream);   // !!!!!!! change compression rate to change packet size
            byte data[] = byteStream.toByteArray();
            Log.e(TAG, "SIZE: " + data.length);

            int nb_packets = (int) Math.ceil(data.length / (float) DATAGRAM_MAX_SIZE);
            int size = DATAGRAM_MAX_SIZE;

            /* Loop through slices */
            for (i = 0; i < nb_packets; i++)
            {
                if (i > 0 && i == nb_packets - 1) size = data.length - i * DATAGRAM_MAX_SIZE;

                /* Set additional header */
                byte[] data2 = new byte[HEADER_SIZE + size];
                data2[0] = (byte) frame_nb;
                data2[1] = (byte) nb_packets;
                data2[2] = (byte) i;
                data2[3] = (byte) (size >> 8);
                data2[4] = (byte) size;

                /* Copy current slice to byte array */
                System.arraycopy(data, i * DATAGRAM_MAX_SIZE, data2, HEADER_SIZE, size);

                try
                {
                    size_p = data2.length;
                    DatagramPacket packet = new DatagramPacket(data2, size_p, serverAddr, 9000);
                    socket.send(packet);
                    Log.e(TAG, "Sent a cam frame!");
                } catch (Exception e) { Log.e(TAG, "Error: ", e); }
            }
            frame_nb++;
            if (frame_nb == 128) frame_nb = 0;
        }
    }

    /* Function converting the image to RGB format, taken from the project ViewfinderEE368:
     * http://www.stanford.edu/class/ee368/Android/ViewfinderEE368/
     *
     * Copyright (C) 2007 The Android Open Source Project
     *
     * Licensed under the Apache License, Version 2.0 (the "License");
     * you may not use this file except in compliance with the License.
     * You may obtain a copy of the License at
     *
     *      http://www.apache.org/licenses/LICENSE-2.0
     *
     * Unless required by applicable law or agreed to in writing, software
     * distributed under the License is distributed on an "AS IS" BASIS,
     * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
     * See the License for the specific language governing permissions and
     * limitations under the License.
     */
    static public void decodeYUV420SP(int[] rgb, byte[] yuv420sp, int width, int height)
    {
        final int frameSize = width * height;

        for (int j = 0, yp = 0; j < height; j++) {
            int uvp = frameSize + (j >> 1) * width, u = 0, v = 0;
            for (int i = 0; i < width; i++, yp++) {
                int y = (0xff & ((int) yuv420sp[yp])) - 16;
                if (y < 0) y = 0;
                if ((i & 1) == 0) {
                    v = (0xff & yuv420sp[uvp++]) - 128;
                    u = (0xff & yuv420sp[uvp++]) - 128;
                }

                int y1192 = 1192 * y;
                int r = (y1192 + 1634 * v);
                int g = (y1192 - 833 * v - 400 * u);
                int b = (y1192 + 2066 * u);

                if (r < 0) r = 0; else if (r > 262143) r = 262143;
                if (g < 0) g = 0; else if (g > 262143) g = 262143;
                if (b < 0) b = 0; else if (b > 262143) b = 262143;

                rgb[yp] = 0xff000000 | ((r << 6) & 0xff0000) | ((g >> 2) & 0xff00) | ((b >> 10) & 0xff);
            }
        }
    }

    // Preview callback used whenever a new frame is available...send image via UDP !!!
    private class cam_PreviewCallback implements PreviewCallback
    {
        @Override
        public void onPreviewFrame(byte[] data, Camera camera)
        {
            if (STOP_THREAD == true)
            {
                mCamera.setPreviewCallback(null);
                mCamera.stopPreview();
                mCamera.release();
                mCamera = null;
                return;
            }

            if (mBitmap == null)        // create the Bitmap image the first time
            {
                Camera.Parameters params = camera.getParameters();
                width_ima = params.getPreviewSize().width;
                height_ima = params.getPreviewSize().height;
                mBitmap = Bitmap.createBitmap(width_ima, height_ima, Bitmap.Config.RGB_565);
                mRGBData = new int[width_ima * height_ima];
            }

            decodeYUV420SP(mRGBData, data, width_ima, height_ima);
            mBitmap.setPixels(mRGBData, 0, width_ima, 0, 0, width_ima, height_ima);

            send_data_UDP();
        }
    }
}

Manifest

<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
      package="carl.IOIO_car"
      android:versionCode="1"
      android:versionName="1.0">

    <uses-sdk android:minSdkVersion="8" />

    <application android:icon="@drawable/icon" android:debuggable="true" android:label="@string/app_name">
        <activity android:name="carl.IOIO_car.Android_Activity"
                  android:label="@string/app_name"
                  android:theme="@android:style/Theme.NoTitleBar.Fullscreen"
                  android:screenOrientation="landscape">
            <intent-filter>
                <action android:name="android.intent.action.MAIN" />
                <category android:name="android.intent.category.LAUNCHER" />
            </intent-filter>
        </activity>
    </application>

    <uses-permission android:name="android.permission.CAMERA" />
    <uses-permission android:name="android.permission.ACCESS_NETWORK_STATE" />
    <uses-permission android:name="android.permission.INTERNET" />
    <uses-permission android:name="android.permission.WAKE_LOCK" />
    <uses-permission android:name="android.permission.ACCESS_WIFI_STATE" />
    <uses-permission android:name="android.permission.UPDATE_DEVICE_STATS" />
    <uses-permission android:name="android.permission.CHANGE_WIFI_STATE" />
    <uses-permission android:name="android.permission.CHANGE_WIFI_MULTICAST_STATE" />
    <uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />
    <uses-feature android:name="android.hardware.camera" />
    <uses-feature android:name="android.hardware.camera.autofocus" />
</manifest>

The original code was written by Dr. Oros: http://www.cogsci.uci.edu/~noros/android_car.html

Answer 1:

That's not true. Just set up a small delay with a Handler and it works fine on a Nexus too.

I just had the problem myself with black pictures on those devices, but with a 1-second delay everything works fine.
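
For reference, a minimal sketch of the delayed start this answer describes, to be placed inside the Activity; startCameraPreview() is a hypothetical method wrapping the setPreviewDisplay()/startPreview() calls, and the 1000 ms value simply mirrors the 1-second delay mentioned above:

// Postpone the preview start so the surface has time to be created before
// Camera.setPreviewDisplay() and startPreview() are called.
private final android.os.Handler handler = new android.os.Handler();

private void startPreviewDelayed() {
    handler.postDelayed(new Runnable() {
        @Override
        public void run() {
            startCameraPreview();   // hypothetical wrapper around the camera setup code
        }
    }, 1000);                       // ~1 second, as suggested in this answer
}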



Answer 2:

05-22 11:27:24.977 16750-16750/com.egomez.facedetection I/CameraActivity: Switching Camera
05-22 11:27:25.219 16750-16750/com.egomez.facedetection D/Camera: app passed NULL surface

To get rid of that error I had to get the holder of the SurfaceView (the scenario I was trying to fix was switching from the front camera to the back camera and vice versa).

public void switchCamera() {
    Log.i(TAG, "Switching Camera");
    if (mCamera != null) {
        mCamera.stopPreview();
        mCamera.release();
        //mCamera = null;
    }

    // swap the id of the camera to be used
    if (camId == Camera.CameraInfo.CAMERA_FACING_BACK) {
        camId = Camera.CameraInfo.CAMERA_FACING_FRONT;
    } else {
        camId = Camera.CameraInfo.CAMERA_FACING_BACK;
    }
    try {
        mCamera = Camera.open(camId);
        //mCamera.setDisplayOrientation(90);
        // You must get the holder of the SurfaceView!!!
        mCamera.setPreviewDisplay(mView.getHolder());
        // Then resume preview...
        mCamera.startPreview();
    }
    catch (Exception e) {
        e.printStackTrace();
    }
}

Hope this helps anyone running into a similar issue.



Answer 3:

All the camera setup must be done from the Activity (for example in onCreate); the signal to take a photo can then be sent from a different context.
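
A minimal sketch of that pattern, assuming a hypothetical CameraActivity that owns the Camera object; the names requestPhoto and mPictureCallback are illustrative and not from the original answer:

import android.app.Activity;
import android.hardware.Camera;
import android.os.Bundle;

public class CameraActivity extends Activity {
    private Camera mCamera;

    // Hypothetical JPEG callback; just receives the captured bytes.
    private final Camera.PictureCallback mPictureCallback = new Camera.PictureCallback() {
        @Override
        public void onPictureTaken(byte[] data, Camera camera) {
            // handle the JPEG bytes here, then restart the preview if needed
            camera.startPreview();
        }
    };

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        mCamera = Camera.open();   // camera created from the Activity, as the answer suggests
        // ... setPreviewDisplay(holder) and startPreview() would go here ...
    }

    // Can be called from a different context/thread; only the "take photo" signal crosses threads.
    public void requestPhoto() {
        runOnUiThread(new Runnable() {
            @Override
            public void run() {
                mCamera.takePicture(null, null, mPictureCallback);
            }
        });
    }
}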


