Logo Questions Linux Laravel Mysql Ubuntu Git Menu
 

Get color of touched pixel from camera preview

I have created a demo application which opens the camera. Now I want to get the color of the pixel when the user touches the live camera preview.

I have tried overriding onTouchEvent, and I succeeded in getting the position of the pixel in x,y, but I am not getting the RGB color value from it. It always shows 0,0,0.

All suggestions are welcomed including any alternate way to achieve the same functionality. [Excluding OpenCv because it requires to install OpenCvManager apk also to support my application]

Code :

    public class CameraPreview extends SurfaceView implements SurfaceHolder.Callback, PreviewCallback {

    private Camera camera;
    private SurfaceHolder holder;
    int[] myPixels;

    public CameraPreview(Context context, AttributeSet attrs, int defStyle) {
        super(context, attrs, defStyle);
    }

    public CameraPreview(Context context, AttributeSet attrs) {
        super(context, attrs);
    }

    public CameraPreview(Context context) {
        super(context);
    }

    public void init(Camera camera) {
        this.camera = camera;
        initSurfaceHolder();
    }

    @SuppressWarnings("deprecation") // needed for < 3.0
    private void initSurfaceHolder() {
        holder = getHolder();
        holder.addCallback(this);
        holder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
    }

    @Override
    public void surfaceCreated(SurfaceHolder holder) {
        initCamera(holder);
    }

    private void initCamera(SurfaceHolder holder) {
        try {
            camera.setPreviewDisplay(holder);
            camera.getParameters().setPreviewFormat(ImageFormat.NV21);
            camera.setPreviewCallback(this);
            camera.startPreview();
        } catch (Exception e) {
            Log.d("Error setting camera preview", e);
        }
    }

    @Override
    public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
    }

    @Override
    public void surfaceDestroyed(SurfaceHolder holder) {
    }

    @Override
    public boolean onTouchEvent(MotionEvent event) {
        if(event.getAction() == MotionEvent.ACTION_DOWN)
        {
            android.util.Log.d("touched", "called");
/*          int x = (int)event.getX();
            int y = (int)event.getY();

            android.util.Log.d("touched pixel :", x+" "+y);

            setDrawingCacheEnabled(true);
            buildDrawingCache();
            Bitmap mBmp = getDrawingCache();

            int pixel = mBmp.getPixel(x, y);
            int redValue = Color.red(pixel);
            int blueValue = Color.blue(pixel);
            int greenValue = Color.green(pixel);        

            android.util.Log.d("touched pixel color :", redValue+" "+greenValue+" "+blueValue);
            android.util.Log.d("touched pixel color from preview:", redValue+" "+greenValue+" "+blueValue);
*/
            //how to get particular pixel from myPixels[]
        }
        return false;
    }

    @Override
    public void onPreviewFrame(byte[] data, Camera camera) {

        android.util.Log.d("onPreviewFrame", "called");

         int frameHeight = camera.getParameters().getPreviewSize().height;
            int frameWidth = camera.getParameters().getPreviewSize().width;
            // number of pixels//transforms NV21 pixel data into RGB pixels  
            int rgb[] = new int[frameWidth * frameHeight];
            // convertion
            myPixels = decodeYUV420SP(rgb, data, frameWidth, frameHeight);

    }



    public int[] decodeYUV420SP(int[] rgb, byte[] yuv420sp, int width, int height) {

        // here we're using our own internal PImage attributes
        final int frameSize = width * height;

        for (int j = 0, yp = 0; j < height; j++) {
                int uvp = frameSize + (j >> 1) * width, u = 0, v = 0;
                for (int i = 0; i < width; i++, yp++) {
                        int y = (0xff & ((int) yuv420sp[yp])) - 16;
                        if (y < 0)
                                y = 0;
                        if ((i & 1) == 0) {
                                v = (0xff & yuv420sp[uvp++]) - 128;
                                u = (0xff & yuv420sp[uvp++]) - 128;
                        }

                        int y1192 = 1192 * y;
                        int r = (y1192 + 1634 * v);
                        int g = (y1192 - 833 * v - 400 * u);
                        int b = (y1192 + 2066 * u);

                        if (r < 0)
                                r = 0;
                        else if (r > 262143)
                                r = 262143;
                        if (g < 0)
                                g = 0;
                        else if (g > 262143)
                                g = 262143;
                        if (b < 0)
                                b = 0;
                        else if (b > 262143)
                                b = 262143;

                        // use interal buffer instead of pixels for UX reasons
                        rgb[yp] = 0xff000000 | ((r << 6) & 0xff0000)
                                        | ((g >> 2) & 0xff00) | ((b >> 10) & 0xff);
                }
        }

        return rgb;
}


}
like image 369
Mehul Joisar Avatar asked Feb 15 '14 06:02

Mehul Joisar


2 Answers

I've followed a different approach to solve it. I'll post the code as soon as I get free.

Algorithm :
  • create an overlay on the live camera
  • when user touches , update the overlay with RGB data of latest YUV buffer stream got from live camera
  • pick RGB color from overlay image
like image 189
Mehul Joisar Avatar answered Sep 18 '22 01:09

Mehul Joisar


It seems like myPixels is a 1D representation of 2D data (width x height), stored row by row (row-major order).

Which means myPixels has a length of (width * height).

Let's say the pixel is (x,y); then

int idx = ( width * y ) + x;

int color = myPixels[idx];

(Note: it must be multiplied by the width — each row holds `width` pixels — not the height.)

With the above information you can modify the decodeYUV420SP method to output only the color of a particular pixel.

like image 32
Josnidhin Avatar answered Sep 19 '22 01:09

Josnidhin