I have a video frame in byte array format. How can I draw it on a SurfaceView in Android 2.2? I am trying to do video chat: I am using onPreviewFrame and get a byte[], which I send to a server; the server then forwards it to the other client, so that client ends up with a byte[]. My problem is: how can I draw this byte[] on the client?
java.io.ByteArrayOutputStream: this class implements an output stream in which the data is written into a byte array. The buffer automatically grows as data is written to it. The data can be retrieved using toByteArray() and toString().
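A minimal, self-contained sketch of that pattern (the class name and chunk contents here are just for illustration):

import java.io.ByteArrayOutputStream;

public class BaosDemo {
    public static void main(String[] args) {
        // Write arbitrary chunks into memory; the internal buffer grows as needed.
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        byte[] chunk = new byte[] { 1, 2, 3, 4 };
        baos.write(chunk, 0, chunk.length);
        baos.write(chunk, 0, chunk.length);
        // Get everything back as a single byte[] that can be sent over a socket.
        byte[] payload = baos.toByteArray();
        System.out.println("payload length = " + payload.length); // prints 8
    }
}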
Provides a dedicated drawing surface embedded inside of a view hierarchy. You can control the format of this surface and, if you like, its size; the SurfaceView takes care of placing the surface at the correct location on the screen.
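If you want to paint the received frame straight onto the SurfaceView (instead of into an ImageView, as the code below does), a minimal sketch looks like this; it assumes the frame arrived as JPEG data, drawFrame is just a hypothetical helper name, and android.graphics.Canvas has to be imported in addition to the classes used below:

// Decode a received JPEG frame and draw it onto the SurfaceView's surface.
// 'holder' is the SurfaceHolder returned by surfaceView.getHolder().
private void drawFrame(SurfaceHolder holder, byte[] jpegData) {
    Bitmap bmp = BitmapFactory.decodeByteArray(jpegData, 0, jpegData.length);
    if (bmp == null) return;                // not a valid or complete JPEG
    Canvas canvas = holder.lockCanvas();
    if (canvas == null) return;             // surface not created yet
    try {
        // Scale the frame to fill the whole surface.
        canvas.drawBitmap(bmp, null, new Rect(0, 0, canvas.getWidth(), canvas.getHeight()), null);
    } finally {
        holder.unlockCanvasAndPost(canvas); // always post the canvas back
    }
}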
Here is all the code for your app, or at least the core of your video chat :). I didn't try my code on Android 2.2, but it works on Android 2.3.3. It works like this: you enter the other device's local IP address in the EditText and then push the "start video chat" button; the other side, whose IP address you set, will then see the video from your front camera.
Here is the Java code:
import android.annotation.SuppressLint;
import android.app.Activity;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.ImageFormat;
import android.graphics.Rect;
import android.graphics.YuvImage;
import android.hardware.Camera;
import android.hardware.Camera.ErrorCallback;
import android.hardware.Camera.Size;
import android.os.Bundle;
import android.os.StrictMode;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.View;
import android.view.View.OnClickListener;
import android.widget.Button;
import android.widget.EditText;
import android.widget.ImageView;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.net.DatagramPacket;
import java.net.DatagramSocket;
import java.net.InetAddress;
import java.net.SocketException;
import java.net.UnknownHostException;
import java.util.Iterator;
import java.util.List;
public class MainActivity extends Activity implements OnClickListener {
private ImageView m_VideCaptureBitmap;
Camera mCamera;
Button m_StartVideoChat;
Button m_close_camera;
EditText IPAdresse;
private SurfaceView m_VideoCaptureView;
private static final int TIMEOUT_MS = 10000;
private static final int server_port = 13011;
byte[] m_buffer ;
@SuppressLint({ "NewApi", "NewApi", "NewApi" })
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
StrictMode.ThreadPolicy policy = new StrictMode.ThreadPolicy.Builder().permitAll().build();
StrictMode.setThreadPolicy(policy);
m_VideoCaptureView = (SurfaceView) findViewById(R.id.imgSentView);
m_close_camera = (Button) findViewById(R.id.btnCameraClose);
m_close_camera.setOnClickListener(new OnClickListener() {
@Override
public void onClick(View v) {
mCamera.release();
}
});
m_VideCaptureBitmap = (ImageView) findViewById(R.id.videoView);
IPAdresse = (EditText) findViewById(R.id.etIPAdresse);
IPAdresse.setText("192.168.2.32");
m_StartVideoChat = (Button) findViewById(R.id.btnStartVideo);
m_StartVideoChat.setOnClickListener(this);
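// Receiver: background thread that listens for UDP packets on server_port and hands each received JPEG frame to the UI thread for display.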
new Thread(new Runnable() {
@Override
public void run() {
byte[] buffer = new byte [2048];
DatagramSocket s;
try {
s= new DatagramSocket(server_port);
s.setSoTimeout(TIMEOUT_MS);
while(true){
try {
final DatagramPacket p = new DatagramPacket(buffer, buffer.length);
s.receive(p);
m_buffer= buffer.clone();
runOnUiThread(new Runnable() {
@Override
public void run() {
frameToBuffer(p.getData());
}
});
}
catch(Exception e) {
e.printStackTrace();
}
}
} catch (SocketException e) {
// TODO Auto-generated catch block
e.printStackTrace();
//rstep.setText("fail socket create");
}
}
}).start();
}
public void onClick(View arg0) {
switch(arg0.getId()){
case R.id.btnStartVideo:
startVideo();
}
}
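// Helper that looks for a front-facing camera and opens it (kept for reference; startVideo() below opens the camera directly).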
private Camera openFrontFacingCameraGingerbread(Camera cam)
{
int cameraCount = 0;
Camera.CameraInfo cameraInfo = new Camera.CameraInfo();
cameraCount = Camera.getNumberOfCameras();
for ( int camIdx = 0; camIdx < cameraCount; camIdx++ ) {
Camera.getCameraInfo( camIdx, cameraInfo );
if ( cameraInfo.facing == Camera.CameraInfo.CAMERA_FACING_FRONT ) {
try {
cam = Camera.open( camIdx );
} catch (RuntimeException e) {
e.printStackTrace();
}
}
}
return cam;
}
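// Sender: open a camera, configure and start the preview, and push every preview frame to the remote IP as a JPEG over UDP.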
private void startVideo() {
if(mCamera != null) return;
SurfaceHolder videoCaptureViewHolder = null;
try {
//openFrontFacingCameraGingerbread(mCamera);
int camId = Camera.CameraInfo.CAMERA_FACING_BACK;
if (Camera.getNumberOfCameras() > 1 && camId < Camera.getNumberOfCameras() - 1) {
int i = 1 + camId ;
mCamera = Camera.open(i);
}
//mCamera = Camera.open();
} catch (RuntimeException e) {
Log.e("CameraTest", "Camera Open filed");
return;
}
mCamera.setErrorCallback(new ErrorCallback() {
public void onError(int error, Camera camera) {
}
});
Camera.Parameters parameters = mCamera.getParameters();
parameters.setPictureSize(10, 10);
parameters.setPreviewFrameRate(5);
List<Size> supportedPreviewSizes=parameters.getSupportedPreviewSizes();
Iterator<Size> supportedPreviewSizesIterator=supportedPreviewSizes.iterator();
while(supportedPreviewSizesIterator.hasNext()){
Size tmpSize=supportedPreviewSizesIterator.next();
Log.v("CameraTest","supportedPreviewSize.width = "+tmpSize.width+"supportedPreviewSize.height = "+tmpSize.height);
}
mCamera.setParameters(parameters);
if (null != m_VideoCaptureView)
videoCaptureViewHolder = m_VideoCaptureView.getHolder();
try {
mCamera.setPreviewDisplay(videoCaptureViewHolder);
} catch (Throwable t) {
}
Log.v("CameraTest","Camera PreviewFrameRate = "+mCamera.getParameters().getPreviewFrameRate());
Size previewSize=mCamera.getParameters().getPreviewSize();
int dataBufferSize=(int)(previewSize.height*previewSize.width*
(ImageFormat.getBitsPerPixel(mCamera.getParameters().getPreviewFormat())/8.0));
mCamera.addCallbackBuffer(new byte[dataBufferSize]);
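// Preview callback: compress the NV21 preview frame to a small JPEG and send it in a single UDP packet to the address entered in the EditText.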
mCamera.setPreviewCallbackWithBuffer(new Camera.PreviewCallback() {
private long timestamp=0;
public synchronized void onPreviewFrame(byte[] data, Camera camera) {
int size = data.length;
Size previewSize = camera.getParameters().getPreviewSize();
YuvImage yuvimage=new YuvImage(data, ImageFormat.NV21, previewSize.width, previewSize.height, null);
ByteArrayOutputStream baos = new ByteArrayOutputStream();
//yuvimage.compressToJpeg(new Rect(0, 0, previewSize.width, previewSize.height), 80, baos);
yuvimage.compressToJpeg(new Rect(0, 0, 128, 96), 80, baos);
byte[] jdata = baos.toByteArray();
int sizeOfData = jdata.length;
DatagramSocket s;
try {
s = new DatagramSocket();
s.setBroadcast(true);
s.setSoTimeout(TIMEOUT_MS);
InetAddress local = InetAddress.getByName(IPAdresse.getText().toString());
DatagramPacket p = new DatagramPacket(jdata, jdata.length, local,server_port);
s.send(p);
} catch (SocketException e) {
e.printStackTrace();
} catch (UnknownHostException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
// Convert to Bitmap
//Bitmap bmp = BitmapFactory.decodeByteArray(jdata, 0, jdata.length);
//m_VideCaptureBitmap.setImageBitmap(bmp);
//bitmapArray.add(bmp);
// set our renderer to be the main renderer with
// the current activity context
//setContentView(glSurfaceView);
Log.v("CameraTest","Frame size = "+ data.length);
timestamp=System.currentTimeMillis();
try{
camera.addCallbackBuffer(data);
}catch (Exception e) {
Log.e("CameraTest", "addCallbackBuffer error");
return;
}
return;
}
});
try {
mCamera.startPreview();
} catch (Throwable e) {
mCamera.release();
mCamera = null;
return;
}
}
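// Decode the received JPEG bytes into a Bitmap and show it in the ImageView.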
private void frameToBuffer(byte[] data) {
Bitmap bmp = BitmapFactory.decodeByteArray(data, 0, data.length);
m_VideCaptureBitmap.setImageBitmap(bmp);
}
}
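A note on the design: the receiving thread's buffer is only 2048 bytes, so each frame has to fit into a single UDP packet of that size, which is presumably why the preview callback compresses just a 128x96 region of the frame at JPEG quality 80 rather than the whole preview.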
Here is the XML layout:
<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
android:layout_width="match_parent"
android:layout_height="match_parent" >
<LinearLayout
android:layout_height="wrap_content"
android:layout_width="fill_parent"
android:orientation="vertical">
<EditText
android:id="@+id/etIPAdresse"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_alignLeft="@+id/etTEXT"
android:layout_alignParentTop="true"
android:ems="10"
android:hint="Ip adress" />
<Button
android:id="@+id/btnStartVideo"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_alignLeft="@+id/etTEXT"
android:layout_below="@+id/etTEXT"
android:text="start video chat" />
<Button
android:id="@+id/btnCameraClose"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:text="Cancel camera" />
<ImageView
android:id="@+id/videoView"
android:layout_width="match_parent"
android:layout_height="180dp" />
<SurfaceView
android:id="@+id/imgSentView"
android:layout_width="match_parent"
android:layout_height="180dp" />
</LinearLayout>
</RelativeLayout>
Try it ;) and don't forget to upvote :). Also, don't forget to add these lines to your AndroidManifest.xml:
<uses-permission android:name="android.permission.CAMERA" />
<uses-feature android:name="android.hardware.camera" />
<uses-feature android:name="android.hardware.camera.autofocus" />
<uses-feature android:name="android.hardware.camera.front" android:required="false" />
<uses-permission android:name="android.permission.CHANGE_WIFI_MULTICAST_STATE"/>
<uses-permission android:name="android.permission.INTERNET"></uses-permission>
Regards