android 播放socket发送的视频流问题
本人大四学生,初接触安卓,做毕设的时候有个问题,要求从一台安卓设备那里接收视频流,并在另一台安卓设备上播放。一台设备摄像头采集视频并用socket发送的的代码如下
package com.example.shipin;
import android.os.Bundle;
import android.annotation.SuppressLint;
import android.app.Activity;
import android.view.Menu;
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.net.Socket;
import android.app.Activity;
import android.content.Intent;
import android.graphics.ImageFormat;
import android.graphics.Rect;
import android.graphics.YuvImage;
import android.hardware.Camera;
import android.hardware.Camera.Size;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceHolder.Callback;
import android.view.SurfaceView;
import android.view.Window;
import android.view.WindowManager;
public class MainActivity extends Activity {
SurfaceView sView;
SurfaceHolder surfaceHolder;
int screenWidth, screenHeight;
Camera camera; // 定义系统所用的照相机
boolean isPreview = false; // 是否在浏览中
private String ipname = "192.168.1.101";
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
sView = (SurfaceView) findViewById(R.id.sView); // 获取界面中SurfaceView组件
surfaceHolder = sView.getHolder(); // 获得SurfaceView的SurfaceHolder
// 为surfaceHolder添加一个回调监听器
surfaceHolder.addCallback(new Callback() {
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) {
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
initCamera(); // 打开摄像头
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
// 如果camera不为null ,释放摄像头
if (camera != null) {
if (isPreview)
camera.stopPreview();
camera.release();
camera = null;
}
System.exit(0);
}
});
// 设置该SurfaceView自己不维护缓冲
surfaceHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
}
@SuppressLint("NewApi")
private void initCamera() {
if (!isPreview) {
camera = Camera.open();
}
if (camera != null && !isPreview) {
try {
Camera.Parameters parameters = camera.getParameters();
parameters.setPreviewSize(screenWidth, screenHeight); // 设置预览照片的大小
parameters.setPreviewFpsRange(20, 30); // 每秒显示20~30帧
parameters.setPictureFormat(ImageFormat.NV21); // 设置图片格式
parameters.setPictureSize(screenWidth, screenHeight); // 设置照片的大小
camera.setParameters(parameters); // android2.3.3以后不需要此行代码
camera.setPreviewDisplay(surfaceHolder); // 通过SurfaceView显示取景画面
camera.setPreviewCallback(new StreamIt(ipname)); // 设置回调的类
camera.startPreview(); // 开始预览
camera.autoFocus(null); // 自动对焦
} catch (Exception e) {
e.printStackTrace();
}
isPreview = true;
}
}
class StreamIt implements Camera.PreviewCallback {
private String ipname;
public StreamIt(String ipname) {
this.ipname = ipname;
}
@Override
public void onPreviewFrame(byte[] data, Camera camera) {
Size size = camera.getParameters().getPreviewSize();
try {
// 调用image.compressToJpeg()将YUV格式图像数据data转为jpg格式
YuvImage image = new YuvImage(data, ImageFormat.NV21, size.width, size.height, null);
if (image != null) {
ByteArrayOutputStream outstream = new ByteArrayOutputStream();
image.compressToJpeg(new Rect(0, 0, size.width, size.height), 80, outstream);
outstream.flush();
// 启用线程将图像数据发送出去
Thread th = new MyThread(outstream, ipname);
th.start();
}
} catch (Exception ex) {
Log.e("Sys", "Error:" + ex.getMessage());
}
}
}
class MyThread extends Thread {
private byte byteBuffer[] = new byte[1024];
private OutputStream outsocket;
private ByteArrayOutputStream myoutputstream;
private String ipname;
public MyThread(ByteArrayOutputStream myoutputstream, String ipname) {
this.myoutputstream = myoutputstream;
this.ipname = ipname;
try {
myoutputstream.close();
} catch (IOException e) {
e.printStackTrace();
}
}
public void run() {
try {
// 将图像数据通过Socket发送出去
Socket tempSocket = new Socket(ipname, 6000);
outsocket = tempSocket.getOutputStream();
ByteArrayInputStream inputstream = new ByteArrayInputStream(myoutputstream.toByteArray());
int amount;
while ((amount = inputstream.read(byteBuffer)) != -1) {
outsocket.write(byteBuffer, 0, amount);
}
myoutputstream.flush();
myoutputstream.close();
tempSocket.close();
} catch (IOException e) {
e.printStackTrace();
}
}
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
// Inflate the menu; this adds items to the action bar if it is present.
getMenuInflater().inflate(R.menu.main, menu);
return true;
}
}
该段代码运行正常,然后我写了接收和播放实时视频的程序在另一台安卓设备上测试,代码如下:
package com.example.andsershi;
import android.os.Bundle;
import android.os.Handler;
import android.os.Message;
import android.app.Activity;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.view.Menu;
import android.widget.ImageView;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.Socket;
import java.net.ServerSocket;
public class MainActivity extends Activity {
public ImageView image;
private Bitmap bitmap;
private static final int COMPLETED = 0;
private Handler handler = new Handler() {
@Override
public void handleMessage(Message msg) {
if (msg.what == COMPLETED) {
image.setImageBitmap(bitmap);
super.handleMessage(msg);
}
}
};
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
image=(ImageView)findViewById(R.id.imageView1);
Thread th = new MyThread();
th.start();
/*
while(true)
{
try {
Socket s = ss.accept();
System.out.println("连接成功!");
ins = s.getInputStream();
Bitmap bitmap = BitmapFactory.decodeStream(ins);
image.setImageBitmap(bitmap);
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}*/
}
class MyThread extends Thread{
public Socket s;
public ServerSocket ss;
public void run()
{
byte [] buffer = new byte[1024];
int len = 0;
Message msg =new Message();
try {
ss = new ServerSocket(6000);
} catch (IOException e2) {
// TODO Auto-generated catch block
e2.printStackTrace();
}
ByteArrayOutputStream outStream = new ByteArrayOutputStream();
while(true){
InputStream ins = null;
try {
s = ss.accept();
ins = s.getInputStream();
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
//Bitmap bitmap = BitmapFactory.decodeStream(ins);
try {
while( (len=ins.read(buffer)) != -1){
outStream.write(buffer, 0, len);
}
} catch (IOException e1) {
// TODO Auto-generated catch block
e1.printStackTrace();
}
byte data[] = outStream.toByteArray();
bitmap = BitmapFactory.decodeByteArray(data, 0, data.length);
msg.what = COMPLETED;
handler.sendMessage(msg);
try {
ins.close();
outStream.flush();
outStream.close();
} catch (IOException e) {
// TODO Auto-generated catch block
e.printStackTrace();
}
}
}
public boolean onCreateOptionsMenu(Menu menu) {
// Inflate the menu; this adds items to the action bar if it is present.
getMenuInflater().inflate(R.menu.main, menu);
return true;
}
}
}
我的想法是将接收的每一帧转化为bitmap,然后再imageview上用setImageBitmap上显示出来,但程序运行结果却是我只能显示第一帧画面,但显示我在不断接收数据,而且接收的数据越来越大,最终导致byte data[] = outStream.toByteArray()发生oom异常,折腾一天了都没解决问题,请教下我该怎么改才能实现我的预期效果?