ivideochat源码中关于视频通信中没有声音的问题

一颗大葱 2015-08-20 01:46:36
最近在弄android视频通话方面的东西,通过几天的研究,把一些东西都写出来,还有一些问题希望能和大家一起讨论下,我先把代码发上来,
这是videoActivity:
package net.john.activity;

import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.lang.reflect.Array;
import java.util.Arrays;
import java.util.Map;

import com.shouyanwang.h264encoder;
import com.smaxe.io.ByteArray;
import com.smaxe.uv.client.INetStream;
import com.smaxe.uv.client.NetStream;
import com.smaxe.uv.client.camera.AbstractCamera;
import com.smaxe.uv.stream.support.MediaDataByteArray;

import net.john.R;
import net.john.data.User;
import net.john.util.RTMPConnectionUtil;
import net.john.util.RemoteUtil;
import net.john.util.UltraNetStream;
import android.app.Activity;
import android.app.AlertDialog;
import android.content.Context;
import android.content.DialogInterface;
import android.hardware.Camera;
import android.os.Bundle;
import android.util.Log;
import android.view.KeyEvent;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
import android.view.Window;

public class VideoActivity extends Activity{

final String TAG = "VideoActivity";

// Set true in onCreate(); cleared when the user confirms exit via the back-key
// dialog. onPreviewFrame() drops frames once this is false.
private boolean active;
// Static so RTMPConnectionUtil's publish callback can attach and start this same
// camera instance once the RTMP stream reports PUBLISH_START.
public static AndroidCamera aCamera;
// JNI H.264 encoder wrapper (native library; presumably x264-based — TODO confirm
// against the native source). Only exercised via the unused Decode() helper below.
private h264encoder mH264encoder;
// Opaque native encoder handle returned by initEncoder(); released in surfaceDestroyed().
private long handle;

/**
 * Builds the full-screen chat layout, instantiates the native encoder and the
 * camera wrapper, and marks the activity active.
 */
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
//the window without title
this.requestWindowFeature(Window.FEATURE_NO_TITLE);
setContentView(R.layout.layout_chat);
mH264encoder = new h264encoder();
aCamera = new AndroidCamera(VideoActivity.this);
active = true;
}

/**
 * Intercepts BACK to show a confirm-exit dialog instead of finishing immediately.
 * Confirming stops frame delivery (active = false) before finish().
 */
@Override
public boolean onKeyDown(int keyCode, KeyEvent event) {

if (keyCode == KeyEvent.KEYCODE_BACK) {
new AlertDialog.Builder(VideoActivity.this)
.setMessage(R.string.dialog_exit)
.setPositiveButton(R.string.dialog_ok, new DialogInterface.OnClickListener() {


public void onClick(DialogInterface dialog, int which) {
// Stop pushing frames, then tear the activity down.
active = false;
finish();
}
})
.setNegativeButton(R.string.dialog_cancel, new DialogInterface.OnClickListener() {


public void onClick(DialogInterface dialog, int which) {
// User cancelled — stay in the call.
dialog.dismiss();
}
}).show();
return true;
} else {
return super.onKeyDown(keyCode, event);
}
}

/**
 * Drops the shared camera reference and closes the outgoing RTMP stream.
 * The Camera object itself is released in surfaceDestroyed().
 */
@Override
public void onStop() {
super.onStop();
aCamera = null;
if (RTMPConnectionUtil.netStream != null) {
RTMPConnectionUtil.netStream.close();
}

Log.d("DEBUG", "onStop");
}

@Override
public void onDestroy() {
super.onDestroy();

Log.d("DEBUG", "onDestroy()");
}

// Camera.PreviewCallback delivers the raw NV21 byte arrays read from the camera;
// this wrapper converts them to RGB, packs them as ScreenVideo frames and fires
// them into the RTMP stream via AbstractCamera.fireOnVideoData().
public class AndroidCamera extends AbstractCamera implements SurfaceHolder.Callback,
Camera.PreviewCallback {

private SurfaceView surfaceView;
private SurfaceHolder surfaceHolder;
private Camera camera;

// Preview dimensions requested from the camera (also used by the converters).
private int width;
private int height;

// One-shot init guard for the encoder parameters in onPreviewFrame().
private boolean init;

int blockWidth;
int blockHeight;
int timeBetweenFrames; // 1000 / frameRate
int frameCounter;
// Previous RGB frame kept for inter-frame (delta) encoding; null forces a key frame.
byte[] previous;

/**
 * Hooks this object up as the surface callback and initializes the native
 * encoder for the fixed preview size. Camera itself is opened lazily in
 * surfaceCreated().
 */
public AndroidCamera(Context context) {


surfaceView = (SurfaceView)((Activity) context).findViewById(R.id.surfaceView);
surfaceHolder = surfaceView.getHolder();
surfaceHolder.addCallback(AndroidCamera.this);
surfaceHolder.setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
// Modified preview resolution: 176x144 (QCIF) — per the post, the smallest
// size that still runs; chosen to reduce latency.
width = 176;
height = 144;

handle = mH264encoder.initEncoder(width, height);

init = false;
Log.d("DEBUG", "AndroidCamera()");
}

/**
 * Creates the publishing NetStream and, once the server reports
 * PUBLISH_START, attaches this camera and starts the preview.
 * Called from surfaceChanged().
 */
private void startVideo() {
Log.d("DEBUG", "startVideo()");

RTMPConnectionUtil.netStream = new UltraNetStream(RTMPConnectionUtil.connection);
RTMPConnectionUtil.netStream.addEventListener(new NetStream.ListenerAdapter() {

@Override
public void onNetStatus(final INetStream source, final Map<String, Object> info){
Log.d("DEBUG", "Publisher#NetStream#onNetStatus: " + info);

final Object code = info.get("code");

if (NetStream.PUBLISH_START.equals(code)) {
if (VideoActivity.aCamera != null) {
RTMPConnectionUtil.netStream.attachCamera(aCamera, -1 /*snapshotMilliseconds*/);
Log.d("DEBUG", "aCamera.start()");
aCamera.start();
} else {
Log.d("DEBUG", "camera == null");
}
}
}
});

// NOTE(review): stream name is hard-coded to "aaa" here, while
// RTMPConnectionUtil.startVideo() publishes under User.id + message — confirm
// which one the receiver actually subscribes to.
RTMPConnectionUtil.netStream.publish("aaa", NetStream.RECORD);
}

// Starts preview frame delivery; only safe after surfaceCreated() opened the camera.
public void start() {
camera.startPreview();
}

// Debug helper: formats bytes as two-digit hex (result is currently discarded).
public void printHexString(byte[] b) {
for (int i = 0; i < b.length; i++) {
String hex = Integer.toHexString(b[i] & 0xFF);
if (hex.length() == 1) {
hex = '0' + hex;
}
}
}

/**
 * Per-frame pipeline: NV21 -> RGB -> ScreenVideo packet -> fireOnVideoData().
 * Every 10th frame clears 'previous' to force a periodic key frame.
 * NOTE(review): the Thread.sleep() below runs on the camera callback thread
 * and will throttle/stall preview delivery — confirm this is intended pacing.
 */
public void onPreviewFrame(byte[] arg0, Camera arg1) {
if (!active) return;
if (!init) {
blockWidth = 32;
blockHeight = 32;
timeBetweenFrames = 100; // 1000 / frameRate
frameCounter = 0;
previous = null;
init = true;
}
final long ctime = System.currentTimeMillis();
Log.i(TAG, "*****相机采集到的数组长度"+arg0.length);

byte[] current = RemoteUtil.decodeYUV420SP2RGB(arg0, width, height);
try {
// int byte_result = Decode(arg0);
// byte[] bytes1 = copyOf(out,byte_result);
//fireOnVideoData
final byte[] packet = RemoteUtil.encode(current, previous, blockWidth, blockHeight, width, height);
fireOnVideoData(new MediaDataByteArray(timeBetweenFrames, new ByteArray(packet)));
previous = current;
if (++frameCounter % 10 == 0) previous = null;

}
catch (Exception e) {
e.printStackTrace();
}
final int spent = (int) (System.currentTimeMillis() - ctime);
try {
Thread.sleep(Math.max(0, timeBetweenFrames - spent));
} catch (InterruptedException e) {
e.printStackTrace();
}
}

// Reflection-based re-implementation of Arrays.copyOf for byte[];
// kept for the commented-out H.264 path above.
public byte[] copyOf(byte[] arr,int len)
{
Class type=arr.getClass().getComponentType();
byte[] target=(byte[])Array.newInstance(type, len);
System.arraycopy(arr, 0, target, 0, len);
return target;
}

// Scratch output buffer for the native H.264 encoder (unused in the live path).
private byte[] out = new byte[20*1024];
long start = 0;
long end = 0;
/**
 * Encodes one NV21 frame through the native H.264 encoder and appends the
 * result to /sdcard/x264_video_activity.264. Debug-only; not called from the
 * live streaming path (see commented-out lines in onPreviewFrame()).
 */
private int Decode(byte[] yuvData){
start = System.currentTimeMillis();
int result = mH264encoder.encodeframe(handle, -1, yuvData, yuvData.length, out);
end = System.currentTimeMillis();
Log.e(TAG, "encode result:"+result+"--encode time:"+(end-start));
if(result > 0){
try {
FileOutputStream file_out = new FileOutputStream ("/sdcard/x264_video_activity.264",true);
file_out.write(out,0,result);
file_out.close();
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
}
}
// this.setPrewDataGetHandler();
return result;
}

// Surface is ready and sized: begin publishing to the server.
public void surfaceChanged(SurfaceHolder holder, int format, int width,
int height) {
//camera.startPreview();
//camera.unlock();
startVideo();
Log.d("DEBUG", "surfaceChanged()");
}

/**
 * Opens the camera, binds it to this surface and registers the preview
 * callback; on I/O failure the camera is released and nulled.
 */
public void surfaceCreated(SurfaceHolder holder) {
camera = Camera.open();
try {
camera.setPreviewDisplay(surfaceHolder);
camera.setPreviewCallback(this);
Camera.Parameters params = camera.getParameters();
params.setPreviewSize(width, height);
camera.setParameters(params);
} catch (IOException e) {
e.printStackTrace();
camera.release();
camera = null;
}

Log.d("DEBUG", "surfaceCreated()");
}

// Tears down the camera and the native encoder when the surface goes away.
public void surfaceDestroyed(SurfaceHolder holder) {
if (camera != null) {
camera.stopPreview();
camera.release();
camera = null;
}
mH264encoder.destory(handle);
Log.d("DEBUG", "surfaceDestroy()");
}

} //AndroidCamera
}

zhe
...全文
464 10 打赏 收藏 转发到动态 举报
写回复
用AI写文章
10 条回复
切换为时间正序
请发表友善的回复…
发表回复
pasfzf 2016-11-09
  • 打赏
  • 举报
回复
楼主在吗,请问你的问题解决了吗 qq:842876582,请加下qq,有点问题咨询下,谢谢
liujiazhen841219 2016-10-12
  • 打赏
  • 举报
回复
请问楼主,视频不能实时传的服务器的问题,延时问题和声音问题都解决了吗
s2221219 2016-06-07
  • 打赏
  • 举报
回复
引用 7 楼 s2221219 的回复:
楼主 你好 netStream = new UltraNetStream(connection); 有问题, RTMPConnectionUtil.netStream.addEventListener(new NetStream.ListenerAdapter()监听并未进入;虽然连接到了red5服务器,但是流并没有上传到服务器,
请问楼主是否有这的经验
s2221219 2016-06-07
  • 打赏
  • 举报
回复
楼主 你好 netStream = new UltraNetStream(connection); 有问题, RTMPConnectionUtil.netStream.addEventListener(new NetStream.ListenerAdapter()监听并未进入;虽然连接到了red5服务器,但是流并没有上传到服务器,
  • 打赏
  • 举报
回复
楼主能不能加一下你的QQ啊 ,我最近在做这个类型的毕设,能不能请教一下您,QQ:810666264
一颗大葱 2015-12-10
  • 打赏
  • 举报
回复
引用 4 楼 goforbeauty 的回复:
楼主,请问一下,你搭的red5服务器是怎么搭的啊,为什么我的视频流总是上传不到服务器上去呢
就是red5的自带的demo啊
goforbeauty 2015-11-27
  • 打赏
  • 举报
回复
楼主,请问一下,你搭的red5服务器是怎么搭的啊,为什么我的视频流总是上传不到服务器上去呢
一颗大葱 2015-08-20
  • 打赏
  • 举报
回复
主要就是这几个用于视频通信方面,其中关于有些人说的视频颜色错误,我把RemoteUtil.java把r和b互换了一下就好了,我贴的是已经换过了的。我也把demo的地址贴出来http://download.csdn.net/detail/xiaoliouc/4933594 (这是未修改的)现在demo运行起来是可以连接到red5的,视频可显示,关于视频延时方面,没找到很好的方法,只能把分辨率减小,经测试我设置的值是最小值了,再小就无法运行了。现在出现的问题是视频画面是可以传到red5了,可是声音却没有,仔细研究代码,好像没有发现录制声音的代码,我对juv-rtmp-client包不是很熟悉,有没有熟悉的人能告知一下如何录制声音呢,据我所知这个包是可以录制声音的。
一颗大葱 2015-08-20
  • 打赏
  • 举报
回复
这是RemoteUtil.java用于解码
package net.john.util;

import java.io.ByteArrayOutputStream;
import java.io.OutputStream;
import java.util.zip.Deflater;
import java.util.zip.DeflaterOutputStream;

import android.util.Log;

public class RemoteUtil {

	// Shared zlib compressor reused across encode() calls and reset after every
	// compressed block. NOTE(review): not thread-safe — acceptable while encode()
	// is only invoked from the single camera preview-callback thread.
	private static Deflater deflater = new Deflater();

	/**
	 * Converts one NV21 (YUV420SP) camera preview frame to packed 24-bit RGB.
	 *
	 * Fixed-point ITU-R-style conversion; results are clamped to [0, 262143]
	 * before the >>10 rescale to 8 bits. NOTE: the r/b coefficients are
	 * deliberately swapped relative to the textbook formula — the original
	 * author did this to correct a red/blue channel swap on the receiving side.
	 *
	 * @param yuv420sp NV21 buffer, at least width*height*3/2 bytes
	 * @param width    frame width in pixels
	 * @param height   frame height in pixels
	 * @return newly allocated buffer of width*height*3 bytes (R,G,B per pixel)
	 * @throws NullPointerException     if yuv420sp is null
	 * @throws IllegalArgumentException if yuv420sp is too small
	 */
	public static byte[] decodeYUV420SP2RGB(byte[] yuv420sp, int width, int height) {
		final int frameSize = width * height;

		// Validate the input before allocating the output. (The original code
		// also validated the freshly allocated rgbBuf — dead code, removed.)
		if (yuv420sp == null) throw new NullPointerException("buffer 'yuv420sp' is null");
		if (yuv420sp.length < frameSize * 3 / 2) throw new IllegalArgumentException("buffer 'yuv420sp' size " + yuv420sp.length + " < minimum " + frameSize * 3 / 2);

		byte[] rgbBuf = new byte[frameSize * 3];

		int i = 0, y = 0;
		int uvp = 0, u = 0, v = 0;
		int y1192 = 0, r = 0, g = 0, b = 0;

		for (int j = 0, yp = 0; j < height; j++) {
			// Each chroma row is shared by two luma rows (4:2:0 subsampling).
			uvp = frameSize + (j >> 1) * width;
			u = 0;
			v = 0;
			for (i = 0; i < width; i++, yp++) {
				y = (0xff & ((int) yuv420sp[yp])) - 16;
				if (y < 0) y = 0;
				if ((i & 1) == 0) {
					// NV21 interleaves V then U; one pair covers two pixels.
					v = (0xff & yuv420sp[uvp++]) - 128;
					u = (0xff & yuv420sp[uvp++]) - 128;
				}

				y1192 = 1192 * y;
				b = (y1192 + 1634 * v);
				g = (y1192 - 833 * v - 400 * u);
				r = (y1192 + 2066 * u);

				if (r < 0) r = 0; else if (r > 262143) r = 262143;
				if (g < 0) g = 0; else if (g > 262143) g = 262143;
				if (b < 0) b = 0; else if (b > 262143) b = 262143;

				rgbBuf[yp * 3] = (byte)(r >> 10);
				rgbBuf[yp * 3 + 1] = (byte)(g >> 10);
				rgbBuf[yp * 3 + 2] = (byte)(b >> 10);
			}
		}//for
		return rgbBuf;
	}// decodeYUV420Sp2RGB

	/**
	 * Legacy entry point hard-wired to the 176x144 (QCIF) preview size used by
	 * VideoActivity. Delegates to the generalized overload below.
	 */
	public static byte[] decodeYUV420SP2YUV420(byte[]data,int length) {
		return decodeYUV420SP2YUV420(data, length, 176, 144);
	} //YUV420SP2YUV420

	/**
	 * Rearranges a semi-planar NV21 frame (Y plane + interleaved VU) into a
	 * planar I420 frame (Y plane, then all U, then all V).
	 *
	 * @param data   NV21 input, at least 'length' bytes
	 * @param length total frame size (width*height*3/2)
	 * @param width  frame width in pixels
	 * @param height frame height in pixels
	 * @return newly allocated I420 buffer of 'length' bytes
	 */
	public static byte[] decodeYUV420SP2YUV420(byte[] data, int length, int width, int height) {
		byte[] str = new byte[length];
		// Y plane is identical in both layouts — copy it wholesale.
		System.arraycopy(data, 0, str, 0, width * height);

		int strIndex = width * height;

		// NV21 stores V,U,V,U,... after the Y plane: odd offsets are U samples.
		for (int i = width * height + 1; i < length; i += 2) {
			str[strIndex++] = data[i];
		}
		// Even offsets are V samples.
		for (int i = width * height; i < length; i += 2) {
			str[strIndex++] = data[i];
		}
		return str;
	}

	/**
	 * Packs one RGB frame as an FLV ScreenVideo (codec id 3) video packet:
	 * a key frame when 'previous' is null, otherwise an inter frame containing
	 * only the zlib-compressed blocks that changed since 'previous'.
	 *
	 * Packet layout: 1 tag byte, 2 bytes blockWidth/width, 2 bytes
	 * blockHeight/height, then per block a 2-byte DataSize followed by the
	 * deflated pixel rows (bottom-up, as the codec requires). DataSize 0 marks
	 * an unchanged block.
	 *
	 * (Fix: removed the per-row debug Log.i() from the inner loop — it built a
	 * concatenated string for every 32-px row of every changed block on every
	 * frame, a significant cost on the camera callback thread.)
	 *
	 * @param current     current RGB frame (3 bytes per pixel)
	 * @param previous    previous RGB frame, or null to force a key frame
	 * @param blockWidth  encoder block width (multiple of 16, max 256)
	 * @param blockHeight encoder block height (multiple of 16, max 256)
	 * @param width       frame width in pixels
	 * @param height      frame height in pixels
	 * @return the serialized ScreenVideo packet
	 * @throws Exception if writing to the in-memory streams fails
	 */
	public static byte[] encode(final byte[] current, final byte[] previous, final int blockWidth, final int blockHeight, final int width, final int height) throws Exception {
		ByteArrayOutputStream baos = new ByteArrayOutputStream(16 * 1024);

		if (previous == null) {
			baos.write(getTag(0x01 /* key-frame */, 0x03 /* ScreenVideo codec */));
		} else {
			baos.write(getTag(0x02 /* inter-frame */, 0x03 /* ScreenVideo codec */));
		}

		// write header: block size is encoded in the top 4 bits as (size/16 - 1).
		final int wh = width + ((blockWidth / 16 - 1) << 12);
		final int hh = height + ((blockHeight / 16 - 1) << 12);

		writeShort(baos, wh);
		writeShort(baos, hh);

		// write content: blocks run left-to-right, bottom row of blocks first.
		int y0 = height;
		int x0 = 0;
		int bwidth = blockWidth;
		int bheight = blockHeight;

		while (y0 > 0) {
			bheight = Math.min(y0, blockHeight);
			y0 -= bheight;

			bwidth = blockWidth;
			x0 = 0;

			while (x0 < width) {
				// Clip the last column of blocks to the frame edge.
				bwidth = (x0 + blockWidth > width) ? width - x0 : blockWidth;

				final boolean changed = isChanged(current, previous, x0, y0, bwidth, bheight, width, height);

				if (changed) {
					ByteArrayOutputStream blaos = new ByteArrayOutputStream(4 * 1024);

					DeflaterOutputStream dos = new DeflaterOutputStream(blaos, deflater);

					// Rows are emitted bottom-up within the block.
					for (int y = 0; y < bheight; y++) {
						dos.write(current, 3 * ((y0 + bheight - y - 1) * width + x0), 3 * bwidth);
					}
					dos.finish();
					deflater.reset();

					final byte[] bbuf = blaos.toByteArray();
					final int written = bbuf.length;

					// write DataSize
					writeShort(baos, written);
					// write Data
					baos.write(bbuf, 0, written);
				} else {
					// write DataSize 0 == "block unchanged"
					writeShort(baos, 0);
				}
				x0 += bwidth;
			}
		}
		return baos.toByteArray();
	}

	/**
	 * Writes a 16-bit big-endian value to the {@link OutputStream <tt>os</tt>}.
	 *
	 * @param os destination stream
	 * @param n  value; only the low 16 bits are written
	 * @throws Exception if an exception occurred
	 */
	private static void writeShort(OutputStream os, final int n) throws Exception {
		os.write((n >> 8) & 0xFF);
		os.write(n & 0xFF);
	}

	/**
	 * Checks if an image block differs between the current and previous frame.
	 * A null 'previous' (key frame) always counts as changed.
	 *
	 * @param current  current RGB frame
	 * @param previous previous RGB frame, or null
	 * @param x0 block left edge (pixels)
	 * @param y0 block top edge (pixels)
	 * @param blockWidth  block width (pixels)
	 * @param blockHeight block height (pixels)
	 * @param width  frame width (pixels)
	 * @param height frame height (pixels)
	 * @return <code>true</code> if changed, otherwise <code>false</code>
	 */
	public static boolean isChanged(final byte[] current, final byte[] previous, final int x0, final int y0, final int blockWidth, final int blockHeight, final int width, final int height) {
		if (previous == null)
			return true;

		for (int y = y0, ny = y0 + blockHeight; y < ny; y++) {
			// Both frames share the same layout, so a single offset suffices
			// (the original computed the same value twice as foff/poff).
			final int off = 3 * (x0 + width * y);

			for (int i = 0, ni = 3 * blockWidth; i < ni; i++) {
				if (current[off + i] != previous[off + i])
					return true;
			}
		}
		return false;
	}

	/**
	 * Builds the FLV video tag byte: frame type in the high nibble, codec id in
	 * the low nibble.
	 *
	 * @param frame frame type (1 = key frame, 2 = inter frame)
	 * @param codec codec id (3 = ScreenVideo)
	 * @return tag byte
	 */
	public static int getTag(final int frame, final int codec) {
		return ((frame & 0x0F) << 4) + (codec & 0x0F);
	}
}
一颗大葱 2015-08-20
  • 打赏
  • 举报
回复
这是RTMPConnectionUtil:(用于连接red5)
package net.john.util;

import java.util.Date;
import java.util.Map;

import net.john.activity.CallActivity;
import net.john.activity.ChatActivity;
import net.john.activity.VideoActivity;
import net.john.data.User;
import android.content.Context;
import android.content.Intent;
import android.os.Message;
import android.util.Log;

import com.smaxe.uv.Responder;
import com.smaxe.uv.client.INetConnection;
import com.smaxe.uv.client.INetStream;
import com.smaxe.uv.client.License;

public class RTMPConnectionUtil {
	
	//private static final String red5_url = "rtmp://10.3.12.118";
	// Red5 server address on your own machine — change it here.
	private static final String red5_url = "rtmp://192.168.88.198/oflaDemo";
	
	// Shared RTMP connection and publishing stream; also written by
	// VideoActivity.AndroidCamera.startVideo().
	public static UltraNetConnection connection;
	public static UltraNetStream netStream;
	
	// Current meeting id (timestamp + random suffix), passed to the server's
	// createMeeting/enterMeeting/reject calls.
	public static String message;
	
	/**
	 * Opens the RTMP connection to the Red5 server, registering a client-side
	 * handler (for server-invoked callbacks) and a status listener.
	 *
	 * @param context used to launch ChatActivity from server callbacks
	 */
	public static void ConnectRed5(Context context) {
//		License.setKey("63140-D023C-D7420-00B15-91FC7");
		connection = new UltraNetConnection();
		
		// Disable the inactivity timeout and enlarge both socket buffers for
		// continuous media traffic.
		connection.configuration().put(UltraNetConnection.Configuration.INACTIVITY_TIMEOUT, -1);
        connection.configuration().put(UltraNetConnection.Configuration.RECEIVE_BUFFER_SIZE, 256 * 1024);
        connection.configuration().put(UltraNetConnection.Configuration.SEND_BUFFER_SIZE, 256 * 1024);
		
		connection.client(new ClientHandler(context));
		connection.addEventListener(new NetConnectionListener());
		Log.d("DEBUG", User.id + " - " + User.phone);
		connection.connect(red5_url);
	}
	
	// Methods of this handler are invoked BY THE SERVER via RTMP RPC — their
	// names and signatures form a remote contract and must not be renamed.
	private static class ClientHandler extends Object {
		
		private Context context;
		
		ClientHandler(Context context) {
			this.context = context;
		};
		
		//Server invoke this method: an incoming call — open ChatActivity in "callyou" state.
		public void getVideoInfo(String fromUserId, String fromUserName,String message) {
			System.out.println(fromUserId + " ++ " + fromUserName + " ++ " +  message);
			RTMPConnectionUtil.message = message;
			Intent intent = new Intent(context, ChatActivity.class);
			intent.putExtra("state", "callyou");
			intent.putExtra("who", fromUserName);
			context.startActivity(intent);
		}
		
		//Server invoke this method when receiver reject
		public void rejected(String userid, String username) {
			System.out.println(userid + " ++ " + username);
			Message msg = ChatActivity.handler.obtainMessage();
			msg.arg1 = 0; //receiver reject
			msg.sendToTarget();
		}
		
		//Server invoke this method when receiver receive call
		public void addMember(String userId, String userName) {
			System.out.println(userId + " ++ " + userName);
			
			Message msg = ChatActivity.handler.obtainMessage();
			msg.arg1 = 1;
			msg.sendToTarget();
			
			Log.d("DEBUG", "addMember()");
			
		}
		
		//Server invoke this method when receiver is not login
		// (arg1 2 = peer not logged in, 3 = peer busy; dispatched to ChatActivity).
		public void Info(String information) {
			System.out.println("Info" + information);
			
			Message msg = ChatActivity.handler.obtainMessage();
			if (information.equals("client is not login the Red5 Server")) {
				msg.arg1 = 2;
				msg.sendToTarget();
			} else if (information.equals("the client is calling, please try again")) {
				msg.arg1 = 3;
				msg.sendToTarget();
			}
		}
		
	  // Bandwidth-detection callbacks some servers invoke; intentionally no-ops.
	  public void onBWDone()
		  {
			  
		  }

	  public void onBWDone(Object[] paramArrayOfObject)
		  {
			  
		  }
	}
	
	// Logs connection-level status/error events from the RTMP library.
	private static class NetConnectionListener extends UltraNetConnection.ListenerAdapter {
		public NetConnectionListener() {}
		
		@Override
		public void onAsyncError(final INetConnection source, final String message, final Exception e) {
			System.out.println("NetConnection#onAsyncError: " + message + " "+ e);
		}

		@Override
		public void onIOError(final INetConnection source, final String message) {
			System.out.println("NetConnection#onIOError: " + message);
		}

		@Override
		public void onNetStatus(final INetConnection source, final Map<String, Object> info) {
			System.out.println("NetConnection#onNetStatus: " + info);
			final Object code = info.get("code");
			if (UltraNetConnection.CONNECT_SUCCESS.equals(code)) {
//				source.call("testConnection", new Responder() {
//					public void onResult(final Object result) {
//						System.out.println("Method testConnection result: " + result);
//					}
//
//					public void onStatus(final Map<String, Object> status) {
//						System.out.println("Method testConnection status: " + status);
//					}
//				});
			}
		}
	}// NetConnectionListener
	
	//invoke server method createMeeting:
	// generates the meeting id (timestamp + 2-digit random) and calls the server.
	public static void invokeMethodFormRed5(String toUserId) {
		Date nowDate = new Date();
		String time = nowDate.getTime() +  "" + (int)((Math.random()*100)%100);
		message = time;
		connection.call("createMeeting", responder, User.id + "", toUserId, message);
		Log.d("DEBUG", "call createMeeting");
	}
	
	// Handles the server's reply to createMeeting.
	private static Responder responder = new Responder() {


		public void onResult(Object arg0) {
			System.out.println("Method createMeeting result: " + arg0);
			callback_createMeeting();
		}


		public void onStatus(Map<String, Object> arg0) {
			System.out.println("Method createMeetiong status: " + arg0);
		}
		
	};
	
	//invoke server method reject
	public static void invokeRejectMethod() {
		connection.call("reject", null, message, User.id);
	}
	
	// Currently a no-op; publishing is started from VideoActivity instead.
	private static void callback_createMeeting() {
		
		//startVideo();
	}
	
	/**
	 * Alternative publish path (currently unreferenced — see the commented-out
	 * calls in the callbacks): creates the NetStream and, once the server
	 * reports PUBLISH_START, attaches VideoActivity's camera and starts it.
	 */
	private static void startVideo() {
		
		Log.d("DEBUG", "startVideo()");

		netStream = new UltraNetStream(connection);
		netStream.addEventListener(new UltraNetStream.ListenerAdapter() {
			@Override
            public void onNetStatus(final INetStream source, final Map<String, Object> info){
                System.out.println("Publisher#NetStream#onNetStatus: " + info);
                Log.d("DEBUG", "Publisher#NetStream#onNetStatus: " + info);
                
                final Object code = info.get("code");
                
                if (UltraNetStream.PUBLISH_START.equals(code)) {
                    if (VideoActivity.aCamera != null) {
                        netStream.attachCamera(VideoActivity.aCamera, -1 /*snapshotMilliseconds*/);
                        Log.d("DEBUG", "aCamera.start()");
                        VideoActivity.aCamera.start();
                    } else {
                    	Log.d("DEBUG", "camera == null");
                    }
                }    
            }
			
		});
		
		Log.i("DEBUG", "User.id:"+User.id+" message"+message);
		netStream.publish(User.id + message, UltraNetStream.RECORD);//"mp4:"+User.id + message+".mp4"
	}
	
	//invoke server method enterMeeting
	public static void invokeEnterMeetingMethod() {
		connection.call("enterMeeting", enterResp, message, User.id);
	}
	
	// Handles the server's reply to enterMeeting.
	private static Responder enterResp = new Responder() {


		public void onResult(Object arg0) {
			System.out.println("Method enterMeeting result: " + arg0);
			callback_enterMeeting();
		}


		public void onStatus(Map<String, Object> arg0) {
			System.out.println("Method enterMeetiong status: " + arg0);
		}
		
	};
	
	// Notifies ChatActivity (arg1 = 1, "call accepted") after joining a meeting.
	private static void callback_enterMeeting() {
		Message msg = ChatActivity.handler.obtainMessage();
		msg.arg1 = 1;
		msg.sendToTarget();
		
		//startVideo();
	}
}

80,360

社区成员

发帖
与我相关
我的任务
社区描述
移动平台 Android
androidandroid-studioandroidx 技术论坛(原bbs)
社区管理员
  • Android
  • yechaoa
  • 失落夏天
加入社区
  • 近7日
  • 近30日
  • 至今
社区公告
暂无公告

试试用AI创作助手写篇文章吧