/*
 * NOTE(review): the following stray web-page UI text was found above the
 * package declaration (extraction artifact) and has been commented out so
 * the file compiles: "80,350", "社区成员" (community members), "发帖" (post),
 * "与我相关" (related to me), "我的任务" (my tasks), "分享" (share).
 */
package net.majorkernelpanic.streaming.video;
import java.nio.ByteBuffer;
import net.majorkernelpanic.streaming.misc.VideoServer;
import android.annotation.SuppressLint;
import android.media.MediaCodec;
import android.media.MediaCodecInfo;
import android.media.MediaCodecList;
import android.media.MediaFormat;
import android.util.Log;
/**
 * Hardware H.264 ("video/avc") encoder wrapper around {@link MediaCodec}.
 *
 * <p>A background thread (started via {@link #StartEncoderThread()}) pulls
 * NV21 camera frames from {@code VideoServer.YUVQueue}, converts them to
 * NV12, feeds them to the encoder, and delivers the encoded elementary
 * stream through {@link VideoDecoderListener#onVideoDataCom(byte[], int)}.
 * SPS/PPS codec-config bytes are cached and prepended to every key frame.
 */
public class AvcEncoder {

	private final static String TAG = "TAG";

	private MediaCodec mediaCodec;
	private int m_width;
	private int m_height;
	private int m_framerate;
	private int m_bitrate;
	private String mimeType = "video/avc";

	/** Cached SPS/PPS (codec-config) bytes, prepended to every key frame. */
	public byte[] configbyte;

	/** Sink for encoded output; invoked on the encoder thread. */
	public VideoDecoderListener callBack;

	/** Receives encoded video (and, per the interface, audio) data. */
	public interface VideoDecoderListener {
		public void onVideoDataCom(byte[] data, int len);
		public void onAudioDataCom(byte[] data, int len);
	}

	// volatile: written by StopThread()/the encoder thread, read by both.
	public volatile boolean isRuning = false;

	/**
	 * @param width     frame width in pixels
	 * @param height    frame height in pixels
	 * @param framerate nominal frame rate, used for presentation timestamps
	 * @param bitrate   target bitrate in bits/s; if <= 0, falls back to the
	 *                  legacy width*height*5 heuristic
	 * @param callBack  receiver for the encoded stream (must not be null)
	 * @throws IllegalStateException if no AVC encoder is available or the
	 *                               codec cannot be created
	 */
	@SuppressLint("NewApi")
	public AvcEncoder(int width, int height, int framerate, int bitrate,
			VideoDecoderListener callBack) {
		m_width = width;
		m_height = height;
		m_framerate = framerate;
		m_bitrate = bitrate;
		this.callBack = callBack;
		initMediaCodec();
	}

	/** Returns the first encoder advertising support for {@code mime}, or null. */
	@SuppressLint("NewApi")
	private static MediaCodecInfo selectCodec(String mime) {
		int numCodecs = MediaCodecList.getCodecCount();
		for (int i = 0; i < numCodecs; i++) {
			MediaCodecInfo info = MediaCodecList.getCodecInfoAt(i);
			if (!info.isEncoder()) {
				continue;
			}
			for (String type : info.getSupportedTypes()) {
				if (type.equals(mime)) {
					return info;
				}
			}
		}
		return null;
	}

	/** Returns the first supported YUV420 color format of {@code codecInfo}, or 0. */
	@SuppressLint("NewApi")
	private int selectColorFormat(MediaCodecInfo codecInfo) {
		MediaCodecInfo.CodecCapabilities capabilities = codecInfo
				.getCapabilitiesForType(mimeType);
		for (int format : capabilities.colorFormats) {
			switch (format) {
			case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar:
			case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedPlanar:
			case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar:
			case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedSemiPlanar:
			case MediaCodecInfo.CodecCapabilities.COLOR_TI_FormatYUV420PackedSemiPlanar:
				return format;
			default:
				Log.i(TAG, "Skipping unsupported color format " + format);
				break;
			}
		}
		return 0;
	}

	/** Creates, configures and starts the AVC encoder. */
	@SuppressLint("NewApi")
	private void initMediaCodec() {
		MediaCodecInfo codecInfo = selectCodec(mimeType);
		if (codecInfo == null) {
			// BUGFIX: the original NPE'd on getCapabilitiesForType(null codecInfo).
			throw new IllegalStateException("No encoder found for " + mimeType);
		}
		int colorFormat = selectColorFormat(codecInfo);
		// Device quirk: this model needs the semi-planar format regardless of
		// what the codec advertises.
		if (android.os.Build.MODEL.equals("HUAWEI NXT-AL10")) {
			colorFormat = MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar;
		}
		MediaFormat mediaFormat = MediaFormat.createVideoFormat(mimeType,
				m_width, m_height);
		mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, colorFormat);
		// BUGFIX: the constructor's bitrate parameter was stored but never
		// used; honor it, keeping the legacy width*height*5 heuristic only as
		// a fallback for non-positive values.
		int bitrate = m_bitrate > 0 ? m_bitrate : m_width * m_height * 5;
		mediaFormat.setInteger(MediaFormat.KEY_BIT_RATE, bitrate);
		mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, m_framerate);
		mediaFormat.setInteger(MediaFormat.KEY_I_FRAME_INTERVAL, 1);
		try {
			Log.i(TAG, "---------initMediaCodec-------3----------colorFormat="
					+ colorFormat);
			mediaCodec = MediaCodec.createByCodecName(codecInfo.getName());
		} catch (Exception e) {
			// BUGFIX: the original swallowed this and then NPE'd on
			// configure(); fail fast with the cause preserved.
			throw new IllegalStateException(
					"Cannot create encoder " + codecInfo.getName(), e);
		}
		mediaCodec.configure(mediaFormat, null, null,
				MediaCodec.CONFIGURE_FLAG_ENCODE);
		mediaCodec.start();
		Log.d(TAG, "mediaCodec.start ");
	}

	/** Stops and releases the codec; safe to call when it was never created. */
	@SuppressLint("NewApi")
	private void StopEncoder() {
		try {
			if (mediaCodec != null) {
				mediaCodec.stop();
				mediaCodec.release();
			}
		} catch (Exception e) {
			Log.e(TAG, "StopEncoder failed", e);
		}
	}

	/** Signals the encoder thread to exit and releases the codec. */
	public void StopThread() {
		if (!isRuning)
			return;
		isRuning = false;
		try {
			StopEncoder();
		} catch (Exception e) {
			Log.e(TAG, "StopThread failed", e);
		}
	}

	/**
	 * Starts the background encode loop. If the queue is empty the last frame
	 * is re-submitted (original behavior: keeps the stream alive); before the
	 * first frame arrives the thread sleeps 500 ms between polls.
	 */
	public void StartEncoderThread() {
		Thread encoderThread = new Thread(new Runnable() {
			@SuppressLint("NewApi")
			@Override
			public void run() {
				isRuning = true;
				// Reused NV12 buffer — the original allocated one per frame.
				byte[] nv12 = new byte[m_width * m_height * 3 / 2];
				byte[] input = null;
				long generateIndex = 0;
				while (isRuning) {
					if (VideoServer.YUVQueue.size() > 0) {
						byte[] nv21 = VideoServer.YUVQueue.poll();
						NV21ToNV12(nv21, nv12, m_width, m_height);
						input = nv12;
					}
					if (input != null) {
						try {
							generateIndex = encodeFrame(input, generateIndex);
						} catch (Throwable t) {
							t.printStackTrace();
						}
					} else {
						try {
							Thread.sleep(500);
						} catch (InterruptedException e) {
							// BUGFIX: restore the interrupt flag and exit
							// instead of swallowing the interruption.
							Thread.currentThread().interrupt();
							return;
						}
					}
				}
			}
		});
		encoderThread.start();
	}

	/**
	 * Submits one NV12 frame to the encoder and drains all pending output.
	 *
	 * @param input         NV12 frame of exactly width*height*3/2 bytes
	 * @param generateIndex index of the frame being submitted
	 * @return the next frame index (incremented only when a frame was queued)
	 */
	@SuppressLint("NewApi")
	private long encodeFrame(byte[] input, long generateIndex) {
		ByteBuffer[] inputBuffers = mediaCodec.getInputBuffers();
		ByteBuffer[] outputBuffers = mediaCodec.getOutputBuffers();
		int inputBufferIndex = mediaCodec.dequeueInputBuffer(-1);
		if (inputBufferIndex >= 0) {
			long pts = computePresentationTime(generateIndex);
			ByteBuffer inputBuffer = inputBuffers[inputBufferIndex];
			inputBuffer.clear();
			inputBuffer.put(input);
			mediaCodec.queueInputBuffer(inputBufferIndex, 0, input.length,
					pts, 0);
			generateIndex++;
		}
		MediaCodec.BufferInfo bufferInfo = new MediaCodec.BufferInfo();
		int outputBufferIndex = mediaCodec.dequeueOutputBuffer(bufferInfo, 0);
		while (outputBufferIndex >= 0) {
			ByteBuffer outputBuffer = outputBuffers[outputBufferIndex];
			// BUGFIX: honor bufferInfo.offset instead of assuming data
			// starts at position 0.
			outputBuffer.position(bufferInfo.offset);
			outputBuffer.limit(bufferInfo.offset + bufferInfo.size);
			byte[] outData = new byte[bufferInfo.size];
			outputBuffer.get(outData);
			// BUGFIX: flags is a bit mask; the original compared with ==
			// against magic numbers (2, 3 on some devices; 1, 9 on MTK).
			// Mask tests cover all those combinations generically.
			if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_CODEC_CONFIG) != 0) {
				configbyte = outData; // SPS/PPS
			} else if ((bufferInfo.flags & MediaCodec.BUFFER_FLAG_SYNC_FRAME) != 0
					&& configbyte != null) {
				// Prepend cached SPS/PPS to every key frame so the stream is
				// decodable from any key frame.
				byte[] keyframe = new byte[outData.length + configbyte.length];
				System.arraycopy(configbyte, 0, keyframe, 0, configbyte.length);
				System.arraycopy(outData, 0, keyframe, configbyte.length,
						outData.length);
				callBack.onVideoDataCom(keyframe, keyframe.length);
			} else {
				callBack.onVideoDataCom(outData, outData.length);
			}
			mediaCodec.releaseOutputBuffer(outputBufferIndex, false);
			outputBufferIndex = mediaCodec.dequeueOutputBuffer(bufferInfo, 0);
		}
		return generateIndex;
	}

	/**
	 * Converts NV21 (Y plane + interleaved V/U) to NV12 (Y plane +
	 * interleaved U/V) by copying the luma plane and swapping each chroma
	 * byte pair.
	 */
	private void NV21ToNV12(byte[] nv21, byte[] nv12, int width, int height) {
		if (nv21 == null || nv12 == null)
			return;
		int framesize = width * height;
		System.arraycopy(nv21, 0, nv12, 0, framesize);
		// BUGFIX: the original loops used framesize + j - 1 starting at
		// j = 0, which wrote over the last luma byte (nv12[framesize - 1])
		// and read a luma byte (nv21[framesize - 1]) as chroma. It also
		// copied the Y plane a second time element-by-element.
		for (int j = 0; j < framesize / 2; j += 2) {
			nv12[framesize + j] = nv21[framesize + j + 1];     // U
			nv12[framesize + j + 1] = nv21[framesize + j];     // V
		}
	}

	/**
	 * Generates the presentation time for frame N, in microseconds.
	 */
	private long computePresentationTime(long frameIndex) {
		return 132 + frameIndex * 1000000 / m_framerate;
	}
}