80,350
社区成员
发帖
与我相关
我的任务
分享
X.MS.AVC.Encoder, version -..
01-01 08:22:16.578 4609 5000 I MediaCodec: MediaCodec will operate in async mode
01-01 08:22:16.578 4609 5001 D MS_VIDEO_ENC: enableStoreMetaDataInBuffers
01-01 08:22:16.578 4609 5001 E OMXNodeInstance: getParameter(42:MS.AVC.Encoder, ParamConsumerUsageBits(0x6f800004)) ERROR: UnsupportedIndex(0x8000101a)
01-01 08:22:16.579 4609 5001 E OMXNodeInstance: setParameter(42:MS.AVC.Encoder, OMX.google.android.index.storeMetaDataInBuffers(0x7f100004): Output:1 en=0 type=1) ERROR: BadPortIndex(0x8000101b)
01-01 08:22:16.579 4609 5001 E ACodec : [OMX.MS.AVC.Encoder] storeMetaDataInBuffers (output) failed w/ err -2147483648
01-01 08:22:16.579 4609 5001 I ACodec : setupVideoEncoder succeeded
01-01 08:22:16.579 4609 5001 E OMXNodeInstance: setConfig(42:MS.AVC.Encoder, ConfigPriority(0x6f800002)) ERROR: NotImplemented(0x80001006)
01-01 08:22:16.579 4609 5001 I ACodec : codec does not support config priority (err -2147483648)
01-01 08:22:16.580 4609 4634 I tinyalsa: open /dev/snd/pcmC1D0c in NON-Block mode
01-01 08:22:16.581 4609 5007 I AudioFlinger: AudioFlinger's thread 0xdb450008 ready to run
01-01 08:22:16.590 4609 4634 W AudioFlinger: acquireAudioSessionId() unknown client 4913 for session 21
01-01 08:22:16.606 4609 5008 I MediaCodec: MediaCodec will operate in async mode
01-01 08:22:16.607 4609 5008 E OMXNodeInstance: setConfig(43:google.aac.encoder, ConfigPriority(0x6f800002)) ERROR: Undefined(0x80001001)
01-01 08:22:16.607 4609 5008 I ACodec : codec does not support config priority (err -2147483648)
01-01 08:22:16.607 4609 4634 D MPEG4Writer: Video track stopping
01-01 08:22:16.607 4609 4634 E MPEG4Writer: Stop() called but track is not started
01-01 08:22:16.607 4609 4634 D MPEG4Writer: Audio track stopping
01-01 08:22:16.607 4609 4634 E MPEG4Writer: Stop() called but track is not started
01-01 08:22:16.607 4609 5002 I MstarVencApi: MApi_CMA_Pool_Init: pool_handle_id=0x1c, miu=1, offset=0x0, length=0x11000000
01-01 08:22:16.607 4609 5002 I MstarVencApi: alloc_param.pool_handle_id = 28
01-01 08:22:16.607 4609 5002 I MstarVencApi: alloc_param.length = 12582912
01-01 08:22:16.607 4609 5002 I MstarVencApi: alloc_param.offset_in_pool = 0
01-01 08:22:16.607 4609 5002 I MstarVencApi: alloc_param.flags = 4
01-01 08:22:16.618 4913 4913 E MediaRecorder: start failed: -2147483648
01-01 08:22:16.620 4913 4913 D AndroidRuntime: Shutting down VM
--------- beginning of crash
01-01 08:22:16.622 4913 4913 E AndroidRuntime: FATAL EXCEPTION: main
01-01 08:22:16.622 4913 4913 E AndroidRuntime: Process: com.jrm.localmm, PID: 4913
01-01 08:22:16.622 4913 4913 E AndroidRuntime: java.lang.RuntimeException: start failed.
01-01 08:22:16.622 4913 4913 E AndroidRuntime: at android.media.MediaRecorder.start(Native Method)
01-01 08:22:16.622 4913 4913 E AndroidRuntime: at com.jrm.localmm.business.video.MRDerVideoPlayView.start(MRDerVideoPlayView.java:337)
01-01 08:22:16.622 4913 4913 E AndroidRuntime: at com.jrm.localmm.business.video.MRDerVideoPlayView$2.surfaceCreated(MRDerVideoPlayView.java:556)
01-01 08:22:16.622 4913 4913 E AndroidRuntime: at android.view.SurfaceView.updateWindow(SurfaceView.java:582)
01-01 08:22:16.622 4913 4913 E AndroidRuntime: at android.view.SurfaceView$3.onPreDraw(SurfaceView.java:177)
01-01 08:22:16.622 4913 4913 E AndroidRuntime: at android.view.ViewTreeObserver.dispatchOnPreDraw(ViewTreeObserver.java:944)
01-01 08:22:16.622 4913 4913 E AndroidRuntime: at android.view.ViewRootImpl.performTraversals(ViewRootImpl.java:2059)
01-01 08:22:16.622 4913 4913 E AndroidRuntime: at android.view.ViewRootImpl.doTraversal(ViewRootImpl.java:1111)
01-01 08:22:16.622 4913 4913 E AndroidRuntime: at android.view.ViewRootImpl$TraversalRunnable.run(ViewRootImpl.java:6017)
01-01 08:22:16.622 4913 4913 E AndroidRuntime: at android.view.Choreographer$CallbackRecord.run(Choreographer.java:858)
01-01 08:22:16.622 4913 4913 E AndroidRuntime: at android.view.Choreographer.doCallbacks(Choreographer.java:670)
01-01 08:22:16.622 4913 4913 E AndroidRuntime: at android.view.Choreographer.doFrame(Choreographer.java:606)
01-01 08:22:16.622 4913 4913 E AndroidRuntime: at android.view.Choreographer$FrameDisplayEventReceiver.run(Choreographer.java:844)
01-01 08:22:16.622 4913 4913 E AndroidRuntime: at android.os.Handler.handleCallback(Handler.java:739)
01-01 08:22:16.622 4913 4913 E AndroidRuntime: at android.os.Handler.dispatchMessage(Handler.java:95)
01-01 08:22:16.622 4913 4913 E AndroidRuntime: at android.os.Looper.loop(Looper.java:148)
01-01 08:22:16.622 4913 4913 E AndroidRuntime: at android.app.ActivityThread.main(ActivityThread.java:5417)
01-01 08:22:16.622 4913 4913 E AndroidRuntime: at java.lang.reflect.Method.invoke(Native Method)
01-01 08:22:16.622 4913 4913 E AndroidRuntime: at com.android.internal.os.ZygoteInit$MethodAndArgsCaller.run(ZygoteInit.java:731)
01-01 08:22:16.622 4913 4913 E AndroidRuntime: at com.android.internal.os.ZygoteInit.main(ZygoteInit.java:621)
01-01 08:22:16.629 2383 3139 W ActivityManager: Force finishing activity com.jrm.localmm/.ui.mediarecorder.MediaRecorderActivity
01-01 08:22:16.633 2383 3139 W ActivityManager: Force finishing activity com.jrm.localmm/.ui.main.FileBrowserActivity
01-01 08:22:16.650 4609 5002 I MstarVencApi: MsMfeEncInit:mfe mmap, mem_offset = 0x80000000, PA:0x92400000, VA:0xda001000, size:0xc00000
01-01 08:22:16.650 4609 5002 I MstarVencApi: Width x Height : 1280 x 720, Bitrate=1048576000, FrameRatex100=2500.
01-01 08:22:16.650 4609 5002 I MstarVencApi: Frame size = 0x15e000, convert_size = 0x15e000, OBUF_SIZE = 0x100000, Others = 0x5000
01-01 08:22:16.650 4609 5002 I MstarVencApi: Total = 0x51f000
01-01 08:22:16.650 4609 5002 I MstarVencApi: MsMfeEncInit: enable_color_convert = 1
01-01 08:22:16.650 4609 5002 I MstarVencApi: MsMfeEncInit: Y:PA:0x92400000,VA:0xda001000
01-01 08:22:16.650 4609 5002 I MstarVencApi[ 1335.020590] init: Service 'cpudvfsscaling' is being killed...
: MsMfeEncInit: C:PA:0x924e6000,[ 1335.029930] init: Service 'cpudvfsscaling' (pid 4928) killed by signal 9
@SuppressLint("NewApi")
public class MRDerVideoPlayView extends SurfaceView {
...(构造函数和变量声明省略)
private void initVideoView() {
mVideoWidth = 0;
mVideoHeight = 0;
// getHolder().setFormat(PixelFormat.RGBA_8888);
getHolder().addCallback(mSHCallback);
getHolder().setType(SurfaceHolder.SURFACE_TYPE_PUSH_BUFFERS);
setFocusable(true);
setFocusableInTouchMode(true);
requestFocus();
mCurrentState = STATE_IDLE;
}
public void setHandler(Handler handler) {
mHandler = handler;
}
public void init() {
requestLayout();
invalidate();
}
private void initSocket(){
receiver = new LocalSocket();
try {
lss = new LocalServerSocket("VideoCamera");
receiver.connect(new LocalSocketAddress("VideoCamera"));
receiver.setReceiveBufferSize(500000);
receiver.setSendBufferSize(500000);
sender = lss.accept();
sender.setReceiveBufferSize(500000);
sender.setSendBufferSize(500000);
} catch (IOException e) {
e.printStackTrace();
Log.e("", "localSocket error:" + e.getMessage());
}
}
private void initRecorder(){
initSocket();
if(mMediaRecorder == null)
mMediaRecorder = new MediaRecorder();
else
mMediaRecorder.reset();
mMediaRecorder.setVideoSource(MediaRecorder.VideoSource.CAMERA);
mMediaRecorder.setAudioSource(MediaRecorder.AudioSource.MIC);
//CamcorderProfile cpHigh = CamcorderProfile.get(CamcorderProfile.QUALITY_HIGH);
//mMediaRecorder.setProfile(cpHigh);
//mMediaRecorder.setOutputFormat(MediaRecorder.OutputFormat.THREE_GPP);
mMediaRecorder.setOutputFormat(MediaRecorder.OutputFormat.MPEG_4);
mMediaRecorder.setVideoEncoder(MediaRecorder.VideoEncoder.H264);
//mMediaRecorder.setVideoEncoder(MediaRecorder.VideoEncoder.H263);
//mMediaRecorder.setVideoEncoder(MediaRecorder.VideoEncoder.MPEG_4_SP);
//mMediaRecorder.setAudioEncoder(MediaRecorder.AudioEncoder.AMR_NB);
mMediaRecorder.setAudioEncoder(MediaRecorder.AudioEncoder.AAC);
//mMediaRecorder.setVideoSize(320, 240);
/*
I CameraHardware: Camera Supported Size list: 1280x720,640x480,352x288,320x240,176x144
I CameraHardware: KEY_PREVIEW_FPS_RANGE 15000 ~ 30000
I CameraHardware: setPreviewSize to 1280, 720
*/
mMediaRecorder.setVideoSize(1280, 720);
//CamcorderProfile cProfile = CamcorderProfile.get(CamcorderProfile.QUALITY_HIGH);
//mMediaRecorder.setProfile(cProfile);
mMediaRecorder.setVideoFrameRate(25);
mMediaRecorder.setVideoEncodingBitRate(1024*1024*1024);
//mOutputFileName = "/sdcard/videocapture1.mp4";
//mMediaRecorder.setOutputFile(mOutputFileName);
mMediaRecorder.setOutputFile(sender.getFileDescriptor());
mMediaRecorder.setMaxDuration(0);
mMediaRecorder.setMaxFileSize(0);
}
private void prepareRecorder(){
mMediaRecorder.setPreviewDisplay(getHolder().getSurface());
try {
mMediaRecorder.prepare();
} catch (IllegalStateException e) {
Log.i(TAG,"prepareRecorder IllegalStateException:"+e);
stopRecorder();
} catch (IOException e) {
Log.i(TAG,"prepareRecorder IOException:"+e);
stopRecorder();
}
}
public void openRecorder(){
if (!mRecording) {
init();
prepareRecorder();
start();
}
}
public void stopRecorder(){
stop();
release();
mRecording = false;
}
public void stop(){
if (mMediaRecorder == null) return;
mMediaRecorder.stop();
}
public void release(){
if (mMediaRecorder == null) return;
mMediaRecorder.release();
}
public void start(){
if (mMediaRecorder == null) return;
mMediaRecorder.start();
mRecording = true;
}
public void startVideoRecording() {
Log.i(TAG, "startVideoRecording");
new Thread(new Runnable(){
@Override
public void run() {
Log.i(TAG, "##run....");
// defines
DataInputStream dataInputStream = null;
int READ_SIZE = 20000;
byte[] h264frame = new byte[1024 * 64];
int offSet=0,tmp=0,beLeft=0;
try {
dataInputStream = new DataInputStream(receiver.getInputStream());
} catch (IOException e2){
// TODO Auto-generated catch block
e2.printStackTrace();
}
try
{
Thread.currentThread().sleep(500);
} catch (InterruptedException e1)
{
e1.printStackTrace();
}
try
{
dataInputStream.read(h264frame, 0, 32);
Log.i(TAG, "read mdat from");
} catch (IOException e1)
{
// TODO Auto-generated catch block
e1.printStackTrace();
}
byte[] h264sps = { 0x67, 0x42, 0x40, 0x1E, (byte)0xA6, (byte)0xC0,
(byte)0xA0, 0x3D, (byte)0xA1, 0x00, 0x00, 0x03, 0x00, 0x01,
0x00, 0x00, 0x03, 0x00, 0x32, 0x0F, 0x10, 0x22, (byte)0xE0};
byte[] h264pps = { 0x68, (byte)0xCA, 0x43, (byte)0x88 };
byte[] h264head = { 0, 0, 0, 1 };
try
{
h264FileOutputStream = new FileOutputStream("/sdcard/h264.h264");
Log.i(TAG, "h264FileOutputStream");
} catch (FileNotFoundException e)
{
// TODO Auto-generated catch block
e.printStackTrace();
}
try
{
h264FileOutputStream.write(h264head);
h264FileOutputStream.write(h264sps);
h264FileOutputStream.write(h264head);
h264FileOutputStream.write(h264pps);
Log.i(TAG, "run-write SPS/PPS to file");
} catch (IOException e1)
{
// TODO Auto-generated catch block
e1.printStackTrace();
}
while (mRecording)
{
try
{
Log.i(TAG, "**while...");
int h264length = dataInputStream.readInt();
Log.i(TAG, "h264length:"+h264length);
tmp=0; offSet = 0; beLeft = 0;
h264FileOutputStream.write(h264head);
while (offSet < h264length)
{
beLeft = h264length - offSet;
tmp = dataInputStream.read(h264frame, 0, READ_SIZE < beLeft ? READ_SIZE : beLeft);
Log.i(TAG, String.format("H264 %d,%d,%d", h264length, offSet, tmp));
offSet += tmp;
h264FileOutputStream.write(h264frame, 0, tmp);
}
} catch (Exception e)
{
e.printStackTrace();
}
}
}
}).start();
}
/**
* Surface relevant callback interface.
*/
SurfaceHolder.Callback mSHCallback = new SurfaceHolder.Callback() {
public void surfaceChanged(SurfaceHolder holder, int format, int w,
int h) {
mSurfaceHolder = holder;
Log.i(TAG, "*************surfaceChanged************" + w + " " + h+" viewId:"+viewId);
mSurfaceWidth = w;
mSurfaceHeight = h;
//mSurfaceHolder.setFixedSize(mSurfaceWidth, mSurfaceWidth);
}
public void surfaceCreated(SurfaceHolder holder) {
Log.i(TAG, "*************surfaceCreated************"+viewId);
mSurfaceHolder = holder;
initRecorder();
prepareRecorder();
Log.i(TAG,"start() start");
start();
Log.i(TAG,"start() end");
startVideoRecording();
}
public void surfaceDestroyed(SurfaceHolder holder) {
// after we return from this we can't use the surface any more
mSurfaceHolder = null;
Log.i(TAG, "*************surfaceDestroyed************viewId:"+viewId);
Log.w(TAG,"why surfaceDestroyed:"+Log.getStackTraceString(new Throwable()));
if (mRecording) {
stop();
mRecording = false;
}
release();
/*
try {
findSPSAndPPS(mOutputFileName);
} catch (Exception e)
{
e.printStackTrace();
}*/
}
};
}