h.264里有类似于mpeg2里面的GOP header中的timecode吗?

suds 2009-09-17 06:43:28
加精
刚开始看h.264的东东,有点疑惑,不知道h.264里面的时间信息时存在什么地方的?
有没有类似于mpeg2 的gop header中的time code之类的东西?

谢谢
...全文
2876 33 打赏 收藏 转发到动态 举报
写回复
用AI写文章
33 条回复
切换为时间正序
请发表友善的回复…
发表回复
QQJADE777 2012-08-11
  • 打赏
  • 举报
回复
好东西,谢谢
tianyouairen 2010-06-21
  • 打赏
  • 举报
回复
good ,mark!
laowang2 2009-09-25
  • 打赏
  • 举报
回复
好,看看
inheaven2007 2009-09-24
  • 打赏
  • 举报
回复
非常感谢
Ryan裤子 2009-09-23
  • 打赏
  • 举报
回复
你是要把h264字节流复用到ts里面,给length和pts不知道咋个算? 我刚好有现成的代码.要不要?
suds 2009-09-23
  • 打赏
  • 举报
回复
多谢ttxk,我来看看!
Ryan裤子 2009-09-23
  • 打赏
  • 举报
回复
ffmpeg里面是找到h264字节流的每一帧数据的起始和结尾,然后在sps里分析出帧率.
根据帧率,根据数据每帧的起始和结束位置,就可以算出这段数据的时间信息.
macrojj 2009-09-23
  • 打赏
  • 举报
回复
你说的那个是应用层的事情。那个在标准里面是没有的。
phoeni_xin 2009-09-23
  • 打赏
  • 举报
回复
mark
rotus 2009-09-23
  • 打赏
  • 举报
回复
mpeg2 的gop header中的time code 没啥用吧,13818-2 里面有这样的描述,
The information carried by time_code plays no part in the decoding process。

如果不用这个time_code 的话,那如何才能快速得到一段码流的时间长度(Duration)?
我们知道PTS 可能会有重复的。

Bill1212 2009-09-23
  • 打赏
  • 举报
回复
在图像定时SEI消息里有一些
suds 2009-09-23
  • 打赏
  • 举报
回复
非常感谢TTXK ^_^
Ryan裤子 2009-09-23
  • 打赏
  • 举报
回复

/*
 * Scan g_h264_buffer for Annex-B start codes (00 00 01) and queue each
 * NAL unit (start code included) into g_h264_nal_buffer.
 *
 * Returns the total number of payload bytes queued.
 *
 * NOTE(review): when no further start code is found, the remaining tail
 * of the buffer is emitted as one NAL even though the caller may append
 * more data on the next read — a NAL straddling two reads can be
 * truncated. Verify against the read loop in RawH264File2Ts.
 */
UINT SplitH264Nal()
{
/* p_buffer / i_buffer alias the global buffer's data pointer and size;
   advancing them consumes bytes from g_h264_buffer in place. */
PBYTE& p_buffer = g_h264_buffer.m_pData;
UINT& i_buffer = g_h264_buffer.m_nDataSize;

UINT writeSize = 0;

/* Skip leading garbage until the first 00 00 01 start code. */
while( i_buffer > 4 && ( p_buffer[0] != 0 || p_buffer[1] != 0 || p_buffer[2] != 1 ) )
{
i_buffer--;
p_buffer++;
}

/* Split nal units */
while( i_buffer > 4 )
{
int i_offset;
int i_size = i_buffer;   /* default: take the whole remaining buffer */
int i_skip = i_buffer;

/* search nal end */
for( i_offset = 4; i_offset+2 < i_buffer ; i_offset++)
{
if( p_buffer[i_offset] == 0 && p_buffer[i_offset+1] == 0 && p_buffer[i_offset+2] == 1 )
{
/* we found another startcode */
/* If the next start code is the 4-byte form (00 00 00 01), exclude
   its extra leading zero from the current NAL's size. */
i_size = i_offset - ( p_buffer[i_offset-1] == 0 ? 1 : 0);
i_skip = i_offset;
break;
}
}
/* TODO add STAP-A to remove a lot of overhead with small slice/sei/... */
/* Reuse a recycled buffer when available, otherwise allocate one. */
Buffer* p_nal_buffer = g_h264_nal_buffer.GetEmptyBuffer();
if (p_nal_buffer == NULL)
p_nal_buffer = new Buffer();
p_nal_buffer->FillData(p_buffer, i_size);
g_h264_nal_buffer.AddFullBuffer(p_nal_buffer);

writeSize += i_size;

/* Consume this NAL (and any bytes between i_size and the next
   start code) from the input. */
i_buffer -= i_skip;
p_buffer += i_skip;
}
return writeSize;
}

/*
 * Drain queued NAL units from g_h264_nal_buffer, derive per-frame timing
 * (PTS/DTS/duration), lazily create the TS muxer once SPS+PPS have been
 * seen, and feed every NAL to the muxer.
 *
 * Expects each queued NAL to begin with a 3-byte 00 00 01 start code, so
 * the NAL header byte sits at m_pData[3] (see SplitH264Nal).
 *
 * Returns FALSE only when the muxer cannot be opened or the video stream
 * cannot be added; TRUE otherwise.
 */
BOOL H264Nal2TsMux(HANDLE h_ts_file)
{
Buffer* p_h264_nal;

while(TRUE)
{
p_h264_nal = g_h264_nal_buffer.GetFullBuffer();
if (p_h264_nal == NULL)
break;

int i_nal_hdr;
int i_nal_type;

/* NAL header byte follows the 3-byte start code. */
i_nal_hdr = p_h264_nal->m_pData[3];
i_nal_type = i_nal_hdr&0x1f;

/* A VCL NAL (slice / DPA / IDR) can begin a new access unit. */
if (i_nal_type == 1/*NAL_SLICE*/ || i_nal_type == 2/*NAL_DPA*/ || i_nal_type == 5/*NAL_IDR_SLICE*/)
g_frame_start_found = TRUE;

/* Non-VCL NALs (SPS/PPS/AUD) reset the frame-start state. */
if (i_nal_type == 7/*NAL_SPS*/ || i_nal_type == 8/*NAL_PPS*/ || i_nal_type == 9/*NAL_AUD*/)
g_frame_start_found = FALSE;

/* Frame counting heuristic: scan for a byte with the MSB set,
   presumably the first_mb_in_slice == 0 marker bit of the first slice
   of a frame. NOTE(review): scanning starts at index 3, which is the
   NAL header itself rather than the slice header at index 4 — confirm
   this is intentional. */
if(g_frame_start_found)
{
for (UINT i = 3; i < p_h264_nal->m_nDataSize; i++)
{
if (p_h264_nal->m_pData[i] & 0x80)
{
g_frame_start_found= FALSE;
g_frame_count++;
break;
}
}
}

/* Collect the first SEI/SPS/PPS into g_h264_header_info; this becomes
   the codec extra data and is also muxed ahead of the first frame. */
if (g_has_sei == FALSE && i_nal_type == 6)
{
g_h264_header_info.AppendData(p_h264_nal->m_pData, p_h264_nal->m_nDataSize);
g_has_sei = TRUE;
}

if (/*g_has_sei &&*/ g_has_sps == FALSE && i_nal_type == 7)
{
g_h264_header_info.AppendData(p_h264_nal->m_pData, p_h264_nal->m_nDataSize);
g_has_sps = TRUE;

/* Pull the VUI timing fields out of the SPS for PTS computation. */
SPS_t sps;
h264_decode_seq_parameter_set(p_h264_nal->m_pData, p_h264_nal->m_nDataSize, &sps);
g_scale = sps.time_scale,
g_rate = sps.num_units_in_tick;
}

if (/*g_has_sei &&*/ g_has_sps && g_has_pps == FALSE && i_nal_type == 8)
{
g_h264_header_info.AppendData(p_h264_nal->m_pData, p_h264_nal->m_nDataSize);
g_has_pps = TRUE;
}

/* Once SPS+PPS are known, create the muxer and register the stream. */
if (g_has_pps == TRUE && g_pTsMux == NULL)
{
es_format_t es_format;
libTsMuxParam_t param_ts_mux;

es_format_Init(&es_format, VIDEO_ES, FOURCC( 'h', '2', '6', '4' ));
es_format.p_extra = g_h264_header_info.m_pData;
es_format.i_extra = g_h264_header_info.m_nDataSize;

param_ts_mux.m_pGetTsDataCB = WriteTs2File;
param_ts_mux.m_pUserData = h_ts_file;

g_pTsMux = new libTsMux();
if (g_pTsMux->Open(param_ts_mux) == FALSE)
return FALSE;

g_stream_h264 = g_pTsMux->AddStream(&es_format);
if (g_stream_h264 == NULL)
return FALSE;
}

if (g_pTsMux)
{
int64_t i_pts,i_length;
int64_t i_dts;
int i_flags = 0;
int i_slice_type = 0;

/* NOTE(review): this duration formula looks inconsistent with
   EncodeAvi2Ts, which uses INT64_C(1000000) * den / num. Here
   g_scale holds time_scale and g_rate num_units_in_tick (assigned
   above), so scale/rate is a tick rate, not a duration — verify
   units and the factor of 2 (field vs frame ticks). */
i_length = INT64_C(1000) * g_scale / g_rate / 2;
i_pts = g_frame_count * i_length;

if (i_nal_type >= 1/*NAL_SLICE*/ && i_nal_type <= 5/*NAL_SLICE_IDR*/)
{
//uint8_t annexb[512]; /* Enough for the begining of the slice header */
//int annexb_len;

//h264_decode_annexb( annexb, &annexb_len, p_h264_nal->m_pData + 5,
// __MIN(p_h264_nal->m_nDataSize-5, 512) );

//bs_t s;
//bs_init( &s, annexb, annexb_len );

//bs_read_ue( &s ); // first_mb_in_slice
//i_slice_type = i_slice_type = bs_read_ue( &s ); // slice type

/* NOTE(review): the NAL header was read at m_pData[3] above, so the
   slice header starts at m_pData+4; "+5" matches a 4-byte start code
   and looks one byte off — verify. Emulation-prevention bytes are
   also not stripped here (the annexb decode is commented out). */
bs_t s;
bs_init(&s, p_h264_nal->m_pData+5, p_h264_nal->m_nDataSize-5);

bs_read_ue( &s ); // first_mb_in_slice
i_slice_type = i_slice_type = bs_read_ue( &s ); // slice type (duplicated assignment kept as-is)
}

if( i_slice_type == H264_SLICE_TYPE_IDR || i_slice_type == H264_SLICE_TYPE_I )
i_flags |= BLOCK_FLAG_TYPE_I;
else if( i_slice_type == H264_SLICE_TYPE_P )
i_flags |= BLOCK_FLAG_TYPE_P;
else if( i_slice_type == H264_SLICE_TYPE_B )
i_flags |= BLOCK_FLAG_TYPE_B;

/* DTS: with B-frames, non-B frames take the interpolated DTS so DTS
   stays monotonic; without B-frames DTS == PTS. */
if( g_bframe_count > 0 )
{
if( i_flags & BLOCK_FLAG_TYPE_B )
{
/* FIXME : this is wrong if bpyramid is set */
i_dts = i_pts;
g_interpolated_dts = i_dts;
}
else
{
if( g_interpolated_dts )
{
i_dts = g_interpolated_dts;
}
else
{
/* Let's put something sensible */
i_dts = i_pts;
}

g_interpolated_dts += i_length;
}
}
else
{
i_dts = i_pts;
}

/* Flush any pending SEI/SPS/PPS ahead of the current NAL, with the
   same timestamps. */
if (g_h264_header_info.m_nDataSize > 0)
{
g_pTsMux->Mux(g_stream_h264, g_h264_header_info.m_pData, g_h264_header_info.m_nDataSize,
i_length, i_pts, i_dts, i_flags);
g_h264_header_info.ClearData();
}

g_pTsMux->Mux(g_stream_h264, p_h264_nal->m_pData, p_h264_nal->m_nDataSize,
i_length, i_pts, i_dts, i_flags);
}

/* Recycle the NAL buffer. */
g_h264_nal_buffer.AddEmptyBuffer(p_h264_nal);
}

return TRUE;
}

/*
 * Read a raw Annex-B H.264 elementary stream from raw_h264_file_path,
 * split it into NAL units and mux them into the TS file h_ts_file.
 *
 * Returns FALSE when the input file cannot be opened, TRUE otherwise
 * (read errors simply end the loop, matching the original behavior).
 */
BOOL RawH264File2Ts(LPCTSTR raw_h264_file_path, HANDLE h_ts_file)
{
HANDLE h_raw_h264_file;
Buffer buffer_read;
BOOL b_result;

h_raw_h264_file = CreateFile (raw_h264_file_path,
GENERIC_READ,
0,
NULL,
OPEN_EXISTING, // fail if the file does not exist
FILE_ATTRIBUTE_NORMAL, // normal file
NULL);
if (h_raw_h264_file == INVALID_HANDLE_VALUE)
{
int iError = GetLastError();
printf("Could not open file (error %d)\n", iError);
return FALSE;
}
buffer_read.AllocateBuffer(H264_READ_SIZE);
g_h264_buffer.AllocateBuffer(H264_READ_SIZE*2);

while (TRUE)
{
/* Refill only when less than one read-chunk of unparsed data remains. */
if (g_h264_buffer.m_nDataSize < H264_READ_SIZE)
{
buffer_read.m_nDataSize = 0;
b_result = ReadFile(h_raw_h264_file, buffer_read.m_pBuffer, buffer_read.m_nBufferSize,
(DWORD*)&buffer_read.m_nDataSize, NULL);
if (!b_result || !buffer_read.m_nDataSize)
break; // read error or end of file
g_h264_buffer.AppendData(buffer_read.m_pData, buffer_read.m_nDataSize);
}

SplitH264Nal();
H264Nal2TsMux(h_ts_file);
}

/* BUGFIX: the input file handle used to be leaked. */
CloseHandle(h_raw_h264_file);

return TRUE;
}

int _tmain(int argc, _TCHAR* argv[])
{
int iError;

string sTsFilePath;
HANDLE h_ts_file;

printf("Please input ts file's save path:\n");
ReadSTDIN(sTsFilePath);

h_ts_file = CreateFile (sTsFilePath.c_str(),
GENERIC_WRITE,
0,
NULL,
CREATE_NEW,
FILE_ATTRIBUTE_NORMAL, // normal file
NULL);
if (h_ts_file == INVALID_HANDLE_VALUE)
{
iError = GetLastError();
printf("Could not open file (error %d)\n", iError);
return iError;
}

string input_file_path;

printf("Please input raw-YV12-avi file or raw-h264 file path:\n");
ReadSTDIN(input_file_path);

do
{
if (EncodeAvi2Ts(input_file_path.c_str(), h_ts_file) == FALSE)
{
RawH264File2Ts(input_file_path.c_str(), h_ts_file);
}

} while (FALSE);


// 结束编码
if (g_pTsMux)
{
if (g_stream_h264)
g_pTsMux->DelStream(g_stream_h264);
g_stream_h264 = NULL;
delete g_pTsMux;
}
g_pTsMux = NULL;

if (g_pH264Encoder)
delete g_pH264Encoder;
g_pH264Encoder = NULL;

// 清理文件
DWORD dwResult;
INT32 nDataSize;
do
{
nDataSize = g_ts_buffer.GetDataSize();
if (nDataSize == 0)
break;

if (nDataSize > g_ts_write_buffer.m_nBufferSize)
nDataSize = g_ts_write_buffer.m_nBufferSize;

if (g_ts_buffer.GetData(g_ts_write_buffer.m_pBuffer, nDataSize))
{
dwResult = WriteFile (h_ts_file, g_ts_write_buffer.m_pBuffer, nDataSize, &dwResult, NULL);
printf("Wrote to file %d bytes.\n", nDataSize);
}

}while(TRUE);

CloseHandle(h_ts_file);
h_ts_file = NULL;

printf("\nWork is finished, input any key to exit.\n");
getchar();
return 0;
}
Ryan裤子 2009-09-23
  • 打赏
  • 举报
回复

/**
* Rational number num/den (numerator/denominator pair, same shape as
* FFmpeg's AVRational; used here for the sample aspect ratio).
*/
typedef struct AVRational_t{
int num; ///< numerator
int den; ///< denominator
} AVRational_t;

/**
* Sequence parameter set.
*
* NOTE(review): only a subset of these fields is populated by
* h264_decode_seq_parameter_set below (profile, sizes, poc fields,
* crop flag, VUI/sar/timing, scaling_matrix_present); the rest are
* left untouched by that parser.
*/
typedef struct SPS_t{

int profile_idc;
int level_idc;
int transform_bypass; ///< qpprime_y_zero_transform_bypass_flag
int log2_max_frame_num; ///< log2_max_frame_num_minus4 + 4
int poc_type; ///< pic_order_cnt_type
int log2_max_poc_lsb; ///< log2_max_pic_order_cnt_lsb_minus4
int delta_pic_order_always_zero_flag;
int offset_for_non_ref_pic;
int offset_for_top_to_bottom_field;
int poc_cycle_length; ///< num_ref_frames_in_pic_order_cnt_cycle
int ref_frame_count; ///< num_ref_frames
int gaps_in_frame_num_allowed_flag;
/* NOTE(review): despite the mb_ names, h264_decode_seq_parameter_set
   stores 16 * (mbs_minus1 + 1) into these two fields, i.e. the coded
   size in PIXELS, not in macroblocks. */
int mb_width; ///< frame_width_in_mbs_minus1 + 1
int mb_height; ///< frame_height_in_mbs_minus1 + 1
int frame_mbs_only_flag;
int mb_aff; ///<mb_adaptive_frame_field_flag
int direct_8x8_inference_flag;
int crop; ///< frame_cropping_flag
int crop_left; ///< frame_cropping_rect_left_offset
int crop_right; ///< frame_cropping_rect_right_offset
int crop_top; ///< frame_cropping_rect_top_offset
int crop_bottom; ///< frame_cropping_rect_bottom_offset
int vui_parameters_present_flag;
AVRational_t sar; ///< sample aspect ratio from VUI
int timing_info_present_flag;
uint32_t num_units_in_tick;
uint32_t time_scale;
int fixed_frame_rate_flag;
short offset_for_ref_frame[256]; //FIXME dyn aloc?
int bitstream_restriction_flag;
int num_reorder_frames;
int scaling_matrix_present;
uint8_t scaling_matrix4[6][16];
uint8_t scaling_matrix8[2][64];
}SPS_t;

/*
 * Minimal SPS parser: extracts the profile, coded picture size and the
 * VUI timing fields (num_units_in_tick / time_scale) from a NAL unit
 * prefixed with a 3-byte 00 00 01 start code (payload begins at +4).
 *
 * Returns FALSE only for an out-of-range sps_id; TRUE otherwise.
 *
 * NOTE(review): emulation-prevention bytes (00 00 03) are NOT stripped
 * (CreateDecodedNAL is commented out below); an SPS containing such a
 * sequence will be misparsed — verify the inputs or unescape first.
 */
static BOOL h264_decode_seq_parameter_set( uint8_t* p_nal, int n_nal_size, SPS_t* p_sps)
{
uint8_t *pb_dec = NULL;
int i_dec = 0;
bs_t s;
int i_tmp; /* unused */
int i_sps_id;

/* Skip the 3-byte start code plus the 1-byte NAL header. */
pb_dec = p_nal + 4;
i_dec = n_nal_size - 4;
//CreateDecodedNAL( &pb_dec, &i_dec, &p_frag->p_buffer[5],
// p_frag->i_buffer - 5 );

bs_init( &s, pb_dec, i_dec );

// profile(8)
p_sps->profile_idc = bs_read( &s, 8);

/* constraint_set012, reserver(5), level(8) */
bs_skip( &s, 1+1+1 + 5 + 8 );
/* sps id */
i_sps_id = bs_read_ue( &s );
if( i_sps_id >= 32/*SPS_MAX*/ )
{
printf("invalid SPS (sps_id=%d)", i_sps_id );
return FALSE;
}

p_sps->scaling_matrix_present = 0;
if(p_sps->profile_idc >= 100) //high profile
{
if(bs_read_ue(&s) == 3) //chroma_format_idc
bs_read(&s, 1); //residual_color_transform_flag
bs_read_ue(&s); //bit_depth_luma_minus8
bs_read_ue(&s); //bit_depth_chroma_minus8
p_sps->transform_bypass = bs_read(&s, 1);
/* NOTE(review): only seq_scaling_matrix_present_flag is skipped here;
   if that flag is 1, the scaling lists that follow are not consumed
   and every field read after this point is misparsed — verify. */
bs_skip(&s, 1); //decode_scaling_matrices(h, sps, NULL, 1, sps->scaling_matrix4, sps->scaling_matrix8);
}

/* Skip i_log2_max_frame_num */
p_sps->log2_max_frame_num = bs_read_ue( &s );
if( p_sps->log2_max_frame_num > 12)
p_sps->log2_max_frame_num = 12;
/* Read poc_type */
p_sps->poc_type/*->i_pic_order_cnt_type*/ = bs_read_ue( &s );
if( p_sps->poc_type == 0 )
{
/* skip i_log2_max_poc_lsb */
p_sps->log2_max_poc_lsb/*->i_log2_max_pic_order_cnt_lsb*/ = bs_read_ue( &s );
if( p_sps->log2_max_poc_lsb > 12 )
p_sps->log2_max_poc_lsb = 12;
}
else if( p_sps->poc_type/*p_sys->i_pic_order_cnt_type*/ == 1 )
{
int i_cycle;
/* skip b_delta_pic_order_always_zero */
p_sps->delta_pic_order_always_zero_flag/*->i_delta_pic_order_always_zero_flag*/ = bs_read( &s, 1 );
/* skip i_offset_for_non_ref_pic */
bs_read_se( &s );
/* skip i_offset_for_top_to_bottom_field */
bs_read_se( &s );
/* read i_num_ref_frames_in_poc_cycle */
i_cycle = bs_read_ue( &s );
/* NOTE(review): if the real count ever exceeded this clamp, only part
   of the offsets would be consumed and parsing would desync. */
if( i_cycle > 256 ) i_cycle = 256;
while( i_cycle > 0 )
{
/* skip i_offset_for_ref_frame */
bs_read_se(&s );
i_cycle--;
}
}
/* i_num_ref_frames */
bs_read_ue( &s );
/* b_gaps_in_frame_num_value_allowed */
bs_skip( &s, 1 );

/* Read size (stored in pixels: 16 * number of macroblocks). */
p_sps->mb_width/*->fmt_out.video.i_width*/ = 16 * ( bs_read_ue( &s ) + 1 );
p_sps->mb_height/*fmt_out.video.i_height*/ = 16 * ( bs_read_ue( &s ) + 1 );

/* b_frame_mbs_only */
p_sps->frame_mbs_only_flag/*->b_frame_mbs_only*/ = bs_read( &s, 1 );
if( p_sps->frame_mbs_only_flag == 0 )
{
/* mb_adaptive_frame_field_flag */
bs_skip( &s, 1 );
}
/* b_direct8x8_inference */
bs_skip( &s, 1 );

/* crop */
p_sps->crop = bs_read( &s, 1 );
if( p_sps->crop )
{
/* left */
bs_read_ue( &s );
/* right */
bs_read_ue( &s );
/* top */
bs_read_ue( &s );
/* bottom */
bs_read_ue( &s );
}

/* vui */
p_sps->vui_parameters_present_flag = bs_read( &s, 1 );
if( p_sps->vui_parameters_present_flag )
{
int aspect_ratio_info_present_flag = bs_read( &s, 1 );
if( aspect_ratio_info_present_flag )
{
/* Table E-1 aspect_ratio_idc -> SAR lookup. */
static const struct { int num, den; } sar[17] =
{
{ 0, 0 }, { 1, 1 }, { 12, 11 }, { 10, 11 },
{ 16, 11 }, { 40, 33 }, { 24, 11 }, { 20, 11 },
{ 32, 11 }, { 80, 33 }, { 18, 11 }, { 15, 11 },
{ 64, 33 }, { 160,99 }, { 4, 3 }, { 3, 2 },
{ 2, 1 },
};

int i_sar = bs_read( &s, 8 );

if( i_sar < 17 )
{
p_sps->sar.num = sar[i_sar].num;
p_sps->sar.den = sar[i_sar].den;
}
else if( i_sar == 255 )
{
/* Extended_SAR: explicit 16-bit num/den follow. */
p_sps->sar.num = bs_read( &s, 16 );
p_sps->sar.den = bs_read( &s, 16 );
}
else
{
p_sps->sar.num = 0;
p_sps->sar.den = 0;
}

//if( den != 0 )
// p_dec->fmt_out.video.i_aspect = (int64_t)VOUT_ASPECT_FACTOR *
// ( num * p_dec->fmt_out.video.i_width ) /
// ( den * p_dec->fmt_out.video.i_height);
//else
// p_dec->fmt_out.video.i_aspect = VOUT_ASPECT_FACTOR;
}
else
{
p_sps->sar.num = 0;
p_sps->sar.den = 0;
}

if(bs_read(&s, 1)) /* overscan_info_present_flag */
{
bs_read(&s, 1); /* overscan_appropriate_flag */
}

if(bs_read(&s, 1)) /* video_signal_type_present_flag */
{
bs_read(&s, 3); /* video_format */
bs_read(&s, 1); /* video_full_range_flag */

if(bs_read(&s, 1)) /* colour_description_present_flag */
{
bs_read(&s, 8); /* colour_primaries */
bs_read(&s, 8); /* transfer_characteristics */
bs_read(&s, 8); /* matrix_coefficients */
}
}

if(bs_read(&s, 1)) /* chroma_location_info_present_flag */
{
bs_read_ue(&s); /* chroma_sample_location_type_top_field */
bs_read_ue(&s); /* chroma_sample_location_type_bottom_field */
}

/* The timing fields are what the caller actually needs. */
p_sps->timing_info_present_flag = bs_read(&s, 1);
if(p_sps->timing_info_present_flag)
{
p_sps->num_units_in_tick = bs_read(&s, 32);
p_sps->time_scale = bs_read(&s, 32);
p_sps->fixed_frame_rate_flag = bs_read(&s, 1);
}

//nal_hrd_parameters_present_flag = get_bits1(&s, 1);
//if(nal_hrd_parameters_present_flag)
// decode_hrd_parameters(h, sps);
//vcl_hrd_parameters_present_flag = get_bits1(&s, 1);
//if(vcl_hrd_parameters_present_flag)
// decode_hrd_parameters(h, sps);
//if(nal_hrd_parameters_present_flag || vcl_hrd_parameters_present_flag)
// get_bits1(&s->gb); /* low_delay_hrd_flag */
//get_bits1(&s->gb); /* pic_struct_present_flag */

//sps->bitstream_restriction_flag = get_bits1(&s->gb);
//if(sps->bitstream_restriction_flag)
//{
// unsigned int num_reorder_frames;
// get_bits1(&s->gb); /* motion_vectors_over_pic_boundaries_flag */
// get_ue_golomb(&s->gb); /* max_bytes_per_pic_denom */
// get_ue_golomb(&s->gb); /* max_bits_per_mb_denom */
// get_ue_golomb(&s->gb); /* log2_max_mv_length_horizontal */
// get_ue_golomb(&s->gb); /* log2_max_mv_length_vertical */
// num_reorder_frames= get_ue_golomb(&s->gb);
// get_ue_golomb(&s->gb); /*max_dec_frame_buffering*/

// if(num_reorder_frames > 16 /*max_dec_frame_buffering || max_dec_frame_buffering > 16*/){
// av_log(h->s.avctx, AV_LOG_ERROR, "illegal num_reorder_frames %d\n", num_reorder_frames);
// return -1;
// }

// sps->num_reorder_frames= num_reorder_frames;
//}

}

return TRUE;
}
Ryan裤子 2009-09-23
  • 打赏
  • 举报
回复
我就贴这里吧,大家都可以用.

TsMux_main.cpp
复用h264字节流文件代码在RawH264File2Ts函数里面

// TsMux_main.cpp : 定义控制台应用程序的入口点。
//
#include "stdafx.h"

#ifdef __cplusplus
extern "C" {
#endif

#include "vlc_bits.h"

#ifdef __cplusplus
};
#endif

#include "vfw.h"
#pragma comment(lib, "Vfw32.lib")

#include "libH264Encoder.h"
#ifdef _DEBUG
#pragma comment(lib, "libH264EncoderD.lib")
#else
#pragma comment(lib, "libH264Encoder.lib")
#endif

#include "libTsMux.h"
#ifdef _DEBUG
#pragma comment(lib, "libTsMuxD.lib")
#else
#pragma comment(lib, "libTsMux.lib")
#endif

/* Read one line from stdin and convert it to an integer
   (atoi semantics: 0 on non-numeric input). */
INT ReadSTDIN()
{
string line;

for (;;)
{
int ch = getchar();
if (ch == EOF || ch == '\n')
break;
line += (CHAR)ch;
}

return atoi(line.c_str());
}

/* Read one line from stdin into `text` (newline not included);
   returns the number of characters read. */
UINT ReadSTDIN(string& text)
{
UINT count = 0;

text = "";

for (;;)
{
int ch = getchar();
if (ch == EOF || ch == '\n')
break;
text += (CHAR)ch;
count++;
}
return count;
}

/* Flush to disk in ~2 MB chunks rounded down to a whole number of
   188-byte TS packets. */
static int TS_FILE_WRITE_SIZE = 1024*1024*2 - 1024*1024*2%188;
static int H264_READ_SIZE = 1024*1024*2;

Buffer g_ts_write_buffer;   // staging buffer for file writes
VBuffer g_ts_buffer;        // queue of TS packets produced by the muxer

Buffer g_h264_buffer;       // raw byte stream read from the input file
VBuffer g_h264_nal_buffer;  // queue of split-out NAL units
Buffer g_h264_header_info;  // accumulated SEI/SPS/PPS used as codec extra data
BOOL g_has_sei = FALSE;
BOOL g_has_sps = FALSE;
BOOL g_has_pps = FALSE;
/* NOTE(review): H264Nal2TsMux assigns g_scale = time_scale and
   g_rate = num_units_in_tick, so these defaults (1001 / 30000) look
   swapped relative to that convention — verify. */
int g_scale = 1001, g_rate = 30000;
int g_bframe_count = 0;     // >0 when the encoder emits B-frames

libH264Encoder* g_pH264Encoder = NULL;
libTsMux* g_pTsMux = NULL;
HANDLE g_stream_h264 = NULL; // video stream handle inside the TS muxer

mtime_t g_interpolated_dts = 0; // running DTS for non-B frames when B-frames exist
int g_frame_count = 1;          // frames seen so far (drives PTS in H264Nal2TsMux)
BOOL g_frame_start_found = FALSE;

/*
 * TS-mux output callback: queue the produced TS packets and, once more
 * than one write-chunk has accumulated, flush a chunk to the output file
 * (pUserData carries the file HANDLE).
 */
void WriteTs2File(PVOID pUserData, PBYTE pTsData, DWORD nTsDataSize)
{
/* Queue the incoming packets, recycling a buffer when one is free. */
Buffer *p_chunk = g_ts_buffer.GetEmptyBuffer();
if (p_chunk == NULL)
p_chunk = new Buffer();
p_chunk->FillData(pTsData, nTsDataSize);
g_ts_buffer.AddFullBuffer(p_chunk);

/* Make sure the staging buffer can hold one write-chunk. */
g_ts_write_buffer.ExtendBuffer(TS_FILE_WRITE_SIZE);

/* Not enough buffered yet — wait for more packets. */
if (g_ts_buffer.GetDataSize() <= g_ts_write_buffer.m_nBufferSize)
return;

if (!g_ts_buffer.GetData(g_ts_write_buffer.m_pBuffer, g_ts_write_buffer.m_nBufferSize))
return;

HANDLE h_out = (HANDLE)pUserData;
DWORD n_written;

if (!WriteFile(h_out, g_ts_write_buffer.m_pBuffer, g_ts_write_buffer.m_nBufferSize, &n_written, NULL))
{
printf("Could not write to ts file (error %d)\n", GetLastError());
return;
}
printf("Wrote to file %d bytes.\n", g_ts_write_buffer.m_nBufferSize);
}

/*
 * Open a raw-YV12 AVI, H.264-encode every video frame and mux the
 * encoded stream into the TS file handle h_ts_file.
 *
 * Returns TRUE on success; FALSE on any failure so the caller can fall
 * back to treating the input as a raw H.264 byte stream.
 *
 * NOTE(review): the early error returns below still leak the AVI
 * file/stream objects (and skip AVIFileExit); only the success path
 * and the in-loop returns clean up what they can.
 */
BOOL EncodeAvi2Ts(LPCTSTR avi_file_path, HANDLE h_ts_file)
{
HRESULT hr = S_OK;
IAVIFile* h_avi_file = NULL;
IAVIStream* p_avi_stream_video = NULL;

AVIFileInit();

hr = AVIFileOpen( &h_avi_file, avi_file_path, OF_READ, NULL );
if( FAILED( hr ) )
return FALSE;

// Grab the first video stream.
hr = AVIFileGetStream( h_avi_file, &p_avi_stream_video, streamtypeVIDEO, 0 );
if( FAILED( hr ) )
return FALSE;

// Read the video stream format.
BITMAPINFOHEADER bmiHeader;
LONG cbBMI = sizeof(BITMAPINFOHEADER);

hr = AVIStreamReadFormat( p_avi_stream_video, 0, &bmiHeader, &cbBMI );
if( FAILED( hr ) )
return FALSE;

/* BUGFIX: the original compared against MAKEFOURCC(' Y', 'V', '1', '2')
   — note the stray space — so a genuine YV12 stream could never match
   and the AVI path always failed. */
if (bmiHeader.biCompression != MAKEFOURCC('Y', 'V', '1', '2'))
{
printf("the AVI file's VIDEO stream is not raw YV12.\n");
return FALSE;
}

// Read the video stream info (frame rate, stream length, ...).
AVISTREAMINFO videoStreamInfo;

hr = AVIStreamInfo( p_avi_stream_video, &videoStreamInfo, sizeof( videoStreamInfo ) );
if( FAILED( hr ) )
{
printf("Read stream info error.\n");
return FALSE;
}

//==============================================================================
//x264 codec param
//==============================================================================
EncodeH264Param_t param_h264;
param_h264.m_nPicWidth = bmiHeader.biWidth;
param_h264.m_nPicHeight = bmiHeader.biHeight;
param_h264.m_nFpsNum = videoStreamInfo.dwRate;
param_h264.m_nFpsDen = videoStreamInfo.dwScale;
param_h264.m_nBitrate = (double)bmiHeader.biWidth * bmiHeader.biHeight * bmiHeader.biBitCount *
videoStreamInfo.dwRate / videoStreamInfo.dwScale / 100;

/* NOTE(review): m_nBFrameCount is read here but never assigned above;
   this relies on EncodeH264Param_t providing a sensible default —
   verify against libH264Encoder.h. */
g_bframe_count = param_h264.m_nBFrameCount;

g_pH264Encoder = new libH264Encoder();
if (g_pH264Encoder->Open(param_h264) == FALSE)
{
printf("H264Encoder open error!\n");
return FALSE;
}

//==============================================================================
//ts mux param
//==============================================================================
es_format_t es_format;
libTsMuxParam_t param_ts_mux;

byte* p_h264_header_info;
int n_h264_header_info_size;
Buffer h264_header_buffer;

// SPS/PPS from the encoder become the muxer's codec extra data.
if (g_pH264Encoder->EncodeHeader(p_h264_header_info, n_h264_header_info_size) == FALSE)
return FALSE;
h264_header_buffer.FillData(p_h264_header_info, n_h264_header_info_size);

es_format_Init(&es_format, VIDEO_ES, FOURCC( 'h', '2', '6', '4' ));
es_format.p_extra = h264_header_buffer.m_pBuffer;
es_format.i_extra = h264_header_buffer.m_nBufferSize;

g_pTsMux = new libTsMux();

param_ts_mux.m_pGetTsDataCB = WriteTs2File;
param_ts_mux.m_pUserData = h_ts_file;

if (g_pTsMux->Open(param_ts_mux) == FALSE)
{
printf("libTsMux Open error!\n");
return FALSE;
}

g_stream_h264 = g_pTsMux->AddStream(&es_format);
if (g_stream_h264 == NULL)
{
printf("Add es stream error!\n");
return FALSE;
}

//=======================================================================================
// Start encoding.

DWORD n_video_end = AVIStreamEnd(p_avi_stream_video);
DWORD n_video_begin = AVIStreamStart(p_avi_stream_video);
DWORD n_video_current = n_video_begin;

DWORD n_frame_data_size;
BYTE* p_buffer_raw;
DWORD n_buffer_raw_size;

n_frame_data_size = bmiHeader.biWidth * bmiHeader.biHeight * bmiHeader.biBitCount / 8;
n_buffer_raw_size = n_frame_data_size;
p_buffer_raw = new BYTE [n_buffer_raw_size];

while(n_video_current < n_video_end)
{
LONG cbSample = 0;
LONG cSamples = 0;

// Read one raw frame.
hr = AVIStreamRead( p_avi_stream_video, n_video_current, 1,
p_buffer_raw, n_buffer_raw_size, &cbSample, &cSamples );
if( FAILED( hr ) )
{
printf("Read avi stream error.\n");
delete [] p_buffer_raw;
return FALSE;
}

int64_t i_pts,i_length;
int i_h264_slice_type;
byte* p_h264_buffer;
int n_h264_buffer_size;

/* Frame duration in microseconds; PTS is frame_index * duration. */
i_length = INT64_C(1000000) * videoStreamInfo.dwScale / videoStreamInfo.dwRate;
i_pts = n_video_current * i_length;

if (g_pH264Encoder->Encode(p_buffer_raw, cbSample, i_pts, p_h264_buffer, n_h264_buffer_size, i_h264_slice_type) == FALSE)
{
printf("Encode error.\n");
delete [] p_buffer_raw;
return FALSE;
}

int64_t i_dts;
int i_flags;

i_flags = 0;
if( i_h264_slice_type == H264_SLICE_TYPE_IDR || i_h264_slice_type == H264_SLICE_TYPE_I )
i_flags |= BLOCK_FLAG_TYPE_I;
else if( i_h264_slice_type == H264_SLICE_TYPE_P )
i_flags |= BLOCK_FLAG_TYPE_P;
else if( i_h264_slice_type == H264_SLICE_TYPE_B )
i_flags |= BLOCK_FLAG_TYPE_B;

/* DTS: with B-frames, non-B frames take the interpolated DTS so DTS
   stays monotonic; without B-frames DTS == PTS. */
if( param_h264.m_nBFrameCount > 0 )
{
if( i_flags & BLOCK_FLAG_TYPE_B )
{
/* FIXME : this is wrong if bpyramid is set */
i_dts = i_pts;
g_interpolated_dts = i_dts;
}
else
{
if( g_interpolated_dts )
{
i_dts = g_interpolated_dts;
}
else
{
/* Let's put something sensible */
i_dts = i_pts;
}

g_interpolated_dts += i_length;
}
}
else
{
i_dts = i_pts;
}

g_pTsMux->Mux(g_stream_h264, p_h264_buffer, n_h264_buffer_size,
i_length, i_pts, i_dts, i_flags);

n_video_current++;
}

/* BUGFIX: release resources that were previously leaked on success. */
delete [] p_buffer_raw;
AVIStreamRelease( p_avi_stream_video );
AVIFileRelease( h_avi_file );
AVIFileExit();

return TRUE;
}

/*
 * Strip H.264 emulation-prevention bytes: every 00 00 03 sequence that
 * is followed by at least one more byte has its 0x03 removed, restoring
 * the RBSP from the escaped NAL payload.
 *
 * dst must be at least srclen bytes; *dstlen receives the output length.
 *
 * BUGFIX: the original computed `end - 3` on a pointer, which is
 * undefined behavior when srclen < 3; this version uses index
 * arithmetic so short inputs are handled without UB. The boundary
 * condition (a trailing 00 00 03 is left untouched) is preserved.
 */
static void h264_decode_annexb( uint8_t *dst, int *dstlen,
const uint8_t *src, const int srclen )
{
int di = 0;
int si = 0;

while (si < srclen)
{
/* 00 00 03 with at least one byte following: drop the 0x03. */
if (si + 3 < srclen && src[si] == 0x00 && src[si + 1] == 0x00 &&
src[si + 2] == 0x03)
{
dst[di++] = 0x00;
dst[di++] = 0x00;

si += 3;
continue;
}
dst[di++] = src[si++];
}

*dstlen = di;
}
rotus 2009-09-23
  • 打赏
  • 举报
回复
[Quote=引用 19 楼 ttxk 的回复:]
ffmpeg里面是找到h264字节流的每一帧数据的起始和结尾,然后在sps里分析出帧率.
根据帧率,根据数据每帧的起始和结束位置,就可以算出这段数据的时间信息.
[/Quote]

时间长度=总帧数/帧率= 总帧数/(帧/s),
那么根据每帧的起始和结束位置 得到总帧数 == 找到每一帧的开始后统计帧数?

如果文件很大,那计算所耗时间不就偏长了么?

另外,如果没有时间信息的话那么如果要把h264的es 包装到TS的话,应该如何确定PCR呢?
LZ搞明白了话别忘了在这贴一下啊 ^^

suds 2009-09-23
  • 打赏
  • 举报
回复
非常感谢
我的邮箱是 suds1980 at 126 dot com
^_^
gshlshrenxiong 2009-09-22
  • 打赏
  • 举报
回复
Anybody ? Help!
gshlshrenxiong 2009-09-22
  • 打赏
  • 举报
回复
真是抱歉,我看了一下iso14496-10.但是确实没有找到类似于timecode一样的时间信息。
lmx091001 2009-09-22
  • 打赏
  • 举报
回复
Anybody ? Help!
加载更多回复(10)

2,543

社区成员

发帖
与我相关
我的任务
社区描述
专题开发/技术/项目 多媒体/流媒体开发
社区管理员
  • 多媒体/流媒体开发社区
加入社区
  • 近7日
  • 近30日
  • 至今
社区公告
暂无公告

试试用AI创作助手写篇文章吧