标签:android ios 录制视频 ffmpeg x264
/*
 * Look up a metadata value by key, first in the container-level
 * metadata, then (as a fallback) in the video stream's metadata.
 *
 * @param key      metadata key to look up (e.g. "rotate")
 * @param ic       demuxer context holding the container metadata
 * @param video_st video stream searched when the container has no entry
 * @return the value string owned by the dictionary (do not free), or
 *         NULL when the key is absent or the arguments are invalid
 */
char* get_video_metadate(char* key ,AVFormatContext *ic, AVStream *video_st)
{
    AVDictionaryEntry *entry = NULL;

    /* BUGFIX: the original returned an uninitialized local when key was
     * NULL or not found in either dictionary (undefined behavior). */
    if (!ic || !key) {
        return NULL;
    }
    /* container-level metadata takes precedence */
    entry = av_dict_get(ic->metadata, key, NULL, AV_DICT_IGNORE_SUFFIX);
    if (!entry && video_st) {
        /* fall back to the stream-level metadata */
        entry = av_dict_get(video_st->metadata, key, NULL, AV_DICT_IGNORE_SUFFIX);
    }
    return entry ? entry->value : NULL;
}
/*
 * Read the rotation angle stored in the video's metadata.
 *
 * @param ic       the format context of the video
 * @param video_st the video AVStream
 * @return 90, 180 or 270 when the "rotate" tag holds one of those
 *         values; 0 in every other case
 */
int get_rotation_byparam(AVFormatContext *ic, AVStream *video_st)
{
    int angle = 0;
    char *rotate = get_video_metadate("rotate", ic, video_st);

    if (rotate == NULL)
        return 0;

    /* only the three meaningful rotation values are accepted */
    if (!strcmp(rotate, "90") || !strcmp(rotate, "180") || !strcmp(rotate, "270"))
        angle = chartoint(rotate);

    /* defensive clamp: anything unexpected collapses to 0 */
    if (angle != 90 && angle != 180 && angle != 270)
        angle = 0;

    dmprint("[@metadate]the rotation angle is ", angle);
    return (angle > 0 && angle < 360) ? angle : 0;
}
其实就是解析头文件中"rotate"所对应的value。另外头文件中还有很多内容,例如分辨率、时长、日期、地址等。这里使用的是ffmpeg;在Android上使用mp4info这个第三方jar包也可以实现,而且轻量级,比ffmpeg小很多。(方案二三四五可以解决)

List<String> fileList = new ArrayList<String>();
// Concatenate several MP4 files with mp4parser: parse each input, collect
// all audio ("soun") and video ("vide") tracks, append them per type, and
// mux the result into output.mp4.
List<Movie> moviesList = new LinkedList<Movie>();
fileList.add("/1387865774255.mp4");
fileList.add("/1387865800664.mp4");
try
{
for (String file : fileList)
{
// MovieCreator.build() parses the MP4 container into a Movie object
moviesList.add(MovieCreator.build(file));
}
}
catch (IOException e)
{
// NOTE(review): a failed parse leaves moviesList partially filled
e.printStackTrace();
}
// split every track of every movie by its handler type
List<Track> videoTracks = new LinkedList<Track>();
List<Track> audioTracks = new LinkedList<Track>();
for (Movie m : moviesList)
{
for (Track t : m.getTracks())
{
if (t.getHandler().equals("soun"))
{
audioTracks.add(t);
}
if (t.getHandler().equals("vide"))
{
videoTracks.add(t);
}
}
}
// build the merged movie: one appended audio track + one appended video track
Movie result = new Movie();
try
{
if (audioTracks.size() > 0)
{
result.addTrack(new AppendTrack(audioTracks.toArray(new Track[audioTracks.size()])));
}
if (videoTracks.size() > 0)
{
result.addTrack(new AppendTrack(videoTracks.toArray(new Track[videoTracks.size()])));
}
}
catch (IOException e)
{
// NOTE(review): swallowing this leaves `result` half-built
e.printStackTrace();
}
// serialize the merged movie to disk
Container out = new DefaultMp4Builder().build(result);
try
{
FileChannel fc = new RandomAccessFile("output.mp4", "rw").getChannel();
out.writeContainer(fc);
fc.close();
}
catch (Exception e)
{
// NOTE(review): fc is not closed when writeContainer throws
e.printStackTrace();
}
moviesList.clear();
fileList.clear();此部分代码出自: http://cstriker1407.info/blog/android-application-development-notes-mp4parser/。IsoFile isoFile = new IsoFile(getCompleteFilePath(i));
// Rotate a clip by writing a 90-degree rotation matrix into each track
// header box; samples themselves are untouched — players apply the matrix.
Movie m = new Movie();
List<TrackBox> trackBoxes = isoFile.getMovieBox().getBoxes(
TrackBox.class);
for (TrackBox trackBox : trackBoxes) {
trackBox.getTrackHeaderBox().setMatrix(Matrix.ROTATE_90);
m.addTrack(new Mp4TrackImpl(trackBox));
}
inMovies[i - 1] = m;

/*
 * Initialise the x264 encoder and open the output file descriptor.
 *
 * @param jstr output file path
 * @param w,h  frame width / height in pixels
 * @param o    screen-recording orientation; 0 and 3 keep the current value
 * @param inv  whether the picture is inverted (front camera)
 * @return 0 on success, -1 when the output file cannot be opened
 */
JNIEXPORT jint JNICALL Java_com_dangyutao_test_encode_startEncode(
		JNIEnv *env, jclass class, jstring jstr, jint w, jint h, jint o, jboolean inv) {
	/* record the recording orientation (values 0 and 3 are ignored) */
	if (o != 3 && o != 0) {
		isInversion = inv;
		orientation = o;
	}
	/* encoder geometry: I420 needs 3/2 bytes per pixel */
	WIDTH = w;
	HEIGHT = h;
	yuv_size = w * h * 3 / 2;

	x264_param_t param;
	/* BUGFIX: the published listing had "@param" here and "¶m" below —
	 * both HTML-mangled forms of "&param"; as written it cannot compile. */
	x264_param_default(&param);
	x264_param_default_preset(&param, "ultrafast", "zerolatency");
	param.i_threads = ENCODE_THREAD;
	param.i_width = WIDTH;
	param.i_height = HEIGHT;
	param.i_fps_num = FPS;
	param.i_fps_den = 1;
	param.i_frame_total = 0;
	param.i_csp = CSP;
	param.i_keyint_min = FPS * 3;
	param.i_keyint_max = FPS * 10;
	param.i_bframe = 30;
	param.i_bframe_bias = 100;
	param.rc.i_qp_min = 25;
	param.rc.i_qp_max = 50;
	/* rate control: CQP = constant QP (bitrate params ignored),
	 * CRF = constant quality, ABR = average bitrate; CRF is used here */
	param.rc.i_rc_method = X264_RC_CRF;
	param.rc.i_bitrate = 2000000;
	/* quality-loss factor: smaller is sharper; default 23, minimum 0 */
	param.rc.f_rf_constant = 3;
	x264_param_apply_profile(&param, "baseline");
	encoder = x264_encoder_open(&param);

	/* open the output file descriptor.
	 * BUGFIX: the mode was the decimal literal 444; use octal 0644 so the
	 * created file has the intended rw-r--r-- permissions. */
	outf = open(jstringTostring(env, jstr), O_CREAT | O_WRONLY, 0644);
	if (outf < 0) {
		/* BUGFIX: at this point neither yuv_buffer nor inf is valid, so
		 * the original free(yuv_buffer)/close(inf) operated on unrelated
		 * state — only the encoder needs to be released here. */
		x264_encoder_close(encoder);
		return -1;
	}
	/* allocate the I420 working buffer filled by addDetailFrameByBuff */
	yuv = (uint8_t *) malloc(WIDTH * HEIGHT * 3 / 2);
	return 0;
}

JNIEXPORT jint JNICALL Java_com_dangyutao_test_encode_addDetailFrameByBuff(
JNIEnv *env, jclass class, jbyteArray jb, jint nw, jint nh, jint w,
jint h, jboolean isFrontCamera) {
jbyte* dataPtr = (*env)->GetByteArrayElements(env, jb, NULL);
uint8_t* buffer = (uint8_t*) dataPtr;
detailYuvPic(buffer, yuv, nw, nh, w, h, isFrontCamera);
//初始化pic——in
x264_picture_alloc(&pic_in, CSP, WIDTH, HEIGHT);
//用java传来的buff,将yuvbuff填充,
yuv_buffer = (uint8_t*) yuv;
/*
//rgb:
pic_in.img.i_plane = 1;
pic_in.img.plane[0] = yuv_buffer;
pic_in.img.i_stride[0] = 3 * WIDTH;
*/
//yuv420:将yuvbuff 填充进pic_in
pic_in.img.plane[0] = yuv_buffer;
pic_in.img.plane[1] = &yuv_buffer[WIDTH * HEIGHT];
pic_in.img.plane[2] = &yuv_buffer[WIDTH * HEIGHT * 5 / 4];
pic_in.img.i_plane = 3;
/*
pic_in.img.i_stride[0] = WIDTH;
pic_in.img.i_stride[1] = WIDTH / 2;
pic_in.img.i_stride[2] = WIDTH / 2;
*/
pic_in.img.i_csp = CSP;
//pic_in.i_type = X264_TYPE_AUTO;
//编码
x264_nal_t *nals;
int nnal;
pic_in.i_pts = i_pts++;
x264_encoder_encode(encoder, &nals, &nnal, &pic_in, &pic_out);
x264_nal_t *nal;
for (nal = nals; nal < nals + nnal; nal++) {
write(outf, nal->p_payload, nal->i_payload);
}
//释放多余内存
(*env)->ReleaseByteArrayElements(env, jb, dataPtr, JNI_ABORT);
//LOG("ENCODE OVER");
return 0;
}扫尾:JNIEXPORT jint JNICALL Java_com_dangyutao_test_encode_finishEncode(
JNIEnv *env, jclass class) {
//扫尾
x264_encoder_close(encoder);
free(yuv);
close(outf);
//free(yuv_buffer); //添加buff时内存已经释放
return 0;
}

/*
 * Mux a raw H.264 elementary stream and an AAC file into an MP4 container.
 *
 * @param h264file  path of the input H.264 stream
 * @param aacfile   path of the input AAC stream
 * @param mp4file   path of the MP4 file to create
 * @param usefilter non-zero to run the "aac_adtstoasc" bitstream filter
 *                  (needed when the AAC input carries ADTS headers)
 * @return 0 on success, a negative step-specific error code on failure,
 *         or RET_CLOSE when stopped by the user via IS_GOING
 */
int dm_mux(char* h264file,char *aacfile, char* mp4file,int usefilter)
{
AVOutputFormat *ofmt = NULL;
//Input AVFormatContext and Output AVFormatContext
AVFormatContext *ifmt_ctx_v = NULL, *ifmt_ctx_a = NULL,*ofmt_ctx = NULL;
AVPacket pkt;
int ret, i,retu =0,filter_ret=0;
// int fps;
int videoindex_v=-1,videoindex_out=-1;
int audioindex_a=-1,audioindex_out=-1;
int frame_index=0;
int64_t cur_pts_v=0,cur_pts_a=0;
//set file path
const char *in_filename_v = h264file;
const char *in_filename_a = aacfile;
const char *out_filename = mp4file;
// only initialised/used when usefilter is non-zero (see below)
AVBitStreamFilterContext* aacbsfc;
//register before use
av_register_all();
//open both inputs and probe their stream info
if ((ret = avformat_open_input(&ifmt_ctx_a, in_filename_a, 0, 0)) < 0) {
retu = -1;//-1 mean audio file opened failed
dmprint("open audio file failed",ret);
goto end;
}
if ((ret = avformat_open_input(&ifmt_ctx_v, in_filename_v, 0, 0)) < 0) {
retu = -2; //-2 mean video file opened failed
dmprint("open video file failed",ret);
goto end;
}
if ((ret = avformat_find_stream_info(ifmt_ctx_v, 0)) < 0) {
retu = -3; //-3 mean get video info failed
dmprint("get video info failed",ret);
goto end;
}
if ((ret = avformat_find_stream_info(ifmt_ctx_a, 0)) < 0) {
retu = -4;//-4 mean get audio info failed
dmprint("get audio info failed ret = ",ret);
goto end;
}
//allocate the output context; the muxer is guessed from out_filename
avformat_alloc_output_context2(&ofmt_ctx, NULL, NULL, out_filename);
if (!ofmt_ctx) {
dmprint("open output file failed",ret);
retu = -5;
goto end;
}
ofmt = ofmt_ctx->oformat;
//find the first video stream in the video input and mirror it on the output
for (i = 0; i < ifmt_ctx_v->nb_streams; i++) {
//Create output AVStream according to input AVStream
if(ifmt_ctx_v->streams[i]->codec->codec_type==AVMEDIA_TYPE_VIDEO){
AVStream *in_stream = ifmt_ctx_v->streams[i];
AVStream *out_stream = avformat_new_stream(ofmt_ctx, in_stream->codec->codec);
videoindex_v=i;
if (!out_stream) {
dmprint_string( "Failed allocating output stream");
retu = -6;
goto end;
}
videoindex_out=out_stream->index;
//Copy the settings of AVCodecContext
if (avcodec_copy_context(out_stream->codec, in_stream->codec) < 0) {
dmprint_string( "Failed to copy context from input to output stream codec context");
retu = -7;
goto end;
}
out_stream->codec->codec_tag = 0;
if (ofmt_ctx->oformat->flags & AVFMT_GLOBALHEADER)
out_stream->codec->flags |= CODEC_FLAG_GLOBAL_HEADER;
break;
}
}
//find the first audio stream in the audio input and mirror it on the output
for (i = 0; i < ifmt_ctx_a->nb_streams; i++) {
//Create output AVStream according to input AVStream
if(ifmt_ctx_a->streams[i]->codec->codec_type==AVMEDIA_TYPE_AUDIO){
AVStream *in_stream = ifmt_ctx_a->streams[i];
AVStream *out_stream = avformat_new_stream(ofmt_ctx, in_stream->codec->codec);
audioindex_a=i;
if (!out_stream) {
dmprint_string("Failed allocating output stream");
retu = -8;
goto end;
}
audioindex_out=out_stream->index;
//Copy the settings of AVCodecContext
if (avcodec_copy_context(out_stream->codec, in_stream->codec) < 0) {
dmprint_string( "Failed to copy context from input to output stream codec context");
retu =-9;
goto end;
}
out_stream->codec->codec_tag = 0;
if (ofmt_ctx->oformat->flags & AVFMT_GLOBALHEADER)
out_stream->codec->flags |= CODEC_FLAG_GLOBAL_HEADER;
break;
}
}
//Open output file (unless the muxer needs no file, e.g. AVFMT_NOFILE)
if (!(ofmt->flags & AVFMT_NOFILE)) {
if (avio_open(&ofmt_ctx->pb, out_filename, AVIO_FLAG_WRITE) < 0) {
dmprint_string( "Could not open output file ");
retu = -10;
goto end;
}
}
//Write file header
if (avformat_write_header(ofmt_ctx, NULL) < 0) {
dmprint_string( "Error occurred when opening output file");
retu = -11;
goto end;
}
if(usefilter)
aacbsfc = av_bitstream_filter_init("aac_adtstoasc");
// Main interleaving loop: av_compare_ts() decides whether the next packet
// comes from the video or the audio input so the output stays in order.
// IS_GOING is an external flag that lets the caller abort the mux.
while (IS_GOING) {
AVFormatContext *ifmt_ctx;
int stream_index=0;
AVStream *in_stream, *out_stream;
//Get an AVPacket
if(av_compare_ts(cur_pts_v,ifmt_ctx_v->streams[videoindex_v]->time_base,cur_pts_a,
ifmt_ctx_a->streams[audioindex_a]->time_base) <= 0)
{
ifmt_ctx=ifmt_ctx_v;
stream_index=videoindex_out;
if(av_read_frame(ifmt_ctx, &pkt) >= 0){
// keep reading until a packet of the wanted stream shows up
// NOTE(review): packets skipped here appear not to be freed —
// possible leak; confirm against av_read_frame ownership rules
do{
in_stream = ifmt_ctx->streams[pkt.stream_index];
out_stream = ofmt_ctx->streams[stream_index];
if(pkt.stream_index==videoindex_v){
//Simple Write PTS: raw H.264 has no timestamps, synthesize them
if(pkt.pts==AV_NOPTS_VALUE){
//Write PTS
AVRational time_base1=in_stream->time_base;
//Duration between 2 frames (us)
int64_t calc_duration=(double)AV_TIME_BASE/av_q2d(in_stream->r_frame_rate);
//Parameters
pkt.pts=(double)(frame_index*calc_duration)/(double)(av_q2d(time_base1)*AV_TIME_BASE);
pkt.dts=pkt.pts;
pkt.duration=(double)calc_duration/(double)(av_q2d(time_base1)*AV_TIME_BASE);
frame_index++;
}
cur_pts_v=pkt.pts;
break;
}
}
while(av_read_frame(ifmt_ctx, &pkt) >= 0);
}
else
{
// video input exhausted — stop muxing
break;
}
}
else
{
ifmt_ctx=ifmt_ctx_a;
stream_index=audioindex_out;
if(av_read_frame(ifmt_ctx, &pkt) >= 0){
do
{
in_stream = ifmt_ctx->streams[pkt.stream_index];
out_stream = ofmt_ctx->streams[stream_index];
if(pkt.stream_index==audioindex_a)
{
//Simple Write PTS
if(pkt.pts==AV_NOPTS_VALUE)
{
//Write PTS
AVRational time_base1=in_stream->time_base;
//Duration between 2 frames (us)
int64_t calc_duration=(double)AV_TIME_BASE/av_q2d(in_stream->r_frame_rate);
//Parameters
pkt.pts=(double)(frame_index*calc_duration)/(double)(av_q2d(time_base1)*AV_TIME_BASE);
pkt.dts=pkt.pts;
pkt.duration=(double)calc_duration/(double)(av_q2d(time_base1)*AV_TIME_BASE);
frame_index++;
}
cur_pts_a=pkt.pts;
break;
}
}
while(av_read_frame(ifmt_ctx, &pkt) >= 0);
}
else
{
// audio input exhausted — stop muxing
break;
}
}
// strip ADTS headers / add ASC when the AAC input is ADTS-framed
if(usefilter)
filter_ret = av_bitstream_filter_filter(aacbsfc, out_stream->codec, NULL, &pkt.data,&pkt.size, pkt.data, pkt.size, 0);
if(filter_ret)
{
dmprint_string("failt to use :filter");
// NOTE(review): -10 is already used for the avio_open failure above;
// a distinct code (e.g. -12) would disambiguate the two
retu = -10;
goto end;
}
//Convert PTS/DTS from the input time base to the output time base
pkt.pts = av_rescale_q_rnd(pkt.pts, in_stream->time_base, out_stream->time_base,(AV_ROUND_NEAR_INF|AV_ROUND_PASS_MINMAX));
pkt.dts = av_rescale_q_rnd(pkt.dts, in_stream->time_base, out_stream->time_base,(AV_ROUND_NEAR_INF|AV_ROUND_PASS_MINMAX));
pkt.duration = av_rescale_q(pkt.duration, in_stream->time_base, out_stream->time_base);
pkt.pos = -1;
pkt.stream_index=stream_index;
//Write
if (av_interleaved_write_frame(ofmt_ctx, &pkt) < 0) {
av_free_packet(&pkt);
dmprint_string( "Error muxing packet");
break;
}
//av_packet_unref(&pkt);
//av_interleaved_write_frame(ofmt_ctx, &pkt);
av_free_packet(&pkt);
}
if(IS_GOING)
{
//Write file trailer
av_write_trailer(ofmt_ctx);
}
else
retu =RET_CLOSE;//-77 mean is close by user
if(usefilter)
av_bitstream_filter_close(aacbsfc);
end:
// avformat_close_input() frees the contexts and NULLs the pointers, so the
// avformat_free_context() calls on the inputs below are redundant no-ops.
avformat_close_input(&ifmt_ctx_v);
avformat_close_input(&ifmt_ctx_a);
/* close output */
if (ofmt_ctx && !(ofmt->flags & AVFMT_NOFILE))
avio_close(ofmt_ctx->pb);
avformat_free_context(ofmt_ctx);
avformat_free_context(ifmt_ctx_v);
avformat_free_context(ifmt_ctx_a);
if (ret < 0 && ret != AVERROR_EOF) {
dmprint_string( "Error occurred.");
}
dmprint("return is ",retu);
return retu;
}
假如音频拥有封装格式,则需要使用该滤镜。版权声明:本文为博主原创文章,未经博主允许不得转载。
标签:android ios 录制视频 ffmpeg x264
原文地址:http://blog.csdn.net/dangxw_/article/details/48104617