
Android MediaCodec: decoding a live H.264 video stream to YUV and converting the YUV color format to NV21

Initialize MediaCodec

    private MediaCodec mediaCodec;
    private ByteBuffer[] inputBuffers;

    private void initMediaCodec(Surface surface) {
        try {
            Log.d(TAG, "onGetNetVideoData: ");
            // Create the decoder; the MIME type for H.264 is "video/avc"
            mediaCodec = MediaCodec.createDecoderByType("video/avc");
            // Describe the incoming stream
            MediaFormat mediaFormat = MediaFormat.createVideoFormat("video/avc", Width, Height);
            // Expected frame rate of the video, in frames per second
            mediaFormat.setInteger(MediaFormat.KEY_FRAME_RATE, 20);
            // COLOR_FormatYUV420Flexible is defined in MediaCodecInfo.CodecCapabilities
            mediaFormat.setInteger(MediaFormat.KEY_COLOR_FORMAT, COLOR_FormatYUV420Flexible);

            // byte[] headerSps = {0, 0, 0, 1, 103, 66, 0, 41, -115, -115, 64, 80, 30, -48, 15, 8, -124, 83, -128};
            // byte[] headerPps = {0, 0, 0, 1, 104, -54, 67, -56};
            // mediaFormat.setByteBuffer("csd-0", ByteBuffer.wrap(headerSps));
            // mediaFormat.setByteBuffer("csd-1", ByteBuffer.wrap(headerPps));

            // Configure the codec; the surface is left null so the decoded frames
            // can be read back as byte arrays instead of being rendered directly
            mediaCodec.configure(mediaFormat, null, null, 0);
            mediaCodec.start();
        } catch (IOException e) {
            e.printStackTrace();
            Log.e(TAG, "Failed to create the decoder");
        }
        inputBuffers = mediaCodec.getInputBuffers();
    }
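
The decoder is configured with COLOR_FormatYUV420Flexible, but the buffer layout a device actually produces varies (the commented-out probing code further down checks this per device). As a rough standalone sketch, assuming API 21+ and using an illustrative log tag, the advertised color formats of every "video/avc" decoder can also be listed up front via MediaCodecList:

    // A minimal sketch (not part of the original project): list the color formats
    // advertised by each H.264 decoder on the device.
    private void logAvcDecoderColorFormats() {
        MediaCodecList list = new MediaCodecList(MediaCodecList.REGULAR_CODECS);
        for (MediaCodecInfo info : list.getCodecInfos()) {
            if (info.isEncoder()) continue;                       // decoders only
            for (String type : info.getSupportedTypes()) {
                if (!type.equalsIgnoreCase("video/avc")) continue;
                MediaCodecInfo.CodecCapabilities caps = info.getCapabilitiesForType(type);
                for (int format : caps.colorFormats) {
                    Log.i("COLOR_Format_TAG", info.getName() + " supports color format " + format);
                }
            }
        }
    }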

Process the data: decode the H.264 stream to YUV

The data passed in here is the live H.264 video stream.

    private void onFrame(byte[] buf, int offset, int length) {
        MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
        // Wait up to 10,000 microseconds for a free input buffer;
        // returns -1 if all of the codec's input buffers are still in use
        int inIndex = mediaCodec.dequeueInputBuffer(10000);
        if (inIndex >= 0) {
            // Get the usable buffer for the returned index
            ByteBuffer byteBuffer = inputBuffers[inIndex];
            // Clear any stale data
            byteBuffer.clear();
            // Fill the buffer with the H.264 data
            byteBuffer.put(buf, offset, length);
            // Hand the filled buffer at inIndex back to MediaCodec for decoding
            mediaCodec.queueInputBuffer(inIndex, 0, length, mCount * 20, 0);
            mCount++;
        } else {
            Log.i(TAG, "inIndex < 0");
            // No free input buffer yet; wait for the next frame
            return;
        }

        // Query the codec's output queue for a decoded buffer index
        int outIndex = mediaCodec.dequeueOutputBuffer(info, 10000);
        Log.e(TAG, "decoder output outIndex " + outIndex);
        if (outIndex >= 0) {
            // The codec's ByteBuffer cannot be used directly; copy it out
            ByteBuffer byteBuffer = mediaCodec.getOutputBuffer(outIndex);
            // Apply the offset and size reported by the codec
            byteBuffer.position(info.offset);
            byteBuffer.limit(info.offset + info.size);
            byte[] ba = new byte[byteBuffer.remaining()];
            byteBuffer.get(ba);

            // The NV21 output needs a byte array of the same size as the NV12 input
            byte[] yuv = new byte[ba.length];
            // The decoder's color format is device dependent; the converters below were tried one by one
            //convertI420ToNV21(ba, yuv, Width, Height);
            //convertYV12toNV21(ba, yuv, Width, Height);
            convertNV12toNV21(ba, yuv, Width, Height);
            NV21Data(yuv);

            // Probe the color formats supported by the decoder:
            // MediaCodecInfo.CodecCapabilities capabilities = codecInfo.getCapabilitiesForType("video/avc");
            // for (int i = 0; i < capabilities.colorFormats.length; i++) {
            //     int format = capabilities.colorFormats[i];
            //
            //     // Huawei tablet: COLOR_FormatYUV420SemiPlanar, COLOR_FormatYUV420Planar
            //     // Meizu phone:   COLOR_FormatYUV420SemiPlanar
            //     // rk3588s:       COLOR_FormatYUV420Planar, COLOR_FormatYUV420Flexible,
            //     //                COLOR_FormatYUV420PackedSemiPlanar, COLOR_FormatYUV420SemiPlanar
            //     switch (format) {
            //         case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Planar: // I420 or YV12
            //             Log.i("COLOR_Format_TAG", "=========COLOR_FormatYUV420Planar");
            //             byte[] convertNv21YUV420Planar = new byte[ba.length];
            //             // Not sure which layout this really is; the converters were tried one by one
            //             //convertI420ToNV21(ba, convertNv21YUV420Planar, Width, Height);
            //             //convertYV12toNV21(ba, convertNv21YUV420Planar, Width, Height);
            //             long l1 = System.currentTimeMillis();
            //             convertNV12toNV21(ba, convertNv21YUV420Planar, Width, Height);
            //             Log.i("Timing", "NV21 conversion took: " + (System.currentTimeMillis() - l1));
            //             long l2 = System.currentTimeMillis();
            //             NV21Data(convertNv21YUV420Planar);
            //             Log.i("Timing", "recognition took: " + (System.currentTimeMillis() - l2));
            //             continue;
            //
            //         case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420SemiPlanar: // NV12
            //             Log.i("COLOR_Format_TAG", "=======COLOR_FormatYUV420SemiPlanar");
            //             byte[] nv21YUV420SemiPlanar = new byte[ba.length];
            //             convertNV12toNV21(ba, nv21YUV420SemiPlanar, Width, Height);
            //             NV21Data(nv21YUV420SemiPlanar);
            //             continue;
            //
            //         case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420PackedSemiPlanar:
            //             Log.i("COLOR_Format_TAG", "=======COLOR_FormatYUV420PackedSemiPlanar");
            //             byte[] nv21YUV420PackedSemiPlanar = new byte[ba.length];
            //             convertNV12toNV21(ba, nv21YUV420PackedSemiPlanar, Width, Height);
            //             NV21Data(nv21YUV420PackedSemiPlanar);
            //             continue;
            //
            //         case MediaCodecInfo.CodecCapabilities.COLOR_FormatYUV420Flexible:
            //             Log.i("COLOR_Format_TAG", "=======COLOR_FormatYUV420Flexible");
            //             byte[] nv21YUV420YUV420Flexible = new byte[ba.length];
            //             convertNV12toNV21(ba, nv21YUV420YUV420Flexible, Width, Height);
            //             NV21Data(nv21YUV420YUV420Flexible);
            //             continue;
            //
            //         default:
            //             continue;
            //     }
            // }

            // If a surface were bound, releasing with true would render it; here just release the buffer
            mediaCodec.releaseOutputBuffer(outIndex, false);
        } else {
            Log.e(TAG, "no decoded output available");
        }
    }
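
onFrame() above dequeues at most one output buffer per input packet. A hedged variant (a sketch using the same fields, not the article's code) drains every pending output buffer and reads the format the decoder reports after INFO_OUTPUT_FORMAT_CHANGED, which is where the NV12-versus-planar question can be answered at runtime instead of by trial and error:

    // A minimal sketch (not from the original project): drain all pending output
    // buffers and log the color format the decoder reports.
    private void drainDecoder() {
        MediaCodec.BufferInfo info = new MediaCodec.BufferInfo();
        while (true) {
            int outIndex = mediaCodec.dequeueOutputBuffer(info, 0);
            if (outIndex == MediaCodec.INFO_TRY_AGAIN_LATER) {
                break;                                            // nothing left to drain right now
            } else if (outIndex == MediaCodec.INFO_OUTPUT_FORMAT_CHANGED) {
                MediaFormat format = mediaCodec.getOutputFormat();
                Log.i(TAG, "decoder color format: " + format.getInteger(MediaFormat.KEY_COLOR_FORMAT));
            } else if (outIndex >= 0) {
                ByteBuffer byteBuffer = mediaCodec.getOutputBuffer(outIndex);
                byteBuffer.position(info.offset);
                byteBuffer.limit(info.offset + info.size);
                byte[] ba = new byte[byteBuffer.remaining()];
                byteBuffer.get(ba);
                byte[] yuv = new byte[ba.length];
                convertNV12toNV21(ba, yuv, Width, Height);        // same conversion as onFrame()
                NV21Data(yuv);
                mediaCodec.releaseOutputBuffer(outIndex, false);
            }
        }
    }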

Process the decoded YUV data in NV21 color format

    private int printImageStatus = 0;

    private void NV21Data(byte[] nv21) {
        // Wrap the NV21 stream in a YuvImage, convert it to a Bitmap, show it in an ImageView,
        // or save it to disk. If the image looks normal (no gray frames, no blue tint, and no
        // red/blue channel swap), the stream really is standard NV21 data.
        YuvImage yuvImage = new YuvImage(nv21, ImageFormat.NV21, Width, Height, null);
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        yuvImage.compressToJpeg(new Rect(0, 0, Width, Height), 100, baos);
        byte[] data = baos.toByteArray();
        Log.i(TAG, "NV21Data-data: " + data.length);
        Bitmap bitmap = BitmapFactory.decodeByteArray(data, 0, data.length);
        if (bitmap != null) {
            runOnUiThread(new Runnable() {
                @Override
                public void run() {
                    mIvShowImage.setImageBitmap(bitmap);
                }
            });
            // Save the bitmap to a PNG file once
            if (printImageStatus == 0) {
                printImageStatus = 1;
                try {
                    File myCaptureFile = new File(Environment.getExternalStorageDirectory(), "img.png");
                    BufferedOutputStream bos = new BufferedOutputStream(new FileOutputStream(myCaptureFile));
                    bitmap.compress(Bitmap.CompressFormat.PNG, 100, bos);
                    bos.flush();
                    bos.close();
                } catch (Exception e) {
                    e.printStackTrace();
                }
            }
        }
    }
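
Before wrapping the array in YuvImage it is worth checking its size: a full NV21 frame is always width × height × 3 / 2 bytes, so anything else usually means the decoder output contains stride or slice-height padding rather than a plain NV12/NV21 layout. A small hedged helper (not in the original code):

    // A minimal sketch: a complete NV21 frame is a width*height Y plane
    // followed by a width*height/2 interleaved VU plane.
    private static boolean isFullNv21Frame(byte[] nv21, int width, int height) {
        return nv21 != null && nv21.length == width * height * 3 / 2;
    }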

YUV color-format conversion

    public static void convertI420ToNV21(byte[] i420, byte[] nv21, int width, int height) {
        int size = width * height;
        int offset = size;
        // Copy the Y plane as-is
        System.arraycopy(i420, 0, nv21, 0, size);
        // I420 stores U then V as separate planes; NV21 interleaves them as V, U
        for (int i = 0; i < size / 4; i++) {
            nv21[offset + 2 * i] = i420[offset + i + size / 4];   // V
            nv21[offset + 2 * i + 1] = i420[offset + i];          // U
        }
    }

    public static void convertYV12toNV21(byte[] yv12, byte[] nv21, int width, int height) {
        int size = width * height;
        int vOffset = size;
        int uOffset = size + (size / 4);
        // Copy the Y plane as-is
        System.arraycopy(yv12, 0, nv21, 0, size);
        // YV12 stores V then U as separate planes; NV21 interleaves them as V, U
        for (int i = 0; i < size / 4; i++) {
            nv21[vOffset + (i * 2)] = yv12[vOffset + i];      // V
            nv21[vOffset + (i * 2) + 1] = yv12[uOffset + i];  // U
        }
    }

    public static void convertNV12toNV21(byte[] nv12, byte[] nv21, int width, int height) {
        int size = width * height;
        int offset = size;
        // Copy the Y plane as-is
        System.arraycopy(nv12, 0, nv21, 0, offset);
        // NV12 interleaves chroma as U, V; NV21 interleaves it as V, U — swap each pair
        for (int i = 0; i < size / 4; i++) {
            nv21[offset + (i * 2) + 1] = nv12[offset + (i * 2)];       // U
            nv21[offset + (i * 2)] = nv12[offset + (i * 2) + 1];       // V
        }
    }
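
To see the U/V swap done by convertNV12toNV21 concretely, here is a tiny self-contained check; the 4×2 frame and its byte values are made up for illustration and are not part of the article:

    // Hypothetical check: NV12 stores chroma as U,V pairs, NV21 as V,U pairs.
    public static void main(String[] args) {
        int width = 4, height = 2;                     // 8 luma bytes + 4 chroma bytes
        byte[] nv12 = new byte[width * height * 3 / 2];
        for (int i = 0; i < nv12.length; i++) nv12[i] = (byte) i;
        byte[] nv21 = new byte[nv12.length];
        convertNV12toNV21(nv12, nv21, width, height);
        // The Y plane is untouched; the first chroma pair (U=8, V=9) becomes (V=9, U=8)
        System.out.println(nv21[8] + ", " + nv21[9]);  // prints: 9, 8
    }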

Source of the live H.264 video stream

    @Override
    public void onPacketEvent(byte[] data) {
        onFrame(data, 0, data.length);
        // Optionally dump the raw H.264 stream to the sdcard
        //wirte2file(data, data.length);
    }

Write the H.264 stream to the sdcard

    private String dsetfilePath = Environment.getExternalStorageDirectory() + "/" + "test.h264";

    private void wirte2file(byte[] buf, int length) {
        if (isStart) {
            if (BufOs == null) {
                destfile = new File(dsetfilePath);
                try {
                    destfs = new FileOutputStream(destfile);
                    BufOs = new BufferedOutputStream(destfs);
                    Log.d(TAG, "wirte2file-new ");
                } catch (FileNotFoundException e) {
                    Log.i("TRACK", "initerro" + e.getMessage());
                    Log.d(TAG, "wirte2file-FileNotFoundException:" + e.getMessage());
                    e.printStackTrace();
                }
            }
            try {
                BufOs.write(buf, 0, length);
                BufOs.flush();
                Log.d(TAG, "wirte2file-write");
            } catch (Exception e) {
                Log.d(TAG, "wirte2file-e: " + e.getMessage());
            }
        }
    }

    private boolean isStart;

    public void onStop(View view) {
        isStart = false;
        Toast.makeText(this, "Stopped saving", Toast.LENGTH_SHORT).show();
    }

    public void onStart(View view) {
        isStart = true;
        Toast.makeText(this, "Started saving", Toast.LENGTH_SHORT).show();
    }
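
One detail the helper above leaves open is closing the file: BufOs is flushed after every write, but nothing ever closes it. A hedged addition (a sketch reusing the same BufOs and destfs fields, not code from the article) that could be called after onStop():

    // A minimal sketch: flush and close the buffered stream so the tail of test.h264 is not lost.
    private void closeFile() {
        try {
            if (BufOs != null) {
                BufOs.flush();
                BufOs.close();
            }
            if (destfs != null) {
                destfs.close();
            }
        } catch (IOException e) {
            e.printStackTrace();
        } finally {
            BufOs = null;
            destfs = null;
        }
    }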

Fetching the live H.264 video stream over RTSP

    public class FFDemuxJava {

        static {
            System.loadLibrary("demux");
        }

        private long m_handle = 0;
        private EventCallback mEventCallback = null;

        public void init(String url) {
            m_handle = native_Init(url);
        }

        public void Start() {
            native_Start(m_handle);
        }

        public void stop() {
            native_Stop(m_handle);
        }

        public void unInit() {
            native_UnInit(m_handle);
        }

        public void addEventCallback(EventCallback callback) {
            mEventCallback = callback;
        }

        // Called from native code when a player event occurs
        private void playerEventCallback(int msgType, float msgValue) {
            if (mEventCallback != null)
                mEventCallback.onMessageEvent(msgType, msgValue);
        }

        // Called from native code for every demuxed H.264 packet
        private void packetEventCallback(byte[] data) {
            if (mEventCallback != null)
                mEventCallback.onPacketEvent(data);
        }

        private native long native_Init(String url);
        private native void native_Start(long playerHandle);
        private native void native_Stop(long playerHandle);
        private native void native_UnInit(long playerHandle);

        public interface EventCallback {
            void onMessageEvent(int msgType, float msgValue);
            void onPacketEvent(byte[] data);
        }
    }
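
For context, here is a minimal wiring sketch that connects FFDemuxJava to the onFrame() decoder above; the RTSP URL and the method it lives in are assumptions, not from the article:

    // Hypothetical wiring: demuxed H.264 packets arrive in onPacketEvent and are decoded.
    private void startRtspDecoding() {
        FFDemuxJava demuxer = new FFDemuxJava();
        demuxer.addEventCallback(new FFDemuxJava.EventCallback() {
            @Override
            public void onMessageEvent(int msgType, float msgValue) {
                Log.i(TAG, "demux message: " + msgType + " / " + msgValue);
            }

            @Override
            public void onPacketEvent(byte[] data) {
                onFrame(data, 0, data.length);         // feed each packet to the decoder
            }
        });
        demuxer.init("rtsp://192.168.1.100:554/stream");  // placeholder stream URL
        demuxer.Start();
    }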

Write the C/C++ code that loads the FFmpeg libraries

    #include <jni.h>
    #include <string>
    #include <cstring>

    #include "FFBridge.h"

    extern "C"
    {
    #include <libavutil/time.h>
    #include <libavcodec/avcodec.h>
    #include <libavcodec/packet.h>
    #include <libavutil/imgutils.h>
    #include <libswscale/swscale.h>
    #include <libavformat/avformat.h>
    #include <libavutil/opt.h>
    }

    extern "C" JNIEXPORT jstring JNICALL
    Java_com_qmcy_demux_MainActivity_stringFromJNI(JNIEnv *env, jobject /* this */)
    {
        std::string hello = "Hello from C++";
        return env->NewStringUTF(hello.c_str());
    }

    // Builds a human-readable summary of the linked FFmpeg library versions.
    // Note: LIBAVFILTER_VERSION / LIBSWRESAMPLE_VERSION only expand to numbers if the
    // corresponding libavfilter / libswresample headers are also included.
    extern "C" JNIEXPORT jstring JNICALL
    Java_com_qmcy_demux_MainActivity_GetVersion(JNIEnv *env, jobject /* this */)
    {
        char strBuffer[1024 * 4] = {0};
        strcat(strBuffer, "libavcodec : ");
        strcat(strBuffer, AV_STRINGIFY(LIBAVCODEC_VERSION));
        strcat(strBuffer, "\nlibavformat : ");
        strcat(strBuffer, AV_STRINGIFY(LIBAVFORMAT_VERSION));
        strcat(strBuffer, "\nlibavutil : ");
        strcat(strBuffer, AV_STRINGIFY(LIBAVUTIL_VERSION));
        strcat(strBuffer, "\nlibavfilter : ");
        strcat(strBuffer, AV_STRINGIFY(LIBAVFILTER_VERSION));
        strcat(strBuffer, "\nlibswresample : ");
        strcat(strBuffer, AV_STRINGIFY(LIBSWRESAMPLE_VERSION));
        strcat(strBuffer, "\nlibswscale : ");
        strcat(strBuffer, AV_STRINGIFY(LIBSWSCALE_VERSION));
        strcat(strBuffer, "\navcodec_configure : \n");
        strcat(strBuffer, avcodec_configuration());
        strcat(strBuffer, "\navcodec_license : ");
        strcat(strBuffer, avcodec_license());
        //LOGCATE("GetFFmpegVersion\n%s", strBuffer);
        return env->NewStringUTF(strBuffer);
    }

    extern "C" JNIEXPORT jlong JNICALL
    Java_com_qmcy_demux_FFDemuxJava_native_1Init(JNIEnv *env, jobject obj, jstring jurl)
    {
        const char *url = env->GetStringUTFChars(jurl, nullptr);
        FFBridge *bridge = new FFBridge();
        bridge->Init(env, obj, const_cast<char *>(url));
        env->ReleaseStringUTFChars(jurl, url);
        return reinterpret_cast<jlong>(bridge);
    }

    extern "C" JNIEXPORT void JNICALL
    Java_com_qmcy_demux_FFDemuxJava_native_1Start(JNIEnv *env, jobject obj, jlong handle)
    {
        if (handle != 0) {
            FFBridge *bridge = reinterpret_cast<FFBridge *>(handle);
            bridge->Start();
        }
    }

    extern "C" JNIEXPORT void JNICALL
    Java_com_qmcy_demux_FFDemuxJava_native_1Stop(JNIEnv *env, jobject obj, jlong handle)
    {
        if (handle != 0) {
            FFBridge *bridge = reinterpret_cast<FFBridge *>(handle);
            bridge->Stop();
        }
    }

    extern "C" JNIEXPORT void JNICALL
    Java_com_qmcy_demux_FFDemuxJava_native_1UnInit(JNIEnv *env, jobject obj, jlong handle)
    {
        if (handle != 0) {
            FFBridge *bridge = reinterpret_cast<FFBridge *>(handle);
            bridge->UnInit();
            delete bridge;
        }
    }

Source code: https://gitee.com/baipenggui/demux_demo.git
