FFmpeg decoding and playback: streaming audio and video over RTMP

2020-07-29 02:00:16

Download the development package. Extraction code: 9yz1

 

// Required usings. FFmpegBinariesHelper and FFmpeg_Manager come from the accompanying
// development package / sample project referenced above.
using System;
using System.Drawing;
using System.Drawing.Imaging;
using System.Runtime.InteropServices;
using FFmpeg.AutoGen;
using NAudio.Wave;

public unsafe class Pull_Rtmp
{
    /// <summary>
    /// Delegate used to display a decoded frame
    /// </summary>
    /// <param name="bitmap"></param>
    public delegate void ShowBitmap(Bitmap bitmap);
    /// <summary>
    /// Run-control flag
    /// </summary>
    bool CanRun;
    string ProgramInfo;

    ShowBitmap Show;
    // Audio/video format context
    AVFormatContext* _pFormatContext;
    /// <summary>
    /// Reads the H.264 packets, decodes and converts them
    /// </summary>
    /// <param name="show">Callback invoked for every decoded frame</param>
    /// <param name="url">Playback URL; a local file path also works</param>
    public unsafe void Start(ShowBitmap show, string url)
    {
        CanRun = true;
        ProgramInfo = string.Format("\nThread[{0}]: Method[{1}]", System.Threading.Thread.CurrentThread.ManagedThreadId, "Start");

        Console.WriteLine(@"Current directory: " + Environment.CurrentDirectory);
        Console.WriteLine(@"Running in {0}-bit mode.", Environment.Is64BitProcess ? @"64" : @"32");
        // Locate and register the FFmpeg DLL directory
        FFmpegBinariesHelper.RegisterFFmpegBinaries();

        #region ffmpeg initialization
        // Register the FFmpeg codecs and network protocols
        ffmpeg.av_register_all();
        ffmpeg.avcodec_register_all();
        ffmpeg.avformat_network_init();

        Console.WriteLine($"FFmpeg version info: {ffmpeg.av_version_info()}");
        #endregion

        #region ffmpeg logging
        // Set the FFmpeg log level
        ffmpeg.av_log_set_level(ffmpeg.AV_LOG_VERBOSE);
        av_log_set_callback_callback logCallback = (p0, level, format, vl) =>
        {
            if (level > ffmpeg.av_log_get_level()) return;

            var lineSize = 1024;
            var lineBuffer = stackalloc byte[lineSize];
            var printPrefix = 1;
            ffmpeg.av_log_format_line(p0, level, format, vl, lineBuffer, lineSize, &printPrefix);
            var line = Marshal.PtrToStringAnsi((IntPtr)lineBuffer);
            Console.Write(line);
        };
        ffmpeg.av_log_set_callback(logCallback);

        #endregion

        #region ffmpeg demuxing and conversion setup

        // Allocate the format context
        var pFormatContext = ffmpeg.avformat_alloc_context();
        _pFormatContext = pFormatContext;

        int error;

        // Open the stream
        error = ffmpeg.avformat_open_input(&pFormatContext, url, null, null);
        if (error != 0) throw new ApplicationException(GetErrorMessage(error));

        // Read the stream information
        error = ffmpeg.avformat_find_stream_info(pFormatContext, null);
        if (error != 0) throw new ApplicationException(GetErrorMessage(error));

        // Only here to print some stream metadata
        AVDictionaryEntry* tag = null;
        while ((tag = ffmpeg.av_dict_get(pFormatContext->metadata, "", tag, ffmpeg.AV_DICT_IGNORE_SUFFIX)) != null)
        {
            var key = Marshal.PtrToStringAnsi((IntPtr)tag->key);
            var value = Marshal.PtrToStringAnsi((IntPtr)tag->value);
            Console.WriteLine($"{key} = {value}");
        }

        // Find the stream indexes in the format context
        AVStream* pStream = null, aStream = null;
        for (var i = 0; i < pFormatContext->nb_streams; i++)
        {
            if (pFormatContext->streams[i]->codec->codec_type == AVMediaType.AVMEDIA_TYPE_VIDEO)
            {
                pStream = pFormatContext->streams[i];
                Show = show;
                Vqueue.Clear();
                AVPacket_Option packet_Option = Video;
                packet_Option.BeginInvoke(i, null, null);
            }
            else if (pFormatContext->streams[i]->codec->codec_type == AVMediaType.AVMEDIA_TYPE_AUDIO)
            {
                aStream = pFormatContext->streams[i];
                WaveOut_init();
                AVPacket_Option packet_Option = Audio;
                packet_Option.BeginInvoke(i, null, null);
            }
        }
        if (pStream == null) throw new ApplicationException(@"Could not find video stream.");

        // Copy the codec context of the video stream
        var codecContext = *pStream->codec;

        Console.WriteLine($"codec name: {ffmpeg.avcodec_get_name(codecContext.codec_id)}");
        // Width, height and pixel format of the source
        var width = codecContext.width;
        var height = codecContext.height;
        var sourcePixFmt = codecContext.pix_fmt;

        // Codec ID
        var codecId = codecContext.codec_id;
        // Target pixel format
        var destinationPixFmt = AVPixelFormat.AV_PIX_FMT_BGR24;

        // Some H.264 streams report AV_PIX_FMT_NONE in codecContext.pix_fmt; treat them as YUV420P
        if (sourcePixFmt == AVPixelFormat.AV_PIX_FMT_NONE && codecId == AVCodecID.AV_CODEC_ID_H264)
        {
            sourcePixFmt = AVPixelFormat.AV_PIX_FMT_YUV420P;
        }

        // SwsContext: used for image scaling and pixel-format conversion
        var pConvertContext = ffmpeg.sws_getContext(width, height, sourcePixFmt,
            width, height, destinationPixFmt,
            ffmpeg.SWS_FAST_BILINEAR, null, null, null);
        if (pConvertContext == null) throw new ApplicationException(@"Could not initialize the conversion context.");

        // Allocate a default frame object: AVFrame
        var pConvertedFrame = ffmpeg.av_frame_alloc();
        // Buffer size required by the target format
        var convertedFrameBufferSize = ffmpeg.av_image_get_buffer_size(destinationPixFmt, width, height, 1);
        // Allocate the buffer for the target format
        var convertedFrameBufferPtr = Marshal.AllocHGlobal(convertedFrameBufferSize);
        var dstData = new byte_ptrArray4();
        var dstLinesize = new int_array4();
        // Fill the image plane pointers and line sizes
        ffmpeg.av_image_fill_arrays(ref dstData, ref dstLinesize, (byte*)convertedFrameBufferPtr, destinationPixFmt, width, height, 1);

        #endregion

        #region ffmpeg decoding
        // Find the decoder for the codec ID
        var pCodec = ffmpeg.avcodec_find_decoder(codecId);
        if (pCodec == null) throw new ApplicationException(@"Unsupported codec.");

        var pCodecContext = &codecContext;

        if ((pCodec->capabilities & ffmpeg.AV_CODEC_CAP_TRUNCATED) == ffmpeg.AV_CODEC_CAP_TRUNCATED)
            pCodecContext->flags |= ffmpeg.AV_CODEC_FLAG_TRUNCATED;

        // Open the codec context with the decoder: AVCodecContext pCodecContext
        error = ffmpeg.avcodec_open2(pCodecContext, pCodec, null);
        if (error < 0) throw new ApplicationException(GetErrorMessage(error));

        // Allocate the decoded frame object: AVFrame pDecodedFrame
        var pDecodedFrame = ffmpeg.av_frame_alloc();

        // Initialize the packet
        var packet = new AVPacket();
        var pPacket = &packet;
        ffmpeg.av_init_packet(pPacket);

        uint VframeNumber = 0;
        uint AframeNumber = 0;
        while (CanRun)
        {
            try
            {
                do
                {
                    // Read one undecoded packet
                    error = ffmpeg.av_read_frame(pFormatContext, pPacket);
                    // Console.WriteLine(pPacket->dts);
                    if (error == ffmpeg.AVERROR_EOF) break;
                    if (error < 0) throw new ApplicationException(GetErrorMessage(error));

                    if (pPacket->stream_index == pStream->index) { }
                    else if (aStream != null && pPacket->stream_index == aStream->index)
                    {
                        AVPacket* aVPacket = ffmpeg.av_packet_clone(pPacket);
                        if (Aqueue.Count > 49) Aqueue.Dequeue();
                        Aqueue.Enqueue(*aVPacket);

                        ++AframeNumber;
                        continue;
                    }
                    else
                    {
                        ffmpeg.av_packet_unref(pPacket); // release the packet reference
                        continue;
                    }

                    // Send the packet to the decoder
                    error = ffmpeg.avcodec_send_packet(pCodecContext, pPacket);
                    if (error < 0) throw new ApplicationException(GetErrorMessage(error));
                    // Receive the decoded frame
                    error = ffmpeg.avcodec_receive_frame(pCodecContext, pDecodedFrame);
                } while (error == ffmpeg.AVERROR(ffmpeg.EAGAIN) && CanRun);
                if (error == ffmpeg.AVERROR_EOF) break;
                if (error < 0) throw new ApplicationException(GetErrorMessage(error));
                if (pPacket->stream_index != pStream->index) continue;

                AVFrame* aVFrame = ffmpeg.av_frame_clone(pDecodedFrame);
                if (Vqueue.Count > 49) Vqueue.Dequeue();
                Vqueue.Enqueue(*aVFrame);
            }
            finally
            {
                ffmpeg.av_packet_unref(pPacket);      // release the packet reference
                ffmpeg.av_frame_unref(pDecodedFrame); // release the decoded-frame reference
            }

            VframeNumber++;
            // ProgramInfo still contains the literal {0}-{3} placeholders appended by Video()/Audio()
            FFmpeg_Manager.ShowMessage = string.Format(ProgramInfo, VframeNumber, AframeNumber, exhibitionNum, effectiveNum);
        }
        // Playback finished: clear the displayed picture
        show(null);

        #endregion

        #region release resources
        Marshal.FreeHGlobal(convertedFrameBufferPtr);
        ffmpeg.av_free(pConvertedFrame);
        ffmpeg.sws_freeContext(pConvertContext);

        ffmpeg.av_free(pDecodedFrame);
        ffmpeg.avcodec_close(pCodecContext);
        ffmpeg.avformat_close_input(&pFormatContext);
        #endregion

        System.Threading.Thread.Sleep(2000);
    }

    System.Collections.Generic.Queue<AVFrame> Vqueue = new System.Collections.Generic.Queue<AVFrame>();
    uint exhibitionNum = 0;
    public unsafe void Video(int Index)
    {
        ProgramInfo += string.Format("\nThread[{0}]: Method[{1}] frames[{2}] shown[{3}]", System.Threading.Thread.CurrentThread.ManagedThreadId, "Video", "{0}", "{2}");
        // Codec context of the video stream
        AVCodecContext codecContext = *(_pFormatContext->streams[Index]->codec);

        // Width, height and pixel format of the source
        int width = codecContext.width;
        int height = codecContext.height;
        var sourcePixFmt = codecContext.pix_fmt;

        // Target pixel format
        var destinationPixFmt = AVPixelFormat.AV_PIX_FMT_BGR24;

        // Some H.264 streams report AV_PIX_FMT_NONE in codecContext.pix_fmt; treat them as YUV420P
        if (sourcePixFmt == AVPixelFormat.AV_PIX_FMT_NONE && codecContext.codec_id == AVCodecID.AV_CODEC_ID_H264)
        {
            sourcePixFmt = AVPixelFormat.AV_PIX_FMT_YUV420P;
        }

        // SwsContext: used for image scaling and pixel-format conversion
        SwsContext* pConvertContext = ffmpeg.sws_getContext(width, height, sourcePixFmt,
             width, height, destinationPixFmt,
             ffmpeg.SWS_FAST_BILINEAR, null, null, null);
        if (pConvertContext == null) throw new ApplicationException(@"Could not initialize the conversion context.");

        //// Allocate a default frame object: AVFrame
        //var pConvertedFrame = ffmpeg.av_frame_alloc();
        // Buffer size required by the target format
        var convertedFrameBufferSize = ffmpeg.av_image_get_buffer_size(destinationPixFmt, width, height, 1);
        // Allocate the buffer for the target format
        IntPtr convertedFrameBufferPtr = Marshal.AllocHGlobal(convertedFrameBufferSize);
        var dstData = new byte_ptrArray4();
        var dstLinesize = new int_array4();
        // Fill the image plane pointers and line sizes
        ffmpeg.av_image_fill_arrays(ref dstData, ref dstLinesize, (byte*)convertedFrameBufferPtr, destinationPixFmt, width, height, 1);

        while (CanRun)
        {
            if (Vqueue.Count < 25)
            {
                System.Threading.Thread.Sleep(40);
                continue;
            }
            AVFrame aVFrame = Vqueue.Dequeue();
            AVFrame* pDecodedFrame = &aVFrame;

            try
            {
                ffmpeg.sws_scale(pConvertContext, pDecodedFrame->data, pDecodedFrame->linesize, 0, height, dstData, dstLinesize);
            }
            finally
            {
                ffmpeg.av_frame_unref(pDecodedFrame); // release the decoded-frame reference
            }
            // Wrap the converted buffer in a Bitmap
            var bitmap = new Bitmap(width, height, dstLinesize[0], PixelFormat.Format24bppRgb, convertedFrameBufferPtr);
            //System.Threading.Thread.Sleep(2000);
            // Invoke the display callback
            Show(bitmap);
            ++exhibitionNum;
        }

        Marshal.FreeHGlobal(convertedFrameBufferPtr);
        //ffmpeg.av_free(pConvertedFrame);
        ffmpeg.sws_freeContext(pConvertContext);
    }

    public delegate void AVPacket_Option(int Index);

    System.Collections.Generic.Queue<AVPacket> Aqueue = new System.Collections.Generic.Queue<AVPacket>();
    System.Collections.Generic.Queue<byte[]> Bqueue = new System.Collections.Generic.Queue<byte[]>();
    uint effectiveNum = 0;
    public unsafe void Audio(int Index)
    {
        ProgramInfo += string.Format("\nThread[{0}]: Method[{1}] frames[{2}] decoded[{3}]", System.Threading.Thread.CurrentThread.ManagedThreadId, "Audio", "{1}", "{3}");
        int error = 0;

        // Allocate the decoded frame object: AVFrame pDecodedFrame
        AVFrame* pDecodedFrame = ffmpeg.av_frame_alloc();
        // Codec parameters of the audio stream
        //AVCodecContext* pCodecContext = _pFormatContext->streams[Index]->codec;
        //AVCodecContext codecContext = *pCodecContext;
        AVCodecParameters* pCodecParameters = _pFormatContext->streams[Index]->codecpar;
        // Find the decoder
        AVCodec* pCodec = ffmpeg.avcodec_find_decoder(pCodecParameters->codec_id);
        // Allocate the codec context
        AVCodecContext* pCodecContext = ffmpeg.avcodec_alloc_context3(pCodec);
        int res = ffmpeg.avcodec_parameters_to_context(pCodecContext, pCodecParameters);
        // Open the decoder
        res = ffmpeg.avcodec_open2(pCodecContext, pCodec, null);

        AVSampleFormat out_sample_fmt = AVSampleFormat.AV_SAMPLE_FMT_S16;
        int out_sample_rate = 44100;
        long out_channel_layout = ffmpeg.AV_CH_LAYOUT_STEREO;
        int out_nb_samples = pCodecContext->frame_size;
        int out_channels = ffmpeg.av_get_channel_layout_nb_channels((ulong)out_channel_layout);
        int out_buf_size = ffmpeg.av_samples_get_buffer_size(null, out_channels, out_nb_samples, out_sample_fmt, 1);
        long in_channel_layout = ffmpeg.av_get_default_channel_layout(pCodecContext->channels);

        // Allocate the resampler
        SwrContext* pSwrContext = ffmpeg.swr_alloc();
        // Set the resampling options
        pSwrContext = ffmpeg.swr_alloc_set_opts(pSwrContext, out_channel_layout, out_sample_fmt, out_sample_rate,
                in_channel_layout, pCodecContext->sample_fmt, pCodecContext->sample_rate, 0, null);
        // Initialize the resampler
        ffmpeg.swr_init(pSwrContext);

        // Maximum buffered audio duration (seconds)
        const int BUFFER_DURATION = 2;
        TimeSpan audioMaxBufferedDuration = new TimeSpan(0, 0, 0, BUFFER_DURATION, 0);

        AVPacket* pPacket;
        byte[] bytes = new byte[out_buf_size + 1];
        byte* out_buf = null;

        while (CanRun)
        {
            if (Aqueue.Count == 0)
            {
                System.Threading.Thread.Sleep(40);
                continue;
            }

            AVPacket packet = Aqueue.Dequeue();
            pPacket = &packet;

            try
            {
                if (bufferedWaveProvider.BufferedDuration.CompareTo(audioMaxBufferedDuration) > 0)
                {
                    bufferedWaveProvider.ClearBuffer();
                }

                // Decode the audio stream
                if ((error = ffmpeg.avcodec_send_packet(pCodecContext, pPacket)) == 0)
                {
                    if ((error = ffmpeg.avcodec_receive_frame(pCodecContext, pDecodedFrame)) == 0)
                    {
                        //int bufSize = ffmpeg.av_samples_get_buffer_size(null, ffmpeg.av_frame_get_channels(pDecodedFrame)
                        //    , pDecodedFrame->nb_samples, (AVSampleFormat)pDecodedFrame->format, 1);

                        out_buf = (byte*)Marshal.AllocHGlobal(out_buf_size + 1);
                        for (int i = 0; i < bytes.Length; ++i)
                        {
                            out_buf[i] = 0;
                        }

                        // ----------------------- Resample the audio frame and feed the playback buffer -------------------------- //
                        ffmpeg.swr_convert(pSwrContext, &out_buf, pCodecContext->frame_size, // convert sample rate/format
                                pDecodedFrame->extended_data, pDecodedFrame->nb_samples);

                        // Copy byte* into byte[]
                        for (int i = 0; i < bytes.Length; ++i)
                        {
                            bytes[i] = out_buf[i];
                        }
                        bufferedWaveProvider.AddSamples(bytes, 0, out_buf_size);
                        ++effectiveNum;
                        //System.IO.FileInfo info = new System.IO.FileInfo(@"C:\Users\yin_y\Desktop\M3U8\佛教歌曲 - 大悲咒.wav");
                        //System.IO.FileStream fs = info.OpenRead();
                        //byte[] buffer = new byte[info.Length + 1];
                        //fs.Read(buffer, 0, buffer.Length);
                        ////System.IO.MemoryStream ms = new System.IO.MemoryStream(buffer);
                        // Alternative: play the audio data with SoundPlayer
                        //System.IO.MemoryStream ms = new System.IO.MemoryStream(bytes);
                        //System.Media.SoundPlayer player = new System.Media.SoundPlayer(ms);
                        //ms.Position = 0; //player.Stream = ms;
                        //player.Play();
                    }
                }
                else
                {
                    string str = GetErrorMessage(error);
                }
            }
            catch (Exception ex)
            {
                throw new ApplicationException("Application:" + ex.Message);
            }
            finally
            {
                ffmpeg.av_packet_unref(pPacket);      // release the packet reference
                ffmpeg.av_frame_unref(pDecodedFrame); // release the decoded-frame reference
                Marshal.FreeHGlobal((IntPtr)out_buf);
                out_buf = null; // avoid freeing the same buffer twice on the next iteration
            }
        }
        //Marshal.FreeHGlobal((IntPtr)out_buf);
        ffmpeg.swr_free(&pSwrContext);
        //Marshal.FreeHGlobal((IntPtr)out_buf);
    }
    /// <summary>
    /// Get the FFmpeg error message for an error code
    /// </summary>
    /// <param name="error"></param>
    /// <returns></returns>
    private static unsafe string GetErrorMessage(int error)
    {
        var bufferSize = 1024;
        var buffer = stackalloc byte[bufferSize];
        ffmpeg.av_strerror(error, buffer, (ulong)bufferSize);
        var message = Marshal.PtrToStringAnsi((IntPtr)buffer);
        return message;
    }

    public void Stop()
    {
        CanRun = false;
    }

    // NAudio audio playback components
    private WaveOut waveOut;
    private BufferedWaveProvider bufferedWaveProvider;
    /// <summary>
    /// Initialize the NAudio output device and playback buffer
    /// </summary>
    private void WaveOut_init()
    {
        waveOut = new WaveOut();
        bufferedWaveProvider = new BufferedWaveProvider(new WaveFormat());
        waveOut.Init(bufferedWaveProvider);
        waveOut.Play();
    }
}
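
For reference, here is a minimal usage sketch (not from the original code): it assumes a WinForms form with a PictureBox named pictureBox1, two buttons wired to the hypothetical btnPlay_Click/btnStop_Click handlers, and an RTMP URL of your own. Start blocks in its read loop, so it is run on a background thread, and the ShowBitmap callback is marshalled back to the UI thread and copied before display.

// Usage sketch only; pictureBox1, the button handlers and the URL are assumptions.
public partial class PlayerForm : System.Windows.Forms.Form
{
    private readonly Pull_Rtmp _player = new Pull_Rtmp();

    private void btnPlay_Click(object sender, System.EventArgs e)
    {
        // Start blocks in its read loop, so keep it off the UI thread.
        var worker = new System.Threading.Thread(() =>
            _player.Start(OnFrame, "rtmp://example.com/live/stream"))
        { IsBackground = true };
        worker.Start();
    }

    // ShowBitmap callback: marshal the decoded frame onto the UI thread.
    private void OnFrame(System.Drawing.Bitmap bitmap)
    {
        if (!IsHandleCreated) return;
        BeginInvoke(new System.Action(() =>
        {
            pictureBox1.Image?.Dispose();
            // The Bitmap handed to the callback wraps the decoder's unmanaged buffer,
            // so copy it before the next frame overwrites that memory.
            pictureBox1.Image = bitmap == null ? null : new System.Drawing.Bitmap(bitmap);
        }));
    }

    private void btnStop_Click(object sender, System.EventArgs e) => _player.Stop();
}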

 

Source: https://www.cnblogs.com/shangguanxiaoxian/p/13394851.html
