C# ffmpeg saves an RTSP stream to MP4: LAN RTSP streams save normally, but WAN RTSP streams save with blank data

Time:03-12

CanRun = true;
// Locate the FFmpeg DLL directory and register the binaries
FFmpegBinariesHelper.RegisterFFmpegBinaries();

#region ffmpeg initialization
// Initialize and register the ffmpeg codecs and formats
ffmpeg.av_register_all();
ffmpeg.avcodec_register_all();
ffmpeg.avformat_network_init();
#endregion

#region ffmpeg log
// Set the ffmpeg log level and (optionally) a log callback
//ffmpeg.av_log_set_level(ffmpeg.AV_LOG_VERBOSE);
//av_log_set_callback_callback logCallback = (p0, level, format, vl) =>
//{
//    if (level > ffmpeg.av_log_get_level()) return;

//    var lineSize = 1024;
//    var lineBuffer = stackalloc byte[lineSize];
//    var printPrefix = 1;
//    ffmpeg.av_log_format_line(p0, level, format, vl, lineBuffer, lineSize, &printPrefix);
//    var line = Marshal.PtrToStringAnsi((IntPtr)lineBuffer);
//    Console.Write(line);
//};
//ffmpeg.av_log_set_callback(logCallback);

#endregion


AVFormatContext* i_fmt_ctx;
AVStream* i_video_stream = null;
AVFormatContext* o_fmt_ctx;
AVStream* o_video_stream;

/* must be null so that avformat_open_input() allocates a new context */
i_fmt_ctx = null;
if (ffmpeg.avformat_open_input(&i_fmt_ctx, url, null, null) != 0)
{
    return;
}

if (ffmpeg.avformat_find_stream_info(i_fmt_ctx, null) < 0)
{
    return;
}

//ffmpeg.av_dump_format(i_fmt_ctx, 0, url, 0);

/* find the first video stream */
for (uint i = 0; i < i_fmt_ctx->nb_streams; i++)
{
    if (i_fmt_ctx->streams[i]->codec->codec_type == AVMediaType.AVMEDIA_TYPE_VIDEO)
    {
        i_video_stream = i_fmt_ctx->streams[i];
        break;
    }
}

ffmpeg.avformat_alloc_output_context2(&o_fmt_ctx, null, null, filename);

/*
 * since all input files are supposed to be identical (framerate, dimension, color format, ...)
 * we can safely set output codec values from the first input file
 */
o_video_stream = ffmpeg.avformat_new_stream(o_fmt_ctx, null);
{
    AVCodecContext* c;
    c = o_video_stream->codec;
    c->bit_rate = 400000;
    c->codec_id = i_video_stream->codec->codec_id;
    c->codec_type = i_video_stream->codec->codec_type;
    c->time_base.num = i_video_stream->time_base.num;
    c->time_base.den = i_video_stream->time_base.den;
    //fprintf(stderr, "time_base.num = %d time_base.den = %d\n", c->time_base.num, c->time_base.den);
    c->width = i_video_stream->codec->width;
    c->height = i_video_stream->codec->height;
    c->pix_fmt = i_video_stream->codec->pix_fmt;
    //printf("%d %d %d", c->width, c->height, c->pix_fmt);
    c->flags = i_video_stream->codec->flags;
    c->flags |= ffmpeg.CODEC_FLAG_GLOBAL_HEADER;
    c->me_range = i_video_stream->codec->me_range;
    c->max_qdiff = i_video_stream->codec->max_qdiff;
    c->qmin = i_video_stream->codec->qmin;
    c->qmax = i_video_stream->codec->qmax;
    c->qcompress = i_video_stream->codec->qcompress;
}
//DateTime StartTime = DateTime.Now;
//DateTime StopTime = StartTime.AddSeconds(Secodes);

ffmpeg.avio_open(&o_fmt_ctx->pb, filename, ffmpeg.AVIO_FLAG_WRITE);
ffmpeg.avformat_write_header(o_fmt_ctx, null);

long last_pts = 0;
long last_dts = 0;
long pts = 0;
long dts = 0;
while (CanRun)
{
    AVPacket i_pkt;
    ffmpeg.av_init_packet(&i_pkt);
    i_pkt.size = 0;
    i_pkt.data = null;
    if (ffmpeg.av_read_frame(i_fmt_ctx, &i_pkt) < 0)
        break;
    /*
     * pts and dts should increase monotonically
     * pts should be >= dts
     */
    i_pkt.flags |= ffmpeg.AV_PKT_FLAG_KEY;
    pts = i_pkt.pts;
    i_pkt.pts += last_pts;
    dts = i_pkt.dts;
    i_pkt.dts += last_dts;
    i_pkt.stream_index = 0;

    ffmpeg.av_interleaved_write_frame(o_fmt_ctx, &i_pkt);
    //if (DateTime.Now > StopTime)
    //{
    //    CanRun = false;
    //}
}
last_dts += dts;
last_pts += pts;
ffmpeg.avformat_close_input(&i_fmt_ctx);
ffmpeg.av_write_trailer(o_fmt_ctx);
ffmpeg.avcodec_close(o_fmt_ctx->streams[0]->codec);
ffmpeg.av_freep(&o_fmt_ctx->streams[0]->codec);
ffmpeg.av_freep(&o_fmt_ctx->streams[0]);
ffmpeg.avio_close(o_fmt_ctx->pb);
ffmpeg.av_free(o_fmt_ctx);
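
For debugging, the read loop can be instrumented to show what av_read_frame actually delivers over each connection. The sketch below is only a suggestion; it reuses i_fmt_ctx, i_pkt and CanRun from the code above, and it reads the keyframe flag before the original loop forces AV_PKT_FLAG_KEY onto every packet.

while (CanRun)
{
    AVPacket i_pkt;
    ffmpeg.av_init_packet(&i_pkt);
    i_pkt.size = 0;
    i_pkt.data = null;

    int ret = ffmpeg.av_read_frame(i_fmt_ctx, &i_pkt);
    if (ret < 0)
    {
        // EOF or a network error; print the error code before leaving the loop
        Console.WriteLine($"av_read_frame returned {ret}");
        break;
    }

    // Is this packet a keyframe as delivered by the demuxer?
    bool isKey = (i_pkt.flags & ffmpeg.AV_PKT_FLAG_KEY) != 0;
    Console.WriteLine($"stream={i_pkt.stream_index} key={isKey} size={i_pkt.size} pts={i_pkt.pts} dts={i_pkt.dts}");

    // ... timestamp handling and av_interleaved_write_frame as in the original loop ...
}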




The code is above. Saving a LAN RTSP stream works fine, but when saving an RTSP stream that comes in over the WAN, the saved file contains about 13 seconds of empty data. For example, when I save 20 seconds of video, the LAN RTSP recording plays back all 20 seconds normally; the WAN recording is also 20 seconds long, but roughly 13 seconds of it have no data, i.e. playback shows a blank screen for 13 seconds. I don't understand why. Could someone take a look for me?
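
One thing worth checking: while recording a single segment, last_pts and last_dts in the loop above are still 0, so every packet is written to the MP4 with exactly the timestamps the camera sent. If the WAN stream's first pts/dts is far from zero, or there is a gap at the start, many players show that offset as a blank stretch at the beginning of the file. Below is a minimal sketch of normalizing the timestamps before av_interleaved_write_frame; first_pts and first_dts are names introduced here for illustration, not part of the original code.

long first_pts = ffmpeg.AV_NOPTS_VALUE;
long first_dts = ffmpeg.AV_NOPTS_VALUE;
// inside the while (CanRun) loop, after av_read_frame succeeds:
if (first_pts == ffmpeg.AV_NOPTS_VALUE)
{
    // remember the timestamps of the first packet so the output starts at 0
    first_pts = i_pkt.pts;
    first_dts = i_pkt.dts;
}
// (packets with pts == AV_NOPTS_VALUE would need extra handling; omitted in this sketch)
i_pkt.pts -= first_pts;
i_pkt.dts -= first_dts;
// convert from the input stream time base to the output stream time base
ffmpeg.av_packet_rescale_ts(&i_pkt, i_video_stream->time_base, o_video_stream->time_base);
ffmpeg.av_interleaved_write_frame(o_fmt_ctx, &i_pkt);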

CodePudding user response:

Is there no image and no sound at all? If you only receive I frames and no P frames, the picture will break up (artifacts), but it will not be completely blank.
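
If only I frames are arriving over the WAN, the usual suspect is UDP packet loss on the RTSP session. One option worth trying (an assumption about the setup, not something confirmed in this thread) is to force RTSP over TCP by passing options to avformat_open_input; rtsp_transport and stimeout (socket timeout in microseconds) are options of FFmpeg's RTSP demuxer.

AVDictionary* opts = null;
ffmpeg.av_dict_set(&opts, "rtsp_transport", "tcp", 0); // carry RTP over the TCP control connection instead of UDP
ffmpeg.av_dict_set(&opts, "stimeout", "5000000", 0);   // give up after 5 s without data
if (ffmpeg.avformat_open_input(&i_fmt_ctx, url, null, &opts) != 0)
{
    return;
}
ffmpeg.av_dict_free(&opts);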

CodePudding user response:

Quoting the reply from rabbit party at large on floor 1:
Is there no image and no sound at all? If you only receive I frames and no P frames, the picture will break up (artifacts), but it will not be completely blank.

  • Tags: C#