Advertisement
Guest User

ffmpeg stream

a guest
Nov 24th, 2015
57
0
Never
Not a member of Pastebin yet? Sign Up, it unlocks many cool features!
C++ 7.34 KB | None | 0 0
  1. extern "C" {
  2. #include <libavcodec/avcodec.h>
  3. #include <libavformat/avformat.h>
  4. #include <libavfilter/avfiltergraph.h>
  5. #include <libavfilter/buffersink.h>
  6. #include <libavfilter/buffersrc.h>
  7. #include <libavutil/opt.h>
  8. #include <libavutil/pixdesc.h>
  9. #include <libavdevice/avdevice.h>
  10. #include <libavutil/time.h>
  11. };
  12. #include <iostream>
  13. #ifdef  __cplusplus
  14. static const std::string av_make_error_string(int errnum) {
  15.     char errbuf[AV_ERROR_MAX_STRING_SIZE];
  16.     av_strerror(errnum, errbuf, AV_ERROR_MAX_STRING_SIZE);
  17.     return (std::string)errbuf;
  18. }
  19. #undef av_err2str
  20. #define av_err2str(errnum) av_make_error_string(errnum).c_str()
  21. #endif // __cplusplus
  22.  
/* Capture source: DirectShow device name (Windows webcam). */
#define INPUT_SOURCE "video=Integrated Webcam"
/* Stream destination: raw UDP endpoint for the MPEG-TS mux. */
#define OUTPUT_SOURCE "udp://127.0.0.1:8888"
/* NOTE(review): function-like expansion — av_find_input_format("dshow")
 * is called anew at every use of this macro. */
#define INPUT_FORMAT av_find_input_format("dshow")

/* Index of the stream used for frame pacing in main(); set by open_input_file().
 * NOTE(review): despite the name, the original loop also matches audio streams. */
int videoindex = 0;

/* Global demuxer/muxer contexts shared by open_input_file(),
 * open_output_file() and main(); freed in main()'s cleanup path. */
AVFormatContext *inputFormatContext = avformat_alloc_context();
AVFormatContext *outputFormatContext = avformat_alloc_context();
//AVInputFormat *inputFormat = av_find_input_format("dshow");

/* Per-stream filter-graph endpoints. Never allocated in this file
 * (presumably a leftover from the transcoding example this is based on);
 * only the NULL-guarded cleanup in main() touches it. */
typedef struct FilteringContext {
    AVFilterContext *buffersink_ctx;
    AVFilterContext *buffersrc_ctx;
    AVFilterGraph *filter_graph;
} FilteringContext;
static FilteringContext *filter_ctx;
  39.  
  40. static int open_input_file(const char *filename)
  41. {
  42.     int ret;
  43.     unsigned int i;
  44.  
  45.     if ((ret = avformat_open_input(&inputFormatContext, INPUT_SOURCE, INPUT_FORMAT, NULL)) < 0) {
  46.         av_log(NULL, AV_LOG_ERROR, "Cannot open input file\n");
  47.         return ret;
  48.     }
  49.     if ((ret = avformat_find_stream_info(inputFormatContext, NULL)) < 0) {
  50.         av_log(NULL, AV_LOG_ERROR, "Cannot find stream information\n");
  51.         return ret;
  52.     }
  53.     for (i = 0; i < inputFormatContext->nb_streams; i++) {
  54.         AVStream *stream;
  55.         AVCodecContext *codecContext;
  56.         stream = inputFormatContext->streams[i];
  57.         codecContext = stream->codec;
  58.         /* Reencode video & audio and remux subtitles etc. */
  59.         if (codecContext->codec_type == AVMEDIA_TYPE_VIDEO
  60.             || codecContext->codec_type == AVMEDIA_TYPE_AUDIO) {
  61.             /* Open decoder */
  62.             ret = avcodec_open2(codecContext,
  63.                 avcodec_find_decoder(codecContext->codec_id), NULL);
  64.             if (ret < 0) {
  65.                 av_log(NULL, AV_LOG_ERROR, "Failed to open decoder for stream #%u\n", i);
  66.                 return ret;
  67.             }
  68.         }
  69.         if (inputFormatContext->streams[i]->codec->codec_type == AVMEDIA_TYPE_VIDEO
  70.             || inputFormatContext->streams[i]->codec->codec_type == AVMEDIA_TYPE_AUDIO) {
  71.             videoindex = i;
  72.             break;
  73.         }
  74.     }
  75.     av_dump_format(inputFormatContext, 0, INPUT_SOURCE, 0);
  76.     return 0;
  77. }
  78. static int open_output_file(const char *filename) {
  79.     AVCodecContext *decoderContext, *encoderContext;
  80.     AVCodec *encoder;
  81.     int ret;
  82.     unsigned int i;
  83.     avformat_alloc_output_context2(&outputFormatContext, NULL, "mpegts", OUTPUT_SOURCE);
  84.     if (!outputFormatContext) {
  85.         av_log(NULL, AV_LOG_ERROR, "Could not create output context\n");
  86.         return AVERROR_UNKNOWN;
  87.     }
  88.     for (i = 0; i < inputFormatContext->nb_streams; i++) {
  89.         AVStream *streamIn = inputFormatContext->streams[i];
  90.         AVStream *streamOut = avformat_new_stream(outputFormatContext, streamIn->codec->codec);
  91.         if (!streamOut) {
  92.             av_log(NULL, AV_LOG_ERROR, "Failed allocating output stream\n");
  93.             return AVERROR_UNKNOWN;
  94.         }
  95.         ret = avcodec_copy_context(streamOut->codec, streamIn->codec);
  96.         if (ret < 0) {
  97.             av_log(NULL, AV_LOG_ERROR, "Failed to copy context from input to output stream codec context\n");
  98.             return AVERROR_UNKNOWN;
  99.         }
  100.         streamOut->codec->codec_tag = 0;
  101.         if (outputFormatContext->oformat->flags & AVFMT_GLOBALHEADER)
  102.             streamOut->codec->flags |= CODEC_FLAG_GLOBAL_HEADER;
  103.     }
  104.     av_dump_format(outputFormatContext, 0, OUTPUT_SOURCE, 1);
  105.     if (!(outputFormatContext->oformat->flags & AVFMT_NOFILE)) {
  106.         ret = avio_open(&outputFormatContext->pb, OUTPUT_SOURCE, AVIO_FLAG_WRITE);
  107.         if (ret < 0) {
  108.             av_log(NULL, AV_LOG_ERROR, "Could not open output file '%s'", filename);
  109.             return ret;
  110.         }
  111.     }
  112.     return 0;
  113. }
  114. int main(int argc, char **argv)
  115. {
  116.     av_register_all();
  117.     avformat_network_init();
  118.     avdevice_register_all();
  119.     avcodec_register_all();
  120.     avfilter_register_all();
  121.  
  122.     int ret, start_time, frame_index = 0;
  123.     AVPacket packet;// = { .data = NULL,.size = 0 };
  124.     packet.data = NULL;
  125.     packet.size = 0;
  126.     AVFrame *frame = NULL;
  127.     enum AVMediaType type;
  128.     unsigned int stream_index;
  129.     unsigned int i;
  130.     int got_frame;
  131.     int(*dec_func)(AVCodecContext *, AVFrame *, int *, const AVPacket *);
  132.  
  133.     open_input_file(INPUT_SOURCE);
  134.     open_output_file(OUTPUT_SOURCE);
  135.  
  136.     if ((ret = avformat_write_header(outputFormatContext, NULL)) < 0) {
  137.         av_log(NULL, AV_LOG_DEBUG, "Error occurred when opening output URL\n");
  138.         return ret;
  139.     }
  140.     start_time = av_gettime();
  141.  
  142.    
  143.     /* read all packets */
  144.     while (1) {
  145.         AVStream *streamIn, *streamOut;
  146.         if ((ret = av_read_frame(inputFormatContext, &packet)) < 0)
  147.             break;
  148.         if (packet.pts == AV_NOPTS_VALUE) {
  149.             AVRational time_base_aux = inputFormatContext->streams[videoindex]->time_base;
  150.             // Duration between 2 frames(us)
  151.             int64_t calc_duration = (double)AV_TIME_BASE / av_q2d(inputFormatContext->streams[videoindex]->r_frame_rate);
  152.             //Parameters
  153.             packet.pts = (double)(frame_index*calc_duration) / (double)(av_q2d(time_base_aux)*AV_TIME_BASE);
  154.             packet.dts = packet.pts;
  155.             packet.duration = (double)calc_duration / (double)(av_q2d(time_base_aux)*AV_TIME_BASE);
  156.         }
  157.         if (packet.stream_index == videoindex) {
  158.             AVRational time_base = inputFormatContext->streams[videoindex]->time_base;
  159.             AVRational time_base_q = { 1,AV_TIME_BASE };
  160.             int64_t pts_time = av_rescale_q(packet.dts, time_base, time_base_q);
  161.             int64_t now_time = av_gettime() - start_time;
  162.             if (pts_time > now_time)
  163.                 av_usleep(pts_time - now_time);
  164.         }
  165.         streamIn = inputFormatContext->streams[packet.stream_index];
  166.         streamOut = outputFormatContext->streams[packet.stream_index];
  167.         /* copy packet */
  168.         //Convert PTS/DTS
  169.         packet.pts = av_rescale_q_rnd(packet.pts, streamIn->time_base, streamOut->time_base, (AVRounding)(AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX));
  170.         packet.dts = av_rescale_q_rnd(packet.dts, streamIn->time_base, streamOut->time_base, (AVRounding)(AV_ROUND_NEAR_INF | AV_ROUND_PASS_MINMAX));
  171.         packet.duration = av_rescale_q(packet.duration, streamIn->time_base, streamOut->time_base);
  172.         packet.pos = -1;
  173.         //Print to Screen
  174.         if (packet.stream_index == videoindex) {
  175.             //printf("Send %8d video frames to output URL\n", frame_index);
  176.             frame_index++;
  177.         }
  178.         //ret = av_write_frame(ofmt_ctx, &pkt);
  179.         ret = av_interleaved_write_frame(outputFormatContext, &packet);
  180.  
  181.         if (ret < 0) {
  182.             printf("Error muxing packet\n");
  183.             break;
  184.         }
  185.     }
  186.     av_write_trailer(outputFormatContext);
  187. end:
  188.     av_packet_unref(&packet);
  189.     av_frame_free(&frame);
  190.     for (i = 0; i < inputFormatContext->nb_streams; i++) {
  191.         avcodec_close(inputFormatContext->streams[i]->codec);
  192.         if (outputFormatContext && outputFormatContext->nb_streams > i && outputFormatContext->streams[i] && outputFormatContext->streams[i]->codec)
  193.             avcodec_close(outputFormatContext->streams[i]->codec);
  194.         if (filter_ctx && filter_ctx[i].filter_graph)
  195.             avfilter_graph_free(&filter_ctx[i].filter_graph);
  196.     }
  197.     av_free(filter_ctx);
  198.     avformat_close_input(&inputFormatContext);
  199.     if (outputFormatContext && !(outputFormatContext->oformat->flags & AVFMT_NOFILE))
  200.         avio_closep(&outputFormatContext->pb);
  201.     avformat_free_context(outputFormatContext);
  202.     if (ret < 0)
  203.         av_log(NULL, AV_LOG_ERROR, "Error occurred: %s\n", av_err2str(ret));
  204.     return ret ? 1 : 0;
  205. }
Advertisement
Add Comment
Please, Sign In to add comment
Advertisement