ffmpeg

Stream

As with VLC, you can also stream directly with FFmpeg.

// save to file
ffmpeg.exe -i http://example.com/playlist.m3u8 -vcodec libx264 -acodec copy -f mp4 record.mp4 

// restream to hls
ffmpeg.exe -re -i manifest.m3u8 -f hls -hls_time 1 -hls_list_size 5 -hls_flags delete_segments live.m3u8
ffmpeg -i manifest.m3u8 -f ssegment -strftime 1 -segment_list live.m3u8 -segment_time 10 live_%%Y%%m%%d%%H%%M%%S.ts

// restream to dash
ffmpeg -i manifest.m3u8 -f dash live.mpd

// restream to rtmp
ffmpeg.exe -re -i test.mp4 -vcodec copy -acodec copy -f flv rtmp://localhost:1935/live/test
ffmpeg -re -i file.mp4 -vcodec libx264 -f flv rtmp://live.twitch.tv/app/<STREAM KEY>

//restream to hls
ffmpeg -re -i https://cnn-cnninternational-1-de.samsung.wurl.com/manifest/playlist.m3u8 -hls_time 1 -hls_list_size 3 -hls_flags delete_segments -s 1366x768 -strict -2 -ab 128k -ar 44100 c:\xampp\htdocs\playlist.m3u8

// find recording devices
ffmpeg -list_devices true -f dshow -i dummy
// list recording device options
ffmpeg -f dshow -list_options true -i video="Integrated Camera"
// start recording
ffmpeg -f dshow -s 320x240 -r 30 -vcodec mjpeg -i video="Integrated Camera" output.mp4

// stream desktop
ffmpeg -f gdigrab -r 30 -i desktop -c:v libx264 -g 250 -c:a libfdk_aac -ac 2 -hls_time 1 -hls_list_size 4 -hls_wrap 8 -s 1366x768 -strict -2 -ab 128k -ar 44100 D:/xampp/htdocs/playlist.m3u8

// stream image
ffmpeg.exe -loop 1 -i test.png -vcodec libx264 -acodec copy -vf "drawtext=fontsize=340:fontcolor=white:font=SansSerif:textfile='xxx.txt':reload=1:x=(w-text_w)/2:y=(h-text_h)/2" -f flv rtmp://

Convert

https://bytescout.com/blog/2016/12/ffmpeg-command-lines-convert-various-video-formats.html

ffmpeg -i file.mp4 file.mp3

// mp4 to flv
ffmpeg -i v.mp4 -c:v libx264 -crf 19 v.flv

// avi to gif
ffmpeg -i v.avi v.gif

// mp4 to ts
ffmpeg -i test.mp4 -bsf:v h264_mp4toannexb -codec copy output.ts

// mix png and mp3 to mp4
ffmpeg -loop 1 -framerate 2 -i test.png -i test.mp3 -c:v libx264 -preset medium -tune stillimage -crf 18 -c:a copy -shortest -pix_fmt yuv420p test.mp4

ffmpeg -i video.mp4 -i overlay.png -filter_complex "[0:v][1:v] overlay=0:0" -c:a copy output.mp4

// extract frames
ffmpeg -i test.mp4 frame_%05d.bmp

// extract all frames between 1 and 5 seconds and between 11 and 15 seconds:
ffmpeg -i in.mp4 -vf select='between(t,1,5)+between(t,11,15)' -vsync 0 out%d.png

Modify

// extract metadata
ffmpeg -i in.mp3 -f ffmetadata metadata.txt

// add metadata
ffmpeg -i in.mp3 -acodec copy -metadata title="t" -metadata artist="a" -metadata album="a" out.mp3

// remove audio
ffmpeg -i in.mp4 -an out.mp4

// remove video
ffmpeg -i in.mp4 -vn out.mp3

// increase volume to 150%
ffmpeg -i in.mp4 -filter:a "volume=1.5" out.mp4

// decrease volume by 50%
ffmpeg -i in.mp4 -filter:a "volume=0.5" out.mp4

//deinterlace
ffmpeg -i in.mp4 -vf yadif out.mp4

//replace the first 90 seconds of audio with silence
ffmpeg -i in.mp4 -vcodec copy -af "volume=enable='lte(t,90)':volume=0" out.mp4

//replace all audio between 1'20" and 1'30" with silence:
ffmpeg -i in.mp4 -vcodec copy -af "volume=enable='between(t,80,90)':volume=0" out.mp4

// rotate 180° (apply transpose=1 twice)
// transpose values:
// 0 = 90° counter-clockwise and vertical flip (default)
// 1 = 90° clockwise
// 2 = 90° counter-clockwise
// 3 = 90° clockwise and vertical flip
ffmpeg -i in.mp4 -vf "transpose=1, transpose=1" out.mp4

// flip vertical/horizontal
ffmpeg -i in.mp4 -vf "hflip,vflip" out.mp4

//scale
ffmpeg -i in.mp4 -vf scale=1024:789 out.mp4
ffmpeg -i in.mp4 -s 1280x720 -c:a copy out.mp4

// overlay, 1st input on layer 0, 2nd input on layer 1
ffmpeg -i in.mp4 -i overlay.png -filter_complex "[0:v][1:v] overlay=100:100" out.mp4
ffmpeg -loop 1 -i in.png -i in.mp3 -c:v libx264 -c:a aac -b:a 192k -shortest out.mp4
ffmpeg -i in.mp4 -i overlay.png -filter_complex "overlay=x=2160-800:y=3840-400" out.mp4

// cut out a clip, start at 0 seconds and record 3 seconds long
ffmpeg -i in.mp4 -ss 0 -c copy -t 3 out.mp4
ffmpeg -i in.mp4 -ss 00:00:00 -c copy -t 00:00:03 out.mp4

// cut out a clip, start at 00:00:00 seconds and record until 00:01:30
ffmpeg -i in.mp4 -ss 00:00:00 -to 00:01:30 -c:v copy -c:a copy out.mp4

// make transparent background
ffmpeg -i in.png -vf colorkey=white:0.3:0.5 out.png

// change a color via per-channel expressions (available comparisons: lt, lte, eq, gte, gt)
ffmpeg -i logo.png -vf "yadif,format=rgb24,lutrgb=r='if(gt(val,128),255,val)':g='if(gt(val,128),255,val)':b='if(gt(val,128),255,val)'" out.png

// colorchannelmixer coefficients come in groups of four (input r:g:b:a contribution per output channel); this example swaps red and blue
ffmpeg -i logo.png -filter_complex "colorchannelmixer=0:0:1:0:0:1:0:0:1:0:0:0" out.png

//Delay video by 3.84 seconds:
ffmpeg -i in.mp4 -itsoffset 3.84 -i in.mp4 -map 1:v -map 0:a -vcodec copy -acodec copy out.mp4

//Delay audio by 3.84 seconds:
ffmpeg -i in.mp4 -itsoffset 3.84 -i in.mp4 -map 0:v -map 1:a -vcodec copy -acodec copy out.mp4

// concatenate files (concat demuxer)
// file.txt example
file 'in1.mp4'
file 'in2.mp4'
file 'in3.mp4'
file 'in4.mp4'
// then run
ffmpeg -f concat -i file.txt -c copy out.mp4

//copy the video from in0.mp4 and audio from in1.mp4:
ffmpeg -i in0.mp4 -i in1.mp4 -c copy -map 0:0 -map 1:1 -shortest out.mp4

Build

MinGW only (gcc/g++), no MSYS2. You need prebuilt FFmpeg "dev" files (headers + *.dll.a import libraries) for Win32, plus the shared DLLs to run the result. FFmpeg itself only ships source code; Windows binaries come from third parties.

From this Win32 build directory:

  • DEV (headers + import libs): ffmpeg-7.1-2362-6aafe61-win32-dev-xpmod-sse.7z
  • SHARED (DLLs + tools): ffmpeg-7.1-2362-6aafe61-win32-shared-xpmod-sse.7z

Extract both into one folder. Put the DLLs etc. from the "shared" archive into a /bin subfolder.

Create: C:\ffmpeg\

Extract both archives into it (the folder structure should end up roughly like this):

  • C:\ffmpeg\include\libavformat\…
  • C:\ffmpeg\lib\libavformat.dll.a (among others)
  • C:\ffmpeg\bin\avformat-*.dll (among others)
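
To verify that the headers and import libraries are picked up, here is a minimal sketch (the file name versioncheck.cpp is just an example) that only prints version information; if it compiles, links, and runs, the layout above is correct:

#include <iostream>
 
extern "C" {
#include <libavformat/avformat.h>
#include <libavutil/avutil.h>
}
 
int main() {
    // version string of the FFmpeg build the DLLs come from
    std::cout << "FFmpeg: " << av_version_info() << "\n";
    // packed integer versions: (major << 16) | (minor << 8) | micro
    std::cout << "libavformat: " << avformat_version() << "\n";
    std::cout << "libavutil:   " << avutil_version() << "\n";
    return 0;
}

g++ -std=c++11 -O2 versioncheck.cpp -o versioncheck.exe -IC:\ffmpeg\include -LC:\ffmpeg\lib -lavformat -lavutil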

Demo programs

Probe/demux reader

Opens a media file and prints the container format, number of streams, and duration.

#include <iostream>
#include <string>
 
extern "C" {
#include <libavformat/avformat.h>
#include <libavutil/avutil.h>
}
 
static std::string fferr(int e) {
    char buf[AV_ERROR_MAX_STRING_SIZE] = {0};
    av_strerror(e, buf, sizeof(buf));
    return buf;
}
 
int main(int argc, char** argv) {
    if (argc < 2) {
        std::cerr << "Usage: probe.exe <mediafile>\n";
        return 1;
    }
 
    AVFormatContext* fmt = nullptr;
 
    int r = avformat_open_input(&fmt, argv[1], nullptr, nullptr);
    if (r < 0) {
        std::cerr << "avformat_open_input failed: " << fferr(r) << "\n";
        return 1;
    }
 
    r = avformat_find_stream_info(fmt, nullptr);
    if (r < 0) {
        std::cerr << "avformat_find_stream_info failed: " << fferr(r) << "\n";
        avformat_close_input(&fmt);
        return 1;
    }
 
    std::cout << "Format: " << (fmt->iformat ? fmt->iformat->name : "unknown") << "\n";
    std::cout << "Streams: " << fmt->nb_streams << "\n";
    std::cout << "Duration (us): " << fmt->duration << "\n";
 
    av_dump_format(fmt, 0, argv[1], 0);
 
    avformat_close_input(&fmt);
    return 0;
}

Compile:

g++ -std=c++11 -O2 main.cpp -o probe.exe -IC:\ffmpeg\include -LC:\ffmpeg\lib -lavformat -lavcodec -lavutil

Test (place the required DLLs next to the EXE)

probe.exe test.mp4

Remux

Copies the audio, video, and subtitle streams from the input into a new output container without re-encoding.

#include <iostream>
#include <vector>
#include <string>
 
extern "C" {
#include <libavformat/avformat.h>
#include <libavutil/avutil.h>
}
 
static std::string fferr(int e) {
    char buf[AV_ERROR_MAX_STRING_SIZE] = {0};
    av_strerror(e, buf, sizeof(buf));
    return buf;
}
 
int main(int argc, char** argv) {
    if (argc < 3) {
        std::cerr << "Usage: remux.exe <infile> <outfile>\n";
        return 1;
    }
 
    const char* in_filename  = argv[1];
    const char* out_filename = argv[2];
 
    AVFormatContext* ifmt = nullptr;
    AVFormatContext* ofmt = nullptr;
 
    int ret = avformat_open_input(&ifmt, in_filename, nullptr, nullptr);
    if (ret < 0) { std::cerr << "open input failed: " << fferr(ret) << "\n"; return 1; }
 
    ret = avformat_find_stream_info(ifmt, nullptr);
    if (ret < 0) { std::cerr << "find stream info failed: " << fferr(ret) << "\n"; avformat_close_input(&ifmt); return 1; }
 
    ret = avformat_alloc_output_context2(&ofmt, nullptr, nullptr, out_filename);
    if (ret < 0 || !ofmt) { std::cerr << "alloc output failed: " << fferr(ret) << "\n"; avformat_close_input(&ifmt); return 1; }
 
    std::vector<int> stream_mapping(ifmt->nb_streams, -1);
 
    for (unsigned i = 0; i < ifmt->nb_streams; i++) {
        AVStream* in_stream = ifmt->streams[i];
        AVCodecParameters* in_par = in_stream->codecpar;
 
        // only remux audio, video, and subtitle streams
        if (in_par->codec_type != AVMEDIA_TYPE_AUDIO &&
            in_par->codec_type != AVMEDIA_TYPE_VIDEO &&
            in_par->codec_type != AVMEDIA_TYPE_SUBTITLE) {
            continue;
        }
 
        AVStream* out_stream = avformat_new_stream(ofmt, nullptr);
        if (!out_stream) { std::cerr << "new stream failed\n"; ret = AVERROR(ENOMEM); break; }
 
        ret = avcodec_parameters_copy(out_stream->codecpar, in_par);
        if (ret < 0) { std::cerr << "copy codecpar failed: " << fferr(ret) << "\n"; break; }
 
        // reset the codec tag so the muxer can pick one valid for the new container
        out_stream->codecpar->codec_tag = 0;
        out_stream->time_base = in_stream->time_base;
 
        stream_mapping[i] = out_stream->index;
    }
 
    if (ret >= 0 && !(ofmt->oformat->flags & AVFMT_NOFILE)) {
        ret = avio_open(&ofmt->pb, out_filename, AVIO_FLAG_WRITE);
        if (ret < 0) std::cerr << "avio_open failed: " << fferr(ret) << "\n";
    }
 
    bool header_written = false;
    if (ret >= 0) {
        ret = avformat_write_header(ofmt, nullptr);
        if (ret < 0) std::cerr << "write_header failed: " << fferr(ret) << "\n";
        else header_written = true;
    }
 
    AVPacket* pkt = av_packet_alloc();
    if (!pkt) ret = AVERROR(ENOMEM);
 
    while (ret >= 0) {
        ret = av_read_frame(ifmt, pkt);
        if (ret < 0) break;
 
        int in_si = pkt->stream_index;
        if (in_si < 0 || in_si >= (int)stream_mapping.size() || stream_mapping[in_si] < 0) {
            av_packet_unref(pkt);
            continue;
        }
 
        AVStream* in_stream  = ifmt->streams[in_si];
        AVStream* out_stream = ofmt->streams[stream_mapping[in_si]];
 
        pkt->stream_index = stream_mapping[in_si];
        // rescale timestamps from the input stream's time base to the output stream's
        av_packet_rescale_ts(pkt, in_stream->time_base, out_stream->time_base);
        pkt->pos = -1; // byte position is unknown in the new container
 
        ret = av_interleaved_write_frame(ofmt, pkt);
        av_packet_unref(pkt);
 
        if (ret < 0) std::cerr << "write_frame failed: " << fferr(ret) << "\n";
    }
 
    // EOF just means end of input, not an error
    if (ret == AVERROR_EOF) ret = 0;
 
    // write the trailer only if the header was successfully written
    if (header_written) av_write_trailer(ofmt);
 
    av_packet_free(&pkt);
    avformat_close_input(&ifmt);
    if (ofmt && !(ofmt->oformat->flags & AVFMT_NOFILE)) avio_closep(&ofmt->pb);
    avformat_free_context(ofmt);
 
    return ret < 0 ? 1 : 0;
}
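
Compile the remuxer the same way as the probe example (file name remux.cpp assumed):

g++ -std=c++11 -O2 remux.cpp -o remux.exe -IC:\ffmpeg\include -LC:\ffmpeg\lib -lavformat -lavcodec -lavutil

Example call (remuxes MP4 into Matroska without re-encoding):

remux.exe in.mp4 out.mkv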