Compare commits

...

59 Commits

Author SHA1 Message Date
Ashley
b2d5fcadf3 Merge pull request #315 from amiaopensource/italic
use italic
2018-03-30 11:40:13 -04:00
Reto Kromer
05395f4670 use italic 2018-03-30 10:01:48 +02:00
Ashley
078d20824c Merge pull request #314 from amiaopensource/imagemagick
adds imagemagick section
2018-03-29 13:32:00 -04:00
Ashley Blewer
44c8661e96 subs amp for escaped amp 2018-03-29 13:28:00 -04:00
Ashley Blewer
13bc9bef10 adds imagemagick section 2018-03-29 11:46:02 -04:00
Katherine Frances Nagels
585e0e1f18 Add info on concatenating files of different framerates (#308) 2018-02-18 20:29:35 +01:00
Katherine Frances Nagels
1ef6c3305b Concatenate files of different resolutions (#307) 2018-02-10 09:31:48 +01:00
Katherine Frances Nagels
9c4da4102a Merge pull request #306 from amiaopensource/style
unify coding style
2018-02-10 19:25:31 +13:00
Reto Kromer
c47a7a534f unify coding style 2018-02-10 06:53:13 +01:00
Reto Kromer
4d8fdc9f4e uniform coding style 2018-02-10 06:50:06 +01:00
Reto Kromer
ae590706b0 Merge pull request #305 (Code style: add enclosing quotation marks)
Code style: add enclosing quotation marks
2018-02-10 06:45:22 +01:00
kfrn
e84f0a9fb6 Code style: add enclosing quotation marks 2018-02-10 12:20:21 +13:00
Reto Kromer
e2850d38c0 simplify code (#304) 2018-02-09 21:22:31 +01:00
Reto Kromer
8927478efb Merge pull request #303 (uniform syntax) 2018-02-09 17:06:52 +01:00
Reto Kromer
3c815b1f3b uniform syntax 2018-02-09 16:20:26 +01:00
Ashley
58aa0549ff Merge pull request #301 from amiaopensource/css
replace tabs by spaces
2018-02-05 11:00:11 -05:00
Ashley
4a83b45e7e Merge pull request #302 from amiaopensource/html
fix HTML5
2018-02-05 10:59:59 -05:00
Reto Kromer
f6b44c56ce fix HTML5 2018-02-04 20:58:14 +01:00
Reto Kromer
8149aa163c replace tabs by spaces 2018-02-04 20:45:25 +01:00
Reto Kromer
94f935198f Merge pull request #300 (add alias) 2018-01-25 18:44:17 +01:00
Reto Kromer
c04c9ff12f add alias 2018-01-25 15:55:21 +01:00
Reto Kromer
64787edd4e Merge pull request #299 (Add explanation about input files for join_different_files recipe) 2018-01-25 10:41:06 +01:00
kfrn
f995e8b483 Improve explanation about input files for join_different_files recipe 2018-01-25 18:28:18 +13:00
Katherine Frances Nagels
dea85d1e47 Merge pull request #297 from amiaopensource/uniform_style
uniform style
2018-01-25 07:47:24 +13:00
Reto Kromer
e9fd3fd002 uniform style 2018-01-24 13:16:28 +01:00
Katherine Frances Nagels
debc510205 New recipe: concat files of different types (+ a few fixups) (#296) 2018-01-24 13:10:22 +01:00
Andrew Weaver
32260c405f Add stream md5s (#294) 2018-01-05 18:26:10 +01:00
Reto Kromer
d142564f36 add note about quantisation rates and MD5 (#293) 2017-12-31 09:16:48 +01:00
Reto Kromer
d41674a267 fix typo 2017-12-28 07:07:40 +01:00
Reto Kromer
99450e61de a little housekeeping
- use secure protocol whenever possible
- alignment
2017-12-24 13:19:54 +01:00
Ashley
1fec9b21c8 Merge pull request #290 from amiaopensource/embed-subs
adds embedded subtitles command c/o @federicomenaquintero #289
2017-12-11 09:43:46 -05:00
Ashley Blewer
42189e5b94 capitalize container/sub formats 2017-12-11 09:22:22 -05:00
Ashley
929f92a52a Merge pull request #291 from amiaopensource/mod-strip-metadata
consistent aliases used for strip metadata
2017-12-11 09:16:26 -05:00
Ashley Blewer
02e2f11718 consistent aliases used for strip metadata 2017-12-10 21:41:57 -05:00
Ashley Blewer
10636e24e2 adds embedded subtitles command c/o @federicomenaquintero #289 2017-12-10 21:38:55 -05:00
Reto Kromer
61b890d31c delete double spaces 2017-12-05 21:05:51 +01:00
Reto Kromer
85a79d2eb5 Merge pull request #288 (delete useless spaces) 2017-12-05 21:00:30 +01:00
Reto Kromer
b5ec56174a delete useless spaces 2017-12-05 20:51:45 +01:00
Ashley
f0e0cf8ed3 Merge pull request #286 from amiaopensource/life
adds game of life to test files
2017-12-04 14:31:08 -05:00
Ashley
5c1c336d77 adds extra linebreak 2017-12-04 14:30:35 -05:00
Ashley
d71793583e Merge pull request #285 from amiaopensource/link-to-site-locally
links to website permalink even when loading locally
2017-12-04 14:26:54 -05:00
Ashley
6705bdf41d Merge pull request #282 from amiaopensource/smpte-sine
smpte_sinewave should not end in .wav
2017-12-04 14:26:38 -05:00
Ashley
25e779a59f Merge pull request #284 from amiaopensource/streaming-vs-saving
explains difference between streaming w ffplay and saving w ffmpeg
2017-12-03 21:10:48 -05:00
Ashley
ba0852a957 Merge pull request #287 from amiaopensource/http-patch
link without http wasn't cutting it; threw error
2017-12-03 21:10:25 -05:00
Ashley
88024c040f link without http wasn't cutting it; threw error 2017-12-03 20:02:32 -05:00
Ashley Blewer
3d9b9edf1c adds game of life to test files 2017-12-03 13:31:03 -05:00
Ashley Blewer
c0326ad7d9 links to website permalink even when loading locally 2017-12-03 12:14:33 -05:00
Ashley Blewer
0cb6827b39 explains difference between streaming w ffplay and saving w ffmpeg 2017-12-03 11:52:51 -05:00
Reto Kromer
0d68614c04 Merge pull request #283 (removes extra colon in BRNG recipe description) 2017-12-03 00:08:36 +01:00
Ashley
1e86b70ba4 removes extra colon in BRNG recipe description 2017-12-01 17:55:54 -05:00
Ashley
ced142a215 Merge pull request #281 from amiaopensource/rm-breakall
lifts the break-all and sets a max-width on code for long lines
2017-11-30 16:52:15 -05:00
Ashley
bf301daa71 smpte_sinewave should not end in .wav 2017-11-30 16:23:24 -05:00
Ashley Blewer
278ac2baae allows code to break for small screens only 2017-11-28 16:16:50 -05:00
Ashley Blewer
10b8e4c941 lifts the break-all and sets a max-width on code for long lines 2017-11-28 16:06:43 -05:00
Ashley
1d1b3e4eac Merge pull request #280 from amiaopensource/andrew
adds andrew as maintainer
2017-11-28 14:11:16 -05:00
Ashley Blewer
5a3e437d76 adds andrew as maintainer 2017-11-28 12:38:41 -05:00
Ashley
75a7aa1299 Merge pull request #278 from amiaopensource/modern_HTML
use modern HTML
2017-11-13 15:57:46 +01:00
Reto Kromer
cf13529485 use modern HTML 2017-11-13 15:31:07 +01:00
Katherine Frances Nagels
7c03ae2f80 Add recipes for merging files & cropping video (#277) 2017-11-05 20:13:29 +01:00
12 changed files with 434 additions and 138 deletions


@@ -44,6 +44,9 @@ html, body {
"content"
"footer";
}
code {
word-break: break-all;
}
}
@media only screen and (min-width: 1000px) {
@@ -108,6 +111,10 @@ h3 {
font-size: 1.5em;
}
h4 {
font-size: 1.2em;
}
.intro-lead {
font-family: 'Montserrat', sans-serif;
font-size: 1em;
@@ -118,8 +125,8 @@ code {
color: #c7254e;
background-color: #f9f2f4;
border-radius: 4px;
- word-break: break-all;
word-wrap: break-word;
+ max-width: 800px;
white-space: normal;
display: inline-block;
}
@@ -150,6 +157,13 @@ img {
text-align: center;
}
.sample-image-small {
margin: 0 auto;
margin-bottom: 18px;
max-width: 250px;
text-align: center;
}
div {
font-family: 'Merriweather', serif;
color: white;
@@ -240,17 +254,17 @@ nav .heading {
.hiding {
opacity: 0;
height: 0;
overflow: hidden;
}
input {
- position: absolute;
- left: -999em
+ position: absolute;
+ left: -999em;
}
input[type=checkbox]:checked + div {
opacity: 1;
height: auto;
overflow: hidden;
transition: opacity .5s linear, height .5s linear;

Binary file not shown (new image, 245 KiB)

Binary file not shown (new image, 167 KiB)

Binary file not shown (new image, 146 KiB)

BIN img/crop_example_orig.png Normal file (436 KiB; binary not shown)

BIN img/life.gif Normal file (574 KiB; binary not shown)


@@ -1,14 +1,14 @@
<!DOCTYPE html>
<html lang="en">
<head>
<title>ffmprovisr</title>
<meta name="viewport" charset="utf-8" content="text/html, width=device-width, initial-scale=1">
<link href="https://fonts.googleapis.com/css?family=Montserrat%7CMerriweather" rel="stylesheet" type="text/css">
<link rel="stylesheet" href="css/css.css">
<link rel="icon" href="img/vhs.ico">
<script src="js/jquery.min.js"></script>
<script src="js/js.js"></script>
</head>
<body>
<div class="grid">
@@ -37,6 +37,8 @@
<a href="#ocr"><div class="contents-list">Use OCR</div></a>
<a href="#perceptual-similarity"><div class="contents-list">Compare similarity of videos</div></a>
<a href="#other"><div class="contents-list">Something else</div></a>
<a href="#similar-tools"><div class="contents-list">Similar tools: tips &amp; tricks</div></a>
<a href="#imagemagick"><div class="contents-list">ImageMagick</div></a>
</nav>
<div class="content">
@@ -46,16 +48,16 @@
<p>FFmpeg is a powerful tool for manipulating audiovisual files. Unfortunately, it also has a steep learning curve, especially for users unfamiliar with a command line interface. This app helps users through the command generation process so that more people can reap the benefits of FFmpeg.</p>
<p>Each button displays helpful information about how to perform a wide variety of tasks using FFmpeg. To use this site, click on the task you would like to perform. A new window will open up with a sample command and a description of how that command works. You can copy this command and understand how the command works with a breakdown of each of the flags.</p>
<span class="intro-lead">Tutorials</span>
- <p>For FFmpeg basics, check out the program's <a href="https://www.ffmpeg.org/" target="_blank">official website</a>.</p>
+ <p>For FFmpeg basics, check out the program's <a href="https://ffmpeg.org/" target="_blank">official website</a>.</p>
<p>For instructions on how to install FFmpeg on Mac, Linux, and Windows, refer to Reto Kromer's <a href="https://avpres.net/FFmpeg/#ch1" target="_blank">installation instructions</a>.</p>
- <p>For Bash and command line basics, try the <a href="https://learnpythonthehardway.org/book/appendixa.html" target="_blank">Command Line Crash Course</a>. For a little more context presented in an ffmprovisr style, try <a href="http://explainshell.com/" target="_blank">explainshell.com</a>!</p>
+ <p>For Bash and command line basics, try the <a href="https://learnpythonthehardway.org/book/appendixa.html" target="_blank">Command Line Crash Course</a>. For a little more context presented in an ffmprovisr style, try <a href="https://explainshell.com/" target="_blank">explainshell.com</a>!</p>
<span class="intro-lead">License</span>
<p class="license">
<a href="https://creativecommons.org/licenses/by/4.0/" target="_blank"><img alt="Creative Commons License" src="img/cc.png"></a><br>
This work is licensed under a <a href="https://creativecommons.org/licenses/by/4.0/" target="_blank">Creative Commons Attribution 4.0 International License</a>.
</p>
<span class="intro-lead">Sister projects</span>
<p><a href="http://dd388.github.io/crals/" target="_blank">Script Ahoy</a>: Community Resource for Archivists and Librarians Scripting</p>
<p><a href="https://dd388.github.io/crals/" target="_blank">Script Ahoy</a>: Community Resource for Archivists and Librarians Scripting</p>
<p><a href="https://datapraxis.github.io/sourcecaster/" target="_blank">The Sourcecaster</a>: an app that helps you use the command line to work through common challenges that come up when working with digital primary sources.</p>
<p><a href="https://amiaopensource.github.io/cable-bible/" target="_blank">Cable Bible</a>: A Guide to Cables and Connectors Used for Audiovisual Tech</p>
</div>
@@ -83,32 +85,47 @@
</div>
<!-- End Basic structure of an FFmpeg command -->
<!-- Streaming vs. Saving -->
<label class="recipe" for="streaming-saving">Streaming vs. Saving</label>
<input type="checkbox" id="streaming-saving">
<div class="hiding">
<h3>Streaming vs. Saving</h3>
<p>FFplay allows you to stream created video, while FFmpeg allows you to save it to a file.</p>
<p>The following command creates and saves a five-second video of SMPTE bars:</p>
<p><code>ffmpeg -f lavfi -i smptebars=size=640x480 -t 5 output_file</code></p>
<p>This command plays and streams SMPTE bars but does not save them on the computer:</p>
<p><code>ffplay -f lavfi smptebars=size=640x480</code></p>
<p>The main difference is small but significant: the <code>-i</code> flag is required for FFmpeg but not for FFplay. Additionally, the FFmpeg command needs <code>-t 5</code> and <code>output_file</code> added to specify the length of time to record and the place to save the video.</p>
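<p>As a further illustration (a hedged aside, not part of the original recipe), the same preview-before-saving pattern works with any lavfi generator; for instance, FFmpeg's built-in test pattern:</p>
<p><code>ffplay -f lavfi testsrc=size=640x480</code></p>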
<p class="link"></p>
</div>
<!-- End Streaming vs. Saving -->
</div>
<div class="well">
<h2 id="concepts">Learn about more advanced FFmpeg concepts</h2>
<h2 id="concepts">Learn about more advanced FFmpeg concepts</h2>
<!-- Filtergraph explanation -->
<label class="recipe" for="filtergraphs">Filtergraphs</label>
<input type="checkbox" id="filtergraphs">
<div class="hiding">
<h3>Filtergraphs</h3>
- <p>Many FFmpeg commands use filters that manipulate the video or audio stream in some way: for example, <a href="http://ffmpeg.org/ffmpeg-filters.html#hflip" target="_blank">hflip</a> to horizontally flip a video, or <a href="http://ffmpeg.org/ffmpeg-filters.html#amerge-1" target="_blank">amerge</a> to merge two or more audio tracks into a single stream.</p>
+ <p>Many FFmpeg commands use filters that manipulate the video or audio stream in some way: for example, <a href="https://ffmpeg.org/ffmpeg-filters.html#hflip" target="_blank">hflip</a> to horizontally flip a video, or <a href="https://ffmpeg.org/ffmpeg-filters.html#amerge-1" target="_blank">amerge</a> to merge two or more audio tracks into a single stream.</p>
<p>The use of a filter is signalled by the flag <code>-vf</code> (video filter) or <code>-af</code> (audio filter), followed by the name and options of the filter itself. For example, take the <a href="#convert-colourspace">convert colourspace</a> command:</p>
<p><code>ffmpeg -i <i>input_file</i> -c:v libx264 -vf colormatrix=<i>src</i>:<i>dst</i> <i>output_file</i></code></p>
<p>Here, <a href="http://ffmpeg.org/ffmpeg-filters.html#colormatrix" target="_blank">colormatrix</a> is the filter used, with <i>src</i> and <i>dst</i> representing the source and destination colourspaces. This part following the <code>-vf</code> is a <b>filtergraph</b>.</p>
<p>Here, <a href="https://ffmpeg.org/ffmpeg-filters.html#colormatrix" target="_blank">colormatrix</a> is the filter used, with <i>src</i> and <i>dst</i> representing the source and destination colourspaces. This part following the <code>-vf</code> is a <b>filtergraph</b>.</p>
<p>It is also possible to apply multiple filters to an input, which are sequenced together in the filtergraph. A chained set of filters is called a filter chain, and a filtergraph may include multiple filter chains. Filters in a filterchain are separated from each other by commas (<code>,</code>), and filterchains are separated from each other by semicolons (<code>;</code>). For example, take the <a href="#inverse-telecine">inverse telecine</a> command:</p>
<p><code>ffmpeg -i <i>input_file</i> -c:v libx264 -vf "fieldmatch,yadif,decimate" <i>output_file</i></code></p>
<p>Here we have a filtergraph including one filter chain, which is made up of three video filters.</p>
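<p>As a hedged sketch (not drawn from this page), a filtergraph with two filter chains separated by a semicolon might look like the following, where one chain flips the video with <code>hflip</code> and the other halves the audio volume with <code>volume</code>; the <code>[v]</code> and <code>[a]</code> labels are arbitrary names chosen for this example:</p>
<p><code>ffmpeg -i <i>input_file</i> -filter_complex "[0:v]hflip[v];[0:a]volume=0.5[a]" -map "[v]" -map "[a]" <i>output_file</i></code></p>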
- <p>It is often prudent to enclose your filtergraph in quotation marks; this means that you can use spaces within the filtergraph. Using the inverse telecine example again, the following filter commands are all valid and equivalent:
- <ul>
- <li><code>-vf fieldmatch,yadif,decimate</code></li>
- <li><code>-vf "fieldmatch,yadif,decimate"</code></li>
- <li><code>-vf "fieldmatch, yadif, decimate"</code></li>
- </ul>
- but <code>-vf fieldmatch, yadif, decimate</code> is not valid.</p>
+ <p>It is often prudent to enclose your filtergraph in quotation marks; this means that you can use spaces within the filtergraph. Using the inverse telecine example again, the following filter commands are all valid and equivalent:</p>
+ <ul>
+ <li><code>-vf fieldmatch,yadif,decimate</code></li>
+ <li><code>-vf "fieldmatch,yadif,decimate"</code></li>
+ <li><code>-vf "fieldmatch, yadif, decimate"</code></li>
+ </ul>
+ <p>but <code>-vf fieldmatch, yadif, decimate</code> is not valid.</p>
<p>The ordering of the filters is significant. Video filters are applied in the order given, with the output of one filter being passed along as the input to the next filter in the chain. In the example above, <code>fieldmatch</code> reconstructs the original frames from the inverse telecined video, <code>yadif</code> deinterlaces (this is a failsafe in case any combed frames remain, for example if the source mixes telecined and real interlaced content), and <code>decimate</code> deletes duplicated frames. Clearly, it is not possible to delete duplicated frames before those frames are reconstructed.</p>
<h4>Notes</h4>
<ul>
<li><code>-vf</code> is an alias for <code>-filter:v</code></li>
<li>If the command involves more than one input or output, you must use the flag <code>-filter_complex</code> instead of <code>-vf</code>.</li>
<li>Straight quotation marks ("like this") rather than curved quotation marks (“like this”) should be used.</li>
</ul>
@@ -140,7 +157,7 @@
</ul>
<p>To map <i>all</i> streams in the input file to the output file, use <code>-map 0</code>. However, note that not all container formats can include all stream types: for example, .mp4 cannot contain timecode.</p>
<p>When no mapping is specified in an ffmpeg command, the default for video files is to take just one video and one audio stream for the output: other stream types, such as timecode or subtitles, will not be copied to the output file by default. If multiple video or audio streams are present, the best quality one is automatically selected by FFmpeg.</p>
- <p>For more information, check out the FFmpeg wiki <a href="https://trac.ffmpeg.org/wiki/Map" target="_blank">Map</a> page, and the official FFmpeg <a href="http://ffmpeg.org/ffmpeg.html#Advanced-options" target="_blank">documentation on <code>-map</code></a>.</p>
+ <p>For more information, check out the FFmpeg wiki <a href="https://trac.ffmpeg.org/wiki/Map" target="_blank">Map</a> page, and the official FFmpeg <a href="https://ffmpeg.org/ffmpeg.html#Advanced-options" target="_blank">documentation on <code>-map</code></a>.</p>
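<p>To make the mapping behaviour concrete, here is a minimal sketch (assembled for this note, not quoted from the page) that stream-copies every stream of an input into a Matroska file, a container that accepts nearly all stream types:</p>
<p><code>ffmpeg -i <i>input_file</i> -map 0 -c copy <i>output_file</i>.mkv</code></p>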
<p class="link"></p>
</div>
<!-- End Stream Mapping explanation -->
@@ -236,18 +253,19 @@
<input type="checkbox" id="transcode_h264">
<div class="hiding">
<h3>Transcode to H.264</h3>
- <p><code>ffmpeg -i <i>input_file</i> -c:v libx264 -pix_fmt yuv420p -c:a copy <i>output_file</i></code></p>
+ <p><code>ffmpeg -i <i>input_file</i> -c:v libx264 -pix_fmt yuv420p -c:a aac <i>output_file</i></code></p>
<p>This command takes an input file and transcodes it to H.264 with an .mp4 wrapper, keeping the audio the same codec as the original. The libx264 codec defaults to a “medium” preset for compression quality and a CRF of 23. CRF stands for constant rate factor and determines the quality and file size of the resulting H.264 video. A low CRF means high quality and large file size; a high CRF means the opposite.</p>
<dl>
<dt>ffmpeg</dt><dd>starts the command</dd>
<dt>-i <i>input_file</i></dt><dd>path, name and extension of the input file</dd>
<dt>-c:v libx264</dt><dd>tells FFmpeg to encode the video stream as H.264</dd>
- <dt>-pix_fmt yuv420p</dt><dd> libx264 will use a chroma subsampling scheme that is the closest match to that of the input. This can result in YC<sub>B</sub>C<sub>R</sub> 4:2:0, 4:2:2, or 4:4:4 chroma subsampling. QuickTime and most other non-FFmpeg based players can't decode H.264 files that are not 4:2:0. In order to allow the video to play in all players, you can specify 4:2:0 chroma subsampling.</dd>
- <dt>-c:a copy</dt><dd>tells FFmpeg to copy the audio stream without re-encoding it</dd>
+ <dt>-pix_fmt yuv420p</dt><dd>libx264 will use a chroma subsampling scheme that is the closest match to that of the input. This can result in YC<sub>B</sub>C<sub>R</sub> 4:2:0, 4:2:2, or 4:4:4 chroma subsampling. QuickTime and most other non-FFmpeg based players can't decode H.264 files that are not 4:2:0. In order to allow the video to play in all players, you can specify 4:2:0 chroma subsampling.</dd>
+ <dt>-c:a aac</dt><dd>encode audio as AAC.<br>
+ AAC is the codec most often used for audio streams within an .mp4 container.</dd>
<dt><i>output_file</i></dt><dd>path, name and extension of the output file</dd>
</dl>
<p>In order to use the same basic command to make a higher quality file, you can add some of these presets:</p>
- <p><code>ffmpeg -i <i>input_file</i> -c:v libx264 -pix_fmt yuv420p -preset veryslow -crf 18 -c:a copy <i>output_file</i></code></p>
+ <p><code>ffmpeg -i <i>input_file</i> -c:v libx264 -pix_fmt yuv420p -preset veryslow -crf 18 -c:a aac <i>output_file</i></code></p>
<dl>
<dt>-preset <i>veryslow</i></dt><dd>This option tells FFmpeg to use the slowest preset possible for the best compression quality.<br>
Available presets, from slowest to fastest, are: <code>veryslow</code>, <code>slower</code>, <code>slow</code>, <code>medium</code>, <code>fast</code>, <code>faster</code>, <code>veryfast</code>, <code>superfast</code>, <code>ultrafast</code>.</dd>
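<p>One further option, offered as a hedged suggestion rather than part of the recipe above: if the H.264 file is destined for web streaming, adding <code>-movflags +faststart</code> moves the index (the moov atom) to the front of the .mp4 so playback can begin before the file has fully downloaded:</p>
<p><code>ffmpeg -i <i>input_file</i> -c:v libx264 -pix_fmt yuv420p -preset veryslow -crf 18 -c:a aac -movflags +faststart <i>output_file</i>.mp4</code></p>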
@@ -305,7 +323,7 @@
<dt>-slices 16</dt><dd>Each frame is split into 16 slices. 16 is a good trade-off between filesize and encoding time.</dd>
<dt>-c:a copy</dt><dd>copies all mapped audio streams.</dd>
<dt><i>output_file</i>.mkv</dt><dd>path and name of the output file. Use the <code>.mkv</code> extension to save your file in a Matroska container. Optionally, choose a different extension if you want a different container, such as <code>.mov</code> or <code>.avi</code>.</dd>
- <dt>-f framemd5</dt><dd> Decodes video with the framemd5 muxer in order to generate MD5 checksums for every frame of your input file. This allows you to verify losslessness when compared against the framemd5s of the output file.</dd>
+ <dt>-f framemd5</dt><dd>Decodes video with the framemd5 muxer in order to generate MD5 checksums for every frame of your input file. This allows you to verify losslessness when compared against the framemd5s of the output file.</dd>
<dt>-an</dt><dd>ignores the audio stream when creating framemd5 (audio no)</dd>
<dt><i>framemd5_output_file</i></dt><dd>path, name and extension of the framemd5 file.</dd>
</dl>
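<p>To sketch the verification step implied above (a hedged example with hypothetical file names): generate a framemd5 report for the output file as well, then compare the two reports; identical reports indicate a lossless transcode:</p>
<p><code>ffmpeg -i <i>output_file</i>.mkv -an -f framemd5 <i>framemd5_output_2</i></code></p>
<p><code>diff <i>framemd5_output_file</i> <i>framemd5_output_2</i></code></p>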
@@ -318,7 +336,7 @@
<input type="checkbox" id="dvd_to_file">
<div class="hiding">
<h3>Convert DVD to H.264</h3>
- <p><code>ffmpeg -i concat:<i>input_file1</i>\|<i>input_file2</i>\|<i>input_file3</i> -c:v libx264 -c:a copy <i>output_file</i>.mp4</code></p>
+ <p><code>ffmpeg -i concat:<i>input_file_1</i>\|<i>input_file_2</i>\|<i>input_file_3</i> -c:v libx264 -c:a aac <i>output_file</i>.mp4</code></p>
<p>This command allows you to create an H.264 file from a DVD source that is not copy-protected.</p>
<p>Before encoding, you'll need to establish which of the .VOB files on the DVD or .iso contain the content that you wish to encode. Inside the VIDEO_TS directory, you will see a series of files with names like VTS_01_0.VOB, VTS_01_1.VOB, etc. Some of the .VOB files will contain menus, special features, etc., so locate the ones that contain target content by playing them back in VLC.</p>
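<p>As an alternative to VLC (a hedged suggestion, not in the original text), FFplay itself works for this quick check, e.g. <code>ffplay VTS_01_1.VOB</code>.</p>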
<dl>
@@ -327,17 +345,18 @@
<code>-i concat:VTS_01_1.VOB\|VTS_01_2.VOB\|VTS_01_3.VOB</code><br>
The backslash is simply an escape character for the pipe (<b>|</b>).</dd>
<dt>-c:v libx264</dt><dd>sets the video codec as H.264</dd>
- <dt>-c:a copy</dt><dd>audio remains as-is (no re-encode)</dd>
+ <dt>-c:a aac</dt><dd>encode audio as AAC.<br>
+ AAC is the codec most often used for audio streams within an .mp4 container.</dd>
<dt><i>output_file.mp4</i></dt><dd>path and name of the output file</dd>
</dl>
<p>It's also possible to adjust the quality of your output by setting the <b>-crf</b> and <b>-preset</b> values:</p>
- <p><code>ffmpeg -i concat:<i>input_file1</i>\|<i>input_file2</i>\|<i>input_file3</i> -c:v libx264 -crf 18 -preset veryslow -c:a copy <i>output_file</i>.mp4</code></p>
+ <p><code>ffmpeg -i concat:<i>input_file_1</i>\|<i>input_file_2</i>\|<i>input_file_3</i> -c:v libx264 -crf 18 -preset veryslow -c:a aac <i>output_file</i>.mp4</code></p>
<dl>
<dt>-crf 18</dt><dd>sets the constant rate factor to a visually lossless value. Libx264 defaults to a <a href="https://trac.ffmpeg.org/wiki/Encode/H.264#crf" target="_blank">crf of 23</a>, considered medium quality; a smaller CRF value produces a larger and higher quality video.</dd>
<dt>-preset veryslow</dt><dd>A slower preset will result in better compression and therefore a higher-quality file. The default is <b>medium</b>; slower presets are <b>slow</b>, <b>slower</b>, and <b>veryslow</b>.</dd>
</dl>
<p>Bear in mind that by default, libx264 will only encode a single video stream and a single audio stream, picking the best of the options available. To preserve all video and audio streams, add <b>-map</b> parameters:</p>
- <p><code>ffmpeg -i concat:<i>input_file1</i>\|<i>input_file2</i> -map 0:v -map 0:a -c:v libx264 -c:a copy <i>output_file</i>.mp4</code></p>
+ <p><code>ffmpeg -i concat:<i>input_file_1</i>\|<i>input_file_2</i> -map 0:v -map 0:a -c:v libx264 -c:a aac <i>output_file</i>.mp4</code></p>
<dl>
<dt>-map 0:v</dt><dd>encodes all video streams</dd>
<dt>-map 0:a</dt><dd>encodes all audio streams</dd>
@@ -408,7 +427,7 @@
<input type="checkbox" id="append_mp3">
<div class="hiding">
<h3>Generate two access MP3s from input: one with appended audio (such as a copyright notice) and one unmodified</h3>
- <p> <code>ffmpeg -i <i>input_file</i> -i <i>input_file_to_append</i> -filter_complex "[0:a:0]asplit=2[a][b];[b]afifo[bb];[1:a:0][bb]concat=n=2:v=0:a=1[concatout]" -map "[a]" -codec:a libmp3lame -dither_method modified_e_weighted -qscale:a 2 <i>output_file.mp3</i> -map "[concatout]" -codec:a libmp3lame -dither_method modified_e_weighted -qscale:a 2 <i>output_file_appended.mp3</i></code></p>
+ <p><code>ffmpeg -i <i>input_file</i> -i <i>input_file_to_append</i> -filter_complex "[0:a:0]asplit=2[a][b];[b]afifo[bb];[1:a:0][bb]concat=n=2:v=0:a=1[concatout]" -map "[a]" -codec:a libmp3lame -dither_method modified_e_weighted -qscale:a 2 <i>output_file.mp3</i> -map "[concatout]" -codec:a libmp3lame -dither_method modified_e_weighted -qscale:a 2 <i>output_file_appended.mp3</i></code></p>
<p>This script allows you to generate two derivative audio files from a master while appending audio from a separate file (for example a copyright or institutional notice) to one of them.</p>
<dl>
<dt>ffmpeg</dt><dd>starts the command</dd>
@@ -568,7 +587,7 @@
For example, to convert from Rec.601 to Rec.709, you would use <code>-vf colormatrix=bt601:bt709</code>.</dd>
<dt><i>output file</i></dt><dd>path, name and extension of the output file</dd>
</dl>
- <p><b>Note:</b> Converting between colourspaces with FFmpeg can be done via either the <b>colormatrix</b> or <b>colorspace</b> filters, with colorspace allowing finer control (individual setting of colourspace, transfer characteristics, primaries, range, pixel format, etc). See <a href="https://trac.ffmpeg.org/wiki/colorspace" target="_blank">this</a> entry on the FFmpeg wiki, and the FFmpeg documentation for <a href="http://ffmpeg.org/ffmpeg-filters.html#colormatrix" target="_blank">colormatrix</a> and <a href="http://ffmpeg.org/ffmpeg-filters.html#colorspace" target="_blank">colorspace</a>.</p>
+ <p><b>Note:</b> Converting between colourspaces with FFmpeg can be done via either the <b>colormatrix</b> or <b>colorspace</b> filters, with colorspace allowing finer control (individual setting of colourspace, transfer characteristics, primaries, range, pixel format, etc). See <a href="https://trac.ffmpeg.org/wiki/colorspace" target="_blank">this</a> entry on the FFmpeg wiki, and the FFmpeg documentation for <a href="https://ffmpeg.org/ffmpeg-filters.html#colormatrix" target="_blank">colormatrix</a> and <a href="https://ffmpeg.org/ffmpeg-filters.html#colorspace" target="_blank">colorspace</a>.</p>
<hr>
<h4>Convert colourspace and embed colourspace metadata</h4>
<p><code>ffmpeg -i <i>input_file</i> -c:v libx264 -vf colormatrix=src:dst -color_primaries <i>val</i> -color_trc <i>val</i> -colorspace <i>val</i> <i>output_file</i></code></p>
@@ -597,7 +616,7 @@
<p><span class="beware"></span> Using this command it is possible to add Rec.709 tags to a file that is actually Rec.601 (etc), so apply with caution!</p>
<p>These commands are relevant for H.264 and H.265 videos, encoded with <code>libx264</code> and <code>libx265</code> respectively.</p>
<p><b>Note:</b> If you wish to embed colourspace metadata <i>without</i> changing to another colourspace, omit <code>-vf colormatrix=src:dst</code>. However, since it is <code>libx264</code>/<code>libx265</code> that writes the metadata, it's not possible to add these tags without re-encoding the video stream.</p>
- <p>For all possible values for <code>-color_primaries</code>, <code>-color_trc</code>, and <code>-colorspace</code>, see the FFmpeg documentation on <a href="https://www.ffmpeg.org/ffmpeg-codecs.html#Codec-Options" target="_blank">codec options</a>.</p>
+ <p>For all possible values for <code>-color_primaries</code>, <code>-color_trc</code>, and <code>-colorspace</code>, see the FFmpeg documentation on <a href="https://ffmpeg.org/ffmpeg-codecs.html#Codec-Options" target="_blank">codec options</a>.</p>
<hr>
<p id="fn1" class="footnote">1. Out of step with the regular pattern, <code>-color_trc</code> doesnt accept <code>bt470bg</code>; it is instead here referred to directly as gamma.<br>
In the Rec.601 standard, 525-line/NTSC and 625-line/PAL video have assumed gammas of 2.2 and 2.8 respectively. <a href="#ref1" title="Jump back."></a></p>
@@ -661,11 +680,41 @@
<dt>-color_trc <i>VALUE</i> -movflags write_colr</dt><dd>Set a new color_transfer value.</dd>
<dt>-field_order <i>VALUE</i></dt><dd>Set interlacement values.</dd>
</dl>
- <p>The possible values for <code>-color_primaries</code>, <code>-color_trc</code>, and <code>-field_order</code> are given in the <a href="http://ffmpeg.org/ffmpeg-all.html#toc-Codec-Options" target="_blank">Codec Options</a> section of the FFmpeg docs - scroll down to near the bottom of the section.</p>
+ <p>The possible values for <code>-color_primaries</code>, <code>-color_trc</code>, and <code>-field_order</code> are given in the <a href="https://ffmpeg.org/ffmpeg-all.html#toc-Codec-Options" target="_blank">Codec Options</a> section of the FFmpeg docs - scroll down to near the bottom of the section.</p>
<p class="link"></p>
</div>
<!-- ends Make stream properties explicit -->
<!-- Crop video -->
<label class="recipe" for="crop_video">Crop video</label>
<input type="checkbox" id="crop_video">
<div class="hiding">
<h3>Crop video</h3>
<p><code>ffmpeg -i <i>input_file</i> -vf "crop=<i>width</i>:<i>height</i>" <i>output_file</i></code></p>
<p>This command crops the input video to the dimensions defined.</p>
<dl>
<dt>ffmpeg</dt><dd>starts the command</dd>
<dt>-i <i>input_file</i></dt><dd>path, name and extension of the input file</dd>
<dt>-vf "<i>width</i>:<i>height</i>"</dt><dd>Crops the video to the given width and height (in pixels).<br>
By default, the crop area is centred: that is, the position of the top left of the cropped area is set to x = (<i>input_width</i> - <i>output_width</i>) / 2, y = <i>input_height</i> - <i>output_height</i>) / 2.
</dd>
<dt><i>output_file</i></dt><dd>path, name and extension of the output file</dd>
</dl>
<p>It's also possible to specify the crop position by adding the x and y coordinates representing the top left of your cropped area to your crop filter, as such:</p>
<p><code>ffmpeg -i <i>input_file</i> -vf "crop=<i>width</i>:<i>height</i>[:<i>x_position</i>:<i>y_position</i>]" <i>output_file</i></code></p>
<h3>Examples</h3>
<p>The original frame, a screenshot of the SMPTE colourbars:</p>
<img class="sample-image" src="img/crop_example_orig.png" alt="VLC screenshot of Maggie Cheung">
<p>Result of the command <code>ffmpeg -i <i>smpte_coloursbars.mov</i> -vf "crop=500:500" <i>output_file</i></code>:</p>
<img class="sample-image-small" src="img/crop_example_aftercrop1.png" alt="VLC screenshot of Maggie Cheung, cropped from original">
<p>Result of the command <code>ffmpeg -i <i>smpte_coloursbars.mov</i> -vf "crop=500:500:0:0" <i>output_file</i></code>, appending <code>:0:0</code> to crop from the top left corner:</p>
<img class="sample-image-small" src="img/crop_example_aftercrop2.png" alt="VLC screenshot of Maggie Cheung, cropped from original">
<p>Result of the command <code>ffmpeg -i <i>smpte_coloursbars.mov</i> -vf "crop=500:300:500:30" <i>output_file</i></code>:</p>
<img class="sample-image-small" src="img/crop_example_aftercrop3.png" alt="VLC screenshot of Maggie Cheung, cropped from original">
<p class="link"></p>
</div>
<!-- ends Crop video -->
</div>
<div class="well">
<h2 id="audio-files">Change or view audio properties</h2>
@@ -698,7 +747,7 @@
<dl>
<dt>ffmpeg</dt><dd>starts the command</dd>
<dt>-i <i>input_file</i></dt><dd>path, name and extension of the input file</dd>
- <dt>-filter_complex </dt><dd>tells fmpeg that we will be using a complex filter</dd>
+ <dt>-filter_complex</dt><dd>tells ffmpeg that we will be using a complex filter</dd>
<dt>"</dt><dd>quotation mark to start filtergraph</dd>
<dt>[0:a:0][0:a:1]amerge[out]</dt><dd>combines the two audio tracks into one</dd>
<dt>"</dt><dd>quotation mark to end filtergraph</dd>
@@ -710,7 +759,7 @@
</dl>
<p class="link"></p>
</div>
<!-- ends Combine audio tracks -->
<!-- phase shift -->
<label class="recipe" for="phase_shift">Inverses the audio phase of the second channel</label>
@@ -739,7 +788,7 @@
<p><code>ffmpeg -i <i>input_file</i> -af loudnorm=print_format=json -f null -</code></p>
<p>This filter calculates and outputs loudness information in JSON about an input file (labeled “input”) as well as what the levels would be if loudnorm were applied in its one pass mode (labeled “output”). The values generated can be used as inputs for a 'second pass' of the loudnorm filter, allowing more accurate loudness normalization than if it is used in a single pass.</p>
<p>These instructions use the loudnorm defaults, which align well with PBS recommendations for target loudness. More information can be found at the <a href="https://ffmpeg.org/ffmpeg-filters.html#loudnorm" target="_blank">loudnorm documentation</a>.</p>
- <p>Information about PBS loudness standards can be found in the <a href="http://www-tc.pbs.org/capt/Producing/TOS-2012-Pt2-Distribution.pdf" target="_blank">PBS Technical Operating Specifications</a> document. Information about EBU loudness standards can be found in the <a href="https://tech.ebu.ch/docs/r/r128-2014.pdf" target="_blank">EBU R 128</a> recommendation document.</p>
+ <p>Information about PBS loudness standards can be found in the <a href="https://www-tc.pbs.org/capt/Producing/TOS-2012-Pt2-Distribution.pdf" target="_blank">PBS Technical Operating Specifications</a> document. Information about EBU loudness standards can be found in the <a href="https://tech.ebu.ch/docs/r/r128-2014.pdf" target="_blank">EBU R 128</a> recommendation document.</p>
<dl>
<dt>ffmpeg</dt><dd>starts the command</dd>
<dt><i>input_file</i></dt><dd>path, name and extension of the input file</dd>
@@ -775,7 +824,7 @@
<h3>One Pass Loudness Normalization</h3>
<p><code>ffmpeg -i <i>input_file</i> -af loudnorm=dual_mono=true -ar 48k <i>output_file</i></code></p>
<p>This will normalize the loudness of an input using one pass, which is quicker but less accurate than using two passes. This command uses the loudnorm filter defaults for target loudness. These defaults align well with PBS recommendations, but loudnorm does allow targeting of specific loudness levels. More information can be found at the <a href="https://ffmpeg.org/ffmpeg-filters.html#loudnorm" target="_blank">loudnorm documentation</a>.</p>
- <p>Information about PBS loudness standards can be found in the <a href="http://www-tc.pbs.org/capt/Producing/TOS-2012-Pt2-Distribution.pdf" target="_blank">PBS Technical Operating Specifications</a> document. Information about EBU loudness standards can be found in the <a href="https://tech.ebu.ch/docs/r/r128-2014.pdf" target="_blank">EBU R 128</a> recommendation document.</p>
+ <p>Information about PBS loudness standards can be found in the <a href="https://www-tc.pbs.org/capt/Producing/TOS-2012-Pt2-Distribution.pdf" target="_blank">PBS Technical Operating Specifications</a> document. Information about EBU loudness standards can be found in the <a href="https://tech.ebu.ch/docs/r/r128-2014.pdf" target="_blank">EBU R 128</a> recommendation document.</p>
<dl>
<dt>ffmpeg</dt><dd>starts the command</dd>
<dt><i>input_file</i></dt><dd>path, name and extension of the input file</dd>
@@ -795,7 +844,7 @@
<h3>Two Pass Loudness Normalization</h3>
<p><code>ffmpeg -i <i>input_file</i> -af loudnorm=dual_mono=true:measured_I=<i>input_i</i>:measured_TP=<i>input_tp</i>:measured_LRA=<i>input_lra</i>:measured_thresh=<i>input_thresh</i>:offset=<i>target_offset</i>:linear=true -ar 48k <i>output_file</i></code></p>
<p>This command allows using the levels calculated using a <a href="#loudnorm_metadata">first pass of the loudnorm filter</a> to more accurately normalize loudness. This command uses the loudnorm filter defaults for target loudness. These defaults align well with PBS recommendations, but loudnorm does allow targeting of specific loudness levels. More information can be found at the <a href="https://ffmpeg.org/ffmpeg-filters.html#loudnorm" target="_blank">loudnorm documentation</a>.</p>
- <p>Information about PBS loudness standards can be found in the <a href="http://www-tc.pbs.org/capt/Producing/TOS-2012-Pt2-Distribution.pdf" target="_blank">PBS Technical Operating Specifications</a> document. Information about EBU loudness standards can be found in the <a href="https://tech.ebu.ch/docs/r/r128-2014.pdf" target="_blank">EBU R 128</a> recommendation document.</p>
+ <p>Information about PBS loudness standards can be found in the <a href="https://www-tc.pbs.org/capt/Producing/TOS-2012-Pt2-Distribution.pdf" target="_blank">PBS Technical Operating Specifications</a> document. Information about EBU loudness standards can be found in the <a href="https://tech.ebu.ch/docs/r/r128-2014.pdf" target="_blank">EBU R 128</a> recommendation document.</p>
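<p>Purely as a sketch of what a filled-in second pass might look like, with hypothetical measured values (your own numbers must come from the first pass, and the options are defined below):</p>
<p><code>ffmpeg -i <i>input_file</i> -af loudnorm=dual_mono=true:measured_I=-28.5:measured_TP=-3.1:measured_LRA=6.0:measured_thresh=-39.0:offset=0.4:linear=true -ar 48k <i>output_file</i></code></p>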
<dl>
<dt>ffmpeg</dt><dd>starts the command</dd>
<dt><i>input_file</i></dt><dd>path, name and extension of the input file</dd>
@@ -836,8 +885,8 @@
<div class="well">
<h2 id="join-trim">Join, trim, or excerpt a video</h2>
- <!-- Join files together -->
- <label class="recipe" for="join_files">Join (concatenate) two or more files into a single file</label>
+ <!-- Join files of the same type together -->
+ <label class="recipe" for="join_files">Join (concatenate) two or more files of the same type</label>
<input type="checkbox" id="join_files">
<div class="hiding">
<h3>Join files together</h3>
@@ -846,7 +895,7 @@
<dl>
<dt>ffmpeg</dt><dd>starts the command</dd>
<dt>-f concat</dt><dd>forces ffmpeg to concatenate the files and to keep the same file format</dd>
- <dt>-i <i>mylist.txt</i></dt><dd>path, name and extension of the input file. Per the <a href="https://www.ffmpeg.org/ffmpeg-formats.html#Options" target="_blank">FFmpeg documentation</a>, it is preferable to specify relative rather than absolute file paths, as allowing absolute file paths may pose a security risk.<br>
+ <dt>-i <i>mylist.txt</i></dt><dd>path, name and extension of the input file. Per the <a href="https://ffmpeg.org/ffmpeg-formats.html#Options" target="_blank">FFmpeg documentation</a>, it is preferable to specify relative rather than absolute file paths, as allowing absolute file paths may pose a security risk.<br>
This text file contains the list of files to be concatenated and should be formatted as follows:
<pre>file '<i>./first_file.ext</i>'
file '<i>./second_file.ext</i>'
@@ -861,7 +910,66 @@
<p>For more information, see the <a href="https://trac.ffmpeg.org/wiki/Concatenate" target="_blank">FFmpeg wiki page on concatenating files</a>.</p>
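<p>As a hedged convenience (not part of the original recipe), a short shell loop can generate <code>mylist.txt</code> for every .mp4 file in the current directory:</p>
<p><code>for f in ./*.mp4; do echo "file '$f'" &gt;&gt; mylist.txt; done</code></p>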
<p class="link"></p>
</div>
- <!-- ends Join files together -->
+ <!-- ends Join files of the same type together -->
<!-- Join files of different types together -->
<label class="recipe" for="join_different_files">Join (concatenate) two or more files of different types</label>
<input type="checkbox" id="join_different_files">
<div class="hiding">
<h3>Join files of different types together</h3>
<p><code>ffmpeg -i input_1.avi -i input_2.mp4 -filter_complex "[0:v:0][0:a:0][1:v:0][1:a:0]concat=n=2:v=1:a=1[video_out][audio_out]" -map "[video_out]" -map "[audio_out]" <i>output_file</i></code></p>
<p>This command takes two or more files of different file types and joins them together to make a single file.</p>
<p>The input files may differ in many respects: container, codec, chroma subsampling scheme, framerate, etc. However, the above command only works properly if the files to be combined have the same dimensions (e.g., 720x576). Also note that if the input files have different framerates, then the output file will be of variable framerate.</p>
<p>Some aspects of the input files will be normalised: for example, if an input file contains a video track and an audio track that do not have exactly the same duration, the shorter one will be padded. In the case of a shorter video track, the last frame will be repeated in order to cover the missing video; in the case of a shorter audio track, the audio stream will be padded with silence.</p>
<dl>
<dt>ffmpeg</dt><dd>starts the command</dd>
<dt>-i <i>input_1.ext</i></dt><dd>path, name and extension of the first input file</dd>
<dt>-i <i>input_2.ext</i></dt><dd>path, name and extension of the second input file</dd>
<dt>-filter_complex</dt><dd>states that a complex filtergraph will be used</dd>
<dt>"</dt><dd>quotation mark to start filtergraph</dd>
<dt>[0:v:0][0:a:0]</dt><dd>selects the first video stream and first audio stream from the first input.<br>
Each reference to a specific stream is enclosed in square brackets. In the first stream reference, <code>0:v:0</code>, the first zero refers to the first input file, <code>v</code> means video stream, and the second zero indicates that it is the <i>first</i> video stream in the file that should be selected. Likewise, <code>0:a:0</code> means the first audio stream in the first input file.<br>
As demonstrated above, ffmpeg uses zero-indexing: <code>0</code> means the first input/stream/etc, <code>1</code> means the second input/stream/etc, and <code>4</code> would mean the fifth input/stream/etc.</dd>
<dt>[1:v:0][1:a:0]</dt><dd>As described above, this means select the first video and audio streams from the second input file.</dd>
<dt>concat=</dt><dd>starts the <code>concat</code> filter</dd>
<dt>n=2</dt><dd>states that there are two input files</dd>
<dt>:</dt><dd>separator</dd>
<dt>v=1</dt><dd>sets the number of output video streams.<br>
Note that this must be equal to the number of video streams selected from each segment.</dd>
<dt>:</dt><dd>separator</dd>
<dt>a=1</dt><dd>sets the number of output audio streams.<br>
Note that this must be equal to the number of audio streams selected from each segment.</dd>
<dt>[video_out]</dt><dd>name of the concatenated output video stream. This is a variable name which you define, so you could call it something different, like “vOut”, “outv”, or “banana”.</dd>
<dt>[audio_out]</dt><dd>name of the concatenated output audio stream. Again, this is a variable name which you define.</dd>
<dt>"</dt><dd>quotation mark to end filtergraph</dd>
<dt>-map "[video_out]"</dt><dd>map the concatenated video stream into the output file by referencing the variable defined above</dd>
<dt>-map "[audio_out]"</dt><dd>map the concatenated audio stream into the output file by referencing the variable defined above</dd>
<dt><i>output_file</i></dt><dd>path, name and extension of the output file</dd>
</dl>
<p>If no characteristics of the output files are specified, ffmpeg will use the default encodings associated with the given output file type. To specify the characteristics of the output stream(s), add flags after each <code>-map "[out]"</code> part of the command.</p>
<p>For example, to ensure that the video stream of the output file is visually lossless H.264 with a 4:2:0 chroma subsampling scheme, the command above could be amended to include the following:<br>
<code>-map "[video_out]" -c:v libx264 -pix_fmt yuv420p -preset veryslow -crf 18</code></p>
<p>Likewise, to encode the output audio stream as mp3, the command could include the following:<br>
<code>-map "[audio_out]" -c:a libmp3lame -dither_method modified_e_weighted -qscale:a 2</code></p>
<h4>Variation: concatenating files of different resolutions</h4>
<p>To concatenate files of different resolutions, you need to resize the videos to have matching resolutions prior to concatenation. The most basic way to do this is by using a scale filter and giving the dimensions of the file you wish to match:</p>
<p><code>-vf scale=1920:1080:flags=lanczos</code></p>
<p>(The Lanczos scaling algorithm is recommended, as it is slower but better than the default bilinear algorithm.)</p>
<p>The rescaling should be applied just before the point where the streams to be used in the output file are listed. Select the stream you want to rescale, apply the filter, and assign that to a variable name (<code>rescaled_video</code> in the below example). Then you use this variable name in the list of streams to be concatenated.</p>
<p><code>ffmpeg -i input_1.avi -i input_2.mp4 -filter_complex "[0:v:0] scale=1920:1080:flags=lanczos [rescaled_video], [rescaled_video] [0:a:0] [1:v:0] [1:a:0] concat=n=2:v=1:a=1 [video_out] [audio_out]" -map "[video_out]" -map "[audio_out]" <i>output_file</i></code></p>
<p>However, this will only have the desired visual output if the inputs have the same aspect ratio. If you wish to concatenate an SD and an HD file, you will also wish to pillarbox the SD file while upscaling. (See the <a href="https://amiaopensource.github.io/ffmprovisr/#SD_HD_2">Convert 4:3 to pillarboxed HD</a> command). The full command would look like this:</p>
<p><code>ffmpeg -i input_1.avi -i input_2.mp4 -filter_complex "[0:v:0] scale=1440:1080:flags=lanczos, pad=1920:1080:(ow-iw)/2:(oh-ih)/2 [to_hd_video], [to_hd_video] [0:a:0] [1:v:0] [1:a:0] concat=n=2:v=1:a=1 [video_out] [audio_out]" -map "[video_out]" -map "[audio_out]" <i>output_file</i></code></p>
<p>Here, the first input is an SD file which needs to be upscaled to match the second input, which is 1920x1080. The scale filter enlarges the SD input to the height of the HD frame, keeping the 4:3 aspect ratio; then, the video is pillarboxed within a 1920x1080 frame.</p>
<h4>Variation: concatenating files of different framerates</h4>
<p>If the input files have different framerates, then the output file may be of variable framerate. To explicitly obtain an output file of constant framerate, you may wish to convert an input (or multiple inputs) to a different framerate prior to concatenation.</p>
<p>You can speed up or slow down a file using the <code>fps</code> and <code>atempo</code> filters (see also the <a href="https://amiaopensource.github.io/ffmprovisr/#modify_speed">Modify speed</a> command).</p>
<p>Here's an example of the full command, in which input_1 is 30fps, input_2 is 25fps, and 25fps is the desired output speed.</p>
<p><code>ffmpeg -i input_1.avi -i input_2.mp4 -filter_complex "[0:v:0] fps=fps=25 [video_to_25fps]; [0:a:0] atempo=(25/30) [audio_to_25fps]; [video_to_25fps] [audio_to_25fps] [1:v:0] [1:a:0] concat=n=2:v=1:a=1 [video_out] [audio_out]" -map "[video_out]" -map "[audio_out]" <i>output_file</i></code></p>
<p>Note that the <code>fps</code> filter will drop or repeat frames as necessary in order to achieve the desired frame rate - see the FFmpeg <a href="https://ffmpeg.org/ffmpeg-filters.html#fps-1">fps docs</a> for more details.</p>
<p>For more information, see the <a href="https://trac.ffmpeg.org/wiki/Concatenate#differentcodec" target="_blank">FFmpeg wiki page on concatenating files of different types</a>.</p>
<p class="link"></p>
</div>
<!-- ends Join files of different types together -->
<!-- Split file into segments -->
<label class="recipe" for="segment_file">Split one file into several smaller segments</label>
@@ -874,7 +982,7 @@
<dt>-i <i>input_file</i></dt><dd>Takes in a normal file.</dd>
<dt>-c copy</dt><dd>Use stream copy mode to re-mux instead of re-encode.</dd>
<dt>-map 0</dt><dd>tells FFmpeg to map all streams of the input to the output.</dd>
- <dt>-f segment</dt><dd>Use <a href="http://www.ffmpeg.org/ffmpeg-formats.html#toc-segment_002c-stream_005fsegment_002c-ssegment" target="_blank">segment muxer</a> for generating the output.</dd>
+ <dt>-f segment</dt><dd>Use <a href="https://ffmpeg.org/ffmpeg-formats.html#toc-segment_002c-stream_005fsegment_002c-ssegment" target="_blank">segment muxer</a> for generating the output.</dd>
<dt>-segment_time 60</dt><dd>Set duration of each segment (in seconds). This example creates segments with max. duration of 60s each.</dd>
<dt>-reset_timestamps 1</dt><dd>Reset timestamps of each segment to 0. Meant to ease the playback of the generated segments.</dd>
<dt><i>output_file-%03d.mkv</i></dt>
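<p>Assembled from the options described above (a sketch, since the full command line falls outside this excerpt), the whole segmenting command would plausibly read:</p>
<p><code>ffmpeg -i <i>input_file</i> -c copy -map 0 -f segment -segment_time 60 -reset_timestamps 1 <i>output_file-%03d.mkv</i></code></p>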
@@ -907,7 +1015,7 @@
<dt>-ss 00:02:00</dt><dd>sets in point at 00:02:00</dd>
<dt>-to 00:55:00</dt><dd>sets out point at 00:55:00</dd>
<dt>-c copy</dt><dd>use stream copy mode (no re-encoding)<br>
- <dt>-map 0</dt><dd>tells FFmpeg to map all streams of the input to the output.</dd>
+ <dt>-map 0</dt><dd>tells FFmpeg to map all streams of the input to the output.<br>
<b>Note:</b> watch out when using <code>-ss</code> with <code>-c copy</code> if the source is encoded with an interframe codec (e.g., H.264). Since FFmpeg must split on i-frames, it will seek to the nearest i-frame to begin the stream copy.</dd>
<dt><i>output_file</i></dt><dd>path, name and extension of the output file</dd>
</dl>
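<p>A hedged variant for when frame accuracy matters more than speed (not part of the original recipe): re-encoding instead of stream-copying lets FFmpeg cut at the exact requested times rather than at the nearest i-frame, at the cost of a slower, lossy pass:</p>
<p><code>ffmpeg -i <i>input_file</i> -ss 00:02:00 -to 00:55:00 -c:v libx264 -c:a aac <i>output_file</i></code></p>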
@@ -992,6 +1100,7 @@
<dt>ffmpeg</dt><dd>starts the command</dd>
<dt>-i <i>input_file</i></dt><dd>path, name and extension of the input file</dd>
<dt>-c:v libx264</dt><dd>encodes video stream with libx264 (h264)</dd>
<dt>-filter:v</dt><dd>a video filter will be used</dd>
<dt>"</dt><dd>quotation mark to start filtergraph</dd>
<dt>yadif</dt><dd>deinterlacing filter (yet another deinterlacing filter)<br>
By default, <a href="https://ffmpeg.org/ffmpeg-filters.html#yadif-1" target="_blank">yadif</a> will output one frame for each frame. Outputting one frame for each <i>field</i> (thereby doubling the frame rate) with <code>yadif=1</code> may produce visually better results.</dd>
@@ -1027,7 +1136,7 @@
<dt>"</dt><dd>end of filtergraph</dd>
<dt><i>output file</i></dt><dd>path, name and extension of the output file</dd>
</dl>
<p> <code>"yadif,format=yuv420p"</code> is an FFmpeg <a href="https://trac.ffmpeg.org/wiki/FilteringGuide#FiltergraphChainFilterrelationship" target="_blank">filtergraph</a>. Here the filtergraph is made up of one filter chain, which is itself made up of the two filters (separated by the comma).<br>
<p><code>"yadif,format=yuv420p"</code> is an FFmpeg <a href="https://trac.ffmpeg.org/wiki/FilteringGuide#FiltergraphChainFilterrelationship" target="_blank">filtergraph</a>. Here the filtergraph is made up of one filter chain, which is itself made up of the two filters (separated by the comma).<br>
The enclosing quote marks are necessary when you use spaces within the filtergraph, e.g. <code>-vf "yadif, format=yuv420p"</code>, and are included above as an example of good practice.</p>
<p><b>Note:</b> FFmpeg includes several deinterlacers apart from <a href="https://ffmpeg.org/ffmpeg-filters.html#yadif-1" target="_blank">yadif</a>: <a href="https://ffmpeg.org/ffmpeg-filters.html#bwdif" target="_blank">bwdif</a>, <a href="https://ffmpeg.org/ffmpeg-filters.html#w3fdif" target="_blank">w3fdif</a>, <a href="https://ffmpeg.org/ffmpeg-filters.html#kerndeint" target="_blank">kerndeint</a>, and <a href="https://ffmpeg.org/ffmpeg-filters.html#nnedi" target="_blank">nnedi</a>.</p>
<p>For more H.264 encoding options, see the latter section of the <a href="./index.html#transcode_h264">encode H.264 command</a>.</p>
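<p>For completeness, a sketch of the <code>yadif=1</code> variant mentioned above (one output frame per <i>field</i>, doubling the frame rate), assembled from options already described on this page:</p>
<p><code>ffmpeg -i <i>input_file</i> -c:v libx264 -vf "yadif=1,format=yuv420p" <i>output_file</i></code></p>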
@@ -1122,12 +1231,12 @@
<dt>-i <i>input_file</i></dt><dd>path, name and extension of the input file</dd>
<dt>-vf drawtext=</dt><dd>This calls the drawtext filter with the following options:
<dl>
- <dt>fontfile=<i>font_path</i></dt><dd> Set path to font. For example in macOS: <code>fontfile=/Library/Fonts/AppleGothic.ttf</code></dd>
- <dt>fontsize=<i>font_size</i></dt><dd> Set font size. <code>35</code> is a good starting point for SD. Ideally this value is proportional to video size, for example use ffprobe to acquire video height and divide by 14.</dd>
- <dt>text=<i>watermark_text</i> </dt><dd> Set the content of your watermark text. For example: <code>text='FFMPROVISR EXAMPLE TEXT'</code></dd>
- <dt>fontcolor=<i>font_colour</i> </dt><dd> Set colour of font. Can be a text string such as <code>fontcolor=white</code> or a hexadecimal value such as <code>fontcolor=0xFFFFFF</code></dd>
- <dt>alpha=0.4</dt><dd> Set transparency value.</dd>
- <dt>x=(w-text_w)/2:y=(h-text_h)/2</dt><dd> Sets <i>x</i> and <i>y</i> coordinates for the watermark. These relative values will centre your watermark regardless of video dimensions.</dd>
+ <dt>fontfile=<i>font_path</i></dt><dd>Set path to font. For example in macOS: <code>fontfile=/Library/Fonts/AppleGothic.ttf</code></dd>
+ <dt>fontsize=<i>font_size</i></dt><dd>Set font size. <code>35</code> is a good starting point for SD. Ideally this value is proportional to video size, for example use ffprobe to acquire video height and divide by 14.</dd>
+ <dt>text=<i>watermark_text</i></dt><dd>Set the content of your watermark text. For example: <code>text='FFMPROVISR EXAMPLE TEXT'</code></dd>
+ <dt>fontcolor=<i>font_colour</i></dt><dd>Set colour of font. Can be a text string such as <code>fontcolor=white</code> or a hexadecimal value such as <code>fontcolor=0xFFFFFF</code></dd>
+ <dt>alpha=0.4</dt><dd>Set transparency value.</dd>
+ <dt>x=(w-text_w)/2:y=(h-text_h)/2</dt><dd>Sets <i>x</i> and <i>y</i> coordinates for the watermark. These relative values will centre your watermark regardless of video dimensions.</dd>
</dl>
Note: <code>-vf</code> is a shortcut for <code>-filter:v</code>.</dd>
<dt><i>output_file</i></dt><dd>path, name and extension of the output file.</dd>
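<p>Putting the options above together (a sketch built from the example values in the definitions, not a command quoted from the page):</p>
<p><code>ffmpeg -i <i>input_file</i> -vf drawtext="fontfile=/Library/Fonts/AppleGothic.ttf:fontsize=35:text='FFMPROVISR EXAMPLE TEXT':fontcolor=white:alpha=0.4:x=(w-text_w)/2:y=(h-text_h)/2" <i>output_file</i></code></p>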
@@ -1146,7 +1255,7 @@
<dt>ffmpeg</dt><dd>starts the command</dd>
<dt>-i <i>input_video_file</i></dt><dd>path, name and extension of the input video file</dd>
<dt>-i <i>input_image_file</i></dt><dd>path, name and extension of the image file</dd>
- <dt>-filter_complex overlay=main_w-overlay_w-5:5</dt><dd>This calls the overlay filter and sets x and y coordinates for the position of the watermark on the video. Instead of hardcoding specific x and y coordinates, <code>main_w-overlay_w-5:5</code> uses relative coordinates to place the watermark in the upper right hand corner, based on the width of your input files. Please see the <a href="https://www.ffmpeg.org/ffmpeg-all.html#toc-Examples-102" target="_blank">FFmpeg documentation for more examples.</a></dd>
+ <dt>-filter_complex overlay=main_w-overlay_w-5:5</dt><dd>This calls the overlay filter and sets x and y coordinates for the position of the watermark on the video. Instead of hardcoding specific x and y coordinates, <code>main_w-overlay_w-5:5</code> uses relative coordinates to place the watermark in the upper right hand corner, based on the width of your input files. Please see the <a href="https://ffmpeg.org/ffmpeg-all.html#toc-Examples-102" target="_blank">FFmpeg documentation for more examples.</a></dd>
<dt><i>output_file</i></dt><dd>path, name and extension of the output file</dd>
</dl>
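<p>Assembled from the parts described above (a sketch rather than the page's own full command line, which falls outside this excerpt):</p>
<p><code>ffmpeg -i <i>input_video_file</i> -i <i>input_image_file</i> -filter_complex overlay=main_w-overlay_w-5:5 <i>output_file</i></code></p>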
<p class="link"></p>
@@ -1164,14 +1273,14 @@
<dt>-i <i>input_file</i></dt><dd>path, name and extension of the input file</dd>
<dt>-vf drawtext=</dt><dd>This calls the drawtext filter with the following options:
<dt>"</dt><dd>quotation mark to start drawtext filter command</dd>
<dt>fontfile=<i>font_path</i></dt><dd> Set path to font. For example in macOS: <code>fontfile=/Library/Fonts/AppleGothic.ttf</code></dd>
<dt>fontsize=<i>font_size</i></dt><dd> Set font size. <code>35</code> is a good starting point for SD. Ideally this value is proportional to video size, for example use ffprobe to acquire video height and divide by 14.</dd>
<dt>timecode=<i>starting_timecode</i> </dt><dd> Set the timecode to be displayed for the first frame. Timecode is to be represented as <code>hh:mm:ss[:;.]ff</code>. Colon escaping is determined by O.S, for example in Ubuntu <code>timecode='09\\:50\\:01\\:23'</code>. Ideally, this value would be generated from the file itself using ffprobe.</dd>
<dt>fontcolor=<i>font_colour</i> </dt><dd> Set colour of font. Can be a text string such as <code>fontcolor=white</code> or a hexadecimal value such as <code>fontcolor=0xFFFFFF</code></dd>
<dt>box=1</dt><dd> Enable box around timecode</dd>
<dt>boxcolor=<i>box_colour</i></dt><dd> Set colour of box. Can be a text string such as <code>fontcolor=black</code> or a hexadecimal value such as <code>fontcolor=0x000000</code></dd>
<dt>rate=<i>timecode_rate</i></dt><dd> Framerate of video. For example <code>25/1</code></dd>
<dt>x=(w-text_w)/2:y=h/1.2</dt><dd> Sets <i>x</i> and <i>y</i> coordinates for the timecode. These relative values will horizontally centre your timecode in the bottom third regardless of video dimensions.</dd>
<dt>fontfile=<i>font_path</i></dt><dd>Set path to font. For example in macOS: <code>fontfile=/Library/Fonts/AppleGothic.ttf</code></dd>
<dt>fontsize=<i>font_size</i></dt><dd>Set font size. <code>35</code> is a good starting point for SD. Ideally this value is proportional to the video size; for example, use ffprobe to acquire the video height and divide it by 14.</dd>
<dt>timecode=<i>starting_timecode</i></dt><dd>Set the timecode to be displayed for the first frame. Timecode is to be represented as <code>hh:mm:ss[:;.]ff</code>. Colon escaping depends on the OS; for example, in Ubuntu use <code>timecode='09\\:50\\:01\\:23'</code>. Ideally, this value would be read from the file itself using ffprobe (see the sketch below).</dd>
<dt>fontcolor=<i>font_colour</i></dt><dd>Set colour of font. Can be a text string such as <code>fontcolor=white</code> or a hexadecimal value such as <code>fontcolor=0xFFFFFF</code></dd>
<dt>box=1</dt><dd>Enable box around timecode</dd>
<dt>boxcolor=<i>box_colour</i></dt><dd>Set colour of box. Can be a text string such as <code>fontcolor=black</code> or a hexadecimal value such as <code>fontcolor=0x000000</code></dd>
<dt>rate=<i>timecode_rate</i></dt><dd>Framerate of video. For example <code>25/1</code></dd>
<dt>x=(w-text_w)/2:y=h/1.2</dt><dd>Sets <i>x</i> and <i>y</i> coordinates for the timecode. These relative values will horizontally centre your timecode in the bottom third regardless of video dimensions.</dd>
<dt>"</dt><dd>quotation mark to end drawtext filter command</dd>
<dt><i>output_file</i></dt><dd>path, name and extension of the output file.</dd>
</dl>
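<p>As a starting point, a hedged sketch for reading an embedded timecode tag with ffprobe (not every file carries such a tag, so the output may be empty):</p>
<p><code>ffprobe -v error -show_entries format_tags=timecode -of default=nw=1:nk=1 <i>input_file</i></code></p>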
@@ -1180,6 +1289,25 @@
</div>
<!-- ends Burn in timecode -->
<!-- Embed subtitles-->
<label class="recipe" for="embed_subtitles">Embed subtitles</label>
<input type="checkbox" id="embed_subtitles">
<div class="hiding">
<h3>Embed a subtitle file into a movie file </h3>
<p><code>ffmpeg -i <i>input_file</i> -i <i>subtitles_file</i> -c copy -c:s mov_text <i>output_file</i></code></p>
<dl>
<dt>ffmpeg</dt><dd>starts the command</dd>
<dt>-i <i>input_file</i></dt><dd>path, name and extension of the input file</dd>
<dt>-i <i>subtitles_file</i></dt><dd>path to subtitles file, e.g. <code>subtitles.srt</code></dd>
<dt>-c copy</dt><dd>enable stream copy (no re-encode)</dd>
<dt>-c:s mov_text</dt><dd>Encode subtitles using the <code>mov_text</code> codec. Note: The <code>mov_text</code> codec works for MP4 and MOV containers. For the MKV container, acceptable formats are <code>ASS</code>, <code>SRT</code>, and <code>SSA</code>.</dd>
<dt><i>output_file</i></dt><dd>path, name and extension of the output file</dd>
</dl>
<p>Note: <code>-c:s</code> is a shortcut for <code>-scodec</code></p>
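<p>For an MKV output, a minimal sketch of the analogous command (assuming an SRT subtitle file, which the Matroska container can store as-is):</p>
<p><code>ffmpeg -i <i>input_file</i> -i <i>subtitles_file</i>.srt -c copy <i>output_file</i>.mkv</code></p>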
<p class="link"></p>
</div>
<!-- ends Embed subtitles -->
</div>
<div class="well">
<h2 id="create-images">Create thumbnails or GIFs</h2>
@@ -1354,7 +1482,7 @@
<p><code>ffplay -f lavfi "amovie='input.mp3', astats=metadata=1:reset=1, adrawgraph=lavfi.astats.Overall.Peak_level:max=0:min=-30.0:size=700x256:bg=Black[out]"</code></p>
<dl>
<dt>ffplay</dt><dd>starts the command</dd>
<dt>-f lavfi</dt><dd>tells ffplay to use the <a href="http://ffmpeg.org/ffmpeg-devices.html#lavfi" target="_blank">Libavfilter input virtual device</a></dd>
<dt>-f lavfi</dt><dd>tells ffplay to use the <a href="https://ffmpeg.org/ffmpeg-devices.html#lavfi" target="_blank">Libavfilter input virtual device</a></dd>
<dt>"</dt><dd>quotation mark to start the lavfi filtergraph</dd>
<dt>movie='<i>input.mp3</i>'</dt><dd>declares audio source file on which to apply filter</dd>
<dt>,</dt><dd>comma signifies the end of audio source section and the beginning of the filter section</dd>
@@ -1383,11 +1511,11 @@
<p><code>ffplay -f lavfi "movie='<i>input.mp4</i>', signalstats=out=brng:color=cyan[out]"</code></p>
<dl>
<dt>ffplay</dt><dd>starts the command</dd>
<dt>-f lavfi</dt><dd>tells ffplay to use the <a href="http://ffmpeg.org/ffmpeg-devices.html#lavfi" target="_blank">Libavfilter input virtual device</a></dd>
<dt>-f lavfi</dt><dd>tells ffplay to use the <a href="https://ffmpeg.org/ffmpeg-devices.html#lavfi" target="_blank">Libavfilter input virtual device</a></dd>
<dt>"</dt><dd>quotation mark to start the lavfi filtergraph</dd>
<dt>movie='<i>input.mp4</i>'</dt><dd>declares video file source to apply filter</dd>
<dt>,</dt><dd>comma signifies closing of video source assertion and ready for filter assertion</dd>
<dt>signalstats=out=brng:</dt><dd>tells ffplay to use the signalstats command, output the data, use the brng filter</dd>
<dt>signalstats=out=brng</dt><dd>tells ffplay to use the signalstats command, output the data, use the brng filter</dd>
<dt>:</dt><dd>indicates there's another parameter coming</dd>
<dt>color=cyan[out]</dt><dd>sets the color of out-of-range pixels to cyan</dd>
<dt>"</dt><dd>quotation mark to end the lavfi filtergraph</dd>
@@ -1438,7 +1566,7 @@
<dt>[a][b]hstack[out]</dt><dd>Takes the outputs from the previous steps ([a] and [b]) and uses the hstack (horizontal stack) filter on them to create the side-by-side output, which is then named [out]</dd>
<dt>"</dt><dd>quotation mark to end filtergraph</dd>
<dt>-map [out]</dt><dd>Maps the output of the filter chain</dd>
<dt>-f nut</dt><dd>Sets the format for the output video stream to <a href="https://www.ffmpeg.org/ffmpeg-formats.html#nut" target="_blank">Nut</a></dd>
<dt>-f nut</dt><dd>Sets the format for the output video stream to <a href="https://ffmpeg.org/ffmpeg-formats.html#nut" target="_blank">Nut</a></dd>
<dt>-c:v rawvideo</dt><dd>Sets the video codec of the output video stream to raw video</dd>
<dt>-</dt><dd>tells FFmpeg that the output will be piped to a new command (as opposed to a file)</dd>
<dt>|</dt><dd>Tells the system you will be piping the output of the previous command into a new command</dd>
@@ -1471,7 +1599,7 @@
<dt>-show_data</dt><dd>adds a short “hexdump” to the show_streams command output</dd>
<dt>-print_format</dt><dd>Set the output printing format (in this example “xml”; other formats include “json” and “flat”)</dd>
</dl>
<p>See also the <a href="www.ffmpeg.org/ffprobe.html" target="_blank"> FFmpeg documentation on ffprobe</a> for a full list of flags, commands, and options.</p>
<p>See also the <a href="https://ffmpeg.org/ffprobe.html" target="_blank"> FFmpeg documentation on ffprobe</a> for a full list of flags, commands, and options.</p>
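<p>For example, a sketch of the same report printed as JSON instead of XML (flags as described above):</p>
<p><code>ffprobe -i <i>input_file</i> -show_format -show_streams -show_data -print_format json</code></p>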
<p class="link"></p>
</div>
<!-- ends Pull specs -->
@@ -1486,10 +1614,11 @@
<dt>ffmpeg</dt><dd>starts the command</dd>
<dt>-i <i>input_file</i></dt><dd>path, name and extension of the input file</dd>
<dt>-map_metadata -1</dt><dd>sets metadata copying to -1, which copies nothing</dd>
<dt>-vcodec copy</dt><dd>copies video track</dd>
<dt>-acodec copy</dt><dd>copies audio track</dd>
<dt>-c:v copy</dt><dd>copies video track</dd>
<dt>-c:a copy</dt><dd>copies audio track</dd>
<dt><i>output_file</i></dt><dd>makes a copy of the original file and names the output file</dd>
</dl>
<p>Note: <code>-c:v</code> and <code>-c:a</code> are shortcuts for <code>-vcodec</code> and <code>-acodec</code>.</p>
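<p>Put together, a sketch of the full command as described above:</p>
<p><code>ffmpeg -i <i>input_file</i> -map_metadata -1 -c:v copy -c:a copy <i>output_file</i></code></p>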
<p class="link"></p>
</div>
<!-- ends Strip metadata -->
@@ -1594,10 +1723,11 @@
<li>44.1 kHz: "asetnsamples=n=44100"</li>
<li>96 kHz: "asetnsamples=n=96000"</li>
</ul>
<p>Note: This filter transcodes audio to 16 bit PCM by default. The generated framemd5s will represent this value. Validating these framemd5s will require using the same default settings.</p>
<p><b>Note:</b> This filter transcodes audio to 16 bit PCM by default. The generated framemd5s will represent this value. Validating these framemd5s will require using the same default settings. Alternatively, if your file has a different quantisation rate (e.g. 24 bit), you might add the audio codec <code>-c:a pcm_s24le</code> to the command for compatibility with other tools, such as <a href="https://mediaarea.net/BWFMetaEdit" target="_blank">BWF MetaEdit</a> (see the sketch below).</p>
<dl>
<dt>ffmpeg</dt><dd>starts the command</dd>
<dt>-i <i>input_file</i></dt><dd>path, name and extension of the input file</dd>
<dt>-af "asetnsamples=n=<i>48000</i>"</dt><dd>the audio filter sets the sampling rate</dd>
<dt>-f framemd5</dt><dd>library used to calculate the MD5 checksums</dd>
<dt>-vn</dt><dd>ignores the video stream (video no)</dd>
<dt><i>output_file</i></dt><dd>path, name and extension of the output file</dd>
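<p>For instance, a hedged sketch of the 24 bit variant mentioned in the note above (assuming 48 kHz material):</p>
<p><code>ffmpeg -i <i>input_file</i> -c:a pcm_s24le -af "asetnsamples=n=48000" -f framemd5 -vn <i>output_file</i></code></p>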
@@ -1607,6 +1737,28 @@
</div>
<!-- ends Create frame md5s (audio) -->
<!-- Create stream md5s -->
<label class="recipe" for="create_stream_md5s">Create MD5 checksum(s) for A/V stream data only</label>
<input type="checkbox" id="create_stream_md5s">
<div class="hiding">
<h3>Create stream MD5s</h3>
<p><code>ffmpeg -i <i>input_file</i> -map 0:v:0 -c:v copy -f md5 <i>output_file_1</i> -map 0:a:0 -c:a copy -f md5 <i>output_file_2</i></code></p>
<p>This will create MD5 checksums for the first video and the first audio stream in a file. If only one of these is necessary (for example, when used on a WAV file), either part of the command can be excluded to create the desired MD5 only (see the sketch below). Use of this kind of checksum enables the integrity of the A/V information to be verified independently of any changes to surrounding metadata.</p>
<dl>
<dt>ffmpeg</dt><dd>starts the command</dd>
<dt>-i <i>input_file</i></dt><dd>path, name and extension of the input file</dd>
<dt>-map 0:v:0</dt><dd>selects the first video stream from the input</dd>
<dt>-c:v copy</dt><dd>ensures that FFmpeg will not transcode the video to a different codec before generating the MD5</dd>
<dt><i>output_file_1</i></dt><dd>is the output file for the video stream MD5. Example file extensions are <code>.md5</code> and <code>.txt</code></dd>
<dt>-map 0:a:0</dt><dd>selects the first audio stream from the input</dd>
<dt>-c:a copy</dt><dd>ensures that FFmpeg will not transcode the audio to a different codec before generating the MD5 (by default FFmpeg will use 16 bit PCM for audio MD5s).</dd>
<dt><i>output_file_2</i></dt><dd>is the output file for the audio stream MD5.</dd>
</dl>
<p><b>Note:</b> The MD5s generated by running this command on WAV files are compatible with those embedded by the <a href="https://mediaarea.net/BWFMetaEdit" target="_blank">BWF MetaEdit</a> tool and can be compared.</p>
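<p>For a WAV file, a minimal sketch keeping only the audio half of the command:</p>
<p><code>ffmpeg -i <i>input_file</i>.wav -map 0:a:0 -c:a copy -f md5 <i>output_file</i>.md5</code></p>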
<p class="link"></p>
</div>
<!-- ends Create stream md5s -->
<!-- QCTools Report -->
<label class="recipe" for="qctools">QCTools report (with audio)</label>
<input type="checkbox" id="qctools">
@@ -1616,7 +1768,7 @@
<p>This will create an XML report for use in <a href="https://github.com/bavc/qctools" target="_blank">QCTools</a> for a video file with one video track and one audio track. See also the <a href="https://github.com/bavc/qctools/blob/master/docs/data_format.md#creating-a-qctools-document" target="_blank">QCTools documentation</a>.</p>
<dl>
<dt>ffprobe</dt><dd>starts the command</dd>
<dt>-f lavfi</dt><dd>tells ffprobe to use the <a href="http://ffmpeg.org/ffmpeg-devices.html#lavfi" target="_blank">Libavfilter</a> input virtual device</dd>
<dt>-f lavfi</dt><dd>tells ffprobe to use the <a href="https://ffmpeg.org/ffmpeg-devices.html#lavfi" target="_blank">Libavfilter</a> input virtual device</dd>
<dt>-i</dt><dd>input file and parameters</dd>
<dt>"movie=<i>input_file</i>:s=v+a[in0][in1], [in0]signalstats=stat=tout+vrep+brng, cropdetect=reset=1:round=1, idet=half_life=1, split[a][b];[a]field=top[a1];[b]field=bottom, split[b1][b2];[a1][b1]psnr[c1];[c1][b2]ssim[out0];[in1]ebur128=metadata=1, astats=metadata=1:reset=1:length=0.4[out1]"</dt>
<dd>This very large lump of commands declares the input file and passes in a request for all potential data signal information for a file with one video and one audio track</dd>
@@ -1641,7 +1793,7 @@
<p>This will create an XML report for use in <a href="https://github.com/bavc/qctools" target="_blank">QCTools</a> for a video file with one video track and NO audio track. See also the <a href="https://github.com/bavc/qctools/blob/master/docs/data_format.md#creating-a-qctools-document" target="_blank">QCTools documentation</a>.</p>
<dl>
<dt>ffprobe</dt><dd>starts the command</dd>
<dt>-f lavfi</dt><dd>tells ffprobe to use the <a href="http://ffmpeg.org/ffmpeg-devices.html#lavfi" target="_blank">Libavfilter</a> input virtual device</dd>
<dt>-f lavfi</dt><dd>tells ffprobe to use the <a href="https://ffmpeg.org/ffmpeg-devices.html#lavfi" target="_blank">Libavfilter</a> input virtual device</dd>
<dt>-i</dt><dd>input file and parameters</dd>
<dt>"movie=<i>input_file</i>,signalstats=stat=tout+vrep+brng, cropdetect=reset=1:round=1, idet=half_life=1, split[a][b];[a]field=top[a1];[b]field=bottom,split[b1][b2];[a1][b1]psnr[c1];[c1][b2]ssim"</dt>
<dd>This very large lump of commands declares the input file and passes in a request for all potential data signal information for a file with one video track and no audio track</dd>
@@ -1674,7 +1826,7 @@
</dl>
<p class="link"></p>
</div>
<!-- ends Check FFV1 Fixity -->
<!-- Read/Extract EIA-608 Closed Captions -->
<label class="recipe" for="readeia608">Read/Extract EIA-608 Closed Captioning</label>
@@ -1686,7 +1838,7 @@
<p>If hex isn't your thing, closed captioning <a href="http://www.theneitherworld.com/mcpoodle/SCC_TOOLS/DOCS/CC_CHARS.HTML" target="_blank">character</a> and <a href="http://www.theneitherworld.com/mcpoodle/SCC_TOOLS/DOCS/CC_CODES.HTML" target="_blank">code</a> sets can be found in the documentation for SCTools.</p>
<dl>
<dt>ffprobe</dt><dd>starts the command</dd>
<dt>-f lavfi</dt><dd>tells ffprobe to use the <a href="http://ffmpeg.org/ffmpeg-devices.html#lavfi" target="_blank">libavfilter</a> input virtual device</dd>
<dt>-f lavfi</dt><dd>tells ffprobe to use the <a href="https://ffmpeg.org/ffmpeg-devices.html#lavfi" target="_blank">libavfilter</a> input virtual device</dd>
<dt>-i <i>input_file</i></dt><dd>input file and parameters</dd>
<dt>readeia608 -show_entries frame=pkt_pts_time:frame_tags=lavfi.readeia608.0.line,lavfi.readeia608.0.cc,lavfi.readeia608.1.line,lavfi.readeia608.1.cc -of csv</dt><dd>specifies the first two lines of video in which EIA-608 data (hexadecimal byte pairs) are identifiable by ffprobe, outputting comma separated values (CSV)</dd>
<dt>&gt;</dt><dd>redirects the standard output (the data created by ffprobe about the video)</dd>
@@ -1713,7 +1865,7 @@
<p><code>ffmpeg -f lavfi -i mandelbrot=size=1280x720:rate=25 -c:v libx264 -t 10 <i>output_file</i></code></p>
<dl>
<dt>ffmpeg</dt><dd>starts the command</dd>
<dt>-f lavfi</dt><dd>tells FFmpeg to use the <a href="http://ffmpeg.org/ffmpeg-devices.html#lavfi" target="_blank">Libavfilter</a> input virtual device</dd>
<dt>-f lavfi</dt><dd>tells FFmpeg to use the <a href="https://ffmpeg.org/ffmpeg-devices.html#lavfi" target="_blank">Libavfilter</a> input virtual device</dd>
<dt>-i mandelbrot=size=1280x720:rate=25</dt><dd>asks for the <a href="https://ffmpeg.org/ffmpeg-filters.html#mandelbrot" target="_blank">mandelbrot test filter</a> as input. Adjusting the <code>size</code> and <code>rate</code> options allows you to choose a specific frame size and framerate.</dd>
<dt>-c:v libx264</dt><dd>transcodes video from rawvideo to H.264. Set <code>-pix_fmt</code> to <code>yuv420p</code> for greater H.264 compatibility with media players.</dd>
<dt>-t 10</dt><dd>specifies recording time of 10 seconds</dd>
@@ -1731,7 +1883,7 @@
<p><code>ffmpeg -f lavfi -i smptebars=size=720x576:rate=25 -c:v prores -t 10 <i>output_file</i></code></p>
<dl>
<dt>ffmpeg</dt><dd>starts the command</dd>
<dt>-f lavfi</dt><dd>tells FFmpeg to use the <a href="http://ffmpeg.org/ffmpeg-devices.html#lavfi" target="_blank">Libavfilter</a> input virtual device</dd>
<dt>-f lavfi</dt><dd>tells FFmpeg to use the <a href="https://ffmpeg.org/ffmpeg-devices.html#lavfi" target="_blank">Libavfilter</a> input virtual device</dd>
<dt>-i smptebars=size=720x576:rate=25</dt><dd>asks for the <a href="https://ffmpeg.org/ffmpeg-filters.html#allrgb_002c-allyuv_002c-color_002c-haldclutsrc_002c-nullsrc_002c-rgbtestsrc_002c-smptebars_002c-smptehdbars_002c-testsrc_002c-testsrc2_002c-yuvtestsrc" target="_blank">smptebars test filter</a> as input. Adjusting the <code>size</code> and <code>rate</code> options allows you to choose a specific frame size and framerate.</dd>
<dt>-c:v prores</dt><dd>transcodes video from rawvideo to Apple ProRes 4:2:2.</dd>
<dt>-t 10</dt><dd>specifies recording time of 10 seconds</dd>
@@ -1749,7 +1901,7 @@
<p><code>ffmpeg -f lavfi -i testsrc=size=720x576:rate=25 -c:v v210 -t 10 <i>output_file</i></code></p>
<dl>
<dt>ffmpeg</dt><dd>starts the command</dd>
<dt>-f lavfi</dt><dd>tells FFmpeg to use the <a href="http://ffmpeg.org/ffmpeg-devices.html#lavfi" target="_blank">libavfilter</a> input virtual device</dd>
<dt>-f lavfi</dt><dd>tells FFmpeg to use the <a href="https://ffmpeg.org/ffmpeg-devices.html#lavfi" target="_blank">libavfilter</a> input virtual device</dd>
<dt>-i testsrc=size=720x576:rate=25</dt><dd>asks for the testsrc filter pattern as input. Adjusting the <code>size</code> and <code>rate</code> options allows you to choose a specific frame size and framerate. <br>
The different test patterns that can be generated are listed <a href="https://ffmpeg.org/ffmpeg-filters.html#allrgb_002c-allyuv_002c-color_002c-haldclutsrc_002c-nullsrc_002c-rgbtestsrc_002c-smptebars_002c-smptehdbars_002c-testsrc_002c-testsrc2_002c-yuvtestsrc" target="_blank">here</a>.</dd>
<dt>-c:v v210</dt><dd>transcodes video from rawvideo to 10-bit Uncompressed YC<sub>B</sub>C<sub>R</sub> 4:2:2. Alter this setting to set your desired codec.</dd>
@@ -1769,7 +1921,7 @@
<p><code>ffplay -f lavfi -i smptehdbars=size=1920x1080</code></p>
<dl>
<dt>ffplay</dt><dd>starts the command</dd>
<dt>-f lavfi</dt><dd>tells ffplay to use the <a href="http://ffmpeg.org/ffmpeg-devices.html#lavfi" target="_blank">Libavfilter</a> input virtual device</dd>
<dt>-f lavfi</dt><dd>tells ffplay to use the <a href="https://ffmpeg.org/ffmpeg-devices.html#lavfi" target="_blank">Libavfilter</a> input virtual device</dd>
<dt>-i smptehdbars=size=1920x1080</dt><dd>asks for the <a href="https://ffmpeg.org/ffmpeg-filters.html#allrgb_002c-allyuv_002c-color_002c-haldclutsrc_002c-nullsrc_002c-rgbtestsrc_002c-smptebars_002c-smptehdbars_002c-testsrc_002c-testsrc2_002c-yuvtestsrc" target="_blank">smptehdbars filter pattern</a> as input and sets the HD resolution. This generates a colour bars pattern, based on SMPTE RP 219:2002.</dd>
</dl>
<p class="link"></p>
@@ -1785,7 +1937,7 @@
<p><code>ffplay -f lavfi -i smptebars=size=640x480</code></p>
<dl>
<dt>ffplay</dt><dd>starts the command</dd>
<dt>-f lavfi</dt><dd>tells ffplay to use the <a href="http://ffmpeg.org/ffmpeg-devices.html#lavfi" target="_blank">Libavfilter</a> input virtual device</dd>
<dt>-f lavfi</dt><dd>tells ffplay to use the <a href="https://ffmpeg.org/ffmpeg-devices.html#lavfi" target="_blank">Libavfilter</a> input virtual device</dd>
<dt>-i smptebars=size=640x480</dt><dd>asks for the <a href="https://ffmpeg.org/ffmpeg-filters.html#allrgb_002c-allyuv_002c-color_002c-haldclutsrc_002c-nullsrc_002c-rgbtestsrc_002c-smptebars_002c-smptehdbars_002c-testsrc_002c-testsrc2_002c-yuvtestsrc" target="_blank">smptebars filter pattern</a> as input and sets the VGA (SD) resolution. This generates a colour bars pattern, based on SMPTE Engineering Guideline EG 1-1990.</dd>
</dl>
<p class="link"></p>
@@ -1801,7 +1953,7 @@
<p><code>ffmpeg -f lavfi -i "sine=frequency=1000:sample_rate=48000:duration=5" -c:a pcm_s16le <i>output_file</i>.wav</code></p>
<dl>
<dt>ffmpeg</dt><dd>starts the command</dd>
<dt>-f lavfi</dt><dd>tells FFmpeg to use the <a href="http://ffmpeg.org/ffmpeg-devices.html#lavfi" target="_blank">Libavfilter</a> input virtual device</dd>
<dt>-f lavfi</dt><dd>tells FFmpeg to use the <a href="https://ffmpeg.org/ffmpeg-devices.html#lavfi" target="_blank">Libavfilter</a> input virtual device</dd>
<dt>-i "sine=frequency=1000:sample_rate=48000:duration=5"</dt><dd>Sets the signal to 1000 Hz, sampling at 48 kHz, and for 5 seconds</dd>
<dt>-c:a pcm_s16le</dt><dd>encodes the audio codec in <code>pcm_s16le</code> (the default encoding for wav files). <code>pcm</code> represents pulse-code modulation format (raw bytes), <code>16</code> means 16 bits per sample, and <code>le</code> means "little endian"</dd>
<dt><i>output_file</i>.wav</dt><dd>path, name and extension of the output file</dd>
@@ -1819,14 +1971,14 @@
<p><code>ffmpeg -f lavfi -i smptebars=size=720x576:rate=25 -f lavfi -i "sine=frequency=1000:sample_rate=48000" -c:a pcm_s16le -t 10 -c:v ffv1 <i>output_file</i></code></p>
<dl>
<dt>ffmpeg</dt><dd>starts the command</dd>
<dt>-f lavfi</dt><dd>tells FFmpeg to use the <a href="http://ffmpeg.org/ffmpeg-devices.html#lavfi" target="_blank">libavfilter</a> input virtual device</dd>
<dt>-f lavfi</dt><dd>tells FFmpeg to use the <a href="https://ffmpeg.org/ffmpeg-devices.html#lavfi" target="_blank">libavfilter</a> input virtual device</dd>
<dt>-i smptebars=size=720x576:rate=25</dt><dd>asks for the <a href="https://ffmpeg.org/ffmpeg-filters.html#allrgb_002c-allyuv_002c-color_002c-haldclutsrc_002c-nullsrc_002c-rgbtestsrc_002c-smptebars_002c-smptehdbars_002c-testsrc_002c-testsrc2_002c-yuvtestsrc" target="_blank">smptebars test filter</a> as input. Adjusting the <code>size</code> and <code>rate</code> options allows you to choose a specific frame size and framerate.</dd>
<dt>-f lavfi</dt><dd>use libavfilter again, but now for audio</dd>
<dt>-i "sine=frequency=1000:sample_rate=48000"</dt><dd>Sets the signal to 1000 Hz, sampling at 48 kHz.</dd>
<dt>-c:a pcm_s16le</dt><dd>encodes the audio codec in <code>pcm_s16le</code> (the default encoding for wav files). <code>pcm</code> represents pulse-code modulation format (raw bytes), <code>16</code> means 16 bits per sample, and <code>le</code> means "little endian"</dd>
<dt>-t 10</dt><dd>specifies recording time of 10 seconds</dd>
<dt>-c:v ffv1</dt><dd>Encodes to <a href="https://en.wikipedia.org/wiki/FFV1" target="_blank">FFV1</a>. Alter this setting to set your desired codec.</dd>
<dt><i>output_file</i>.wav</dt><dd>path, name and extension of the output file</dd>
<dt><i>output_file</i></dt><dd>path, name and extension of the output file</dd>
</dl>
<p class="link"></p>
</div>
@@ -1842,7 +1994,7 @@
<dl>
<dt>ffmpeg</dt><dd>starts the command</dd>
<dt>-i <i>input_file</i></dt><dd>takes in a normal file</dd>
<dt>-bsf noise=1</dt><dd>sets bitstream filters for all to 'noise'. Filters can be set on specific filters using syntax such as <code>-bsf:v</code> for video, <code>-bsf:a</code> for audio, etc. The <a href="https://www.ffmpeg.org/ffmpeg-bitstream-filters.html#noise" target="_blank">noise filter</a> intentionally damages the contents of packets without damaging the container. This sets the noise level to 1 but it could be left blank or any number above 0.</dd>
<dt>-bsf noise=1</dt><dd>sets bitstream filters for all streams to 'noise'. Filters can be set on specific streams using syntax such as <code>-bsf:v</code> for video, <code>-bsf:a</code> for audio, etc. (see the sketch below). The <a href="https://ffmpeg.org/ffmpeg-bitstream-filters.html#noise" target="_blank">noise filter</a> intentionally damages the contents of packets without damaging the container. This sets the noise level to 1, but it could be left blank or set to any number above 0.</dd>
<dt>-c copy</dt><dd>use stream copy mode to re-mux instead of re-encode</dd>
<dt><i>output_file</i></dt><dd>path, name and extension of the output file</dd>
</dl>
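<p>For example, a sketch that damages only the video packets, leaving the audio untouched:</p>
<p><code>ffmpeg -i <i>input_file</i> -bsf:v noise=1 -c copy <i>output_file</i></code></p>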
@@ -1850,6 +2002,31 @@
</div>
<!-- ends Broken File -->
<!-- Game of Life -->
<label class="recipe" for="game_of_life">Conway's Game of Life</label>
<input type="checkbox" id="game_of_life">
<div class="hiding">
<h3>Conway's Game of Life</h3>
<p>Simulates <a href="https://en.wikipedia.org/wiki/Conway%27s_Game_of_Life">Conway's Game of Life</a></p>
<p><code>ffplay -f lavfi life=s=300x200:mold=10:r=60:ratio=0.1:death_color=#C83232:life_color=#00ff00,scale=1200:800</code></p>
<dl>
<dt>ffplay</dt><dd>starts the command</dd>
<dt>-f lavfi</dt><dd>tells ffplay to use the <a href="https://ffmpeg.org/ffmpeg-devices.html#lavfi" target="_blank">Libavfilter</a> input virtual device</dd>
<dt>life=s=300x200</dt><dd>use the life filter and set the size of the video to 300x200</dd>
<dt>:</dt><dd>indicates there's another parameter coming</dd>
<dt>mold=10:r=60:ratio=0.1</dt><dd>sets up the rules of the game: cell mold speed, video rate, and random fill ratio</dd>
<dt>:</dt><dd>indicates there's another parameter coming</dd>
<dt>death_color=#C83232:life_color=#00ff00</dt><dd>specifies color for cell death and cell life; mold_color can also be set</dd>
<dt>,</dt><dd>comma signifies closing of video source assertion and ready for filter assertion</dd>
<dt>scale=1200:800</dt><dd>scales the output to 1200 pixels wide and 800 pixels high</dd>
</dl>
<img src="img/life.gif" alt="GIF of above command">
<p>To save a portion of the stream instead of playing it back infinitely, use the following command:</p>
<p><code>ffmpeg -f lavfi -i life=s=300x200:mold=10:r=60:ratio=0.1:death_color=#C83232:life_color=#00ff00,scale=1200:800 -t 5 <i>output_file</i></code></p>
<p class="link"></p>
</div>
<!-- ends Game of Life -->
</div>
<div class="well">
<h2 id="ocr">Use OCR</h2>
@@ -1889,7 +2066,7 @@
<dt>ffprobe</dt><dd>starts the command</dd>
<dt>-show_entries</dt><dd>sets a list of entries to show</dd>
<dt>frame_tags=lavfi.ocr.text</dt><dd>shows the <i>lavfi.ocr.text</i> tag in the frame section of the video</dd>
<dt>-f lavfi</dt><dd>tells ffprobe to use the <a href="http://ffmpeg.org/ffmpeg-devices.html#lavfi" target="_blank">Libavfilter input virtual device</a></dd>
<dt>-f lavfi</dt><dd>tells ffprobe to use the <a href="https://ffmpeg.org/ffmpeg-devices.html#lavfi" target="_blank">Libavfilter input virtual device</a></dd>
<dt>-i "movie=<i>input_file</i>,ocr"</dt><dd>declares 'movie' as <i>input_file</i> and passes in the 'ocr' command</dd>
</dl>
<p class="link"></p>
@@ -1966,20 +2143,44 @@
<input type="checkbox" id="split_audio_video">
<div class="hiding">
<h3>Split audio and video tracks</h3>
<p><code>ffmpeg -i <i>input_file</i> -map <i>0:v:0 video_output_file</i> -map <i>0:a:0 audio_output_file</i></code></p>
<p><code>ffmpeg -i <i>input_file</i> -map 0:v:0 <i>video_output_file</i> -map 0:a:0 <i>audio_output_file</i></code></p>
<p>This command splits the original input file into a video stream and an audio stream. The <code>-map</code> option identifies which streams are mapped to which file. To ensure that you're mapping the right streams to the right files, run ffprobe before writing the script to identify which streams are desired (see the sketch below).</p>
<dl>
<dt>ffmpeg</dt><dd>starts the command</dd>
<dt>-i <i>input_file</i></dt><dd>path, name and extension of the input file</dd>
<dt>-map <i>0:v:0</i></dt><dd>grabs the first video stream and maps it into:</dd>
<dt>-map 0:v:0</dt><dd>grabs the first video stream and maps it into:</dd>
<dt><i>video_output_file</i></dt><dd>path, name and extension of the video output file</dd>
<dt>-map <i>0:a:0</i></dt><dd>grabs the first audio stream and maps it into:</dd>
<dt>-map 0:a:0</dt><dd>grabs the first audio stream and maps it into:</dd>
<dt><i>audio_output_file</i></dt><dd>path, name and extension of the audio output file</dd>
</dl>
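<p>A minimal sketch for listing stream indexes and types before mapping:</p>
<p><code>ffprobe -v error -show_entries stream=index,codec_type -of csv=p=0 <i>input_file</i></code></p>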
<p class="link"></p>
</div>
<!-- ends Split audio and video tracks -->
<!-- Merge audio and video tracks -->
<label class="recipe" for="merge_audio_video">Merge audio and video tracks</label>
<input type="checkbox" id="merge_audio_video">
<div class="hiding">
<h3>Merge audio and video tracks</h3>
<p><code>ffmpeg -i <i>video_file</i> -i <i>audio_file</i> -map 0:v -map 1:a -c copy <i>output_file</i></code></p>
<p>This command takes a video file and an audio file as inputs, and creates an output file that combines the video stream in the first file with the audio stream in the second file.</p>
<dl>
<dt>ffmpeg</dt><dd>starts the command</dd>
<dt>-i <i>video_file</i></dt><dd>path, name and extension of the first input file (the video file)</dd>
<dt>-i <i>audio_file</i></dt><dd>path, name and extension of the second input file (the audio file)</dd>
<dt>-map <i>0:v</i></dt><dd>selects the video streams from the first input file</dd>
<dt>-map <i>1:a</i></dt><dd>selects the audio streams from the second input file</dd>
<dt>-c copy</dt><dd>copies streams without re-encoding</dd>
<dt><i>output_file</i></dt><dd>path, name and extension of the output file</dd>
</dl>
<p><b>Note:</b> in the example above, the video input file is given prior to the audio input file. However, input files can be added in any order, as long as they are indexed correctly when stream mapping with <code>-map</code>. See the entry on <a href="#stream-mapping">stream mapping</a>.</p>
<h4>Variation:</h4>
<p>Include the audio tracks from both input files with the following command:</p>
<p><code>ffmpeg -i <i>video_file</i> -i <i>audio_file</i> -map 0:v -map 0:a -map 1:a -c copy <i>output_file</i></code></p>
<p class="link"></p>
</div>
<!-- ends Merge audio and video tracks -->
<!-- Create ISO -->
<label class="recipe" for="create_iso">Create ISO files for DVD access</label>
<input type="checkbox" id="create_iso">
@@ -2008,7 +2209,7 @@
<p>This ffprobe command prints a CSV correlating timestamps and their YDIF values, useful for determining cuts.</p>
<dl>
<dt>ffprobe</dt><dd>starts the command</dd>
<dt>-f lavfi</dt><dd>uses the <a href="http://ffmpeg.org/ffmpeg-devices.html#lavfi" target="_blank">Libavfilter input virtual device</a> as chosen format</dd>
<dt>-f lavfi</dt><dd>uses the <a href="https://ffmpeg.org/ffmpeg-devices.html#lavfi" target="_blank">Libavfilter input virtual device</a> as chosen format</dd>
<dt>-i movie=<i>input file</i></dt><dd>path, name and extension of the input video file</dd>
<dt>,</dt><dd>comma signifies closing of video source assertion and ready for filter assertion</dd>
<dt>signalstats</dt><dd>tells ffprobe to use the signalstats command</dd>
@@ -2060,26 +2261,26 @@
<li>This is in daily use to live-stream a real-world TV show. No errors for nearly 4 years. Some parameters were found by trial-and-error or empirical testing, so suggestions and questions are welcome.</li>
</ol>
<dl>
<dt>ffmpeg </dt><dd>starts the command</dd>
<dt>-re </dt><dd>Read input at native framerate</dd>
<dt>-i input.mov </dt><dd>The input file. Can also be a <code>-</code> to use STDIN if you pipe in from webcam or SDI.</dd>
<dt>-map 0 </dt><dd>map ALL streams from input file to output</dd>
<dt>-flags +global_header </dt><dd>Don't place extra data in every keyframe</dd>
<dt>-vf scale="1280:-1" </dt><dd>Scale to 1280 width, maintain aspect ratio.</dd>
<dt>-pix_fmt yuv420p </dt><dd>convert to 4:2:0 chroma subsampling scheme</dd>
<dt>-level 3.1 </dt><dd>H264 Level (defines some thresholds for bitrate)</dd>
<dt>-vsync passthrough </dt><dd>Each frame is passed with its timestamp from the demuxer to the muxer.</dd>
<dt>-crf 26 </dt><dd>Constant rate factor - basically the quality</dd>
<dt>-g 50 </dt><dd>GOP size.</dd>
<dt>-bufsize 3500k </dt><dd>Ratecontrol buffer size (~ maxrate x2)</dd>
<dt>-maxrate 1800k </dt><dd>Maximum bit rate</dd>
<dt>-c:v libx264 </dt><dd>encode output video stream as H.264</dd>
<dt>-c:a aac </dt><dd>encode output audio stream as AAC</dd>
<dt>-b:a 128000 </dt><dd>The audio bitrate</dd>
<dt>-r:a 44100 </dt><dd>The audio samplerate</dd>
<dt>-ac 2 </dt><dd>Two audio channels</dd>
<dt>-t ${STREAMDURATION} </dt><dd>Time (in seconds) after which the stream should automatically end.</dd>
<dt>-f tee </dt><dd>Use multiple outputs. Outputs defined below.</dd>
<dt>ffmpeg</dt><dd>starts the command</dd>
<dt>-re</dt><dd>Read input at native framerate</dd>
<dt>-i input.mov</dt><dd>The input file. Can also be a <code>-</code> to use STDIN if you pipe in from webcam or SDI.</dd>
<dt>-map 0</dt><dd>map ALL streams from input file to output</dd>
<dt>-flags +global_header</dt><dd>Don't place extra data in every keyframe</dd>
<dt>-vf scale="1280:-1"</dt><dd>Scale to 1280 width, maintain aspect ratio.</dd>
<dt>-pix_fmt yuv420p</dt><dd>convert to 4:2:0 chroma subsampling scheme</dd>
<dt>-level 3.1</dt><dd>H.264 level (defines some thresholds for bitrate)</dd>
<dt>-vsync passthrough</dt><dd>Each frame is passed with its timestamp from the demuxer to the muxer.</dd>
<dt>-crf 26</dt><dd>Constant rate factor - basically the quality</dd>
<dt>-g 50</dt><dd>GOP size.</dd>
<dt>-bufsize 3500k</dt><dd>Ratecontrol buffer size (~ maxrate x2)</dd>
<dt>-maxrate 1800k</dt><dd>Maximum bit rate</dd>
<dt>-c:v libx264</dt><dd>encode output video stream as H.264</dd>
<dt>-c:a aac</dt><dd>encode output audio stream as AAC</dd>
<dt>-b:a 128000</dt><dd>The audio bitrate</dd>
<dt>-r:a 44100</dt><dd>The audio samplerate</dd>
<dt>-ac 2</dt><dd>Two audio channels</dd>
<dt>-t ${STREAMDURATION}</dt><dd>Time (in seconds) after which the stream should automatically end.</dd>
<dt>-f tee</dt><dd>Use multiple outputs. Outputs defined below.</dd>
<dt>"[movflags=+faststart]target-file.mp4|[f=flv]rtmp://stream-url/stream-id"</dt><dd>The outputs, separated by a pipe (|). The first is the local file, the second is the live stream. Options for each target are given in square brackets before the target.</dd>
</dl>
<p class="link"></p>
@@ -2110,6 +2311,86 @@
</div>
<!-- ends View Subprogram info -->
</div>
<div class="well">
<h2 id="similar-tools">Similar tools: tips &amp; tricks 🎩🐰</h2>
<div class="well">
<p>This section introduces and explains the usage of some additional command line tools similar to FFmpeg for use in digital preservation workflows (and beyond!).</p>
</div>
</div>
<div class="well">
<h2 id="imagemagick">ImageMagick</h2>
<!-- About ImageMagick -->
<label class="recipe" for="im-basics">About ImageMagick</label>
<input type="checkbox" id="im-basics">
<div class="hiding">
<h3>About ImageMagick</h3>
<p>ImageMagick is a free and open-source software suite for displaying, converting, and editing raster image and vector image files.</p>
<p>Its official website can be found <a href="https://www.imagemagick.org/script/index.php">here</a>.</p>
<p>Another great resource with lots of supplemental explanations of filters is available at <a href="http://www.fmwconcepts.com/imagemagick/index.php">Fred's ImageMagick Scripts</a>.</p>
<p>Unlike many other command line tools, ImageMagick isn't summoned by calling its name. Rather, ImageMagick installs links to several more specific commands: <code>convert</code>, <code>montage</code>, and <code>mogrify</code>, to name a few.</p>
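<p>To check that these commands are installed and on your path, a quick sketch:</p>
<p><code>convert -version</code></p>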
<p class="link"></p>
</div>
<!-- End About ImageMagick -->
<!-- Resize to width -->
<label class="recipe" for="im_resize">Resizes image to specific pixel width</label>
<input type="checkbox" id="im_resize">
<div class="hiding">
<h3>Resize to width</h3>
<p><code>convert <i>input_file.ext</i> -resize 750 <i>output_file.ext</i></code></p>
<p>This command will also convert the file format if the output file has a different extension from the input (see the variation below).</p>
<dl>
<dt>convert</dt><dd>starts the command</dd>
<dt><i>input_file.ext</i></dt><dd>path and name of the input file</dd>
<dt>-resize 750</dt><dd>resizes the image to 750 pixels wide, retaining aspect ratio</dd>
<dt><i>output_file.ext</i></dt><dd>path and name of the output file</dd>
</dl>
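<p>As a variation, a sketch that converts a PNG to JPEG while resizing to a height of 750 pixels (the <code>x</code> prefix makes the value apply to the height):</p>
<p><code>convert <i>input_file</i>.png -resize x750 <i>output_file</i>.jpg</code></p>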
<p class="link"></p>
</div>
<!-- ends Resize to width -->
<!-- Create thumbnails -->
<label class="recipe" for="im_thumbs">Create thumbnails of images</label>
<input type="checkbox" id="im_thumbs">
<div class="hiding">
<h3>Create thumbnails</h3>
<p>Creates thumbnails for all JPEG files in a folder and saves them in a subfolder called "thumbs".</p>
<p><code>mogrify -resize 80x80 -format jpg -quality 75 -path thumbs *.jpg</code></p>
<dl>
<dt>mogrify</dt><dd>starts the command</dd>
<dt>-resize 80x80</dt><dd>resizes copies of original images to 80x80 pixels</dd>
<dt>-format jpg</dt><dd>reformats original images to jpg</dd>
<dt>-quality 75</dt><dd>sets quality to 75 (out of 100), adding light compression to smaller files</dd>
<dt>-path thumbs</dt><dd>specifies where to save the thumbnails: a folder called "thumbs" within the active folder.<br>
Note: You will have to make this folder if it doesn't already exist (see the sketch below).</dd>
<dt><i>*.jpg</i></dt><dd>The asterisk acts as a "wildcard", applying the command to every JPEG file in the directory.</dd>
</dl>
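<p>A sketch that creates the "thumbs" folder first and then runs the command above:</p>
<p><code>mkdir -p thumbs &amp;&amp; mogrify -resize 80x80 -format jpg -quality 75 -path thumbs *.jpg</code></p>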
<p class="link"></p>
</div>
<!-- ends Create thumbnails -->
<!-- Create grid of images -->
<label class="recipe" for="im_grid">Creates grid of images from text file</label>
<input type="checkbox" id="im_grid">
<div class="hiding">
<h3>Create grid of images</h3>
<p><code>montage @<i>list.txt</i> -tile 6x12 -geometry +0+0 <i>output_grid.jpg</i></code></p>
<dl>
<dt>montage</dt><dd>starts the command</dd>
<dt>@list.txt</dt><dd>path and name of a text file containing a list of filenames, one per line (see the sketch below)</dd>
<dt>-tile 6x12</dt><dd>specifies the dimensions of the proposed grid (6 images across, 12 images down)</dd>
<dt>-geometry +0+0</dt><dd>specifies no spacing around the tiles; they will be flush against each other</dd>
<dt><i>output_grid.jpg</i></dt><dd>path and name of the output file</dd>
</dl>
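<p>One hedged way to generate such a list from the JPEG files in the current folder:</p>
<p><code>ls *.jpg &gt; list.txt</code></p>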
<p class="link"></p>
</div>
<!-- ends Create grid of images -->
</div>
</div><!-- ends "content" -->
<!-- sample example -->
@@ -2129,9 +2410,9 @@ Change the above data-target field, the hover-over description, the button text,
</div> -->
<!-- ends sample example -->
<footer class="footer">
<p>Made with ♥ at <a href="http://wiki.curatecamp.org/index.php/Association_of_Moving_Image_Archivists_%26_Digital_Library_Federation_Hack_Day_2015" target="_blank">AMIA #AVhack15</a>! Contribute to the project via <a href="https://github.com/amiaopensource/ffmprovisr">our GitHub page</a>!</p>
</footer>
<footer class="footer">
<p>Made with ♥ at <a href="https://wiki.curatecamp.org/index.php/Association_of_Moving_Image_Archivists_%26_Digital_Library_Federation_Hack_Day_2015" target="_blank">AMIA #AVhack15</a>! Contribute to the project via <a href="https://github.com/amiaopensource/ffmprovisr">our GitHub page</a>!</p>
</footer>
</div><!-- ends "grid" -->
</body>
</html>


@@ -3,10 +3,11 @@ $(document).ready(function() {
// open recipe window if a hash is found in URL
if(window.location.hash) {
id = window.location.hash
console.log(id.substring(1))
document.getElementById(id.substring(1)).checked = true;
$('html, body').animate({ scrollTop: $(id).offset().top}, 1000);
$(id).closest('div').find('.link').empty();
$(id).closest('div').find('.link').append("<small>Link to this command: <a href="+window.location.href+">"+window.location.href+"</a></small>");
$(id).closest('div').find('.link').append("<small>Link to this command: <a href='https://amiaopensource.github.io/ffmprovisr/index.html"+window.location.hash+"'>https://amiaopensource.github.io/ffmprovisr/index.html"+window.location.hash+"</a></small>");
}
// add hash URL when recipe is opened
@@ -14,7 +15,7 @@ $(document).ready(function() {
id = $(this).attr("for");
window.location.hash = ('#' + id)
$('#' + id).closest('div').find('.link').empty();
$('#' + id).closest('div').find('.link').append("<small>Link to this command: <a href="+window.location.href+">"+window.location.href+"</a></small>");
$('#' + id).closest('div').find('.link').append("<small>Link to this command: <a href='https://amiaopensource.github.io/ffmprovisr/index.html"+window.location.hash+"'>https://amiaopensource.github.io/ffmprovisr/index.html"+window.location.hash+"</a></small>");
});
});


@@ -44,7 +44,7 @@ You can read our contributor code of conduct [here](https://github.com/amiaopens
## Maintainers
[Ashley Blewer](https://github.com/ablwr), [Katherine Frances Nagels](https://github.com/kfrn), [Kieran O'Leary](https://github.com/kieranjol) and [Reto Kromer](https://github.com/retokromer)
[Ashley Blewer](https://github.com/ablwr), [Katherine Frances Nagels](https://github.com/kfrn), [Kieran O'Leary](https://github.com/kieranjol), [Reto Kromer](https://github.com/retokromer) and [Andrew Weaver](https://github.com/privatezero)
## Contributors
* Gathered using [octohatrack](https://github.com/LABHR/octohatrack)
@@ -104,4 +104,6 @@ All Contributors: 22
## License
<a rel="license" href="http://creativecommons.org/licenses/by/4.0/"><img alt="Creative Commons License" style="border-width:0" src="https://i.creativecommons.org/l/by/4.0/80x15.png" /></a><br />This <span xmlns:dct="http://purl.org/dc/terms/" href="http://purl.org/dc/dcmitype/InteractiveResource" rel="dct:type">work</span> by <a xmlns:cc="http://creativecommons.org/ns#" href="http://amiaopensource.github.io/ffmprovisr/" property="cc:attributionName" rel="cc:attributionURL">ffmprovisr</a> is licensed under a <a rel="license" href="http://creativecommons.org/licenses/by/4.0/">Creative Commons Attribution 4.0 International License</a>.<br />Based on a work at <a xmlns:dct="http://purl.org/dc/terms/" href="https://github.com/amiaopensource/ffmprovisr" rel="dct:source">https://github.com/amiaopensource/ffmprovisr</a>.
<a rel="license" href="http://creativecommons.org/licenses/by/4.0/"><img alt="Creative Commons License" style="border-width:0" src="https://i.creativecommons.org/l/by/4.0/80x15.png"></a><br>
This <span xmlns:dct="http://purl.org/dc/terms/" href="http://purl.org/dc/dcmitype/InteractiveResource" rel="dct:type">work</span> by <a xmlns:cc="http://creativecommons.org/ns#" href="http://amiaopensource.github.io/ffmprovisr/" property="cc:attributionName" rel="cc:attributionURL">ffmprovisr</a> is licensed under a <a rel="license" href="http://creativecommons.org/licenses/by/4.0/">Creative Commons Attribution 4.0 International License</a>.<br>
Based on a work at <a xmlns:dct="http://purl.org/dc/terms/" href="https://github.com/amiaopensource/ffmprovisr" rel="dct:source">https://github.com/amiaopensource/ffmprovisr</a>.


@@ -1,12 +1,12 @@
#!/usr/bin/env bash
SCRIPT=$(basename "${0}")
VERSION='2017-07-08'
AUTHOR='ffmprovisr'
RED='\033[1;31m'
BLUE='\033[1;34m'
NC='\033[0m'
VERSION="2018-02-10"
AUTHOR="ffmprovisr"
RED="\033[1;31m"
BLUE="\033[1;34m"
NC="\033[0m"
if [[ "${OSTYPE}" = "cygwin" ]] || [ ! $(which diff) ]; then
if [[ "${OSTYPE}" = "cygwin" ]] || [[ ! "$(which diff)" ]]; then
echo -e "${RED}Error: 'diff' is not installed by default. Please install 'diffutils' from Cygwin.${NC}"
exit 1
fi
@@ -67,9 +67,8 @@ old_file=$(grep -v '^#' "${input_hash}")
tmp_file=$(grep -v '^#' "${md5_tmp}")
if [[ "${old_file}" = "${tmp_file}" ]]; then
echo -e "${BLUE}'$(basename "${input_file}")' matches '$(basename "${input_hash}")'${NC}"
rm "${md5_tmp}"
else
echo -e "${RED}The following differences were detected between '$(basename "${input_file}")' and '$(basename "${input_hash}")':${NC}"
diff "${input_hash}" "${md5_tmp}"
rm "${md5_tmp}"
fi
rm "${md5_tmp}"


@@ -1,12 +1,12 @@
#!/usr/bin/env bash
SCRIPT=$(basename "${0}")
VERSION='2017-07-08'
AUTHOR='ffmprovisr'
RED='\033[1;31m'
BLUE='\033[1;34m'
NC='\033[0m'
VERSION="2018-02-10"
AUTHOR="ffmprovisr"
RED="\033[1;31m"
BLUE="\033[1;34m"
NC="\033[0m"
if [[ "${OSTYPE}" = "cygwin" ]] || [ ! $(which diff) ]; then
if [[ "${OSTYPE}" = "cygwin" ]] || [[ ! "$(which diff)" ]]; then
echo -e "${RED}Error: 'diff' is not installed by default. Please install 'diffutils' from Cygwin.${NC}"
exit 1
fi
@@ -64,9 +64,8 @@ old_file=$(grep -v '^#' "${input_hash}")
tmp_file=$(grep -v '^#' "${md5_tmp}")
if [[ "${old_file}" = "${tmp_file}" ]]; then
echo -e "${BLUE}'$(basename "${input_file}")' matches '$(basename "${input_hash}")'${NC}"
rm "${md5_tmp}"
else
echo -e "${RED}The following differences were detected between '$(basename "${input_file}")' and '$(basename "${input_hash}")':${NC}"
diff "${input_hash}" "${md5_tmp}"
rm "${md5_tmp}"
fi
rm "${md5_tmp}"


@@ -10,7 +10,7 @@ if [[ "$(uname -s)" = "Darwin" ]] ; then
else
ffmprovisr_path=$(find /usr/local/Cellar/ffmprovisr -iname 'index.html' | sort -M | tail -n1)
fi
if [ -n "${default_browser}" ] ; then
if [[ -n "${default_browser}" ]] ; then
open -b "${default_browser}" "${ffmprovisr_path}"
else
open "${ffmprovisr_path}"