Mirror of https://github.com/amiaopensource/ffmprovisr.git (synced 2024-12-26 19:58:20 +01:00)

Commit d34bcd90a0: Add command for Windows batch scripting; CSS for code block
@@ -1,19 +1,19 @@
 #!/usr/bin/env bash
 SCRIPT=$(basename "${0}")
-VERSION='2016-10-09'
+VERSION='2016-12-31'
 AUTHOR='ffmprovisr'
 RED='\033[1;31m'
 BLUE='\033[1;34m'
 NC='\033[0m'

-if [[ $OSTYPE = "cygwin" ]] || [ ! $(which diff) ]; then
-echo -e "${RED}ERROR:${NC} 'diff' is not installed by default. Please install 'diffutils' from Cygwin."
+if [[ ${OSTYPE} = "cygwin" ]] || [ ! $(which diff) ]; then
+echo -e "${RED}Error: 'diff' is not installed by default. Please install 'diffutils' from Cygwin.${NC}"
 exit 1
 fi

 _output_prompt(){
 cat <<EOF
-Usage: ${SCRIPT} [-h | -i <av_file> -m <md5_file>]
+Usage: ${SCRIPT} [-h] | [ -i <av_file> -m <md5_file> ]
 EOF
 exit 1
 }
@@ -41,36 +41,34 @@ unset input_file
 unset input_hash

 while getopts ":hi:m:" opt; do
 case "${opt}" in
 h) _output_help ;;
 i) input_file=$OPTARG ;;
 m) input_hash=$OPTARG ;;
-:) echo -e "${RED}Error:${NC} option -${OPTARG} requires an argument" ; _output_prompt ;;
-*) echo -e "${RED}Error:${NC }bad option -${OPTARG}" ; _output_prompt ;;
+:) echo -e "${RED}Error: option -${OPTARG} requires an argument${NC}" ; _output_prompt ;;
+*) echo -e "${RED}Error: bad option -${OPTARG}${NC}" ; _output_prompt ;;
 esac
 done

 [[ -z "${#}" || ! ${input_file} || ! ${input_hash} ]] && _output_prompt
 echo -e "${BLUE}Please wait...${NC}"
 unset md5_tmp
 if [[ $OSTYPE = "cygwin" ]]; then
 md5_tmp=""${USERPROFILE}/$(basename ${input_hash}).tmp""
 else
 md5_tmp="${HOME}/$(basename ${input_hash}).tmp"
 fi
 $(ffmpeg -i ${input_file} -loglevel 0 -f framemd5 -an ${md5_tmp})
-[[ ! -f ${md5_tmp} ]] && { echo -e "${RED}Error:${NC} '${input_file}' is not a valid audio-visual file."; _output_prompt; }
+[[ ! -f ${md5_tmp} ]] && { echo -e "${RED}Error: '${input_file}' is not a valid audio-visual file.${NC}" ; _output_prompt ; }
 unset old_file
 unset tmp_file
 old_file=$(grep -v '^#' ${input_hash})
 tmp_file=$(grep -v '^#' ${md5_tmp})
 if [[ "${old_file}" = "${tmp_file}" ]]; then
-echo -e "${BLUE}OK${NC} '$(basename ${input_file})' matches '$(basename ${input_hash})'."
+echo -e "${BLUE}'$(basename ${input_file})' matches '$(basename ${input_hash})'${NC}"
 rm "${md5_tmp}"
-exit 0
 else
-echo -e "${RED}ERROR:${NC} The following differences were detected between '$(basename ${input_file})' and '$(basename ${input_hash})':"
+echo -e "${RED}The following differences were detected between '$(basename ${input_file})' and '$(basename ${input_hash})':${NC}"
 diff "${input_hash}" "${md5_tmp}"
 rm "${md5_tmp}"
-exit 1
 fi
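The core of the fixity check this script performs can also be reproduced by hand; a minimal sketch, assuming a hypothetical video "input.mov" and a previously stored hash file "input.framemd5":

    # regenerate frame-level MD5s for the video stream only, then compare,
    # ignoring the commented header lines that the framemd5 muxer writes
    ffmpeg -i input.mov -loglevel 0 -f framemd5 -an input.framemd5.tmp
    diff <(grep -v '^#' input.framemd5) <(grep -v '^#' input.framemd5.tmp)

If the two listings are identical, the file still matches its stored checksums.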
css/css.css | 14
@@ -2,6 +2,10 @@ h1,h2,h3,h4,h5,h6 {
 font-family: 'Montserrat', sans-serif;
 }

+code {
+word-break: break-all;
+}
+
 dd {
 padding-left:24px;
 }
@@ -10,6 +14,16 @@ dt {
 font-family: Menlo,Monaco,Consolas,"Courier New",monospace;
 }

+img {
+display: block;
+margin-left: auto;
+margin-right: auto;
+}
+
+.sample-image {
+margin-bottom: 18px;
+}
+
 h1 {
 letter-spacing:8px;
 font-size:86px;
img/brng.gif | BIN (new file, 7.6 MiB)
img/showspectrum.gif | BIN (new file, 4.2 MiB)
index.html | 300
@@ -29,7 +29,10 @@
 <p>FFmpeg is a powerful tool for manipulating audiovisual files. Unfortunately, it also has a steep learning curve, especially for users unfamiliar with a command line interface. This app helps users through the command generation process so that more people can reap the benefits of FFmpeg.</p>
 <p>Each button displays helpful information about how to perform a wide variety of tasks using FFmpeg. To use this site, click on the task you would like to perform. A new window will open up with a sample command and a description of how that command works. You can copy this command and understand how the command works with a breakdown of each of the flags.</p>
 <p>For FFmpeg basics, check out the program’s <a href="https://www.ffmpeg.org/" target="_blank">official website</a>.</p>
-<p>For Bash and command line basics, try the <a href="http://cli.learncodethehardway.org/book/" target="_blank">Command Line Crash Course</a>.</p>
+<p>For Bash and command line basics, try the <a href="https://learnpythonthehardway.org/book/appendixa.html" target="_blank">Command Line Crash Course</a>. For a little more context presented in an ffmprovisr style, try <a href="http://explainshell.com/" target="_blank">explainshell.com</a>!</p>
+<h5>Sister projects</h5>
+<p><a href="http://dd388.github.io/crals/">Script Ahoy</a>: Community Resource for Archivists and Librarians Scripting</p>
+<p><a href="https://datapraxis.github.io/sourcecaster/">The Sourcecaster</a>: an app that helps you use the command line to work through common challenges that come up when working with digital primary sources.</p>
 </div>

 <div class="well col-md-8 col-md-offset-0">
@@ -46,7 +49,7 @@
 <div class="modal-content">
 <div class="well">
 <h3>WAV to MP3</h3>
-<p><code>ffmpeg -i <i>input_file</i>.wav -write_id3v1 1 -id3v2_version 3 -dither_method modified_e_weighted -out_sample_rate 48k -b:a 320k <i>output_file</i>.mp3</code></p>
+<p><code>ffmpeg -i <i>input_file</i>.wav -write_id3v1 1 -id3v2_version 3 -dither_method modified_e_weighted -out_sample_rate 48k -qscale:a 1 <i>output_file</i>.mp3</code></p>
 <p>This will convert your WAV files to MP3s.</p>
 <dl>
 <dt>ffmpeg</dt><dd>starts the command</dd>
@@ -55,7 +58,7 @@
 <dt>-id3v2_version <i>3</i></dt><dd>Write ID3v2 tag. This will add metadata to a newer MP3 format, assuming you’ve embedded metadata into the WAV file.</dd>
 <dt>-dither_method <i>modified_e_weighted</i></dt><dd>Dither makes sure you don’t unnecessarily truncate the dynamic range of your audio.</dd>
 <dt>-out_sample_rate <i>48k</i></dt><dd>Sets the audio sampling frequency to 48 kHz. This can be omitted to use the same sampling frequency as the input.</dd>
-<dt>-b:a <i>320k</i></dt><dd>This sets the bit rate at the highest rate the MP3 format allows. Reduce this to 160k for mono files.</dd>
+<dt>-qscale:a <i>1</i></dt><dd>This sets the encoder to use a constant quality with a variable bitrate of between 190-250kbit/s. If you would prefer to use a constant bitrate, this could be replaced with <code>-b:a 320k</code> to set to the maximum bitrate allowed by the MP3 format. For more detailed discussion on variable vs constant bitrates see <a href="https://trac.ffmpeg.org/wiki/Encode/MP3" target="_blank">here.</a></dd>
 <dt><i>output_file</i></dt><dd>path and name of the output file</dd>
 </dl>
 <p class="link"></p>
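Filled in with hypothetical file names, the WAV-to-MP3 command described above reads:

    ffmpeg -i master_audio.wav -write_id3v1 1 -id3v2_version 3 -dither_method modified_e_weighted -out_sample_rate 48k -qscale:a 1 access_copy.mp3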
@@ -87,10 +90,15 @@
 </ul></dd>
 <dt>-vf yadif</dt><dd>Runs a deinterlacing video filter (yet another deinterlacing filter) on the new file</dd>
 <dt>-c:a pcm_s16le</dt><dd>Tells ffmpeg to encode the audio stream in 16-bit linear PCM</dd>
-<dt><i>output_file</i></dt><dd>path, name and extension of the output file<br/>
+<dt><i>output_file</i></dt><dd>path, name and extension of the output file<br>
 The extension for the QuickTime container is <code>.mov</code>.</dd>
 </dl>
-<p class="link"></p>
+<p>FFmpeg comes with more than one ProRes encoder:</p>
+<ul>
+<li><code>prores</code> is much faster, can be used for progressive video only, and seems to be better for video according to Rec. 601 (Recommendation ITU-R BT.601).</li>
+<li><code>prores_ks</code> generates a better file, can also be used for interlaced video, allows also encoding of ProRes 4444 (<code>-c:v prores_ks -profile:v 4</code>), and seems to be better for video according to Rec. 709 (Recommendation ITU-R BT.709).</li>
+</ul>
+<p class="link"></p>
 </div>
 </div>
 </div>
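A minimal sketch of the two encoders side by side, with hypothetical file names (the audio option mirrors the pcm_s16le choice described above):

    # prores encoder: faster, progressive material
    ffmpeg -i input.mov -c:v prores -c:a pcm_s16le output_prores.mov
    # prores_ks encoder, here writing ProRes 4444 as noted above
    ffmpeg -i input.mov -c:v prores_ks -profile:v 4 -c:a pcm_s16le output_prores4444.mov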
@@ -142,7 +150,7 @@
 <dt>-i <i>input_audio_file</i></dt><dd>path and name of the audio input file. This extension must be .mxf</dd>
 <dt>-c:v <i>libx264</i></dt><dd>transcodes video to H.264</dd>
 <dt>-pix_fmt <i>yuv420p</i></dt><dd>sets pixel format to yuv420p for greater compatibility with media players</dd>
-<dt>-c:a aac</dt><dd>re-encodes using the AAC audio codec<br/>
+<dt>-c:a aac</dt><dd>re-encodes using the AAC audio codec<br>
 Note that sadly MP4 cannot contain sound encoded by a PCM (Pulse-Code Modulation) audio codec</dd>
 <dt><i>output_file.mp4</i></dt><dd>path, name and <i>.mp4</i> extension of the output file</dd>
 </dl>
@@ -193,8 +201,8 @@
 <dl>
 <dt>ffmpeg</dt><dd>starts the command</dd>
 <dt>-i <i>input_file</i></dt><dd>path, name and extension of the input file</dd>
-<dt>-filter:v "pad=ih*16/9:ih:(ow-iw)/2:(oh-ih)/2"</dt><dd>video padding<br/>This resolution independent formula is actually padding any aspect ratio into 16:9 by pillarboxing, because the video filter uses relative values for input width (iw), input height (ih), output width (ow) and output height (oh).</dd>
-<dt>-c:a copy</dt><dd>re-encodes using the same audio codec<br/>
+<dt>-filter:v "pad=ih*16/9:ih:(ow-iw)/2:(oh-ih)/2"</dt><dd>video padding<br>This resolution independent formula is actually padding any aspect ratio into 16:9 by pillarboxing, because the video filter uses relative values for input width (iw), input height (ih), output width (ow) and output height (oh).</dd>
+<dt>-c:a copy</dt><dd>re-encodes using the same audio codec<br>
 For silent videos you can replace <code>-c:a copy</code> by <code>-an</code>.</dd>
 <dt><i>output_file</i></dt><dd>path, name and extension of the output file</dd>
 </dl>
@@ -217,13 +225,13 @@
 <dl>
 <dt>ffmpeg</dt><dd>starts the command</dd>
 <dt>-i <i>input_file</i></dt><dd>path, name and extension of the input file</dd>
-<dt>-filter:v "colormatrix=bt601:bt709, scale=1440:1080:flags=lanczos, pad=1920:1080:240:0"</dt><dd>set colour matrix, video scaling and padding<br/>Three filters are applied:
+<dt>-filter:v "colormatrix=bt601:bt709, scale=1440:1080:flags=lanczos, pad=1920:1080:240:0"</dt><dd>set colour matrix, video scaling and padding<br>Three filters are applied:
 <ol>
 <li>The luma coefficients are modified from SD video (according to Rec. 601) to HD video (according to Rec. 709) by a colour matrix. Note that today Rec. 709 is often used also for SD and therefore you may cancel this parameter.</li>
 <li>The scaling filter (<code>scale=1440:1080</code>) works for both upscaling and downscaling. We use the Lanczos scaling algorithm (<code>flags=lanczos</code>), which is slower but gives better results than the default bilinear algorithm.</li>
 <li>The padding filter (<code>pad=1920:1080:240:0</code>) completes the transformation from SD to HD.</li>
 </ol></dd>
-<dt>-c:a copy</dt><dd>re-encodes using the same audio codec<br/>
+<dt>-c:a copy</dt><dd>re-encodes using the same audio codec<br>
 For silent videos you can replace <code>-c:a copy</code> with <code>-an</code>.</dd>
 <dt><i>output_file</i></dt><dd>path, name and extension of the output file</dd>
 </dl>
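Written out with hypothetical file names, the full SD-to-HD command described above is:

    ffmpeg -i input_sd.mov -filter:v "colormatrix=bt601:bt709, scale=1440:1080:flags=lanczos, pad=1920:1080:240:0" -c:a copy output_hd.mov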
@@ -246,8 +254,8 @@
 <dl>
 <dt>ffmpeg</dt><dd>starts the command</dd>
 <dt>-i <i>input_file</i></dt><dd>path, name and extension of the input file</dd>
-<dt>-filter:v "pad=iw:iw*3/4:(ow-iw)/2:(oh-ih)/2"</dt><dd>video padding<br/>This resolution independent formula is actually padding any aspect ratio into 4:3 by letterboxing, because the video filter uses relative values for input width (iw), input height (ih), output width (ow) and output height (oh).</dd>
-<dt>-c:a copy</dt><dd>re-encodes using the same audio codec<br/>
+<dt>-filter:v "pad=iw:iw*3/4:(ow-iw)/2:(oh-ih)/2"</dt><dd>video padding<br>This resolution independent formula is actually padding any aspect ratio into 4:3 by letterboxing, because the video filter uses relative values for input width (iw), input height (ih), output width (ow) and output height (oh).</dd>
+<dt>-c:a copy</dt><dd>re-encodes using the same audio codec<br>
 For silent videos you can replace <code>-c:a copy</code> by <code>-an</code>.</dd>
 <dt><i>output_file</i></dt><dd>path, name and extension of the output file</dd>
 </dl>
@@ -323,13 +331,13 @@
 <p>This will convert your Matroska (MKV) files to MP4 files.</p>
 <dl>
 <dt>ffmpeg</dt><dd>starts the command</dd>
-<dt>-i <i>input_file</i></dt><dd>path and name of the input file<br/>
+<dt>-i <i>input_file</i></dt><dd>path and name of the input file<br>
 The extension for the Matroska container is <code>.mkv</code>.</dd>
 <dt>-c:v copy</dt><dd>re-encodes using the same video codec</dd>
-<dt>-c:a aac</dt><dd>re-encodes using the AAC audio codec<br/>
-Note that sadly MP4 cannot contain sound encoded by a PCM (Pulse-Code Modulation) audio codec.<br/>
+<dt>-c:a aac</dt><dd>re-encodes using the AAC audio codec<br>
+Note that sadly MP4 cannot contain sound encoded by a PCM (Pulse-Code Modulation) audio codec.<br>
 For silent videos you can replace <code>-c:a aac</code> by <code>-an</code>.</dd>
-<dt><i>output_file</i></dt><dd>path and name of the output file<br/>
+<dt><i>output_file</i></dt><dd>path and name of the output file<br>
 The extension for the MP4 container is <code>.mp4</code>.</dd>
 </dl>
 <p class="link"></p>
@@ -353,7 +361,7 @@
 <dt>-f image2</dt><dd>forces input or output file format. <code>image2</code> specifies the image file demuxer.</dd>
 <dt>-framerate 9</dt><dd>sets framerate to 9 frames per second</dd>
 <dt>-pattern_type glob</dt><dd>tells ffmpeg that the following mapping should "interpret like a <a href="https://en.wikipedia.org/wiki/Glob_%28programming%29">glob</a>" (a "global command" function that relies on the * as a wildcard and finds everything that matches)</dd>
-<dt>-i <i>"input_image_*.jpg"</i></dt><dd>maps all files in the directory that start with input_image_, for example input_image_001.jpg, input_image_002.jpg, input_image_003.jpg... etc.<br/>
+<dt>-i <i>"input_image_*.jpg"</i></dt><dd>maps all files in the directory that start with input_image_, for example input_image_001.jpg, input_image_002.jpg, input_image_003.jpg... etc.<br>
 (The quotation marks are necessary for the above "glob" pattern!)</dd>
 <dt>-vf scale=250x250</dt><dd>filter the video to scale it to 250x250; -vf is an alias for -filter:v</dd>
 <dt><i>output_file.gif</i></dt><dd>path and name of the output file</dd>
@@ -377,8 +385,8 @@
 <p>Before encoding, you’ll need to establish which of the .VOB files on the DVD or .iso contain the content that you wish to encode. Inside the VIDEO_TS directory, you will see a series of files with names like VTS_01_0.VOB, VTS_01_1.VOB, etc. Some of the .VOB files will contain menus, special features, etc, so locate the ones that contain target content by playing them back in VLC.</p>
 <dl>
 <dt>ffmpeg</dt><dd>starts the command</dd>
-<dt>-i concat:<i>input files</i></dt><dd>lists the input VOB files and directs ffmpeg to concatenate them. Each input file should be separated by a backslash and a pipe, like so:<br/>
-<code>-i concat:VTS_01_1.VOB\|VTS_01_2.VOB\|VTS_01_3.VOB</code><br/>
+<dt>-i concat:<i>input files</i></dt><dd>lists the input VOB files and directs ffmpeg to concatenate them. Each input file should be separated by a backslash and a pipe, like so:<br>
+<code>-i concat:VTS_01_1.VOB\|VTS_01_2.VOB\|VTS_01_3.VOB</code><br>
 The backslash is simply an escape character for the pipe (<strong>|</strong>).
 <dt>-c:v libx264</dt><dd>sets the video codec as H.264</dd>
 <dt>-c:a copy</dt><dd>audio remains as-is (no re-encode)</dd>
@@ -404,6 +412,160 @@
 </div>
 <!-- ends rip DVD -->

+</div>
+<div class="well">
+<h4>Filters</h4>
+
+<!-- astats -->
+<span data-toggle="modal" data-target="#astats"><button type="button" class="btn btn-default" data-toggle="tooltip" data-placement="bottom" title="Plays a graphical output showing decibel levels of an input file">Graphic for audio</button></span>
+<div id="astats" class="modal fade" tabindex="-1" role="dialog">
+<div class="modal-dialog modal-lg">
+<div class="modal-content">
+<div class="well">
+<h3>Plays a graphical output showing decibel levels of an input file</h3>
+<p><code>ffplay -f lavfi "amovie='input.mp3',astats=metadata=1:reset=1,adrawgraph=lavfi.astats.Overall.Peak_level:max=0:min=-30.0:size=700x256:bg=Black[out]"</code></p>
+<dl>
+<dt>ffplay</dt><dd>starts the command</dd>
+<dt>-f lavfi</dt><dd>tells ffmpeg to use the Libavfilter input virtual device <a href="http://ffmpeg.org/ffmpeg-devices.html#lavfi" target="_blank">[more]</a></dd>
+<dt>"</dt><dd>quotation mark to start command</dd>
+<dt>movie='<i>input.mp3</i>'</dt><dd>declares audio source file on which to apply filter</dd>
+<dt>,</dt><dd>comma signifies the end of audio source section and the beginning of the filter section</dd>
+<dt>astats=metadata=1</dt><dd>tells the astats filter to output metadata that can be passed to another filter (in this case adrawgraph)</dd>
+<dt>:</dt><dd>divides between options of the same filter</dd>
+<dt>reset=1</dt><dd>tells the filter to calculate the stats on every frame (increasing this number would calculate stats for groups of frames)</dd>
+<dt>,</dt><dd>comma divides one filter in the chain from another</dd>
+<dt>adrawgraph=lavfi.astats.Overall.Peak_level:max=0:min=-30.0</dt><dd>draws a graph using the overall peak volume calculated by the astats filter. It sets the max for the graph to 0 (dB) and the minimum to -30 (dB). For more options on data points that can be graphed see <a href="https://ffmpeg.org/ffmpeg-filters.html#astats-1" target="_blank">[more]</a></dd>
+<dt>size=700x256:bg=Black</dt><dd>sets the background color and size of the output</dd>
+<dt>[out]</dt><dd>ends the filterchain and sets the output</dd>
+<dt>"</dt><dd>quotation mark to close command</dd>
+</dl>
+<div class="sample-image">
+<!-- <h4>Example of filter output</h4> -->
+<!-- <img src="" alt="astats example"> -->
+</div>
+<p class="link"></p>
+</div>
+</div>
+</div>
+</div>
+<!-- ends astats -->
+
+<!-- BRNG -->
+<span data-toggle="modal" data-target="#brng"><button type="button" class="btn btn-default" data-toggle="tooltip" data-placement="bottom" title="Identifies pixels out of broadcast range">Broadcast Range</button></span>
+<div id="brng" class="modal fade" tabindex="-1" role="dialog">
+<div class="modal-dialog modal-lg">
+<div class="modal-content">
+<div class="well">
+<h3>Shows all pixels outside of broadcast range</h3>
+<p><code>ffplay -f lavfi "movie='<i>input.mp4</i>',signalstats=out=brng:color=cyan[out]"</code></p>
+<dl>
+<dt>ffplay</dt><dd>starts the command</dd>
+<dt>-f lavfi</dt><dd>tells ffmpeg to use the Libavfilter input virtual device <a href="http://ffmpeg.org/ffmpeg-devices.html#lavfi" target="_blank">[more]</a></dd>
+<dt>"</dt><dd>quotation mark to start command</dd>
+<dt>movie='<i>input.mp4</i>'</dt><dd>declares video file source to apply filter</dd>
+<dt>,</dt><dd>comma signifies closing of video source assertion and ready for filter assertion</dd>
+<dt>signalstats=out=brng:</dt><dd>tells ffplay to use the signalstats command, output the data, use the brng filter</dd>
+<dt>:</dt><dd>indicates there's another parameter coming</dd>
+<dt>color=cyan[out]</dt><dd>sets the color of out-of-range pixels to cyan</dd>
+<dt>"</dt><dd>quotation mark to close command</dd>
+</dl>
+<div class="sample-image">
+<h4>Example of filter output</h4>
+<img src="img/brng.gif" alt="BRNG example">
+</div>
+<p class="link"></p>
+</div>
+</div>
+</div>
+</div>
+<!-- ends BRNG -->
+
+<!-- ocr -->
+<span data-toggle="modal" data-target="#ocr_on_top"><button type="button" class="btn btn-default" data-toggle="tooltip" data-placement="bottom" title="Plays video with OCR on top">Shows OCR</button></span>
+<div id="ocr_on_top" class="modal fade" tabindex="-1" role="dialog">
+<div class="modal-dialog modal-lg">
+<div class="modal-content">
+<div class="well">
+<h3>Plays video with OCR on top</h3>
+<p><code>ffplay input_file -vf "ocr,drawtext=fontfile=/Library/Fonts/Andale Mono.ttf:text=%{metadata\\\:lavfi.ocr.text}:fontcolor=white"</code></p>
+<dl>
+<dt>ffplay</dt><dd>starts the command</dd>
+<dt><i>input_file</i></dt><dd>path, name and extension of the input file</dd>
+<dt>-vf</dt><dd>creates a filtergraph to use for the streams</dd>
+<dt>"</dt><dd>quotation mark to start filter command</dd>
+<dt>ocr,</dt><dd>tells ffplay to use ocr as source and the comma signifies that the script is ready for filter assertion</dd>
+<dt>drawtext=fontfile=/Library/Fonts/Andale Mono.ttf</dt><dd>tells ffplay to drawtext and use a specific font (Andale Mono) when doing so</dd>
+<dt>:</dt><dd>indicates there's another parameter coming</dd>
+<dt>text=%{metadata\\\:lavfi.ocr.text}</dt><dd>tells ffplay what text to use when playing. In this case, calls for metadata that lives in the lavfi.ocr.text library</dd>
+<dt>:</dt><dd>indicates there's another parameter coming</dd>
+<dt>fontcolor=white</dt><dd>specifies font color as white</dd>
+<dt>"</dt><dd>quotation mark to close filter command</dd>
+</dl>
+<div class="sample-image">
+<!-- <h4>Example of filter output</h4> -->
+<!-- <img src="" alt="ocr example"> -->
+</div>
+<p class="link"></p>
+</div>
+</div>
+</div>
+</div>
+<!-- ends ocr -->
+
+<!-- Exports OCR -->
+<span data-toggle="modal" data-target="#ffprobe_ocr"><button type="button" class="btn btn-default" data-toggle="tooltip" data-placement="bottom" title="Exports OCR from video to screen">Exports OCR</button></span>
+<div id="ffprobe_ocr" class="modal fade" tabindex="-1" role="dialog">
+<div class="modal-dialog modal-lg">
+<div class="modal-content">
+<div class="well">
+<h3>Exports OCR data to screen</h3>
+<p><code>ffprobe -show_entries frame_tags=lavfi.ocr.text -f lavfi -i "movie=<i>input_file</i>,ocr"</code></p>
+<dl>
+<dt>ffprobe</dt><dd>starts the command</dd>
+<dt>-show_entries</dt><dd>sets a list of entries to show</dd>
+<dt>frame_tags=lavfi.ocr.text</dt><dd>shows the <i>lavfi.ocr.text</i> tag in the frame section of the video</dd>
+<dt>-f lavfi</dt><dd>tells ffmpeg to use the Libavfilter input virtual device <a href="http://ffmpeg.org/ffmpeg-devices.html#lavfi" target="_blank">[more]</a></dd>
+<dt>-i "movie=<i>input_file</i>,ocr"</dt><dd>declares 'movie' as <i>input_file</i> and passes in the 'ocr' command</dd>
+</dl>
+<div class="sample-image">
+<!-- <h4>Example of filter output</h4> -->
+<!-- <img src="" alt="Exports OCR example"> -->
+</div>
+<p class="link"></p>
+</div>
+</div>
+</div>
+</div>
+<!-- ends Exports OCR -->
+
+<!-- Vectorscope -->
+<span data-toggle="modal" data-target="#vectorscope"><button type="button" class="btn btn-default" data-toggle="tooltip" data-placement="bottom" title="Vectorscope from video to screen">Vectorscope</button></span>
+<div id="vectorscope" class="modal fade" tabindex="-1" role="dialog">
+<div class="modal-dialog modal-lg">
+<div class="modal-content">
+<div class="well">
+<h3>Plays vectorscope of video</h3>
+<p><code>ffplay <i>input_file</i> -vf "split=2[m][v],[v]vectorscope=b=0.7:m=color3:g=green[v],[m][v]overlay=x=W-w:y=H-h"</code></p>
+<dl>
+<dt>ffplay</dt><dd>starts the command</dd>
+<dt><i>input_file</i></dt><dd>path, name and extension of the input file</dd>
+<dt>-vf</dt><dd>creates a filtergraph to use for the streams</dd>
+<dt>"</dt><dd>quotation mark to start command</dd>
+<dt>,</dt><dd>comma signifies there is another parameter coming</dd>
+<dt>split=2[m][v]</dt><dd>Splits the input into two identical outputs and names them [m] and [v]</dd>
+<dt>,</dt><dd>comma signifies there is another parameter coming</dd>
+<dt>[v]vectorscope=b=0.7:m=color3:g=green[v]</dt><dd>asserts usage of the vectorscope filter and sets a light background opacity (b, alias for bgopacity), sets a background color style (m, alias for mode), and graticule color (g, alias for graticule)</dd>
+<dt>,</dt><dd>comma signifies there is another parameter coming</dd>
+<dt>[m][v]overlay=x=W-w:y=H-h</dt><dd>declares where the vectorscope will overlay on top of the video image as it plays</dd>
+<dt>"</dt><dd>quotation mark to end command</dd>
+</dl>
+<p class="link"></p>
+</div>
+</div>
+</div>
+</div>
+<!-- ends Vectorscope -->
+
 </div>
 <div class="well">
 <h4>Make derivative variations</h4>
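As a concrete illustration of the broadcast-range filter added above, with a hypothetical file name:

    ffplay -f lavfi "movie='recording.mp4',signalstats=out=brng:color=cyan[out]"

Out-of-range pixels are highlighted in cyan while the file plays.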
@@ -474,7 +636,7 @@
 <dt>ffmpeg</dt><dd>starts the command</dd>
 <dt>-i <i>input_file</i></dt><dd>path, name and extension of the input file</dd>
 <dt>-ss <i>00:00:20</i></dt><dd>seeks video file to 20 seconds into the video</dd>
-<dt>-vf fps=1/60</dt><dd>-vf is an alias for -filter:v, which creates a filtergraph to use for the streams. The rest of the command identifies filtering by frames per second, and sets the frames per second at 1/60 (which is one per minute). Omitting this will output all frames from the video</dd>
+<dt>-vf fps=1/60</dt><dd>Creates a filtergraph to use for the streams. The rest of the command identifies filtering by frames per second, and sets the frames per second at 1/60 (which is one per minute). Omitting this will output all frames from the video.</dd>
 <dt><i>output file</i></dt><dd>path, name and extension of the output file. In the example out%d.png where %d is a regular expression that adds a number (d is for digit) and increments with each frame (out1.png, out2.png, out3.png…). You may also chose a regular expression like out%04d.png which gives 4 digits with leading 0 (out0001.png, out0002.png, out0003.png, …).</dd>
 </dl>
 <p class="link"></p>
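For example, with hypothetical names, one PNG per minute starting 20 seconds into the video:

    ffmpeg -i interview.mov -ss 00:00:20 -vf fps=1/60 out%d.png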
@@ -484,29 +646,6 @@
 </div>
 <!-- ends Multi thumbnail -->

-<!-- Generate thumbnails -->
-<span data-toggle="modal" data-target="#thumbnails"><button type="button" class="btn btn-default" data-toggle="tooltip" data-placement="bottom" title="Generate thumbnails from a video at regular intervals">Generate thumbnails</button></span>
-<div id="thumbnails" class="modal fade" tabindex="-1" role="dialog">
-<div class="modal-dialog modal-lg">
-<div class="modal-content">
-<div class="well">
-<h3>Generate thumbnails from a video at regular intervals</h3>
-<p><code>ffmpeg -i <i>input_file</i> -ss <i>00:12.235</i> -i "$f" -vframes 1 <i>output_file</i></code></p>
-<p>Create one thumbnail in JPEG format from a video file at a specific time. In this example: 0hours:0minutes:12sec.235msec</p>
-<dl>
-<dt>ffmpeg</dt><dd>starts the command</dd>
-<dt>-i <i>input_file</i></dt><dd>path, name and extension of the input file</dd>
-<dt>-vframes <i>1</i></dt><dd>Tells ffmpeg to create a still image from file.</dd>
-<dt>-ss <i>00:12.235</i></dt><dd>Specifies the specific point in input file where the still will be captured.</dd>
-<dt><i>output_file</i></dt><dd>path, name and extension of the output file</dd>
-</dl>
-<p class="link"></p>
-</div>
-</div>
-</div>
-</div>
-<!-- ends Generate thumbnails -->
-
 <!-- Excerpt from beginning -->
 <span data-toggle="modal" data-target="#excerpt_from_start"><button type="button" class="btn btn-default" data-toggle="tooltip" data-placement="bottom" title="Create an excerpt, starting from the beginning of the file">Excerpt from beginning</button></span>
 <div id="excerpt_from_start" class="modal fade" tabindex="-1" role="dialog">
@@ -544,7 +683,7 @@
 <dt>-i <i>input_file</i></dt><dd>path, name and extension of the input file</dd>
 <dt>-ss 00:02:00</dt><dd>sets in point at 00:02:00</dd>
 <dt>-to 00:55:00</dt><dd>sets out point at 00:55:00</dd>
-<dt>-c copy</dt><dd>use stream copy mode (no re-encoding)<br/>
+<dt>-c copy</dt><dd>use stream copy mode (no re-encoding)<br>
 <i>Note:</i> watch out when using <code>-ss</code> with <code>-c copy</code> if the source is encoded with an interframe codec (e.g., H.264). Since ffmpeg must split on i-frames, it will seek to the nearest i-frame to begin the stream copy.</dd>
 <dt><i>output_file</i></dt><dd>path, name and extension of the output file</dd>
 </dl>
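With hypothetical file names, the trim described above becomes:

    ffmpeg -i full_tape.mov -ss 00:02:00 -to 00:55:00 -c copy excerpt.mov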
@@ -661,6 +800,37 @@
 </div>
 <!-- ends Cover head switching noise -->

+<!-- append notice to access mp3 -->
+<span data-toggle="modal" data-target="#append_mp3"><button type="button" class="btn btn-default" data-toggle="tooltip" data-placement="bottom" title="Generate two access MP3s from input. One with added audio (such as copyright notice) and one unmodified.">Append notice to access MP3</button></span>
+<div id="append_mp3" class="modal fade" tabindex="-1" role="dialog">
+<div class="modal-dialog modal-lg">
+<div class="modal-content">
+<div class="well">
+<h3>Generate two access MP3s from input. One with appended audio (such as copyright notice) and one unmodified.</h3>
+<p> <code>ffmpeg -i <i>input_file</i> -i <i>input_file_to_append</i> -filter_complex "[0:a:0]asplit=2[a][b];[b]afifo[bb];[1:a:0][bb]concat=n=2:v=0:a=1[concatout]" -map "[a]" -codec:a libmp3lame -dither_method modified_e_weighted -qscale:a 2 <i>output_file.mp3</i> -map "[concatout]" -codec:a libmp3lame -dither_method modified_e_weighted -qscale:a 2 <i>output_file_appended.mp3</i></code></p>
+<p>This script allows you to generate two derivative audio files from a master while appending audio from a separate file (for example a copyright or institutional notice) to one of them.</p>
+<dl>
+<dt>ffmpeg</dt><dd>starts the command</dd>
+<dt>-i <i>input_file</i></dt><dd>path, name and extension of the input file (The master file)</dd>
+<dt>-i <i>input_file_to_append</i></dt><dd>path, name and extension of the input file (The file to be appended to access file)</dd>
+<dt>-filter_complex</dt><dd>enables the complex filtering to manage splitting the input to two audio streams</dd>
+<dt>[0:a:0]asplit=2[a][b];</dt><dd><code>asplit</code> allows audio streams to be split up for separate manipulation. This command splits the audio from the first input (the master file) into two streams "a" and "b"</dd>
+<dt>[b]afifo[bb];</dt><dd>this buffers the stream "b" to help prevent dropped samples and renames stream to "bb"</dd>
+<dt>[1:a:0][bb]concat=n=2:v=0:a=1[concatout]</dt><dd><code>concat</code> is used to join files. <code>n=2</code> tells the filter there are two inputs. <code>v=0:a=1</code> Tells the filter there are 0 video outputs and 1 audio output. This command appends the audio from the second input to the beginning of stream "bb" and names the output "concatout"</dd>
+<dt>-map "[a]"</dt><dd>this maps the unmodified audio stream to the first output</dd>
+<dt>-codec:a libmp3lame -dither_method modified_e_weighted -qscale:a 2</dt><dd>sets up mp3 options (using constant quality)</dd>
+<dt><i>output_file</i></dt><dd>path, name and extension of the output file (unmodified)</dd>
+<dt>-map "[concatout]"</dt><dd>this maps the modified stream to the second output</dd>
+<dt>-codec:a libmp3lame -dither_method modified_e_weighted -qscale:a 2</dt><dd>sets up mp3 options (using constant quality)</dd>
+<dt><i>output_file_appended</i></dt><dd>path, name and extension of the output file (with appended notice)</dd>
+</dl>
+<p class="link"></p>
+</div>
+</div>
+</div>
+</div>
+<!-- ends append notice to access mp3 -->
+
 </div>
 <div class="well">
 <h4>Preservation</h4>
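Filled in with hypothetical file names, the append-notice command added above reads:

    ffmpeg -i master.wav -i notice.wav -filter_complex "[0:a:0]asplit=2[a][b];[b]afifo[bb];[1:a:0][bb]concat=n=2:v=0:a=1[concatout]" -map "[a]" -codec:a libmp3lame -dither_method modified_e_weighted -qscale:a 2 access.mp3 -map "[concatout]" -codec:a libmp3lame -dither_method modified_e_weighted -qscale:a 2 access_with_notice.mp3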
@@ -676,19 +846,19 @@
 <p>“Rewrap-MXF.sh” contains the following text:</p>
 <p><code>for file in *.MXF; do ffmpeg -i "$file" -map 0 -c copy "${file%.MXF}.mov"; done</code></p>
 <dl>
-<dt>for file in *.MXF</dt><dd>starts the loop, and states what the input files will be. Here, the ffmpeg command within the loop will be applied to all files with an extension of .MXF.<br/>
+<dt>for file in *.MXF</dt><dd>starts the loop, and states what the input files will be. Here, the ffmpeg command within the loop will be applied to all files with an extension of .MXF.<br>
 The word ‘file’ is an arbitrary variable which will represent each .MXF file in turn as it is looped over.</dd>
-<dt>do ffmpeg -i "$file"</dt><dd>carry out the following ffmpeg command for each input file.<br/>
+<dt>do ffmpeg -i "$file"</dt><dd>carry out the following ffmpeg command for each input file.<br>
 Per Bash syntax, within the command the variable is referred to by <strong>“$file”</strong>. The dollar sign is used to reference the variable ‘file’, and the enclosing quotation marks prevents reinterpretation of any special characters that may occur within the filename, ensuring that the original filename is retained.</dd>
 <dt>-map 0</dt><dd>retain all streams</dd>
 <dt>-c copy</dt><dd>enable stream copy (no re-encode)</dd>
 <dt>"${file%.MXF}.mov";</dt><dd>retaining the original file name, set the output file wrapper as .mov</dd>
 <dt>done</dt><dd>complete; all items have been processed.</dd>
 </dl>
-<p><strong>Note</strong>: the shell script (.sh file) and all .MXF files to be processed must be contained within the same directory, and the script must be run from that directory.<br/>
+<p><strong>Note</strong>: the shell script (.sh file) and all .MXF files to be processed must be contained within the same directory, and the script must be run from that directory.<br>
 Execute the .sh file with the command <code>sh Rewrap-MXF.sh</code>.</p>
 <p>Modify the script as needed to perform different transcodes, or to use with ffprobe. :)</p>
-<p>The basic pattern will look similar to this:<br/>
+<p>The basic pattern will look similar to this:<br>
 <code>for item in *.ext; do ffmpeg -i $item <i>(ffmpeg options here)</i> "${item%.ext}_suffix.ext"</code></p>
 <p>e.g., if an input file is bestmovie002.avi, its output will be bestmovie002_suffix.avi.</p>
 <p class="link"></p>
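The generic pattern above, completed with its closing done and hypothetical transcode options and file extensions, could look like this:

    for item in *.avi; do ffmpeg -i "$item" -c:v prores -c:a pcm_s16le "${item%.avi}.mov"; done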
@@ -963,14 +1133,14 @@ foreach ($file in $inputfiles) {
 <dl>
 <dt>ffmpeg</dt><dd>starts the command</dd>
 <dt>-f concat</dt><dd>forces ffmpeg to concatenate the files and to keep the same file format</dd>
-<dt>-i <i>mylist.txt</i></dt><dd>path, name and extension of the input file. Per the <a href="https://www.ffmpeg.org/ffmpeg-formats.html#Options">ffmpeg documentation</a>, it is preferable to specify relative rather than absolute file paths, as allowing absolute file paths may pose a security risk.<br/>
+<dt>-i <i>mylist.txt</i></dt><dd>path, name and extension of the input file. Per the <a href="https://www.ffmpeg.org/ffmpeg-formats.html#Options">ffmpeg documentation</a>, it is preferable to specify relative rather than absolute file paths, as allowing absolute file paths may pose a security risk.<br>
 This text file contains the list of files to be concatenated and should be formatted as follows:
 <pre>file '<i>./first_file.ext</i>'
 file '<i>./second_file.ext</i>'
 . . .
 file '<i>./last_file.ext</i>'</pre>
-In the above, <strong>file</strong> is simply the word "file".<br/>
-<i>Note</i>: If specifying absolute file paths in the .txt file, add <code>-safe 0</code> before the input file.<br/>
+In the above, <strong>file</strong> is simply the word "file". Straight apostrophes ('like this') rather than curved quotation marks (‘like this’) must be used to enclose the file paths.<br>
+<i>Note</i>: If specifying absolute file paths in the .txt file, add <code>-safe 0</code> before the input file.<br>
 e.g.: <code>ffmpeg -f concat -safe 0 -i mylist.txt -c copy <i>output_file</i></code></dd>
 <dt>-c copy</dt><dd>use stream copy mode to re-mux instead of re-encode</dd>
 <dt><i>output_file</i></dt><dd>path, name and extension of the output file</dd>
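A quick way to build mylist.txt for every MP4 in the current directory (hypothetical names, relative paths as recommended above) and then run the concatenation:

    printf "file '%s'\n" ./*.mp4 > mylist.txt
    ffmpeg -f concat -i mylist.txt -c copy output_joined.mp4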
@@ -994,10 +1164,10 @@ e.g.: <code>ffmpeg -f concat -safe 0 -i mylist.txt -c copy <i>output_file</i></c
 <p><code>ffplay -framerate 5 <i>input_file_%06d.ext</i></code></p>
 <dl>
 <dt>ffplay</dt><dd>starts the command</dd>
-<dt>-framerate 5</dt><dd>plays image sequence at rate of 5 images per second<br/>
+<dt>-framerate 5</dt><dd>plays image sequence at rate of 5 images per second<br>
 <i>Note</i>: this low framerate will produce a slideshow effect.</dd>
-<dt>-i <i>input_file</i></dt><dd>path, name and extension of the input file<br/>
-This must match the naming convention used! The regex %06d matches six-digit-long numbers, possibly with leading zeroes. This allows the full sequence to be read in ascending order, one image after the other.<br/>
+<dt>-i <i>input_file</i></dt><dd>path, name and extension of the input file<br>
+This must match the naming convention used! The regex %06d matches six-digit-long numbers, possibly with leading zeroes. This allows the full sequence to be read in ascending order, one image after the other.<br>
 The extension for TIFF files is .tif or maybe .tiff; the extension for DPX files is .dpx (or even .cin for old files). Screenshots are often in .png format.</dd>
 </dl>
 <p><i>Notes:</i></p>
@@ -1094,8 +1264,8 @@ e.g.: <code>ffmpeg -f concat -safe 0 -i mylist.txt -c copy <i>output_file</i></c
 <dl>
 <dt>ffmpeg</dt><dd>starts the command</dd>
 <dt>-i <i>input_file</i></dt><dd>path, name and extension of the input file</dd>
-<dt>-filter:v "hflip,vflip"</dt><dd>flips the image horizontally and vertically<br/>By using only one of the parameters hflip or vflip for filtering the image is flipped on that axis only. The quote marks are not mandatory.</dd>
-<dt>-c:a copy</dt><dd>re-encodes using the same audio codec<br/>
+<dt>-filter:v "hflip,vflip"</dt><dd>flips the image horizontally and vertically<br>By using only one of the parameters hflip or vflip for filtering the image is flipped on that axis only. The quote marks are not mandatory.</dd>
+<dt>-c:a copy</dt><dd>re-encodes using the same audio codec<br>
 For silent videos you can replace <code>-c:a copy</code> by <code>-an</code>.</dd>
 <dt><i>output_file</i></dt><dd>path, name and extension of the output file</dd>
 </dl>
@@ -1232,7 +1402,7 @@ e.g.: <code>ffmpeg -f concat -safe 0 -i mylist.txt -c copy <i>output_file</i></c
 <dt>ffmpeg</dt><dd>starts the command</dd>
 <dt>-f image2</dt><dd>forces the image file de-muxer for single image files</dd>
 <dt>-framerate 24</dt><dd>Sets the input framerate to 24 fps. The image2 demuxer defaults to 25 fps.</dd>
-<dt>-i <i>input_file</i></dt><dd>path, name and extension of the input file<br/>
+<dt>-i <i>input_file</i></dt><dd>path, name and extension of the input file<br>
 This must match the naming convention actually used! The regex %06d matches six digits long numbers, possibly with leading zeroes. This allows to read in ascending order, one image after the other, the full sequence inside one folder. For image sequences starting with 086400 (i.e. captured with a timecode starting at 01:00:00:00 and at 24 fps), add the flag <code>-start_number 086400</code> before <code>-i input_file_%06d.ext</code>. The extension for TIFF files is .tif or maybe .tiff; the extension for DPX files is .dpx (or eventually .cin for old files).</dd>
 <dt>-c:v v210</dt><dd>encodes an uncompressed 10-bit video stream</dd>
 <dt>-an copy</dt><dd>no audio</dd>
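A minimal sketch with hypothetical names, for a DPX sequence numbered from 000001, using plain -an for no audio:

    ffmpeg -f image2 -framerate 24 -i scan_%06d.dpx -c:v v210 -an output.mov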
@@ -1283,7 +1453,7 @@ e.g.: <code>ffmpeg -f concat -safe 0 -i mylist.txt -c copy <i>output_file</i></c
 <dl>
 <dt>ffmpeg</dt><dd>starts the command</dd>
 <dt>-i <i>input_file</i></dt><dd>path, name and extension of the input file</dd>
-<dt>-filter:v <i>setfield=tff</i></dt><dd>Sets the field order to top field first. Use <code>setfield=bff</code> for bottom field first. </dd>
+<dt>-filter:v <i>setfield=tff</i></dt><dd>Sets the field order to top field first. Use <code>setfield=bff</code> for bottom field first.</dd>
 <dt>-c:v prores</dt><dd>Tells ffmpeg to transcode the video stream into Apple ProRes 422. Experiment with using other codecs.</dd>
 <dt><i>output_file</i></dt><dd>path, name and extension of the output file</dd>
 </dl>
@@ -1347,13 +1517,13 @@ e.g.: <code>ffmpeg -f concat -safe 0 -i mylist.txt -c copy <i>output_file</i></c
 <dt>-h</dt><dd>Call the help option</dd>
 <dt>type=name</dt>
 <dd>Tells ffmpeg which kind of option you want, for example:
 <ul>
 <li><code>encoder=libx264</code></li>
 <li><code>decoder=mp3</code></li>
 <li><code>muxer=matroska</code></li>
 <li><code>demuxer=mov</code></li>
 <li><code>filter=crop</code></li>
 </ul>
 </dd>
 </dl>
 <p class="link"></p>
readme.md | 62
@@ -32,37 +32,47 @@ You can read our contributor code of conduct [here](https://github.com/amiaopens
 * Gathered using [octohatrack](https://github.com/LABHR/octohatrack)

 GitHub Contributors:
-* ablwr (Ashley)
-* edsu (Ed Summers)
-* jfarbowitz (Jonathan Farbowitz)
-* kfrn (Katherine Frances Nagels)
-* kgrons (Kathryn Gronsbell)
-* kieranjol (Kieran O'Leary)
-* llogan (Lou)
-* retokromer (Reto Kromer)
-* rfraimow
+ablwr (Ashley)
+dericed (Dave Rice)
+edsu (Ed Summers)
+jfarbowitz (Jonathan Farbowitz)
+kfrn (Katherine Frances Nagels)
+kgrons (Kathryn Gronsbell)
+kieranjol (Kieran O'Leary)
+llogan (Lou)
+privatezero (Andrew Weaver)
+retokromer (Reto Kromer)
+rfraimow

 All Contributors:
-* ablwr (Ashley)
-* brainwane (Sumana Harihareswara)
-* dericed (Dave Rice)
-* edsu (Ed Summers)
-* jamessam (Jim)
-* jfarbowitz (Jonathan Farbowitz)
-* jronallo (Jason Ronallo)
-* kfrn (Katherine Frances Nagels)
-* kgrons (Kathryn Gronsbell)
-* kieranjol (Kieran O'Leary)
-* llogan (Lou)
-* mulvya
-* retokromer (Reto Kromer)
-* rfraimow
-* todrobbins (Tod Robbins)
+ablwr (Ashley)
+audiovisualopen
+brainwane (Sumana Harihareswara)
+dericed (Dave Rice)
+edsu (Ed Summers)
+Fizz24
+jamessam (Jim)
+jfarbowitz (Jonathan Farbowitz)
+jronallo (Jason Ronallo)
+kfrn (Katherine Frances Nagels)
+kgrons (Kathryn Gronsbell)
+kieranjol (Kieran O'Leary)
+llogan (Lou)
+mulvya
+privatezero (Andrew Weaver)
+retokromer (Reto Kromer)
+rfraimow
+todrobbins (Tod Robbins)

 Repo: amiaopensource/ffmprovisr
-* GitHub Contributors: 9
-* All Contributors: 15
+GitHub Contributors: 11
+All Contributors: 18

 ## AVHack Team:

 [Ashley Blewer](https://github.com/ablwr), Eddy Colloton, Rebecca Dillmeier, [Jonathan Farbowitz](https://github.com/jfarbowitz), Rebecca Fraimow, Samuel Gutterman, Kelly Haydon, [Reto Kromer](https://github.com/retokromer), Nicole Martin, [Katherine Frances Nagels](https://github.com/kfrn), [Kieran O'Leary](https://github.com/kieranjol), Catriona Schlosser, Ben Turkus
+
+## Sister projects
+
+[Script Ahoy](http://dd388.github.io/crals/): Community Resource for Archivists and Librarians Scripting
+[sourcecaster](https://datapraxis.github.io/sourcecaster/): helps you use the command line to work through common challenges that come up when working with digital primary sources.