\input texinfo @c -*- texinfo -*-
@documentencoding UTF-8
@settitle ffmpeg Documentation
@titlepage
@center @titlefont{ffmpeg Documentation}
@end titlepage
@top
@contents
@chapter Synopsis
ffmpeg [@var{global_options}] @{[@var{input_file_options}] -i @file{input_url}@} ... @{[@var{output_file_options}] @file{output_url}@} ...
@chapter Description
@c man begin DESCRIPTION
@command{ffmpeg} is a universal media converter. It can read a wide variety of
inputs - including live grabbing/recording devices - filter, and transcode them
into a plethora of output formats.
@command{ffmpeg} reads from an arbitrary number of inputs (which can be regular
files, pipes, network streams, grabbing devices, etc.), specified by the
@code{-i} option, and writes to an arbitrary number of outputs, which are
specified by a plain output url. Anything found on the command line which cannot
be interpreted as an option is considered to be an output url.
Each input or output can, in principle, contain any number of elementary streams
of different types (video/audio/subtitle/attachment/data), though the allowed
stream counts and/or types may be limited by the container format. Selecting
which streams from which inputs will go into which output is either done
automatically or with the @code{-map} option (see the @ref{Stream selection}
chapter).
To refer to inputs/outputs in options, you must use their indices (0-based).
E.g. the first input is @code{0}, the second is @code{1}, etc. Similarly,
streams within an input/output are referred to by their indices. E.g. @code{2:3}
refers to the fourth stream in the third input or output. Also see the
@ref{Stream specifiers} chapter.
As a general rule, options are applied to the next specified
file. Therefore, order is important, and you can have the same
option on the command line multiple times. Each occurrence is
then applied to the next input or output file.
Exceptions from this rule are the global options (e.g. verbosity level),
which should be specified first.
Do not mix input and output files -- first specify all input files, then all
output files. Also do not mix options which belong to different files. All
options apply ONLY to the next input or output file and are reset between files.
Some simple examples follow.
@itemize
@item
Convert an input media file to a different format, by re-encoding media streams:
@example
ffmpeg -i input.avi output.mp4
@end example
@item
Set the video bitrate of the output file to 64 kbit/s:
@example
ffmpeg -i input.avi -b:v 64k -bufsize 64k output.mp4
@end example
@item
Force the frame rate of the output file to 24 fps:
@example
ffmpeg -i input.avi -r 24 output.mp4
@end example
@item
Force the frame rate of the input file (valid for raw formats only) to 1 fps and
the frame rate of the output file to 24 fps:
@example
ffmpeg -r 1 -i input.m2v -r 24 output.mp4
@end example
@end itemize
The format option may be needed for raw input files.
@c man end DESCRIPTION
@chapter Detailed description
@c man begin DETAILED DESCRIPTION
@command{ffmpeg} builds a transcoding pipeline out of the components listed
below. The program's operation then consists of input data chunks flowing from
the sources down the pipes towards the sinks, while being transformed by the
components they encounter along the way.
The following kinds of components are available:
@itemize
@item
@emph{Demuxers} (short for "demultiplexers") read an input source in order to
extract
@itemize
@item
global properties such as metadata or chapters;
@item
list of input elementary streams and their properties
@end itemize
One demuxer instance is created for each @option{-i} option, and sends encoded
@emph{packets} to @emph{decoders} or @emph{muxers}.
In other literature, demuxers are sometimes called @emph{splitters}, because
their main function is splitting a file into elementary streams (though some
files only contain one elementary stream).
A schematic representation of a demuxer looks like this:
@verbatim
┌──────────┬───────────────────────┐
│ demuxer │ │ packets for stream 0
╞══════════╡ elementary stream 0 ├──────────────────────⮞
│ │ │
│ global ├───────────────────────┤
│properties│ │ packets for stream 1
│ and │ elementary stream 1 ├──────────────────────⮞
│ metadata │ │
│ ├───────────────────────┤
│ │ │
│ │ ........... │
│ │ │
│ ├───────────────────────┤
│ │ │ packets for stream N
│ │ elementary stream N ├──────────────────────⮞
│ │ │
└──────────┴───────────────────────┘
│ read from file, network stream,
│ grabbing device, etc.
@end verbatim
@item
@emph{Decoders} receive encoded (compressed) @emph{packets} for an audio, video,
or subtitle elementary stream, and decode them into raw @emph{frames} (arrays of
pixels for video, PCM for audio). A decoder is typically associated with (and
receives its input from) an elementary stream in a @emph{demuxer}, but sometimes
may also exist on its own (see @ref{Loopback decoders}).
A schematic representation of a decoder looks like this:
@verbatim
┌─────────┐
packets │ │ raw frames
─────────⮞│ decoder ├────────────⮞
│ │
└─────────┘
@end verbatim
@item
@emph{Filtergraphs} process and transform raw audio or video @emph{frames}. A
filtergraph consists of one or more individual @emph{filters} linked into a
graph. Filtergraphs come in two flavors - @emph{simple} and @emph{complex},
configured with the @option{-filter} and @option{-filter_complex} options,
respectively.
A simple filtergraph is associated with an @emph{output elementary stream}; it
receives the input to be filtered from a @emph{decoder} and sends filtered
output to that output stream's @emph{encoder}.
A simple video filtergraph that performs deinterlacing (using the @code{yadif}
deinterlacer) followed by resizing (using the @code{scale} filter) can look like
this:
@verbatim
┌────────────────────────┐
│ simple filtergraph │
frames from ╞════════════════════════╡ frames for
a decoder │ ┌───────┐ ┌───────┐ │ an encoder
────────────⮞├─⮞│ yadif ├─⮞│ scale ├─⮞│────────────⮞
│ └───────┘ └───────┘ │
└────────────────────────┘
@end verbatim
A complex filtergraph is standalone and not associated with any specific stream.
It may have multiple (or zero) inputs, potentially of different types (audio or
video), each of which receives data either from a decoder or another complex
filtergraph's output. It also has one or more outputs that feed either an
encoder or another complex filtergraph's input.
The following example diagram represents a complex filtergraph with 3 inputs and
2 outputs (all video):
@verbatim
┌─────────────────────────────────────────────────┐
│ complex filtergraph │
╞═════════════════════════════════════════════════╡
frames ├───────┐ ┌─────────┐ ┌─────────┐ ┌────────┤ frames
─────────⮞│input 0├─⮞│ overlay ├─────⮞│ overlay ├─⮞│output 0├────────⮞
├───────┘ │ │ │ │ └────────┤
frames ├───────┐╭⮞│ │ ╭⮞│ │ │
─────────⮞│input 1├╯ └─────────┘ │ └─────────┘ │
├───────┘ │ │
frames ├───────┐ ┌─────┐ ┌─────┬─╯ ┌────────┤ frames
─────────⮞│input 2├⮞│scale├⮞│split├───────────────⮞│output 1├────────⮞
├───────┘ └─────┘ └─────┘ └────────┤
└─────────────────────────────────────────────────┘
@end verbatim
Frames from the second input are overlaid over those from the first. Frames from
the third input are rescaled, then duplicated into two identical streams. One of
them is overlaid over the combined first two inputs, with the result exposed as
the filtergraph's first output. The other duplicate ends up being the
filtergraph's second output.
@item
@emph{Encoders} receive raw audio, video, or subtitle @emph{frames} and encode
them into encoded @emph{packets}. The encoding (compression) process is
typically @emph{lossy} - it degrades stream quality to make the output smaller;
some encoders are @emph{lossless}, but at the cost of much higher output size. A
video or audio encoder receives its input from some filtergraph's output,
subtitle encoders receive input from a decoder (since subtitle filtering is not
supported yet). Every encoder is associated with some muxer's @emph{output
elementary stream} and sends its output to that muxer.
A schematic representation of an encoder looks like this:
@verbatim
┌─────────┐
raw frames │ │ packets
────────────⮞│ encoder ├─────────⮞
│ │
└─────────┘
@end verbatim
@item
@emph{Muxers} (short for "multiplexers") receive encoded @emph{packets} for
their elementary streams from encoders (the @emph{transcoding} path) or directly
from demuxers (the @emph{streamcopy} path), interleave them (when there is more
than one elementary stream), and write the resulting bytes into the output file
(or pipe, network stream, etc.).
A schematic representation of a muxer looks like this:
@verbatim
┌──────────────────────┬───────────┐
packets for stream 0 │ │ muxer │
──────────────────────⮞│ elementary stream 0 ╞═══════════╡
│ │ │
├──────────────────────┤ global │
packets for stream 1 │ │properties │
──────────────────────⮞│ elementary stream 1 │ and │
│ │ metadata │
├──────────────────────┤ │
│ │ │
│ ........... │ │
│ │ │
├──────────────────────┤ │
packets for stream N │ │ │
──────────────────────⮞│ elementary stream N │ │
│ │ │
└──────────────────────┴─────┬─────┘
write to file, network stream, │
grabbing device, etc. │
@end verbatim
@end itemize
@section Streamcopy
The simplest pipeline in @command{ffmpeg} is single-stream
@emph{streamcopy}, that is copying one @emph{input elementary stream}'s packets
without decoding, filtering, or encoding them. As an example, consider an input
file called @file{INPUT.mkv} with 3 elementary streams, from which we take the
second and write it to file @file{OUTPUT.mp4}. A schematic representation of
such a pipeline looks like this:
@verbatim
┌──────────┬─────────────────────┐
│ demuxer │ │ unused
╞══════════╡ elementary stream 0 ├────────╳
│ │ │
│INPUT.mkv ├─────────────────────┤ ┌──────────────────────┬───────────┐
│ │ │ packets │ │ muxer │
│ │ elementary stream 1 ├─────────⮞│ elementary stream 0 ╞═══════════╡
│ │ │ │ │OUTPUT.mp4 │
│ ├─────────────────────┤ └──────────────────────┴───────────┘
│ │ │ unused
│ │ elementary stream 2 ├────────╳
│ │ │
└──────────┴─────────────────────┘
@end verbatim
The above pipeline can be constructed with the following commandline:
@example
ffmpeg -i INPUT.mkv -map 0:1 -c copy OUTPUT.mp4
@end example
In this commandline
@itemize
@item
there is a single input @file{INPUT.mkv};
@item
there are no input options for this input;
@item
there is a single output @file{OUTPUT.mp4};
@item
there are two output options for this output:
@itemize
@item
@code{-map 0:1} selects the input stream to be used - from input with index 0
(i.e. the first one) the stream with index 1 (i.e. the second one);
@item
@code{-c copy} selects the @code{copy} encoder, i.e. streamcopy with no decoding
or encoding.
@end itemize
@end itemize
Streamcopy is useful for changing the elementary stream count, container format,
or modifying container-level metadata. Since there is no decoding or encoding,
it is very fast and there is no quality loss. However, it might not work in some
cases because of a variety of factors (e.g. certain information required by the
target container is not available in the source). Applying filters is obviously
also impossible, since filters work on decoded frames.
More complex streamcopy scenarios can be constructed - e.g. combining streams
from two input files into a single output:
@verbatim
┌──────────┬────────────────────┐ ┌────────────────────┬───────────┐
│ demuxer 0│ │ packets │ │ muxer │
╞══════════╡elementary stream 0 ├────────⮞│elementary stream 0 ╞═══════════╡
│INPUT0.mkv│ │ │ │OUTPUT.mp4 │
└──────────┴────────────────────┘ ├────────────────────┤ │
┌──────────┬────────────────────┐ │ │ │
│ demuxer 1│ │ packets │elementary stream 1 │ │
╞══════════╡elementary stream 0 ├────────⮞│ │ │
│INPUT1.aac│ │ └────────────────────┴───────────┘
└──────────┴────────────────────┘
@end verbatim
that can be built by the commandline
@example
ffmpeg -i INPUT0.mkv -i INPUT1.aac -map 0:0 -map 1:0 -c copy OUTPUT.mp4
@end example
The output @option{-map} option is used twice here, creating two streams in the
output file - one fed by the first input and one by the second. The single
instance of the @option{-c} option selects streamcopy for both of those streams.
You could also use multiple instances of this option together with
@ref{Stream specifiers} to apply different values to each stream, as will be
demonstrated in following sections.
A converse scenario is splitting multiple streams from a single input into
multiple outputs:
@verbatim
┌──────────┬─────────────────────┐ ┌───────────────────┬───────────┐
│ demuxer │ │ packets │ │ muxer 0 │
╞══════════╡ elementary stream 0 ├─────────⮞│elementary stream 0╞═══════════╡
│ │ │ │ │OUTPUT0.mp4│
│INPUT.mkv ├─────────────────────┤ └───────────────────┴───────────┘
│ │ │ packets ┌───────────────────┬───────────┐
│ │ elementary stream 1 ├─────────⮞│ │ muxer 1 │
│ │ │ │elementary stream 0╞═══════════╡
└──────────┴─────────────────────┘ │ │OUTPUT1.mp4│
└───────────────────┴───────────┘
@end verbatim
built with
@example
ffmpeg -i INPUT.mkv -map 0:0 -c copy OUTPUT0.mp4 -map 0:1 -c copy OUTPUT1.mp4
@end example
Note how a separate instance of the @option{-c} option is needed for every
output file even though their values are the same. This is because non-global
options (which is most of them) only apply in the context of the file before
which they are placed.
These examples can of course be further generalized into arbitrary remappings
of any number of inputs into any number of outputs.
@section Transcoding
@emph{Transcoding} is the process of decoding a stream and then encoding it
again. Since encoding tends to be computationally expensive and in most cases
degrades the stream quality (i.e. it is @emph{lossy}), you should only transcode
when you need to and perform streamcopy otherwise. Typical reasons to transcode
are:
@itemize
@item
applying filters - e.g. resizing, deinterlacing, or overlaying video; resampling
or mixing audio;
@item
you want to feed the stream to something that cannot decode the original codec.
@end itemize
Note that @command{ffmpeg} will transcode all audio, video, and subtitle streams
unless you specify @option{-c copy} for them.
Consider an example pipeline that reads an input file with one audio and one
video stream, transcodes the video and copies the audio into a single output
file. This can be schematically represented as follows
@verbatim
┌──────────┬─────────────────────┐
│ demuxer │ │ audio packets
╞══════════╡ stream 0 (audio) ├─────────────────────────────────────╮
│ │ │ │
│INPUT.mkv ├─────────────────────┤ video ┌─────────┐ raw │
│ │ │ packets │ video │ video frames │
│ │ stream 1 (video) ├─────────⮞│ decoder ├──────────────╮ │
│ │ │ │ │ │ │
└──────────┴─────────────────────┘ └─────────┘ │ │
▼ ▼
│ │
┌──────────┬─────────────────────┐ video ┌─────────┐ │ │
│ muxer │ │ packets │ video │ │ │
╞══════════╡ stream 0 (video) │⮜─────────┤ encoder ├──────────────╯ │
│ │ │ │(libx264)│ │
│OUTPUT.mp4├─────────────────────┤ └─────────┘ │
│ │ │ │
│ │ stream 1 (audio) │⮜────────────────────────────────────╯
│ │ │
└──────────┴─────────────────────┘
@end verbatim
and implemented with the following commandline:
@example
ffmpeg -i INPUT.mkv -map 0:v -map 0:a -c:v libx264 -c:a copy OUTPUT.mp4
@end example
Note how it uses stream specifiers @code{:v} and @code{:a} to select input
streams and apply different values of the @option{-c} option to them; see the
@ref{Stream specifiers} section for more details.
@section Filtering
When transcoding, audio and video streams can be filtered before encoding, with
either a @emph{simple} or @emph{complex} filtergraph.
@subsection Simple filtergraphs
Simple filtergraphs are those that have exactly one input and output, both of
the same type (audio or video). They are configured with the per-stream
@option{-filter} option (with @option{-vf} and @option{-af} aliases for
@option{-filter:v} (video) and @option{-filter:a} (audio) respectively). Note
that simple filtergraphs are tied to their output stream, so e.g. if you have
multiple audio streams, @option{-af} will create a separate filtergraph for each
one.
Taking the transcoding example from above, adding filtering (and omitting audio,
for clarity) makes it look like this:
@verbatim
┌──────────┬───────────────┐
│ demuxer │ │ ┌─────────┐
╞══════════╡ video stream │ packets │ video │ frames
│INPUT.mkv │ ├─────────⮞│ decoder ├─────⮞───╮
│ │ │ └─────────┘ │
└──────────┴───────────────┘ │
╭───────────⮜───────────╯
│ ┌────────────────────────┐
│ │ simple filtergraph │
│ ╞════════════════════════╡
│ │ ┌───────┐ ┌───────┐ │
╰──⮞├─⮞│ yadif ├─⮞│ scale ├─⮞├╮
│ └───────┘ └───────┘ ││
└────────────────────────┘│
┌──────────┬───────────────┐ video ┌─────────┐ │
│ muxer │ │ packets │ video │ │
╞══════════╡ video stream │⮜─────────┤ encoder ├───────⮜───────╯
│OUTPUT.mp4│ │ │ │
│ │ │ └─────────┘
└──────────┴───────────────┘
@end verbatim
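A commandline realizing this pipeline might look roughly as follows (the filter
parameters and file names are illustrative):
@example
ffmpeg -i INPUT.mkv -vf yadif,scale=1280:720 -c:v libx264 OUTPUT.mp4
@end example
Here @option{-vf} (an alias for @option{-filter:v}) attaches the simple
filtergraph to the output video stream.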
@subsection Complex filtergraphs
Complex filtergraphs are those which cannot be described as simply a linear
processing chain applied to one stream. This is the case, for example, when the
graph has more than one input and/or output, or when output stream type is
different from input. Complex filtergraphs are configured with the
@option{-filter_complex} option. Note that this option is global, since a
complex filtergraph, by its nature, cannot be unambiguously associated with a
single stream or file. Each instance of @option{-filter_complex} creates a new
complex filtergraph, and there can be any number of them.
A trivial example of a complex filtergraph is the @code{overlay} filter, which
has two video inputs and one video output, containing one video overlaid on top
of the other. Its audio counterpart is the @code{amix} filter.
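As an illustration (file names, coordinates, and stream choices are
hypothetical), a command overlaying the second input's video on top of the first
with @code{overlay} could look like this:
@example
ffmpeg -i main.mp4 -i overlay.mp4 \
       -filter_complex "[0:v][1:v]overlay=10:10[out]" \
       -map "[out]" -map 0:a -c:a copy OUTPUT.mp4
@end example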
@anchor{Loopback decoders}
@section Loopback decoders
While decoders are normally associated with demuxer streams, it is also possible
to create "loopback" decoders that decode the output from some encoder and allow
it to be fed back to complex filtergraphs. This is done with the @code{-dec}
directive, which takes as a parameter the index of the output stream that should
be decoded. Every such directive creates a new loopback decoder, indexed with
successive integers starting at zero. These indices should then be used to refer
to loopback decoders in complex filtergraph link labels, as described in the
documentation for @option{-filter_complex}.
Decoding AVOptions can be passed to loopback decoders by placing them before
@code{-dec}, analogously to input/output options.
E.g. the following example:
@example
ffmpeg -i INPUT \
-map 0:v:0 -c:v libx264 -crf 45 -f null - \
-threads 3 -dec 0:0 \
-filter_complex '[0:v][dec:0]hstack[stack]' \
-map '[stack]' -c:v ffv1 OUTPUT
@end example
reads an input video and
@itemize
@item
(line 2) encodes it with @code{libx264} at low quality;
@item
(line 3) decodes this encoded stream using 3 threads;
@item
(line 4) places decoded video side by side with the original input video;
@item
(line 5) combined video is then losslessly encoded and written into
@file{OUTPUT}.
@end itemize
Such a transcoding pipeline can be represented with the following diagram:
@verbatim
┌──────────┬───────────────┐
│ demuxer │ │ ┌─────────┐ ┌─────────┐ ┌────────────────────┐
╞══════════╡ video stream │ │ video │ │ video │ │ null muxer │
│ INPUT │ ├──⮞│ decoder ├──┬────────⮞│ encoder ├─┬─⮞│(discards its input)│
│ │ │ └─────────┘ │ │(libx264)│ │ └────────────────────┘
└──────────┴───────────────┘ │ └─────────┘ │
╭───────⮜──╯ ┌─────────┐ │
│ │loopback │ │
│ ╭─────⮜──────┤ decoder ├────⮜──╯
│ │ └─────────┘
│ │
│ │
│ │ ┌───────────────────┐
│ │ │complex filtergraph│
│ │ ╞═══════════════════╡
│ │ │ ┌─────────────┐ │
╰─╫─⮞├─⮞│ hstack ├─⮞├╮
╰─⮞├─⮞│ │ ││
│ └─────────────┘ ││
└───────────────────┘│
┌──────────┬───────────────┐ ┌─────────┐ │
│ muxer │ │ │ video │ │
╞══════════╡ video stream │⮜─┤ encoder ├───────⮜──────────╯
│ OUTPUT │ │ │ (ffv1) │
│ │ │ └─────────┘
└──────────┴───────────────┘
@end verbatim
@c man end DETAILED DESCRIPTION
@anchor{Stream selection}
@chapter Stream selection
@c man begin STREAM SELECTION
@command{ffmpeg} provides the @code{-map} option for manual control of stream selection in each
output file. Users can skip @code{-map} and let ffmpeg perform automatic stream selection as
described below. The @code{-vn / -an / -sn / -dn} options can be used to skip inclusion of
video, audio, subtitle and data streams respectively, whether manually mapped or automatically
selected, except for those streams which are outputs of complex filtergraphs.
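For example, to keep only the audio from an input, one can map all streams and
then exclude video, subtitle and data streams (the file names here are
illustrative):
@example
ffmpeg -i INPUT.mkv -map 0 -vn -sn -dn -c:a copy OUTPUT.mka
@end example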
@section Description
The sub-sections that follow describe the various rules that are involved in stream selection.
The examples that follow next show how these rules are applied in practice.
While every effort is made to accurately reflect the behavior of the program, FFmpeg is under
continuous development and the code may have changed since the time of this writing.
@subsection Automatic stream selection
In the absence of any map options for a particular output file, ffmpeg inspects the output
format to check which type of streams can be included in it, viz. video, audio and/or
subtitles. For each acceptable stream type, ffmpeg will pick one stream, when available,
from among all the inputs.
It will select that stream based upon the following criteria:
@itemize
@item
for video, it is the stream with the highest resolution,
@item
for audio, it is the stream with the most channels,
@item
for subtitles, it is the first subtitle stream found but there's a caveat.
The output format's default subtitle encoder can be either text-based or image-based,
and only a subtitle stream of the same type will be chosen.
@end itemize
In the case where several streams of the same type rate equally, the stream with the lowest
index is chosen.
Data or attachment streams are not automatically selected and can only be included
using @code{-map}.
@subsection Manual stream selection
When @code{-map} is used, only user-mapped streams are included in that output file,
with one possible exception for filtergraph outputs described below.
@subsection Complex filtergraphs
If there are any complex filtergraph output streams with unlabeled pads, they will be added
to the first output file. This will lead to a fatal error if the stream type is not supported
by the output format. In the absence of the map option, the inclusion of these streams leads
to the automatic stream selection of their types being skipped. If map options are present,
these filtergraph streams are included in addition to the mapped streams.
Complex filtergraph output streams with labeled pads must be mapped once and exactly once.
@subsection Stream handling
Stream handling is independent of stream selection, with an exception for subtitles described
below. Stream handling is set via the @code{-codec} option addressed to streams within a
specific @emph{output} file. In particular, codec options are applied by ffmpeg after the
stream selection process and thus do not influence the latter. If no @code{-codec} option is
specified for a stream type, ffmpeg will select the default encoder registered by the output
file muxer.
An exception exists for subtitles. If a subtitle encoder is specified for an output file, the
first subtitle stream found of any type, text or image, will be included. ffmpeg does not validate
if the specified encoder can convert the selected stream or if the converted stream is acceptable
within the output format. This applies generally as well: when the user sets an encoder manually,
the stream selection process cannot check if the encoded stream can be muxed into the output file.
If it cannot, ffmpeg will abort and @emph{all} output files will fail to be processed.
@section Examples
The following examples illustrate the behavior, quirks and limitations of ffmpeg's stream
selection methods.
They assume the following three input files.
@verbatim
input file 'A.avi'
stream 0: video 640x360
stream 1: audio 2 channels
input file 'B.mp4'
stream 0: video 1920x1080
stream 1: audio 2 channels
stream 2: subtitles (text)
stream 3: audio 5.1 channels
stream 4: subtitles (text)
input file 'C.mkv'
stream 0: video 1280x720
stream 1: audio 2 channels
stream 2: subtitles (image)
@end verbatim
@subsubheading Example: automatic stream selection
@example
ffmpeg -i A.avi -i B.mp4 out1.mkv out2.wav -map 1:a -c:a copy out3.mov
@end example
There are three output files specified, and for the first two, no @code{-map} options
are set, so ffmpeg will select streams for these two files automatically.
@file{out1.mkv} is a Matroska container file and accepts video, audio and subtitle streams,
so ffmpeg will try to select one of each type.@*
For video, it will select @code{stream 0} from @file{B.mp4}, which has the highest
resolution among all the input video streams.@*
For audio, it will select @code{stream 3} from @file{B.mp4}, since it has the greatest
number of channels.@*
For subtitles, it will select @code{stream 2} from @file{B.mp4}, which is the first subtitle
stream from among @file{A.avi} and @file{B.mp4}.
@file{out2.wav} accepts only audio streams, so only @code{stream 3} from @file{B.mp4} is
selected.
For @file{out3.mov}, since a @code{-map} option is set, no automatic stream selection will
occur. The @code{-map 1:a} option will select all audio streams from the second input
@file{B.mp4}. No other streams will be included in this output file.
For the first two outputs, all included streams will be transcoded. The encoders chosen will
be the default ones registered by each output format, which may not match the codec of the
selected input streams.
For the third output, the codec option for audio streams has been set
to @code{copy}, so no decoding-filtering-encoding operations will occur, or @emph{can} occur.
Packets of selected streams shall be conveyed from the input file and muxed within the output
file.
@subsubheading Example: automatic subtitles selection
@example
ffmpeg -i C.mkv out1.mkv -c:s dvdsub -an out2.mkv
@end example
Although @file{out1.mkv} is a Matroska container file which accepts subtitle streams, only a
video and audio stream shall be selected. The subtitle stream of @file{C.mkv} is image-based
and the default subtitle encoder of the Matroska muxer is text-based, so a transcode operation
for the subtitles is expected to fail and hence the stream isn't selected. However, in
@file{out2.mkv}, a subtitle encoder is specified in the command and so, the subtitle stream is
selected, in addition to the video stream. The presence of @code{-an} disables audio stream
selection for @file{out2.mkv}.
@subsubheading Example: unlabeled filtergraph outputs
@example
ffmpeg -i A.avi -i C.mkv -i B.mp4 -filter_complex "overlay" out1.mp4 out2.srt
@end example
A filtergraph is set up here using the @code{-filter_complex} option and consists of a single
video filter. The @code{overlay} filter requires exactly two video inputs, but none are
specified, so the first two available video streams are used, those of @file{A.avi} and
@file{C.mkv}. The output pad of the filter has no label and so is sent to the first output file
@file{out1.mp4}. Due to this, automatic selection of the video stream is skipped, which would
have selected the stream in @file{B.mp4}. The audio stream with the most channels, viz. @code{stream 3}
in @file{B.mp4}, is chosen automatically. No subtitle stream is chosen however, since the MP4
format has no default subtitle encoder registered, and the user hasn't specified a subtitle encoder.
The 2nd output file, @file{out2.srt}, only accepts text-based subtitle streams. So, even though
the first subtitle stream available belongs to @file{C.mkv}, it is image-based and hence skipped.
The selected stream, @code{stream 2} in @file{B.mp4}, is the first text-based subtitle stream.
@subsubheading Example: labeled filtergraph outputs
@example
ffmpeg -i A.avi -i B.mp4 -i C.mkv -filter_complex "[1:v]hue=s=0[outv];overlay;aresample" \
-map '[outv]' -an out1.mp4 \
out2.mkv \
-map '[outv]' -map 1:a:0 out3.mkv
@end example
The above command will fail, as the output pad labelled @code{[outv]} has been mapped twice.
None of the output files shall be processed.
@example
ffmpeg -i A.avi -i B.mp4 -i C.mkv -filter_complex "[1:v]hue=s=0[outv];overlay;aresample" \
-an out1.mp4 \
out2.mkv \
-map 1:a:0 out3.mkv
@end example
The command above will also fail, as the hue filter output has a label, @code{[outv]},
that hasn't been mapped anywhere.
The command should be modified as follows,
@example
ffmpeg -i A.avi -i B.mp4 -i C.mkv -filter_complex "[1:v]hue=s=0,split=2[outv1][outv2];overlay;aresample" \
-map '[outv1]' -an out1.mp4 \
out2.mkv \
-map '[outv2]' -map 1:a:0 out3.mkv
@end example
The video stream from @file{B.mp4} is sent to the hue filter, whose output is cloned once using
the split filter, and both outputs labelled. Then a copy each is mapped to the first and third
output files.
The overlay filter, requiring two video inputs, uses the first two unused video streams. Those
are the streams from @file{A.avi} and @file{C.mkv}. The overlay output isn't labelled, so it is
sent to the first output file @file{out1.mp4}, regardless of the presence of the @code{-map} option.
The aresample filter is sent the first unused audio stream, that of @file{A.avi}. Since this filter
output is also unlabelled, it too is mapped to the first output file. The presence of @code{-an}
only suppresses automatic or manual stream selection of audio streams, not outputs sent from
filtergraphs. Both these mapped streams shall be ordered before the mapped stream in @file{out1.mp4}.
The video, audio and subtitle streams mapped to @code{out2.mkv} are entirely determined by
automatic stream selection.
@file{out3.mkv} consists of the cloned video output from the hue filter and the first audio
stream from @file{B.mp4}.
@*
@c man end STREAM SELECTION
@chapter Options
@c man begin OPTIONS
@include fftools-common-opts.texi
@section Main options
@table @option
@item -f @var{fmt} (@emph{input/output})
Force input or output file format. The format is normally auto detected for input
files and guessed from the file extension for output files, so this option is not
needed in most cases.
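One common case where it is needed is raw input, which has no header to probe;
for example (the raw video parameters here are illustrative):
@example
ffmpeg -f rawvideo -pixel_format yuv420p -video_size 640x480 -framerate 25 -i input.yuv output.mp4
@end example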
@item -i @var{url} (@emph{input})
input file url
@item -y (@emph{global})
Overwrite output files without asking.
@item -n (@emph{global})
Do not overwrite output files, and exit immediately if a specified
output file already exists.
@item -stream_loop @var{number} (@emph{input})
Set the number of times the input stream shall be looped. Loop 0 means no loop,
loop -1 means infinite loop.
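For example, to loop the input three additional times while streamcopying
(illustrative file names):
@example
ffmpeg -stream_loop 3 -i input.mp4 -c copy output.mp4
@end example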
@item -recast_media (@emph{global})
Allow forcing a decoder of a different media type than the one
detected or designated by the demuxer. Useful for decoding media
data muxed as data streams.
@item -c[:@var{stream_specifier}] @var{codec} (@emph{input/output,per-stream})
@itemx -codec[:@var{stream_specifier}] @var{codec} (@emph{input/output,per-stream})
Select an encoder (when used before an output file) or a decoder (when used
before an input file) for one or more streams. @var{codec} is the name of a
decoder/encoder or a special value @code{copy} (output only) to indicate that
the stream is not to be re-encoded.
For example
@example
ffmpeg -i INPUT -map 0 -c:v libx264 -c:a copy OUTPUT
@end example
encodes all video streams with libx264 and copies all audio streams.
For each stream, the last matching @code{c} option is applied, so
@example
ffmpeg -i INPUT -map 0 -c copy -c:v:1 libx264 -c:a:137 libvorbis OUTPUT
@end example
will copy all the streams except the second video, which will be encoded with
libx264, and the 138th audio, which will be encoded with libvorbis.
@item -t @var{duration} (@emph{input/output})
When used as an input option (before @code{-i}), limit the @var{duration} of
data read from the input file.
When used as an output option (before an output url), stop writing the
output after its duration reaches @var{duration}.
@var{duration} must be a time duration specification,
see @ref{time duration syntax,,the Time duration section in the ffmpeg-utils(1) manual,ffmpeg-utils}.
-to and -t are mutually exclusive and -t has priority.
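For example, to stop writing the output after 30 seconds (streamcopying here,
with illustrative file names):
@example
ffmpeg -i input.mp4 -t 30 -c copy output.mp4
@end example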
@item -to @var{position} (@emph{input/output})
Stop writing the output or reading the input at @var{position}.
@var{position} must be a time duration specification,
see @ref{time duration syntax,,the Time duration section in the ffmpeg-utils(1) manual,ffmpeg-utils}.
-to and -t are mutually exclusive and -t has priority.
@item -fs @var{limit_size} (@emph{output})
Set the file size limit, expressed in bytes. No further chunk of bytes is written
after the limit is exceeded. The size of the output file is slightly more than the
requested file size.
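For example, to stop writing after roughly 10 MB have been produced (the limit
may be slightly overshot, as noted above):
@example
ffmpeg -i input.mp4 -fs 10000000 output.mp4
@end example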
@item -ss @var{position} (@emph{input/output})
When used as an input option (before @code{-i}), seeks in this input file to
@var{position}. Note that in most formats it is not possible to seek exactly,
so @command{ffmpeg} will seek to the closest seek point before @var{position}.
When transcoding and @option{-accurate_seek} is enabled (the default), this
extra segment between the seek point and @var{position} will be decoded and
discarded. When doing stream copy or when @option{-noaccurate_seek} is used, it
will be preserved.
When used as an output option (before an output url), decodes but discards
input until the timestamps reach @var{position}.
@var{position} must be a time duration specification,
see @ref{time duration syntax,,the Time duration section in the ffmpeg-utils(1) manual,ffmpeg-utils}.
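For example, to seek to one minute into the input and copy ten seconds from that
point:
@example
ffmpeg -ss 00:01:00 -i input.mp4 -t 10 -c copy clip.mp4
@end example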
@item -sseof @var{position} (@emph{input})
Like the @code{-ss} option but relative to the "end of file". That is, negative
values are earlier in the file, 0 is at EOF.
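For example, to take approximately the last ten seconds of the input:
@example
ffmpeg -sseof -10 -i input.mp4 -c copy tail.mp4
@end example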
@item -isync @var{input_index} (@emph{input})
Assign an input as a sync source.
This will take the difference between the start times of the target and reference inputs and
offset the timestamps of the target file by that difference. The source timestamps of the two
inputs should derive from the same clock source for expected results. If @code{copyts} is set
then @code{start_at_zero} must also be set. If either of the inputs has no starting timestamp
then no sync adjustment is made.
Acceptable values are those that refer to a valid ffmpeg input index. If the sync reference is
the target index itself or @var{-1}, then no adjustment is made to target timestamps. A sync
reference may not itself be synced to any other input.
Default value is @var{-1}.
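A hypothetical usage, aligning the second input to the first one's start time
(file names are illustrative):
@example
ffmpeg -i main.mkv -isync 0 -i auxiliary.mkv -map 0 -map 1 -c copy out.mkv
@end example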
@item -itsoffset @var{offset} (@emph{input})
Set the input time offset.
@var{offset} must be a time duration specification,
see @ref{time duration syntax,,the Time duration section in the ffmpeg-utils(1) manual,ffmpeg-utils}.
The offset is added to the timestamps of the input files. Specifying
a positive offset means that the corresponding streams are delayed by
the time duration specified in @var{offset}.
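For example, to delay all streams of the second input by half a second relative
to the first (illustrative file names):
@example
ffmpeg -i video.mp4 -itsoffset 0.5 -i audio.wav -map 0:v -map 1:a -c copy out.mkv
@end example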
@item -itsscale @var{scale} (@emph{input,per-stream})
Rescale input timestamps. @var{scale} should be a floating point number.
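For example, a sketch that stretches the input timestamps by a factor of 2,
roughly halving the playback speed without re-encoding (audio is dropped here,
since its samples would not be stretched along with the timestamps):
@example
ffmpeg -itsscale 2.0 -i input.mp4 -an -c:v copy output.mkv
@end example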
@item -timestamp @var{date} (@emph{output})
Set the recording timestamp in the container.
@var{date} must be a date specification,
see @ref{date syntax,,the Date section in the ffmpeg-utils(1) manual,ffmpeg-utils}.
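For example (the date value is illustrative; see the date syntax reference above
for the accepted formats):
@example
ffmpeg -i input.mp4 -c copy -timestamp 2024-01-01T12:00:00 output.mov
@end example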
@item -metadata[:metadata_specifier] @var{key}=@var{value} (@emph{output,per-metadata})
Set a metadata key/value pair.
An optional @var{metadata_specifier} may be given to set metadata
on streams, chapters or programs. See @code{-map_metadata}
documentation for details.
This option overrides metadata set with @code{-map_metadata}. It is
also possible to delete metadata by using an empty value.
For example, for setting the title in the output file:
@example
ffmpeg -i in.avi -metadata title="my title" out.flv
@end example
To set the language of the first audio stream:
@example
ffmpeg -i INPUT -metadata:s:a:0 language=eng OUTPUT
@end example
@item -disposition[:stream_specifier] @var{value} (@emph{output,per-stream})
Sets the disposition flags for a stream.
Default value: by default, all disposition flags are copied from the input stream,
unless the output stream this option applies to is fed by a complex filtergraph
- in that case no disposition flags are set by default.
@var{value} is a sequence of disposition flags separated by '+' or '-'. A '+'
prefix adds the given disposition, '-' removes it. If the first flag is also
prefixed with '+' or '-', the resulting disposition is the default value
updated by @var{value}. If the first flag is not prefixed, the resulting
disposition is @var{value}. It is also possible to clear the disposition by
setting it to 0.
If no @code{-disposition} options were specified for an output file, ffmpeg will
automatically set the 'default' disposition flag on the first stream of each type,
when there are multiple streams of this type in the output file and no stream of
that type is already marked as default.
The @code{-dispositions} option lists the known disposition flags.
For example, to make the second audio stream the default stream:
@example
ffmpeg -i in.mkv -c copy -disposition:a:1 default out.mkv
@end example
To make the second subtitle stream the default stream and remove the default
disposition from the first subtitle stream:
@example
ffmpeg -i in.mkv -c copy -disposition:s:0 0 -disposition:s:1 default out.mkv
@end example
To add an embedded cover/thumbnail:
@example
ffmpeg -i in.mp4 -i IMAGE -map 0 -map 1 -c copy -c:v:1 png -disposition:v:1 attached_pic out.mp4
@end example
To add the 'original' and remove the 'comment' disposition flag from the first
audio stream without removing its other disposition flags:
@example
ffmpeg -i in.mkv -c copy -disposition:a:0 +original-comment out.mkv
@end example
To remove the 'original' and add the 'comment' disposition flag to the first
audio stream without removing its other disposition flags:
@example
ffmpeg -i in.mkv -c copy -disposition:a:0 -original+comment out.mkv
@end example
To set only the 'original' and 'comment' disposition flags on the first audio
stream (and remove its other disposition flags):
@example
ffmpeg -i in.mkv -c copy -disposition:a:0 original+comment out.mkv
@end example
To remove all disposition flags from the first audio stream:
@example
ffmpeg -i in.mkv -c copy -disposition:a:0 0 out.mkv
@end example
Not all muxers support embedded thumbnails, and those that do only support a few formats, such as JPEG or PNG.
@item -program [title=@var{title}:][program_num=@var{program_num}:]st=@var{stream}[:st=@var{stream}...] (@emph{output})
Creates a program with the specified @var{title}, @var{program_num} and adds the specified
@var{stream}(s) to it.
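For example, a sketch creating one program in an MPEG-TS output that contains the
first video and audio streams (titles, numbers and mappings are illustrative):
@example
ffmpeg -i input.ts -map 0:v:0 -map 0:a:0 -c copy \
       -program title=Main:program_num=1:st=0:st=1 output.ts
@end example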
@item -stream_group [map=@var{input_file_id}=@var{stream_group}][type=@var{type}:]st=@var{stream}[:st=@var{stream}][:stg=@var{stream_group}][:id=@var{stream_group_id}...] (@emph{output})
Creates a stream group of the specified @var{type} and @var{stream_group_id}, or by
@var{map}ping an input group, adding the specified @var{stream}(s) and/or previously
defined @var{stream_group}(s) to it.
@var{type} can be one of the following:
@table @option
@item iamf_audio_element
Groups @var{stream}s that belong to the same IAMF Audio Element
For this group @var{type}, the following options are available
@table @option
@item audio_element_type
The Audio Element type. The following values are supported:
@table @option
@item channel
Scalable channel audio representation
@item scene
Ambisonics representation
@end table
@item demixing
Demixing information used to reconstruct a scalable channel audio representation.
This option must be separated from the rest with a ',', and takes the following
key=value options
@table @option
@item parameter_id
An identifier that parameter blocks in frames may refer to.
  829. @item dmixp_mode
  830. A pre-defined combination of demixing parameters
  831. @end table
  832. @item recon_gain
  833. Recon gain information used to reconstruct a scalable channel audio representation.
  834. This option must be separated from the rest with a ',', and takes the following
  835. key=value options
  836. @table @option
  837. @item parameter_id
An identifier that parameter blocks in frames may refer to.
  839. @end table
  840. @item layer
  841. A layer defining a Channel Layout in the Audio Element.
  842. This option must be separated from the rest with a ','. Several ',' separated entries
  843. can be defined, and at least one must be set.
  844. It takes the following ":"-separated key=value options
  845. @table @option
  846. @item ch_layout
  847. The layer's channel layout
  848. @item flags
  849. The following flags are available:
  850. @table @option
  851. @item recon_gain
Whether to signal if recon_gain is present as metadata in parameter blocks within frames.
  853. @end table
  854. @item output_gain
  855. @item output_gain_flags
  856. Which channels output_gain applies to. The following flags are available:
  857. @table @option
  858. @item FL
  859. @item FR
  860. @item BL
  861. @item BR
  862. @item TFL
  863. @item TFR
  864. @end table
  865. @item ambisonics_mode
  866. The ambisonics mode. This has no effect if audio_element_type is set to channel.
  867. The following values are supported:
  868. @table @option
  869. @item mono
  870. Each ambisonics channel is coded as an individual mono stream in the group
  871. @end table
  872. @end table
  873. @item default_w
  874. Default weight value
  875. @end table
  876. @item iamf_mix_presentation
Groups @var{stream}s that belong to all the IAMF Audio Elements referenced by the same
IAMF Mix Presentation.
For this group @var{type}, the following options are available:
  880. @table @option
  881. @item submix
  882. A sub-mix within the Mix Presentation.
  883. This option must be separated from the rest with a ','. Several ',' separated entries
  884. can be defined, and at least one must be set.
  885. It takes the following ":"-separated key=value options
  886. @table @option
  887. @item parameter_id
An identifier that parameter blocks in frames may refer to, used for post-processing the
mixed audio signal to generate the audio signal for playback.
  890. @item parameter_rate
The sample rate in which duration fields in parameter blocks that refer to this
@var{parameter_id} are expressed.
  893. @item default_mix_gain
  894. Default mix gain value to apply when there are no parameter blocks sharing the same
  895. @var{parameter_id} for a given frame
  896. @item element
  897. References an Audio Element used in this Mix Presentation to generate the final output
  898. audio signal for playback.
  899. This option must be separated from the rest with a '|'. Several '|' separated entries
  900. can be defined, and at least one must be set.
  901. It takes the following ":"-separated key=value options:
  902. @table @option
  903. @item stg
  904. The @var{stream_group_id} for an Audio Element which this sub-mix refers to
  905. @item parameter_id
An identifier that parameter blocks in frames may refer to, used for applying processing to
the referenced and rendered Audio Element before it is summed with other processed Audio
Elements.
  909. @item parameter_rate
The sample rate in which duration fields in parameter blocks that refer to this
@var{parameter_id} are expressed.
  912. @item default_mix_gain
  913. Default mix gain value to apply when there are no parameter blocks sharing the same
  914. @var{parameter_id} for a given frame
  915. @item annotations
  916. A key=value string describing the sub-mix element where "key" is a string conforming to
  917. BCP-47 that specifies the language for the "value" string. "key" must be the same as the
  918. one in the mix's @var{annotations}
  919. @item headphones_rendering_mode
  920. Indicates whether the input channel-based Audio Element is rendered to stereo loudspeakers
  921. or spatialized with a binaural renderer when played back on headphones.
  922. This has no effect if the referenced Audio Element's @var{audio_element_type} is set to
  923. channel.
  924. The following values are supported:
  925. @table @option
  926. @item stereo
  927. @item binaural
  928. @end table
  929. @end table
  930. @item layout
  931. Specifies the layouts for this sub-mix on which the loudness information was measured.
  932. This option must be separated from the rest with a '|'. Several '|' separated entries
  933. can be defined, and at least one must be set.
  934. It takes the following ":"-separated key=value options:
  935. @table @option
  936. @item layout_type
  937. @table @option
  938. @item loudspeakers
  939. The layout follows the loudspeaker sound system convention of ITU-2051-3.
  940. @item binaural
  941. The layout is binaural.
  942. @end table
  943. @item sound_system
Channel layout matching one of Sound Systems A to J of ITU-2051-3, plus 7.1.2 and 3.1.2.
  945. This has no effect if @var{layout_type} is set to binaural.
  946. @item integrated_loudness
  947. The program integrated loudness information, as defined in ITU-1770-4.
  948. @item digital_peak
  949. The digital (sampled) peak value of the audio signal, as defined in ITU-1770-4.
  950. @item true_peak
  951. The true peak of the audio signal, as defined in ITU-1770-4.
  952. @item dialog_anchored_loudness
  953. The Dialogue loudness information, as defined in ITU-1770-4.
  954. @item album_anchored_loudness
  955. The Album loudness information, as defined in ITU-1770-4.
  956. @end table
  957. @end table
  958. @item annotations
A key=value string describing the mix, where "key" is a string conforming to BCP-47
that specifies the language for the "value" string. "key" must be the same as the ones in
all sub-mix elements' @var{annotations}.
  962. @end table
  963. @end table
E.g. to create a scalable 5.1 IAMF file from several WAV input files:
  965. @example
  966. ffmpeg -i front.wav -i back.wav -i center.wav -i lfe.wav
  967. -map 0:0 -map 1:0 -map 2:0 -map 3:0 -c:a opus
  968. -stream_group type=iamf_audio_element:id=1:st=0:st=1:st=2:st=3,
  969. demixing=parameter_id=998,
  970. recon_gain=parameter_id=101,
  971. layer=ch_layout=stereo,
  972. layer=ch_layout=5.1,
  973. -stream_group type=iamf_mix_presentation:id=2:stg=0:annotations=en-us=Mix_Presentation,
  974. submix=parameter_id=100:parameter_rate=48000|element=stg=0:parameter_id=100:annotations=en-us=Scalable_Submix|layout=sound_system=stereo|layout=sound_system=5.1
  975. -streamid 0:0 -streamid 1:1 -streamid 2:2 -streamid 3:3 output.iamf
  976. @end example
  977. To copy the two stream groups (Audio Element and Mix Presentation) from an input IAMF file with four
  978. streams into an mp4 output
  979. @example
  980. ffmpeg -i input.iamf -c:a copy -stream_group map=0=0:st=0:st=1:st=2:st=3 -stream_group map=0=1:stg=0
  981. -streamid 0:0 -streamid 1:1 -streamid 2:2 -streamid 3:3 output.mp4
  982. @end example
  983. @item -target @var{type} (@emph{output})
  984. Specify target file type (@code{vcd}, @code{svcd}, @code{dvd}, @code{dv},
  985. @code{dv50}). @var{type} may be prefixed with @code{pal-}, @code{ntsc-} or
  986. @code{film-} to use the corresponding standard. All the format options
  987. (bitrate, codecs, buffer sizes) are then set automatically. You can just type:
  988. @example
  989. ffmpeg -i myfile.avi -target vcd /tmp/vcd.mpg
  990. @end example
  991. Nevertheless you can specify additional options as long as you know
  992. they do not conflict with the standard, as in:
  993. @example
  994. ffmpeg -i myfile.avi -target vcd -bf 2 /tmp/vcd.mpg
  995. @end example
  996. The parameters set for each target are as follows.
  997. @strong{VCD}
  998. @example
  999. @var{pal}:
  1000. -f vcd -muxrate 1411200 -muxpreload 0.44 -packetsize 2324
  1001. -s 352x288 -r 25
  1002. -codec:v mpeg1video -g 15 -b:v 1150k -maxrate:v 1150k -minrate:v 1150k -bufsize:v 327680
  1003. -ar 44100 -ac 2
  1004. -codec:a mp2 -b:a 224k
  1005. @var{ntsc}:
  1006. -f vcd -muxrate 1411200 -muxpreload 0.44 -packetsize 2324
  1007. -s 352x240 -r 30000/1001
  1008. -codec:v mpeg1video -g 18 -b:v 1150k -maxrate:v 1150k -minrate:v 1150k -bufsize:v 327680
  1009. -ar 44100 -ac 2
  1010. -codec:a mp2 -b:a 224k
  1011. @var{film}:
  1012. -f vcd -muxrate 1411200 -muxpreload 0.44 -packetsize 2324
  1013. -s 352x240 -r 24000/1001
  1014. -codec:v mpeg1video -g 18 -b:v 1150k -maxrate:v 1150k -minrate:v 1150k -bufsize:v 327680
  1015. -ar 44100 -ac 2
  1016. -codec:a mp2 -b:a 224k
  1017. @end example
  1018. @strong{SVCD}
  1019. @example
  1020. @var{pal}:
  1021. -f svcd -packetsize 2324
  1022. -s 480x576 -pix_fmt yuv420p -r 25
  1023. -codec:v mpeg2video -g 15 -b:v 2040k -maxrate:v 2516k -minrate:v 0 -bufsize:v 1835008 -scan_offset 1
  1024. -ar 44100
  1025. -codec:a mp2 -b:a 224k
  1026. @var{ntsc}:
  1027. -f svcd -packetsize 2324
  1028. -s 480x480 -pix_fmt yuv420p -r 30000/1001
  1029. -codec:v mpeg2video -g 18 -b:v 2040k -maxrate:v 2516k -minrate:v 0 -bufsize:v 1835008 -scan_offset 1
  1030. -ar 44100
  1031. -codec:a mp2 -b:a 224k
  1032. @var{film}:
  1033. -f svcd -packetsize 2324
  1034. -s 480x480 -pix_fmt yuv420p -r 24000/1001
  1035. -codec:v mpeg2video -g 18 -b:v 2040k -maxrate:v 2516k -minrate:v 0 -bufsize:v 1835008 -scan_offset 1
  1036. -ar 44100
  1037. -codec:a mp2 -b:a 224k
  1038. @end example
  1039. @strong{DVD}
  1040. @example
  1041. @var{pal}:
  1042. -f dvd -muxrate 10080k -packetsize 2048
  1043. -s 720x576 -pix_fmt yuv420p -r 25
  1044. -codec:v mpeg2video -g 15 -b:v 6000k -maxrate:v 9000k -minrate:v 0 -bufsize:v 1835008
  1045. -ar 48000
  1046. -codec:a ac3 -b:a 448k
  1047. @var{ntsc}:
  1048. -f dvd -muxrate 10080k -packetsize 2048
  1049. -s 720x480 -pix_fmt yuv420p -r 30000/1001
  1050. -codec:v mpeg2video -g 18 -b:v 6000k -maxrate:v 9000k -minrate:v 0 -bufsize:v 1835008
  1051. -ar 48000
  1052. -codec:a ac3 -b:a 448k
  1053. @var{film}:
  1054. -f dvd -muxrate 10080k -packetsize 2048
  1055. -s 720x480 -pix_fmt yuv420p -r 24000/1001
  1056. -codec:v mpeg2video -g 18 -b:v 6000k -maxrate:v 9000k -minrate:v 0 -bufsize:v 1835008
  1057. -ar 48000
  1058. -codec:a ac3 -b:a 448k
  1059. @end example
  1060. @strong{DV}
  1061. @example
  1062. @var{pal}:
  1063. -f dv
  1064. -s 720x576 -pix_fmt yuv420p -r 25
  1065. -ar 48000 -ac 2
  1066. @var{ntsc}:
  1067. -f dv
  1068. -s 720x480 -pix_fmt yuv411p -r 30000/1001
  1069. -ar 48000 -ac 2
  1070. @var{film}:
  1071. -f dv
  1072. -s 720x480 -pix_fmt yuv411p -r 24000/1001
  1073. -ar 48000 -ac 2
  1074. @end example
  1075. The @code{dv50} target is identical to the @code{dv} target except that the pixel format set is @code{yuv422p} for all three standards.
  1076. Any user-set value for a parameter above will override the target preset value. In that case, the output may
  1077. not comply with the target standard.
  1078. @item -dn (@emph{input/output})
  1079. As an input option, blocks all data streams of a file from being filtered or
  1080. being automatically selected or mapped for any output. See @code{-discard}
  1081. option to disable streams individually.
  1082. As an output option, disables data recording i.e. automatic selection or
  1083. mapping of any data stream. For full manual control see the @code{-map}
  1084. option.
  1085. @item -dframes @var{number} (@emph{output})
  1086. Set the number of data frames to output. This is an obsolete alias for
  1087. @code{-frames:d}, which you should use instead.
  1088. @item -frames[:@var{stream_specifier}] @var{framecount} (@emph{output,per-stream})
  1089. Stop writing to the stream after @var{framecount} frames.
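For example, to stop writing the video stream after 100 frames (file names are illustrative):
@example
ffmpeg -i in.mp4 -frames:v 100 out.mp4
@end example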
  1090. @item -q[:@var{stream_specifier}] @var{q} (@emph{output,per-stream})
  1091. @itemx -qscale[:@var{stream_specifier}] @var{q} (@emph{output,per-stream})
  1092. Use fixed quality scale (VBR). The meaning of @var{q}/@var{qscale} is
  1093. codec-dependent.
If @var{qscale} is used without a @var{stream_specifier}, it applies only to the video
stream. This is to maintain compatibility with previous behavior, and because specifying
the same codec-specific value for two different codecs (audio and video) is generally not
what is intended when no stream specifier is used.
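For example, assuming an MP3 encoder that interprets the quality scale (such as libmp3lame)
is available, the following selects a VBR quality for the audio stream (file names are
illustrative):
@example
ffmpeg -i in.wav -q:a 2 out.mp3
@end example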
  1099. @anchor{filter_option}
  1100. @item -filter[:@var{stream_specifier}] @var{filtergraph} (@emph{output,per-stream})
  1101. Create the filtergraph specified by @var{filtergraph} and use it to
  1102. filter the stream.
  1103. @var{filtergraph} is a description of the filtergraph to apply to
  1104. the stream, and must have a single input and a single output of the
  1105. same type of the stream. In the filtergraph, the input is associated
  1106. to the label @code{in}, and the output to the label @code{out}. See
  1107. the ffmpeg-filters manual for more information about the filtergraph
  1108. syntax.
  1109. See the @ref{filter_complex_option,,-filter_complex option} if you
  1110. want to create filtergraphs with multiple inputs and/or outputs.
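For example, to filter the video stream with a simple filtergraph that scales it to 1280x720
(file names are illustrative):
@example
ffmpeg -i in.mp4 -filter:v scale=1280:720 out.mp4
@end example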
  1111. @item -reinit_filter[:@var{stream_specifier}] @var{integer} (@emph{input,per-stream})
  1112. This boolean option determines if the filtergraph(s) to which this stream is fed gets
  1113. reinitialized when input frame parameters change mid-stream. This option is enabled by
  1114. default as most video and all audio filters cannot handle deviation in input frame properties.
Upon reinitialization, existing filter state is lost, such as the frame count @code{n}
reference available in some filters. Any frames buffered at the time of reinitialization are lost.
The properties whose change triggers reinitialization are:
  1118. for video, frame resolution or pixel format;
  1119. for audio, sample format, sample rate, channel count or channel layout.
  1120. @item -filter_threads @var{nb_threads} (@emph{global})
  1121. Defines how many threads are used to process a filter pipeline. Each pipeline
  1122. will produce a thread pool with this many threads available for parallel processing.
  1123. The default is the number of available CPUs.
  1124. @item -pre[:@var{stream_specifier}] @var{preset_name} (@emph{output,per-stream})
  1125. Specify the preset for matching stream(s).
  1126. @item -stats (@emph{global})
  1127. Log encoding progress/statistics as "info"-level log (see @code{-loglevel}).
  1128. It is on by default, to explicitly disable it you need to specify @code{-nostats}.
  1129. @item -stats_period @var{time} (@emph{global})
  1130. Set period at which encoding progress/statistics are updated. Default is 0.5 seconds.
  1131. @item -progress @var{url} (@emph{global})
  1132. Send program-friendly progress information to @var{url}.
  1133. Progress information is written periodically and at the end of
  1134. the encoding process. It is made of "@var{key}=@var{value}" lines. @var{key}
  1135. consists of only alphanumeric characters. The last key of a sequence of
  1136. progress information is always "progress" with the value "continue" or "end".
  1137. The update period is set using @code{-stats_period}.
  1138. For example, log progress information to stdout:
  1139. @example
  1140. ffmpeg -progress pipe:1 -i in.mkv out.mkv
  1141. @end example
  1142. @anchor{stdin option}
  1143. @item -stdin
  1144. Enable interaction on standard input. On by default unless standard input is
  1145. used as an input. To explicitly disable interaction you need to specify
  1146. @code{-nostdin}.
  1147. Disabling interaction on standard input is useful, for example, if
  1148. ffmpeg is in the background process group. Roughly the same result can
  1149. be achieved with @code{ffmpeg ... < /dev/null} but it requires a
  1150. shell.
  1151. @item -debug_ts (@emph{global})
  1152. Print timestamp/latency information. It is off by default. This option is
  1153. mostly useful for testing and debugging purposes, and the output
  1154. format may change from one version to another, so it should not be
  1155. employed by portable scripts.
  1156. See also the option @code{-fdebug ts}.
  1157. @item -attach @var{filename} (@emph{output})
  1158. Add an attachment to the output file. This is supported by a few formats
  1159. like Matroska for e.g. fonts used in rendering subtitles. Attachments
  1160. are implemented as a specific type of stream, so this option will add
  1161. a new stream to the file. It is then possible to use per-stream options
  1162. on this stream in the usual way. Attachment streams created with this
  1163. option will be created after all the other streams (i.e. those created
  1164. with @code{-map} or automatic mappings).
  1165. Note that for Matroska you also have to set the mimetype metadata tag:
  1166. @example
  1167. ffmpeg -i INPUT -attach DejaVuSans.ttf -metadata:s:2 mimetype=application/x-truetype-font out.mkv
  1168. @end example
  1169. (assuming that the attachment stream will be third in the output file).
  1170. @item -dump_attachment[:@var{stream_specifier}] @var{filename} (@emph{input,per-stream})
  1171. Extract the matching attachment stream into a file named @var{filename}. If
  1172. @var{filename} is empty, then the value of the @code{filename} metadata tag
  1173. will be used.
  1174. E.g. to extract the first attachment to a file named 'out.ttf':
  1175. @example
  1176. ffmpeg -dump_attachment:t:0 out.ttf -i INPUT
  1177. @end example
  1178. To extract all attachments to files determined by the @code{filename} tag:
  1179. @example
  1180. ffmpeg -dump_attachment:t "" -i INPUT
  1181. @end example
  1182. Technical note -- attachments are implemented as codec extradata, so this
  1183. option can actually be used to extract extradata from any stream, not just
  1184. attachments.
  1185. @end table
  1186. @section Video Options
  1187. @table @option
  1188. @item -vframes @var{number} (@emph{output})
  1189. Set the number of video frames to output. This is an obsolete alias for
  1190. @code{-frames:v}, which you should use instead.
  1191. @item -r[:@var{stream_specifier}] @var{fps} (@emph{input/output,per-stream})
  1192. Set frame rate (Hz value, fraction or abbreviation).
  1193. As an input option, ignore any timestamps stored in the file and instead
  1194. generate timestamps assuming constant frame rate @var{fps}.
  1195. This is not the same as the @option{-framerate} option used for some input formats
  1196. like image2 or v4l2 (it used to be the same in older versions of FFmpeg).
  1197. If in doubt use @option{-framerate} instead of the input option @option{-r}.
  1198. As an output option:
  1199. @table @option
  1200. @item video encoding
  1201. Duplicate or drop frames right before encoding them to achieve constant output
  1202. frame rate @var{fps}.
  1203. @item video streamcopy
  1204. Indicate to the muxer that @var{fps} is the stream frame rate. No data is
  1205. dropped or duplicated in this case. This may produce invalid files if @var{fps}
  1206. does not match the actual stream frame rate as determined by packet timestamps.
  1207. See also the @code{setts} bitstream filter.
  1208. @end table
  1209. @item -fpsmax[:@var{stream_specifier}] @var{fps} (@emph{output,per-stream})
  1210. Set maximum frame rate (Hz value, fraction or abbreviation).
  1211. Clamps output frame rate when output framerate is auto-set and is higher than this value.
  1212. Useful in batch processing or when input framerate is wrongly detected as very high.
  1213. It cannot be set together with @code{-r}. It is ignored during streamcopy.
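For example, to cap an auto-selected output frame rate at 30 fps (file names are illustrative):
@example
ffmpeg -i in.mkv -fpsmax 30 out.mp4
@end example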
  1214. @item -s[:@var{stream_specifier}] @var{size} (@emph{input/output,per-stream})
  1215. Set frame size.
  1216. As an input option, this is a shortcut for the @option{video_size} private
  1217. option, recognized by some demuxers for which the frame size is either not
  1218. stored in the file or is configurable -- e.g. raw video or video grabbers.
  1219. As an output option, this inserts the @code{scale} video filter to the
  1220. @emph{end} of the corresponding filtergraph. Please use the @code{scale} filter
  1221. directly to insert it at the beginning or some other place.
  1222. The format is @samp{wxh} (default - same as source).
  1223. @item -aspect[:@var{stream_specifier}] @var{aspect} (@emph{output,per-stream})
  1224. Set the video display aspect ratio specified by @var{aspect}.
  1225. @var{aspect} can be a floating point number string, or a string of the
  1226. form @var{num}:@var{den}, where @var{num} and @var{den} are the
  1227. numerator and denominator of the aspect ratio. For example "4:3",
  1228. "16:9", "1.3333", and "1.7777" are valid argument values.
  1229. If used together with @option{-vcodec copy}, it will affect the aspect ratio
  1230. stored at container level, but not the aspect ratio stored in encoded
  1231. frames, if it exists.
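For example, to set a 16:9 display aspect ratio at the container level while copying the
streams (file names are illustrative):
@example
ffmpeg -i in.mp4 -c copy -aspect 16:9 out.mp4
@end example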
  1232. @item -display_rotation[:@var{stream_specifier}] @var{rotation} (@emph{input,per-stream})
  1233. Set video rotation metadata.
@var{rotation} is a decimal number specifying the amount in degrees by
  1235. which the video should be rotated counter-clockwise before being
  1236. displayed.
  1237. This option overrides the rotation/display transform metadata stored in
  1238. the file, if any. When the video is being transcoded (rather than
  1239. copied) and @code{-autorotate} is enabled, the video will be rotated at
  1240. the filtering stage. Otherwise, the metadata will be written into the
  1241. output file if the muxer supports it.
  1242. If the @code{-display_hflip} and/or @code{-display_vflip} options are
  1243. given, they are applied after the rotation specified by this option.
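For example, a sketch of tagging the input video as rotated by 90 degrees counter-clockwise
while copying the streams (file names are illustrative):
@example
ffmpeg -display_rotation 90 -i in.mp4 -c copy out.mp4
@end example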
  1244. @item -display_hflip[:@var{stream_specifier}] (@emph{input,per-stream})
  1245. Set whether on display the image should be horizontally flipped.
  1246. See the @code{-display_rotation} option for more details.
  1247. @item -display_vflip[:@var{stream_specifier}] (@emph{input,per-stream})
  1248. Set whether on display the image should be vertically flipped.
  1249. See the @code{-display_rotation} option for more details.
  1250. @item -vn (@emph{input/output})
  1251. As an input option, blocks all video streams of a file from being filtered or
  1252. being automatically selected or mapped for any output. See @code{-discard}
  1253. option to disable streams individually.
  1254. As an output option, disables video recording i.e. automatic selection or
  1255. mapping of any video stream. For full manual control see the @code{-map}
  1256. option.
  1257. @item -vcodec @var{codec} (@emph{output})
  1258. Set the video codec. This is an alias for @code{-codec:v}.
  1259. @item -pass[:@var{stream_specifier}] @var{n} (@emph{output,per-stream})
  1260. Select the pass number (1 or 2). It is used to do two-pass
  1261. video encoding. The statistics of the video are recorded in the first
  1262. pass into a log file (see also the option -passlogfile),
  1263. and in the second pass that log file is used to generate the video
  1264. at the exact requested bitrate.
  1265. On pass 1, you may just deactivate audio and set output to null,
  1266. examples for Windows and Unix:
  1267. @example
  1268. ffmpeg -i foo.mov -c:v libxvid -pass 1 -an -f rawvideo -y NUL
  1269. ffmpeg -i foo.mov -c:v libxvid -pass 1 -an -f rawvideo -y /dev/null
  1270. @end example
  1271. @item -passlogfile[:@var{stream_specifier}] @var{prefix} (@emph{output,per-stream})
  1272. Set two-pass log file name prefix to @var{prefix}, the default file name
  1273. prefix is ``ffmpeg2pass''. The complete file name will be
  1274. @file{PREFIX-N.log}, where N is a number specific to the output
stream.
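For example, a sketch of a complete two-pass encode, assuming ffmpeg was built with libx264
(bitrate and file names are illustrative):
@example
ffmpeg -y -i in.mov -c:v libx264 -b:v 2600k -pass 1 -an -f null /dev/null
ffmpeg -i in.mov -c:v libx264 -b:v 2600k -pass 2 -c:a aac out.mp4
@end example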
  1276. @item -vf @var{filtergraph} (@emph{output})
  1277. Create the filtergraph specified by @var{filtergraph} and use it to
  1278. filter the stream.
  1279. This is an alias for @code{-filter:v}, see the @ref{filter_option,,-filter option}.
  1280. @item -autorotate
  1281. Automatically rotate the video according to file metadata. Enabled by
  1282. default, use @option{-noautorotate} to disable it.
  1283. @item -autoscale
Automatically scale the video according to the resolution of the first frame.
Enabled by default, use @option{-noautoscale} to disable it. When autoscale is
disabled, all output frames of the filter graph might not be in the same resolution,
which may be inadequate for some encoders/muxers. Therefore, it is not recommended
to disable it unless you really know what you are doing.
Disable autoscale at your own risk.
  1290. @end table
  1291. @section Advanced Video options
  1292. @table @option
  1293. @item -pix_fmt[:@var{stream_specifier}] @var{format} (@emph{input/output,per-stream})
  1294. Set pixel format. Use @code{-pix_fmts} to show all the supported
  1295. pixel formats.
If the requested pixel format cannot be selected, ffmpeg will print a
warning and select the best pixel format supported by the encoder.
  1298. If @var{pix_fmt} is prefixed by a @code{+}, ffmpeg will exit with an error
  1299. if the requested pixel format can not be selected, and automatic conversions
  1300. inside filtergraphs are disabled.
  1301. If @var{pix_fmt} is a single @code{+}, ffmpeg selects the same pixel format
  1302. as the input (or graph output) and automatic conversions are disabled.
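For example, to request @code{yuv420p} output, letting ffmpeg insert a conversion if needed
(file names are illustrative):
@example
ffmpeg -i in.mov -pix_fmt yuv420p out.mp4
@end example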
  1303. @item -sws_flags @var{flags} (@emph{input/output})
  1304. Set default flags for the libswscale library. These flags are used by
  1305. automatically inserted @code{scale} filters and those within simple
  1306. filtergraphs, if not overridden within the filtergraph definition.
  1307. See the @ref{scaler_options,,ffmpeg-scaler manual,ffmpeg-scaler} for a list
  1308. of scaler options.
  1309. @item -rc_override[:@var{stream_specifier}] @var{override} (@emph{output,per-stream})
Rate control override for specific intervals, formatted as a slash-separated list of
"int,int,int" triples. The first two values are the beginning and end frame numbers,
the last one is the quantizer to use if positive, or the quality factor if negative.
  1314. @item -vstats
  1315. Dump video coding statistics to @file{vstats_HHMMSS.log}. See the
  1316. @ref{vstats_file_format,,vstats file format} section for the format description.
  1317. @item -vstats_file @var{file}
  1318. Dump video coding statistics to @var{file}. See the
  1319. @ref{vstats_file_format,,vstats file format} section for the format description.
@item -vstats_version @var{version}
  1321. Specify which version of the vstats format to use. Default is @code{2}. See the
  1322. @ref{vstats_file_format,,vstats file format} section for the format description.
  1323. @item -vtag @var{fourcc/tag} (@emph{output})
  1324. Force video tag/fourcc. This is an alias for @code{-tag:v}.
  1325. @item -force_key_frames[:@var{stream_specifier}] @var{time}[,@var{time}...] (@emph{output,per-stream})
  1326. @item -force_key_frames[:@var{stream_specifier}] expr:@var{expr} (@emph{output,per-stream})
  1327. @item -force_key_frames[:@var{stream_specifier}] source (@emph{output,per-stream})
  1328. @var{force_key_frames} can take arguments of the following form:
  1329. @table @option
  1330. @item @var{time}[,@var{time}...]
  1331. If the argument consists of timestamps, ffmpeg will round the specified times to the nearest
  1332. output timestamp as per the encoder time base and force a keyframe at the first frame having
  1333. timestamp equal or greater than the computed timestamp. Note that if the encoder time base is too
  1334. coarse, then the keyframes may be forced on frames with timestamps lower than the specified time.
  1335. The default encoder time base is the inverse of the output framerate but may be set otherwise
  1336. via @code{-enc_time_base}.
  1337. If one of the times is "@code{chapters}[@var{delta}]", it is expanded into
  1338. the time of the beginning of all chapters in the file, shifted by
  1339. @var{delta}, expressed as a time in seconds.
  1340. This option can be useful to ensure that a seek point is present at a
  1341. chapter mark or any other designated place in the output file.
  1342. For example, to insert a key frame at 5 minutes, plus key frames 0.1 second
  1343. before the beginning of every chapter:
  1344. @example
  1345. -force_key_frames 0:05:00,chapters-0.1
  1346. @end example
  1347. @item expr:@var{expr}
  1348. If the argument is prefixed with @code{expr:}, the string @var{expr}
  1349. is interpreted like an expression and is evaluated for each frame. A
  1350. key frame is forced in case the evaluation is non-zero.
  1351. The expression in @var{expr} can contain the following constants:
  1352. @table @option
  1353. @item n
the number of the current processed frame, starting from 0
  1355. @item n_forced
  1356. the number of forced frames
  1357. @item prev_forced_n
  1358. the number of the previous forced frame, it is @code{NAN} when no
  1359. keyframe was forced yet
  1360. @item prev_forced_t
  1361. the time of the previous forced frame, it is @code{NAN} when no
  1362. keyframe was forced yet
  1363. @item t
  1364. the time of the current processed frame
  1365. @end table
For example, to force a key frame every 5 seconds, you can specify:
  1367. @example
  1368. -force_key_frames expr:gte(t,n_forced*5)
  1369. @end example
  1370. To force a key frame 5 seconds after the time of the last forced one,
  1371. starting from second 13:
  1372. @example
  1373. -force_key_frames expr:if(isnan(prev_forced_t),gte(t,13),gte(t,prev_forced_t+5))
  1374. @end example
  1375. @item source
  1376. If the argument is @code{source}, ffmpeg will force a key frame if
  1377. the current frame being encoded is marked as a key frame in its source.
In cases where this particular source frame has to be dropped,
the next available frame is forced to become a key frame instead.
  1380. @end table
  1381. Note that forcing too many keyframes is very harmful for the lookahead
  1382. algorithms of certain encoders: using fixed-GOP options or similar
  1383. would be more efficient.
  1384. @item -apply_cropping[:@var{stream_specifier}] @var{source} (@emph{input,per-stream})
  1385. Automatically crop the video after decoding according to file metadata.
  1386. Default is @emph{all}.
  1387. @table @option
  1388. @item none (0)
  1389. Don't apply any cropping metadata.
  1390. @item all (1)
Apply both codec and container level cropping. This is the default mode.
@item codec (2)
Apply codec-level cropping.
@item container (3)
Apply container-level cropping.
  1396. @end table
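For example, to decode the input without applying any cropping metadata (file names are
illustrative):
@example
ffmpeg -apply_cropping none -i in.mp4 out.mp4
@end example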
  1397. @item -copyinkf[:@var{stream_specifier}] (@emph{output,per-stream})
  1398. When doing stream copy, copy also non-key frames found at the
  1399. beginning.
  1400. @item -init_hw_device @var{type}[=@var{name}][:@var{device}[,@var{key=value}...]]
  1401. Initialise a new hardware device of type @var{type} called @var{name}, using the
  1402. given device parameters.
  1403. If no name is specified it will receive a default name of the form "@var{type}%d".
  1404. The meaning of @var{device} and the following arguments depends on the
  1405. device type:
  1406. @table @option
  1407. @item cuda
  1408. @var{device} is the number of the CUDA device.
  1409. The following options are recognized:
  1410. @table @option
  1411. @item primary_ctx
  1412. If set to 1, uses the primary device context instead of creating a new one.
  1413. @end table
  1414. Examples:
  1415. @table @emph
  1416. @item -init_hw_device cuda:1
  1417. Choose the second device on the system.
  1418. @item -init_hw_device cuda:0,primary_ctx=1
  1419. Choose the first device and use the primary device context.
  1420. @end table
  1421. @item dxva2
  1422. @var{device} is the number of the Direct3D 9 display adapter.
  1423. @item d3d11va
  1424. @var{device} is the number of the Direct3D 11 display adapter.
  1425. If not specified, it will attempt to use the default Direct3D 11 display adapter
  1426. or the first Direct3D 11 display adapter whose hardware VendorId is specified
  1427. by @samp{vendor_id}.
  1428. Examples:
  1429. @table @emph
  1430. @item -init_hw_device d3d11va
  1431. Create a d3d11va device on the default Direct3D 11 display adapter.
  1432. @item -init_hw_device d3d11va:1
  1433. Create a d3d11va device on the Direct3D 11 display adapter specified by index 1.
  1434. @item -init_hw_device d3d11va:,vendor_id=0x8086
  1435. Create a d3d11va device on the first Direct3D 11 display adapter whose hardware VendorId is 0x8086.
  1436. @end table
  1437. @item vaapi
  1438. @var{device} is either an X11 display name, a DRM render node or a DirectX adapter index.
  1439. If not specified, it will attempt to open the default X11 display (@emph{$DISPLAY})
  1440. and then the first DRM render node (@emph{/dev/dri/renderD128}), or the default
  1441. DirectX adapter on Windows.
  1442. The following options are recognized:
  1443. @table @option
  1444. @item kernel_driver
  1445. When @var{device} is not specified, use this option to specify the name of the kernel
  1446. driver associated with the desired device. This option is available only when
the hardware acceleration methods @emph{drm} and @emph{vaapi} are enabled.
  1448. @item vendor_id
  1449. When @var{device} and @var{kernel_driver} are not specified, use this option to specify
  1450. the vendor id associated with the desired device. This option is available only when the
hardware acceleration methods @emph{drm} and @emph{vaapi} are enabled and @emph{kernel_driver}
  1452. is not specified.
  1453. @end table
  1454. Examples:
  1455. @table @emph
  1456. @item -init_hw_device vaapi
  1457. Create a vaapi device on the default device.
  1458. @item -init_hw_device vaapi:/dev/dri/renderD129
  1459. Create a vaapi device on DRM render node @file{/dev/dri/renderD129}.
  1460. @item -init_hw_device vaapi:1
  1461. Create a vaapi device on DirectX adapter 1.
  1462. @item -init_hw_device vaapi:,kernel_driver=i915
  1463. Create a vaapi device on a device associated with kernel driver @samp{i915}.
  1464. @item -init_hw_device vaapi:,vendor_id=0x8086
  1465. Create a vaapi device on a device associated with vendor id @samp{0x8086}.
  1466. @end table
  1467. @item vdpau
  1468. @var{device} is an X11 display name.
  1469. If not specified, it will attempt to open the default X11 display (@emph{$DISPLAY}).
  1470. @item qsv
  1471. @var{device} selects a value in @samp{MFX_IMPL_*}. Allowed values are:
  1472. @table @option
  1473. @item auto
  1474. @item sw
  1475. @item hw
  1476. @item auto_any
  1477. @item hw_any
  1478. @item hw2
  1479. @item hw3
  1480. @item hw4
  1481. @end table
  1482. If not specified, @samp{auto_any} is used.
  1483. (Note that it may be easier to achieve the desired result for QSV by creating the
  1484. platform-appropriate subdevice (@samp{dxva2} or @samp{d3d11va} or @samp{vaapi}) and then deriving a
  1485. QSV device from that.)
  1486. The following options are recognized:
  1487. @table @option
  1488. @item child_device
  1489. Specify a DRM render node on Linux or DirectX adapter on Windows.
  1490. @item child_device_type
Choose the platform-appropriate subdevice type. On Windows, @samp{d3d11va} is the default
subdevice type when @code{--enable-libvpl} is specified at configuration time, and
@samp{dxva2} is the default subdevice type when @code{--enable-libmfx} is specified at
configuration time. On Linux, only @samp{vaapi} can be used as the subdevice type.
  1495. @end table
  1496. Examples:
  1497. @table @emph
  1498. @item -init_hw_device qsv:hw,child_device=/dev/dri/renderD129
  1499. Create a QSV device with @samp{MFX_IMPL_HARDWARE} on DRM render node @file{/dev/dri/renderD129}.
  1500. @item -init_hw_device qsv:hw,child_device=1
  1501. Create a QSV device with @samp{MFX_IMPL_HARDWARE} on DirectX adapter 1.
  1502. @item -init_hw_device qsv:hw,child_device_type=d3d11va
  1503. Choose the GPU subdevice with type @samp{d3d11va} and create QSV device with @samp{MFX_IMPL_HARDWARE}.
  1504. @item -init_hw_device qsv:hw,child_device_type=dxva2
  1505. Choose the GPU subdevice with type @samp{dxva2} and create QSV device with @samp{MFX_IMPL_HARDWARE}.
  1506. @item -init_hw_device qsv:hw,child_device=1,child_device_type=d3d11va
  1507. Create a QSV device with @samp{MFX_IMPL_HARDWARE} on DirectX adapter 1 with subdevice type @samp{d3d11va}.
  1508. @item -init_hw_device vaapi=va:/dev/dri/renderD129 -init_hw_device qsv=hw1@@@var{va}
  1509. Create a VAAPI device called @samp{va} on @file{/dev/dri/renderD129}, then derive a QSV device called @samp{hw1}
  1510. from device @samp{va}.
  1511. @end table
  1512. @item opencl
  1513. @var{device} selects the platform and device as @emph{platform_index.device_index}.
  1514. The set of devices can also be filtered using the key-value pairs to find only
  1515. devices matching particular platform or device strings.
  1516. The strings usable as filters are:
  1517. @table @option
  1518. @item platform_profile
  1519. @item platform_version
  1520. @item platform_name
  1521. @item platform_vendor
  1522. @item platform_extensions
  1523. @item device_name
  1524. @item device_vendor
  1525. @item driver_version
  1526. @item device_version
  1527. @item device_profile
  1528. @item device_extensions
  1529. @item device_type
  1530. @end table
  1531. The indices and filters must together uniquely select a device.
  1532. Examples:
  1533. @table @emph
  1534. @item -init_hw_device opencl:0.1
  1535. Choose the second device on the first platform.
  1536. @item -init_hw_device opencl:,device_name=Foo9000
  1537. Choose the device with a name containing the string @emph{Foo9000}.
  1538. @item -init_hw_device opencl:1,device_type=gpu,device_extensions=cl_khr_fp16
  1539. Choose the GPU device on the second platform supporting the @emph{cl_khr_fp16}
  1540. extension.
  1541. @end table
  1542. @item vulkan
  1543. If @var{device} is an integer, it selects the device by its index in a
  1544. system-dependent list of devices. If @var{device} is any other string, it
  1545. selects the first device with a name containing that string as a substring.
  1546. The following options are recognized:
  1547. @table @option
  1548. @item debug
  1549. If set to 1, enables the validation layer, if installed.
  1550. @item linear_images
  1551. If set to 1, images allocated by the hwcontext will be linear and locally mappable.
  1552. @item instance_extensions
A plus-separated list of additional instance extensions to enable.
@item device_extensions
A plus-separated list of additional device extensions to enable.
  1556. @end table
  1557. Examples:
  1558. @table @emph
  1559. @item -init_hw_device vulkan:1
  1560. Choose the second device on the system.
  1561. @item -init_hw_device vulkan:RADV
  1562. Choose the first device with a name containing the string @emph{RADV}.
  1563. @item -init_hw_device vulkan:0,instance_extensions=VK_KHR_wayland_surface+VK_KHR_xcb_surface
  1564. Choose the first device and enable the Wayland and XCB instance extensions.
  1565. @end table
  1566. @end table
  1567. @item -init_hw_device @var{type}[=@var{name}]@@@var{source}
  1568. Initialise a new hardware device of type @var{type} called @var{name},
  1569. deriving it from the existing device with the name @var{source}.
  1570. @item -init_hw_device list
  1571. List all hardware device types supported in this build of ffmpeg.
  1572. @item -filter_hw_device @var{name}
  1573. Pass the hardware device called @var{name} to all filters in any filter graph.
  1574. This can be used to set the device to upload to with the @code{hwupload} filter,
  1575. or the device to map to with the @code{hwmap} filter. Other filters may also
  1576. make use of this parameter when they require a hardware device. Note that this
  1577. is typically only required when the input is not already in hardware frames -
  1578. when it is, filters will derive the device they require from the context of the
  1579. frames they receive as input.
  1580. This is a global setting, so all filters will receive the same device.
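For example, a sketch of uploading software frames to a VAAPI device so they can be scaled
and encoded on the GPU; the device path and the availability of the @code{scale_vaapi} filter
and @code{h264_vaapi} encoder are assumptions about the local build and hardware:
@example
ffmpeg -init_hw_device vaapi=gpu:/dev/dri/renderD128 -filter_hw_device gpu -i in.mp4 -vf format=nv12,hwupload,scale_vaapi=w=1280:h=720 -c:v h264_vaapi out.mp4
@end example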
  1581. @item -hwaccel[:@var{stream_specifier}] @var{hwaccel} (@emph{input,per-stream})
  1582. Use hardware acceleration to decode the matching stream(s). The allowed values
  1583. of @var{hwaccel} are:
  1584. @table @option
  1585. @item none
  1586. Do not use any hardware acceleration (the default).
  1587. @item auto
  1588. Automatically select the hardware acceleration method.
  1589. @item vdpau
  1590. Use VDPAU (Video Decode and Presentation API for Unix) hardware acceleration.
  1591. @item dxva2
  1592. Use DXVA2 (DirectX Video Acceleration) hardware acceleration.
  1593. @item d3d11va
  1594. Use D3D11VA (DirectX Video Acceleration) hardware acceleration.
  1595. @item vaapi
  1596. Use VAAPI (Video Acceleration API) hardware acceleration.
  1597. @item qsv
  1598. Use the Intel QuickSync Video acceleration for video transcoding.
  1599. Unlike most other values, this option does not enable accelerated decoding (that
  1600. is used automatically whenever a qsv decoder is selected), but accelerated
  1601. transcoding, without copying the frames into the system memory.
  1602. For it to work, both the decoder and the encoder must support QSV acceleration
  1603. and no filters must be used.
  1604. @end table
  1605. This option has no effect if the selected hwaccel is not available or not
  1606. supported by the chosen decoder.
  1607. Note that most acceleration methods are intended for playback and will not be
  1608. faster than software decoding on modern CPUs. Additionally, @command{ffmpeg}
  1609. will usually need to copy the decoded frames from the GPU memory into the system
  1610. memory, resulting in further performance loss. This option is thus mainly
  1611. useful for testing.
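For example, to automatically select a hardware acceleration method for decoding where
possible while encoding in software (file names are illustrative):
@example
ffmpeg -hwaccel auto -i in.mp4 -c:v libx264 out.mp4
@end example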
  1612. @item -hwaccel_device[:@var{stream_specifier}] @var{hwaccel_device} (@emph{input,per-stream})
  1613. Select a device to use for hardware acceleration.
  1614. This option only makes sense when the @option{-hwaccel} option is also specified.
  1615. It can either refer to an existing device created with @option{-init_hw_device}
  1616. by name, or it can create a new device as if
  1617. @samp{-init_hw_device} @var{type}:@var{hwaccel_device}
  1618. were called immediately before.
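For example, the following two commands are roughly equivalent ways of decoding with VAAPI on
a specific DRM render node (the device path is illustrative):
@example
ffmpeg -init_hw_device vaapi=gpu:/dev/dri/renderD128 -hwaccel vaapi -hwaccel_device gpu -i in.mp4 -f null -
ffmpeg -hwaccel vaapi -hwaccel_device /dev/dri/renderD128 -i in.mp4 -f null -
@end example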
  1619. @item -hwaccels
  1620. List all hardware acceleration components enabled in this build of ffmpeg.
  1621. Actual runtime availability depends on the hardware and its suitable driver
  1622. being installed.
  1623. @item -fix_sub_duration_heartbeat[:@var{stream_specifier}]
Set a specific output video stream as the heartbeat stream according to which the
currently in-progress subtitle is split and pushed through upon receipt of a
random access packet.
This lowers the latency of subtitles for which the end packet or the following
subtitle has not yet been received. As a drawback, this will most likely lead
to duplication of subtitle events in order to cover the full duration, so this
option should not be used in use cases where the latency of subtitle output is
not relevant.
This requires @option{-fix_sub_duration} to be set for the relevant input subtitle
stream in order to have any effect, and the input subtitle stream must be directly
mapped to the same output in which the heartbeat stream resides.
  1636. @end table
  1637. @section Audio Options
  1638. @table @option
  1639. @item -aframes @var{number} (@emph{output})
  1640. Set the number of audio frames to output. This is an obsolete alias for
  1641. @code{-frames:a}, which you should use instead.
  1642. @item -ar[:@var{stream_specifier}] @var{freq} (@emph{input/output,per-stream})
  1643. Set the audio sampling frequency. For output streams it is set by
  1644. default to the frequency of the corresponding input stream. For input
  1645. streams this option only makes sense for audio grabbing devices and raw
  1646. demuxers and is mapped to the corresponding demuxer options.
  1647. @item -aq @var{q} (@emph{output})
  1648. Set the audio quality (codec-specific, VBR). This is an alias for -q:a.
  1649. @item -ac[:@var{stream_specifier}] @var{channels} (@emph{input/output,per-stream})
  1650. Set the number of audio channels. For output streams it is set by
  1651. default to the number of input audio channels. For input streams
  1652. this option only makes sense for audio grabbing devices and raw demuxers
  1653. and is mapped to the corresponding demuxer options.
  1654. @item -an (@emph{input/output})
  1655. As an input option, blocks all audio streams of a file from being filtered or
  1656. being automatically selected or mapped for any output. See @code{-discard}
  1657. option to disable streams individually.
  1658. As an output option, disables audio recording i.e. automatic selection or
  1659. mapping of any audio stream. For full manual control see the @code{-map}
  1660. option.
  1661. @item -acodec @var{codec} (@emph{input/output})
  1662. Set the audio codec. This is an alias for @code{-codec:a}.
  1663. @item -sample_fmt[:@var{stream_specifier}] @var{sample_fmt} (@emph{output,per-stream})
  1664. Set the audio sample format. Use @code{-sample_fmts} to get a list
  1665. of supported sample formats.
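For example, to produce a 48 kHz stereo WAV file with signed 16-bit samples (file names are
illustrative):
@example
ffmpeg -i in.flac -ar 48000 -ac 2 -sample_fmt s16 out.wav
@end example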
  1666. @item -af @var{filtergraph} (@emph{output})
  1667. Create the filtergraph specified by @var{filtergraph} and use it to
  1668. filter the stream.
  1669. This is an alias for @code{-filter:a}, see the @ref{filter_option,,-filter option}.
  1670. @end table
  1671. @section Advanced Audio options
  1672. @table @option
  1673. @item -atag @var{fourcc/tag} (@emph{output})
  1674. Force audio tag/fourcc. This is an alias for @code{-tag:a}.
  1675. @item -ch_layout[:@var{stream_specifier}] @var{layout} (@emph{input/output,per-stream})
  1676. Alias for @code{-channel_layout}.
  1677. @item -channel_layout[:@var{stream_specifier}] @var{layout} (@emph{input/output,per-stream})
  1678. Set the audio channel layout. For output streams it is set by default to the
  1679. input channel layout. For input streams it overrides the channel layout of the
  1680. input. Not all decoders respect the overridden channel layout. This option
  1681. also sets the channel layout for audio grabbing devices and raw demuxers
  1682. and is mapped to the corresponding demuxer option.
  1683. @item -guess_layout_max @var{channels} (@emph{input,per-stream})
  1684. If some input channel layout is not known, try to guess only if it
  1685. corresponds to at most the specified number of channels. For example, 2
tells @command{ffmpeg} to recognize 1 channel as mono and 2 channels as
  1687. stereo but not 6 channels as 5.1. The default is to always try to guess. Use
  1688. 0 to disable all guessing. Using the @code{-channel_layout} option to
  1689. explicitly specify an input layout also disables guessing.
  1690. @end table
  1691. @section Subtitle options
  1692. @table @option
  1693. @item -scodec @var{codec} (@emph{input/output})
  1694. Set the subtitle codec. This is an alias for @code{-codec:s}.
  1695. @item -sn (@emph{input/output})
  1696. As an input option, blocks all subtitle streams of a file from being filtered or
  1697. being automatically selected or mapped for any output. See @code{-discard}
  1698. option to disable streams individually.
  1699. As an output option, disables subtitle recording i.e. automatic selection or
  1700. mapping of any subtitle stream. For full manual control see the @code{-map}
  1701. option.
  1702. @end table
  1703. @section Advanced Subtitle options
  1704. @table @option
  1705. @item -fix_sub_duration
Fix subtitle durations. For each subtitle, wait for the next packet in the
same stream and adjust the duration of the first to avoid overlap. This is
necessary with some subtitle codecs, especially DVB subtitles, because the
  1709. duration in the original packet is only a rough estimate and the end is
  1710. actually marked by an empty subtitle frame. Failing to use this option when
  1711. necessary can result in exaggerated durations or muxing failures due to
  1712. non-monotonic timestamps.
  1713. Note that this option will delay the output of all data until the next
  1714. subtitle packet is decoded: it may increase memory consumption and latency a
  1715. lot.
  1716. @item -canvas_size @var{size}
  1717. Set the size of the canvas used to render subtitles.
  1718. @end table
  1719. @section Advanced options
  1720. @table @option
  1721. @item -map [-]@var{input_file_id}[:@var{stream_specifier}][:@var{view_specifier}][:?] | @var{[linklabel]} (@emph{output})
  1722. Create one or more streams in the output file. This option has two forms for
  1723. specifying the data source(s): the first selects one or more streams from some
  1724. input file (specified with @code{-i}), the second takes an output from some
  1725. complex filtergraph (specified with @code{-filter_complex}).
  1726. In the first form, an output stream is created for every stream from the input
  1727. file with the index @var{input_file_id}. If @var{stream_specifier} is given,
  1728. only those streams that match the specifier are used (see the
  1729. @ref{Stream specifiers} section for the @var{stream_specifier} syntax).
  1730. A @code{-} character before the stream identifier creates a "negative" mapping.
  1731. It disables matching streams from already created mappings.
  1732. An optional @var{view_specifier} may be given after the stream specifier, which
  1733. for multiview video specifies the view to be used. The view specifier may have
  1734. one of the following formats:
  1735. @table @option
  1736. @item view:@var{view_id}
  1737. select a view by its ID; @var{view_id} may be set to 'all' to use all the views
  1738. interleaved into one stream;
  1739. @item vidx:@var{view_idx}
  1740. select a view by its index; i.e. 0 is the base view, 1 is the first non-base
  1741. view, etc.
  1742. @item vpos:@var{position}
  1743. select a view by its display position; @var{position} may be @code{left} or
  1744. @code{right}
  1745. @end table
  1746. The default for transcoding is to only use the base view, i.e. the equivalent of
  1747. @code{vidx:0}. For streamcopy, view specifiers are not supported and all views
  1748. are always copied.
  1749. A trailing @code{?} after the stream index will allow the map to be
  1750. optional: if the map matches no streams the map will be ignored instead
of failing. Note that the map will still fail if an invalid input file index
is used, such as when the map refers to a non-existent input.
  1753. An alternative @var{[linklabel]} form will map outputs from complex filter
  1754. graphs (see the @option{-filter_complex} option) to the output file.
  1755. @var{linklabel} must correspond to a defined output link label in the graph.
  1756. This option may be specified multiple times, each adding more streams to the
  1757. output file. Any given input stream may also be mapped any number of times as a
  1758. source for different output streams, e.g. in order to use different encoding
  1759. options and/or filters. The streams are created in the output in the same order
  1760. in which the @code{-map} options are given on the commandline.
  1761. Using this option disables the default mappings for this output file.
  1762. Examples:
  1763. @table @emph
  1764. @item map everything
  1765. To map ALL streams from the first input file to output
  1766. @example
  1767. ffmpeg -i INPUT -map 0 output
  1768. @end example
  1769. @item select specific stream
  1770. If you have two audio streams in the first input file, these streams are
  1771. identified by @var{0:0} and @var{0:1}. You can use @code{-map} to select which
  1772. streams to place in an output file. For example:
  1773. @example
  1774. ffmpeg -i INPUT -map 0:1 out.wav
  1775. @end example
  1776. will map the second input stream in @file{INPUT} to the (single) output stream
  1777. in @file{out.wav}.
  1778. @item create multiple streams
  1779. To select the stream with index 2 from input file @file{a.mov} (specified by the
  1780. identifier @var{0:2}), and stream with index 6 from input @file{b.mov}
  1781. (specified by the identifier @var{1:6}), and copy them to the output file
  1782. @file{out.mov}:
  1783. @example
  1784. ffmpeg -i a.mov -i b.mov -c copy -map 0:2 -map 1:6 out.mov
  1785. @end example
  1786. @item create multiple streams 2
  1787. To select all video and the third audio stream from an input file:
  1788. @example
  1789. ffmpeg -i INPUT -map 0:v -map 0:a:2 OUTPUT
  1790. @end example
  1791. @item negative map
  1792. To map all the streams except the second audio, use negative mappings
  1793. @example
  1794. ffmpeg -i INPUT -map 0 -map -0:a:1 OUTPUT
  1795. @end example
  1796. @item optional map
  1797. To map the video and audio streams from the first input, and using the
  1798. trailing @code{?}, ignore the audio mapping if no audio streams exist in
  1799. the first input:
  1800. @example
  1801. ffmpeg -i INPUT -map 0:v -map 0:a? OUTPUT
  1802. @end example
  1803. @item map by language
  1804. To pick the English audio stream:
  1805. @example
  1806. ffmpeg -i INPUT -map 0:m:language:eng OUTPUT
  1807. @end example
  1808. @end table
  1809. @item -ignore_unknown
  1810. Ignore input streams with unknown type instead of failing if copying
  1811. such streams is attempted.
  1812. @item -copy_unknown
  1813. Allow input streams with unknown type to be copied instead of failing if copying
  1814. such streams is attempted.
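For example, to copy all streams from an MPEG-TS input, including any streams of unknown type
(file names are illustrative):
@example
ffmpeg -i in.ts -map 0 -c copy -copy_unknown out.ts
@end example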
  1815. @item -map_metadata[:@var{metadata_spec_out}] @var{infile}[:@var{metadata_spec_in}] (@emph{output,per-metadata})
  1816. Set metadata information of the next output file from @var{infile}. Note that
  1817. those are file indices (zero-based), not filenames.
Optional @var{metadata_spec_in/out} parameters specify which metadata to copy.
  1819. A metadata specifier can have the following forms:
  1820. @table @option
  1821. @item @var{g}
  1822. global metadata, i.e. metadata that applies to the whole file
  1823. @item @var{s}[:@var{stream_spec}]
  1824. per-stream metadata. @var{stream_spec} is a stream specifier as described
  1825. in the @ref{Stream specifiers} chapter. In an input metadata specifier, the first
  1826. matching stream is copied from. In an output metadata specifier, all matching
  1827. streams are copied to.
  1828. @item @var{c}:@var{chapter_index}
  1829. per-chapter metadata. @var{chapter_index} is the zero-based chapter index.
  1830. @item @var{p}:@var{program_index}
  1831. per-program metadata. @var{program_index} is the zero-based program index.
  1832. @end table
  1833. If metadata specifier is omitted, it defaults to global.
  1834. By default, global metadata is copied from the first input file,
  1835. per-stream and per-chapter metadata is copied along with streams/chapters. These
  1836. default mappings are disabled by creating any mapping of the relevant type. A negative
  1837. file index can be used to create a dummy mapping that just disables automatic copying.
  1838. For example to copy metadata from the first stream of the input file to global metadata
  1839. of the output file:
  1840. @example
  1841. ffmpeg -i in.ogg -map_metadata 0:s:0 out.mp3
  1842. @end example
  1843. To do the reverse, i.e. copy global metadata to all audio streams:
  1844. @example
  1845. ffmpeg -i in.mkv -map_metadata:s:a 0:g out.mkv
  1846. @end example
  1847. Note that simple @code{0} would work as well in this example, since global
  1848. metadata is assumed by default.
  1849. @item -map_chapters @var{input_file_index} (@emph{output})
  1850. Copy chapters from input file with index @var{input_file_index} to the next
  1851. output file. If no chapter mapping is specified, then chapters are copied from
  1852. the first input file with at least one chapter. Use a negative file index to
  1853. disable any chapter copying.
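For example, a possible way to take the chapters from a second input while
copying all streams from the first (the file names are placeholders):
@example
ffmpeg -i main.mkv -i chapters.mkv -map 0 -map_chapters 1 -c copy out.mkv
@end example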
@item -benchmark (@emph{global})
Show benchmarking information at the end of an encode.
Shows real, system and user time used and maximum memory consumption.
Maximum memory consumption is not supported on all systems,
it will usually display as 0 if not supported.
@item -benchmark_all (@emph{global})
Show benchmarking information during the encode.
Shows real, system and user time used in various steps (audio/video encode/decode).
@item -timelimit @var{duration} (@emph{global})
Exit after ffmpeg has been running for @var{duration} seconds in CPU user time.
@item -dump (@emph{global})
Dump each input packet to stderr.
@item -hex (@emph{global})
When dumping packets, also dump the payload.
@item -readrate @var{speed} (@emph{input})
Limit input read speed.
Its value is a floating-point positive number which represents the maximum duration of
media, in seconds, that should be ingested in one second of wallclock time.
The default value is zero, which means no limitation on the speed of ingestion.
A value of @code{1} represents real-time speed and is equivalent to @code{-re}.
Mainly used to simulate a capture device or live input stream (e.g. when reading from a file).
Should not be used with a low value when the input is an actual capture device or live stream,
as it may cause packet loss.
It is useful when the flow speed of output packets is important, such as for live streaming.
@item -re (@emph{input})
Read input at native frame rate. This is equivalent to setting @code{-readrate 1}.
@item -readrate_initial_burst @var{seconds}
Set an initial read burst time, in seconds, after which @option{-re/-readrate}
will be enforced.
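For example, a possible way to push a file to a live destination at real-time
speed after a 5-second initial burst (the destination URL is a placeholder):
@example
ffmpeg -readrate 1 -readrate_initial_burst 5 -i input.mp4 -c copy -f mpegts udp://127.0.0.1:1234
@end example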
@item -vsync @var{parameter} (@emph{global})
@itemx -fps_mode[:@var{stream_specifier}] @var{parameter} (@emph{output,per-stream})
Set video sync method / framerate mode. vsync is applied to all output video streams
but can be overridden for a stream by setting fps_mode. vsync is deprecated and will be
removed in the future.
For compatibility reasons some of the values for vsync can be specified as numbers (shown
in parentheses in the following table).
@table @option
@item passthrough (0)
Each frame is passed with its timestamp from the demuxer to the muxer.
@item cfr (1)
Frames will be duplicated and dropped to achieve exactly the requested
constant frame rate.
@item vfr (2)
Frames are passed through with their timestamp or dropped so as to
prevent 2 frames from having the same timestamp.
@item auto (-1)
Chooses between cfr and vfr depending on muxer capabilities. This is the
default method.
@end table
Note that the timestamps may be further modified by the muxer after this,
for example when the format option @option{avoid_negative_ts} is enabled.
With -map you can select from which stream the timestamps should be
taken. You can leave either video or audio unchanged and sync the
remaining stream(s) to the unchanged one.
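For example, a possible way to force a constant 25 fps video output while
copying the audio untouched (the file names are placeholders):
@example
ffmpeg -i input.mkv -fps_mode:v cfr -r 25 -c:a copy output.mp4
@end example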
@item -frame_drop_threshold @var{parameter}
Frame drop threshold, which specifies how much behind video frames can
be before they are dropped. In frame rate units, so 1.0 is one frame.
The default is -1.1. One possible use case is to avoid frame drops in case
of noisy timestamps or to increase frame drop precision in case of exact
timestamps.
@item -apad @var{parameters} (@emph{output,per-stream})
Pad the output audio stream(s). This is the same as applying @code{-af apad}.
The argument is a string of filter parameters, composed the same way as for the @code{apad} filter.
@code{-shortest} must be set for this output for the option to take effect.
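For example, assuming the @code{apad} filter's @code{pad_dur} parameter is
available in the build, a possible way to pad the audio with up to 2 seconds of
silence so that it does not end before the video (the file names are placeholders):
@example
ffmpeg -i video.mp4 -i audio.wav -map 0:v -map 1:a -c:v copy -apad pad_dur=2 -shortest out.mp4
@end example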
@item -copyts
Do not process input timestamps, but keep their values without trying
to sanitize them. In particular, do not remove the initial start time
offset value.
Note that, depending on the @option{vsync} option or on specific muxer
processing (e.g. in case the format option @option{avoid_negative_ts}
is enabled), the output timestamps may not match the input
timestamps even when this option is selected.
@item -start_at_zero
When used with @option{copyts}, shift input timestamps so they start at zero.
This means that using e.g. @code{-ss 50} will make output timestamps start at
50 seconds, regardless of what timestamp the input file started at.
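For example, a possible way to cut from 50 seconds onwards while preserving the
original input timestamps in the output (the file names are placeholders):
@example
ffmpeg -copyts -ss 50 -i input.ts -c copy out.ts
@end example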
@item -copytb @var{mode}
Specify how to set the encoder timebase when stream copying. @var{mode} is an
integer numeric value, and can assume one of the following values:
@table @option
@item 1
Use the demuxer timebase.
The time base is copied to the output encoder from the corresponding input
demuxer. This is sometimes required to avoid non-monotonically increasing
timestamps when copying video streams with variable frame rate.
@item 0
Use the decoder timebase.
The time base is copied to the output encoder from the corresponding input
decoder.
@item -1
Try to make the choice automatically, in order to generate a sane output.
@end table
Default value is -1.
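For example, a possible way to force the demuxer timebase when stream copying a
variable frame rate video (the file names are placeholders):
@example
ffmpeg -i input.mkv -c copy -copytb 1 out.mkv
@end example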
@item -enc_time_base[:@var{stream_specifier}] @var{timebase} (@emph{output,per-stream})
Set the encoder timebase. @var{timebase} can assume one of the following values:
@table @option
@item 0
Assign a default value according to the media type.
For video - use 1/framerate, for audio - use 1/samplerate.
@item demux
Use the timebase from the demuxer.
@item filter
Use the timebase from the filtergraph.
@item a positive number
Use the provided number as the timebase.
This field can be provided as a ratio of two integers (e.g. 1:24, 1:48000)
or as a decimal number (e.g. 0.04166, 2.0833e-5).
@end table
Default value is 0.
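For example, a possible way to request a 90 kHz timebase (a common MPEG-TS
convention) for the encoded video stream (the file names are placeholders):
@example
ffmpeg -i input.mov -c:v libx264 -enc_time_base:v 1:90000 out.ts
@end example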
@item -bitexact (@emph{input/output})
Enable bitexact mode for (de)muxer and (de/en)coder.
@item -shortest (@emph{output})
Finish encoding when the shortest output stream ends.
Note that this option may require buffering frames, which introduces extra
latency. The maximum amount of this latency may be controlled with the
@code{-shortest_buf_duration} option.
@item -shortest_buf_duration @var{duration} (@emph{output})
The @code{-shortest} option may require buffering potentially large amounts
of data when at least one of the streams is "sparse" (i.e. has large gaps
between frames -- this is typically the case for subtitles).
This option controls the maximum duration of buffered frames in seconds.
Larger values may allow the @code{-shortest} option to produce more accurate
results, but increase memory use and latency.
The default value is 10 seconds.
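For example, a possible way to mux a video together with a sparse subtitle
stream, stopping at the shorter one and capping the buffering at 2 seconds
(the file names are placeholders):
@example
ffmpeg -i video.mp4 -i subs.srt -c:v copy -c:s mov_text -shortest -shortest_buf_duration 2 out.mp4
@end example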
@item -dts_delta_threshold @var{threshold}
Timestamp discontinuity delta threshold, expressed as a decimal number
of seconds.
The timestamp discontinuity correction enabled by this option is only
applied to input formats accepting timestamp discontinuity (for which
the @code{AVFMT_TS_DISCONT} flag is enabled), e.g. MPEG-TS and HLS, and
is automatically disabled when employing the @code{-copyts} option
(unless wrapping is detected).
If a timestamp discontinuity is detected whose absolute value is
greater than @var{threshold}, ffmpeg will remove the discontinuity by
decreasing/increasing the current DTS and PTS by the corresponding
delta value.
The default value is 10.
@item -dts_error_threshold @var{threshold}
Timestamp error delta threshold, expressed as a decimal number of
seconds.
The timestamp correction enabled by this option is only applied to
input formats not accepting timestamp discontinuity (for which the
@code{AVFMT_TS_DISCONT} flag is not enabled).
If a timestamp discontinuity is detected whose absolute value is
greater than @var{threshold}, ffmpeg will drop the PTS/DTS timestamp
value.
The default value is @code{3600*30} (30 hours), which is arbitrarily
picked and quite conservative.
@item -muxdelay @var{seconds} (@emph{output})
Set the maximum demux-decode delay.
@item -muxpreload @var{seconds} (@emph{output})
Set the initial demux-decode delay.
@item -streamid @var{output-stream-index}:@var{new-value} (@emph{output})
Assign a new stream-id value to an output stream. This option should be
specified prior to the output filename to which it applies.
For the situation where multiple output files exist, a streamid
may be reassigned to a different value.
For example, to set the stream 0 PID to 33 and the stream 1 PID to 36 for
an output mpegts file:
@example
ffmpeg -i inurl -streamid 0:33 -streamid 1:36 out.ts
@end example
@item -bsf[:@var{stream_specifier}] @var{bitstream_filters} (@emph{input/output,per-stream})
Apply bitstream filters to matching streams. The filters are applied to each
packet as it is received from the demuxer (when used as an input option) or
before it is sent to the muxer (when used as an output option).
@var{bitstream_filters} is a comma-separated list of bitstream filter
specifications, each of the form
@example
@var{filter}[=@var{optname0}=@var{optval0}:@var{optname1}=@var{optval1}:...]
@end example
Any of the ',=:' characters that are to be a part of an option value need to be
escaped with a backslash.
Use the @code{-bsfs} option to get the list of bitstream filters.
E.g.
@example
ffmpeg -bsf:v h264_mp4toannexb -i h264.mp4 -c:v copy -an out.h264
@end example
applies the @code{h264_mp4toannexb} bitstream filter (which converts an
MP4-encapsulated H.264 stream to Annex B) to the @emph{input} video stream.
On the other hand,
@example
ffmpeg -i file.mov -an -vn -bsf:s mov2textsub -c:s copy -f rawvideo sub.txt
@end example
applies the @code{mov2textsub} bitstream filter (which extracts text from MOV
subtitles) to the @emph{output} subtitle stream. Note, however, that since both
examples use @code{-c copy}, it matters little whether the filters are applied
on input or output - that would change if transcoding were happening.
@item -tag[:@var{stream_specifier}] @var{codec_tag} (@emph{input/output,per-stream})
Force a tag/fourcc for matching streams.
@item -timecode @var{hh}:@var{mm}:@var{ss}SEP@var{ff}
Specify the timecode for writing. @var{SEP} is ':' for non-drop timecode and ';'
(or '.') for drop.
@example
ffmpeg -i input.mpg -timecode 01:02:03.04 -r 30000/1001 -s ntsc output.mpg
@end example
@anchor{filter_complex_option}
@item -filter_complex @var{filtergraph} (@emph{global})
Define a complex filtergraph, i.e. one with arbitrary number of inputs and/or
outputs. For simple graphs -- those with one input and one output of the same
type -- see the @option{-filter} options. @var{filtergraph} is a description of
the filtergraph, as described in the ``Filtergraph syntax'' section of the
ffmpeg-filters manual. This option may be specified multiple times - each use
creates a new complex filtergraph.
Inputs to a complex filtergraph may come from different source types,
distinguished by the format of the corresponding link label:
@itemize
@item
To connect an input stream, use @code{[file_index:stream_specifier]} (i.e. the
same syntax as @option{-map}). If @var{stream_specifier} matches multiple
streams, the first one will be used. For multiview video, the stream specifier
may be followed by the view specifier, see documentation for the @option{-map}
option for its syntax.
@item
To connect a loopback decoder use [dec:@var{dec_idx}], where @var{dec_idx} is
the index of the loopback decoder to be connected to a given input. For multiview
video, the decoder index may be followed by the view specifier, see
documentation for the @option{-map} option for its syntax.
@item
To connect an output from another complex filtergraph, use its link label. E.g.
the following example:
@example
ffmpeg -i input.mkv \
-filter_complex '[0:v]scale=size=hd1080,split=outputs=2[for_enc][orig_scaled]' \
-c:v libx264 -map '[for_enc]' output.mkv \
-dec 0:0 \
-filter_complex '[dec:0][orig_scaled]hstack[stacked]' \
-map '[stacked]' -c:v ffv1 comparison.mkv
@end example
reads an input video and
@itemize
@item
(line 2) uses a complex filtergraph with one input and two outputs
to scale the video to 1920x1080 and duplicate the result to both
outputs;
@item
(line 3) encodes one scaled output with @code{libx264} and writes the result to
@file{output.mkv};
@item
(line 4) decodes this encoded stream with a loopback decoder;
@item
(line 5) places the output of the loopback decoder (i.e. the
@code{libx264}-encoded video) side by side with the scaled original input;
@item
(line 6) the combined video is then losslessly encoded and written into
@file{comparison.mkv}.
@end itemize
Note that the two filtergraphs cannot be combined into one, because then there
would be a cycle in the transcoding pipeline (filtergraph output goes to
encoding, from there to decoding, then back to the same graph), and such cycles
are not allowed.
@end itemize
An unlabeled input will be connected to the first unused input stream of the
matching type.
Output link labels are referred to with @option{-map}. Unlabeled outputs are
added to the first output file.
Note that with this option it is possible to use only lavfi sources without
normal input files.
For example, to overlay an image over video
@example
ffmpeg -i video.mkv -i image.png -filter_complex '[0:v][1:v]overlay[out]' -map '[out]' out.mkv
@end example
Here @code{[0:v]} refers to the first video stream in the first input file,
which is linked to the first (main) input of the overlay filter. Similarly the
first video stream in the second input is linked to the second (overlay) input
of overlay.
Assuming there is only one video stream in each input file, we can omit input
labels, so the above is equivalent to
@example
ffmpeg -i video.mkv -i image.png -filter_complex 'overlay[out]' -map '[out]' out.mkv
@end example
Furthermore we can omit the output label and the single output from the filter
graph will be added to the output file automatically, so we can simply write
@example
ffmpeg -i video.mkv -i image.png -filter_complex 'overlay' out.mkv
@end example
As a special exception, you can use a bitmap subtitle stream as input: it
will be converted into a video with the same size as the largest video in
the file, or 720x576 if no video is present. Note that this is an
experimental and temporary solution. It will be removed once libavfilter has
proper support for subtitles.
For example, to hardcode subtitles on top of a DVB-T recording stored in
MPEG-TS format, delaying the subtitles by 1 second:
@example
ffmpeg -i input.ts -filter_complex \
'[#0x2ef] setpts=PTS+1/TB [sub] ; [#0x2d0] [sub] overlay' \
-sn -map '#0x2dc' output.mkv
@end example
(0x2d0, 0x2dc and 0x2ef are the MPEG-TS PIDs of respectively the video,
audio and subtitles streams; 0:0, 0:3 and 0:7 would have worked too)
To generate 5 seconds of pure red video using lavfi @code{color} source:
@example
ffmpeg -filter_complex 'color=c=red' -t 5 out.mkv
@end example
@item -filter_complex_threads @var{nb_threads} (@emph{global})
Defines how many threads are used to process a filter_complex graph.
Similar to filter_threads but used for @code{-filter_complex} graphs only.
The default is the number of available CPUs.
@item -lavfi @var{filtergraph} (@emph{global})
Define a complex filtergraph, i.e. one with arbitrary number of inputs and/or
outputs. Equivalent to @option{-filter_complex}.
@item -accurate_seek (@emph{input})
This option enables or disables accurate seeking in input files with the
@option{-ss} option. It is enabled by default, so seeking is accurate when
transcoding. Use @option{-noaccurate_seek} to disable it, which may be useful
e.g. when copying some streams and transcoding the others.
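For example, a possible way to seek only to the nearest seek point (faster, but
less exact) while copying the audio and transcoding the video (the file names
are placeholders):
@example
ffmpeg -noaccurate_seek -ss 60 -i input.mp4 -c:v libx264 -c:a copy cut.mp4
@end example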
@item -seek_timestamp (@emph{input})
This option enables or disables seeking by timestamp in input files with the
@option{-ss} option. It is disabled by default. If enabled, the argument
to the @option{-ss} option is considered an actual timestamp, and is not
offset by the start time of the file. This matters only for files which do
not start from timestamp 0, such as transport streams.
@item -thread_queue_size @var{size} (@emph{input/output})
For input, this option sets the maximum number of queued packets when reading
from the file or device. With low latency / high rate live streams, packets may
be discarded if they are not read in a timely manner; setting this value can
force ffmpeg to use a separate input thread and read packets as soon as they
arrive. By default ffmpeg only does this if multiple inputs are specified.
For output, this option specifies the maximum number of packets that may be
queued to each muxing thread.
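For example, when recording from a high-rate live device, enlarging the input
packet queue may help avoid drops (the device name is a placeholder):
@example
ffmpeg -f v4l2 -thread_queue_size 1024 -i /dev/video0 out.mkv
@end example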
@item -sdp_file @var{file} (@emph{global})
Print sdp information for an output stream to @var{file}.
This allows dumping sdp information when at least one output isn't an
rtp stream. (Requires at least one of the output formats to be rtp).
@item -discard (@emph{input})
Allows discarding specific streams or frames from streams.
Any input stream can be fully discarded, using the value @code{all}, whereas
selective discarding of frames from a stream occurs at the demuxer
and is not supported by all demuxers.
@table @option
@item none
Discard no frame.
@item default
Default, which discards no frames.
@item noref
Discard all non-reference frames.
@item bidir
Discard all bidirectional frames.
@item nokey
Discard all frames except keyframes.
@item all
Discard all frames.
@end table
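For example, with demuxers that support it, a possible way to keep only
keyframes from a stream-copied input (the file names are placeholders):
@example
ffmpeg -discard nokey -i input.mp4 -c copy keyframes.mp4
@end example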
@item -abort_on @var{flags} (@emph{global})
Stop and abort on various conditions. The following flags are available:
@table @option
@item empty_output
No packets were passed to the muxer, the output is empty.
@item empty_output_stream
No packets were passed to the muxer in some of the output streams.
@end table
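For example, a possible way to make ffmpeg fail outright when the muxer
receives no packets at all (the file names are placeholders):
@example
ffmpeg -abort_on empty_output -i input.mkv -c copy out.mkv
@end example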
@item -max_error_rate (@emph{global})
Set the fraction of decoding frame failures across all inputs which, when crossed,
makes ffmpeg return exit code 69. Crossing this threshold does not terminate
processing. The range is a floating-point number between 0 and 1. The default is 2/3.
@item -xerror (@emph{global})
Stop and exit on error.
@item -max_muxing_queue_size @var{packets} (@emph{output,per-stream})
When transcoding audio and/or video streams, ffmpeg will not begin writing into
the output until it has one packet for each such stream. While waiting for that
to happen, packets for other streams are buffered. This option sets the size of
this buffer, in packets, for the matching output stream.
The default value of this option should be high enough for most uses, so only
touch this option if you are sure that you need it.
@item -muxing_queue_data_threshold @var{bytes} (@emph{output,per-stream})
This is a minimum threshold until which the muxing queue size is not taken into
account. Defaults to 50 megabytes per stream, and is based on the overall size
of packets passed to the muxer.
@item -auto_conversion_filters (@emph{global})
Enable automatically inserting format conversion filters in all filter
graphs, including those defined by @option{-vf}, @option{-af},
@option{-filter_complex} and @option{-lavfi}. If filter format negotiation
requires a conversion, the initialization of the filters will fail without
this option. Conversions can still be performed by inserting the relevant
conversion filter (scale, aresample) in the graph.
On by default, to explicitly disable it you need to specify
@code{-noauto_conversion_filters}.
@item -bits_per_raw_sample[:@var{stream_specifier}] @var{value} (@emph{output,per-stream})
Declare the number of bits per raw sample in the given output stream to be
@var{value}. Note that this option sets the information provided to the
encoder/muxer, it does not change the stream to conform to this value. Setting
values that do not match the stream properties may result in encoding failures
or invalid output files.
@anchor{stats_enc_options}
@item -stats_enc_pre[:@var{stream_specifier}] @var{path} (@emph{output,per-stream})
@item -stats_enc_post[:@var{stream_specifier}] @var{path} (@emph{output,per-stream})
@item -stats_mux_pre[:@var{stream_specifier}] @var{path} (@emph{output,per-stream})
Write per-frame encoding information about the matching streams into the file
given by @var{path}.
@option{-stats_enc_pre} writes information about raw video or audio frames right
before they are sent for encoding, while @option{-stats_enc_post} writes
information about encoded packets as they are received from the encoder.
@option{-stats_mux_pre} writes information about packets just as they are about to
be sent to the muxer. Every frame or packet produces one line in the specified
file. The format of this line is controlled by @option{-stats_enc_pre_fmt} /
@option{-stats_enc_post_fmt} / @option{-stats_mux_pre_fmt}.
When stats for multiple streams are written into a single file, the lines
corresponding to different streams will be interleaved. The precise order of
this interleaving is not specified and not guaranteed to remain stable between
different invocations of the program, even with the same options.
@item -stats_enc_pre_fmt[:@var{stream_specifier}] @var{format_spec} (@emph{output,per-stream})
@item -stats_enc_post_fmt[:@var{stream_specifier}] @var{format_spec} (@emph{output,per-stream})
@item -stats_mux_pre_fmt[:@var{stream_specifier}] @var{format_spec} (@emph{output,per-stream})
Specify the format for the lines written with @option{-stats_enc_pre} /
@option{-stats_enc_post} / @option{-stats_mux_pre}.
@var{format_spec} is a string that may contain directives of the form
@var{@{fmt@}}. @var{format_spec} is backslash-escaped --- use \@{, \@}, and \\
to write a literal @{, @}, or \, respectively, into the output.
The directives given with @var{fmt} may be one of the following:
@table @option
@item fidx
Index of the output file.
@item sidx
Index of the output stream in the file.
@item n
Frame number. Pre-encoding: number of frames sent to the encoder so far.
Post-encoding: number of packets received from the encoder so far.
Muxing: number of packets submitted to the muxer for this stream so far.
@item ni
Input frame number. Index of the input frame (i.e. output by a decoder) that
corresponds to this output frame or packet. -1 if unavailable.
@item tb
Timebase in which this frame/packet's timestamps are expressed, as a rational
number @var{num/den}. Note that the encoder and the muxer may use different timebases.
@item tbi
Timebase for @var{ptsi}, as a rational number @var{num/den}. Available when
@var{ptsi} is available, @var{0/1} otherwise.
@item pts
Presentation timestamp of the frame or packet, as an integer. Should be
multiplied by the timebase to compute presentation time.
@item ptsi
Presentation timestamp of the input frame (see @var{ni}), as an integer. Should
be multiplied by @var{tbi} to compute presentation time. Printed as
(2^63 - 1 = 9223372036854775807) when not available.
@item t
Presentation time of the frame or packet, as a decimal number. Equal to
@var{pts} multiplied by @var{tb}.
@item ti
Presentation time of the input frame (see @var{ni}), as a decimal number. Equal
to @var{ptsi} multiplied by @var{tbi}. Printed as inf when not available.
@item dts (@emph{packet})
Decoding timestamp of the packet, as an integer. Should be multiplied by the
timebase to compute decoding time.
@item dt (@emph{packet})
Decoding time of the packet, as a decimal number. Equal to
@var{dts} multiplied by @var{tb}.
@item sn (@emph{frame,audio})
Number of audio samples sent to the encoder so far.
@item samp (@emph{frame,audio})
Number of audio samples in the frame.
@item size (@emph{packet})
Size of the encoded packet in bytes.
@item br (@emph{packet})
Current bitrate in bits per second.
@item abr (@emph{packet})
Average bitrate for the whole stream so far, in bits per second, -1 if it cannot
be determined at this point.
@item key (@emph{packet})
Character 'K' if the packet contains a keyframe, character 'N' otherwise.
@end table
Directives tagged with @emph{packet} may only be used with
@option{-stats_enc_post_fmt} and @option{-stats_mux_pre_fmt}.
Directives tagged with @emph{frame} may only be used with
@option{-stats_enc_pre_fmt}.
Directives tagged with @emph{audio} may only be used with audio streams.
The default format strings are:
@table @option
@item pre-encoding
@{fidx@} @{sidx@} @{n@} @{t@}
@item post-encoding
@{fidx@} @{sidx@} @{n@} @{t@}
@end table
In the future, new items may be added to the end of the default formatting
strings. Users who depend on the format staying exactly the same should
prescribe it manually.
Note that stats for different streams written into the same file may have
different formats.
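For example, a possible way to log per-packet encoding statistics with a custom
format (the file names are placeholders):
@example
ffmpeg -i input.mkv -c:v libx264 -stats_enc_post enc.log -stats_enc_post_fmt '@{n@} @{pts@} @{t@} @{size@} @{key@}' out.mkv
@end example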
@end table
@section Preset files
A preset file contains a sequence of @var{option}=@var{value} pairs,
one for each line, specifying a sequence of options which would be
awkward to specify on the command line. Lines starting with the hash
('#') character are ignored and are used to provide comments. Check
the @file{presets} directory in the FFmpeg source tree for examples.
There are two types of preset files: ffpreset and avpreset files.
@subsection ffpreset files
ffpreset files are specified with the @code{vpre}, @code{apre},
@code{spre}, and @code{fpre} options. The @code{fpre} option takes the
filename of the preset instead of a preset name as input and can be
used for any kind of codec. For the @code{vpre}, @code{apre}, and
@code{spre} options, the options specified in a preset file are
applied to the currently selected codec of the same type as the preset
option.
The argument passed to the @code{vpre}, @code{apre}, and @code{spre}
preset options identifies the preset file to use according to the
following rules:
First ffmpeg searches for a file named @var{arg}.ffpreset in the
directories @file{$FFMPEG_DATADIR} (if set), and @file{$HOME/.ffmpeg}, and in
the datadir defined at configuration time (usually @file{PREFIX/share/ffmpeg})
or in a @file{ffpresets} folder along the executable on win32,
in that order. For example, if the argument is @code{libvpx-1080p}, it will
search for the file @file{libvpx-1080p.ffpreset}.
If no such file is found, then ffmpeg will search for a file named
@var{codec_name}-@var{arg}.ffpreset in the above-mentioned
directories, where @var{codec_name} is the name of the codec to which
the preset file options will be applied. For example, if you select
the video codec with @code{-vcodec libvpx} and use @code{-vpre 1080p},
then it will search for the file @file{libvpx-1080p.ffpreset}.
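For example, assuming a preset file @file{libvpx-1080p.ffpreset} is installed in
one of the directories listed above, it could be used like this (the file names
are placeholders):
@example
ffmpeg -i input.mkv -vcodec libvpx -vpre 1080p output.webm
@end example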
@subsection avpreset files
avpreset files are specified with the @code{pre} option. They work similarly to
ffpreset files, but they only allow encoder-specific options. Therefore, an
@var{option}=@var{value} pair specifying an encoder cannot be used.
When the @code{pre} option is specified, ffmpeg will look for files with the
suffix .avpreset in the directories @file{$AVCONV_DATADIR} (if set), and
@file{$HOME/.avconv}, and in the datadir defined at configuration time (usually
@file{PREFIX/share/ffmpeg}), in that order.
First ffmpeg searches for a file named @var{codec_name}-@var{arg}.avpreset in
the above-mentioned directories, where @var{codec_name} is the name of the codec
to which the preset file options will be applied. For example, if you select the
video codec with @code{-vcodec libvpx} and use @code{-pre 1080p}, then it will
search for the file @file{libvpx-1080p.avpreset}.
If no such file is found, then ffmpeg will search for a file named
@var{arg}.avpreset in the same directories.
@anchor{vstats_file_format}
@section vstats file format
The @code{-vstats} and @code{-vstats_file} options enable generation of a file
containing statistics about the generated video outputs.
The @code{-vstats_version} option controls the format version of the generated
file.
With version @code{1} the format is:
@example
frame= @var{FRAME} q= @var{FRAME_QUALITY} PSNR= @var{PSNR} f_size= @var{FRAME_SIZE} s_size= @var{STREAM_SIZE}kB time= @var{TIMESTAMP} br= @var{BITRATE}kbits/s avg_br= @var{AVERAGE_BITRATE}kbits/s
@end example
With version @code{2} the format is:
@example
out= @var{OUT_FILE_INDEX} st= @var{OUT_FILE_STREAM_INDEX} frame= @var{FRAME_NUMBER} q= @var{FRAME_QUALITY}f PSNR= @var{PSNR} f_size= @var{FRAME_SIZE} s_size= @var{STREAM_SIZE}kB time= @var{TIMESTAMP} br= @var{BITRATE}kbits/s avg_br= @var{AVERAGE_BITRATE}kbits/s
@end example
The value corresponding to each key is described below:
@table @option
@item avg_br
average bitrate expressed in Kbits/s
@item br
bitrate expressed in Kbits/s
@item frame
number of the encoded frame
@item out
output file index
@item PSNR
Peak Signal to Noise Ratio
@item q
quality of the frame
@item f_size
encoded packet size expressed as number of bytes
@item s_size
stream size expressed in KiB
@item st
output file stream index
@item time
time of the packet
@item type
picture type
@end table
See also the @ref{stats_enc_options,,-stats_enc options} for an alternative way
to show encoding statistics.
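For example, a possible way to write version 2 statistics for an encode into
@file{vstats.log} (the file names are placeholders):
@example
ffmpeg -i input.mkv -c:v libx264 -vstats_file vstats.log -vstats_version 2 out.mkv
@end example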
@c man end OPTIONS
@chapter Examples
@c man begin EXAMPLES
@section Video and Audio grabbing
If you specify the input format and device then ffmpeg can grab video
and audio directly.
@example
ffmpeg -f oss -i /dev/dsp -f video4linux2 -i /dev/video0 /tmp/out.mpg
@end example
Or with an ALSA audio source (mono input, card id 1) instead of OSS:
@example
ffmpeg -f alsa -ac 1 -i hw:1 -f video4linux2 -i /dev/video0 /tmp/out.mpg
@end example
Note that you must activate the right video source and channel before
launching ffmpeg with any TV viewer such as
@uref{http://linux.bytesex.org/xawtv/, xawtv} by Gerd Knorr. You also
have to set the audio recording levels correctly with a
standard mixer.
@section X11 grabbing
Grab the X11 display with ffmpeg via
@example
ffmpeg -f x11grab -video_size cif -framerate 25 -i :0.0 /tmp/out.mpg
@end example
0.0 is the display.screen number of your X11 server, the same as
the DISPLAY environment variable.
@example
ffmpeg -f x11grab -video_size cif -framerate 25 -i :0.0+10,20 /tmp/out.mpg
@end example
Here, in addition, 10 is the x-offset and 20 the y-offset for the grabbing.
@section Video and Audio file format conversion
Any supported file format and protocol can serve as input to ffmpeg:
Examples:
@itemize
@item
You can use YUV files as input:
@example
ffmpeg -i /tmp/test%d.Y /tmp/out.mpg
@end example
It will use the files:
@example
/tmp/test0.Y, /tmp/test0.U, /tmp/test0.V,
/tmp/test1.Y, /tmp/test1.U, /tmp/test1.V, etc...
@end example
The Y files use twice the resolution of the U and V files. They are
raw files, without header. They can be generated by all decent video
decoders. You must specify the size of the image with the @option{-s} option
if ffmpeg cannot guess it.
@item
You can input from a raw YUV420P file:
@example
ffmpeg -i /tmp/test.yuv /tmp/out.avi
@end example
test.yuv is a file containing raw YUV planar data. Each frame is composed
of the Y plane followed by the U and V planes at half vertical and
horizontal resolution.
@item
You can output to a raw YUV420P file:
@example
ffmpeg -i mydivx.avi hugefile.yuv
@end example
@item
You can set several input files and output files:
@example
ffmpeg -i /tmp/a.wav -s 640x480 -i /tmp/a.yuv /tmp/a.mpg
@end example
Converts the audio file a.wav and the raw YUV video file a.yuv
to MPEG file a.mpg.
@item
You can also do audio and video conversions at the same time:
@example
ffmpeg -i /tmp/a.wav -ar 22050 /tmp/a.mp2
@end example
Converts a.wav to MPEG audio at 22050 Hz sample rate.
@item
You can encode to several formats at the same time and define a
mapping from input stream to output streams:
@example
ffmpeg -i /tmp/a.wav -map 0:a -b:a 64k /tmp/a.mp2 -map 0:a -b:a 128k /tmp/b.mp2
@end example
Converts a.wav to a.mp2 at 64 kbits and to b.mp2 at 128 kbits. '-map
file:index' specifies which input stream is used for each output
stream, in the order of the definition of output streams.
@item
You can transcode decrypted VOBs:
@example
ffmpeg -i snatch_1.vob -f avi -c:v mpeg4 -b:v 800k -g 300 -bf 2 -c:a libmp3lame -b:a 128k snatch.avi
@end example
This is a typical DVD ripping example; the input is a VOB file, the
output an AVI file with MPEG-4 video and MP3 audio. Note that in this
command we use B-frames so the MPEG-4 stream is DivX5 compatible, and
GOP size is 300 which means one intra frame every 10 seconds for 29.97fps
input video. Furthermore, the audio stream is MP3-encoded so you need
to enable LAME support by passing @code{--enable-libmp3lame} to configure.
The mapping is particularly useful for DVD transcoding
to get the desired audio language.
NOTE: To see the supported input formats, use @code{ffmpeg -demuxers}.
@item
You can extract images from a video, or create a video from many images:
For extracting images from a video:
@example
ffmpeg -i foo.avi -r 1 -s WxH -f image2 foo-%03d.jpeg
@end example
This will extract one video frame per second from the video and will
output them in files named @file{foo-001.jpeg}, @file{foo-002.jpeg},
etc. Images will be rescaled to fit the new WxH values.
If you want to extract just a limited number of frames, you can use the
above command in combination with the @code{-frames:v} or @code{-t} option,
or in combination with -ss to start extracting from a certain point in time.
For creating a video from many images:
@example
ffmpeg -f image2 -framerate 12 -i foo-%03d.jpeg -s WxH foo.avi
@end example
The syntax @code{foo-%03d.jpeg} specifies to use a decimal number
composed of three digits padded with zeroes to express the sequence
number. It is the same syntax supported by the C printf function, but
only formats accepting a normal integer are suitable.
When importing an image sequence, -i also supports expanding
shell-like wildcard patterns (globbing) internally, by selecting the
image2-specific @code{-pattern_type glob} option.
For example, for creating a video from filenames matching the glob pattern
@code{foo-*.jpeg}:
@example
ffmpeg -f image2 -pattern_type glob -framerate 12 -i 'foo-*.jpeg' -s WxH foo.avi
@end example
@item
You can put many streams of the same type in the output:
@example
ffmpeg -i test1.avi -i test2.avi -map 1:1 -map 1:0 -map 0:1 -map 0:0 -c copy -y test12.nut
@end example
The resulting output file @file{test12.nut} will contain the first four streams
from the input files in reverse order.
@item
To force CBR video output:
@example
ffmpeg -i myfile.avi -b 4000k -minrate 4000k -maxrate 4000k -bufsize 1835k out.m2v
@end example
@item
The four options lmin, lmax, mblmin and mblmax use 'lambda' units,
but you may use the QP2LAMBDA constant to easily convert from 'q' units:
@example
ffmpeg -i src.ext -lmax 21*QP2LAMBDA dst.ext
@end example
@end itemize
@c man end EXAMPLES
@include config.texi
@ifset config-all
@ifset config-avutil
@include utils.texi
@end ifset
@ifset config-avcodec
@include codecs.texi
@include bitstream_filters.texi
@end ifset
@ifset config-avformat
@include formats.texi
@include protocols.texi
@end ifset
@ifset config-avdevice
@include devices.texi
@end ifset
@ifset config-swresample
@include resampler.texi
@end ifset
@ifset config-swscale
@include scaler.texi
@end ifset
@ifset config-avfilter
@include filters.texi
@end ifset
@include general_contents.texi
@end ifset
@chapter See Also
@ifhtml
@ifset config-all
@url{ffmpeg.html,ffmpeg}
@end ifset
@ifset config-not-all
@url{ffmpeg-all.html,ffmpeg-all},
@end ifset
@url{ffplay.html,ffplay}, @url{ffprobe.html,ffprobe},
@url{ffmpeg-utils.html,ffmpeg-utils},
@url{ffmpeg-scaler.html,ffmpeg-scaler},
@url{ffmpeg-resampler.html,ffmpeg-resampler},
@url{ffmpeg-codecs.html,ffmpeg-codecs},
@url{ffmpeg-bitstream-filters.html,ffmpeg-bitstream-filters},
@url{ffmpeg-formats.html,ffmpeg-formats},
@url{ffmpeg-devices.html,ffmpeg-devices},
@url{ffmpeg-protocols.html,ffmpeg-protocols},
@url{ffmpeg-filters.html,ffmpeg-filters}
@end ifhtml
@ifnothtml
@ifset config-all
ffmpeg(1),
@end ifset
@ifset config-not-all
ffmpeg-all(1),
@end ifset
ffplay(1), ffprobe(1),
ffmpeg-utils(1), ffmpeg-scaler(1), ffmpeg-resampler(1),
ffmpeg-codecs(1), ffmpeg-bitstream-filters(1), ffmpeg-formats(1),
ffmpeg-devices(1), ffmpeg-protocols(1), ffmpeg-filters(1)
@end ifnothtml
@include authors.texi
@ignore
@setfilename ffmpeg
@settitle ffmpeg media converter
@end ignore
@bye