```sh
ffprobe -v quiet -print_format json -show_format -show_streams "lolwut.mp4" > "lolwut.mp4.json"
```

```json
{
    "streams": [
        {
            "index": 0,
            "codec_name": "h264",
            "codec_long_name": "H.264 / AVC / MPEG-4 AVC / MPEG-4 part 10",
            "codec_type": "video",
            "codec_time_base": "1001/48000",
            "codec_tag_string": "avc1",
            "codec_tag": "0x31637661",
            "width": 1280,
            "height": 720,
            "has_b_frames": 0,
            "pix_fmt": "yuv420p",
            "level": 31,
            "is_avc": "1",
            "nal_length_size": "4",
            "r_frame_rate": "35029/1461",
            "avg_frame_rate": "35029/1461",
            "time_base": "1/35029",
            "start_time": "0.000000",
            "duration": "1239.195267",
            "bit_rate": "1782423",
            "nb_frames": "29711",
            "tags": {
                "creation_time": "1970-01-01 00:00:00",
                "language": "und",
                "handler_name": "VideoHandler"
            }
        },
        {
            "index": 1,
            "codec_name": "aac",
            "codec_long_name": "Advanced Audio Coding",
            "codec_type": "audio",
            "codec_time_base": "1/48000",
            "codec_tag_string": "mp4a",
            "codec_tag": "0x6134706d",
            "sample_fmt": "s16",
            "sample_rate": "48000",
            "channels": 2,
            "bits_per_sample": 0,
            "r_frame_rate": "0/0",
            "avg_frame_rate": "0/0",
            "time_base": "1/48000",
            "start_time": "0.000000",
            "duration": "1239.059396",
            "bit_rate": "127966",
            "nb_frames": "58081",
            "tags": {
                "creation_time": "2012-04-01 15:42:28",
                "language": "jpn",
                "handler_name": "GPAC ISO Audio Handler"
            }
        }
    ],
    "format": {
        "filename": "lolwut.mp4",
        "nb_streams": 2,
        "format_name": "mov,mp4,m4a,3gp,3g2,mj2",
        "format_long_name": "QuickTime/MPEG-4/Motion JPEG 2000 format",
        "start_time": "0.000000",
        "duration": "1239.195000",
        "size": "296323860",
        "bit_rate": "1913008",
        "tags": {
            "major_brand": "isom",
            "minor_version": "1",
            "compatible_brands": "isom",
            "creation_time": "2012-04-01 15:42:24"
        }
    }
}
```
If anyone wants it, here's my quick-and-dirty Newtonsoft.Json-compatible C# class for ffprobe's `-print_format json -show_format -show_streams` output:

Usage: if you've got ffprobe's output in a `String ffprobeJsonOutput`, then just do `FFProbeJsonOutput? parsedOutput = JsonConvert.DeserializeObject<FFProbeJsonOutput>( ffprobeJsonOutput );`.

Almost every member is marked nullable because I wasn't sure which JSON properties ffprobe will always return for all formats/codecs.
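A minimal sketch of getting that JSON string in the first place by shelling out to ffprobe (assuming ffprobe is on your PATH; the file name is just the example from above):

```csharp
// Sketch: run ffprobe and capture its JSON output from stdout, then deserialize it.
// Assumes ffprobe is on PATH; "lolwut.mp4" is just the example file used above.
using System;
using System.Diagnostics;
using Newtonsoft.Json;

ProcessStartInfo psi = new ProcessStartInfo
{
    FileName               = "ffprobe",
    Arguments              = "-v quiet -print_format json -show_format -show_streams \"lolwut.mp4\"",
    RedirectStandardOutput = true,
    UseShellExecute        = false
};

using( Process process = Process.Start( psi )! )
{
    String ffprobeJsonOutput = process.StandardOutput.ReadToEnd();
    process.WaitForExit();

    FFProbeJsonOutput? parsedOutput = JsonConvert.DeserializeObject<FFProbeJsonOutput>( ffprobeJsonOutput );
    Console.WriteLine( parsedOutput?.Format?.Duration );
}
```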
```csharp
using System;
using System.Collections.Generic;
using System.Diagnostics.CodeAnalysis;
using System.Globalization;
using System.Linq;

using Newtonsoft.Json;

namespace MyProject
{
    public class FFProbeJsonOutput
    {
        /// <summary>Information about the streams within the container.</summary>
        [JsonProperty("streams")]
        [SuppressMessage( "Microsoft.Usage", "CA2227:CollectionPropertiesShouldBeReadOnly" )]
        public List<FFProbeStream>? Streams { get; set; }

        /// <summary>Information about the container.</summary>
        [JsonProperty("format")]
        public FFProbeFormat? Format { get; set; }
    }

    public class FFProbeStream
    {
        [JsonProperty("index")]
        public Int32 Index { get; set; }

        [JsonProperty("codec_name")]
        public String? CodecName { get; set; }

        /// <summary>Values like "H.264 / AVC / MPEG-4 AVC / MPEG-4 part 10".</summary>
        [JsonProperty("codec_long_name")]
        public String? CodecLongName { get; set; }

        [JsonProperty("codec_type")]
        public String? CodecType { get; set; }

        /// <summary>Contains the FourCC of a video codec or the TwoCC of an audio codec - or another textual representation, as FourCC/TwoCC is container-specific.</summary>
        [JsonProperty("codec_tag_string")]
        public String? CodecTag { get; set; }

        /// <summary>Codec/format-specific profile information, e.g. for AVC (H.264) this may be "High" but for AAC this may be "LC".</summary>
        [JsonProperty("profile")]
        public String? Profile { get; set; }

        [JsonProperty("level")]
        public Int32? Level { get; set; }

        /// <summary>Only specified for video streams.</summary>
        [JsonProperty("width")]
        public Int32? Width { get; set; }

        /// <summary>Only specified for video streams.</summary>
        [JsonProperty("height")]
        public Int32? Height { get; set; }

        // Don't worry about `coded_width`/`coded_height`: those are for when a video codec requires the encoded video's dimensions to be a multiple of a block-size, but the video is still cropped to an arbitrary size (the `width`/`height`) when rendered.

        [JsonProperty("duration")]
        public Double? Duration { get; set; }

        /// <summary>Duration expressed in integer time-base units (see https://video.stackexchange.com/questions/27546/difference-between-duration-ts-and-duration-in-ffprobe-output ).</summary>
        [JsonProperty("duration_ts")]
        public Int64? DurationTS { get; set; }

        /// <summary>Values like "1/600". See https://stackoverflow.com/questions/43333542/what-is-video-timescale-timebase-or-timestamp-in-ffmpeg </summary>
        [JsonProperty("time_base")]
        public String? TimeBase { get; set; }

        /// <summary>Stream-specific tags/metadata. See <see cref="KnownFFProbeVideoStreamTags"/>.</summary>
        [SuppressMessage( "Microsoft.Usage", "CA2227:CollectionPropertiesShouldBeReadOnly" )]
        [JsonProperty("tags")]
        public Dictionary<String,String>? Tags { get; set; }
    }

    public class FFProbeFormat
    {
        [JsonProperty("filename")]
        public String? FileName { get; set; }

        [JsonProperty("nb_streams")]
        public Int32? NBStreams { get; set; }

        [JsonProperty("nb_programs")]
        public Int32? NBPrograms { get; set; }

        [JsonProperty("format_name")]
        public String? FormatName { get; set; }

        [JsonProperty("format_long_name")]
        public String? FormatLongName { get; set; }

        /// <summary>Approximate duration in seconds (approximate because videos can start *after* the 00:00:00 timecode, e.g. see this SO question with a video that starts at 0.04: https://superuser.com/questions/650291/how-to-get-video-duration-in-seconds ).</summary>
        [JsonProperty("duration")]
        public Double? Duration { get; set; }

        [JsonProperty("bit_rate")]
        public Int32? BitRate { get; set; }

        [JsonProperty("probe_score")]
        public Int32? ProbeScore { get; set; }

        /// <summary>Container and format tags/metadata, not stream-specific tags. See <see cref="KnownFFProbeFormatTags"/>.</summary>
        [SuppressMessage( "Microsoft.Usage", "CA2227:CollectionPropertiesShouldBeReadOnly" )]
        [JsonProperty("tags")]
        public Dictionary<String,String>? Tags { get; set; }
    }

    /// <summary>Known tag-names for <see cref="FFProbeStream"/>'s <see cref="FFProbeStream.Tags"/>.</summary>
    public static class KnownFFProbeVideoStreamTags
    {
        /// <summary>Tag value is a decimal integer value in degrees, e.g. "90".</summary>
        public const String Rotate = "rotate";

        /// <summary>Tag value is ISO 8601, e.g. "2020-04-21T15:22:51.000000Z".</summary>
        public const String CreationTime = "creation_time";

        /// <summary>Values like "H.264".</summary>
        public const String Encoder = "encoder";
    }

    /// <summary>Known tag-names for <see cref="FFProbeFormat"/>'s <see cref="FFProbeFormat.Tags"/>.</summary>
    public static class KnownFFProbeFormatTags
    {
        /// <summary>MPEG-4 container "brand" identifier.</summary>
        public const String MajorBrand = "major_brand";

        /// <summary>Tag value is ISO 8601, e.g. "2020-04-21T15:22:51.000000Z".</summary>
        public const String CreationTime = "creation_time";

        /// <summary>E.g. "Apple".</summary>
        public const String AppleMake = "com.apple.quicktime.make";

        /// <summary>E.g. "iPhone 8 Plus".</summary>
        public const String AppleModel = "com.apple.quicktime.model";

        /// <summary>I think this is the iOS/iPhone OS software version if it's from an iPhone, e.g. "13.4.1".</summary>
        public const String AppleSoftware = "com.apple.quicktime.software";
    }

    public static class FFProbeOutputExtensions
    {
        public static FFProbeStream? GetFirstVideoStreamOrNull( this FFProbeJsonOutput json )
        {
            if( json.Streams != null )
            {
                return json.Streams.FirstOrDefault( s => "video".Equals( s.CodecType, StringComparison.OrdinalIgnoreCase ) );
            }
            else
            {
                return null;
            }
        }

        public static FFProbeStream? GetFirstAudioStreamOrNull( this FFProbeJsonOutput json )
        {
            if( json.Streams != null )
            {
                return json.Streams.FirstOrDefault( s => "audio".Equals( s.CodecType, StringComparison.OrdinalIgnoreCase ) );
            }
            else
            {
                return null;
            }
        }

        public static Int32? GetRotation( this FFProbeStream ffProbeStream )
        {
            if( ffProbeStream.Tags != null )
            {
                if( ffProbeStream.Tags.TryGetValue( KnownFFProbeVideoStreamTags.Rotate, value: out String? rotateTagValue ) )
                {
                    if( Int32.TryParse( rotateTagValue, NumberStyles.Any, CultureInfo.InvariantCulture, out Int32 rotateTagValueInt ) )
                    {
                        return rotateTagValueInt;
                    }
                }
            }

            return null;
        }

        /// <summary>If the <see cref="FFProbeJsonOutput.Format"/> has a duration, that's returned - otherwise the first stream duration is returned. Returns null if none of the streams (nor the <see cref="FFProbeJsonOutput.Format"/>) has a duration value set.</summary>
        public static TimeSpan? GetDuration( this FFProbeJsonOutput ffProbeOutput )
        {
            if( ffProbeOutput.Format != null )
            {
                Double? d = ffProbeOutput.Format.Duration;
                if( d.HasValue )
                {
                    return TimeSpan.FromSeconds( d.Value );
                }
            }

            if( ffProbeOutput.Streams != null )
            {
                foreach( FFProbeStream? stream in ffProbeOutput.Streams )
                {
                    Double? d = stream?.Duration;
                    if( d.HasValue )
                    {
                        return TimeSpan.FromSeconds( d.Value );
                    }
                }
            }

            return null;
        }
    }
}
```
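And a small usage sketch of the extension methods (same usings and namespace as above; `ffprobeJsonOutput` is assumed to hold ffprobe's raw JSON output, as in the Usage note at the top):

```csharp
// Usage sketch for the extension methods above (variable names are just examples).
FFProbeJsonOutput? parsed = JsonConvert.DeserializeObject<FFProbeJsonOutput>( ffprobeJsonOutput );
if( parsed != null )
{
    FFProbeStream? video    = parsed.GetFirstVideoStreamOrNull();
    FFProbeStream? audio    = parsed.GetFirstAudioStreamOrNull();
    TimeSpan?      duration = parsed.GetDuration();
    Int32?         rotation = video?.GetRotation();

    Console.WriteLine( "Video: {0}, Audio: {1}, Duration: {2}, Rotation: {3}",
        video?.CodecName, audio?.CodecName, duration, rotation );
}
```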
Hello.
Great list! However, does anyone know how to find the duration of stream segments (HLS or MPEG-DASH)?
@quintanaplaca I believe that information is in the .m3u8 file's `#EXTINF` tags, not in the segment blob itself. While a segment blob will have a duration, I think (though I'm uncertain) that clients prefer the timing data from the .m3u8 file, as that's more canonical... or something. YMMV. IANAL. Etc.
To quote Apple: https://developer.apple.com/documentation/http_live_streaming/example_playlists_for_http_live_streaming/video_on_demand_playlist_construction (emphasis mine)
> #EXTINF: A record marker that describes the media file identified by the URL that follows it. Each media file URL must be preceded by an EXTINF tag. This tag contains a duration attribute that's an integer or floating-point number in decimal positional notation that specifies the duration of the media segment in seconds. This value must be less than or equal to the target duration.
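If all you need is the playlist's total duration from those `#EXTINF` tags, a rough sketch would be to sum them (untested against real-world playlists, and "playlist.m3u8" is just a placeholder path):

```csharp
// Rough sketch: sum the #EXTINF segment durations in a local HLS playlist file.
// "playlist.m3u8" is a placeholder path; untested against real-world playlists.
using System;
using System.Globalization;
using System.IO;
using System.Linq;

Double totalSeconds = File.ReadLines( "playlist.m3u8" )
    .Where( line => line.StartsWith( "#EXTINF:", StringComparison.Ordinal ) )
    .Select( line => line.Substring( "#EXTINF:".Length ).Split( ',' )[0] )
    .Sum( d => Double.Parse( d, CultureInfo.InvariantCulture ) );

Console.WriteLine( "Total playlist duration: {0}", TimeSpan.FromSeconds( totalSeconds ) );
```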
@Jehoel, thank you for your reply. You are right. I would like a way to analyze HLS and MPEG-DASH streams with FFmpeg (the segmentSize and segmentDuration) and check their integrity.
Is there a way to grab this information from C, using one of the libraries provided by FFmpeg?
nice
- `-select_streams "s"` for just subtitles. `-select_streams "s:0"` for just the first subtitle (the index is zero-based).
- `-find_stream_info` fills in missing information by actually reading the streams instead of just parsing the header(s). It helps with corrupted files. Optionally add `-probesize "10000000"` and `-analyzeduration "2000000"` as well, for better results.
- `-pretty` gives a more human-readable output (works with other print formats as well; some builds have this on by default for JSON). It helps when debugging in the console.
- `2>/dev/null` (Linux) or `2>nul` (Windows; note: the missing last `l` in `nul` is intentional) to speed up the command if you don't need any console output at all. You can keep whatever loglevel you currently have.
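For example, combining a few of the flags above into one command (the file name "input.mkv" is just a placeholder):

```sh
ffprobe -v quiet -print_format json -show_format -show_streams -select_streams "s:0" -find_stream_info -pretty "input.mkv"
```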