metadata fetch using FFmpegKit, animated AVIF support

This commit is contained in:
Thibault Deckers 2023-08-05 17:39:14 +02:00
parent 6efead811a
commit 910680edda
30 changed files with 545 additions and 83 deletions

View file

@ -6,6 +6,7 @@ All notable changes to this project will be documented in this file.
### Added ### Added
- support for animated AVIF (requires rescan)
- Collection: filtering by rating range - Collection: filtering by rating range
- About: data usage - About: data usage

View file

@ -57,8 +57,9 @@
allow install on API 19, despite the `minSdkVersion` declared in dependencies: allow install on API 19, despite the `minSdkVersion` declared in dependencies:
- Google Maps is from API 20 - Google Maps is from API 20
- the Security library is from API 21 - the Security library is from API 21
- FFmpegKit for Flutter is from API 24
--> -->
<uses-sdk tools:overrideLibrary="io.flutter.plugins.googlemaps, androidx.security:security-crypto" /> <uses-sdk tools:overrideLibrary="io.flutter.plugins.googlemaps, androidx.security:security-crypto, com.arthenica.ffmpegkit.flutter" />
<!-- from Android 11, we should define <queries> to make other apps visible to this app --> <!-- from Android 11, we should define <queries> to make other apps visible to this app -->
<queries> <queries>

View file

@ -294,6 +294,9 @@ class AvesEntry with AvesEntryBase {
return d == null ? null : DateTime(d.year, d.month, d.day); return d == null ? null : DateTime(d.year, d.month, d.day);
} }
@override
bool get isAnimated => catalogMetadata?.isAnimated ?? false;
@override @override
int? get durationMillis => _durationMillis; int? get durationMillis => _durationMillis;

View file

@ -3,6 +3,7 @@ import 'package:aves/model/entry/extensions/props.dart';
import 'package:aves/model/geotiff.dart'; import 'package:aves/model/geotiff.dart';
import 'package:aves/model/metadata/catalog.dart'; import 'package:aves/model/metadata/catalog.dart';
import 'package:aves/model/video/metadata.dart'; import 'package:aves/model/video/metadata.dart';
import 'package:aves/ref/mime_types.dart';
import 'package:aves/services/common/services.dart'; import 'package:aves/services/common/services.dart';
import 'package:aves/services/metadata/svg_metadata_service.dart'; import 'package:aves/services/metadata/svg_metadata_service.dart';
@ -23,7 +24,7 @@ extension ExtraAvesEntryCatalog on AvesEntry {
catalogMetadata = CatalogMetadata(id: id); catalogMetadata = CatalogMetadata(id: id);
} else { } else {
// pre-processing // pre-processing
if (isVideo && (!isSized || durationMillis == 0)) { if ((isVideo && (!isSized || durationMillis == 0)) || mimeType == MimeTypes.avif) {
// exotic video that is not sized during loading // exotic video that is not sized during loading
final fields = await VideoMetadataFormatter.getLoadingMetadata(this); final fields = await VideoMetadataFormatter.getLoadingMetadata(this);
await applyNewFields(fields, persist: persist); await applyNewFields(fields, persist: persist);
@ -33,7 +34,7 @@ extension ExtraAvesEntryCatalog on AvesEntry {
catalogMetadata = await metadataFetchService.getCatalogMetadata(this, background: background); catalogMetadata = await metadataFetchService.getCatalogMetadata(this, background: background);
// post-processing // post-processing
if (isVideo && (catalogMetadata?.dateMillis ?? 0) == 0) { if ((isVideo && (catalogMetadata?.dateMillis ?? 0) == 0) || (mimeType == MimeTypes.avif && durationMillis != null)) {
catalogMetadata = await VideoMetadataFormatter.getCatalogMetadata(this); catalogMetadata = await VideoMetadataFormatter.getCatalogMetadata(this);
} }
if (isGeotiff && !hasGps) { if (isGeotiff && !hasGps) {

View file

@ -26,7 +26,9 @@ extension ExtraAvesEntryProps on AvesEntry {
bool get isImage => MimeTypes.isImage(mimeType); bool get isImage => MimeTypes.isImage(mimeType);
bool get isVideo => MimeTypes.isVideo(mimeType); bool get isVideo => MimeTypes.isVideo(mimeType) || (mimeType == MimeTypes.avif && isAnimated);
bool get isPureVideo => isVideo && !isAnimated;
// size // size
@ -68,8 +70,6 @@ extension ExtraAvesEntryProps on AvesEntry {
// catalog // catalog
bool get isAnimated => catalogMetadata?.isAnimated ?? false;
bool get isGeotiff => catalogMetadata?.isGeotiff ?? false; bool get isGeotiff => catalogMetadata?.isGeotiff ?? false;
bool get is360 => catalogMetadata?.is360 ?? false; bool get is360 => catalogMetadata?.is360 ?? false;

View file

@ -55,6 +55,7 @@ class CatalogMetadata {
int? id, int? id,
String? mimeType, String? mimeType,
int? dateMillis, int? dateMillis,
bool? isAnimated,
bool? isMultiPage, bool? isMultiPage,
int? rotationDegrees, int? rotationDegrees,
double? latitude, double? latitude,
@ -64,7 +65,7 @@ class CatalogMetadata {
id: id ?? this.id, id: id ?? this.id,
mimeType: mimeType ?? this.mimeType, mimeType: mimeType ?? this.mimeType,
dateMillis: dateMillis ?? this.dateMillis, dateMillis: dateMillis ?? this.dateMillis,
isAnimated: isAnimated, isAnimated: isAnimated ?? this.isAnimated,
isFlipped: isFlipped, isFlipped: isFlipped,
isGeotiff: isGeotiff, isGeotiff: isGeotiff,
is360: is360, is360: is360,

View file

@ -8,6 +8,7 @@ import 'package:aves/model/video/profiles/aac.dart';
import 'package:aves/model/video/profiles/h264.dart'; import 'package:aves/model/video/profiles/h264.dart';
import 'package:aves/model/video/profiles/hevc.dart'; import 'package:aves/model/video/profiles/hevc.dart';
import 'package:aves/ref/languages.dart'; import 'package:aves/ref/languages.dart';
import 'package:aves/ref/mime_types.dart';
import 'package:aves/ref/mp4.dart'; import 'package:aves/ref/mp4.dart';
import 'package:aves/services/common/services.dart'; import 'package:aves/services/common/services.dart';
import 'package:aves/theme/format.dart'; import 'package:aves/theme/format.dart';
@ -24,7 +25,8 @@ class VideoMetadataFormatter {
static final _ambiguousDatePatterns = { static final _ambiguousDatePatterns = {
RegExp(r'^\d{2}[-/]\d{2}[-/]\d{4}$'), RegExp(r'^\d{2}[-/]\d{2}[-/]\d{4}$'),
}; };
static final _durationPattern = RegExp(r'(\d+):(\d+):(\d+)(.\d+)'); static final _durationHmsmPattern = RegExp(r'(\d+):(\d+):(\d+)(.\d+)');
static final _durationSmPattern = RegExp(r'(\d+)(.\d+)');
static final _locationPattern = RegExp(r'([+-][.0-9]+)'); static final _locationPattern = RegExp(r'([+-][.0-9]+)');
static final Map<String, String> _codecNames = { static final Map<String, String> _codecNames = {
Codecs.ac3: 'AC-3', Codecs.ac3: 'AC-3',
@ -63,13 +65,27 @@ class VideoMetadataFormatter {
final durationMicros = mediaInfo[Keys.durationMicros]; final durationMicros = mediaInfo[Keys.durationMicros];
if (durationMicros is num) { if (durationMicros is num) {
fields['durationMillis'] = (durationMicros / 1000).round(); fields['durationMillis'] = (durationMicros / 1000).round();
} else {
final duration = _parseDuration(mediaInfo[Keys.duration]);
if (duration != null) {
fields['durationMillis'] = duration.inMilliseconds;
}
} }
return fields; return fields;
} }
static Future<CatalogMetadata?> getCatalogMetadata(AvesEntry entry) async { static Future<CatalogMetadata?> getCatalogMetadata(AvesEntry entry) async {
var catalogMetadata = entry.catalogMetadata ?? CatalogMetadata(id: entry.id);
final mediaInfo = await videoMetadataFetcher.getMetadata(entry); final mediaInfo = await videoMetadataFetcher.getMetadata(entry);
if (entry.mimeType == MimeTypes.avif) {
final duration = _parseDuration(mediaInfo[Keys.duration]);
if (duration == null) return null;
catalogMetadata = catalogMetadata.copyWith(isAnimated: true);
}
// only consider values with at least 8 characters (yyyymmdd), // only consider values with at least 8 characters (yyyymmdd),
// ignoring unset values like `0`, as well as year values like `2021` // ignoring unset values like `0`, as well as year values like `2021`
bool isDefined(dynamic value) => value is String && value.length >= 8; bool isDefined(dynamic value) => value is String && value.length >= 8;
@ -88,12 +104,12 @@ class VideoMetadataFormatter {
// exclude date if it is suspiciously close to epoch // exclude date if it is suspiciously close to epoch
if (dateMillis != null && !DateTime.fromMillisecondsSinceEpoch(dateMillis).isAtSameDayAs(epoch)) { if (dateMillis != null && !DateTime.fromMillisecondsSinceEpoch(dateMillis).isAtSameDayAs(epoch)) {
return (entry.catalogMetadata ?? CatalogMetadata(id: entry.id)).copyWith( catalogMetadata = catalogMetadata.copyWith(
dateMillis: dateMillis, dateMillis: dateMillis,
); );
} }
return entry.catalogMetadata; return catalogMetadata;
} }
static bool isAmbiguousDate(String dateString) { static bool isAmbiguousDate(String dateString) {
@ -180,14 +196,21 @@ class VideoMetadataFormatter {
switch (key) { switch (key) {
case Keys.codecLevel: case Keys.codecLevel:
case Keys.codecTag:
case Keys.codecTagString:
case Keys.durationTs:
case Keys.fpsNum: case Keys.fpsNum:
case Keys.handlerName:
case Keys.index: case Keys.index:
case Keys.isAvc:
case Keys.probeScore:
case Keys.programCount:
case Keys.refs:
case Keys.sarNum: case Keys.sarNum:
case Keys.selectedAudioStream: case Keys.selectedAudioStream:
case Keys.selectedTextStream: case Keys.selectedTextStream:
case Keys.selectedVideoStream: case Keys.selectedVideoStream:
case Keys.statisticsTags: case Keys.statisticsTags:
case Keys.streamCount:
case Keys.streams: case Keys.streams:
case Keys.streamType: case Keys.streamType:
case Keys.tbrNum: case Keys.tbrNum:
@ -205,10 +228,14 @@ class VideoMetadataFormatter {
case Keys.bitrate: case Keys.bitrate:
case Keys.bps: case Keys.bps:
save('Bit Rate', _formatMetric(value, 'b/s')); save('Bit Rate', _formatMetric(value, 'b/s'));
case Keys.bitsPerRawSample:
save('Bits Per Raw Sample', value);
case Keys.byteCount: case Keys.byteCount:
save('Size', _formatFilesize(value)); save('Size', _formatFilesize(value));
case Keys.channelLayout: case Keys.channelLayout:
save('Channel Layout', _formatChannelLayout(value)); save('Channel Layout', _formatChannelLayout(value));
case Keys.chromaLocation:
save('Chroma Location', value);
case Keys.codecName: case Keys.codecName:
if (value != 'none') { if (value != 'none') {
save('Format', _formatCodecName(value)); save('Format', _formatCodecName(value));
@ -219,6 +246,18 @@ class VideoMetadataFormatter {
// user-friendly descriptions for related enums are defined in libavutil/pixfmt.h // user-friendly descriptions for related enums are defined in libavutil/pixfmt.h
save('Pixel Format', (value as String).toUpperCase()); save('Pixel Format', (value as String).toUpperCase());
} }
case Keys.codedHeight:
save('Coded Height', '$value pixels');
case Keys.codedWidth:
save('Coded Width', '$value pixels');
case Keys.colorPrimaries:
save('Color Primaries', (value as String).toUpperCase());
case Keys.colorRange:
save('Color Range', (value as String).toUpperCase());
case Keys.colorSpace:
save('Color Space', (value as String).toUpperCase());
case Keys.colorTransfer:
save('Color Transfer', (value as String).toUpperCase());
case Keys.codecProfileId: case Keys.codecProfileId:
{ {
final profile = int.tryParse(value); final profile = int.tryParse(value);
@ -228,9 +267,9 @@ class VideoMetadataFormatter {
case Codecs.h264: case Codecs.h264:
case Codecs.hevc: case Codecs.hevc:
{ {
final levelString = info[Keys.codecLevel]; final levelValue = info[Keys.codecLevel];
if (levelString != null) { if (levelValue != null) {
final level = int.tryParse(levelString) ?? 0; final level = levelValue is int ? levelValue : int.tryParse(levelValue) ?? 0;
if (codec == Codecs.h264) { if (codec == Codecs.h264) {
profileString = H264.formatProfile(profile, level); profileString = H264.formatProfile(profile, level);
} else { } else {
@ -254,6 +293,8 @@ class VideoMetadataFormatter {
save('Compatible Brands', formattedBrands); save('Compatible Brands', formattedBrands);
case Keys.creationTime: case Keys.creationTime:
save('Creation Time', _formatDate(value)); save('Creation Time', _formatDate(value));
case Keys.dar:
save('Display Aspect Ratio', value);
case Keys.date: case Keys.date:
if (value is String && value != '0') { if (value is String && value != '0') {
final charCount = value.length; final charCount = value.length;
@ -263,10 +304,18 @@ class VideoMetadataFormatter {
save('Duration', _formatDuration(value)); save('Duration', _formatDuration(value));
case Keys.durationMicros: case Keys.durationMicros:
if (value != 0) save('Duration', formatPreciseDuration(Duration(microseconds: value))); if (value != 0) save('Duration', formatPreciseDuration(Duration(microseconds: value)));
case Keys.extraDataSize:
save('Extra Data Size', _formatFilesize(value));
case Keys.fieldOrder:
save('Field Order', value);
case Keys.fpsDen: case Keys.fpsDen:
save('Frame Rate', '${roundToPrecision(info[Keys.fpsNum] / info[Keys.fpsDen], decimals: 3).toString()} FPS'); save('Frame Rate', '${roundToPrecision(info[Keys.fpsNum] / info[Keys.fpsDen], decimals: 3).toString()} FPS');
case Keys.frameCount: case Keys.frameCount:
save('Frame Count', value); save('Frame Count', value);
case Keys.handlerName:
save('Handler Name', value);
case Keys.hasBFrames:
save('Has B-Frames', value);
case Keys.height: case Keys.height:
save('Height', '$value pixels'); save('Height', '$value pixels');
case Keys.language: case Keys.language:
@ -281,6 +330,8 @@ class VideoMetadataFormatter {
save('Media Type', value); save('Media Type', value);
case Keys.minorVersion: case Keys.minorVersion:
if (value != '0') save('Minor Version', value); if (value != '0') save('Minor Version', value);
case Keys.nalLengthSize:
save('NAL Length Size', _formatFilesize(value));
case Keys.quicktimeLocationAccuracyHorizontal: case Keys.quicktimeLocationAccuracyHorizontal:
save('QuickTime Location Horizontal Accuracy', value); save('QuickTime Location Horizontal Accuracy', value);
case Keys.quicktimeCreationDate: case Keys.quicktimeCreationDate:
@ -290,10 +341,16 @@ class VideoMetadataFormatter {
case Keys.quicktimeSoftware: case Keys.quicktimeSoftware:
// redundant with `QuickTime Metadata` directory // redundant with `QuickTime Metadata` directory
break; break;
case Keys.rFrameRate:
save('R Frame Rate', value);
case Keys.rotate: case Keys.rotate:
save('Rotation', '$value°'); save('Rotation', '$value°');
case Keys.sampleFormat:
save('Sample Format', (value as String).toUpperCase());
case Keys.sampleRate: case Keys.sampleRate:
save('Sample Rate', _formatMetric(value, 'Hz')); save('Sample Rate', _formatMetric(value, 'Hz'));
case Keys.sar:
save('Sample Aspect Ratio', value);
case Keys.sarDen: case Keys.sarDen:
final sarNum = info[Keys.sarNum]; final sarNum = info[Keys.sarNum];
final sarDen = info[Keys.sarDen]; final sarDen = info[Keys.sarDen];
@ -303,12 +360,20 @@ class VideoMetadataFormatter {
save('Source OSHash', value); save('Source OSHash', value);
case Keys.startMicros: case Keys.startMicros:
if (value != 0) save('Start', formatPreciseDuration(Duration(microseconds: value))); if (value != 0) save('Start', formatPreciseDuration(Duration(microseconds: value)));
case Keys.startPts:
save('Start PTS', value);
case Keys.startTime:
save('Start', _formatDuration(value));
case Keys.statisticsWritingApp: case Keys.statisticsWritingApp:
save('Stats Writing App', value); save('Stats Writing App', value);
case Keys.statisticsWritingDateUtc: case Keys.statisticsWritingDateUtc:
save('Stats Writing Date', _formatDate(value)); save('Stats Writing Date', _formatDate(value));
case Keys.timeBase:
save('Time Base', value);
case Keys.track: case Keys.track:
if (value != '0') save('Track', value); if (value != '0') save('Track', value);
case Keys.vendorId:
save('Vendor ID', value);
case Keys.width: case Keys.width:
save('Width', '$value pixels'); save('Width', '$value pixels');
case Keys.xiaomiSlowMoment: case Keys.xiaomiSlowMoment:
@ -326,7 +391,12 @@ class VideoMetadataFormatter {
static String _formatBrand(String value) => Mp4.brands[value] ?? value; static String _formatBrand(String value) => Mp4.brands[value] ?? value;
static String _formatChannelLayout(value) => ChannelLayouts.names[value] ?? 'unknown ($value)'; static String _formatChannelLayout(dynamic value) {
if (value is int) {
return ChannelLayouts.names[value] ?? 'unknown ($value)';
}
return '$value';
}
static String _formatCodecName(String value) => _codecNames[value] ?? value.toUpperCase().replaceAll('_', ' '); static String _formatCodecName(String value) => _codecNames[value] ?? value.toUpperCase().replaceAll('_', ' ');
@ -338,28 +408,49 @@ class VideoMetadataFormatter {
return date.toIso8601String(); return date.toIso8601String();
} }
// input example: '00:00:05.408000000' // input example: '00:00:05.408000000' or '5.408000'
static String _formatDuration(String value) { static Duration? _parseDuration(String? value) {
final match = _durationPattern.firstMatch(value); if (value == null) return null;
var match = _durationHmsmPattern.firstMatch(value);
if (match != null) { if (match != null) {
final h = int.tryParse(match.group(1)!); final h = int.tryParse(match.group(1)!);
final m = int.tryParse(match.group(2)!); final m = int.tryParse(match.group(2)!);
final s = int.tryParse(match.group(3)!); final s = int.tryParse(match.group(3)!);
final millis = double.tryParse(match.group(4)!); final millis = double.tryParse(match.group(4)!);
if (h != null && m != null && s != null && millis != null) { if (h != null && m != null && s != null && millis != null) {
return formatPreciseDuration(Duration( return Duration(
hours: h, hours: h,
minutes: m, minutes: m,
seconds: s, seconds: s,
milliseconds: (millis * 1000).toInt(), milliseconds: (millis * 1000).toInt(),
)); );
} }
} }
return value;
}
static String _formatFilesize(String value) { match = _durationSmPattern.firstMatch(value);
final size = int.tryParse(value); if (match != null) {
final s = int.tryParse(match.group(1)!);
final millis = double.tryParse(match.group(2)!);
if (s != null && millis != null) {
return Duration(
seconds: s,
milliseconds: (millis * 1000).toInt(),
);
}
}
return null;
}
// input example: '00:00:05.408000000' or '5.408000'
static String _formatDuration(String value) {
final duration = _parseDuration(value);
return duration != null ? formatPreciseDuration(duration) : value;
}
static String _formatFilesize(dynamic value) {
final size = value is int ? value : int.tryParse(value);
return size != null ? formatFileSize('en_US', size) : value; return size != null ? formatFileSize('en_US', size) : value;
} }

View file

@ -20,7 +20,7 @@ import 'package:aves_report_platform/aves_report_platform.dart';
import 'package:aves_services/aves_services.dart'; import 'package:aves_services/aves_services.dart';
import 'package:aves_services_platform/aves_services_platform.dart'; import 'package:aves_services_platform/aves_services_platform.dart';
import 'package:aves_video/aves_video.dart'; import 'package:aves_video/aves_video.dart';
import 'package:aves_video_ijk/aves_video_ijk.dart'; import 'package:aves_video_ffmpeg/aves_video_ffmpeg.dart';
import 'package:aves_video_mpv/aves_video_mpv.dart'; import 'package:aves_video_mpv/aves_video_mpv.dart';
import 'package:get_it/get_it.dart'; import 'package:get_it/get_it.dart';
import 'package:path/path.dart' as p; import 'package:path/path.dart' as p;
@ -56,7 +56,7 @@ void initPlatformServices() {
getIt.registerLazySingleton<AvesAvailability>(LiveAvesAvailability.new); getIt.registerLazySingleton<AvesAvailability>(LiveAvesAvailability.new);
getIt.registerLazySingleton<MetadataDb>(SqfliteMetadataDb.new); getIt.registerLazySingleton<MetadataDb>(SqfliteMetadataDb.new);
getIt.registerLazySingleton<AvesVideoControllerFactory>(MpvVideoControllerFactory.new); getIt.registerLazySingleton<AvesVideoControllerFactory>(MpvVideoControllerFactory.new);
getIt.registerLazySingleton<AvesVideoMetadataFetcher>(IjkVideoMetadataFetcher.new); getIt.registerLazySingleton<AvesVideoMetadataFetcher>(FfmpegVideoMetadataFetcher.new);
getIt.registerLazySingleton<AppService>(PlatformAppService.new); getIt.registerLazySingleton<AppService>(PlatformAppService.new);
getIt.registerLazySingleton<DeviceService>(PlatformDeviceService.new); getIt.registerLazySingleton<DeviceService>(PlatformDeviceService.new);

View file

@ -89,7 +89,7 @@ class GridThemeData {
if (located && showLocated) LocationIcon.located(), if (located && showLocated) LocationIcon.located(),
if (!located && showUnlocated) LocationIcon.unlocated(), if (!located && showUnlocated) LocationIcon.unlocated(),
if (entry.rating != 0 && showRating) RatingIcon(entry: entry), if (entry.rating != 0 && showRating) RatingIcon(entry: entry),
if (entry.isVideo) if (entry.isPureVideo)
VideoIcon(entry: entry) VideoIcon(entry: entry)
else if (entry.isAnimated) else if (entry.isAnimated)
const AnimatedImageIcon() const AnimatedImageIcon()

View file

@ -80,18 +80,18 @@ class EntryActionDelegate with FeedbackMixin, PermissionAwareMixin, SizeAwareMix
case EntryAction.flip: case EntryAction.flip:
return targetEntry.canFlip; return targetEntry.canFlip;
case EntryAction.convert: case EntryAction.convert:
return canWrite && !targetEntry.isVideo; return canWrite && !targetEntry.isPureVideo;
case EntryAction.print: case EntryAction.print:
return !targetEntry.isVideo; return !targetEntry.isPureVideo;
case EntryAction.openMap: case EntryAction.openMap:
return !settings.useTvLayout && targetEntry.hasGps; return !settings.useTvLayout && targetEntry.hasGps;
case EntryAction.viewSource: case EntryAction.viewSource:
return targetEntry.isSvg; return targetEntry.isSvg;
case EntryAction.videoCaptureFrame: case EntryAction.videoCaptureFrame:
return canWrite && targetEntry.isVideo; return canWrite && targetEntry.isPureVideo;
case EntryAction.lockViewer: case EntryAction.lockViewer:
case EntryAction.videoToggleMute: case EntryAction.videoToggleMute:
return !settings.useTvLayout && targetEntry.isVideo; return !settings.useTvLayout && targetEntry.isPureVideo;
case EntryAction.videoSelectStreams: case EntryAction.videoSelectStreams:
case EntryAction.videoSetSpeed: case EntryAction.videoSetSpeed:
case EntryAction.videoSettings: case EntryAction.videoSettings:
@ -99,7 +99,7 @@ class EntryActionDelegate with FeedbackMixin, PermissionAwareMixin, SizeAwareMix
case EntryAction.videoReplay10: case EntryAction.videoReplay10:
case EntryAction.videoSkip10: case EntryAction.videoSkip10:
case EntryAction.openVideo: case EntryAction.openVideo:
return targetEntry.isVideo; return targetEntry.isPureVideo;
case EntryAction.rotateScreen: case EntryAction.rotateScreen:
return !settings.useTvLayout && settings.isRotationLocked; return !settings.useTvLayout && settings.isRotationLocked;
case EntryAction.addShortcut: case EntryAction.addShortcut:

View file

@ -417,7 +417,7 @@ class _EntryViewerStackState extends State<EntryViewerStack> with EntryViewContr
final targetEntry = pageEntry ?? mainEntry; final targetEntry = pageEntry ?? mainEntry;
Widget? child; Widget? child;
// a 360 video is both a video and a panorama but only the video controls are displayed // a 360 video is both a video and a panorama but only the video controls are displayed
if (targetEntry.isVideo) { if (targetEntry.isPureVideo) {
child = Selector<VideoConductor, AvesVideoController?>( child = Selector<VideoConductor, AvesVideoController?>(
selector: (context, vc) => vc.getController(targetEntry), selector: (context, vc) => vc.getController(targetEntry),
builder: (context, videoController, child) => VideoControlOverlay( builder: (context, videoController, child) => VideoControlOverlay(

View file

@ -125,8 +125,8 @@ class _BasicSectionState extends State<BasicSection> {
if (entry.isMotionPhoto) TypeFilter.motionPhoto, if (entry.isMotionPhoto) TypeFilter.motionPhoto,
if (entry.isRaw) TypeFilter.raw, if (entry.isRaw) TypeFilter.raw,
if (entry.isImage && entry.is360) TypeFilter.panorama, if (entry.isImage && entry.is360) TypeFilter.panorama,
if (entry.isVideo && entry.is360) TypeFilter.sphericalVideo, if (entry.isPureVideo && entry.is360) TypeFilter.sphericalVideo,
if (entry.isVideo && !entry.is360) MimeFilter.video, if (entry.isPureVideo && !entry.is360) MimeFilter.video,
if (date != null) DateFilter(DateLevel.ymd, date), if (date != null) DateFilter(DateLevel.ymd, date),
if (album != null) AlbumFilter(album, collection?.source.getAlbumDisplayName(context, album)), if (album != null) AlbumFilter(album, collection?.source.getAlbumDisplayName(context, album)),
if (entry.rating != 0) RatingFilter(entry.rating), if (entry.rating != 0) RatingFilter(entry.rating),

View file

@ -126,7 +126,7 @@ mixin EntryViewControllerMixin<T extends StatefulWidget> on State<T> {
final controller = context.read<VideoConductor>().getOrCreateController(entry); final controller = context.read<VideoConductor>().getOrCreateController(entry);
setState(() {}); setState(() {});
if (videoAutoPlayEnabled) { if (videoAutoPlayEnabled || entry.isAnimated) {
final resumeTimeMillis = await controller.getResumeTime(context); final resumeTimeMillis = await controller.getResumeTime(context);
await _autoPlayVideo(controller, () => entry == entryNotifier.value, resumeTimeMillis: resumeTimeMillis); await _autoPlayVideo(controller, () => entry == entryNotifier.value, resumeTimeMillis: resumeTimeMillis);
} }
@ -198,7 +198,7 @@ mixin EntryViewControllerMixin<T extends StatefulWidget> on State<T> {
// so we play after a delay for increased stability // so we play after a delay for increased stability
await Future.delayed(const Duration(milliseconds: 300) * timeDilation); await Future.delayed(const Duration(milliseconds: 300) * timeDilation);
if (!videoController.isMuted && shouldAutoPlayVideoMuted) { if (!videoController.isMuted && (videoController.entry.isAnimated || shouldAutoPlayVideoMuted)) {
await videoController.mute(true); await videoController.mute(true);
} }

View file

@ -201,12 +201,13 @@ class _EntryPageViewState extends State<EntryPageView> with SingleTickerProvider
valueListenable: videoController.sarNotifier, valueListenable: videoController.sarNotifier,
builder: (context, sar, child) { builder: (context, sar, child) {
final videoDisplaySize = entry.videoDisplaySize(sar); final videoDisplaySize = entry.videoDisplaySize(sar);
final isPureVideo = entry.isPureVideo;
return Selector<Settings, Tuple3<bool, bool, bool>>( return Selector<Settings, Tuple3<bool, bool, bool>>(
selector: (context, s) => Tuple3( selector: (context, s) => Tuple3(
s.videoGestureDoubleTapTogglePlay, isPureVideo && s.videoGestureDoubleTapTogglePlay,
s.videoGestureSideDoubleTapSeek, isPureVideo && s.videoGestureSideDoubleTapSeek,
s.videoGestureVerticalDragBrightnessVolume, isPureVideo && s.videoGestureVerticalDragBrightnessVolume,
), ),
builder: (context, s, child) { builder: (context, s, child) {
final playGesture = s.item1; final playGesture = s.item1;

View file

@ -173,7 +173,7 @@ class _EntryEditorState extends State<EntryEditor> with EntryViewControllerMixin
final targetEntry = pageEntry ?? mainEntry; final targetEntry = pageEntry ?? mainEntry;
Widget? child; Widget? child;
// a 360 video is both a video and a panorama but only the video controls are displayed // a 360 video is both a video and a panorama but only the video controls are displayed
if (targetEntry.isVideo) { if (targetEntry.isPureVideo) {
child = Selector<VideoConductor, AvesVideoController?>( child = Selector<VideoConductor, AvesVideoController?>(
selector: (context, vc) => vc.getController(targetEntry), selector: (context, vc) => vc.getController(targetEntry),
builder: (context, videoController, child) => VideoControlOverlay( builder: (context, videoController, child) => VideoControlOverlay(

View file

@ -15,6 +15,8 @@ mixin AvesEntryBase {
int? get durationMillis; int? get durationMillis;
bool get isAnimated;
int get rotationDegrees; int get rotationDegrees;
Size get displaySize; Size get displaySize;

View file

@ -6,41 +6,66 @@ class Keys {
static const androidManufacturer = 'com.android.manufacturer'; static const androidManufacturer = 'com.android.manufacturer';
static const androidModel = 'com.android.model'; static const androidModel = 'com.android.model';
static const androidVersion = 'com.android.version'; static const androidVersion = 'com.android.version';
static const avgFrameRate = 'avg_frame_rate';
static const bps = 'bps'; static const bps = 'bps';
static const bitrate = 'bitrate'; static const bitrate = 'bitrate';
static const bitsPerRawSample = 'bits_per_raw_sample';
static const byteCount = 'number_of_bytes'; static const byteCount = 'number_of_bytes';
static const channelLayout = 'channel_layout'; static const channelLayout = 'channel_layout';
static const chromaLocation = 'chroma_location';
static const codecLevel = 'codec_level'; static const codecLevel = 'codec_level';
static const codecName = 'codec_name'; static const codecName = 'codec_name';
static const codecPixelFormat = 'codec_pixel_format'; static const codecPixelFormat = 'codec_pixel_format';
static const codecProfileId = 'codec_profile_id'; static const codecProfileId = 'codec_profile_id';
static const codecTag = 'codec_tag';
static const codecTagString = 'codec_tag_string';
static const codedHeight = 'coded_height';
static const codedWidth = 'coded_width';
static const colorPrimaries = 'color_primaries';
static const colorRange = 'color_range';
static const colorSpace = 'color_space';
static const colorTransfer = 'color_transfer';
static const compatibleBrands = 'compatible_brands'; static const compatibleBrands = 'compatible_brands';
static const creationTime = 'creation_time'; static const creationTime = 'creation_time';
static const dar = 'display_aspect_ratio';
static const date = 'date'; static const date = 'date';
static const disposition = 'disposition';
static const duration = 'duration'; static const duration = 'duration';
static const durationMicros = 'duration_us'; static const durationMicros = 'duration_us';
static const durationTs = 'duration_ts';
static const encoder = 'encoder'; static const encoder = 'encoder';
static const extraDataSize = 'extradata_size';
static const fieldOrder = 'field_order';
static const filename = 'filename'; static const filename = 'filename';
static const fpsDen = 'fps_den'; static const fpsDen = 'fps_den';
static const fpsNum = 'fps_num'; static const fpsNum = 'fps_num';
static const frameCount = 'number_of_frames'; static const frameCount = 'number_of_frames';
static const handlerName = 'handler_name'; static const handlerName = 'handler_name';
static const hasBFrames = 'has_b_frames';
static const height = 'height'; static const height = 'height';
static const index = 'index'; static const index = 'index';
static const isAvc = 'is_avc';
static const language = 'language'; static const language = 'language';
static const location = 'location'; static const location = 'location';
static const majorBrand = 'major_brand'; static const majorBrand = 'major_brand';
static const mediaFormat = 'format'; static const mediaFormat = 'format';
static const mediaType = 'media_type'; static const mediaType = 'media_type';
static const minorVersion = 'minor_version'; static const minorVersion = 'minor_version';
static const nalLengthSize = 'nal_length_size';
static const probeScore = 'probe_score';
static const programCount = 'nb_programs';
static const quicktimeCreationDate = 'com.apple.quicktime.creationdate'; static const quicktimeCreationDate = 'com.apple.quicktime.creationdate';
static const quicktimeLocationAccuracyHorizontal = 'com.apple.quicktime.location.accuracy.horizontal'; static const quicktimeLocationAccuracyHorizontal = 'com.apple.quicktime.location.accuracy.horizontal';
static const quicktimeLocationIso6709 = 'com.apple.quicktime.location.iso6709'; static const quicktimeLocationIso6709 = 'com.apple.quicktime.location.iso6709';
static const quicktimeMake = 'com.apple.quicktime.make'; static const quicktimeMake = 'com.apple.quicktime.make';
static const quicktimeModel = 'com.apple.quicktime.model'; static const quicktimeModel = 'com.apple.quicktime.model';
static const quicktimeSoftware = 'com.apple.quicktime.software'; static const quicktimeSoftware = 'com.apple.quicktime.software';
static const refs = 'refs';
static const rFrameRate = 'r_frame_rate';
static const rotate = 'rotate'; static const rotate = 'rotate';
static const sampleFormat = 'sample_fmt';
static const sampleRate = 'sample_rate'; static const sampleRate = 'sample_rate';
static const sar = 'sample_aspect_ratio';
static const sarDen = 'sar_den'; static const sarDen = 'sar_den';
static const sarNum = 'sar_num'; static const sarNum = 'sar_num';
static const selectedAudioStream = 'audio'; static const selectedAudioStream = 'audio';
@ -48,15 +73,20 @@ class Keys {
static const selectedVideoStream = 'video'; static const selectedVideoStream = 'video';
static const sourceOshash = 'source_oshash'; static const sourceOshash = 'source_oshash';
static const startMicros = 'start_us'; static const startMicros = 'start_us';
static const startPts = 'start_pts';
static const startTime = 'start_time';
static const statisticsTags = '_statistics_tags'; static const statisticsTags = '_statistics_tags';
static const statisticsWritingApp = '_statistics_writing_app'; static const statisticsWritingApp = '_statistics_writing_app';
static const statisticsWritingDateUtc = '_statistics_writing_date_utc'; static const statisticsWritingDateUtc = '_statistics_writing_date_utc';
static const streamCount = 'nb_streams';
static const streams = 'streams'; static const streams = 'streams';
static const tbrDen = 'tbr_den'; static const tbrDen = 'tbr_den';
static const tbrNum = 'tbr_num'; static const tbrNum = 'tbr_num';
static const streamType = 'type'; static const streamType = 'type';
static const title = 'title'; static const title = 'title';
static const timeBase = 'time_base';
static const track = 'track'; static const track = 'track';
static const vendorId = 'vendor_id';
static const width = 'width'; static const width = 'width';
static const xiaomiSlowMoment = 'com.xiaomi.slow_moment'; static const xiaomiSlowMoment = 'com.xiaomi.slow_moment';
} }

View file

@ -3,11 +3,14 @@ import 'package:aves_model/aves_model.dart';
extension ExtraVideoLoopMode on VideoLoopMode { extension ExtraVideoLoopMode on VideoLoopMode {
static const shortVideoThreshold = Duration(seconds: 30); static const shortVideoThreshold = Duration(seconds: 30);
bool shouldLoop(int? durationMillis) { bool shouldLoop(AvesEntryBase entry) {
if (entry.isAnimated) return true;
switch (this) { switch (this) {
case VideoLoopMode.never: case VideoLoopMode.never:
return false; return false;
case VideoLoopMode.shortOnly: case VideoLoopMode.shortOnly:
final durationMillis = entry.durationMillis;
return durationMillis != null ? durationMillis < shortVideoThreshold.inMilliseconds : false; return durationMillis != null ? durationMillis < shortVideoThreshold.inMilliseconds : false;
case VideoLoopMode.always: case VideoLoopMode.always:
return true; return true;

30
plugins/aves_video_ffmpeg/.gitignore vendored Normal file
View file

@ -0,0 +1,30 @@
# Miscellaneous
*.class
*.log
*.pyc
*.swp
.DS_Store
.atom/
.buildlog/
.history
.svn/
migrate_working_dir/
# IntelliJ related
*.iml
*.ipr
*.iws
.idea/
# The .vscode folder contains launch configuration and tasks you configure in
# VS Code which you may wish to be included in version control, so this line
# is commented out by default.
#.vscode/
# Flutter/Dart/Pub related
# Libraries should not include pubspec.lock, per https://dart.dev/guides/libraries/private-files#pubspeclock.
#/pubspec.lock
**/doc/api/
.dart_tool/
.packages
build/

View file

@ -0,0 +1,10 @@
# This file tracks properties of this Flutter project.
# Used by Flutter tool to assess capabilities and perform upgrades etc.
#
# This file should be version controlled and should not be manually edited.
version:
revision: f468f3366c26a5092eb964a230ce7892fda8f2f8
channel: stable
project_type: package

View file

@ -0,0 +1 @@
include: ../../analysis_options.yaml

View file

@ -0,0 +1,3 @@
library aves_video_ffmpeg;
export 'src/metadata.dart';

View file

@ -0,0 +1,146 @@
import 'package:aves_model/aves_model.dart';
import 'package:aves_video/aves_video.dart';
import 'package:ffmpeg_kit_flutter/ffmpeg_kit_config.dart';
import 'package:ffmpeg_kit_flutter/ffprobe_kit.dart';
import 'package:flutter/foundation.dart';
/// Fetches video metadata via FFprobe (FFmpegKit) and normalizes the probe
/// output into the app's metadata key space (cf. [Keys]).
class FfmpegVideoMetadataFetcher extends AvesVideoMetadataFetcher {
  static const chaptersKey = 'chapters';
  static const formatKey = 'format';
  static const streamsKey = 'streams';

  @override
  void init() {}

  /// Probes the media at the entry URI and returns a normalized property map.
  ///
  /// Returns an empty map when the URI cannot be resolved or probing fails.
  @override
  Future<Map> getMetadata(AvesEntryBase entry) async {
    var uri = entry.uri;
    if (uri.startsWith('content://')) {
      // FFmpegKit cannot read content URIs directly; convert to a SAF parameter first.
      final safUri = await FFmpegKitConfig.getSafParameterForRead(uri);
      if (safUri == null) {
        debugPrint('failed to get SAF URI for entry=$entry');
        return {};
      }
      uri = safUri;
    }

    final session = await FFprobeKit.getMediaInformation(uri);
    final information = session.getMediaInformation();
    if (information == null) {
      final failStackTrace = await session.getFailStackTrace();
      final output = await session.getOutput();
      debugPrint('failed to get video metadata for entry=$entry, failStackTrace=$failStackTrace, output=$output');
      return {};
    }

    final props = information.getAllProperties();
    if (props == null) return {};

    // drop the chapter list when empty, keep it otherwise
    final chapters = props[chaptersKey];
    if (chapters is List && chapters.isEmpty) {
      props.remove(chaptersKey);
    }

    // flatten the `format` group into the top-level map
    final format = props.remove(formatKey);
    if (format is Map) {
      format.remove(Keys.filename);
      format.remove('size');
      _normalizeGroup(format);
      props.addAll(format);
    }

    final streams = props[streamsKey];
    if (streams is List) {
      for (final stream in streams) {
        if (stream is Map) {
          _normalizeStream(stream);
        }
      }
    }
    return props;
  }

  /// Normalizes a single stream group in place:
  /// frame rate, disposition flags, stream id and stream type.
  void _normalizeStream(Map stream) {
    _normalizeGroup(stream);

    // split `avg_frame_rate` (e.g. `30000/1001`) into numerator/denominator keys
    final fps = stream[Keys.avgFrameRate];
    if (fps is String) {
      final parts = fps.split('/');
      if (parts.length == 2) {
        // do not name these `num`/`den`: `num` would shadow the built-in type
        final numerator = int.tryParse(parts[0]);
        final denominator = int.tryParse(parts[1]);
        if (numerator != null && denominator != null) {
          if (denominator > 0) {
            stream[Keys.fpsNum] = numerator;
            stream[Keys.fpsDen] = denominator;
          }
          // remove the raw key even for a `x/0` rate, which carries no information
          stream.remove(Keys.avgFrameRate);
        }
      }
    }

    // flatten the disposition flag map to a comma-separated list of the set flags
    final disposition = stream[Keys.disposition];
    if (disposition is Map) {
      disposition.removeWhere((key, value) => value == 0);
      stream[Keys.disposition] = disposition.keys.join(', ');
    }

    // convert the stream `id` to a 0-based index
    // NOTE(review): assumes FFprobe stream ids are 1-based — confirm for all containers
    final idValue = stream['id'];
    if (idValue is String) {
      final id = int.tryParse(idValue);
      if (id != null) {
        stream[Keys.index] = id - 1;
        stream.remove('id');
      }
    }

    if (stream[Keys.streamType] == 'data') {
      stream[Keys.streamType] = MediaStreamTypes.metadata;
    }
  }

  /// Renames FFprobe keys to the app's metadata keys, flattens `tags`,
  /// and drops keys whose values are zero/unknown placeholders.
  void _normalizeGroup(Map<dynamic, dynamic> stream) {
    void replaceKey(k1, k2) {
      final v = stream.remove(k1);
      if (v != null) {
        stream[k2] = v;
      }
    }

    replaceKey('bit_rate', Keys.bitrate);
    replaceKey('codec_type', Keys.streamType);
    replaceKey('format_name', Keys.mediaFormat);
    replaceKey('level', Keys.codecLevel);
    replaceKey('nb_frames', Keys.frameCount);
    replaceKey('pix_fmt', Keys.codecPixelFormat);
    replaceKey('profile', Keys.codecProfileId);

    final tags = stream.remove('tags');
    if (tags is Map) {
      stream.addAll(tags);
    }

    // keys whose placeholder values (0, `0/0`, `unknown`, `[0][0][0][0]`) carry no information
    const removableKeys = <String>{
      Keys.codecProfileId,
      Keys.rFrameRate,
      Keys.hasBFrames,
      Keys.startPts,
      Keys.startTime,
      Keys.vendorId,
      'bits_per_sample',
      'closed_captions',
      'codec_long_name',
      'film_grain',
    };
    for (final key in removableKeys) {
      final value = stream[key];
      switch (value) {
        case final num v:
          if (v == 0) {
            stream.remove(key);
          }
        case final String v:
          if (double.tryParse(v) == 0 || v == '0/0' || v == 'unknown' || v == '[0][0][0][0]') {
            stream.remove(key);
          }
      }
    }
  }
}

View file

@ -0,0 +1,134 @@
# Generated by pub
# See https://dart.dev/tools/pub/glossary#lockfile
packages:
aves_model:
dependency: "direct main"
description:
path: "../aves_model"
relative: true
source: path
version: "0.0.1"
aves_utils:
dependency: transitive
description:
path: "../aves_utils"
relative: true
source: path
version: "0.0.1"
aves_video:
dependency: "direct main"
description:
path: "../aves_video"
relative: true
source: path
version: "0.0.1"
characters:
dependency: transitive
description:
name: characters
sha256: "04a925763edad70e8443c99234dc3328f442e811f1d8fd1a72f1c8ad0f69a605"
url: "https://pub.dev"
source: hosted
version: "1.3.0"
collection:
dependency: transitive
description:
name: collection
sha256: "4a07be6cb69c84d677a6c3096fcf960cc3285a8330b4603e0d463d15d9bd934c"
url: "https://pub.dev"
source: hosted
version: "1.17.1"
equatable:
dependency: transitive
description:
name: equatable
sha256: c2b87cb7756efdf69892005af546c56c0b5037f54d2a88269b4f347a505e3ca2
url: "https://pub.dev"
source: hosted
version: "2.0.5"
ffmpeg_kit_flutter:
dependency: "direct main"
description:
path: "flutter/flutter"
ref: development-flutter
resolved-ref: "497bda9b0bbd4fb94b3d578fa0c40632b3793de7"
url: "https://github.com/arthenica/ffmpeg-kit.git"
source: git
version: "5.1.0"
ffmpeg_kit_flutter_platform_interface:
dependency: transitive
description:
name: ffmpeg_kit_flutter_platform_interface
sha256: addf046ae44e190ad0101b2fde2ad909a3cd08a2a109f6106d2f7048b7abedee
url: "https://pub.dev"
source: hosted
version: "0.2.1"
flutter:
dependency: "direct main"
description: flutter
source: sdk
version: "0.0.0"
flutter_lints:
dependency: "direct dev"
description:
name: flutter_lints
sha256: "2118df84ef0c3ca93f96123a616ae8540879991b8b57af2f81b76a7ada49b2a4"
url: "https://pub.dev"
source: hosted
version: "2.0.2"
js:
dependency: transitive
description:
name: js
sha256: f2c445dce49627136094980615a031419f7f3eb393237e4ecd97ac15dea343f3
url: "https://pub.dev"
source: hosted
version: "0.6.7"
lints:
dependency: transitive
description:
name: lints
sha256: "0a217c6c989d21039f1498c3ed9f3ed71b354e69873f13a8dfc3c9fe76f1b452"
url: "https://pub.dev"
source: hosted
version: "2.1.1"
material_color_utilities:
dependency: transitive
description:
name: material_color_utilities
sha256: d92141dc6fe1dad30722f9aa826c7fbc896d021d792f80678280601aff8cf724
url: "https://pub.dev"
source: hosted
version: "0.2.0"
meta:
dependency: transitive
description:
name: meta
sha256: "3c74dbf8763d36539f114c799d8a2d87343b5067e9d796ca22b5eb8437090ee3"
url: "https://pub.dev"
source: hosted
version: "1.9.1"
plugin_platform_interface:
dependency: transitive
description:
name: plugin_platform_interface
sha256: "43798d895c929056255600343db8f049921cbec94d31ec87f1dc5c16c01935dd"
url: "https://pub.dev"
source: hosted
version: "2.1.5"
sky_engine:
dependency: transitive
description: flutter
source: sdk
version: "0.0.99"
vector_math:
dependency: transitive
description:
name: vector_math
sha256: "80b3257d1492ce4d091729e3a67a60407d227c27241d6927be0130c98e741803"
url: "https://pub.dev"
source: hosted
version: "2.1.4"
sdks:
dart: ">=3.0.0 <4.0.0"
flutter: ">=2.0.0"

View file

@ -0,0 +1,24 @@
name: aves_video_ffmpeg
version: 0.0.1
publish_to: none
environment:
sdk: ">=3.0.0 <4.0.0"
dependencies:
flutter:
sdk: flutter
aves_model:
path: ../aves_model
aves_video:
path: ../aves_video
ffmpeg_kit_flutter:
git:
url: https://github.com/arthenica/ffmpeg-kit.git
ref: development-flutter
path: flutter/flutter
dev_dependencies:
flutter_lints:
flutter:

View file

@ -160,7 +160,7 @@ class IjkVideoController extends AvesVideoController {
_macroBlockCrop = Offset(s.width, s.height); _macroBlockCrop = Offset(s.width, s.height);
} }
final loopEnabled = settings.videoLoopMode.shouldLoop(entry.durationMillis); final loopEnabled = settings.videoLoopMode.shouldLoop(entry);
// `fastseek`: enable fast, but inaccurate seeks for some formats // `fastseek`: enable fast, but inaccurate seeks for some formats
// in practice the flag seems ineffective, but harmless too // in practice the flag seems ineffective, but harmless too

View file

@ -95,7 +95,7 @@ class MpvVideoController extends AvesVideoController {
} }
Future<void> _applyLoop() async { Future<void> _applyLoop() async {
final loopEnabled = settings.videoLoopMode.shouldLoop(entry.durationMillis); final loopEnabled = settings.videoLoopMode.shouldLoop(entry);
await _instance.setPlaylistMode(loopEnabled ? PlaylistMode.single : PlaylistMode.none); await _instance.setPlaylistMode(loopEnabled ? PlaylistMode.single : PlaylistMode.none);
} }

View file

@ -23,34 +23,4 @@ dependencies:
dev_dependencies: dev_dependencies:
flutter_lints: flutter_lints:
#dependency_overrides:
# media_kit:
# path: ../../../media_kit/media_kit
# media_kit_video:
# path: ../../../media_kit/media_kit_video
# media_kit_native_event_loop:
# path: ../../../media_kit/media_kit_native_event_loop
# media_kit_libs_android_video:
# path: ../../../media_kit/media_kit_libs_android_video
# media_kit:
# git:
# url: https://github.com/alexmercerind/media_kit
# ref: main
# path: media_kit
# media_kit_video:
# git:
# url: https://github.com/alexmercerind/media_kit
# ref: main
# path: media_kit_video
# media_kit_native_event_loop:
# git:
# url: https://github.com/alexmercerind/media_kit
# ref: main
# path: media_kit_native_event_loop
# media_kit_libs_android_video:
# git:
# url: https://github.com/alexmercerind/media_kit
# ref: main
# path: libs/android/media_kit_libs_android_video
flutter: flutter:

View file

@ -126,10 +126,10 @@ packages:
relative: true relative: true
source: path source: path
version: "0.0.1" version: "0.0.1"
aves_video_ijk: aves_video_ffmpeg:
dependency: "direct main" dependency: "direct main"
description: description:
path: "plugins/aves_video_ijk" path: "plugins/aves_video_ffmpeg"
relative: true relative: true
source: path source: path
version: "0.0.1" version: "0.0.1"
@ -351,15 +351,23 @@ packages:
url: "https://pub.dev" url: "https://pub.dev"
source: hosted source: hosted
version: "2.0.2" version: "2.0.2"
fijkplayer: ffmpeg_kit_flutter:
dependency: transitive dependency: transitive
description: description:
path: "." path: "flutter/flutter"
ref: aves ref: development-flutter
resolved-ref: "935a2d86ebf45fbdbaf8b4a0921d5eaea87410d6" resolved-ref: "497bda9b0bbd4fb94b3d578fa0c40632b3793de7"
url: "https://github.com/deckerst/fijkplayer.git" url: "https://github.com/arthenica/ffmpeg-kit.git"
source: git source: git
version: "0.10.0" version: "5.1.0"
ffmpeg_kit_flutter_platform_interface:
dependency: transitive
description:
name: ffmpeg_kit_flutter_platform_interface
sha256: addf046ae44e190ad0101b2fde2ad909a3cd08a2a109f6106d2f7048b7abedee
url: "https://pub.dev"
source: hosted
version: "0.2.1"
file: file:
dependency: transitive dependency: transitive
description: description:

View file

@ -40,8 +40,10 @@ dependencies:
path: plugins/aves_services_google path: plugins/aves_services_google
aves_video: aves_video:
path: plugins/aves_video path: plugins/aves_video
aves_video_ijk: # aves_video_ijk:
path: plugins/aves_video_ijk # path: plugins/aves_video_ijk
aves_video_ffmpeg:
path: plugins/aves_video_ffmpeg
aves_video_mpv: aves_video_mpv:
path: plugins/aves_video_mpv path: plugins/aves_video_mpv
aves_ui: aves_ui: