Merge branch 'develop'

This commit is contained in:
Thibault Deckers 2025-01-13 10:45:04 +01:00
commit 94ad4c01f4
61 changed files with 2783 additions and 405 deletions

View file

@ -17,7 +17,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Harden Runner
uses: step-security/harden-runner@0080882f6c36860b6ba35c610c98ce87d4e2f26f # v2.10.2
uses: step-security/harden-runner@c95a14d0e5bab51a9f56296a4eb0e416910cd350 # v2.10.3
with:
egress-policy: audit

View file

@ -18,7 +18,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Harden Runner
uses: step-security/harden-runner@0080882f6c36860b6ba35c610c98ce87d4e2f26f # v2.10.2
uses: step-security/harden-runner@c95a14d0e5bab51a9f56296a4eb0e416910cd350 # v2.10.3
with:
egress-policy: audit
@ -52,7 +52,7 @@ jobs:
build-mode: manual
steps:
- name: Harden Runner
uses: step-security/harden-runner@0080882f6c36860b6ba35c610c98ce87d4e2f26f # v2.10.2
uses: step-security/harden-runner@c95a14d0e5bab51a9f56296a4eb0e416910cd350 # v2.10.3
with:
egress-policy: audit

View file

@ -18,7 +18,7 @@ jobs:
id-token: write
steps:
- name: Harden Runner
uses: step-security/harden-runner@0080882f6c36860b6ba35c610c98ce87d4e2f26f # v2.10.2
uses: step-security/harden-runner@c95a14d0e5bab51a9f56296a4eb0e416910cd350 # v2.10.3
with:
egress-policy: audit
@ -87,7 +87,7 @@ jobs:
token: ${{ secrets.GITHUB_TOKEN }}
- name: Upload app bundle
uses: actions/upload-artifact@6f51ac03b9356f520e9adb1b1b7802705f340c2b # v4.5.0
uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 # v4.6.0
with:
name: appbundle
path: outputs/app-play-release.aab
@ -98,7 +98,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- name: Harden Runner
uses: step-security/harden-runner@0080882f6c36860b6ba35c610c98ce87d4e2f26f # v2.10.2
uses: step-security/harden-runner@c95a14d0e5bab51a9f56296a4eb0e416910cd350 # v2.10.3
with:
egress-policy: audit

View file

@ -31,7 +31,7 @@ jobs:
steps:
- name: Harden Runner
uses: step-security/harden-runner@0080882f6c36860b6ba35c610c98ce87d4e2f26f # v2.10.2
uses: step-security/harden-runner@c95a14d0e5bab51a9f56296a4eb0e416910cd350 # v2.10.3
with:
egress-policy: audit
@ -63,7 +63,7 @@ jobs:
# Upload the results as artifacts (optional). Commenting out will disable uploads of run results in SARIF
# format to the repository Actions tab.
- name: "Upload artifact"
uses: actions/upload-artifact@6f51ac03b9356f520e9adb1b1b7802705f340c2b # v4.5.0
uses: actions/upload-artifact@65c4c4a1ddee5b72f698fdd19549f0f0fb45cf08 # v4.6.0
with:
name: SARIF file
path: results.sarif

View file

@ -4,6 +4,23 @@ All notable changes to this project will be documented in this file.
## <a id="unreleased"></a>[Unreleased]
## <a id="v1.12.2"></a>[v1.12.2] - 2025-01-13
### Added
- DDM coordinate format option
### Changed
- Video: use `media-kit` instead of `ffmpeg-kit` for metadata fetch
- Info: show video chapters
- Accessibility: apply system "touch and hold delay" setting
### Fixed
- crash when cataloguing some videos
- switching to PiP for any inactive app state
## <a id="v1.12.1"></a>[v1.12.1] - 2025-01-05
### Added

View file

@ -6,6 +6,7 @@ import android.content.res.Configuration
import android.os.Build
import android.provider.Settings
import android.util.Log
import android.view.ViewConfiguration
import android.view.accessibility.AccessibilityManager
import deckers.thibault.aves.channel.calls.Coresult.Companion.safe
import deckers.thibault.aves.utils.LogUtils
@ -17,6 +18,7 @@ class AccessibilityHandler(private val contextWrapper: ContextWrapper) : MethodC
override fun onMethodCall(call: MethodCall, result: MethodChannel.Result) {
when (call.method) {
"areAnimationsRemoved" -> safe(call, result, ::areAnimationsRemoved)
"getLongPressTimeout" -> safe(call, result, ::getLongPressTimeout)
"hasRecommendedTimeouts" -> safe(call, result, ::hasRecommendedTimeouts)
"getRecommendedTimeoutMillis" -> safe(call, result, ::getRecommendedTimeoutMillis)
"shouldUseBoldFont" -> safe(call, result, ::shouldUseBoldFont)
@ -34,6 +36,10 @@ class AccessibilityHandler(private val contextWrapper: ContextWrapper) : MethodC
result.success(removed)
}
// Returns the current system long-press timeout in milliseconds via
// `ViewConfiguration.getLongPressTimeout()`. Exposed over the accessibility
// method channel — presumably so the Flutter side can honor the system
// "touch and hold delay" setting (cf. changelog) — TODO confirm with caller.
private fun getLongPressTimeout(@Suppress("unused_parameter") call: MethodCall, result: MethodChannel.Result) {
result.success(ViewConfiguration.getLongPressTimeout())
}
private fun hasRecommendedTimeouts(@Suppress("unused_parameter") call: MethodCall, result: MethodChannel.Result) {
result.success(Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q)
}

View file

@ -7,6 +7,7 @@ import android.os.Handler
import android.os.Looper
import android.provider.Settings
import android.util.Log
import android.view.ViewConfiguration
import deckers.thibault.aves.model.FieldMap
import deckers.thibault.aves.utils.LogUtils
import io.flutter.plugin.common.EventChannel
@ -21,6 +22,7 @@ class SettingsChangeStreamHandler(private val context: Context) : EventChannel.S
private val contentObserver = object : ContentObserver(null) {
private var accelerometerRotation: Int = 0
private var transitionAnimationScale: Float = 1f
private var longPressTimeoutMillis: Int = 0
init {
update()
@ -36,6 +38,7 @@ class SettingsChangeStreamHandler(private val context: Context) : EventChannel.S
hashMapOf(
Settings.System.ACCELEROMETER_ROTATION to accelerometerRotation,
Settings.Global.TRANSITION_ANIMATION_SCALE to transitionAnimationScale,
KEY_LONG_PRESS_TIMEOUT_MILLIS to longPressTimeoutMillis,
)
)
}
@ -54,6 +57,11 @@ class SettingsChangeStreamHandler(private val context: Context) : EventChannel.S
transitionAnimationScale = newTransitionAnimationScale
changed = true
}
val newLongPressTimeout = ViewConfiguration.getLongPressTimeout()
if (longPressTimeoutMillis != newLongPressTimeout) {
longPressTimeoutMillis = newLongPressTimeout
changed = true
}
} catch (e: Exception) {
Log.w(LOG_TAG, "failed to get settings with error=${e.message}", null)
}
@ -93,5 +101,8 @@ class SettingsChangeStreamHandler(private val context: Context) : EventChannel.S
companion object {
private val LOG_TAG = LogUtils.createTag<SettingsChangeStreamHandler>()
const val CHANNEL = "deckers.thibault/aves/settings_change"
// cf `Settings.Secure.LONG_PRESS_TIMEOUT`
const val KEY_LONG_PRESS_TIMEOUT_MILLIS = "long_press_timeout"
}
}

View file

@ -8,4 +8,5 @@
<string name="analysis_channel_name">メディアスキャン</string>
<string name="analysis_notification_default_title">メディアをスキャン中</string>
<string name="analysis_notification_action_stop">停止</string>
<string name="map_shortcut_short_label">マップ</string>
</resources>

View file

@ -0,0 +1,3 @@
In v1.12.2:
- enjoy the app in Danish
Full changelog available on GitHub

View file

@ -0,0 +1,3 @@
In v1.12.2:
- enjoy the app in Danish
Full changelog available on GitHub

View file

@ -1516,5 +1516,53 @@
"chipActionShowCollection": "Zobrazit ve sbírce",
"@chipActionShowCollection": {},
"mapAttributionOsmData": "Mapová data © [OpenStreetMap](https://www.openstreetmap.org/copyright) přispěvatelé",
"@mapAttributionOsmData": {}
"@mapAttributionOsmData": {},
"videoActionShowNextFrame": "Zobrazit další snímek",
"@videoActionShowNextFrame": {},
"newAlbumDialogAlbumAlreadyExistsHelper": "Album již existuje",
"@newAlbumDialogAlbumAlreadyExistsHelper": {},
"renameProcessorHash": "Hash",
"@renameProcessorHash": {},
"albumTierDynamic": "Dynamické",
"@albumTierDynamic": {},
"collectionActionAddDynamicAlbum": "Přidat dynamické album",
"@collectionActionAddDynamicAlbum": {},
"sortOrderShortestFirst": "Nejkratší první",
"@sortOrderShortestFirst": {},
"sortOrderLongestFirst": "Nejdelší první",
"@sortOrderLongestFirst": {},
"mapAttributionOpenTopoMap": "[SRTM](https://www.earthdata.nasa.gov/sensors/srtm) | Dlaždice z [OpenTopoMap](https://opentopomap.org/), [CC BY-SA](https://creativecommons.org/licenses/by-sa/3.0/)",
"@mapAttributionOpenTopoMap": {},
"collectionActionSetHome": "Nastavit jako domov",
"@collectionActionSetHome": {},
"chipActionRemove": "Odstranit",
"@chipActionRemove": {},
"videoActionShowPreviousFrame": "Zobrazit předchozí snímek",
"@videoActionShowPreviousFrame": {},
"dynamicAlbumAlreadyExists": "Dynamické album již existuje",
"@dynamicAlbumAlreadyExists": {},
"newDynamicAlbumDialogTitle": "Nové dynamické album",
"@newDynamicAlbumDialogTitle": {},
"selectStorageVolumeDialogTitle": "Vybrat úložiště",
"@selectStorageVolumeDialogTitle": {},
"explorerActionSelectStorageVolume": "Vyberte úložiště",
"@explorerActionSelectStorageVolume": {},
"mapStyleOsmLiberty": "OSM Liberty",
"@mapStyleOsmLiberty": {},
"mapAttributionOsmLiberty": "Dlaždice z [OpenMapTiles](https://www.openmaptiles.org/), [CC BY](http://creativecommons.org/licenses/by/4.0) • Hostované na [OSM Americana](https://tile.ourmap.us)",
"@mapAttributionOsmLiberty": {},
"setHomeCustom": "Vlastní",
"@setHomeCustom": {},
"sortByDuration": "Podle trvání",
"@sortByDuration": {},
"chipActionGoToExplorerPage": "Zobrazit v průzkumníku",
"@chipActionGoToExplorerPage": {},
"explorerPageTitle": "Průzkumník",
"@explorerPageTitle": {},
"chipActionDecompose": "Rozdělit",
"@chipActionDecompose": {},
"appExportDynamicAlbums": "Dynamická alba",
"@appExportDynamicAlbums": {},
"mapStyleOpenTopoMap": "OpenTopoMap",
"@mapStyleOpenTopoMap": {}
}

View file

@ -210,6 +210,7 @@
"albumTierRegular": "Others",
"coordinateFormatDms": "DMS",
"coordinateFormatDdm": "DDM",
"coordinateFormatDecimal": "Decimal degrees",
"coordinateDms": "{coordinate} {direction}",
"@coordinateDms": {

View file

@ -1155,7 +1155,7 @@
"@vaultDialogLockModeWhenScreenOff": {},
"newVaultDialogTitle": "新しい保管庫",
"@newVaultDialogTitle": {},
"authenticateToConfigureVault": "保管庫を設定するための認証",
"authenticateToConfigureVault": "認証して保管庫を設定",
"@authenticateToConfigureVault": {},
"vaultDialogLockTypeLabel": "ロックの種類",
"@vaultDialogLockTypeLabel": {},
@ -1167,7 +1167,7 @@
"@pinDialogConfirm": {},
"passwordDialogEnter": "パスワードを入力",
"@passwordDialogEnter": {},
"authenticateToUnlockVault": "認証して保管庫のロックを解除する",
"authenticateToUnlockVault": "認証して保管庫を解除",
"@authenticateToUnlockVault": {},
"passwordDialogConfirm": "パスワードの確認",
"@passwordDialogConfirm": {},
@ -1360,5 +1360,41 @@
"filterLocatedLabel": "位置情報あり",
"@filterLocatedLabel": {},
"mapAttributionOsmData": "地図データ © [OpenStreetMap](https://www.openstreetmap.org/copyright) contributors",
"@mapAttributionOsmData": {}
"@mapAttributionOsmData": {},
"sortByDuration": "期間順",
"@sortByDuration": {},
"explorerActionSelectStorageVolume": "ストレージを選択",
"@explorerActionSelectStorageVolume": {},
"newAlbumDialogAlbumAlreadyExistsHelper": "アルバムはすでに存在します",
"@newAlbumDialogAlbumAlreadyExistsHelper": {},
"videoActionShowPreviousFrame": "前のフレームを表示",
"@videoActionShowPreviousFrame": {},
"videoActionShowNextFrame": "次のフレームを表示",
"@videoActionShowNextFrame": {},
"albumTierDynamic": "ダイナミック",
"@albumTierDynamic": {},
"dynamicAlbumAlreadyExists": "ダイナミックアルバムはすでに存在します",
"@dynamicAlbumAlreadyExists": {},
"stateEmpty": "州なし",
"@stateEmpty": {},
"appExportDynamicAlbums": "ダイナミックアルバム",
"@appExportDynamicAlbums": {},
"chipActionRemove": "削除",
"@chipActionRemove": {},
"newDynamicAlbumDialogTitle": "新規ダイナミックアルバム",
"@newDynamicAlbumDialogTitle": {},
"setHomeCustom": "カスタム",
"@setHomeCustom": {},
"mapStyleOsmLiberty": "OSM Liberty",
"@mapStyleOsmLiberty": {},
"mapStyleOpenTopoMap": "OpenTopoMap",
"@mapStyleOpenTopoMap": {},
"selectStorageVolumeDialogTitle": "ストレージを選択",
"@selectStorageVolumeDialogTitle": {},
"sortOrderShortestFirst": "短いものから表示",
"@sortOrderShortestFirst": {},
"sortOrderLongestFirst": "長いものから表示",
"@sortOrderLongestFirst": {},
"collectionActionAddDynamicAlbum": "ダイナミックアルバムを追加",
"@collectionActionAddDynamicAlbum": {}
}

View file

@ -1562,5 +1562,7 @@
"newDynamicAlbumDialogTitle": "Nové dynamické album",
"@newDynamicAlbumDialogTitle": {},
"dynamicAlbumAlreadyExists": "Dynamické album už existuje",
"@dynamicAlbumAlreadyExists": {}
"@dynamicAlbumAlreadyExists": {},
"chipActionDecompose": "Rozdeliť",
"@chipActionDecompose": {}
}

View file

@ -73,11 +73,6 @@ class Dependencies {
licenseUrl: 'https://github.com/material-foundation/flutter-packages/blob/main/packages/dynamic_color/LICENSE',
sourceUrl: 'https://github.com/material-foundation/flutter-packages/tree/main/packages/dynamic_color',
),
Dependency(
name: 'FFmpegKit (Aves fork)',
license: lgpl3,
sourceUrl: 'https://github.com/deckerst/ffmpeg-kit',
),
Dependency(
name: 'Floating',
license: mit,

View file

@ -232,6 +232,7 @@ class AvesEntry with AvesEntryBase {
// the MIME type reported by the Media Store is unreliable
// so we use the one found during cataloguing if possible
@override
String get mimeType => _catalogMetadata?.mimeType ?? sourceMimeType;
bool get isCatalogued => _catalogMetadata != null;

View file

@ -9,6 +9,7 @@ import 'package:aves/ref/mime_types.dart';
import 'package:aves/services/common/services.dart';
import 'package:aves/services/metadata/svg_metadata_service.dart';
import 'package:aves/theme/colors.dart';
import 'package:aves/theme/format.dart';
import 'package:aves/theme/text.dart';
import 'package:aves/widgets/viewer/info/metadata/metadata_dir.dart';
import 'package:aves_model/aves_model.dart';
@ -82,6 +83,21 @@ extension ExtraAvesEntryInfo on AvesEntry {
directories.add(MetadataDirectory(MetadataDirectory.mediaDirectory, _toSortedTags(formattedMediaTags)));
}
if (mediaInfo.containsKey(Keys.chapters)) {
final allChapters = (mediaInfo.remove(Keys.chapters) as List).cast<Map>();
if (allChapters.isNotEmpty) {
allChapters.sortBy((v) => v[Keys.time] as num? ?? 0);
final chapterTags = SplayTreeMap.of(Map.fromEntries(allChapters.mapIndexed((i, chapter) {
final chapterNumber = i + 1;
final time = Duration(seconds: (chapter[Keys.time] as num? ?? 0).round());
final title = chapter[Keys.title] as String? ?? 'Chapter $chapterNumber';
return MapEntry('$chapterNumber${AText.separator}${formatFriendlyDuration(time)}', title);
})), compareNatural);
directories.add(MetadataDirectory('Chapters', chapterTags));
}
}
if (mediaInfo.containsKey(Keys.streams)) {
String getTypeText(Map stream) {
final type = stream[Keys.streamType] ?? MediaStreamTypes.unknown;
@ -96,7 +112,7 @@ extension ExtraAvesEntryInfo on AvesEntry {
case MediaStreamTypes.timedText:
return 'Text';
case MediaStreamTypes.video:
return stream.containsKey(Keys.fpsDen) ? 'Video' : 'Image';
return stream.containsKey(Keys.fpsDen) || stream.containsKey(Keys.fps) ? 'Video' : 'Image';
case MediaStreamTypes.unknown:
default:
return 'Unknown';

View file

@ -60,12 +60,15 @@ class CoordinateFilter extends CollectionFilter {
@override
String getLabel(BuildContext context) {
return _formatBounds((latLng) => settings.coordinateFormat.format(
context,
latLng,
minuteSecondPadding: minuteSecondPadding,
dmsSecondDecimals: 0,
));
return _formatBounds((latLng) {
final format = settings.coordinateFormat;
return format.format(
context,
latLng,
minuteSecondPadding: minuteSecondPadding,
dmsSecondDecimals: format == CoordinateFormat.ddm ? 2 : 0,
);
});
}
@override

View file

@ -6,6 +6,7 @@ import 'package:aves/model/media/video/codecs.dart';
import 'package:aves/model/media/video/profiles/aac.dart';
import 'package:aves/model/media/video/profiles/h264.dart';
import 'package:aves/model/media/video/profiles/hevc.dart';
import 'package:aves/model/media/video/stereo_3d_modes.dart';
import 'package:aves/model/metadata/catalog.dart';
import 'package:aves/ref/languages.dart';
import 'package:aves/ref/locales.dart';
@ -52,10 +53,10 @@ class VideoMetadataFormatter {
final streams = mediaInfo[Keys.streams];
if (streams is List) {
final allStreamInfo = streams.cast<Map>();
final sizedStream = allStreamInfo.firstWhereOrNull((stream) => stream.containsKey(Keys.width) && stream.containsKey(Keys.height));
final sizedStream = allStreamInfo.firstWhereOrNull((stream) => stream.containsKey(Keys.videoWidth) && stream.containsKey(Keys.videoHeight));
if (sizedStream != null) {
final width = sizedStream[Keys.width];
final height = sizedStream[Keys.height];
final width = sizedStream[Keys.videoWidth];
final height = sizedStream[Keys.videoHeight];
if (width is int && height is int) {
fields['width'] = width;
fields['height'] = height;
@ -68,7 +69,7 @@ class VideoMetadataFormatter {
fields['durationMillis'] = (durationMicros / 1000).round();
} else {
final duration = _parseDuration(mediaInfo[Keys.duration]);
if (duration != null) {
if (duration != null && duration > Duration.zero) {
fields['durationMillis'] = duration.inMilliseconds;
}
}
@ -82,7 +83,7 @@ class VideoMetadataFormatter {
if (entry.mimeType == MimeTypes.avif) {
final duration = _parseDuration(mediaInfo[Keys.duration]);
if (duration == null) return null;
if (duration == null || duration == Duration.zero) return null;
catalogMetadata = catalogMetadata.copyWith(isAnimated: true);
}
@ -189,13 +190,14 @@ class VideoMetadataFormatter {
}
key = (key ?? (kv.key as String)).toLowerCase();
void save(String key, String? value) {
void save(String key, dynamic value) {
if (value != null) {
dir[keyLanguage != null ? '$key ($keyLanguage)' : key] = value;
dir[keyLanguage != null ? '$key ($keyLanguage)' : key] = value.toString();
}
}
switch (key) {
case Keys.chapters:
case Keys.codecLevel:
case Keys.codecTag:
case Keys.codecTagString:
@ -219,24 +221,22 @@ class VideoMetadataFormatter {
break;
case Keys.androidCaptureFramerate:
final captureFps = double.parse(value);
save('Capture Frame Rate', '${roundToPrecision(captureFps, decimals: 3).toString()} FPS');
save('Capture Frame Rate', '${roundToPrecision(captureFps, decimals: 3)} FPS');
case Keys.androidManufacturer:
save('Android Manufacturer', value);
case Keys.androidModel:
save('Android Model', value);
case Keys.androidVersion:
save('Android Version', value);
case Keys.audioChannels:
save('Audio Channels', value);
case Keys.bitrate:
case Keys.bps:
save('Bit Rate', _formatMetric(value, 'b/s'));
case Keys.bitsPerRawSample:
save('Bits Per Raw Sample', value);
case Keys.byteCount:
save('Size', _formatFilesize(value));
case Keys.channelLayout:
save('Channel Layout', _formatChannelLayout(value));
case Keys.chromaLocation:
save('Chroma Location', value);
case Keys.codecName:
if (value != 'none') {
save('Format', _formatCodecName(value));
@ -245,20 +245,28 @@ class VideoMetadataFormatter {
if (streamType == MediaStreamTypes.video) {
// this is just a short name used by FFmpeg
// user-friendly descriptions for related enums are defined in libavutil/pixfmt.h
save('Pixel Format', (value as String).toUpperCase());
save('Pixel Format', value.toString().toUpperCase());
}
case Keys.hwPixelFormat:
save('Hardware Pixel Format', value.toString().toUpperCase());
case Keys.codedHeight:
save('Coded Height', '$value pixels');
case Keys.codedWidth:
save('Coded Width', '$value pixels');
case Keys.decoderHeight:
save('Decoder Height', '$value pixels');
case Keys.decoderWidth:
save('Decoder Width', '$value pixels');
case Keys.colorMatrix:
save('Color Matrix', value.toString().toUpperCase());
case Keys.colorPrimaries:
save('Color Primaries', (value as String).toUpperCase());
save('Color Primaries', value.toString().toUpperCase());
case Keys.colorRange:
save('Color Range', (value as String).toUpperCase());
save('Color Range', value.toString().toUpperCase());
case Keys.colorSpace:
save('Color Space', (value as String).toUpperCase());
save('Color Space', value.toString().toUpperCase());
case Keys.colorTransfer:
save('Color Transfer', (value as String).toUpperCase());
save('Color Transfer', value.toString().toUpperCase());
case Keys.codecProfileId:
{
final profile = int.tryParse(value);
@ -294,8 +302,6 @@ class VideoMetadataFormatter {
save('Compatible Brands', formattedBrands);
case Keys.creationTime:
save('Creation Time', _formatDate(value));
case Keys.dar:
save('Display Aspect Ratio', value);
case Keys.date:
if (value is String && value != '0') {
final charCount = value.length;
@ -307,18 +313,18 @@ class VideoMetadataFormatter {
if (value != 0) save('Duration', formatPreciseDuration(Duration(microseconds: value)));
case Keys.extraDataSize:
save('Extra Data Size', _formatFilesize(value));
case Keys.fieldOrder:
save('Field Order', value);
case Keys.fps:
save('Frame Rate', '${roundToPrecision(info[Keys.fps], decimals: 3)} FPS');
case Keys.fpsDen:
save('Frame Rate', '${roundToPrecision(info[Keys.fpsNum] / info[Keys.fpsDen], decimals: 3).toString()} FPS');
save('Frame Rate', '${roundToPrecision(info[Keys.fpsNum] / info[Keys.fpsDen], decimals: 3)} FPS');
case Keys.frameCount:
save('Frame Count', value);
case Keys.handlerName:
save('Handler Name', value);
case Keys.gamma:
save('Gamma', value.toString().toUpperCase());
case Keys.hasBFrames:
save('Has B-Frames', value);
case Keys.height:
save('Height', '$value pixels');
case Keys.hearingImpaired:
save('Hearing impaired', value);
case Keys.language:
if (value != 'und') save('Language', _formatLanguage(value));
case Keys.location:
@ -326,9 +332,7 @@ class VideoMetadataFormatter {
case Keys.majorBrand:
save('Major Brand', _formatBrand(value));
case Keys.mediaFormat:
save('Format', (value as String).splitMapJoin(',', onMatch: (s) => ', ', onNonMatch: _formatCodecName));
case Keys.mediaType:
save('Media Type', value);
save('Format', value.toString().splitMapJoin(',', onMatch: (s) => ', ', onNonMatch: _formatCodecName));
case Keys.minorVersion:
if (value != '0') save('Minor Version', value);
case Keys.nalLengthSize:
@ -347,7 +351,7 @@ class VideoMetadataFormatter {
case Keys.rotate:
save('Rotation', '$value°');
case Keys.sampleFormat:
save('Sample Format', (value as String).toUpperCase());
save('Sample Format', value.toString().toUpperCase());
case Keys.sampleRate:
save('Sample Rate', _formatMetric(value, 'Hz'));
case Keys.sar:
@ -371,18 +375,24 @@ class VideoMetadataFormatter {
save('Stats Writing App', value);
case Keys.statisticsWritingDateUtc:
save('Stats Writing Date', _formatDate(value));
case Keys.stereo3dMode:
save('Stereo 3D Mode', _formatStereo3dMode(value));
case Keys.timeBase:
save('Time Base', value);
case Keys.track:
if (value != '0') save('Track', value);
case Keys.vendorId:
save('Vendor ID', value);
case Keys.width:
save('Width', '$value pixels');
case Keys.videoHeight:
save('Video Height', '$value pixels');
case Keys.videoWidth:
save('Video Width', '$value pixels');
case Keys.visualImpaired:
save('Visual impaired', value);
case Keys.xiaomiSlowMoment:
save('Xiaomi Slow Moment', value);
default:
save(key.toSentenceCase(), value.toString());
save(key.toSentenceCase(), value);
}
} catch (error) {
debugPrint('failed to process video info key=${kv.key} value=${kv.value}, error=$error');
@ -411,6 +421,8 @@ class VideoMetadataFormatter {
return date.toIso8601String();
}
static String _formatStereo3dMode(String value) => Stereo3dModes.names[value] ?? value;
// input example: '00:00:05.408000000' or '5.408000'
static Duration? _parseDuration(String? value) {
if (value == null) return null;

View file

@ -0,0 +1,22 @@
// Lookup table of stereoscopic 3D mode tags to human-readable descriptions,
// used when formatting video stream metadata for display.
// Several tags map to the same description (e.g. `ab2l`/`tb2l`) — these appear
// to be alternate spellings of the same layout; NOTE(review): tag set looks
// like FFmpeg `stereo3d` identifiers — confirm against the metadata source.
class Stereo3dModes {
// raw mode tag -> friendly display name (entry order preserved as written)
static const names = {
'ab2l': 'above below half height left first',
'tb2l': 'above below half height left first',
'ab2r': 'above below half height right first',
'tb2r': 'above below half height right first',
'abl': 'above below left first',
'tbl': 'above below left first',
'abr': 'above below right first',
'tbr': 'above below right first',
'al': 'alternating frames left first',
'ar': 'alternating frames right first',
'sbs2l': 'side by side half width left first',
'sbs2r': 'side by side half width right first',
'sbsl': 'side by side left first',
'sbsr': 'side by side right first',
'irl': 'interleave rows left first',
'irr': 'interleave rows right first',
'icl': 'interleave columns left first',
'icr': 'interleave columns right first',
};
}

View file

@ -8,15 +8,17 @@ import 'package:latlong2/latlong.dart';
extension ExtraCoordinateFormat on CoordinateFormat {
static const _separator = ', ';
String format(BuildContext context, LatLng latLng, {bool minuteSecondPadding = false, int dmsSecondDecimals = 2}) {
String format(BuildContext context, LatLng latLng, {bool minuteSecondPadding = false, int? dmsSecondDecimals}) {
final text = formatWithoutDirectionality(context.l10n, latLng, minuteSecondPadding: minuteSecondPadding, dmsSecondDecimals: dmsSecondDecimals);
return context.applyDirectionality(text);
}
String formatWithoutDirectionality(AppLocalizations l10n, LatLng latLng, {bool minuteSecondPadding = false, int dmsSecondDecimals = 2}) {
String formatWithoutDirectionality(AppLocalizations l10n, LatLng latLng, {bool minuteSecondPadding = false, int? dmsSecondDecimals}) {
switch (this) {
case CoordinateFormat.dms:
return toDMS(l10n, latLng, minuteSecondPadding: minuteSecondPadding, secondDecimals: dmsSecondDecimals).join(_separator);
return toDMS(l10n, latLng, minuteSecondPadding: minuteSecondPadding, secondDecimals: dmsSecondDecimals ?? 2).join(_separator);
case CoordinateFormat.ddm:
return toDDM(l10n, latLng, minutePadding: minuteSecondPadding, minuteDecimals: dmsSecondDecimals ?? 4).join(_separator);
case CoordinateFormat.decimal:
return _toDecimal(l10n, latLng).join(_separator);
}
@ -35,6 +37,19 @@ extension ExtraCoordinateFormat on CoordinateFormat {
];
}
// returns coordinates formatted as DDM, e.g. ['41° 24.2028 N', '2° 10.4418 E']
// `minutePadding` pads the minute integer part to 2 digits;
// `minuteDecimals` sets the fractional minute precision (default 4).
// The sign of each axis is dropped and expressed via the localized
// hemisphere direction (N/S/E/W) instead, reusing the DMS direction strings.
static List<String> toDDM(AppLocalizations l10n, LatLng latLng, {bool minutePadding = false, int minuteDecimals = 4}) {
final locale = l10n.localeName;
final lat = latLng.latitude;
final lng = latLng.longitude;
// format latitude and longitude independently, locale-aware
final latSexa = _decimal2ddm(lat, minutePadding, minuteDecimals, locale);
final lngSexa = _decimal2ddm(lng, minutePadding, minuteDecimals, locale);
return [
l10n.coordinateDms(latSexa, lat < 0 ? l10n.coordinateDmsSouth : l10n.coordinateDmsNorth),
l10n.coordinateDms(lngSexa, lng < 0 ? l10n.coordinateDmsWest : l10n.coordinateDmsEast),
];
}
static String _decimal2sexagesimal(
double degDecimal,
bool minuteSecondPadding,
@ -54,6 +69,22 @@ extension ExtraCoordinateFormat on CoordinateFormat {
return '$degText° $minText $secText';
}
// Converts a decimal degree value to a "degrees° decimal-minutes" string,
// e.g. 41.4034 -> '41° 24.2040'. The sign is discarded via `abs()`; the
// caller is expected to append the hemisphere direction separately.
// `minutePadding` pads the minute integer part to 2 digits;
// `minuteDecimals` controls fractional minute digits (0 disables them).
// Number rendering is locale-aware through `NumberFormat`.
static String _decimal2ddm(
double degDecimal,
bool minutePadding,
int minuteDecimals,
String locale,
) {
final degAbs = degDecimal.abs();
final deg = degAbs.toInt();
// remaining fraction of a degree, expressed in minutes
final min = (degAbs - deg) * 60;
final degText = NumberFormat('0', locale).format(deg);
// builds a pattern like '00.0000' (padded) or '0.0000' (unpadded)
final minText = NumberFormat('${'0' * (minutePadding ? 2 : 1)}${minuteDecimals > 0 ? '.${'0' * minuteDecimals}' : ''}', locale).format(min);
return '$degText° $minText';
}
static List<String> _toDecimal(AppLocalizations l10n, LatLng latLng) {
final coordinateFormatter = NumberFormat('0.000000°', l10n.localeName);
return [

View file

@ -34,6 +34,7 @@ import 'package:aves_video/aves_video.dart';
import 'package:collection/collection.dart';
import 'package:device_info_plus/device_info_plus.dart';
import 'package:flutter/foundation.dart';
import 'package:flutter/gestures.dart';
import 'package:flutter/services.dart';
import 'package:latlong2/latlong.dart';
@ -319,6 +320,10 @@ class Settings with ChangeNotifier, SettingsAccess, AppSettings, DisplaySettings
if (value is num) {
areAnimationsRemoved = value == 0;
}
case SettingKeys.platformLongPressTimeoutMillisKey:
if (value is num) {
longPressTimeoutMillis = value.toInt();
}
}
});
}
@ -331,6 +336,10 @@ class Settings with ChangeNotifier, SettingsAccess, AppSettings, DisplaySettings
set areAnimationsRemoved(bool newValue) => set(SettingKeys.platformTransitionAnimationScaleKey, newValue);
Duration get longPressTimeout => Duration(milliseconds: getInt(SettingKeys.platformLongPressTimeoutMillisKey) ?? kLongPressTimeout.inMilliseconds);
set longPressTimeoutMillis(int newValue) => set(SettingKeys.platformLongPressTimeoutMillisKey, newValue);
// import/export
Map<String, dynamic> export() => Map.fromEntries(

View file

@ -1,20 +1,11 @@
import 'package:aves/services/common/services.dart';
import 'package:flutter/foundation.dart';
import 'package:flutter/gestures.dart';
import 'package:flutter/services.dart';
class AccessibilityService {
static const _platform = MethodChannel('deckers.thibault/aves/accessibility');
static Future<bool> shouldUseBoldFont() async {
try {
final result = await _platform.invokeMethod('shouldUseBoldFont');
if (result != null) return result as bool;
} on PlatformException catch (e, stack) {
await reportService.recordError(e, stack);
}
return false;
}
static Future<bool> areAnimationsRemoved() async {
try {
final result = await _platform.invokeMethod('areAnimationsRemoved');
@ -25,6 +16,16 @@ class AccessibilityService {
return false;
}
// Fetches the platform long-press timeout, in milliseconds, over the
// accessibility method channel. Falls back to Flutter's default
// `kLongPressTimeout` when the platform call fails or returns null;
// platform errors are reported via `reportService`, not rethrown.
static Future<int> getLongPressTimeout() async {
try {
final result = await _platform.invokeMethod('getLongPressTimeout');
if (result != null) return result as int;
} on PlatformException catch (e, stack) {
await reportService.recordError(e, stack);
}
return kLongPressTimeout.inMilliseconds;
}
static bool? _hasRecommendedTimeouts;
static Future<bool> hasRecommendedTimeouts() async {
@ -65,4 +66,14 @@ class AccessibilityService {
}
return originalTimeoutMillis;
}
static Future<bool> shouldUseBoldFont() async {
try {
final result = await _platform.invokeMethod('shouldUseBoldFont');
if (result != null) return result as bool;
} on PlatformException catch (e, stack) {
await reportService.recordError(e, stack);
}
return false;
}
}

View file

@ -60,6 +60,7 @@ Future<void> _init() async {
await mobileServices.init();
await settings.init(monitorPlatformSettings: false);
await reportService.init();
videoMetadataFetcher.init();
final analyzer = Analyzer();
_channel.setMethodCallHandler((call) {

View file

@ -21,7 +21,6 @@ import 'package:aves_report_platform/aves_report_platform.dart';
import 'package:aves_services/aves_services.dart';
import 'package:aves_services_platform/aves_services_platform.dart';
import 'package:aves_video/aves_video.dart';
import 'package:aves_video_ffmpeg/aves_video_ffmpeg.dart';
import 'package:aves_video_mpv/aves_video_mpv.dart';
import 'package:get_it/get_it.dart';
import 'package:path/path.dart' as p;
@ -58,7 +57,7 @@ void initPlatformServices() {
getIt.registerLazySingleton<AvesAvailability>(LiveAvesAvailability.new);
getIt.registerLazySingleton<LocalMediaDb>(SqfliteLocalMediaDb.new);
getIt.registerLazySingleton<AvesVideoControllerFactory>(MpvVideoControllerFactory.new);
getIt.registerLazySingleton<AvesVideoMetadataFetcher>(FfmpegVideoMetadataFetcher.new);
getIt.registerLazySingleton<AvesVideoMetadataFetcher>(MpvVideoMetadataFetcher.new);
getIt.registerLazySingleton<AppService>(PlatformAppService.new);
getIt.registerLazySingleton<AppProfileService>(PlatformAppProfileService.new);

View file

@ -1,9 +1,12 @@
extension ExtraString on String {
// step 1: insert a space before each capital that starts a word (`Ab`) or a '['
static final _sentenceCaseStep1 = RegExp(r'([A-Z][a-z]|\[)');
// step 2: snake_case separator followed by a lowercase letter
static final _sentenceCaseStep2 = RegExp(r'_([a-z])');
// step 3: camelCase boundary (`aB` -> `a B`)
static final _sentenceCaseStep3 = RegExp(r'([a-z])([A-Z])');

// Converts an identifier-like string (camelCase or snake_case) into a
// space-separated, capitalized form, e.g. 'colorSpace' -> 'Color Space'.
// NOTE(review): the replacement order is significant — step 2 uppercases
// the `_x` match before underscores are turned into spaces, so step 3 can
// then split the resulting camelCase boundaries. Do not reorder the steps.
String toSentenceCase() {
// uppercase the first character
var s = replaceFirstMapped(RegExp('.'), (m) => m.group(0)!.toUpperCase());
s = s.replaceAllMapped(_sentenceCaseStep1, (m) => ' ${m.group(1)}');
// uppercase the letter after each underscore, then replace underscores with spaces
s = s.replaceAllMapped(_sentenceCaseStep2, (m) => m.group(0)!.toUpperCase()).replaceAll('_', ' ');
return s.replaceAllMapped(_sentenceCaseStep3, (m) => '${m.group(1)} ${m.group(2)}').trim();
}
}

View file

@ -45,6 +45,7 @@ extension ExtraCoordinateFormatView on CoordinateFormat {
final l10n = context.l10n;
return switch (this) {
CoordinateFormat.dms => l10n.coordinateFormatDms,
CoordinateFormat.ddm => l10n.coordinateFormatDdm,
CoordinateFormat.decimal => l10n.coordinateFormatDecimal,
};
}

View file

@ -494,10 +494,12 @@ class _AvesAppState extends State<AvesApp> with WidgetsBindingObserver {
await mobileServices.init();
await settings.init(monitorPlatformSettings: true);
settings.isRotationLocked = await windowService.isRotationLocked();
settings.longPressTimeoutMillis = await AccessibilityService.getLongPressTimeout();
settings.areAnimationsRemoved = await AccessibilityService.areAnimationsRemoved();
await _onTvLayoutChanged();
_monitorSettings();
videoControllerFactory.init();
videoMetadataFetcher.init();
unawaited(deviceService.setLocaleConfig(AvesApp.supportedLocales));
unawaited(storageService.deleteTempDirectory());

View file

@ -1,7 +1,9 @@
import 'dart:async';
import 'package:aves/model/settings/settings.dart';
import 'package:aves/theme/durations.dart';
import 'package:aves/widgets/common/action_controls/quick_choosers/common/route_layout.dart';
import 'package:aves/widgets/common/basic/gestures/gesture_detector.dart';
import 'package:flutter/material.dart';
import 'package:provider/provider.dart';
@@ -79,7 +81,7 @@ abstract class ChooserQuickButtonState<T extends ChooserQuickButton<U>, U> exten
onSurfaceVariant: colorScheme.onSurface,
),
),
child: GestureDetector(
child: AGestureDetector(
behavior: HitTestBehavior.opaque,
onLongPressStart: _hasChooser ? _showChooser : null,
onLongPressMoveUpdate: _hasChooser ? _moveUpdateStreamController.add : null,
@@ -93,6 +95,7 @@ abstract class ChooserQuickButtonState<T extends ChooserQuickButton<U>, U> exten
}
: null,
onLongPressCancel: _clearChooserOverlayEntry,
longPressTimeout: settings.longPressTimeout,
child: child,
),
);

View file

@ -1,8 +1,10 @@
import 'dart:async';
import 'package:aves/model/settings/settings.dart';
import 'package:aves/widgets/common/basic/draggable_scrollbar/notifications.dart';
import 'package:aves/widgets/common/basic/draggable_scrollbar/scroll_label.dart';
import 'package:aves/widgets/common/basic/draggable_scrollbar/transition.dart';
import 'package:aves/widgets/common/basic/gestures/gesture_detector.dart';
import 'package:flutter/widgets.dart';
/*
@@ -221,7 +223,7 @@ class _DraggableScrollbarState extends State<DraggableScrollbar> with TickerProv
// exclude semantics, otherwise this layer will block access to content layers below when using TalkBack
ExcludeSemantics(
child: RepaintBoundary(
child: GestureDetector(
child: AGestureDetector(
onLongPressStart: (details) {
_longPressLastGlobalPosition = details.globalPosition;
_onVerticalDragStart();
@@ -235,6 +237,7 @@ class _DraggableScrollbarState extends State<DraggableScrollbar> with TickerProv
onVerticalDragStart: (_) => _onVerticalDragStart(),
onVerticalDragUpdate: (details) => _onVerticalDragUpdate(details.delta.dy),
onVerticalDragEnd: (_) => _onVerticalDragEnd(),
longPressTimeout: settings.longPressTimeout,
child: ValueListenableBuilder<double>(
valueListenable: _thumbOffsetNotifier,
builder: (context, thumbOffset, child) => Container(

View file

@@ -0,0 +1,992 @@
import 'package:flutter/widgets.dart';
import 'package:flutter/gestures.dart';
import 'package:flutter/rendering.dart';
// as of Flutter v3.27.1, `GestureDetector` does not allow setting long press delay
// adapted from Flutter `GestureDetector` in `/widgets/gesture_detector.dart`
class AGestureDetector extends StatelessWidget {
/// Creates a widget that detects gestures.
///
/// Pan and scale callbacks cannot be used simultaneously because scale is a
/// superset of pan. Use the scale callbacks instead.
///
/// Horizontal and vertical drag callbacks cannot be used simultaneously
/// because a combination of a horizontal and vertical drag is a pan.
/// Use the pan callbacks instead.
///
/// {@youtube 560 315 https://www.youtube.com/watch?v=WhVXkCFPmK4}
///
/// By default, gesture detectors contribute semantic information to the tree
/// that is used by assistive technology.
AGestureDetector({
super.key,
this.child,
this.onTapDown,
this.onTapUp,
this.onTap,
this.onTapCancel,
this.onSecondaryTap,
this.onSecondaryTapDown,
this.onSecondaryTapUp,
this.onSecondaryTapCancel,
this.onTertiaryTapDown,
this.onTertiaryTapUp,
this.onTertiaryTapCancel,
this.onDoubleTapDown,
this.onDoubleTap,
this.onDoubleTapCancel,
this.onLongPressDown,
this.onLongPressCancel,
this.onLongPress,
this.onLongPressStart,
this.onLongPressMoveUpdate,
this.onLongPressUp,
this.onLongPressEnd,
this.onSecondaryLongPressDown,
this.onSecondaryLongPressCancel,
this.onSecondaryLongPress,
this.onSecondaryLongPressStart,
this.onSecondaryLongPressMoveUpdate,
this.onSecondaryLongPressUp,
this.onSecondaryLongPressEnd,
this.onTertiaryLongPressDown,
this.onTertiaryLongPressCancel,
this.onTertiaryLongPress,
this.onTertiaryLongPressStart,
this.onTertiaryLongPressMoveUpdate,
this.onTertiaryLongPressUp,
this.onTertiaryLongPressEnd,
this.onVerticalDragDown,
this.onVerticalDragStart,
this.onVerticalDragUpdate,
this.onVerticalDragEnd,
this.onVerticalDragCancel,
this.onHorizontalDragDown,
this.onHorizontalDragStart,
this.onHorizontalDragUpdate,
this.onHorizontalDragEnd,
this.onHorizontalDragCancel,
this.onForcePressStart,
this.onForcePressPeak,
this.onForcePressUpdate,
this.onForcePressEnd,
this.onPanDown,
this.onPanStart,
this.onPanUpdate,
this.onPanEnd,
this.onPanCancel,
this.onScaleStart,
this.onScaleUpdate,
this.onScaleEnd,
this.behavior,
this.excludeFromSemantics = false,
this.dragStartBehavior = DragStartBehavior.start,
this.trackpadScrollCausesScale = false,
this.trackpadScrollToScaleFactor = kDefaultTrackpadScrollToScaleFactor,
this.supportedDevices,
this.longPressTimeout = kLongPressTimeout,
}) : assert(() {
final bool haveVerticalDrag = onVerticalDragStart != null || onVerticalDragUpdate != null || onVerticalDragEnd != null;
final bool haveHorizontalDrag = onHorizontalDragStart != null || onHorizontalDragUpdate != null || onHorizontalDragEnd != null;
final bool havePan = onPanStart != null || onPanUpdate != null || onPanEnd != null;
final bool haveScale = onScaleStart != null || onScaleUpdate != null || onScaleEnd != null;
if (havePan || haveScale) {
if (havePan && haveScale) {
throw FlutterError.fromParts(<DiagnosticsNode>[
ErrorSummary('Incorrect GestureDetector arguments.'),
ErrorDescription(
'Having both a pan gesture recognizer and a scale gesture recognizer is redundant; scale is a superset of pan.',
),
ErrorHint('Just use the scale gesture recognizer.'),
]);
}
final String recognizer = havePan ? 'pan' : 'scale';
if (haveVerticalDrag && haveHorizontalDrag) {
throw FlutterError(
'Incorrect GestureDetector arguments.\n'
'Simultaneously having a vertical drag gesture recognizer, a horizontal drag gesture recognizer, and a $recognizer gesture recognizer '
'will result in the $recognizer gesture recognizer being ignored, since the other two will catch all drags.',
);
}
}
return true;
}());
/// The widget below this widget in the tree.
///
/// {@macro flutter.widgets.ProxyWidget.child}
final Widget? child;
/// A pointer that might cause a tap with a primary button has contacted the
/// screen at a particular location.
///
/// This is called after a short timeout, even if the winning gesture has not
/// yet been selected. If the tap gesture wins, [onTapUp] will be called,
/// otherwise [onTapCancel] will be called.
///
/// See also:
///
/// * [kPrimaryButton], the button this callback responds to.
final GestureTapDownCallback? onTapDown;
/// A pointer that will trigger a tap with a primary button has stopped
/// contacting the screen at a particular location.
///
/// This triggers immediately before [onTap] in the case of the tap gesture
/// winning. If the tap gesture did not win, [onTapCancel] is called instead.
///
/// See also:
///
/// * [kPrimaryButton], the button this callback responds to.
final GestureTapUpCallback? onTapUp;
/// A tap with a primary button has occurred.
///
/// This triggers when the tap gesture wins. If the tap gesture did not win,
/// [onTapCancel] is called instead.
///
/// See also:
///
/// * [kPrimaryButton], the button this callback responds to.
/// * [onTapUp], which is called at the same time but includes details
/// regarding the pointer position.
final GestureTapCallback? onTap;
/// The pointer that previously triggered [onTapDown] will not end up causing
/// a tap.
///
/// This is called after [onTapDown], and instead of [onTapUp] and [onTap], if
/// the tap gesture did not win.
///
/// See also:
///
/// * [kPrimaryButton], the button this callback responds to.
final GestureTapCancelCallback? onTapCancel;
/// A tap with a secondary button has occurred.
///
/// This triggers when the tap gesture wins. If the tap gesture did not win,
/// [onSecondaryTapCancel] is called instead.
///
/// See also:
///
/// * [kSecondaryButton], the button this callback responds to.
/// * [onSecondaryTapUp], which is called at the same time but includes details
/// regarding the pointer position.
final GestureTapCallback? onSecondaryTap;
/// A pointer that might cause a tap with a secondary button has contacted the
/// screen at a particular location.
///
/// This is called after a short timeout, even if the winning gesture has not
/// yet been selected. If the tap gesture wins, [onSecondaryTapUp] will be
/// called, otherwise [onSecondaryTapCancel] will be called.
///
/// See also:
///
/// * [kSecondaryButton], the button this callback responds to.
final GestureTapDownCallback? onSecondaryTapDown;
/// A pointer that will trigger a tap with a secondary button has stopped
/// contacting the screen at a particular location.
///
/// This triggers in the case of the tap gesture winning. If the tap gesture
/// did not win, [onSecondaryTapCancel] is called instead.
///
/// See also:
///
/// * [onSecondaryTap], a handler triggered right after this one that doesn't
/// pass any details about the tap.
/// * [kSecondaryButton], the button this callback responds to.
final GestureTapUpCallback? onSecondaryTapUp;
/// The pointer that previously triggered [onSecondaryTapDown] will not end up
/// causing a tap.
///
/// This is called after [onSecondaryTapDown], and instead of
/// [onSecondaryTapUp], if the tap gesture did not win.
///
/// See also:
///
/// * [kSecondaryButton], the button this callback responds to.
final GestureTapCancelCallback? onSecondaryTapCancel;
/// A pointer that might cause a tap with a tertiary button has contacted the
/// screen at a particular location.
///
/// This is called after a short timeout, even if the winning gesture has not
/// yet been selected. If the tap gesture wins, [onTertiaryTapUp] will be
/// called, otherwise [onTertiaryTapCancel] will be called.
///
/// See also:
///
/// * [kTertiaryButton], the button this callback responds to.
final GestureTapDownCallback? onTertiaryTapDown;
/// A pointer that will trigger a tap with a tertiary button has stopped
/// contacting the screen at a particular location.
///
/// This triggers in the case of the tap gesture winning. If the tap gesture
/// did not win, [onTertiaryTapCancel] is called instead.
///
/// See also:
///
/// * [kTertiaryButton], the button this callback responds to.
final GestureTapUpCallback? onTertiaryTapUp;
/// The pointer that previously triggered [onTertiaryTapDown] will not end up
/// causing a tap.
///
/// This is called after [onTertiaryTapDown], and instead of
/// [onTertiaryTapUp], if the tap gesture did not win.
///
/// See also:
///
/// * [kTertiaryButton], the button this callback responds to.
final GestureTapCancelCallback? onTertiaryTapCancel;
/// A pointer that might cause a double tap has contacted the screen at a
/// particular location.
///
/// Triggered immediately after the down event of the second tap.
///
/// If the user completes the double tap and the gesture wins, [onDoubleTap]
/// will be called after this callback. Otherwise, [onDoubleTapCancel] will
/// be called after this callback.
///
/// See also:
///
/// * [kPrimaryButton], the button this callback responds to.
final GestureTapDownCallback? onDoubleTapDown;
/// The user has tapped the screen with a primary button at the same location
/// twice in quick succession.
///
/// See also:
///
/// * [kPrimaryButton], the button this callback responds to.
final GestureTapCallback? onDoubleTap;
/// The pointer that previously triggered [onDoubleTapDown] will not end up
/// causing a double tap.
///
/// See also:
///
/// * [kPrimaryButton], the button this callback responds to.
final GestureTapCancelCallback? onDoubleTapCancel;
/// The pointer has contacted the screen with a primary button, which might
/// be the start of a long-press.
///
/// This triggers after the pointer down event.
///
/// If the user completes the long-press, and this gesture wins,
/// [onLongPressStart] will be called after this callback. Otherwise,
/// [onLongPressCancel] will be called after this callback.
///
/// See also:
///
/// * [kPrimaryButton], the button this callback responds to.
/// * [onSecondaryLongPressDown], a similar callback but for a secondary button.
/// * [onTertiaryLongPressDown], a similar callback but for a tertiary button.
/// * [LongPressGestureRecognizer.onLongPressDown], which exposes this
/// callback at the gesture layer.
final GestureLongPressDownCallback? onLongPressDown;
/// A pointer that previously triggered [onLongPressDown] will not end up
/// causing a long-press.
///
/// This triggers once the gesture loses if [onLongPressDown] has previously
/// been triggered.
///
/// If the user completed the long-press, and the gesture won, then
/// [onLongPressStart] and [onLongPress] are called instead.
///
/// See also:
///
/// * [kPrimaryButton], the button this callback responds to.
/// * [LongPressGestureRecognizer.onLongPressCancel], which exposes this
/// callback at the gesture layer.
final GestureLongPressCancelCallback? onLongPressCancel;
/// Called when a long press gesture with a primary button has been recognized.
///
/// Triggered when a pointer has remained in contact with the screen at the
/// same location for a long period of time.
///
/// This is equivalent to (and is called immediately after) [onLongPressStart].
/// The only difference between the two is that this callback does not
/// contain details of the position at which the pointer initially contacted
/// the screen.
///
/// See also:
///
/// * [kPrimaryButton], the button this callback responds to.
/// * [LongPressGestureRecognizer.onLongPress], which exposes this
/// callback at the gesture layer.
final GestureLongPressCallback? onLongPress;
/// Called when a long press gesture with a primary button has been recognized.
///
/// Triggered when a pointer has remained in contact with the screen at the
/// same location for a long period of time.
///
/// This is equivalent to (and is called immediately before) [onLongPress].
/// The only difference between the two is that this callback contains
/// details of the position at which the pointer initially contacted the
/// screen, whereas [onLongPress] does not.
///
/// See also:
///
/// * [kPrimaryButton], the button this callback responds to.
/// * [LongPressGestureRecognizer.onLongPressStart], which exposes this
/// callback at the gesture layer.
final GestureLongPressStartCallback? onLongPressStart;
/// A pointer has been drag-moved after a long-press with a primary button.
///
/// See also:
///
/// * [kPrimaryButton], the button this callback responds to.
/// * [LongPressGestureRecognizer.onLongPressMoveUpdate], which exposes this
/// callback at the gesture layer.
final GestureLongPressMoveUpdateCallback? onLongPressMoveUpdate;
/// A pointer that has triggered a long-press with a primary button has
/// stopped contacting the screen.
///
/// This is equivalent to (and is called immediately after) [onLongPressEnd].
/// The only difference between the two is that this callback does not
/// contain details of the state of the pointer when it stopped contacting
/// the screen.
///
/// See also:
///
/// * [kPrimaryButton], the button this callback responds to.
/// * [LongPressGestureRecognizer.onLongPressUp], which exposes this
/// callback at the gesture layer.
final GestureLongPressUpCallback? onLongPressUp;
/// A pointer that has triggered a long-press with a primary button has
/// stopped contacting the screen.
///
/// This is equivalent to (and is called immediately before) [onLongPressUp].
/// The only difference between the two is that this callback contains
/// details of the state of the pointer when it stopped contacting the
/// screen, whereas [onLongPressUp] does not.
///
/// See also:
///
/// * [kPrimaryButton], the button this callback responds to.
/// * [LongPressGestureRecognizer.onLongPressEnd], which exposes this
/// callback at the gesture layer.
final GestureLongPressEndCallback? onLongPressEnd;
/// The pointer has contacted the screen with a secondary button, which might
/// be the start of a long-press.
///
/// This triggers after the pointer down event.
///
/// If the user completes the long-press, and this gesture wins,
/// [onSecondaryLongPressStart] will be called after this callback. Otherwise,
/// [onSecondaryLongPressCancel] will be called after this callback.
///
/// See also:
///
/// * [kSecondaryButton], the button this callback responds to.
/// * [onLongPressDown], a similar callback but for a secondary button.
/// * [onTertiaryLongPressDown], a similar callback but for a tertiary button.
/// * [LongPressGestureRecognizer.onSecondaryLongPressDown], which exposes
/// this callback at the gesture layer.
final GestureLongPressDownCallback? onSecondaryLongPressDown;
/// A pointer that previously triggered [onSecondaryLongPressDown] will not
/// end up causing a long-press.
///
/// This triggers once the gesture loses if [onSecondaryLongPressDown] has
/// previously been triggered.
///
/// If the user completed the long-press, and the gesture won, then
/// [onSecondaryLongPressStart] and [onSecondaryLongPress] are called instead.
///
/// See also:
///
/// * [kSecondaryButton], the button this callback responds to.
/// * [LongPressGestureRecognizer.onSecondaryLongPressCancel], which exposes
/// this callback at the gesture layer.
final GestureLongPressCancelCallback? onSecondaryLongPressCancel;
/// Called when a long press gesture with a secondary button has been
/// recognized.
///
/// Triggered when a pointer has remained in contact with the screen at the
/// same location for a long period of time.
///
/// This is equivalent to (and is called immediately after)
/// [onSecondaryLongPressStart]. The only difference between the two is that
/// this callback does not contain details of the position at which the
/// pointer initially contacted the screen.
///
/// See also:
///
/// * [kSecondaryButton], the button this callback responds to.
/// * [LongPressGestureRecognizer.onSecondaryLongPress], which exposes
/// this callback at the gesture layer.
final GestureLongPressCallback? onSecondaryLongPress;
/// Called when a long press gesture with a secondary button has been
/// recognized.
///
/// Triggered when a pointer has remained in contact with the screen at the
/// same location for a long period of time.
///
/// This is equivalent to (and is called immediately before)
/// [onSecondaryLongPress]. The only difference between the two is that this
/// callback contains details of the position at which the pointer initially
/// contacted the screen, whereas [onSecondaryLongPress] does not.
///
/// See also:
///
/// * [kSecondaryButton], the button this callback responds to.
/// * [LongPressGestureRecognizer.onSecondaryLongPressStart], which exposes
/// this callback at the gesture layer.
final GestureLongPressStartCallback? onSecondaryLongPressStart;
/// A pointer has been drag-moved after a long press with a secondary button.
///
/// See also:
///
/// * [kSecondaryButton], the button this callback responds to.
/// * [LongPressGestureRecognizer.onSecondaryLongPressMoveUpdate], which exposes
/// this callback at the gesture layer.
final GestureLongPressMoveUpdateCallback? onSecondaryLongPressMoveUpdate;
/// A pointer that has triggered a long-press with a secondary button has
/// stopped contacting the screen.
///
/// This is equivalent to (and is called immediately after)
/// [onSecondaryLongPressEnd]. The only difference between the two is that
/// this callback does not contain details of the state of the pointer when
/// it stopped contacting the screen.
///
/// See also:
///
/// * [kSecondaryButton], the button this callback responds to.
/// * [LongPressGestureRecognizer.onSecondaryLongPressUp], which exposes
/// this callback at the gesture layer.
final GestureLongPressUpCallback? onSecondaryLongPressUp;
/// A pointer that has triggered a long-press with a secondary button has
/// stopped contacting the screen.
///
/// This is equivalent to (and is called immediately before)
/// [onSecondaryLongPressUp]. The only difference between the two is that
/// this callback contains details of the state of the pointer when it
/// stopped contacting the screen, whereas [onSecondaryLongPressUp] does not.
///
/// See also:
///
/// * [kSecondaryButton], the button this callback responds to.
/// * [LongPressGestureRecognizer.onSecondaryLongPressEnd], which exposes
/// this callback at the gesture layer.
final GestureLongPressEndCallback? onSecondaryLongPressEnd;
/// The pointer has contacted the screen with a tertiary button, which might
/// be the start of a long-press.
///
/// This triggers after the pointer down event.
///
/// If the user completes the long-press, and this gesture wins,
/// [onTertiaryLongPressStart] will be called after this callback. Otherwise,
/// [onTertiaryLongPressCancel] will be called after this callback.
///
/// See also:
///
/// * [kTertiaryButton], the button this callback responds to.
/// * [onLongPressDown], a similar callback but for a primary button.
/// * [onSecondaryLongPressDown], a similar callback but for a secondary button.
/// * [LongPressGestureRecognizer.onTertiaryLongPressDown], which exposes
/// this callback at the gesture layer.
final GestureLongPressDownCallback? onTertiaryLongPressDown;
/// A pointer that previously triggered [onTertiaryLongPressDown] will not
/// end up causing a long-press.
///
/// This triggers once the gesture loses if [onTertiaryLongPressDown] has
/// previously been triggered.
///
/// If the user completed the long-press, and the gesture won, then
/// [onTertiaryLongPressStart] and [onTertiaryLongPress] are called instead.
///
/// See also:
///
/// * [kTertiaryButton], the button this callback responds to.
/// * [LongPressGestureRecognizer.onTertiaryLongPressCancel], which exposes
/// this callback at the gesture layer.
final GestureLongPressCancelCallback? onTertiaryLongPressCancel;
/// Called when a long press gesture with a tertiary button has been
/// recognized.
///
/// Triggered when a pointer has remained in contact with the screen at the
/// same location for a long period of time.
///
/// This is equivalent to (and is called immediately after)
/// [onTertiaryLongPressStart]. The only difference between the two is that
/// this callback does not contain details of the position at which the
/// pointer initially contacted the screen.
///
/// See also:
///
/// * [kTertiaryButton], the button this callback responds to.
/// * [LongPressGestureRecognizer.onTertiaryLongPress], which exposes
/// this callback at the gesture layer.
final GestureLongPressCallback? onTertiaryLongPress;
/// Called when a long press gesture with a tertiary button has been
/// recognized.
///
/// Triggered when a pointer has remained in contact with the screen at the
/// same location for a long period of time.
///
/// This is equivalent to (and is called immediately before)
/// [onTertiaryLongPress]. The only difference between the two is that this
/// callback contains details of the position at which the pointer initially
/// contacted the screen, whereas [onTertiaryLongPress] does not.
///
/// See also:
///
/// * [kTertiaryButton], the button this callback responds to.
/// * [LongPressGestureRecognizer.onTertiaryLongPressStart], which exposes
/// this callback at the gesture layer.
final GestureLongPressStartCallback? onTertiaryLongPressStart;
/// A pointer has been drag-moved after a long press with a tertiary button.
///
/// See also:
///
/// * [kTertiaryButton], the button this callback responds to.
/// * [LongPressGestureRecognizer.onTertiaryLongPressMoveUpdate], which exposes
/// this callback at the gesture layer.
final GestureLongPressMoveUpdateCallback? onTertiaryLongPressMoveUpdate;
/// A pointer that has triggered a long-press with a tertiary button has
/// stopped contacting the screen.
///
/// This is equivalent to (and is called immediately after)
/// [onTertiaryLongPressEnd]. The only difference between the two is that
/// this callback does not contain details of the state of the pointer when
/// it stopped contacting the screen.
///
/// See also:
///
/// * [kTertiaryButton], the button this callback responds to.
/// * [LongPressGestureRecognizer.onTertiaryLongPressUp], which exposes
/// this callback at the gesture layer.
final GestureLongPressUpCallback? onTertiaryLongPressUp;
/// A pointer that has triggered a long-press with a tertiary button has
/// stopped contacting the screen.
///
/// This is equivalent to (and is called immediately before)
/// [onTertiaryLongPressUp]. The only difference between the two is that
/// this callback contains details of the state of the pointer when it
/// stopped contacting the screen, whereas [onTertiaryLongPressUp] does not.
///
/// See also:
///
/// * [kTertiaryButton], the button this callback responds to.
/// * [LongPressGestureRecognizer.onTertiaryLongPressEnd], which exposes
/// this callback at the gesture layer.
final GestureLongPressEndCallback? onTertiaryLongPressEnd;
/// A pointer has contacted the screen with a primary button and might begin
/// to move vertically.
///
/// See also:
///
/// * [kPrimaryButton], the button this callback responds to.
final GestureDragDownCallback? onVerticalDragDown;
/// A pointer has contacted the screen with a primary button and has begun to
/// move vertically.
///
/// See also:
///
/// * [kPrimaryButton], the button this callback responds to.
final GestureDragStartCallback? onVerticalDragStart;
/// A pointer that is in contact with the screen with a primary button and
/// moving vertically has moved in the vertical direction.
///
/// See also:
///
/// * [kPrimaryButton], the button this callback responds to.
final GestureDragUpdateCallback? onVerticalDragUpdate;
/// A pointer that was previously in contact with the screen with a primary
/// button and moving vertically is no longer in contact with the screen and
/// was moving at a specific velocity when it stopped contacting the screen.
///
/// See also:
///
/// * [kPrimaryButton], the button this callback responds to.
final GestureDragEndCallback? onVerticalDragEnd;
/// The pointer that previously triggered [onVerticalDragDown] did not
/// complete.
///
/// See also:
///
/// * [kPrimaryButton], the button this callback responds to.
final GestureDragCancelCallback? onVerticalDragCancel;
/// A pointer has contacted the screen with a primary button and might begin
/// to move horizontally.
///
/// See also:
///
/// * [kPrimaryButton], the button this callback responds to.
final GestureDragDownCallback? onHorizontalDragDown;
/// A pointer has contacted the screen with a primary button and has begun to
/// move horizontally.
///
/// See also:
///
/// * [kPrimaryButton], the button this callback responds to.
final GestureDragStartCallback? onHorizontalDragStart;
/// A pointer that is in contact with the screen with a primary button and
/// moving horizontally has moved in the horizontal direction.
///
/// See also:
///
/// * [kPrimaryButton], the button this callback responds to.
final GestureDragUpdateCallback? onHorizontalDragUpdate;
/// A pointer that was previously in contact with the screen with a primary
/// button and moving horizontally is no longer in contact with the screen and
/// was moving at a specific velocity when it stopped contacting the screen.
///
/// See also:
///
/// * [kPrimaryButton], the button this callback responds to.
final GestureDragEndCallback? onHorizontalDragEnd;
/// The pointer that previously triggered [onHorizontalDragDown] did not
/// complete.
///
/// See also:
///
/// * [kPrimaryButton], the button this callback responds to.
final GestureDragCancelCallback? onHorizontalDragCancel;
/// A pointer has contacted the screen with a primary button and might begin
/// to move.
///
/// See also:
///
/// * [kPrimaryButton], the button this callback responds to.
final GestureDragDownCallback? onPanDown;
/// A pointer has contacted the screen with a primary button and has begun to
/// move.
///
/// See also:
///
/// * [kPrimaryButton], the button this callback responds to.
final GestureDragStartCallback? onPanStart;
/// A pointer that is in contact with the screen with a primary button and
/// moving has moved again.
///
/// See also:
///
/// * [kPrimaryButton], the button this callback responds to.
final GestureDragUpdateCallback? onPanUpdate;
/// A pointer that was previously in contact with the screen with a primary
/// button and moving is no longer in contact with the screen and was moving
/// at a specific velocity when it stopped contacting the screen.
///
/// See also:
///
/// * [kPrimaryButton], the button this callback responds to.
final GestureDragEndCallback? onPanEnd;
/// The pointer that previously triggered [onPanDown] did not complete.
///
/// See also:
///
/// * [kPrimaryButton], the button this callback responds to.
final GestureDragCancelCallback? onPanCancel;
/// The pointers in contact with the screen have established a focal point and
/// initial scale of 1.0.
final GestureScaleStartCallback? onScaleStart;
/// The pointers in contact with the screen have indicated a new focal point
/// and/or scale.
final GestureScaleUpdateCallback? onScaleUpdate;
/// The pointers are no longer in contact with the screen.
final GestureScaleEndCallback? onScaleEnd;
/// The pointer is in contact with the screen and has pressed with sufficient
/// force to initiate a force press. The amount of force is at least
/// [ForcePressGestureRecognizer.startPressure].
///
/// This callback will only be fired on devices with pressure
/// detecting screens.
final GestureForcePressStartCallback? onForcePressStart;
/// The pointer is in contact with the screen and has pressed with the maximum
/// force. The amount of force is at least
/// [ForcePressGestureRecognizer.peakPressure].
///
/// This callback will only be fired on devices with pressure
/// detecting screens.
final GestureForcePressPeakCallback? onForcePressPeak;
/// A pointer is in contact with the screen, has previously passed the
/// [ForcePressGestureRecognizer.startPressure] and is either moving on the
/// plane of the screen, pressing the screen with varying forces or both
/// simultaneously.
///
/// This callback will only be fired on devices with pressure
/// detecting screens.
final GestureForcePressUpdateCallback? onForcePressUpdate;

/// The pointer tracked by [onForcePressStart] is no longer in contact with the screen.
///
/// This callback will only be fired on devices with pressure
/// detecting screens.
final GestureForcePressEndCallback? onForcePressEnd;

/// How this gesture detector should behave during hit testing when deciding
/// how the hit test propagates to children and whether to consider targets
/// behind this one.
///
/// This defaults to [HitTestBehavior.deferToChild] if [child] is not null and
/// [HitTestBehavior.translucent] if child is null.
///
/// See [HitTestBehavior] for the allowed values and their meanings.
final HitTestBehavior? behavior;

/// Whether to exclude these gestures from the semantics tree. For
/// example, the long-press gesture for showing a tooltip is
/// excluded because the tooltip itself is included in the semantics
/// tree directly and so having a gesture to show it would result in
/// duplication of information.
final bool excludeFromSemantics;

/// Determines the way that drag start behavior is handled.
///
/// If set to [DragStartBehavior.start], gesture drag behavior will
/// begin at the position where the drag gesture won the arena. If set to
/// [DragStartBehavior.down] it will begin at the position where a down event
/// is first detected.
///
/// In general, setting this to [DragStartBehavior.start] will make drag
/// animation smoother and setting it to [DragStartBehavior.down] will make
/// drag behavior feel slightly more reactive.
///
/// By default, the drag start behavior is [DragStartBehavior.start].
///
/// Only the [DragGestureRecognizer.onStart] callbacks for the
/// [VerticalDragGestureRecognizer], [HorizontalDragGestureRecognizer] and
/// [PanGestureRecognizer] are affected by this setting.
///
/// See also:
///
///  * [DragGestureRecognizer.dragStartBehavior], which gives an example for the different behaviors.
final DragStartBehavior dragStartBehavior;

/// The kind of devices that are allowed to be recognized.
///
/// If set to null, events from all device types will be recognized. Defaults to null.
final Set<PointerDeviceKind>? supportedDevices;

/// {@macro flutter.gestures.scale.trackpadScrollCausesScale}
final bool trackpadScrollCausesScale;

/// {@macro flutter.gestures.scale.trackpadScrollToScaleFactor}
final Offset trackpadScrollToScaleFactor;

/// The delay before a press is recognized as a long press.
///
/// Passed as [LongPressGestureRecognizer.duration] when building the
/// long-press recognizer, so callers can customize the long-press delay
/// instead of relying on the framework default.
final Duration longPressTimeout;
@override
Widget build(BuildContext context) {
  // Recognizer factories are registered lazily: a recognizer type is only
  // added when at least one of its callbacks is non-null, so unused gesture
  // kinds are never created and never compete in the gesture arena.
  final Map<Type, GestureRecognizerFactory> gestures = <Type, GestureRecognizerFactory>{};
  final DeviceGestureSettings? gestureSettings = MediaQuery.maybeGestureSettingsOf(context);
  final ScrollBehavior configuration = ScrollConfiguration.of(context);
  // Taps: primary, secondary and tertiary pointer buttons.
  if (onTapDown != null || onTapUp != null || onTap != null || onTapCancel != null || onSecondaryTap != null || onSecondaryTapDown != null || onSecondaryTapUp != null || onSecondaryTapCancel != null || onTertiaryTapDown != null || onTertiaryTapUp != null || onTertiaryTapCancel != null) {
    gestures[TapGestureRecognizer] = GestureRecognizerFactoryWithHandlers<TapGestureRecognizer>(
      () => TapGestureRecognizer(debugOwner: this, supportedDevices: supportedDevices),
      (instance) {
        instance
          ..onTapDown = onTapDown
          ..onTapUp = onTapUp
          ..onTap = onTap
          ..onTapCancel = onTapCancel
          ..onSecondaryTap = onSecondaryTap
          ..onSecondaryTapDown = onSecondaryTapDown
          ..onSecondaryTapUp = onSecondaryTapUp
          ..onSecondaryTapCancel = onSecondaryTapCancel
          ..onTertiaryTapDown = onTertiaryTapDown
          ..onTertiaryTapUp = onTertiaryTapUp
          ..onTertiaryTapCancel = onTertiaryTapCancel
          ..gestureSettings = gestureSettings
          ..supportedDevices = supportedDevices;
      },
    );
  }
  // Double taps.
  if (onDoubleTap != null || onDoubleTapDown != null || onDoubleTapCancel != null) {
    gestures[DoubleTapGestureRecognizer] = GestureRecognizerFactoryWithHandlers<DoubleTapGestureRecognizer>(
      () => DoubleTapGestureRecognizer(debugOwner: this, supportedDevices: supportedDevices),
      (instance) {
        instance
          ..onDoubleTapDown = onDoubleTapDown
          ..onDoubleTap = onDoubleTap
          ..onDoubleTapCancel = onDoubleTapCancel
          ..gestureSettings = gestureSettings
          ..supportedDevices = supportedDevices;
      },
    );
  }
  // Long presses: primary, secondary and tertiary pointer buttons.
  // Unlike a stock recognizer, the long-press delay here comes from the
  // [longPressTimeout] field rather than the framework default.
  if (onLongPressDown != null || onLongPressCancel != null || onLongPress != null || onLongPressStart != null || onLongPressMoveUpdate != null || onLongPressUp != null || onLongPressEnd != null || onSecondaryLongPressDown != null || onSecondaryLongPressCancel != null || onSecondaryLongPress != null || onSecondaryLongPressStart != null || onSecondaryLongPressMoveUpdate != null || onSecondaryLongPressUp != null || onSecondaryLongPressEnd != null || onTertiaryLongPressDown != null || onTertiaryLongPressCancel != null || onTertiaryLongPress != null || onTertiaryLongPressStart != null || onTertiaryLongPressMoveUpdate != null || onTertiaryLongPressUp != null || onTertiaryLongPressEnd != null) {
    gestures[LongPressGestureRecognizer] = GestureRecognizerFactoryWithHandlers<LongPressGestureRecognizer>(
      () => LongPressGestureRecognizer(duration: longPressTimeout, debugOwner: this, supportedDevices: supportedDevices),
      (instance) {
        instance
          ..onLongPressDown = onLongPressDown
          ..onLongPressCancel = onLongPressCancel
          ..onLongPress = onLongPress
          ..onLongPressStart = onLongPressStart
          ..onLongPressMoveUpdate = onLongPressMoveUpdate
          ..onLongPressUp = onLongPressUp
          ..onLongPressEnd = onLongPressEnd
          ..onSecondaryLongPressDown = onSecondaryLongPressDown
          ..onSecondaryLongPressCancel = onSecondaryLongPressCancel
          ..onSecondaryLongPress = onSecondaryLongPress
          ..onSecondaryLongPressStart = onSecondaryLongPressStart
          ..onSecondaryLongPressMoveUpdate = onSecondaryLongPressMoveUpdate
          ..onSecondaryLongPressUp = onSecondaryLongPressUp
          ..onSecondaryLongPressEnd = onSecondaryLongPressEnd
          ..onTertiaryLongPressDown = onTertiaryLongPressDown
          ..onTertiaryLongPressCancel = onTertiaryLongPressCancel
          ..onTertiaryLongPress = onTertiaryLongPress
          ..onTertiaryLongPressStart = onTertiaryLongPressStart
          ..onTertiaryLongPressMoveUpdate = onTertiaryLongPressMoveUpdate
          ..onTertiaryLongPressUp = onTertiaryLongPressUp
          ..onTertiaryLongPressEnd = onTertiaryLongPressEnd
          ..gestureSettings = gestureSettings
          ..supportedDevices = supportedDevices;
      },
    );
  }
  // Vertical drags. The multitouch strategy is resolved from the ambient
  // [ScrollConfiguration] so drags behave like the platform's scrollables.
  if (onVerticalDragDown != null || onVerticalDragStart != null || onVerticalDragUpdate != null || onVerticalDragEnd != null || onVerticalDragCancel != null) {
    gestures[VerticalDragGestureRecognizer] = GestureRecognizerFactoryWithHandlers<VerticalDragGestureRecognizer>(
      () => VerticalDragGestureRecognizer(debugOwner: this, supportedDevices: supportedDevices),
      (instance) {
        instance
          ..onDown = onVerticalDragDown
          ..onStart = onVerticalDragStart
          ..onUpdate = onVerticalDragUpdate
          ..onEnd = onVerticalDragEnd
          ..onCancel = onVerticalDragCancel
          ..dragStartBehavior = dragStartBehavior
          ..multitouchDragStrategy = configuration.getMultitouchDragStrategy(context)
          ..gestureSettings = gestureSettings
          ..supportedDevices = supportedDevices;
      },
    );
  }
  // Horizontal drags.
  if (onHorizontalDragDown != null || onHorizontalDragStart != null || onHorizontalDragUpdate != null || onHorizontalDragEnd != null || onHorizontalDragCancel != null) {
    gestures[HorizontalDragGestureRecognizer] = GestureRecognizerFactoryWithHandlers<HorizontalDragGestureRecognizer>(
      () => HorizontalDragGestureRecognizer(debugOwner: this, supportedDevices: supportedDevices),
      (instance) {
        instance
          ..onDown = onHorizontalDragDown
          ..onStart = onHorizontalDragStart
          ..onUpdate = onHorizontalDragUpdate
          ..onEnd = onHorizontalDragEnd
          ..onCancel = onHorizontalDragCancel
          ..dragStartBehavior = dragStartBehavior
          ..multitouchDragStrategy = configuration.getMultitouchDragStrategy(context)
          ..gestureSettings = gestureSettings
          ..supportedDevices = supportedDevices;
      },
    );
  }
  // Free (omnidirectional) pan drags.
  if (onPanDown != null || onPanStart != null || onPanUpdate != null || onPanEnd != null || onPanCancel != null) {
    gestures[PanGestureRecognizer] = GestureRecognizerFactoryWithHandlers<PanGestureRecognizer>(
      () => PanGestureRecognizer(debugOwner: this, supportedDevices: supportedDevices),
      (instance) {
        instance
          ..onDown = onPanDown
          ..onStart = onPanStart
          ..onUpdate = onPanUpdate
          ..onEnd = onPanEnd
          ..onCancel = onPanCancel
          ..dragStartBehavior = dragStartBehavior
          ..multitouchDragStrategy = configuration.getMultitouchDragStrategy(context)
          ..gestureSettings = gestureSettings
          ..supportedDevices = supportedDevices;
      },
    );
  }
  // Scale (pinch) gestures, with optional trackpad-scroll-to-scale mapping.
  if (onScaleStart != null || onScaleUpdate != null || onScaleEnd != null) {
    gestures[ScaleGestureRecognizer] = GestureRecognizerFactoryWithHandlers<ScaleGestureRecognizer>(
      () => ScaleGestureRecognizer(debugOwner: this, supportedDevices: supportedDevices),
      (instance) {
        instance
          ..onStart = onScaleStart
          ..onUpdate = onScaleUpdate
          ..onEnd = onScaleEnd
          ..dragStartBehavior = dragStartBehavior
          ..gestureSettings = gestureSettings
          ..trackpadScrollCausesScale = trackpadScrollCausesScale
          ..trackpadScrollToScaleFactor = trackpadScrollToScaleFactor
          ..supportedDevices = supportedDevices;
      },
    );
  }
  // Force presses (pressure-sensitive screens only).
  if (onForcePressStart != null || onForcePressPeak != null || onForcePressUpdate != null || onForcePressEnd != null) {
    gestures[ForcePressGestureRecognizer] = GestureRecognizerFactoryWithHandlers<ForcePressGestureRecognizer>(
      () => ForcePressGestureRecognizer(debugOwner: this, supportedDevices: supportedDevices),
      (instance) {
        instance
          ..onStart = onForcePressStart
          ..onPeak = onForcePressPeak
          ..onUpdate = onForcePressUpdate
          ..onEnd = onForcePressEnd
          ..gestureSettings = gestureSettings
          ..supportedDevices = supportedDevices;
      },
    );
  }
  // Delegate the actual event plumbing to RawGestureDetector with the
  // factories assembled above.
  return RawGestureDetector(
    gestures: gestures,
    behavior: behavior,
    excludeFromSemantics: excludeFromSemantics,
    child: child,
  );
}
@override
void debugFillProperties(DiagnosticPropertiesBuilder properties) {
  super.debugFillProperties(properties);
  // Surface the configured drag start behavior in the diagnostics tree
  // (widget inspector / debugDumpApp output).
  final EnumProperty<DragStartBehavior> startBehaviorProperty =
      EnumProperty<DragStartBehavior>('startBehavior', dragStartBehavior);
  properties.add(startBehaviorProperty);
}
}

File diff suppressed because it is too large Load diff

View file

@ -4,6 +4,7 @@ import 'package:aves/model/source/section_keys.dart';
import 'package:aves/theme/durations.dart';
import 'package:aves/theme/icons.dart';
import 'package:aves/theme/styles.dart';
import 'package:aves/widgets/common/basic/gestures/gesture_detector.dart';
import 'package:aves/widgets/common/extensions/build_context.dart';
import 'package:aves/widgets/common/grid/sections/list_layout.dart';
import 'package:flutter/material.dart';
@ -42,7 +43,7 @@ class SectionHeader<T> extends StatelessWidget {
Widget child = Container(
padding: padding,
constraints: BoxConstraints(minHeight: leadingSize.height),
child: GestureDetector(
child: AGestureDetector(
onTap: onTap,
onLongPress: selectable
? Feedback.wrapForLongPress(() {
@ -55,6 +56,7 @@ class SectionHeader<T> extends StatelessWidget {
}
}, context)
: null,
longPressTimeout: settings.longPressTimeout,
child: Text.rich(
TextSpan(
children: [

View file

@ -2,11 +2,12 @@ import 'dart:async';
import 'dart:math';
import 'package:aves/model/selection.dart';
import 'package:aves/model/settings/settings.dart';
import 'package:aves/utils/math_utils.dart';
import 'package:aves/widgets/common/basic/gestures/gesture_detector.dart';
import 'package:aves/widgets/common/extensions/build_context.dart';
import 'package:aves/widgets/common/extensions/media_query.dart';
import 'package:aves/widgets/common/grid/sections/list_layout.dart';
import 'package:flutter/gestures.dart';
import 'package:flutter/material.dart';
import 'package:provider/provider.dart';
@ -90,7 +91,7 @@ class _GridSelectionGestureDetectorState<T> extends State<GridSelectionGestureDe
@override
Widget build(BuildContext context) {
final selectable = widget.selectable;
return GestureDetector(
return AGestureDetector(
onLongPressStart: selectable
? (details) {
if (_isScrolling) return;
@ -137,6 +138,7 @@ class _GridSelectionGestureDetectorState<T> extends State<GridSelectionGestureDe
selection.toggleSelection(item);
}
: null,
longPressTimeout: settings.longPressTimeout,
child: widget.child,
);
}
@ -144,7 +146,7 @@ class _GridSelectionGestureDetectorState<T> extends State<GridSelectionGestureDe
void _onScrollChanged() {
_isScrolling = true;
_stopScrollMonitoringTimer();
_scrollMonitoringTimer = Timer(kLongPressTimeout + const Duration(milliseconds: 150), () {
_scrollMonitoringTimer = Timer(settings.longPressTimeout + const Duration(milliseconds: 150), () {
_isScrolling = false;
});
}

View file

@ -5,6 +5,7 @@ import 'package:aves/theme/durations.dart';
import 'package:aves/theme/themes.dart';
import 'package:aves/widgets/aves_app.dart';
import 'package:aves/widgets/common/basic/font_size_icon_theme.dart';
import 'package:aves/widgets/common/basic/gestures/ink_well.dart';
import 'package:aves/widgets/common/basic/insets.dart';
import 'package:aves/widgets/common/fx/blurred.dart';
import 'package:flutter/material.dart';
@ -95,10 +96,13 @@ class AvesAppBar extends StatelessWidget {
child: AvesFloatingBar(
builder: (context, backgroundColor, child) => Material(
color: backgroundColor,
child: InkWell(
child: AInkResponse(
// absorb taps while providing visual feedback
onTap: () {},
onLongPress: () {},
containedInkWell: true,
highlightShape: BoxShape.rectangle,
longPressTimeout: settings.longPressTimeout,
child: child,
),
),

View file

@ -13,6 +13,7 @@ import 'package:aves/theme/themes.dart';
import 'package:aves/view/view.dart';
import 'package:aves/widgets/collection/filter_bar.dart';
import 'package:aves/widgets/common/basic/font_size_icon_theme.dart';
import 'package:aves/widgets/common/basic/gestures/ink_well.dart';
import 'package:aves/widgets/common/basic/popup/menu_row.dart';
import 'package:aves/widgets/common/extensions/build_context.dart';
import 'package:aves/widgets/common/providers/media_query_data_provider.dart';
@ -347,13 +348,16 @@ class _AvesFilterChipState extends State<AvesFilterChip> {
shape: RoundedRectangleBorder(
borderRadius: borderRadius,
),
child: InkWell(
child: AInkResponse(
// as of Flutter v2.8.0, `InkWell` does not have `onLongPressStart` like `GestureDetector`,
// so we get the long press details from the tap instead
onTapDown: onLongPress != null ? (details) => _tapPosition = details.globalPosition : null,
onTap: onTap,
onLongPress: onLongPress,
containedInkWell: true,
highlightShape: BoxShape.rectangle,
borderRadius: borderRadius,
longPressTimeout: settings.longPressTimeout,
child: FutureBuilder<Color>(
future: _colorFuture,
builder: (context, snapshot) {

View file

@ -1,13 +1,16 @@
import 'dart:async';
import 'dart:math';
import 'package:aves/model/settings/settings.dart';
import 'package:aves/theme/durations.dart';
import 'package:aves/utils/debouncer.dart';
import 'package:aves/widgets/common/basic/gestures/gesture_detector.dart';
import 'package:aves/widgets/common/map/leaflet/latlng_tween.dart' as llt;
import 'package:aves/widgets/common/map/leaflet/scale_layer.dart';
import 'package:aves/widgets/common/map/leaflet/tile_layers.dart';
import 'package:aves_map/aves_map.dart';
import 'package:aves_utils/aves_utils.dart';
import 'package:flutter/gestures.dart';
import 'package:flutter/material.dart';
import 'package:flutter_map/flutter_map.dart';
import 'package:latlong2/latlong.dart';
@ -130,14 +133,19 @@ class _EntryLeafletMapState<T> extends State<EntryLeafletMap<T>> with TickerProv
final markerKey = kv.key;
final geoEntry = kv.value;
final latLng = LatLng(geoEntry.latitude!, geoEntry.longitude!);
final onMarkerLongPress = widget.onMarkerLongPress;
final onLongPress = onMarkerLongPress != null ? Feedback.wrapForLongPress(() => onMarkerLongPress.call(geoEntry, LatLng(geoEntry.latitude!, geoEntry.longitude!)), context) : null;
return Marker(
point: latLng,
child: GestureDetector(
child: AGestureDetector(
onTap: () => widget.onMarkerTap?.call(geoEntry),
// marker tap handling prevents the default handling of focal zoom on double tap,
// so we reimplement the double tap gesture here
onDoubleTap: interactive ? () => _zoomBy(1, focalPoint: latLng) : null,
onLongPress: Feedback.wrapForLongPress(() => widget.onMarkerLongPress?.call(geoEntry, LatLng(geoEntry.latitude!, geoEntry.longitude!)), context),
onLongPress: onLongPress,
// `MapInteractiveViewer` already declares a `LongPressGestureRecognizer` with the default delay (`kLongPressTimeout`),
// so this one should have a shorter delay to win in the gesture arena
longPressTimeout: Duration(milliseconds: min(settings.longPressTimeout.inMilliseconds, kLongPressTimeout.inMilliseconds)),
child: widget.markerWidgetBuilder(markerKey),
),
width: markerSize.width,

View file

@ -76,6 +76,7 @@ class _DebugSettingsSectionState extends State<DebugSettingsSection> with Automa
'locale': '${settings.locale}',
'systemLocales': '${WidgetsBinding.instance.platformDispatcher.locales}',
'topEntryIds': '${settings.topEntryIds}',
'longPressTimeout': '${settings.longPressTimeout}',
},
),
),

View file

@ -1,3 +1,4 @@
import 'package:aves/model/settings/settings.dart';
import 'package:aves/widgets/common/identity/buttons/captioned_button.dart';
import 'package:aves/widgets/common/identity/buttons/overlay_button.dart';
import 'package:aves/widgets/common/providers/media_query_data_provider.dart';
@ -98,6 +99,7 @@ class AvailableActionPanel<T extends Object> extends StatelessWidget {
maxSimultaneousDrags: 1,
onDragStarted: () => _setDraggedAvailableAction(action),
onDragEnd: (details) => _setDraggedAvailableAction(null),
delay: settings.longPressTimeout,
childWhenDragging: child,
child: child,
);

View file

@ -1,3 +1,4 @@
import 'package:aves/model/settings/settings.dart';
import 'package:aves/widgets/common/identity/buttons/overlay_button.dart';
import 'package:aves/widgets/common/providers/media_query_data_provider.dart';
import 'package:flutter/widgets.dart';
@ -72,6 +73,7 @@ class QuickActionButton<T extends Object> extends StatelessWidget {
// so we rely on `onDraggableCanceled` and `onDragCompleted` instead
onDraggableCanceled: (velocity, offset) => _setDraggedQuickAction(null),
onDragCompleted: () => _setDraggedQuickAction(null),
delay: settings.longPressTimeout,
childWhenDragging: child,
child: child,
);

View file

@ -78,7 +78,8 @@ class _EntryViewerStackState extends State<EntryViewerStack> with EntryViewContr
late VideoActionDelegate _videoActionDelegate;
final ValueNotifier<EntryHeroInfo?> _heroInfoNotifier = ValueNotifier(null);
bool _isEntryTracked = true;
Timer? _overlayHidingTimer, _appInactiveReactionTimer;
Timer? _overlayHidingTimer;
late ValueNotifier<AvesVideoController?> _playingVideoControllerNotifier;
@override
bool get isViewingImage => _currentVerticalPage.value == imagePage;
@ -168,6 +169,8 @@ class _EntryViewerStackState extends State<EntryViewerStack> with EntryViewContr
_videoActionDelegate = VideoActionDelegate(
collection: collection,
);
_playingVideoControllerNotifier = context.read<VideoConductor>().playingVideoControllerNotifier;
_playingVideoControllerNotifier.addListener(_onPlayingVideoControllerChanged);
initEntryControllers(entry);
_registerWidget(widget);
AvesApp.lifecycleStateNotifier.addListener(_onAppLifecycleStateChanged);
@ -185,6 +188,8 @@ class _EntryViewerStackState extends State<EntryViewerStack> with EntryViewContr
void dispose() {
AvesApp.pageRouteObserver.unsubscribe(this);
cleanEntryControllers(entryNotifier.value);
_playingVideoControllerNotifier.removeListener(_onPlayingVideoControllerChanged);
updatePictureInPicture(context);
_videoActionDelegate.dispose();
_verticalPageAnimationController.dispose();
_overlayButtonScale.dispose();
@ -201,7 +206,6 @@ class _EntryViewerStackState extends State<EntryViewerStack> with EntryViewContr
_verticalScrollNotifier.dispose();
_heroInfoNotifier.dispose();
_stopOverlayHidingTimer();
_stopAppInactiveTimer();
AvesApp.lifecycleStateNotifier.removeListener(_onAppLifecycleStateChanged);
_unregisterWidget(widget);
super.dispose();
@ -253,7 +257,7 @@ class _EntryViewerStackState extends State<EntryViewerStack> with EntryViewContr
// so we do not access status stream directly, but check for support first
stream: device.supportPictureInPicture ? Floating().pipStatusStream : Stream.value(PiPStatus.disabled),
builder: (context, snapshot) {
var pipEnabled = snapshot.data == PiPStatus.enabled;
final pipEnabled = snapshot.data == PiPStatus.enabled;
return ValueListenableBuilder<bool>(
valueListenable: _viewLocked,
builder: (context, locked, child) {
@ -328,47 +332,31 @@ class _EntryViewerStackState extends State<EntryViewerStack> with EntryViewContr
// lifecycle
// app lifecycle states:
// * rotating screen: resumed -> inactive -> resumed
// * going home: resumed -> inactive -> hidden -> paused
// * back from home: paused -> hidden -> inactive -> resumed
// * app switch / settings / etc: resumed -> inactive
void _onAppLifecycleStateChanged() {
switch (AvesApp.lifecycleStateNotifier.value) {
case AppLifecycleState.inactive:
// inactive: when losing focus
// also triggered when app is rotated on Android API >=33
_startAppInactiveTimer();
break;
case AppLifecycleState.hidden:
case AppLifecycleState.paused:
case AppLifecycleState.detached:
// paused: when switching to another app
// hidden: transient state between `inactive` and `paused`
// paused: when using another app
// detached: when app is without a view
viewerController.autopilot = false;
_stopAppInactiveTimer();
pauseVideoControllers();
case AppLifecycleState.resumed:
_stopAppInactiveTimer();
case AppLifecycleState.hidden:
// hidden: transient state between `inactive` and `paused`
break;
}
}
Future<void> _onAppInactive(AvesVideoController? playingController) async {
bool enabledPip = false;
if (settings.videoBackgroundMode == VideoBackgroundMode.pip) {
enabledPip |= await _enablePictureInPicture(playingController);
}
if (enabledPip) {
// ensure playback, in case lifecycle paused/resumed events happened when switching to PiP
await playingController?.play();
} else {
await pauseVideoControllers();
}
}
void _startAppInactiveTimer() {
_stopAppInactiveTimer();
final playingController = context.read<VideoConductor>().getPlayingController();
_appInactiveReactionTimer = Timer(ADurations.appInactiveReactionDelay, () => _onAppInactive(playingController));
}
void _stopAppInactiveTimer() => _appInactiveReactionTimer?.cancel();
void _onPlayingVideoControllerChanged() => updatePictureInPicture(context);
Widget _decorateOverlay(Widget overlay) {
return ValueListenableBuilder<double>(
@ -939,36 +927,6 @@ class _EntryViewerStackState extends State<EntryViewerStack> with EntryViewContr
await Future.delayed(const Duration(milliseconds: 50));
}
Future<bool> _enablePictureInPicture(AvesVideoController? playingController) async {
if (playingController != null) {
final entrySize = playingController.entry.displaySize;
final aspectRatio = Rational(entrySize.width.round(), entrySize.height.round());
final viewSize = MediaQuery.sizeOf(context) * MediaQuery.devicePixelRatioOf(context);
final fittedSize = applyBoxFit(BoxFit.contain, entrySize, viewSize).destination;
final sourceRectHint = Rectangle<int>(
((viewSize.width - fittedSize.width) / 2).round(),
((viewSize.height - fittedSize.height) / 2).round(),
fittedSize.width.round(),
fittedSize.height.round(),
);
try {
final status = await Floating().enable(ImmediatePiP(
aspectRatio: aspectRatio,
sourceRectHint: sourceRectHint,
));
await reportService.log('Enabled picture-in-picture with status=$status');
return status == PiPStatus.enabled;
} on PlatformException catch (e, stack) {
if (e.message != 'Activity must be resumed to enter picture-in-picture') {
await reportService.recordError(e, stack);
}
}
}
return false;
}
// overlay
Future<void> _initOverlay() async {

View file

@ -16,7 +16,9 @@ class VideoConductor {
final CollectionLens? _collection;
final List<AvesVideoController> _controllers = [];
final List<StreamSubscription> _subscriptions = [];
final PlaybackStateHandler playbackStateHandler = DatabasePlaybackStateHandler();
final PlaybackStateHandler _playbackStateHandler = DatabasePlaybackStateHandler();
final ValueNotifier<AvesVideoController?> playingVideoControllerNotifier = ValueNotifier(null);
static const _defaultMaxControllerCount = 3;
@ -38,6 +40,7 @@ class VideoConductor {
..forEach((sub) => sub.cancel())
..clear();
await _disposeAll();
playingVideoControllerNotifier.dispose();
_controllers.clear();
if (settings.keepScreenOn == KeepScreenOn.videoPlayback) {
await windowService.keepScreenOn(false);
@ -51,7 +54,7 @@ class VideoConductor {
} else {
controller = videoControllerFactory.buildController(
entry,
playbackStateHandler: playbackStateHandler,
playbackStateHandler: _playbackStateHandler,
settings: settings,
);
_subscriptions.add(controller.statusStream.listen((event) => _onControllerStatusChanged(entry, controller!, event)));
@ -90,6 +93,8 @@ class VideoConductor {
if (settings.keepScreenOn == KeepScreenOn.videoPlayback) {
await windowService.keepScreenOn(status == VideoStatus.playing);
}
playingVideoControllerNotifier.value = getPlayingController();
}
Future<void> _applyToAll(FutureOr Function(AvesVideoController controller) action) => Future.forEach<AvesVideoController>(_controllers, action);

View file

@ -1,8 +1,12 @@
import 'dart:async';
import 'dart:math';
import 'package:aves/app_mode.dart';
import 'package:aves/model/entry/entry.dart';
import 'package:aves/model/entry/extensions/multipage.dart';
import 'package:aves/model/entry/extensions/props.dart';
import 'package:aves/model/settings/settings.dart';
import 'package:aves/services/common/services.dart';
import 'package:aves/theme/durations.dart';
import 'package:aves/widgets/viewer/multipage/conductor.dart';
import 'package:aves/widgets/viewer/multipage/controller.dart';
@ -10,8 +14,10 @@ import 'package:aves/widgets/viewer/video/conductor.dart';
import 'package:aves_model/aves_model.dart';
import 'package:aves_video/aves_video.dart';
import 'package:collection/collection.dart';
import 'package:floating/floating.dart';
import 'package:flutter/material.dart';
import 'package:flutter/scheduler.dart';
import 'package:flutter/services.dart';
import 'package:provider/provider.dart';
// state controllers/monitors
@ -216,4 +222,50 @@ mixin EntryViewControllerMixin<T extends StatefulWidget> on State<T> {
}
Future<void> pauseVideoControllers() => context.read<VideoConductor>().pauseAll();
static const _pipRatioMax = Rational(43, 18);
static const _pipRatioMin = Rational(18, 43);
Future<void> updatePictureInPicture(BuildContext context) async {
if (context.mounted) {
if (settings.videoBackgroundMode == VideoBackgroundMode.pip) {
final playingController = context.read<VideoConductor>().getPlayingController();
if (playingController != null) {
final entrySize = playingController.entry.displaySize;
final entryAspectRatio = entrySize.aspectRatio;
final Rational pipAspectRatio;
if (entryAspectRatio > _pipRatioMax.aspectRatio) {
pipAspectRatio = _pipRatioMax;
} else if (entryAspectRatio < _pipRatioMin.aspectRatio) {
pipAspectRatio = _pipRatioMin;
} else {
pipAspectRatio = Rational(entrySize.width.round(), entrySize.height.round());
}
final viewSize = MediaQuery.sizeOf(context) * MediaQuery.devicePixelRatioOf(context);
final fittedSize = applyBoxFit(BoxFit.contain, entrySize, viewSize).destination;
final sourceRectHint = Rectangle<int>(
((viewSize.width - fittedSize.width) / 2).round(),
((viewSize.height - fittedSize.height) / 2).round(),
fittedSize.width.round(),
fittedSize.height.round(),
);
try {
final status = await Floating().enable(OnLeavePiP(
aspectRatio: pipAspectRatio,
sourceRectHint: sourceRectHint,
));
debugPrint('Enabled picture-in-picture with status=$status');
return;
} on PlatformException catch (e, stack) {
await reportService.recordError(e, stack);
}
}
}
}
debugPrint('Cancelling picture-in-picture');
await Floating().cancelOnLeavePiP();
}
}

View file

@ -9,6 +9,8 @@ mixin AvesEntryBase {
int? get pageId;
String get mimeType;
String? get path;
String? get bestTitle;

View file

@ -8,7 +8,7 @@ enum AvesThemeColorMode { monochrome, polychrome }
enum ConfirmationDialog { createVault, deleteForever, moveToBin, moveUndatedItems }
enum CoordinateFormat { dms, decimal }
enum CoordinateFormat { dms, ddm, decimal }
enum DisplayRefreshRateMode { auto, highest, lowest }

View file

@ -188,4 +188,7 @@ class SettingKeys {
// cf Android `Settings.Global.TRANSITION_ANIMATION_SCALE`
static const platformTransitionAnimationScaleKey = 'transition_animation_scale';
// cf Android `Settings.Secure.LONG_PRESS_TIMEOUT`
static const platformLongPressTimeoutMillisKey = 'long_press_timeout';
}

View file

@ -2,18 +2,23 @@
// they originate from FFmpeg, fijkplayer, and other software
// that write additional metadata to media files
class Keys {
static const alpha = 'alpha';
static const androidCaptureFramerate = 'com.android.capture.fps';
static const androidManufacturer = 'com.android.manufacturer';
static const androidModel = 'com.android.model';
static const androidVersion = 'com.android.version';
static const audioChannels = 'audio-channels';
static const avgFrameRate = 'avg_frame_rate';
static const bps = 'bps';
static const bitrate = 'bitrate';
static const bitsPerRawSample = 'bits_per_raw_sample';
static const bitsPerSample = 'bits_per_sample';
static const byteCount = 'number_of_bytes';
static const channelLayout = 'channel_layout';
static const chapters = 'chapters';
static const chromaLocation = 'chroma_location';
static const closedCaptions = 'closed_captions';
static const codecLevel = 'codec_level';
static const codecLongName = 'codec_long_name';
static const codecName = 'codec_name';
static const codecPixelFormat = 'codec_pixel_format';
static const codecProfileId = 'codec_profile_id';
@ -21,6 +26,8 @@ class Keys {
static const codecTagString = 'codec_tag_string';
static const codedHeight = 'coded_height';
static const codedWidth = 'coded_width';
static const colorLevels = 'color_levels';
static const colorMatrix = 'color_matrix';
static const colorPrimaries = 'color_primaries';
static const colorRange = 'color_range';
static const colorSpace = 'color_space';
@ -29,29 +36,34 @@ class Keys {
static const creationTime = 'creation_time';
static const dar = 'display_aspect_ratio';
static const date = 'date';
static const decoderHeight = 'dh';
static const decoderWidth = 'dw';
static const disposition = 'disposition';
static const duration = 'duration';
static const durationMicros = 'duration_us';
static const durationTs = 'duration_ts';
static const encoder = 'encoder';
static const extraDataSize = 'extradata_size';
static const fieldOrder = 'field_order';
static const filename = 'filename';
static const filmGrain = 'film_grain';
static const fpsDen = 'fps_den';
static const fpsNum = 'fps_num';
static const fps = 'fps';
static const frameCount = 'number_of_frames';
static const handlerName = 'handler_name';
static const gamma = 'gamma';
static const hasBFrames = 'has_b_frames';
static const height = 'height';
static const hearingImpaired = 'hearing_impaired';
static const hwPixelFormat = 'hw_pixel_format';
static const index = 'index';
static const isAvc = 'is_avc';
static const language = 'language';
static const light = 'light';
static const location = 'location';
static const majorBrand = 'major_brand';
static const mediaFormat = 'format';
static const mediaType = 'media_type';
static const minorVersion = 'minor_version';
static const nalLengthSize = 'nal_length_size';
static const par = 'pixel_aspect_ratio';
static const probeScore = 'probe_score';
static const programCount = 'nb_programs';
static const quicktimeCreationDate = 'com.apple.quicktime.creationdate';
@ -78,16 +90,20 @@ class Keys {
static const statisticsTags = '_statistics_tags';
static const statisticsWritingApp = '_statistics_writing_app';
static const statisticsWritingDateUtc = '_statistics_writing_date_utc';
static const stereo3dMode = 'stereo_3d_mode';
static const streamCount = 'nb_streams';
static const streams = 'streams';
static const tbrDen = 'tbr_den';
static const tbrNum = 'tbr_num';
static const time = 'time';
static const segmentCount = 'segment_count';
static const streamType = 'type';
static const title = 'title';
static const timeBase = 'time_base';
static const track = 'track';
static const vendorId = 'vendor_id';
static const width = 'width';
static const videoHeight = 'height';
static const videoWidth = 'width';
static const visualImpaired = 'visual_impaired';
static const xiaomiSlowMoment = 'com.xiaomi.slow_moment';
}

View file

@ -1,30 +0,0 @@
# Miscellaneous
*.class
*.log
*.pyc
*.swp
.DS_Store
.atom/
.buildlog/
.history
.svn/
migrate_working_dir/
# IntelliJ related
*.iml
*.ipr
*.iws
.idea/
# The .vscode folder contains launch configuration and tasks you configure in
# VS Code which you may wish to be included in version control, so this line
# is commented out by default.
#.vscode/
# Flutter/Dart/Pub related
# Libraries should not include pubspec.lock, per https://dart.dev/guides/libraries/private-files#pubspeclock.
#/pubspec.lock
**/doc/api/
.dart_tool/
.packages
build/

View file

@ -1,10 +0,0 @@
# This file tracks properties of this Flutter project.
# Used by Flutter tool to assess capabilities and perform upgrades etc.
#
# This file should be version controlled and should not be manually edited.
version:
revision: f468f3366c26a5092eb964a230ce7892fda8f2f8
channel: stable
project_type: package

View file

@ -1 +0,0 @@
include: ../../analysis_options.yaml

View file

@ -1 +0,0 @@
export 'src/metadata.dart';

View file

@ -1,146 +0,0 @@
import 'package:aves_model/aves_model.dart';
import 'package:aves_video/aves_video.dart';
import 'package:ffmpeg_kit_flutter/ffmpeg_kit_config.dart';
import 'package:ffmpeg_kit_flutter/ffprobe_kit.dart';
import 'package:flutter/foundation.dart';
// Video metadata fetcher backed by FFprobe (via `ffmpeg_kit_flutter`).
// Probes a media URI and returns its properties as a single flattened map,
// with keys normalized to the app-wide `Keys` constants.
class FfmpegVideoMetadataFetcher extends AvesVideoMetadataFetcher {
// top-level groups in the FFprobe media information map
static const chaptersKey = 'chapters';
static const formatKey = 'format';
static const streamsKey = 'streams';
@override
void init() {}
// Probes `entry` and returns its metadata as a map.
// Returns an empty map when the URI cannot be resolved or probing fails.
@override
Future<Map> getMetadata(AvesEntryBase entry) async {
var uri = entry.uri;
if (uri.startsWith('content://')) {
// Android `content` URIs need to be converted to a SAF parameter
// readable by FFmpegKit
final safUri = await FFmpegKitConfig.getSafParameterForRead(uri);
if (safUri == null) {
debugPrint('failed to get SAF URI for entry=$entry');
return {};
}
uri = safUri;
}
final session = await FFprobeKit.getMediaInformation(uri);
final information = session.getMediaInformation();
if (information == null) {
// probing failed: log the session details and bail out
final failStackTrace = await session.getFailStackTrace();
final output = await session.getOutput();
debugPrint('failed to get video metadata for entry=$entry, failStackTrace=$failStackTrace, output=$output');
return {};
}
final props = information.getAllProperties();
if (props == null) return {};
// drop the `chapters` group when empty
final chapters = props[chaptersKey];
if (chapters is List) {
if (chapters.isEmpty) {
props.remove(chaptersKey);
}
}
// flatten the `format` group into the top-level map,
// minus fields that are redundant with the entry itself
final format = props.remove(formatKey);
if (format is Map) {
format.remove(Keys.filename);
format.remove('size');
_normalizeGroup(format);
props.addAll(format);
}
final streams = props[streamsKey];
if (streams is List) {
streams.forEach((stream) {
if (stream is Map) {
_normalizeGroup(stream);
// split the `avg_frame_rate` fraction (e.g. "30000/1001")
// into separate numerator/denominator fields
final fps = stream[Keys.avgFrameRate];
if (fps is String) {
final parts = fps.split('/');
if (parts.length == 2) {
final num = int.tryParse(parts[0]);
final den = int.tryParse(parts[1]);
if (num != null && den != null) {
if (den > 0) {
stream[Keys.fpsNum] = num;
stream[Keys.fpsDen] = den;
}
stream.remove(Keys.avgFrameRate);
}
}
}
// keep only the disposition flags that are set,
// joined as a comma-separated string
final disposition = stream[Keys.disposition];
if (disposition is Map) {
disposition.removeWhere((key, value) => value == 0);
stream[Keys.disposition] = disposition.keys.join(', ');
}
// derive a 0-based index from the stream `id`
// (assumes `id` is 1-based — TODO confirm across containers)
final idValue = stream['id'];
if (idValue is String) {
final id = int.tryParse(idValue);
if (id != null) {
stream[Keys.index] = id - 1;
stream.remove('id');
}
}
// map FFprobe `data` streams to the app metadata stream type
if (stream[Keys.streamType] == 'data') {
stream[Keys.streamType] = MediaStreamTypes.metadata;
}
}
});
}
return props;
}
// Normalizes one FFprobe group (`format` or a stream) in place:
// renames FFprobe keys to app-wide `Keys`, flattens the `tags` sub-map,
// and drops fields holding placeholder values.
void _normalizeGroup(Map<dynamic, dynamic> stream) {
// move the value of `k1` (when present and non-null) to `k2`
void replaceKey(k1, k2) {
final v = stream.remove(k1);
if (v != null) {
stream[k2] = v;
}
}
replaceKey('bit_rate', Keys.bitrate);
replaceKey('codec_type', Keys.streamType);
replaceKey('format_name', Keys.mediaFormat);
replaceKey('level', Keys.codecLevel);
replaceKey('nb_frames', Keys.frameCount);
replaceKey('pix_fmt', Keys.codecPixelFormat);
replaceKey('profile', Keys.codecProfileId);
// flatten the `tags` sub-map into the group
final tags = stream.remove('tags');
if (tags is Map) {
stream.addAll(tags);
}
// remove these fields when their value is a zero/unknown placeholder
// (numeric 0, a string parsing to 0, "0/0", "unknown" or "[0][0][0][0]")
<String>{
Keys.codecProfileId,
Keys.rFrameRate,
'bits_per_sample',
'closed_captions',
'codec_long_name',
'film_grain',
'has_b_frames',
'start_pts',
'start_time',
'vendor_id',
}.forEach((key) {
final value = stream[key];
switch (value) {
case final num v:
if (v == 0) {
stream.remove(key);
}
case final String v:
if (double.tryParse(v) == 0 || v == '0/0' || v == 'unknown' || v == '[0][0][0][0]') {
stream.remove(key);
}
}
});
}
}

View file

@ -1,28 +0,0 @@
name: aves_video_ffmpeg
version: 0.0.1
publish_to: none
environment:
sdk: ^3.6.0
resolution: workspace
dependencies:
flutter:
sdk: flutter
aves_model:
path: ../aves_model
aves_video:
path: ../aves_video
# the `video` variant of FFmpegKit is required: some videos crash the app
# when it is built with only the `min` or `https` (default) variant
# ffmpeg_kit_flutter_video: 6.0.3-LTS
ffmpeg_kit_flutter:
git:
url: https://github.com/deckerst/ffmpeg-kit.git
ref: background-lts
path: flutter/flutter
dev_dependencies:
flutter_lints:
flutter:

View file

@ -1,2 +1,3 @@
export 'src/controller.dart';
export 'src/factory.dart';
export 'src/metadata.dart';

View file

@ -162,6 +162,12 @@ class MpvVideoController extends AvesVideoController {
Future<void> _init({int startMillis = 0}) async {
final playing = _instance.state.playing;
// Audio quality is better with `audiotrack` than `opensles` (the default).
// Calling `setAudioDevice` does not seem to work.
// As of 2025/01/13, directly setting audio output via property works for some files but not all,
// and switching from a supported file to an unsupported file crashes:
// cf https://github.com/media-kit/media-kit/issues/1061
await _applyLoop();
await _instance.open(Media(entry.uri), play: playing);
await _instance.setSubtitleTrack(SubtitleTrack.no());

View file

@ -0,0 +1,199 @@
import 'dart:async';
import 'dart:convert';
import 'package:aves_model/aves_model.dart';
import 'package:aves_video/aves_video.dart';
import 'package:flutter/widgets.dart';
import 'package:media_kit/media_kit.dart';
// Video metadata fetcher backed by libmpv (via `media_kit`).
// Probes a media URI without rendering and returns a map of fields
// normalized to the app-wide `Keys` constants.
class MpvVideoMetadataFetcher extends AvesVideoMetadataFetcher {
  // track types reported by the mpv `track-list` property
  static const mpvTypeAudio = 'audio';
  static const mpvTypeVideo = 'video';
  static const mpvTypeSub = 'sub';

  // probe timeouts, in milliseconds, before giving up on video params
  static const probeTimeoutImage = 500;
  static const probeTimeoutVideo = 5000;

  @override
  void init() => MediaKit.ensureInitialized();

  // Probes `entry` and returns its metadata as a map.
  // Returns an empty map when probing times out.
  // Throws when the platform player does not expose mpv property access.
  @override
  Future<Map> getMetadata(AvesEntryBase entry) async {
    final player = Player(
      configuration: PlayerConfiguration(
        logLevel: MPVLogLevel.warn,
        protocolWhitelist: [
          ...const PlayerConfiguration().protocolWhitelist,
          // Android `content` URIs are considered unsafe by default,
          // as they are transferred via a custom `fd` protocol
          'fd',
        ],
      ),
    );
    final platform = player.platform;
    if (platform is! NativePlayer) {
      throw Exception('Platform player ${platform.runtimeType} does not support property retrieval');
    }

    // We need to enable video decoding to retrieve video params,
    // but it is disabled by default unless a `VideoController` is attached.
    // Attaching a `VideoController` is problematic, because `player.open()` may not return
    // unless a new frame is rendered, and triggering fails from a background service.
    // It is simpler to enable the video track via properties.
    await platform.setProperty('vid', 'auto');
    // deselect audio track to prevent triggering Android audio sessions
    await platform.setProperty('aid', 'no');

    // completes when video params become available, i.e. the video track is decoded
    final videoDecodedCompleter = Completer();
    StreamSubscription? subscription;
    subscription = player.stream.videoParams.listen((v) {
      if (v.par != null) {
        subscription?.cancel();
        videoDecodedCompleter.complete();
      }
    });
    await player.open(Media(entry.uri), play: false);
    final timeoutMillis = entry.mimeType.startsWith('image') ? probeTimeoutImage : probeTimeoutVideo;
    await Future.any([videoDecodedCompleter.future, Future.delayed(Duration(milliseconds: timeoutMillis))]);
    // make sure the subscription does not outlive the probe when it timed out
    // (cancelling an already cancelled subscription is harmless)
    await subscription?.cancel();

    final fields = <String, dynamic>{};
    final videoParams = player.state.videoParams;
    if (videoParams.par == null) {
      debugPrint('failed to probe video metadata within $timeoutMillis ms for entry=$entry');
    } else {
      // mpv properties: https://mpv.io/manual/stable/#property-list
      // mpv doc: "duration with milliseconds"
      final durationMs = await platform.getProperty('duration/full');
      if (durationMs.isNotEmpty) {
        fields[Keys.duration] = durationMs;
      }

      // mpv doc: "metadata key/value pairs"
      // note: seems to match FFprobe "format" > "tags" fields
      final metadata = await platform.getProperty('metadata');
      if (metadata.isNotEmpty) {
        try {
          jsonDecode(metadata).forEach((key, value) {
            fields[key] = value;
          });
        } catch (error) {
          debugPrint('failed to parse metadata=$metadata with error=$error');
        }
      }

      // one normalized map per track (audio/video/sub)
      final tracks = await platform.getProperty('track-list');
      if (tracks.isNotEmpty) {
        try {
          final tracksJson = jsonDecode(tracks);
          if (tracksJson is List && tracksJson.isNotEmpty) {
            fields[Keys.streams] = tracksJson.whereType<Map>().map((stream) {
              return _normalizeStream(stream.cast<String, dynamic>(), videoParams);
            }).toList();
          }
        } catch (error) {
          debugPrint('failed to parse tracks=$tracks with error=$error');
        }
      }

      final chapters = await platform.getProperty('chapter-list');
      if (chapters.isNotEmpty) {
        try {
          final chaptersJson = jsonDecode(chapters);
          if (chaptersJson is List && chaptersJson.isNotEmpty) {
            final chapterMaps = chaptersJson.whereType<Map>().toList();
            if (chapterMaps.isNotEmpty) {
              fields[Keys.chapters] = chapterMaps;
            }
          }
        } catch (error) {
          debugPrint('failed to parse chapters=$chapters with error=$error');
        }
      }
    }

    await player.dispose();
    return fields;
  }

  // Normalizes one mpv track map in place (and returns it):
  // renames mpv props to app-wide `Keys`, removes selection/demuxer noise,
  // and merges `videoParams` into video streams.
  Map<String, dynamic> _normalizeStream(Map<String, dynamic> stream, VideoParams videoParams) {
    // move the value of `k1` (when present and non-null) to `k2`
    void replaceKey(String k1, String k2) {
      final v = stream.remove(k1);
      if (v != null) {
        stream[k2] = v;
      }
    }

    // drop boolean flags that are explicitly false
    void removeIfFalse(String k) {
      if (stream[k] == false) {
        stream.remove(k);
      }
    }

    // player-session details, irrelevant to stored metadata
    stream.remove('id');
    stream.remove('decoder-desc');
    stream.remove('main-selection');
    stream.remove('selected');
    stream.remove('src-id');
    replaceKey('ff-index', Keys.index);
    replaceKey('codec', Keys.codecName);
    replaceKey('lang', Keys.language);
    replaceKey('demux-bitrate', Keys.bitrate);
    replaceKey('demux-channel-count', Keys.audioChannels);
    replaceKey('demux-fps', Keys.fps);
    replaceKey('demux-samplerate', Keys.sampleRate);
    replaceKey('hearing-impaired', Keys.hearingImpaired);
    replaceKey('visual-impaired', Keys.visualImpaired);
    // drop the remaining demuxer-specific props
    stream.removeWhere((k, v) => k.startsWith('demux-'));
    removeIfFalse('albumart');
    removeIfFalse('default');
    removeIfFalse('dependent');
    removeIfFalse('external');
    removeIfFalse('forced');
    removeIfFalse(Keys.hearingImpaired);
    removeIfFalse(Keys.visualImpaired);

    final isImage = stream.remove('image');
    switch (stream.remove('type')) {
      case mpvTypeAudio:
        stream[Keys.streamType] = MediaStreamTypes.audio;
      case mpvTypeVideo:
        stream[Keys.streamType] = MediaStreamTypes.video;
        // `image` may be absent from the track map, and `remove` yields a dynamic value,
        // so compare to `true` instead of using it directly as a condition
        if (isImage == true) {
          stream.remove(Keys.fps);
        }
        // Some video properties are not in the video track props but accessible via `video-params` (or `video-out-params`).
        // These parameters are already stored in the player state, as `videoParams`.
        // Parameters `sigPeak` and `averageBpp` are ignored.
        final videoParamsTags = <String, dynamic>{
          Keys.alpha: videoParams.alpha,
          Keys.chromaLocation: videoParams.chromaLocation,
          Keys.codecPixelFormat: videoParams.pixelformat,
          Keys.colorLevels: videoParams.colorlevels,
          Keys.colorMatrix: videoParams.colormatrix,
          Keys.colorPrimaries: videoParams.primaries,
          Keys.dar: videoParams.aspect,
          Keys.decoderHeight: videoParams.dh,
          Keys.decoderWidth: videoParams.dw,
          Keys.gamma: videoParams.gamma,
          Keys.hwPixelFormat: videoParams.hwPixelformat,
          Keys.light: videoParams.light,
          Keys.par: videoParams.par,
          Keys.rotate: videoParams.rotate,
          Keys.stereo3dMode: videoParams.stereoIn,
          Keys.videoHeight: videoParams.h,
          Keys.videoWidth: videoParams.w,
        }..removeWhere((k, v) => v == null);
        stream.addAll(videoParamsTags);
      case mpvTypeSub:
        stream[Keys.streamType] = MediaStreamTypes.subtitle;
    }
    return stream;
  }
}

View file

@ -13,10 +13,10 @@ packages:
dependency: transitive
description:
name: _flutterfire_internals
sha256: daa1d780fdecf8af925680c06c86563cdd445deea995d5c9176f1302a2b10bbe
sha256: "27899c95f9e7ec06c8310e6e0eac967707714b9f1450c4a58fa00ca011a4a8ae"
url: "https://pub.dev"
source: hosted
version: "1.3.48"
version: "1.3.49"
_macros:
dependency: transitive
description: dart
@ -289,23 +289,6 @@ packages:
url: "https://pub.dev"
source: hosted
version: "2.1.3"
ffmpeg_kit_flutter:
dependency: transitive
description:
path: "flutter/flutter"
ref: background-lts
resolved-ref: "24213bd2334265cfc240525fb9a218b85ad4d872"
url: "https://github.com/deckerst/ffmpeg-kit.git"
source: git
version: "6.0.3"
ffmpeg_kit_flutter_platform_interface:
dependency: transitive
description:
name: ffmpeg_kit_flutter_platform_interface
sha256: addf046ae44e190ad0101b2fde2ad909a3cd08a2a109f6106d2f7048b7abedee
url: "https://pub.dev"
source: hosted
version: "0.2.1"
file:
dependency: transitive
description:
@ -318,10 +301,10 @@ packages:
dependency: transitive
description:
name: firebase_core
sha256: "15d761b95dfa2906dfcc31b7fc6fe293188533d1a3ffe78389ba9e69bd7fdbde"
sha256: "0307c1fde82e2b8b97e0be2dab93612aff9a72f31ebe9bfac66ed8b37ef7c568"
url: "https://pub.dev"
source: hosted
version: "3.9.0"
version: "3.10.0"
firebase_core_platform_interface:
dependency: transitive
description:
@ -342,18 +325,18 @@ packages:
dependency: transitive
description:
name: firebase_crashlytics
sha256: e235c8452d5622fc271404592388fde179e4b62c50e777ad3c8c3369296104ed
sha256: f6adb65fa3d6391a79f0e60833bb4cdc468ce0c318831c90057ee11e0909cd29
url: "https://pub.dev"
source: hosted
version: "4.2.0"
version: "4.3.0"
firebase_crashlytics_platform_interface:
dependency: transitive
description:
name: firebase_crashlytics_platform_interface
sha256: "4ddadf44ed0a202f3acad053f12c083877940fa8cc1a9f747ae09e1ef4372160"
sha256: "6635166c22c6f75f634b8e77b70fcc43b24af4cfee28f975249dbdbd9769a702"
url: "https://pub.dev"
source: hosted
version: "3.7.0"
version: "3.8.0"
fixnum:
dependency: transitive
description:
@ -813,8 +796,8 @@ packages:
dependency: "direct overridden"
description:
path: media_kit
ref: d094ba83715b0ac893e546781b2862e855d34502
resolved-ref: d094ba83715b0ac893e546781b2862e855d34502
ref: "4d8c634c28d439384aab40b9d2edff83077f37c9"
resolved-ref: "4d8c634c28d439384aab40b9d2edff83077f37c9"
url: "https://github.com/media-kit/media-kit.git"
source: git
version: "1.1.11"
@ -830,8 +813,8 @@ packages:
dependency: "direct overridden"
description:
path: media_kit_video
ref: d094ba83715b0ac893e546781b2862e855d34502
resolved-ref: d094ba83715b0ac893e546781b2862e855d34502
ref: "4d8c634c28d439384aab40b9d2edff83077f37c9"
resolved-ref: "4d8c634c28d439384aab40b9d2edff83077f37c9"
url: "https://github.com/media-kit/media-kit.git"
source: git
version: "1.2.5"

View file

@ -7,7 +7,7 @@ repository: https://github.com/deckerst/aves
# - play changelog: /whatsnew/whatsnew-en-US
# - izzy changelog: /fastlane/metadata/android/en-US/changelogs/XXX01.txt
# - libre changelog: /fastlane/metadata/android/en-US/changelogs/XXX.txt
version: 1.12.1+141
version: 1.12.2+142
publish_to: none
environment:
@ -28,7 +28,6 @@ workspace:
- plugins/aves_ui
- plugins/aves_utils
- plugins/aves_video
- plugins/aves_video_ffmpeg
- plugins/aves_video_mpv
# use `scripts/apply_flavor_{flavor}.sh` to set the right dependencies for the flavor
@ -55,8 +54,6 @@ dependencies:
path: plugins/aves_services_google
aves_video:
path: plugins/aves_video
aves_video_ffmpeg:
path: plugins/aves_video_ffmpeg
aves_video_mpv:
path: plugins/aves_video_mpv
aves_ui:
@ -137,12 +134,12 @@ dependency_overrides:
media_kit:
git:
url: https://github.com/media-kit/media-kit.git
ref: d094ba83715b0ac893e546781b2862e855d34502
ref: 4d8c634c28d439384aab40b9d2edff83077f37c9
path: media_kit
media_kit_video:
git:
url: https://github.com/media-kit/media-kit.git
ref: d094ba83715b0ac893e546781b2862e855d34502
ref: 4d8c634c28d439384aab40b9d2edff83077f37c9
path: media_kit_video
dev_dependencies:
@ -202,6 +199,12 @@ flutter:
# `OverlaySnackBar` in `/widgets/common/action_mixins/overlay_snack_bar.dart`
# adapts from Flutter v3.23.0 `SnackBar` in `/material/snack_bar.dart`
#
# `AGestureDetector` in `/widgets/common/basic/gestures/gesture_detector.dart`
# adapts from Flutter v3.21.1 `GestureDetector` in `/widgets/gesture_detector.dart`
#
# `AInkResponse` in `/widgets/common/basic/gestures/ink_well.dart`
# adapts from Flutter v3.21.1 `InkResponse` and related classes in `/material/ink_well.dart`
#
# `EagerScaleGestureRecognizer` in `/widgets/common/behaviour/eager_scale_gesture_recognizer.dart`
# adapts from Flutter v3.16.0 `ScaleGestureRecognizer` in `/gestures/scale.dart`
#

View file

@ -18,6 +18,18 @@ void main() {
expect(ExtraCoordinateFormat.toDMS(l10n, const LatLng(0, 0), secondDecimals: 4), ['0° 0 0.0000″ N', '0° 0 0.0000″ E']);
});
test('Decimal degrees to DDM', () {
final l10n = lookupAppLocalizations(AvesApp.supportedLocales.first);
expect(ExtraCoordinateFormat.toDDM(l10n, const LatLng(37.496667, 127.0275)), ['37° 29.8000 N', '127° 1.6500 E']); // Gangnam
expect(ExtraCoordinateFormat.toDDM(l10n, const LatLng(78.9243503, 11.9230465)), ['78° 55.4610 N', '11° 55.3828 E']); // Ny-Ålesund
expect(ExtraCoordinateFormat.toDDM(l10n, const LatLng(-38.6965891, 175.9830047)), ['38° 41.7953 S', '175° 58.9803 E']); // Taupo
expect(ExtraCoordinateFormat.toDDM(l10n, const LatLng(-64.249391, -56.6556145)), ['64° 14.9635 S', '56° 39.3369 W']); // Marambio
expect(ExtraCoordinateFormat.toDDM(l10n, const LatLng(0, 0)), ['0° 0.0000 N', '0° 0.0000 E']);
expect(ExtraCoordinateFormat.toDDM(l10n, const LatLng(0, 0), minutePadding: true), ['0° 00.0000 N', '0° 00.0000 E']);
expect(ExtraCoordinateFormat.toDDM(l10n, const LatLng(0, 0), minuteDecimals: 0), ['0° 0 N', '0° 0 E']);
expect(ExtraCoordinateFormat.toDDM(l10n, const LatLng(0, 0), minuteDecimals: 6), ['0° 0.000000 N', '0° 0.000000 E']);
});
test('bounds center', () {
expect(GeoUtils.getLatLngCenter(const [LatLng(10, 30), LatLng(30, 50)]), const LatLng(20.28236664671092, 39.351653000319956));
expect(GeoUtils.getLatLngCenter(const [LatLng(10, -179), LatLng(30, 179)]), const LatLng(20.00279344048298, -179.9358157370226));

View file

@ -13,5 +13,7 @@ void main() {
expect('H'.toSentenceCase(), 'H');
expect('LW[1]'.toSentenceCase(), 'LW [1]');
expect('bits_per_raw_sample'.toSentenceCase(), 'Bits Per Raw Sample');
});
}

View file

@ -1,3 +1,3 @@
In v1.12.1:
In v1.12.2:
- enjoy the app in Danish
Full changelog available on GitHub