video: sized thumbnails match content resolver ones

Thibault Deckers 2021-01-21 15:08:19 +09:00
parent c252ce7828
commit bb08f3dcb6
3 changed files with 29 additions and 7 deletions

View file

@@ -1,7 +1,9 @@
 package deckers.thibault.aves.decoder
 
 import android.content.Context
+import android.media.MediaMetadataRetriever
 import android.net.Uri
+import android.os.Build
 import com.bumptech.glide.Glide
 import com.bumptech.glide.Priority
 import com.bumptech.glide.Registry
@@ -48,9 +50,29 @@ internal class VideoThumbnailFetcher(private val model: VideoThumbnail) : DataFetcher<InputStream>
         val retriever = openMetadataRetriever(model.context, model.uri)
         if (retriever != null) {
             try {
-                val picture = retriever.embeddedPicture ?: retriever.frameAtTime?.getBytes(canHaveAlpha = false, recycle = false)
-                if (picture != null) {
-                    callback.onDataReady(ByteArrayInputStream(picture))
+                var bytes = retriever.embeddedPicture
+                if (bytes == null) {
+                    // try to match the thumbnails returned by the content resolver / Media Store
+                    // the following strategies are from empirical evidence from a few test devices:
+                    // - API 29: sync frame closest to the middle
+                    // - API 26/27: default representative frame at any time position
+                    var timeMillis: Long? = null
+                    if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) {
+                        val durationMillis = retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_DURATION)?.toLongOrNull()
+                        if (durationMillis != null) {
+                            timeMillis = durationMillis / 2
+                        }
+                    }
+                    val frame = if (timeMillis != null) {
+                        retriever.getFrameAtTime(timeMillis * 1000)
+                    } else {
+                        retriever.frameAtTime
+                    }
+                    bytes = frame?.getBytes(canHaveAlpha = false, recycle = false)
+                }
+                if (bytes != null) {
+                    callback.onDataReady(ByteArrayInputStream(bytes))
                 } else {
                     callback.onLoadFailed(Exception("failed to get embedded picture or any frame"))
                 }
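
For context, a minimal standalone sketch of the frame-selection strategy described in the diff comments above, outside of the Glide fetcher. The helper name extractCoverFrame, the file-path data source, and returning a Bitmap instead of bytes are assumptions for illustration only, not part of this commit. The key API detail is that getFrameAtTime(timeUs) expects microseconds and picks the sync frame closest to the given time, while the no-argument overload returns a representative frame at any position, hence the two branches and the * 1000 conversion.

import android.graphics.Bitmap
import android.media.MediaMetadataRetriever
import android.os.Build

// Hypothetical helper (not part of this commit): extract a video cover frame
// following the strategy described in the diff comments above.
fun extractCoverFrame(path: String): Bitmap? {
    val retriever = MediaMetadataRetriever()
    return try {
        retriever.setDataSource(path)
        var timeMillis: Long? = null
        if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.Q) {
            // API 29+: the Media Store thumbnail appears to be the sync frame
            // closest to the middle of the video.
            timeMillis = retriever.extractMetadata(MediaMetadataRetriever.METADATA_KEY_DURATION)
                ?.toLongOrNull()
                ?.let { it / 2 }
        }
        if (timeMillis != null) {
            // getFrameAtTime(timeUs) uses OPTION_CLOSEST_SYNC and expects microseconds.
            retriever.getFrameAtTime(timeMillis * 1000)
        } else {
            // Older APIs: the no-argument call returns a representative frame
            // "at any time position", matching the content resolver output there.
            retriever.frameAtTime
        }
    } finally {
        retriever.release()
    }
}

In the fetcher itself, the selected frame is then converted to bytes with getBytes(canHaveAlpha = false, recycle = false) before being handed to Glide's callback, as shown in the hunk above.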

View file

@@ -70,9 +70,7 @@ class _RasterImageThumbnailState extends State<RasterImageThumbnail> {
     if (!entry.canDecode) return;
 
     _fastThumbnailProvider = entry.getThumbnail();
-    if (!entry.isVideo) {
-      _sizedThumbnailProvider = entry.getThumbnail(extent: extent);
-    }
+    _sizedThumbnailProvider = entry.getThumbnail(extent: extent);
   }
 
   void _pauseProvider() {

View file

@@ -3,6 +3,8 @@ import 'dart:ui';
 import 'package:aves/model/entry.dart';
 import 'package:aves/model/entry_images.dart';
+import 'package:aves/model/settings/settings.dart';
+import 'package:aves/widgets/collection/collection_page.dart';
 import 'package:flutter/material.dart';
 import 'package:flutter_ijkplayer/flutter_ijkplayer.dart';
@@ -98,7 +100,7 @@ class _VideoViewState extends State<VideoView> {
             backgroundColor: Colors.transparent,
           )
         : Image(
-            image: entry.getBestThumbnail(entry.displaySize.longestSide),
+            image: entry.getBestThumbnail(settings.getTileExtent(CollectionPage.routeName)),
             fit: BoxFit.contain,
           );
     });