com.bumptech.glide.load.engine.Engine
A fast and efficient image loading library for Android focused on smooth scrolling.
package com.bumptech.glide.load.engine;
import android.os.Looper;
import android.os.MessageQueue;
import android.util.Log;
import com.bumptech.glide.Priority;
import com.bumptech.glide.load.Key;
import com.bumptech.glide.load.Transformation;
import com.bumptech.glide.load.data.DataFetcher;
import com.bumptech.glide.load.engine.cache.DiskCache;
import com.bumptech.glide.load.engine.cache.DiskCacheAdapter;
import com.bumptech.glide.load.engine.cache.MemoryCache;
import com.bumptech.glide.load.resource.transcode.ResourceTranscoder;
import com.bumptech.glide.provider.DataLoadProvider;
import com.bumptech.glide.request.ResourceCallback;
import com.bumptech.glide.util.LogTime;
import com.bumptech.glide.util.Util;
import java.lang.ref.ReferenceQueue;
import java.lang.ref.WeakReference;
import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.ExecutorService;
/**
* Responsible for starting loads and managing active and cached resources.
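* <p>Loads must be started on the main thread (see {@link Util#assertMainThread()}); the listener callbacks
* implemented below likewise assert that they run on the main thread.</p>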
*/
public class Engine implements EngineJobListener,
MemoryCache.ResourceRemovedListener,
EngineResource.ResourceListener {
private static final String TAG = "Engine";
private final Map<Key, EngineJob> jobs;
private final EngineKeyFactory keyFactory;
private final MemoryCache cache;
private final EngineJobFactory engineJobFactory;
private final Map<Key, WeakReference<EngineResource<?>>> activeResources;
private final ResourceRecycler resourceRecycler;
private final LazyDiskCacheProvider diskCacheProvider;
// Lazily instantiate to avoid exceptions if Glide is initialized on a background thread. See #295.
private ReferenceQueue<EngineResource<?>> resourceReferenceQueue;
/**
* Allows a request to indicate it is no longer interested in a given load.
*/
public static class LoadStatus {
private final EngineJob engineJob;
private final ResourceCallback cb;
public LoadStatus(ResourceCallback cb, EngineJob engineJob) {
this.cb = cb;
this.engineJob = engineJob;
}
public void cancel() {
engineJob.removeCallback(cb);
}
}
public Engine(MemoryCache memoryCache, DiskCache.Factory diskCacheFactory, ExecutorService diskCacheService,
ExecutorService sourceService) {
this(memoryCache, diskCacheFactory, diskCacheService, sourceService, null, null, null, null, null);
}
// Visible for testing.
Engine(MemoryCache cache, DiskCache.Factory diskCacheFactory, ExecutorService diskCacheService,
ExecutorService sourceService, Map<Key, EngineJob> jobs, EngineKeyFactory keyFactory,
Map<Key, WeakReference<EngineResource<?>>> activeResources, EngineJobFactory engineJobFactory,
ResourceRecycler resourceRecycler) {
this.cache = cache;
this.diskCacheProvider = new LazyDiskCacheProvider(diskCacheFactory);
if (activeResources == null) {
activeResources = new HashMap<Key, WeakReference<EngineResource<?>>>();
}
this.activeResources = activeResources;
if (keyFactory == null) {
keyFactory = new EngineKeyFactory();
}
this.keyFactory = keyFactory;
if (jobs == null) {
jobs = new HashMap<Key, EngineJob>();
}
this.jobs = jobs;
if (engineJobFactory == null) {
engineJobFactory = new EngineJobFactory(diskCacheService, sourceService, this);
}
this.engineJobFactory = engineJobFactory;
if (resourceRecycler == null) {
resourceRecycler = new ResourceRecycler();
}
this.resourceRecycler = resourceRecycler;
cache.setResourceRemovedListener(this);
}
/**
* Starts a load for the given arguments. Must be called on the main thread.
*
* <p>The flow for any request is as follows:</p>
* <ul>
* <li>Check the memory cache and provide the cached resource if present</li>
* <li>Check the current set of actively used resources and return the active resource if present</li>
* <li>Check the current set of in progress loads and add the cb to the in progress load if present</li>
* <li>Start a new load</li>
* </ul>
*
* <p>Active resources are those that have been provided to at least one request and have not yet been released.
* Once all consumers of a resource have released that resource, the resource then goes to cache. If the
* resource is ever returned to a new consumer from cache, it is re-added to the active resources. If the
* resource is evicted from the cache, its resources are recycled and re-used if possible and the resource is
* discarded. There is no strict requirement that consumers release their resources, so active resources are
* held weakly.</p>
*
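* <p>A minimal, hypothetical usage sketch. Values such as {@code fetcher}, {@code loadProvider},
* {@code transformation}, {@code transcoder} and {@code cb} stand in for objects that are normally assembled
* by Glide's request machinery rather than constructed by hand:</p>
* <pre>{@code
* // status is null when the resource is delivered synchronously from cache or active resources.
* Engine.LoadStatus status = engine.load(signature, width, height, fetcher, loadProvider,
*         transformation, transcoder, Priority.NORMAL, true, DiskCacheStrategy.RESULT, cb);
* if (status != null) {
*     // A load is in progress; the caller may cancel it once it is no longer interested.
*     status.cancel();
* }
* }</pre>
*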
* @param signature A non-null unique key to be mixed into the cache key that identifies the version of the data to
* be loaded.
* @param width The target width in pixels of the desired resource.
* @param height The target height in pixels of the desired resource.
* @param fetcher The fetcher to use to retrieve data not in the disk cache.
* @param loadProvider The load provider containing various encoders and decoders used to decode and encode data.
* @param transformation The transformation to use to transform the decoded resource.
* @param transcoder The transcoder to use to transcode the decoded and transformed resource.
* @param priority The priority with which the request should run.
* @param isMemoryCacheable True if the transcoded resource can be cached in memory.
* @param diskCacheStrategy The strategy to use that determines what type of data, if any,
* will be cached in the local disk cache.
* @param cb The callback that will be called when the load completes.
*
* @param <T> The type of data the resource will be decoded from.
* @param <Z> The type of the resource that will be decoded.
* @param <R> The type of the resource that will be transcoded from the decoded resource.
*/
public <T, Z, R> LoadStatus load(Key signature, int width, int height, DataFetcher<T> fetcher,
DataLoadProvider<T, Z> loadProvider, Transformation<Z> transformation, ResourceTranscoder<Z, R> transcoder,
Priority priority, boolean isMemoryCacheable, DiskCacheStrategy diskCacheStrategy, ResourceCallback cb) {
Util.assertMainThread();
long startTime = LogTime.getLogTime();
final String id = fetcher.getId();
EngineKey key = keyFactory.buildKey(id, signature, width, height, loadProvider.getCacheDecoder(),
loadProvider.getSourceDecoder(), transformation, loadProvider.getEncoder(),
transcoder, loadProvider.getSourceEncoder());
EngineResource<?> cached = loadFromCache(key, isMemoryCacheable);
if (cached != null) {
cb.onResourceReady(cached);
if (Log.isLoggable(TAG, Log.VERBOSE)) {
logWithTimeAndKey("Loaded resource from cache", startTime, key);
}
return null;
}
EngineResource<?> active = loadFromActiveResources(key, isMemoryCacheable);
if (active != null) {
cb.onResourceReady(active);
if (Log.isLoggable(TAG, Log.VERBOSE)) {
logWithTimeAndKey("Loaded resource from active resources", startTime, key);
}
return null;
}
EngineJob current = jobs.get(key);
if (current != null) {
current.addCallback(cb);
if (Log.isLoggable(TAG, Log.VERBOSE)) {
logWithTimeAndKey("Added to existing load", startTime, key);
}
return new LoadStatus(cb, current);
}
EngineJob engineJob = engineJobFactory.build(key, isMemoryCacheable);
DecodeJob<T, Z, R> decodeJob = new DecodeJob<T, Z, R>(key, width, height, fetcher, loadProvider, transformation,
transcoder, diskCacheProvider, diskCacheStrategy, priority);
EngineRunnable runnable = new EngineRunnable(engineJob, decodeJob, priority);
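// Register the job under its key before starting it so that later requests for the same key are attached as
// callbacks to this job (via the jobs.get(key) check above) instead of starting duplicate work.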
jobs.put(key, engineJob);
engineJob.addCallback(cb);
engineJob.start(runnable);
if (Log.isLoggable(TAG, Log.VERBOSE)) {
logWithTimeAndKey("Started new load", startTime, key);
}
return new LoadStatus(cb, engineJob);
}
private static void logWithTimeAndKey(String log, long startTime, Key key) {
Log.v(TAG, log + " in " + LogTime.getElapsedMillis(startTime) + "ms, key: " + key);
}
private EngineResource<?> loadFromActiveResources(Key key, boolean isMemoryCacheable) {
if (!isMemoryCacheable) {
return null;
}
EngineResource<?> active = null;
WeakReference<EngineResource<?>> activeRef = activeResources.get(key);
if (activeRef != null) {
active = activeRef.get();
if (active != null) {
active.acquire();
} else {
activeResources.remove(key);
}
}
return active;
}
private EngineResource<?> loadFromCache(Key key, boolean isMemoryCacheable) {
if (!isMemoryCacheable) {
return null;
}
EngineResource<?> cached = getEngineResourceFromCache(key);
if (cached != null) {
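// getEngineResourceFromCache removed the resource from the memory cache, so acquire it here and track it
// as an active (in-use) resource held only by a weak reference.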
cached.acquire();
activeResources.put(key, new ResourceWeakReference(key, cached, getReferenceQueue()));
}
return cached;
}
@SuppressWarnings("unchecked")
private EngineResource<?> getEngineResourceFromCache(Key key) {
Resource<?> cached = cache.remove(key);
final EngineResource result;
if (cached == null) {
result = null;
} else if (cached instanceof EngineResource) {
// Save an object allocation if we've cached an EngineResource (the typical case).
result = (EngineResource) cached;
} else {
result = new EngineResource(cached, true /*isCacheable*/);
}
return result;
}
public void release(Resource resource) {
Util.assertMainThread();
if (resource instanceof EngineResource) {
((EngineResource) resource).release();
} else {
throw new IllegalArgumentException("Cannot release anything but an EngineResource");
}
}
@SuppressWarnings("unchecked")
@Override
public void onEngineJobComplete(Key key, EngineResource<?> resource) {
Util.assertMainThread();
// A null resource indicates that the load failed, usually due to an exception.
if (resource != null) {
resource.setResourceListener(key, this);
if (resource.isCacheable()) {
activeResources.put(key, new ResourceWeakReference(key, resource, getReferenceQueue()));
}
}
// TODO: should this check that the engine job is still current?
jobs.remove(key);
}
@Override
public void onEngineJobCancelled(EngineJob engineJob, Key key) {
Util.assertMainThread();
EngineJob current = jobs.get(key);
if (engineJob.equals(current)) {
jobs.remove(key);
}
}
@Override
public void onResourceRemoved(final Resource<?> resource) {
Util.assertMainThread();
resourceRecycler.recycle(resource);
}
@Override
public void onResourceReleased(Key cacheKey, EngineResource resource) {
Util.assertMainThread();
activeResources.remove(cacheKey);
if (resource.isCacheable()) {
cache.put(cacheKey, resource);
} else {
resourceRecycler.recycle(resource);
}
}
public void clearDiskCache() {
diskCacheProvider.getDiskCache().clear();
}
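// Lazily creates the reference queue and registers an IdleHandler on the current thread's Looper so that
// cleared weak references to active resources are pruned whenever that thread's message queue is idle.
// In practice this is expected to be the main thread, since loads are started from the main thread.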
private ReferenceQueue<EngineResource<?>> getReferenceQueue() {
if (resourceReferenceQueue == null) {
resourceReferenceQueue = new ReferenceQueue<EngineResource<?>>();
MessageQueue queue = Looper.myQueue();
queue.addIdleHandler(new RefQueueIdleHandler(activeResources, resourceReferenceQueue));
}
return resourceReferenceQueue;
}
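// Builds the disk cache on first use, using double-checked locking on the volatile field, deferring any work
// in DiskCache.Factory#build() until the cache is actually needed. If the factory returns null, a
// DiskCacheAdapter (a no-op implementation) is used as a fallback.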
private static class LazyDiskCacheProvider implements DecodeJob.DiskCacheProvider {
private final DiskCache.Factory factory;
private volatile DiskCache diskCache;
public LazyDiskCacheProvider(DiskCache.Factory factory) {
this.factory = factory;
}
@Override
public DiskCache getDiskCache() {
if (diskCache == null) {
synchronized (this) {
if (diskCache == null) {
diskCache = factory.build();
}
if (diskCache == null) {
diskCache = new DiskCacheAdapter();
}
}
}
return diskCache;
}
}
private static class ResourceWeakReference extends WeakReference<EngineResource<?>> {
private final Key key;
public ResourceWeakReference(Key key, EngineResource<?> r, ReferenceQueue<? super EngineResource<?>> q) {
super(r, q);
this.key = key;
}
}
// Responsible for cleaning up the active resource map by removing weak references that have been cleared.
private static class RefQueueIdleHandler implements MessageQueue.IdleHandler {
private final Map<Key, WeakReference<EngineResource<?>>> activeResources;
private final ReferenceQueue<EngineResource<?>> queue;
public RefQueueIdleHandler(Map<Key, WeakReference<EngineResource<?>>> activeResources,
ReferenceQueue<EngineResource<?>> queue) {
this.activeResources = activeResources;
this.queue = queue;
}
@Override
public boolean queueIdle() {
ResourceWeakReference ref = (ResourceWeakReference) queue.poll();
if (ref != null) {
activeResources.remove(ref.key);
}
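// Returning true keeps this IdleHandler registered so future cleared references are also removed.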
return true;
}
}
// Visible for testing.
static class EngineJobFactory {
private final ExecutorService diskCacheService;
private final ExecutorService sourceService;
private final EngineJobListener listener;
public EngineJobFactory(ExecutorService diskCacheService, ExecutorService sourceService,
EngineJobListener listener) {
this.diskCacheService = diskCacheService;
this.sourceService = sourceService;
this.listener = listener;
}
public EngineJob build(Key key, boolean isMemoryCacheable) {
return new EngineJob(key, diskCacheService, sourceService, isMemoryCacheable, listener);
}
}
}