/**
* Copyright 2018-2019 Amazon.com,
* Inc. or its affiliates. All Rights Reserved.
*
* SPDX-License-Identifier: Apache-2.0
*/
package com.apollographql.apollo.internal.cache.normalized;
import com.apollographql.apollo.api.GraphqlFragment;
import com.apollographql.apollo.api.Operation;
import com.apollographql.apollo.api.Response;
import com.apollographql.apollo.api.ResponseField;
import com.apollographql.apollo.api.ResponseFieldMapper;
import com.apollographql.apollo.cache.CacheHeaders;
import com.apollographql.apollo.cache.normalized.ApolloStore;
import com.apollographql.apollo.cache.normalized.GraphQLStoreOperation;
import com.apollographql.apollo.cache.normalized.CacheKey;
import com.apollographql.apollo.cache.normalized.CacheKeyResolver;
import com.apollographql.apollo.cache.normalized.NormalizedCache;
import com.apollographql.apollo.cache.normalized.OptimisticNormalizedCache;
import com.apollographql.apollo.cache.normalized.Record;
import com.apollographql.apollo.internal.field.CacheFieldValueResolver;
import com.apollographql.apollo.internal.response.RealResponseReader;
import com.apollographql.apollo.internal.response.ScalarTypeAdapters;
import com.apollographql.apollo.internal.ApolloLogger;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.UUID;
import java.util.WeakHashMap;
import java.util.concurrent.Executor;
import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import javax.annotation.Nonnull;
import javax.annotation.Nullable;
import static com.apollographql.apollo.api.internal.Utils.checkNotNull;
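
/**
 * Concrete {@link ApolloStore} backed by an {@link OptimisticNormalizedCache} that wraps the
 * configured {@link NormalizedCache}. All reads and writes go through a {@link ReadWriteLock},
 * store operations are created with the supplied {@link Executor} as their dispatcher, and
 * registered {@code RecordChangeSubscriber}s are notified whenever cached record keys change.
 */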
public final class RealAppSyncStore implements ApolloStore, ReadableStore, WriteableStore {
  private final OptimisticNormalizedCache optimisticCache;
  private final CacheKeyResolver cacheKeyResolver;
  private final ScalarTypeAdapters scalarTypeAdapters;
  private final ReadWriteLock lock;
  private final Set<RecordChangeSubscriber> subscribers;
  private final Executor dispatcher;
  private final CacheKeyBuilder cacheKeyBuilder;
  private final ApolloLogger logger;
  public RealAppSyncStore(@Nonnull NormalizedCache normalizedCache, @Nonnull CacheKeyResolver cacheKeyResolver,
      @Nonnull final ScalarTypeAdapters scalarTypeAdapters, @Nonnull Executor dispatcher,
      @Nonnull ApolloLogger logger) {
    checkNotNull(normalizedCache, "normalizedCache == null");
    this.optimisticCache = (OptimisticNormalizedCache) new OptimisticNormalizedCache().chain(normalizedCache);
    this.cacheKeyResolver = checkNotNull(cacheKeyResolver, "cacheKeyResolver == null");
    this.scalarTypeAdapters = checkNotNull(scalarTypeAdapters, "scalarTypeAdapters == null");
    this.dispatcher = checkNotNull(dispatcher, "dispatcher == null");
    this.logger = checkNotNull(logger, "logger == null");
    this.lock = new ReentrantReadWriteLock();
    this.subscribers = Collections.newSetFromMap(new WeakHashMap<RecordChangeSubscriber, Boolean>());
    this.cacheKeyBuilder = new RealCacheKeyBuilder();
  }
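
  // Normalizer used when writing a network response into the cache: cache keys are resolved
  // from the raw field maps via the configured CacheKeyResolver.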
  @Override public ResponseNormalizer<Map<String, Object>> networkResponseNormalizer() {
    return new ResponseNormalizer<Map<String, Object>>() {
      @Nonnull @Override public CacheKey resolveCacheKey(@Nonnull ResponseField field,
          @Nonnull Map<String, Object> record) {
        return cacheKeyResolver.fromFieldRecordSet(field, record);
      }

      @Nonnull @Override public CacheKeyBuilder cacheKeyBuilder() {
        return cacheKeyBuilder;
      }
    };
  }
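
  // Normalizer used when reading records back out of the cache: each Record already carries
  // its own key, so it is reused directly.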
  @Override public ResponseNormalizer<Record> cacheResponseNormalizer() {
    return new ResponseNormalizer<Record>() {
      @Nonnull @Override public CacheKey resolveCacheKey(@Nonnull ResponseField field, @Nonnull Record record) {
        return CacheKey.from(record.key());
      }

      @Nonnull @Override public CacheKeyBuilder cacheKeyBuilder() {
        return cacheKeyBuilder;
      }
    };
  }
  @Override public synchronized void subscribe(RecordChangeSubscriber subscriber) {
    subscribers.add(subscriber);
  }

  @Override public synchronized void unsubscribe(RecordChangeSubscriber subscriber) {
    subscribers.remove(subscriber);
  }
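
  // Notifies subscribers of changed record keys. The subscriber set is copied under the monitor
  // so that callbacks run without holding it.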
  @Override public void publish(@Nonnull final Set<String> changedKeys) {
    checkNotNull(changedKeys, "changedKeys == null");
    if (changedKeys.isEmpty()) {
      return;
    }
    Set<RecordChangeSubscriber> iterableSubscribers;
    synchronized (this) {
      iterableSubscribers = new LinkedHashSet<>(subscribers);
    }
    for (RecordChangeSubscriber subscriber : iterableSubscribers) {
      subscriber.onCacheRecordsChanged(changedKeys);
    }
  }
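
  // Clears every record from the wrapped cache inside a write transaction.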
  @Override @Nonnull public GraphQLStoreOperation<Boolean> clearAll() {
    return new GraphQLStoreOperation<Boolean>(dispatcher) {
      @Override public Boolean perform() {
        return writeTransaction(new Transaction<WriteableStore, Boolean>() {
          @Override public Boolean execute(WriteableStore cache) {
            optimisticCache.clearAll();
            return Boolean.TRUE;
          }
        });
      }
    };
  }
  @Override @Nonnull public GraphQLStoreOperation<Boolean> remove(@Nonnull final CacheKey cacheKey) {
    checkNotNull(cacheKey, "cacheKey == null");
    return new GraphQLStoreOperation<Boolean>(dispatcher) {
      @Override protected Boolean perform() {
        return writeTransaction(new Transaction<WriteableStore, Boolean>() {
          @Override public Boolean execute(WriteableStore cache) {
            return optimisticCache.remove(cacheKey);
          }
        });
      }
    };
  }
  @Override @Nonnull public GraphQLStoreOperation<Integer> remove(@Nonnull final List<CacheKey> cacheKeys) {
    checkNotNull(cacheKeys, "cacheKeys == null");
    return new GraphQLStoreOperation<Integer>(dispatcher) {
      @Override protected Integer perform() {
        return writeTransaction(new Transaction<WriteableStore, Integer>() {
          @Override public Integer execute(WriteableStore cache) {
            int count = 0;
            for (CacheKey cacheKey : cacheKeys) {
              if (optimisticCache.remove(cacheKey)) {
                count++;
              }
            }
            return count;
          }
        });
      }
    };
  }
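
  // All cache access funnels through these two methods: readers share the lock, writers hold it
  // exclusively, and the store passes itself to the transaction as the ReadableStore/WriteableStore.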
  @Override public <R> R readTransaction(Transaction<ReadableStore, R> transaction) {
    lock.readLock().lock();
    try {
      return transaction.execute(RealAppSyncStore.this);
    } finally {
      lock.readLock().unlock();
    }
  }

  @Override public <R> R writeTransaction(Transaction<WriteableStore, R> transaction) {
    lock.writeLock().lock();
    try {
      return transaction.execute(RealAppSyncStore.this);
    } finally {
      lock.writeLock().unlock();
    }
  }
  @Override public NormalizedCache normalizedCache() {
    return optimisticCache;
  }

  @Nullable public Record read(@Nonnull String key, @Nonnull CacheHeaders cacheHeaders) {
    return optimisticCache.loadRecord(checkNotNull(key, "key == null"), cacheHeaders);
  }

  @Nonnull public Collection<Record> read(@Nonnull Collection<String> keys, @Nonnull CacheHeaders cacheHeaders) {
    return optimisticCache.loadRecords(checkNotNull(keys, "keys == null"), cacheHeaders);
  }

  @Nonnull public Set<String> merge(@Nonnull Collection<Record> recordSet, @Nonnull CacheHeaders cacheHeaders) {
    return optimisticCache.merge(checkNotNull(recordSet, "recordSet == null"), cacheHeaders);
  }

  @Override public Set<String> merge(Record record, @Nonnull CacheHeaders cacheHeaders) {
    return optimisticCache.merge(checkNotNull(record, "record == null"), cacheHeaders);
  }

  @Override public CacheKeyResolver cacheKeyResolver() {
    return cacheKeyResolver;
  }
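
  // Read operations: each returns a GraphQLStoreOperation whose perform() executes the
  // corresponding doRead(...) against the cache.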
  @Override @Nonnull public <D extends Operation.Data, T, V extends Operation.Variables> GraphQLStoreOperation<T> read(
      @Nonnull final Operation<D, T, V> operation) {
    checkNotNull(operation, "operation == null");
    return new GraphQLStoreOperation<T>(dispatcher) {
      @Override protected T perform() {
        return doRead(operation);
      }
    };
  }
  @Override @Nonnull public <D extends Operation.Data, T, V extends Operation.Variables>
  GraphQLStoreOperation<Response<T>> read(@Nonnull final Operation<D, T, V> operation,
      @Nonnull final ResponseFieldMapper<D> responseFieldMapper,
      @Nonnull final ResponseNormalizer<Record> responseNormalizer, @Nonnull final CacheHeaders cacheHeaders) {
    checkNotNull(operation, "operation == null");
    checkNotNull(responseNormalizer, "responseNormalizer == null");
    return new GraphQLStoreOperation<Response<T>>(dispatcher) {
      @Override protected Response<T> perform() {
        return doRead(operation, responseFieldMapper, responseNormalizer, cacheHeaders);
      }
    };
  }
  @Override @Nonnull public <F extends GraphqlFragment> GraphQLStoreOperation<F> read(
      @Nonnull final ResponseFieldMapper<F> responseFieldMapper, @Nonnull final CacheKey cacheKey,
      @Nonnull final Operation.Variables variables) {
    checkNotNull(responseFieldMapper, "responseFieldMapper == null");
    checkNotNull(cacheKey, "cacheKey == null");
    checkNotNull(variables, "variables == null");
    return new GraphQLStoreOperation<F>(dispatcher) {
      @Override protected F perform() {
        return doRead(responseFieldMapper, cacheKey, variables);
      }
    };
  }
  @Override @Nonnull public <D extends Operation.Data, T, V extends Operation.Variables>
  GraphQLStoreOperation<Set<String>> write(@Nonnull final Operation<D, T, V> operation,
      @Nonnull final D operationData) {
    checkNotNull(operation, "operation == null");
    checkNotNull(operationData, "operationData == null");
    return new GraphQLStoreOperation<Set<String>>(dispatcher) {
      @Override protected Set<String> perform() {
        return doWrite(operation, operationData, false, null);
      }
    };
  }
  @Override @Nonnull public <D extends Operation.Data, T, V extends Operation.Variables> GraphQLStoreOperation<Boolean>
  writeAndPublish(@Nonnull final Operation<D, T, V> operation, @Nonnull final D operationData) {
    return new GraphQLStoreOperation<Boolean>(dispatcher) {
      @Override protected Boolean perform() {
        Set<String> changedKeys = doWrite(operation, operationData, false, null);
        publish(changedKeys);
        return Boolean.TRUE;
      }
    };
  }
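
  // Writes a single fragment at an explicit cache key; a concrete key is required, so
  // CacheKey.NO_KEY is rejected.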
  @Override @Nonnull public GraphQLStoreOperation<Set<String>> write(@Nonnull final GraphqlFragment fragment,
      @Nonnull final CacheKey cacheKey, @Nonnull final Operation.Variables variables) {
    checkNotNull(fragment, "fragment == null");
    checkNotNull(cacheKey, "cacheKey == null");
    checkNotNull(variables, "variables == null");
    if (cacheKey == CacheKey.NO_KEY) {
      throw new IllegalArgumentException("undefined cache key");
    }
    return new GraphQLStoreOperation<Set<String>>(dispatcher) {
      @Override protected Set<String> perform() {
        return writeTransaction(new Transaction<WriteableStore, Set<String>>() {
          @Override public Set<String> execute(WriteableStore cache) {
            return doWrite(fragment, cacheKey, variables);
          }
        });
      }
    };
  }
  @Override @Nonnull public GraphQLStoreOperation<Boolean> writeAndPublish(@Nonnull final GraphqlFragment fragment,
      @Nonnull final CacheKey cacheKey, @Nonnull final Operation.Variables variables) {
    return new GraphQLStoreOperation<Boolean>(dispatcher) {
      @Override protected Boolean perform() {
        Set<String> changedKeys = doWrite(fragment, cacheKey, variables);
        publish(changedKeys);
        return Boolean.TRUE;
      }
    };
  }
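
  // Optimistic writes tag the normalized records with the mutation id so they can be rolled
  // back later without touching records written from real responses.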
  @Nonnull @Override
  public <D extends Operation.Data, T, V extends Operation.Variables> GraphQLStoreOperation<Set<String>>
  writeOptimisticUpdates(@Nonnull final Operation<D, T, V> operation, @Nonnull final D operationData,
      @Nonnull final UUID mutationId) {
    return new GraphQLStoreOperation<Set<String>>(dispatcher) {
      @Override protected Set<String> perform() {
        return doWrite(operation, operationData, true, mutationId);
      }
    };
  }
  @Nonnull @Override
  public <D extends Operation.Data, T, V extends Operation.Variables> GraphQLStoreOperation<Boolean>
  writeOptimisticUpdatesAndPublish(@Nonnull final Operation<D, T, V> operation, @Nonnull final D operationData,
      @Nonnull final UUID mutationId) {
    return new GraphQLStoreOperation<Boolean>(dispatcher) {
      @Override protected Boolean perform() {
        Set<String> changedKeys = doWrite(operation, operationData, true, mutationId);
        publish(changedKeys);
        return Boolean.TRUE;
      }
    };
  }
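
  // Rolls back the optimistic records previously written for the given mutation id.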
  @Nonnull @Override
  public GraphQLStoreOperation<Set<String>> rollbackOptimisticUpdates(@Nonnull final UUID mutationId) {
    return new GraphQLStoreOperation<Set<String>>(dispatcher) {
      @Override protected Set<String> perform() {
        return writeTransaction(new Transaction<WriteableStore, Set<String>>() {
          @Override public Set<String> execute(WriteableStore cache) {
            return optimisticCache.removeOptimisticUpdates(mutationId);
          }
        });
      }
    };
  }
  @Nonnull @Override
  public GraphQLStoreOperation<Boolean> rollbackOptimisticUpdatesAndPublish(@Nonnull final UUID mutationId) {
    return new GraphQLStoreOperation<Boolean>(dispatcher) {
      @Override protected Boolean perform() {
        Set<String> changedKeys = writeTransaction(new Transaction<WriteableStore, Set<String>>() {
          @Override public Set<String> execute(WriteableStore cache) {
            return optimisticCache.removeOptimisticUpdates(mutationId);
          }
        });
        publish(changedKeys);
        return Boolean.TRUE;
      }
    };
  }
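
  // Reads the operation's data starting from the query root record, resolving nested references
  // through CacheFieldValueResolver.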
  private <D extends Operation.Data, T, V extends Operation.Variables> T doRead(final Operation<D, T, V> operation) {
    return readTransaction(new Transaction<ReadableStore, T>() {
      @Nullable @Override public T execute(ReadableStore cache) {
        Record rootRecord = cache.read(CacheKeyResolver.rootKeyForOperation(operation).key(), CacheHeaders.NONE);
        if (rootRecord == null) {
          return null;
        }
        ResponseFieldMapper<D> responseFieldMapper = operation.responseFieldMapper();
        CacheFieldValueResolver fieldValueResolver = new CacheFieldValueResolver(cache, operation.variables(),
            cacheKeyResolver(), CacheHeaders.NONE, cacheKeyBuilder);
        //noinspection unchecked
        RealResponseReader<Record> responseReader = new RealResponseReader<>(operation.variables(), rootRecord,
            fieldValueResolver, scalarTypeAdapters, ResponseNormalizer.NO_OP_NORMALIZER);
        return operation.wrapData(responseFieldMapper.map(responseReader));
      }
    });
  }
  private <D extends Operation.Data, T, V extends Operation.Variables> Response<T> doRead(
      final Operation<D, T, V> operation, final ResponseFieldMapper<D> responseFieldMapper,
      final ResponseNormalizer<Record> responseNormalizer, final CacheHeaders cacheHeaders) {
    return readTransaction(new Transaction<ReadableStore, Response<T>>() {
      @Nonnull @Override public Response<T> execute(ReadableStore cache) {
        Record rootRecord = cache.read(CacheKeyResolver.rootKeyForOperation(operation).key(), cacheHeaders);
        if (rootRecord == null) {
          return Response.<T>builder(operation).fromCache(true).build();
        }
        CacheFieldValueResolver fieldValueResolver = new CacheFieldValueResolver(cache, operation.variables(),
            cacheKeyResolver(), cacheHeaders, cacheKeyBuilder);
        RealResponseReader<Record> responseReader = new RealResponseReader<>(operation.variables(), rootRecord,
            fieldValueResolver, scalarTypeAdapters, responseNormalizer);
        try {
          responseNormalizer.willResolveRootQuery(operation);
          T data = operation.wrapData(responseFieldMapper.map(responseReader));
          return Response.<T>builder(operation)
              .data(data)
              .fromCache(true)
              .dependentKeys(responseNormalizer.dependentKeys())
              .build();
        } catch (Exception e) {
          logger.e(e, "Failed to read cache response");
          return Response.<T>builder(operation).fromCache(true).build();
        }
      }
    });
  }
  private <F extends GraphqlFragment> F doRead(final ResponseFieldMapper<F> responseFieldMapper,
      final CacheKey cacheKey, final Operation.Variables variables) {
    return readTransaction(new Transaction<ReadableStore, F>() {
      @Nullable @Override public F execute(ReadableStore cache) {
        Record rootRecord = cache.read(cacheKey.key(), CacheHeaders.NONE);
        if (rootRecord == null) {
          return null;
        }
        CacheFieldValueResolver fieldValueResolver = new CacheFieldValueResolver(cache, variables,
            cacheKeyResolver(), CacheHeaders.NONE, cacheKeyBuilder);
        //noinspection unchecked
        RealResponseReader<Record> responseReader = new RealResponseReader<>(variables, rootRecord,
            fieldValueResolver, scalarTypeAdapters, ResponseNormalizer.NO_OP_NORMALIZER);
        return responseFieldMapper.map(responseReader);
      }
    });
  }
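
  // Marshals the operation data into normalized records and merges them into the cache, either
  // as regular records or as optimistic updates tagged with the mutation id.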
  private <D extends Operation.Data, T, V extends Operation.Variables> Set<String> doWrite(
      final Operation<D, T, V> operation, final D operationData, final boolean optimistic,
      final UUID mutationId) {
    return writeTransaction(new Transaction<WriteableStore, Set<String>>() {
      @Override public Set<String> execute(WriteableStore cache) {
        CacheResponseWriter cacheResponseWriter = new CacheResponseWriter(operation.variables(),
            scalarTypeAdapters);
        operationData.marshaller().marshal(cacheResponseWriter);
        ResponseNormalizer<Map<String, Object>> responseNormalizer = networkResponseNormalizer();
        responseNormalizer.willResolveRootQuery(operation);
        Collection<Record> records = cacheResponseWriter.normalize(responseNormalizer);
        if (optimistic) {
          List<Record> updatedRecords = new ArrayList<>();
          for (Record record : records) {
            updatedRecords.add(record.toBuilder().mutationId(mutationId).build());
          }
          return optimisticCache.mergeOptimisticUpdates(updatedRecords);
        } else {
          return optimisticCache.merge(records, CacheHeaders.NONE);
        }
      }
    });
  }
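
  // Marshals a single fragment rooted at the given cache key and merges the resulting records.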
  private Set<String> doWrite(final GraphqlFragment fragment, final CacheKey cacheKey,
      final Operation.Variables variables) {
    return writeTransaction(new Transaction<WriteableStore, Set<String>>() {
      @Override public Set<String> execute(WriteableStore cache) {
        CacheResponseWriter cacheResponseWriter = new CacheResponseWriter(variables, scalarTypeAdapters);
        fragment.marshaller().marshal(cacheResponseWriter);
        ResponseNormalizer<Map<String, Object>> responseNormalizer = networkResponseNormalizer();
        responseNormalizer.willResolveRecord(cacheKey);
        Collection<Record> records = cacheResponseWriter.normalize(responseNormalizer);
        return merge(records, CacheHeaders.NONE);
      }
    });
  }
}