package com.netflix.dyno.jedis;
import com.netflix.dyno.connectionpool.*;
import com.netflix.dyno.connectionpool.Connection;
import com.netflix.dyno.connectionpool.exception.DynoException;
import com.netflix.dyno.connectionpool.exception.FatalConnectionException;
import com.netflix.dyno.connectionpool.exception.NoAvailableHostsException;
import com.netflix.dyno.connectionpool.impl.ConnectionPoolImpl;
import com.netflix.dyno.connectionpool.impl.utils.CollectionUtils;
import com.netflix.dyno.connectionpool.impl.utils.ZipUtils;
import com.netflix.dyno.jedis.JedisConnectionFactory.JedisConnection;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import redis.clients.jedis.BinaryClient.LIST_POSITION;
import redis.clients.jedis.*;
import redis.clients.jedis.exceptions.JedisConnectionException;
import javax.annotation.concurrent.NotThreadSafe;
import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicReference;
import static com.netflix.dyno.connectionpool.ConnectionPoolConfiguration.CompressionStrategy;
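/*
 * Usage sketch (illustrative, not part of this excerpt): a DynoJedisPipeline is bound to a single
 * hash key, so every command queued on it must use that same key. The pipeline is typically
 * obtained from a DynoJedis client and flushed with sync()/syncAndReturnAll(), which are defined
 * further down in this class (outside this excerpt); the client variable below is assumed.
 *
 *   DynoJedisPipeline pipeline = dynoClient.pipelined(); // dynoClient: an existing DynoJedis instance (assumed)
 *   Response<Long> hits = pipeline.incr("page:home");    // every command targets the key "page:home"
 *   Response<String> body = pipeline.get("page:home");
 *   pipeline.sync();                                     // flush; values become readable via Response.get()
 */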
@NotThreadSafe
public class DynoJedisPipeline implements RedisPipeline, AutoCloseable {
private static final Logger Logger = LoggerFactory.getLogger(DynoJedisPipeline.class);
// ConnPool and connection to exec the pipeline
private final ConnectionPoolImpl<Jedis> connPool;
private volatile Connection<Jedis> connection;
private final DynoJedisPipelineMonitor opMonitor;
private final ConnectionPoolMonitor cpMonitor;
// the cached pipeline
private volatile Pipeline jedisPipeline = null;
// the cached row key for the pipeline. all subsequent requests to the pipeline must use the same key; this is used to enforce that.
private final AtomicReference<String> theKey = new AtomicReference<String>(null);
// used for tracking errors
private final AtomicReference<DynoException> pipelineEx = new AtomicReference<DynoException>(null);
private static final String DynoPipeline = "DynoPipeline";
DynoJedisPipeline(ConnectionPoolImpl<Jedis> cPool, DynoJedisPipelineMonitor operationMonitor, ConnectionPoolMonitor connPoolMonitor) {
this.connPool = cPool;
this.opMonitor = operationMonitor;
this.cpMonitor = connPoolMonitor;
}
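// checkKey() lazily binds this pipeline to the first key it sees: the key is CAS'd into theKey,
// a connection is leased from the pool for that key (so every queued command routes to the same
// node), and a Jedis Pipeline is opened on it. Any later command using a different key fails fast.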
private void checkKey(final String key) {
if (theKey.get() != null) {
verifyKey(key);
} else {
boolean success = theKey.compareAndSet(null, key);
if (!success) {
// someone already beat us to it. that's fine, just verify that the key is the same
verifyKey(key);
} else {
try {
connection = connPool.getConnectionForOperation(new BaseOperation<Jedis, String>() {
@Override
public String getName() {
return DynoPipeline;
}
@Override
public String getKey() {
return key;
}
});
} catch (NoAvailableHostsException nahe) {
cpMonitor.incOperationFailure(connection != null ? connection.getHost() : null, nahe);
discardPipelineAndReleaseConnection();
throw nahe;
}
}
Jedis jedis = ((JedisConnection) connection).getClient();
jedisPipeline = jedis.pipelined();
cpMonitor.incOperationSuccess(connection.getHost(), 0);
}
}
private void verifyKey(final String key) {
if (!theKey.get().equals(key)) {
try {
throw new RuntimeException("Must have same key for Redis Pipeline in Dynomite");
} finally {
discardPipelineAndReleaseConnection();
}
}
}
private String decompressValue(String value) {
try {
if (ZipUtils.isCompressed(value)) {
return ZipUtils.decompressFromBase64String(value);
}
} catch (IOException e) {
Logger.warn("Unable to decompress value [" + value + "]");
}
return value;
}
private byte[] decompressValue(byte[] value) {
try {
if (ZipUtils.isCompressed(value)) {
return ZipUtils.decompressBytesNonBase64(value);
}
} catch (IOException e) {
Logger.warn("Unable to decompress byte array value [" + value + "]");
}
return value;
}
/**
* As long as JDK 7 and below are supported we need to define our own function interfaces
*/
private interface Func0<R> {
R call();
}
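// Func0<R> is a zero-argument factory, the JDK 7 stand-in for java.util.function.Supplier<R>.
// The compression-aware Pipeline*Response wrappers below use it to defer the underlying Jedis
// call, e.g. (illustrative): new PipelineResponse(null).apply(new Func0<Response<String>>() { ... });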
public class PipelineResponse extends Response<String> {
private Response<String> response;
public PipelineResponse(Builder<String> b) {
super(BuilderFactory.STRING);
}
public PipelineResponse apply(Func0<? extends Response<String>> f) {
this.response = f.call();
return this;
}
@Override
public String get() {
return decompressValue(response.get());
}
}
public class PipelineLongResponse extends Response<Long> {
private Response<Long> response;
public PipelineLongResponse(Builder<Long> b) {
super(b);
}
public PipelineLongResponse apply(Func0<? extends Response<Long>> f) {
this.response = f.call();
return this;
}
}
public class PipelineListResponse extends Response<List<String>> {
private Response<List<String>> response;
public PipelineListResponse(Builder<List<String>> b) {
super(BuilderFactory.STRING_LIST);
}
public PipelineListResponse apply(Func0<? extends Response<List<String>>> f) {
this.response = f.call();
return this;
}
@Override
public List<String> get() {
return new ArrayList<String>(CollectionUtils.transform(
response.get(),
new CollectionUtils.Transform<String, String>(){
@Override
public String get(String s) {
return decompressValue(s);
}
}
));
}
}
public class PipelineBinaryResponse extends Response<byte[]> {
private Response<byte[]> response;
public PipelineBinaryResponse(Builder<byte[]> b) {
super(BuilderFactory.BYTE_ARRAY);
}
public PipelineBinaryResponse apply(Func0<? extends Response<byte[]>> f) {
this.response = f.call();
return this;
}
@Override
public byte[] get() {
return decompressValue(response.get());
}
}
public class PipelineMapResponse extends Response<Map<String, String>> {
private Response<Map<String, String>> response;
public PipelineMapResponse(Builder<Map<String, String>> b) {
super(BuilderFactory.STRING_MAP);
}
@Override
public Map<String, String> get() {
return CollectionUtils.transform(
response.get(),
new CollectionUtils.MapEntryTransform<String, String, String>() {
@Override
public String get(String key, String val) {
return decompressValue(val);
}
}
);
}
}
public class PipelineBinaryMapResponse extends Response<Map<byte[], byte[]>> {
private Response<Map<byte[], byte[]>> response;
public PipelineBinaryMapResponse(Builder<Map<byte[], byte[]>> b) {
super(BuilderFactory.BYTE_ARRAY_MAP);
}
public PipelineBinaryMapResponse apply(Func0<? extends Response<Map<byte[], byte[]>>> f) {
this.response = f.call();
return this;
}
@Override
public Map<byte[], byte[]> get() {
return CollectionUtils.transform(
response.get(),
new CollectionUtils.MapEntryTransform<byte[], byte[], byte[]>() {
@Override
public byte[] get(byte[] key, byte[] val) {
return decompressValue(val);
}
}
);
}
}
private abstract class PipelineOperation<R> {
abstract Response<R> execute(Pipeline jedisPipeline) throws DynoException;
Response<R> execute(final byte[] key, final OpName opName) {
// For now simply convert the key into a String. Properly supporting this
// functionality requires significant changes to plumb this throughout for the LB
return execute(new String(key), opName);
}
Response<R> execute(final String key, final OpName opName) {
checkKey(key);
return executeOperation(opName);
}
Response<R> executeOperation(final OpName opName) {
try {
opMonitor.recordOperation(opName.name());
return execute(jedisPipeline);
} catch (JedisConnectionException ex) {
handleConnectionException(ex);
throw ex;
}
}
void handleConnectionException(JedisConnectionException ex) {
DynoException e = new FatalConnectionException(ex).setAttempt(1);
pipelineEx.set(e);
cpMonitor.incOperationFailure(connection.getHost(), e);
}
}
private abstract class PipelineCompressionOperation<R> extends PipelineOperation<R> {
/**
* Compresses the value based on the threshold defined by
* {@link ConnectionPoolConfiguration#getValueCompressionThreshold()}
*
* @param value
* @return
*/
public String compressValue(String value) {
String result = value;
int thresholdBytes = connPool.getConfiguration().getValueCompressionThreshold();
try {
// prefer speed over accuracy here so rather than using getBytes() to get the actual size
// just estimate using 2 bytes per character
if ((2 * value.length()) > thresholdBytes) {
result = ZipUtils.compressStringToBase64String(value);
}
} catch (IOException e) {
Logger.warn("UNABLE to compress [" + value + "]; sending value uncompressed");
}
return result;
}
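// Illustrative arithmetic (the threshold comes from configuration, not from this class): with
// getValueCompressionThreshold() == 1024, a 600-character String is estimated at 2 * 600 = 1200
// bytes and gets compressed, while a 400-character String (estimated 800 bytes) is sent as-is.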
public byte[] compressValue(byte[] value) {
int thresholdBytes = connPool.getConfiguration().getValueCompressionThreshold();
if (value.length > thresholdBytes) {
try {
return ZipUtils.compressBytesNonBase64(value);
} catch (IOException e) {
Logger.warn("UNABLE to compress byte array [" + value + "]; sending value uncompressed");
}
}
return value;
}
}
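// Each public command below is implemented by anonymously subclassing PipelineOperation (or its
// compression-aware variant): execute(key, opName) pins/validates the row key and records the
// operation, then delegates to the overridden execute(Pipeline), as append() illustrates next.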
@Override
public Response<Long> append(final String key, final String value) {
return new PipelineOperation<Long>() {
@Override
Response<Long> execute(Pipeline jedisPipeline) throws DynoException {
return jedisPipeline.append(key, value);
}
}.execute(key, OpName.APPEND);
}
@Override
public Response<List<String>> blpop(final String arg) {
return new PipelineOperation<List<String>>() {
@Override
Response<List<String>> execute(Pipeline jedisPipeline) throws DynoException {
return jedisPipeline.blpop(arg);
}
}.execute(arg, OpName.BLPOP);
}
@Override
public Response<List<String>> brpop(final String arg) {
return new PipelineOperation<List<String>>() {
@Override
Response<List<String>> execute(Pipeline jedisPipeline) throws DynoException {
return jedisPipeline.brpop(arg);
}
}.execute(arg, OpName.BRPOP);
}
@Override
public Response<Long> decr(final String key) {
return new PipelineOperation<Long>() {
@Override
Response<Long> execute(Pipeline jedisPipeline) throws DynoException {
return jedisPipeline.decr(key);
}
}.execute(key, OpName.DECR);
}
@Override
public Response<Long> decrBy(final String key, final long integer) {
return new PipelineOperation<Long>() {
@Override
Response<Long> execute(Pipeline jedisPipeline) throws DynoException {
return jedisPipeline.decrBy(key, integer);
}
}.execute(key, OpName.DECRBY);
}
@Override
public Response<Long> del(final String key) {
return new PipelineOperation<Long>() {
@Override
Response<Long> execute(Pipeline jedisPipeline) throws DynoException {
return jedisPipeline.del(key);
}
}.execute(key, OpName.DEL);
}
@Override
public Response<String> echo(final String string) {
return new PipelineOperation<String>() {
@Override
Response<String> execute(Pipeline jedisPipeline) throws DynoException {
return jedisPipeline.echo(string);
}
}.execute(string, OpName.ECHO);
}
@Override
public Response<Boolean> exists(final String key) {
return new PipelineOperation<Boolean>() {
@Override
Response<Boolean> execute(final Pipeline jedisPipeline) throws DynoException {
return jedisPipeline.exists(key);
}
}.execute(key, OpName.EXISTS);
}
@Override
public Response<Long> expire(final String key, final int seconds) {
return new PipelineOperation<Long>() {
@Override
Response<Long> execute(final Pipeline jedisPipeline) throws DynoException {
long startTime = System.nanoTime() / 1000;
try {
return jedisPipeline.expire(key, seconds);
} finally {
long duration = System.nanoTime() / 1000 - startTime;
opMonitor.recordSendLatency(OpName.EXPIRE.name(), duration, TimeUnit.MICROSECONDS);
}
}
}.execute(key, OpName.EXPIRE);
}
@Override
public Response<Long> pexpire(String key, long milliseconds) {
throw new UnsupportedOperationException("not yet implemented");
}
@Override
public Response<Long> expireAt(final String key, final long unixTime) {
return new PipelineOperation<Long>() {
@Override
Response<Long> execute(final Pipeline jedisPipeline) throws DynoException {
return jedisPipeline.expireAt(key, unixTime);
}
}.execute(key, OpName.EXPIREAT);
}
@Override
public Response<Long> pexpireAt(String key, long millisecondsTimestamp) {
throw new UnsupportedOperationException("not yet implemented");
}
@Override
public Response<String> get(final String key) {
if (CompressionStrategy.NONE == connPool.getConfiguration().getCompressionStrategy()) {
return new PipelineOperation<String>() {
@Override
Response<String> execute(Pipeline jedisPipeline) throws DynoException {
long startTime = System.nanoTime() / 1000;
try {
return jedisPipeline.get(key);
} finally {
long duration = System.nanoTime() / 1000 - startTime;
opMonitor.recordSendLatency(OpName.GET.name(), duration, TimeUnit.MICROSECONDS);
}
}
}.execute(key, OpName.GET);
} else {
return new PipelineCompressionOperation<String>() {
@Override
Response<String> execute(final Pipeline jedisPipeline) throws DynoException {
return new PipelineResponse(null).apply(new Func0<Response<String>>() {
@Override
public Response<String> call() {
return jedisPipeline.get(key);
}
});
}
}.execute(key, OpName.GET);
}
}
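// The two-branch shape above repeats for every value-carrying command that follows: with
// compression disabled the raw Jedis Response is returned directly; otherwise a Pipeline*Response
// wrapper is returned so get() transparently decompresses values (and writes compress them via compressValue).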
@Override
public Response<Boolean> getbit(final String key, final long offset) {
return new PipelineOperation<Boolean>() {
@Override
Response<Boolean> execute(Pipeline jedisPipeline) throws DynoException {
return jedisPipeline.getbit(key, offset);
}
}.execute(key, OpName.GETBIT);
}
@Override
public Response<String> getrange(final String key, final long startOffset, final long endOffset) {
return new PipelineOperation<String>() {
@Override
Response<String> execute(Pipeline jedisPipeline) throws DynoException {
return jedisPipeline.getrange(key, startOffset, endOffset);
}
}.execute(key, OpName.GETRANGE);
}
@Override
public Response<String> getSet(final String key, final String value) {
if (CompressionStrategy.NONE == connPool.getConfiguration().getCompressionStrategy()) {
return new PipelineOperation<String>() {
@Override
Response<String> execute(Pipeline jedisPipeline) throws DynoException {
return jedisPipeline.getSet(key, value);
}
}.execute(key, OpName.GETSET);
} else {
return new PipelineCompressionOperation<String>() {
@Override
Response<String> execute(final Pipeline jedisPipeline) throws DynoException {
return new PipelineResponse(null).apply(new Func0<Response<String>>() {
@Override
public Response<String> call() {
return jedisPipeline.getSet(key, compressValue(value));
}
});
}
}.execute(key, OpName.GETSET);
}
}
@Override
public Response<Long> hdel(final String key, final String... field) {
return new PipelineOperation<Long>() {
@Override
Response<Long> execute(Pipeline jedisPipeline) throws DynoException {
return jedisPipeline.hdel(key, field);
}
}.execute(key, OpName.HDEL);
}
@Override
public Response<Boolean> hexists(final String key, final String field) {
return new PipelineOperation<Boolean>() {
@Override
Response<Boolean> execute(Pipeline jedisPipeline) throws DynoException {
return jedisPipeline.hexists(key, field);
}
}.execute(key, OpName.HEXISTS);
}
@Override
public Response<String> hget(final String key, final String field) {
if (CompressionStrategy.NONE == connPool.getConfiguration().getCompressionStrategy()) {
return new PipelineOperation<String>() {
@Override
Response<String> execute(Pipeline jedisPipeline) throws DynoException {
return jedisPipeline.hget(key, field);
}
}.execute(key, OpName.HGET);
} else {
return new PipelineCompressionOperation<String>() {
@Override
Response<String> execute(final Pipeline jedisPipeline) throws DynoException {
return new PipelineResponse(null).apply(new Func0<Response<String>>() {
@Override
public Response<String> call() {
return jedisPipeline.hget(key, field);
}
});
}
}.execute(key, OpName.HGET);
}
}
/**
* This method is a BinaryRedisPipeline command which dyno does not yet properly support, therefore the
* interface is not yet implemented.
*/
public Response<byte[]> hget(final byte[] key, final byte[] field) {
if (CompressionStrategy.NONE == connPool.getConfiguration().getCompressionStrategy()) {
return new PipelineOperation<byte[]>() {
@Override
Response<byte[]> execute(Pipeline jedisPipeline) throws DynoException {
return jedisPipeline.hget(key, field);
}
}.execute(key, OpName.HGET);
} else {
return new PipelineCompressionOperation<byte[]>() {
@Override
Response<byte[]> execute(final Pipeline jedisPipeline) throws DynoException {
return new PipelineBinaryResponse(null).apply(new Func0<Response<byte[]>>() {
@Override
public Response<byte[]> call() {
return jedisPipeline.hget(key, field);
}
});
}
}.execute(key, OpName.HGET);
}
}
@Override
public Response<Map<String, String>> hgetAll(final String key) {
return new PipelineOperation<Map<String, String>>() {
@Override
Response<Map<String, String>> execute(Pipeline jedisPipeline) throws DynoException {
long startTime = System.nanoTime() / 1000;
try {
return jedisPipeline.hgetAll(key);
} finally {
long duration = System.nanoTime() / 1000 - startTime;
opMonitor.recordSendLatency(OpName.HGETALL.name(), duration, TimeUnit.MICROSECONDS);
}
}
}.execute(key, OpName.HGETALL);
}
/**
* This method is a BinaryRedisPipeline command which dyno does not yet properly support, therefore the
* interface is not yet implemented.
*/
public Response<Map<byte[], byte[]>> hgetAll(final byte[] key) {
if (CompressionStrategy.NONE == connPool.getConfiguration().getCompressionStrategy()) {
return new PipelineOperation<Map<byte[], byte[]>>() {
@Override
Response<Map<byte[], byte[]>> execute(Pipeline jedisPipeline) throws DynoException {
long startTime = System.nanoTime() / 1000;
try {
return jedisPipeline.hgetAll(key);
} finally {
long duration = System.nanoTime() / 1000 - startTime;
opMonitor.recordSendLatency(OpName.HGETALL.name(), duration, TimeUnit.MICROSECONDS);
}
}
}.execute(key, OpName.HGETALL);
} else {
return new PipelineCompressionOperation<Map<byte[], byte[]>>() {
@Override
Response<Map<byte[], byte[]>> execute(final Pipeline jedisPipeline) throws DynoException {
return new PipelineBinaryMapResponse(null).apply(new Func0<Response<Map<byte[], byte[]>>>() {
@Override
public Response<Map<byte[], byte[]>> call() {
return jedisPipeline.hgetAll(key);
}
});
}
}.execute(key, OpName.HGETALL);
}
}
@Override
public Response<Long> hincrBy(final String key, final String field, final long value) {
return new PipelineOperation<Long>() {
@Override
Response<Long> execute(Pipeline jedisPipeline) throws DynoException {
return jedisPipeline.hincrBy(key, field, value);
}
}.execute(key, OpName.HINCRBY);
}
/* not supported by RedisPipeline 2.7.3 */
public Response<Double> hincrByFloat(final String key, final String field, final double value) {
return new PipelineOperation<Double>() {
@Override
Response<Double> execute(Pipeline jedisPipeline) throws DynoException {
return jedisPipeline.hincrByFloat(key, field, value);
}
}.execute(key, OpName.HINCRBYFLOAT);
}
@Override
public Response<Set<String>> hkeys(final String key) {
return new PipelineOperation<Set<String>>() {
@Override
Response<Set<String>> execute(Pipeline jedisPipeline) throws DynoException {
return jedisPipeline.hkeys(key);
}
}.execute(key, OpName.HKEYS);
}
public Response<ScanResult<Map.Entry<String, String>>> hscan(final String key, int cursor) {
throw new UnsupportedOperationException("'HSCAN' cannot be called in pipeline");
}
@Override
public Response<Long> hlen(final String key) {
return new PipelineOperation<Long>() {
@Override
Response<Long> execute(Pipeline jedisPipeline) throws DynoException {
return jedisPipeline.hlen(key);
}
}.execute(key, OpName.HLEN);
}
/**
* This method is a BinaryRedisPipeline command which dyno does not yet properly support, therefore the
* interface is not yet implemented.
*/
public Response<List<byte[]>> hmget(final byte[] key, final byte[]... fields) {
return new PipelineOperation<List<byte[]>>() {
@Override
Response<List<byte[]>> execute(Pipeline jedisPipeline) throws DynoException {
long startTime = System.nanoTime() / 1000;
try {
return jedisPipeline.hmget(key, fields);
} finally {
long duration = System.nanoTime() / 1000 - startTime;
opMonitor.recordSendLatency(OpName.HMGET.name(), duration, TimeUnit.MICROSECONDS);
}
}
}.execute(key, OpName.HMGET);
}
@Override
public Response<List<String>> hmget(final String key, final String... fields) {
if (CompressionStrategy.NONE == connPool.getConfiguration().getCompressionStrategy()) {
return new PipelineOperation<List<String>>() {
@Override
Response<List<String>> execute(Pipeline jedisPipeline) throws DynoException {
long startTime = System.nanoTime() / 1000;
try {
return jedisPipeline.hmget(key, fields);
} finally {
long duration = System.nanoTime() / 1000 - startTime;
opMonitor.recordSendLatency(OpName.HMGET.name(), duration, TimeUnit.MICROSECONDS);
}
}
}.execute(key, OpName.HMGET);
} else {
return new PipelineCompressionOperation<List<String>>() {
@Override
Response<List<String>> execute(final Pipeline jedisPipeline) throws DynoException {
long startTime = System.nanoTime() / 1000;
try {
return new PipelineListResponse(null).apply(new Func0<Response<List<String>>>() {
@Override
public Response<List<String>> call() {
return jedisPipeline.hmget(key, fields);
}
});
} finally {
long duration = System.nanoTime() / 1000 - startTime;
opMonitor.recordSendLatency(OpName.HMGET.name(), duration, TimeUnit.MICROSECONDS);
}
}
}.execute(key, OpName.HMGET);
}
}
/**
* This method is a BinaryRedisPipeline command which dyno does not yet properly support, therefore the
* interface is not yet implemented since only a few binary commands are present.
*/
public Response<String> hmset(final byte[] key, final Map<byte[], byte[]> hash) {
if (CompressionStrategy.NONE == connPool.getConfiguration().getCompressionStrategy()) {
return new PipelineOperation<String>() {
@Override
Response<String> execute(Pipeline jedisPipeline) throws DynoException {
long startTime = System.nanoTime() / 1000;
try {
return jedisPipeline.hmset(key, hash);
} finally {
long duration = System.nanoTime() / 1000 - startTime;
opMonitor.recordSendLatency(OpName.HMSET.name(), duration, TimeUnit.MICROSECONDS);
}
}
}.execute(key, OpName.HMSET);
} else {
return new PipelineCompressionOperation<String>() {
@Override
Response<String> execute(final Pipeline jedisPipeline) throws DynoException {
return new PipelineResponse(null).apply(new Func0<Response<String>>() {
@Override
public Response<String> call() {
return jedisPipeline.hmset(key,
CollectionUtils.transform(hash,
new CollectionUtils.MapEntryTransform<byte[], byte[], byte[]>() {
@Override
public byte[] get(byte[] key, byte[] val) {
return compressValue(val);
}
}
));
}
});
}
}.execute(key, OpName.HMSET);
}
}
@Override
public Response<String> hmset(final String key, final Map<String, String> hash) {
if (CompressionStrategy.NONE == connPool.getConfiguration().getCompressionStrategy()) {
return new PipelineOperation<String>() {
@Override
Response<String> execute(Pipeline jedisPipeline) throws DynoException {
long startTime = System.nanoTime() / 1000;
try {
return jedisPipeline.hmset(key, hash);
} finally {
long duration = System.nanoTime() / 1000 - startTime;
opMonitor.recordSendLatency(OpName.HMSET.name(), duration, TimeUnit.MICROSECONDS);
}
}
}.execute(key, OpName.HMSET);
} else {
return new PipelineCompressionOperation<String>() {
@Override
Response<String> execute(final Pipeline jedisPipeline) throws DynoException {
return new PipelineResponse(null).apply(new Func0<Response<String>>() {
@Override
public Response<String> call() {
return jedisPipeline.hmset(key,
CollectionUtils.transform(hash, new CollectionUtils.MapEntryTransform<String, String, String>() {
@Override
public String get(String key, String val) {
return compressValue(val);
}
})
);
}
});
}
}.execute(key, OpName.HMSET);
}
}
@Override
public Response<Long> hset(final String key, final String field, final String value) {
if (CompressionStrategy.NONE == connPool.getConfiguration().getCompressionStrategy()) {
return new PipelineOperation<Long>() {
@Override
Response<Long> execute(Pipeline jedisPipeline) throws DynoException {
return jedisPipeline.hset(key, field, value);
}
}.execute(key, OpName.HSET);
} else {
return new PipelineCompressionOperation<Long>() {
@Override
Response<Long> execute(final Pipeline jedisPipeline) throws DynoException {
return new PipelineLongResponse(null).apply(new Func0<Response<Long>>() {
@Override
public Response<Long> call() {
return jedisPipeline.hset(key, field, compressValue(value));
}
});
}
}.execute(key, OpName.HSET);
}
}
/**
* This method is a BinaryRedisPipeline command which dyno does not yet properly support, therefore the
* interface is not yet implemented.
*/
public Response<Long> hset(final byte[] key, final byte[] field, final byte[] value) {
if (CompressionStrategy.NONE == connPool.getConfiguration().getCompressionStrategy()) {
return new PipelineOperation<Long>() {
@Override
Response<Long> execute(Pipeline jedisPipeline) throws DynoException {
return jedisPipeline.hset(key, field, value);
}
}.execute(key, OpName.HSET);
} else {
return new PipelineCompressionOperation<Long>() {
@Override
Response<Long> execute(final Pipeline jedisPipeline) throws DynoException {
return new PipelineLongResponse(null).apply(new Func0<Response<Long>>() {
@Override
public Response<Long> call() {
return jedisPipeline.hset(key, field, compressValue(value));
}
});
}
}.execute(key, OpName.HSET);
}
}
@Override
public Response<Long> hsetnx(final String key, final String field, final String value) {
if (CompressionStrategy.NONE == connPool.getConfiguration().getCompressionStrategy()) {
return new PipelineOperation<Long>() {
@Override
Response<Long> execute(Pipeline jedisPipeline) throws DynoException {
return jedisPipeline.hsetnx(key, field, value);
}
}.execute(key, OpName.HSETNX);
} else {
return new PipelineCompressionOperation<Long>() {
@Override
Response<Long> execute(final Pipeline jedisPipeline) throws DynoException {
return new PipelineLongResponse(null).apply(new Func0<Response<Long>>() {
@Override
public Response<Long> call() {
return jedisPipeline.hsetnx(key, field, compressValue(value));
}
});
}
}.execute(key, OpName.HSETNX);
}
}
@Override
public Response<List<String>> hvals(final String key) {
if (CompressionStrategy.NONE == connPool.getConfiguration().getCompressionStrategy()) {
return new PipelineOperation<List<String>>() {
@Override
Response<List<String>> execute(Pipeline jedisPipeline) throws DynoException {
return jedisPipeline.hvals(key);
}
}.execute(key, OpName.HVALS);
} else {
return new PipelineCompressionOperation<List<String>>() {
@Override
Response<List<String>> execute(final Pipeline jedisPipeline) throws DynoException {
return new PipelineListResponse(null).apply(new Func0<Response<List<String>>>() {
@Override
public Response<List<String>> call() {
return jedisPipeline.hvals(key);
}
});
}
}.execute(key, OpName.HVALS);
}
}
@Override
public Response<Long> incr(final String key) {
return new PipelineOperation<Long>() {
@Override
Response<Long> execute(Pipeline jedisPipeline) throws DynoException {
return jedisPipeline.incr(key);
}
}.execute(key, OpName.INCR);
}
@Override
public Response<Long> incrBy(final String key, final long integer) {
return new PipelineOperation<Long>() {
@Override
Response<Long> execute(Pipeline jedisPipeline) throws DynoException {
return jedisPipeline.incrBy(key, integer);
}
}.execute(key, OpName.INCRBY);
}
/* not supported by RedisPipeline 2.7.3 */
public Response<Double> incrByFloat(final String key, final double increment) {
return new PipelineOperation<Double>() {
@Override
Response<Double> execute(Pipeline jedisPipeline) throws DynoException {
return jedisPipeline.incrByFloat(key, increment);
}
}.execute(key, OpName.INCRBYFLOAT);
}
@Override
public Response<String> lindex(final String key, final long index) {
return new PipelineOperation<String>() {
@Override
Response<String> execute(Pipeline jedisPipeline) throws DynoException {
return jedisPipeline.lindex(key, index);
}
}.execute(key, OpName.LINDEX);
}
@Override
public Response<Long> linsert(final String key, final LIST_POSITION where, final String pivot, final String value) {
return new PipelineOperation<Long>() {
@Override
Response<Long> execute(Pipeline jedisPipeline) throws DynoException {
return jedisPipeline.linsert(key, where, pivot, value);
}
}.execute(key, OpName.LINSERT);
}
@Override
public Response<Long> llen(final String key) {
return new PipelineOperation<Long>() {
@Override
Response<Long> execute(Pipeline jedisPipeline) throws DynoException {
return jedisPipeline.llen(key);
}
}.execute(key, OpName.LLEN);
}
@Override
public Response<String> lpop(final String key) {
return new PipelineOperation<String>() {
@Override
Response<String> execute(Pipeline jedisPipeline) throws DynoException {
return jedisPipeline.lpop(key);
}
}.execute(key, OpName.LPOP);
}
@Override
public Response<Long> lpush(final String key, final String... string) {
return new PipelineOperation<Long>() {
@Override
Response<Long> execute(Pipeline jedisPipeline) throws DynoException {
return jedisPipeline.lpush(key, string);
}
}.execute(key, OpName.LPUSH);
}
@Override
public Response<Long> lpushx(final String key, final String... string) {
return new PipelineOperation<Long>() {
@Override
Response<Long> execute(Pipeline jedisPipeline) throws DynoException {
return jedisPipeline.lpushx(key, string);
}
}.execute(key, OpName.LPUSHX);
}
@Override
public Response<List<String>> lrange(final String key, final long start, final long end) {
return new PipelineOperation<List<String>>() {
@Override
Response<List<String>> execute(Pipeline jedisPipeline) throws DynoException {
return jedisPipeline.lrange(key, start, end);
}
}.execute(key, OpName.LRANGE);
}
@Override
public Response<Long> lrem(final String key, final long count, final String value) {
return new PipelineOperation<Long>() {
@Override
Response<Long> execute(Pipeline jedisPipeline) throws DynoException {
return jedisPipeline.lrem(key, count, value);
}
}.execute(key, OpName.LREM);
}
@Override
public Response<String> lset(final String key, final long index, final String value) {
return new PipelineOperation<String>() {
@Override
Response<String> execute(Pipeline jedisPipeline) throws DynoException {
return jedisPipeline.lset(key, index, value);
}
}.execute(key, OpName.LSET);
}
@Override
public Response<String> ltrim(final String key, final long start, final long end) {
return new PipelineOperation<String>() {
@Override
Response<String> execute(Pipeline jedisPipeline) throws DynoException {
return jedisPipeline.ltrim(key, start, end);
}
}.execute(key, OpName.LTRIM);
}
@Override
public Response<Long> move(final String key, final int dbIndex) {
return new PipelineOperation<Long>() {
@Override
Response<Long> execute(Pipeline jedisPipeline) throws DynoException {
return jedisPipeline.move(key, dbIndex);
}
}.execute(key, OpName.MOVE);
}
@Override
public Response<Long> persist(final String key) {
return new PipelineOperation<Long>() {
@Override
Response<Long> execute(Pipeline jedisPipeline) throws DynoException {
return jedisPipeline.persist(key);
}
}.execute(key, OpName.PERSIST);
}
/* not supported by RedisPipeline 2.7.3 */
public Response<String> rename(final String oldkey, final String newkey) {
return new PipelineOperation<String>() {
@Override
Response<String> execute(Pipeline jedisPipeline) throws DynoException {
return jedisPipeline.rename(oldkey, newkey);
}
}.execute(oldkey, OpName.RENAME);
}
/* not supported by RedisPipeline 2.7.3 */
public Response<Long> renamenx(final String oldkey, final String newkey) {
return new PipelineOperation<Long>() {
@Override
Response<Long> execute(Pipeline jedisPipeline) throws DynoException {
return jedisPipeline.renamenx(oldkey, newkey);
}
}.execute(oldkey, OpName.RENAMENX);
}
@Override
public Response<String> rpop(final String key) {
return new PipelineOperation<String>() {
@Override
Response<String> execute(Pipeline jedisPipeline) throws DynoException {