/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.cassandra.tools;
import java.io.*;
import java.lang.management.MemoryUsage;
import java.net.InetAddress;
import java.net.UnknownHostException;
import java.text.DecimalFormat;
import java.text.SimpleDateFormat;
import java.util.*;
import java.util.Map.Entry;
import java.util.concurrent.ExecutionException;
import javax.management.openmbean.*;
import com.google.common.base.Joiner;
import com.google.common.base.Throwables;
import com.google.common.collect.*;
import com.yammer.metrics.reporting.JmxReporter;
import io.airlift.command.*;
import org.apache.cassandra.concurrent.JMXEnabledThreadPoolExecutorMBean;
import org.apache.cassandra.db.ColumnFamilyStoreMBean;
import org.apache.cassandra.db.Keyspace;
import org.apache.cassandra.db.compaction.CompactionManagerMBean;
import org.apache.cassandra.db.compaction.OperationType;
import org.apache.cassandra.io.util.FileUtils;
import org.apache.cassandra.locator.EndpointSnitchInfoMBean;
import org.apache.cassandra.metrics.ColumnFamilyMetrics.Sampler;
import org.apache.cassandra.net.MessagingServiceMBean;
import org.apache.cassandra.repair.RepairParallelism;
import org.apache.cassandra.service.CacheServiceMBean;
import org.apache.cassandra.service.StorageProxyMBean;
import org.apache.cassandra.streaming.ProgressInfo;
import org.apache.cassandra.streaming.SessionInfo;
import org.apache.cassandra.streaming.StreamState;
import org.apache.cassandra.utils.FBUtilities;
import org.apache.cassandra.utils.JVMStabilityInspector;
import org.apache.commons.lang3.ArrayUtils;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkState;
import static com.google.common.base.Throwables.getStackTraceAsString;
import static com.google.common.collect.Iterables.toArray;
import static com.google.common.collect.Lists.newArrayList;
import static java.lang.Integer.parseInt;
import static java.lang.String.format;
import static org.apache.commons.lang3.ArrayUtils.EMPTY_STRING_ARRAY;
import static org.apache.commons.lang3.StringUtils.*;
public class NodeTool
{
private static final String HISTORYFILE = "nodetool.history";
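    // Illustrative invocations (127.0.0.1:7199 are the built-in defaults below; the
    // host and password-file path shown here are hypothetical examples):
    //   nodetool status
    //   nodetool -h 10.0.0.1 -p 7199 cfstats
    //   nodetool -u jmxuser -pwf /path/to/jmxremote.password repair my_keyspace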
public static void main(String... args)
{
        List<Class<? extends Runnable>> commands = newArrayList(
Help.class,
Info.class,
Ring.class,
NetStats.class,
CfStats.class,
CfHistograms.class,
Cleanup.class,
ClearSnapshot.class,
Compact.class,
Scrub.class,
Flush.class,
UpgradeSSTable.class,
DisableAutoCompaction.class,
EnableAutoCompaction.class,
CompactionStats.class,
CompactionHistory.class,
Decommission.class,
DescribeCluster.class,
DisableBinary.class,
EnableBinary.class,
EnableGossip.class,
DisableGossip.class,
EnableHandoff.class,
EnableThrift.class,
GcStats.class,
GetCompactionThreshold.class,
GetCompactionThroughput.class,
GetStreamThroughput.class,
GetEndpoints.class,
GetSSTables.class,
GossipInfo.class,
InvalidateKeyCache.class,
InvalidateRowCache.class,
InvalidateCounterCache.class,
Join.class,
Move.class,
PauseHandoff.class,
ResumeHandoff.class,
ProxyHistograms.class,
Rebuild.class,
Refresh.class,
RemoveToken.class,
RemoveNode.class,
Repair.class,
SetCacheCapacity.class,
SetHintedHandoffThrottleInKB.class,
SetCompactionThreshold.class,
SetCompactionThroughput.class,
SetStreamThroughput.class,
SetTraceProbability.class,
Snapshot.class,
ListSnapshots.class,
Status.class,
StatusBinary.class,
StatusGossip.class,
StatusThrift.class,
Stop.class,
StopDaemon.class,
Version.class,
DescribeRing.class,
RebuildIndex.class,
RangeKeySample.class,
EnableBackup.class,
DisableBackup.class,
ResetLocalSchema.class,
ReloadTriggers.class,
SetCacheKeysToSave.class,
DisableThrift.class,
DisableHandoff.class,
Drain.class,
TruncateHints.class,
TpStats.class,
TopPartitions.class,
SetLoggingLevel.class,
GetLoggingLevels.class
);
        Cli<Runnable> parser = Cli.<Runnable>builder("nodetool")
.withDescription("Manage your Cassandra cluster")
.withDefaultCommand(Help.class)
.withCommands(commands)
.build();
int status = 0;
try
{
Runnable parse = parser.parse(args);
printHistory(args);
parse.run();
} catch (IllegalArgumentException |
IllegalStateException |
ParseArgumentsMissingException |
ParseArgumentsUnexpectedException |
ParseOptionConversionException |
ParseOptionMissingException |
ParseOptionMissingValueException |
ParseCommandMissingException |
ParseCommandUnrecognizedException e)
{
badUse(e);
status = 1;
} catch (Throwable throwable)
{
err(Throwables.getRootCause(throwable));
status = 2;
}
System.exit(status);
}
private static void printHistory(String... args)
{
//don't bother to print if no args passed (meaning, nodetool is just printing out the sub-commands list)
if (args.length == 0)
return;
String cmdLine = Joiner.on(" ").skipNulls().join(args);
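        // Blank out the value following -pw/--password so credentials never reach the
        // history file; the lookbehind keeps the flag itself and removes only its argument.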
cmdLine = cmdLine.replaceFirst("(?<=(-pw|--password))\\s+\\S+", " ");
try (FileWriter writer = new FileWriter(new File(FBUtilities.getToolsOutputDirectory(), HISTORYFILE), true))
{
SimpleDateFormat sdf = new SimpleDateFormat("yyyy-MM-dd HH:mm:ss,SSS");
writer.append(sdf.format(new Date())).append(": ").append(cmdLine).append(System.lineSeparator());
}
catch (IOException | IOError ioe)
{
//quietly ignore any errors about not being able to write out history
}
}
private static void badUse(Exception e)
{
System.out.println("nodetool: " + e.getMessage());
        System.out.println("See 'nodetool help' or 'nodetool help <command>'.");
}
private static void err(Throwable e)
{
System.err.println("error: " + e.getMessage());
System.err.println("-- StackTrace --");
System.err.println(getStackTraceAsString(e));
}
public static abstract class NodeToolCmd implements Runnable
{
@Option(type = OptionType.GLOBAL, name = {"-h", "--host"}, description = "Node hostname or ip address")
private String host = "127.0.0.1";
@Option(type = OptionType.GLOBAL, name = {"-p", "--port"}, description = "Remote jmx agent port number")
private String port = "7199";
@Option(type = OptionType.GLOBAL, name = {"-u", "--username"}, description = "Remote jmx agent username")
private String username = EMPTY;
@Option(type = OptionType.GLOBAL, name = {"-pw", "--password"}, description = "Remote jmx agent password")
private String password = EMPTY;
@Option(type = OptionType.GLOBAL, name = {"-pwf", "--password-file"}, description = "Path to the JMX password file")
private String passwordFilePath = EMPTY;
@Override
public void run()
{
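            // Resolve JMX credentials before connecting: consult the password file first
            // (if one was supplied), then fall back to prompting on the console.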
if (isNotEmpty(username)) {
if (isNotEmpty(passwordFilePath))
password = readUserPasswordFromFile(username, passwordFilePath);
if (isEmpty(password))
password = promptAndReadPassword();
}
try (NodeProbe probe = connect())
{
execute(probe);
}
catch (IOException e)
{
throw new RuntimeException("Error while closing JMX connection", e);
}
}
private String readUserPasswordFromFile(String username, String passwordFilePath) {
String password = EMPTY;
File passwordFile = new File(passwordFilePath);
try (Scanner scanner = new Scanner(passwordFile).useDelimiter("\\s+"))
{
while (scanner.hasNextLine())
{
if (scanner.hasNext())
{
String jmxRole = scanner.next();
if (jmxRole.equals(username) && scanner.hasNext())
{
password = scanner.next();
break;
}
}
scanner.nextLine();
}
} catch (FileNotFoundException e)
{
throw new RuntimeException(e);
}
return password;
}
private String promptAndReadPassword()
{
String password = EMPTY;
Console console = System.console();
if (console != null)
password = String.valueOf(console.readPassword("Password:"));
return password;
}
protected abstract void execute(NodeProbe probe);
private NodeProbe connect()
{
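            // A failed JMX connection prints the root cause and exits the process,
            // so callers never observe a null probe.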
NodeProbe nodeClient = null;
try
{
if (username.isEmpty())
nodeClient = new NodeProbe(host, parseInt(port));
else
nodeClient = new NodeProbe(host, parseInt(port), username, password);
} catch (IOException e)
{
Throwable rootCause = Throwables.getRootCause(e);
System.err.println(format("nodetool: Failed to connect to '%s:%s' - %s: '%s'.", host, port, rootCause.getClass().getSimpleName(), rootCause.getMessage()));
System.exit(1);
}
return nodeClient;
}
        protected List<String> parseOptionalKeyspace(List<String> cmdArgs, NodeProbe nodeProbe)
{
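            // With no arguments every keyspace is targeted; otherwise only the first
            // argument is taken as the keyspace name. Unknown keyspaces fail fast below.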
            List<String> keyspaces = new ArrayList<>();
if (cmdArgs == null || cmdArgs.isEmpty())
keyspaces.addAll(nodeProbe.getKeyspaces());
else
keyspaces.add(cmdArgs.get(0));
for (String keyspace : keyspaces)
{
if (!nodeProbe.getKeyspaces().contains(keyspace))
throw new IllegalArgumentException("Keyspace [" + keyspace + "] does not exist.");
}
return Collections.unmodifiableList(keyspaces);
}
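        // Any arguments after the keyspace name are interpreted as column family names.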
        protected String[] parseOptionalColumnFamilies(List<String> cmdArgs)
{
return cmdArgs.size() <= 1 ? EMPTY_STRING_ARRAY : toArray(cmdArgs.subList(1, cmdArgs.size()), String.class);
}
}
@Command(name = "info", description = "Print node information (uptime, load, ...)")
public static class Info extends NodeToolCmd
{
@Option(name = {"-T", "--tokens"}, description = "Display all tokens")
private boolean tokens = false;
@Override
public void execute(NodeProbe probe)
{
boolean gossipInitialized = probe.isInitialized();
System.out.printf("%-23s: %s%n", "ID", probe.getLocalHostId());
System.out.printf("%-23s: %s%n", "Gossip active", gossipInitialized);
System.out.printf("%-23s: %s%n", "Thrift active", probe.isThriftServerRunning());
System.out.printf("%-23s: %s%n", "Native Transport active", probe.isNativeTransportRunning());
System.out.printf("%-23s: %s%n", "Load", probe.getLoadString());
if (gossipInitialized)
System.out.printf("%-23s: %s%n", "Generation No", probe.getCurrentGenerationNumber());
else
System.out.printf("%-23s: %s%n", "Generation No", 0);
// Uptime
long secondsUp = probe.getUptime() / 1000;
System.out.printf("%-23s: %d%n", "Uptime (seconds)", secondsUp);
// Memory usage
MemoryUsage heapUsage = probe.getHeapMemoryUsage();
double memUsed = (double) heapUsage.getUsed() / (1024 * 1024);
double memMax = (double) heapUsage.getMax() / (1024 * 1024);
System.out.printf("%-23s: %.2f / %.2f%n", "Heap Memory (MB)", memUsed, memMax);
System.out.printf("%-23s: %.2f%n", "Off Heap Memory (MB)", getOffHeapMemoryUsed(probe));
// Data Center/Rack
System.out.printf("%-23s: %s%n", "Data Center", probe.getDataCenter());
System.out.printf("%-23s: %s%n", "Rack", probe.getRack());
// Exceptions
System.out.printf("%-23s: %s%n", "Exceptions", probe.getStorageMetric("Exceptions"));
CacheServiceMBean cacheService = probe.getCacheServiceMBean();
// Key Cache: Hits, Requests, RecentHitRate, SavePeriodInSeconds
System.out.printf("%-23s: entries %d, size %s, capacity %s, %d hits, %d requests, %.3f recent hit rate, %d save period in seconds%n",
"Key Cache",
probe.getCacheMetric("KeyCache", "Entries"),
FileUtils.stringifyFileSize((long) probe.getCacheMetric("KeyCache", "Size")),
FileUtils.stringifyFileSize((long) probe.getCacheMetric("KeyCache", "Capacity")),
probe.getCacheMetric("KeyCache", "Hits"),
probe.getCacheMetric("KeyCache", "Requests"),
probe.getCacheMetric("KeyCache", "HitRate"),
cacheService.getKeyCacheSavePeriodInSeconds());
// Row Cache: Hits, Requests, RecentHitRate, SavePeriodInSeconds
System.out.printf("%-23s: entries %d, size %s, capacity %s, %d hits, %d requests, %.3f recent hit rate, %d save period in seconds%n",
"Row Cache",
probe.getCacheMetric("RowCache", "Entries"),
FileUtils.stringifyFileSize((long) probe.getCacheMetric("RowCache", "Size")),
FileUtils.stringifyFileSize((long) probe.getCacheMetric("RowCache", "Capacity")),
probe.getCacheMetric("RowCache", "Hits"),
probe.getCacheMetric("RowCache", "Requests"),
probe.getCacheMetric("RowCache", "HitRate"),
cacheService.getRowCacheSavePeriodInSeconds());
// Counter Cache: Hits, Requests, RecentHitRate, SavePeriodInSeconds
System.out.printf("%-23s: entries %d, size %s, capacity %s, %d hits, %d requests, %.3f recent hit rate, %d save period in seconds%n",
"Counter Cache",
probe.getCacheMetric("CounterCache", "Entries"),
FileUtils.stringifyFileSize((long) probe.getCacheMetric("CounterCache", "Size")),
FileUtils.stringifyFileSize((long) probe.getCacheMetric("CounterCache", "Capacity")),
probe.getCacheMetric("CounterCache", "Hits"),
probe.getCacheMetric("CounterCache", "Requests"),
probe.getCacheMetric("CounterCache", "HitRate"),
cacheService.getCounterCacheSavePeriodInSeconds());
// Tokens
            List<String> tokens = probe.getTokens();
if (tokens.size() == 1 || this.tokens)
for (String token : tokens)
System.out.printf("%-23s: %s%n", "Token", token);
else
System.out.printf("%-23s: (invoke with -T/--tokens to see all %d tokens)%n", "Token", tokens.size());
}
/**
* Returns the total off heap memory used in MB.
* @return the total off heap memory used in MB.
*/
private static double getOffHeapMemoryUsed(NodeProbe probe)
{
long offHeapMemUsedInBytes = 0;
// get a list of column family stores
            Iterator<Map.Entry<String, ColumnFamilyStoreMBean>> cfamilies = probe.getColumnFamilyStoreMBeanProxies();
while (cfamilies.hasNext())
{
                Entry<String, ColumnFamilyStoreMBean> entry = cfamilies.next();
String keyspaceName = entry.getKey();
String cfName = entry.getValue().getColumnFamilyName();
offHeapMemUsedInBytes += (Long) probe.getColumnFamilyMetric(keyspaceName, cfName, "MemtableOffHeapSize");
offHeapMemUsedInBytes += (Long) probe.getColumnFamilyMetric(keyspaceName, cfName, "BloomFilterOffHeapMemoryUsed");
offHeapMemUsedInBytes += (Long) probe.getColumnFamilyMetric(keyspaceName, cfName, "IndexSummaryOffHeapMemoryUsed");
offHeapMemUsedInBytes += (Long) probe.getColumnFamilyMetric(keyspaceName, cfName, "CompressionMetadataOffHeapMemoryUsed");
}
return offHeapMemUsedInBytes / (1024d * 1024);
}
}
@Command(name = "ring", description = "Print information about the token ring")
public static class Ring extends NodeToolCmd
{
@Arguments(description = "Specify a keyspace for accurate ownership information (topology awareness)")
private String keyspace = null;
@Option(title = "resolve_ip", name = {"-r", "--resolve-ip"}, description = "Show node domain names instead of IPs")
private boolean resolveIp = false;
@Override
public void execute(NodeProbe probe)
{
            Map<String, String> tokensToEndpoints = probe.getTokenToEndpointMap();
            LinkedHashMultimap<String, String> endpointsToTokens = LinkedHashMultimap.create();
boolean haveVnodes = false;
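            // Seeing an endpoint a second time means it owns multiple tokens,
            // i.e. the cluster uses virtual nodes.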
            for (Map.Entry<String, String> entry : tokensToEndpoints.entrySet())
{
haveVnodes |= endpointsToTokens.containsKey(entry.getValue());
endpointsToTokens.put(entry.getValue(), entry.getKey());
}
            int maxAddressLength = Collections.max(endpointsToTokens.keys(), new Comparator<String>()
{
@Override
public int compare(String first, String second)
{
return ((Integer) first.length()).compareTo(second.length());
}
}).length();
String formatPlaceholder = "%%-%ds %%-12s%%-7s%%-8s%%-16s%%-20s%%-44s%%n";
String format = format(formatPlaceholder, maxAddressLength);
StringBuffer errors = new StringBuffer();
boolean showEffectiveOwnership = true;
// Calculate per-token ownership of the ring
            Map<InetAddress, Float> ownerships;
try
{
ownerships = probe.effectiveOwnership(keyspace);
}
catch (IllegalStateException ex)
{
ownerships = probe.getOwnership();
errors.append("Note: " + ex.getMessage() + "%n");
showEffectiveOwnership = false;
}
catch (IllegalArgumentException ex)
{
System.out.printf("%nError: " + ex.getMessage() + "%n");
return;
}
System.out.println();
            for (Entry<String, SetHostStat> entry : getOwnershipByDc(probe, resolveIp, tokensToEndpoints, ownerships).entrySet())
                printDc(probe, format, entry.getKey(), endpointsToTokens, entry.getValue(), showEffectiveOwnership);
if (haveVnodes)
{
System.out.println(" Warning: \"nodetool ring\" is used to output all the tokens of a node.");
System.out.println(" To view status related info of a node use \"nodetool status\" instead.\n");
}
System.out.printf("%n " + errors.toString());
}
        private void printDc(NodeProbe probe, String format,
                             String dc,
                             LinkedHashMultimap<String, String> endpointsToTokens,
                             SetHostStat hoststats, boolean showEffectiveOwnership)
{
            Collection<String> liveNodes = probe.getLiveNodes();
            Collection<String> deadNodes = probe.getUnreachableNodes();
            Collection<String> joiningNodes = probe.getJoiningNodes();
            Collection<String> leavingNodes = probe.getLeavingNodes();
            Collection<String> movingNodes = probe.getMovingNodes();
            Map<String, String> loadMap = probe.getLoadMap();
System.out.println("Datacenter: " + dc);
System.out.println("==========");
// get the total amount of replicas for this dc and the last token in this dc's ring
            List<String> tokens = new ArrayList<>();
String lastToken = "";
for (HostStat stat : hoststats)
{
tokens.addAll(endpointsToTokens.get(stat.endpoint.getHostAddress()));
lastToken = tokens.get(tokens.size() - 1);
}
System.out.printf(format, "Address", "Rack", "Status", "State", "Load", "Owns", "Token");
if (hoststats.size() > 1)
System.out.printf(format, "", "", "", "", "", "", lastToken);
else
System.out.println();
for (HostStat stat : hoststats)
{
String endpoint = stat.endpoint.getHostAddress();
String rack;
try
{
rack = probe.getEndpointSnitchInfoProxy().getRack(endpoint);
}
catch (UnknownHostException e)
{
rack = "Unknown";
}
String status = liveNodes.contains(endpoint)
? "Up"
: deadNodes.contains(endpoint)
? "Down"
: "?";
String state = "Normal";
if (joiningNodes.contains(endpoint))
state = "Joining";
else if (leavingNodes.contains(endpoint))
state = "Leaving";
else if (movingNodes.contains(endpoint))
state = "Moving";
String load = loadMap.containsKey(endpoint)
? loadMap.get(endpoint)
: "?";
                String owns = stat.owns != null && showEffectiveOwnership ? new DecimalFormat("##0.00%").format(stat.owns) : "?";
System.out.printf(format, stat.ipOrDns(), rack, status, state, load, owns, stat.token);
}
System.out.println();
}
}
@Command(name = "netstats", description = "Print network information on provided host (connecting node by default)")
public static class NetStats extends NodeToolCmd
{
@Option(title = "human_readable",
name = {"-H", "--human-readable"},
description = "Display bytes in human readable form, i.e. KB, MB, GB, TB")
private boolean humanReadable = false;
@Override
public void execute(NodeProbe probe)
{
System.out.printf("Mode: %s%n", probe.getOperationMode());
            Set<StreamState> statuses = probe.getStreamStatus();
if (statuses.isEmpty())
System.out.println("Not sending any streams.");
for (StreamState status : statuses)
{
System.out.printf("%s %s%n", status.description, status.planId.toString());
for (SessionInfo info : status.sessions)
{
System.out.printf(" %s", info.peer.toString());
// print private IP when it is used
if (!info.peer.equals(info.connecting))
{
System.out.printf(" (using %s)", info.connecting.toString());
}
System.out.printf("%n");
if (!info.receivingSummaries.isEmpty())
{
if (humanReadable)
System.out.printf(" Receiving %d files, %s total%n", info.getTotalFilesToReceive(), FileUtils.stringifyFileSize(info.getTotalSizeToReceive()));
else
System.out.printf(" Receiving %d files, %d bytes total%n", info.getTotalFilesToReceive(), info.getTotalSizeToReceive());
for (ProgressInfo progress : info.getReceivingFiles())
{
System.out.printf(" %s%n", progress.toString());
}
}
if (!info.sendingSummaries.isEmpty())
{
if (humanReadable)
System.out.printf(" Sending %d files, %s total%n", info.getTotalFilesToSend(), FileUtils.stringifyFileSize(info.getTotalSizeToSend()));
else
System.out.printf(" Sending %d files, %d bytes total%n", info.getTotalFilesToSend(), info.getTotalSizeToSend());
for (ProgressInfo progress : info.getSendingFiles())
{
System.out.printf(" %s%n", progress.toString());
}
}
}
}
if (!probe.isStarting())
{
System.out.printf("Read Repair Statistics:%nAttempted: %d%nMismatch (Blocking): %d%nMismatch (Background): %d%n", probe.getReadRepairAttempted(), probe.getReadRepairRepairedBlocking(), probe.getReadRepairRepairedBackground());
MessagingServiceMBean ms = probe.msProxy;
System.out.printf("%-25s", "Pool Name");
System.out.printf("%10s", "Active");
System.out.printf("%10s", "Pending");
System.out.printf("%15s%n", "Completed");
int pending;
long completed;
pending = 0;
for (int n : ms.getCommandPendingTasks().values())
pending += n;
completed = 0;
for (long n : ms.getCommandCompletedTasks().values())
completed += n;
System.out.printf("%-25s%10s%10s%15s%n", "Commands", "n/a", pending, completed);
pending = 0;
for (int n : ms.getResponsePendingTasks().values())
pending += n;
completed = 0;
for (long n : ms.getResponseCompletedTasks().values())
completed += n;
System.out.printf("%-25s%10s%10s%15s%n", "Responses", "n/a", pending, completed);
}
}
}
@Command(name = "cfstats", description = "Print statistics on column families")
public static class CfStats extends NodeToolCmd
{
        @Arguments(usage = "[<keyspace.cfname>...]", description = "List of column families (or keyspace) names")
        private List<String> cfnames = new ArrayList<>();
@Option(name = "-i", description = "Ignore the list of column families and display the remaining cfs")
private boolean ignore = false;
@Option(title = "human_readable",
name = {"-H", "--human-readable"},
description = "Display bytes in human readable form, i.e. KB, MB, GB, TB")
private boolean humanReadable = false;
@Override
public void execute(NodeProbe probe)
{
OptionFilter filter = new OptionFilter(ignore, cfnames);
            Map<String, List<ColumnFamilyStoreMBean>> cfstoreMap = new HashMap<>();
// get a list of column family stores
            Iterator<Map.Entry<String, ColumnFamilyStoreMBean>> cfamilies = probe.getColumnFamilyStoreMBeanProxies();
while (cfamilies.hasNext())
{
                Map.Entry<String, ColumnFamilyStoreMBean> entry = cfamilies.next();
String keyspaceName = entry.getKey();
ColumnFamilyStoreMBean cfsProxy = entry.getValue();
if (!cfstoreMap.containsKey(keyspaceName) && filter.isColumnFamilyIncluded(entry.getKey(), cfsProxy.getColumnFamilyName()))
{
                    List<ColumnFamilyStoreMBean> columnFamilies = new ArrayList<>();
columnFamilies.add(cfsProxy);
cfstoreMap.put(keyspaceName, columnFamilies);
} else if (filter.isColumnFamilyIncluded(entry.getKey(), cfsProxy.getColumnFamilyName()))
{
cfstoreMap.get(keyspaceName).add(cfsProxy);
}
}
// make sure all specified kss and cfs exist
filter.verifyKeyspaces(probe.getKeyspaces());
filter.verifyColumnFamilies();
// print out the table statistics
            for (Map.Entry<String, List<ColumnFamilyStoreMBean>> entry : cfstoreMap.entrySet())
{
String keyspaceName = entry.getKey();
                List<ColumnFamilyStoreMBean> columnFamilies = entry.getValue();
long keyspaceReadCount = 0;
long keyspaceWriteCount = 0;
int keyspacePendingFlushes = 0;
double keyspaceTotalReadTime = 0.0f;
double keyspaceTotalWriteTime = 0.0f;
System.out.println("Keyspace: " + keyspaceName);
for (ColumnFamilyStoreMBean cfstore : columnFamilies)
{
String cfName = cfstore.getColumnFamilyName();
long writeCount = ((JmxReporter.TimerMBean) probe.getColumnFamilyMetric(keyspaceName, cfName, "WriteLatency")).getCount();
long readCount = ((JmxReporter.TimerMBean) probe.getColumnFamilyMetric(keyspaceName, cfName, "ReadLatency")).getCount();
if (readCount > 0)
{
keyspaceReadCount += readCount;
keyspaceTotalReadTime += (long) probe.getColumnFamilyMetric(keyspaceName, cfName, "ReadTotalLatency");
}
if (writeCount > 0)
{
keyspaceWriteCount += writeCount;
keyspaceTotalWriteTime += (long) probe.getColumnFamilyMetric(keyspaceName, cfName, "WriteTotalLatency");
}
keyspacePendingFlushes += (long) probe.getColumnFamilyMetric(keyspaceName, cfName, "PendingFlushes");
}
double keyspaceReadLatency = keyspaceReadCount > 0
? keyspaceTotalReadTime / keyspaceReadCount / 1000
: Double.NaN;
double keyspaceWriteLatency = keyspaceWriteCount > 0
? keyspaceTotalWriteTime / keyspaceWriteCount / 1000
: Double.NaN;
System.out.println("\tRead Count: " + keyspaceReadCount);
System.out.println("\tRead Latency: " + String.format("%s", keyspaceReadLatency) + " ms.");
System.out.println("\tWrite Count: " + keyspaceWriteCount);
System.out.println("\tWrite Latency: " + String.format("%s", keyspaceWriteLatency) + " ms.");
System.out.println("\tPending Flushes: " + keyspacePendingFlushes);
// print out column family statistics for this keyspace
for (ColumnFamilyStoreMBean cfstore : columnFamilies)
{
String cfName = cfstore.getColumnFamilyName();
if (cfName.contains("."))
System.out.println("\t\tTable (index): " + cfName);
else
System.out.println("\t\tTable: " + cfName);
System.out.println("\t\tSSTable count: " + probe.getColumnFamilyMetric(keyspaceName, cfName, "LiveSSTableCount"));
int[] leveledSStables = cfstore.getSSTableCountPerLevel();
if (leveledSStables != null)
{
System.out.print("\t\tSSTables in each level: [");
for (int level = 0; level < leveledSStables.length; level++)
{
int count = leveledSStables[level];
System.out.print(count);
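                            // LeveledCompactionStrategy targets at most 10^level sstables per
                            // level (L0 is special-cased to 4 here); levels over their target
                            // are annotated below as "count/max".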
long maxCount = 4L; // for L0
if (level > 0)
maxCount = (long) Math.pow(10, level);
// show max threshold for level when exceeded
if (count > maxCount)
System.out.print("/" + maxCount);
if (level < leveledSStables.length - 1)
System.out.print(", ");
else
System.out.println("]");
}
}
Long memtableOffHeapSize = (Long) probe.getColumnFamilyMetric(keyspaceName, cfName, "MemtableOffHeapSize");
Long bloomFilterOffHeapSize = (Long) probe.getColumnFamilyMetric(keyspaceName, cfName, "BloomFilterOffHeapMemoryUsed");
Long indexSummaryOffHeapSize = (Long) probe.getColumnFamilyMetric(keyspaceName, cfName, "IndexSummaryOffHeapMemoryUsed");
Long compressionMetadataOffHeapSize = (Long) probe.getColumnFamilyMetric(keyspaceName, cfName, "CompressionMetadataOffHeapMemoryUsed");
Long offHeapSize = memtableOffHeapSize + bloomFilterOffHeapSize + indexSummaryOffHeapSize + compressionMetadataOffHeapSize;
System.out.println("\t\tSpace used (live): " + format((Long) probe.getColumnFamilyMetric(keyspaceName, cfName, "LiveDiskSpaceUsed"), humanReadable));
System.out.println("\t\tSpace used (total): " + format((Long) probe.getColumnFamilyMetric(keyspaceName, cfName, "TotalDiskSpaceUsed"), humanReadable));
System.out.println("\t\tSpace used by snapshots (total): " + format((Long) probe.getColumnFamilyMetric(keyspaceName, cfName, "SnapshotsSize"), humanReadable));
System.out.println("\t\tOff heap memory used (total): " + format(offHeapSize, humanReadable));
System.out.println("\t\tSSTable Compression Ratio: " + probe.getColumnFamilyMetric(keyspaceName, cfName, "CompressionRatio"));
int numberOfKeys = 0;
for (long keys : (long[]) probe.getColumnFamilyMetric(keyspaceName, cfName, "EstimatedColumnCountHistogram"))
numberOfKeys += keys;
System.out.println("\t\tNumber of keys (estimate): " + numberOfKeys);
System.out.println("\t\tMemtable cell count: " + probe.getColumnFamilyMetric(keyspaceName, cfName, "MemtableColumnsCount"));
System.out.println("\t\tMemtable data size: " + format((Long) probe.getColumnFamilyMetric(keyspaceName, cfName, "MemtableLiveDataSize"), humanReadable));
System.out.println("\t\tMemtable off heap memory used: " + format(memtableOffHeapSize, humanReadable));
System.out.println("\t\tMemtable switch count: " + probe.getColumnFamilyMetric(keyspaceName, cfName, "MemtableSwitchCount"));
System.out.println("\t\tLocal read count: " + ((JmxReporter.TimerMBean) probe.getColumnFamilyMetric(keyspaceName, cfName, "ReadLatency")).getCount());
double localReadLatency = ((JmxReporter.TimerMBean) probe.getColumnFamilyMetric(keyspaceName, cfName, "ReadLatency")).getMean() / 1000;
double localRLatency = localReadLatency > 0 ? localReadLatency : Double.NaN;
System.out.printf("\t\tLocal read latency: %01.3f ms%n", localRLatency);
System.out.println("\t\tLocal write count: " + ((JmxReporter.TimerMBean) probe.getColumnFamilyMetric(keyspaceName, cfName, "WriteLatency")).getCount());
double localWriteLatency = ((JmxReporter.TimerMBean) probe.getColumnFamilyMetric(keyspaceName, cfName, "WriteLatency")).getMean() / 1000;
double localWLatency = localWriteLatency > 0 ? localWriteLatency : Double.NaN;
System.out.printf("\t\tLocal write latency: %01.3f ms%n", localWLatency);
System.out.println("\t\tPending flushes: " + probe.getColumnFamilyMetric(keyspaceName, cfName, "PendingFlushes"));
System.out.println("\t\tBloom filter false positives: " + probe.getColumnFamilyMetric(keyspaceName, cfName, "BloomFilterFalsePositives"));
System.out.printf("\t\tBloom filter false ratio: %s%n", String.format("%01.5f", probe.getColumnFamilyMetric(keyspaceName, cfName, "RecentBloomFilterFalseRatio")));
System.out.println("\t\tBloom filter space used: " + format((Long) probe.getColumnFamilyMetric(keyspaceName, cfName, "BloomFilterDiskSpaceUsed"), humanReadable));
System.out.println("\t\tBloom filter off heap memory used: " + format(bloomFilterOffHeapSize, humanReadable));
System.out.println("\t\tIndex summary off heap memory used: " + format(indexSummaryOffHeapSize, humanReadable));
System.out.println("\t\tCompression metadata off heap memory used: " + format(compressionMetadataOffHeapSize, humanReadable));
System.out.println("\t\tCompacted partition minimum bytes: " + format((Long) probe.getColumnFamilyMetric(keyspaceName, cfName, "MinRowSize"), humanReadable));
System.out.println("\t\tCompacted partition maximum bytes: " + format((Long) probe.getColumnFamilyMetric(keyspaceName, cfName, "MaxRowSize"), humanReadable));
System.out.println("\t\tCompacted partition mean bytes: " + format((Long) probe.getColumnFamilyMetric(keyspaceName, cfName, "MeanRowSize"), humanReadable));
JmxReporter.HistogramMBean histogram = (JmxReporter.HistogramMBean) probe.getColumnFamilyMetric(keyspaceName, cfName, "LiveScannedHistogram");
System.out.println("\t\tAverage live cells per slice (last five minutes): " + histogram.getMean());
System.out.println("\t\tMaximum live cells per slice (last five minutes): " + histogram.getMax());
histogram = (JmxReporter.HistogramMBean) probe.getColumnFamilyMetric(keyspaceName, cfName, "TombstoneScannedHistogram");
System.out.println("\t\tAverage tombstones per slice (last five minutes): " + histogram.getMean());
System.out.println("\t\tMaximum tombstones per slice (last five minutes): " + histogram.getMax());
System.out.println("");
}
System.out.println("----------------");
}
}
private String format(long bytes, boolean humanReadable) {
return humanReadable ? FileUtils.stringifyFileSize(bytes) : Long.toString(bytes);
}
/**
* Used for filtering keyspaces and columnfamilies to be displayed using the cfstats command.
*/
private static class OptionFilter
{
            private Map<String, List<String>> filter = new HashMap<>();
            private Map<String, List<String>> verifier = new HashMap<>();
            private List<String> filterList = new ArrayList<>();
private boolean ignoreMode;
            public OptionFilter(boolean ignoreMode, List<String> filterList)
{
this.filterList.addAll(filterList);
this.ignoreMode = ignoreMode;
for (String s : filterList)
{
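                    // Each argument is either "keyspace" or "keyspace.cfname"; split on the
                    // first dot only so cf names that themselves contain dots (index tables)
                    // stay intact.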
String[] keyValues = s.split("\\.", 2);
// build the map that stores the ks' and cfs to use
if (!filter.containsKey(keyValues[0]))
{
                        filter.put(keyValues[0], new ArrayList<String>());
                        verifier.put(keyValues[0], new ArrayList<String>());
if (keyValues.length == 2)
{
filter.get(keyValues[0]).add(keyValues[1]);
verifier.get(keyValues[0]).add(keyValues[1]);
}
} else
{
if (keyValues.length == 2)
{
filter.get(keyValues[0]).add(keyValues[1]);
verifier.get(keyValues[0]).add(keyValues[1]);
}
}
}
}
public boolean isColumnFamilyIncluded(String keyspace, String columnFamily)
{
// supplying empty params list is treated as wanting to display all kss & cfs
if (filterList.isEmpty())
return !ignoreMode;
                List<String> cfs = filter.get(keyspace);
// no such keyspace is in the map
if (cfs == null)
return ignoreMode;
// only a keyspace with no cfs was supplied
// so ignore or include (based on the flag) every column family in specified keyspace
else if (cfs.size() == 0)
return !ignoreMode;
// keyspace exists, and it contains specific cfs
verifier.get(keyspace).remove(columnFamily);
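                // XOR: with -i (ignore mode) the listed column families are excluded
                // instead of included.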
return ignoreMode ^ cfs.contains(columnFamily);
}
            public void verifyKeyspaces(List<String> keyspaces)
{
for (String ks : verifier.keySet())
if (!keyspaces.contains(ks))
throw new IllegalArgumentException("Unknown keyspace: " + ks);
}
public void verifyColumnFamilies()
{
for (String ks : filter.keySet())
if (verifier.get(ks).size() > 0)
throw new IllegalArgumentException("Unknown column families: " + verifier.get(ks).toString() + " in keyspace: " + ks);
}
}
}
@Command(name = "toppartitions", description = "Sample and print the most active partitions for a given column family")
public static class TopPartitions extends NodeToolCmd
{
        @Arguments(usage = "<keyspace> <cfname> <duration>", description = "The keyspace, column family name, and duration in milliseconds")
        private List<String> args = new ArrayList<>();
@Option(name = "-s", description = "Capacity of stream summary, closer to the actual cardinality of partitions will yield more accurate results (Default: 256)")
private int size = 256;
@Option(name = "-k", description = "Number of the top partitions to list (Default: 10)")
private int topCount = 10;
@Option(name = "-a", description = "Comma separated list of samplers to use (Default: all)")
private String samplers = join(Sampler.values(), ',');
@Override
public void execute(NodeProbe probe)
{
checkArgument(args.size() == 3, "toppartitions requires keyspace, column family name, and duration");
            checkArgument(topCount < size, "TopK count (-k) option must be smaller than the summary capacity (-s)");
String keyspace = args.get(0);
String cfname = args.get(1);
Integer duration = Integer.parseInt(args.get(2));
// generate the list of samplers
            List<Sampler> targets = Lists.newArrayList();
for (String s : samplers.split(","))
{
try
{
targets.add(Sampler.valueOf(s.toUpperCase()));
} catch (Exception e)
{
throw new IllegalArgumentException(s + " is not a valid sampler, choose one of: " + join(Sampler.values(), ", "));
}
}
            Map<Sampler, CompositeData> results;
try
{
results = probe.getPartitionSample(keyspace, cfname, size, duration, topCount, targets);
} catch (OpenDataException e)
{
throw new RuntimeException(e);
}
boolean first = true;
            for (Entry<Sampler, CompositeData> result : results.entrySet())
{
CompositeData sampling = result.getValue();
// weird casting for http://bugs.sun.com/view_bug.do?bug_id=6548436
                List<CompositeData> topk = (List<CompositeData>) (Object) Lists.newArrayList(((TabularDataSupport) sampling.get("partitions")).values());
                Collections.sort(topk, new Ordering<CompositeData>()
{
public int compare(CompositeData left, CompositeData right)
{
return Long.compare((long) right.get("count"), (long) left.get("count"));
}
});
                if (!first)
System.out.println();
                System.out.println(result.getKey().toString() + " Sampler:");
System.out.printf(" Cardinality: ~%d (%d capacity)%n", (long) sampling.get("cardinality"), size);
System.out.printf(" Top %d partitions:%n", topCount);
if (topk.size() == 0)
{
System.out.println("\tNothing recorded during sampling period...");
} else
{
int offset = 0;
for (CompositeData entry : topk)
offset = Math.max(offset, entry.get("string").toString().length());
System.out.printf("\t%-" + offset + "s%10s%10s%n", "Partition", "Count", "+/-");
for (CompositeData entry : topk)
System.out.printf("\t%-" + offset + "s%10d%10d%n", entry.get("string").toString(), entry.get("count"), entry.get("error"));
}
first = false;
}
}
}
@Command(name = "cfhistograms", description = "Print statistic histograms for a given column family")
public static class CfHistograms extends NodeToolCmd
{
        @Arguments(usage = "<keyspace> <cfname>", description = "The keyspace and column family name")
        private List<String> args = new ArrayList<>();
@Override
public void execute(NodeProbe probe)
{
checkArgument(args.size() == 2, "cfhistograms requires ks and cf args");
String keyspace = args.get(0);
String cfname = args.get(1);
ColumnFamilyStoreMBean store = probe.getCfsProxy(keyspace, cfname);
long[] estimatedRowSizeHistogram = store.getEstimatedRowSizeHistogram();
long[] estimatedColumnCountHistogram = store.getEstimatedColumnCountHistogram();
if (ArrayUtils.isEmpty(estimatedRowSizeHistogram) || ArrayUtils.isEmpty(estimatedColumnCountHistogram))
{
                System.err.println("No SSTables exist, unable to calculate 'Partition Size' and 'Cell Count' percentiles");
}
// calculate percentile of row size and column count
String[] percentiles = new String[]{"50%", "75%", "95%", "98%", "99%", "Min", "Max"};
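            // Each array from metricPercentilesAsArray lines up positionally with the
            // labels above: five percentiles followed by min and max.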
double[] readLatency = probe.metricPercentilesAsArray(store.getRecentReadLatencyHistogramMicros());
double[] writeLatency = probe.metricPercentilesAsArray(store.getRecentWriteLatencyHistogramMicros());
double[] estimatedRowSizePercentiles = probe.metricPercentilesAsArray(estimatedRowSizeHistogram);
double[] estimatedColumnCountPercentiles = probe.metricPercentilesAsArray(estimatedColumnCountHistogram);
double[] sstablesPerRead = probe.metricPercentilesAsArray(store.getRecentSSTablesPerReadHistogram());
System.out.println(format("%s/%s histograms", keyspace, cfname));
System.out.println(format("%-10s%10s%18s%18s%18s%18s",
"Percentile", "SSTables", "Write Latency", "Read Latency", "Partition Size", "Cell Count"));
System.out.println(format("%-10s%10s%18s%18s%18s%18s",
"", "", "(micros)", "(micros)", "(bytes)", ""));
for (int i = 0; i < percentiles.length; i++)
{
System.out.println(format("%-10s%10.2f%18.2f%18.2f%18.0f%18.0f",
percentiles[i],
sstablesPerRead[i],
writeLatency[i],
readLatency[i],
estimatedRowSizePercentiles[i],
estimatedColumnCountPercentiles[i]));
}
System.out.println();
}
}
@Command(name = "cleanup", description = "Triggers the immediate cleanup of keys no longer belonging to a node. By default, clean all keyspaces")
public static class Cleanup extends NodeToolCmd
{
        @Arguments(usage = "[<keyspace> <cfnames>...]", description = "The keyspace followed by one or many column families")
        private List<String> args = new ArrayList<>();
@Override
public void execute(NodeProbe probe)
{
            List<String> keyspaces = parseOptionalKeyspace(args, probe);
String[] cfnames = parseOptionalColumnFamilies(args);
for (String keyspace : keyspaces)
{
if (Keyspace.SYSTEM_KS.equals(keyspace))
continue;
try
{
probe.forceKeyspaceCleanup(System.out, keyspace, cfnames);
} catch (Exception e)
{
throw new RuntimeException("Error occurred during cleanup", e);
}
}
}
}
@Command(name = "clearsnapshot", description = "Remove the snapshot with the given name from the given keyspaces. If no snapshotName is specified we will remove all snapshots")
public static class ClearSnapshot extends NodeToolCmd
{
        @Arguments(usage = "[<keyspaces>...] ", description = "Remove snapshots from the given keyspaces")
        private List<String> keyspaces = new ArrayList<>();
@Option(title = "snapshot_name", name = "-t", description = "Remove the snapshot with a given name")
private String snapshotName = EMPTY;
@Override
public void execute(NodeProbe probe)
{
StringBuilder sb = new StringBuilder();
sb.append("Requested clearing snapshot(s) for ");
if (keyspaces.isEmpty())
sb.append("[all keyspaces]");
else
sb.append("[").append(join(keyspaces, ", ")).append("]");
if (!snapshotName.isEmpty())
sb.append(" with snapshot name [").append(snapshotName).append("]");
System.out.println(sb.toString());
try
{
probe.clearSnapshot(snapshotName, toArray(keyspaces, String.class));
} catch (IOException e)
{
throw new RuntimeException("Error during clearing snapshots", e);
}
}
}
@Command(name = "compact", description = "Force a (major) compaction on one or more column families")
public static class Compact extends NodeToolCmd
{
        @Arguments(usage = "[<keyspace> <cfnames>...]", description = "The keyspace followed by one or many column families")
        private List<String> args = new ArrayList<>();
@Override
public void execute(NodeProbe probe)
{
            List<String> keyspaces = parseOptionalKeyspace(args, probe);
String[] cfnames = parseOptionalColumnFamilies(args);
for (String keyspace : keyspaces)
{
try
{
probe.forceKeyspaceCompaction(keyspace, cfnames);
} catch (Exception e)
{
throw new RuntimeException("Error occurred during compaction", e);
}
}
}
}
@Command(name = "flush", description = "Flush one or more column families")
public static class Flush extends NodeToolCmd
{
        @Arguments(usage = "[<keyspace> <cfnames>...]", description = "The keyspace followed by one or many column families")
        private List<String> args = new ArrayList<>();
@Override
public void execute(NodeProbe probe)
{
            List<String> keyspaces = parseOptionalKeyspace(args, probe);
String[] cfnames = parseOptionalColumnFamilies(args);
for (String keyspace : keyspaces)
{
try
{
probe.forceKeyspaceFlush(keyspace, cfnames);
} catch (Exception e)
{
throw new RuntimeException("Error occurred during flushing", e);
}
}
}
}
@Command(name = "scrub", description = "Scrub (rebuild sstables for) one or more column families")
public static class Scrub extends NodeToolCmd
{
        @Arguments(usage = "[<keyspace> <cfnames>...]", description = "The keyspace followed by one or many column families")
        private List<String> args = new ArrayList<>();
@Option(title = "disable_snapshot",
name = {"-ns", "--no-snapshot"},
description = "Scrubbed CFs will be snapshotted first, if disableSnapshot is false. (default false)")
private boolean disableSnapshot = false;
@Option(title = "skip_corrupted",
name = {"-s", "--skip-corrupted"},
description = "Skip corrupted partitions even when scrubbing counter tables. (default false)")
private boolean skipCorrupted = false;
@Override
public void execute(NodeProbe probe)
{
            List<String> keyspaces = parseOptionalKeyspace(args, probe);
String[] cfnames = parseOptionalColumnFamilies(args);
for (String keyspace : keyspaces)
{
try
{
probe.scrub(System.out, disableSnapshot, skipCorrupted, keyspace, cfnames);
} catch (Exception e)
{
                    throw new RuntimeException("Error occurred during scrubbing", e);
}
}
}
}
@Command(name = "disableautocompaction", description = "Disable autocompaction for the given keyspace and column family")
public static class DisableAutoCompaction extends NodeToolCmd
{
        @Arguments(usage = "[<keyspace> <cfnames>...]", description = "The keyspace followed by one or many column families")
        private List<String> args = new ArrayList<>();
@Override
public void execute(NodeProbe probe)
{
            List<String> keyspaces = parseOptionalKeyspace(args, probe);
String[] cfnames = parseOptionalColumnFamilies(args);
for (String keyspace : keyspaces)
{
try
{
probe.disableAutoCompaction(keyspace, cfnames);
} catch (IOException e)
{
throw new RuntimeException("Error occurred during disabling auto-compaction", e);
}
}
}
}
@Command(name = "enableautocompaction", description = "Enable autocompaction for the given keyspace and column family")
public static class EnableAutoCompaction extends NodeToolCmd
{
        @Arguments(usage = "[<keyspace> <cfnames>...]", description = "The keyspace followed by one or many column families")
        private List<String> args = new ArrayList<>();
@Override
public void execute(NodeProbe probe)
{
            List<String> keyspaces = parseOptionalKeyspace(args, probe);
String[] cfnames = parseOptionalColumnFamilies(args);
for (String keyspace : keyspaces)
{
try
{
probe.enableAutoCompaction(keyspace, cfnames);
} catch (IOException e)
{
throw new RuntimeException("Error occurred during enabling auto-compaction", e);
}
}
}
}
@Command(name = "upgradesstables", description = "Rewrite sstables (for the requested column families) that are not on the current version (thus upgrading them to said current version)")
public static class UpgradeSSTable extends NodeToolCmd
{
        @Arguments(usage = "[<keyspace> <cfnames>...]", description = "The keyspace followed by one or many column families")
        private List<String> args = new ArrayList<>();
@Option(title = "include_all", name = {"-a", "--include-all-sstables"}, description = "Use -a to include all sstables, even those already on the current version")
private boolean includeAll = false;
@Override
public void execute(NodeProbe probe)
{
            List<String> keyspaces = parseOptionalKeyspace(args, probe);
String[] cfnames = parseOptionalColumnFamilies(args);
for (String keyspace : keyspaces)
{
try
{
probe.upgradeSSTables(System.out, keyspace, !includeAll, cfnames);
} catch (Exception e)
{
                    throw new RuntimeException("Error occurred during upgrading sstables", e);
}
}
}
}
@Command(name = "compactionstats", description = "Print statistics on compactions")
public static class CompactionStats extends NodeToolCmd
{
@Option(title = "human_readable",
name = {"-H", "--human-readable"},
description = "Display bytes in human readable form, i.e. KB, MB, GB, TB")
private boolean humanReadable = false;
@Override
public void execute(NodeProbe probe)
{
int compactionThroughput = probe.getCompactionThroughput();
CompactionManagerMBean cm = probe.getCompactionManagerProxy();
System.out.println("pending tasks: " + probe.getCompactionMetric("PendingTasks"));
long remainingBytes = 0;
            List<Map<String, String>> compactions = cm.getCompactions();