org.apache.hadoop.hbase.tmpl.tool.CanaryStatusTmplImpl (hbase-server)
Server functionality for HBase
// Autogenerated Jamon implementation
// /Users/apurtell/src/hbase/hbase-server/src/main/jamon/org/apache/hadoop/hbase/tmpl/tool/CanaryStatusTmpl.jamon
package org.apache.hadoop.hbase.tmpl.tool;
// 24, 1
import java.util.Map;
// 25, 1
import java.util.concurrent.atomic.LongAdder;
// 26, 1
import org.apache.hadoop.hbase.ServerName;
// 27, 1
import org.apache.hadoop.hbase.tool.CanaryTool.RegionStdOutSink;
// 28, 1
import org.apache.hadoop.hbase.util.JvmVersion;
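// Jamon compiles the CanaryStatusTmpl.jamon template above into this
// implementation class. renderNoFlush() streams the HBase Canary status page:
// a "Failed Servers" table, a "Failed Tables" table, and a "Software
// Attributes" table (JVM, HBase, and Hadoop version information).
//
// Usage sketch (an assumption based on the standard Jamon proxy contract; the
// companion CanaryStatusTmpl proxy class is not part of this listing):
//
//   CanaryStatusTmpl tmpl = new CanaryStatusTmpl();
//   tmpl.render(writer, sink);  // writer: java.io.Writer, sink: RegionStdOutSink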
public class CanaryStatusTmplImpl
extends org.jamon.AbstractTemplateImpl
implements org.apache.hadoop.hbase.tmpl.tool.CanaryStatusTmpl.Intf
{
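// Failure counters collected by the CanaryTool's RegionStdOutSink; this is the
// template's single required argument, supplied via ImplData.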
private final RegionStdOutSink sink;
protected static org.apache.hadoop.hbase.tmpl.tool.CanaryStatusTmpl.ImplData __jamon_setOptionalArguments(org.apache.hadoop.hbase.tmpl.tool.CanaryStatusTmpl.ImplData p_implData)
{
return p_implData;
}
public CanaryStatusTmplImpl(org.jamon.TemplateManager p_templateManager, org.apache.hadoop.hbase.tmpl.tool.CanaryStatusTmpl.ImplData p_implData)
{
super(p_templateManager, __jamon_setOptionalArguments(p_implData));
sink = p_implData.getSink();
}
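// Jamon render entry point: writes the status page HTML to jamonWriter
// without flushing it.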
@Override public void renderNoFlush(final java.io.Writer jamonWriter)
throws java.io.IOException
{
// 1, 1
jamonWriter.write("<!DOCTYPE html>\n<html>\n  <head>\n    <title>Canary</title>\n  </head>\n  <body>\n    <h2>Failed Servers</h2>\n    <table>\n      ");
// 64, 9
Map<ServerName, LongAdder> perServerFailuresCount = sink.getPerServerFailuresCount();
// 67, 9
jamonWriter.write("\n      <tr>\n        <th>Server</th>\n        <th>Failures Count</th>\n      </tr>\n      ");
// 72, 11
if ((perServerFailuresCount != null && perServerFailuresCount.size() > 0))
{
// 72, 87
jamonWriter.write("\n ");
// 73, 11
for (Map.Entry<ServerName, LongAdder> entry : perServerFailuresCount.entrySet())
{
// 73, 94
jamonWriter.write("\n        <tr>\n          <td>");
// 75, 19
{
// 75, 19
__jamon_innerUnit__serverNameLink(jamonWriter, entry.getKey() );
}
// 75, 69
jamonWriter.write("</td>\n          <td>");
// 76, 19
org.jamon.escaping.Escaping.HTML.write(org.jamon.emit.StandardEmitter.valueOf(entry.getValue()), jamonWriter);
// 76, 41
jamonWriter.write("</td>\n        </tr>\n      ");
}
// 78, 18
jamonWriter.write("\n ");
}
// 79, 17
jamonWriter.write("\n    </table>\n    <p>Total Failed Servers: ");
// 80, 41
org.jamon.escaping.Escaping.HTML.write(org.jamon.emit.StandardEmitter.valueOf((perServerFailuresCount != null) ? perServerFailuresCount.size() : 0), jamonWriter);
// 80, 115
jamonWriter.write("</p>\n\n    <h2>Failed Tables</h2>\n    <table>\n      ");
// 85, 13
Map<String, LongAdder> perTableFailuresCount = sink.getPerTableFailuresCount();
// 88, 13
jamonWriter.write("\n      <tr>\n        <th>Table</th>\n        <th>Failures Count</th>\n      </tr>\n      ");
// 93, 15
if ((perTableFailuresCount != null && perTableFailuresCount.size() > 0))
{
// 93, 89
jamonWriter.write("\n ");
// 94, 15
for (Map.Entry<String, LongAdder> entry : perTableFailuresCount.entrySet())
{
// 94, 92
jamonWriter.write("\n        <tr>\n          <td>");
// 96, 23
org.jamon.escaping.Escaping.HTML.write(org.jamon.emit.StandardEmitter.valueOf(entry.getKey()), jamonWriter);
// 96, 43
jamonWriter.write("</td>\n          <td>");
// 97, 23
org.jamon.escaping.Escaping.HTML.write(org.jamon.emit.StandardEmitter.valueOf(entry.getValue()), jamonWriter);
// 97, 45
jamonWriter.write("</td>\n        </tr>\n      ");
}
// 99, 22
jamonWriter.write("\n ");
}
// 100, 21
jamonWriter.write("\n    </table>\n    <p>Total Failed Tables: ");
// 101, 44
org.jamon.escaping.Escaping.HTML.write(org.jamon.emit.StandardEmitter.valueOf((perTableFailuresCount != null) ? perTableFailuresCount.size() : 0), jamonWriter);
// 101, 116
jamonWriter.write("</p>\n\n    <h2>Software Attributes</h2>\n    <table>\n      <tr>\n        <th>Attribute Name</th>\n        <th>Value</th>\n        <th>Description</th>\n      </tr>\n      <tr>\n        <td>JVM Version</td>\n        <td>");
// 115, 25
org.jamon.escaping.Escaping.HTML.write(org.jamon.emit.StandardEmitter.valueOf(JvmVersion.getVersion()), jamonWriter);
// 115, 54
jamonWriter.write("</td>\n        <td>JVM vendor and version</td>\n      </tr>\n      <tr>\n        <td>HBase Version</td>\n        <td>");
// 120, 25
org.jamon.escaping.Escaping.HTML.write(org.jamon.emit.StandardEmitter.valueOf(org.apache.hadoop.hbase.util.VersionInfo.getVersion()), jamonWriter);
// 120, 84
jamonWriter.write(", r");
// 120, 87
org.jamon.escaping.Escaping.HTML.write(org.jamon.emit.StandardEmitter.valueOf(org.apache.hadoop.hbase.util.VersionInfo.getRevision()), jamonWriter);
// 120, 147
jamonWriter.write("</td>\n        <td>HBase version and revision</td>\n      </tr>\n      <tr>\n        <td>HBase Compiled</td>\n        <td>");
// 124, 25
org.jamon.escaping.Escaping.HTML.write(org.jamon.emit.StandardEmitter.valueOf(org.apache.hadoop.hbase.util.VersionInfo.getDate()), jamonWriter);
// 124, 81
jamonWriter.write(", ");
// 124, 83
org.jamon.escaping.Escaping.HTML.write(org.jamon.emit.StandardEmitter.valueOf(org.apache.hadoop.hbase.util.VersionInfo.getUser()), jamonWriter);
// 124, 139
jamonWriter.write("</td>\n        <td>When HBase version was compiled and by whom</td>\n      </tr>\n      <tr>\n        <td>Hadoop Version</td>\n        <td>");
// 129, 25
org.jamon.escaping.Escaping.HTML.write(org.jamon.emit.StandardEmitter.valueOf(org.apache.hadoop.util.VersionInfo.getVersion()), jamonWriter);
// 129, 78
jamonWriter.write(", r");
// 129, 81
org.jamon.escaping.Escaping.HTML.write(org.jamon.emit.StandardEmitter.valueOf(org.apache.hadoop.util.VersionInfo.getRevision()), jamonWriter);
// 129, 135
jamonWriter.write("</td>\n        <td>Hadoop version and revision</td>\n      </tr>\n      <tr>\n        <td>Hadoop Compiled</td>\n        <td>");
// 134, 25
org.jamon.escaping.Escaping.HTML.write(org.jamon.emit.StandardEmitter.valueOf(org.apache.hadoop.util.VersionInfo.getDate()), jamonWriter);
// 134, 75
jamonWriter.write(", ");
// 134, 77
org.jamon.escaping.Escaping.HTML.write(org.jamon.emit.StandardEmitter.valueOf(org.apache.hadoop.util.VersionInfo.getUser()), jamonWriter);
// 134, 127
jamonWriter.write("</td>\n        <td>When Hadoop version was compiled and by whom</td>\n      </tr>\n    </table>\n  </body>\n</html>\n");
}
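// The listing truncates here; the generated file also defines the
// serverNameLink inner unit invoked in the Failed Servers loop above. A
// minimal sketch of its likely shape, assuming it simply emits the
// HTML-escaped server name (the real template may instead render a link to
// the region server's web UI):
private void __jamon_innerUnit__serverNameLink(final java.io.Writer jamonWriter, final ServerName serverName)
throws java.io.IOException
{
org.jamon.escaping.Escaping.HTML.write(org.jamon.emit.StandardEmitter.valueOf(serverName.getServerName()), jamonWriter);
}
}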