This article collects typical usage examples of the Java class org.apache.hadoop.metrics.ContextFactory. If you are wondering what exactly ContextFactory does, how to use it, or what real code that uses it looks like, the curated examples below should help.
The ContextFactory class belongs to the org.apache.hadoop.metrics package. Twelve code examples of the class are shown below, sorted by popularity by default.
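Before the examples, here is a minimal, self-contained sketch of how the ContextFactory singleton is typically obtained and queried. It assumes a hadoop-metrics.properties file on the classpath, and the attribute key "hbase.period" is purely illustrative:

import java.io.IOException;

import org.apache.hadoop.metrics.ContextFactory;
import org.apache.hadoop.metrics.MetricsContext;
import org.apache.hadoop.metrics.MetricsUtil;

public class ContextFactoryDemo {
  public static void main(String[] args) throws IOException {
    // getFactory() lazily loads attribute values from
    // hadoop-metrics.properties the first time it is called.
    ContextFactory factory = ContextFactory.getFactory();

    // Attributes are plain Objects; callers cast or instanceof-check,
    // just as the HBase examples below do with "hbase.extendedperiod".
    Object period = factory.getAttribute("hbase.period"); // illustrative key
    System.out.println("hbase.period = " + period);

    // MetricsUtil.getContext() is the usual convenience wrapper around
    // factory.getContext(); it falls back to a NullContext if creation fails.
    MetricsContext context = MetricsUtil.getContext("hbase");
    System.out.println("monitoring: " + context.isMonitoring());
  }
}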
Example 1: init

import org.apache.hadoop.metrics.ContextFactory; // import the required package/class
@Override
@InterfaceAudience.Private
public void init(String contextName, ContextFactory factory) {
  super.init(contextName, factory);
  int nKids;
  try {
    String sKids = getAttribute(ARITY_LABEL);
    nKids = Integer.parseInt(sKids);
  } catch (Exception e) {
    LOG.error("Unable to initialize composite metric " + contextName +
        ": could not init arity", e);
    return;
  }
  for (int i = 0; i < nKids; ++i) {
    MetricsContext ctxt = MetricsUtil.getContext(
        String.format(SUB_FMT, contextName, i), contextName);
    if (null != ctxt) {
      subctxt.add(ctxt);
    }
  }
}
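For this composite context to find its children, the factory must supply an arity attribute plus one .class attribute per sub-context. Below is a minimal sketch of wiring this up programmatically, assuming ARITY_LABEL resolves to "arity" and SUB_FMT to "%s.sub%d" (which matches the getContext calls above); in production these entries would normally live in hadoop-metrics.properties:

import java.io.IOException;

import org.apache.hadoop.metrics.ContextFactory;
import org.apache.hadoop.metrics.spi.NullContext;

public class CompositeWiringDemo {
  public static void main(String[] args) throws IOException {
    // Equivalent to the hadoop-metrics.properties entries:
    //   composite.arity=2
    //   composite.sub0.class=org.apache.hadoop.metrics.spi.NullContext
    //   composite.sub1.class=org.apache.hadoop.metrics.spi.NullContext
    ContextFactory factory = ContextFactory.getFactory();
    factory.setAttribute("composite.arity", "2");
    factory.setAttribute("composite.sub0.class", NullContext.class.getName());
    factory.setAttribute("composite.sub1.class", NullContext.class.getName());
  }
}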
Example 2: init

import org.apache.hadoop.metrics.ContextFactory; // import the required package/class
@Override
public void init(String contextName, ContextFactory factory) {
  super.init(contextName, factory);
  LOG.debug("Initializing the GangliaContext31 for Ganglia 3.1 metrics.");
  // Take the hostname from the DNS class.
  Configuration conf = new Configuration();
  if (conf.get("slave.host.name") != null) {
    hostName = conf.get("slave.host.name");
  } else {
    try {
      hostName = DNS.getDefaultHost(
          conf.get("dfs.datanode.dns.interface", "default"),
          conf.get("dfs.datanode.dns.nameserver", "default"));
    } catch (UnknownHostException uhe) {
      LOG.error(uhe);
      hostName = "UNKNOWN.example.com";
    }
  }
}
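A note on the design: the slave.host.name property gives operators an explicit hostname override for multi-homed machines, while the dfs.datanode.dns.interface / dfs.datanode.dns.nameserver pair lets DNS.getDefaultHost resolve the name of a specific network interface; the "UNKNOWN.example.com" fallback keeps metrics flowing even when reverse DNS is misconfigured.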
Example 3: init

import org.apache.hadoop.metrics.ContextFactory; // import the required package/class
@InterfaceAudience.Private
public void init(String contextName, ContextFactory factory) {
  super.init(contextName, factory);
  int nKids;
  try {
    String sKids = getAttribute(ARITY_LABEL);
    nKids = Integer.parseInt(sKids);
  } catch (Exception e) {
    LOG.error("Unable to initialize composite metric " + contextName +
        ": could not init arity", e);
    return;
  }
  for (int i = 0; i < nKids; ++i) {
    MetricsContext ctxt = MetricsUtil.getContext(
        String.format(SUB_FMT, contextName, i), contextName);
    if (null != ctxt) {
      subctxt.add(ctxt);
    }
  }
}
Example 4: init

import org.apache.hadoop.metrics.ContextFactory; // import the required package/class
public void init(String contextName, ContextFactory factory) {
  super.init(contextName, factory);
  LOG.debug("Initializing the GangliaContext31 for Ganglia 3.1 metrics.");
  // Take the hostname from the DNS class.
  Configuration conf = new Configuration();
  if (conf.get("slave.host.name") != null) {
    hostName = conf.get("slave.host.name");
  } else {
    try {
      hostName = DNS.getDefaultHost(
          conf.get("dfs.datanode.dns.interface", "default"),
          conf.get("dfs.datanode.dns.nameserver", "default"));
    } catch (UnknownHostException uhe) {
      LOG.error(uhe);
      hostName = "UNKNOWN.example.com";
    }
  }
}
Example 5: MasterMetrics

import org.apache.hadoop.metrics.ContextFactory; // import the required package/class
public MasterMetrics(final String name) {
  MetricsContext context = MetricsUtil.getContext("hbase");
  metricsRecord = MetricsUtil.createRecord(context, "master");
  metricsRecord.setTag("Master", name);
  context.registerUpdater(this);
  JvmMetrics.init("Master", name);
  HBaseInfo.init();
  // expose the MBean for metrics
  masterStatistics = new MasterStatistics(this.registry);
  // get custom attributes
  try {
    Object m =
        ContextFactory.getFactory().getAttribute("hbase.extendedperiod");
    if (m instanceof String) {
      this.extendedPeriod = Long.parseLong((String) m) * 1000;
    }
  } catch (IOException ioe) {
    LOG.info("Couldn't load ContextFactory for Metrics config info");
  }
  LOG.info("Initialized");
}
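Examples 5 and 6 both depend on the Updater callback contract: registerUpdater(this) asks the context to call doUpdates() once per period. Here is a stripped-down sketch of that pattern (the class name and the "requests" metric are hypothetical):

import org.apache.hadoop.metrics.MetricsContext;
import org.apache.hadoop.metrics.MetricsRecord;
import org.apache.hadoop.metrics.MetricsUtil;
import org.apache.hadoop.metrics.Updater;

public class DemoMetrics implements Updater {
  private final MetricsRecord metricsRecord;

  public DemoMetrics() {
    MetricsContext context = MetricsUtil.getContext("demo");
    metricsRecord = MetricsUtil.createRecord(context, "demo");
    context.registerUpdater(this); // doUpdates() now fires every period
  }

  @Override
  public void doUpdates(MetricsContext unused) {
    // Push current values into the record, then emit them.
    metricsRecord.setMetric("requests", 42); // hypothetical metric
    metricsRecord.update();
  }
}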
Example 6: RegionServerMetrics

import org.apache.hadoop.metrics.ContextFactory; // import the required package/class
public RegionServerMetrics() {
  MetricsContext context = MetricsUtil.getContext("hbase");
  metricsRecord = MetricsUtil.createRecord(context, "regionserver");
  String name = Thread.currentThread().getName();
  metricsRecord.setTag("RegionServer", name);
  context.registerUpdater(this);
  // Add jvm metrics.
  JvmMetrics.init("RegionServer", name);
  // Add HBase Info metrics
  HBaseInfo.init();
  // export for JMX
  statistics = new RegionServerStatistics(this.registry, name);
  // get custom attributes
  try {
    Object m = ContextFactory.getFactory().getAttribute("hbase.extendedperiod");
    if (m instanceof String) {
      this.extendedPeriod = Long.parseLong((String) m) * 1000;
    }
  } catch (IOException ioe) {
    LOG.info("Couldn't load ContextFactory for Metrics config info");
  }
  LOG.info("Initialized");
}
Example 7: init

import org.apache.hadoop.metrics.ContextFactory; // import the required package/class
@InterfaceAudience.Private
public void init(String contextName, ContextFactory factory) {
  super.init(contextName, factory);
  int nKids;
  try {
    String sKids = getAttribute(ARITY_LABEL);
    nKids = Integer.parseInt(sKids);
  } catch (Exception e) {
    LOG.error("Unable to initialize composite metric " + contextName +
        ": could not init arity", e);
    return;
  }
  for (int i = 0; i < nKids; ++i) {
    MetricsContext ctxt = MetricsUtil.getContext(
        String.format(SUB_FMT, contextName, i), contextName);
    if (null != ctxt) {
      subctxt.add(ctxt);
    }
  }
}
Example 8: init

import org.apache.hadoop.metrics.ContextFactory; // import the required package/class
@InterfaceAudience.Private
public void init(String contextName, ContextFactory factory) {
  super.init(contextName, factory);
  parseAndSetPeriod(PERIOD_PROPERTY);
  metricsServers = Util.parse(getAttribute(SERVERS_PROPERTY), DEFAULT_PORT);
  unitsTable = getAttributeTable(UNITS_PROPERTY);
  slopeTable = getAttributeTable(SLOPE_PROPERTY);
  tmaxTable = getAttributeTable(TMAX_PROPERTY);
  dmaxTable = getAttributeTable(DMAX_PROPERTY);
  try {
    datagramSocket = new DatagramSocket();
  } catch (SocketException se) {
    se.printStackTrace();
  }
}
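The SERVERS_PROPERTY attribute above holds a comma-separated list of host[:port] specs; Util.parse expands it into socket addresses, filling in the default port where one is omitted. A quick illustration with made-up hostnames:

import java.net.InetSocketAddress;
import java.util.List;

import org.apache.hadoop.metrics.spi.Util;

public class ServerListDemo {
  public static void main(String[] args) {
    // "gmond2" has no explicit port, so the default 8649 is applied.
    List<InetSocketAddress> servers = Util.parse("gmond1:8649,gmond2", 8649);
    System.out.println(servers); // both entries end up on port 8649
  }
}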
Example 9: init

import org.apache.hadoop.metrics.ContextFactory; // import the required package/class
@Override
public void init(String contextName, ContextFactory factory) {
  super.init(contextName, factory);
  initAllowedRecords();
  String periodStr = getAttribute(PERIOD_PROPERTY);
  if (periodStr != null) {
    int period = 0;
    try {
      period = Integer.parseInt(periodStr);
    } catch (NumberFormatException nfe) {
      // fall through: period stays 0 and fails the check below
    }
    if (period <= 0) {
      throw new MetricsException("Invalid period: " + periodStr);
    }
    setPeriod(period);
  }
}
Example 10: init

import org.apache.hadoop.metrics.ContextFactory; // import the required package/class
public void init(String contextName, ContextFactory factory) {
  super.init(contextName, factory);
  int nKids;
  try {
    String sKids = getAttribute(ARITY_LABEL);
    nKids = Integer.parseInt(sKids);
  } catch (Exception e) {
    LOG.error("Unable to initialize composite metric " + contextName +
        ": could not init arity", e);
    return;
  }
  for (int i = 0; i < nKids; ++i) {
    MetricsContext ctxt = MetricsUtil.getContext(
        String.format(SUB_FMT, contextName, i), contextName);
    if (null != ctxt) {
      subctxt.add(ctxt);
    }
  }
}
Example 11: init

import org.apache.hadoop.metrics.ContextFactory; // import the required package/class
public void init(String contextName, ContextFactory factory) {
  super.init(contextName, factory);
  fileName = getAttribute(FILE_NAME_PROPERTY);
  String recordDatePattern = getAttribute(RECORD_DATE_PATTERN_PROPERTY);
  if (recordDatePattern == null) {
    recordDatePattern = DEFAULT_RECORD_DATE_PATTERN;
  }
  recordDateFormat = new SimpleDateFormat(recordDatePattern);
  fileSuffixDateFormat = new SimpleDateFormat(FILE_SUFFIX_DATE_PATTERN);
  Calendar currentDate = Calendar.getInstance();
  if (fileName != null) {
    file = new File(getFullFileName(currentDate));
  }
  lastRecordDate = currentDate;
  parseAndSetPeriod(PERIOD_PROPERTY);
}
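The getFullFileName helper called above is not shown in this example. A plausible sketch follows, assuming it simply appends a date suffix to fileName so that each day's metrics land in a fresh file; the "yyyyMMdd" pattern is an assumed stand-in for FILE_SUFFIX_DATE_PATTERN:

import java.text.SimpleDateFormat;
import java.util.Calendar;

public class FileNameDemo {
  // Assumed stand-in for FILE_SUFFIX_DATE_PATTERN.
  private static final SimpleDateFormat SUFFIX = new SimpleDateFormat("yyyyMMdd");

  // Hypothetical rendering of the helper used in Example 11.
  static String getFullFileName(String fileName, Calendar date) {
    return fileName + "." + SUFFIX.format(date.getTime());
  }

  public static void main(String[] args) {
    System.out.println(getFullFileName("/var/log/metrics.out", Calendar.getInstance()));
  }
}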
Example 12: init

import org.apache.hadoop.metrics.ContextFactory; // import the required package/class
public void init(String contextName, ContextFactory factory) {
  super.init(contextName, factory);
  parseAndSetPeriod(PERIOD_PROPERTY);
  metricsServers = Util.parse(getAttribute(SERVERS_PROPERTY), DEFAULT_PORT);
  unitsTable = getAttributeTable(UNITS_PROPERTY);
  slopeTable = getAttributeTable(SLOPE_PROPERTY);
  tmaxTable = getAttributeTable(TMAX_PROPERTY);
  dmaxTable = getAttributeTable(DMAX_PROPERTY);
  try {
    datagramSocket = new DatagramSocket();
  } catch (SocketException se) {
    se.printStackTrace();
  }
}