This article collects typical usage examples of the Java method org.apache.hadoop.metrics.spi.Util.parse. If you are wondering what Util.parse does, how to use it, or where to find real-world calls to it, the curated method examples below may help. You can also explore further usage examples of the enclosing class, org.apache.hadoop.metrics.spi.Util.
The following shows 4 code examples of the Util.parse method, sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the system recommend better Java code examples.
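Before the examples, here is a minimal, self-contained sketch of how Util.parse is typically called: it takes a comma-separated list of host[:port] specifications plus a default port and returns a list of InetSocketAddress objects. The class name UtilParseSketch and the server string below are hypothetical and used only for illustration.

import java.net.InetSocketAddress;
import java.util.List;

import org.apache.hadoop.metrics.spi.Util;

public class UtilParseSketch {
  public static void main(String[] args) {
    // Hypothetical server list; hosts without an explicit port fall back to the default port.
    String servers = "ganglia1.example.com:8649,ganglia2.example.com";
    int defaultPort = 8649;

    // Util.parse splits the comma-separated spec and builds one socket address per entry.
    List<InetSocketAddress> addrs = Util.parse(servers, defaultPort);
    for (InetSocketAddress addr : addrs) {
      System.out.println(addr.getHostName() + " -> " + addr.getPort());
    }
  }
}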
Example 1: init

import org.apache.hadoop.metrics.spi.Util; // import of the package/class this method depends on

@InterfaceAudience.Private
public void init(String contextName, ContextFactory factory) {
  super.init(contextName, factory);
  parseAndSetPeriod(PERIOD_PROPERTY);

  // Parse the configured metrics servers, using DEFAULT_PORT for entries
  // that do not specify a port.
  metricsServers =
      Util.parse(getAttribute(SERVERS_PROPERTY), DEFAULT_PORT);

  unitsTable = getAttributeTable(UNITS_PROPERTY);
  slopeTable = getAttributeTable(SLOPE_PROPERTY);
  tmaxTable = getAttributeTable(TMAX_PROPERTY);
  dmaxTable = getAttributeTable(DMAX_PROPERTY);

  try {
    datagramSocket = new DatagramSocket();
  } catch (SocketException se) {
    se.printStackTrace();
  }
}
Example 2: init

import org.apache.hadoop.metrics.spi.Util; // import of the package/class this method depends on

public void init(String contextName, ContextFactory factory) {
  super.init(contextName, factory);
  parseAndSetPeriod(PERIOD_PROPERTY);

  metricsServers =
      Util.parse(getAttribute(SERVERS_PROPERTY), DEFAULT_PORT);

  unitsTable = getAttributeTable(UNITS_PROPERTY);
  slopeTable = getAttributeTable(SLOPE_PROPERTY);
  tmaxTable = getAttributeTable(TMAX_PROPERTY);
  dmaxTable = getAttributeTable(DMAX_PROPERTY);

  try {
    datagramSocket = new DatagramSocket();
  } catch (SocketException se) {
    se.printStackTrace();
  }
}
Example 3: init

import org.apache.hadoop.metrics.spi.Util; // import of the package/class this method depends on

@Override
@InterfaceAudience.Private
public void init(String contextName, ContextFactory factory) {
  super.init(contextName, factory);
  parseAndSetPeriod(PERIOD_PROPERTY);

  metricsServers =
      Util.parse(getAttribute(SERVERS_PROPERTY), DEFAULT_PORT);

  unitsTable = getAttributeTable(UNITS_PROPERTY);
  slopeTable = getAttributeTable(SLOPE_PROPERTY);
  tmaxTable = getAttributeTable(TMAX_PROPERTY);
  dmaxTable = getAttributeTable(DMAX_PROPERTY);

  // Optional multicast support: read the flag and TTL from the configuration.
  multicastEnabled = Boolean.parseBoolean(getAttribute(MULTICAST_PROPERTY));
  String multicastTtlValue = getAttribute(MULTICAST_TTL_PROPERTY);
  if (multicastEnabled) {
    if (multicastTtlValue == null) {
      multicastTtl = DEFAULT_MULTICAST_TTL;
    } else {
      multicastTtl = Integer.parseInt(multicastTtlValue);
    }
  }

  try {
    if (multicastEnabled) {
      LOG.info("Enabling multicast for Ganglia with TTL " + multicastTtl);
      datagramSocket = new MulticastSocket();
      ((MulticastSocket) datagramSocket).setTimeToLive(multicastTtl);
    } else {
      datagramSocket = new DatagramSocket();
    }
  } catch (IOException e) {
    LOG.error(e);
  }
}
Example 4: init

import org.apache.hadoop.metrics.spi.Util; // import of the package/class this method depends on

public void init(String contextName, ContextFactory factory) {
  super.init(contextName, factory);

  // Parse the reporting period by hand; an unparsable or non-positive
  // value is rejected as invalid.
  String periodStr = getAttribute(PERIOD_PROPERTY);
  if (periodStr != null) {
    int period = 0;
    try {
      period = Integer.parseInt(periodStr);
    } catch (NumberFormatException nfe) {
      // fall through: period stays 0 and is reported as invalid below
    }
    if (period <= 0) {
      throw new MetricsException("Invalid period: " + periodStr);
    }
    setPeriod(period);
  }

  metricsServers =
      Util.parse(getAttribute(SERVERS_PROPERTY), DEFAULT_PORT);

  unitsTable = getAttributeTable(UNITS_PROPERTY);
  slopeTable = getAttributeTable(SLOPE_PROPERTY);
  tmaxTable = getAttributeTable(TMAX_PROPERTY);
  dmaxTable = getAttributeTable(DMAX_PROPERTY);

  try {
    datagramSocket = new DatagramSocket();
  } catch (SocketException se) {
    se.printStackTrace();
  }
}