本文整理汇总了Java中org.apache.hadoop.metrics2.source.JvmMetrics.create方法的典型用法代码示例。如果您正苦于以下问题:Java JvmMetrics.create方法的具体用法?Java JvmMetrics.create怎么用?Java JvmMetrics.create使用的例子?那么, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类org.apache.hadoop.metrics2.source.JvmMetrics
的用法示例。
在下文中一共展示了JvmMetrics.create方法的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: start
import org.apache.hadoop.metrics2.source.JvmMetrics; //导入方法依赖的package包/类
/**
* Start listening for edits via RPC.
*/
/**
 * Start listening for edits via RPC.
 *
 * <p>Order matters: the journal dir is validated first, the Kerberos login is
 * performed before any server socket is opened, and the HTTP server is started
 * (and its URI recorded) before the RPC server.
 *
 * @throws IOException if the journal directory, login, or servers fail to start
 */
public void start() throws IOException {
  Preconditions.checkState(!isStarted(), "JN already running");

  // Make sure the local journal storage directory exists and is usable.
  validateAndCreateJournalDir(localDir);

  // Bring up the metrics system and publish JVM-level metrics for this JN.
  DefaultMetricsSystem.initialize("JournalNode");
  JvmMetrics.create("JournalNode",
      conf.get(DFSConfigKeys.DFS_METRICS_SESSION_ID_KEY),
      DefaultMetricsSystem.instance());

  // Kerberos login with the configured keytab/principal, bound to the RPC host.
  InetSocketAddress rpcAddress = JournalNodeRpcServer.getAddress(conf);
  String hostname = rpcAddress.getHostName();
  SecurityUtil.login(conf, DFSConfigKeys.DFS_JOURNALNODE_KEYTAB_FILE_KEY,
      DFSConfigKeys.DFS_JOURNALNODE_KERBEROS_PRINCIPAL_KEY, hostname);

  registerJNMXBean();

  // HTTP endpoint first; remember its URI once it is up.
  httpServer = new JournalNodeHttpServer(conf, this);
  httpServer.start();
  httpServerURI = httpServer.getServerURI().toString();

  // Finally open the RPC server that receives edits.
  rpcServer = new JournalNodeRpcServer(conf, this);
  rpcServer.start();
}
示例2: start
import org.apache.hadoop.metrics2.source.JvmMetrics; //导入方法依赖的package包/类
/**
* Start listening for edits via RPC.
*/
/**
 * Start listening for edits via RPC.
 *
 * <p>Validates storage, initializes metrics, performs the Kerberos login
 * before any sockets open, then starts the HTTP and RPC servers in that order.
 *
 * @throws IOException if the journal directory, login, or servers fail to start
 */
public void start() throws IOException {
  Preconditions.checkState(!isStarted(), "JN already running");

  // Ensure the local journal storage directory exists.
  validateAndCreateJournalDir(localDir);

  // Metrics system plus JVM-level metrics for this process.
  DefaultMetricsSystem.initialize("JournalNode");
  JvmMetrics.create("JournalNode",
      conf.get(DFSConfigKeys.DFS_METRICS_SESSION_ID_KEY),
      DefaultMetricsSystem.instance());

  // Log in using the configured keytab and user principal for the RPC host.
  InetSocketAddress rpcAddress = JournalNodeRpcServer.getAddress(conf);
  SecurityUtil.login(conf, DFSConfigKeys.DFS_JOURNALNODE_KEYTAB_FILE_KEY,
      DFSConfigKeys.DFS_JOURNALNODE_USER_NAME_KEY, rpcAddress.getHostName());

  // HTTP server comes up first, then the RPC server.
  httpServer = new JournalNodeHttpServer(conf, this);
  httpServer.start();

  rpcServer = new JournalNodeRpcServer(conf, this);
  rpcServer.start();
}
示例3: start
import org.apache.hadoop.metrics2.source.JvmMetrics; //导入方法依赖的package包/类
/**
* Start listening for edits via RPC.
*/
/**
 * Start listening for edits via RPC.
 *
 * <p>Validates storage, initializes metrics, logs in via Kerberos before any
 * sockets open, registers the JMX bean, then starts HTTP followed by RPC.
 *
 * @throws IOException if the journal directory, login, or servers fail to start
 */
public void start() throws IOException {
  Preconditions.checkState(!isStarted(), "JN already running");

  // The configured journal storage directory must exist before anything else.
  validateAndCreateJournalDir(localDir);

  // Initialize the metrics system and register JVM metrics for this JN.
  DefaultMetricsSystem.initialize("JournalNode");
  JvmMetrics.create("JournalNode",
      conf.get(DFSConfigKeys.DFS_METRICS_SESSION_ID_KEY),
      DefaultMetricsSystem.instance());

  // Kerberos login bound to the RPC server's hostname.
  InetSocketAddress rpcAddress = JournalNodeRpcServer.getAddress(conf);
  String hostname = rpcAddress.getHostName();
  SecurityUtil.login(conf, DFSConfigKeys.DFS_JOURNALNODE_KEYTAB_FILE_KEY,
      DFSConfigKeys.DFS_JOURNALNODE_USER_NAME_KEY, hostname);

  registerJNMXBean();

  // HTTP server first, then the RPC server that receives edits.
  httpServer = new JournalNodeHttpServer(conf, this);
  httpServer.start();

  rpcServer = new JournalNodeRpcServer(conf, this);
  rpcServer.start();
}
示例4: start
import org.apache.hadoop.metrics2.source.JvmMetrics; //导入方法依赖的package包/类
/**
* Start listening for edits via RPC.
*/
/**
 * Start listening for edits via RPC.
 *
 * <p>Validates storage, initializes metrics, logs in via Kerberos before any
 * sockets open, registers the JMX bean, starts the HTTP server (recording its
 * URI), and finally starts the RPC server.
 *
 * @throws IOException if the journal directory, login, or servers fail to start
 */
public void start() throws IOException {
  Preconditions.checkState(!isStarted(), "JN already running");

  // Journal storage directory must be present and valid.
  validateAndCreateJournalDir(localDir);

  // Metrics system plus JVM-level metrics for this process.
  DefaultMetricsSystem.initialize("JournalNode");
  JvmMetrics.create("JournalNode",
      conf.get(DFSConfigKeys.DFS_METRICS_SESSION_ID_KEY),
      DefaultMetricsSystem.instance());

  // Log in with the configured keytab/user principal for the RPC host.
  InetSocketAddress rpcAddress = JournalNodeRpcServer.getAddress(conf);
  SecurityUtil.login(conf, DFSConfigKeys.DFS_JOURNALNODE_KEYTAB_FILE_KEY,
      DFSConfigKeys.DFS_JOURNALNODE_USER_NAME_KEY, rpcAddress.getHostName());

  registerJNMXBean();

  // Start HTTP, capture its URI, then bring up RPC.
  httpServer = new JournalNodeHttpServer(conf, this);
  httpServer.start();
  httpServerURI = httpServer.getServerURI().toString();

  rpcServer = new JournalNodeRpcServer(conf, this);
  rpcServer.start();
}
示例5: create
import org.apache.hadoop.metrics2.source.JvmMetrics; //导入方法依赖的package包/类
/**
 * Build and register an {@code Nfs3Metrics} source (plus JVM metrics) with the
 * default metrics system for the given NFS gateway.
 *
 * @param conf        configuration supplying the session id and percentile intervals
 * @param gatewayName name under which the gateway's metrics are registered
 * @return the registered {@code Nfs3Metrics} instance
 */
public static Nfs3Metrics create(Configuration conf, String gatewayName) {
  MetricsSystem metricsSystem = DefaultMetricsSystem.instance();
  String sessionId = conf.get(DFSConfigKeys.DFS_METRICS_SESSION_ID_KEY);

  // JVM-level metrics are registered alongside the gateway metrics.
  JvmMetrics jvmMetrics = JvmMetrics.create(gatewayName, sessionId, metricsSystem);

  // Percentile measurement is [50th,75th,90th,95th,99th] currently
  int[] percentileIntervals =
      conf.getInts(NfsConfigKeys.NFS_METRICS_PERCENTILES_INTERVALS_KEY);

  return metricsSystem.register(
      new Nfs3Metrics(gatewayName, sessionId, percentileIntervals, jvmMetrics));
}
示例6: create
import org.apache.hadoop.metrics2.source.JvmMetrics; //导入方法依赖的package包/类
/**
 * Build and register a {@code NameNodeMetrics} source (plus JVM metrics) with
 * the default metrics system for the given NameNode role.
 *
 * @param conf configuration supplying the session id and percentile intervals
 * @param r    NameNode role; its string form names the metrics process
 * @return the registered {@code NameNodeMetrics} instance
 */
public static NameNodeMetrics create(Configuration conf, NamenodeRole r) {
  String processName = r.toString();
  String sessionId = conf.get(DFSConfigKeys.DFS_METRICS_SESSION_ID_KEY);
  MetricsSystem metricsSystem = DefaultMetricsSystem.instance();
  JvmMetrics jvmMetrics = JvmMetrics.create(processName, sessionId, metricsSystem);

  // Percentile measurement is off by default, by watching no intervals
  int[] percentileIntervals =
      conf.getInts(DFSConfigKeys.DFS_METRICS_PERCENTILES_INTERVALS_KEY);

  return metricsSystem.register(
      new NameNodeMetrics(processName, sessionId, percentileIntervals, jvmMetrics));
}
示例7: create
import org.apache.hadoop.metrics2.source.JvmMetrics; //导入方法依赖的package包/类
/**
 * Build and register a {@code DataNodeMetrics} source (plus JVM metrics) with
 * the default metrics system.
 *
 * @param conf   configuration supplying the session id and percentile intervals
 * @param dnName DataNode name used to derive the metrics source name; may be empty
 * @return the registered {@code DataNodeMetrics} instance
 */
public static DataNodeMetrics create(Configuration conf, String dnName) {
  String sessionId = conf.get(DFSConfigKeys.DFS_METRICS_SESSION_ID_KEY);
  MetricsSystem metricsSystem = DefaultMetricsSystem.instance();
  JvmMetrics jvmMetrics = JvmMetrics.create("DataNode", sessionId, metricsSystem);

  // Derive the registration name: a random suffix when no DN name is given,
  // otherwise the DN name with ':' replaced by '-'.
  String suffix;
  if (dnName.isEmpty()) {
    suffix = "UndefinedDataNodeName" + DFSUtil.getRandom().nextInt();
  } else {
    suffix = dnName.replace(':', '-');
  }
  String sourceName = "DataNodeActivity-" + suffix;

  // Percentile measurement is off by default, by watching no intervals
  int[] percentileIntervals =
      conf.getInts(DFSConfigKeys.DFS_METRICS_PERCENTILES_INTERVALS_KEY);

  return metricsSystem.register(sourceName, null,
      new DataNodeMetrics(sourceName, sessionId, percentileIntervals, jvmMetrics));
}
示例8: create
import org.apache.hadoop.metrics2.source.JvmMetrics; //导入方法依赖的package包/类
/**
 * Build and register a {@code DataNodeMetrics} source (plus JVM metrics) with
 * the default metrics system.
 *
 * @param conf   configuration supplying the session id and percentile intervals
 * @param dnName DataNode name used to derive the metrics source name; may be empty
 * @return the registered {@code DataNodeMetrics} instance
 */
public static DataNodeMetrics create(Configuration conf, String dnName) {
  String sessionId = conf.get(DFSConfigKeys.DFS_METRICS_SESSION_ID_KEY);
  MetricsSystem metricsSystem = DefaultMetricsSystem.instance();
  JvmMetrics jvmMetrics = JvmMetrics.create("DataNode", sessionId, metricsSystem);

  // Derive the registration name: a random suffix when no DN name is given,
  // otherwise the DN name with ':' replaced by '-'.
  String suffix;
  if (dnName.isEmpty()) {
    suffix = "UndefinedDataNodeName" + ThreadLocalRandom.current().nextInt();
  } else {
    suffix = dnName.replace(':', '-');
  }
  String sourceName = "DataNodeActivity-" + suffix;

  // Percentile measurement is off by default, by watching no intervals
  int[] percentileIntervals =
      conf.getInts(DFSConfigKeys.DFS_METRICS_PERCENTILES_INTERVALS_KEY);

  return metricsSystem.register(sourceName, null,
      new DataNodeMetrics(sourceName, sessionId, percentileIntervals, jvmMetrics));
}
示例9: create
import org.apache.hadoop.metrics2.source.JvmMetrics; //导入方法依赖的package包/类
/**
 * Build and register a {@code ProxyMetrics} source (plus JVM metrics) with the
 * default metrics system under the fixed process name "NNPROXY".
 *
 * @param conf configuration supplying the metrics session id
 * @return the registered {@code ProxyMetrics} instance
 */
public static ProxyMetrics create(Configuration conf) {
  final String processName = "NNPROXY";
  final String sessionId = conf.get(DFSConfigKeys.DFS_METRICS_SESSION_ID_KEY);
  MetricsSystem metricsSystem = DefaultMetricsSystem.instance();
  JvmMetrics jvmMetrics = JvmMetrics.create(processName, sessionId, metricsSystem);
  return metricsSystem.register(new ProxyMetrics(processName, sessionId, jvmMetrics));
}
示例10: create
import org.apache.hadoop.metrics2.source.JvmMetrics; //导入方法依赖的package包/类
/**
 * Build and register an {@code Nfs3Metrics} source (plus JVM metrics) with the
 * default metrics system for the given NFS gateway.
 *
 * @param conf        configuration supplying the session id and percentile intervals
 * @param gatewayName name under which the gateway's metrics are registered
 * @return the registered {@code Nfs3Metrics} instance
 */
public static Nfs3Metrics create(Configuration conf, String gatewayName) {
  String sessionId = conf.get(DFSConfigKeys.DFS_METRICS_SESSION_ID_KEY);
  MetricsSystem ms = DefaultMetricsSystem.instance();
  JvmMetrics jm = JvmMetrics.create(gatewayName, sessionId, ms);
  // BUG FIX: Configuration.getInts(name) expects a property NAME, but the
  // original passed it a property VALUE -- conf.getInts(conf.get(KEY, DEFAULT))
  // -- so the configured interval string was looked up as if it were a key and
  // the percentile intervals came back empty regardless of configuration.
  // Query the key directly, matching the other metrics factories in this file.
  // NOTE(review): if NFS_METRICS_PERCENTILES_INTERVALS_DEFAULT is meant as a
  // fallback value, it must be parsed, not looked up -- confirm its intent.
  int[] intervals =
      conf.getInts(NfsConfigKeys.NFS_METRICS_PERCENTILES_INTERVALS_KEY);
  return ms.register(new Nfs3Metrics(gatewayName, sessionId, intervals, jm));
}
示例11: create
import org.apache.hadoop.metrics2.source.JvmMetrics; //导入方法依赖的package包/类
/**
 * Build and register a {@code NameNodeMetrics} source with the default metrics
 * system for the given NameNode role. JVM metrics are registered as a side
 * effect; this variant does not pass them to {@code NameNodeMetrics}.
 *
 * @param conf configuration supplying the session id and percentile intervals
 * @param r    NameNode role; its string form names the metrics process
 * @return the registered {@code NameNodeMetrics} instance
 */
public static NameNodeMetrics create(Configuration conf, NamenodeRole r) {
  String processName = r.toString();
  String sessionId = conf.get(DFSConfigKeys.DFS_METRICS_SESSION_ID_KEY);
  MetricsSystem metricsSystem = DefaultMetricsSystem.instance();

  // Registered purely for its side effect; the return value is not kept.
  JvmMetrics.create(processName, sessionId, metricsSystem);

  // Percentile measurement is off by default, by watching no intervals
  int[] percentileIntervals =
      conf.getInts(DFSConfigKeys.DFS_METRICS_PERCENTILES_INTERVALS_KEY);

  return metricsSystem.register(
      new NameNodeMetrics(processName, sessionId, percentileIntervals));
}
示例12: create
import org.apache.hadoop.metrics2.source.JvmMetrics; //导入方法依赖的package包/类
/**
 * Build and register a {@code DataNodeMetrics} source with the default metrics
 * system. JVM metrics are registered as a side effect; this variant does not
 * pass them to {@code DataNodeMetrics}.
 *
 * @param conf   configuration supplying the session id and percentile intervals
 * @param dnName DataNode name used to derive the metrics source name; may be empty
 * @return the registered {@code DataNodeMetrics} instance
 */
public static DataNodeMetrics create(Configuration conf, String dnName) {
  String sessionId = conf.get(DFSConfigKeys.DFS_METRICS_SESSION_ID_KEY);
  MetricsSystem metricsSystem = DefaultMetricsSystem.instance();

  // Registered purely for its side effect; the return value is not kept.
  JvmMetrics.create("DataNode", sessionId, metricsSystem);

  // Derive the registration name: a random suffix when no DN name is given,
  // otherwise the DN name with ':' replaced by '-'.
  String suffix;
  if (dnName.isEmpty()) {
    suffix = "UndefinedDataNodeName" + DFSUtil.getRandom().nextInt();
  } else {
    suffix = dnName.replace(':', '-');
  }
  String sourceName = "DataNodeActivity-" + suffix;

  // Percentile measurement is off by default, by watching no intervals
  int[] percentileIntervals =
      conf.getInts(DFSConfigKeys.DFS_METRICS_PERCENTILES_INTERVALS_KEY);

  return metricsSystem.register(sourceName, null,
      new DataNodeMetrics(sourceName, sessionId, percentileIntervals));
}
示例13: init
import org.apache.hadoop.metrics2.source.JvmMetrics; //导入方法依赖的package包/类
/**
 * One-time metrics bootstrap: initialize the HBase metrics system and register
 * a JVM metrics source under the given name. Calls after the first are no-ops.
 *
 * @param name name under which the JVM metrics source is registered
 */
synchronized void init(String name) {
  if (inited) {
    return;
  }
  inited = true;
  DefaultMetricsSystem.initialize(HBASE_METRICS_SYSTEM_NAME);
  jvmMetricsSource =
      JvmMetrics.create(name, "", DefaultMetricsSystem.instance());
}
示例14: create
import org.apache.hadoop.metrics2.source.JvmMetrics; //导入方法依赖的package包/类
/**
 * Build and register a {@code NameNodeMetrics} source with the default metrics
 * system for the given NameNode role. JVM metrics are registered as a side
 * effect; this variant does not pass them to {@code NameNodeMetrics}.
 *
 * @param conf configuration supplying the session id and percentile intervals
 * @param r    NameNode role; its string form names the metrics process
 * @return the registered {@code NameNodeMetrics} instance
 */
public static NameNodeMetrics create(Configuration conf, NamenodeRole r) {
  MetricsSystem metricsSystem = DefaultMetricsSystem.instance();
  String sessionId = conf.get(DFSConfigKeys.DFS_METRICS_SESSION_ID_KEY);
  String processName = r.toString();

  // Registered purely for its side effect; the return value is not kept.
  JvmMetrics.create(processName, sessionId, metricsSystem);

  // Percentile measurement is off by default, by watching no intervals
  int[] percentileIntervals =
      conf.getInts(DFSConfigKeys.DFS_METRICS_PERCENTILES_INTERVALS_KEY);

  return metricsSystem.register(
      new NameNodeMetrics(processName, sessionId, percentileIntervals));
}
示例15: create
import org.apache.hadoop.metrics2.source.JvmMetrics; //导入方法依赖的package包/类
/**
 * Build and register a {@code DataNodeMetrics} source with the default metrics
 * system. JVM metrics are registered as a side effect; this variant does not
 * pass them to {@code DataNodeMetrics}.
 *
 * @param conf   configuration supplying the session id and percentile intervals
 * @param dnName DataNode name used to derive the metrics source name; may be empty
 * @return the registered {@code DataNodeMetrics} instance
 */
public static DataNodeMetrics create(Configuration conf, String dnName) {
  String sessionId = conf.get(DFSConfigKeys.DFS_METRICS_SESSION_ID_KEY);
  MetricsSystem metricsSystem = DefaultMetricsSystem.instance();

  // Registered purely for its side effect; the return value is not kept.
  JvmMetrics.create("DataNode", sessionId, metricsSystem);

  // Derive the registration name: a random suffix when no DN name is given,
  // otherwise the DN name with ':' replaced by '-'.
  String suffix;
  if (dnName.isEmpty()) {
    suffix = "UndefinedDataNodeName" + DFSUtil.getRandom().nextInt();
  } else {
    suffix = dnName.replace(':', '-');
  }
  String sourceName = "DataNodeActivity-" + suffix;

  // Percentile measurement is off by default, by watching no intervals
  int[] percentileIntervals =
      conf.getInts(DFSConfigKeys.DFS_METRICS_PERCENTILES_INTERVALS_KEY);

  return metricsSystem.register(sourceName, null,
      new DataNodeMetrics(sourceName, sessionId, percentileIntervals));
}