This article collects typical usage examples of the Java class com.twitter.finagle.thrift.ThriftServerFramedCodec. If you are unsure what ThriftServerFramedCodec is for or how to use it, the curated code examples below should help.
The ThriftServerFramedCodec class belongs to the com.twitter.finagle.thrift package. Five code examples of the class are shown below, sorted by popularity by default.
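Before the individual examples, here is a minimal sketch of the pattern they all share: implement the Thrift-generated ServiceIface, wrap it in the generated Service adapter with a TBinaryProtocol.Factory, and pass the result to ServerBuilder.safeBuild with ThriftServerFramedCodec.get() as the codec. MyService and MyServiceImpl are hypothetical placeholders standing in for any Thrift-generated service, not classes from the examples that follow.
import com.twitter.finagle.Service;
import com.twitter.finagle.builder.Server;
import com.twitter.finagle.builder.ServerBuilder;
import com.twitter.finagle.thrift.ThriftServerFramedCodec;
import org.apache.thrift.protocol.TBinaryProtocol;
import java.net.InetSocketAddress;

public class MinimalFramedThriftServer {
  public static void main(String[] args) {
    // MyService/MyServiceImpl are hypothetical placeholders for a Thrift-generated
    // service and its implementation; the generated Service adapter turns them into
    // a Service<byte[], byte[]> speaking binary Thrift over a framed transport.
    Service<byte[], byte[]> service =
        new MyService.Service(new MyServiceImpl(), new TBinaryProtocol.Factory());

    Server server = ServerBuilder.safeBuild(
        service,
        ServerBuilder.get()
            .name("MyThriftServer")
            .codec(ThriftServerFramedCodec.get()) // framed Thrift server codec
            .bindTo(new InetSocketAddress(9090)));
  }
}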
Example 1: startThriftServer
import com.twitter.finagle.thrift.ThriftServerFramedCodec; // import the required package/class
private void startThriftServer(int thriftPort) throws UnknownHostException {
  // Wrap the controller implementation in the Thrift-generated Service adapter.
  TerrapinController.ServiceIface serviceImpl = new TerrapinControllerServiceImpl(
      this.configuration,
      this.zkManager,
      this.hdfsClient,
      this.helixAdmin,
      this.clusterName);
  TerrapinController.Service service =
      new TerrapinController.Service(serviceImpl, new TBinaryProtocol.Factory());
  // Build a framed Thrift server; idle-connection and concurrency limits come
  // from the configuration, and stats are reported to Ostrich.
  this.server = ServerBuilder.safeBuild(
      service,
      ServerBuilder.get()
          .name("TerrapinController")
          .codec(ThriftServerFramedCodec.get())
          .hostConnectionMaxIdleTime(Duration.fromTimeUnit(
              configuration.getInt(Constants.THRIFT_CONN_MAX_IDLE_TIME, 1), TimeUnit.MINUTES))
          .maxConcurrentRequests(configuration.getInt(Constants.THRIFT_MAX_CONCURRENT_REQUESTS,
              100))
          .reportTo(new OstrichStatsReceiver(Stats.get("")))
          .bindTo(new InetSocketAddress(thriftPort)));
  // Start the Ostrich admin service that exposes metrics.
  new OstrichAdminService(configuration.getInt(Constants.OSTRICH_METRICS_PORT, 9999)).start();
}
Example 2: main
import com.twitter.finagle.thrift.ThriftServerFramedCodec; // import the required package/class
public static void main(String[] args) throws Exception {
  // Load server configuration, defaulting to thrift.properties.
  final PropertiesConfiguration config = TerrapinUtil.readPropertiesExitOnFailure(
      System.getProperties().getProperty("terrapin.config", "thrift.properties"));
  OstrichStatsReceiver statsReceiver = new OstrichStatsReceiver(Stats.get(""));
  int listenPort = config.getInt("thrift_port", 9090);
  TerrapinServiceImpl serviceImpl = new TerrapinServiceImpl(config,
      (List) config.getList("cluster_list"));
  Service<byte[], byte[]> service = new TerrapinService.Service(serviceImpl,
      new TBinaryProtocol.Factory());
  // Build the framed Thrift server with a fixed concurrency limit.
  Server server = ServerBuilder.safeBuild(
      service,
      ServerBuilder.get()
          .name("TERRAPIN_THRIFT")
          .codec(ThriftServerFramedCodec.get())
          .hostConnectionMaxIdleTime(Duration.apply(1, TimeUnit.MINUTES))
          .maxConcurrentRequests(3000)
          .reportTo(statsReceiver)
          .bindTo(new InetSocketAddress(listenPort)));
  new OstrichAdminService(config.getInt(Constants.OSTRICH_METRICS_PORT, 9999)).start();
  LOG.info("\n#######################################"
      + "\n# Ready To Serve Requests. #"
      + "\n#######################################");
}
Example 3: startThriftServer
import com.twitter.finagle.thrift.ThriftServerFramedCodec; // import the required package/class
private void startThriftServer(int thriftPort) {
  // Wrap the internal server implementation in the Thrift-generated Service adapter.
  TerrapinServerInternal.ServiceIface serviceImpl = new TerrapinServerInternalImpl(configuration,
      resourcePartitionMap);
  TerrapinServerInternal.Service service =
      new TerrapinServerInternal.Service(serviceImpl, new TBinaryProtocol.Factory());
  this.server = ServerBuilder.safeBuild(
      service,
      ServerBuilder.get()
          .name("TerrapinServer")
          .codec(ThriftServerFramedCodec.get())
          .hostConnectionMaxIdleTime(Duration.fromTimeUnit(
              configuration.getInt(Constants.THRIFT_CONN_MAX_IDLE_TIME, 1), TimeUnit.MINUTES))
          .maxConcurrentRequests(configuration.getInt(Constants.THRIFT_MAX_CONCURRENT_REQUESTS,
              100))
          .reportTo(new OstrichStatsReceiver(Stats.get("")))
          .bindTo(new InetSocketAddress(thriftPort)));
  new OstrichAdminService(configuration.getInt(Constants.OSTRICH_METRICS_PORT, 9999)).start();
}
Example 4: main
import com.twitter.finagle.thrift.ThriftServerFramedCodec; // import the required package/class
public static void main(String[] args) {
  try {
    String serverHostName = InetAddress.getLocalHost().getHostName();
    PinLaterQueueConfig queueConfig = new PinLaterQueueConfig(CONFIGURATION);
    queueConfig.initialize();
    String backend = CONFIGURATION.getString("PINLATER_BACKEND");
    PinLaterBackendIface backendIFace = getBackendIface(backend, serverHostName);
    PinLaterServiceImpl serviceImpl = new PinLaterServiceImpl(backendIFace, queueConfig);
    PinLater.Service service = new PinLater.Service(serviceImpl, new TBinaryProtocol.Factory());
    // Build the framed Thrift server and register it for graceful shutdown.
    ServiceShutdownHook.register(ServerBuilder.safeBuild(
        service,
        ServerBuilder.get()
            .name("PinLaterService")
            .codec(ThriftServerFramedCodec.get())
            .hostConnectionMaxIdleTime(Duration.fromTimeUnit(
                CONFIGURATION.getInt("SERVER_CONN_MAX_IDLE_TIME_MINUTES"), TimeUnit.MINUTES))
            .maxConcurrentRequests(CONFIGURATION.getInt("MAX_CONCURRENT_REQUESTS"))
            .reportTo(new OstrichStatsReceiver(Stats.get("")))
            .bindTo(new InetSocketAddress(CONFIGURATION.getInt("THRIFT_PORT")))));
    new OstrichAdminService(CONFIGURATION.getInt("OSTRICH_PORT")).start();
    LOG.info("\n#######################################"
        + "\n# Ready To Serve Requests. #"
        + "\n#######################################");
  } catch (Exception e) {
    LOG.error("Failed to start the pinlater server", e);
    System.exit(1);
  }
}
Example 5: runServer
import com.twitter.finagle.thrift.ThriftServerFramedCodec; // import the required package/class
static Pair<DistributedLogServiceImpl, Server> runServer(
    ServerConfiguration serverConf,
    DistributedLogConfiguration dlConf,
    DynamicDistributedLogConfiguration dynDlConf,
    URI dlUri,
    StreamPartitionConverter partitionConverter,
    StatsProvider provider,
    int port,
    CountDownLatch keepAliveLatch,
    StatsReceiver statsReceiver,
    boolean thriftmux,
    StreamConfigProvider streamConfProvider) throws IOException {
  logger.info("Running server @ uri {}.", dlUri);

  boolean perStreamStatsEnabled = serverConf.isPerStreamStatEnabled();
  StatsLogger perStreamStatsLogger;
  if (perStreamStatsEnabled) {
    perStreamStatsLogger = provider.getStatsLogger("stream");
  } else {
    perStreamStatsLogger = NullStatsLogger.INSTANCE;
  }

  // DistributedLog service implementation.
  DistributedLogServiceImpl dlService = new DistributedLogServiceImpl(
      serverConf,
      dlConf,
      dynDlConf,
      streamConfProvider,
      dlUri,
      partitionConverter,
      provider.getStatsLogger(""),
      perStreamStatsLogger,
      keepAliveLatch);

  StatsReceiver serviceStatsReceiver = statsReceiver.scope("service");
  StatsLogger serviceStatsLogger = provider.getStatsLogger("service");

  // Framed Thrift server with keep-alive enabled.
  ServerBuilder serverBuilder = ServerBuilder.get()
      .name("DistributedLogServer")
      .codec(ThriftServerFramedCodec.get())
      .reportTo(statsReceiver)
      .keepAlive(true)
      .bindTo(new InetSocketAddress(port));

  if (thriftmux) {
    logger.info("Using thriftmux.");
    // Switch the underlying stack to ThriftMux, with unbounded read/write
    // timeouts and TCP keep-alive turned on.
    Tuple2<Transport.Liveness, Stack.Param<Transport.Liveness>> livenessParam = new Transport.Liveness(
        Duration.Top(), Duration.Top(), Option.apply((Object) Boolean.valueOf(true))).mk();
    serverBuilder = serverBuilder.stack(ThriftMuxServer$.MODULE$.configured(livenessParam._1(), livenessParam._2()));
  }

  logger.info("DistributedLogServer running with the following configuration : \n{}", dlConf.getPropsAsString());

  // Start the DistributedLog server, requiring a client id and collecting per-request stats.
  Server server = ServerBuilder.safeBuild(
      new ClientIdRequiredFilter<byte[], byte[]>(serviceStatsReceiver).andThen(
          new StatsFilter<byte[], byte[]>(serviceStatsLogger).andThen(
              new DistributedLogService.Service(dlService, new TBinaryProtocol.Factory()))),
      serverBuilder);
  logger.info("Started DistributedLog Server.");
  return Pair.of(dlService, server);
}