本文整理汇总了Java中org.apache.solr.common.cloud.ZkNodeProps.get方法的典型用法代码示例。如果您正苦于以下问题:Java ZkNodeProps.get方法的具体用法?Java ZkNodeProps.get怎么用?Java ZkNodeProps.get使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类org.apache.solr.common.cloud.ZkNodeProps
的用法示例。
在下文中一共展示了ZkNodeProps.get方法的4个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: checkRequired
import org.apache.solr.common.cloud.ZkNodeProps; //导入方法依赖的package包/类
/**
 * Ensures every property named in {@code props} is present on the message.
 * If any one of them is absent, fails fast with a BAD_REQUEST
 * {@link SolrException} listing the full set of required parameter names.
 *
 * @param message the ZK message whose properties are validated
 * @param props   the property keys that must all be non-null on the message
 */
private void checkRequired(ZkNodeProps message, String... props) {
  for (String required : props) {
    if (message.get(required) != null) {
      continue; // this one is present; check the next
    }
    throw new SolrException(ErrorCode.BAD_REQUEST, StrUtils.join(Arrays.asList(props),',') +" are required params" );
  }
}
示例2: createCollection
import org.apache.solr.common.cloud.ZkNodeProps; //导入方法依赖的package包/类
/**
 * Returns a new ClusterState with a freshly created collection added.
 * The router is taken from the message's router spec (default router when
 * unspecified), one slice is created per entry in {@code shards}, and each
 * slice is assigned its portion of the router's full hash range.
 *
 * @param state          the current cluster state to extend
 * @param collectionName name of the collection to create
 * @param shards         slice names, in order; one slice is created per name
 * @param message        the originating ZK message carrying collection props
 * @return a copy of {@code state} including the new collection
 */
private ClusterState createCollection(ClusterState state, String collectionName, List<String> shards , ZkNodeProps message) {
  log.info("Create collection {} with shards {}", collectionName, shards);

  // Resolve the router: an explicit "name" in the spec wins, else the default.
  Map<String, Object> routerSpec = DocRouter.getRouterSpec(message);
  Object specifiedRouter = routerSpec.get("name");
  String routerName = specifiedRouter == null ? DocRouter.DEFAULT_NAME : (String) specifiedRouter;
  DocRouter router = DocRouter.getDocRouter(routerName);

  // Partition the router's full hash range into one range per requested shard.
  List<DocRouter.Range> ranges = router.partitionRange(shards.size(), router.fullRange());

  Map<String, Slice> newSlices = new LinkedHashMap<>();
  int rangeIdx = 0;
  for (String sliceName : shards) {
    Map<String, Object> sliceProps = new LinkedHashMap<>(1);
    sliceProps.put(Slice.RANGE, ranges == null ? null : ranges.get(rangeIdx));
    newSlices.put(sliceName, new Slice(sliceName, null, sliceProps));
    rangeIdx++;
  }

  // TODO: fill in with collection properties read from the /collections/<collectionName> node
  Map<String, Object> collectionProps = new HashMap<>();
  for (Entry<String, Object> e : OverseerCollectionProcessor.COLL_PROPS.entrySet()) {
    // Message value takes precedence; fall back to the processor default.
    Object val = message.get(e.getKey());
    if (val == null) {
      val = OverseerCollectionProcessor.COLL_PROPS.get(e.getKey());
    }
    if (val != null) {
      collectionProps.put(e.getKey(), val);
    }
  }
  collectionProps.put(DocCollection.DOC_ROUTER, routerSpec);

  // Collections not created through the API are flagged as auto-created.
  if (message.getStr("fromApi") == null) {
    collectionProps.put("autoCreated", "true");
  }

  DocCollection newCollection = new DocCollection(collectionName, newSlices, collectionProps, router);
  return state.copyWith(singletonMap(newCollection.getName(), newCollection));
}
示例3: migrate
import org.apache.solr.common.cloud.ZkNodeProps; //导入方法依赖的package包/类
/**
 * Migrates all documents routed under a given split.key from a source
 * collection to a target collection, pairing every matching source slice
 * with every matching target slice and delegating each pair to
 * {@code migrateKey}.
 *
 * <p>Both collections must use a {@link CompositeIdRouter}, otherwise the
 * split.key has no routing meaning and the request is rejected.
 *
 * @param clusterState current cluster state used to resolve collections/slices
 * @param message      ZK message carrying "collection", "split.key",
 *                     "target.collection", optional "forward.timeout" (seconds,
 *                     default 600) and an optional async request id
 * @param results      accumulator for per-shard migration results
 * @throws KeeperException      on ZooKeeper failures during migration
 * @throws InterruptedException if the thread is interrupted while migrating
 */
private void migrate(ClusterState clusterState, ZkNodeProps message, NamedList results) throws KeeperException, InterruptedException {
  String sourceCollectionName = message.getStr("collection");
  String splitKey = message.getStr("split.key");
  String targetCollectionName = message.getStr("target.collection");
  // forward.timeout arrives in seconds; convert to millis for migrateKey.
  int timeout = message.getInt("forward.timeout", 10 * 60) * 1000;

  DocCollection sourceCollection = clusterState.getCollection(sourceCollectionName);
  if (sourceCollection == null) {
    throw new SolrException(ErrorCode.BAD_REQUEST, "Unknown source collection: " + sourceCollectionName);
  }
  DocCollection targetCollection = clusterState.getCollection(targetCollectionName);
  if (targetCollection == null) {
    // BUG FIX: previously reported the *source* collection name here.
    throw new SolrException(ErrorCode.BAD_REQUEST, "Unknown target collection: " + targetCollectionName);
  }
  if (!(sourceCollection.getRouter() instanceof CompositeIdRouter)) {
    throw new SolrException(ErrorCode.BAD_REQUEST, "Source collection must use a compositeId router");
  }
  if (!(targetCollection.getRouter() instanceof CompositeIdRouter)) {
    throw new SolrException(ErrorCode.BAD_REQUEST, "Target collection must use a compositeId router");
  }

  CompositeIdRouter sourceRouter = (CompositeIdRouter) sourceCollection.getRouter();
  CompositeIdRouter targetRouter = (CompositeIdRouter) targetCollection.getRouter();

  // Only slices whose hash range intersects the split.key participate.
  Collection<Slice> sourceSlices = sourceRouter.getSearchSlicesSingle(splitKey, null, sourceCollection);
  if (sourceSlices.isEmpty()) {
    throw new SolrException(ErrorCode.BAD_REQUEST,
        // BUG FIX: added missing space before "for given" (was "...collectionfor given...").
        "No active slices available in source collection: " + sourceCollection + " for given split.key: " + splitKey);
  }
  Collection<Slice> targetSlices = targetRouter.getSearchSlicesSingle(splitKey, null, targetCollection);
  if (targetSlices.isEmpty()) {
    throw new SolrException(ErrorCode.BAD_REQUEST,
        "No active slices available in target collection: " + targetCollection + " for given split.key: " + splitKey);
  }

  String asyncId = null;
  if (message.containsKey(ASYNC) && message.get(ASYNC) != null) {
    asyncId = message.getStr(ASYNC);
  }

  // Migrate every intersecting (source, target) slice pair.
  for (Slice sourceSlice : sourceSlices) {
    for (Slice targetSlice : targetSlices) {
      log.info("Migrating source shard: {} to target shard: {} for split.key = " + splitKey, sourceSlice, targetSlice);
      migrateKey(clusterState, sourceCollection, sourceSlice, targetCollection, targetSlice, splitKey,
          timeout, results, asyncId, message);
    }
  }
}
示例4: createCollection
import org.apache.solr.common.cloud.ZkNodeProps; //导入方法依赖的package包/类
/**
 * Returns a new ClusterState with a freshly created collection added.
 * The router is taken from the message's router spec (default router when
 * unspecified), one slice is created per entry in {@code shards}, and each
 * slice is assigned its portion of the router's full hash range.
 *
 * @param state          the current cluster state to extend
 * @param collectionName name of the collection to create
 * @param shards         slice names, in order; one slice is created per name
 * @param message        the originating ZK message carrying collection props
 * @return a copy of {@code state} including the new collection
 */
private ClusterState createCollection(ClusterState state, String collectionName, List<String> shards , ZkNodeProps message) {
  log.info("Create collection {} with shards {}", collectionName, shards);

  // Resolve the router: an explicit "name" in the spec wins, else the default.
  Map<String, Object> routerSpec = DocRouter.getRouterSpec(message);
  String routerName = routerSpec.get("name") == null ? DocRouter.DEFAULT_NAME : (String) routerSpec.get("name");
  DocRouter router = DocRouter.getDocRouter(routerName);

  // Partition the router's full hash range into one range per requested shard.
  List<DocRouter.Range> ranges = router.partitionRange(shards.size(), router.fullRange());

  // Diamond operator used for consistency with the rest of the file.
  Map<String, Slice> newSlices = new LinkedHashMap<>();
  for (int i = 0; i < shards.size(); i++) {
    String sliceName = shards.get(i);
    Map<String, Object> sliceProps = new LinkedHashMap<>(1);
    sliceProps.put(Slice.RANGE, ranges == null ? null : ranges.get(i));
    newSlices.put(sliceName, new Slice(sliceName, null, sliceProps));
  }

  // TODO: fill in with collection properties read from the /collections/<collectionName> node
  Map<String, Object> collectionProps = new HashMap<>();
  for (Entry<String, Object> e : OverseerCollectionProcessor.COLL_PROPS.entrySet()) {
    // Message value takes precedence; fall back to the processor default.
    Object val = message.get(e.getKey());
    if (val == null) {
      val = OverseerCollectionProcessor.COLL_PROPS.get(e.getKey());
    }
    if (val != null) {
      collectionProps.put(e.getKey(), val);
    }
  }
  collectionProps.put(DocCollection.DOC_ROUTER, routerSpec);

  // Collections not created through the API are flagged as auto-created.
  if (message.getStr("fromApi") == null) {
    collectionProps.put("autoCreated", "true");
  }

  DocCollection newCollection = new DocCollection(collectionName, newSlices, collectionProps, router);
  return state.copyWith(singletonMap(newCollection.getName(), newCollection));
}