

Java ObjectWritable Class Code Examples

This article collects typical usage examples of the Java class org.apache.tinkerpop.gremlin.hadoop.structure.io.ObjectWritable. If you are wondering what the ObjectWritable class is for, how to use it, or what real-world usage looks like, the curated examples below should help.


The ObjectWritable class belongs to the org.apache.tinkerpop.gremlin.hadoop.structure.io package. Fifteen code examples of the class are shown below, sorted by popularity by default. You can upvote the examples you find useful; your feedback helps the system recommend better Java code examples.
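
As a quick orientation before the examples, here is a minimal, hypothetical sketch of the three ObjectWritable operations that recur below: wrapping a value, testing for the empty sentinel, and unwrapping with get(). It assumes a hadoop-gremlin dependency on the classpath; the class name ObjectWritableDemo is chosen purely for illustration.

import org.apache.tinkerpop.gremlin.hadoop.structure.io.ObjectWritable;

public class ObjectWritableDemo {
    public static void main(final String[] args) {
        // wrap an arbitrary Java object so it can travel through Giraph/Spark as a Hadoop Writable
        final ObjectWritable<String> wrapped = new ObjectWritable<>("gremlin");
        System.out.println(wrapped.isEmpty()); // false
        System.out.println(wrapped.get());     // gremlin

        // the shared empty sentinel that the Spark examples below use as an accumulator zero value
        final ObjectWritable<?> empty = ObjectWritable.empty();
        System.out.println(empty.isEmpty());   // true
    }
}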

Example 1: GraknSparkMemory

import org.apache.tinkerpop.gremlin.hadoop.structure.io.ObjectWritable; // import the required package/class
public GraknSparkMemory(final VertexProgram<?> vertexProgram,
                        final Set<MapReduce> mapReducers,
                        final JavaSparkContext sparkContext) {
    if (null != vertexProgram) {
        for (final MemoryComputeKey key : vertexProgram.getMemoryComputeKeys()) {
            this.memoryComputeKeys.put(key.getKey(), key);
        }
    }
    for (final MapReduce mapReduce : mapReducers) {
        this.memoryComputeKeys.put(
                mapReduce.getMemoryKey(),
                MemoryComputeKey.of(mapReduce.getMemoryKey(), Operator.assign, false, false));
    }
    for (final MemoryComputeKey memoryComputeKey : this.memoryComputeKeys.values()) {
        this.sparkMemory.put(
                memoryComputeKey.getKey(),
                sparkContext.accumulator(ObjectWritable.empty(), memoryComputeKey.getKey(),
                        new MemoryAccumulator<>(memoryComputeKey)));
    }
    this.broadcast = sparkContext.broadcast(Collections.emptyMap());
}
 
Developer ID: graknlabs, Project: grakn, Lines of code: 22, Source file: GraknSparkMemory.java

Example 2: get

import org.apache.tinkerpop.gremlin.hadoop.structure.io.ObjectWritable; // import the required package/class
@Override
public <R> R get(final String key) throws IllegalArgumentException {
    if (!this.memoryComputeKeys.containsKey(key)) {
        throw Memory.Exceptions.memoryDoesNotExist(key);
    }
    if (this.inExecute && !this.memoryComputeKeys.get(key).isBroadcast()) {
        throw Memory.Exceptions.memoryDoesNotExist(key);
    }
    final ObjectWritable<R> r = (ObjectWritable<R>) (this.inExecute ?
            this.broadcast.value().get(key) : this.sparkMemory.get(key).value());
    if (null == r || r.isEmpty()) {
        throw Memory.Exceptions.memoryDoesNotExist(key);
    } else {
        return r.get();
    }
}
 
Developer ID: graknlabs, Project: grakn, Lines of code: 17, Source file: GraknSparkMemory.java

Example 3: sendMessage

import org.apache.tinkerpop.gremlin.hadoop.structure.io.ObjectWritable; // import the required package/class
@Override
public void sendMessage(final MessageScope messageScope, final M message) {
    if (messageScope instanceof MessageScope.Local) {
        final MessageScope.Local<M> localMessageScope = (MessageScope.Local) messageScope;
        final Traversal.Admin<Vertex, Edge> incidentTraversal = GiraphMessenger.setVertexStart(localMessageScope.getIncidentTraversal().get().asAdmin(), this.giraphVertex.getValue().get());
        final Direction direction = GiraphMessenger.getOppositeDirection(incidentTraversal);
        incidentTraversal.forEachRemaining(edge ->
                this.giraphComputation.sendMessage(
                        new ObjectWritable<>(edge.vertices(direction).next().id()),
                        new ObjectWritable<>(localMessageScope.getEdgeFunction().apply(message, edge))));
    } else {
        final MessageScope.Global globalMessageScope = (MessageScope.Global) messageScope;
        globalMessageScope.vertices().forEach(vertex ->
                this.giraphComputation.sendMessage(new ObjectWritable<>(vertex.id()), new ObjectWritable<>(message)));
    }
}
 
Developer ID: PKUSilvester, Project: LiteGraph, Lines of code: 17, Source file: GiraphMessenger.java

Example 4: GiraphGraphComputer

import org.apache.tinkerpop.gremlin.hadoop.structure.io.ObjectWritable; // import the required package/class
public GiraphGraphComputer(final HadoopGraph hadoopGraph) {
    super(hadoopGraph);
    final Configuration configuration = hadoopGraph.configuration();
    configuration.getKeys().forEachRemaining(key -> this.giraphConfiguration.set(key, configuration.getProperty(key).toString()));
    this.giraphConfiguration.setMasterComputeClass(GiraphMemory.class);
    this.giraphConfiguration.setVertexClass(GiraphVertex.class);
    this.giraphConfiguration.setComputationClass(GiraphComputation.class);
    this.giraphConfiguration.setWorkerContextClass(GiraphWorkerContext.class);
    this.giraphConfiguration.setOutEdgesClass(EmptyOutEdges.class);
    this.giraphConfiguration.setClass(GiraphConstants.VERTEX_ID_CLASS.getKey(), ObjectWritable.class, ObjectWritable.class);
    this.giraphConfiguration.setClass(GiraphConstants.VERTEX_VALUE_CLASS.getKey(), VertexWritable.class, VertexWritable.class);
    this.giraphConfiguration.setBoolean(GiraphConstants.STATIC_GRAPH.getKey(), true);
    this.giraphConfiguration.setVertexInputFormatClass(GiraphVertexInputFormat.class);
    this.giraphConfiguration.setVertexOutputFormatClass(GiraphVertexOutputFormat.class);
    this.useWorkerThreadsInConfiguration = this.giraphConfiguration.getInt(GiraphConstants.MAX_WORKERS, -666) != -666 || this.giraphConfiguration.getInt(GiraphConstants.NUM_COMPUTE_THREADS.getKey(), -666) != -666;
}
 
Developer ID: PKUSilvester, Project: LiteGraph, Lines of code: 17, Source file: GiraphGraphComputer.java

Example 5: writeMemoryRDD

import org.apache.tinkerpop.gremlin.hadoop.structure.io.ObjectWritable; // import the required package/class
@Override
public <K, V> Iterator<KeyValue<K, V>> writeMemoryRDD(final Configuration configuration, final String memoryKey, JavaPairRDD<K, V> memoryRDD) {
    final org.apache.hadoop.conf.Configuration hadoopConfiguration = ConfUtil.makeHadoopConfiguration(configuration);
    final String outputLocation = hadoopConfiguration.get(Constants.GREMLIN_HADOOP_OUTPUT_LOCATION);
    if (null != outputLocation) {
        // map back to a Hadoop stream for output
        memoryRDD.mapToPair(keyValue -> new Tuple2<>(new ObjectWritable<>(keyValue._1()), new ObjectWritable<>(keyValue._2())))
                .saveAsNewAPIHadoopFile(Constants.getMemoryLocation(outputLocation, memoryKey),
                        ObjectWritable.class,
                        ObjectWritable.class,
                        SequenceFileOutputFormat.class, hadoopConfiguration);
        try {
            return (Iterator) new ObjectWritableIterator(hadoopConfiguration, new Path(Constants.getMemoryLocation(outputLocation, memoryKey)));
        } catch (final IOException e) {
            throw new IllegalStateException(e.getMessage(), e);
        }
    }
    return Collections.emptyIterator();
}
 
Developer ID: PKUSilvester, Project: LiteGraph, Lines of code: 20, Source file: OutputFormatRDD.java

Example 6: SparkMemory

import org.apache.tinkerpop.gremlin.hadoop.structure.io.ObjectWritable; // import the required package/class
public SparkMemory(final VertexProgram<?> vertexProgram, final Set<MapReduce> mapReducers, final JavaSparkContext sparkContext) {
    if (null != vertexProgram) {
        for (final MemoryComputeKey key : vertexProgram.getMemoryComputeKeys()) {
            this.memoryComputeKeys.put(key.getKey(), key);
        }
    }
    for (final MapReduce mapReduce : mapReducers) {
        this.memoryComputeKeys.put(mapReduce.getMemoryKey(), MemoryComputeKey.of(mapReduce.getMemoryKey(), Operator.assign, false, false));
    }
    for (final MemoryComputeKey memoryComputeKey : this.memoryComputeKeys.values()) {
        this.sparkMemory.put(
                memoryComputeKey.getKey(),
                sparkContext.accumulator(ObjectWritable.empty(), memoryComputeKey.getKey(), new MemoryAccumulator<>(memoryComputeKey)));
    }
    this.broadcast = sparkContext.broadcast(Collections.emptyMap());
}
 
Developer ID: PKUSilvester, Project: LiteGraph, Lines of code: 17, Source file: SparkMemory.java

Example 7: add

import org.apache.tinkerpop.gremlin.hadoop.structure.io.ObjectWritable; // import the required package/class
@Override
public void add(final String key, final Object value) {
    checkKeyValue(key, value);
    if (this.inExecute) {
        this.sparkMemory.get(key).add(new ObjectWritable<>(value));
    } else {
        throw Memory.Exceptions.memoryAddOnlyDuringVertexProgramExecute(key);
    }
}
 
Developer ID: graknlabs, Project: grakn, Lines of code: 10, Source file: GraknSparkMemory.java

Example 8: set

import org.apache.tinkerpop.gremlin.hadoop.structure.io.ObjectWritable; // import the required package/class
@Override
public void set(final String key, final Object value) {
    checkKeyValue(key, value);
    if (this.inExecute) {
        throw Memory.Exceptions.memorySetOnlyDuringVertexProgramSetUpAndTerminate(key);
    } else {
        this.sparkMemory.get(key).setValue(new ObjectWritable<>(value));
    }
}
 
Developer ID: graknlabs, Project: grakn, Lines of code: 10, Source file: GraknSparkMemory.java

Example 9: setup

import org.apache.tinkerpop.gremlin.hadoop.structure.io.ObjectWritable; // import the required package/class
@Override
public void setup(final Reducer<ObjectWritable, ObjectWritable, ObjectWritable, ObjectWritable>.Context context) {
    final Configuration apacheConfiguration = ConfUtil.makeApacheConfiguration(context.getConfiguration());
    KryoShimServiceLoader.applyConfiguration(apacheConfiguration);
    this.mapReduce = MapReduce.createMapReduce(HadoopGraph.open(apacheConfiguration), apacheConfiguration);
    this.mapReduce.workerStart(MapReduce.Stage.REDUCE);
}
 
Developer ID: PKUSilvester, Project: LiteGraph, Lines of code: 8, Source file: HadoopReduce.java

Example 10: setup

import org.apache.tinkerpop.gremlin.hadoop.structure.io.ObjectWritable; // import the required package/class
@Override
public void setup(final Reducer<ObjectWritable, ObjectWritable, ObjectWritable, ObjectWritable>.Context context) {
    final Configuration apacheConfiguration = ConfUtil.makeApacheConfiguration(context.getConfiguration());
    KryoShimServiceLoader.applyConfiguration(apacheConfiguration);
    this.mapReduce = MapReduce.createMapReduce(HadoopGraph.open(apacheConfiguration), apacheConfiguration);
    this.mapReduce.workerStart(MapReduce.Stage.COMBINE);
}
 
Developer ID: PKUSilvester, Project: LiteGraph, Lines of code: 8, Source file: HadoopCombine.java

Example 11: setup

import org.apache.tinkerpop.gremlin.hadoop.structure.io.ObjectWritable; // import the required package/class
@Override
public void setup(final Mapper<NullWritable, VertexWritable, ObjectWritable, ObjectWritable>.Context context) {
    final Configuration apacheConfiguration = ConfUtil.makeApacheConfiguration(context.getConfiguration());
    KryoShimServiceLoader.applyConfiguration(apacheConfiguration);
    this.mapReduce = MapReduce.createMapReduce(HadoopGraph.open(apacheConfiguration), apacheConfiguration);
    this.mapReduce.workerStart(MapReduce.Stage.MAP);
}
 
Developer ID: PKUSilvester, Project: LiteGraph, Lines of code: 8, Source file: HadoopMap.java

Example 12: compute

import org.apache.tinkerpop.gremlin.hadoop.structure.io.ObjectWritable; // import the required package/class
@Override
public void compute(final Vertex<ObjectWritable, VertexWritable, NullWritable> vertex, final Iterable<ObjectWritable> messages) throws IOException {
    final GiraphWorkerContext workerContext = this.getWorkerContext();
    final VertexProgram<?> vertexProgram = workerContext.getVertexProgramPool().take();
    vertexProgram.execute(ComputerGraph.vertexProgram(vertex.getValue().get(), vertexProgram), workerContext.getMessenger((GiraphVertex) vertex, this, messages.iterator()), workerContext.getMemory());
    workerContext.getVertexProgramPool().offer(vertexProgram);
}
 
Developer ID: PKUSilvester, Project: LiteGraph, Lines of code: 8, Source file: GiraphComputation.java

Example 13: aggregate

import org.apache.tinkerpop.gremlin.hadoop.structure.io.ObjectWritable; // import the required package/class
@Override
public void aggregate(final ObjectWritable<Pair<BinaryOperator, Object>> object) {
    if (null == object)
        return;
    else if (this.currentObject.isEmpty())
        this.currentObject = object;
    else if (!object.isEmpty())
        this.currentObject.set(new Pair<>(object.get().getValue0(), object.get().getValue0().apply(this.currentObject.get().getValue1(), object.get().getValue1())));
}
 
Developer ID: PKUSilvester, Project: LiteGraph, Lines of code: 10, Source file: MemoryAggregator.java
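
The aggregator above folds pairs of (reducing operator, current value). The following is a small, hypothetical sketch of that same combine step in isolation, assuming javatuples is on the classpath; the class name PairAggregateDemo, the sum operator, and the sample values are chosen purely for illustration.

import java.util.function.BinaryOperator;
import org.javatuples.Pair;
import org.apache.tinkerpop.gremlin.hadoop.structure.io.ObjectWritable;

public class PairAggregateDemo {
    public static void main(final String[] args) {
        // illustrative reducing operator; the aggregator would receive it via the memory key
        final BinaryOperator<Object> sum = (a, b) -> (Long) a + (Long) b;
        final ObjectWritable<Pair<BinaryOperator, Object>> current = new ObjectWritable<>(new Pair<>(sum, 3L));
        final ObjectWritable<Pair<BinaryOperator, Object>> incoming = new ObjectWritable<>(new Pair<>(sum, 4L));
        // the same combine step as the non-empty branch of aggregate() above:
        // keep the operator and apply it to the two accumulated values
        final Pair<BinaryOperator, Object> combined = new Pair<>(
                incoming.get().getValue0(),
                incoming.get().getValue0().apply(current.get().getValue1(), incoming.get().getValue1()));
        System.out.println(combined.getValue1()); // 7
    }
}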

Example 14: exists

import org.apache.tinkerpop.gremlin.hadoop.structure.io.ObjectWritable; // import the required package/class
@Override
public boolean exists(final String key) {
    if (this.inExecute && this.memoryComputeKeys.containsKey(key) && !this.memoryComputeKeys.get(key).isBroadcast())
        return false;
    final ObjectWritable value = this.inExecute ? this.worker.getAggregatedValue(key) : this.getAggregatedValue(key);
    return null != value && !value.isEmpty();
}
 
Developer ID: PKUSilvester, Project: LiteGraph, Lines of code: 8, Source file: GiraphMemory.java

Example 15: get

import org.apache.tinkerpop.gremlin.hadoop.structure.io.ObjectWritable; // import the required package/class
@Override
public <R> R get(final String key) throws IllegalArgumentException {
    if (!this.memoryComputeKeys.containsKey(key))
        throw Memory.Exceptions.memoryDoesNotExist(key);
    if (this.inExecute && !this.memoryComputeKeys.get(key).isBroadcast())
        throw Memory.Exceptions.memoryDoesNotExist(key);
    final ObjectWritable<Pair<BinaryOperator, Object>> value = this.inExecute ?
            this.worker.<ObjectWritable<Pair<BinaryOperator, Object>>>getAggregatedValue(key) :
            this.<ObjectWritable<Pair<BinaryOperator, Object>>>getAggregatedValue(key);
    if (null == value || value.isEmpty())
        throw Memory.Exceptions.memoryDoesNotExist(key);
    else
        return (R) value.get().getValue1();
}
 
Developer ID: PKUSilvester, Project: LiteGraph, Lines of code: 15, Source file: GiraphMemory.java


Note: the org.apache.tinkerpop.gremlin.hadoop.structure.io.ObjectWritable examples in this article were compiled by 纯净天空 from open-source code and documentation platforms such as GitHub and MSDocs. The snippets are drawn from open-source projects contributed by their respective developers; copyright in the source code remains with the original authors, and distribution and use are subject to each project's license. Do not reproduce without permission.