This page collects typical usage examples of the Java class org.apache.tinkerpop.gremlin.process.traversal.step.map.ProgramTest. If you are wondering what the ProgramTest class is for, or are looking for concrete examples of how to use it, the curated code samples below may help.
The ProgramTest class belongs to the org.apache.tinkerpop.gremlin.process.traversal.step.map package. Two code examples are shown below, sorted by popularity by default.
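For orientation, here is a minimal sketch (not taken from this page) of where ProgramTest usually shows up: it ships with TinkerPop's gremlin-test process suite, so a graph provider such as the Spark one in the examples below encounters it through a JUnit suite runner. The class name SparkHadoopGraphProcessComputerTest and the assumption that the provider lives in the same package are illustrative.

import org.apache.tinkerpop.gremlin.GraphProviderClass;
import org.apache.tinkerpop.gremlin.hadoop.structure.HadoopGraph;
import org.apache.tinkerpop.gremlin.process.ProcessComputerSuite;
import org.junit.runner.RunWith;

// Runs the full process-computer test suite (which includes ProgramTest.Traversals)
// against HadoopGraph, using the provider whose getBaseConfiguration is shown below.
@RunWith(ProcessComputerSuite.class)
@GraphProviderClass(provider = SparkHadoopGraphProvider.class, graph = HadoopGraph.class)
public class SparkHadoopGraphProcessComputerTest {
}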
Example 1: getBaseConfiguration
import org.apache.tinkerpop.gremlin.process.traversal.step.map.ProgramTest; // import the required package/class
@Override
public Map<String, Object> getBaseConfiguration(final String graphName, final Class<?> test, final String testMethodName, final LoadGraphWith.GraphData loadGraphWith) {
    final Map<String, Object> config = super.getBaseConfiguration(graphName, test, testMethodName, loadGraphWith);
    config.put(Constants.GREMLIN_SPARK_PERSIST_CONTEXT, true); // this makes the test suite go really fast
    // toy graph inputRDD does not have corresponding outputRDD so where jobs chain, it fails (failing makes sense)
    if (null != loadGraphWith &&
            !test.equals(ProgramTest.Traversals.class) &&
            !test.equals(GroovyProgramTest.Traversals.class) &&
            !test.equals(PageRankTest.Traversals.class) &&
            !test.equals(GroovyPageRankTest.Traversals.class) &&
            !test.equals(PeerPressureTest.Traversals.class) &&
            !test.equals(GroovyPeerPressureTest.Traversals.class) &&
            !test.equals(FileSystemStorageCheck.class) &&
            !testMethodName.equals("shouldSupportJobChaining") && // GraphComputerTest.shouldSupportJobChaining
            RANDOM.nextBoolean()) {
        config.put(RANDOM.nextBoolean() ? Constants.GREMLIN_SPARK_GRAPH_INPUT_RDD : Constants.GREMLIN_HADOOP_GRAPH_READER, ToyGraphInputRDD.class.getCanonicalName());
    }
    // tests persisted RDDs
    if (test.equals(SparkContextStorageCheck.class)) {
        config.put(RANDOM.nextBoolean() ? Constants.GREMLIN_SPARK_GRAPH_INPUT_RDD : Constants.GREMLIN_HADOOP_GRAPH_READER, ToyGraphInputRDD.class.getCanonicalName());
        config.put(RANDOM.nextBoolean() ? Constants.GREMLIN_SPARK_GRAPH_OUTPUT_RDD : Constants.GREMLIN_HADOOP_GRAPH_WRITER, PersistedOutputRDD.class.getCanonicalName());
    }
    // sugar plugin causes meta-method issues with a persisted context
    if (test.equals(HadoopGremlinPluginCheck.class)) {
        Spark.close();
        SugarTestHelper.clearRegistry(this);
    }
    config.put(Constants.GREMLIN_HADOOP_DEFAULT_GRAPH_COMPUTER, SparkGraphComputer.class.getCanonicalName());
    config.put("spark.master", "local[4]");
    config.put("spark.serializer", GryoSerializer.class.getCanonicalName());
    config.put("spark.kryo.registrationRequired", true);
    return config;
}
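A hedged sketch of how a configuration map like the one above is typically consumed: the graph name, the test method name, and the provider instance are illustrative, and it assumes super.getBaseConfiguration() has already set the gremlin.graph key so GraphFactory knows to open a HadoopGraph.

import org.apache.tinkerpop.gremlin.LoadGraphWith;
import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
import org.apache.tinkerpop.gremlin.process.traversal.step.map.ProgramTest;
import org.apache.tinkerpop.gremlin.spark.process.computer.SparkGraphComputer;
import org.apache.tinkerpop.gremlin.structure.Graph;
import org.apache.tinkerpop.gremlin.structure.util.GraphFactory;
import java.util.Map;

// Build the configuration just as the provider above does, then open a graph from it and
// attach SparkGraphComputer for OLAP traversals (roughly what the test suite does internally).
// "provider" is a hypothetical instance of the provider class; "g_V_programTest" is a
// placeholder test method name.
final Map<String, Object> config = provider.getBaseConfiguration(
        "test-graph", ProgramTest.Traversals.class, "g_V_programTest", LoadGraphWith.GraphData.MODERN);
final Graph graph = GraphFactory.open(config);
final GraphTraversalSource g = graph.traversal().withComputer(SparkGraphComputer.class);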
Example 2: getBaseConfiguration
import org.apache.tinkerpop.gremlin.process.traversal.step.map.ProgramTest; // import the required package/class
@Override
public Map<String, Object> getBaseConfiguration(final String graphName, final Class<?> test, final String testMethodName, final LoadGraphWith.GraphData loadGraphWith) {
    if (this.getClass().equals(SparkHadoopGraphProvider.class) && !SparkHadoopGraphProvider.class.getCanonicalName().equals(System.getProperty(PREVIOUS_SPARK_PROVIDER, null))) {
        Spark.close();
        HadoopPools.close();
        KryoShimServiceLoader.close();
        System.setProperty(PREVIOUS_SPARK_PROVIDER, SparkHadoopGraphProvider.class.getCanonicalName());
    }
    final Map<String, Object> config = super.getBaseConfiguration(graphName, test, testMethodName, loadGraphWith);
    config.put(Constants.GREMLIN_SPARK_PERSIST_CONTEXT, true); // this makes the test suite go really fast
    // toy graph inputRDD does not have corresponding outputRDD so where jobs chain, it fails (failing makes sense)
    if (null != loadGraphWith &&
            !test.equals(ProgramTest.Traversals.class) &&
            !test.equals(PageRankTest.Traversals.class) &&
            !test.equals(PeerPressureTest.Traversals.class) &&
            !test.equals(FileSystemStorageCheck.class) &&
            !testMethodName.equals("shouldSupportJobChaining") && // GraphComputerTest.shouldSupportJobChaining
            RANDOM.nextBoolean()) {
        config.put(Constants.GREMLIN_HADOOP_GRAPH_READER, ToyGraphInputRDD.class.getCanonicalName());
    }
    // tests persisted RDDs
    if (test.equals(SparkContextStorageCheck.class)) {
        config.put(Constants.GREMLIN_HADOOP_GRAPH_READER, ToyGraphInputRDD.class.getCanonicalName());
        config.put(Constants.GREMLIN_HADOOP_GRAPH_WRITER, PersistedOutputRDD.class.getCanonicalName());
    }
    config.put(Constants.GREMLIN_HADOOP_DEFAULT_GRAPH_COMPUTER, SparkGraphComputer.class.getCanonicalName());
    config.put(SparkLauncher.SPARK_MASTER, "local[4]");
    config.put(Constants.SPARK_SERIALIZER, KryoSerializer.class.getCanonicalName());
    config.put(Constants.SPARK_KRYO_REGISTRATOR, GryoRegistrator.class.getCanonicalName());
    config.put(Constants.SPARK_KRYO_REGISTRATION_REQUIRED, true);
    return config;
}
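Compared with Example 1, this variant swaps Spark's GryoSerializer for the stock KryoSerializer paired with TinkerPop's GryoRegistrator, references the Spark keys through SparkLauncher/Constants rather than raw strings, and resets Spark state when a different provider ran previously. The ProgramTest.Traversals exclusion above exists because the program() step chains OLAP jobs, which the toy input RDD cannot feed. Below is a hedged sketch of that kind of traversal, following the documented program() step pattern; the graph variable is assumed to be a Hadoop graph opened from such a configuration.

import org.apache.tinkerpop.gremlin.process.computer.ranking.pagerank.PageRankVertexProgram;
import org.apache.tinkerpop.gremlin.process.traversal.dsl.graph.GraphTraversalSource;
import org.apache.tinkerpop.gremlin.spark.process.computer.SparkGraphComputer;

// Submit PageRank through the program() step (the step ProgramTest covers), then keep
// traversing: the follow-on steps run as a second, chained Spark job over the program's output.
// PAGE_RANK is the default property key PageRankVertexProgram writes.
final GraphTraversalSource g = graph.traversal().withComputer(SparkGraphComputer.class);
g.V().program(PageRankVertexProgram.build().create(graph))
        .values(PageRankVertexProgram.PAGE_RANK)
        .forEachRemaining(System.out::println);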