本文整理汇总了Java中org.apache.solr.cloud.AbstractZkTestCase类的典型用法代码示例。如果您正苦于以下问题:Java AbstractZkTestCase类的具体用法?Java AbstractZkTestCase怎么用?Java AbstractZkTestCase使用的例子?那么, 这里精选的类代码示例或许可以为您提供帮助。
AbstractZkTestCase类属于org.apache.solr.cloud包,在下文中一共展示了AbstractZkTestCase类的7个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: dihZk_beforeClass
import org.apache.solr.cloud.AbstractZkTestCase; //导入依赖的package包/类
@BeforeClass
public static void dihZk_beforeClass() throws Exception {
  // Stand up an embedded ZooKeeper test server backed by a fresh temp dir.
  zkDir = createTempDir("zkData").getAbsolutePath();
  zkServer = new ZkTestServer(zkDir);
  zkServer.run();

  // Point SolrCloud machinery at the test ZK instance; jetty.port is a
  // placeholder value ("0000") since no real Jetty is started here.
  System.setProperty("zkHost", zkServer.getZkAddress());
  System.setProperty("solrcloud.skip.autorecovery", "true");
  System.setProperty("jetty.port", "0000");

  // Upload the DIH solrconfig/schema into ZK, then boot a core container
  // from the same on-disk Solr home.
  AbstractZkTestCase.buildZooKeeper(zkServer.getZkHost(), zkServer.getZkAddress(), getFile("dih/solr"),
      "dataimport-solrconfig.xml", "dataimport-schema.xml");
  cc = createDefaultCoreContainer(getFile("dih/solr").getAbsolutePath());
}
示例2: setUp
import org.apache.solr.cloud.AbstractZkTestCase; //导入依赖的package包/类
@Before
public void setUp() throws Exception {
  super.setUp();

  // Populate the per-test Solr home with the mini-MR instance config and
  // make it visible to the ZK test infrastructure.
  AbstractZkTestCase.SOLRHOME = solrHomeDirectory;
  FileUtils.copyDirectory(MINIMR_INSTANCE_DIR, solrHomeDirectory);

  // Fresh tool state for each test.
  conf = new Configuration();
  parser = new MapReduceIndexerTool.MyArgumentParser();
  opts = new MapReduceIndexerTool.Options();

  // Capture stdout/stderr so tests can inspect the tool's console output.
  // Originals are saved first — presumably restored in tearDown; verify.
  oldSystemOut = System.out;
  oldSystemErr = System.err;
  bout = new ByteArrayOutputStream();
  berr = new ByteArrayOutputStream();
  System.setOut(new PrintStream(bout, true, "UTF-8"));
  System.setErr(new PrintStream(berr, true, "UTF-8"));
}
示例3: dihZk_beforeClass
import org.apache.solr.cloud.AbstractZkTestCase; //导入依赖的package包/类
@BeforeClass
public static void dihZk_beforeClass() throws Exception {
  createTempDir();

  // ZooKeeper data lives under the shared test dataDir.
  zkDir = dataDir.getAbsolutePath() + File.separator
      + "zookeeper/server1/data";
  zkServer = new ZkTestServer(zkDir);
  zkServer.run();

  // Point SolrCloud machinery at the test ZK instance; jetty.port is a
  // placeholder value ("0000") since no real Jetty is started here.
  System.setProperty("zkHost", zkServer.getZkAddress());
  System.setProperty("solrcloud.skip.autorecovery", "true");
  System.setProperty("jetty.port", "0000");

  // Upload the DIH solrconfig/schema into ZK, then initialize a core
  // against the same configuration.
  AbstractZkTestCase.buildZooKeeper(zkServer.getZkHost(), zkServer.getZkAddress(), getFile("dih/solr"),
      "dataimport-solrconfig.xml", "dataimport-schema.xml");
  initCore("dataimport-solrconfig.xml", "dataimport-schema.xml", getFile("dih/solr").getAbsolutePath());
}
示例4: setupClass
import org.apache.solr.cloud.AbstractZkTestCase; //导入依赖的package包/类
@BeforeClass
public static void setupClass() throws Exception {
  // Turkish locale breaks this test (dotless-i case folding); skip there.
  String turkishLanguage = new Locale("tr").getLanguage();
  assumeFalse("This test fails on UNIX with Turkish default locale (https://issues.apache.org/jira/browse/SOLR-6387)",
      turkishLanguage.equals(Locale.getDefault().getLanguage()));

  // Build a throwaway Solr home from the canned instance directory and
  // expose it to the ZK test infrastructure.
  solrHomeDirectory = createTempDir();
  AbstractZkTestCase.SOLRHOME = solrHomeDirectory;
  FileUtils.copyDirectory(SOLR_INSTANCE_DIR, solrHomeDirectory);
}
示例5: beforeSuperClass
import org.apache.solr.cloud.AbstractZkTestCase; //导入依赖的package包/类
@BeforeClass
public static void beforeSuperClass() {
  // Point the ZK test infrastructure at this suite's Solr home before the
  // superclass bootstraps its cluster.
  AbstractZkTestCase.SOLRHOME = new File(SOLR_HOME());
}
示例6: setupClass
import org.apache.solr.cloud.AbstractZkTestCase; //导入依赖的package包/类
@BeforeClass
public static void setupClass() throws Exception {
  // One-time harness bootstrap: prepares a temp Solr home with the mini-MR
  // config, generates the morphline, then starts embedded MiniDFS and
  // MiniMR clusters. Statement order matters — directories and system
  // properties must exist before the clusters come up.
  solrHomeDirectory = createTempDir();
  // Skip in environments where the embedded Hadoop clusters are known not
  // to work (security manager, Windows native libs, Saxon on Java 8 / J9).
  assumeTrue(
      "Currently this test can only be run without the lucene test security policy in place",
      System.getProperty("java.security.manager", "").equals(""));
  assumeFalse("HDFS tests were disabled by -Dtests.disableHdfs",
      Boolean.parseBoolean(System.getProperty("tests.disableHdfs", "false")));
  assumeFalse("FIXME: This test does not work with Windows because of native library requirements", Constants.WINDOWS);
  assumeFalse("FIXME: This test fails under Java 8 due to the Saxon dependency - see SOLR-1301", Constants.JRE_IS_MINIMUM_JAVA8);
  assumeFalse("FIXME: This test fails under J9 due to the Saxon dependency - see SOLR-1301", System.getProperty("java.vm.info", "<?>").contains("IBM J9"));
  // Publish the Solr home to the ZK test infrastructure and fill it with
  // the mini-MR configuration.
  AbstractZkTestCase.SOLRHOME = solrHomeDirectory;
  FileUtils.copyDirectory(MINIMR_CONF_DIR, solrHomeDirectory);
  // Stage morphline + custom mimetype config in a second temp dir.
  File dataDir = createTempDir();
  tempDir = dataDir.getAbsolutePath();
  new File(tempDir).mkdirs();
  FileUtils.copyFile(new File(RESOURCES_DIR + "/custom-mimetypes.xml"), new File(tempDir + "/custom-mimetypes.xml"));
  AbstractSolrMorphlineTestBase.setupMorphline(tempDir, "test-morphlines/solrCellDocumentTypes", true);
  System.setProperty("hadoop.log.dir", new File(solrHomeDirectory, "logs").getAbsolutePath());
  int taskTrackers = 1;
  int dataNodes = 2;
  // String proxyUser = System.getProperty("user.name");
  // String proxyGroup = "g";
  // StringBuilder sb = new StringBuilder();
  // sb.append("127.0.0.1,localhost");
  // for (InetAddress i : InetAddress.getAllByName(InetAddress.getLocalHost().getHostName())) {
  // sb.append(",").append(i.getCanonicalHostName());
  // }
  // Route all Hadoop scratch/build/cache directories under the temp dir.
  new File(dataDir, "nm-local-dirs").mkdirs();
  System.setProperty("solr.hdfs.blockcache.enabled", "false");
  System.setProperty("test.build.dir", dataDir + File.separator + "hdfs" + File.separator + "test-build-dir");
  System.setProperty("test.build.data", dataDir + File.separator + "hdfs" + File.separator + "build");
  System.setProperty("test.cache.data", dataDir + File.separator + "hdfs" + File.separator + "cache");
  // Minimal, unsecured Hadoop configuration for the embedded clusters.
  JobConf conf = new JobConf();
  conf.set("dfs.block.access.token.enable", "false");
  conf.set("dfs.permissions", "true");
  conf.set("hadoop.security.authentication", "simple");
  conf.set(YarnConfiguration.NM_LOCAL_DIRS, dataDir.getPath() + File.separator + "nm-local-dirs");
  // NOTE(review): DEFAULT_NM_LOG_DIRS looks like the *default value*
  // constant, not the property key — NM_LOG_DIRS may have been intended
  // here; confirm against the Hadoop YarnConfiguration API.
  conf.set(YarnConfiguration.DEFAULT_NM_LOG_DIRS, dataDir + File.separator + "nm-logs");
  conf.set("testWorkDir", dataDir.getPath() + File.separator + "testWorkDir");
  // Start HDFS and create the standard world-writable staging paths.
  dfsCluster = new MiniDFSCluster(conf, dataNodes, true, null);
  FileSystem fileSystem = dfsCluster.getFileSystem();
  fileSystem.mkdirs(new Path("/tmp"));
  fileSystem.mkdirs(new Path("/user"));
  fileSystem.mkdirs(new Path("/hadoop/mapred/system"));
  fileSystem.setPermission(new Path("/tmp"), FsPermission.valueOf("-rwxrwxrwx"));
  fileSystem.setPermission(new Path("/user"), FsPermission.valueOf("-rwxrwxrwx"));
  fileSystem.setPermission(new Path("/hadoop/mapred/system"), FsPermission.valueOf("-rwx------"));
  // Start MapReduce on top of the just-started HDFS.
  String nnURI = fileSystem.getUri().toString();
  int numDirs = 1;
  String[] racks = null;
  String[] hosts = null;
  mrCluster = new MiniMRCluster(0, 0, taskTrackers, nnURI, numDirs, racks, hosts, null, conf);
  ProxyUsers.refreshSuperUserGroupsConfiguration(conf);
}
示例7: setupClass
import org.apache.solr.cloud.AbstractZkTestCase; //导入依赖的package包/类
@BeforeClass
public static void setupClass() throws Exception {
  // One-time harness bootstrap for the go-live mini-MR test: randomizes the
  // HDFS block-cache settings, prepares a temp Solr home and morphline,
  // then starts embedded MiniDFS and MiniMR clusters. Statement order
  // matters — directories and properties must exist before cluster startup.
  System.setProperty("solr.hdfs.blockcache.global", Boolean.toString(LuceneTestCase.random().nextBoolean()));
  System.setProperty("solr.hdfs.blockcache.enabled", Boolean.toString(LuceneTestCase.random().nextBoolean()));
  System.setProperty("solr.hdfs.blockcache.blocksperbank", "2048");
  solrHomeDirectory = createTempDir();
  // Skip in environments where the embedded Hadoop clusters are known not
  // to work (security manager, Windows native libs, Saxon on Java 8 / J9).
  assumeTrue(
      "Currently this test can only be run without the lucene test security policy in place",
      System.getProperty("java.security.manager", "").equals(""));
  assumeFalse("HDFS tests were disabled by -Dtests.disableHdfs",
      Boolean.parseBoolean(System.getProperty("tests.disableHdfs", "false")));
  assumeFalse("FIXME: This test does not work with Windows because of native library requirements", Constants.WINDOWS);
  assumeFalse("FIXME: This test fails under Java 8 due to the Saxon dependency - see SOLR-1301", Constants.JRE_IS_MINIMUM_JAVA8);
  assumeFalse("FIXME: This test fails under J9 due to the Saxon dependency - see SOLR-1301", System.getProperty("java.vm.info", "<?>").contains("IBM J9"));
  // Publish the Solr home to the ZK test infrastructure and fill it with
  // the mini-MR instance configuration.
  AbstractZkTestCase.SOLRHOME = solrHomeDirectory;
  FileUtils.copyDirectory(MINIMR_INSTANCE_DIR, AbstractZkTestCase.SOLRHOME);
  // Stage morphline + custom mimetype config in a second temp dir.
  tempDir = createTempDir().getAbsolutePath();
  new File(tempDir).mkdirs();
  FileUtils.copyFile(new File(RESOURCES_DIR + "/custom-mimetypes.xml"), new File(tempDir + "/custom-mimetypes.xml"));
  AbstractSolrMorphlineTestBase.setupMorphline(tempDir, "test-morphlines/solrCellDocumentTypes", true);
  System.setProperty("hadoop.log.dir", new File(tempDir, "logs").getAbsolutePath());
  int taskTrackers = 2;
  int dataNodes = 2;
  // Minimal, unsecured Hadoop configuration for the embedded clusters.
  JobConf conf = new JobConf();
  conf.set("dfs.block.access.token.enable", "false");
  conf.set("dfs.permissions", "true");
  conf.set("hadoop.security.authentication", "simple");
  conf.set(YarnConfiguration.NM_LOCAL_DIRS, tempDir + File.separator + "nm-local-dirs");
  // NOTE(review): DEFAULT_NM_LOG_DIRS looks like the *default value*
  // constant, not the property key — NM_LOG_DIRS may have been intended
  // here; confirm against the Hadoop YarnConfiguration API.
  conf.set(YarnConfiguration.DEFAULT_NM_LOG_DIRS, tempDir + File.separator + "nm-logs");
  new File(tempDir + File.separator + "nm-local-dirs").mkdirs();
  // Route HDFS scratch/build/cache directories under the temp dir.
  System.setProperty("test.build.dir", tempDir + File.separator + "hdfs" + File.separator + "test-build-dir");
  System.setProperty("test.build.data", tempDir + File.separator + "hdfs" + File.separator + "build");
  System.setProperty("test.cache.data", tempDir + File.separator + "hdfs" + File.separator + "cache");
  // Start HDFS and create the standard world-writable staging paths.
  dfsCluster = new MiniDFSCluster(conf, dataNodes, true, null);
  FileSystem fileSystem = dfsCluster.getFileSystem();
  fileSystem.mkdirs(new Path("/tmp"));
  fileSystem.mkdirs(new Path("/user"));
  fileSystem.mkdirs(new Path("/hadoop/mapred/system"));
  fileSystem.setPermission(new Path("/tmp"),
      FsPermission.valueOf("-rwxrwxrwx"));
  fileSystem.setPermission(new Path("/user"),
      FsPermission.valueOf("-rwxrwxrwx"));
  fileSystem.setPermission(new Path("/hadoop/mapred/system"),
      FsPermission.valueOf("-rwx------"));
  // Start a single-node MR client cluster on top of the just-started HDFS.
  mrCluster = MiniMRClientClusterFactory.create(MorphlineGoLiveMiniMRTest.class, 1, conf, new File(tempDir, "mrCluster"));
  //new MiniMRCluster(0, 0, taskTrackers, nnURI, numDirs, racks,
  //hosts, null, conf);
  ProxyUsers.refreshSuperUserGroupsConfiguration(conf);
}