本文整理汇总了Java中org.elasticsearch.hadoop.util.StringUtils.hasText方法的典型用法代码示例。如果您正苦于以下问题：Java StringUtils.hasText方法的具体用法？Java StringUtils.hasText怎么用？Java StringUtils.hasText使用的例子？那么恭喜您，这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类org.elasticsearch.hadoop.util.StringUtils的用法示例。
在下文中一共展示了StringUtils.hasText方法的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: createPig
import org.elasticsearch.hadoop.util.StringUtils; //导入方法依赖的package包/类
/**
 * Creates a {@link PigServer}, running in Map/Reduce mode when the "pig"
 * property names a remote host (anything other than "local"), or in local
 * mode otherwise.
 *
 * @return a configured PigServer
 * @throws ExecException if the Pig server cannot be created
 */
protected PigServer createPig() throws ExecException {
    HdpBootstrap.hackHadoopStagingOnWin();

    Properties properties = HdpBootstrap.asProperties(QueryTestParams.provisionQueries(HdpBootstrap.hadoopConfig()));
    String pigHost = properties.getProperty("pig");
    // remote Pig instance
    // FIX: the original compared against 'pig', an undefined/wrong variable;
    // the value read above is 'pigHost'
    if (StringUtils.hasText(pigHost) && !"local".equals(pigHost)) {
        LogFactory.getLog(PigWrapper.class).info("Executing Pig in Map/Reduce mode");
        return new PigServer(ExecType.MAPREDUCE, properties);
    }

    // use local instance
    LogFactory.getLog(PigWrapper.class).info("Executing Pig in local mode");
    properties.put("mapred.job.tracker", "local");
    return new PigServer(ExecType.LOCAL, properties);
}
示例2: prepareToWrite
import org.elasticsearch.hadoop.util.StringUtils; //导入方法依赖的package包/类
@SuppressWarnings({ "unchecked", "rawtypes" })
@Override
public void prepareToWrite(RecordWriter writer) throws IOException {
    this.writer = writer;

    // Restore the ResourceSchema that was stashed (Base64-encoded) in the UDF
    // properties during the front-end phase; fall back to an empty schema.
    String serializedSchema = getUDFProperties().getProperty(ResourceSchema.class.getName());
    if (StringUtils.hasText(serializedSchema)) {
        this.schema = IOUtils.deserializeFromBase64(serializedSchema);
    }
    else {
        log.warn("No resource schema found; using an empty one....");
        this.schema = new ResourceSchema();
    }

    this.pigTuple = new PigTuple(schema);
}
示例3: tableProps
import org.elasticsearch.hadoop.util.StringUtils; //导入方法依赖的package包/类
/**
 * Builds the Hive {@code STORED BY ... TBLPROPERTIES(...)} clause for the
 * ES storage handler, wiring in the resource, optional query, extra
 * key/value pairs and — for remote clusters — the configured host/port.
 */
public static String tableProps(String resource, String query, String... params) {
    StringBuilder builder = new StringBuilder("STORED BY 'org.elasticsearch.hadoop.hive.EsStorageHandler' ");
    builder.append("TBLPROPERTIES('es.resource'='").append(resource).append("'");

    if (StringUtils.hasText(query)) {
        builder.append(",'es.query'='").append(query).append("'");
    }

    // append any caller-supplied property pairs verbatim
    if (params != null && params.length > 0) {
        for (String param : params) {
            builder.append(",");
            builder.append(param);
        }
    }

    // for a non-local cluster, propagate the configured host (if any) and port
    if (!isLocal) {
        String host = hadoopConfig.get("es.host");
        if (StringUtils.hasText(host)) {
            builder.append(",'es.host'='").append(host).append("'");
        }
        String port = hadoopConfig.get("es.port");
        builder.append(",'es.port'='").append(port).append("'");
    }

    builder.append(")");
    return builder.toString();
}
示例4: loadKeyManagers
import org.elasticsearch.hadoop.util.StringUtils; //导入方法依赖的package包/类
/**
 * Loads the key managers backing client-side SSL authentication.
 * Returns {@code null} when no key store location is configured.
 */
private KeyManager[] loadKeyManagers() throws GeneralSecurityException, IOException {
    // no key store configured -> no client key material
    if (!StringUtils.hasText(keyStoreLocation)) {
        return null;
    }

    char[] password = (StringUtils.hasText(keyStorePass) ? keyStorePass.trim().toCharArray() : null);
    KeyStore store = loadKeyStore(keyStoreLocation, password);
    KeyManagerFactory factory = KeyManagerFactory.getInstance(KeyManagerFactory.getDefaultAlgorithm());
    factory.init(store, password);
    return factory.getKeyManagers();
}
示例5: before
import org.elasticsearch.hadoop.util.StringUtils; //导入方法依赖的package包/类
@Override
protected void before() throws Throwable {
    // Explicit kill-switch: skip the embedded server entirely when asked to.
    if (Booleans.parseBoolean(HdpBootstrap.hadoopConfig().get("test.disable.local.es"))) {
        disabled = true;
        LogFactory.getLog(getClass()).warn(
                "local ES disable; assuming an external instance and bailing out...");
        return;
    }

    String host = HdpBootstrap.hadoopConfig().get(ConfigurationOptions.ES_NODES);
    String port = HdpBootstrap.hadoopConfig().get(ConfigurationOptions.ES_PORT);
    // NOTE(review): 'port' is read but never used in this method.
    // An explicit es.nodes setting means an external cluster is targeted.
    if (StringUtils.hasText(host)) {
        disabled = true;
        LogFactory.getLog(getClass()).warn("es.nodes/host specified; assuming an external instance and bailing out...");
        return;
    }

    // delete data path to start fresh
    TestUtils.delete(new File(ES_DATA_PATH));

    // Lazily start the (static) master node once; later invocations reuse it.
    if (master == null) {
        System.out.println("Starting Elasticsearch Master...");
        master = new EsEmbeddedServer(CLUSTER_NAME, ES_HOME_PATH, ES_DATA_PATH, DATA_PORTS, TRANSPORT_PORTS, USE_SLAVE);
        master.start();
        System.out.println("Started Elasticsearch Master on port " + master.getIpAndPort().port);

        // expose the actual bound port so test settings can discover it
        System.setProperty(TestUtils.ES_LOCAL_PORT, String.valueOf(master.getIpAndPort().port));

        // force initialization of test properties
        new TestSettings();
    }

    // Optionally bring up a second ("slave") node in the same cluster.
    if (USE_SLAVE && slave == null) {
        System.out.println("Starting Elasticsearch Slave...");
        slave = new EsEmbeddedServer(CLUSTER_NAME, ES_HOME_PATH, ES_DATA_PATH, DATA_PORTS_SLAVE, TRANSPORT_PORTS_SLAVE, USE_SLAVE);
        slave.start();
    }
}
示例6: getProperty
import org.elasticsearch.hadoop.util.StringUtils; //导入方法依赖的package包/类
/**
 * Looks up a property, falling back to {@code defaultValue} when the
 * property is missing or blank.
 */
protected String getProperty(String name, String defaultValue) {
    String value = getProperty(name);
    return (StringUtils.hasText(value) ? value : defaultValue);
}
示例7: format
import org.elasticsearch.hadoop.util.StringUtils; //导入方法依赖的package包/类
@Override
public String format(String value) {
    // blank input -> nothing to format
    if (!StringUtils.hasText(value)) {
        return null;
    }

    // parse, then render through the configured date format
    Calendar parsed = DateUtils.parseDate(value);
    dateFormat.setCalendar(parsed);
    return dateFormat.format(parsed.getTime());
}
示例8: UpdateBulkFactory
import org.elasticsearch.hadoop.util.StringUtils; //导入方法依赖的package包/类
/**
 * Creates a factory for update/upsert bulk entries.
 *
 * @param settings      job settings providing the update script, language,
 *                      params and retry-on-conflict count
 * @param upsert        whether entries are emitted as upserts
 * @param metaExtractor extractor for per-document metadata
 */
public UpdateBulkFactory(Settings settings, boolean upsert, MetadataExtractor metaExtractor) {
    super(settings, metaExtractor);

    UPSERT = upsert;
    RETRY_ON_FAILURE = settings.getUpdateRetryOnConflict();
    // FIX: dropped the redundant trailing '+ ""' from the original concatenation
    RETRY_HEADER = "\"_retry_on_conflict\":" + RETRY_ON_FAILURE;
    HAS_SCRIPT = StringUtils.hasText(settings.getUpdateScript());
    HAS_LANG = StringUtils.hasText(settings.getUpdateScriptLang());
    HAS_PARAMS = StringUtils.hasText(settings.getUpdateScriptParams());
    // NOTE(review): SCRIPT_LANG/SCRIPT are built unconditionally even when the
    // matching HAS_* flag is false — presumably only used when the flag is set;
    // confirm at the usage sites.
    SCRIPT_LANG = "\"lang\":\"" + settings.getUpdateScriptLang() + "\",";
    SCRIPT = "\"script\":\"" + settings.getUpdateScript() + "\"";
}
示例9: seek
import org.elasticsearch.hadoop.util.StringUtils; //导入方法依赖的package包/类
/**
 * Seeks the field with the given name in the stream and positions (and returns) the parser
 * at the next available token (value or not).
 * Returns {@code null} if the path is blank or the token is not found.
 *
 * @param parser parser to advance
 * @param path   dot-separated field path to look for (e.g. {@code "a.b.c"})
 * @return token associated with the given path or {@code null} if not found
 */
public static Token seek(Parser parser, String path) {
    // no path given -> nothing to seek (note: returns null, not the current token)
    if (!StringUtils.hasText(path)) {
        return null;
    }
    // split "a.b.c" into segments and delegate to the array-based overload
    List<String> tokens = StringUtils.tokenize(path, ".");
    return seek(parser, tokens.toArray(new String[tokens.size()]));
}
示例10: extractJsonParse
import org.elasticsearch.hadoop.util.StringUtils; //导入方法依赖的package包/类
/**
 * Extracts, from the request body, the snippet of JSON around the position
 * reported by a Jackson parse-error message. Returns {@code null} when the
 * message is blank or is not a JsonParseException.
 */
static String extractJsonParse(String errorMessage, ByteSequence body) {
    // only handle Jackson parse failures carrying a usable message
    if (!StringUtils.hasText(errorMessage) || !errorMessage.startsWith("JsonParseException")) {
        return null;
    }

    // pull the reported line/column out of the message (0 when absent)
    String lineMatch = findMatch(LINE.matcher(errorMessage));
    int line = (StringUtils.hasText(lineMatch) ? Integer.valueOf(lineMatch) : 0);
    String columnMatch = findMatch(COLUMN.matcher(errorMessage));
    int column = (StringUtils.hasText(columnMatch) ? Integer.valueOf(columnMatch) : 0);

    String payload = body.toString();

    // advance through the payload until the reported line is reached
    int position = 0;
    int linesRead = 1;
    int index = 0;
    while (index < payload.length() && linesRead < line) {
        if (payload.charAt(index) == '\n') {
            linesRead++;
        }
        position++;
        index++;
    }
    position += column;

    // found line, return column +/- some chars
    int from = Math.max(position - CHARS_TO_THE_LEFT, 0);
    int to = Math.min(position + CHARS_TO_THE_RIGHT, payload.length());
    return payload.substring(from, to);
}
示例11: RestRepository
import org.elasticsearch.hadoop.util.StringUtils; //导入方法依赖的package包/类
public RestRepository(Settings settings) {
this.settings = settings;
if (StringUtils.hasText(settings.getResourceRead())) {
this.resourceR = new Resource(settings, true);
}
if (StringUtils.hasText(settings.getResourceWrite())) {
this.resourceW = new Resource(settings, false);
}
Assert.isTrue(resourceR != null || resourceW != null, "Invalid configuration - No read or write resource specified");
this.client = new RestClient(settings);
}
示例12: EsSpout
import org.elasticsearch.hadoop.util.StringUtils; //导入方法依赖的package包/类
/**
 * Creates a spout reading from the given ES resource, optionally filtered
 * by a query, with any extra configuration layered on the defaults.
 */
public EsSpout(String target, String query, Map configuration) {
    // overlay the user-supplied configuration on the defaults
    if (configuration != null) {
        spoutConfig.putAll(configuration);
    }

    // record the optional read resource and query (independent settings)
    if (StringUtils.hasText(target)) {
        spoutConfig.put(ES_RESOURCE_READ, target);
    }
    if (StringUtils.hasText(query)) {
        spoutConfig.put(ES_QUERY, query);
    }

    tupleFields = new StormSettings(spoutConfig).getStormSpoutFields();
}
示例13: getStormBulkSize
import org.elasticsearch.hadoop.util.StringUtils; //导入方法依赖的package包/类
/**
 * Returns the bolt flush size: the explicit Storm setting when present,
 * otherwise the generic batch-size-in-entries value.
 */
public int getStormBulkSize() {
    String configured = getProperty(ES_STORM_BOLT_FLUSH_ENTRIES_SIZE);
    return (StringUtils.hasText(configured) ? Integer.valueOf(configured) : getBatchSizeInEntries());
}
示例14: addHttpAuth
import org.elasticsearch.hadoop.util.StringUtils; //导入方法依赖的package包/类
/**
 * Registers HTTP Basic credentials on the connection state when an auth
 * user is configured.
 *
 * @param settings     settings holding the HTTP auth user/password
 * @param authSettings mutable holder; slot 1 carries the shared HttpState
 */
private void addHttpAuth(Settings settings, Object[] authSettings) {
    if (StringUtils.hasText(settings.getNetworkHttpAuthUser())) {
        // reuse an already-created state if one exists
        HttpState state = (authSettings[1] != null ? (HttpState) authSettings[1] : new HttpState());
        authSettings[1] = state;
        state.setCredentials(AuthScope.ANY, new UsernamePasswordCredentials(settings.getNetworkHttpAuthUser(), settings.getNetworkHttpAuthPass()));
        if (log.isDebugEnabled()) {
            // FIX: the message is guarded by isDebugEnabled(), so log at debug
            // level instead of the original (inconsistent) info level
            log.debug("Using detected HTTP Auth credentials...");
        }
    }
}
示例15: hasMappingVersionType
import org.elasticsearch.hadoop.util.StringUtils; //导入方法依赖的package包/类
/**
 * Returns true when both a version field and a version type are mapped and
 * the type differs from the default "internal" type.
 */
public boolean hasMappingVersionType() {
    String versionType = getMappingVersionType();
    if (!StringUtils.hasText(getMappingVersion()) || !StringUtils.hasText(versionType)) {
        return false;
    }
    return !versionType.equals(ES_MAPPING_VERSION_TYPE_INTERNAL);
}