本文整理汇总了Java中org.apache.commons.collections.bidimap.DualHashBidiMap类的典型用法代码示例。如果您正苦于以下问题:Java DualHashBidiMap类的具体用法?Java DualHashBidiMap怎么用?Java DualHashBidiMap使用的例子?那么, 这里精选的类代码示例或许可以为您提供帮助。
DualHashBidiMap类属于org.apache.commons.collections.bidimap包,在下文中一共展示了DualHashBidiMap类的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。
示例1: buildBidirecionalMapFromParameters
import org.apache.commons.collections.bidimap.DualHashBidiMap; //导入依赖的package包/类
/**
 * Parses the given lookup parameter string into a bidirectional map.
 *
 * @param parameters the lookup parameter string: pairs separated by
 *        {@code OLEConstants.FIELD_CONVERSIONS_SEPERATOR}, key and value within a pair
 *        separated by {@code OLEConstants.FIELD_CONVERSION_PAIR_SEPERATOR}
 * @param accountingLinePrefix the actual accounting line prefix
 * @return a bidirectional map that holds all the given lookup parameters; empty if
 *         {@code parameters} is blank
 */
private BidiMap buildBidirecionalMapFromParameters(String parameters, String accountingLinePrefix) {
    BidiMap parameterMap = new DualHashBidiMap();
    // If we didn't get any incoming parameters, just return an empty parameterMap.
    if (StringUtils.isBlank(parameters)) {
        return parameterMap;
    }
    String[] parameterArray = StringUtils.split(parameters, OLEConstants.FIELD_CONVERSIONS_SEPERATOR);
    for (String parameter : parameterArray) {
        String[] entrySet = StringUtils.split(parameter, OLEConstants.FIELD_CONVERSION_PAIR_SEPERATOR);
        // Guard against malformed entries: a pair needs both a key and a value token,
        // otherwise entrySet[1] would throw ArrayIndexOutOfBoundsException.
        if (entrySet != null && entrySet.length >= 2) {
            String parameterKey = escapeAccountingLineName(entrySet[0], accountingLinePrefix);
            String parameterValue = escapeAccountingLineName(entrySet[1], accountingLinePrefix);
            parameterMap.put(parameterKey, parameterValue);
        }
    }
    return parameterMap;
}
示例2: imageArchiveConfig
import org.apache.commons.collections.bidimap.DualHashBidiMap; //导入依赖的package包/类
/**
 * Spring bean producing the image-archive {@link Config}, populated from
 * {@code ImageArchive*} environment properties plus a small language-code map.
 */
@Autowired
@Bean
public Config imageArchiveConfig(Environment environment) {
    // "eng" -> "en", "swe" -> "sv": supported language code mapping.
    BidiMap languageCodes = new DualHashBidiMap();
    languageCodes.put("eng", "en");
    languageCodes.put("swe", "sv");

    // Read the path-valued properties first, then assemble the Config.
    String storagePath = environment.getProperty("ImageArchiveStoragePath");
    String tempPath = environment.getProperty("ImageArchiveTempPath");
    String imageMagickPath = environment.getProperty("ImageMagickPath");
    String imagesPath = environment.getProperty("ImageArchiveImagesPath");
    String librariesPath = environment.getProperty("ImageArchiveLibrariesPath");
    String oldLibraryPaths = environment.getProperty("ImageArchiveOldLibraryPaths");

    Config config = new Config();
    config.setStoragePath(new File(storagePath));
    config.setTmpPath(new File(tempPath));
    config.setImageMagickPath(new File(imageMagickPath));
    config.setImagesPath(new File(imagesPath));
    config.setLibrariesPath(new File(librariesPath));
    config.setOldLibraryPaths(new File[]{new File(oldLibraryPaths)});
    config.setUsersLibraryFolder(environment.getProperty("ImageArchiveUsersLibraryFolder"));
    config.setMaxImageUploadSize(Long.parseLong(environment.getProperty("ImageArchiveMaxImageUploadSize")));
    config.setMaxZipUploadSize(Long.parseLong(environment.getProperty("ImageArchiveMaxZipUploadSize")));
    config.setLanguages(languageCodes);
    return config;
}
示例3: buildBidirecionalMapFromParameters
import org.apache.commons.collections.bidimap.DualHashBidiMap; //导入依赖的package包/类
/**
 * Parses the given lookup parameter string into a bidirectional map.
 *
 * @param parameters the lookup parameter string: pairs separated by
 *        {@code KFSConstants.FIELD_CONVERSIONS_SEPERATOR}, key and value within a pair
 *        separated by {@code KFSConstants.FIELD_CONVERSION_PAIR_SEPERATOR}
 * @param accountingLinePrefix the actual accounting line prefix
 * @return a bidirectional map that holds all the given lookup parameters; empty if
 *         {@code parameters} is blank
 */
private BidiMap buildBidirecionalMapFromParameters(String parameters, String accountingLinePrefix) {
    BidiMap parameterMap = new DualHashBidiMap();
    // If we didn't get any incoming parameters, just return an empty parameterMap.
    if (StringUtils.isBlank(parameters)) {
        return parameterMap;
    }
    String[] parameterArray = StringUtils.split(parameters, KFSConstants.FIELD_CONVERSIONS_SEPERATOR);
    for (String parameter : parameterArray) {
        String[] entrySet = StringUtils.split(parameter, KFSConstants.FIELD_CONVERSION_PAIR_SEPERATOR);
        // Guard against malformed entries: a pair needs both a key and a value token,
        // otherwise entrySet[1] would throw ArrayIndexOutOfBoundsException.
        if (entrySet != null && entrySet.length >= 2) {
            String parameterKey = escapeAccountingLineName(entrySet[0], accountingLinePrefix);
            String parameterValue = escapeAccountingLineName(entrySet[1], accountingLinePrefix);
            parameterMap.put(parameterKey, parameterValue);
        }
    }
    return parameterMap;
}
示例4: identifyFileList
import org.apache.commons.collections.bidimap.DualHashBidiMap; //导入依赖的package包/类
/**
 * File list identification. Processes a bi-directional map of records
 * where each record has one temporary file and each temporary file has one
 * record key (strict 1:1 relationship).
 *
 * @param fileRecidBidiMap Input { "recordkey" <-> "tempfilename" }
 * @return Output { "recordkey": [ "tool/property/value" ] }
 * @throws IOException
 */
public HashMap<String, List<String>> identifyFileList(DualHashBidiMap fileRecidBidiMap) throws IOException {
    HashMap<String, List<String>> resultMap = new HashMap<String, List<String>>();
    for (Object keyObj : fileRecidBidiMap.keySet()) {
        String recordKey = (String) keyObj;
        String tmpFileName = (String) fileRecidBidiMap.get(recordKey);
        // Run identification on the temporary file backing this record.
        HashMap idRes = this.identify(new File(tmpFileName));
        // recordKey has the form "<containerFileName>/<containerIdentifier>".
        int slashPos = recordKey.indexOf("/");
        String containerFileName = recordKey.substring(0, slashPos);
        String containerIdentifier = recordKey.substring(slashPos + 1);
        String outputKey = String.format(outputKeyFormat, containerFileName, containerIdentifier);
        // One "tool/property/value" line per identification property.
        List<String> valueLines = new ArrayList<String>();
        for (Object propObj : idRes.keySet()) {
            String property = (String) propObj;
            String value = (String) idRes.get(propObj);
            valueLines.add(String.format(outputValueFormat, tool, property, value));
        }
        resultMap.put(outputKey, valueLines);
    }
    return resultMap;
}
示例5: testInit
import org.apache.commons.collections.bidimap.DualHashBidiMap; //导入依赖的package包/类
/**
 * Test of init method, of class ZipContainer: extracts test.zip and verifies
 * that the contained test.doc is present in the identifier/filename map and
 * was actually extracted to disk.
 */
@Test
public void testInit() throws Exception {
    InputStream testFileStream = ZipContainer.class.getResourceAsStream("test.zip");
    if (testFileStream == null) {
        fail("test resource test.zip not found on classpath");
    }
    // Copy the classpath resource to a real file so it can be re-opened below.
    File tmpTestFile = File.createTempFile("test", ".zip");
    tmpTestFile.deleteOnExit(); // don't leave the temp copy behind
    FileOutputStream fos = new FileOutputStream(tmpTestFile);
    try {
        IOUtils.copy(testFileStream, fos);
    } finally {
        // Close both streams even if the copy fails (original leaked them).
        fos.close();
        testFileStream.close();
    }
    String containerFileName = "test.zip";
    InputStream containerFileStream = new FileInputStream(tmpTestFile);
    try {
        ZipContainer instance = new ZipContainer();
        instance.init(containerFileName, containerFileStream);
        DualHashBidiMap bidiIdentifierFilenameMap = instance.getBidiIdentifierFilenameMap();
        String key = instance.getExtractDirectoryName() + "test.zip/test.doc";
        assertTrue(bidiIdentifierFilenameMap.containsKey(key));
        String value = (String) bidiIdentifierFilenameMap.get(key);
        assertNotNull(value);
        File tmpFile = new File(value);
        assertTrue("File does not exist: " + tmpFile.getAbsolutePath(), tmpFile.exists());
    } finally {
        containerFileStream.close();
    }
}
示例6: testInitWithSubfolder
import org.apache.commons.collections.bidimap.DualHashBidiMap; //导入依赖的package包/类
/**
 * Test of init method, of class ZipContainer, with an archive whose payload
 * lives in a nested subfolder (test/sub/test.doc).
 */
@Test
public void testInitWithSubfolder() throws Exception {
    InputStream testFileStream = ZipContainer.class.getResourceAsStream("testsub.zip");
    if (testFileStream == null) {
        fail("test resource testsub.zip not found on classpath");
    }
    // Copy the classpath resource to a real file so it can be re-opened below.
    File tmpTestFile = File.createTempFile("testsub", ".zip");
    tmpTestFile.deleteOnExit(); // don't leave the temp copy behind
    FileOutputStream fos = new FileOutputStream(tmpTestFile);
    try {
        IOUtils.copy(testFileStream, fos);
    } finally {
        // Close both streams even if the copy fails (original leaked them).
        fos.close();
        testFileStream.close();
    }
    String containerFileName = "testsub.zip";
    InputStream containerFileStream = new FileInputStream(tmpTestFile);
    try {
        ZipContainer instance = new ZipContainer();
        instance.init(containerFileName, containerFileStream);
        DualHashBidiMap bidiIdentifierFilenameMap = instance.getBidiIdentifierFilenameMap();
        String key = instance.getExtractDirectoryName() + "testsub.zip/test/sub/test.doc";
        assertTrue(bidiIdentifierFilenameMap.containsKey(key));
        String value = (String) bidiIdentifierFilenameMap.get(key);
        assertNotNull(value);
        File tmpFile = new File(value);
        assertTrue("File does not exist: " + tmpFile.getAbsolutePath(), tmpFile.exists());
    } finally {
        containerFileStream.close();
    }
}
示例7: flip_map_entries_with_apachecommons
import org.apache.commons.collections.bidimap.DualHashBidiMap; //导入依赖的package包/类
/**
 * Demonstrates inverting key/value pairs with commons-collections:
 * a BidiMap exposes its reverse view via inverseBidiMap().
 */
@Test
public void flip_map_entries_with_apachecommons() {
    // Forward mapping: two-letter state code -> full state name.
    BidiMap codeToName = new DualHashBidiMap();
    codeToName.put("WI", "Wisconsin");
    codeToName.put("MN", "Minnesota");
    codeToName.put("FL", "Florida");
    codeToName.put("IA", "Iowa");
    codeToName.put("OH", "Ohio");

    // Flip the map without rebuilding it by hand.
    BidiMap nameToCode = codeToName.inverseBidiMap();
    logger.info(nameToCode);

    assertEquals("IA", nameToCode.get("Iowa"));
}
示例8: assignColumnNames
import org.apache.commons.collections.bidimap.DualHashBidiMap; //导入依赖的package包/类
/**
 * Derives a unique column name for every value of the current record
 * (presumably the header row of a delimited file -- TODO confirm against the
 * caller) and stores the resulting name -> column-index mapping in
 * {@code this.columnNames}, preserving column order.
 *
 * Duplicate names are resolved by forcing a synthetic
 * {@code INDEXED_COLUMN_PREFIX + index} name onto the duplicate column; if
 * that synthetic name is itself already taken, the displaced column is
 * renamed in turn, repeating until a free name is found.
 */
protected void assignColumnNames()
{
// Bidirectional index <-> name map so we can look up a column by name
// (getKey) while building.
BidiMap indexColumns = new DualHashBidiMap();
for (int i = 0; i < crtRecordColumnValues.size(); i++)
{
String name = crtRecordColumnValues.get(i);
// Has some earlier column already claimed this name?
Integer existingIdx = (Integer) indexColumns.getKey(name);
if (existingIdx == null)
{
//use the name from the file if possible
indexColumns.put(i, name);
}
else
{
//the name is taken, force COLUMN_i for this column and recursively if COLUMN_x is already used
Integer forceIndex = i;
do
{
String indexName = INDEXED_COLUMN_PREFIX + forceIndex;
// If the synthetic name is occupied, remember its holder: the put below
// evicts that entry (BidiMap values are unique), so the displaced column
// must be renamed on the next loop iteration.
Integer existingIndex = (Integer) indexColumns.getKey(indexName);
indexColumns.put(forceIndex, indexName);
forceIndex = existingIndex;
}
while(forceIndex != null);
}
}
// Flatten the bidi map into the ordered name -> index mapping.
this.columnNames = new LinkedHashMap<String, Integer>();
for (int i = 0; i < crtRecordColumnValues.size(); i++)
{
String columnName = (String) indexColumns.get(i);
this.columnNames.put(columnName, i);
}
}
示例9: _refresh
import org.apache.commons.collections.bidimap.DualHashBidiMap; //导入依赖的package包/类
/**
 * Resets all accumulated lists and tables so this PUB object can be
 * reloaded or rebuilt from scratch. It's possible, maybe even sensible,
 * to reuse a PUB object; this prepares it for that.
 */
private void _refresh() {
    // Symbol cross-reference lists.
    importList = new ArrayList<SymbolRef>();
    exportList = new ArrayList<SymbolRef>();
    // File bookkeeping.
    fileList = new ArrayList<String>();
    fileIndexes = new IntegerIndex<String>();
    // Table definitions, sorted by name.
    tableMap = new TreeMap<String, TableRef>();
    // String table. Index zero is reserved for the empty string so that 0
    // from JPNode.attrGet() can mean "no string value present".
    stringTable = new DualHashBidiMap();
    stringIndex("");
}
示例10: identifyFileList
import org.apache.commons.collections.bidimap.DualHashBidiMap; //导入依赖的package包/类
/**
 * Identifies every temporary file referenced by the given bidirectional
 * record-key <-> temp-filename map in a single run of the identification
 * tool, then maps each result line back to its record key.
 *
 * @param fileRecidBidiMap Input { "recordkey" <-> "tempfilename" }
 * @return Output { "recordkey": [ "tool/property/value" ] }
 * @throws IOException if the identification run fails
 */
@Override
public HashMap<String, List<String>> identifyFileList(DualHashBidiMap fileRecidBidiMap) throws IOException {
    HashMap<String, List<String>> resultMap = new HashMap<String, List<String>>();
    // One identification run over all temp files at once.
    String ufidRes = this.identify(fileRecidBidiMap.values());
    Scanner s = new Scanner(ufidRes);
    try {
        // one file identification result per line
        s.useDelimiter("\n");
        while (s.hasNext()) {
            // output syntax of the unix-tool 'file' is ${fileName} : ${mimeType}
            StringTokenizer st = new StringTokenizer(s.next(), ":");
            if (!st.hasMoreTokens()) {
                continue; // blank/malformed line: nothing to parse
            }
            String fileName = st.nextToken().trim();
            // Map the reported file name back to its record key.
            String key = (String) fileRecidBidiMap.getKey(fileName);
            // Skip lines for files we did not submit, and lines without a
            // ':'-separated result (original threw NoSuchElementException there).
            if (key != null && st.hasMoreTokens()) {
                // key has the form "<containerFileName>/<containerIdentifier>".
                String containerFileName = key.substring(0, key.indexOf("/"));
                String containerIdentifier = key.substring(key.indexOf("/") + 1);
                String outputKey = String.format(outputKeyFormat, containerFileName, containerIdentifier);
                // output value
                String property = "mime";
                String value = st.nextToken().trim();
                String outputValue = String.format(outputValueFormat, tool, property, value);
                List<String> valueLineList = new ArrayList<String>();
                valueLineList.add(outputValue);
                resultMap.put(outputKey, valueLineList);
            }
        }
    } finally {
        s.close(); // original leaked the Scanner
    }
    return resultMap;
}
示例11: SimilarityWordConfiguration
import org.apache.commons.collections.bidimap.DualHashBidiMap; //导入依赖的package包/类
/**
 * Creates a similarity-word configuration with score association enabled.
 * Registers the available string metrics under their display names
 * ("Levenshtein", "Jaro", "Jaro Winkler"), sets the similarity threshold
 * to 0.5, and selects Levenshtein as the default metric.
 *
 * NOTE(review): THRESHOLD, STRING_METRICS and STRING_METRIC are assigned
 * here, so despite the constant-style names they appear to be mutable
 * instance/class fields -- verify in the field declarations.
 */
SimilarityWordConfiguration() {
super();
setAssociateScore(true);
THRESHOLD = 0.5f;
// Bidirectional map: display name <-> metric instance.
STRING_METRICS = new DualHashBidiMap();
STRING_METRICS.put("Levenshtein", new Levenshtein());
STRING_METRICS.put("Jaro", new Jaro());
STRING_METRICS.put("Jaro Winkler", new JaroWinkler());
STRING_METRIC = new Levenshtein();
}
示例12: getBidiIdentifierFilenameMap
import org.apache.commons.collections.bidimap.DualHashBidiMap; //导入依赖的package包/类
/**
 * Returns the bidirectional identifier-to-filename map.
 *
 * @return this instance; the class itself serves as the map (returning
 *         {@code this} as a DualHashBidiMap implies the class extends it)
 */
@Override
public DualHashBidiMap getBidiIdentifierFilenameMap() {
return this;
}
示例13: getBidiIdentifierFilenameMap
import org.apache.commons.collections.bidimap.DualHashBidiMap; //import the required package/class
/**
 * Returns the bidirectional identifier-to-filename map.
 *
 * NOTE(review): the previous javadoc ("Set the bidirectional",
 * "@return extract directory name") did not match this method; it returns
 * the map, not a directory name.
 *
 * @return this instance; the class itself serves as the map (returning
 *         {@code this} as a DualHashBidiMap implies the class extends it)
 */
@Override
public DualHashBidiMap getBidiIdentifierFilenameMap() {
return this;
}
示例14: DagInfo
import org.apache.commons.collections.bidimap.DualHashBidiMap; //导入依赖的package包/类
/**
 * Builds a DagInfo from an ATS/history JSON entity describing a Tez DAG.
 * Parses the DAG id, timing information (falling back to DAG_STARTED /
 * DAG_FINISHED event timestamps when start/finish look inconsistent), the
 * DAG plan, status, and the vertex name -> vertex id mapping.
 *
 * @param jsonObject JSON entity; must have entity type TEZ_DAG_ID
 * @throws JSONException if a required JSON field is missing or malformed
 */
DagInfo(JSONObject jsonObject) throws JSONException {
super(jsonObject);
// Initialize collections before any parsing so partial parses leave them non-null.
vertexNameMap = Maps.newHashMap();
vertexNameIDMapping = new DualHashBidiMap();
edgeInfoMap = Maps.newHashMap();
basicVertexInfoMap = Maps.newHashMap();
containerMapping = LinkedHashMultimap.create();
// This constructor only accepts TEZ_DAG_ID entities.
Preconditions.checkArgument(jsonObject.getString(Constants.ENTITY_TYPE).equalsIgnoreCase
(Constants.TEZ_DAG_ID));
dagId = StringInterner.weakIntern(jsonObject.getString(Constants.ENTITY));
//Parse additional Info
JSONObject otherInfoNode = jsonObject.getJSONObject(Constants.OTHER_INFO);
long sTime = otherInfoNode.optLong(Constants.START_TIME);
long eTime= otherInfoNode.optLong(Constants.FINISH_TIME);
userName = otherInfoNode.optString(Constants.USER);
// Sanity-check the timestamps; optLong returns 0 for missing fields, which
// also trips this branch when FINISH_TIME is absent.
if (eTime < sTime) {
LOG.warn("DAG has got wrong start/end values. "
+ "startTime=" + sTime + ", endTime=" + eTime + ". Will check "
+ "timestamps in DAG started/finished events");

// Check if events DAG_STARTED, DAG_FINISHED can be made use of
for(Event event : eventList) {
switch (HistoryEventType.valueOf(event.getType())) {
case DAG_STARTED:
sTime = event.getAbsoluteTime();
break;
case DAG_FINISHED:
eTime = event.getAbsoluteTime();
break;
default:
break;
}
}

// Even the event timestamps are inconsistent; keep them but warn.
if (eTime < sTime) {
LOG.warn("DAG has got wrong start/end values in events as well. "
+ "startTime=" + sTime + ", endTime=" + eTime);
}
}
startTime = sTime;
endTime = eTime;

//TODO: Not getting populated correctly for lots of jobs.  Verify
submitTime = otherInfoNode.optLong(Constants.START_REQUESTED_TIME);
diagnostics = otherInfoNode.optString(Constants.DIAGNOSTICS);
failedTasks = otherInfoNode.optInt(Constants.NUM_FAILED_TASKS);
// The DAG plan is optional; when absent, name stays interned-null and
// numVertices is 0.
JSONObject dagPlan = otherInfoNode.optJSONObject(Constants.DAG_PLAN);
name = StringInterner.weakIntern((dagPlan != null) ? (dagPlan.optString(Constants.DAG_NAME)) : null);
if (dagPlan != null) {
JSONArray vertices = dagPlan.optJSONArray(Constants.VERTICES);
if (vertices != null) {
numVertices = vertices.length();
} else {
numVertices = 0;
}
parseDAGPlan(dagPlan);
} else {
numVertices = 0;
}
status = StringInterner.weakIntern(otherInfoNode.optString(Constants.STATUS));

//parse name id mapping
JSONObject vertexIDMappingJson = otherInfoNode.optJSONObject(Constants.VERTEX_NAME_ID_MAPPING);
if (vertexIDMappingJson != null) {
//get vertex name
// basicVertexInfoMap is populated by parseDAGPlan above, so this loop only
// finds ids for vertices that appeared in the plan.
for (Map.Entry<String, BasicVertexInfo> entry : basicVertexInfoMap.entrySet()) {
String vertexId = vertexIDMappingJson.optString(entry.getKey());
//vertexName --> vertexId
vertexNameIDMapping.put(entry.getKey(), vertexId);
}
}
}
示例15: GameGatewayServerLogic
import org.apache.commons.collections.bidimap.DualHashBidiMap; //导入依赖的package包/类
/**
 * Private constructor -- presumably singleton-style; verify that an
 * INSTANCE/holder field exists elsewhere in the class.
 *
 * Initializes the bidirectional client map and the per-client liveness map,
 * loads the relay-call mapping and trusted-IP list, then starts the
 * periodic alive check.
 */
private GameGatewayServerLogic() {
// client id <-> connection mapping (bidirectional for reverse lookups).
clientBidiMap = new DualHashBidiMap();
aliveMap = new HashMap<Long, Boolean>();
loadRelayCallMapping();
loadTrustIPList();
startAliveCheck();
}