当前位置: 首页>>代码示例>>Java>>正文


Java FastMath.mod方法代码示例

本文整理汇总了Java中edu.jhu.prim.util.math.FastMath.mod方法的典型用法代码示例。如果您正苦于以下问题:Java FastMath.mod方法的具体用法?Java FastMath.mod怎么用?Java FastMath.mod使用的例子?那么恭喜您, 这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在edu.jhu.prim.util.math.FastMath的用法示例。


在下文中一共展示了FastMath.mod方法的5个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的Java代码示例。

示例1: addFeat

import edu.jhu.prim.util.math.FastMath; // import the package/class this method depends on
/** Adds a single feature to {@code feats}, hashing the packed long id down to an int index. */
private static void addFeat(FeatureVector feats, int mod, long feat) {
    // Hash the 64-bit packed feature id to a 32-bit index.
    int index = MurmurHash.hash32(feat);
    if (mod > 0) {
        // Feature hashing: fold the index into the range [0, mod).
        index = FastMath.mod(index, mod);
    }
    feats.add(index, 1.0);
    // Enable this for debugging of feature creation.
    //        if (feats instanceof LongFeatureVector) {
    //            ((LongFeatureVector)feats).addLong(feat, 1.0);
    //        }
}
 
开发者ID:mgormley,项目名称:pacaya-nlp,代码行数:12,代码来源:BitshiftDepParseFeatures.java

示例2: getValIdx

import edu.jhu.prim.util.math.FastMath; // import the package/class this method depends on
/**
 * Looks up the alphabet index for the value extracted at (pidx, cidx),
 * or -1 if no value is present. When {@code valueHashMod > 0} the value
 * string is feature-hashed into [0, valueHashMod) before indexing.
 */
public int getValIdx(AnnoSentence sent, int pidx, int cidx) {
    String val = getVal(sent, pidx, cidx);
    if (val == null) {
        // No value for this (parent, child) pair.
        return -1;
    }
    if (valueHashMod <= 0) {
        // No hashing: index the raw value string directly.
        return valAlphabet.lookupIndex(val);
    }
    // Hash the value string and fold it into [0, valueHashMod).
    int hashed = FastMath.mod(MurmurHash3.murmurhash3_x86_32(val, 0, val.length(), 123456789), valueHashMod);
    return valAlphabet.lookupIndex(hashed);
}
 
开发者ID:mgormley,项目名称:pacaya-nlp,代码行数:15,代码来源:IGFeatureTemplateSelector.java

示例3: addFeat

import edu.jhu.prim.util.math.FastMath; // import the package/class this method depends on
/**
 * Adds a feature with the given weight to {@code feats}, hashing the packed
 * long id to an int index (folded into [0, mod) when {@code mod > 0}).
 */
public static void addFeat(FeatureVector feats, int mod, long feat, double value) {
    int index = MurmurHash.hash32(feat);
    // Feature hashing is disabled when mod <= 0; the raw 32-bit hash is used.
    feats.add(mod > 0 ? FastMath.mod(index, mod) : index, value);
}
 
开发者ID:mgormley,项目名称:pacaya-nlp,代码行数:8,代码来源:BitshiftTokenFeatures.java

示例4: getFeatures

import edu.jhu.prim.util.math.FastMath; // import the package/class this method depends on
/**
 * Extracts each template with both the string-based and the int-based
 * extractor, hashes every feature into [0, featureHashMod), and records
 * collision statistics: the set of distinct strings per bucket
 * ({@code collisions}) and a per-bucket token count
 * ({@code collisionTokenCount}).
 */
protected static void getFeatures(List<FeatTemplate> tpls, TemplateFeatureExtractor extStr, IntTemplateFeatureExtractor extInt,
        LocalObservations local) {
    for (FeatTemplate tpl : tpls) {
        // Run both extractors on the same template and observations.
        List<String> strFeats = new ArrayList<>();
        extStr.addFeatures(tpl, local, strFeats);
        IntArrayList intFeats = new IntArrayList();
        extInt.addFeatures(tpl, local, intFeats);
        if (strFeats.size() != intFeats.size()) {
            log.error("Mismatch in number of features extracted for template: {} str = {} int = {}", tpl.getName(), strFeats.size(), intFeats.size());
        }
        // On a size mismatch, only compare the common prefix.
        int n = Math.min(strFeats.size(), intFeats.size());
        for (int i = 0; i < n; i++) {
            int hash = useIntExtr ? intFeats.get(i) : MurmurHash.hash32(strFeats.get(i));
            hash = FastMath.mod(hash, featureHashMod);
            // Add to the set of strings which are colliding on this hash.
            Set<String> bucket = collisions[hash];
            if (bucket == null) {
                bucket = new HashSet<String>();
                collisions[hash] = bucket;
            }
            bucket.add(strFeats.get(i));
            // Increment the token count of collisions on this hash.
            collisionTokenCount[hash]++;
        }
    }
}
 
开发者ID:mgormley,项目名称:pacaya-nlp,代码行数:31,代码来源:IntTemplateFeatureExtractorCollisionsTest.java

示例5: computeInformationGain

import edu.jhu.prim.util.math.FastMath; // import the package/class this method depends on
/**
 * Computes the information gain of feature template {@code allTpls.get(t)}
 * with respect to each value extractor, over parallel input/gold sentence
 * collections.
 *
 * @param t Index of the template under evaluation.
 * @param allTpls All candidate feature templates.
 * @param valExts Value extractors; one IG score is computed per extractor.
 * @param inputSents Sentences features are extracted from.
 * @param goldSents Gold sentences (parallel to inputSents) values are extracted from.
 * @param cs Corpus statistics handed to the feature extractor.
 * @param ig Output: ig[c][t] receives the IG of (extractor c, template t).
 * @param featCount Output: featCount[t] receives the number of distinct features seen.
 */
private void computeInformationGain(int t, List<FeatTemplate> allTpls, List<ValExtractor> valExts,
        AnnoSentenceCollection inputSents, AnnoSentenceCollection goldSents, CorpusStatistics cs, double[][] ig, int[] featCount) {
    FeatTemplate tpl = allTpls.get(t);

    // counts[c][v] accumulates, for extractor c and value index v, how often each feature fired.
    final IntDoubleDenseVector[][] counts = getCountsArray(valExts);
    // Local alphabet: maps feature names (or stringified hashes) to dense indices for this template only.
    IntObjectBimap<String> alphabet = new IntObjectBimap<String>();
    for (int i=0; i<goldSents.size(); i++) {                
        AnnoSentence goldSent = goldSents.get(i);
        AnnoSentence inputSent = inputSents.get(i);
        TemplateFeatureExtractor featExt = new TemplateFeatureExtractor(inputSent, cs);

        // pidx starts at -1, which appears to denote the wall/root parent; cidx covers all children.
        for (int pidx=-1; pidx<inputSent.size(); pidx++) {
            for (int cidx=0; cidx<inputSent.size(); cidx++) {
                
                // Feature Extraction.
                List<String> feats = new ArrayList<String>();
                featExt.addFeatures(QLists.getList(tpl), LocalObservations.newPidxCidx(pidx, cidx), feats);
                if (feats.size() == 0) {
                    // Warn only once per template that yields no features.
                    if (!tplsWithNoFeats.contains(tpl)) {
                        log.warn("No features extracted for template: " + tpl.getName());
                        tplsWithNoFeats.add(tpl);
                    }
                }
                FeatureVector fv = new FeatureVector();
                for (int j=0; j<feats.size(); j++) {                                
                    String featName = feats.get(j);
                    int featIdx;
                    if (prm.featureHashMod > 0) {
                        // Feature hashing: fold the murmur hash of the name into
                        // [0, featureHashMod), then index its string form so the
                        // alphabet size stays bounded by featureHashMod.
                        String data = featName;
                        featIdx = FastMath.mod(MurmurHash3.murmurhash3_x86_32(data, 0, data.length(), 123456789), prm.featureHashMod);
                        featIdx = alphabet.lookupIndex(Integer.toString(featIdx));
                    } else {
                        featIdx = alphabet.lookupIndex(featName);
                    }
                    fv.add(featIdx, 1.0);
                }
                
                // For each value extractor:
                for (int c = 0; c < valExts.size(); c++) {
                    // Value Extraction.
                    ValExtractor valExt = valExts.get(c);
                    final int valIdx = valExt.getValIdx(goldSent, pidx, cidx);
                    
                    if (valIdx != -1) {
                        // Increment counts of feature and value occurrences.
                        counts[c][valIdx].add(fv);
                    }
                }
            }
        }
    }
    
    for (int c = 0; c < valExts.size(); c++) {
        // Compute information gain for this (feature template, value extractor) pair.                
        ig[c][t] = computeInformationGain(counts[c]);
    }
    featCount[t] = alphabet.size();
    
    // Progress logging every 10th template.
    if (t % 10 == 0) {
        log.debug(String.format("Processed feature template %d of %d: %s #feats=%d", t, allTpls.size(), tpl.getName(), alphabet.size()));
    }
}
 
开发者ID:mgormley,项目名称:pacaya-nlp,代码行数:63,代码来源:IGFeatureTemplateSelector.java


注:本文中的edu.jhu.prim.util.math.FastMath.mod方法示例由纯净天空整理自Github/MSDocs等开源代码及文档管理平台,相关代码片段筛选自各路编程大神贡献的开源项目,源码版权归原作者所有,传播和使用请参考对应项目的License;未经允许,请勿转载。