This article collects typical usage examples of the C++ method ConsensusFeature::insert. If you are wondering how exactly ConsensusFeature::insert is used, or are looking for working examples of it, the curated code samples below may help. You can also look further into the usage of the enclosing class, ConsensusFeature.
The following shows 11 code examples of the ConsensusFeature::insert method, sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the system recommend better C++ code samples.
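Before the examples, here is a minimal sketch (not taken from any of the snippets below) that exercises the three insert overloads seen on this page: map index plus BaseFeature, map index plus Peak2D plus element index, and a ready-made FeatureHandle. The include paths, the using-directive, and the FeatureHandle(map_index, feature) constructor are assumptions based on a typical OpenMS setup and may differ between OpenMS versions.
// Minimal sketch; header paths and the FeatureHandle constructor are assumed.
#include <OpenMS/KERNEL/BaseFeature.h>
#include <OpenMS/KERNEL/ConsensusFeature.h>
#include <OpenMS/KERNEL/FeatureHandle.h>
#include <OpenMS/KERNEL/Peak2D.h>

using namespace OpenMS;

ConsensusFeature buildSmallConsensus()
{
  ConsensusFeature cf;

  // overload 1: map index + BaseFeature (as in Example 1)
  BaseFeature bf;
  bf.setIntensity(100.0);
  cf.insert(0, bf);

  // overload 2: map index + Peak2D + element index (as in Examples 4 and 6)
  Peak2D reporter;
  reporter.setRT(1000.0);
  reporter.setMZ(114.1);
  reporter.setIntensity(42.0);
  cf.insert(1, reporter, 0);

  // overload 3: a pre-built FeatureHandle (as in Example 2); this constructor is assumed
  FeatureHandle handle(2, bf);
  cf.insert(handle);

  cf.setIntensity(142.0); // the consensus intensity is set explicitly, as in Example 1
  return cf;
}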
Example 1: getCFWithIntensites
ConsensusFeature getCFWithIntensites(double v[])
{
ConsensusFeature cf;
BaseFeature bf0, bf1, bf2, bf3;
bf0.setIntensity(v[0]);
bf1.setIntensity(v[1]);
bf2.setIntensity(v[2]);
bf3.setIntensity(v[3]);
cf.insert(0, bf0);
cf.insert(1, bf1);
cf.insert(2, bf2);
cf.insert(3, bf3);
cf.setIntensity(v[0]+v[1]+v[2]+v[3]);
return cf;
}
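As a quick follow-up (not part of the original test code), the helper above could be used as shown here. It assumes the same OpenMS headers and using-directives as Example 1, and only uses accessors that already appear in the snippets on this page (getIntensity, getFeatures).
// Hypothetical call site for getCFWithIntensites(); assumes Example 1's
// function and its OpenMS includes are visible in this translation unit.
#include <iostream>

int main()
{
  double intensities[4] = {10.0, 20.0, 30.0, 40.0};
  ConsensusFeature cf = getCFWithIntensites(intensities);
  std::cout << cf.getIntensity() << '\n';        // 100, the sum set by the helper
  std::cout << cf.getFeatures().size() << '\n';  // 4 sub-feature handles
  return 0;
}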
Example 2: transferSubelements
void FeatureGroupingAlgorithm::transferSubelements(const vector<ConsensusMap>& maps, ConsensusMap& out) const
{
// accumulate file descriptions from the input maps:
// cout << "Updating file descriptions..." << endl;
out.getFileDescriptions().clear();
// mapping: (map index, original id) -> new id
map<pair<Size, UInt64>, Size> mapid_table;
for (Size i = 0; i < maps.size(); ++i)
{
const ConsensusMap& consensus = maps[i];
for (ConsensusMap::FileDescriptions::const_iterator desc_it = consensus.getFileDescriptions().begin(); desc_it != consensus.getFileDescriptions().end(); ++desc_it)
{
Size counter = mapid_table.size();
mapid_table[make_pair(i, desc_it->first)] = counter;
out.getFileDescriptions()[counter] = desc_it->second;
}
}
// look-up table: input map -> unique ID -> consensus feature
// cout << "Creating look-up table..." << endl;
vector<map<UInt64, ConsensusMap::ConstIterator> > feat_lookup(maps.size());
for (Size i = 0; i < maps.size(); ++i)
{
const ConsensusMap& consensus = maps[i];
for (ConsensusMap::ConstIterator feat_it = consensus.begin();
feat_it != consensus.end(); ++feat_it)
{
// do NOT use "id_lookup[i][feat_it->getUniqueId()] = feat_it;" here as
// you will get "attempt to copy-construct an iterator from a singular
// iterator" in STL debug mode:
feat_lookup[i].insert(make_pair(feat_it->getUniqueId(), feat_it));
}
}
// adjust the consensus features:
// cout << "Adjusting consensus features..." << endl;
for (ConsensusMap::iterator cons_it = out.begin(); cons_it != out.end(); ++cons_it)
{
ConsensusFeature adjusted = ConsensusFeature(
static_cast<BaseFeature>(*cons_it)); // remove sub-features
for (ConsensusFeature::HandleSetType::const_iterator sub_it = cons_it->getFeatures().begin(); sub_it != cons_it->getFeatures().end(); ++sub_it)
{
UInt64 id = sub_it->getUniqueId();
Size map_index = sub_it->getMapIndex();
ConsensusMap::ConstIterator origin = feat_lookup[map_index][id];
for (ConsensusFeature::HandleSetType::const_iterator handle_it = origin->getFeatures().begin(); handle_it != origin->getFeatures().end(); ++handle_it)
{
FeatureHandle handle = *handle_it;
Size new_id = mapid_table[make_pair(map_index, handle.getMapIndex())];
handle.setMapIndex(new_id);
adjusted.insert(handle);
}
}
*cons_it = adjusted;
}
}
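The central bookkeeping in Example 2 is mapid_table, which hands out a fresh consecutive id for every (input map index, original file id) pair so that each FeatureHandle can be re-indexed into the merged map's file descriptions. A standalone sketch of just that renumbering step, using only the standard library (names and values are illustrative, not OpenMS API):
#include <cstddef>
#include <cstdint>
#include <iostream>
#include <map>
#include <utility>
#include <vector>

// Assign a new consecutive id to every (input map index, original file id)
// pair, mirroring the mapid_table logic in transferSubelements().
int main()
{
  // original file ids per input map (illustrative values)
  std::vector<std::vector<std::uint64_t>> original_ids = {{0, 1}, {0, 5}};

  std::map<std::pair<std::size_t, std::uint64_t>, std::size_t> mapid_table;
  for (std::size_t map_index = 0; map_index < original_ids.size(); ++map_index)
  {
    for (std::uint64_t orig : original_ids[map_index])
    {
      std::size_t new_id = mapid_table.size();  // next free consecutive id
      mapid_table[{map_index, orig}] = new_id;
    }
  }

  // (map 1, original file id 5) ends up with new id 3
  std::cout << mapid_table[{1, 5}] << '\n';
  return 0;
}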
Example 3: main_
ExitCodes main_(int, const char**) override
{
//-------------------------------------------------------------
// parameter handling
//-------------------------------------------------------------
String in = getStringOption_("in");
String out = getStringOption_("out");
FileTypes::Type out_type = FileTypes::nameToType(getStringOption_("out_type"));
if (out_type == FileTypes::UNKNOWN)
{
out_type = FileHandler().getTypeByFileName(out);
}
//-------------------------------------------------------------
// loading input
//-------------------------------------------------------------
MzMLFile mz_data_file;
mz_data_file.setLogType(log_type_);
PeakMap ms_peakmap;
std::vector<Int> ms_level(1, 1);
(mz_data_file.getOptions()).setMSLevels(ms_level);
mz_data_file.load(in, ms_peakmap);
if (ms_peakmap.size() == 0)
{
LOG_WARN << "The given file does not contain any conventional peak data, but might"
" contain chromatograms. This tool currently cannot handle them, sorry.";
return INCOMPATIBLE_INPUT_DATA;
}
// make sure that the spectra are sorted by m/z
ms_peakmap.sortSpectra(true);
//-------------------------------------------------------------
// get params for MTD and EPD algorithms
//-------------------------------------------------------------
Param com_param = getParam_().copy("algorithm:common:", true);
writeDebug_("Common parameters passed to both sub-algorithms (mtd and epd)", com_param, 3);
Param mtd_param = getParam_().copy("algorithm:mtd:", true);
writeDebug_("Parameters passed to MassTraceDetection", mtd_param, 3);
Param epd_param = getParam_().copy("algorithm:epd:", true);
writeDebug_("Parameters passed to ElutionPeakDetection", epd_param, 3);
//-------------------------------------------------------------
// configure and run MTD
//-------------------------------------------------------------
MassTraceDetection mt_ext;
mtd_param.insert("", com_param);
mtd_param.remove("chrom_fwhm");
mt_ext.setParameters(mtd_param);
vector<MassTrace> m_traces;
mt_ext.run(ms_peakmap, m_traces);
vector<MassTrace> m_traces_final;
bool use_epd = epd_param.getValue("enabled").toBool();
if (!use_epd)
{
swap(m_traces_final, m_traces);
}
else
{
ElutionPeakDetection ep_det;
epd_param.remove("enabled"); // artificially added above
epd_param.insert("", com_param);
ep_det.setParameters(epd_param);
std::vector<MassTrace> split_mtraces;
// note: this step will destroy any meta data annotation (e.g. FWHM_mz_avg)
ep_det.detectPeaks(m_traces, split_mtraces);
if (ep_det.getParameters().getValue("width_filtering") == "auto")
{
m_traces_final.clear();
ep_det.filterByPeakWidth(split_mtraces, m_traces_final);
LOG_INFO << "Notice: " << split_mtraces.size() - m_traces_final.size()
<< " of total " << split_mtraces.size()
<< " were dropped because of too low peak width." << std::endl;
}
else
{
swap(m_traces_final, split_mtraces);
}
}
//-------------------------------------------------------------
// writing consensus map output
//-------------------------------------------------------------
if (out_type == FileTypes::CONSENSUSXML)
//......... part of the code is omitted here .........
Example 4: extractChannels
//......... part of the code is omitted here .........
if (selected_activation_ == "" || activation_predicate(*it))
{
// check if precursor is available
if (it->getPrecursors().empty())
{
throw Exception::MissingInformation(__FILE__, __LINE__, __PRETTY_FUNCTION__, String("No precursor information given for scan native ID ") + it->getNativeID() + " with RT " + String(it->getRT()));
}
// check precursor constraints
if (!isValidPrecursor_(it->getPrecursors()[0]))
{
LOG_DEBUG << "Skip spectrum " << it->getNativeID() << ": Precursor doesn't fulfill all constraints." << std::endl;
continue;
}
// check precursor purity if we have a valid precursor ..
if (prec_spec != ms_exp_data.end())
{
const DoubleReal purity = computePrecursorPurity_(it, prec_spec);
if (purity < min_precursor_purity_)
{
LOG_DEBUG << "Skip spectrum " << it->getNativeID() << ": Precursor purity is below the threshold. [purity = " << purity << "]" << std::endl;
continue;
}
}
else
{
LOG_INFO << "No precursor available for spectrum: " << it->getNativeID() << std::endl;
}
// store RT&MZ of parent ion as centroid of ConsensusFeature
ConsensusFeature cf;
cf.setUniqueId();
cf.setRT(it->getRT());
cf.setMZ(it->getPrecursors()[0].getMZ());
Peak2D channel_value;
channel_value.setRT(it->getRT());
// for each channel
UInt64 map_index = 0;
Peak2D::IntensityType overall_intensity = 0;
for (IsobaricQuantitationMethod::IsobaricChannelList::const_iterator cl_it = quant_method_->getChannelInformation().begin();
cl_it != quant_method_->getChannelInformation().end();
++cl_it)
{
// set mz-position of channel
channel_value.setMZ(cl_it->center);
// reset intensity
channel_value.setIntensity(0);
// as every evaluation requires time, we cache the MZEnd iterator
const MSExperiment<Peak1D>::SpectrumType::ConstIterator mz_end = it->MZEnd(cl_it->center + reporter_mass_shift_);
// add up all signals
for (MSExperiment<Peak1D>::SpectrumType::ConstIterator mz_it = it->MZBegin(cl_it->center - reporter_mass_shift_);
mz_it != mz_end;
++mz_it)
{
channel_value.setIntensity(channel_value.getIntensity() + mz_it->getIntensity());
}
// discard contribution of this channel as it is below the required intensity threshold
if (channel_value.getIntensity() < min_reporter_intensity_)
{
channel_value.setIntensity(0);
}
overall_intensity += channel_value.getIntensity();
// add channel to ConsensusFeature
cf.insert(map_index++, channel_value, element_index);
} // ! channel_iterator
// check if we keep this feature or if it contains low-intensity quantifications
if (remove_low_intensity_quantifications_ && hasLowIntensityReporter_(cf))
{
continue;
}
// check featureHandles are not empty
if (overall_intensity == 0)
{
cf.setMetaValue("all_empty", String("true"));
}
cf.setIntensity(overall_intensity);
consensus_map.push_back(cf);
// the tandem-scan in the order they appear in the experiment
++element_index;
}
} // ! Experiment iterator
/// add meta information to the map
registerChannelsInOutputMap_(consensus_map);
}
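The per-channel loop above sums every peak intensity within ±reporter_mass_shift_ of the channel's m/z center via MZBegin/MZEnd. The windowed sum itself can be sketched on a plain sorted peak list with the standard library alone; the data are illustrative and the exact boundary semantics of MZBegin/MZEnd are not claimed here.
#include <algorithm>
#include <iostream>
#include <vector>

struct Peak { double mz; double intensity; };

// Sum intensities of peaks whose m/z lies in [center - shift, center + shift],
// mirroring the windowed accumulation in extractChannels().
// 'peaks' must be sorted by m/z, just like the sorted spectra in the tool.
double windowedIntensity(const std::vector<Peak>& peaks, double center, double shift)
{
  auto by_mz = [](const Peak& p, double mz) { return p.mz < mz; };
  auto lo = std::lower_bound(peaks.begin(), peaks.end(), center - shift, by_mz);
  double sum = 0.0;
  for (auto it = lo; it != peaks.end() && it->mz <= center + shift; ++it)
  {
    sum += it->intensity;
  }
  return sum;
}

int main()
{
  std::vector<Peak> peaks = {{113.9, 5.0}, {114.10, 40.0}, {114.12, 10.0}, {115.1, 30.0}};
  std::cout << windowedIntensity(peaks, 114.1, 0.05) << '\n';  // 50
  return 0;
}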
Example 5: run
void SimplePairFinder::run(const std::vector<ConsensusMap> & input_maps, ConsensusMap & result_map)
{
if (input_maps.size() != 2)
throw Exception::IllegalArgument(__FILE__, __LINE__, OPENMS_PRETTY_FUNCTION, "exactly two input maps required");
checkIds_(input_maps);
// progress dots
Int progress_dots = 0;
if (this->param_.exists("debug:progress_dots"))
{
progress_dots = (Int) this->param_.getValue("debug:progress_dots");
}
Int number_of_considered_element_pairs = 0;
// For each element in map 0, find its best friend in map 1
std::vector<UInt> best_companion_index_0(input_maps[0].size(), UInt(-1));
std::vector<double> best_companion_quality_0(input_maps[0].size(), 0);
for (UInt fi0 = 0; fi0 < input_maps[0].size(); ++fi0)
{
double best_quality = -std::numeric_limits<double>::max();
for (UInt fi1 = 0; fi1 < input_maps[1].size(); ++fi1)
{
double quality = similarity_(input_maps[0][fi0], input_maps[1][fi1]);
if (quality > best_quality)
{
best_quality = quality;
best_companion_index_0[fi0] = fi1;
}
++number_of_considered_element_pairs;
if (progress_dots && !(number_of_considered_element_pairs % progress_dots))
{
std::cout << '-' << std::flush;
}
}
best_companion_quality_0[fi0] = best_quality;
}
// For each element in map 1, find its best friend in map 0
std::vector<UInt> best_companion_index_1(input_maps[1].size(), UInt(-1));
std::vector<double> best_companion_quality_1(input_maps[1].size(), 0);
for (UInt fi1 = 0; fi1 < input_maps[1].size(); ++fi1)
{
double best_quality = -std::numeric_limits<double>::max();
for (UInt fi0 = 0; fi0 < input_maps[0].size(); ++fi0)
{
double quality = similarity_(input_maps[0][fi0], input_maps[1][fi1]);
if (quality > best_quality)
{
best_quality = quality;
best_companion_index_1[fi1] = fi0;
}
++number_of_considered_element_pairs;
if (progress_dots && !(number_of_considered_element_pairs % progress_dots))
{
std::cout << '+' << std::flush;
}
}
best_companion_quality_1[fi1] = best_quality;
}
// And if both like each other, they become a pair.
// element_pairs_->clear();
for (UInt fi0 = 0; fi0 < input_maps[0].size(); ++fi0)
{
// fi0 likes someone ...
if (best_companion_quality_0[fi0] > pair_min_quality_)
{
// ... who likes him too ...
UInt best_companion_of_fi0 = best_companion_index_0[fi0];
if (best_companion_index_1[best_companion_of_fi0] == fi0 &&
best_companion_quality_1[best_companion_of_fi0] > pair_min_quality_
)
{
ConsensusFeature f;
f.insert(input_maps[0][fi0]);
f.insert(input_maps[1][best_companion_of_fi0]);
f.computeConsensus();
f.setQuality(best_companion_quality_0[fi0] + best_companion_quality_1[best_companion_of_fi0]);
result_map.push_back(f);
}
}
}
return;
}
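Example 5 accepts a pair only when the two elements are mutual best matches and the shared best-match quality exceeds pair_min_quality_. The same reciprocal best-match rule, stripped of all OpenMS types and applied to a precomputed similarity matrix, can be sketched as follows (standard library only, illustrative values):
#include <cstddef>
#include <iostream>
#include <utility>
#include <vector>

// Reciprocal best-match pairing over a similarity matrix sim[i][j] between
// element i of map 0 and element j of map 1, mirroring the pairing rule in
// SimplePairFinder::run().
std::vector<std::pair<std::size_t, std::size_t>>
reciprocalBestMatches(const std::vector<std::vector<double>>& sim, double min_quality)
{
  std::vector<std::pair<std::size_t, std::size_t>> pairs;
  const std::size_t n0 = sim.size();
  const std::size_t n1 = n0 ? sim[0].size() : 0;
  if (n0 == 0 || n1 == 0) return pairs;

  // best partner in map 1 for a given element of map 0
  auto bestInRow = [&](std::size_t i) {
    std::size_t best = 0;
    for (std::size_t j = 1; j < n1; ++j)
      if (sim[i][j] > sim[i][best]) best = j;
    return best;
  };
  // best partner in map 0 for a given element of map 1
  auto bestInCol = [&](std::size_t j) {
    std::size_t best = 0;
    for (std::size_t i = 1; i < n0; ++i)
      if (sim[i][j] > sim[best][j]) best = i;
    return best;
  };

  for (std::size_t i = 0; i < n0; ++i)
  {
    const std::size_t j = bestInRow(i);
    // mutual best friends, and their shared quality clears the threshold
    if (bestInCol(j) == i && sim[i][j] > min_quality)
      pairs.emplace_back(i, j);
  }
  return pairs;
}

int main()
{
  std::vector<std::vector<double>> sim = {{0.9, 0.2}, {0.3, 0.8}, {0.85, 0.1}};
  for (const auto& p : reciprocalBestMatches(sim, 0.5))
    std::cout << p.first << " <-> " << p.second << '\n';  // "0 <-> 0" and "1 <-> 1"
  return 0;
}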
Example 6: run
//......... part of the code is omitted here .........
#endif
consensus_map.clear(false);
// set <mapList> header
Int index_cnt = 0;
for (ChannelMapType::const_iterator cm_it = channel_map_.begin(); cm_it != channel_map_.end(); ++cm_it)
{
// structure of Map cm_it
// first == channel-name as Int e.g. 114
// second == ChannelInfo struct
ConsensusMap::FileDescription channel_as_map;
// label is the channel + description provided in the Params
if (itraq_type_ != TMT_SIXPLEX)
channel_as_map.label = "iTRAQ_" + String(cm_it->second.name) + "_" + String(cm_it->second.description);
else
channel_as_map.label = "TMT_" + String(cm_it->second.name) + "_" + String(cm_it->second.description);
channel_as_map.size = ms_exp_MS2.size();
//TODO what about .filename? leave empty?
// add some more MetaInfo
channel_as_map.setMetaValue("channel_name", cm_it->second.name);
channel_as_map.setMetaValue("channel_id", cm_it->second.id);
channel_as_map.setMetaValue("channel_description", cm_it->second.description);
channel_as_map.setMetaValue("channel_center", cm_it->second.center);
channel_as_map.setMetaValue("channel_active", String(cm_it->second.active ? "true" : "false"));
consensus_map.getFileDescriptions()[index_cnt++] = channel_as_map;
}
// create consensusElements
Peak2D::CoordinateType allowed_deviation = (Peak2D::CoordinateType) param_.getValue("reporter_mass_shift");
// now we have picked data
// --> assign peaks to channels
UInt element_index(0);
for (MSExperiment<>::ConstIterator it = ms_exp_MS2.begin(); it != ms_exp_MS2.end(); ++it)
{
// store RT&MZ of parent ion as centroid of ConsensusFeature
ConsensusFeature cf;
cf.setUniqueId();
cf.setRT(it->getRT());
if (it->getPrecursors().size() >= 1)
{
cf.setMZ(it->getPrecursors()[0].getMZ());
}
else
{
throw Exception::MissingInformation(__FILE__, __LINE__, __PRETTY_FUNCTION__, String("No precursor information given for scan native ID ") + String(it->getNativeID()) + " with RT " + String(it->getRT()));
}
Peak2D channel_value;
channel_value.setRT(it->getRT());
// for each channel
Int index = 0;
Peak2D::IntensityType overall_intensity = 0;
for (ChannelMapType::const_iterator cm_it = channel_map_.begin(); cm_it != channel_map_.end(); ++cm_it)
{
// set mz-position of channel
channel_value.setMZ(cm_it->second.center);
// reset intensity
channel_value.setIntensity(0);
// add up all signals
for (MSExperiment<>::SpectrumType::ConstIterator mz_it = it->MZBegin(cm_it->second.center - allowed_deviation);
mz_it != it->MZEnd(cm_it->second.center + allowed_deviation);
++mz_it)
{
channel_value.setIntensity(channel_value.getIntensity() + mz_it->getIntensity());
}
overall_intensity += channel_value.getIntensity();
// add channel to ConsensusFeature
cf.insert(index++, channel_value, element_index);
} // ! channel_iterator
// check featureHandles are not empty
if (overall_intensity == 0)
{
cf.setMetaValue("all_empty", String("true"));
}
cf.setIntensity(overall_intensity);
consensus_map.push_back(cf);
// the tandem-scan in the order they appear in the experiment
++element_index;
} // ! Experiment iterator
#ifdef ITRAQ_DEBUG
std::cout << "processed " << element_index << " scans" << std::endl;
#endif
consensus_map.setExperimentType("itraq");
return;
}
Example 7: recomputeConsensus_
void BaseLabeler::recomputeConsensus_(const FeatureMapSim & simulated_features)
{
// iterate over all given features stored in the labeling consensus and try to find
// the corresponding feature in the feature map
// build index for faster access
Map<String, IntList> id_map;
Map<UInt64, Size> features_per_labeled_map;
for (Size i = 0; i < simulated_features.size(); ++i)
{
if (simulated_features[i].metaValueExists("parent_feature"))
{
LOG_DEBUG << "Checking [" << i << "]: " << simulated_features[i].getPeptideIdentifications()[0].getHits()[0].getSequence().toString()
<< " with charge " << simulated_features[i].getCharge() << " (" << simulated_features[i].getMetaValue("charge_adducts") << ")"
<< " parent was " << simulated_features[i].getMetaValue("parent_feature") << std::endl;
id_map[simulated_features[i].getMetaValue("parent_feature")].push_back((Int)i);
UInt64 map_index = 0;
if (simulated_features[i].metaValueExists("map_index"))
{
map_index = simulated_features[i].getMetaValue("map_index");
}
++features_per_labeled_map[map_index];
}
}
for (Map<String, IntList>::iterator it = id_map.begin(); it != id_map.end(); ++it)
{
LOG_DEBUG << it->first << " " << it->second << std::endl;
}
// new consensus map
ConsensusMap new_cm;
// initialize submaps in consensus map
for (Map<UInt64, Size>::Iterator it = features_per_labeled_map.begin(); it != features_per_labeled_map.end(); ++it)
{
new_cm.getFileDescriptions()[it->first].size = it->second;
new_cm.getFileDescriptions()[it->first].unique_id = simulated_features.getUniqueId();
}
for (ConsensusMap::iterator cm_iter = consensus_.begin(); cm_iter != consensus_.end(); ++cm_iter)
{
bool complete = true;
LOG_DEBUG << "Checking consensus feature containing: " << std::endl;
// check if we have all elements of current CF in the new feature map (simulated_features)
for (ConsensusFeature::iterator cf_iter = (*cm_iter).begin(); cf_iter != (*cm_iter).end(); ++cf_iter)
{
complete &= id_map.has(String((*cf_iter).getUniqueId()));
LOG_DEBUG << "\t" << String((*cf_iter).getUniqueId()) << std::endl;
}
if (complete)
{
// get all elements sorted by charge state; since the same charge can be achieved by different
// adduct compositions we use the adduct-string as indicator to find the groups
Map<String, std::set<FeatureHandle, FeatureHandle::IndexLess> > charge_mapping;
for (ConsensusFeature::iterator cf_iter = (*cm_iter).begin(); cf_iter != (*cm_iter).end(); ++cf_iter)
{
IntList feature_indices = id_map[String((*cf_iter).getUniqueId())];
for (IntList::iterator it = feature_indices.begin(); it != feature_indices.end(); ++it)
{
UInt64 map_index = 0;
if (simulated_features[*it].metaValueExists("map_index"))
{
map_index = simulated_features[*it].getMetaValue("map_index");
}
if (charge_mapping.has(simulated_features[*it].getMetaValue("charge_adducts")))
{
charge_mapping[simulated_features[*it].getMetaValue("charge_adducts")].insert(FeatureHandle(map_index, simulated_features[*it]));
}
else
{
LOG_DEBUG << "Create new set with charge composition " << simulated_features[*it].getMetaValue("charge_adducts") << std::endl;
std::set<FeatureHandle, FeatureHandle::IndexLess> fh_set;
fh_set.insert(FeatureHandle(map_index, simulated_features[*it]));
charge_mapping.insert(std::make_pair(simulated_features[*it].getMetaValue("charge_adducts"), fh_set));
}
}
}
// create new consensus feature from derived features (separated by charge, if charge != 0)
for (Map<String, std::set<FeatureHandle, FeatureHandle::IndexLess> >::const_iterator charge_group_it = charge_mapping.begin();
charge_group_it != charge_mapping.end();
++charge_group_it)
{
ConsensusFeature cf;
cf.setCharge((*(*charge_group_it).second.begin()).getCharge());
cf.setMetaValue("charge_adducts", charge_group_it->first);
std::vector<PeptideIdentification> ids;
for (std::set<FeatureHandle, FeatureHandle::IndexLess>::const_iterator fh_it = (charge_group_it->second).begin(); fh_it != (charge_group_it->second).end(); ++fh_it)
{
cf.insert(*fh_it);
//......... part of the code is omitted here .........
Example 8: postDigestHook
void SILACLabeler::postDigestHook(SimTypes::FeatureMapSimVector& features_to_simulate)
{
SimTypes::FeatureMapSim& light_channel_features = features_to_simulate[0];
SimTypes::FeatureMapSim& medium_channel_features = features_to_simulate[1];
// merge the generated feature maps and create consensus
SimTypes::FeatureMapSim final_feature_map = mergeProteinIdentificationsMaps_(features_to_simulate);
if (features_to_simulate.size() == 2)
{
Map<String, Feature> unlabeled_features_index;
for (SimTypes::FeatureMapSim::iterator unlabeled_features_iter = light_channel_features.begin();
unlabeled_features_iter != light_channel_features.end();
++unlabeled_features_iter)
{
(*unlabeled_features_iter).ensureUniqueId();
unlabeled_features_index.insert(std::make_pair(
(*unlabeled_features_iter).getPeptideIdentifications()[0].getHits()[0].getSequence().toString()
,
*unlabeled_features_iter
));
}
// iterate over second map
for (SimTypes::FeatureMapSim::iterator labeled_feature_iter = medium_channel_features.begin(); labeled_feature_iter != medium_channel_features.end(); ++labeled_feature_iter)
{
const String unmodified_sequence = getUnmodifiedSequence_(*labeled_feature_iter, medium_channel_arginine_label_, medium_channel_lysine_label_);
// guarantee uniqueness
(*labeled_feature_iter).ensureUniqueId();
// check if we have a pair
if (unlabeled_features_index.has(unmodified_sequence))
{
// own scope as we don't know what happens to 'f_modified' once we call erase() below
Feature& unlabeled_feature = unlabeled_features_index[unmodified_sequence];
// guarantee uniqueness
unlabeled_feature.ensureUniqueId();
// feature has a SILAC Label and is not equal to non-labeled
if ((*labeled_feature_iter).getPeptideIdentifications()[0].getHits()[0].getSequence().isModified())
{
// add features to final map
final_feature_map.push_back(*labeled_feature_iter);
final_feature_map.push_back(unlabeled_feature);
// create consensus feature
ConsensusFeature cf;
cf.insert(MEDIUM_FEATURE_MAPID_, *labeled_feature_iter);
cf.insert(LIGHT_FEATURE_MAPID_, unlabeled_feature);
cf.ensureUniqueId();
consensus_.push_back(cf);
// remove unlabeled feature
unlabeled_features_index.erase(unmodified_sequence);
}
else
{
// merge features since they are equal
Feature final_feature = mergeFeatures_(*labeled_feature_iter, unmodified_sequence, unlabeled_features_index, 1, 2);
final_feature_map.push_back(final_feature);
}
}
else // no SILAC pair, just add the labeled one
{
final_feature_map.push_back(*labeled_feature_iter);
}
}
// add singletons from unlabeled channel
// clean up unlabeled_index
for (Map<String, Feature>::iterator unlabeled_index_iter = unlabeled_features_index.begin(); unlabeled_index_iter != unlabeled_features_index.end(); ++unlabeled_index_iter)
{
// the single ones from c0
final_feature_map.push_back(unlabeled_index_iter->second);
}
}
// merge three channels
if (features_to_simulate.size() == 3)
{
// index of unlabeled channel
Map<String, Feature> unlabeled_features_index;
for (SimTypes::FeatureMapSim::iterator unlabeled_features_iter = light_channel_features.begin();
unlabeled_features_iter != light_channel_features.end();
++unlabeled_features_iter)
{
(*unlabeled_features_iter).ensureUniqueId();
unlabeled_features_index.insert(std::make_pair(
(*unlabeled_features_iter).getPeptideIdentifications()[0].getHits()[0].getSequence().toString()
,
*unlabeled_features_iter
));
}
// index of labeled channel
Map<String, Feature> medium_features_index;
for (SimTypes::FeatureMapSim::iterator labeled_features_iter = medium_channel_features.begin();
//......... part of the code is omitted here .........
Example 9: load
//......... part of the code is omitted here .........
String("Failed parsing in line ")
+ String((input_it - input.begin()) + 1)
+ ": At least three columns are needed! (got "
+ String(parts.size())
+ ")\nOffending line: '"
+ line_trimmed
+ "' (line "
+ String((input_it - input.begin()) + 1)
+ ")\n");
}
ConsensusFeature cf;
cf.setUniqueId();
try
{
// Convert values. Will return -1 if not available.
rt = checkedToDouble_(parts, 0);
mz = checkedToDouble_(parts, 1);
it = checkedToDouble_(parts, 2);
ch = checkedToInt_(parts, 3);
cf.setRT(rt);
cf.setMZ(mz);
cf.setIntensity(it);
if (input_type != TYPE_OLD_NOCHARGE)
cf.setCharge(ch);
}
catch (Exception::BaseException&)
{
throw Exception::ParseError(__FILE__, __LINE__, __PRETTY_FUNCTION__, "", String("Failed parsing in line ") + String((input_it - input.begin()) + 1) + ": Could not convert the first three columns to a number!\nOffending line: '" + line_trimmed + "' (line " + String((input_it - input.begin()) + 1) + ")\n");
}
// Check all features in one line
for (Size j = 1; j < input_features; ++j)
{
try
{
Feature f;
f.setUniqueId();
// Convert values. Will return -1 if not available.
rt = checkedToDouble_(parts, j * 4 + 0);
mz = checkedToDouble_(parts, j * 4 + 1);
it = checkedToDouble_(parts, j * 4 + 2);
ch = checkedToInt_(parts, j * 4 + 3);
// Only accept features with at least RT and MZ set
if (rt != -1 && mz != -1)
{
f.setRT(rt);
f.setMZ(mz);
f.setIntensity(it);
f.setCharge(ch);
cf.insert(j - 1, f);
}
}
catch (Exception::BaseException&)
{
throw Exception::ParseError(__FILE__, __LINE__, __PRETTY_FUNCTION__, "", String("Failed parsing in line ") + String((input_it - input.begin()) + 1) + ": Could not convert one of the four sub-feature columns (starting at column " + (j * 4 + 1) + ") to a number! Is the correct separator specified?\nOffending line: '" + line_trimmed + "' (line " + String((input_it - input.begin()) + 1) + ")\n");
}
}
//parse meta data
for (Size j = input_features * 4; j < parts.size(); ++j)
{
String part_trimmed = parts[j];
part_trimmed.trim();
if (part_trimmed != "")
{
//check if column name is ok
if (headers.size() <= j || headers[j] == "")
{
throw Exception::ParseError(__FILE__, __LINE__, __PRETTY_FUNCTION__, "",
String("Error: Missing meta data header for column ") + (j + 1) + "!"
+ String("Offending header line: '") + header_trimmed + "' (line 1)");
}
//add meta value
cf.setMetaValue(headers[j], part_trimmed);
}
}
//insert feature to map
consensus_map.push_back(cf);
}
// register FileDescriptions
ConsensusMap::FileDescription fd;
fd.filename = filename;
fd.size = consensus_map.size();
Size maps = std::max(input_features - 1, Size(1)); // it's either a simple feature or a consensus map
// (in this case the 'input_features' includes the centroid, which we do not count)
for (Size i = 0; i < maps; ++i)
{
fd.label = String("EDTA_Map ") + String(i);
consensus_map.getFileDescriptions()[i] = fd;
}
}
Example 10: makeConsensusFeature_
void QTClusterFinder::makeConsensusFeature_(list<QTCluster> & clustering,
ConsensusFeature & feature, OpenMSBoost::unordered_map<GridFeature *,
std::vector< QTCluster * > > & element_mapping)
{
// find the best cluster (a valid cluster with the highest score)
list<QTCluster>::iterator best = clustering.begin();
while (best != clustering.end() && best->isInvalid()) {++best;}
for (list<QTCluster>::iterator it = best;
it != clustering.end(); ++it)
{
if (!it->isInvalid())
{
if (it->getQuality() > best->getQuality())
{
best = it;
}
}
}
// no more clusters to process -> clear clustering and return
if (best == clustering.end())
{
clustering.clear();
return;
}
OpenMSBoost::unordered_map<Size, GridFeature *> elements;
best->getElements(elements);
// cout << "Elements: " << elements.size() << " with best " << best->getQuality() << " invalid " << best->isInvalid() << endl;
// create consensus feature from best cluster:
feature.setQuality(best->getQuality());
for (OpenMSBoost::unordered_map<Size, GridFeature *>::const_iterator it = elements.begin();
it != elements.end(); ++it)
{
feature.insert(it->first, it->second->getFeature());
}
feature.computeConsensus();
// update the clustering:
// 1. remove current "best" cluster
// 2. update all clusters accordingly and invalidate elements whose central
// element is removed
best->setInvalid();
for (OpenMSBoost::unordered_map<Size, GridFeature *>::const_iterator it = elements.begin();
it != elements.end(); ++it)
{
for (std::vector< QTCluster* >::iterator
cluster = element_mapping[&(*it->second)].begin();
cluster != element_mapping[&(*it->second)].end(); ++cluster)
{
// we do not want to update invalid features (saves time and does not
// recompute the quality)
if (!(*cluster)->isInvalid())
{
if (!(*cluster)->update(elements)) // cluster is invalid (center point removed):
{
(*cluster)->setInvalid();
}
}
}
}
}
Example 11: postDigestHook
void ICPLLabeler::postDigestHook(SimTypes::FeatureMapSimVector& features_to_simulate)
{
SimTypes::FeatureMapSim& light_labeled_features = features_to_simulate[0];
SimTypes::FeatureMapSim& medium_labeled_features = features_to_simulate[1];
if (param_.getValue("label_proteins") == "false") // loop for peptide-labeling (post-digest-labeling)
{
// iterate over first map for light labeling
for (SimTypes::FeatureMapSim::iterator lf_iter = light_labeled_features.begin(); lf_iter != light_labeled_features.end(); ++lf_iter)
{
lf_iter->ensureUniqueId();
addModificationToPeptideHit_(*lf_iter, light_channel_label_);
}
// iterate over second map for medium labeling
for (SimTypes::FeatureMapSim::iterator lf_iter = medium_labeled_features.begin(); lf_iter != medium_labeled_features.end(); ++lf_iter)
{
lf_iter->ensureUniqueId();
addModificationToPeptideHit_(*lf_iter, medium_channel_label_);
}
if (features_to_simulate.size() == 3) //third channel labeling can only be done, if a third channel exist
{
SimTypes::FeatureMapSim& heavy_labeled_features = features_to_simulate[2];
// iterate over third map
for (SimTypes::FeatureMapSim::iterator lf_iter = heavy_labeled_features.begin(); lf_iter != heavy_labeled_features.end(); ++lf_iter)
{
lf_iter->ensureUniqueId();
addModificationToPeptideHit_(*lf_iter, heavy_channel_label_);
}
}
}
// merge the generated feature maps and create consensus
SimTypes::FeatureMapSim final_feature_map = mergeProteinIdentificationsMaps_(features_to_simulate);
if (features_to_simulate.size() == 2) // merge_modus for two FeatureMaps
{
// create index of light channel features for easy mapping of medium-to-light channel
Map<String, Feature> light_labeled_features_index;
for (SimTypes::FeatureMapSim::iterator light_labeled_features_iter = light_labeled_features.begin();
light_labeled_features_iter != light_labeled_features.end();
++light_labeled_features_iter)
{
(*light_labeled_features_iter).ensureUniqueId();
light_labeled_features_index.insert(std::make_pair(
getUnmodifiedAASequence_((*light_labeled_features_iter), light_channel_label_),
*light_labeled_features_iter
));
}
// iterate over second map
for (SimTypes::FeatureMapSim::iterator medium_labeled_feature_iter = medium_labeled_features.begin(); medium_labeled_feature_iter != medium_labeled_features.end(); ++medium_labeled_feature_iter)
{
AASequence medium_labeled_feature_sequence = (*medium_labeled_feature_iter).getPeptideIdentifications()[0].getHits()[0].getSequence();
// guarantee uniqueness
(*medium_labeled_feature_iter).ensureUniqueId();
// check if we have a pair
if (light_labeled_features_index.has(getUnmodifiedAASequence_((*medium_labeled_feature_iter), medium_channel_label_)))
{
// own scope as we don't know what happens to 'f_modified' once we call erase() below
Feature& light_labeled_feature = light_labeled_features_index[getUnmodifiedAASequence_((*medium_labeled_feature_iter), medium_channel_label_)];
// guarantee uniqueness
light_labeled_feature.ensureUniqueId();
if (medium_labeled_feature_sequence.isModified()) // feature has a medium ICPL-Label and is not equal to light-labeled
{
// add features to final map
final_feature_map.push_back(*medium_labeled_feature_iter);
final_feature_map.push_back(light_labeled_feature);
// create consensus feature
ConsensusFeature cf;
cf.insert(MEDIUM_FEATURE_MAPID_, *medium_labeled_feature_iter);
cf.insert(LIGHT_FEATURE_MAPID_, light_labeled_feature);
consensus_.push_back(cf);
// remove light-labeled feature
light_labeled_features_index.erase(getUnmodifiedAASequence_((*medium_labeled_feature_iter), medium_channel_label_));
}
else
{
// merge features since they are equal
Feature final_feature = mergeFeatures_(*medium_labeled_feature_iter, medium_labeled_feature_sequence, light_labeled_features_index);
final_feature_map.push_back(final_feature);
}
}
else // no ICPL pair, just add the medium-labeled one
{
final_feature_map.push_back(*medium_labeled_feature_iter);
}
}
// add singletons from light-labeled channel
// clean up light-labeled_index
for (Map<String, Feature>::iterator light_labeled_index_iter = light_labeled_features_index.begin(); light_labeled_index_iter != light_labeled_features_index.end(); ++light_labeled_index_iter)
//......... part of the code is omitted here .........