This article collects typical usage examples of the C++ method ConsensusMap::getFileDescriptions. If you are unsure what ConsensusMap::getFileDescriptions does, how to call it, or simply want to see it used in real code, the curated examples below should help. You can also explore further usage examples of the ConsensusMap class that this method belongs to.
The following shows 15 code examples of ConsensusMap::getFileDescriptions, sorted by popularity by default. You can upvote the examples you like or find useful; your feedback helps the system recommend better C++ examples.
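Before diving into the examples, here is a minimal, self-contained sketch of the basic call pattern. It is not taken from any of the examples below; the filenames and labels are invented, and it assumes the older OpenMS 1.x API used throughout this page, where getFileDescriptions() returns a mutable map from column index to ConsensusMap::FileDescription (later OpenMS releases renamed this part of the interface):

#include <OpenMS/KERNEL/ConsensusMap.h>
#include <iostream>

using namespace OpenMS;

int main()
{
  ConsensusMap cm;

  // register two hypothetical input files as columns of the consensus map
  ConsensusMap::FileDescription light;
  light.filename = "sample_light.featureXML"; // invented filename, for illustration only
  light.label = "light";
  cm.getFileDescriptions()[0] = light;

  ConsensusMap::FileDescription heavy;
  heavy.filename = "sample_heavy.featureXML"; // invented filename, for illustration only
  heavy.label = "heavy";
  cm.getFileDescriptions()[1] = heavy;

  // read the registered descriptions back: column index -> FileDescription
  for (ConsensusMap::FileDescriptions::const_iterator it = cm.getFileDescriptions().begin();
       it != cm.getFileDescriptions().end(); ++it)
  {
    std::cout << "map " << it->first << ": " << it->second.filename
              << " (label: " << it->second.label << ")\n";
  }
  return 0;
}

As the examples below show, the same accessor is used in both directions: producers (channel extractors, file readers, grouping tools) fill getFileDescriptions() with one entry per input file or channel, while consumers (normalizers, statistics code, tests) read getFileDescriptions().size() or look individual entries up by map index.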
Example 1: InvalidParameter
boost::shared_ptr<IsobaricQuantitationMethod> IBSpectraFile::guessExperimentType_(const ConsensusMap& cm)
{
  if (cm.getExperimentType() != "labeled_MS2" && cm.getExperimentType() != "itraq")
  {
    throw Exception::InvalidParameter(__FILE__,
                                      __LINE__,
                                      __PRETTY_FUNCTION__,
                                      "Given ConsensusMap does not hold any isobaric quantification data.");
  }

  // we take the map count as approximation
  if (cm.getFileDescriptions().size() == 4)
  {
    return boost::shared_ptr<IsobaricQuantitationMethod>(new ItraqFourPlexQuantitationMethod);
  }
  else if (cm.getFileDescriptions().size() == 6)
  {
    return boost::shared_ptr<IsobaricQuantitationMethod>(new TMTSixPlexQuantitationMethod);
  }
  else if (cm.getFileDescriptions().size() == 8)
  {
    return boost::shared_ptr<IsobaricQuantitationMethod>(new ItraqEightPlexQuantitationMethod);
  }
  else
  {
    throw Exception::InvalidParameter(__FILE__,
                                      __LINE__,
                                      __PRETTY_FUNCTION__,
                                      "Could not guess isobaric quantification data from ConsensusMap due to non-matching number of input maps.");
  }
}
Example 2: generateSeedLists
void SeedListGenerator::generateSeedLists(const ConsensusMap& consensus,
                                          Map<UInt64, SeedList>& seed_lists)
{
  seed_lists.clear();
  // iterate over all consensus features...
  for (ConsensusMap::ConstIterator cons_it = consensus.begin();
       cons_it != consensus.end(); ++cons_it)
  {
    DPosition<2> point(cons_it->getRT(), cons_it->getMZ());
    // for each sub-map in the consensus map, add a seed at the position of
    // this consensus feature:
    for (ConsensusMap::FileDescriptions::const_iterator file_it =
           consensus.getFileDescriptions().begin(); file_it !=
           consensus.getFileDescriptions().end(); ++file_it)
      seed_lists[file_it->first].push_back(point);
    // for each feature contained in the consensus feature, remove the seed of
    // the corresponding map:
    for (ConsensusFeature::HandleSetType::const_iterator feat_it =
           cons_it->getFeatures().begin(); feat_it !=
           cons_it->getFeatures().end(); ++feat_it)
    {
      seed_lists[feat_it->getMapIndex()].pop_back();
    }
    // this leaves seeds for maps where no feature was found near the
    // consensus position
  }
}
Example 3: transferSubelements
void FeatureGroupingAlgorithm::transferSubelements(const vector<ConsensusMap>& maps, ConsensusMap& out) const
{
  // accumulate file descriptions from the input maps:
  // cout << "Updating file descriptions..." << endl;
  out.getFileDescriptions().clear();
  // mapping: (map index, original id) -> new id
  map<pair<Size, UInt64>, Size> mapid_table;
  for (Size i = 0; i < maps.size(); ++i)
  {
    const ConsensusMap& consensus = maps[i];
    for (ConsensusMap::FileDescriptions::const_iterator desc_it = consensus.getFileDescriptions().begin(); desc_it != consensus.getFileDescriptions().end(); ++desc_it)
    {
      Size counter = mapid_table.size();
      mapid_table[make_pair(i, desc_it->first)] = counter;
      out.getFileDescriptions()[counter] = desc_it->second;
    }
  }

  // look-up table: input map -> unique ID -> consensus feature
  // cout << "Creating look-up table..." << endl;
  vector<map<UInt64, ConsensusMap::ConstIterator> > feat_lookup(maps.size());
  for (Size i = 0; i < maps.size(); ++i)
  {
    const ConsensusMap& consensus = maps[i];
    for (ConsensusMap::ConstIterator feat_it = consensus.begin();
         feat_it != consensus.end(); ++feat_it)
    {
      // do NOT use "id_lookup[i][feat_it->getUniqueId()] = feat_it;" here as
      // you will get "attempt to copy-construct an iterator from a singular
      // iterator" in STL debug mode:
      feat_lookup[i].insert(make_pair(feat_it->getUniqueId(), feat_it));
    }
  }

  // adjust the consensus features:
  // cout << "Adjusting consensus features..." << endl;
  for (ConsensusMap::iterator cons_it = out.begin(); cons_it != out.end(); ++cons_it)
  {
    ConsensusFeature adjusted = ConsensusFeature(
      static_cast<BaseFeature>(*cons_it)); // remove sub-features
    for (ConsensusFeature::HandleSetType::const_iterator sub_it = cons_it->getFeatures().begin(); sub_it != cons_it->getFeatures().end(); ++sub_it)
    {
      UInt64 id = sub_it->getUniqueId();
      Size map_index = sub_it->getMapIndex();
      ConsensusMap::ConstIterator origin = feat_lookup[map_index][id];
      for (ConsensusFeature::HandleSetType::const_iterator handle_it = origin->getFeatures().begin(); handle_it != origin->getFeatures().end(); ++handle_it)
      {
        FeatureHandle handle = *handle_it;
        Size new_id = mapid_table[make_pair(map_index, handle.getMapIndex())];
        handle.setMapIndex(new_id);
        adjusted.insert(handle);
      }
    }
    *cons_it = adjusted;
  }
}
Example 4: group
void FeatureGroupingAlgorithmLabeled::group(const std::vector<FeatureMap<> > & maps, ConsensusMap & out)
{
  // check that the number of maps is ok
  if (maps.size() != 1)
    throw Exception::IllegalArgument(__FILE__, __LINE__, __PRETTY_FUNCTION__, "Exactly one map must be given!");
  if (out.getFileDescriptions().size() != 2)
    throw Exception::IllegalArgument(__FILE__, __LINE__, __PRETTY_FUNCTION__, "Two file descriptions must be set in 'out'!");

  // initialize LabeledPairFinder
  LabeledPairFinder pm;
  pm.setParameters(param_.copy("", true));

  // convert to consensus map
  std::vector<ConsensusMap> input(1);
  ConsensusMap::convert(0, maps[0], input[0]);

  // run
  pm.run(input, out);
}
Example 5: registerChannelsInOutputMap_
void IsobaricChannelExtractor::registerChannelsInOutputMap_(ConsensusMap& consensus_map)
{
  // register the individual channels in the output consensus map
  Int index = 0;
  for (IsobaricQuantitationMethod::IsobaricChannelList::const_iterator cl_it = quant_method_->getChannelInformation().begin();
       cl_it != quant_method_->getChannelInformation().end();
       ++cl_it)
  {
    ConsensusMap::FileDescription channel_as_map;
    // label is the channel + description provided in the Params
    channel_as_map.label = quant_method_->getName() + "_" + cl_it->name;
    // TODO(aiche): number of features need to be set later
    channel_as_map.size = consensus_map.size();
    // add some more MetaInfo
    channel_as_map.setMetaValue("channel_name", cl_it->name);
    channel_as_map.setMetaValue("channel_id", cl_it->id);
    channel_as_map.setMetaValue("channel_description", cl_it->description);
    channel_as_map.setMetaValue("channel_center", cl_it->center);
    consensus_map.getFileDescriptions()[index++] = channel_as_map;
  }
}
Example 6: computeLabelingStatistics_
void IsobaricQuantifier::computeLabelingStatistics_(ConsensusMap& consensus_map_out)
{
  // number of total quantified spectra
  stats_.number_ms2_total = consensus_map_out.size();

  // labeling efficiency statistics
  for (size_t i = 0; i < consensus_map_out.size(); ++i)
  {
    // is whole scan empty?
    if (consensus_map_out[i].getIntensity() == 0) ++stats_.number_ms2_empty;

    // look at single reporters
    for (ConsensusFeature::HandleSetType::const_iterator it_elements = consensus_map_out[i].begin();
         it_elements != consensus_map_out[i].end();
         ++it_elements)
    {
      if (it_elements->getIntensity() == 0)
      {
        String ch_index = consensus_map_out.getFileDescriptions()[it_elements->getMapIndex()].getMetaValue("channel_name");
        ++stats_.empty_channels[ch_index];
      }
    }
  }

  LOG_INFO << "IsobaricQuantifier: skipped " << stats_.number_ms2_empty << " of " << consensus_map_out.size() << " selected scans due to lack of reporter information:\n";
  consensus_map_out.setMetaValue("isoquant:scans_noquant", stats_.number_ms2_empty);
  consensus_map_out.setMetaValue("isoquant:scans_total", consensus_map_out.size());

  LOG_INFO << "IsobaricQuantifier: channels with signal\n";
  for (std::map<String, Size>::const_iterator it_m = stats_.empty_channels.begin();
       it_m != stats_.empty_channels.end();
       ++it_m)
  {
    LOG_INFO << "  channel " << it_m->first << ": " << (consensus_map_out.size() - it_m->second) << " / " << consensus_map_out.size() << " (" << ((consensus_map_out.size() - it_m->second) * 100 / consensus_map_out.size()) << "%)\n";
    consensus_map_out.setMetaValue(String("isoquant:quantifyable_ch") + it_m->first, (consensus_map_out.size() - it_m->second));
  }
}
Example 7: main_
//......... part of the code omitted here .........
    {
      out << ",INT_RAW_" << i + 1 << '_' << j;
    }
  }
  for (UInt i = 0; i <= massShifts[0].size(); ++i)
  {
    for (UInt j = 1; j <= isotopes_per_peptide_max; ++j)
    {
      out << ",MZ_RAW_" << i + 1 << '_' << j;
    }
  }
  out << '\n';

  // write data
  UInt cluster_id = 0;
  for (vector<Clustering *>::const_iterator it = cluster_data.begin(); it != cluster_data.end(); ++it)
  {
    analyzer.generateClusterDebug(out, **it, cluster_id);
  }
}

if (out != "")
{
  LOG_DEBUG << "Generating output consensus map..." << endl;
  ConsensusMap map;
  for (vector<Clustering *>::const_iterator it = cluster_data.begin(); it != cluster_data.end(); ++it)
  {
    analyzer.generateClusterConsensusByCluster(map, **it);
  }

  LOG_DEBUG << "Adding meta data..." << endl;
  // XXX: Need a map per mass shift
  ConsensusMap::FileDescriptions& desc = map.getFileDescriptions();
  Size id = 0;
  for (ConsensusMap::FileDescriptions::iterator it = desc.begin(); it != desc.end(); ++it)
  {
    if (test_mode_) it->second.filename = in; // skip the path, since it's not cross-platform and complicates verification
    else it->second.filename = File::basename(in);
    // Write correct label
    // (this would crash if used without a label!)
    if (id > 0) it->second.label = StringList(analyzer.getSILAClabels()[id - 1]).concatenate(""); // skip first round (empty label is not listed)
    ++id;
  }

  std::set<DataProcessing::ProcessingAction> actions;
  actions.insert(DataProcessing::DATA_PROCESSING);
  actions.insert(DataProcessing::PEAK_PICKING);
  actions.insert(DataProcessing::FILTERING);
  actions.insert(DataProcessing::QUANTITATION);
  addDataProcessing_(map, getProcessingInfo_(actions));

  analyzer.writeConsensus(out, map);

  if (out_mzq != "")
  {
    LOG_DEBUG << "Generating output mzQuantML file..." << endl;
    ConsensusMap numap(map);
    //calc. ratios
    for (ConsensusMap::iterator cit = numap.begin(); cit != numap.end(); ++cit)
    {
      //~ make ratio templates
      std::vector<ConsensusFeature::Ratio> rts;
      for (std::vector<MSQuantifications::Assay>::const_iterator ait = msq.getAssays().begin() + 1; ait != msq.getAssays().end(); ++ait)
      {
        ConsensusFeature::Ratio r;
Example 8: computeCorrelation
vector<double> ConsensusMapNormalizerAlgorithmThreshold::computeCorrelation(const ConsensusMap& map, const double& ratio_threshold, const String& acc_filter, const String& desc_filter)
{
  Size number_of_features = map.size();
  Size number_of_maps = map.getFileDescriptions().size();
  vector<vector<double> > feature_int(number_of_maps);

  // get map with most features, resize feature_int
  UInt map_with_most_features_idx = 0;
  ConsensusMap::FileDescriptions::const_iterator map_with_most_features = map.getFileDescriptions().find(0);
  for (UInt i = 0; i < number_of_maps; i++)
  {
    feature_int[i].resize(number_of_features);
    ConsensusMap::FileDescriptions::const_iterator it = map.getFileDescriptions().find(i);
    if (it->second.size > map_with_most_features->second.size)
    {
      map_with_most_features = it;
      map_with_most_features_idx = i;
    }
  }

  // fill feature_int with intensities
  Size pass_counter = 0;
  ConsensusMap::ConstIterator cf_it;
  UInt idx = 0;
  for (cf_it = map.begin(); cf_it != map.end(); ++cf_it, ++idx)
  {
    if (!ConsensusMapNormalizerAlgorithmMedian::passesFilters_(cf_it, map, acc_filter, desc_filter))
    {
      continue;
    }
    ++pass_counter;
    ConsensusFeature::HandleSetType::const_iterator f_it;
    for (f_it = cf_it->getFeatures().begin(); f_it != cf_it->getFeatures().end(); ++f_it)
    {
      feature_int[f_it->getMapIndex()][idx] = f_it->getIntensity();
    }
  }

  LOG_INFO << endl << "Using " << pass_counter << "/" << map.size() << " consensus features for computing normalization coefficients" << endl << endl;

  // determine ratio
  vector<double> ratio_vector(number_of_maps);
  for (UInt j = 0; j < number_of_maps; j++)
  {
    vector<double> ratios;
    for (UInt k = 0; k < number_of_features; ++k)
    {
      if (feature_int[map_with_most_features_idx][k] != 0.0 && feature_int[j][k] != 0.0)
      {
        double ratio = feature_int[map_with_most_features_idx][k] / feature_int[j][k];
        if (ratio > ratio_threshold && ratio < 1 / ratio_threshold)
        {
          ratios.push_back(ratio);
        }
      }
    }
    if (ratios.empty())
    {
      LOG_WARN << endl << "Not enough features passing filters. Cannot compute normalization coefficients for all maps. Result will be unnormalized." << endl << endl;
      return vector<double>(number_of_maps, 1.0);
    }
    ratio_vector[j] = Math::mean(ratios.begin(), ratios.end());
  }
  return ratio_vector;
}
Example 9: main_
ExitCodes main_(int, const char **)
{
  FeatureGroupingAlgorithmUnlabeled * algorithm = new FeatureGroupingAlgorithmUnlabeled();

  //-------------------------------------------------------------
  // parameter handling
  //-------------------------------------------------------------
  StringList ins;
  ins = getStringList_("in");
  String out = getStringOption_("out");

  //-------------------------------------------------------------
  // check for valid input
  //-------------------------------------------------------------
  // check if all input files have the correct type
  FileTypes::Type file_type = FileHandler::getType(ins[0]);
  for (Size i = 0; i < ins.size(); ++i)
  {
    if (FileHandler::getType(ins[i]) != file_type)
    {
      writeLog_("Error: All input files must be of the same type!");
      return ILLEGAL_PARAMETERS;
    }
  }

  //-------------------------------------------------------------
  // set up algorithm
  //-------------------------------------------------------------
  Param algorithm_param = getParam_().copy("algorithm:", true);
  writeDebug_("Used algorithm parameters", algorithm_param, 3);
  algorithm->setParameters(algorithm_param);
  Size reference_index(0);

  //-------------------------------------------------------------
  // perform grouping
  //-------------------------------------------------------------
  // load input
  ConsensusMap out_map;
  StringList ms_run_locations;
  if (file_type == FileTypes::FEATUREXML)
  {
    // use map with highest number of features as reference:
    Size max_count(0);
    FeatureXMLFile f;
    for (Size i = 0; i < ins.size(); ++i)
    {
      Size s = f.loadSize(ins[i]);
      if (s > max_count)
      {
        max_count = s;
        reference_index = i;
      }
    }

    // Load reference map and input it to the algorithm
    UInt64 ref_id;
    Size ref_size;
    std::vector<PeptideIdentification> ref_pepids;
    std::vector<ProteinIdentification> ref_protids;
    {
      FeatureMap map_ref;
      FeatureXMLFile f_fxml_tmp;
      f_fxml_tmp.getOptions().setLoadConvexHull(false);
      f_fxml_tmp.getOptions().setLoadSubordinates(false);
      f_fxml_tmp.load(ins[reference_index], map_ref);
      algorithm->setReference(reference_index, map_ref);
      ref_id = map_ref.getUniqueId();
      ref_size = map_ref.size();
      ref_pepids = map_ref.getUnassignedPeptideIdentifications();
      ref_protids = map_ref.getProteinIdentifications();
    }

    ConsensusMap dummy;
    // go through all input files and add them to the result one by one
    for (Size i = 0; i < ins.size(); ++i)
    {
      FeatureXMLFile f_fxml_tmp;
      FeatureMap tmp_map;
      f_fxml_tmp.getOptions().setLoadConvexHull(false);
      f_fxml_tmp.getOptions().setLoadSubordinates(false);
      f_fxml_tmp.load(ins[i], tmp_map);

      // copy over information on the primary MS run
      StringList ms_runs;
      tmp_map.getPrimaryMSRunPath(ms_runs);
      ms_run_locations.insert(ms_run_locations.end(), ms_runs.begin(), ms_runs.end());

      if (i != reference_index)
      {
        algorithm->addToGroup(i, tmp_map);
        // store some meta-data about the maps in the "dummy" object -> try to
        // keep the same order as they were given in the input independent of
        // which map is the reference.
        dummy.getFileDescriptions()[i].filename = ins[i];
        dummy.getFileDescriptions()[i].size = tmp_map.size();
        dummy.getFileDescriptions()[i].unique_id = tmp_map.getUniqueId();
//......... part of the code omitted here .........
Example 10: run
/// @brief extracts the iTRAQ channels from the MS data and stores intensity values in a consensus map
///
/// @param ms_exp_data Raw data to read
/// @param consensus_map Output each MS² scan as a consensus feature
/// @throws Exception::MissingInformation if no scans present or MS² scan has no precursor
void ItraqChannelExtractor::run(const MSExperiment<Peak1D>& ms_exp_data, ConsensusMap& consensus_map)
{
  if (ms_exp_data.empty())
  {
    LOG_WARN << "The given file does not contain any conventional peak data, but might"
                " contain chromatograms. This tool currently cannot handle them, sorry.";
    throw Exception::MissingInformation(__FILE__, __LINE__, __PRETTY_FUNCTION__, "Experiment has no scans!");
  }

  MSExperiment<> ms_exp_MS2;
  String mode = (String) param_.getValue("select_activation");
  std::cout << "Selecting scans with activation mode: " << (mode == "" ? "any" : mode) << "\n";
  HasActivationMethod<MSExperiment<Peak1D>::SpectrumType> activation_predicate(ListUtils::create<String>(mode));

  for (size_t idx = 0; idx < ms_exp_data.size(); ++idx)
  {
    if (ms_exp_data[idx].getMSLevel() == 2)
    {
      if (mode == "" || activation_predicate(ms_exp_data[idx]))
      {
        // copy only MS² scans
        ms_exp_MS2.addSpectrum(ms_exp_data[idx]);
      }
      else
      {
        //std::cout << "deleting spectrum # " << idx << " with RT: " << ms_exp_data[idx].getRT() << "\n";
      }
    }
  }

#ifdef ITRAQ_DEBUG
  std::cout << "we have " << ms_exp_MS2.size() << " scans left of level " << ms_exp_MS2[0].getMSLevel() << std::endl;
  std::cout << "run: channel_map_ has " << channel_map_.size() << " entries!" << std::endl;
#endif

  consensus_map.clear(false);

  // set <mapList> header
  Int index_cnt = 0;
  for (ChannelMapType::const_iterator cm_it = channel_map_.begin(); cm_it != channel_map_.end(); ++cm_it)
  {
    // structure of Map cm_it
    //  first  == channel-name as Int e.g. 114
    //  second == ChannelInfo struct
    ConsensusMap::FileDescription channel_as_map;
    // label is the channel + description provided in the Params
    if (itraq_type_ != TMT_SIXPLEX)
      channel_as_map.label = "iTRAQ_" + String(cm_it->second.name) + "_" + String(cm_it->second.description);
    else
      channel_as_map.label = "TMT_" + String(cm_it->second.name) + "_" + String(cm_it->second.description);
    channel_as_map.size = ms_exp_MS2.size();
    //TODO what about .filename? leave empty?
    // add some more MetaInfo
    channel_as_map.setMetaValue("channel_name", cm_it->second.name);
    channel_as_map.setMetaValue("channel_id", cm_it->second.id);
    channel_as_map.setMetaValue("channel_description", cm_it->second.description);
    channel_as_map.setMetaValue("channel_center", cm_it->second.center);
    channel_as_map.setMetaValue("channel_active", String(cm_it->second.active ? "true" : "false"));
    consensus_map.getFileDescriptions()[index_cnt++] = channel_as_map;
  }

  // create consensusElements
  Peak2D::CoordinateType allowed_deviation = (Peak2D::CoordinateType) param_.getValue("reporter_mass_shift");
  // now we have picked data
  // --> assign peaks to channels
  UInt element_index(0);

  for (MSExperiment<>::ConstIterator it = ms_exp_MS2.begin(); it != ms_exp_MS2.end(); ++it)
  {
    // store RT&MZ of parent ion as centroid of ConsensusFeature
    ConsensusFeature cf;
    cf.setUniqueId();
    cf.setRT(it->getRT());
    if (it->getPrecursors().size() >= 1)
    {
      cf.setMZ(it->getPrecursors()[0].getMZ());
    }
    else
    {
      throw Exception::MissingInformation(__FILE__, __LINE__, __PRETTY_FUNCTION__, String("No precursor information given for scan native ID ") + String(it->getNativeID()) + " with RT " + String(it->getRT()));
    }

    Peak2D channel_value;
    channel_value.setRT(it->getRT());
    // for each channel
    Int index = 0;
    Peak2D::IntensityType overall_intensity = 0;
    for (ChannelMapType::const_iterator cm_it = channel_map_.begin(); cm_it != channel_map_.end(); ++cm_it)
    {
      // set mz-position of channel
      channel_value.setMZ(cm_it->second.center);
      // reset intensity
      channel_value.setIntensity(0);
//......... part of the code omitted here .........
Example 11: outputTo
//......... part of the code omitted here .........
    {
      charges[feat[i].getCharge()]++;
    }

    os << "Charge distribution" << endl;
    for (Map<UInt, UInt>::const_iterator it = charges.begin();
         it != charges.end(); ++it)
    {
      os << "charge " << it->first << ": " << it->second << endl;
    }
  }
  else if (in_type == FileTypes::CONSENSUSXML) //consensus features
  {
    map<Size, UInt> num_consfeat_of_size;
    for (ConsensusMap::const_iterator cmit = cons.begin();
         cmit != cons.end(); ++cmit)
    {
      ++num_consfeat_of_size[cmit->size()];
    }

    os << endl << "Number of consensus features:" << endl;
    for (map<Size, UInt>::reverse_iterator i = num_consfeat_of_size.rbegin(); i != num_consfeat_of_size.rend(); ++i)
    {
      os << " of size " << setw(2) << i->first << ": " << setw(6) << i->second << endl;
    }
    os << " total: " << setw(6) << cons.size() << endl << endl;

    os << "Ranges:" << endl
       << " retention time: " << String::number(cons.getMin()[Peak2D::RT], 2) << " : " << String::number(cons.getMax()[Peak2D::RT], 2) << endl
       << " mass-to-charge: " << String::number(cons.getMin()[Peak2D::MZ], 2) << " : " << String::number(cons.getMax()[Peak2D::MZ], 2) << endl
       << " intensity: " << String::number(cons.getMinInt(), 2) << " : " << String::number(cons.getMaxInt(), 2) << endl;

    // file descriptions
    const ConsensusMap::FileDescriptions& descs = cons.getFileDescriptions();
    if (!descs.empty())
    {
      os << endl << "File descriptions:" << endl;
      for (ConsensusMap::FileDescriptions::const_iterator it = descs.begin(); it != descs.end(); ++it)
      {
        os << " - " << it->second.filename << endl
           << " identifier: " << it->first << endl
           << " label : " << it->second.label << endl
           << " size : " << it->second.size << endl;
      }
    }
  }

  os << endl
     << "-- Summary Statistics --" << endl
     << endl;
}

if (in_type == FileTypes::FEATUREXML) //features
{
  feat.sortByRT();

  vector<double> slice_stats;
  Size n = getIntOption_("n");
  Size begin = 0;
  Size end = 0;
  os << "#slice\tRT_begin\tRT_end\tnumber_of_features\ttic\t"
     << "int_mean\tint_stddev\tint_min\tint_max\tint_median\tint_lowerq\tint_upperq\t"
     << "mz_mean\tmz_stddev\tmz_min\tmz_max\tmz_median\tmz_lowerq\tmz_upperq\t"
Example 12: ice
pItraq.setValue("channel_116_description", "else");
q_method->setParameters(pItraq);
IsobaricChannelExtractor ice(q_method);
// disable activation filtering
Param p = ice.getParameters();
p.setValue("select_activation", "");
ice.setParameters(p);
// extract channels
ConsensusMap cm_out;
ice.extractChannels(exp, cm_out);
// check channel meta information
TEST_EQUAL(cm_out.getFileDescriptions().size(), 4)
ABORT_IF(cm_out.getFileDescriptions().size() != 4)
TEST_EQUAL(cm_out.getFileDescriptions()[0].label, "itraq4plex_114")
TEST_EQUAL(cm_out.getFileDescriptions()[0].getMetaValue("channel_name"), 114)
TEST_EQUAL(cm_out.getFileDescriptions()[0].getMetaValue("channel_id"), 0)
TEST_EQUAL(cm_out.getFileDescriptions()[0].getMetaValue("channel_description"), "ref")
TEST_EQUAL(cm_out.getFileDescriptions()[0].getMetaValue("channel_center"), 114.1112)
TEST_EQUAL(cm_out.getFileDescriptions()[1].label, "itraq4plex_115")
TEST_EQUAL(cm_out.getFileDescriptions()[1].getMetaValue("channel_name"), 115)
TEST_EQUAL(cm_out.getFileDescriptions()[1].getMetaValue("channel_id"), 1)
TEST_EQUAL(cm_out.getFileDescriptions()[1].getMetaValue("channel_description"), "something")
TEST_EQUAL(cm_out.getFileDescriptions()[1].getMetaValue("channel_center"), 115.1082)
TEST_EQUAL(cm_out.getFileDescriptions()[2].label, "itraq4plex_116")
Example 13: run
void LabeledPairFinder::run(const vector<ConsensusMap>& input_maps, ConsensusMap& result_map)
{
  if (input_maps.size() != 1)
    throw Exception::IllegalArgument(__FILE__, __LINE__, __PRETTY_FUNCTION__, "exactly one input map required");
  if (result_map.getFileDescriptions().size() != 2)
    throw Exception::IllegalArgument(__FILE__, __LINE__, __PRETTY_FUNCTION__, "two file descriptions required");
  if (result_map.getFileDescriptions().begin()->second.filename != result_map.getFileDescriptions().rbegin()->second.filename)
    throw Exception::IllegalArgument(__FILE__, __LINE__, __PRETTY_FUNCTION__, "the two file descriptions have to contain the same file name");
  checkIds_(input_maps);

  // look up the light and heavy index
  Size light_index = numeric_limits<Size>::max();
  Size heavy_index = numeric_limits<Size>::max();
  for (ConsensusMap::FileDescriptions::const_iterator it = result_map.getFileDescriptions().begin();
       it != result_map.getFileDescriptions().end();
       ++it)
  {
    if (it->second.label == "heavy")
    {
      heavy_index = it->first;
    }
    else if (it->second.label == "light")
    {
      light_index = it->first;
    }
  }
  if (light_index == numeric_limits<Size>::max() || heavy_index == numeric_limits<Size>::max())
  {
    throw Exception::IllegalArgument(__FILE__, __LINE__, __PRETTY_FUNCTION__, "the input maps have to be labeled 'light' and 'heavy'");
  }

  result_map.clear(false);

  // sort consensus features by RT (and MZ) to speed up searching afterwards
  typedef ConstRefVector<ConsensusMap> RefMap;
  RefMap model_ref(input_maps[0].begin(), input_maps[0].end());
  model_ref.sortByPosition();

  // calculate matches
  ConsensusMap matches;

  // settings
  double rt_pair_dist = param_.getValue("rt_pair_dist");
  double rt_dev_low = param_.getValue("rt_dev_low");
  double rt_dev_high = param_.getValue("rt_dev_high");
  double mz_dev = param_.getValue("mz_dev");
  DoubleList mz_pair_dists = param_.getValue("mz_pair_dists");
  bool mrm = param_.getValue("mrm").toBool();

  // estimate RT parameters
  if (param_.getValue("rt_estimate") == "true")
  {
    // find all possible RT distances of features with the same charge and a good m/z distance
    vector<double> dists;
    dists.reserve(model_ref.size());
    for (RefMap::const_iterator it = model_ref.begin(); it != model_ref.end(); ++it)
    {
      for (RefMap::const_iterator it2 = model_ref.begin(); it2 != model_ref.end(); ++it2)
      {
        for (DoubleList::const_iterator dist_it = mz_pair_dists.begin(); dist_it != mz_pair_dists.end(); ++dist_it)
        {
          double mz_pair_dist = *dist_it;
          if (it2->getCharge() == it->getCharge()
              && it2->getMZ() >= it->getMZ() + mz_pair_dist / it->getCharge() - mz_dev
              && it2->getMZ() <= it->getMZ() + mz_pair_dist / it->getCharge() + mz_dev)
          {
            dists.push_back(it2->getRT() - it->getRT());
          }
        }
      }
    }
    if (dists.empty())
    {
      cout << "Warning: Could not find pairs for RT distance estimation. The manual settings are used!" << endl;
    }
    else
    {
      if (dists.size() < 50)
      {
        cout << "Warning: Found only " << dists.size() << " pairs. The estimated shift and std deviation are probably not reliable!" << endl;
      }
      //--------------------------- estimate initial parameters of fit ---------------------------
      GaussFitter::GaussFitResult result(-1, -1, -1);
      // first estimate of the optimal shift: median of the distances
      sort(dists.begin(), dists.end());
      Size median_index = dists.size() / 2;
      result.x0 = dists[median_index];
      // create histogram of distances
      // consider only the maximum of pairs, centered around the optimal shift
      Size max_pairs = model_ref.size() / 2;
      Size start_index = (Size) max((SignedSize)0, (SignedSize)(median_index - max_pairs / 2));
      Size end_index = (Size) min((SignedSize)(dists.size() - 1), (SignedSize)(median_index + max_pairs / 2));
      double start_value = dists[start_index];
      double end_value = dists[end_index];
      double bin_step = fabs(end_value - start_value) / 99.999; // ensure that we have 100 bins
      Math::Histogram<> hist(start_value, end_value, bin_step);
      //std::cout << "HIST from " << start_value << " to " << end_value << " (bin size " << bin_step << ")" << endl;
      for (Size i = start_index; i <= end_index; ++i)
      {
        hist.inc(dists[i]);
      }
//......... part of the code omitted here .........
Example 14: load
void EDTAFile::load(const String& filename, ConsensusMap& consensus_map)
{
  // load input
  TextFile input(filename);
  TextFile::ConstIterator input_it = input.begin();

  // reset map
  consensus_map = ConsensusMap();
  consensus_map.setUniqueId();

  char separator = ' ';
  if (input_it->hasSubstring("\t"))
    separator = '\t';
  else if (input_it->hasSubstring(" "))
    separator = ' ';
  else if (input_it->hasSubstring(","))
    separator = ',';

  // parsing header line
  std::vector<String> headers;
  input_it->split(separator, headers);
  int offset = 0;
  for (Size i = 0; i < headers.size(); ++i)
  {
    headers[i].trim();
  }
  String header_trimmed = *input.begin();
  header_trimmed.trim();

  enum
  {
    TYPE_UNDEFINED,
    TYPE_OLD_NOCHARGE,
    TYPE_OLD_CHARGE,
    TYPE_CONSENSUS
  }
  input_type = TYPE_UNDEFINED;
  Size input_features = 1;

  double rt = 0.0;
  double mz = 0.0;
  double it = 0.0;
  Int ch = 0;

  if (headers.size() <= 2)
  {
    throw Exception::ParseError(__FILE__, __LINE__, __PRETTY_FUNCTION__, "", String("Failed parsing in line 1: not enough columns! Expected at least 3 columns!\nOffending line: '") + header_trimmed + "' (line 1)\n");
  }
  else if (headers.size() == 3)
    input_type = TYPE_OLD_NOCHARGE;
  else if (headers.size() == 4)
    input_type = TYPE_OLD_CHARGE;

  // see if we have a header
  try
  {
    // try to convert... if not: that's a header
    rt = headers[0].toDouble();
    mz = headers[1].toDouble();
    it = headers[2].toDouble();
  }
  catch (Exception::BaseException&)
  {
    offset = 1;
    ++input_it;
    LOG_INFO << "Detected a header line.\n";
  }

  if (headers.size() >= 5)
  {
    if (String(headers[4].trim()).toUpper() == "RT1")
      input_type = TYPE_CONSENSUS;
    else
      input_type = TYPE_OLD_CHARGE;
  }
  if (input_type == TYPE_CONSENSUS)
  {
    // Every consensus style line includes features with four columns.
    // The remainder is meta data
    input_features = headers.size() / 4;
  }

  if (offset == 0 && (input_type == TYPE_OLD_CHARGE || input_type == TYPE_CONSENSUS))
  {
    throw Exception::ParseError(__FILE__, __LINE__, __PRETTY_FUNCTION__, "", String("Failed parsing in line 1: No HEADER provided. This is only allowed for three columns. You have more!\nOffending line: '") + header_trimmed + "' (line 1)\n");
  }

  SignedSize input_size = input.end() - input.begin();

  ConsensusMap::FileDescription desc;
  desc.filename = filename;
  desc.size = (input_size) - offset;
  consensus_map.getFileDescriptions()[0] = desc;

  // parsing features
  consensus_map.reserve(input_size);

  for (; input_it != input.end(); ++input_it)
  {
    // do nothing for empty lines
//......... part of the code omitted here .........
Example 15: load
START_SECTION((void load(const String &filename, ConsensusMap & map)))
ConsensusMap map;
ConsensusXMLFile file;
file.load(OPENMS_GET_TEST_DATA_PATH("ConsensusXMLFile_1.consensusXML"), map);
//test DocumentIdentifier addition
TEST_STRING_EQUAL(map.getLoadedFilePath(), OPENMS_GET_TEST_DATA_PATH("ConsensusXMLFile_1.consensusXML"));
TEST_STRING_EQUAL(FileTypes::typeToName(map.getLoadedFileType()), "consensusXML");
//meta data
TEST_EQUAL(map.getIdentifier(), "lsid")
TEST_EQUAL(map.getExperimentType() == "label-free", true)
TEST_EQUAL(map.getMetaValue("name1") == DataValue("value1"), true)
TEST_EQUAL(map.getMetaValue("name2") == DataValue(2), true)
//file descriptions
TEST_EQUAL(map.getFileDescriptions()[0].filename == "data/MapAlignmentFeatureMap1.xml", true)
TEST_EQUAL(map.getFileDescriptions()[0].label, "label")
TEST_EQUAL(map.getFileDescriptions()[0].size, 144)
TEST_EQUAL(map.getFileDescriptions()[0].getMetaValue("name3") == DataValue("value3"), true)
TEST_EQUAL(map.getFileDescriptions()[0].getMetaValue("name4") == DataValue(4), true)
TEST_STRING_EQUAL(map.getFileDescriptions()[1].filename, "data/MapAlignmentFeatureMap2.xml")
TEST_EQUAL(map.getFileDescriptions()[1].label, "")
TEST_EQUAL(map.getFileDescriptions()[1].size, 0)
TEST_EQUAL(map.getFileDescriptions()[1].getMetaValue("name5") == DataValue("value5"), true)
TEST_EQUAL(map.getFileDescriptions()[1].getMetaValue("name6") == DataValue(6.0), true)
//data processing
TEST_EQUAL(map.getDataProcessing().size(), 2)
TEST_STRING_EQUAL(map.getDataProcessing()[0].getSoftware().getName(), "Software1")
TEST_STRING_EQUAL(map.getDataProcessing()[0].getSoftware().getVersion(), "0.91a")
TEST_EQUAL(map.getDataProcessing()[0].getProcessingActions().size(), 1)
TEST_EQUAL(map.getDataProcessing()[0].getProcessingActions().count(DataProcessing::DEISOTOPING), 1)