本文整理汇总了C#中IProgressMonitor.UpdateProgress方法的典型用法代码示例。如果您正苦于以下问题:C# IProgressMonitor.UpdateProgress方法的具体用法?C# IProgressMonitor.UpdateProgress怎么用?C# IProgressMonitor.UpdateProgress使用的例子?那么恭喜您,这里精选的方法代码示例或许可以为您提供帮助。您也可以进一步了解该方法所在类IProgressMonitor的用法示例。
在下文中一共展示了IProgressMonitor.UpdateProgress方法的15个代码示例,这些例子默认根据受欢迎程度排序。您可以为喜欢或者感觉有用的代码点赞,您的评价将有助于系统推荐出更棒的C#代码示例。
示例1: CreateTextureBitmaps
/// <summary>
/// Creates the six cube-map face bitmaps for the planet texture.
/// </summary>
/// <param name="progressMonitor">Monitor updated after each face is rendered (0..1)</param>
/// <param name="planet">Planet whose terrain renderer produces the faces</param>
/// <returns>Array of six bitmaps indexed by the integer value of <see cref="CubeMapFace"/></returns>
/// <exception cref="InvalidOperationException">Thrown if the planet has no ISpherePlanetTerrainRenderer</exception>
private static Bitmap[] CreateTextureBitmaps( IProgressMonitor progressMonitor, ISpherePlanet planet )
{
	const int width = 256;
	const int height = 256;
	progressMonitor.UpdateProgress( 0 );
	ISpherePlanetTerrainRenderer renderer = planet.Renderer.GetRenderer<ISpherePlanetTerrainRenderer>( );
	if ( renderer == null )
	{
		throw new InvalidOperationException( "Expected a valid ISpherePlanetTerrainRenderer to be available" );
	}
	//	The original code repeated the create-face/update-progress pair six times;
	//	render the faces in the same fixed order in a loop instead.
	CubeMapFace[] faces = new CubeMapFace[]
	{
		CubeMapFace.PositiveX, CubeMapFace.NegativeX,
		CubeMapFace.PositiveY, CubeMapFace.NegativeY,
		CubeMapFace.PositiveZ, CubeMapFace.NegativeZ
	};
	Bitmap[] faceBitmaps = new Bitmap[ faces.Length ];
	for ( int faceIndex = 0; faceIndex < faces.Length; ++faceIndex )
	{
		CubeMapFace face = faces[ faceIndex ];
		faceBitmaps[ ( int )face ] = renderer.CreateMarbleTextureFace( face, width, height );
		//	Report 1/6, 2/6, ... 6/6 (== 1) as each face completes
		progressMonitor.UpdateProgress( ( faceIndex + 1 ) / 6.0f );
	}
	//	Debug/diagnostic output: dump each face to a PNG in the working directory
	foreach ( object cubeMapFace in Enum.GetValues( typeof( CubeMapFace ) ) )
	{
		faceBitmaps[ ( int )cubeMapFace ].Save( "PlanetCubeMap" + cubeMapFace + ".png" );
	}
	return faceBitmaps;
}
示例2: GetOptimizationDb
/// <summary>
/// Opens the optimization library database at <paramref name="path"/>.
/// Progress and failures are reported through <paramref name="loadMonitor"/> when it is
/// non-null; with a null monitor, failures are thrown as OptimizationsOpeningException.
/// </summary>
/// <param name="path">Path to the optimization library (SQLite) file</param>
/// <param name="loadMonitor">Optional progress monitor; may be null</param>
/// <param name="document">Document passed through to old-format conversion, if needed</param>
/// <returns>The loaded database, or null after reporting an error to the monitor</returns>
public static OptimizationDb GetOptimizationDb(string path, IProgressMonitor loadMonitor, SrmDocument document)
{
    var status = new ProgressStatus(string.Format(Resources.OptimizationDb_GetOptimizationDb_Loading_optimization_library__0_, path));
    if (loadMonitor != null)
        loadMonitor.UpdateProgress(status);
    try
    {
        if (path == null)
            throw new OptimizationsOpeningException(Resources.OptimizationDb_GetOptimizationDb_Library_path_cannot_be_null_);
        if (!File.Exists(path))
            throw new OptimizationsOpeningException(String.Format(Resources.OptimizationDb_GetOptimizationDb_The_file__0__does_not_exist_, path));
        string message;
        try
        {
            //Check for a valid SQLite file and that it has our schema
            //Allow only one thread at a time to read from the same path
            using (var sessionFactory = GetSessionFactory(path))
            {
                lock (sessionFactory)
                {
                    return new OptimizationDb(path, sessionFactory).Load(loadMonitor, status);
                }
            }
        }
        // Each specific failure below is translated into a user-facing message,
        // which is thrown as OptimizationsOpeningException after the chain.
        catch (UnauthorizedAccessException)
        {
            message = string.Format(Resources.OptimizationDb_GetOptimizationDb_You_do_not_have_privilieges_to_access_the_file__0__, path);
        }
        catch (DirectoryNotFoundException)
        {
            message = string.Format(Resources.OptimizationDb_GetOptimizationDb_The_path_containing__0__does_not_exist_, path);
        }
        catch (FileNotFoundException)
        {
            message = string.Format(Resources.OptimizationDb_GetOptimizationDb_The_file__0__could_not_be_created__Perhaps_you_do_not_have_sufficient_privileges_, path);
        }
        catch (SQLiteException)
        {
            message = string.Format(Resources.OptimizationDb_GetOptimizationDb_The_file__0__is_not_a_valid_optimization_library_file_, path);
        }
        catch (GenericADOException)
        {
            // An NHibernate mapping failure may mean the file uses the old schema;
            // attempt an in-place conversion before giving up.
            try
            {
                return ConvertFromOldFormat(path, loadMonitor, status, document);
            }
            catch (Exception e)
            {
                message = string.Format(Resources.OptimizationDb_GetOptimizationDb_The_file__0__could_not_be_opened__conversion_from_old_format_failed____1_, path, e.Message);
            }
        }
        catch (Exception e)
        {
            message = string.Format(Resources.OptimizationDb_GetOptimizationDb_The_file__0__could_not_be_opened___1_, path, e.Message);
        }
        throw new OptimizationsOpeningException(message);
    }
    catch (OptimizationsOpeningException x)
    {
        // With no monitor the caller handles the exception directly;
        // otherwise surface the error through the monitor and return null.
        if (loadMonitor == null)
            throw;
        loadMonitor.UpdateProgress(status.ChangeErrorException(x));
        return null;
    }
}
示例3: GetIonMobilityDb
/// <summary>
/// Opens the ion mobility library database at <paramref name="path"/>.
/// Progress and failures are reported through <paramref name="loadMonitor"/> when it is
/// non-null; with a null monitor, failures are thrown as <see cref="DatabaseOpeningException"/>.
/// </summary>
/// <param name="path">Path to the ion mobility library (SQLite) file</param>
/// <param name="loadMonitor">Optional progress monitor; may be null</param>
/// <returns>The loaded database, or null after reporting an error to the monitor</returns>
public static IonMobilityDb GetIonMobilityDb(string path, IProgressMonitor loadMonitor)
{
    var status = new ProgressStatus(string.Format(Resources.IonMobilityDb_GetIonMobilityDb_Loading_ion_mobility_library__0_, path));
    if (loadMonitor != null)
        loadMonitor.UpdateProgress(status);
    try
    {
        if (String.IsNullOrEmpty(path))
            throw new DatabaseOpeningException(Resources.IonMobilityDb_GetIonMobilityDb_Please_provide_a_path_to_an_existing_ion_mobility_library_);
        if (!File.Exists(path))
            throw new DatabaseOpeningException(
                string.Format(
                    Resources.IonMobilityDb_GetIonMobilityDb_The_ion_mobility_library_file__0__could_not_be_found__Perhaps_you_did_not_have_sufficient_privileges_to_create_it_,
                    path));
        string message;
        try
        {
            //Check for a valid SQLite file and that it has our schema
            //Allow only one thread at a time to read from the same path
            using (var sessionFactory = GetSessionFactory(path))
            {
                lock (sessionFactory)
                {
                    return new IonMobilityDb(path, sessionFactory).Load(loadMonitor, status);
                }
            }
        }
        // Each specific failure below is translated into a user-facing message,
        // thrown as DatabaseOpeningException after the chain.
        catch (UnauthorizedAccessException)
        {
            message = string.Format(Resources.IonMobilityDb_GetIonMobilityDb_You_do_not_have_privileges_to_access_the_ion_mobility_library_file__0_, path);
        }
        catch (DirectoryNotFoundException)
        {
            message = string.Format(Resources.IonMobilityDb_GetIonMobilityDb_The_path_containing_ion_mobility_library__0__does_not_exist_, path);
        }
        catch (FileNotFoundException)
        {
            message = string.Format(Resources.IonMobilityDb_GetIonMobilityDb_The_ion_mobility_library_file__0__could_not_be_found__Perhaps_you_did_not_have_sufficient_privileges_to_create_it_, path);
        }
        catch (Exception) // SQLiteException is already something of a catch-all, just lump it with the others here
        {
            message = string.Format(Resources.IonMobilityDb_GetIonMobilityDb_The_file__0__is_not_a_valid_ion_mobility_library_file_, path);
        }
        throw new DatabaseOpeningException(message);
    }
    catch (DatabaseOpeningException x)
    {
        // With no monitor the caller handles the exception directly;
        // otherwise surface the error through the monitor and return null.
        if (loadMonitor == null)
            throw;
        loadMonitor.UpdateProgress(status.ChangeErrorException(x));
        return null;
    }
}
示例4: WriteMultiplexedWindows
/// <summary>
/// Generate an isolation list containing multiplexed windows, attempting to minimize the number
/// and frequency of repeated window pairings within each scan. The search is bounded by the
/// CalculationTime budget, split evenly across the cycles remaining at each step.
/// </summary>
/// <param name="writer">writer to write results</param>
/// <param name="windowsPerScan">how many windows are contained in each scan</param>
/// <param name="progressMonitor">progress monitor</param>
private void WriteMultiplexedWindows(TextWriter writer, int windowsPerScan, IProgressMonitor progressMonitor)
{
    int maxInstrumentWindows = Assume.Value(_maxInstrumentWindows);
    int windowCount = IsolationScheme.PrespecifiedIsolationWindows.Count;
    int cycleCount = maxInstrumentWindows / windowCount;
    double totalScore = 0.0;
    // Prepare to generate the best isolation list possible within the given time limit.
    var startTime = DateTime.Now;
    var cycle = new Cycle(windowCount, windowsPerScan);
    int cyclesGenerated = 0;
    ProgressStatus status = new ProgressStatus(Resources.AbstractDiaExporter_WriteMultiplexedWindows_Exporting_Isolation_List);
    progressMonitor.UpdateProgress(status);
    // Generate each cycle.
    for (int cycleNumber = 1; cycleNumber <= cycleCount; cycleNumber++)
    {
        // Update status. Percent complete is driven by elapsed time against the
        // fixed CalculationTime budget, not by cycle count.
        if (progressMonitor.IsCanceled)
            return;
        progressMonitor.UpdateProgress(status.ChangePercentComplete(
            (int) (DateTime.Now - startTime).TotalSeconds*100/CalculationTime).ChangeMessage(
            string.Format(Resources.AbstractDiaExporter_WriteMultiplexedWindows_Exporting_Isolation_List__0__cycles_out_of__1__,
            cycleNumber - 1, cycleCount)));
        // Split the remaining time budget evenly across the remaining cycles.
        double secondsRemaining = CalculationTime - (DateTime.Now - startTime).TotalSeconds;
        double secondsPerCycle = secondsRemaining / (cycleCount - cycleNumber + 1);
        var endTime = DateTime.Now.AddSeconds(secondsPerCycle);
        Cycle bestCycle = null;
        do
        {
            // Generate a bunch of cycles, looking for one with the lowest score.
            const int attemptCount = 50;
            for (int i = 0; i < attemptCount; i++)
            {
                cycle.Generate(cycleNumber);
                if (bestCycle == null || bestCycle.CycleScore > cycle.CycleScore)
                {
                    bestCycle = new Cycle(cycle);
                    if (bestCycle.CycleScore == 0.0)
                    {
                        // Perfect score found early: this adjustment cancels the
                        // unconditional "+= attemptCount" below, so the net count
                        // is exactly the i + 1 attempts actually made.
                        cyclesGenerated += i + 1 - attemptCount;
                        endTime = DateTime.Now; // Break outer loop.
                        break;
                    }
                }
            }
            cyclesGenerated += attemptCount;
        } while (DateTime.Now < endTime);
        // bestCycle cannot be null here: the do/while body always runs at least once,
        // and its first iteration assigns bestCycle.
        // ReSharper disable PossibleNullReferenceException
        totalScore += bestCycle.CycleScore;
        WriteCycle(writer, bestCycle, cycleNumber);
        WriteCycleInfo(bestCycle, cycleNumber, cyclesGenerated, startTime);
        // ReSharper restore PossibleNullReferenceException
    }
    WriteTotalScore(totalScore);
    // Show 100% in the wait dialog.
    progressMonitor.UpdateProgress(status.ChangePercentComplete(100).ChangeMessage(
        string.Format(Resources.AbstractDiaExporter_WriteMultiplexedWindows_Exporting_Isolation_List__0__cycles_out_of__0__,
        cycleCount)));
}
示例5: ConvertBrukerToMzml
private static void ConvertBrukerToMzml(string filePathBruker,
string outputPath, IProgressMonitor monitor, ProgressStatus status)
{
// We use CompassXport, if it is installed, to convert a Bruker raw file to mzML. This solves two
// issues: the Bruker reader can't be called on any thread other than the main thread, and there
// is no 64-bit version of the reader. So we start CompassXport in its own 32-bit process,
// and use it to convert the raw data to mzML in a temporary file, which we read back afterwards.
var key = Registry.LocalMachine.OpenSubKey(KEY_COMPASSXPORT, false);
string compassXportExe = (key != null) ? (string)key.GetValue(string.Empty) : null;
if (compassXportExe == null)
throw new IOException(Resources.VendorIssueHelper_ConvertBrukerToMzml_CompassXport_software_must_be_installed_to_import_Bruker_raw_data_files_);
// CompassXport arguments
// ReSharper disable NonLocalizedString
var argv = new[]
{
"-a \"" + filePathBruker + "\"", // input file (directory)
"-o \"" + outputPath + "\"", // output file (directory)
"-mode 2", // mode 2 (mzML)
"-raw 0" // export line spectra (profile data is HUGE and SLOW!)
};
// ReSharper restore NonLocalizedString
// Start CompassXport in its own process.
var psi = new ProcessStartInfo(compassXportExe)
{
CreateNoWindow = true,
UseShellExecute = false,
// Common directory includes the directory separator
WorkingDirectory = Path.GetDirectoryName(Assembly.GetExecutingAssembly().Location) ?? string.Empty,
Arguments = string.Join(" ", argv), // Not L10N
RedirectStandardError = true,
RedirectStandardOutput = true,
};
var proc = new Process { StartInfo = psi };
proc.Start();
// CompassXport starts by calculating a hash of the input file. This takes a long time, and there is
// no intermediate output during this time. So we set the progress bar some fraction of the way and
// let it sit there and animate while we wait for the start of spectra processing.
const int hashPercent = 25; // percentage of import time allocated to calculating the input file hash
int spectrumCount = 0;
var sbOut = new StringBuilder();
var reader = new ProcessStreamReader(proc);
string line;
while ((line = reader.ReadLine()) != null)
{
if (monitor.IsCanceled)
{
proc.Kill();
throw new LoadCanceledException(status.Cancel());
}
sbOut.AppendLine(line);
line = line.Trim();
// The main part of conversion starts with the hash calculation.
if (line.StartsWith("Calculating hash")) // Not L10N
{
status = status.ChangeMessage(Resources.VendorIssueHelper_ConvertBrukerToMzml_Calculating_hash_of_input_file)
.ChangePercentComplete(hashPercent);
monitor.UpdateProgress(status);
continue;
}
// Determine how many spectra will be converted so we can track progress.
var match = Regex.Match(line, @"Converting (\d+) spectra"); // Not L10N
if (match.Success)
{
spectrumCount = int.Parse(match.Groups[1].Value);
continue;
}
// Update progress as each spectra batch is converted.
match = Regex.Match(line, @"Spectrum \d+ - (\d+)"); // Not L10N
if (match.Success)
{
var spectrumEnd = int.Parse(match.Groups[1].Value);
var percentComplete = hashPercent + (100-hashPercent)*spectrumEnd/spectrumCount;
status = status.ChangeMessage(line).ChangePercentComplete(percentComplete);
monitor.UpdateProgress(status);
}
}
while (!proc.WaitForExit(200))
{
if (monitor.IsCanceled)
{
proc.Kill();
throw new LoadCanceledException(status.Cancel());
}
}
if (proc.ExitCode != 0)
{
throw new IOException(TextUtil.LineSeparate(string.Format(Resources.VendorIssueHelper_ConvertBrukerToMzml_Failure_attempting_to_convert__0__to_mzML_using_CompassXport_,
filePathBruker), string.Empty, sbOut.ToString()));
}
//.........这里部分代码省略.........
示例6: Run
/// <summary>
/// Test stub for a process runner: optionally cancels, optionally writes a canned
/// line of output, and always finishes by reporting 100% complete.
/// </summary>
/// <param name="psi">Ignored process start info (interface compatibility)</param>
/// <param name="stdin">Ignored standard input text (interface compatibility)</param>
/// <param name="progress">Monitor that receives the final (or canceled) status</param>
/// <param name="status">Progress status, updated by reference for the caller</param>
/// <param name="writer">Destination for the canned output line, if any</param>
public void Run(ProcessStartInfo psi, string stdin, IProgressMonitor progress, ref ProgressStatus status, TextWriter writer)
{
    if (shouldCancel)
    {
        // ProgressStatus is immutable: Cancel() returns a new instance, so assign
        // it back. (The original also called status.Cancel() on a separate line
        // and discarded the result - a no-op that has been removed.)
        progress.UpdateProgress(status = status.Cancel());
        return;
    }
    if (!string.IsNullOrEmpty(stringToWriteToWriter))
        writer.WriteLine(stringToWriteToWriter);
    // BUGFIX: ChangePercentComplete returns a new instance; the original discarded
    // it, so neither the caller's ref status nor the reported status ever reached 100%.
    status = status.ChangePercentComplete(100);
    progress.UpdateProgress(status);
}
示例7: Load
/// <summary>
/// Clones this database and populates the clone with its peptides,
/// reporting 100% completion to the optional monitor when done.
/// </summary>
private IrtDb Load(IProgressMonitor loadMonitor, ProgressStatus status)
{
    IrtDb loaded = ChangeProp(ImClone(this), im => im.LoadPeptides(im.GetPeptides()));
    // Incremental progress would require raw reading, so simply
    // jump straight to 100% once loading has finished.
    if (loadMonitor != null)
    {
        loadMonitor.UpdateProgress(status.ChangePercentComplete(100));
    }
    return loaded;
}
示例8: Import
public SrmDocument Import(TextReader reader, IProgressMonitor progressMonitor, long lineCount, bool isMinutes, bool removeMissing = false, bool changePeaks = true)
{
var status = new ProgressStatus(Resources.PeakBoundaryImporter_Import_Importing_Peak_Boundaries);
double timeConversionFactor = isMinutes ? 1.0 : 60.0;
int linesRead = 0;
int progressPercent = 0;
var docNew = (SrmDocument) Document.ChangeIgnoreChangingChildren(true);
var docReference = docNew;
var sequenceToNode = new Dictionary<Tuple<string, bool>, IList<IdentityPath>>();
var fileNameToFileMatch = new Dictionary<string, ChromSetFileMatch>();
var trackAdjustedResults = new HashSet<ResultsKey>();
var modMatcher = new ModificationMatcher();
// Make the dictionary of modified peptide strings to doc nodes and paths
for (int i = 0; i < Document.MoleculeCount; ++i)
{
IdentityPath peptidePath = Document.GetPathTo((int) SrmDocument.Level.Molecules, i);
PeptideDocNode peptideNode = (PeptideDocNode) Document.FindNode(peptidePath);
var peptidePair = new Tuple<string, bool>(peptideNode.RawTextId, peptideNode.IsDecoy);
IList<IdentityPath> idPathList;
// Each (sequence, isDecoy) pair can be associated with more than one peptide,
// to handle the case of duplicate peptides in the doucment.
if (sequenceToNode.TryGetValue(peptidePair, out idPathList))
{
idPathList.Add(peptidePath);
sequenceToNode[peptidePair] = idPathList;
}
else
{
idPathList = new List<IdentityPath> { peptidePath };
sequenceToNode.Add(peptidePair, idPathList);
}
}
// Add annotations as possible columns
var allFieldNames = new List<string[]>(FIELD_NAMES);
allFieldNames.AddRange(from def in Document.Settings.DataSettings.AnnotationDefs
where def.AnnotationTargets.Contains(AnnotationDef.AnnotationTarget.precursor_result)
select new[] { def.Name });
string line = reader.ReadLine();
linesRead++;
int[] fieldIndices;
int fieldsTotal;
// If we aren't changing peaks, allow start and end time to be missing
var requiredFields = changePeaks ? REQUIRED_FIELDS : REQUIRED_NO_CHROM;
char correctSeparator = ReadFirstLine(line, allFieldNames, requiredFields, out fieldIndices, out fieldsTotal);
while ((line = reader.ReadLine()) != null)
{
linesRead++;
if (progressMonitor != null)
{
if (progressMonitor.IsCanceled)
return Document;
int progressNew = (int) (linesRead*100/lineCount);
if (progressPercent != progressNew)
{
progressMonitor.UpdateProgress(status = status.ChangePercentComplete(progressNew));
progressPercent = progressNew;
}
}
var dataFields = new DataFields(fieldIndices, line.ParseDsvFields(correctSeparator), allFieldNames);
if (dataFields.Length != fieldsTotal)
{
throw new IOException(string.Format(Resources.PeakBoundaryImporter_Import_Line__0__field_count__1__differs_from_the_first_line__which_has__2_,
linesRead, dataFields.Length, fieldsTotal));
}
string modifiedPeptideString = dataFields.GetField(Field.modified_peptide);
modMatcher.CreateMatches(Document.Settings,
new List<string> {modifiedPeptideString},
Settings.Default.StaticModList,
Settings.Default.HeavyModList);
// Convert the modified peptide string into a standardized form that
// converts unimod, names, etc, into masses, eg [+57.0]
var nodeForModPep = modMatcher.GetModifiedNode(modifiedPeptideString);
if (nodeForModPep == null)
{
throw new IOException(string.Format(Resources.PeakBoundaryImporter_Import_Peptide_has_unrecognized_modifications__0__at_line__1_, modifiedPeptideString, linesRead));
}
nodeForModPep = nodeForModPep.ChangeSettings(Document.Settings, SrmSettingsDiff.ALL);
modifiedPeptideString = nodeForModPep.RawTextId; // Modified sequence, or custom ion name
string fileName = dataFields.GetField(Field.filename);
bool isDecoy = dataFields.IsDecoy(linesRead);
var peptideIdentifier = new Tuple<string, bool>(modifiedPeptideString, isDecoy);
int charge;
bool chargeSpecified = dataFields.TryGetCharge(linesRead, out charge);
string sampleName = dataFields.GetField(Field.sample_name);
double? startTime = null;
double? endTime = null;
if (changePeaks)
{
startTime = dataFields.GetTime(Field.start_time, Resources.PeakBoundaryImporter_Import_The_value___0___on_line__1__is_not_a_valid_start_time_, linesRead);
if (startTime.HasValue)
startTime = startTime / timeConversionFactor;
endTime = dataFields.GetTime(Field.end_time, Resources.PeakBoundaryImporter_Import_The_value___0___on_line__1__is_not_a_valid_end_time_, linesRead);
if (endTime.HasValue)
endTime = endTime / timeConversionFactor;
}
//.........这里部分代码省略.........
示例9: CreateLibraryFromSpectra
/// <summary>
/// Make a BiblioSpec SQLite library from a list of spectra and their intensities.
/// </summary>
/// <param name="librarySpec">Library spec for which the new library is created</param>
/// <param name="listSpectra">List of existing spectra, by LibKey</param>
/// <param name="libraryName">Name of the library to be created</param>
/// <param name="progressMonitor">Progress monitor to display progress in creating library; may be null</param>
/// <returns>A library of type <see cref="BiblioSpecLiteLibrary"/>, or null if canceled</returns>
public BiblioSpecLiteLibrary CreateLibraryFromSpectra(BiblioSpecLiteSpec librarySpec,
    List<SpectrumMzInfo> listSpectra,
    string libraryName,
    IProgressMonitor progressMonitor)
{
    const string libAuthority = BiblioSpecLiteLibrary.DEFAULT_AUTHORITY;
    const int majorVer = 1;
    const int minorVer = 0;
    string libId = libraryName;
    // Use a very specific LSID, since it really only matches this document.
    string libLsid = string.Format("urn:lsid:{0}:spectral_libary:bibliospec:nr:minimal:{1}:{2}:{3}.{4}", // Not L10N
        libAuthority, libId, Guid.NewGuid(), majorVer, minorVer);
    var dictLibrary = new Dictionary<LibKey, BiblioLiteSpectrumInfo>();
    using (ISession session = OpenWriteSession())
    using (ITransaction transaction = session.BeginTransaction())
    {
        int progressPercent = 0;
        int i = 0;
        var status = new ProgressStatus(Resources.BlibDb_CreateLibraryFromSpectra_Creating_spectral_library_for_imported_transition_list);
        foreach (var spectrum in listSpectra)
        {
            ++i;
            // Persist the reference spectrum and keep an in-memory index entry for it
            var dbRefSpectrum = RefSpectrumFromPeaks(spectrum);
            session.Save(dbRefSpectrum);
            dictLibrary.Add(spectrum.Key,
                new BiblioLiteSpectrumInfo(spectrum.Key, dbRefSpectrum.Copies,
                    dbRefSpectrum.NumPeaks,
                    (int)(dbRefSpectrum.Id ?? 0),
                    default(IndexedRetentionTimes),
                    default(IndexedIonMobilities)));
            if (progressMonitor != null)
            {
                // Returning null before Commit leaves the transaction uncommitted,
                // so the partially written library is discarded on dispose.
                if (progressMonitor.IsCanceled)
                    return null;
                // Only report when the integer percentage actually changes
                int progressNew = (i * 100 / listSpectra.Count);
                if (progressPercent != progressNew)
                {
                    progressMonitor.UpdateProgress(status = status.ChangePercentComplete(progressNew));
                    progressPercent = progressNew;
                }
            }
        }
        session.Flush();
        session.Clear();
        // Simulate ctime(d), which is what BlibBuild uses.
        string createTime = string.Format("{0:ddd MMM dd HH:mm:ss yyyy}", DateTime.Now); // Not L10N? different date/time format in different countries
        DbLibInfo libInfo = new DbLibInfo
        {
            LibLSID = libLsid,
            CreateTime = createTime,
            NumSpecs = dictLibrary.Count,
            MajorVersion = majorVer,
            MinorVersion = minorVer
        };
        session.Save(libInfo);
        session.Flush();
        session.Clear();
        transaction.Commit();
    }
    var libraryEntries = dictLibrary.Values.ToArray();
    return new BiblioSpecLiteLibrary(librarySpec, libLsid, majorVer, minorVer,
        libraryEntries, FileStreamManager.Default);
}
示例10: GenerateNormalMap
/// <summary>
/// Generates a normal-map bitmap for one frame of the wave animation, encoding the
/// X slope in the red channel and the Y slope in the blue channel.
/// </summary>
/// <param name="parameters">Wave animation setup (provides map width and height)</param>
/// <param name="t">Current animation time</param>
/// <param name="maxT">Total animation period</param>
/// <param name="progress">Monitor updated once per bitmap row</param>
/// <param name="curProgress">Progress value at the start of this frame</param>
/// <param name="progressPerFrame">Fraction of total progress allotted to this frame</param>
/// <returns>A 24bpp bitmap containing the encoded normals</returns>
private Bitmap GenerateNormalMap( WaveAnimationParameters parameters, float t, float maxT, IProgressMonitor progress, float curProgress, float progressPerFrame )
{
	int width = parameters.Width;
	int height = parameters.Height;
	ComplexF[] frequencyMap = GetFrequencyMap( parameters );
	ComplexF[] invFrequencyMap = GetInvFrequencyMap( parameters );
	Bitmap bmp = new Bitmap( width, height, PixelFormat.Format24bppRgb );
	ComplexF[] resMap = Generate( frequencyMap, invFrequencyMap, width, height, t, maxT, true );
	Fourier.FFT2( resMap, width, height, FourierDirection.Backward );
	for ( int y = 0; y < height; ++y )
	{
		for ( int x = 0; x < width; ++x )
		{
			ComplexF res = resMap[ x + y * width ];
			//	BUGFIX: clamp to 255, not 256. A value of exactly 256 is out of range
			//	for the byte cast (wrapping to 0 in practice), which would turn a
			//	maximum-slope pixel into a minimum one.
			byte nX = ( byte )( Math.Max( 0, Math.Min( 255, 128 + res.Re * 8 ) ) );
			byte nY = ( byte )( Math.Max( 0, Math.Min( 255, 128 + res.Im * 8 ) ) );
			bmp.SetPixel( x, y, Color.FromArgb( nX, 0, nY ) );
		}
		//	Advance progress linearly across the rows of this frame
		progress.UpdateProgress( curProgress + progressPerFrame * ( y / ( float )( height - 1 ) ) );
	}
	return bmp;
}
示例11: Train
/// <summary>
/// Train the model by iteratively calculating weights to separate target and decoy transition groups.
/// </summary>
/// <param name="targets">Target transition groups.</param>
/// <param name="decoys">Decoy transition groups.</param>
/// <param name="initParameters">Initial model parameters (weights and bias)</param>
/// <param name="includeSecondBest"> Include the second best peaks in the targets as decoys?</param>
/// <param name="preTrain">Use a pre-trained model to bootstrap the learning.</param>
/// <param name="progressMonitor">Optional progress monitor; may be null</param>
/// <returns>Immutable model with new weights.</returns>
public override IPeakScoringModel Train(IList<IList<float[]>> targets, IList<IList<float[]>> decoys, LinearModelParams initParameters,
    bool includeSecondBest = false, bool preTrain = true, IProgressMonitor progressMonitor = null)
{
    if (initParameters == null)
        initParameters = new LinearModelParams(_peakFeatureCalculators.Count);
    // Train on a clone; the clone's properties are set at the end of the lambda.
    return ChangeProp(ImClone(this), im =>
    {
        // Discard empty groups before scoring
        targets = targets.Where(list => list.Count > 0).ToList();
        decoys = decoys.Where(list => list.Count > 0).ToList();
        var targetTransitionGroups = new ScoredGroupPeaksSet(targets);
        var decoyTransitionGroups = new ScoredGroupPeaksSet(decoys);
        // Bootstrap from the pre-trained legacy model
        if (preTrain)
        {
            var preTrainedWeights = new double[initParameters.Weights.Count];
            // Preserve NaN entries from the initial weights (NaN appears to mark
            // disabled calculators - see GetEnabledCount usage below)
            for (int i = 0; i < preTrainedWeights.Length; ++i)
            {
                if (double.IsNaN(initParameters.Weights[i]))
                {
                    preTrainedWeights[i] = double.NaN;
                }
            }
            // Choose the standard or analyte calculator set based on which has
            // more enabled entries in the initial weights
            int standardEnabledCount = GetEnabledCount(LegacyScoringModel.StandardFeatureCalculators, initParameters.Weights);
            int analyteEnabledCount = GetEnabledCount(LegacyScoringModel.AnalyteFeatureCalculators, initParameters.Weights);
            bool hasStandards = standardEnabledCount >= analyteEnabledCount;
            var calculators = hasStandards ? LegacyScoringModel.StandardFeatureCalculators : LegacyScoringModel.AnalyteFeatureCalculators;
            for (int i = 0; i < calculators.Length; ++i)
            {
                // Retention time prediction is excluded from the legacy bootstrap
                if (calculators[i].GetType() == typeof (MQuestRetentionTimePredictionCalc))
                    continue;
                SetCalculatorValue(calculators[i].GetType(), LegacyScoringModel.DEFAULT_WEIGHTS[i], preTrainedWeights);
            }
            targetTransitionGroups.ScorePeaks(preTrainedWeights);
            decoyTransitionGroups.ScorePeaks(preTrainedWeights);
        }
        // Iteratively refine the weights through multiple iterations.
        var calcWeights = new double[initParameters.Weights.Count];
        Array.Copy(initParameters.Weights.ToArray(), calcWeights, initParameters.Weights.Count);
        double decoyMean = 0;
        double decoyStdev = 0;
        bool colinearWarning = false;
        // This may take a long time between progress updates, but just measure progress by cycles through the training
        var status = new ProgressStatus(Resources.MProphetPeakScoringModel_Train_Training_peak_scoring_model);
        if (progressMonitor != null)
            progressMonitor.UpdateProgress(status);
        for (int iteration = 0; iteration < MAX_ITERATIONS; iteration++)
        {
            if (progressMonitor != null)
            {
                if (progressMonitor.IsCanceled)
                    throw new OperationCanceledException();
                progressMonitor.UpdateProgress(status =
                    status.ChangeMessage(string.Format(Resources.MProphetPeakScoringModel_Train_Training_peak_scoring_model__iteration__0__of__1__, iteration + 1, MAX_ITERATIONS))
                        .ChangePercentComplete((iteration + 1) * 100 / (MAX_ITERATIONS + 1)));
            }
            // CalculateWeights mutates calcWeights in place and reports the decoy
            // score distribution used for rescaling below
            im.CalculateWeights(iteration, targetTransitionGroups, decoyTransitionGroups,
                includeSecondBest, calcWeights, out decoyMean, out decoyStdev, ref colinearWarning);
            GC.Collect(); // Each loop generates a number of large objects. GC helps to keep private bytes under control
        }
        if (progressMonitor != null)
            progressMonitor.UpdateProgress(status.ChangePercentComplete(100));
        // Rescale weights relative to the decoy score distribution and store
        // the results on the cloned model
        var parameters = new LinearModelParams(calcWeights);
        parameters = parameters.RescaleParameters(decoyMean, decoyStdev);
        im.Parameters = parameters;
        im.ColinearWarning = colinearWarning;
        im.UsesSecondBest = includeSecondBest;
        im.UsesDecoys = decoys.Count > 0;
    });
}
示例12: RecalculateAlignments
/// <summary>
/// Rebuilds the retention time alignments for every results source in the document,
/// returning a document updated with the new alignments, or null if canceled.
/// </summary>
public static SrmDocument RecalculateAlignments(SrmDocument document, IProgressMonitor progressMonitor)
{
    var sources = ListAvailableRetentionTimeSources(document.Settings);
    var resultsSources = ListSourcesForResults(document.Settings.MeasuredResults, sources);
    var libraryTimes = ReadAllRetentionTimes(document, sources);
    var alignments = new List<FileRetentionTimeAlignments>();
    var status = new ProgressStatus("Aligning retention times"); // Not L10N? Will users see this?
    foreach (var source in resultsSources.Values)
    {
        // Progress tracks how many sources have been aligned so far
        status = status.ChangePercentComplete(100 * alignments.Count / resultsSources.Count);
        progressMonitor.UpdateProgress(status);
        try
        {
            alignments.Add(CalculateFileRetentionTimeAlignments(source.Name, libraryTimes, progressMonitor));
        }
        catch (OperationCanceledException)
        {
            // Alignment was canceled through the progress monitor
            progressMonitor.UpdateProgress(status.Cancel());
            return null;
        }
    }
    var documentTimes = new DocumentRetentionTimes(sources.Values, alignments);
    var updatedDocument = document.ChangeSettings(document.Settings.ChangeDocumentRetentionTimes(documentTimes));
    Debug.Assert(IsLoaded(updatedDocument));
    progressMonitor.UpdateProgress(status.Complete());
    return updatedDocument;
}
示例13: Import
/// <summary>
/// Imports protein sequences from FASTA-format text, creating a peptide group
/// document node for each protein entry read from <paramref name="reader"/>.
/// </summary>
/// <param name="reader">Source of FASTA-format lines</param>
/// <param name="progressMonitor">Optional progress monitor; may be null</param>
/// <param name="lineCount">Expected total line count, used to compute percent complete</param>
/// <returns>The new peptide groups, or an empty sequence if the import was canceled</returns>
/// <exception cref="InvalidDataException">Thrown when document size limits are exceeded</exception>
public IEnumerable<PeptideGroupDocNode> Import(TextReader reader, IProgressMonitor progressMonitor, long lineCount)
{
    // Set starting values for limit counters
    _countPeptides = Document.PeptideCount;
    _countIons = Document.PeptideTransitionCount;
    // Store set of existing FASTA sequences to keep from duplicating
    HashSet<FastaSequence> set = new HashSet<FastaSequence>();
    foreach (PeptideGroupDocNode nodeGroup in Document.Children)
    {
        FastaSequence fastaSeq = nodeGroup.Id as FastaSequence;
        if (fastaSeq != null)
            set.Add(fastaSeq);
    }
    var peptideGroupsNew = new List<PeptideGroupDocNode>();
    PeptideGroupBuilder seqBuilder = null;
    long linesRead = 0;
    int progressPercent = -1;
    string line;
    var status = new ProgressStatus(string.Empty);
    while ((line = reader.ReadLine()) != null)
    {
        linesRead++;
        if (progressMonitor != null)
        {
            // TODO when changing from ILongWaitBroker to IProgressMonitor, the old code was:
            // if (progressMonitor.IsCanceled || progressMonitor.IsDocumentChanged(Document))
            // IProgressMonitor does not have IsDocumentChanged.
            if (progressMonitor.IsCanceled)
                return new PeptideGroupDocNode[0];
            // Only report progress when the integer percentage changes
            int progressNew = (int) (linesRead*100/lineCount);
            if (progressPercent != progressNew)
                progressMonitor.UpdateProgress(status = status.ChangePercentComplete(progressPercent = progressNew));
        }
        // FIX: use ordinal comparison for the FASTA header marker; the default
        // culture-sensitive StartsWith is slower and potentially incorrect (CA1310).
        if (line.StartsWith(">", StringComparison.Ordinal)) // Not L10N
        {
            if (_countIons > SrmDocument.MAX_TRANSITION_COUNT ||
                    _countPeptides > SrmDocument.MAX_PEPTIDE_COUNT)
                throw new InvalidDataException(Resources.FastaImporter_Import_Document_size_limit_exceeded);
            // Finish the previous protein before starting a new one
            if (seqBuilder != null)
                AddPeptideGroup(peptideGroupsNew, set, seqBuilder);
            seqBuilder = _modMatcher == null
                ? new PeptideGroupBuilder(line, PeptideList, Document.Settings)
                : new PeptideGroupBuilder(line, _modMatcher, Document.Settings);
            if (progressMonitor != null)
                progressMonitor.UpdateProgress(status = status.ChangeMessage(string.Format(Resources.FastaImporter_Import_Adding_protein__0__, seqBuilder.Name)));
        }
        else if (seqBuilder == null)
        {
            // Tolerate leading blank lines, but stop at any other content
            // appearing before the first '>' header.
            if (line.Trim().Length == 0)
                continue;
            break;
        }
        else
        {
            seqBuilder.AppendSequence(line);
        }
    }
    // Add last sequence.
    if (seqBuilder != null)
        AddPeptideGroup(peptideGroupsNew, set, seqBuilder);
    return peptideGroupsNew;
}
示例14: BuildLibrary
/// <summary>
/// Builds a BiblioSpec library: first a redundant library from the input files,
/// then a filtered non-redundant library written to OutputPath.
/// Errors and progress are reported through <paramref name="progress"/>.
/// </summary>
/// <param name="progress">Progress monitor for status, cancellation and errors</param>
/// <returns>True on success; false on error or cancellation</returns>
public bool BuildLibrary(IProgressMonitor progress)
{
    _ambiguousMatches = null;
    ProgressStatus status = new ProgressStatus(Resources.BiblioSpecLiteBuilder_BuildLibrary_Preparing_to_build_library);
    progress.UpdateProgress(status);
    // Convert any ProteinPilot input files before building
    if (InputFiles.Any(f => f.EndsWith(EXT_PILOT)))
    {
        try
        {
            InputFiles = VendorIssueHelper.ConvertPilotFiles(InputFiles, progress, status);
            if (progress.IsCanceled)
                return false;
        }
        catch (Exception x)
        {
            progress.UpdateProgress(status.ChangeErrorException(x));
            return false;
        }
    }
    string message = string.Format(Resources.BiblioSpecLiteBuilder_BuildLibrary_Building__0__library,
        Path.GetFileName(OutputPath));
    progress.UpdateProgress(status = status.ChangeMessage(message));
    // Stage 1: build the redundant library
    string redundantLibrary = BiblioSpecLiteSpec.GetRedundantName(OutputPath);
    var blibBuilder = new BlibBuild(redundantLibrary, InputFiles, TargetSequences)
    {
        Authority = Authority,
        IncludeAmbiguousMatches = IncludeAmbiguousMatches,
        CutOffScore = CutOffScore,
        Id = Id,
    };
    try
    {
        if (!blibBuilder.BuildLibrary(Action, progress, ref status, out _ambiguousMatches))
        {
            return false;
        }
    }
    catch (IOException x)
    {
        progress.UpdateProgress(status.ChangeErrorException(x));
        return false;
    }
    catch (Exception x)
    {
        // NOTE(review): looks like leftover debugging output - consider removing
        // or routing through a logger instead of the console.
        Console.WriteLine(x.Message);
        progress.UpdateProgress(status.ChangeErrorException(
            new Exception(string.Format(Resources.BiblioSpecLiteBuilder_BuildLibrary_Failed_trying_to_build_the_redundant_library__0__,
                redundantLibrary))));
        return false;
    }
    // Stage 2: filter the redundant library down to the non-redundant output
    var blibFilter = new BlibFilter();
    status = new ProgressStatus(message);
    progress.UpdateProgress(status);
    // Write the non-redundant library to a temporary file first
    try
    {
        using (var saver = new FileSaver(OutputPath))
        {
            if (!blibFilter.Filter(redundantLibrary, saver.SafeName, progress, ref status))
            {
                return false;
            }
            saver.Commit();
        }
    }
    catch (IOException x)
    {
        progress.UpdateProgress(status.ChangeErrorException(x));
        return false;
    }
    catch
    {
        progress.UpdateProgress(status.ChangeErrorException(
            new Exception(string.Format(Resources.BiblioSpecLiteBuilder_BuildLibrary_Failed_trying_to_build_the_library__0__,
                OutputPath))));
        return false;
    }
    finally
    {
        // Clean up the redundant intermediate unless the caller asked to keep it
        if (!KeepRedundant)
            FileEx.SafeDelete(redundantLibrary, true);
    }
    return true;
}
示例15: ConvertFromOldFormat
/// <summary>
/// Converts an optimization library in the old schema
/// (Id, PeptideModSeq, Charge, Mz, Value, Type) to the current format by matching
/// each optimization to a predicted fragment ion by product m/z, then rewrites
/// the file in place and reopens it.
/// </summary>
/// <param name="path">Path of the old-format library file</param>
/// <param name="loadMonitor">Optional progress monitor; may be null</param>
/// <param name="status">Progress status updated to 100% on completion</param>
/// <param name="document">Optional document whose settings seed the in-memory matching document</param>
/// <returns>The converted, reopened database</returns>
/// <exception cref="OptimizationsOpeningException">Thrown when any optimization cannot be matched</exception>
public static OptimizationDb ConvertFromOldFormat(string path, IProgressMonitor loadMonitor, ProgressStatus status, SrmDocument document)
{
    // Try to open assuming old format (Id, PeptideModSeq, Charge, Mz, Value, Type)
    var precursors = new Dictionary<string, HashSet<int>>(); // PeptideModSeq -> charges
    var optimizations = new List<Tuple<DbOptimization, double>>(); // DbOptimization, product m/z
    int maxCharge = 1;
    using (SQLiteConnection connection = new SQLiteConnection("Data Source = " + path)) // Not L10N
    using (SQLiteCommand command = new SQLiteCommand(connection))
    {
        connection.Open();
        command.CommandText = "SELECT PeptideModSeq, Charge, Mz, Value, Type FROM OptimizationLibrary"; // Not L10N
        using (SQLiteDataReader reader = command.ExecuteReader())
        {
            while (reader.Read())
            {
                var type = (OptimizationType)reader["Type"]; // Not L10N
                var modifiedSequence = reader["PeptideModSeq"].ToString(); // Not L10N
                var charge = (int)reader["Charge"]; // Not L10N
                var productMz = (double)reader["Mz"]; // Not L10N
                var value = (double)reader["Value"]; // Not L10N
                // Fragment ion name and product charge are unknown at this point
                // (string.Empty / -1); they are filled in by the matching loop below.
                optimizations.Add(new Tuple<DbOptimization, double>(new DbOptimization(type, modifiedSequence, charge, string.Empty, -1, value), productMz));
                if (!precursors.ContainsKey(modifiedSequence))
                {
                    precursors[modifiedSequence] = new HashSet<int>();
                }
                precursors[modifiedSequence].Add(charge);
                if (charge > maxCharge)
                {
                    maxCharge = charge;
                }
            }
        }
    }
    // One peptide-list entry per (sequence, charge) precursor, with charge indicator
    var peptideList = (from precursor in precursors
                       from charge in precursor.Value
                       select string.Format("{0}{1}", precursor.Key, Transition.GetChargeIndicator(charge)) // Not L10N
                      ).ToList();
    // Build an in-memory document configured to predict all plausible y/b ions,
    // so product m/z values can be matched to named fragment ions
    var newDoc = new SrmDocument(document != null ? document.Settings : SrmSettingsList.GetDefault());
    newDoc = newDoc.ChangeSettings(newDoc.Settings
        .ChangePeptideLibraries(libs => libs.ChangePick(PeptidePick.filter))
        .ChangeTransitionFilter(filter =>
            filter.ChangeFragmentRangeFirstName("ion 1") // Not L10N
                .ChangeFragmentRangeLastName("last ion") // Not L10N
                .ChangeProductCharges(Enumerable.Range(1, maxCharge).ToList())
                .ChangeIonTypes(new []{ IonType.y, IonType.b }))
        .ChangeTransitionLibraries(libs => libs.ChangePick(TransitionLibraryPick.none))
        );
    var matcher = new ModificationMatcher { FormatProvider = NumberFormatInfo.InvariantInfo };
    matcher.CreateMatches(newDoc.Settings, peptideList, Settings.Default.StaticModList, Settings.Default.HeavyModList);
    FastaImporter importer = new FastaImporter(newDoc, matcher);
    // Feed the peptide list through the FASTA importer as a single pseudo-protein
    string text = string.Format(">>{0}\r\n{1}", newDoc.GetPeptideGroupId(true), TextUtil.LineSeparate(peptideList)); // Not L10N
    PeptideGroupDocNode imported = importer.Import(new StringReader(text), null, Helpers.CountLinesInString(text)).First();
    int optimizationsUpdated = 0;
    // Match each old optimization to a predicted transition by sequence, precursor
    // charge and product m/z (within 1e-5), filling in fragment ion and product charge
    foreach (PeptideDocNode nodePep in imported.Children)
    {
        string sequence = newDoc.Settings.GetSourceTextId(nodePep);
        foreach (var nodeGroup in nodePep.TransitionGroups)
        {
            int charge = nodeGroup.PrecursorCharge;
            foreach (var nodeTran in nodeGroup.Transitions)
            {
                double productMz = nodeTran.Mz;
                foreach (var optimization in optimizations.Where(opt =>
                    string.IsNullOrEmpty(opt.Item1.FragmentIon) &&
                    opt.Item1.ProductCharge == -1 &&
                    opt.Item1.PeptideModSeq == sequence &&
                    opt.Item1.Charge == charge &&
                    Math.Abs(opt.Item2 - productMz) < 0.00001))
                {
                    optimization.Item1.FragmentIon = nodeTran.FragmentIonName;
                    optimization.Item1.ProductCharge = nodeTran.Transition.Charge;
                    ++optimizationsUpdated;
                }
            }
        }
    }
    // Any unmatched optimization means the conversion is incomplete - fail loudly
    if (optimizations.Count > optimizationsUpdated)
    {
        throw new OptimizationsOpeningException(string.Format(Resources.OptimizationDb_ConvertFromOldFormat_Failed_to_convert__0__optimizations_to_new_format_,
            optimizations.Count - optimizationsUpdated));
    }
    // Write the converted library to a temporary file, then swap it into place
    using (var fs = new FileSaver(path))
    {
        OptimizationDb db = CreateOptimizationDb(fs.SafeName);
        db.UpdateOptimizations(optimizations.Select(opt => opt.Item1).ToArray(), new DbOptimization[0]);
        fs.Commit();
        if (loadMonitor != null)
            loadMonitor.UpdateProgress(status.ChangePercentComplete(100));
        return GetOptimizationDb(fs.RealName, null, null);
    }
}