This article collects typical usage examples of the Sample.KolmogorovSmirnovTest method in C#. If you have been wondering what C# Sample.KolmogorovSmirnovTest does specifically, how to use it, or where to find examples of its use, the curated code examples below may help. You can also explore further usage examples of its containing class, Sample.
The following presents 15 code examples of the Sample.KolmogorovSmirnovTest method, sorted by popularity by default. You can upvote the examples you like or find useful; your ratings help the system recommend better C# code examples.
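Before the examples, here is a minimal sketch of the basic call pattern, assuming the Meta.Numerics-style Sample/Distribution API that the examples below use (the variable names here are illustrative only):

// draw 100 points from a standard normal distribution
Distribution d = new NormalDistribution();
Random rng = new Random(1);
Sample sample = new Sample();
for (int i = 0; i < 100; i++) {
    sample.Add(d.GetRandomValue(rng));
}
// one-sample KS test of the sample against the reference distribution
TestResult r = sample.KolmogorovSmirnovTest(d);
Console.WriteLine("D = {0}, P = {1}", r.Statistic, r.LeftProbability);

A static two-sample form, Sample.KolmogorovSmirnovTest(a, b), also appears in Example 9 below.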
Example 1: AnovaDistribution
public void AnovaDistribution()
{
    Distribution sDistribution = new NormalDistribution();
    Random rng = new Random(1);
    Sample fSample = new Sample();
    // do 100 ANOVAs
    for (int t = 0; t < 100; t++) {
        // each ANOVA has 4 groups
        List<Sample> groups = new List<Sample>();
        for (int g = 0; g < 4; g++) {
            // each group has 3 data points
            Sample group = new Sample();
            for (int i = 0; i < 3; i++) {
                group.Add(sDistribution.GetRandomValue(rng));
            }
            groups.Add(group);
        }
        OneWayAnovaResult result = Sample.OneWayAnovaTest(groups);
        fSample.Add(result.Factor.Result.Statistic);
    }
    // compare the distribution of F statistics to the expected distribution
    Distribution fDistribution = new FisherDistribution(3, 8);
    Console.WriteLine("m={0} s={1}", fSample.PopulationMean, fSample.PopulationStandardDeviation);
    TestResult kResult = fSample.KolmogorovSmirnovTest(fDistribution);
    Console.WriteLine(kResult.LeftProbability);
    Assert.IsTrue(kResult.LeftProbability < 0.95);
}
Example 2: BivariateNullAssociation
public void BivariateNullAssociation()
{
    Random rng = new Random(314159265);
    // Create sample sets for our three test statistics
    Sample PS = new Sample();
    Sample SS = new Sample();
    Sample KS = new Sample();
    // variables to hold the claimed distribution of each test statistic
    Distribution PD = null;
    Distribution SD = null;
    Distribution KD = null;
    // generate a large number of bivariate samples and conduct our three tests on each
    for (int j = 0; j < 100; j++) {
        BivariateSample S = new BivariateSample();
        // sample size should be large so that asymptotic assumptions are justified
        for (int i = 0; i < 100; i++) {
            double x = rng.NextDouble();
            double y = rng.NextDouble();
            S.Add(x, y);
        }
        TestResult PR = S.PearsonRTest();
        PS.Add(PR.Statistic);
        PD = PR.Distribution;
        TestResult SR = S.SpearmanRhoTest();
        SS.Add(SR.Statistic);
        SD = SR.Distribution;
        TestResult KR = S.KendallTauTest();
        KS.Add(KR.Statistic);
        KD = KR.Distribution;
    }
    // do KS tests to check whether the samples follow the claimed distributions
    //Console.WriteLine(PS.KolmogorovSmirnovTest(PD).LeftProbability);
    //Console.WriteLine(SS.KolmogorovSmirnovTest(SD).LeftProbability);
    //Console.WriteLine(KS.KolmogorovSmirnovTest(KD).LeftProbability);
    Assert.IsTrue(PS.KolmogorovSmirnovTest(PD).LeftProbability < 0.95);
    Assert.IsTrue(SS.KolmogorovSmirnovTest(SD).LeftProbability < 0.95);
    Assert.IsTrue(KS.KolmogorovSmirnovTest(KD).LeftProbability < 0.95);
}
Example 3: SampleKuiperTest
public void SampleKuiperTest()
{
    // This test has a whiff of meta-statistics about it:
    // we want to make sure that the Kuiper test statistic V is distributed according to the Kuiper
    // distribution, so we create a sample of V statistics and do KS/Kuiper tests
    // comparing it to the claimed Kuiper distribution.
    // start with any ol' underlying distribution
    Distribution distribution = new ExponentialDistribution(2.0);
    // generate some samples from it, and for each one get a V statistic from a Kuiper test
    Sample VSample = new Sample();
    Distribution VDistribution = null;
    for (int i = 0; i < 25; i++) {
        // the sample size must be large enough that the asymptotic assumptions are satisfied;
        // at the moment this test fails if we make the sample size much smaller; we should
        // be able to shrink this number when we expose the finite-sample distributions
        Sample sample = CreateSample(distribution, 250, i);
        TestResult kuiper = sample.KuiperTest(distribution);
        double V = kuiper.Statistic;
        Console.WriteLine("V = {0}", V);
        VSample.Add(V);
        VDistribution = kuiper.Distribution;
    }
    // check on the mean
    Console.WriteLine("m = {0} vs. {1}", VSample.PopulationMean, VDistribution.Mean);
    Assert.IsTrue(VSample.PopulationMean.ConfidenceInterval(0.95).ClosedContains(VDistribution.Mean));
    // check on the standard deviation
    Console.WriteLine("s = {0} vs. {1}", VSample.PopulationStandardDeviation, VDistribution.StandardDeviation);
    Assert.IsTrue(VSample.PopulationStandardDeviation.ConfidenceInterval(0.95).ClosedContains(VDistribution.StandardDeviation));
    // do a KS test comparing the sample to the expected distribution
    TestResult kst = VSample.KolmogorovSmirnovTest(VDistribution);
    Console.WriteLine("D = {0}, P = {1}", kst.Statistic, kst.LeftProbability);
    Assert.IsTrue(kst.LeftProbability < 0.95);
    // do a Kuiper test comparing the sample to the expected distribution
    TestResult kut = VSample.KuiperTest(VDistribution);
    Console.WriteLine("V = {0}, P = {1}", kut.Statistic, kut.LeftProbability);
    Assert.IsTrue(kut.LeftProbability < 0.95);
}
Example 4: ZTestDistribution
public void ZTestDistribution()
{
    Random rng = new Random(1);
    // define the sampling population (which must be normal for a z-test)
    Distribution population = new NormalDistribution(2.0, 3.0);
    // collect 100 z statistics
    Sample zSample = new Sample();
    for (int i = 0; i < 100; i++) {
        // each z statistic is formed by drawing a 4-count sample from the normal population
        Sample sample = new Sample();
        for (int j = 0; j < 4; j++) {
            sample.Add(population.GetRandomValue(rng));
        }
        // for each sample, do a z-test against the population
        TestResult zResult = sample.ZTest(population.Mean, population.StandardDeviation);
        zSample.Add(zResult.Statistic);
    }
    // the z's should be distributed normally
    TestResult result = zSample.KolmogorovSmirnovTest(new NormalDistribution());
    Console.WriteLine("{0} {1}", result.Statistic, result.LeftProbability);
    Assert.IsTrue((result.LeftProbability > 0.05) && (result.LeftProbability < 0.95));
}
Example 5: FitToSample
//......... part of the code omitted here .........
    // and the result is easily solved for the shape parameter
    //   k = \frac{\log 2}{\log\left(\frac{x_{2/3}}{x_{1/3}}\right)}
    double x1 = sample.InverseLeftProbability(1.0 / 3.0);
    double x2 = sample.InverseLeftProbability(2.0 / 3.0);
    double k0 = Global.LogTwo / Math.Log(x2 / x1);
    // Given the shape parameter, we could invert the expression for the mean to get
    // the scale parameter, but since we have an expression for \lambda from k, we
    // don't need it.
    //double s0 = sample.Mean / AdvancedMath.Gamma(1.0 + 1.0 / k0);
    // Simply handing our 1D function to a root-finder works fine until we start to encounter large k. For large k,
    // even just computing \lambda goes wrong because we are taking x_i^k, which overflows. Horst Rinne, "The Weibull
    // Distribution: A Handbook" describes a way out. Basically, we first move to variables z_i = \log(x_i) and
    // then w_i = z_i - \bar{z}. Then lots of factors of e^{k \bar{z}} cancel out and, even though we still do
    // have some e^{k w_i}, the w_i are small and centered around 0 instead of large and centered around \lambda.
    Sample transformedSample = sample.Copy();
    transformedSample.Transform(x => Math.Log(x));
    double zbar = transformedSample.Mean;
    transformedSample.Transform(z => z - zbar);
    // After this change of variables, the 1D function we need to zero becomes
    //   g(k) = \sum_i ( 1 - k w_i ) e^{k w_i}
    // It's easy to show that g(0) = n and g(\infty) = -\infty, so it must cross zero. It's also easy to take
    // a derivative
    //   g'(k) = -k \sum_i w_i^2 e^{k w_i}
    // so we can apply Newton's method.
    int i = 0;
    double k1 = k0;
    while (true) {
        i++;
        double g = 0.0;
        double gp = 0.0;
        foreach (double w in transformedSample) {
            double e = Math.Exp(k1 * w);
            g += (1.0 - k1 * w) * e;
            gp -= k1 * w * w * e;
        }
        double dk = -g / gp;
        k1 += dk;
        if (Math.Abs(dk) <= Global.Accuracy * Math.Abs(k1)) break;
        if (i >= Global.SeriesMax) throw new NonconvergenceException();
    }
    // The corresponding lambda can also be expressed in terms of zbar and the w's.
    double t = 0.0;
    foreach (double w in transformedSample) {
        t += Math.Exp(k1 * w);
    }
    t /= transformedSample.Count;
    double lambda1 = Math.Exp(zbar) * Math.Pow(t, 1.0 / k1);
    // We need the curvature matrix at the minimum of our log likelihood function
    // to determine the covariance matrix. Taking more derivatives...
    //   \frac{\partial^2 \log L}{\partial \lambda^2} = \frac{N k}{\lambda^2} - \sum_i \frac{k(k+1) x_i^k}{\lambda^{k+2}}
    //                                                = - \frac{N k^2}{\lambda^2}
    // The second expression follows by inserting the first-derivative-equals-zero relation into the first.
    // For k = 1, this agrees with the variance formula for the mean of the best-fit exponential.
    // Derivatives involving k are less simple.
    // We end up needing the means < (x/\lambda)^k \log(x/\lambda) > and < (x/\lambda)^k \log^2(x/\lambda) >.
    double mpl = 0.0; double mpl2 = 0.0;
    foreach (double x in sample) {
        double r = x / lambda1;
        double p = Math.Pow(r, k1);
        double l = Math.Log(r);
        double pl = p * l;
        double pl2 = pl * l;
        mpl += pl;
        mpl2 += pl2;
    }
    mpl = mpl / sample.Count;
    mpl2 = mpl2 / sample.Count;
    // See if we can't do any better here. Transforming to zbar and w's looked ugly, but perhaps it
    // can be simplified? One interesting observation: if we take expectation values (which gives
    // the Fisher information matrix), the entries become simple:
    //   B_{\lambda \lambda} = \frac{N k^2}{\lambda^2}
    //   B_{\lambda k} = -\Gamma'(2) \frac{N}{\lambda}
    //   B_{k k} = [1 + \Gamma''(2)] \frac{N}{k^2}
    // Would it be bad to just use these directly?
    // Construct the curvature matrix and invert it.
    SymmetricMatrix B = new SymmetricMatrix(2);
    B[0, 0] = sample.Count * MoreMath.Sqr(k1 / lambda1);
    B[0, 1] = -sample.Count * k1 / lambda1 * mpl;
    B[1, 1] = sample.Count * (1.0 / MoreMath.Pow2(k1) + mpl2);
    SymmetricMatrix C = B.CholeskyDecomposition().Inverse();
    // Do a KS test to compare the sample to the best-fit distribution.
    Distribution distribution = new WeibullDistribution(lambda1, k1);
    TestResult test = sample.KolmogorovSmirnovTest(distribution);
    // return the result
    return (new FitResult(new double[] { lambda1, k1 }, C, test));
}
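For reference, the relations that the comments in this example derive piecewise can be collected in one place (same notation as the comments: z_i = \log x_i, w_i = z_i - \bar{z}, N the sample size):

g(k) = \sum_i (1 - k w_i) e^{k w_i} = 0
g'(k) = -k \sum_i w_i^2 e^{k w_i}
k_{j+1} = k_j - g(k_j) / g'(k_j)    (the Newton step dk = -g/gp in the loop)
\hat{\lambda} = e^{\bar{z}} \left( \frac{1}{N} \sum_i e^{\hat{k} w_i} \right)^{1/\hat{k}}    (the lambda1 computed from t)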
Example 6: PearsonRDistribution
public void PearsonRDistribution()
{
    Random rng = new Random(1);
    // pick some underlying distributions for the sample variables, which must be normal but can have any parameters
    NormalDistribution xDistribution = new NormalDistribution(1, 2);
    NormalDistribution yDistribution = new NormalDistribution(3, 4);
    // try this for several sample sizes, all low so that we see the difference from the normal distribution
    // n = 3 maxima at ends; n = 4 uniform; n = 5 semi-circular "mound"; n = 6 parabolic "mound"
    foreach (int n in new int[] { 3, 4, 5, 6, 8 }) {
        Console.WriteLine("n={0}", n);
        // find r values
        Sample rSample = new Sample();
        for (int i = 0; i < 100; i++) {
            // to get each r value, construct a bivariate sample of the given size with no cross-correlation
            BivariateSample xySample = new BivariateSample();
            for (int j = 0; j < n; j++) {
                xySample.Add(xDistribution.GetRandomValue(rng), yDistribution.GetRandomValue(rng));
            }
            double r = xySample.PearsonRTest().Statistic;
            rSample.Add(r);
        }
        // check whether r is distributed as expected
        TestResult result = rSample.KolmogorovSmirnovTest(new PearsonRDistribution(n));
        Console.WriteLine("P={0}", result.LeftProbability);
        Assert.IsTrue(result.LeftProbability < 0.95);
    }
}
Example 7: UniformOrderStatistics
public void UniformOrderStatistics()
{
    // Check that the order statistics of the uniform distribution are distributed as expected.
    Random rng = new Random(1);
    UniformDistribution u = new UniformDistribution();
    Sample maxima = new Sample();
    Sample minima = new Sample();
    for (int i = 0; i < 100; i++) {
        double maximum = 0.0;
        double minimum = 1.0;
        for (int j = 0; j < 4; j++) {
            double value = u.GetRandomValue(rng);
            if (value > maximum) maximum = value;
            if (value < minimum) minimum = value;
        }
        maxima.Add(maximum);
        minima.Add(minimum);
    }
    // maxima should be distributed according to Beta(n, 1)
    TestResult maxTest = maxima.KolmogorovSmirnovTest(new BetaDistribution(4, 1));
    Assert.IsTrue(maxTest.LeftProbability < 0.95);
    // minima should be distributed according to Beta(1, n)
    TestResult minTest = minima.KolmogorovSmirnovTest(new BetaDistribution(1, 4));
    Assert.IsTrue(minTest.LeftProbability < 0.95);
}
Example 8: StudentTest
public void StudentTest()
{
    // make sure the Student t statistic is consistent with its definition
    // we are going to take a sample that we expect to be t-distributed
    Sample tSample = new Sample();
    // begin with an underlying normal distribution
    Distribution xDistribution = new NormalDistribution(1.0, 2.0);
    // compute a bunch of t statistics from the distribution
    for (int i = 0; i < 200000; i++) {
        // take a small sample from the underlying distribution
        // (as the sample gets large, the t distribution becomes normal)
        Random rng = new Random(i);
        Sample xSample = new Sample();
        for (int j = 0; j < 5; j++) {
            double x = xDistribution.InverseLeftProbability(rng.NextDouble());
            xSample.Add(x);
        }
        // compute t for the sample
        double t = (xSample.Mean - xDistribution.Mean) / (xSample.PopulationStandardDeviation.Value / Math.Sqrt(xSample.Count));
        tSample.Add(t);
        //Console.WriteLine(t);
    }
    // the t's should be t-distributed; use a KS test to check this
    Distribution tDistribution = new StudentDistribution(4);
    TestResult result = tSample.KolmogorovSmirnovTest(tDistribution);
    Console.WriteLine(result.LeftProbability);
    //Assert.IsTrue(result.LeftProbability < 0.95);
    // the t's should be demonstrably not normally distributed
    Console.WriteLine(tSample.KolmogorovSmirnovTest(new NormalDistribution()).LeftProbability);
    //Assert.IsTrue(tSample.KolmogorovSmirnovTest(new NormalDistribution()).LeftProbability > 0.95);
}
Example 9: TwoSampleKolmogorovNullDistributionTest
public void TwoSampleKolmogorovNullDistributionTest()
{
    Distribution population = new ExponentialDistribution();
    int[] sizes = new int[] { 23, 30, 175 };
    foreach (int na in sizes) {
        foreach (int nb in sizes) {
            Console.WriteLine("{0} {1}", na, nb);
            Sample d = new Sample();
            Distribution nullDistribution = null;
            for (int i = 0; i < 128; i++) {
                Sample a = TestUtilities.CreateSample(population, na, 31415 + na + i);
                Sample b = TestUtilities.CreateSample(population, nb, 27182 + nb + i);
                TestResult r = Sample.KolmogorovSmirnovTest(a, b);
                d.Add(r.Statistic);
                nullDistribution = r.Distribution;
            }
            // Only do the full KS test if the number of bins is larger than the sample size; otherwise we are going to fail
            // because the KS test detects the granularity of the distribution
            TestResult mr = d.KolmogorovSmirnovTest(nullDistribution);
            Console.WriteLine(mr.LeftProbability);
            if (AdvancedIntegerMath.LCM(na, nb) > d.Count) Assert.IsTrue(mr.LeftProbability < 0.99);
            // But always test that mean and standard deviation are as expected
            Console.WriteLine("{0} {1}", nullDistribution.Mean, d.PopulationMean.ConfidenceInterval(0.99));
            Assert.IsTrue(d.PopulationMean.ConfidenceInterval(0.99).ClosedContains(nullDistribution.Mean));
            Console.WriteLine("{0} {1}", nullDistribution.StandardDeviation, d.PopulationStandardDeviation.ConfidenceInterval(0.99));
            Assert.IsTrue(d.PopulationStandardDeviation.ConfidenceInterval(0.99).ClosedContains(nullDistribution.StandardDeviation));
            Console.WriteLine("{0} {1}", nullDistribution.MomentAboutMean(3), d.PopulationMomentAboutMean(3).ConfidenceInterval(0.99));
            //Assert.IsTrue(d.PopulationMomentAboutMean(3).ConfidenceInterval(0.99).ClosedContains(nullDistribution.MomentAboutMean(3)));
            //Console.WriteLine("m {0} {1}", nullDistribution.Mean, d.PopulationMean);
        }
    }
}
Example 10: KuiperNullDistributionTest
public void KuiperNullDistributionTest()
{
    // The underlying distribution is irrelevant; pick one at random.
    Distribution sampleDistribution = new NormalDistribution();
    // Loop over various sample sizes.
    foreach (int n in TestUtilities.GenerateIntegerValues(2, 128, 16)) {
        // Create a sample to hold the Kuiper statistics
        Sample testStatistics = new Sample();
        // and a variable to hold the claimed null distribution, which should be the same for each test.
        Distribution nullDistribution = null;
        // Create a bunch of samples, each with n+1 data points.
        // We pick n+1 instead of n just to have different sample size values than in the KS test case.
        for (int i = 0; i < 256; i++) {
            // Use a seed derived from n and i in order to get different points each time.
            Sample sample = TestUtilities.CreateSample(sampleDistribution, n + 1, 512 * n + i + 2);
            // Do a Kuiper test of the sample against the distribution each time.
            TestResult r1 = sample.KuiperTest(sampleDistribution);
            // Record the test statistic value and the claimed null distribution.
            testStatistics.Add(r1.Statistic);
            nullDistribution = r1.Distribution;
        }
        // Do a KS test of our sample of Kuiper statistics against the claimed null distribution.
        // We could use a Kuiper test here instead, which would be way cool and meta, but we picked KS instead for variety.
        TestResult r2 = testStatistics.KolmogorovSmirnovTest(nullDistribution);
        Console.WriteLine("{0} {1} {2}", n, r2.Statistic, r2.LeftProbability);
        Assert.IsTrue(r2.RightProbability > 0.01);
        // Test that the moments match, too.
        Console.WriteLine("  {0} {1}", testStatistics.PopulationMean, nullDistribution.Mean);
        Console.WriteLine("  {0} {1}", testStatistics.PopulationVariance, nullDistribution.Variance);
        Assert.IsTrue(testStatistics.PopulationMean.ConfidenceInterval(0.99).ClosedContains(nullDistribution.Mean));
        Assert.IsTrue(testStatistics.PopulationVariance.ConfidenceInterval(0.99).ClosedContains(nullDistribution.Variance));
    }
}
Example 11: MultivariateLinearRegressionNullDistribution
public void MultivariateLinearRegressionNullDistribution()
{
    int d = 4;
    Random rng = new Random(1);
    NormalDistribution n = new NormalDistribution();
    Sample fs = new Sample();
    for (int i = 0; i < 64; i++) {
        MultivariateSample ms = new MultivariateSample(d);
        for (int j = 0; j < 8; j++) {
            double[] x = new double[d];
            for (int k = 0; k < d; k++) {
                x[k] = n.GetRandomValue(rng);
            }
            ms.Add(x);
        }
        FitResult r = ms.LinearRegression(0);
        fs.Add(r.GoodnessOfFit.Statistic);
    }
    // conduct a KS test to check that F follows the expected distribution
    TestResult ks = fs.KolmogorovSmirnovTest(new FisherDistribution(3, 4));
    Assert.IsTrue(ks.LeftProbability < 0.95);
}
Example 12: FitToSample
/// <summary>
/// Computes the normal distribution that best fits the given sample.
/// </summary>
/// <param name="sample">The sample to fit.</param>
/// <returns>The best fit parameters.</returns>
/// <remarks>
/// <para>The returned fit parameters are the μ (<see cref="Mean"/>) and σ (<see cref="StandardDeviation"/>) parameters, in that order.
/// These are the same parameters, in the same order, that are required by the <see cref="NormalDistribution(double,double)"/> constructor to
/// specify a new normal distribution.</para>
/// </remarks>
/// <exception cref="ArgumentNullException"><paramref name="sample"/> is null.</exception>
/// <exception cref="InsufficientDataException"><paramref name="sample"/> contains fewer than three values.</exception>
public static FitResult FitToSample(Sample sample)
{
    if (sample == null) throw new ArgumentNullException("sample");
    if (sample.Count < 3) throw new InsufficientDataException();
    // Maximum likelihood estimates are guaranteed to be asymptotically unbiased, but not necessarily unbiased.
    // This hits home for the maximum likelihood estimate of the variance of a normal distribution, which fails
    // to include the N/(N-1) correction factor. Since we know the bias, there is no reason for us not to correct
    // for it, and we do so here.
    UncertainValue mu = sample.PopulationMean;
    UncertainValue sigma = sample.PopulationStandardDeviation;
    Distribution distribution = new NormalDistribution(mu.Value, sigma.Value);
    TestResult test = sample.KolmogorovSmirnovTest(distribution);
    // The best-fit sigma and mu are known to be uncorrelated;
    // you can prove this by writing down the log likelihood function and
    // computing its mixed second derivative, which you will see vanishes
    // at the minimum.
    return (new FitResult(mu.Value, mu.Uncertainty, sigma.Value, sigma.Uncertainty, 0.0, test));
}
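To spell out the proof the closing comment gestures at: the log likelihood of the normal model is

\ln L = -N \ln \sigma - \frac{N}{2} \ln (2\pi) - \sum_i \frac{(x_i - \mu)^2}{2\sigma^2}

and its mixed second derivative is

\frac{\partial^2 \ln L}{\partial \mu \, \partial \sigma} = -\frac{2}{\sigma^3} \sum_i (x_i - \mu),

which vanishes at the maximum-likelihood point \hat{\mu} = \bar{x}, since \sum_i (x_i - \bar{x}) = 0. That is why the off-diagonal covariance passed to FitResult is 0.0.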
Example 13: TimeGammaGenerators
public void TimeGammaGenerators()
{
    double alpha = 1.0;
    Random rng = new Random(1);
    //IDeviateGenerator nRng = new AhrensDieterGammaGenerator(alpha);
    IDeviateGenerator nRng = new MarsagliaTsangGammaGenerator(new PolarRejectionNormalDeviateGenerator(), alpha);
    Distribution d = new GammaDistribution(alpha);
    //double sum = 0.0;
    Sample sample = new Sample();
    Stopwatch timer = Stopwatch.StartNew();
    for (int i = 0; i < 1000000; i++) {
        //double x = nRng.GetNext(rng);
        double x = d.InverseLeftProbability(rng.NextDouble());
        //sum += x;
        sample.Add(x);
    }
    timer.Stop();
    Console.WriteLine(sample.KolmogorovSmirnovTest(d).RightProbability);
    //Console.WriteLine(sum);
    Console.WriteLine(timer.ElapsedMilliseconds);
}
Example 14: GammaFromExponential
public void GammaFromExponential()
{
    // test that x_1 + x_2 + ... + x_n ~ Gamma(n) when x_i ~ Exponential()
    Random rng = new Random(1);
    ExponentialDistribution eDistribution = new ExponentialDistribution();
    // pick some low values of n so the distribution is not simply normal
    foreach (int n in new int[] { 2, 3, 4, 5 }) {
        Sample gSample = new Sample();
        for (int i = 0; i < 100; i++) {
            double sum = 0.0;
            for (int j = 0; j < n; j++) {
                sum += eDistribution.GetRandomValue(rng);
            }
            gSample.Add(sum);
        }
        GammaDistribution gDistribution = new GammaDistribution(n);
        TestResult result = gSample.KolmogorovSmirnovTest(gDistribution);
        Assert.IsTrue(result.LeftProbability < 0.95);
    }
}
Example 15: FitToSample
/// <summary>
/// Computes the exponential distribution that best fits the given sample.
/// </summary>
/// <param name="sample">The sample to fit.</param>
/// <returns>The best fit parameter.</returns>
/// <remarks>
/// <para>The returned fit parameter is μ (the <see cref="Mean"/>).
/// This is the same parameter that is required by the <see cref="ExponentialDistribution(double)"/> constructor to
/// specify a new exponential distribution.</para>
/// </remarks>
/// <exception cref="ArgumentNullException"><paramref name="sample"/> is null.</exception>
/// <exception cref="InsufficientDataException"><paramref name="sample"/> contains fewer than two values.</exception>
/// <exception cref="InvalidOperationException"><paramref name="sample"/> contains non-positive values.</exception>
public static FitResult FitToSample(Sample sample)
{
    if (sample == null) throw new ArgumentNullException("sample");
    if (sample.Count < 2) throw new InsufficientDataException();
    // none of the data are allowed to be negative
    foreach (double value in sample) {
        if (value < 0.0) throw new InvalidOperationException();
    }
    // the best-fit exponential's mean is the sample mean, with the corresponding uncertainty
    double lambda = sample.Mean;
    double dLambda = lambda / Math.Sqrt(sample.Count);
    Distribution distribution = new ExponentialDistribution(lambda);
    TestResult test = sample.KolmogorovSmirnovTest(distribution);
    return (new FitResult(lambda, dLambda, test));
}
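To spell out where dLambda comes from: for an exponential distribution the standard deviation equals the mean \lambda, so the standard error of the sample mean is

\delta\hat{\lambda} = \frac{\sigma}{\sqrt{N}} = \frac{\hat{\lambda}}{\sqrt{N}},

which is exactly the lambda / Math.Sqrt(sample.Count) computed in the code.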