author    | Andrew Arnott <andrewarnott@gmail.com> | 2011-05-02 21:37:34 -0700
committer | Andrew Arnott <andrewarnott@gmail.com> | 2011-05-02 21:37:34 -0700
commit    | bcdd8eb12670332b0de69752d583cba710490e38 (patch)
tree      | 74b83b186088e742db82958ace7e4cc38c81689c /src
parent    | 6c751fc1364d94733e099c3a21623033c85ad86d (diff)
Perf tests now compare results against a baseline produced on the test machine.
This uses portions of MeasureIt, which normalizes perf measurements in terms of the machine's speed.
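The normalization works by first timing a trivial empty method call on the test machine and then dividing every measurement by that median, so per-test budgets are expressed in machine-relative units rather than absolute time. A minimal sketch of the idea, using the MultiSampleCodeTimer and Stats classes this commit adds (EmptyStaticFunction and the benchmark body here are placeholders, not the verbatim test code):

using System;
using System.Runtime.CompilerServices;
using DotNetOpenAuth.Test.Performance;

internal static class BaselineSketch {
	[MethodImpl(MethodImplOptions.NoInlining)]
	private static void EmptyStaticFunction() { }

	internal static Stats MeasureInBaselineUnits(Action benchmarkBody) {
		var timer = new MultiSampleCodeTimer(10, 1000); // 10 samples of 1000 iterations each

		// 1. Baseline: the median cost of an empty static call on this machine.
		Stats baseline = timer.Measure("EmptyStaticFunction()", () => EmptyStaticFunction());

		// 2. Measure the code under test, then rescale so the results are in
		//    baseline units (empty call == 1.0) instead of machine-dependent usec.
		Stats stats = timer.Measure("benchmark", benchmarkBody);
		stats.AdjustForScale(baseline.Median);
		return stats;
	}
}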
We also do several other things to reduce noise (a condensed sketch of the helper that applies them follows this list):
* set process and thread priority
* wait for the CPU to quiet down before beginning
* set power management to High Performance
* wake the CPU up if it's in a low-power mode
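In the diff these steps are bundled into a disposable helper (HighPerformance in PerformanceTestUtilities.cs) that every measurement runs inside. Condensed, with logging suppression and error handling omitted, its setup and teardown look roughly like this:

using System;
using System.Diagnostics;
using System.Threading;
using DotNetOpenAuth.Test.Performance;

// Condensed from the HighPerformance helper added in this commit; each bullet above
// maps to one setup step, and Dispose() restores the original state afterward.
internal class HighPerformanceSketch : IDisposable {
	private readonly PowerManagment.PowerSetting powerSetting;
	private readonly ProcessPriorityClass originalPriority;

	internal HighPerformanceSketch() {
		PerformanceTestUtilities.CoolOff();                        // wait for the CPU to quiet down
		this.powerSetting = new PowerManagment.PowerSetting(
			PowerManagment.PowerProfiles.HighPerformance);         // switch to the High Performance power plan
		this.originalPriority = Process.GetCurrentProcess().PriorityClass;
		Process.GetCurrentProcess().PriorityClass = ProcessPriorityClass.High; // process priority
		Thread.CurrentThread.Priority = ThreadPriority.Highest;                // thread priority
		WakeCpu();                                                 // spin briefly to leave low-power mode
	}

	public void Dispose() {
		Thread.CurrentThread.Priority = ThreadPriority.Normal;
		Process.GetCurrentProcess().PriorityClass = this.originalPriority;
		this.powerSetting.Dispose();                               // restore the previous power plan
	}

	private static void WakeCpu() {
		int dummy = 0;
		for (int i = 0; i < 1000000; i++) {
			dummy += i; // the dummy accumulator keeps the loop from being optimized away
		}
		GC.KeepAlive(dummy);
	}
}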
Diffstat (limited to 'src')
-rw-r--r-- | src/DotNetOpenAuth.Test/DotNetOpenAuth.Test.csproj              |   7
-rw-r--r-- | src/DotNetOpenAuth.Test/Logging.config                          |  17
-rw-r--r-- | src/DotNetOpenAuth.Test/OpenId/Provider/PerformanceTests.cs     |  83
-rw-r--r-- | src/DotNetOpenAuth.Test/Performance/CodeTimers.cs               | 349
-rw-r--r-- | src/DotNetOpenAuth.Test/Performance/PerformanceMeasurement.cs   | 268
-rw-r--r-- | src/DotNetOpenAuth.Test/Performance/PerformanceTestUtilities.cs | 163
-rw-r--r-- | src/DotNetOpenAuth.Test/Performance/PowerManagement.cs          |  82
-rw-r--r-- | src/DotNetOpenAuth.Test/TestBase.cs                             |  10
8 files changed, 914 insertions, 65 deletions
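The new Performance files listed above come together in the rewritten provider tests: each test now hands its body to PerformanceTestUtilities.Measure along with a time budget expressed in baseline-relative units, instead of running its own Stopwatch loop and asserting executions-per-second. Condensed from the AssociateDH test in this diff:

[TestCase]
public void AssociateDH() {
	var associateRequest = this.CreateAssociateRequest(OPUri);
	PerformanceTestUtilities.Measure(
		() => {
			IRequest request = this.provider.GetRequest(associateRequest);
			var response = this.provider.PrepareResponse(request);
			Assert.IsInstanceOf<AssociateSuccessfulResponse>(response.OriginalMessage);
		},
		maximumAllowedUnitTime: 2.8e6f,  // budget in baseline-relative units, not milliseconds
		iterations: 1);
}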
diff --git a/src/DotNetOpenAuth.Test/DotNetOpenAuth.Test.csproj b/src/DotNetOpenAuth.Test/DotNetOpenAuth.Test.csproj index 5e80f2c..8c5034c 100644 --- a/src/DotNetOpenAuth.Test/DotNetOpenAuth.Test.csproj +++ b/src/DotNetOpenAuth.Test/DotNetOpenAuth.Test.csproj @@ -74,6 +74,7 @@ <CodeContractsEmitXMLDocs>False</CodeContractsEmitXMLDocs> <CodeContractsExtraRewriteOptions /> <CodeContractsReferenceAssembly>%28none%29</CodeContractsReferenceAssembly> + <AllowUnsafeBlocks>true</AllowUnsafeBlocks> </PropertyGroup> <PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'Release|AnyCPU' "> <DebugType>pdbonly</DebugType> @@ -111,6 +112,7 @@ <CodeContractsEmitXMLDocs>False</CodeContractsEmitXMLDocs> <CodeContractsExtraRewriteOptions /> <CodeContractsReferenceAssembly>%28none%29</CodeContractsReferenceAssembly> + <AllowUnsafeBlocks>true</AllowUnsafeBlocks> </PropertyGroup> <PropertyGroup Condition=" '$(Configuration)|$(Platform)' == 'CodeAnalysis|AnyCPU' "> <DebugSymbols>true</DebugSymbols> @@ -150,6 +152,7 @@ <CodeContractsEmitXMLDocs>False</CodeContractsEmitXMLDocs> <CodeContractsExtraRewriteOptions /> <CodeContractsReferenceAssembly>%28none%29</CodeContractsReferenceAssembly> + <AllowUnsafeBlocks>true</AllowUnsafeBlocks> </PropertyGroup> <ItemGroup> <Reference Include="log4net" /> @@ -301,6 +304,10 @@ <Compile Include="OpenId\RelyingParty\IdentifierDiscoveryResultTests.cs" /> <Compile Include="OpenId\UriIdentifierTests.cs" /> <Compile Include="OpenId\XriIdentifierTests.cs" /> + <Compile Include="Performance\CodeTimers.cs" /> + <Compile Include="Performance\PerformanceMeasurement.cs" /> + <Compile Include="Performance\PerformanceTestUtilities.cs" /> + <Compile Include="Performance\PowerManagement.cs" /> <Compile Include="Properties\AssemblyInfo.cs" /> <Compile Include="Messaging\ResponseTests.cs" /> <Compile Include="OAuth\AppendixScenarios.cs" /> diff --git a/src/DotNetOpenAuth.Test/Logging.config b/src/DotNetOpenAuth.Test/Logging.config index 87da027..a32badd 100644 --- a/src/DotNetOpenAuth.Test/Logging.config +++ b/src/DotNetOpenAuth.Test/Logging.config @@ -17,6 +17,23 @@ <conversionPattern value="[%thread] %-5level - %message%newline" /> </layout> </appender> + <appender name="StdOutAppender" type="log4net.Appender.ConsoleAppender"> + <immediateFlush value="true" /> + <layout type="log4net.Layout.PatternLayout"> + <conversionPattern value="%date [%thread] %-5level %logger [%property{NDC}] - %message%newline" /> + </layout> + </appender> + <appender name="StdErrAppender" type="log4net.Appender.ConsoleAppender"> + <immediateFlush value="true" /> + <target value="Console.Error" /> + <filter type="log4net.Filter.LevelRangeFilter"> + <param name="LevelMin" value="ERROR"/> + <param name="LevelMax" value="FATAL"/> + </filter> + <layout type="log4net.Layout.PatternLayout"> + <conversionPattern value="%date [%thread] %-5level %logger [%property{NDC}] - %message%newline" /> + </layout> + </appender> <!-- Setup the root category, add the appenders and set the default level --> <root> <level value="Info" /> diff --git a/src/DotNetOpenAuth.Test/OpenId/Provider/PerformanceTests.cs b/src/DotNetOpenAuth.Test/OpenId/Provider/PerformanceTests.cs index 7984b58..fb1125e 100644 --- a/src/DotNetOpenAuth.Test/OpenId/Provider/PerformanceTests.cs +++ b/src/DotNetOpenAuth.Test/OpenId/Provider/PerformanceTests.cs @@ -19,6 +19,7 @@ namespace DotNetOpenAuth.Test.OpenId.Provider { using DotNetOpenAuth.OpenId.Messages; using DotNetOpenAuth.OpenId.Provider; using NUnit.Framework; + using DotNetOpenAuth.Test.Performance; 
[TestFixture, Category("Performance")] public class PerformanceTests : OpenIdTestBase { @@ -29,73 +30,50 @@ namespace DotNetOpenAuth.Test.OpenId.Provider { [SetUp] public override void SetUp() { base.SetUp(); - SuspendLogging(); this.provider = CreateProvider(); } - [TearDown] - public override void Cleanup() { - ResumeLogging(); - base.Cleanup(); - } - [TestCase] public void AssociateDH() { var associateRequest = this.CreateAssociateRequest(OPUri); - Stopwatch timer = new Stopwatch(); - timer.Start(); - int iterations; - for (iterations = 0; timer.ElapsedMilliseconds < TestRunTime.TotalMilliseconds; iterations++) { - IRequest request = this.provider.GetRequest(associateRequest); - var response = this.provider.PrepareResponse(request); - Assert.IsInstanceOf<AssociateSuccessfulResponse>(response.OriginalMessage); - } - timer.Stop(); - double executionsPerSecond = GetExecutionsPerSecond(iterations, timer); - TestUtilities.TestLogger.InfoFormat("Created {0} associations in {1}, or {2} per second.", iterations, timer.Elapsed, executionsPerSecond); - Assert.IsTrue(executionsPerSecond >= 2, "Too slow ({0} >= 2 executions per second required.)", executionsPerSecond); + PerformanceTestUtilities.Measure( + () => { + IRequest request = this.provider.GetRequest(associateRequest); + var response = this.provider.PrepareResponse(request); + Assert.IsInstanceOf<AssociateSuccessfulResponse>(response.OriginalMessage); + }, + maximumAllowedUnitTime: 2.8e6f, + iterations: 1); } [TestCase] public void AssociateClearText() { var associateRequest = this.CreateAssociateRequest(OPUriSsl); // SSL will cause a plaintext association - Stopwatch timer = new Stopwatch(); - timer.Start(); - int iterations; - for (iterations = 0; timer.ElapsedMilliseconds < TestRunTime.TotalMilliseconds; iterations++) { - IRequest request = this.provider.GetRequest(associateRequest); - var response = this.provider.PrepareResponse(request); - Assert.IsInstanceOf<AssociateSuccessfulResponse>(response.OriginalMessage); - } - timer.Stop(); - double executionsPerSecond = GetExecutionsPerSecond(iterations, timer); - TestUtilities.TestLogger.InfoFormat("Created {0} associations in {1}, or {2} per second.", iterations, timer.Elapsed, executionsPerSecond); - Assert.IsTrue(executionsPerSecond > 1000, "Too slow ({0} > 1000 executions per second required.)", executionsPerSecond); + PerformanceTestUtilities.Measure( + () => { + IRequest request = this.provider.GetRequest(associateRequest); + var response = this.provider.PrepareResponse(request); + Assert.IsInstanceOf<AssociateSuccessfulResponse>(response.OriginalMessage); + }, + maximumAllowedUnitTime: 1.2e4f, + iterations: 1000); } [TestCase] public void CheckIdSharedHmacSha1Association() { Protocol protocol = Protocol.Default; string assocType = protocol.Args.SignatureAlgorithm.HMAC_SHA1; - double executionsPerSecond = this.ParameterizedCheckIdTest(protocol, assocType); - TestUtilities.TestLogger.InfoFormat("{0} executions per second.", executionsPerSecond); - Assert.IsTrue(executionsPerSecond > 500, "Too slow ({0} > 500 executions per second required.)", executionsPerSecond); + this.ParameterizedCheckIdTest(protocol, assocType); } [TestCase] public void CheckIdSharedHmacSha256Association() { Protocol protocol = Protocol.Default; string assocType = protocol.Args.SignatureAlgorithm.HMAC_SHA256; - double executionsPerSecond = this.ParameterizedCheckIdTest(protocol, assocType); - TestUtilities.TestLogger.InfoFormat("{0} executions per second.", executionsPerSecond); - 
Assert.IsTrue(executionsPerSecond > 400, "Too slow ({0} > 400 executions per second required.)", executionsPerSecond); + this.ParameterizedCheckIdTest(protocol, assocType); } - private static double GetExecutionsPerSecond(int iterations, Stopwatch timer) { - return (double)iterations / (timer.ElapsedMilliseconds / 1000); - } - - private double ParameterizedCheckIdTest(Protocol protocol, string assocType) { + private void ParameterizedCheckIdTest(Protocol protocol, string assocType) { Association assoc = HmacShaAssociation.Create( protocol, assocType, @@ -103,19 +81,14 @@ namespace DotNetOpenAuth.Test.OpenId.Provider { this.provider.SecuritySettings); this.provider.AssociationStore.StoreAssociation(AssociationRelyingPartyType.Smart, assoc); var checkidRequest = this.CreateCheckIdRequest(true); - Stopwatch timer = new Stopwatch(); - timer.Start(); - int iterations; - for (iterations = 0; timer.ElapsedMilliseconds < TestRunTime.TotalMilliseconds; iterations++) { - var request = (IAuthenticationRequest)this.provider.GetRequest(checkidRequest); - request.IsAuthenticated = true; - var response = this.provider.PrepareResponse(request); - Assert.IsInstanceOf<PositiveAssertionResponse>(response.OriginalMessage); - } - timer.Stop(); - double executionsPerSecond = GetExecutionsPerSecond(iterations, timer); - TestUtilities.TestLogger.InfoFormat("Responded to {0} checkid messages in {1}; or {2} authentications per second.", iterations, timer.Elapsed, executionsPerSecond); - return executionsPerSecond; + var stats = PerformanceTestUtilities.Measure( + () => { + var request = (IAuthenticationRequest)this.provider.GetRequest(checkidRequest); + request.IsAuthenticated = true; + var response = this.provider.PrepareResponse(request); + Assert.IsInstanceOf<PositiveAssertionResponse>(response.OriginalMessage); + }, + maximumAllowedUnitTime: 5.5e4f); } private HttpRequestInfo CreateAssociateRequest(Uri opEndpoint) { diff --git a/src/DotNetOpenAuth.Test/Performance/CodeTimers.cs b/src/DotNetOpenAuth.Test/Performance/CodeTimers.cs new file mode 100644 index 0000000..78f8766 --- /dev/null +++ b/src/DotNetOpenAuth.Test/Performance/CodeTimers.cs @@ -0,0 +1,349 @@ +//----------------------------------------------------------------------- +// <copyright file="CodeTimers.cs" company="Microsoft Corporation"> +// Copyright (c) Microsoft Corporation. All rights reserved. +// </copyright> +// <author>Vance Morrison</author> +//----------------------------------------------------------------------- + +namespace DotNetOpenAuth.Test.Performance { + using System; + using System.Collections.Generic; + using System.Diagnostics; + + /// <summary> + /// Stats represents a list of samples (floating point values) This class can calculate the standard + /// statistics on this list (Mean, Median, StandardDeviation ...) 
+ /// </summary> + internal class Stats : IEnumerable<float> { + public Stats() { data = new List<float>(); } + + public void Add(float dataItem) { statsComputed = false; data.Add(dataItem); } + public void RemoveRange(int index, int count) { + data.RemoveRange(index, count); + statsComputed = false; + } + internal void Adjust(float delta) { + statsComputed = false; + for (int i = 0; i < data.Count; i++) + data[i] += delta; + } + internal void AdjustForScale(float scale) { + statsComputed = false; + for (int i = 0; i < data.Count; i++) + data[i] /= scale; + } + public int Count { get { return data.Count; } } + public float this[int idx] { get { return data[idx]; } } + public IEnumerator<float> GetEnumerator() { return data.GetEnumerator(); } + System.Collections.IEnumerator System.Collections.IEnumerable.GetEnumerator() { return data.GetEnumerator(); } + + public float Minimum { get { if (!statsComputed) ComputeStats(); return minimum; } } + public float Maximum { get { if (!statsComputed) ComputeStats(); return maximum; } } + public float Median { get { if (!statsComputed) ComputeStats(); return median; } } + public float Mean { get { if (!statsComputed) ComputeStats(); return mean; } } + public float StandardDeviation { get { if (!statsComputed) ComputeStats(); return standardDeviation; } } + public override string ToString() { + if (!statsComputed) + ComputeStats(); + return "mean=" + mean.ToString("f3") + " median=" + median.ToString("f3") + + " min=" + minimum.ToString("f3") + " max=" + maximum.ToString("f3") + + " sdtdev=" + standardDeviation.ToString("f3") + " samples=" + Count.ToString(); + } + + #region privates + + public void ComputeStats() { + minimum = float.MaxValue; + maximum = float.MinValue; + median = 0.0F; + mean = 0.0F; + standardDeviation = 0.0F; + + double total = 0; + foreach (float dataPoint in this) { + if (dataPoint < minimum) + minimum = dataPoint; + if (dataPoint > maximum) + maximum = dataPoint; + total += dataPoint; + } + + if (Count > 0) { + data.Sort(); + if (Count % 2 == 1) + median = this[Count / 2]; + else + median = (this[(Count / 2) - 1] + this[Count / 2]) / 2; + mean = (float)(total / Count); + + double squares = 0.0; + foreach (float dataPoint in this) { + double diffFromMean = dataPoint - mean; + squares += diffFromMean * diffFromMean; + } + standardDeviation = (float)Math.Sqrt(squares / Count); + } + + statsComputed = true; + } + + List<float> data; + float minimum; + float maximum; + float median; + float mean; + float standardDeviation; + bool statsComputed; + #endregion + }; + + /// <summary> + /// The CodeTimer class only times one invocation of the code. Often, you want to collect many samples so + /// that you can determine how noisy the resulting data is. This is what MultiSampleCodeTimer does. + /// </summary> + internal class MultiSampleCodeTimer { + public MultiSampleCodeTimer() : this(1) { } + public MultiSampleCodeTimer(int sampleCount) : this(sampleCount, 1) { } + public MultiSampleCodeTimer(int sampleCount, int iterationCount) { + SampleCount = sampleCount; + timer = new CodeTimer(iterationCount); + timer.Prime = false; // We will do the priming (or not). + Prime = true; + } + public MultiSampleCodeTimer(MultiSampleCodeTimer template) + : this(template.SampleCount, template.IterationCount) { + OnMeasure = template.OnMeasure; + } + + /// <summary> + /// If true (the default), the benchmark is run once before the actual measurement to + /// insure that any 'first time' initialization is complete. 
+ /// </summary> + public bool Prime; + /// <summary> + /// The number of times the benchmark is run in a loop for a single measument. + /// </summary> + public int IterationCount { get { return timer.IterationCount; } set { timer.IterationCount = value; } } + /// <summary> + /// The number of measurments to make for a single benchmark. + /// </summary> + public int SampleCount; + /// <summary> + /// The smallest time (in microseconds) that can be resolved by the timer). + /// </summary> + public static float ResolutionUsec { get { return 1000000.0F / Stopwatch.Frequency; } } + + public delegate void MeasureCallback(string name, int iterationCount, float scale, Stats sample); + /// <summary> + /// OnMeasure is signaled every time a Measure() is called. + /// </summary> + public event MeasureCallback OnMeasure; + + public Stats Measure(string name, Action action) { + return Measure(name, 1, action, null); + } + /// <summary> + /// The main measurment routine. Calling this will cause code:OnMeasure event to be + /// raised. + /// </summary> + /// <param name="name">name of the benchmark</param> + /// <param name="scale">The number of times the benchmark is cloned in 'action' (typically 1)</param> + /// <param name="action">The actual code to measure.</param> + /// <returns>A Stats object representing the measurements (in usec)</returns> + public Stats Measure(string name, float scale, Action action) { + return Measure(name, scale, action, null); + } + /// <summary> + /// The main measurment routine. Calling this will cause code:OnMeasure event to be + /// raised. + /// </summary> + /// <param name="name">name of the benchmark</param> + /// <param name="scale">The number of times the benchmark is cloned in 'action' (typically 1)</param> + /// <param name="action">The actual code to measure.</param> + /// <param name="reset">Code that will be called before 'action' to reset the state of the benchmark.</param> + /// <returns>A Stats object representing the measurements (in usec)</returns> + public Stats Measure(string name, float scale, Action action, Action reset) { + if (reset != null && IterationCount != 1) + throw new ApplicationException("Reset can only be used on timers with an iteration count of 1"); + Stats statsUSec = new Stats(); + if (Prime) { + if (reset != null) + reset(); + action(); + } + for (int i = 0; i < SampleCount; i++) { + if (reset != null) + reset(); + statsUSec.Add(timer.Measure(name, scale, action)); + } + + if (OnMeasure != null) + OnMeasure(name, IterationCount, scale, statsUSec); + return statsUSec; + } + + /// <summary> + /// Prints the mean, median, min, max, and stdDev and count of the samples to the Console + /// Useful as a target for OnMeasure + /// </summary> + public static MeasureCallback PrintStats = delegate(string name, int iterationCount, float scale, Stats sample) { + Console.WriteLine(name + ": " + sample.ToString()); + }; + /// <summary> + /// Prints the mean with a error bound (2 standard deviations, which imply a you have + /// 95% confidence that a sampleUsec will be with the bounds (for a normal distribution). + /// This is a good default target for OnMeasure. 
+ /// </summary> + public static MeasureCallback Print = delegate(string name, int iterationCount, float scale, Stats sample) { + // +- two standard deviations covers 95% of all samples in a normal distribution + float errorPercent = (sample.StandardDeviation * 2 * 100) / Math.Abs(sample.Mean); + string errorString = ">400%"; + if (errorPercent < 400) + errorString = (errorPercent.ToString("f0") + "%").PadRight(5); + string countString = ""; + if (iterationCount != 1) + countString = "count: " + iterationCount.ToString() + " "; + Console.WriteLine(name + ": " + countString + sample.Mean.ToString("f3").PadLeft(8) + " +- " + errorString + " msec"); + }; + + #region privates + CodeTimer timer; + #endregion + }; + + /// <summary> + /// CodeTimer is a simple wrapper that uses System.Diagnostics.StopWatch + /// to time the body of some code (given by a delegate), to high precision. + /// </summary> + public class CodeTimer { + public CodeTimer() : this(1) { } + public CodeTimer(int iterationCount) { + this.iterationCount = iterationCount; + Prime = true; + + // Spin the CPU for a while. This should help insure that the CPU gets out of any low power + // mode so so that we get more stable results. + // TODO: see if this is true, and if there is a better way of doing it. + Stopwatch sw = Stopwatch.StartNew(); + while (sw.ElapsedMilliseconds < 32) + ; + } + /// <summary> + /// The number of times the benchmark is run in a loop for a single measument. + /// </summary> + public int IterationCount { + get { return iterationCount; } + set { + iterationCount = value; + overheadValid = false; + } + } + /// <summary> + /// By default CodeTimer will run the action once before doing a + /// measurement run. This insures one-time actions like JIT + /// compilation are not being measured. However if the benchmark is + /// not idempotent, this can be a problem. Setting Prime=false + /// insures that this Priming does not happen. + /// </summary> + public bool Prime; + public delegate void MeasureCallback(string name, int iterationCount, float sample); + /// <summary> + /// OnMeasure is signaled every time a Measure() is called. + /// </summary> + public event MeasureCallback OnMeasure; + /// <summary> + /// The smallest time (in microseconds) that can be resolved by the timer). + /// </summary> + public static float ResolutionUsec { get { return 1000000.0F / Stopwatch.Frequency; } } + /// <summary> + /// Returns the number of microsecond it took to run 'action', 'count' times. + /// </summary> + public float Measure(string name, Action action) { + return Measure(name, 1, action); + } + /// <summary> + /// Returns the number of microseconds it to to run action 'count' times divided by 'scale'. + /// Scaling is useful if you want to normalize to a single iteration for example. + /// </summary> + public float Measure(string name, float scale, Action action) { + Stopwatch sw = new Stopwatch(); + + // Run the action once to do any JITTing that might happen. + if (Prime) + action(); + float overheadUsec = GetOverheadUsec(action); + + sw.Reset(); + sw.Start(); + for (int j = 0; j < iterationCount; j++) + action(); + sw.Stop(); + + float sampleUsec = (float)((sw.Elapsed.TotalMilliseconds * 1000.0F - overheadUsec) / scale / iterationCount); + if (!computingOverhead && OnMeasure != null) + OnMeasure(name, iterationCount, sampleUsec); + return sampleUsec; + } + /// <summary> + /// Prints the result of a CodeTimer to standard output. + /// This is a good default target for OnMeasure. 
+ /// </summary> + public static MeasureCallback Print = delegate(string name, int iterationCount, float sample) { + Console.WriteLine("{0}: count={1} time={2:f3} msec ", name, iterationCount, sample); + }; + #region privates + + /// <summary> + /// Time the overheadUsec of the harness that does nothing so we can subtract it out. + /// + /// Because calling delegates on static methods is more expensive than caling delegates on + /// instance methods we need the action to determine the overheadUsec. + /// </summary> + /// <returns></returns> + float GetOverheadUsec(Action action) { + if (!overheadValid) { + if (computingOverhead) + return 0.0F; + computingOverhead = true; + + // Compute the overheads of calling differnet types of delegates. + Action emptyInstanceAction = new Action(this.emptyMethod); + // Prime the actions (JIT them) + Measure(null, emptyInstanceAction); + // Compute the min over 5 runs (figuring better not to go negative) + instanceOverheadUsec = float.MaxValue; + for (int i = 0; i < 5; i++) { + // We multiply by iteration count because we don't want this scaled by the + // count but 'Measure' does it by whether we want it or not. + instanceOverheadUsec = Math.Min(Measure(null, emptyInstanceAction) * IterationCount, instanceOverheadUsec); + } + + Action emptyStaticAction = new Action(emptyStaticMethod); + Measure(null, emptyStaticAction); + staticOverheadUsec = float.MaxValue; + for (int i = 0; i < 5; i++) + staticOverheadUsec = Math.Min(Measure(null, emptyStaticAction) * IterationCount, staticOverheadUsec); + + computingOverhead = false; + overheadValid = true; + } + + if (action.Target == null) + return staticOverheadUsec; + else + return instanceOverheadUsec; + } + + static private void emptyStaticMethod() { } + private void emptyMethod() { } + + bool overheadValid; + bool computingOverhead; + int iterationCount; + float staticOverheadUsec; + float instanceOverheadUsec; + + #endregion + }; +} + diff --git a/src/DotNetOpenAuth.Test/Performance/PerformanceMeasurement.cs b/src/DotNetOpenAuth.Test/Performance/PerformanceMeasurement.cs new file mode 100644 index 0000000..6199005 --- /dev/null +++ b/src/DotNetOpenAuth.Test/Performance/PerformanceMeasurement.cs @@ -0,0 +1,268 @@ +//----------------------------------------------------------------------- +// <copyright file="CodeTimers.cs" company="Microsoft Corporation"> +// Copyright (c) Microsoft Corporation. All rights reserved. +// </copyright> +// <author>Vance Morrison</author> +//----------------------------------------------------------------------- + +namespace DotNetOpenAuth.Test.Performance { + using System; + using System.Collections.Generic; + using System.Text; + using System.IO; + using System.Web; // for HttpUtility.HtmlEncode + using System.Reflection; // for Assembly + using System.Diagnostics; // for Process, DebuggableAttribute + + /// <summary> + /// A code:StatsLogger is something that remembers a set of common + /// atrributes (verison of the code, Machine used, NGENed or JITed ...) as + /// well as a set of performance results from mulitple benchmarks. + /// (represented by a code:StatsCollection) + /// + /// The primary value of a StatsLogger is the + /// code:StatsLogger.DisplayHtmlReport which displayes the data in a + /// user-friendly way. 
+ /// </summary> + internal class StatsLogger { + public StatsLogger(StatsCollection dataSet) { + this.dataSet = dataSet; + attributes = new Dictionary<string, object>(); + } + StatsCollection DataSet { get { return dataSet; } } + public object this[string key] { + get { return attributes[key]; } + set { attributes[key] = value; } + } + public void Add(string name, Stats sample) { dataSet.Add(name, sample); } + public string Category { + get { return category; } + set { category = value; } + } + public void AddWithCount(string name, int iterationCount, float scale, Stats sample) { + if (!string.IsNullOrEmpty(category)) + name = category + ": " + name; + if (iterationCount != 1 || scale != 1) { + name += " ["; + if (iterationCount != 1) { + name = name + "count=" + iterationCount.ToString(); + if (scale != 1) + name += " "; + } + if (scale != 1) + name = name + " scale=" + scale.ToString("f1"); + name += "]"; + } + + Add(name, sample); + } + public void DisplayHtmlReport(string reportFileName) { + TextWriter writer = File.CreateText(reportFileName); + + writer.WriteLine("<html>"); + writer.WriteLine("<h1> MeasureIt Performance Results </h1>"); + object optimizationValue; + if (attributes.TryGetValue("CodeOptimization", out optimizationValue) && ((string)optimizationValue) == "Unoptimized") { + writer.WriteLine("<font color=red><p>"); + writer.WriteLine("Warning: the MeasureIt code was not optimized. The results are likely invalid."); + writer.WriteLine("</p></font>"); + } + if (Environment.OSVersion.Version.Major < 6) { + writer.WriteLine("<b><p>"); + writer.WriteLine("Data was collected on a Pre-Vista machine. MeasureIt does NOT automatically"); + writer.WriteLine("set the CPU to a high performance power policy. This means the CPU might"); + writer.WriteLine("be throttled to save power and can lead to"); + writer.WriteLine("incorrect and inconsistant benchmark measurements."); + writer.WriteLine("</p></b>"); + } else if (PowerManagment.CurrentPolicy != PowerManagment.PowerProfiles.HighPerformance) { + writer.WriteLine("<font color=red><p>"); + writer.WriteLine("Warning: The power policy settings were not set at 'High Performance' during this run."); + writer.WriteLine("This means that the CPU could be throttled to lower frequency resulting in"); + writer.WriteLine("incorrect and inconsistant benchmark measurements."); + writer.WriteLine("To correct go to Start Menu -> Contol Panel -> System and Maintance -> Power Options"); + writer.WriteLine("and set the power policy to 'High Performance' for the duration of the tests."); + writer.WriteLine("</p></font>"); + } + writer.WriteLine("<p>"); + writer.WriteLine("Below are the results of running a series of benchmarks. Use the"); + writer.WriteLine("<b>MeasureIt /usersGuide</b> for more details on exactly what the benchmarks do."); + writer.WriteLine("</p><p>"); + writer.WriteLine("It is very easy for benchmark results to be wrong or misleading. 
You should read the guidance"); + writer.WriteLine("in the <b>MeasureIt /usersGuide</b> before making important decisions based on this data."); + writer.WriteLine("</p><p>"); + writer.WriteLine("To improve the stability of the measurements, a may be cloned several times"); + writer.WriteLine("and this cloned code is then run in a loop."); + writer.WriteLine("If the benchmark was cloned the 'scale' attribute represents the number of times"); + writer.WriteLine("it was cloned, and the count represents the number of times the cloned code was run in a loop"); + writer.WriteLine("before the measurement was made. The reported number divides by both"); + writer.WriteLine("of these values, so it represents a single instance of the operation being measured."); + writer.WriteLine("</p>"); + writer.WriteLine("<p>"); + writer.WriteLine("The benchmarks data can vary from run to run, so the benchmark is run several times and"); + writer.WriteLine("the statistics are displayed. If we assume a normal distribution, you can expect 68% of all measureuments"); + writer.WriteLine("to fall within 1 StdDev of the Mean. You can expect over 95% of all measurements"); + writer.WriteLine("to fall witin 2 StdDev of the Mean. Thus 2 StdDev is a good error bound."); + writer.WriteLine("Keep in mind, however that it is not uncommon for the statistics to be quite stable"); + writer.WriteLine("during a run and yet very widely across different runs. See the users guide for more."); + writer.WriteLine("</p>"); + writer.WriteLine("<p>"); + writer.WriteLine("Generally the mean is a better measurment if you use the number to compute an"); + writer.WriteLine("aggregate throughput for a large number of items. The median is a better"); + writer.WriteLine("guess if you want to best guess of a typical sample. The median is also"); + writer.WriteLine("more stable if the sample is noisy (eg has outliers)."); + writer.WriteLine("</p>"); + writer.WriteLine("<h3>Data collected</h3>"); + { + writer.WriteLine("<p>"); + writer.WriteLine(UnitsDescription); + writer.WriteLine("</p>"); + + dataSet.WriteReportTable(writer, Scale); + } + + writer.WriteLine("<p>"); + { + writer.WriteLine("<h2>Attributes of the machine used to collect the data</h2>"); + writer.WriteLine("<table border>"); + writer.WriteLine("<tr><th>Attribute</th><th>Value</th></tr>"); + foreach (string key in attributes.Keys) { + object valueObj = this[key]; + writer.Write("<tr>"); + writer.Write("<td>" + HttpUtility.HtmlEncode(key) + "</td>"); + + string valueStr = HttpUtility.HtmlEncode(valueObj.ToString()); + writer.Write("<td>" + valueStr + "</td>"); + writer.WriteLine("<tr>"); + } + writer.WriteLine("</table>"); + } + writer.WriteLine("</p>"); + + writer.WriteLine("</html>"); + writer.Close(); + } + static public void LaunchIE(string fileName) { + Process process = new Process(); + process.StartInfo = new ProcessStartInfo(fileName); + process.Start(); + } + + public float Scale = 1.0F; + public string UnitsDescription = "Scale in usec"; + + // TODO: probabably does not belong in this class. 
+ public void CaptureCurrentMachineInfo(Assembly assemblyWithCode, bool skipMachineStats) { + this["Computer Name"] = Environment.MachineName; + if (!skipMachineStats) { + ////ComputerSpecs specs = new ComputerSpecs(); + ////this["Number of Processors"] = specs.NumberOfProcessors; + ////this["Processor Name "] = specs.ProcessorName; + ////this["Processor Mhz"] = specs.ProcessorClockSpeedMhz; + ////this["Memory MBytes"] = specs.MemoryMBytes; + ////this["L1 Cache KBytes"] = specs.L1KBytes; + ////this["L2 Cache KBytes"] = specs.L2KBytes; + ////this["Operating System"] = specs.OperatingSystem; + ////this["Operating System Version"] = specs.OperatingSystemVersion; + ////this["Stopwatch resolution (nsec)"] = (CodeTimer.ResolutionUsec * 1000.0).ToString("f3"); + } + + this["Machine Word Size (Bits)"] = (System.Runtime.InteropServices.Marshal.SizeOf(typeof(IntPtr)) * 8).ToString(); + + this["CLR Version"] = Environment.Version.ToString(); + this["CLR Directory"] = System.Runtime.InteropServices.RuntimeEnvironment.GetRuntimeDirectory(); + + // Are we NGENed or JITTed? + if (IsNGenedCodeLoaded(assemblyWithCode)) + this["CompileType"] = "NGEN"; + else + this["CompileType"] = "JIT"; + + // Are we Appdomain Shared, or not? + MethodInfo currentMethod = Assembly.GetEntryAssembly().EntryPoint; + LoaderOptimizationAttribute loaderAttribute = (LoaderOptimizationAttribute)System.Attribute.GetCustomAttribute(currentMethod, typeof(LoaderOptimizationAttribute)); + if (loaderAttribute != null && loaderAttribute.Value == LoaderOptimization.MultiDomain) + this["CodeSharing"] = "AppDomainShared"; + else + this["CodeSharing"] = "AppDomainSpecific"; + + this["CodeOptimization"] = IsOptimized(assemblyWithCode) ? "Optimized" : "Unoptimized"; + } + + internal static bool IsOptimized(Assembly assembly) { + DebuggableAttribute debugAttribute = (DebuggableAttribute)System.Attribute.GetCustomAttribute(assembly, typeof(System.Diagnostics.DebuggableAttribute)); + if (debugAttribute != null && debugAttribute.IsJITOptimizerDisabled) + return false; + else + return true; + } + + static bool IsNGenedCodeLoaded(Assembly assembly) { + // This is a bit of a hack, basically I find the assemblies file name, then + // look for a module loaded called '<filename>.ni.<ext>'. It is not foolproof, + // but it more than good enough for most purposes. + string assemblyFileName = Path.GetFileName(assembly.ManifestModule.FullyQualifiedName); + string nativeImageExt = "ni" + Path.GetExtension(assemblyFileName); + string nativeImageSuffix = @"\" + Path.ChangeExtension(assemblyFileName, nativeImageExt); + + System.Diagnostics.Process myProcess = System.Diagnostics.Process.GetCurrentProcess(); + foreach (System.Diagnostics.ProcessModule module in myProcess.Modules) { + if (module.FileName.EndsWith(nativeImageSuffix, StringComparison.OrdinalIgnoreCase)) + return true; + } + return false; + } + + #region privates + Dictionary<string, object> attributes; + StatsCollection dataSet; + string category; + + #endregion + } + + /// <summary> + /// StatsCollection represents a collecton of named of samples (class + /// Stats) that have have been given string names. The data can be + /// looked up by name, but is also collection also remembers the order in + /// which the samples were added, and the names can be enumerated in that + /// order. 
+ /// </summary> + internal class StatsCollection { + public StatsCollection() { + dict = new Dictionary<string, Stats>(); + order = new List<string>(); + } + + public Stats this[string key] { get { return dict[key]; } } + public bool ContainsKey(string key) { return dict.ContainsKey(key); } + public IEnumerable<string> Keys { get { return order; } } + public void Add(string key, Stats value) { + dict.Add(key, value); + order.Add(key); + } + public void WriteReportTable(TextWriter writer, float scale) { + writer.WriteLine("<table border>"); + writer.WriteLine("<tr><th>Name</th><th>Median</th><th>Mean</th><th>StdDev</th><th>Min</th><th>Max</th><th>Samples</th></tr>"); + + foreach (string key in this.Keys) { + Stats value = this[key]; + writer.Write("<tr>"); + writer.Write("<td>" + HttpUtility.HtmlEncode(key) + "</td>"); + writer.Write("<td>" + (value.Median / scale).ToString("f3") + "</td>"); + writer.Write("<td>" + (value.Mean / scale).ToString("f3") + "</td>"); + writer.Write("<td>" + (value.StandardDeviation / scale).ToString("f3") + "</td>"); + writer.Write("<td>" + (value.Minimum / scale).ToString("f3") + "</td>"); + writer.Write("<td>" + (value.Maximum / scale).ToString("f3") + "</td>"); + writer.Write("<td>" + value.Count + "</td>"); + writer.WriteLine("</tr>"); + } + writer.WriteLine("</table>"); + } + + #region privates + Dictionary<string, Stats> dict; + List<string> order; + #endregion + } +} + diff --git a/src/DotNetOpenAuth.Test/Performance/PerformanceTestUtilities.cs b/src/DotNetOpenAuth.Test/Performance/PerformanceTestUtilities.cs new file mode 100644 index 0000000..1077514 --- /dev/null +++ b/src/DotNetOpenAuth.Test/Performance/PerformanceTestUtilities.cs @@ -0,0 +1,163 @@ +namespace DotNetOpenAuth.Test.Performance { + using System; + using System.Collections.Generic; + using System.Linq; + using System.Text; + using log4net; + using NUnit.Framework; + using DotNetOpenAuth.Messaging; + using DotNetOpenAuth.OpenId.RelyingParty; + using System.Threading; + using System.Diagnostics; + + internal static class PerformanceTestUtilities { + internal static Stats Baseline; + + private static readonly StatsCollection data = new StatsCollection(); + + static PerformanceTestUtilities() { + Baseline = CollectBaseline(); + TestUtilities.TestLogger.InfoFormat( + "Scaled where EmptyStaticFunction = 1.0 ({0:f1} nsec = 1.0 units)", + Baseline.Median * 1000); + } + + internal static Stats Measure(Action action, float maximumAllowedUnitTime, int samples = 10, int iterations = 100, string name = null) { + if (!StatsLogger.IsOptimized(typeof(OpenIdRelyingParty).Assembly)) { + Assert.Inconclusive("Unoptimized code."); + } + + var timer = new MultiSampleCodeTimer(samples, iterations); + Stats stats; + using (new HighPerformance()) { + stats = timer.Measure(name ?? 
TestContext.CurrentContext.Test.FullName, action); + } + + stats.AdjustForScale(PerformanceTestUtilities.Baseline.Median); + + TestUtilities.TestLogger.InfoFormat( + "Performance counters: median {0}, mean {1}, min {2}, max {3}, stddev {4} ({5}%).", + stats.Median, + stats.Mean, + stats.Minimum, + stats.Maximum, + stats.StandardDeviation, + stats.StandardDeviation / stats.Median * 100); + + Assert.IsTrue(stats.Mean < maximumAllowedUnitTime, "The mean time of {0} exceeded the maximum allowable of {1}.", stats.Mean, maximumAllowedUnitTime); + TestUtilities.TestLogger.InfoFormat("Within {0}% of the maximum allowed time of {1}.", Math.Round((maximumAllowedUnitTime - stats.Mean) / maximumAllowedUnitTime * 100, 1), maximumAllowedUnitTime); + + return stats; + } + + internal static bool CoolOff() { + using (var pc = new System.Diagnostics.PerformanceCounter()) { + pc.CategoryName = "Processor"; + pc.CounterName = "% Processor Time"; + pc.InstanceName = "_Total"; + + TimeSpan samplingInterval = TimeSpan.FromMilliseconds(1000); + TimeSpan minimumQuietTime = TimeSpan.FromSeconds(2); + TimeSpan maximumTimeBeforeBail = TimeSpan.FromSeconds(30); + float maximumCpuUtilization = 10; + DateTime startTryingStamp = DateTime.Now; + int hitsRequired = (int)(minimumQuietTime.TotalMilliseconds / samplingInterval.TotalMilliseconds); + while (DateTime.Now - startTryingStamp < maximumTimeBeforeBail) { + int hits; + for (hits = 0; hits < hitsRequired; hits++) { + float currentCpuUtilization = pc.NextValue(); + if (currentCpuUtilization > maximumCpuUtilization) { + //Console.WriteLine("Miss: CPU at {0}% utilization", currentCpuUtilization); + break; + } + + //Console.WriteLine("Hit: CPU at {0}% utilization", currentCpuUtilization); + Thread.Sleep(samplingInterval); + } + + if (hits == hitsRequired) { + return true; + } + + Thread.Sleep(samplingInterval); + } + + return false; + } + } + + private static Stats CollectBaseline() { + using (new HighPerformance()) { + return new MultiSampleCodeTimer(10, 1000).Measure("MethodCalls: EmptyStaticFunction()", 10, delegate { + Class.EmptyStaticFunction(); + Class.EmptyStaticFunction(); + Class.EmptyStaticFunction(); + Class.EmptyStaticFunction(); + Class.EmptyStaticFunction(); + Class.EmptyStaticFunction(); + Class.EmptyStaticFunction(); + Class.EmptyStaticFunction(); + Class.EmptyStaticFunction(); + Class.EmptyStaticFunction(); + }); + } + } + + private class Class { + [System.Runtime.CompilerServices.MethodImpl(System.Runtime.CompilerServices.MethodImplOptions.NoInlining)] + public static void EmptyStaticFunction() { + } + } + } + + /// <summary> + /// Suppresses logging and forces the CPU into a high performance mode. + /// </summary> + internal class HighPerformance : IDisposable { + private readonly log4net.Core.Level originalLoggerThreshold; + private readonly PowerManagment.PowerSetting powerSetting; + private readonly ProcessPriorityClass originalProcessPriority; + +#pragma warning disable 0618 + /// <summary> + /// Initializes a new instance of the <see cref="HighPerformance"/> class. 
+ /// </summary> + internal HighPerformance() { + if (!PerformanceTestUtilities.CoolOff()) { + Assert.Inconclusive("Timed out waiting for a quiet CPU in which to perform perf tests."); + } + + this.originalLoggerThreshold = LogManager.GetLoggerRepository().Threshold; + LogManager.GetLoggerRepository().Threshold = LogManager.GetLoggerRepository().LevelMap["OFF"]; + this.powerSetting = new PowerManagment.PowerSetting(PowerManagment.PowerProfiles.HighPerformance); + this.originalProcessPriority = Process.GetCurrentProcess().PriorityClass; + Process.GetCurrentProcess().PriorityClass = ProcessPriorityClass.High; + Thread.CurrentThread.Priority = ThreadPriority.Highest; + HighCpu(); + } + + /// <summary> + /// Performs application-defined tasks associated with freeing, releasing, or resetting unmanaged resources. + /// </summary> + public void Dispose() { + Thread.CurrentThread.Priority = ThreadPriority.Normal; + Process.GetCurrentProcess().PriorityClass = this.originalProcessPriority; + this.powerSetting.Dispose(); // restores original power setting. + LogManager.GetLoggerRepository().Threshold = this.originalLoggerThreshold; + } +#pragma warning restore 0618 + + /// <summary> + /// Runs the CPU in a tight loop to get it out of any low power state. + /// </summary> + private static void HighCpu() { + int dummy; + new MultiSampleCodeTimer(10, 1000).Measure("Loop 1K times", 1, delegate { + int k = 0; + while (k < 1000) + k++; // still in danger of being optimized. + dummy = k; // avoid optimization. + }); + } + } +} diff --git a/src/DotNetOpenAuth.Test/Performance/PowerManagement.cs b/src/DotNetOpenAuth.Test/Performance/PowerManagement.cs new file mode 100644 index 0000000..66492f3 --- /dev/null +++ b/src/DotNetOpenAuth.Test/Performance/PowerManagement.cs @@ -0,0 +1,82 @@ +//----------------------------------------------------------------------- +// <copyright file="PowerManagement.cs" company="Andrew Arnott"> +// Copyright (c) Andrew Arnott. All rights reserved. +// </copyright> +//----------------------------------------------------------------------- + +namespace DotNetOpenAuth.Test.Performance { + using System; + using System.Runtime.InteropServices; + using System.Diagnostics; + using System.ComponentModel; + + /// <summary> + /// PowerManagement allows you to access the funtionality of the Control Panel -> Power Options + /// dialog in windows. (Currently we only use VISTA APIs). + /// </summary> + internal static class PowerManagment { + internal static class PowerProfiles { + internal static Guid HighPerformance = new Guid(0x8c5e7fda, 0xe8bf, 0x4a96, 0x9a, 0x85, 0xa6, 0xe2, 0x3a, 0x8c, 0x63, 0x5c); + + internal static Guid Balanced = new Guid(0x381b4222, 0xf694, 0x41f0, 0x96, 0x85, 0xff, 0x5b, 0xb2, 0x60, 0xdf, 0x2e); + + internal static Guid PowerSaver = new Guid(0xa1841308, 0x3541, 0x4fab, 0xbc, 0x81, 0xf7, 0x15, 0x56, 0xf2, 0x0b, 0x4a); + } + + internal static unsafe Guid CurrentPolicy { + get { + Guid* retPolicy = null; + Guid ret = Guid.Empty; + try { + int callRet = PowerGetActiveScheme(IntPtr.Zero, ref retPolicy); + if (callRet == 0) { + ret = *retPolicy; + Marshal.FreeHGlobal((IntPtr)retPolicy); + } + } catch (Exception) { } + return ret; + } + + set { + Guid newPolicy = value; + int result = PowerSetActiveScheme(IntPtr.Zero, ref newPolicy); + if (result != 0) { + throw new Win32Exception(result); + } + } + } + + internal class PowerSetting : IDisposable { + /// <summary> + /// The power policy in effect when this instance was constructed. 
+ /// </summary> + private Guid previousPolicy; + + /// <summary> + /// Initializes a new instance of the <see cref="PowerSetting"/> class. + /// </summary> + /// <param name="powerProfile">The power profile.</param> + internal PowerSetting(Guid powerProfile) { + this.previousPolicy = PowerManagment.CurrentPolicy; + if (this.previousPolicy != powerProfile) { + PowerManagment.CurrentPolicy = powerProfile; + } + } + + /// <summary> + /// Performs application-defined tasks associated with freeing, releasing, or resetting unmanaged resources. + /// </summary> + public void Dispose() { + if (this.previousPolicy != PowerManagment.CurrentPolicy) { + PowerManagment.CurrentPolicy = this.previousPolicy; + } + } + } + + [DllImport("powrprof.dll")] + private static unsafe extern int PowerGetActiveScheme(IntPtr ReservedZero, ref Guid* policyGuidRet); + + [DllImport("powrprof.dll")] + private static extern int PowerSetActiveScheme(IntPtr ReservedZero, ref Guid policyGuid); + } +}
\ No newline at end of file
diff --git a/src/DotNetOpenAuth.Test/TestBase.cs b/src/DotNetOpenAuth.Test/TestBase.cs
index 8aeca2d..1c18c46 100644
--- a/src/DotNetOpenAuth.Test/TestBase.cs
+++ b/src/DotNetOpenAuth.Test/TestBase.cs
@@ -73,15 +73,5 @@ namespace DotNetOpenAuth.Test {
 				new HttpRequest("mock", "http://mock", "mock"),
 				new HttpResponse(new StringWriter()));
 		}
-
-#pragma warning disable 0618
-		protected internal static void SuspendLogging() {
-			LogManager.GetLoggerRepository().Threshold = LogManager.GetLoggerRepository().LevelMap["OFF"];
-		}
-
-		protected internal static void ResumeLogging() {
-			LogManager.GetLoggerRepository().Threshold = LogManager.GetLoggerRepository().LevelMap["ALL"];
-		}
-#pragma warning restore 0618
 	}
 }