using Unity.PerformanceTesting.Runtime;
using System;
using System.Diagnostics;
using System.Collections.Generic;
using System.Reflection;
namespace Unity.PerformanceTesting.Benchmark
{
    /// <summary>
    /// An interface for performing measurements which works from the Performance Test Framework or from the Benchmark Framework.
    /// This functionality is intended to be wrapped in an implementation specific to a type of benchmark comparison. See some of the included
    /// benchmarking implementations such as <c>BenchmarkContainerRunner</c> or <c>BenchmarkAllocatorRunner</c>, as well as
    /// the documentation in the Benchmark Framework repository for examples.
    /// </summary>
    public static class BenchmarkMeasure
    {
        // When true, samples are collected manually with a Stopwatch for the Benchmark Framework;
        // when false, measurement is delegated to the Performance Test Framework's Measure API.
        internal static bool ForBenchmarks = false;

        // Results of the most recent Measure() call. Samples are stored in seconds until
        // CalculateLastResults converts them to the requested unit.
        private static SampleGroup LastResultsInternal;

        // Bit flags (BenchmarkResults.kFlag*) noting irregularities of the last measurement,
        // e.g. parallel-job work stealing or disabled optimizations.
        private static uint LastResultsFootnotes;

        /// <summary>
        /// Converts the most recent measurement samples from seconds into <paramref name="unit"/>,
        /// recomputes their statistics, and packages them as a <see cref="BenchmarkResults"/>.
        /// </summary>
        /// <param name="unit">The sample unit to convert the raw second-based samples into.</param>
        /// <param name="statistic">The statistic used for ranking the result.</param>
        /// <returns>The converted results of the last measurement, including any footnote flags.</returns>
        internal static BenchmarkResults CalculateLastResults(SampleUnit unit, BenchmarkRankingStatistic statistic)
        {
            // Samples were recorded in seconds by Measure(); convert them in place.
            for (int i = 0; i < LastResultsInternal.Samples.Count; i++)
                LastResultsInternal.Samples[i] = Utils.ConvertSample(SampleUnit.Second, unit, LastResultsInternal.Samples[i]);
            LastResultsInternal.Unit = unit;
            Utils.UpdateStatistics(LastResultsInternal);
            return new BenchmarkResults(LastResultsInternal, statistic, LastResultsFootnotes);
        }

        /// <summary>
        /// Measure a set of samples for a given performance test. This functions correctly whether called through the Performance Test Framework
        /// by the Unity Test Runner, or if it is called through the Benchmark Framework.
        /// This must be called when a test will be run in a parallel job, as it marks the results with a note specifying the irregularity
        /// of parallel jobs due to work stealing.
        /// When running a single threaded test (job or otherwise), use <see cref="Measure"/>.
        /// </summary>
        /// <param name="perfMeasureType">A type which contains a single performance test's implementation</param>
        /// <param name="warmup">The number of warm up runs prior to collecting sample data</param>
        /// <param name="measurements">The number of runs to collect sample data from</param>
        /// <param name="action">The specific per-sample method to run for measurement</param>
        /// <param name="setup">A per-sample setup method that will not be part of measurement</param>
        /// <param name="teardown">A per-sample teardown method that will not be part of measurement</param>
        public static void MeasureParallel(Type perfMeasureType, int warmup, int measurements, Action action, Action setup = null, Action teardown = null)
        {
            Measure(perfMeasureType, warmup, measurements, action, setup, teardown);
            // Measure() resets the footnotes, so the parallel-jobs flag must be OR'd in afterwards.
            if (ForBenchmarks)
                LastResultsFootnotes |= BenchmarkResults.kFlagParallelJobs;
        }

        /// <summary>
        /// Measure a set of samples for a given performance test. This functions correctly whether called through the Performance Test Framework
        /// by the Unity Test Runner, or if it is called through the Benchmark Framework.
        /// This must not be called when a test will be run in a parallel job, as this does not mark the results with a note specifying the irregularity
        /// of parallel jobs due to work stealing.
        /// When running a multithreaded test, use <see cref="MeasureParallel"/>.
        /// </summary>
        /// <param name="perfMeasureType">A type which contains a single performance test's implementation</param>
        /// <param name="warmup">The number of warm up runs prior to collecting sample data</param>
        /// <param name="measurements">The number of runs to collect sample data from</param>
        /// <param name="action">The specific per-sample method to run for measurement</param>
        /// <param name="setup">A per-sample setup method that will not be part of measurement</param>
        /// <param name="teardown">A per-sample teardown method that will not be part of measurement</param>
        public static void Measure(Type perfMeasureType, int warmup, int measurements, Action action, Action setup = null, Action teardown = null)
        {
            if (ForBenchmarks)
            {
                SampleGroup results = new SampleGroup(perfMeasureType.Name, SampleUnit.Second, false);
                // BUG FIX: was `new List(measurements)` — the generic type argument was missing,
                // which does not compile; samples are doubles measured in seconds.
                results.Samples = new List<double>(measurements);
                Stopwatch stopwatch = Stopwatch.StartNew();
                for (int i = 0; i < warmup; i++)
                {
                    setup?.Invoke();
                    action();
                    teardown?.Invoke();
                }
                for (int i = 0; i < measurements; i++)
                {
                    setup?.Invoke();
                    // Restart so that only action() itself is timed, excluding setup/teardown.
                    stopwatch.Restart();
                    action();
                    results.Samples.Add(stopwatch.Elapsed.TotalSeconds);
                    teardown?.Invoke();
                }
                LastResultsInternal = results;
                LastResultsFootnotes = 0;
                // Check if NoOptimization is part of this measurement. MethodImplAttribute is not found in
                // CustomAttributes, and instead is a special-case found in MethodImplementationFlags.
                var methods = perfMeasureType.GetMethods(BindingFlags.Public | BindingFlags.Instance);
                foreach (var m in methods)
                {
                    if (m.MethodImplementationFlags.HasFlag(MethodImplAttributes.NoOptimization))
                    {
                        LastResultsFootnotes |= BenchmarkResults.kFlagNoOptimization;
                        break;
                    }
                }
            }
            else
            {
                // Not benchmarking: defer entirely to the Performance Test Framework's fluent API.
                PerformanceTesting.Measure.Method(action)
                    .SampleGroup(perfMeasureType.Name)
                    .SetUp(setup)
                    .CleanUp(teardown)
                    .WarmupCount(warmup)
                    .MeasurementCount(measurements)
                    .Run();
            }
        }
    }
}