Last active
February 17, 2026 13:09
-
-
Save hdf/4b18b6154ebcc7138142cd4caa753000 to your computer and use it in GitHub Desktop.
A C# class: give it a set of points and it will try to infer the function that generated them, so you can then request any interpolated or extrapolated value by x coordinate.
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
| #:package ScottPlot@5.1.57 | |
| using System; | |
| using System.Collections.Generic; | |
| using System.Globalization; | |
| using System.IO; | |
| using System.Linq; | |
| using System.Text; | |
| using System.Threading; | |
| using System.Threading.Tasks; | |
| public class Program | |
| { | |
private sealed class TestCase
{
    // One expectation bundle: named input points plus the values (and optionally
    // the exact printed function text) the solver is expected to reproduce.
    public string Name { get; init; } = "";
    public Tuple<double, double>[] Points { get; init; } = Array.Empty<Tuple<double, double>>();
    public Dictionary<double, double> Expected { get; init; } = new();
    public string? ExpectedFunction { get; init; }
    public bool RequireFunctionMatch { get; init; }
}
private static Tuple<double, double>[] CreateIndexedPoints(params double[] values)
{
    // Pair each value with its position: (0, v0), (1, v1), (2, v2), ...
    return values
        .Select((value, index) => Tuple.Create((double)index, value))
        .ToArray();
}
// Builds the fixed catalogue of test sequences. Each case supplies input points
// (indexed 0..N) plus the values the solver must predict at later indices;
// cases with RequireFunctionMatch also pin the exact printed function text.
private static IReadOnlyList<TestCase> GetTestCases()
{
    return new List<TestCase>
    {
        // Second-order linear recurrences (y[n] = y[n-1] + y[n-2] family).
        new TestCase
        {
            Name = "Fibonacci",
            Points = CreateIndexedPoints(0.0, 1.0, 1.0, 2.0, 3.0, 5.0, 8.0, 13.0, 21.0, 34.0, 55.0),
            Expected = new Dictionary<double, double> { { 11, 89 }, { 12, 144 }, { 13, 233 } }
        },
        new TestCase
        {
            Name = "Lucas",
            Points = CreateIndexedPoints(2.0, 1.0, 3.0, 4.0, 7.0, 11.0, 18.0, 29.0, 47.0, 76.0, 123.0),
            Expected = new Dictionary<double, double> { { 11, 199 }, { 12, 322 }, { 13, 521 } }
        },
        new TestCase
        {
            Name = "Pell",
            Points = CreateIndexedPoints(0.0, 1.0, 2.0, 5.0, 12.0, 29.0, 70.0, 169.0, 408.0, 985.0, 2378.0),
            Expected = new Dictionary<double, double> { { 11, 5741 }, { 12, 13860 }, { 13, 33461 } }
        },
        // Pure exponentials — also verify the printed function form.
        new TestCase
        {
            Name = "PowersOfTwo",
            Points = CreateIndexedPoints(1.0, 2.0, 4.0, 8.0, 16.0, 32.0, 64.0, 128.0, 256.0, 512.0, 1024.0),
            Expected = new Dictionary<double, double> { { 11, 2048 }, { 12, 4096 }, { 13, 8192 } },
            ExpectedFunction = "f(x) = 2^x",
            RequireFunctionMatch = true
        },
        new TestCase
        {
            Name = "AlternatingSigns",
            Points = CreateIndexedPoints(1.0, -1.0, 1.0, -1.0, 1.0, -1.0, 1.0, -1.0, 1.0, -1.0, 1.0),
            Expected = new Dictionary<double, double> { { 11, -1 }, { 12, 1 }, { 13, -1 } }
        },
        new TestCase
        {
            Name = "Mersenne",
            Points = CreateIndexedPoints(0.0, 1.0, 3.0, 7.0, 15.0, 31.0, 63.0, 127.0, 255.0, 511.0, 1023.0),
            Expected = new Dictionary<double, double> { { 11, 2047 }, { 12, 4095 }, { 13, 8191 } }
        },
        new TestCase
        {
            Name = "Jacobsthal",
            Points = CreateIndexedPoints(0.0, 1.0, 1.0, 3.0, 5.0, 11.0, 21.0, 43.0, 85.0, 171.0, 341.0),
            Expected = new Dictionary<double, double> { { 11, 683 }, { 12, 1365 }, { 13, 2731 } }
        },
        // Polynomial / constant shapes with exact function text.
        new TestCase
        {
            Name = "Constant",
            Points = CreateIndexedPoints(5.0, 5.0, 5.0, 5.0, 5.0, 5.0),
            Expected = new Dictionary<double, double> { { 6, 5 }, { 7, 5 } },
            ExpectedFunction = "f(x) = 5",
            RequireFunctionMatch = true
        },
        new TestCase
        {
            Name = "Linear",
            Points = CreateIndexedPoints(2.0, 5.0, 8.0, 11.0, 14.0, 17.0, 20.0, 23.0, 26.0, 29.0, 32.0),
            Expected = new Dictionary<double, double> { { 11, 35 }, { 12, 38 }, { 13, 41 } },
            ExpectedFunction = "f(x) = x*3 + 2",
            RequireFunctionMatch = true
        },
        new TestCase
        {
            Name = "Squares",
            Points = CreateIndexedPoints(0.0, 1.0, 4.0, 9.0, 16.0, 25.0, 36.0, 49.0, 64.0, 81.0, 100.0),
            Expected = new Dictionary<double, double> { { 11, 121 }, { 12, 144 }, { 13, 169 } },
            ExpectedFunction = "f(x) = x^2",
            RequireFunctionMatch = true
        },
        new TestCase
        {
            Name = "Triangular",
            Points = CreateIndexedPoints(0.0, 1.0, 3.0, 6.0, 10.0, 15.0, 21.0, 28.0, 36.0, 45.0, 55.0),
            Expected = new Dictionary<double, double> { { 11, 66 }, { 12, 78 }, { 13, 91 } },
            ExpectedFunction = "f(x) = x^2*1/2 + x*1/2",
            RequireFunctionMatch = true
        },
        // Trigonometric / sign-alternating shapes with exact function text.
        new TestCase
        {
            Name = "QuarterSine",
            Points = CreateIndexedPoints(0.0, 1.0, 0.0, -1.0, 0.0, 1.0, 0.0, -1.0, 0.0, 1.0, 0.0),
            Expected = new Dictionary<double, double> { { 11, -1 }, { 12, 0 }, { 13, 1 } },
            ExpectedFunction = "f(x) = sin(x*(1/2)*pi)",
            RequireFunctionMatch = true
        },
        new TestCase
        {
            Name = "PowersOfThree",
            Points = CreateIndexedPoints(1.0, 3.0, 9.0, 27.0, 81.0, 243.0, 729.0, 2187.0, 6561.0, 19683.0, 59049.0),
            Expected = new Dictionary<double, double> { { 11, 177147 }, { 12, 531441 }, { 13, 1594323 } },
            ExpectedFunction = "f(x) = 3^x",
            RequireFunctionMatch = true
        },
        new TestCase
        {
            Name = "AlternatingTwos",
            Points = CreateIndexedPoints(2.0, -2.0, 2.0, -2.0, 2.0, -2.0, 2.0, -2.0, 2.0, -2.0, 2.0),
            Expected = new Dictionary<double, double> { { 11, -2 }, { 12, 2 }, { 13, -2 } },
            ExpectedFunction = "f(x) = (-1)^x*2",
            RequireFunctionMatch = true
        },
        new TestCase
        {
            Name = "QuarterSineAmp3",
            Points = CreateIndexedPoints(0.0, 3.0, 0.0, -3.0, 0.0, 3.0, 0.0, -3.0, 0.0, 3.0, 0.0),
            Expected = new Dictionary<double, double> { { 11, -3 }, { 12, 0 }, { 13, 3 } },
            ExpectedFunction = "f(x) = sin(x*(1/2)*pi)*3",
            RequireFunctionMatch = true
        }
    };
}
private static void ListTests()
{
    // Print the name of every built-in test case, one per line.
    Console.WriteLine("Available tests:");
    foreach (TestCase testCase in GetTestCases())
        Console.WriteLine($"- {testCase.Name}");
}
| // Given a series of points, this class predicts values at arbitrary x. | |
| // It assumes points come from an underlying function and tunes predefined modules | |
| // to recover that function, then uses it for interpolation/extrapolation. | |
| public class Series | |
| { | |
// Number of tunable slots in each module's parameter vector.
private const int ParameterCountPerModule = 16;
// Slot indices into a module's parameter array (term shapes per BuildModuleExpression):
private const int ExpBaseIndex = 0;          // base a in a^x
private const int ExpCoeffIndex = 1;         // coefficient multiplying a^x
private const int PolyPowerIndex = 2;        // exponent p in x^p
private const int PolyCoeffIndex = 3;        // coefficient multiplying x^p
private const int LinearCoeffIndex = 4;      // coefficient multiplying x
private const int ConstantIndex = 5;         // additive constant term
private const int LogCoeffIndex = 6;         // coefficient multiplying log(x)
private const int LogBaseIndex = 7;          // base of the logarithm (log(x)/log(base))
private const int TrigFrequencyIndex = 8;    // frequency shared by sin and cos terms
private const int TrigPhaseIndex = 9;        // phase shared by sin and cos terms
private const int SinCoeffIndex = 10;        // coefficient multiplying sin(...)
private const int CosCoeffIndex = 11;        // coefficient multiplying cos(...)
private const int ReciprocalCoeffIndex = 12; // coefficient multiplying 1/(x+shift)
private const int ReciprocalShiftIndex = 13; // shift inside 1/(x+shift)
private const int SqrtCoeffIndex = 14;       // coefficient multiplying sqrt(x+shift)
private const int SqrtShiftIndex = 15;       // shift inside sqrt(x+shift)
// Magnitudes at or below this are treated as zero when printing functions.
private const double DisplayZeroThreshold = 1e-8;
| // Shared helper utilities for seed construction and sequence shape detection. | |
| // Nested here so Series remains self-contained while keeping math/helper logic centralized. | |
private static class SeriesSeedHelpers
{
    // Collects y values at x = 0, 1, 2, ... in order, stopping at the first
    // missing integer key. Returns an empty list if x = 0 is absent.
    public static List<double> GetContiguousIndexedValuesFromZero(IReadOnlyDictionary<double, double> series)
    {
        List<double> orderedY = new List<double>();
        int index = 0;
        while (series.TryGetValue(index, out double y))
        {
            orderedY.Add(y);
            index++;
        }
        return orderedY;
    }
    // Least-squares fit of y[n] = p*y[n-1] + q*y[n-2] over the whole sequence,
    // then solves the characteristic equation r^2 = p*r + q for roots r1, r2 and
    // derives mixing coefficients b1, b2 from y[0], y[1] so y[n] ~= b1*r1^n + b2*r2^n.
    // Fails (returns false) on short input, a singular normal system, complex or
    // (near-)repeated roots, or non-finite results.
    public static bool TrySolveSecondOrderRecurrence(IReadOnlyList<double> orderedY, out double r1, out double r2, out double b1, out double b2)
    {
        r1 = r2 = b1 = b2 = 0;
        if (orderedY.Count < 5)
            return false;
        // Accumulate the 2x2 normal equations for the regression on (y[n-1], y[n-2]).
        double s11 = 0;
        double s12 = 0;
        double s22 = 0;
        double t1 = 0;
        double t2 = 0;
        for (int n = 2; n < orderedY.Count; n++)
        {
            double y1 = orderedY[n - 1];
            double y2 = orderedY[n - 2];
            double yn = orderedY[n];
            s11 += y1 * y1;
            s12 += y1 * y2;
            s22 += y2 * y2;
            t1 += y1 * yn;
            t2 += y2 * yn;
        }
        double determinant = s11 * s22 - s12 * s12;
        if (Math.Abs(determinant) < 1e-12)
            return false; // Near-singular system: recurrence coefficients not identifiable.
        // Cramer's rule for p (lag-1 weight) and q (lag-2 weight).
        double p = (t1 * s22 - t2 * s12) / determinant;
        double q = (s11 * t2 - s12 * t1) / determinant;
        // Roots of r^2 - p*r - q = 0.
        double discriminant = p * p + 4 * q;
        if (!double.IsFinite(discriminant) || discriminant < 0)
            return false; // Complex roots: not representable by two real exponentials.
        double sqrtDiscriminant = Math.Sqrt(discriminant);
        r1 = (p + sqrtDiscriminant) / 2.0;
        r2 = (p - sqrtDiscriminant) / 2.0;
        if (!double.IsFinite(r1) || !double.IsFinite(r2) || Math.Abs(r1 - r2) < 1e-12)
            return false; // Repeated roots would make the b1/b2 solve below divide by ~0.
        // Solve b1*r1^0 + b2*r2^0 = y[0] and b1*r1 + b2*r2 = y[1].
        double y0 = orderedY[0];
        double y1First = orderedY[1];
        b1 = (y1First - y0 * r2) / (r1 - r2);
        b2 = y0 - b1;
        return double.IsFinite(b1) && double.IsFinite(b2);
    }
    // Exact quadratic a*x^2 + b*x + c through the first three points
    // (x = 0, 1, 2), via finite differences. Requires at least three values.
    public static bool TrySolveQuadraticFromFirstThree(IReadOnlyList<double> orderedY, out double a, out double b, out double c)
    {
        a = b = c = 0;
        if (orderedY.Count < 3)
            return false;
        double y0 = orderedY[0];
        double y1 = orderedY[1];
        double y2 = orderedY[2];
        a = (y2 - (2 * y1) + y0) / 2.0; // half the second difference
        b = y1 - y0 - a;
        c = y0;
        return double.IsFinite(a) && double.IsFinite(b) && double.IsFinite(c);
    }
    // Detects an exact geometric sequence: every consecutive ratio equals the
    // first ratio within 1e-9. Fails on zeros (division hazard) or short input.
    public static bool TrySolveGeometricRatio(IReadOnlyList<double> orderedY, out double ratio, out double firstValue)
    {
        ratio = 0;
        firstValue = 0;
        if (orderedY.Count < 3 || Math.Abs(orderedY[0]) < 1e-12)
            return false;
        firstValue = orderedY[0];
        ratio = orderedY[1] / orderedY[0];
        if (!double.IsFinite(ratio))
            return false;
        for (int i = 2; i < orderedY.Count; i++)
        {
            if (Math.Abs(orderedY[i - 1]) < 1e-12)
                return false;
            double currentRatio = orderedY[i] / orderedY[i - 1];
            if (!double.IsFinite(currentRatio) || Math.Abs(currentRatio - ratio) > 1e-9)
                return false;
        }
        return true;
    }
    // Detects amplitude * sin(x*pi/2) sampled at integer x (pattern 0, A, 0, -A, ...).
    // The amplitude is taken as the largest |y|; only the first 12 samples are checked.
    public static bool TryMatchQuarterSine(IReadOnlyList<double> orderedY, out double amplitude)
    {
        amplitude = 0;
        if (orderedY.Count < 8)
            return false;
        amplitude = orderedY.Select(Math.Abs).Max();
        if (amplitude < 1e-9)
            return false; // All-zero input: amplitude is meaningless.
        for (int i = 0; i < Math.Min(orderedY.Count, 12); i++)
        {
            double expected = Math.Sin((Math.PI / 2.0) * i) * amplitude;
            if (Math.Abs(orderedY[i] - expected) > 1e-6)
                return false;
        }
        return true;
    }
    // Detects strict sign alternation A, -A, A, -A, ... with constant magnitude A,
    // taken from the first sample. Requires at least four values.
    public static bool TryMatchAlternatingAmplitude(IReadOnlyList<double> orderedY, out double amplitude)
    {
        amplitude = 0;
        if (orderedY.Count < 4)
            return false;
        amplitude = Math.Abs(orderedY[0]);
        if (amplitude < 1e-12)
            return false;
        for (int i = 0; i < orderedY.Count; i++)
        {
            double expected = ((i % 2) == 0 ? 1 : -1) * amplitude;
            if (Math.Abs(orderedY[i] - expected) > 1e-9)
                return false;
        }
        return true;
    }
    // Zeros every entry of a jagged parameter seed in place.
    public static void ZeroSeed(double[][] seed)
    {
        for (int i = 0; i < seed.Length; i++)
            for (int j = 0; j < seed[i].Length; j++)
                seed[i][j] = 0;
    }
}
// Immutable point-in-time capture of optimizer state during a tuning run.
public sealed class TuningSnapshot
{
    public int Restart { get; init; }    // Restart index the snapshot was taken in.
    public int Iteration { get; init; }  // Iteration number within that restart.
    public double Loss { get; init; }    // Loss value at the moment of capture.
    public double[][] Parameters { get; init; } = Array.Empty<double[]>(); // Module parameters at capture time.
}
// One logged step of the evolutionary optimizer's progress trace.
public sealed class EvolutionTraceEntry
{
    public string Stage { get; init; } = string.Empty;  // Phase label the entry was recorded in.
    public int Step { get; init; }                      // Step counter within that stage.
    public double Loss { get; init; }                   // Loss at this step.
    public string Function { get; init; } = string.Empty; // Rendered function text at this step.
}
| // Selects how tuning progress is shown while optimization is running. | |
public enum ProgressOutputMode
{
    None, // No progress output at all.
    Log,  // Text log lines only.
    Bar,  // In-place console progress bar only.
    Both  // Both log lines and the progress bar.
}
// Selects which optimizer tunes the parameters.
public enum TuningMode
{
    Gradient,     // Gradient-based optimizer (see Beta1/Beta2/Epsilon settings).
    Evolutionary  // Population-based evolutionary search (see PopulationSize etc.).
}
// Stores known points (x -> y). Unknown points are predicted and then cached here.
internal Dictionary<double, double> _series = new Dictionary<double, double>() { { 0, 0 }, { 1, 1 } }; // Default points for the Fibonacci sequence.
private readonly List<TuningSnapshot> _tuningSnapshots = new List<TuningSnapshot>();
private List<EvolutionTraceEntry> _evolutionTraceEntries = new List<EvolutionTraceEntry>();
private bool _isTuned = false;                       // Set after the one-time lazy tuning in Calc.
private bool _hasActiveProgressBar = false;          // True while a '\r' progress line is pending.
private bool _evolutionaryFallbackAttempted = false; // Guards the one-shot evolutionary fallback.
// Runtime/tuning configuration
// These switches are safe for callers to tweak without touching optimizer internals.
public ProgressOutputMode ProgressMode { get; set; } = ProgressOutputMode.Log; // How tuning progress is surfaced.
public bool UseParallelRestarts { get; set; } = true;
public bool QuietAfterConvergenceInParallel { get; set; } = true;
public bool StopOnFirstPerfectFit { get; set; } = true;
public bool CaptureTuningSnapshots { get; set; } = false;
public bool CaptureEvolutionTrace { get; set; } = false;
public int SnapshotInterval { get; set; } = 50;
public int? RandomSeed { get; set; } = 12345; // null => non-deterministic seeding
public int MaxParallelism { get; set; } = Environment.ProcessorCount;
public bool RequirePerfectFit { get; set; } = true; // When true, Calc throws unless a perfect fit is found.
public bool AllowEvolutionaryFallbackPrompt { get; set; } = false;
public bool UseStructuralSeeds { get; set; } = true;
public bool UseProgressiveModelSearch { get; set; } = true;
public bool PreferSimpleModelsWhenNoProgressive { get; set; } = true;
public int SimplicityCandidatePoolSize { get; set; } = 24;
public double SimplicityLossSlackFactor { get; set; } = 2.0;
public TuningMode OptimizerMode { get; set; } = TuningMode.Gradient;
// Read-only status of the most recent tuning attempt.
public bool HasPerfectFit { get; private set; } = false;
public double LastLoss { get; private set; } = double.PositiveInfinity;
public string LastFitStatusMessage { get; private set; } = "Not tuned yet.";
// Optimizer configuration
// These parameters control speed, stability, and stopping behavior of the tuner.
public int RestartCount { get; set; } = 16;
public int MaxIterations { get; set; } = 3000;
public int ProgressInterval { get; set; } = 250;
public double InitialLearningRate { get; set; } = 0.01;
public double Beta1 { get; set; } = 0.9;
public double Beta2 { get; set; } = 0.999;
public double Epsilon { get; set; } = 1e-8;
public double ConvergenceTolerance { get; set; } = 1e-8;
public bool SnapNearIntegersForIntegerX { get; set; } = true; // See Calc: rounds near-integer predictions at integer x.
public double IntegerSnapTolerance { get; set; } = 1e-9;
// Evolutionary optimizer settings (used when OptimizerMode=Evolutionary).
public int PopulationSize { get; set; } = 64;
public int EliteCount { get; set; } = 10;
public int GenerationCount { get; set; } = 80;
public int TournamentSize { get; set; } = 4;
public double MutationRate { get; set; } = 0.20;
public double MutationScaleFactor { get; set; } = 0.05;
public double InitialScaleFactor { get; set; } = 0.25;
public bool EnableLocalRefinement { get; set; } = true;
// Three modules, each a 16-slot vector (see the *Index constants).
// Slot 7 (log base) defaults to e and slot 8 (trig frequency) to 1 so those
// terms are well-defined even before tuning; all coefficients start at 0.
public double[][] Parameters { get; protected internal set; } = new double[][]
{
    new double[] { 0, 0, 0, 0, 0, 0, 0, Math.E, 1, 0, 0, 0, 0, 0, 0, 0 },
    new double[] { 0, 0, 0, 0, 0, 0, 0, Math.E, 1, 0, 0, 0, 0, 0, 0, 0 },
    new double[] { 0, 0, 0, 0, 0, 0, 0, Math.E, 1, 0, 0, 0, 0, 0, 0, 0 }
};
public double this[double index]
{
    // Fast path: serve known/cached points directly; otherwise tune (once) and predict.
    get
    {
        if (_series.TryGetValue(index, out double knownValue))
            return knownValue;
        return Calc(index);
    }
    set => _series[index] = value;
}
// Snapshots captured during tuning, in recording order.
public IReadOnlyList<TuningSnapshot> GetTuningSnapshots() => _tuningSnapshots;
// Evolutionary-trace entries captured during tuning, in recording order.
public IReadOnlyList<EvolutionTraceEntry> GetEvolutionTraceEntries() => _evolutionTraceEntries;
// All known (and cached) points, sorted by x.
public IReadOnlyList<KeyValuePair<double, double>> GetKnownPoints() =>
    _series.OrderBy(point => point.Key).ToList();
public void SetPoints(Tuple<double, double>[] points)
{
    // Register each (x, y) pair as a known value, overwriting duplicates by x.
    foreach (var (x, y) in points)
        _series[x] = y;
}
public void SetPoints(double[] values)
{
    // Register values at consecutive integer x positions starting from 0.
    // Fix: the original used a double loop counter (`for (double i = 0; ...)`)
    // with a cast back to int on every access — an accumulation/precision
    // anti-pattern. An int counter is exact and idiomatic; the dictionary key
    // is still implicitly widened to double.
    for (int i = 0; i < values.Length; i++)
        _series[i] = values[i];
}
public virtual string GetFunction()
{
    // Render the current model as "f(x) = term + term + ...", including only
    // modules whose coefficients actually contribute; "0" if none do.
    const string prefix = "f(x) = ";
    List<string> activeModules = Parameters
        .Where(ModuleHasEffect)
        .Select(BuildModuleExpression)
        .ToList();
    return activeModules.Count == 0
        ? prefix + "0"
        : prefix + string.Join(" + ", activeModules);
}
// True when the magnitude is small enough to print as zero.
private static bool IsNearZero(double value) => Math.Abs(value) <= DisplayZeroThreshold;
private static bool IsApproximately(double actual, double expected, double tolerance = 1e-8)
{
    // Relative comparison: scale the tolerance by the larger magnitude (floored
    // at 1) so large constants are not held to an unrealistically tiny absolute error.
    double magnitude = Math.Max(Math.Abs(actual), Math.Abs(expected));
    double scale = magnitude < 1.0 ? 1.0 : magnitude;
    return Math.Abs(actual - expected) <= tolerance * scale;
}
// Canonical symbols we prefer in printed functions, with their numeric values.
// Keep this list curated to avoid overly exotic substitutions.
// Order matters: TryFormatConstantExpression returns the first match it finds.
private static readonly (string Name, double Value)[] KnownConstants = new[]
{
    ("pi", Math.PI),
    ("tau", 2.0 * Math.PI),
    ("e", Math.E),
    ("phi", (1.0 + Math.Sqrt(5.0)) / 2.0), // golden ratio
    ("ln(2)", Math.Log(2.0)),
    ("gamma", 0.5772156649015328606), // Euler–Mascheroni constant
    ("sqrt(2)", Math.Sqrt(2.0)),
    ("sqrt(3)", Math.Sqrt(3.0)),
    ("sqrt(5)", Math.Sqrt(5.0)),
    ("sqrt(7)", Math.Sqrt(7.0)),
    ("sqrt(11)", Math.Sqrt(11.0)),
    ("sqrt(13)", Math.Sqrt(13.0)),
    ("1/sqrt(2)", 1.0 / Math.Sqrt(2.0)),
    ("1/sqrt(3)", 1.0 / Math.Sqrt(3.0)),
    ("1/sqrt(5)", 1.0 / Math.Sqrt(5.0))
};
private static string? TryFormatSimpleFraction(double value)
{
    // Search small rationals n/d (2 <= d <= 16, |n| <= 16, n != 0) and return
    // the first match; null if none. Bounds are deliberately tight so printed
    // functions stay compact.
    for (int denominator = 2; denominator <= 16; denominator++)
        for (int numerator = -16; numerator <= 16; numerator++)
        {
            if (numerator == 0)
                continue;
            if (IsApproximately(value, numerator / (double)denominator))
                return $"{numerator}/{denominator}";
        }
    return null;
}
private static string? TryFormatConstantExpression(double value)
{
    // For each curated constant c, try forms in fixed priority order:
    // 1-c, c, -c, (p/q)*c, then small integer multiples m*c. The order is
    // significant: the first matching form for the first matching constant wins.
    foreach (var (name, constantValue) in KnownConstants)
    {
        if (IsApproximately(value, 1.0 - constantValue))
            return BuildOneMinusExpression(name);
        if (IsApproximately(value, constantValue))
            return name;
        if (IsApproximately(value, -constantValue))
            return $"-{name}";
        string? fraction = TryFormatSimpleFraction(value / constantValue);
        if (fraction is not null)
        {
            return fraction switch
            {
                "1" => name,
                "-1" => $"-{name}",
                _ => $"({fraction})*{name}",
            };
        }
        for (int multiplier = -12; multiplier <= 12; multiplier++)
        {
            // Skip multiples already covered by the exact/negated checks above.
            if (multiplier is 0 or 1 or -1)
                continue;
            if (IsApproximately(value, multiplier * constantValue))
                return $"{multiplier}*{name}";
        }
    }
    return null;
}
private static string BuildOneMinusExpression(string constantName)
{
    // Wrap composite tokens (leading minus, or any /, *, +, or interior -) in
    // parentheses so "1-..." parses unambiguously.
    bool isComposite =
        constantName.StartsWith("-", StringComparison.Ordinal)
        || constantName.IndexOfAny(new[] { '/', '*', '+' }) >= 0
        || constantName.IndexOf('-', 1) >= 0;
    return isComposite ? $"1-({constantName})" : $"1-{constantName}";
}
private static string FormatNumber(double value)
{
    // Formatting precedence:
    // 1) non-finite passthrough, 2) zero / integer, 3) small fraction,
    // 4) known symbolic constant (possibly scaled), 5) compact G12 fallback.
    if (!double.IsFinite(value))
        return value.ToString(CultureInfo.InvariantCulture);
    if (IsNearZero(value))
        return "0";
    double rounded = Math.Round(value);
    if (Math.Abs(value - rounded) <= 1e-10)
        return rounded.ToString(CultureInfo.InvariantCulture);
    return TryFormatSimpleFraction(value)
        ?? TryFormatConstantExpression(value)
        ?? value.ToString("G12", CultureInfo.InvariantCulture);
}
private static string FormatPowerBase(double value)
{
    // Composite tokens must be parenthesized before '^x' to avoid ambiguous output.
    string token = FormatNumber(value);
    bool isComposite =
        token.StartsWith("-", StringComparison.Ordinal)
        || token.IndexOfAny(new[] { '/', '*', '+' }) >= 0
        || token.IndexOf('-', 1) >= 0;
    return isComposite ? $"({token})" : token;
}
private static string BuildTrigArgument(double frequency, double phase)
{
    // Render the inside of sin(...)/cos(...): "x" scaled by frequency, with an
    // optional "+/- phase" tail (omitted when the phase prints as zero).
    string argument;
    if (IsApproximately(frequency, 1.0))
        argument = "x";
    else if (IsApproximately(frequency, -1.0))
        argument = "-x";
    else
        argument = $"x*{FormatNumber(frequency)}";
    if (IsNearZero(phase))
        return argument;
    return phase < 0
        ? $"{argument} - {FormatNumber(-phase)}"
        : $"{argument} + {FormatNumber(phase)}";
}
private static bool ModuleHasEffect(double[] module)
{
    // Short (malformed) modules: any non-zero entry counts as an effect.
    if (module.Length < ParameterCountPerModule)
        return module.Any(entry => !IsNearZero(entry));
    // Full modules: only coefficient slots decide; bases, shifts, frequency and
    // phase alone cannot produce output without a non-zero coefficient.
    int[] coefficientSlots =
    {
        ExpCoeffIndex, PolyCoeffIndex, LinearCoeffIndex, ConstantIndex,
        LogCoeffIndex, SinCoeffIndex, CosCoeffIndex,
        ReciprocalCoeffIndex, SqrtCoeffIndex,
    };
    return coefficientSlots.Any(slot => !IsNearZero(module[slot]));
}
// Renders one module's parameter vector as a human-readable sum of terms,
// e.g. "2^x + x*3 + sin(x*(1/2)*pi)*3". Near-zero coefficients suppress their
// terms entirely; an all-suppressed module renders as "0".
private static string BuildModuleExpression(double[] module)
{
    List<string> terms = new List<string>();
    double constantAccumulator = module[ConstantIndex];
    // If base is effectively 1, a^x*b collapses to just b; fold it into the constant term.
    bool expIsConstant = Math.Abs(module[ExpBaseIndex] - 1.0) <= DisplayZeroThreshold;
    if (!IsNearZero(module[ExpCoeffIndex]) && expIsConstant)
        constantAccumulator += module[ExpCoeffIndex];
    // Exponential term: base^x, with the coefficient omitted when it is ~1.
    if (!IsNearZero(module[ExpCoeffIndex]) && !expIsConstant)
    {
        string expCore = $"{FormatPowerBase(module[ExpBaseIndex])}^x";
        terms.Add(IsApproximately(module[ExpCoeffIndex], 1.0)
            ? expCore
            : $"{expCore}*{FormatNumber(module[ExpCoeffIndex])}");
    }
    // Power term: x^p.
    if (!IsNearZero(module[PolyCoeffIndex]))
    {
        string polyCore = $"x^{FormatNumber(module[PolyPowerIndex])}";
        terms.Add(IsApproximately(module[PolyCoeffIndex], 1.0)
            ? polyCore
            : $"{polyCore}*{FormatNumber(module[PolyCoeffIndex])}");
    }
    // Linear term: x (coefficient omitted when ~1).
    if (!IsNearZero(module[LinearCoeffIndex]))
        terms.Add(IsApproximately(module[LinearCoeffIndex], 1.0)
            ? "x"
            : $"x*{FormatNumber(module[LinearCoeffIndex])}");
    // Constant term (includes any folded-in degenerate exponential).
    if (!IsNearZero(constantAccumulator))
        terms.Add($"{FormatNumber(constantAccumulator)}");
    // Logarithmic term, printed via the change-of-base identity.
    if (!IsNearZero(module[LogCoeffIndex]))
        terms.Add($"log(x)*{FormatNumber(module[LogCoeffIndex])}/log({FormatNumber(module[LogBaseIndex])})");
    // Sine term; frequency and phase are shared with the cosine term below.
    if (!IsNearZero(module[SinCoeffIndex]))
    {
        string sinCore = $"sin({BuildTrigArgument(module[TrigFrequencyIndex], module[TrigPhaseIndex])})";
        terms.Add(IsApproximately(module[SinCoeffIndex], 1.0)
            ? sinCore
            : $"{sinCore}*{FormatNumber(module[SinCoeffIndex])}");
    }
    // Cosine term (same shared frequency/phase slots).
    if (!IsNearZero(module[CosCoeffIndex]))
    {
        string cosCore = $"cos({BuildTrigArgument(module[TrigFrequencyIndex], module[TrigPhaseIndex])})";
        terms.Add(IsApproximately(module[CosCoeffIndex], 1.0)
            ? cosCore
            : $"{cosCore}*{FormatNumber(module[CosCoeffIndex])}");
    }
    // Reciprocal term: coeff/(x+shift); a ~1 coefficient prints as 1/(x+shift).
    if (!IsNearZero(module[ReciprocalCoeffIndex]))
    {
        string reciprocalCore = $"1/(x+{FormatNumber(module[ReciprocalShiftIndex])})";
        terms.Add(IsApproximately(module[ReciprocalCoeffIndex], 1.0)
            ? reciprocalCore
            : $"{FormatNumber(module[ReciprocalCoeffIndex])}/(x+{FormatNumber(module[ReciprocalShiftIndex])})");
    }
    // Square-root term: sqrt(x+shift), optionally scaled.
    if (!IsNearZero(module[SqrtCoeffIndex]))
    {
        string sqrtCore = $"sqrt(x+{FormatNumber(module[SqrtShiftIndex])})";
        terms.Add(IsApproximately(module[SqrtCoeffIndex], 1.0)
            ? sqrtCore
            : $"{sqrtCore}*{FormatNumber(module[SqrtCoeffIndex])}");
    }
    return terms.Count == 0 ? "0" : string.Join(" + ", terms);
}
private static double[] SanitizeModuleForDisplay(double[] module)
{
    // Work on a copy: callers keep their raw tuned values.
    double[] sanitized = new double[module.Length];
    Array.Copy(module, sanitized, module.Length);
    if (sanitized.Length < ParameterCountPerModule)
    {
        // Malformed module: flush every near-zero entry.
        for (int i = 0; i < sanitized.Length; i++)
            if (IsNearZero(sanitized[i]))
                sanitized[i] = 0;
        return sanitized;
    }
    // Full module: flush only the display-relevant slots (coefficients, the two
    // shifts, and the trig phase). Bases, the poly power, and the trig frequency
    // keep their raw values.
    int[] slotsToFlush =
    {
        ExpCoeffIndex, PolyCoeffIndex, LinearCoeffIndex, ConstantIndex,
        LogCoeffIndex, SinCoeffIndex, CosCoeffIndex,
        ReciprocalCoeffIndex, SqrtCoeffIndex,
        ReciprocalShiftIndex, SqrtShiftIndex, TrigPhaseIndex,
    };
    foreach (int slot in slotsToFlush)
        if (IsNearZero(sanitized[slot]))
            sanitized[slot] = 0;
    return sanitized;
}
internal double[][] GetDisplayParameters(double[][] source)
{
    // Clone first so the caller's parameter arrays are never mutated, then
    // sanitize each module for display.
    double[][] display = CloneParameters(source);
    for (int moduleIndex = 0; moduleIndex < display.Length; moduleIndex++)
        display[moduleIndex] = SanitizeModuleForDisplay(display[moduleIndex]);
    return display;
}
public string GetFunctionWithParameters(double[][] parameters)
{
    // Render a symbolic function for an arbitrary parameter snapshot: swap the
    // sanitized snapshot in, render, and always restore the caller's parameters.
    double[][] saved = CloneCurrentParameters();
    try
    {
        Parameters = GetDisplayParameters(parameters);
        return GetFunction();
    }
    finally
    {
        Parameters = saved;
    }
}
public string GetSeries()
{
    // KeyValuePair.ToString prints "[x, y]"; swap its brackets for parentheses
    // so points read as "(x, y)" inside the outer list brackets.
    string joinedPairs = string.Join(", ", _series);
    return "[" + joinedPairs.Replace('[', '(').Replace(']', ')') + "]";
}
// Function text on the first line, known points on the second.
public override string ToString() => GetFunction() + "\n" + GetSeries();
public string GetOptimizerSummary()
{
    // One-line description of the active optimizer and its key settings.
    if (OptimizerMode == TuningMode.Gradient)
        return $"Gradient(restarts={RestartCount}, maxIter={MaxIterations}, parallel={UseParallelRestarts}, structuralSeeds={UseStructuralSeeds}, progressive={UseProgressiveModelSearch})";
    string mutationRateText = MutationRate.ToString("0.####", CultureInfo.InvariantCulture);
    return $"Evolutionary(pop={PopulationSize}, elite={EliteCount}, generations={GenerationCount}, mutationRate={mutationRateText}, refine={EnableLocalRefinement}, structuralSeeds={UseStructuralSeeds}, progressive={UseProgressiveModelSearch})";
}
internal protected virtual void ReportProgress(string message)
{
    // Text-log output; suppressed unless the mode includes logging (Log/Both).
    if (ProgressMode is ProgressOutputMode.None or ProgressOutputMode.Bar)
        return;
    // Finish any in-place progress bar line first so the log line renders cleanly.
    EnsureProgressBarLineCompleted();
    Console.WriteLine($"[Thread {Thread.CurrentThread.ManagedThreadId}] {message}");
}
internal protected virtual void ReportProgressBar(int restart, int restartCount, int iteration, int maxIterations, double loss)
{
    // One console line refreshed in place via '\r'; only active in Bar/Both modes.
    if (ProgressMode is ProgressOutputMode.None or ProgressOutputMode.Log)
        return;
    // Overall fraction done across all restarts, clamped to [0, 1].
    double unitsDone = restart * (double)maxIterations + iteration;
    double unitsTotal = Math.Max(1, restartCount * (double)maxIterations);
    double progress = Math.Clamp(unitsDone / unitsTotal, 0, 1);
    const int width = 30;
    int filled = (int)Math.Round(progress * width);
    string bar = new string('#', filled).PadRight(width, '-');
    string lossText = double.IsFinite(loss) ? loss.ToString("G6") : "n/a";
    Console.Write($"\r[{bar}] {progress * 100,6:0.00}% | restart {restart + 1}/{restartCount} | iter {iteration}/{maxIterations} | loss={lossText} ");
    _hasActiveProgressBar = true;
}
internal protected virtual void ReportRestartProgressBar(int completedRestarts, int restartCount, double bestLoss)
{
    // Coarser restart-level bar for parallel mode, where restarts finish out of order.
    if (ProgressMode is ProgressOutputMode.None or ProgressOutputMode.Log)
        return;
    double progress = Math.Clamp(completedRestarts / (double)Math.Max(1, restartCount), 0, 1);
    const int width = 30;
    int filled = (int)Math.Round(progress * width);
    string bar = new string('#', filled).PadRight(width, '-');
    string lossText = double.IsFinite(bestLoss) ? bestLoss.ToString("G6") : "n/a";
    Console.Write($"\r[{bar}] {progress * 100,6:0.00}% | restarts {completedRestarts}/{restartCount} | best loss={lossText} ");
    _hasActiveProgressBar = true;
}
internal protected virtual void EnsureProgressBarLineCompleted()
{
    // Terminate a pending '\r' progress line so the next log starts on a fresh row.
    if (!_hasActiveProgressBar)
        return;
    Console.WriteLine();
    _hasActiveProgressBar = false;
}
// Derived classes can override this to provide deep-copy behavior for additional mutable state.
internal protected virtual Series CreateTuningClone()
{
    // Shallow-copy the instance, then deep-copy the mutable pieces so a parallel
    // worker cannot share state with this instance.
    Series worker = (Series)MemberwiseClone();
    worker._series = new Dictionary<double, double>(_series);
    worker.Parameters = CloneParameters(Parameters);
    worker._evolutionTraceEntries = new List<EvolutionTraceEntry>();
    // Reset per-run state: the worker starts untuned and runs silently/serially.
    worker._isTuned = false;
    worker._hasActiveProgressBar = false;
    worker._evolutionaryFallbackAttempted = false;
    worker.ProgressMode = ProgressOutputMode.None;
    worker.UseParallelRestarts = false;
    worker.HasPerfectFit = false;
    worker.LastLoss = double.PositiveInfinity;
    worker.LastFitStatusMessage = "Worker clone not tuned yet.";
    return worker;
}
/// <summary>
/// Extension hook for derived classes that can supply analytic gradients for
/// their custom <see cref="Func"/>. Return true if gradients were fully
/// accumulated analytically into <paramref name="gradients"/>; return false
/// to make the tuner use its numeric fallback.
/// </summary>
internal protected virtual bool TryAccumulateGradients(double x, double y, double predicted, double error, double[][] gradients, int sampleCount)
{
// Default behavior: no analytic gradient provided.
return false;
}
/// <summary>
/// Core prediction path: lazily tunes on first use, optionally runs the
/// interactive evolutionary fallback when a perfect fit is required but was
/// not reached (throwing if it still fails), evaluates the model, snaps
/// near-integer results for integer x, and caches the prediction into the
/// point map.
/// </summary>
private double Calc(double x)
{
// Tune once lazily, the first time we need a prediction.
if (!_isTuned)
{
TuneParameters(); // Happens only once.
_isTuned = true;
}
// Strict mode: refuse to answer from an imperfect fit unless the
// evolutionary fallback rescues it.
if (RequirePerfectFit && !HasPerfectFit)
{
bool fallbackSolved = TryEvolutionaryFallbackInteractive();
if (!fallbackSolved)
throw new InvalidOperationException(LastFitStatusMessage);
}
double result = Func(x);
if (SnapNearIntegersForIntegerX)
{
double rounded = Math.Round(result);
bool isIntegerX = Math.Abs(x - Math.Round(x)) <= IntegerSnapTolerance;
if (isIntegerX)
{
// Snap when every stored value already looks integer-valued, or when
// the prediction itself is within tolerance of an integer.
bool seriesLooksIntegerValued = _series.Count > 0 && _series.Values.All(value => Math.Abs(value - Math.Round(value)) <= IntegerSnapTolerance);
if (seriesLooksIntegerValued || Math.Abs(result - rounded) <= IntegerSnapTolerance)
result = rounded;
}
}
// Memoize: the prediction is stored into _series, which loss computations iterate.
_series[x] = result;
return result;
}
/// <summary>
/// Default basis function: for every parameter module, sums exponential +
/// power + linear + constant + logarithmic + trig + reciprocal + sqrt terms.
/// Undefined or non-finite sub-terms contribute 0 rather than poisoning the
/// sum. Derived classes can override this completely.
/// </summary>
internal protected virtual double Func(double x)
{
double y = 0;
for (int i = 0; i < Parameters.Length; i++)
{
// Skip undersized modules instead of indexing out of range.
if (Parameters[i].Length < ParameterCountPerModule)
continue;
double expBase = Parameters[i][ExpBaseIndex];
double expCoeff = Parameters[i][ExpCoeffIndex];
double polyPower = Parameters[i][PolyPowerIndex];
double polyCoeff = Parameters[i][PolyCoeffIndex];
double linearCoeff = Parameters[i][LinearCoeffIndex];
double constant = Parameters[i][ConstantIndex];
double logCoeff = Parameters[i][LogCoeffIndex];
double logBase = Parameters[i][LogBaseIndex];
double trigFrequency = Parameters[i][TrigFrequencyIndex];
double trigPhase = Parameters[i][TrigPhaseIndex];
double sinCoeff = Parameters[i][SinCoeffIndex];
double cosCoeff = Parameters[i][CosCoeffIndex];
double reciprocalCoeff = Parameters[i][ReciprocalCoeffIndex];
double reciprocalShift = Parameters[i][ReciprocalShiftIndex];
double sqrtCoeff = Parameters[i][SqrtCoeffIndex];
double sqrtShift = Parameters[i][SqrtShiftIndex];
double expTerm = SafePow(expBase, x) * expCoeff; // expCoeff * expBase^x
double polyTerm = SafePowX(x, polyPower) * polyCoeff; // polyCoeff * x^polyPower
double linearTerm = linearCoeff * x;
double constantTerm = constant;
// log_b(x) via change of base; guarded against x<=0, b<=0, and b==1 (log b == 0).
double logTerm = (x > 0 && logBase > 0 && Math.Abs(Math.Log(logBase)) > 1e-12) ? (Math.Log(x) * logCoeff / Math.Log(logBase)) : 0;
double trigAngle = trigFrequency * x + trigPhase;
double sinTerm = double.IsFinite(trigAngle) ? Math.Sin(trigAngle) * sinCoeff : 0;
double cosTerm = double.IsFinite(trigAngle) ? Math.Cos(trigAngle) * cosCoeff : 0;
double denominator = x + reciprocalShift;
// Reciprocal term is suppressed near its pole.
double reciprocalTerm = Math.Abs(denominator) > 1e-12 ? reciprocalCoeff / denominator : 0;
double sqrtArgument = x + sqrtShift;
// Sqrt term contributes only where its argument is non-negative.
double sqrtTerm = sqrtArgument >= 0 ? Math.Sqrt(sqrtArgument) * sqrtCoeff : 0;
y += expTerm + polyTerm + linearTerm + constantTerm + logTerm + sinTerm + cosTerm + reciprocalTerm + sqrtTerm;
}
return y;
}
/// <summary>
/// Deep-copies a jagged parameter matrix: the outer array and every inner
/// row are duplicated, so the result shares no storage with the source.
/// </summary>
internal double[][] CloneParameters(double[][] source)
{
    double[][] copy = new double[source.Length][];
    for (int row = 0; row < source.Length; row++)
        copy[row] = (double[])source[row].Clone();
    return copy;
}
/// <summary>Deep copy of the live <c>Parameters</c> matrix.</summary>
internal double[][] CloneCurrentParameters() => CloneParameters(Parameters);
/// <summary>
/// Evaluates <see cref="Func"/> at <paramref name="x"/> using a candidate
/// parameter set, then restores the original parameters.
/// </summary>
/// <remarks>
/// Fix: the restore is now in a <c>finally</c> block. Previously, if an
/// overridden <see cref="Func"/> threw, the swap was never undone and the
/// series was left holding the candidate parameters.
/// </remarks>
internal double EvaluateWithParameters(double[][] parameters, double x)
{
    double[][] backup = CloneCurrentParameters();
    Parameters = CloneParameters(parameters);
    try
    {
        return Func(x);
    }
    finally
    {
        // Guarantee the tuned state survives even a throwing Func override.
        Parameters = backup;
    }
}
/// <summary>
/// A candidate counts as a perfect fit when its loss is finite and does not
/// exceed the convergence tolerance. (The parameter set itself is not
/// inspected; it is carried for signature symmetry with callers.)
/// </summary>
internal bool IsPerfectFitForParameters(double[][] parameters, double loss, double convergenceTolerance)
    => double.IsFinite(loss) && loss <= convergenceTolerance;
/// <summary>
/// Simplicity preference applies only when enabled and the progressive model
/// search is not already doing structured selection.
/// </summary>
internal bool ShouldPreferSimpleModels() => PreferSimpleModelsWhenNoProgressive && !UseProgressiveModelSearch;
/// <summary>
/// Heuristic complexity score: each coefficient whose magnitude exceeds 1e-8
/// adds a term-specific weight, and every module that contributes anything
/// adds a further 0.5. Used to break near-ties in favor of simpler models.
/// </summary>
internal double ComputeModelComplexity(double[][] parameters)
{
    // Coefficient slots and their weights, in the same order the original
    // accumulation ran (kept stable so floating-point sums are identical).
    (int Index, double Weight)[] weightedTerms =
    {
        (ExpCoeffIndex, 1.0),
        (PolyCoeffIndex, 2.0),
        (LinearCoeffIndex, 1.0),
        (ConstantIndex, 0.75),
        (LogCoeffIndex, 1.5),
        (SinCoeffIndex, 2.0),
        (CosCoeffIndex, 2.0),
        (ReciprocalCoeffIndex, 1.5),
        (SqrtCoeffIndex, 1.5),
    };
    double complexity = 0;
    foreach (double[] module in parameters)
    {
        if (module.Length < ParameterCountPerModule)
            continue;
        bool moduleActive = false;
        foreach ((int index, double weight) in weightedTerms)
        {
            if (Math.Abs(module[index]) > 1e-8)
            {
                complexity += weight;
                moduleActive = true;
            }
        }
        // Flat per-module activation cost.
        if (moduleActive)
            complexity += 0.5;
    }
    return complexity;
}
/// <summary>
/// A simpler candidate may be preferred if its loss is within a slack band
/// above the best loss: the larger of a tolerance-based floor and 2% of the
/// best loss's magnitude.
/// </summary>
internal bool IsWithinSimplicitySlack(double candidateLoss, double bestLoss)
{
    double toleranceFloor = ConvergenceTolerance * Math.Max(1.0, SimplicityLossSlackFactor);
    double proportionalBand = Math.Abs(bestLoss) * 0.02;
    double slack = Math.Max(toleranceFloor, proportionalBand);
    return candidateLoss <= bestLoss + slack;
}
/// <summary>Drops all recorded tuning snapshots (called at the start of each tune pass).</summary>
internal protected void ClearTuningSnapshots()
{
_tuningSnapshots.Clear();
}
/// <summary>Drops all recorded evolution-trace entries (called at the start of each tune pass).</summary>
internal protected void ClearEvolutionTrace()
{
_evolutionTraceEntries.Clear();
}
/// <summary>
/// Records one evolution-trace entry (stage label, step number, loss, and a
/// rendered function string for the given parameters). Opt-in via
/// <c>CaptureEvolutionTrace</c> because rendering the function has a cost.
/// </summary>
internal void AddEvolutionTrace(string stage, int step, double loss, double[][] parameters)
{
    if (!CaptureEvolutionTrace)
        return;
    EvolutionTraceEntry entry = new EvolutionTraceEntry
    {
        Stage = stage,
        Step = step,
        Loss = loss,
        Function = GetFunctionWithParameters(parameters)
    };
    _evolutionTraceEntries.Add(entry);
}
/// <summary>
/// Records one tuning snapshot (restart, iteration, loss, and a deep copy of
/// the current parameters so later mutation cannot rewrite history). Opt-in
/// via <c>CaptureTuningSnapshots</c>.
/// </summary>
internal void AddTuningSnapshot(int restart, int iteration, double loss)
{
    if (!CaptureTuningSnapshots)
        return;
    TuningSnapshot snapshot = new TuningSnapshot
    {
        Restart = restart,
        Iteration = iteration,
        Loss = loss,
        Parameters = CloneCurrentParameters()
    };
    _tuningSnapshots.Add(snapshot);
}
/// <summary>
/// Math.Pow with NaN/Infinity results mapped to 0, so a single wild
/// exponential term cannot poison the whole basis-function sum.
/// </summary>
private static double SafePow(double value, double power)
{
    double raised = Math.Pow(value, power);
    if (double.IsFinite(raised))
        return raised;
    return 0;
}
/// <summary>
/// x^power for the polynomial term, with two safety rules: 0 raised to a
/// non-positive power is defined as 0 (Math.Pow would yield 1 or Infinity),
/// and any remaining non-finite result collapses to 0.
/// </summary>
private static double SafePowX(double x, double power)
{
    bool zeroBaseNonPositivePower = x == 0 && power <= 0;
    if (zeroBaseNonPositivePower)
        return 0;
    double raised = Math.Pow(x, power);
    if (!double.IsFinite(raised))
        return 0;
    return raised;
}
/// <summary>
/// Mean squared error of a candidate parameter set over every stored point.
/// Returns PositiveInfinity for an empty series or whenever any prediction
/// is non-finite (disqualifying the candidate outright).
/// </summary>
internal double ComputeSeriesLossForParameters(double[][] parameters)
{
    if (_series.Count == 0)
        return double.PositiveInfinity;
    double sumSquaredError = 0;
    int evaluated = 0;
    foreach (KeyValuePair<double, double> point in _series)
    {
        double predicted = EvaluateWithParameters(parameters, point.Key);
        if (!double.IsFinite(predicted))
            return double.PositiveInfinity;
        double residual = predicted - point.Value;
        sumSquaredError += residual * residual;
        evaluated++;
    }
    return evaluated == 0 ? double.PositiveInfinity : sumSquaredError / evaluated;
}
/// <summary>
/// Builds an all-zero candidate with neutral structural defaults: log base e
/// and unit trig frequency, so enabling those terms later starts from sane
/// values. Length guards tolerate undersized modules.
/// </summary>
private double[][] CreateZeroCandidateTemplate()
{
    double[][] template = CloneCurrentParameters();
    SeriesSeedHelpers.ZeroSeed(template);
    foreach (double[] module in template)
    {
        if (module.Length > LogBaseIndex)
            module[LogBaseIndex] = Math.E;
        if (module.Length > TrigFrequencyIndex)
            module[TrigFrequencyIndex] = 1;
    }
    return template;
}
/// <summary>
/// Stage-one closed-form model search: evaluates a fixed family of candidate
/// models (constant, linear, quadratic, geometric, second-order recurrence,
/// quarter-sine, alternating sign) against the stored points and returns the
/// lowest-loss one. Returns false (with the current parameters as output)
/// when fewer than 3 points exist.
/// </summary>
private bool TryRunProgressiveModelSearch(out double[][] bestParameters, out double bestLoss, out string bestModel)
{
// Baseline: whatever parameters we currently hold.
double[][] workingBestParameters = CloneCurrentParameters();
double workingBestLoss = ComputeSeriesLossForParameters(workingBestParameters);
string workingBestModel = "current-parameters";
if (_series.Count < 3)
{
bestParameters = workingBestParameters;
bestLoss = workingBestLoss;
bestModel = workingBestModel;
return false;
}
// Local closure mutating the workingBest* captures: adopt a candidate only
// when it strictly improves (the 1e-14 epsilon keeps ties on the earlier,
// simpler model in the fixed ordering below).
void Consider(string name, double[][] candidate)
{
double loss = ComputeSeriesLossForParameters(candidate);
if (!double.IsFinite(loss))
return;
if (loss + 1e-14 < workingBestLoss)
{
workingBestLoss = loss;
workingBestParameters = CloneParameters(candidate);
workingBestModel = name;
}
}
// Candidates are tried simplest-first; Consider's strict-improvement rule
// makes this order the tie-break.
if (TryBuildConstantCandidate(out double[][] constantCandidate))
Consider("constant", constantCandidate);
if (TryBuildLinearCandidate(out double[][] linearCandidate))
Consider("linear", linearCandidate);
if (TryBuildQuadraticCandidate(out double[][] quadraticCandidate))
Consider("quadratic", quadraticCandidate);
if (TryBuildGeometricCandidate(out double[][] geometricCandidate))
Consider("geometric", geometricCandidate);
if (TryBuildSecondOrderRecurrenceCandidate(out double[][] recurrenceCandidate))
Consider("second-order-recurrence", recurrenceCandidate);
if (TryBuildQuarterSineCandidate(out double[][] quarterSineCandidate))
Consider("quarter-sine", quarterSineCandidate);
if (TryBuildAlternatingSignCandidate(out double[][] alternatingCandidate))
Consider("alternating-sign", alternatingCandidate);
bestParameters = workingBestParameters;
bestLoss = workingBestLoss;
bestModel = workingBestModel;
return true;
}
/// <summary>
/// Simplest candidate model: y = mean of all observed values, expressed
/// through the constant slot of module 0.
/// </summary>
private bool TryBuildConstantCandidate(out double[][] candidate)
{
    candidate = CreateZeroCandidateTemplate();
    bool usable = _series.Count > 0 && candidate.Length > 0 && candidate[0].Length > ConstantIndex;
    if (!usable)
        return false;
    candidate[0][ConstantIndex] = _series.Values.Average();
    return true;
}
/// <summary>
/// Ordinary least-squares line fit y = slope*x + intercept over all stored
/// points. Fails with fewer than 2 points or when all x values are
/// (numerically) identical, which makes the normal equations degenerate.
/// </summary>
private bool TryBuildLinearCandidate(out double[][] candidate)
{
    candidate = CreateZeroCandidateTemplate();
    if (_series.Count < 2 || candidate.Length == 0 || candidate[0].Length <= LinearCoeffIndex)
        return false;
    double sumX = 0, sumY = 0, sumXX = 0, sumXY = 0;
    int n = 0;
    foreach (KeyValuePair<double, double> point in _series)
    {
        double x = point.Key;
        double y = point.Value;
        sumX += x;
        sumY += y;
        sumXX += x * x;
        sumXY += x * y;
        n++;
    }
    double denominator = n * sumXX - sumX * sumX;
    if (Math.Abs(denominator) < 1e-12)
        return false;
    double slope = (n * sumXY - sumX * sumY) / denominator;
    double intercept = (sumY - slope * sumX) / n;
    candidate[0][LinearCoeffIndex] = slope;
    candidate[0][ConstantIndex] = intercept;
    return true;
}
/// <summary>
/// Least-squares quadratic fit y = a*x^2 + b*x + c over all stored points,
/// written into module 0 with the poly power fixed at 2. Fails with fewer
/// than 3 points or when the 3x3 normal-equation system is singular.
/// </summary>
private bool TryBuildQuadraticCandidate(out double[][] candidate)
{
candidate = CreateZeroCandidateTemplate();
if (_series.Count < 3 || candidate.Length == 0 || candidate[0].Length <= PolyCoeffIndex)
return false;
// Power sums s_k = sum(x^k) and moment sums t_k = sum(x^k * y) for the
// normal equations solved by TrySolveSymmetric3x3.
double s0 = 0;
double s1 = 0;
double s2 = 0;
double s3 = 0;
double s4 = 0;
double t0 = 0;
double t1 = 0;
double t2 = 0;
foreach (var point in _series)
{
double x = point.Key;
double y = point.Value;
double x2 = x * x;
s0 += 1;
s1 += x;
s2 += x2;
s3 += x2 * x;
s4 += x2 * x2;
t0 += y;
t1 += x * y;
t2 += x2 * y;
}
if (!TrySolveSymmetric3x3(s0, s1, s2, s3, s4, t0, t1, t2, out double c, out double b, out double a))
return false;
candidate[0][PolyPowerIndex] = 2;
candidate[0][PolyCoeffIndex] = a;
candidate[0][LinearCoeffIndex] = b;
candidate[0][ConstantIndex] = c;
return true;
}
/// <summary>
/// Solves the 3x3 normal equations of a quadratic least-squares fit via the
/// explicit adjugate/determinant inverse. The coefficient matrix is
/// [s0 s1 s2; s1 s2 s3; s2 s3 s4] with right-hand side (t0, t1, t2); outputs
/// are the constant (c), linear (b) and quadratic (a) coefficients.
/// Returns false for singular or non-finite systems.
/// </summary>
private static bool TrySolveSymmetric3x3(double s0, double s1, double s2, double s3, double s4, double t0, double t1, double t2, out double c, out double b, out double a)
{
    c = b = a = 0;
    double a00 = s0, a01 = s1, a02 = s2;
    double a10 = s1, a11 = s2, a12 = s3;
    double a20 = s2, a21 = s3, a22 = s4;
    // Cofactor expansion along the first row.
    double det =
        a00 * (a11 * a22 - a12 * a21) -
        a01 * (a10 * a22 - a12 * a20) +
        a02 * (a10 * a21 - a11 * a20);
    if (!double.IsFinite(det) || Math.Abs(det) < 1e-12)
        return false;
    // Inverse entries: adjugate cofactors divided by the determinant.
    double i00 = (a11 * a22 - a12 * a21) / det;
    double i01 = (a02 * a21 - a01 * a22) / det;
    double i02 = (a01 * a12 - a02 * a11) / det;
    double i10 = (a12 * a20 - a10 * a22) / det;
    double i11 = (a00 * a22 - a02 * a20) / det;
    double i12 = (a02 * a10 - a00 * a12) / det;
    double i20 = (a10 * a21 - a11 * a20) / det;
    double i21 = (a01 * a20 - a00 * a21) / det;
    double i22 = (a00 * a11 - a01 * a10) / det;
    c = i00 * t0 + i01 * t1 + i02 * t2;
    b = i10 * t0 + i11 * t1 + i12 * t2;
    a = i20 * t0 + i21 * t1 + i22 * t2;
    return double.IsFinite(c) && double.IsFinite(b) && double.IsFinite(a);
}
/// <summary>
/// Geometric candidate y = firstValue * ratio^x, solved from the contiguous
/// integer-indexed values starting at x = 0.
/// </summary>
private bool TryBuildGeometricCandidate(out double[][] candidate)
{
    candidate = CreateZeroCandidateTemplate();
    bool hasExpSlot = candidate.Length > 0 && candidate[0].Length > ExpCoeffIndex;
    if (!hasExpSlot)
        return false;
    List<double> values = SeriesSeedHelpers.GetContiguousIndexedValuesFromZero(_series);
    if (!SeriesSeedHelpers.TrySolveGeometricRatio(values, out double commonRatio, out double firstValue))
        return false;
    candidate[0][ExpBaseIndex] = commonRatio;
    candidate[0][ExpCoeffIndex] = firstValue;
    return true;
}
/// <summary>
/// Two-exponential closed form for second-order linear recurrences
/// (Fibonacci/Lucas-like families): y = b1*r1^x + b2*r2^x, requiring two
/// usable exponential modules.
/// </summary>
private bool TryBuildSecondOrderRecurrenceCandidate(out double[][] candidate)
{
    candidate = CreateZeroCandidateTemplate();
    bool hasTwoExpModules = candidate.Length >= 2
        && candidate[0].Length > ExpCoeffIndex
        && candidate[1].Length > ExpCoeffIndex;
    if (!hasTwoExpModules)
        return false;
    List<double> values = SeriesSeedHelpers.GetContiguousIndexedValuesFromZero(_series);
    if (!SeriesSeedHelpers.TrySolveSecondOrderRecurrence(values, out double r1, out double r2, out double b1, out double b2))
        return false;
    candidate[0][ExpBaseIndex] = r1;
    candidate[0][ExpCoeffIndex] = b1;
    candidate[1][ExpBaseIndex] = r2;
    candidate[1][ExpCoeffIndex] = b2;
    return true;
}
/// <summary>
/// Pattern candidate y = amplitude * sin(pi*x/2), detected from contiguous
/// integer-indexed values starting at x = 0.
/// </summary>
private bool TryBuildQuarterSineCandidate(out double[][] candidate)
{
    candidate = CreateZeroCandidateTemplate();
    bool hasSinSlot = candidate.Length > 0 && candidate[0].Length > SinCoeffIndex;
    if (!hasSinSlot)
        return false;
    List<double> values = SeriesSeedHelpers.GetContiguousIndexedValuesFromZero(_series);
    if (!SeriesSeedHelpers.TryMatchQuarterSine(values, out double amplitude))
        return false;
    candidate[0][TrigFrequencyIndex] = Math.PI / 2.0;
    candidate[0][TrigPhaseIndex] = 0;
    candidate[0][SinCoeffIndex] = amplitude;
    return true;
}
/// <summary>
/// Alternating-sign candidate y = amplitude * (-1)^x, expressed through the
/// exponential slot with base -1.
/// </summary>
private bool TryBuildAlternatingSignCandidate(out double[][] candidate)
{
    candidate = CreateZeroCandidateTemplate();
    bool hasExpSlot = candidate.Length > 0 && candidate[0].Length > ExpCoeffIndex;
    if (!hasExpSlot)
        return false;
    List<double> values = SeriesSeedHelpers.GetContiguousIndexedValuesFromZero(_series);
    if (!SeriesSeedHelpers.TryMatchAlternatingAmplitude(values, out double amplitude))
        return false;
    candidate[0][ExpBaseIndex] = -1;
    candidate[0][ExpCoeffIndex] = amplitude;
    return true;
}
/// <summary>
/// Full tuning pass: resets fit state, optionally runs the progressive
/// closed-form model search (returning early on a perfect stage-one fit),
/// then delegates to either the evolutionary searcher or the gradient-based
/// SeriesTuner depending on <c>OptimizerMode</c>.
/// </summary>
internal protected virtual void TuneParameters()
{
// Reset fit state before each fresh tune pass.
ClearTuningSnapshots();
ClearEvolutionTrace();
_evolutionaryFallbackAttempted = false;
HasPerfectFit = false;
LastFitStatusMessage = "Tuning in progress.";
if (UseProgressiveModelSearch && TryRunProgressiveModelSearch(out double[][] stageOneBest, out double stageOneLoss, out string stageOneModel))
{
// Adopt stage-one's best even if imperfect: the optimizer continues from it.
Parameters = CloneParameters(stageOneBest);
bool stageOnePerfect = IsPerfectFitForParameters(stageOneBest, stageOneLoss, ConvergenceTolerance);
if (stageOnePerfect)
{
SetFitStatus(stageOneLoss, perfectFit: true, message: $"Perfect match found by progressive search ({stageOneModel}, loss={stageOneLoss:G6}).");
ReportProgress(LastFitStatusMessage);
return;
}
ReportProgress($"Progressive stage best: {stageOneModel}, loss={stageOneLoss:G6}. Continuing optimizer.");
}
if (OptimizerMode == TuningMode.Evolutionary)
{
SeriesEvolutionarySearcher searcher = new SeriesEvolutionarySearcher(this, isFallback: false);
_ = searcher.TryFindPerfectFit(); // searcher records fit status itself
return;
}
// Keep Series focused on API behavior; delegate optimizer details to SeriesTuner.
SeriesTuner tuner = new SeriesTuner(this);
tuner.TuneParameters();
}
/// <summary>
/// Records the outcome of a tuning pass in one place so the loss, the
/// perfect-fit flag, and the status message always stay consistent.
/// </summary>
internal protected void SetFitStatus(double loss, bool perfectFit, string message)
{
LastLoss = loss;
HasPerfectFit = perfectFit;
LastFitStatusMessage = message;
}
/// <summary>
/// One-shot interactive rescue path used by <see cref="Calc"/> when a perfect
/// fit is required but tuning did not converge: prompts the user (real,
/// non-redirected console only) to run the evolutionary fallback search.
/// Returns true only when that search reaches a perfect fit.
/// </summary>
private bool TryEvolutionaryFallbackInteractive()
{
// Prompt at most once; afterwards just report the cached outcome.
if (_evolutionaryFallbackAttempted)
return HasPerfectFit;
_evolutionaryFallbackAttempted = true;
if (!AllowEvolutionaryFallbackPrompt)
return false;
// Avoid blocking/non-interactive contexts (tests, redirected output, automation).
if (!Environment.UserInteractive || Console.IsInputRedirected)
return false;
EnsureProgressBarLineCompleted();
Console.WriteLine($"No perfect match found with the default tuner. Best loss={LastLoss:G6}, tolerance={ConvergenceTolerance:G6}.");
Console.Write("Try evolutionary fallback search? [y/N]: ");
string? response = Console.ReadLine();
// Default answer is "no": anything other than y/yes declines.
bool shouldRun = string.Equals(response?.Trim(), "y", StringComparison.OrdinalIgnoreCase) ||
string.Equals(response?.Trim(), "yes", StringComparison.OrdinalIgnoreCase);
if (!shouldRun)
{
LastFitStatusMessage = $"No perfect match found (best loss={LastLoss:G6}, tolerance={ConvergenceTolerance:G6}). Evolutionary fallback was declined.";
return false;
}
SeriesEvolutionarySearcher searcher = new SeriesEvolutionarySearcher(this, isFallback: true);
bool found = searcher.TryFindPerfectFit();
if (!found)
LastFitStatusMessage = $"No perfect match found (best loss={LastLoss:G6}, tolerance={ConvergenceTolerance:G6}) after evolutionary fallback.";
return found;
}
| private sealed class SeriesEvolutionarySearcher | |
| { | |
private readonly Series _owner;    // the series whose points and parameters we search over
private readonly bool _isFallback; // only changes the wording of reported status messages
/// <summary>Creates a searcher bound to one series instance.</summary>
public SeriesEvolutionarySearcher(Series owner, bool isFallback)
{
_owner = owner;
_isFallback = isFallback;
}
/// <summary>
/// Runs the evolutionary search: seeded initial population, elitist
/// generational loop with tournament selection / crossover / mutation, early
/// exit on a perfect fit, then an optional gradient-refinement pass on the
/// top final candidates. Writes the winning parameters and fit status back
/// to the owner; returns true only on a perfect fit.
/// </summary>
public bool TryFindPerfectFit()
{
// Evolutionary search: broad global search + optional local gradient refinement.
// XOR with a constant decorrelates this RNG stream from other seeded users.
int baseSeed = (_owner.RandomSeed ?? Environment.TickCount) ^ 0x5F3759DF;
Random random = new Random(baseSeed);
// Clamp configuration into workable ranges.
int populationSize = Math.Max(8, _owner.PopulationSize);
int eliteCount = Math.Clamp(_owner.EliteCount, 1, populationSize - 1);
int generations = Math.Max(1, _owner.GenerationCount);
int tournamentSize = Math.Clamp(_owner.TournamentSize, 2, populationSize);
// Data magnitude used to scale random candidates and mutations.
double yScale = Math.Max(1, _owner._series.Values.Select(Math.Abs).DefaultIfEmpty(1).Max());
// Initial population mixes deterministic structural seeds and randomized candidates.
List<double[][]> population = BuildInitialPopulation(random, yScale, populationSize);
double bestLoss = double.PositiveInfinity;
double[][] bestCandidate = _owner.CloneCurrentParameters();
for (int generation = 0; generation < generations; generation++)
{
var ranked = population
.Select(candidate => (Candidate: candidate, Loss: ComputeLoss(candidate)))
.OrderBy(entry => entry.Loss)
.ToList();
if (ranked.Count == 0)
continue;
// Always keep the best-so-far candidate across generations.
if (ranked[0].Loss < bestLoss)
{
bestLoss = ranked[0].Loss;
bestCandidate = _owner.CloneParameters(ranked[0].Candidate);
}
_owner.ReportProgress($"Evolution generation {generation + 1}/{generations}, best loss={bestLoss:G6}.");
_owner.AddEvolutionTrace("generation", generation + 1, ranked[0].Loss, ranked[0].Candidate);
// Early exit the moment the best-so-far reaches the tolerance.
if (_owner.IsPerfectFitForParameters(bestCandidate, bestLoss, _owner.ConvergenceTolerance))
{
_owner.Parameters = _owner.CloneParameters(bestCandidate);
_owner.AddEvolutionTrace("final", generation + 1, bestLoss, bestCandidate);
string successMessage = _isFallback
? $"Perfect match found by evolutionary fallback (loss={bestLoss:G6})."
: $"Perfect match found by evolutionary tuner (loss={bestLoss:G6}).";
_owner.SetFitStatus(bestLoss, perfectFit: true, message: successMessage);
_owner.ReportProgress(_owner.LastFitStatusMessage);
return true;
}
// Elitism: the top candidates survive unchanged.
List<double[][]> nextPopulation = new List<double[][]>();
for (int i = 0; i < Math.Min(eliteCount, ranked.Count); i++)
nextPopulation.Add(_owner.CloneParameters(ranked[i].Candidate));
// Fill remainder with offspring generated from tournament-selected parents.
while (nextPopulation.Count < populationSize)
{
double[][] parentA = TournamentSelect(ranked, random, tournamentSize);
double[][] parentB = TournamentSelect(ranked, random, tournamentSize);
double[][] child = Crossover(parentA, parentB, random);
Mutate(child, random, yScale);
nextPopulation.Add(child);
}
population = nextPopulation;
}
// Optional local polishing stage reuses the gradient tuner for final candidate refinement.
if (_owner.EnableLocalRefinement)
{
SeriesTuner tuner = new SeriesTuner(_owner);
int refinementCandidates = Math.Min(6, population.Count);
var finalRanked = population
.Select(candidate => (Candidate: candidate, Loss: ComputeLoss(candidate)))
.OrderBy(entry => entry.Loss)
.Take(refinementCandidates)
.ToList();
for (int i = 0; i < finalRanked.Count; i++)
{
// Distinct deterministic seed per refinement attempt (104729 is prime).
int seed = unchecked(baseSeed + 104729 * (i + 1));
var refined = tuner.RefineCandidate(finalRanked[i].Candidate, seed);
_owner.AddEvolutionTrace("refinement", i + 1, refined.loss, refined.parameters);
if (refined.loss < bestLoss)
{
bestLoss = refined.loss;
bestCandidate = _owner.CloneParameters(refined.parameters);
}
if (_owner.IsPerfectFitForParameters(bestCandidate, bestLoss, _owner.ConvergenceTolerance))
break;
}
}
// Commit the best candidate found, perfect or not, and report the outcome.
_owner.Parameters = _owner.CloneParameters(bestCandidate);
_owner.AddEvolutionTrace("final", generations, bestLoss, bestCandidate);
bool perfect = _owner.IsPerfectFitForParameters(bestCandidate, bestLoss, _owner.ConvergenceTolerance);
string message = _isFallback
? (perfect
? $"Perfect match found by evolutionary fallback (loss={bestLoss:G6})."
: $"Evolutionary fallback finished without perfect match (best loss={bestLoss:G6}).")
: (perfect
? $"Perfect match found by evolutionary tuner (loss={bestLoss:G6})."
: $"No perfect match found by evolutionary tuner (best loss={bestLoss:G6}, tolerance={_owner.ConvergenceTolerance:G6}).");
_owner.SetFitStatus(bestLoss, perfect, message);
_owner.ReportProgress(message);
return perfect;
}
/// <summary>
/// Assembles the starting population: the owner's current parameters first,
/// then deterministic structural seeds (or generic recurrence priors in the
/// constrained no-seed mode), and finally random candidates until the
/// requested size is reached.
/// </summary>
private List<double[][]> BuildInitialPopulation(Random random, double yScale, int populationSize)
{
// Seed order intentionally prioritizes exact structural guesses before randomness.
List<double[][]> population = new List<double[][]>();
double[][] initial = _owner.CloneCurrentParameters();
if (IsConstrainedNoSeedEvolutionary())
ApplyNoSeedEvolutionaryConstraints(initial);
population.Add(initial);
if (_owner.UseStructuralSeeds)
{
// Each Try* builder contributes only when the series matches its pattern.
if (TryBuildSecondOrderRecurrenceSeed(out double[][] recurrenceSeed))
population.Add(recurrenceSeed);
if (TryBuildPolynomialSeed(out double[][] polynomialSeed))
population.Add(polynomialSeed);
if (TryBuildQuarterSineSeed(out double[][] quarterSineSeed))
population.Add(quarterSineSeed);
if (TryBuildGeometricSeed(out double[][] geometricSeed))
population.Add(geometricSeed);
if (TryBuildAlternatingSignSeed(out double[][] alternatingSeed))
population.Add(alternatingSeed);
}
else if (IsConstrainedNoSeedEvolutionary())
{
// Without structural seeds, bracket the Fibonacci-like regime with two
// generic recurrence priors (q slightly below and above 1).
if (TryBuildGenericRecurrencePrior(p: 1.0, q: 0.98, out double[][] genericRecurrenceNear))
population.Add(genericRecurrenceNear);
if (TryBuildGenericRecurrencePrior(p: 1.0, q: 1.02, out double[][] genericRecurrenceWide))
population.Add(genericRecurrenceWide);
}
// Top up with random candidates scaled to the data magnitude.
while (population.Count < populationSize)
population.Add(CreateRandomCandidate(random, yScale));
return population;
}
/// <summary>
/// The constrained search regime applies when structural seeds are disabled
/// and the owner is running in evolutionary mode.
/// </summary>
private bool IsConstrainedNoSeedEvolutionary()
    => !_owner.UseStructuralSeeds && _owner.OptimizerMode == TuningMode.Evolutionary;
/// <summary>
/// Constraint pass for the no-seed evolutionary mode: only the first two
/// modules may stay active (the rest are zeroed), non-finite values in the
/// active modules are reset to 0, and the log base is kept at magnitude
/// &gt;= 0.5. No-op outside that mode or for undersized modules.
/// </summary>
private void ApplyNoSeedEvolutionaryConstraints(double[][] candidate)
{
if (!IsConstrainedNoSeedEvolutionary())
return;
for (int i = 0; i < candidate.Length; i++)
{
if (candidate[i].Length < ParameterCountPerModule)
continue;
if (i >= 2)
{
// Modules beyond the first two are disabled entirely.
for (int j = 0; j < candidate[i].Length; j++)
candidate[i][j] = 0;
continue;
}
// Sanitize the two active modules: no NaN/Infinity survives.
for (int j = 0; j < candidate[i].Length; j++)
{
if (!double.IsFinite(candidate[i][j]))
candidate[i][j] = 0;
}
candidate[i][LogBaseIndex] = Math.Max(0.5, Math.Abs(candidate[i][LogBaseIndex]));
}
}
/// <summary>
/// Builds a two-exponential prior from the characteristic roots of the
/// generic recurrence y(n) = p*y(n-1) + q*y(n-2). The mixing coefficients
/// b1/b2 are fitted to the first two observed values when available,
/// otherwise default to +/-0.5. Fails for repeated/complex roots or when
/// fewer than two full exponential modules exist.
/// </summary>
private bool TryBuildGenericRecurrencePrior(double p, double q, out double[][] seed)
{
seed = _owner.CloneCurrentParameters();
int requiredExponentialSlots = LogBaseIndex + 1;
if (_owner.Parameters.Length < 2 || _owner.Parameters[0].Length < requiredExponentialSlots || _owner.Parameters[1].Length < requiredExponentialSlots)
return false;
// Characteristic equation r^2 = p*r + q  =>  r = (p +/- sqrt(p^2 + 4q)) / 2.
double discriminant = p * p + 4 * q;
if (!double.IsFinite(discriminant) || discriminant < 0)
return false;
double sqrtDiscriminant = Math.Sqrt(discriminant);
double r1 = (p + sqrtDiscriminant) / 2.0;
double r2 = (p - sqrtDiscriminant) / 2.0;
// Repeated roots are rejected: the two-term closed form needs distinct roots.
if (!double.IsFinite(r1) || !double.IsFinite(r2) || Math.Abs(r1 - r2) < 1e-12)
return false;
SeriesSeedHelpers.ZeroSeed(seed);
double b1 = 0.5;
double b2 = -0.5;
List<double> orderedY = SeriesSeedHelpers.GetContiguousIndexedValuesFromZero(_owner._series);
if (orderedY.Count >= 2)
{
// Solve b1 + b2 = y0 and b1*r1 + b2*r2 = y1.
double y0 = orderedY[0];
double y1 = orderedY[1];
b1 = (y1 - y0 * r2) / (r1 - r2);
b2 = y0 - b1;
}
seed[0][ExpBaseIndex] = r1;
seed[0][ExpCoeffIndex] = b1;
seed[0][LogBaseIndex] = Math.E;
seed[1][ExpBaseIndex] = r2;
seed[1][ExpCoeffIndex] = b2;
seed[1][LogBaseIndex] = Math.E;
return true;
}
/// <summary>
/// Generates a random Gaussian candidate scaled to the data magnitude, then
/// forces the structural slots (log base, trig frequency/phase) into sane
/// ranges.
/// </summary>
/// <remarks>
/// Fix: the structural-slot writes are now guarded by row-length checks, the
/// same checks the other builders use (e.g. CreateZeroCandidateTemplate and
/// the ParameterCountPerModule guards elsewhere). Previously an undersized
/// module row would throw IndexOutOfRangeException here.
/// </remarks>
private double[][] CreateRandomCandidate(Random random, double yScale)
{
    double[][] candidate = _owner.CloneCurrentParameters();
    double scale = Math.Max(1e-3, yScale * _owner.InitialScaleFactor);
    for (int i = 0; i < candidate.Length; i++)
    {
        for (int j = 0; j < candidate[i].Length; j++)
        {
            candidate[i][j] = NextGaussian(random) * scale;
        }
        // Guard short rows before indexing the structural slots.
        if (candidate[i].Length > LogBaseIndex)
            candidate[i][LogBaseIndex] = Math.Max(0.5, Math.Abs(candidate[i][LogBaseIndex]) + 0.5);
        if (candidate[i].Length > TrigFrequencyIndex)
            candidate[i][TrigFrequencyIndex] = random.NextDouble() * 4 - 2;
        if (candidate[i].Length > TrigPhaseIndex)
            candidate[i][TrigPhaseIndex] = random.NextDouble() * 2 * Math.PI - Math.PI;
    }
    ApplyNoSeedEvolutionaryConstraints(candidate);
    return candidate;
}
/// <summary>
/// Tournament selection: draws <paramref name="tournamentSize"/> random
/// entrants (with replacement) and returns the lowest-loss one. Balances
/// exploitation of good candidates with exploration of the whole pool.
/// </summary>
private static double[][] TournamentSelect(List<(double[][] Candidate, double Loss)> ranked, Random random, int tournamentSize)
{
    int winner = random.Next(ranked.Count);
    for (int round = 1; round < tournamentSize; round++)
    {
        int challenger = random.Next(ranked.Count);
        if (ranked[challenger].Loss < ranked[winner].Loss)
            winner = challenger;
    }
    return ranked[winner].Candidate;
}
/// <summary>
/// Mean squared error of a candidate over the owner's stored points; any
/// non-finite prediction disqualifies the candidate with PositiveInfinity.
/// </summary>
private double ComputeLoss(double[][] candidate)
{
    double sumSquaredError = 0;
    int evaluated = 0;
    foreach (KeyValuePair<double, double> point in _owner._series)
    {
        double predicted = _owner.EvaluateWithParameters(candidate, point.Key);
        if (!double.IsFinite(predicted))
            return double.PositiveInfinity;
        double residual = predicted - point.Value;
        sumSquaredError += residual * residual;
        evaluated++;
    }
    return evaluated == 0 ? double.PositiveInfinity : sumSquaredError / evaluated;
}
/// <summary>
/// Uniform crossover: each gene is copied from one parent or the other with
/// equal probability, so offspring contain only parental values. (The
/// previous comment called this a blend/convex combination, which it is not.)
/// Assumes both parents share the same jagged shape.
/// </summary>
private static double[][] Crossover(double[][] parentA, double[][] parentB, Random random)
{
    double[][] child = new double[parentA.Length][];
    for (int i = 0; i < parentA.Length; i++)
    {
        int geneCount = parentA[i].Length;
        child[i] = new double[geneCount];
        for (int j = 0; j < geneCount; j++)
        {
            bool takeFromA = random.NextDouble() < 0.5;
            child[i][j] = takeFromA ? parentA[i][j] : parentB[i][j];
        }
    }
    return child;
}
/// <summary>
/// Gaussian mutation: each gene is perturbed with probability MutationRate
/// by noise scaled to the data magnitude, then the log base is forced back
/// into a usable range and the no-seed constraints are re-applied.
/// </summary>
/// <remarks>
/// Fix: the log-base clamp is now guarded by a row-length check, consistent
/// with the guards used elsewhere (e.g. ApplyNoSeedEvolutionaryConstraints).
/// Previously an undersized module row would throw IndexOutOfRangeException.
/// </remarks>
private void Mutate(double[][] candidate, Random random, double yScale)
{
    // Gaussian mutation introduces local random walks around each candidate.
    double mutationScale = Math.Max(1e-4, yScale * _owner.MutationScaleFactor);
    for (int i = 0; i < candidate.Length; i++)
    {
        for (int j = 0; j < candidate[i].Length; j++)
        {
            if (random.NextDouble() < _owner.MutationRate)
                candidate[i][j] += NextGaussian(random) * mutationScale;
        }
        // Guard short rows before indexing the log-base slot.
        if (candidate[i].Length > LogBaseIndex)
            candidate[i][LogBaseIndex] = Math.Max(0.5, Math.Abs(candidate[i][LogBaseIndex]) + 0.5);
    }
    ApplyNoSeedEvolutionaryConstraints(candidate);
}
/// <summary>
/// Closed-form two-root recurrence seed (Fibonacci/Lucas/Pell-like
/// families): y = b1*r1^x + b2*r2^x written into the first two exponential
/// modules, with the log base reset to e. Fails when the series does not
/// match a second-order recurrence or two full modules are unavailable.
/// </summary>
private bool TryBuildSecondOrderRecurrenceSeed(out double[][] seed)
{
// Closed-form two-root recurrence seed (Fibonacci/Lucas/Pell-like families).
seed = _owner.CloneCurrentParameters();
int requiredExponentialSlots = LogBaseIndex + 1;
if (_owner.Parameters.Length < 2 || _owner.Parameters[0].Length < requiredExponentialSlots || _owner.Parameters[1].Length < requiredExponentialSlots)
return false;
List<double> orderedY = SeriesSeedHelpers.GetContiguousIndexedValuesFromZero(_owner._series);
if (!SeriesSeedHelpers.TrySolveSecondOrderRecurrence(orderedY, out double r1, out double r2, out double b1, out double b2))
return false;
SeriesSeedHelpers.ZeroSeed(seed);
seed[0][ExpBaseIndex] = r1;
seed[0][ExpCoeffIndex] = b1;
seed[0][LogBaseIndex] = Math.E;
seed[1][ExpBaseIndex] = r2;
seed[1][ExpCoeffIndex] = b2;
seed[1][LogBaseIndex] = Math.E;
return true;
}
private bool TryBuildPolynomialSeed(out double[][] seed)
{
    // Quadratic warm start a*x^2 + b*x + c, solved from the first three
    // samples via the second-difference identity.
    seed = _owner.CloneCurrentParameters();
    if (seed.Length == 0 || seed[0].Length < ParameterCountPerModule)
        return false;
    List<double> orderedY = SeriesSeedHelpers.GetContiguousIndexedValuesFromZero(_owner._series);
    if (!SeriesSeedHelpers.TrySolveQuadraticFromFirstThree(orderedY, out double a, out double b, out double c))
        return false;
    SeriesSeedHelpers.ZeroSeed(seed);
    seed[0][PolyPowerIndex] = 2;
    seed[0][PolyCoeffIndex] = a;
    seed[0][LinearCoeffIndex] = b;
    seed[0][ConstantIndex] = c;
    seed[0][LogBaseIndex] = Math.E;
    return true;
}
private bool TryBuildQuarterSineSeed(out double[][] seed)
{
    // Pattern seed for y = A*sin(pi*x/2); amplitude comes from the matcher's
    // reported maximum absolute value.
    seed = _owner.CloneCurrentParameters();
    if (seed.Length == 0 || seed[0].Length < ParameterCountPerModule)
        return false;
    List<double> orderedY = SeriesSeedHelpers.GetContiguousIndexedValuesFromZero(_owner._series);
    if (!SeriesSeedHelpers.TryMatchQuarterSine(orderedY, out double maxAbs))
        return false;
    SeriesSeedHelpers.ZeroSeed(seed);
    seed[0][TrigFrequencyIndex] = Math.PI / 2.0;
    seed[0][TrigPhaseIndex] = 0;
    seed[0][SinCoeffIndex] = maxAbs;
    seed[0][LogBaseIndex] = Math.E;
    return true;
}
private bool TryBuildGeometricSeed(out double[][] seed)
{
    // Geometric warm start: y_n = y_0 * r^n encoded as one exponential module.
    seed = _owner.CloneCurrentParameters();
    if (seed.Length == 0 || seed[0].Length < ParameterCountPerModule)
        return false;
    List<double> orderedY = SeriesSeedHelpers.GetContiguousIndexedValuesFromZero(_owner._series);
    if (!SeriesSeedHelpers.TrySolveGeometricRatio(orderedY, out double ratio, out double firstValue))
        return false;
    SeriesSeedHelpers.ZeroSeed(seed);
    seed[0][ExpBaseIndex] = ratio;
    seed[0][ExpCoeffIndex] = firstValue;
    seed[0][LogBaseIndex] = Math.E;
    return true;
}
private bool TryBuildAlternatingSignSeed(out double[][] seed)
{
    // Warm start for y_n = A*(-1)^n, modeled as an exponential with base -1.
    seed = _owner.CloneCurrentParameters();
    if (seed.Length == 0 || seed[0].Length < ParameterCountPerModule)
        return false;
    List<double> orderedY = SeriesSeedHelpers.GetContiguousIndexedValuesFromZero(_owner._series);
    if (!SeriesSeedHelpers.TryMatchAlternatingAmplitude(orderedY, out double amplitude))
        return false;
    SeriesSeedHelpers.ZeroSeed(seed);
    seed[0][ExpBaseIndex] = -1;
    seed[0][ExpCoeffIndex] = amplitude;
    seed[0][LogBaseIndex] = Math.E;
    return true;
}
private static double NextGaussian(Random random)
{
    // Box-Muller transform: two uniform draws become one standard-normal sample.
    // Using 1 - NextDouble() keeps the first draw strictly positive so Log never sees 0.
    double uniformA = 1.0 - random.NextDouble();
    double uniformB = 1.0 - random.NextDouble();
    double radius = Math.Sqrt(-2.0 * Math.Log(uniformA));
    double angle = 2.0 * Math.PI * uniformB;
    return radius * Math.Cos(angle);
}
| } | |
| // Encapsulates optimization/tuning implementation details. | |
| private sealed class SeriesTuner | |
| { | |
| private readonly Series _owner; | |
| private readonly List<(int GroupIndex, int ParameterIndex)> _parameterCoordinates; | |
// Binds the tuner to its owning Series and precomputes the flattened list of
// (group, index) parameter coordinates used by the numeric-gradient loops.
public SeriesTuner(Series owner)
{
    _owner = owner;
    // Flatten parameter indices once so gradient loops avoid repeated nested-index setup.
    _parameterCoordinates = new List<(int GroupIndex, int ParameterIndex)>();
    for (int i = 0; i < _owner.Parameters.Length; i++)
    {
        for (int j = 0; j < _owner.Parameters[i].Length; j++)
        {
            _parameterCoordinates.Add((i, j));
        }
    }
}
private double[][] CreateGradientMatrix()
{
    // Allocate a zeroed jagged array shaped exactly like Parameters so
    // accumulated gradients can later be applied element-for-element.
    double[][] shapeSource = _owner.Parameters;
    double[][] gradients = new double[shapeSource.Length][];
    for (int group = 0; group < shapeSource.Length; group++)
        gradients[group] = new double[shapeSource[group].Length];
    return gradients;
}
// Delegates to the owner's canonical deep clone so all copies share one code path.
private double[][] CloneParameters(double[][] source) => _owner.CloneParameters(source);
// Overwrites the owner's live parameters with a deep copy of the given state.
private void RestoreParameters(double[][] source) => _owner.Parameters = CloneParameters(source);
// Adds a finite-loss candidate (and, when simple models are preferred, a
// pruned variant of it) to the pool, then re-sorts the pool by loss and trims
// it to the configured maximum size. Non-finite losses are ignored.
private void AddCandidate(List<(double loss, double[][] parameters)> candidates, double loss, double[][] parameters)
{
    if (!double.IsFinite(loss))
        return;
    candidates.Add((loss, CloneParameters(parameters)));
    if (_owner.ShouldPreferSimpleModels())
    {
        var pruned = BuildPrunedVariant(parameters, loss);
        if (double.IsFinite(pruned.loss))
            candidates.Add((pruned.loss, CloneParameters(pruned.parameters)));
    }
    // Keep the pool sorted ascending by loss; cap its size (minimum 4 entries).
    candidates.Sort((left, right) => left.loss.CompareTo(right.loss));
    int maxCandidates = Math.Max(4, _owner.SimplicityCandidatePoolSize);
    if (candidates.Count > maxCandidates)
        candidates.RemoveRange(maxCandidates, candidates.Count - maxCandidates);
}
// Attempts to simplify a candidate by zeroing removable coefficients one at a
// time (smallest magnitude first), keeping each removal only while the loss
// stays within an allowed slack of the reference loss. Returns the (possibly
// reduced) loss and the pruned parameter set.
private (double loss, double[][] parameters) BuildPrunedVariant(double[][] parameters, double referenceLoss)
{
    double[][] candidate = CloneParameters(parameters);
    double currentLoss = _owner.ComputeSeriesLossForParameters(candidate);
    if (!double.IsFinite(currentLoss))
        return (double.PositiveInfinity, candidate);
    // Slack budget: at least tolerance * slack factor, or 2% of the reference loss.
    double slack = Math.Max(_owner.ConvergenceTolerance * Math.Max(1.0, _owner.SimplicityLossSlackFactor), Math.Abs(referenceLoss) * 0.02);
    double allowedLoss = referenceLoss + slack;
    List<(int module, int index, double magnitude)> removable = new List<(int module, int index, double magnitude)>();
    // Coefficient slots that may be zeroed without changing the model's structure.
    int[] removableIndices =
    {
        PolyCoeffIndex,
        LinearCoeffIndex,
        ConstantIndex,
        LogCoeffIndex,
        SinCoeffIndex,
        CosCoeffIndex,
        ReciprocalCoeffIndex,
        SqrtCoeffIndex
    };
    // Collect every non-negligible removable coefficient across all modules.
    for (int module = 0; module < candidate.Length; module++)
    {
        if (candidate[module].Length < ParameterCountPerModule)
            continue;
        foreach (int index in removableIndices)
        {
            if (index >= candidate[module].Length)
                continue;
            double magnitude = Math.Abs(candidate[module][index]);
            if (magnitude <= 1e-12)
                continue; // already effectively zero
            removable.Add((module, index, magnitude));
        }
    }
    // Try removing the smallest coefficients first; revert any removal that
    // pushes the loss above the allowed budget.
    foreach (var parameter in removable.OrderBy(entry => entry.magnitude))
    {
        double backupValue = candidate[parameter.module][parameter.index];
        double backupReciprocalShift = parameter.index == ReciprocalCoeffIndex ? candidate[parameter.module][ReciprocalShiftIndex] : 0;
        double backupSqrtShift = parameter.index == SqrtCoeffIndex ? candidate[parameter.module][SqrtShiftIndex] : 0;
        // BUG FIX: the trig frequency/phase may be reset below, so they must be
        // backed up too; previously a rejected removal restored only the
        // coefficient and left frequency/phase clobbered at 1/0.
        double backupTrigFrequency = candidate[parameter.module][TrigFrequencyIndex];
        double backupTrigPhase = candidate[parameter.module][TrigPhaseIndex];
        candidate[parameter.module][parameter.index] = 0;
        // Companion shift parameters are zeroed together with their coefficient.
        if (parameter.index == ReciprocalCoeffIndex)
            candidate[parameter.module][ReciprocalShiftIndex] = 0;
        if (parameter.index == SqrtCoeffIndex)
            candidate[parameter.module][SqrtShiftIndex] = 0;
        // When both trig coefficients are gone, reset frequency/phase to neutral.
        if (parameter.index == SinCoeffIndex && Math.Abs(candidate[parameter.module][CosCoeffIndex]) <= 1e-12)
        {
            candidate[parameter.module][TrigFrequencyIndex] = 1;
            candidate[parameter.module][TrigPhaseIndex] = 0;
        }
        if (parameter.index == CosCoeffIndex && Math.Abs(candidate[parameter.module][SinCoeffIndex]) <= 1e-12)
        {
            candidate[parameter.module][TrigFrequencyIndex] = 1;
            candidate[parameter.module][TrigPhaseIndex] = 0;
        }
        double prunedLoss = _owner.ComputeSeriesLossForParameters(candidate);
        if (double.IsFinite(prunedLoss) && prunedLoss <= allowedLoss)
        {
            currentLoss = prunedLoss;
            continue; // keep this removal
        }
        // Removal hurt too much: restore the coefficient and all companions.
        candidate[parameter.module][parameter.index] = backupValue;
        if (parameter.index == ReciprocalCoeffIndex)
            candidate[parameter.module][ReciprocalShiftIndex] = backupReciprocalShift;
        if (parameter.index == SqrtCoeffIndex)
            candidate[parameter.module][SqrtShiftIndex] = backupSqrtShift;
        candidate[parameter.module][TrigFrequencyIndex] = backupTrigFrequency;
        candidate[parameter.module][TrigPhaseIndex] = backupTrigPhase;
    }
    return (currentLoss, candidate);
}
// Chooses the final model from the candidate pool. When simplicity preference
// is enabled, picks the least-complex candidate whose loss is within the
// allowed slack of the pool's best loss (and which must also be a perfect fit
// if the fallback already is one). Returns the fallback unchanged when no
// candidate qualifies; the boolean reports whether simplicity changed the pick.
private (double loss, double[][] parameters, bool selectedBySimplicity) SelectBestCandidate(
    List<(double loss, double[][] parameters)> candidates,
    double fallbackLoss,
    double[][] fallbackParameters)
{
    if (!_owner.ShouldPreferSimpleModels() || candidates.Count == 0)
        return (fallbackLoss, fallbackParameters, false);
    double bestLoss = candidates.Min(entry => entry.loss);
    bool fallbackIsPerfect = _owner.IsPerfectFitForParameters(fallbackParameters, fallbackLoss, _owner.ConvergenceTolerance);
    // Order by complexity first, then by loss, so the simplest adequate model wins.
    var eligible = candidates
        .Where(entry => _owner.IsWithinSimplicitySlack(entry.loss, bestLoss))
        .Where(entry => !fallbackIsPerfect || _owner.IsPerfectFitForParameters(entry.parameters, entry.loss, _owner.ConvergenceTolerance))
        .Select(entry => (entry.loss, entry.parameters, complexity: _owner.ComputeModelComplexity(entry.parameters)))
        .OrderBy(entry => entry.complexity)
        .ThenBy(entry => entry.loss)
        .ToList();
    if (eligible.Count == 0)
        return (fallbackLoss, fallbackParameters, false);
    var selected = eligible[0];
    // "Changed" when either the loss differs measurably or complexity strictly decreased.
    bool changed = Math.Abs(selected.loss - fallbackLoss) > 1e-14 ||
                   _owner.ComputeModelComplexity(selected.parameters) + 1e-12 < _owner.ComputeModelComplexity(fallbackParameters);
    return (selected.loss, CloneParameters(selected.parameters), changed);
}
private double ComputeLoss()
{
    // Mean squared error of the current model over all known points; any
    // non-finite prediction invalidates the whole candidate.
    double sumSquaredError = 0;
    int sampleCount = 0;
    foreach (var sample in _owner._series)
    {
        double prediction = _owner.Func(sample.Key);
        if (!double.IsFinite(prediction))
            return double.PositiveInfinity;
        double residual = prediction - sample.Value;
        sumSquaredError += residual * residual;
        sampleCount++;
    }
    return sampleCount == 0 ? double.PositiveInfinity : sumSquaredError / sampleCount;
}
private void RandomizeParameters(Random random, double scale)
{
    // Gaussian re-initialization; each group shares a small offset so groups
    // do not start perfectly symmetric.
    for (int group = 0; group < _owner.Parameters.Length; group++)
    {
        double sharedOffset = NextGaussian(random) * scale * 0.05;
        int length = _owner.Parameters[group].Length;
        for (int index = 0; index < length; index++)
        {
            _owner.Parameters[group][index] = NextGaussian(random) * scale + sharedOffset;
            StabilizeParameter(group, index);
        }
    }
}
private static double NextGaussian(Random random)
{
    // Box-Muller transform; the 1 - NextDouble() form keeps the Log argument
    // strictly positive.
    double logDraw = 1.0 - random.NextDouble();
    double angleDraw = 1.0 - random.NextDouble();
    return Math.Sqrt(-2.0 * Math.Log(logDraw)) * Math.Cos(2.0 * Math.PI * angleDraw);
}
private void PerturbParameters(Random random, double scale)
{
    // Adds small Gaussian noise to every parameter; used when a restart stagnates.
    for (int group = 0; group < _owner.Parameters.Length; group++)
    {
        int length = _owner.Parameters[group].Length;
        for (int index = 0; index < length; index++)
        {
            _owner.Parameters[group][index] += NextGaussian(random) * scale;
            StabilizeParameter(group, index);
        }
    }
}
// Data-driven warm start for recurrence-like data (e.g., Fibonacci/Lucas/Pell).
// Encodes y = b1*r1^x + b2*r2^x across the first two modules; when no
// second-order recurrence fits, falls back to a pure geometric seed.
private bool TryBuildSecondOrderRecurrenceSeed(out double[][] seed)
{
    seed = CloneParameters(_owner.Parameters);
    // Both modules must expose at least the exponential + log-base slots.
    int requiredExponentialSlots = LogBaseIndex + 1;
    if (_owner.Parameters.Length < 2 || _owner.Parameters[0].Length < requiredExponentialSlots || _owner.Parameters[1].Length < requiredExponentialSlots)
        return false;
    List<double> orderedY = SeriesSeedHelpers.GetContiguousIndexedValuesFromZero(_owner._series);
    if (orderedY.Count < 5)
        return false; // too few contiguous points to identify a second-order recurrence
    if (!SeriesSeedHelpers.TrySolveSecondOrderRecurrence(orderedY, out double r1, out double r2, out double b1, out double b2))
        return TryBuildGeometricSeed(orderedY, out seed);
    SeriesSeedHelpers.ZeroSeed(seed);
    seed[0][ExpBaseIndex] = r1;
    seed[0][ExpCoeffIndex] = b1;
    seed[0][LogBaseIndex] = Math.E; // neutral log base so the log term stays well-defined
    seed[1][ExpBaseIndex] = r2;
    seed[1][ExpCoeffIndex] = b2;
    seed[1][LogBaseIndex] = Math.E;
    return true;
}
// Quadratic warm start a*x^2 + b*x + c solved from the first three terms.
private bool TryBuildPolynomialSeed(out double[][] seed)
{
    seed = CloneParameters(_owner.Parameters);
    List<double> orderedY = SeriesSeedHelpers.GetContiguousIndexedValuesFromZero(_owner._series);
    if (seed.Length == 0 || seed[0].Length < ParameterCountPerModule)
        return false;
    if (!SeriesSeedHelpers.TrySolveQuadraticFromFirstThree(orderedY, out double a, out double b, out double c))
        return false;
    SeriesSeedHelpers.ZeroSeed(seed);
    seed[0][PolyPowerIndex] = 2;
    seed[0][PolyCoeffIndex] = a;
    seed[0][LinearCoeffIndex] = b;
    seed[0][ConstantIndex] = c;
    seed[0][LogBaseIndex] = Math.E;
    seed[0][TrigFrequencyIndex] = 1; // neutral trig frequency (trig coefficients stay zero)
    return true;
}
private bool TryBuildQuarterSineSeed(out double[][] seed)
{
    // Warm start for y = A*sin(pi*x/2); amplitude comes from the matcher's
    // reported maximum absolute value.
    seed = CloneParameters(_owner.Parameters);
    List<double> orderedY = SeriesSeedHelpers.GetContiguousIndexedValuesFromZero(_owner._series);
    if (seed.Length == 0 || seed[0].Length < ParameterCountPerModule)
        return false;
    if (!SeriesSeedHelpers.TryMatchQuarterSine(orderedY, out double maxAbs))
        return false;
    SeriesSeedHelpers.ZeroSeed(seed);
    seed[0][TrigFrequencyIndex] = Math.PI / 2.0;
    seed[0][TrigPhaseIndex] = 0;
    seed[0][SinCoeffIndex] = maxAbs;
    seed[0][LogBaseIndex] = Math.E;
    return true;
}
private bool TryBuildGeometricSeed(List<double> orderedY, out double[][] seed)
{
    // Fallback warm start for strict geometric progressions (e.g., powers of
    // two): y_n = y_0 * r^n encoded in the first exponential module.
    seed = CloneParameters(_owner.Parameters);
    // Guard the module layout before indexing slots; the original wrote
    // seed[1][LogBaseIndex] unconditionally, which would throw
    // IndexOutOfRangeException for layouts with fewer than two modules
    // (sibling seed builders all guard their slot accesses).
    if (seed.Length == 0 || seed[0].Length <= LogBaseIndex)
        return false;
    if (!SeriesSeedHelpers.TrySolveGeometricRatio(orderedY, out double ratio, out double firstValue))
        return false;
    SeriesSeedHelpers.ZeroSeed(seed);
    seed[0][ExpBaseIndex] = ratio;
    seed[0][ExpCoeffIndex] = firstValue;
    seed[0][LogBaseIndex] = Math.E;
    // The second module is optional; initialize its log base only when present.
    if (seed.Length > 1 && seed[1].Length > LogBaseIndex)
        seed[1][LogBaseIndex] = Math.E;
    return true;
}
// Non-data-driven recurrence prior: encourages two-exponential solutions
// without using structural pattern extraction from the observed series.
// Roots come from the characteristic equation r^2 = p*r + q; coefficients
// default to +/-0.5 but are matched to the first two observed values when
// available.
private bool TryBuildGenericRecurrencePrior(double p, double q, out double[][] seed)
{
    seed = CloneParameters(_owner.Parameters);
    // Both modules must expose at least the exponential + log-base slots.
    int requiredExponentialSlots = LogBaseIndex + 1;
    if (_owner.Parameters.Length < 2 || _owner.Parameters[0].Length < requiredExponentialSlots || _owner.Parameters[1].Length < requiredExponentialSlots)
        return false;
    double discriminant = p * p + 4 * q;
    if (!double.IsFinite(discriminant) || discriminant < 0)
        return false; // complex roots: this prior only models real exponentials
    double sqrtDiscriminant = Math.Sqrt(discriminant);
    double r1 = (p + sqrtDiscriminant) / 2.0;
    double r2 = (p - sqrtDiscriminant) / 2.0;
    if (!double.IsFinite(r1) || !double.IsFinite(r2) || Math.Abs(r1 - r2) < 1e-12)
        return false; // (near-)repeated roots would make the coefficient solve singular
    SeriesSeedHelpers.ZeroSeed(seed);
    double b1 = 0.5;
    double b2 = -0.5;
    List<double> orderedY = SeriesSeedHelpers.GetContiguousIndexedValuesFromZero(_owner._series);
    if (orderedY.Count >= 2)
    {
        // Solve b1 + b2 = y0 and b1*r1 + b2*r2 = y1 for the mixing coefficients.
        double y0 = orderedY[0];
        double y1 = orderedY[1];
        b1 = (y1 - y0 * r2) / (r1 - r2);
        b2 = y0 - b1;
    }
    seed[0][ExpBaseIndex] = r1;
    seed[0][ExpCoeffIndex] = b1;
    seed[0][LogBaseIndex] = Math.E;
    seed[1][ExpBaseIndex] = r2;
    seed[1][ExpCoeffIndex] = b2;
    seed[1][LogBaseIndex] = Math.E;
    return true;
}
// Composes the initial parameter states for all restarts, in priority order:
// 1) caller-provided parameters
// 2) deterministic structural seeds (recurrence/poly/quarter-sine), or
//    jittered generic recurrence priors when structural seeds are disabled
// 3) randomized restarts until restartCount states exist
private List<double[][]> BuildRestartInitialStates(int restartCount, double yScale, Random random)
{
    List<double[][]> restartStates = new List<double[][]>();
    double[][] initialParameters = CloneParameters(_owner.Parameters);
    restartStates.Add(CloneParameters(initialParameters));
    if (_owner.UseStructuralSeeds)
    {
        if (TryBuildSecondOrderRecurrenceSeed(out double[][] recurrenceSeed))
            restartStates.Add(recurrenceSeed);
        if (TryBuildPolynomialSeed(out double[][] polynomialSeed))
            restartStates.Add(polynomialSeed);
        if (TryBuildQuarterSineSeed(out double[][] quarterSineSeed))
            restartStates.Add(quarterSineSeed);
    }
    else
    {
        // Three recurrence priors: (p=1, q=1) plus two nearby q variants,
        // each jittered so they do not start from identical states.
        if (TryBuildGenericRecurrencePrior(p: 1.0, q: 1.0, out double[][] genericRecurrenceExact))
        {
            JitterNoSeedPrior(genericRecurrenceExact, random, yScale);
            restartStates.Add(genericRecurrenceExact);
        }
        if (TryBuildGenericRecurrencePrior(p: 1.0, q: 0.9, out double[][] genericRecurrenceNear))
        {
            JitterNoSeedPrior(genericRecurrenceNear, random, yScale);
            restartStates.Add(genericRecurrenceNear);
        }
        if (TryBuildGenericRecurrencePrior(p: 1.0, q: 1.1, out double[][] genericRecurrenceWide))
        {
            JitterNoSeedPrior(genericRecurrenceWide, random, yScale);
            restartStates.Add(genericRecurrenceWide);
        }
    }
    // Fill remaining slots with Gaussian-randomized states at cycling scales.
    while (restartStates.Count < restartCount)
    {
        double[] restartScales = new double[] { yScale * 0.02, yScale * 0.05, yScale * 0.1, yScale * 0.25, yScale * 0.5, yScale };
        double restartScale = restartScales[restartStates.Count % restartScales.Length];
        RestoreParameters(initialParameters);
        RandomizeParameters(random, restartScale);
        restartStates.Add(CloneParameters(_owner.Parameters));
    }
    RestoreParameters(initialParameters); // leave the owner's live state untouched
    return restartStates;
}
// Adds tiny Gaussian noise to the first two modules of a no-seed prior so
// multiple priors do not start from identical states; no-op when structural
// seeds are enabled.
private void JitterNoSeedPrior(double[][] parameters, Random random, double yScale)
{
    if (_owner.UseStructuralSeeds)
        return;
    // Coefficient jitter scales with the data magnitude; base jitter is fixed and tiny.
    double coefficientJitter = Math.Max(1e-6, yScale * 1e-4);
    double baseJitter = 1e-5;
    int activeModules = Math.Min(2, parameters.Length);
    for (int i = 0; i < activeModules; i++)
    {
        if (parameters[i].Length <= LogBaseIndex)
            continue; // module too small to hold the exponential/log slots
        parameters[i][ExpBaseIndex] += NextGaussian(random) * baseJitter;
        parameters[i][ExpCoeffIndex] += NextGaussian(random) * coefficientJitter;
        // Keep the log base positive and at least 0.5.
        parameters[i][LogBaseIndex] = Math.Max(0.5, Math.Abs(parameters[i][LogBaseIndex]));
    }
}
// Accumulates the loss-gradient contribution of a single data point into the
// provided gradient matrix. Prefers analytic gradients supplied by the owner;
// otherwise falls back to central finite differences per parameter coordinate.
private void AccumulateGradientsForPoint(double x, double y, double predicted, double error, double[][] gradients, Func<bool>? shouldStop = null)
{
    // Prefer analytic gradients when supplied by a derived class.
    if (_owner.TryAccumulateGradients(x, y, predicted, error, gradients, _owner._series.Count))
        return;
    // Numeric gradient fallback (finite differences per parameter coordinate).
    // MSE chain rule: d/dp of (error^2 / N) = (2*error/N) * d(prediction)/dp.
    double scale = (2 * error) / _owner._series.Count;
    foreach (var coordinate in _parameterCoordinates)
    {
        if (shouldStop?.Invoke() == true)
            return; // cooperative cancellation from parallel workers
        double derivative = ComputePredictionDerivative(x, coordinate.GroupIndex, coordinate.ParameterIndex);
        if (!double.IsFinite(derivative))
            continue;
        gradients[coordinate.GroupIndex][coordinate.ParameterIndex] += scale * derivative;
    }
}
// Runs one full optimizer restart (Adam gradient descent with periodic
// stagnation recovery) from the provided initial parameter state and returns
// the best (loss, parameters) pair observed. shouldStop allows cooperative
// cancellation by parallel workers.
private (double loss, double[][] parameters) RunSingleRestart(double[][] startParameters, double yScale, int maxIterations, double initialLearningRate, double beta1, double beta2, double epsilon, double convergenceTolerance, int randomSeed, Func<bool>? shouldStop = null)
{
    // The stagnation-perturbation scale is derived from the seed, cycling
    // through fixed fractions of the data magnitude.
    double[] restartScales = new double[] { yScale * 0.02, yScale * 0.05, yScale * 0.1, yScale * 0.25, yScale * 0.5, yScale };
    double restartScale = restartScales[Math.Abs(randomSeed) % restartScales.Length];
    Random random = new Random(randomSeed);
    RestoreParameters(startParameters);
    double bestRestartLoss = ComputeLoss();
    double[][] bestRestartParameters = CloneParameters(_owner.Parameters);
    int stagnationCounter = 0;
    // Adam first/second moment accumulators, shaped like Parameters.
    double[][] m = CreateGradientMatrix();
    double[][] v = CreateGradientMatrix();
    if (_owner.IsPerfectFitForParameters(bestRestartParameters, bestRestartLoss, convergenceTolerance))
        return (bestRestartLoss, bestRestartParameters);
    for (int iteration = 0; iteration < maxIterations; iteration++)
    {
        // Cooperative cancellation: exit quickly when another worker already found a perfect fit.
        if (shouldStop?.Invoke() == true)
            break;
        double[][] gradients = CreateGradientMatrix();
        foreach (var point in _owner._series)
        {
            if (shouldStop?.Invoke() == true)
                break;
            double x = point.Key;
            double y = point.Value;
            double predicted = _owner.Func(x);
            if (!double.IsFinite(predicted))
                continue; // skip points the current model cannot evaluate
            double error = predicted - y;
            if (!double.IsFinite(error))
                continue;
            AccumulateGradientsForPoint(x, y, predicted, error, gradients, shouldStop);
        }
        // Smooth exponential learning-rate decay (factor 0.98 per 200 iterations).
        double learningRate = initialLearningRate * Math.Pow(0.98, iteration / 200.0);
        int t = iteration + 1;
        // Adam-style parameter update.
        for (int i = 0; i < _owner.Parameters.Length; i++)
        {
            if (shouldStop?.Invoke() == true)
                break;
            for (int j = 0; j < _owner.Parameters[i].Length; j++)
            {
                if (shouldStop?.Invoke() == true)
                    break;
                double gradient = gradients[i][j];
                if (!double.IsFinite(gradient))
                    gradient = 0;
                gradient = Math.Clamp(gradient, -1e6, 1e6); // guard against exploding gradients
                m[i][j] = beta1 * m[i][j] + (1 - beta1) * gradient;
                v[i][j] = beta2 * v[i][j] + (1 - beta2) * gradient * gradient;
                // Bias-corrected first/second moment estimates.
                double mHat = m[i][j] / (1 - Math.Pow(beta1, t));
                double vHat = v[i][j] / (1 - Math.Pow(beta2, t));
                _owner.Parameters[i][j] -= learningRate * mHat / (Math.Sqrt(vHat) + epsilon);
                _owner.Parameters[i][j] = Math.Clamp(_owner.Parameters[i][j], -1e6, 1e6);
                StabilizeParameter(i, j);
            }
        }
        // Every 50 iterations: track the best state, count stagnation, and
        // perturb + reset Adam moments when progress stalls for 6 checks.
        if ((iteration + 1) % 50 == 0)
        {
            double currentLoss = ComputeLoss();
            if (double.IsFinite(currentLoss) && currentLoss < bestRestartLoss)
            {
                bestRestartLoss = currentLoss;
                bestRestartParameters = CloneParameters(_owner.Parameters);
                stagnationCounter = 0;
            }
            else
            {
                stagnationCounter++;
            }
            if (stagnationCounter >= 6)
            {
                PerturbParameters(random, Math.Max(1e-3, restartScale * 0.1));
                m = CreateGradientMatrix();
                v = CreateGradientMatrix();
                stagnationCounter = 0;
            }
            if (_owner.IsPerfectFitForParameters(_owner.Parameters, currentLoss, convergenceTolerance))
                break;
        }
    }
    return (bestRestartLoss, bestRestartParameters);
}
private double ComputePredictionDerivative(double x, int parameterGroupIndex, int parameterIndex)
{
    // Central finite difference d(prediction)/d(parameter) with a step that
    // scales with the parameter's magnitude.
    double savedValue = _owner.Parameters[parameterGroupIndex][parameterIndex];
    double step = 1e-5 * (Math.Abs(savedValue) + 1.0);
    _owner.Parameters[parameterGroupIndex][parameterIndex] = savedValue + step;
    double forward = _owner.Func(x);
    _owner.Parameters[parameterGroupIndex][parameterIndex] = savedValue - step;
    double backward = _owner.Func(x);
    _owner.Parameters[parameterGroupIndex][parameterIndex] = savedValue; // always restore
    if (!double.IsFinite(forward) || !double.IsFinite(backward))
        return 0;
    return (forward - backward) / (2 * step);
}
// Clamps one parameter into a safe range: zeroes modules beyond the two-module
// budget in no-seed mode, replaces non-finite values with zero, and keeps the
// log base at or above 0.5.
private void StabilizeParameter(int groupIndex, int parameterIndex)
{
    if (!_owner.UseStructuralSeeds && _owner.Parameters[groupIndex].Length >= ParameterCountPerModule)
    {
        // In no-seed mode only the first two modules are allowed to be active.
        bool outsideModuleBudget = groupIndex >= 2;
        if (outsideModuleBudget)
        {
            _owner.Parameters[groupIndex][parameterIndex] = 0;
            return;
        }
    }
    // Keep parameters finite to avoid NaN propagation.
    if (!double.IsFinite(_owner.Parameters[groupIndex][parameterIndex]))
        _owner.Parameters[groupIndex][parameterIndex] = 0;
    if (_owner.Parameters[groupIndex].Length >= ParameterCountPerModule && parameterIndex == LogBaseIndex)
        _owner.Parameters[groupIndex][parameterIndex] = Math.Max(0.5, Math.Abs(_owner.Parameters[groupIndex][parameterIndex]));
}
// Public entry point for refining a single candidate: runs one optimizer
// restart from the given parameters, with all tuning knobs read from the
// owner's configuration and clamped into valid ranges.
public (double loss, double[][] parameters) RefineCandidate(double[][] startParameters, int seed)
{
    return RunSingleRestart(
        startParameters,
        Math.Max(1, _owner._series.Values.Select(Math.Abs).DefaultIfEmpty(1).Max()), // yScale from data magnitude
        Math.Max(1, _owner.MaxIterations),
        Math.Max(1e-12, _owner.InitialLearningRate),
        Math.Clamp(_owner.Beta1, 0, 0.999999999),
        Math.Clamp(_owner.Beta2, 0, 0.999999999),
        Math.Max(1e-12, _owner.Epsilon),
        Math.Max(1e-16, _owner.ConvergenceTolerance),
        seed);
}
| public void TuneParameters() | |
| { | |
| // Entry point for tuning: build starts, run serial/parallel restarts, keep best solution. | |
| if (_owner._series.Count < 3) | |
| throw new ArgumentException("At least 3 points are required to tune the parameters."); | |
| double yScale = Math.Max(1, _owner._series.Values.Select(Math.Abs).DefaultIfEmpty(1).Max()); | |
| int restartCount = Math.Max(1, _owner.RestartCount); | |
| int maxIterations = Math.Max(1, _owner.MaxIterations); | |
| double initialLearningRate = Math.Max(1e-12, _owner.InitialLearningRate); | |
| double beta1 = Math.Clamp(_owner.Beta1, 0, 0.999999999); | |
| double beta2 = Math.Clamp(_owner.Beta2, 0, 0.999999999); | |
| double epsilon = Math.Max(1e-12, _owner.Epsilon); | |
| double convergenceTolerance = Math.Max(1e-16, _owner.ConvergenceTolerance); | |
| int progressInterval = Math.Max(1, _owner.ProgressInterval); | |
| int baseSeed = _owner.RandomSeed ?? Environment.TickCount; | |
| Random random = new Random(baseSeed); | |
| double bestLoss = double.PositiveInfinity; | |
| double[][] bestParameters = CloneParameters(_owner.Parameters); | |
| List<(double loss, double[][] parameters)> candidatePool = new List<(double loss, double[][] parameters)>(); | |
| List<double[][]> restartStates = BuildRestartInitialStates(restartCount, yScale, random); | |
| int[] restartSeeds = Enumerable.Range(0, restartCount).Select(index => unchecked(baseSeed + 1009 * (index + 1))).ToArray(); | |
| bool useFastSeedPrecheck = _owner.UseStructuralSeeds; | |
| // Fast deterministic pre-check for seeded restart states. | |
| if (useFastSeedPrecheck) | |
| { | |
| for (int restart = 0; restart < restartStates.Count; restart++) | |
| { | |
| RestoreParameters(restartStates[restart]); | |
| double seedLoss = ComputeLoss(); | |
| if (_owner.IsPerfectFitForParameters(_owner.Parameters, seedLoss, convergenceTolerance)) | |
| { | |
| bestLoss = seedLoss; | |
| bestParameters = CloneParameters(_owner.Parameters); | |
| _owner.AddTuningSnapshot(restart + 1, 0, seedLoss); | |
| string precheckSource = "seeded initialization"; | |
| _owner.SetFitStatus(seedLoss, perfectFit: true, message: $"Perfect match found from {precheckSource} (restart {restart + 1}, loss={seedLoss:G6})."); | |
| _owner.ReportProgress($"Tuning skipped: {precheckSource} already fit (restart {restart + 1}, loss={seedLoss:G6})."); | |
| return; | |
| } | |
| AddCandidate(candidatePool, seedLoss, _owner.Parameters); | |
| } | |
| } | |
| RestoreParameters(bestParameters); | |
| double initialLoss = ComputeLoss(); | |
| _owner.AddTuningSnapshot(restart: 0, iteration: 0, loss: initialLoss); | |
| if (_owner.IsPerfectFitForParameters(_owner.Parameters, initialLoss, convergenceTolerance)) | |
| { | |
| _owner.SetFitStatus(initialLoss, perfectFit: true, message: $"Perfect match found without optimization (loss={initialLoss:G6})."); | |
| _owner.ReportProgress($"Tuning skipped: initial parameters already fit (loss={initialLoss:G6})."); | |
| return; | |
| } | |
| _owner.ReportProgress($"Tuning started ({restartCount} restarts, {maxIterations} iterations max). Initial loss={initialLoss:G6}."); | |
| if (_owner.RandomSeed.HasValue) | |
| _owner.ReportProgress($"Random mode: deterministic (seed={_owner.RandomSeed.Value})."); | |
| else | |
| _owner.ReportProgress("Random mode: non-deterministic."); | |
| if (_owner.UseParallelRestarts) | |
| { | |
| // Parallel strategy: each restart is independent, so we optimize them concurrently. | |
| object sync = new object(); | |
| int completedRestarts = 0; | |
| int perfectFitFound = 0; | |
| using CancellationTokenSource cancellationSource = new CancellationTokenSource(); | |
| ParallelOptions options = new ParallelOptions | |
| { | |
| MaxDegreeOfParallelism = Math.Max(1, _owner.MaxParallelism), | |
| CancellationToken = cancellationSource.Token | |
| }; | |
| try | |
| { | |
| Parallel.For(0, restartCount, options, (restart, loopState) => | |
| { | |
| // Fast pre-check to avoid starting extra work after convergence. | |
| if (Volatile.Read(ref perfectFitFound) == 1 || options.CancellationToken.IsCancellationRequested) | |
| { | |
| loopState.Stop(); | |
| return; | |
| } | |
| Series worker = _owner.CreateTuningClone(); | |
| SeriesTuner workerTuner = new SeriesTuner(worker); | |
| var result = workerTuner.RunSingleRestart( | |
| restartStates[restart], | |
| yScale, | |
| maxIterations, | |
| initialLearningRate, | |
| beta1, | |
| beta2, | |
| epsilon, | |
| convergenceTolerance, | |
| restartSeeds[restart], | |
| () => Volatile.Read(ref perfectFitFound) == 1 || options.CancellationToken.IsCancellationRequested); | |
| lock (sync) | |
| { | |
| // Single lock for best-state updates + user-visible progress consistency. | |
| completedRestarts++; | |
| bool hadPerfectFitBeforeUpdate = Volatile.Read(ref perfectFitFound) == 1; | |
| if (double.IsFinite(result.loss) && result.loss < bestLoss) | |
| { | |
| bestLoss = result.loss; | |
| bestParameters = CloneParameters(result.parameters); | |
| if (_owner.IsPerfectFitForParameters(result.parameters, bestLoss, convergenceTolerance)) | |
| { | |
| Interlocked.Exchange(ref perfectFitFound, 1); | |
| cancellationSource.Cancel(); | |
| loopState.Stop(); | |
| } | |
| } | |
| AddCandidate(candidatePool, result.loss, result.parameters); | |
| bool suppressCompletionLog = _owner.QuietAfterConvergenceInParallel && hadPerfectFitBeforeUpdate; | |
| if (!suppressCompletionLog) | |
| _owner.ReportProgress($"Restart {completedRestarts}/{restartCount} completed (task #{restart + 1}), best loss={bestLoss:G6}."); | |
| _owner.ReportRestartProgressBar(completedRestarts, restartCount, bestLoss); | |
| if (_owner.CaptureTuningSnapshots) | |
| _owner.AddTuningSnapshot(restart + 1, maxIterations, bestLoss); | |
| } | |
| }); | |
| } | |
| catch (OperationCanceledException) | |
| { | |
| } | |
| if (Volatile.Read(ref perfectFitFound) == 1) | |
| _owner.ReportProgress("Parallel early-stop: perfect fit found; remaining workers exited."); | |
| var selected = SelectBestCandidate(candidatePool, bestLoss, bestParameters); | |
| bestLoss = selected.loss; | |
| bestParameters = CloneParameters(selected.parameters); | |
| if (selected.selectedBySimplicity) | |
| _owner.ReportProgress($"Final model selected by simplicity within loss slack. loss={bestLoss:G6}."); | |
| _owner.EnsureProgressBarLineCompleted(); | |
| RestoreParameters(bestParameters); | |
| _owner.AddTuningSnapshot(restartCount, maxIterations, bestLoss); | |
| bool perfect = _owner.IsPerfectFitForParameters(bestParameters, bestLoss, convergenceTolerance); | |
| _owner.SetFitStatus( | |
| bestLoss, | |
| perfect, | |
| perfect | |
| ? $"Perfect match found by tuner (loss={bestLoss:G6})." | |
| : $"No perfect match found by tuner (best loss={bestLoss:G6}, tolerance={convergenceTolerance:G6})."); | |
| _owner.ReportProgress($"Tuning completed. Final loss={bestLoss:G6}."); | |
| return; | |
| } | |
| for (int restart = 0; restart < restartCount; restart++) | |
| { | |
| // Serial strategy: evaluate each restart one-by-one. | |
| double[] restartScales = new double[] { yScale * 0.02, yScale * 0.05, yScale * 0.1, yScale * 0.25, yScale * 0.5, yScale }; | |
| double restartScale = restartScales[restart % restartScales.Length]; | |
| Random restartRandom = new Random(restartSeeds[restart]); | |
| RestoreParameters(restartStates[restart]); | |
| double bestRestartLoss = ComputeLoss(); | |
| double[][] bestRestartParameters = CloneParameters(_owner.Parameters); | |
| int stagnationCounter = 0; | |
| _owner.ReportProgress($"Restart {restart + 1}/{restartCount}: start loss={bestRestartLoss:G6}."); | |
| _owner.ReportProgressBar(restart, restartCount, 0, maxIterations, bestRestartLoss); | |
| _owner.AddTuningSnapshot(restart + 1, 0, bestRestartLoss); | |
| if (_owner.IsPerfectFitForParameters(bestRestartParameters, bestRestartLoss, convergenceTolerance)) | |
| { | |
| if (bestRestartLoss < bestLoss) | |
| { | |
| bestLoss = bestRestartLoss; | |
| bestParameters = CloneParameters(bestRestartParameters); | |
| } | |
| _owner.ReportProgress($"Restart {restart + 1}/{restartCount}: already converged at start."); | |
| if (_owner.StopOnFirstPerfectFit) | |
| { | |
| _owner.ReportProgress($"Early stop: converged after restart {restart + 1} with loss={bestLoss:G6}."); | |
| break; | |
| } | |
| } | |
| double[][] m = CreateGradientMatrix(); | |
| double[][] v = CreateGradientMatrix(); | |
| for (int iteration = 0; iteration < maxIterations; iteration++) | |
| { | |
| double[][] gradients = CreateGradientMatrix(); | |
| foreach (var point in _owner._series) | |
| { | |
| double x = point.Key; | |
| double y = point.Value; | |
| double predicted = _owner.Func(x); | |
| if (!double.IsFinite(predicted)) | |
| continue; | |
| double error = predicted - y; | |
| if (!double.IsFinite(error)) | |
| continue; | |
| AccumulateGradientsForPoint(x, y, predicted, error, gradients); | |
| } | |
| double learningRate = initialLearningRate * Math.Pow(0.98, iteration / 200.0); | |
| int t = iteration + 1; | |
| for (int i = 0; i < _owner.Parameters.Length; i++) | |
| { | |
| for (int j = 0; j < _owner.Parameters[i].Length; j++) | |
| { | |
| double gradient = gradients[i][j]; | |
| if (!double.IsFinite(gradient)) | |
| gradient = 0; | |
| gradient = Math.Clamp(gradient, -1e6, 1e6); | |
| m[i][j] = beta1 * m[i][j] + (1 - beta1) * gradient; | |
| v[i][j] = beta2 * v[i][j] + (1 - beta2) * gradient * gradient; | |
| double mHat = m[i][j] / (1 - Math.Pow(beta1, t)); | |
| double vHat = v[i][j] / (1 - Math.Pow(beta2, t)); | |
| _owner.Parameters[i][j] -= learningRate * mHat / (Math.Sqrt(vHat) + epsilon); | |
| _owner.Parameters[i][j] = Math.Clamp(_owner.Parameters[i][j], -1e6, 1e6); | |
| StabilizeParameter(i, j); | |
| } | |
| } | |
| if ((iteration + 1) % 50 == 0) | |
| { | |
| // Periodic quality check + stagnation handling. | |
| double currentLoss = ComputeLoss(); | |
| if ((iteration + 1) % Math.Max(1, _owner.SnapshotInterval) == 0) | |
| _owner.AddTuningSnapshot(restart + 1, iteration + 1, currentLoss); | |
| _owner.ReportProgressBar(restart, restartCount, iteration + 1, maxIterations, currentLoss); | |
| if ((iteration + 1) % progressInterval == 0 || iteration + 1 == 50) | |
| _owner.ReportProgress($"Restart {restart + 1}/{restartCount}, iter {iteration + 1}/{maxIterations}, loss={currentLoss:G6}."); | |
| if (double.IsFinite(currentLoss) && currentLoss < bestRestartLoss) | |
| { | |
| bestRestartLoss = currentLoss; | |
| bestRestartParameters = CloneParameters(_owner.Parameters); | |
| stagnationCounter = 0; | |
| } | |
| else | |
| { | |
| stagnationCounter++; | |
| } | |
| if (stagnationCounter >= 6) | |
| { | |
| PerturbParameters(restartRandom, Math.Max(1e-3, restartScale * 0.1)); | |
| m = CreateGradientMatrix(); | |
| v = CreateGradientMatrix(); | |
| stagnationCounter = 0; | |
| } | |
| if (double.IsFinite(currentLoss) && currentLoss <= convergenceTolerance) | |
| break; | |
| } | |
| } | |
| RestoreParameters(bestRestartParameters); | |
| double restartLoss = ComputeLoss(); | |
| if (double.IsFinite(restartLoss) && restartLoss < bestLoss) | |
| { | |
| bestLoss = restartLoss; | |
| bestParameters = CloneParameters(_owner.Parameters); | |
| } | |
| AddCandidate(candidatePool, restartLoss, _owner.Parameters); | |
| _owner.ReportProgress($"Restart {restart + 1}/{restartCount}: best loss={restartLoss:G6}. Global best={bestLoss:G6}."); | |
| if (_owner.StopOnFirstPerfectFit && _owner.IsPerfectFitForParameters(bestParameters, bestLoss, convergenceTolerance)) | |
| { | |
| _owner.ReportProgress($"Early stop: converged after restart {restart + 1} with loss={bestLoss:G6}."); | |
| break; | |
| } | |
| } | |
| _owner.ReportProgressBar(restartCount - 1, restartCount, maxIterations, maxIterations, bestLoss); | |
| var serialSelected = SelectBestCandidate(candidatePool, bestLoss, bestParameters); | |
| bestLoss = serialSelected.loss; | |
| bestParameters = CloneParameters(serialSelected.parameters); | |
| if (serialSelected.selectedBySimplicity) | |
| _owner.ReportProgress($"Final model selected by simplicity within loss slack. loss={bestLoss:G6}."); | |
| _owner.EnsureProgressBarLineCompleted(); | |
| RestoreParameters(bestParameters); | |
| _owner.AddTuningSnapshot(restartCount, maxIterations, bestLoss); | |
| bool foundPerfectFit = _owner.IsPerfectFitForParameters(bestParameters, bestLoss, convergenceTolerance); | |
| _owner.SetFitStatus( | |
| bestLoss, | |
| foundPerfectFit, | |
| foundPerfectFit | |
| ? $"Perfect match found by tuner (loss={bestLoss:G6})." | |
| : $"No perfect match found by tuner (best loss={bestLoss:G6}, tolerance={convergenceTolerance:G6})."); | |
| _owner.ReportProgress($"Tuning completed. Final loss={bestLoss:G6}."); | |
| } | |
| } | |
| } | |
private static class CsvExport
{
    // Additive-term coefficient slots in a parameter row. When one of these is
    // negligibly small it is zeroed for display/export so exported formulas do
    // not carry meaningless 1e-12-style noise terms. (Base/power/shift slots
    // like exp base or log base are intentionally NOT zeroed: zeroing them
    // would change the meaning of a term whose coefficient is non-zero.)
    private static readonly int[] CoefficientIndicesForDisplay =
    {
        1,  // exp coefficient
        3,  // polynomial coefficient
        4,  // linear coefficient
        5,  // constant
        6,  // log coefficient
        10, // sin coefficient
        11, // cos coefficient
        12, // reciprocal coefficient
        14, // sqrt coefficient
    };

    /// <summary>Flattens a jagged parameter matrix into one row-major vector.</summary>
    public static double[] FlattenParameters(double[][] parameters)
    {
        return parameters.SelectMany(row => row).ToArray();
    }

    /// <summary>
    /// Writes the known data points plus model predictions and spreadsheet
    /// formulas (both the invariant "IF" and the current locale's IF function
    /// name) to a locale-aware CSV file at <paramref name="filePath"/>.
    /// </summary>
    public static void SaveKnownPointsCsv(string filePath, IReadOnlyList<KeyValuePair<double, double>> points, double[][] parameters)
    {
        string separator = CultureInfo.CurrentCulture.TextInfo.ListSeparator;
        string localizedIfFunctionName = GetLocalizedIfFunctionName();
        double[][] sanitizedParameters = SanitizeParametersForDisplay(parameters);
        // Local evaluator used only for deterministic point-by-point predictions
        // against a fixed parameter snapshot (no tuning involved).
        Series evaluator = new Series();
        StringBuilder builder = new StringBuilder();
        builder.AppendLine($"x{separator}y{separator}y_model_raw{separator}y_model_rounded{separator}y_formula{separator}y_formula_local");
        // Row 1 is the header, so the first data row lands in spreadsheet row 2;
        // the formulas reference each row's own x cell (column A).
        int rowIndex = 2;
        foreach (var point in points)
        {
            string formula = BuildSpreadsheetFormula(sanitizedParameters, $"A{rowIndex}", "IF");
            string localizedFormula = BuildSpreadsheetFormula(sanitizedParameters, $"A{rowIndex}", localizedIfFunctionName);
            // Numeric model outputs are written directly to avoid spreadsheet
            // recalculation quirks and locale-dependent interpretation.
            double modelRaw = evaluator.EvaluateWithParameters(sanitizedParameters, point.Key);
            double modelRounded = Math.Round(modelRaw, 8);
            string xValue = ToLocalized(point.Key);
            string yValue = ToLocalized(point.Value);
            string modelRawValue = ToLocalized(modelRaw);
            string modelRoundedValue = ToLocalized(modelRounded);
            builder.AppendLine($"{xValue}{separator}{yValue}{separator}{modelRawValue}{separator}{modelRoundedValue}{separator}{EscapeCsv(formula, separator)}{separator}{EscapeCsv(localizedFormula, separator)}");
            rowIndex++;
        }
        File.WriteAllText(filePath, builder.ToString());
    }

    /// <summary>
    /// Returns a deep copy of <paramref name="parameters"/> with negligible
    /// additive-term coefficients (|value| &lt;= 1e-8) zeroed for display.
    /// The input matrix is never mutated.
    /// </summary>
    private static double[][] SanitizeParametersForDisplay(double[][] parameters)
    {
        const double negligible = 1e-8;
        double[][] copy = new double[parameters.Length][];
        for (int i = 0; i < parameters.Length; i++)
        {
            copy[i] = (double[])parameters[i].Clone();
            foreach (int index in CoefficientIndicesForDisplay)
            {
                if (copy[i].Length > index && Math.Abs(copy[i][index]) <= negligible)
                    copy[i][index] = 0;
            }
        }
        return copy;
    }

    /// <summary>
    /// Writes the tuning snapshot trace (restart, iteration, loss, and the full
    /// flattened parameter vector per snapshot) as a locale-aware CSV file.
    /// </summary>
    public static void SaveSnapshotsCsv(string filePath, IReadOnlyList<Series.TuningSnapshot> snapshots)
    {
        string separator = CultureInfo.CurrentCulture.TextInfo.ListSeparator;
        if (snapshots.Count == 0)
        {
            // Fix: use the platform newline, consistent with AppendLine in the
            // non-empty path (previously a bare '\n' even on Windows).
            File.WriteAllText(filePath, $"restart{separator}iteration{separator}loss{Environment.NewLine}");
            return;
        }
        // Column count is taken from the first snapshot; all snapshots are
        // expected to share the same parameter layout.
        int parameterCount = FlattenParameters(snapshots[0].Parameters).Length;
        StringBuilder builder = new StringBuilder();
        builder.Append($"restart{separator}iteration{separator}loss");
        for (int parameterIndex = 0; parameterIndex < parameterCount; parameterIndex++)
            builder.Append($"{separator}p{parameterIndex}");
        builder.AppendLine();
        foreach (var snapshot in snapshots)
        {
            // Store full parameter vectors so tuning traces are reproducible and plottable.
            builder.Append($"{snapshot.Restart}{separator}{snapshot.Iteration}{separator}{ToLocalized(snapshot.Loss)}");
            foreach (double value in FlattenParameters(snapshot.Parameters))
                builder.Append($"{separator}{ToLocalized(value)}");
            builder.AppendLine();
        }
        File.WriteAllText(filePath, builder.ToString());
    }

    /// <summary>
    /// Formats a double in the current culture with full round-trip precision
    /// (G17), so spreadsheets in that locale parse the value back exactly.
    /// </summary>
    private static string ToLocalized(double value)
    {
        return value.ToString("G17", CultureInfo.CurrentCulture);
    }

    /// <summary>
    /// Quotes a CSV field when it contains the list separator, a quote, or a
    /// line break, doubling any embedded quotes per RFC-4180 conventions.
    /// </summary>
    private static string EscapeCsv(string value, string separator)
    {
        bool needsQuoting = value.Contains('"') || value.Contains(separator) || value.Contains('\n') || value.Contains('\r');
        if (!needsQuoting)
            return value;
        return $"\"{value.Replace("\"", "\"\"")}\"";
    }

    /// <summary>
    /// Maps the current UI culture to that locale's spreadsheet IF function
    /// name (e.g. "WENN" for German); falls back to "IF".
    /// </summary>
    private static string GetLocalizedIfFunctionName()
    {
        string language = CultureInfo.CurrentCulture.TwoLetterISOLanguageName;
        return language switch
        {
            "de" => "WENN",
            "fr" => "SI",
            "es" => "SI",
            "it" => "SE",
            "hu" => "HA",
            _ => "IF"
        };
    }

    /// <summary>
    /// Builds a spreadsheet formula ("=...") that evaluates the full model at
    /// the x value in <paramref name="xCellReference"/>. Guarded IF/IFERROR
    /// wrappers mirror the solver's safe-evaluation rules (no log of
    /// non-positive values, no division by zero, no sqrt of negatives).
    /// </summary>
    private static string BuildSpreadsheetFormula(double[][] parameters, string xCellReference, string ifFunctionName)
    {
        const int expBaseIndex = 0;
        const int expCoeffIndex = 1;
        const int polyPowerIndex = 2;
        const int polyCoeffIndex = 3;
        const int linearCoeffIndex = 4;
        const int constantIndex = 5;
        const int logCoeffIndex = 6;
        const int logBaseIndex = 7;
        const int trigFrequencyIndex = 8;
        const int trigPhaseIndex = 9;
        const int sinCoeffIndex = 10;
        const int cosCoeffIndex = 11;
        const int reciprocalCoeffIndex = 12;
        const int reciprocalShiftIndex = 13;
        const int sqrtCoeffIndex = 14;
        const int sqrtShiftIndex = 15;
        // Spreadsheet argument separators are locale-dependent (',' vs ';').
        string argSeparator = CultureInfo.CurrentCulture.TextInfo.ListSeparator;
        List<string> terms = new List<string>();
        for (int i = 0; i < parameters.Length; i++)
        {
            // Rows that do not carry at least the core slots are skipped.
            if (parameters[i].Length < (logBaseIndex + 1))
                continue;
            double a = parameters[i][expBaseIndex];
            double b = parameters[i][expCoeffIndex];
            double c = parameters[i][polyPowerIndex];
            double d = parameters[i][polyCoeffIndex];
            double e = parameters[i][linearCoeffIndex];
            double f = parameters[i][constantIndex];
            double g = parameters[i][logCoeffIndex];
            double h = parameters[i][logBaseIndex];
            // Extended slots are optional; missing ones default to zero terms.
            double trigFrequency = parameters[i].Length > trigFrequencyIndex ? parameters[i][trigFrequencyIndex] : 0;
            double trigPhase = parameters[i].Length > trigPhaseIndex ? parameters[i][trigPhaseIndex] : 0;
            double sinCoeff = parameters[i].Length > sinCoeffIndex ? parameters[i][sinCoeffIndex] : 0;
            double cosCoeff = parameters[i].Length > cosCoeffIndex ? parameters[i][cosCoeffIndex] : 0;
            double reciprocalCoeff = parameters[i].Length > reciprocalCoeffIndex ? parameters[i][reciprocalCoeffIndex] : 0;
            double reciprocalShift = parameters[i].Length > reciprocalShiftIndex ? parameters[i][reciprocalShiftIndex] : 0;
            double sqrtCoeff = parameters[i].Length > sqrtCoeffIndex ? parameters[i][sqrtCoeffIndex] : 0;
            double sqrtShift = parameters[i].Length > sqrtShiftIndex ? parameters[i][sqrtShiftIndex] : 0;
            string safeExp = BuildSafePowExpression(ToLocalized(a), xCellReference, ifFunctionName, argSeparator);
            string safePoly = BuildSafePowExpression(xCellReference, ToLocalized(c), ifFunctionName, argSeparator);
            string expTerm = $"({safeExp})*{ToLocalized(b)}";
            string polyTerm = $"({safePoly})*{ToLocalized(d)}";
            string linearTerm = $"{xCellReference}*{ToLocalized(e)}";
            string constantTerm = ToLocalized(f);
            // log_h(x) = LN(x)/LN(h), guarded against x<=0, h<=0 and LN(h)=0.
            string logTerm =
                $"{ifFunctionName}({xCellReference}>0{argSeparator}" +
                $"{ifFunctionName}({ToLocalized(h)}>0{argSeparator}" +
                $"{ifFunctionName}(LN({ToLocalized(h)})<>0{argSeparator}LN({xCellReference})*{ToLocalized(g)}/LN({ToLocalized(h)}){argSeparator}0){argSeparator}0){argSeparator}0)";
            string trigAngle = $"({xCellReference}*{ToLocalized(trigFrequency)}+{ToLocalized(trigPhase)})";
            string sinTerm = $"SIN({trigAngle})*{ToLocalized(sinCoeff)}";
            string cosTerm = $"COS({trigAngle})*{ToLocalized(cosCoeff)}";
            string reciprocalDenominator = $"({xCellReference}+{ToLocalized(reciprocalShift)})";
            string reciprocalTerm =
                $"{ifFunctionName}({reciprocalDenominator}<>0{argSeparator}{ToLocalized(reciprocalCoeff)}/{reciprocalDenominator}{argSeparator}0)";
            string sqrtArgument = $"({xCellReference}+{ToLocalized(sqrtShift)})";
            string sqrtTerm =
                $"{ifFunctionName}({sqrtArgument}>=0{argSeparator}SQRT({sqrtArgument})*{ToLocalized(sqrtCoeff)}{argSeparator}0)";
            terms.Add($"({expTerm}+{polyTerm}+{linearTerm}+{constantTerm}+{logTerm}+{sinTerm}+{cosTerm}+{reciprocalTerm}+{sqrtTerm})");
        }
        // Fix: a bare "=" is not a valid spreadsheet formula; emit a constant
        // zero when no parameter row produced a term.
        if (terms.Count == 0)
            return "=0";
        return "=" + string.Join("+", terms);
    }

    /// <summary>
    /// Emits base^exponent wrapped so 0^nonpositive yields 0 instead of a
    /// spreadsheet error, and any other evaluation error collapses to 0 via
    /// IFERROR (matching the solver's safe-power semantics).
    /// </summary>
    private static string BuildSafePowExpression(string baseExpression, string exponentExpression, string ifFunctionName, string argSeparator)
    {
        return
            $"{ifFunctionName}({baseExpression}=0{argSeparator}" +
            $"{ifFunctionName}({exponentExpression}<=0{argSeparator}0{argSeparator}IFERROR({baseExpression}^{exponentExpression}{argSeparator}0)){argSeparator}" +
            $"IFERROR({baseExpression}^{exponentExpression}{argSeparator}0))";
    }
}
// Renders PNG exports (final fit, loss curve, parameter trajectories, and
// per-snapshot animation frames) via ScottPlot for a fitted Series.
private static class PlotExport
{
    // Truncates long function strings so plot titles stay readable.
    private static string CompactForPlot(string function)
    {
        const int maxLength = 160;
        if (function.Length <= maxLength)
            return function;
        return function.Substring(0, maxLength - 3) + "...";
    }
    // Plots the fitted curve against the known points and saves a PNG.
    public static void SaveFinalFitPlot(string filePath, Series series)
    {
        var knownPoints = series.GetKnownPoints();
        if (knownPoints.Count == 0)
            return;
        double minX = knownPoints.Min(point => point.Key);
        // +3 extends the x-range past the last known point so the curve's
        // extrapolation behavior is visible in the plot.
        double maxX = knownPoints.Max(point => point.Key) + 3;
        var (curveX, curveY) = SampleCurve(series, series.CloneCurrentParameters(), minX, maxX, 500);
        double[] knownX = knownPoints.Select(point => point.Key).ToArray();
        double[] knownY = knownPoints.Select(point => point.Value).ToArray();
        var plot = new ScottPlot.Plot();
        // Curve: line only (no markers); known points: markers only (no line).
        var curve = plot.Add.Scatter(curveX, curveY);
        curve.LegendText = "Fitted curve";
        curve.MarkerSize = 0;
        curve.LineWidth = 2;
        var known = plot.Add.Scatter(knownX, knownY);
        known.LineWidth = 0;
        known.MarkerSize = 8;
        known.LegendText = "Known points";
        string function = series.GetFunctionWithParameters(series.CloneCurrentParameters());
        string optimizerSummary = series.GetOptimizerSummary();
        plot.Title($"Curve Fit Result\n{optimizerSummary}\n{CompactForPlot(function)}");
        plot.XLabel("x");
        plot.YLabel("y");
        plot.ShowLegend();
        plot.SavePng(filePath, 1200, 700);
    }
    // Plots loss versus snapshot index and saves a PNG.
    public static void SaveLossPlot(string filePath, IReadOnlyList<Series.TuningSnapshot> snapshots)
    {
        if (snapshots.Count == 0)
            return;
        double[] steps = Enumerable.Range(0, snapshots.Count).Select(index => (double)index).ToArray();
        double[] losses = snapshots.Select(snapshot => snapshot.Loss).ToArray();
        var plot = new ScottPlot.Plot();
        var lossSeries = plot.Add.Scatter(steps, losses);
        lossSeries.LegendText = "Loss";
        plot.Title("Tuning Loss Over Time");
        plot.XLabel("Snapshot Step");
        plot.YLabel("Loss");
        plot.ShowLegend();
        plot.SavePng(filePath, 1200, 700);
    }
    // Plots every flattened parameter's trajectory over the snapshot sequence.
    // NOTE(review): assumes all snapshots share the first snapshot's parameter
    // count — confirm against how snapshots are captured.
    public static void SaveParameterPlot(string filePath, IReadOnlyList<Series.TuningSnapshot> snapshots)
    {
        if (snapshots.Count == 0)
            return;
        double[] steps = Enumerable.Range(0, snapshots.Count).Select(index => (double)index).ToArray();
        double[][] parameterRows = snapshots.Select(snapshot => CsvExport.FlattenParameters(snapshot.Parameters)).ToArray();
        int parameterCount = parameterRows[0].Length;
        var plot = new ScottPlot.Plot();
        // One scatter series per parameter slot, labeled p0, p1, ...
        for (int parameterIndex = 0; parameterIndex < parameterCount; parameterIndex++)
        {
            double[] values = parameterRows.Select(row => row[parameterIndex]).ToArray();
            var parameterSeries = plot.Add.Scatter(steps, values);
            parameterSeries.LegendText = $"p{parameterIndex}";
            parameterSeries.LineWidth = 1;
        }
        plot.Title("Parameter Trajectories");
        plot.XLabel("Snapshot Step");
        plot.YLabel("Parameter Value");
        plot.ShowLegend();
        plot.SavePng(filePath, 1400, 800);
    }
    // Renders one PNG frame per sampled snapshot (frame_0000.png, ...) into
    // directoryPath, showing the intermediate fit at that point in tuning.
    public static void SaveAnimationFrames(string directoryPath, Series series, IReadOnlyList<Series.TuningSnapshot> snapshots)
    {
        if (snapshots.Count == 0)
            return;
        Directory.CreateDirectory(directoryPath);
        var knownPoints = series.GetKnownPoints();
        if (knownPoints.Count == 0)
            return;
        double[] knownX = knownPoints.Select(point => point.Key).ToArray();
        double[] knownY = knownPoints.Select(point => point.Value).ToArray();
        double minX = knownPoints.Min(point => point.Key);
        double maxX = knownPoints.Max(point => point.Key) + 3;
        // Cap frame count so visualization remains responsive and file size reasonable.
        int maxFrames = 120;
        int stride = Math.Max(1, snapshots.Count / maxFrames);
        int frameIndex = 0;
        for (int i = 0; i < snapshots.Count; i += stride)
        {
            var snapshot = snapshots[i];
            // Evaluate the curve with the snapshot's (not the final) parameters.
            var (curveX, curveY) = SampleCurve(series, snapshot.Parameters, minX, maxX, 300);
            var plot = new ScottPlot.Plot();
            var curve = plot.Add.Scatter(curveX, curveY);
            curve.LegendText = "Current fit";
            curve.MarkerSize = 0;
            curve.LineWidth = 2;
            var known = plot.Add.Scatter(knownX, knownY);
            known.LineWidth = 0;
            known.MarkerSize = 8;
            known.LegendText = "Known points";
            string function = series.GetFunctionWithParameters(snapshot.Parameters);
            string optimizerSummary = series.GetOptimizerSummary();
            plot.Title($"Tuning Progress - restart {snapshot.Restart}, iter {snapshot.Iteration}, loss {snapshot.Loss:G6}\n{optimizerSummary}\n{CompactForPlot(function)}");
            plot.XLabel("x");
            plot.YLabel("y");
            plot.ShowLegend();
            // Zero-padded names keep frames sorted for animation assembly.
            string framePath = Path.Combine(directoryPath, $"frame_{frameIndex:D4}.png");
            plot.SavePng(framePath, 1200, 700);
            frameIndex++;
        }
    }
    // Samples the model on an evenly spaced grid of sampleCount x values in
    // [minX, maxX]; a single sample collapses to x = minX.
    private static (double[] xs, double[] ys) SampleCurve(Series series, double[][] parameters, double minX, double maxX, int sampleCount)
    {
        double[] xs = new double[sampleCount];
        double[] ys = new double[sampleCount];
        for (int i = 0; i < sampleCount; i++)
        {
            double t = sampleCount <= 1 ? 0 : i / (double)(sampleCount - 1);
            double x = minX + (maxX - minX) * t;
            xs[i] = x;
            ys[i] = series.EvaluateWithParameters(parameters, x);
        }
        return (xs, ys);
    }
}
// Runs a single named test case (default: Fibonacci) and writes its full
// visualization bundle (CSVs, plots, animation frames) to an output folder.
private static void RunVisualize(string? testName, string? outputDir, bool useEvolutionary, bool useStructuralSeeds = true, bool useProgressiveModelSearch = true)
{
    var testCases = GetTestCases();
    // Resolve the requested test case by name (case-insensitive).
    TestCase? testCase;
    if (string.IsNullOrWhiteSpace(testName))
        testCase = testCases.First(candidate => string.Equals(candidate.Name, "Fibonacci", StringComparison.OrdinalIgnoreCase));
    else
        testCase = testCases.FirstOrDefault(candidate => string.Equals(candidate.Name, testName, StringComparison.OrdinalIgnoreCase));
    if (testCase is null)
    {
        Console.WriteLine($"No test found with name '{testName}'.");
        ListTests();
        return;
    }
    // An explicit output directory wins; otherwise use a timestamped folder
    // under the system temp path.
    string targetDirectory;
    if (string.IsNullOrWhiteSpace(outputDir))
        targetDirectory = Path.Combine(Path.GetTempPath(), $"series_viz_{testCase.Name}_{DateTime.Now:yyyyMMdd_HHmmss}");
    else
        targetDirectory = Environment.ExpandEnvironmentVariables(outputDir.Trim());
    Directory.CreateDirectory(targetDirectory);
    Console.WriteLine($"Generating visualization for '{testCase.Name}'...");
    // Serial, fixed-seed run with snapshot capture enabled so the exports
    // below have a reproducible tuning trace to draw from.
    Series series = CreateSeriesForRun(progressMode: Series.ProgressOutputMode.Log, useParallelRestarts: false, randomSeed: 12345, useEvolutionary: useEvolutionary, useStructuralSeeds: useStructuralSeeds, useProgressiveModelSearch: useProgressiveModelSearch);
    Console.WriteLine($"Optimizer: {series.GetOptimizerSummary()}");
    series.CaptureTuningSnapshots = true;
    series.CaptureEvolutionTrace = useEvolutionary;
    series.SnapshotInterval = 50;
    series.SetPoints(testCase.Points);
    // Indexing the series triggers the fit; the value itself is unused here.
    _ = series[testCase.Expected.Keys.Max()];
    var snapshots = series.GetTuningSnapshots();
    string solvedFunction = series.GetFunctionWithParameters(series.CloneCurrentParameters());
    string optimizerSummary = series.GetOptimizerSummary();
    Console.WriteLine($"Writing Visualization for '{testCase.Name}' output to: {targetDirectory} ...");
    CsvExport.SaveKnownPointsCsv(Path.Combine(targetDirectory, "known_points.csv"), series.GetKnownPoints(), series.CloneCurrentParameters());
    File.WriteAllText(Path.Combine(targetDirectory, "function.txt"), $"Optimizer: {optimizerSummary}{Environment.NewLine}Found function: {solvedFunction}{Environment.NewLine}");
    if (useEvolutionary)
    {
        // Evolutionary mode additionally gets a readable stage-by-stage trace.
        StringBuilder evolutionBuilder = new StringBuilder();
        evolutionBuilder.AppendLine($"Optimizer: {optimizerSummary}");
        evolutionBuilder.AppendLine($"Final function: {solvedFunction}");
        evolutionBuilder.AppendLine("--- Evolution Trace ---");
        foreach (var entry in series.GetEvolutionTraceEntries())
        {
            evolutionBuilder.AppendLine($"[{entry.Stage} {entry.Step}] loss={entry.Loss.ToString("G12", CultureInfo.InvariantCulture)}");
            evolutionBuilder.AppendLine($" {entry.Function}");
        }
        File.WriteAllText(Path.Combine(targetDirectory, "evolution.txt"), evolutionBuilder.ToString());
    }
    CsvExport.SaveSnapshotsCsv(Path.Combine(targetDirectory, "tuning_snapshots.csv"), snapshots);
    PlotExport.SaveFinalFitPlot(Path.Combine(targetDirectory, "final_fit.png"), series);
    PlotExport.SaveLossPlot(Path.Combine(targetDirectory, "loss.png"), snapshots);
    PlotExport.SaveParameterPlot(Path.Combine(targetDirectory, "parameters.png"), snapshots);
    PlotExport.SaveAnimationFrames(Path.Combine(targetDirectory, "frames"), series, snapshots);
    Console.WriteLine(useEvolutionary
        ? "Done. Files: known_points.csv, function.txt, evolution.txt, tuning_snapshots.csv, final_fit.png, loss.png, parameters.png, frames/*.png"
        : "Done. Files: known_points.csv, function.txt, tuning_snapshots.csv, final_fit.png, loss.png, parameters.png, frames/*.png");
}
// Canonicalizes a function string for comparison: strips all whitespace and
// lower-cases the result, so formatting-only differences compare equal.
private static string NormalizeFunctionString(string text)
{
    StringBuilder compact = new StringBuilder(text.Length);
    foreach (char ch in text)
    {
        if (!char.IsWhiteSpace(ch))
            compact.Append(ch);
    }
    return compact.ToString().ToLowerInvariant();
}
// Builds a Series configured for one run. Parallel restarts and the
// evolutionary-fallback prompt are both disabled when evolutionary mode is
// selected; without structural seeds the search is loosened/enlarged.
private static Series CreateSeriesForRun(
    Series.ProgressOutputMode progressMode,
    bool useParallelRestarts,
    int? randomSeed,
    bool allowEvolutionaryPrompt = false,
    bool useEvolutionary = false,
    bool useStructuralSeeds = true,
    bool useProgressiveModelSearch = true)
{
    // Object-initializer form; property order matches the original assignments.
    Series series = new Series
    {
        ProgressMode = progressMode,
        UseParallelRestarts = useParallelRestarts && !useEvolutionary,
        RandomSeed = randomSeed,
        AllowEvolutionaryFallbackPrompt = allowEvolutionaryPrompt && !useEvolutionary,
        UseStructuralSeeds = useStructuralSeeds,
        UseProgressiveModelSearch = useProgressiveModelSearch,
        RequirePerfectFit = true,
        OptimizerMode = useEvolutionary ? Series.TuningMode.Evolutionary : Series.TuningMode.Gradient,
    };
    if (useStructuralSeeds)
        return series;
    // No structural seeds: relax the tolerance, and give the evolutionary
    // search a larger, more exploratory population.
    series.ConvergenceTolerance = Math.Max(series.ConvergenceTolerance, 1e-5);
    if (useEvolutionary)
    {
        series.GenerationCount = Math.Max(series.GenerationCount, 320);
        series.PopulationSize = Math.Max(series.PopulationSize, 96);
        series.EliteCount = Math.Max(series.EliteCount, 16);
        series.MutationRate = Math.Max(series.MutationRate, 0.25);
    }
    return series;
}
// Lightweight test harness to guard future refactors.
// Runs every (or one named) sequence test case through a fresh solver,
// checks predicted values and perfect-fit status, then appends the
// formatting-specific (and optionally evolutionary-specific) check suites
// and prints per-section plus overall pass/fail totals.
private static void RunTests(string? onlyTestName = null, bool useEvolutionary = false, bool useProgressiveModelSearch = true)
{
    Console.WriteLine("Running tests...");
    // Created only to print the optimizer-configuration banner; each case
    // below gets its own Series instance with identical settings.
    Series summarySeries = CreateSeriesForRun(progressMode: Series.ProgressOutputMode.None, useParallelRestarts: true, randomSeed: 12345, useEvolutionary: useEvolutionary, useProgressiveModelSearch: useProgressiveModelSearch);
    Console.WriteLine($"Optimizer: {summarySeries.GetOptimizerSummary()}");
    int corePassed = 0;
    int coreFailed = 0;
    var testCases = GetTestCases();
    if (!string.IsNullOrWhiteSpace(onlyTestName))
    {
        // Optional filter: run only the named case (case-insensitive match).
        testCases = testCases
            .Where(testCase => string.Equals(testCase.Name, onlyTestName, StringComparison.OrdinalIgnoreCase))
            .ToList();
        if (testCases.Count == 0)
        {
            Console.WriteLine($"No test found with name '{onlyTestName}'.");
            ListTests();
            return;
        }
    }
    foreach (var testCase in testCases)
    {
        // Fresh solver per case, fixed seed for reproducible runs.
        Series series = CreateSeriesForRun(progressMode: Series.ProgressOutputMode.None, useParallelRestarts: true, randomSeed: 12345, useEvolutionary: useEvolutionary, useProgressiveModelSearch: useProgressiveModelSearch);
        series.SetPoints(testCase.Points);
        bool casePass = true;
        foreach (var expected in testCase.Expected)
        {
            double actual;
            try
            {
                // Indexing triggers fitting; throws when no perfect fit exists.
                actual = series[expected.Key];
            }
            catch (InvalidOperationException ex)
            {
                casePass = false;
                Console.WriteLine($"[FAIL] {testCase.Name}: no perfect fit. {ex.Message}");
                break;
            }
            // Tolerance comparison: predictions are floating-point fits.
            if (Math.Abs(actual - expected.Value) > 1e-6)
            {
                casePass = false;
                Console.WriteLine($"[FAIL] {testCase.Name}: x={expected.Key}, expected={expected.Value}, actual={actual}");
            }
        }
        // Even when all values match, the solver must report a perfect fit.
        if (casePass && !series.HasPerfectFit)
        {
            casePass = false;
            Console.WriteLine($"[FAIL] {testCase.Name}: expected perfect fit but solver status was '{series.LastFitStatusMessage}'");
        }
        // Optional symbolic check: compare the recovered function string.
        // Mismatch only fails the case when RequireFunctionMatch is set;
        // otherwise it is reported as informational.
        if (casePass && !string.IsNullOrWhiteSpace(testCase.ExpectedFunction))
        {
            string actualFunction = series.GetFunctionWithParameters(series.CloneCurrentParameters());
            bool functionMatch = NormalizeFunctionString(actualFunction) == NormalizeFunctionString(testCase.ExpectedFunction);
            if (testCase.RequireFunctionMatch && !functionMatch)
            {
                casePass = false;
                Console.WriteLine($"[FAIL] {testCase.Name}: function mismatch.");
                Console.WriteLine($"  expected: {testCase.ExpectedFunction}");
                Console.WriteLine($"  actual  : {actualFunction}");
            }
            else
            {
                Console.WriteLine($"[INFO] {testCase.Name}: function match={(functionMatch ? "yes" : "no")}");
            }
        }
        if (casePass)
        {
            Console.WriteLine($"[PASS] {testCase.Name}");
            corePassed++;
        }
        else
        {
            coreFailed++;
        }
    }
    // Additional suites: formatting checks always run; evolutionary checks
    // only when evolutionary mode is under test.
    var formattingChecks = RunFormattingSpecificTests();
    int evolutionaryPassed = 0;
    int evolutionaryFailed = 0;
    if (useEvolutionary)
    {
        var evolutionaryChecks = RunEvolutionarySpecificTests();
        evolutionaryPassed = evolutionaryChecks.passed;
        evolutionaryFailed = evolutionaryChecks.failed;
    }
    int passed = corePassed + formattingChecks.passed + evolutionaryPassed;
    int failed = coreFailed + formattingChecks.failed + evolutionaryFailed;
    string sectionSummary =
        $"Section summary: core={corePassed} passed, {coreFailed} failed; " +
        $"formatting={formattingChecks.passed} passed, {formattingChecks.failed} failed" +
        (useEvolutionary ? $"; evolutionary={evolutionaryPassed} passed, {evolutionaryFailed} failed" : string.Empty) +
        ".";
    Console.WriteLine(sectionSummary);
    Console.WriteLine($"Test summary: {passed} passed, {failed} failed.");
}
// Targeted function-string tests for formatter behavior that may not be stable
// when inferred from data (because multiple equivalent symbolic forms can exist).
// Returns (passed, failed) counts for the section summary.
private static (int passed, int failed) RunFormattingSpecificTests()
{
    Console.WriteLine("Running formatting-specific checks...");
    const int logBaseIndex = 7;
    const int trigFrequencyIndex = 8;
    const int trigPhaseIndex = 9;
    const int sinCoeffIndex = 10;
    const int cosCoeffIndex = 11;
    int passed = 0;
    int failed = 0;
    // Shared driver for one trig-phase formatting check: builds a parameter
    // set with a single unit-amplitude trig term at the given phase and
    // verifies the formatter emits the expected canonical string.
    // (The two original checks were copy-paste duplicates of this body.)
    void RunTrigPhaseCheck(string checkName, double trigPhase, int trigCoeffIndex, string expected)
    {
        try
        {
            Series series = new Series();
            double[][] parameters = new double[][]
            {
                new double[16],
                new double[16],
                new double[16]
            };
            // logBase must be a valid base (e) so the zero log term formats away.
            parameters[0][logBaseIndex] = Math.E;
            parameters[0][trigFrequencyIndex] = 1;
            parameters[0][trigPhaseIndex] = trigPhase;
            parameters[0][trigCoeffIndex] = 1;
            string function = series.GetFunctionWithParameters(parameters);
            // Compare whitespace/case-insensitively via the shared normalizer.
            if (NormalizeFunctionString(function) != NormalizeFunctionString(expected))
                throw new InvalidOperationException($"expected '{expected}', got '{function}'");
            Console.WriteLine($"[PASS] {checkName}");
            passed++;
        }
        catch (Exception ex)
        {
            Console.WriteLine($"[FAIL] {checkName}: {ex.Message}");
            failed++;
        }
    }
    // f(x) = sin(x + pi/2)
    RunTrigPhaseCheck("TrigPhasePositiveFormatting", Math.PI / 2.0, sinCoeffIndex, "f(x) = sin(x + (1/2)*pi)");
    // f(x) = cos(x - pi/2)
    RunTrigPhaseCheck("TrigPhaseNegativeFormatting", -Math.PI / 2.0, cosCoeffIndex, "f(x) = cos(x - (1/2)*pi)");
    return (passed, failed);
}
| // Extra checks that specifically validate evolutionary-mode wiring and behavior, | |
| // beyond the shared sequence/function-match regression cases above. | |
| private static (int passed, int failed) RunEvolutionarySpecificTests() | |
| { | |
| Console.WriteLine("Running evolutionary-specific checks..."); | |
| int passed = 0; | |
| int failed = 0; | |
| try | |
| { | |
| Series summarySeries = CreateSeriesForRun(progressMode: Series.ProgressOutputMode.None, useParallelRestarts: false, randomSeed: 12345, useEvolutionary: true); | |
| string summary = summarySeries.GetOptimizerSummary(); | |
| if (!summary.StartsWith("Evolutionary(", StringComparison.Ordinal)) | |
| throw new InvalidOperationException($"unexpected summary '{summary}'"); | |
| Console.WriteLine("[PASS] EvolutionarySummary"); | |
| passed++; | |
| } | |
| catch (Exception ex) | |
| { | |
| Console.WriteLine($"[FAIL] EvolutionarySummary: {ex.Message}"); | |
| failed++; | |
| } | |
| try | |
| { | |
| Series series = CreateSeriesForRun(progressMode: Series.ProgressOutputMode.None, useParallelRestarts: false, randomSeed: 12345, useEvolutionary: true); | |
| series.EnableLocalRefinement = false; | |
| series.GenerationCount = 1; | |
| series.PopulationSize = 16; | |
| series.EliteCount = 4; | |
| series.SetPoints(CreateIndexedPoints(5.0, 5.0, 5.0, 5.0, 5.0, 5.0)); | |
| double predicted = series[6]; | |
| if (Math.Abs(predicted - 5.0) > 1e-6) | |
| throw new InvalidOperationException($"expected 5 at x=6, got {predicted}"); | |
| if (!series.HasPerfectFit) | |
| throw new InvalidOperationException($"expected perfect fit, status='{series.LastFitStatusMessage}'"); | |
| if (!series.LastFitStatusMessage.Contains("evolutionary", StringComparison.OrdinalIgnoreCase)) | |
| throw new InvalidOperationException($"expected evolutionary status, got '{series.LastFitStatusMessage}'"); | |
| Console.WriteLine("[PASS] EvolutionaryNoRefinementConstant"); | |
| passed++; | |
| } | |
| catch (Exception ex) | |
| { | |
| Console.WriteLine($"[FAIL] EvolutionaryNoRefinementConstant: {ex.Message}"); | |
| failed++; | |
| } | |
| try | |
| { | |
| Series series = CreateSeriesForRun(progressMode: Series.ProgressOutputMode.None, useParallelRestarts: false, randomSeed: 12345, useEvolutionary: true); | |
| series.SetPoints(CreateIndexedPoints(1.0, 2.0, 4.0, 8.0, 16.0, 32.0, 64.0, 128.0, 256.0, 512.0, 1024.0)); | |
| double predicted = series[11]; | |
| if (Math.Abs(predicted - 2048.0) > 1e-6) | |
| throw new InvalidOperationException($"expected 2048 at x=11, got {predicted}"); | |
| if (!series.HasPerfectFit) | |
| throw new InvalidOperationException($"expected perfect fit, status='{series.LastFitStatusMessage}'"); | |
| if (!series.LastFitStatusMessage.Contains("evolutionary", StringComparison.OrdinalIgnoreCase)) | |
| throw new InvalidOperationException($"expected evolutionary status, got '{series.LastFitStatusMessage}'"); | |
| Console.WriteLine("[PASS] EvolutionaryStatusMessage"); | |
| passed++; | |
| } | |
| catch (Exception ex) | |
| { | |
| Console.WriteLine($"[FAIL] EvolutionaryStatusMessage: {ex.Message}"); | |
| failed++; | |
| } | |
| try | |
| { | |
| Series first = CreateSeriesForRun(progressMode: Series.ProgressOutputMode.None, useParallelRestarts: false, randomSeed: 424242, useEvolutionary: true); | |
| first.SetPoints(CreateIndexedPoints(1.0, 2.0, 4.0, 8.0, 16.0, 32.0, 64.0, 128.0, 256.0, 512.0, 1024.0)); | |
| double firstPrediction = first[11]; | |
| string firstFunction = first.GetFunctionWithParameters(first.CloneCurrentParameters()); | |
| Series second = CreateSeriesForRun(progressMode: Series.ProgressOutputMode.None, useParallelRestarts: false, randomSeed: 424242, useEvolutionary: true); | |
| second.SetPoints(CreateIndexedPoints(1.0, 2.0, 4.0, 8.0, 16.0, 32.0, 64.0, 128.0, 256.0, 512.0, 1024.0)); | |
| double secondPrediction = second[11]; | |
| string secondFunction = second.GetFunctionWithParameters(second.CloneCurrentParameters()); | |
| if (Math.Abs(firstPrediction - secondPrediction) > 1e-9) | |
| throw new InvalidOperationException($"same seed mismatch: {firstPrediction} vs {secondPrediction}"); | |
| if (NormalizeFunctionString(firstFunction) != NormalizeFunctionString(secondFunction)) | |
| throw new InvalidOperationException($"same seed function mismatch: '{firstFunction}' vs '{secondFunction}'"); | |
| Console.WriteLine("[PASS] EvolutionaryDeterministicSeed"); | |
| passed++; | |
| } | |
| catch (Exception ex) | |
| { | |
| Console.WriteLine($"[FAIL] EvolutionaryDeterministicSeed: {ex.Message}"); | |
| failed++; | |
| } | |
| try | |
| { | |
| Series series = CreateSeriesForRun(progressMode: Series.ProgressOutputMode.None, useParallelRestarts: false, randomSeed: 12345, useEvolutionary: true); | |
| // Robust alternating-sign family should be captured by evolutionary structural seed. | |
| series.SetPoints(CreateIndexedPoints(3.0, -3.0, 3.0, -3.0, 3.0, -3.0, 3.0, -3.0, 3.0, -3.0, 3.0)); | |
| double x11 = series[11]; | |
| double x12 = series[12]; | |
| double x13 = series[13]; | |
| if (Math.Abs(x11 - (-3.0)) > 1e-6 || Math.Abs(x12 - 3.0) > 1e-6 || Math.Abs(x13 - (-3.0)) > 1e-6) | |
| throw new InvalidOperationException($"unexpected predictions: x11={x11}, x12={x12}, x13={x13}"); | |
| if (!series.HasPerfectFit) | |
| throw new InvalidOperationException($"expected perfect fit, status='{series.LastFitStatusMessage}'"); | |
| if (!series.LastFitStatusMessage.Contains("evolutionary", StringComparison.OrdinalIgnoreCase)) | |
| throw new InvalidOperationException($"expected evolutionary status, got '{series.LastFitStatusMessage}'"); | |
| Console.WriteLine("[PASS] EvolutionaryAlternatingAmplitude"); | |
| passed++; | |
| } | |
| catch (Exception ex) | |
| { | |
| Console.WriteLine($"[FAIL] EvolutionaryAlternatingAmplitude: {ex.Message}"); | |
| failed++; | |
| } | |
| try | |
| { | |
| // Deterministic "hard" sequence: intentionally not aligned with structural seeds, | |
| // so the search should explore multiple generations before settling. | |
| Series series = CreateSeriesForRun(progressMode: Series.ProgressOutputMode.None, useParallelRestarts: false, randomSeed: 314159, useEvolutionary: true); | |
| series.RequirePerfectFit = false; | |
| series.CaptureEvolutionTrace = true; | |
| series.GenerationCount = 20; | |
| series.PopulationSize = 40; | |
| series.EliteCount = 8; | |
| series.SetPoints(CreateIndexedPoints(1.0, 3.0, 6.0, 11.0, 20.0, 37.0, 70.0, 135.0, 264.0, 521.0, 1034.0)); | |
| _ = series[11]; | |
| var generationSteps = series.GetEvolutionTraceEntries() | |
| .Where(entry => string.Equals(entry.Stage, "generation", StringComparison.OrdinalIgnoreCase)) | |
| .Select(entry => entry.Step) | |
| .Distinct() | |
| .OrderBy(step => step) | |
| .ToList(); | |
| if (generationSteps.Count < 2 || generationSteps.Max() <= 1) | |
| throw new InvalidOperationException($"expected multi-generation exploration, observed steps: {string.Join(",", generationSteps)}"); | |
| Console.WriteLine("[PASS] EvolutionaryMultiGenerationExploration"); | |
| passed++; | |
| } | |
| catch (Exception ex) | |
| { | |
| Console.WriteLine($"[FAIL] EvolutionaryMultiGenerationExploration: {ex.Message}"); | |
| failed++; | |
| } | |
| return (passed, failed); | |
| } | |
| // Simple benchmark mode to compare serial vs parallel tuning throughput. | |
| private static void RunBenchmark(int runs, bool useEvolutionary = false, bool useStructuralSeeds = true, bool useProgressiveModelSearch = true) | |
| { | |
| Console.WriteLine($"Running benchmark ({runs} runs per mode)..."); | |
| double[] points = new double[] { 0.0, 1.0, 1.0, 2.0, 3.0, 5.0, 8.0, 13.0, 21.0, 34.0, 55.0 }; | |
| foreach (bool parallel in useEvolutionary ? new[] { false } : new[] { false, true }) | |
| { | |
| string mode = parallel ? "parallel" : "serial"; | |
| if (useEvolutionary) | |
| mode = "evolutionary"; | |
| Series summarySeries = CreateSeriesForRun(progressMode: Series.ProgressOutputMode.None, useParallelRestarts: parallel, randomSeed: 12345, useEvolutionary: useEvolutionary, useStructuralSeeds: useStructuralSeeds, useProgressiveModelSearch: useProgressiveModelSearch); | |
| Console.WriteLine($"Optimizer ({mode}): {summarySeries.GetOptimizerSummary()}"); | |
| // Warm-up run (JIT/caches) | |
| { | |
| Series warmup = CreateSeriesForRun(progressMode: Series.ProgressOutputMode.None, useParallelRestarts: parallel, randomSeed: 12345, useEvolutionary: useEvolutionary, useStructuralSeeds: useStructuralSeeds, useProgressiveModelSearch: useProgressiveModelSearch); | |
| warmup.SetPoints(points); | |
| _ = warmup[11]; | |
| } | |
| double totalMs = 0; | |
| for (int run = 0; run < runs; run++) | |
| { | |
| Series series = CreateSeriesForRun(progressMode: Series.ProgressOutputMode.None, useParallelRestarts: parallel, randomSeed: 12345 + run, useEvolutionary: useEvolutionary, useStructuralSeeds: useStructuralSeeds, useProgressiveModelSearch: useProgressiveModelSearch); | |
| series.SetPoints(points); | |
| var watch = System.Diagnostics.Stopwatch.StartNew(); | |
| double value; | |
| try | |
| { | |
| value = series[11]; | |
| } | |
| catch (InvalidOperationException) | |
| { | |
| Console.WriteLine($"[WARN] {mode} run {run + 1}: no perfect fit"); | |
| continue; | |
| } | |
| watch.Stop(); | |
| totalMs += watch.Elapsed.TotalMilliseconds; | |
| if (value != 89) | |
| Console.WriteLine($"[WARN] {mode} run {run + 1}: unexpected result at x=11 -> {value}"); | |
| } | |
| double averageMs = totalMs / (double)runs; | |
| Console.WriteLine($"Benchmark {mode}: avg {averageMs:0.00} ms/run"); | |
| } | |
| } | |
| private sealed class CommandLineOptions | |
| { | |
| public bool ListTests { get; private set; } | |
| public bool Visualize { get; private set; } | |
| public bool RunTests { get; private set; } | |
| public bool Benchmark { get; private set; } | |
| public bool UseEvolutionary { get; private set; } | |
| public bool UseStructuralSeeds { get; private set; } = true; | |
| public bool UseProgressiveModelSearch { get; private set; } = true; | |
| public string? TestName { get; private set; } | |
| public string? OutputDir { get; private set; } | |
| public int Runs { get; private set; } = 5; | |
| public static CommandLineOptions Parse(string[] args) | |
| { | |
| CommandLineOptions options = new CommandLineOptions(); | |
| HashSet<string> argSet = args.Select(arg => arg.Trim().ToLowerInvariant()).ToHashSet(); | |
| options.ListTests = argSet.Contains("--list-tests"); | |
| options.Visualize = argSet.Contains("--visualize") || argSet.Contains("--viz"); | |
| options.Benchmark = argSet.Contains("--benchmark"); | |
| options.UseEvolutionary = argSet.Contains("--evolutionary") || argSet.Contains("--evo"); | |
| if (argSet.Contains("--no-structural-seeds") || argSet.Contains("--no-seeds")) | |
| options.UseStructuralSeeds = false; | |
| if (argSet.Contains("--no-progressive")) | |
| options.UseProgressiveModelSearch = false; | |
| if (argSet.Contains("--progressive")) | |
| options.UseProgressiveModelSearch = true; | |
| foreach (string arg in args) | |
| { | |
| if (arg.StartsWith("--test=", StringComparison.OrdinalIgnoreCase)) | |
| { | |
| options.TestName = arg.Substring(7).Trim(); | |
| } | |
| else if (arg.StartsWith("--out=", StringComparison.OrdinalIgnoreCase)) | |
| { | |
| options.OutputDir = arg.Substring(6).Trim(); | |
| } | |
| else if (arg.StartsWith("--runs=", StringComparison.OrdinalIgnoreCase) && int.TryParse(arg.Substring(7), out int parsedRuns)) | |
| { | |
| options.Runs = Math.Max(1, parsedRuns); | |
| } | |
| } | |
| options.RunTests = argSet.Contains("--tests") || argSet.Contains("--regression") || !string.IsNullOrWhiteSpace(options.TestName); | |
| return options; | |
| } | |
| } | |
| // Usage: | |
| // dotnet run ./Series.cs -> demo run | |
| // dotnet run ./Series.cs --list-tests -> list available test names | |
| // dotnet run ./Series.cs --tests -> run all tests | |
| // dotnet run ./Series.cs --test=Name -> run one named test | |
| // dotnet run ./Series.cs --regression -> alias for --tests | |
| // dotnet run ./Series.cs --benchmark -> run benchmark (default 5 runs/mode) | |
| // dotnet run ./Series.cs --benchmark --runs=10 | |
| // dotnet run ./Series.cs --visualize -> create plots/csv for default test | |
| // dotnet run ./Series.cs --visualize --test=Lucas -> visualize a single named test | |
| // dotnet run ./Series.cs --visualize --out=%TEMP%\series_viz | |
| // dotnet run ./Series.cs --evolutionary -> use Series evolutionary optimizer mode | |
| // dotnet run ./Series.cs --tests --evolutionary -> run tests with evolutionary tuner | |
| // dotnet run ./Series.cs --evolutionary --no-structural-seeds | |
| // -> force broader search (disable structural seed shortcuts) | |
| // dotnet run ./Series.cs --no-progressive -> disable stage-1 progressive model search | |
| // | |
| // In-code minimal evolutionary usage: | |
| // var evo = CreateSeriesForRun(Series.ProgressOutputMode.None, useParallelRestarts: false, randomSeed: 12345, useEvolutionary: true); | |
| // evo.SetPoints(new double[] { 0.0, 1.0, 1.0, 2.0, 3.0, 5.0, 8.0, 13.0, 21.0, 34.0, 55.0 }); | |
| // var nextValue = evo[11]; | |
| public static void Main(string[] args) | |
| { | |
| CommandLineOptions options = CommandLineOptions.Parse(args); | |
| if (options.ListTests) | |
| { | |
| ListTests(); | |
| return; | |
| } | |
| if (options.Visualize) | |
| { | |
| RunVisualize(options.TestName, options.OutputDir, options.UseEvolutionary, options.UseStructuralSeeds, options.UseProgressiveModelSearch); | |
| return; | |
| } | |
| if (options.RunTests) | |
| { | |
| RunTests(options.TestName, options.UseEvolutionary, options.UseProgressiveModelSearch); | |
| return; | |
| } | |
| if (options.Benchmark) | |
| { | |
| RunBenchmark(options.Runs, options.UseEvolutionary, options.UseStructuralSeeds, options.UseProgressiveModelSearch); | |
| return; | |
| } | |
| Series series = CreateSeriesForRun( | |
| progressMode: Series.ProgressOutputMode.Log, | |
| useParallelRestarts: true, | |
| randomSeed: null, | |
| allowEvolutionaryPrompt: true, | |
| useEvolutionary: options.UseEvolutionary, | |
| useStructuralSeeds: options.UseStructuralSeeds, | |
| useProgressiveModelSearch: options.UseProgressiveModelSearch); | |
| Console.WriteLine($"Optimizer: {series.GetOptimizerSummary()}"); | |
| series.SetPoints(new double[] { 0.0, 1.0, 1.0, 2.0, 3.0, 5.0, 8.0, 13.0, 21.0, 34.0, 55.0 }); | |
| // Measure the time taken to tune the parameters and calculate the values. | |
| var watch = System.Diagnostics.Stopwatch.StartNew(); | |
| Console.WriteLine("Tuning parameters..."); | |
| try | |
| { | |
| Console.WriteLine($"The value at x=11 is: {series[11]} (should be 89)"); | |
| } | |
| catch (InvalidOperationException ex) | |
| { | |
| Console.WriteLine(ex.Message); | |
| return; | |
| } | |
| watch.Stop(); | |
| Console.WriteLine($"Tuning completed in {watch.ElapsedMilliseconds} ms."); | |
| Console.WriteLine($"The value at x=12 is: {series[12]} (should be 144)"); | |
| Console.WriteLine($"The value at x=13 is: {series[13]} (should be 233)"); | |
| Console.WriteLine($"Found function: {series}"); | |
| if (!options.UseEvolutionary) | |
| { | |
| // Demo: direct in-code usage of the evolutionary optimizer mode. | |
| // (A minimal snippet is listed in the Usage section above.) | |
| // This runs quietly so the default demo output stays readable. | |
| Console.WriteLine(); | |
| Console.WriteLine("Evolutionary usage demo:"); | |
| Series evolutionaryDemo = CreateSeriesForRun( | |
| progressMode: Series.ProgressOutputMode.None, | |
| useParallelRestarts: false, | |
| randomSeed: 12345, | |
| useEvolutionary: true, | |
| useStructuralSeeds: options.UseStructuralSeeds, | |
| useProgressiveModelSearch: options.UseProgressiveModelSearch); | |
| evolutionaryDemo.CaptureEvolutionTrace = true; | |
| evolutionaryDemo.SetPoints(new double[] { 0.0, 1.0, 1.0, 2.0, 3.0, 5.0, 8.0, 13.0, 21.0, 34.0, 55.0 }); | |
| var evolutionaryWatch = System.Diagnostics.Stopwatch.StartNew(); | |
| try | |
| { | |
| Console.WriteLine($"[evo] Optimizer: {evolutionaryDemo.GetOptimizerSummary()}"); | |
| Console.WriteLine($"[evo] The value at x=11 is: {evolutionaryDemo[11]} (should be 89)"); | |
| Console.WriteLine($"[evo] Found function: {evolutionaryDemo.GetFunctionWithParameters(evolutionaryDemo.CloneCurrentParameters())}"); | |
| Console.WriteLine($"[evo] Trace entries captured: {evolutionaryDemo.GetEvolutionTraceEntries().Count}"); | |
| } | |
| catch (InvalidOperationException ex) | |
| { | |
| Console.WriteLine($"[evo] {ex.Message}"); | |
| } | |
| evolutionaryWatch.Stop(); | |
| Console.WriteLine($"[evo] Completed in {evolutionaryWatch.ElapsedMilliseconds} ms."); | |
| } | |
| } | |
| } |
Author
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment
Has tests, a benchmark, and can visualize the results using nice little plots:



Created in VSCode using Codex 5.3 in about a day. It was fun. :)