Skip to content

Commit cb98ab0

Browse files
committed
Removed unnecessary StdHandlers module and cleaned up the source code.
1 parent 56d4455 commit cb98ab0

14 files changed: +174 −141 lines changed

EasyMLCore/Data/FeatureFilter/FeatureFilterBase.cs

+1-1
Original file line numberDiff line numberDiff line change
@@ -75,7 +75,7 @@ protected FeatureFilterBase(FeatureFilterBase source)
7575
{
7676
ValueType = source.ValueType;
7777
Use = source.Use;
78-
SamplesStat = new BasicStat(source.SamplesStat);
78+
SamplesStat = source.SamplesStat.DeepClone();
7979
return;
8080
}
8181

EasyMLCore/Data/TaskErrStat/CategoricalErrStat.cs

+3-3
Original file line numberDiff line numberDiff line change
@@ -68,9 +68,9 @@ public CategoricalErrStat(IComputableTaskSpecific computableUnit, SampleDataset
6868
public CategoricalErrStat(CategoricalErrStat source)
6969
: base(source)
7070
{
71-
ClassificationLogLossStat = new BasicStat(source.ClassificationLogLossStat);
72-
WrongClassificationStat = new BasicStat(source.WrongClassificationStat);
73-
LowProbabilityClassificationStat = new BasicStat(source.LowProbabilityClassificationStat);
71+
ClassificationLogLossStat = source.ClassificationLogLossStat.DeepClone();
72+
WrongClassificationStat = source.WrongClassificationStat.DeepClone();
73+
LowProbabilityClassificationStat = source.LowProbabilityClassificationStat.DeepClone();
7474
return;
7575
}
7676

EasyMLCore/Data/TaskErrStat/MultipleDecisionErrStat.cs

+4-4
Original file line numberDiff line numberDiff line change
@@ -91,10 +91,10 @@ public MultipleDecisionErrStat(MultipleDecisionErrStat source)
9191
FeatureBinDecisionStats[i] = new SingleDecisionErrStat(source.FeatureBinDecisionStats[i]);
9292
}
9393
TotalBinFalseFlagStat = new BasicStat[2];
94-
TotalBinFalseFlagStat[0] = new BasicStat(source.TotalBinFalseFlagStat[0]);
95-
TotalBinFalseFlagStat[1] = new BasicStat(source.TotalBinFalseFlagStat[1]);
96-
TotalBinWrongDecisionStat = new BasicStat(source.TotalBinWrongDecisionStat);
97-
TotalBinLogLossStat = new BasicStat(source.TotalBinLogLossStat);
94+
TotalBinFalseFlagStat[0] = source.TotalBinFalseFlagStat[0].DeepClone();
95+
TotalBinFalseFlagStat[1] = source.TotalBinFalseFlagStat[1].DeepClone();
96+
TotalBinWrongDecisionStat = source.TotalBinWrongDecisionStat.DeepClone();
97+
TotalBinLogLossStat = source.TotalBinLogLossStat.DeepClone();
9898
return;
9999
}
100100

EasyMLCore/Data/TaskErrStat/MultiplePrecisionErrStat.cs

+1-1
Original file line numberDiff line numberDiff line change
@@ -69,7 +69,7 @@ public MultiplePrecisionErrStat(MultiplePrecisionErrStat source)
6969
{
7070
FeaturePrecisionStats[i] = new SinglePrecisionErrStat(source.FeaturePrecisionStats[i]);
7171
}
72-
TotalPrecisionStat = new BasicStat(source.TotalPrecisionStat);
72+
TotalPrecisionStat = source.TotalPrecisionStat.DeepClone();
7373
return;
7474
}
7575

EasyMLCore/Data/TaskErrStat/SingleDecisionErrStat.cs

+5-5
Original file line numberDiff line numberDiff line change
@@ -86,12 +86,12 @@ public SingleDecisionErrStat(IComputableTaskSpecific computableUnit, SampleDatas
8686
public SingleDecisionErrStat(SingleDecisionErrStat source)
8787
: base(source)
8888
{
89-
IdealStat = new BasicStat(source.IdealStat);
89+
IdealStat = source.IdealStat.DeepClone();
9090
FalseFlagStat = new BasicStat[2];
91-
FalseFlagStat[0] = new BasicStat(source.FalseFlagStat[0]);
92-
FalseFlagStat[1] = new BasicStat(source.FalseFlagStat[1]);
93-
WrongDecisionStat = new BasicStat(source.WrongDecisionStat);
94-
LogLossStat = new BasicStat(source.LogLossStat);
91+
FalseFlagStat[0] = source.FalseFlagStat[0].DeepClone();
92+
FalseFlagStat[1] = source.FalseFlagStat[1].DeepClone();
93+
WrongDecisionStat = source.WrongDecisionStat.DeepClone();
94+
LogLossStat = source.LogLossStat.DeepClone();
9595
return;
9696
}
9797

EasyMLCore/Data/TaskErrStat/SinglePrecisionErrStat.cs

+1-1
Original file line numberDiff line numberDiff line change
@@ -57,7 +57,7 @@ public SinglePrecisionErrStat(IComputableTaskSpecific computableUnit, SampleData
5757
public SinglePrecisionErrStat(SinglePrecisionErrStat source)
5858
: base(source)
5959
{
60-
FeaturePrecisionStat = new BasicStat(source.FeaturePrecisionStat);
60+
FeaturePrecisionStat = source.FeaturePrecisionStat.DeepClone();
6161
return;
6262
}
6363

EasyMLCore/MLP/Model/MLPModelConfidenceMetrics.cs

+1-1
Original file line numberDiff line numberDiff line change
@@ -77,7 +77,7 @@ BasicStat featureConfidencesStat
7777
CategoricalAccuracy = categoricalAccuracy;
7878
BinaryAccuracy = binaryAccuracy;
7979
FeatureConfidences = (double[])featureConfidences.Clone();
80-
FeatureConfidencesStat = new BasicStat(featureConfidencesStat);
80+
FeatureConfidencesStat = featureConfidencesStat.DeepClone();
8181
return;
8282
}
8383

EasyMLCore/MLP/Model/Network/MLPEngine.cs

+2-2
Original file line numberDiff line numberDiff line change
@@ -90,8 +90,8 @@ public MLPEngine(MLPEngine source)
9090
LayerCollection.Add(layer.DeepClone());
9191
}
9292
_flatWeights = (double[])source._flatWeights.Clone();
93-
HLWeightsStat = new BasicStat(source.HLWeightsStat);
94-
OLWeightsStat = new BasicStat(source.OLWeightsStat);
93+
HLWeightsStat = source.HLWeightsStat.DeepClone();
94+
OLWeightsStat = source.OLWeightsStat.DeepClone();
9595
return;
9696
}
9797

EasyMLCore/MLP/Model/RVFL/RVFLPreprocessor.cs

+16-19
Original file line numberDiff line numberDiff line change
@@ -121,8 +121,8 @@ public RVFLPreprocessor(RVFLPreprocessor source)
121121
{
122122
_inputFilters[i] = source._inputFilters[i].DeepClone();
123123
}
124-
WeightsStat = new BasicStat(source.WeightsStat);
125-
BiasesStat = new BasicStat(source.BiasesStat);
124+
WeightsStat = source.WeightsStat.DeepClone();
125+
BiasesStat = source.BiasesStat.DeepClone();
126126
Initialized = source.Initialized;
127127
return;
128128
}
@@ -185,15 +185,14 @@ RVFLModelConfig modelCfg
185185
/// <summary>
186186
/// Randomizes internal weights.
187187
/// </summary>
188-
/// <param name="stdTrainingInputs">Standardized training input data.</param>
189188
/// <param name="rand">The random generator to be used.</param>
190-
private void RandomizeWeights(double[][] stdTrainingInputs, Random rand)
189+
private void RandomizeWeights(Random rand)
191190
{
192191
WeightsStat.Reset();
193192
BiasesStat.Reset();
194193
foreach (Layer layer in LayerCollection)
195194
{
196-
layer.RandomizeWeights(stdTrainingInputs, _flatWeights, rand);
195+
layer.RandomizeWeights(_flatWeights, rand);
197196
WeightsStat.Merge(layer.WeightsStat);
198197
BiasesStat.Merge(layer.BiasesStat);
199198
}
@@ -295,7 +294,7 @@ public SampleDataset Init(SampleDataset trainingData,
295294
}
296295
});
297296
//New weights
298-
RandomizeWeights(stdInputs, rand);
297+
RandomizeWeights(rand);
299298
//Activation statistics
300299
BasicStat[][][] activationStats = new BasicStat[LayerCollection.Count][][];
301300
BasicStat[][] weightStats = new BasicStat[LayerCollection.Count][];
@@ -312,8 +311,8 @@ public SampleDataset Init(SampleDataset trainingData,
312311
{
313312
activationStats[i][j][k] = new BasicStat();
314313
}
315-
weightStats[i][j] = new BasicStat(LayerCollection[i].Pools[j].WeightsStat);
316-
biasStats[i][j] = new BasicStat(LayerCollection[i].Pools[j].BiasesStat);
314+
weightStats[i][j] = LayerCollection[i].Pools[j].WeightsStat.DeepClone();
315+
biasStats[i][j] = LayerCollection[i].Pools[j].BiasesStat.DeepClone();
317316
}
318317
}
319318
//Output
@@ -421,8 +420,8 @@ internal Layer(Layer source)
421420
NumOfLayerNeurons = source.NumOfLayerNeurons;
422421
NumOfLayerWeights = source.NumOfLayerWeights;
423422
NumOfPredictors = source.NumOfPredictors;
424-
WeightsStat = new BasicStat(source.WeightsStat);
425-
BiasesStat = new BasicStat(source.BiasesStat);
423+
WeightsStat = source.WeightsStat.DeepClone();
424+
BiasesStat = source.BiasesStat.DeepClone();
426425
return;
427426
}
428427

@@ -474,16 +473,15 @@ internal Layer DeepClone()
474473
/// <summary>
475474
/// Randomly initializes layer weights.
476475
/// </summary>
477-
/// <param name="stdTrainingInputs">Standardized training input data.</param>
478476
/// <param name="flatWeights">RVFL's weights in a flat structure.</param>
479477
/// <param name="rand">Random generator to be used.</param>
480-
internal void RandomizeWeights(double[][] stdTrainingInputs, double[] flatWeights, Random rand)
478+
internal void RandomizeWeights(double[] flatWeights, Random rand)
481479
{
482480
WeightsStat.Reset();
483481
BiasesStat.Reset();
484482
foreach (Pool pool in Pools)
485483
{
486-
pool.RandomizeWeights(stdTrainingInputs, flatWeights, rand);
484+
pool.RandomizeWeights(flatWeights, rand);
487485
WeightsStat.Merge(pool.WeightsStat);
488486
BiasesStat.Merge(pool.BiasesStat);
489487
}
@@ -571,8 +569,8 @@ public class Pool
571569
public BasicStat BiasesStat { get; }
572570

573571
//Attributes
574-
private double _scaleFactorW;
575-
private double _scaleFactorB;
572+
private readonly double _scaleFactorW;
573+
private readonly double _scaleFactorB;
576574

577575
//Constructor
578576
/// <summary>
@@ -591,8 +589,8 @@ internal Pool(Pool source)
591589
BiasesStartFlatIdx = source.BiasesStartFlatIdx;
592590
NeuronsStartFlatIdx = source.NeuronsStartFlatIdx;
593591
NumOfAllWeights = source.NumOfAllWeights;
594-
WeightsStat = new BasicStat(source.WeightsStat);
595-
BiasesStat = new BasicStat(source.BiasesStat);
592+
WeightsStat = source.WeightsStat.DeepClone();
593+
BiasesStat = source.BiasesStat.DeepClone();
596594
_scaleFactorW = source._scaleFactorW;
597595
_scaleFactorB = source._scaleFactorB;
598596
return;
@@ -644,10 +642,9 @@ internal Pool DeepClone()
644642
/// <summary>
645643
/// Randomly initializes pool weights.
646644
/// </summary>
647-
/// <param name="stdTrainingInputs">Standardized training input data.</param>
648645
/// <param name="flatWeights">RVFL's weights in a flat structure.</param>
649646
/// <param name="rand">Random generator to be used.</param>
650-
internal void RandomizeWeights(double[][] stdTrainingInputs, double[] flatWeights, Random rand)
647+
internal void RandomizeWeights(double[] flatWeights, Random rand)
651648
{
652649
double[] wBuff = new double[NumOfInputNodes * NumOfNeurons];
653650
double[] bBuff = new double[NumOfNeurons];

0 commit comments

Comments (0)