@@ -121,8 +121,8 @@ public RVFLPreprocessor(RVFLPreprocessor source)
            {
                _inputFilters[i] = source._inputFilters[i].DeepClone();
            }
-           WeightsStat = new BasicStat(source.WeightsStat);
-           BiasesStat = new BasicStat(source.BiasesStat);
+           WeightsStat = source.WeightsStat.DeepClone();
+           BiasesStat = source.BiasesStat.DeepClone();
            Initialized = source.Initialized;
            return;
        }
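Note: the replacement lines above assume BasicStat exposes a DeepClone() method with copy semantics equivalent to the copy constructor being dropped. A minimal sketch of how such a method could be written (hypothetical illustration only, not the library's actual implementation):

    // Hypothetical sketch: BasicStat's real implementation is not part of this diff.
    public class BasicStat
    {
        // Existing copy constructor; body elided here.
        public BasicStat(BasicStat source) { /* copy accumulated statistics */ }

        // DeepClone() as used above could simply wrap the copy constructor,
        // making the cloning intent explicit at the call site.
        public BasicStat DeepClone()
        {
            return new BasicStat(this);
        }
    }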
@@ -185,15 +185,14 @@ RVFLModelConfig modelCfg
        /// <summary>
        /// Randomizes internal weights.
        /// </summary>
-       /// <param name="stdTrainingInputs">Standardized training input data.</param>
        /// <param name="rand">The random generator to be used.</param>
-       private void RandomizeWeights(double[][] stdTrainingInputs, Random rand)
+       private void RandomizeWeights(Random rand)
        {
            WeightsStat.Reset();
            BiasesStat.Reset();
            foreach (Layer layer in LayerCollection)
            {
-               layer.RandomizeWeights(stdTrainingInputs, _flatWeights, rand);
+               layer.RandomizeWeights(_flatWeights, rand);
                WeightsStat.Merge(layer.WeightsStat);
                BiasesStat.Merge(layer.BiasesStat);
            }
@@ -295,7 +294,7 @@ public SampleDataset Init(SampleDataset trainingData,
                }
            });
            //New weights
-           RandomizeWeights(stdInputs, rand);
+           RandomizeWeights(rand);
            //Activation statistics
            BasicStat[][][] activationStats = new BasicStat[LayerCollection.Count][][];
            BasicStat[][] weightStats = new BasicStat[LayerCollection.Count][];
@@ -312,8 +311,8 @@ public SampleDataset Init(SampleDataset trainingData,
                    {
                        activationStats[i][j][k] = new BasicStat();
                    }
-                   weightStats[i][j] = new BasicStat(LayerCollection[i].Pools[j].WeightsStat);
-                   biasStats[i][j] = new BasicStat(LayerCollection[i].Pools[j].BiasesStat);
+                   weightStats[i][j] = LayerCollection[i].Pools[j].WeightsStat.DeepClone();
+                   biasStats[i][j] = LayerCollection[i].Pools[j].BiasesStat.DeepClone();
                }
            }
            //Output
@@ -421,8 +420,8 @@ internal Layer(Layer source)
                NumOfLayerNeurons = source.NumOfLayerNeurons;
                NumOfLayerWeights = source.NumOfLayerWeights;
                NumOfPredictors = source.NumOfPredictors;
-               WeightsStat = new BasicStat(source.WeightsStat);
-               BiasesStat = new BasicStat(source.BiasesStat);
+               WeightsStat = source.WeightsStat.DeepClone();
+               BiasesStat = source.BiasesStat.DeepClone();
                return;
            }

@@ -474,16 +473,15 @@ internal Layer DeepClone()
            /// <summary>
            /// Randomly initializes layer weights.
            /// </summary>
-           /// <param name="stdTrainingInputs">Standardized training input data.</param>
            /// <param name="flatWeights">RVFL's weights in a flat structure.</param>
            /// <param name="rand">Random generator to be used.</param>
-           internal void RandomizeWeights(double[][] stdTrainingInputs, double[] flatWeights, Random rand)
+           internal void RandomizeWeights(double[] flatWeights, Random rand)
            {
                WeightsStat.Reset();
                BiasesStat.Reset();
                foreach (Pool pool in Pools)
                {
-                   pool.RandomizeWeights(stdTrainingInputs, flatWeights, rand);
+                   pool.RandomizeWeights(flatWeights, rand);
                    WeightsStat.Merge(pool.WeightsStat);
                    BiasesStat.Merge(pool.BiasesStat);
                }
@@ -571,8 +569,8 @@ public class Pool
            public BasicStat BiasesStat { get; }

            //Attributes
-           private double _scaleFactorW;
-           private double _scaleFactorB;
+           private readonly double _scaleFactorW;
+           private readonly double _scaleFactorB;

            //Constructor
            /// <summary>
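Note: marking _scaleFactorW and _scaleFactorB as readonly relies on both fields being assigned only inside Pool's constructors, as the copy constructor in the next hunk does. A standalone illustration of the compiler guarantee this buys, not code from the repository:

    // Standalone illustration of the readonly guarantee; not repository code.
    public class ReadonlyFieldExample
    {
        private readonly double _scaleFactorW;

        public ReadonlyFieldExample(double scaleFactorW)
        {
            _scaleFactorW = scaleFactorW;  // allowed: assignment inside a constructor
        }

        public void Rescale(double factor)
        {
            // _scaleFactorW = factor;     // would not compile: readonly fields may only
                                           // be assigned in a constructor or initializer
        }
    }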
@@ -591,8 +589,8 @@ internal Pool(Pool source)
                BiasesStartFlatIdx = source.BiasesStartFlatIdx;
                NeuronsStartFlatIdx = source.NeuronsStartFlatIdx;
                NumOfAllWeights = source.NumOfAllWeights;
-               WeightsStat = new BasicStat(source.WeightsStat);
-               BiasesStat = new BasicStat(source.BiasesStat);
+               WeightsStat = source.WeightsStat.DeepClone();
+               BiasesStat = source.BiasesStat.DeepClone();
                _scaleFactorW = source._scaleFactorW;
                _scaleFactorB = source._scaleFactorB;
                return;
@@ -644,10 +642,9 @@ internal Pool DeepClone()
            /// <summary>
            /// Randomly initializes pool weights.
            /// </summary>
-           /// <param name="stdTrainingInputs">Standardized training input data.</param>
            /// <param name="flatWeights">RVFL's weights in a flat structure.</param>
            /// <param name="rand">Random generator to be used.</param>
-           internal void RandomizeWeights(double[][] stdTrainingInputs, double[] flatWeights, Random rand)
+           internal void RandomizeWeights(double[] flatWeights, Random rand)
            {
                double[] wBuff = new double[NumOfInputNodes * NumOfNeurons];
                double[] bBuff = new double[NumOfNeurons];
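Note: taken together, the hunks above narrow the weight-randomization call chain so that only the flat weight buffer and the random generator are passed down the Preprocessor -> Layer -> Pool hierarchy; the standardized training inputs are no longer threaded through. A condensed view assembled from the signatures shown above (bodies elided, not a verbatim excerpt):

    // Condensed from the hunks above; bodies elided, not a verbatim excerpt.
    private void RandomizeWeights(Random rand)                          // RVFLPreprocessor
    {
        foreach (Layer layer in LayerCollection) { layer.RandomizeWeights(_flatWeights, rand); }
    }

    internal void RandomizeWeights(double[] flatWeights, Random rand)   // Layer
    {
        foreach (Pool pool in Pools) { pool.RandomizeWeights(flatWeights, rand); }
    }

    internal void RandomizeWeights(double[] flatWeights, Random rand)   // Pool
    {
        double[] wBuff = new double[NumOfInputNodes * NumOfNeurons];
        double[] bBuff = new double[NumOfNeurons];
        // ...weight generation continues beyond this hunk...
    }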