@@ -344,7 +344,7 @@ var cachedTrainData = mlContext.Data.Cache(trainData);
 var pipeline =
     // First 'normalize' the data (rescale to be
     // between -1 and 1 for all examples)
-    mlContext.Transforms.Normalize("FeatureVector")
+    mlContext.Transforms.NormalizeMinMax("FeatureVector")
     // We add a step for caching data in memory so that the downstream iterative training
     // algorithm can efficiently scan through the data multiple times. Otherwise, the following
     // trainer will load data from disk multiple times. The caching mechanism uses an on-demand strategy.
@@ -625,18 +625,15 @@ var trainData = mlContext.Data.LoadFromTextFile<IrisInputAllFeatures>(dataPath,
     separatorChar: ','
 );
 
-// Apply all kinds of standard ML.NET normalization to the raw features.
+// Apply MinMax normalization to the raw features.
 var pipeline =
-    mlContext.Transforms.Normalize(
-        new NormalizingEstimator.MinMaxColumnOptions("MinMaxNormalized", "Features", fixZero: true),
-        new NormalizingEstimator.MeanVarianceColumnOptions("MeanVarNormalized", "Features", fixZero: true),
-        new NormalizingEstimator.BinningColumnOptions("BinNormalized", "Features", maximumBinCount: 256));
+    mlContext.Transforms.NormalizeMinMax("MinMaxNormalized", "Features");
 
 // Let's train our pipeline of normalizers, and then apply it to the same data.
 var normalizedData = pipeline.Fit(trainData).Transform(trainData);
 
 // Inspect one column of the resulting dataset.
-var meanVarValues = normalizedData.GetColumn<float[]>(normalizedData.Schema["MeanVarNormalized"]).ToArray();
+var meanVarValues = normalizedData.GetColumn<float[]>(normalizedData.Schema["MinMaxNormalized"]).ToArray();
 ```
 
 ## How do I train my model on categorical data?
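For reference, here is a minimal self-contained sketch of the normalization flow that the second hunk converges on, assuming the `Microsoft.ML` 1.x package. The `IrisInputAllFeatures` shape, the `iris-data.csv` path, and the `Program` wrapper are illustrative stand-ins, not the cookbook's exact definitions:

```csharp
using System;
using System.Linq;
using Microsoft.ML;
using Microsoft.ML.Data;

// Hypothetical stand-in for the cookbook's IrisInputAllFeatures class:
// the first four numeric columns are loaded as one feature vector.
public class IrisInputAllFeatures
{
    [LoadColumn(0, 3), VectorType(4)]
    public float[] Features { get; set; }
}

public static class Program
{
    public static void Main()
    {
        var mlContext = new MLContext();

        // Placeholder path; point this at your own copy of the iris data.
        var trainData = mlContext.Data.LoadFromTextFile<IrisInputAllFeatures>(
            "iris-data.csv",
            separatorChar: ',');

        // Post-change API: a single MinMax normalizer instead of the older
        // Transforms.Normalize(...) overload that took per-column options.
        var pipeline = mlContext.Transforms.NormalizeMinMax("MinMaxNormalized", "Features");

        // Fit the normalizer and apply it to the same data.
        var normalizedData = pipeline.Fit(trainData).Transform(trainData);

        // Pull out the normalized vectors for inspection.
        var minMaxValues = normalizedData
            .GetColumn<float[]>(normalizedData.Schema["MinMaxNormalized"])
            .ToArray();

        Console.WriteLine(string.Join(", ", minMaxValues.First()));
    }
}
```

The single `NormalizeMinMax` call replaces the older `Transforms.Normalize` overload that accepted a list of `NormalizingEstimator.*ColumnOptions`, which is why the inspection step in the hunk now reads the `MinMaxNormalized` column rather than `MeanVarNormalized`.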