diff --git a/src/Microsoft.ML.Data/Prediction/Calibrator.cs b/src/Microsoft.ML.Data/Prediction/Calibrator.cs
index 46e8063df0..d72cb120c7 100644
--- a/src/Microsoft.ML.Data/Prediction/Calibrator.cs
+++ b/src/Microsoft.ML.Data/Prediction/Calibrator.cs
@@ -173,7 +173,7 @@ public abstract class CalibratedModelParametersBase<TSubModel, TCalibrator> :
         where TSubModel : class
         where TCalibrator : class, ICalibrator
     {
-        protected readonly IHost Host;
+        private protected readonly IHost Host;
 
         // Strongly-typed members.
         /// <summary>
diff --git a/src/Microsoft.ML.Mkl.Components/ComputeLRTrainingStdThroughHal.cs b/src/Microsoft.ML.Mkl.Components/ComputeLRTrainingStdThroughHal.cs
index 02c84e25df..12e8fe1894 100644
--- a/src/Microsoft.ML.Mkl.Components/ComputeLRTrainingStdThroughHal.cs
+++ b/src/Microsoft.ML.Mkl.Components/ComputeLRTrainingStdThroughHal.cs
@@ -11,7 +11,7 @@ namespace Microsoft.ML.Trainers
 {
     using MklOls = OrdinaryLeastSquaresRegressionTrainer.Mkl;
 
-    public sealed class ComputeLRTrainingStdThroughMkl : ComputeLRTrainingStd
+    public sealed class ComputeLRTrainingStdThroughMkl : ComputeLogisticRegressionStandardDeviation
     {
         /// <summary>
         /// Computes the standart deviation matrix of each of the non-zero training weights, needed to calculate further the standart deviation,
@@ -23,7 +23,7 @@ public sealed class ComputeLRTrainingStdThroughMkl : ComputeLRTrainingStd
         /// <param name="currentWeightsCount"></param>
         /// <param name="ch">The <see cref="IChannel"/> used for messaging.</param>
         /// <param name="l2Weight">The L2Weight used for training. (Supply the same one that got used during training.)</param>
-        public override VBuffer<float> ComputeStd(double[] hessian, int[] weightIndices, int numSelectedParams, int currentWeightsCount, IChannel ch, float l2Weight)
+        public override VBuffer<float> ComputeStandardDeviation(double[] hessian, int[] weightIndices, int numSelectedParams, int currentWeightsCount, IChannel ch, float l2Weight)
         {
             Contracts.AssertValue(ch);
             Contracts.AssertValue(hessian, nameof(hessian));
diff --git a/src/Microsoft.ML.StandardLearners/Standard/LinearModelParameters.cs b/src/Microsoft.ML.StandardLearners/Standard/LinearModelParameters.cs
index 17c1eadc36..34a7dc61d8 100644
--- a/src/Microsoft.ML.StandardLearners/Standard/LinearModelParameters.cs
+++ b/src/Microsoft.ML.StandardLearners/Standard/LinearModelParameters.cs
@@ -261,7 +261,7 @@ private protected virtual float Score(in VBuffer<float> src)
             return Bias + VectorUtils.DotProduct(in _weightsDense, in src);
         }
 
-        protected virtual void GetFeatureContributions(in VBuffer<float> features, ref VBuffer<float> contributions, int top, int bottom, bool normalize)
+        private protected virtual void GetFeatureContributions(in VBuffer<float> features, ref VBuffer<float> contributions, int top, int bottom, bool normalize)
         {
             if (features.Length != Weight.Length)
                 throw Contracts.Except("Input is of length {0} does not match expected length  of weights {1}", features.Length, Weight.Length);
@@ -662,6 +662,9 @@ IList<KeyValuePair<string, object>> ICanGetSummaryInKeyValuePairs.GetSummaryInKe
         }
     }
 
+    /// <summary>
+    /// The model parameters class for Poisson Regression.
+    /// </summary>
     public sealed class PoissonRegressionModelParameters : RegressionModelParameters, IParameterMixer<float>
     {
         internal const string LoaderSignature = "PoissonRegressionExec";
diff --git a/src/Microsoft.ML.StandardLearners/Standard/LogisticRegression/LbfgsPredictorBase.cs b/src/Microsoft.ML.StandardLearners/Standard/LogisticRegression/LbfgsPredictorBase.cs
index 0dde905f31..cdca6e8b66 100644
--- a/src/Microsoft.ML.StandardLearners/Standard/LogisticRegression/LbfgsPredictorBase.cs
+++ b/src/Microsoft.ML.StandardLearners/Standard/LogisticRegression/LbfgsPredictorBase.cs
@@ -22,36 +22,54 @@ public abstract class LbfgsTrainerBase<TOptions, TTransformer, TModel> : Trainer
     {
         public abstract class OptionsBase : TrainerInputBaseWithWeight
         {
-            [Argument(ArgumentType.AtMostOnce, HelpText = "L2 regularization weight", ShortName = "l2", SortOrder = 50)]
+            /// <summary>
+            /// L2 regularization weight.
+            /// </summary>
+            [Argument(ArgumentType.AtMostOnce, HelpText = "L2 regularization weight", ShortName = "l2, L2Weight", SortOrder = 50)]
             [TGUI(Label = "L2 Weight", Description = "Weight of L2 regularizer term", SuggestedSweeps = "0,0.1,1")]
             [TlcModule.SweepableFloatParamAttribute(0.0f, 1.0f, numSteps: 4)]
-            public float L2Weight = Defaults.L2Weight;
+            public float L2Regularization = Defaults.L2Regularization;
 
-            [Argument(ArgumentType.AtMostOnce, HelpText = "L1 regularization weight", ShortName = "l1", SortOrder = 50)]
+            /// <summary>
+            /// L1 regularization weight.
+            /// </summary>
+            [Argument(ArgumentType.AtMostOnce, HelpText = "L1 regularization weight", ShortName = "l1, L1Weight", SortOrder = 50)]
             [TGUI(Label = "L1 Weight", Description = "Weight of L1 regularizer term", SuggestedSweeps = "0,0.1,1")]
             [TlcModule.SweepableFloatParamAttribute(0.0f, 1.0f, numSteps: 4)]
-            public float L1Weight = Defaults.L1Weight;
+            public float L1Regularization = Defaults.L1Regularization;
 
+            /// <summary>
+            /// Tolerance parameter for optimization convergence. (Low = slower, more accurate).
+            /// </summary>
             [Argument(ArgumentType.AtMostOnce, HelpText = "Tolerance parameter for optimization convergence. Low = slower, more accurate",
-                ShortName = "ot", SortOrder = 50)]
+                ShortName = "ot, OptTol", SortOrder = 50)]
             [TGUI(Label = "Optimization Tolerance", Description = "Threshold for optimizer convergence", SuggestedSweeps = "1e-4,1e-7")]
             [TlcModule.SweepableDiscreteParamAttribute(new object[] { 1e-4f, 1e-7f })]
-            public float OptTol = Defaults.OptTol;
+            public float OptimizationTolerance = Defaults.OptimizationTolerance;
 
+            /// <summary>
+            /// Number of previous iterations to remember for estimate of Hessian.
+            /// </summary>
             [Argument(ArgumentType.AtMostOnce, HelpText = "Memory size for L-BFGS. Low=faster, less accurate",
-                ShortName = "m", SortOrder = 50)]
+                ShortName = "m, MemorySize", SortOrder = 50)]
             [TGUI(Description = "Memory size for L-BFGS", SuggestedSweeps = "5,20,50")]
             [TlcModule.SweepableDiscreteParamAttribute("MemorySize", new object[] { 5, 20, 50 })]
-            public int MemorySize = Defaults.MemorySize;
+            public int IterationsToRemember = Defaults.IterationsToRemember;
 
-            [Argument(ArgumentType.AtMostOnce, HelpText = "Maximum iterations.", ShortName = "maxiter")]
+            /// <summary>
+            /// Maximum number of iterations.
+            /// </summary>
+            [Argument(ArgumentType.AtMostOnce, HelpText = "Maximum iterations.", ShortName = "maxiter, MaxIterations")]
             [TGUI(Label = "Max Number of Iterations")]
             [TlcModule.SweepableLongParamAttribute("MaxIterations", 1, int.MaxValue)]
-            public int MaxIterations = Defaults.MaxIterations;
+            public int NumberOfIterations = Defaults.NumberOfIterations;
 
+            /// <summary>
+            /// Run SGD to initialize LR weights, converging to this tolerance.
+            /// </summary>
             [Argument(ArgumentType.AtMostOnce, HelpText = "Run SGD to initialize LR weights, converging to this tolerance",
-                ShortName = "sgd")]
-            public float SgdInitializationTolerance = 0;
+                ShortName = "sgd, SgdInitializationTolerance")]
+            public float StochasticGradientDescentInitializationTolerance = 0;
 
             /// <summary>
             /// Features must occur in at least this many instances to be included
@@ -68,37 +86,43 @@ public abstract class OptionsBase : TrainerInputBaseWithWeight
             /// <summary>
             /// Init Weights Diameter
             /// </summary>
-            [Argument(ArgumentType.LastOccurenceWins, HelpText = "Init weights diameter", ShortName = "initwts", SortOrder = 140)]
+            [Argument(ArgumentType.LastOccurenceWins, HelpText = "Init weights diameter", ShortName = "initwts, InitWtsDiameter", SortOrder = 140)]
             [TGUI(Label = "Initial Weights Scale", SuggestedSweeps = "0,0.1,0.5,1")]
             [TlcModule.SweepableFloatParamAttribute("InitWtsDiameter", 0.0f, 1.0f, numSteps: 5)]
-            public float InitWtsDiameter = 0;
+            public float InitialWeightsDiameter = 0;
 
             // Deprecated
             [Argument(ArgumentType.AtMostOnce, HelpText = "Whether or not to use threads. Default is true",
                 ShortName = "t", Hide = true)]
-            public bool UseThreads = true;
+            internal bool UseThreads = true;
 
             /// <summary>
             /// Number of threads. Null means use the number of processors.
             /// </summary>
-            [Argument(ArgumentType.AtMostOnce, HelpText = "Number of threads", ShortName = "nt")]
-            public int? NumThreads;
+            [Argument(ArgumentType.AtMostOnce, HelpText = "Number of threads", ShortName = "nt, NumThreads")]
+            public int? NumberOfThreads;
 
+            /// <summary>
+            /// Force densification of the internal optimization vectors. Default is false.
+            /// </summary>
             [Argument(ArgumentType.AtMostOnce, HelpText = "Force densification of the internal optimization vectors", ShortName = "do")]
             [TlcModule.SweepableDiscreteParamAttribute("DenseOptimizer", new object[] { false, true })]
             public bool DenseOptimizer = false;
 
+            /// <summary>
+            /// Enforce non-negative weights. Default is false.
+            /// </summary>
             [Argument(ArgumentType.AtMostOnce, HelpText = "Enforce non-negative weights", ShortName = "nn", SortOrder = 90)]
             public bool EnforceNonNegativity = Defaults.EnforceNonNegativity;
 
             [BestFriend]
             internal static class Defaults
             {
-                public const float L2Weight = 1;
-                public const float L1Weight = 1;
-                public const float OptTol = 1e-7f;
-                public const int MemorySize = 20;
-                public const int MaxIterations = int.MaxValue;
+                public const float L2Regularization = 1;
+                public const float L1Regularization = 1;
+                public const float OptimizationTolerance = 1e-7f;
+                public const int IterationsToRemember = 20;
+                public const int NumberOfIterations = int.MaxValue;
                 public const bool EnforceNonNegativity = false;
             }
         }
@@ -165,10 +189,10 @@ internal LbfgsTrainerBase(IHostEnvironment env,
                             FeatureColumnName = featureColumn,
                             LabelColumnName = labelColumn.Name,
                             ExampleWeightColumnName = weightColumn,
-                            L1Weight = l1Weight,
-                            L2Weight = l2Weight,
-                            OptTol = optimizationTolerance,
-                            MemorySize = memorySize,
+                            L1Regularization = l1Weight,
+                            L2Regularization = l2Weight,
+                            OptimizationTolerance = optimizationTolerance,
+                            IterationsToRemember = memorySize,
                             EnforceNonNegativity = enforceNoNegativity
                         },
                   labelColumn)
@@ -191,31 +215,31 @@ internal LbfgsTrainerBase(IHostEnvironment env,
             options.FeatureColumnName = FeatureColumn.Name;
             options.LabelColumnName = LabelColumn.Name;
             options.ExampleWeightColumnName = WeightColumn.Name;
-            Host.CheckUserArg(!LbfgsTrainerOptions.UseThreads || LbfgsTrainerOptions.NumThreads > 0 || LbfgsTrainerOptions.NumThreads == null,
-              nameof(LbfgsTrainerOptions.NumThreads), "numThreads must be positive (or empty for default)");
-            Host.CheckUserArg(LbfgsTrainerOptions.L2Weight >= 0, nameof(LbfgsTrainerOptions.L2Weight), "Must be non-negative");
-            Host.CheckUserArg(LbfgsTrainerOptions.L1Weight >= 0, nameof(LbfgsTrainerOptions.L1Weight), "Must be non-negative");
-            Host.CheckUserArg(LbfgsTrainerOptions.OptTol > 0, nameof(LbfgsTrainerOptions.OptTol), "Must be positive");
-            Host.CheckUserArg(LbfgsTrainerOptions.MemorySize > 0, nameof(LbfgsTrainerOptions.MemorySize), "Must be positive");
-            Host.CheckUserArg(LbfgsTrainerOptions.MaxIterations > 0, nameof(LbfgsTrainerOptions.MaxIterations), "Must be positive");
-            Host.CheckUserArg(LbfgsTrainerOptions.SgdInitializationTolerance >= 0, nameof(LbfgsTrainerOptions.SgdInitializationTolerance), "Must be non-negative");
-            Host.CheckUserArg(LbfgsTrainerOptions.NumThreads == null || LbfgsTrainerOptions.NumThreads.Value >= 0, nameof(LbfgsTrainerOptions.NumThreads), "Must be non-negative");
-
-            Host.CheckParam(!(LbfgsTrainerOptions.L2Weight < 0), nameof(LbfgsTrainerOptions.L2Weight), "Must be non-negative, if provided.");
-            Host.CheckParam(!(LbfgsTrainerOptions.L1Weight < 0), nameof(LbfgsTrainerOptions.L1Weight), "Must be non-negative, if provided");
-            Host.CheckParam(!(LbfgsTrainerOptions.OptTol <= 0), nameof(LbfgsTrainerOptions.OptTol), "Must be positive, if provided.");
-            Host.CheckParam(!(LbfgsTrainerOptions.MemorySize <= 0), nameof(LbfgsTrainerOptions.MemorySize), "Must be positive, if provided.");
-
-            L2Weight = LbfgsTrainerOptions.L2Weight;
-            L1Weight = LbfgsTrainerOptions.L1Weight;
-            OptTol = LbfgsTrainerOptions.OptTol;
-            MemorySize =LbfgsTrainerOptions.MemorySize;
-            MaxIterations = LbfgsTrainerOptions.MaxIterations;
-            SgdInitializationTolerance = LbfgsTrainerOptions.SgdInitializationTolerance;
+            Host.CheckUserArg(!LbfgsTrainerOptions.UseThreads || LbfgsTrainerOptions.NumberOfThreads > 0 || LbfgsTrainerOptions.NumberOfThreads == null,
+              nameof(LbfgsTrainerOptions.NumberOfThreads), "Must be positive (or empty for default)");
+            Host.CheckUserArg(LbfgsTrainerOptions.L2Regularization >= 0, nameof(LbfgsTrainerOptions.L2Regularization), "Must be non-negative");
+            Host.CheckUserArg(LbfgsTrainerOptions.L1Regularization >= 0, nameof(LbfgsTrainerOptions.L1Regularization), "Must be non-negative");
+            Host.CheckUserArg(LbfgsTrainerOptions.OptimizationTolerance > 0, nameof(LbfgsTrainerOptions.OptimizationTolerance), "Must be positive");
+            Host.CheckUserArg(LbfgsTrainerOptions.IterationsToRemember > 0, nameof(LbfgsTrainerOptions.IterationsToRemember), "Must be positive");
+            Host.CheckUserArg(LbfgsTrainerOptions.NumberOfIterations > 0, nameof(LbfgsTrainerOptions.NumberOfIterations), "Must be positive");
+            Host.CheckUserArg(LbfgsTrainerOptions.StochasticGradientDescentInitializationTolerance >= 0, nameof(LbfgsTrainerOptions.StochasticGradientDescentInitializationTolerance), "Must be non-negative");
+            Host.CheckUserArg(LbfgsTrainerOptions.NumberOfThreads == null || LbfgsTrainerOptions.NumberOfThreads.Value >= 0, nameof(LbfgsTrainerOptions.NumberOfThreads), "Must be non-negative");
+
+            Host.CheckParam(!(LbfgsTrainerOptions.L2Regularization < 0), nameof(LbfgsTrainerOptions.L2Regularization), "Must be non-negative, if provided.");
+            Host.CheckParam(!(LbfgsTrainerOptions.L1Regularization < 0), nameof(LbfgsTrainerOptions.L1Regularization), "Must be non-negative, if provided");
+            Host.CheckParam(!(LbfgsTrainerOptions.OptimizationTolerance <= 0), nameof(LbfgsTrainerOptions.OptimizationTolerance), "Must be positive, if provided.");
+            Host.CheckParam(!(LbfgsTrainerOptions.IterationsToRemember <= 0), nameof(LbfgsTrainerOptions.IterationsToRemember), "Must be positive, if provided.");
+
+            L2Weight = LbfgsTrainerOptions.L2Regularization;
+            L1Weight = LbfgsTrainerOptions.L1Regularization;
+            OptTol = LbfgsTrainerOptions.OptimizationTolerance;
+            MemorySize = LbfgsTrainerOptions.IterationsToRemember;
+            MaxIterations = LbfgsTrainerOptions.NumberOfIterations;
+            SgdInitializationTolerance = LbfgsTrainerOptions.StochasticGradientDescentInitializationTolerance;
             Quiet = LbfgsTrainerOptions.Quiet;
-            InitWtsDiameter = LbfgsTrainerOptions.InitWtsDiameter;
+            InitWtsDiameter = LbfgsTrainerOptions.InitialWeightsDiameter;
             UseThreads = LbfgsTrainerOptions.UseThreads;
-            NumThreads = LbfgsTrainerOptions.NumThreads;
+            NumThreads = LbfgsTrainerOptions.NumberOfThreads;
             DenseOptimizer = LbfgsTrainerOptions.DenseOptimizer;
             EnforceNonNegativity = LbfgsTrainerOptions.EnforceNonNegativity;
 
@@ -245,10 +269,10 @@ private static TOptions ArgsInit(string featureColumn, SchemaShape.Column labelC
                 FeatureColumnName = featureColumn,
                 LabelColumnName = labelColumn.Name,
                 ExampleWeightColumnName = weightColumn,
-                L1Weight = l1Weight,
-                L2Weight = l2Weight,
-                OptTol = optimizationTolerance,
-                MemorySize = memorySize,
+                L1Regularization = l1Weight,
+                L2Regularization = l2Weight,
+                OptimizationTolerance = optimizationTolerance,
+                IterationsToRemember = memorySize,
                 EnforceNonNegativity = enforceNoNegativity
             };
 
diff --git a/src/Microsoft.ML.StandardLearners/Standard/LogisticRegression/LogisticRegression.cs b/src/Microsoft.ML.StandardLearners/Standard/LogisticRegression/LogisticRegression.cs
index 02b22f017e..3f4e6bb792 100644
--- a/src/Microsoft.ML.StandardLearners/Standard/LogisticRegression/LogisticRegression.cs
+++ b/src/Microsoft.ML.StandardLearners/Standard/LogisticRegression/LogisticRegression.cs
@@ -42,22 +42,22 @@ public sealed partial class LogisticRegression : LbfgsTrainerBase<LogisticRegres
         public sealed class Options : OptionsBase
         {
             /// <summary>
-            /// If set to <value>true</value>training statistics will be generated at the end of training.
+            /// If set to <value>true</value> training statistics will be generated at the end of training.
             /// If you have a large number of learned training parameters(more than 500),
             /// generating the training statistics might take a few seconds.
-            /// More than 1000 weights might take a few minutes. For those cases consider using the instance of <see cref="ComputeLRTrainingStd"/>
+            /// More than 1000 weights might take a few minutes. For those cases consider using the instance of <see cref="ComputeLogisticRegressionStandardDeviation"/>
             /// present in the Microsoft.ML.Mkl.Components package. That computes the statistics using hardware acceleration.
             /// </summary>
-            [Argument(ArgumentType.AtMostOnce, HelpText = "Show statistics of training examples.", ShortName = "stat", SortOrder = 50)]
-            public bool ShowTrainingStats = false;
+            [Argument(ArgumentType.AtMostOnce, HelpText = "Show statistics of training examples.", ShortName = "stat, ShowTrainingStats", SortOrder = 50)]
+            public bool ShowTrainingStatistics = false;
 
             /// <summary>
-            /// The instance of <see cref="ComputeLRTrainingStd"/> that computes the std of the training statistics, at the end of training.
+            /// The instance of <see cref="ComputeLogisticRegressionStandardDeviation"/> that computes the std of the training statistics, at the end of training.
             /// The calculations are not part of Microsoft.ML package, due to the size of MKL.
-            /// If you need these calculations, add the Microsoft.ML.Mkl.Components package, and initialize <see cref="LogisticRegression.Options.StdComputer"/>.
-            /// to the <see cref="ComputeLRTrainingStd"/> implementation in the Microsoft.ML.Mkl.Components package.
+            /// If you need these calculations, add the Microsoft.ML.Mkl.Components package, and initialize <see cref="LogisticRegression.Options.ComputeStandardDeviation"/>
+            /// to the <see cref="ComputeLogisticRegressionStandardDeviation"/> implementation in the Microsoft.ML.Mkl.Components package.
             /// </summary>
-            public ComputeLRTrainingStd StdComputer;
+            public ComputeLogisticRegressionStandardDeviation ComputeStandardDeviation;
         }
 
         private double _posWeight;
@@ -79,10 +79,10 @@ internal LogisticRegression(IHostEnvironment env,
             string labelColumn = DefaultColumnNames.Label,
             string featureColumn = DefaultColumnNames.Features,
             string weights = null,
-            float l1Weight = Options.Defaults.L1Weight,
-            float l2Weight = Options.Defaults.L2Weight,
-            float optimizationTolerance = Options.Defaults.OptTol,
-            int memorySize = Options.Defaults.MemorySize,
+            float l1Weight = Options.Defaults.L1Regularization,
+            float l2Weight = Options.Defaults.L2Regularization,
+            float optimizationTolerance = Options.Defaults.OptimizationTolerance,
+            int memorySize = Options.Defaults.IterationsToRemember,
             bool enforceNoNegativity = Options.Defaults.EnforceNonNegativity)
             : base(env, featureColumn, TrainerUtils.MakeBoolScalarLabel(labelColumn), weights,
                   l1Weight, l2Weight, optimizationTolerance, memorySize, enforceNoNegativity)
@@ -91,7 +91,7 @@ internal LogisticRegression(IHostEnvironment env,
             Host.CheckNonEmpty(labelColumn, nameof(labelColumn));
 
             _posWeight = 0;
-            ShowTrainingStats = LbfgsTrainerOptions.ShowTrainingStats;
+            ShowTrainingStats = LbfgsTrainerOptions.ShowTrainingStatistics;
         }
 
         /// <summary>
@@ -101,7 +101,7 @@ internal LogisticRegression(IHostEnvironment env, Options options)
             : base(env, options, TrainerUtils.MakeBoolScalarLabel(options.LabelColumnName))
         {
             _posWeight = 0;
-            ShowTrainingStats = LbfgsTrainerOptions.ShowTrainingStats;
+            ShowTrainingStats = LbfgsTrainerOptions.ShowTrainingStatistics;
         }
 
         private protected override PredictionKind PredictionKind => PredictionKind.BinaryClassification;
@@ -354,11 +354,11 @@ private protected override void ComputeTrainingStatistics(IChannel ch, FloatLabe
                 }
             }
 
-            if (LbfgsTrainerOptions.StdComputer == null)
+            if (LbfgsTrainerOptions.ComputeStandardDeviation == null)
                 _stats = new LinearModelStatistics(Host, NumGoodRows, numParams, deviance, nullDeviance);
             else
             {
-                var std = LbfgsTrainerOptions.StdComputer.ComputeStd(hessian, weightIndices, numParams, CurrentWeights.Length, ch, L2Weight);
+                var std = LbfgsTrainerOptions.ComputeStandardDeviation.ComputeStandardDeviation(hessian, weightIndices, numParams, CurrentWeights.Length, ch, L2Weight);
                 _stats = new LinearModelStatistics(Host, NumGoodRows, numParams, deviance, nullDeviance, std);
             }
         }
@@ -429,20 +429,20 @@ internal static CommonOutputs.BinaryClassificationOutput TrainBinary(IHostEnviro
     /// <summary>
     /// Computes the standard deviation matrix of each of the non-zero training weights, needed to calculate further the standard deviation,
     /// p-value and z-Score.
-    /// If you need fast calculations, use the <see cref="ComputeLRTrainingStd"/> implementation in the Microsoft.ML.Mkl.Components package,
+    /// If you need fast calculations, use the <see cref="ComputeLogisticRegressionStandardDeviation"/> implementation in the Microsoft.ML.Mkl.Components package,
     /// which makes use of hardware acceleration.
     /// Due to the existence of regularization, an approximation is used to compute the variances of the trained linear coefficients.
     /// </summary>
-    public abstract class ComputeLRTrainingStd
+    public abstract class ComputeLogisticRegressionStandardDeviation
     {
         /// <summary>
         /// Computes the standard deviation matrix of each of the non-zero training weights, needed to calculate further the standard deviation,
         /// p-value and z-Score.
         /// The calculations are not part of Microsoft.ML package, due to the size of MKL.
-        /// If you need these calculations, add the Microsoft.ML.Mkl.Components package, and initialize <see cref="LogisticRegression.Options.StdComputer"/>
-        /// to the <see cref="ComputeLRTrainingStd"/> implementation in the Microsoft.ML.Mkl.Components package.
+        /// If you need these calculations, add the Microsoft.ML.Mkl.Components package, and initialize <see cref="LogisticRegression.Options.ComputeStandardDeviation"/>
+        /// to the <see cref="ComputeLogisticRegressionStandardDeviation"/> implementation in the Microsoft.ML.Mkl.Components package.
         /// Due to the existence of regularization, an approximation is used to compute the variances of the trained linear coefficients.
         /// </summary>
-        public abstract VBuffer<float> ComputeStd(double[] hessian, int[] weightIndices, int parametersCount, int currentWeightsCount, IChannel ch, float l2Weight);
+        public abstract VBuffer<float> ComputeStandardDeviation(double[] hessian, int[] weightIndices, int parametersCount, int currentWeightsCount, IChannel ch, float l2Weight);
     }
 }
diff --git a/src/Microsoft.ML.StandardLearners/Standard/LogisticRegression/MulticlassLogisticRegression.cs b/src/Microsoft.ML.StandardLearners/Standard/LogisticRegression/MulticlassLogisticRegression.cs
index 4d4469c10c..c6b69986fd 100644
--- a/src/Microsoft.ML.StandardLearners/Standard/LogisticRegression/MulticlassLogisticRegression.cs
+++ b/src/Microsoft.ML.StandardLearners/Standard/LogisticRegression/MulticlassLogisticRegression.cs
@@ -45,8 +45,11 @@ public sealed class MulticlassLogisticRegression : LbfgsTrainerBase<MulticlassLo
 
         public sealed class Options : OptionsBase
         {
-            [Argument(ArgumentType.AtMostOnce, HelpText = "Show statistics of training examples.", ShortName = "stat", SortOrder = 50)]
-            public bool ShowTrainingStats = false;
+            /// <summary>
+            /// If set to <value>true</value> training statistics will be generated at the end of training.
+            /// </summary>
+            [Argument(ArgumentType.AtMostOnce, HelpText = "Show statistics of training examples.", ShortName = "stat, ShowTrainingStats", SortOrder = 50)]
+            public bool ShowTrainingStatistics = false;
         }
 
         private int _numClasses;
@@ -84,17 +87,17 @@ internal MulticlassLogisticRegression(IHostEnvironment env,
             string labelColumn = DefaultColumnNames.Label,
             string featureColumn = DefaultColumnNames.Features,
             string weights = null,
-            float l1Weight = Options.Defaults.L1Weight,
-            float l2Weight = Options.Defaults.L2Weight,
-            float optimizationTolerance = Options.Defaults.OptTol,
-            int memorySize = Options.Defaults.MemorySize,
+            float l1Weight = Options.Defaults.L1Regularization,
+            float l2Weight = Options.Defaults.L2Regularization,
+            float optimizationTolerance = Options.Defaults.OptimizationTolerance,
+            int memorySize = Options.Defaults.IterationsToRemember,
             bool enforceNoNegativity = Options.Defaults.EnforceNonNegativity)
             : base(env, featureColumn, TrainerUtils.MakeU4ScalarColumn(labelColumn), weights, l1Weight, l2Weight, optimizationTolerance, memorySize, enforceNoNegativity)
         {
             Host.CheckNonEmpty(featureColumn, nameof(featureColumn));
             Host.CheckNonEmpty(labelColumn, nameof(labelColumn));
 
-            ShowTrainingStats = LbfgsTrainerOptions.ShowTrainingStats;
+            ShowTrainingStats = LbfgsTrainerOptions.ShowTrainingStatistics;
         }
 
         /// <summary>
@@ -103,7 +106,7 @@ internal MulticlassLogisticRegression(IHostEnvironment env,
         internal MulticlassLogisticRegression(IHostEnvironment env, Options options)
             : base(env, options, TrainerUtils.MakeU4ScalarColumn(options.LabelColumnName))
         {
-            ShowTrainingStats = LbfgsTrainerOptions.ShowTrainingStats;
+            ShowTrainingStats = LbfgsTrainerOptions.ShowTrainingStatistics;
         }
 
         private protected override PredictionKind PredictionKind => PredictionKind.MultiClassClassification;
@@ -333,6 +336,9 @@ public MulticlassPredictionTransformer<MulticlassLogisticRegressionModelParamete
             => TrainTransformer(trainData, initPredictor: modelParameters);
     }
 
+    /// <summary>
+    /// The model parameters class for Multiclass Logistic Regression.
+    /// </summary>
     public sealed class MulticlassLogisticRegressionModelParameters :
         ModelParametersBase<VBuffer<float>>,
         IValueMapper,
diff --git a/src/Microsoft.ML.StandardLearners/Standard/PoissonRegression/PoissonRegression.cs b/src/Microsoft.ML.StandardLearners/Standard/PoissonRegression/PoissonRegression.cs
index 165f662cc9..bb1e9eb792 100644
--- a/src/Microsoft.ML.StandardLearners/Standard/PoissonRegression/PoissonRegression.cs
+++ b/src/Microsoft.ML.StandardLearners/Standard/PoissonRegression/PoissonRegression.cs
@@ -54,10 +54,10 @@ internal PoissonRegression(IHostEnvironment env,
             string labelColumn = DefaultColumnNames.Label,
             string featureColumn = DefaultColumnNames.Features,
             string weights = null,
-            float l1Weight = Options.Defaults.L1Weight,
-            float l2Weight = Options.Defaults.L2Weight,
-            float optimizationTolerance = Options.Defaults.OptTol,
-            int memorySize = Options.Defaults.MemorySize,
+            float l1Weight = Options.Defaults.L1Regularization,
+            float l2Weight = Options.Defaults.L2Regularization,
+            float optimizationTolerance = Options.Defaults.OptimizationTolerance,
+            int memorySize = Options.Defaults.IterationsToRemember,
             bool enforceNoNegativity = Options.Defaults.EnforceNonNegativity)
             : base(env, featureColumn, TrainerUtils.MakeR4ScalarColumn(labelColumn), weights,
                   l1Weight, l2Weight, optimizationTolerance, memorySize, enforceNoNegativity)
diff --git a/src/Microsoft.ML.StandardLearners/StandardLearnersCatalog.cs b/src/Microsoft.ML.StandardLearners/StandardLearnersCatalog.cs
index f66c94ccbd..bff013f45a 100644
--- a/src/Microsoft.ML.StandardLearners/StandardLearnersCatalog.cs
+++ b/src/Microsoft.ML.StandardLearners/StandardLearnersCatalog.cs
@@ -449,10 +449,10 @@ public static OnlineGradientDescentTrainer OnlineGradientDescent(this Regression
         /// <param name="labelColumnName">The name of the label column.</param>
         /// <param name="featureColumnName">The name of the feature column.</param>
         /// <param name="exampleWeightColumnName">The name of the example weight column (optional).</param>
-        /// <param name="enforceNoNegativity">Enforce non-negative weights.</param>
-        /// <param name="l1Weight">Weight of L1 regularization term.</param>
-        /// <param name="l2Weight">Weight of L2 regularization term.</param>
-        /// <param name="memorySize">Memory size for <see cref="Trainers.LogisticRegression"/>. Low=faster, less accurate.</param>
+        /// <param name="enforceNonNegativity">Enforce non-negative weights.</param>
+        /// <param name="l1Regularization">Weight of L1 regularization term.</param>
+        /// <param name="l2Regularization">Weight of L2 regularization term.</param>
+        /// <param name="iterationsToRemember">Memory size for <see cref="Trainers.LogisticRegression"/>: the number of past positions and gradients L-BFGS remembers. Low=faster, less accurate.</param>
         /// <param name="optimizationTolerance">Threshold for optimizer convergence.</param>
         /// <example>
         /// <format type="text/markdown">
@@ -465,15 +465,15 @@ public static LogisticRegression LogisticRegression(this BinaryClassificationCat
             string labelColumnName = DefaultColumnNames.Label,
             string featureColumnName = DefaultColumnNames.Features,
             string exampleWeightColumnName = null,
-            float l1Weight = LROptions.Defaults.L1Weight,
-            float l2Weight = LROptions.Defaults.L2Weight,
-            float optimizationTolerance = LROptions.Defaults.OptTol,
-            int memorySize = LROptions.Defaults.MemorySize,
-            bool enforceNoNegativity = LROptions.Defaults.EnforceNonNegativity)
+            float l1Regularization = LROptions.Defaults.L1Regularization,
+            float l2Regularization = LROptions.Defaults.L2Regularization,
+            float optimizationTolerance = LROptions.Defaults.OptimizationTolerance,
+            int iterationsToRemember = LROptions.Defaults.IterationsToRemember,
+            bool enforceNonNegativity = LROptions.Defaults.EnforceNonNegativity)
         {
             Contracts.CheckValue(catalog, nameof(catalog));
             var env = CatalogUtils.GetEnvironment(catalog);
-            return new LogisticRegression(env, labelColumnName, featureColumnName, exampleWeightColumnName, l1Weight, l2Weight, optimizationTolerance, memorySize, enforceNoNegativity);
+            return new LogisticRegression(env, labelColumnName, featureColumnName, exampleWeightColumnName, l1Regularization, l2Regularization, optimizationTolerance, iterationsToRemember, enforceNonNegativity);
         }
 
         /// <summary>
@@ -497,24 +497,24 @@ public static LogisticRegression LogisticRegression(this BinaryClassificationCat
         /// <param name="labelColumnName">The name of the label column.</param>
         /// <param name="featureColumnName">The name of the feature column.</param>
         /// <param name="exampleWeightColumnName">The name of the example weight column (optional).</param>
-        /// <param name="l1Weight">Weight of L1 regularization term.</param>
-        /// <param name="l2Weight">Weight of L2 regularization term.</param>
+        /// <param name="l1Regularization">Weight of L1 regularization term.</param>
+        /// <param name="l2Regularization">Weight of L2 regularization term.</param>
         /// <param name="optimizationTolerance">Threshold for optimizer convergence.</param>
-        /// <param name="memorySize">Memory size for <see cref="Microsoft.ML.Trainers.PoissonRegression"/>. Low=faster, less accurate.</param>
-        /// <param name="enforceNoNegativity">Enforce non-negative weights.</param>
+        /// <param name="iterationsToRemember">Memory size for <see cref="Microsoft.ML.Trainers.PoissonRegression"/>: the number of past positions and gradients L-BFGS remembers. Low=faster, less accurate.</param>
+        /// <param name="enforceNonNegativity">Enforce non-negative weights.</param>
         public static PoissonRegression PoissonRegression(this RegressionCatalog.RegressionTrainers catalog,
             string labelColumnName = DefaultColumnNames.Label,
             string featureColumnName = DefaultColumnNames.Features,
             string exampleWeightColumnName = null,
-            float l1Weight = LROptions.Defaults.L1Weight,
-            float l2Weight = LROptions.Defaults.L2Weight,
-            float optimizationTolerance = LROptions.Defaults.OptTol,
-            int memorySize = LROptions.Defaults.MemorySize,
-            bool enforceNoNegativity = LROptions.Defaults.EnforceNonNegativity)
+            float l1Regularization = LROptions.Defaults.L1Regularization,
+            float l2Regularization = LROptions.Defaults.L2Regularization,
+            float optimizationTolerance = LROptions.Defaults.OptimizationTolerance,
+            int iterationsToRemember = LROptions.Defaults.IterationsToRemember,
+            bool enforceNonNegativity = LROptions.Defaults.EnforceNonNegativity)
         {
             Contracts.CheckValue(catalog, nameof(catalog));
             var env = CatalogUtils.GetEnvironment(catalog);
-            return new PoissonRegression(env, labelColumnName, featureColumnName, exampleWeightColumnName, l1Weight, l2Weight, optimizationTolerance, memorySize, enforceNoNegativity);
+            return new PoissonRegression(env, labelColumnName, featureColumnName, exampleWeightColumnName, l1Regularization, l2Regularization, optimizationTolerance, iterationsToRemember, enforceNonNegativity);
         }
 
         /// <summary>
@@ -538,24 +538,24 @@ public static PoissonRegression PoissonRegression(this RegressionCatalog.Regress
         /// <param name="labelColumnName">The name of the label column.</param>
         /// <param name="featureColumnName">The name of the feature column.</param>
         /// <param name="exampleWeightColumnName">The name of the example weight column (optional).</param>
-        /// <param name="enforceNoNegativity">Enforce non-negative weights.</param>
-        /// <param name="l1Weight">Weight of L1 regularization term.</param>
-        /// <param name="l2Weight">Weight of L2 regularization term.</param>
-        /// <param name="memorySize">Memory size for <see cref="Microsoft.ML.Trainers.MulticlassLogisticRegression"/>. Low=faster, less accurate.</param>
+        /// <param name="enforceNonNegativity">Enforce non-negative weights.</param>
+        /// <param name="l1Regularization">Weight of L1 regularization term.</param>
+        /// <param name="l2Regularization">Weight of L2 regularization term.</param>
+        /// <param name="iterationsToRemember">Memory size for <see cref="Microsoft.ML.Trainers.MulticlassLogisticRegression"/>: the number of past positions and gradients L-BFGS remembers. Low=faster, less accurate.</param>
         /// <param name="optimizationTolerance">Threshold for optimizer convergence.</param>
         public static MulticlassLogisticRegression LogisticRegression(this MulticlassClassificationCatalog.MulticlassClassificationTrainers catalog,
             string labelColumnName = DefaultColumnNames.Label,
             string featureColumnName = DefaultColumnNames.Features,
             string exampleWeightColumnName = null,
-            float l1Weight = LROptions.Defaults.L1Weight,
-            float l2Weight = LROptions.Defaults.L2Weight,
-            float optimizationTolerance = LROptions.Defaults.OptTol,
-            int memorySize = LROptions.Defaults.MemorySize,
-            bool enforceNoNegativity = LROptions.Defaults.EnforceNonNegativity)
+            float l1Regularization = LROptions.Defaults.L1Regularization,
+            float l2Regularization = LROptions.Defaults.L2Regularization,
+            float optimizationTolerance = LROptions.Defaults.OptimizationTolerance,
+            int iterationsToRemember = LROptions.Defaults.IterationsToRemember,
+            bool enforceNonNegativity = LROptions.Defaults.EnforceNonNegativity)
         {
             Contracts.CheckValue(catalog, nameof(catalog));
             var env = CatalogUtils.GetEnvironment(catalog);
-            return new MulticlassLogisticRegression(env, labelColumnName, featureColumnName, exampleWeightColumnName, l1Weight, l2Weight, optimizationTolerance, memorySize, enforceNoNegativity);
+            return new MulticlassLogisticRegression(env, labelColumnName, featureColumnName, exampleWeightColumnName, l1Regularization, l2Regularization, optimizationTolerance, iterationsToRemember, enforceNonNegativity);
         }
 
         /// <summary>
diff --git a/src/Microsoft.ML.StaticPipe/LbfgsStatic.cs b/src/Microsoft.ML.StaticPipe/LbfgsStatic.cs
index bc7066fbad..f2fe56401a 100644
--- a/src/Microsoft.ML.StaticPipe/LbfgsStatic.cs
+++ b/src/Microsoft.ML.StaticPipe/LbfgsStatic.cs
@@ -22,10 +22,10 @@ public static class LbfgsBinaryClassificationStaticExtensions
         /// <param name="label">The label, or dependent variable.</param>
         /// <param name="features">The features, or independent variables.</param>
         /// <param name="weights">The optional example weights.</param>
-        /// <param name="enoforceNoNegativity">Enforce non-negative weights.</param>
-        /// <param name="l1Weight">Weight of L1 regularization term.</param>
-        /// <param name="l2Weight">Weight of L2 regularization term.</param>
-        /// <param name="memorySize">Memory size for <see cref="Microsoft.ML.Trainers.LogisticRegression"/>. Low=faster, less accurate.</param>
+        /// <param name="enforceNonNegativity">Enforce non-negative weights.</param>
+        /// <param name="l1Regularization">Weight of L1 regularization term.</param>
+        /// <param name="l2Regularization">Weight of L2 regularization term.</param>
+        /// <param name="iterationsToRemember">Memory size for <see cref="Microsoft.ML.Trainers.LogisticRegression"/>: the number of past positions and gradients L-BFGS remembers. Low=faster, less accurate.</param>
         /// <param name="optimizationTolerance">Threshold for optimizer convergence.</param>
         /// <param name="onFit">A delegate that is called every time the
         /// <see cref="Estimator{TInShape, TOutShape, TTransformer}.Fit(DataView{TInShape})"/> method is called on the
@@ -37,20 +37,20 @@ public static (Scalar<float> score, Scalar<float> probability, Scalar<bool> pred
             Scalar<bool> label,
             Vector<float> features,
             Scalar<float> weights = null,
-            float l1Weight = Options.Defaults.L1Weight,
-            float l2Weight = Options.Defaults.L2Weight,
-            float optimizationTolerance = Options.Defaults.OptTol,
-            int memorySize = Options.Defaults.MemorySize,
-            bool enoforceNoNegativity = Options.Defaults.EnforceNonNegativity,
+            float l1Regularization = Options.Defaults.L1Regularization,
+            float l2Regularization = Options.Defaults.L2Regularization,
+            float optimizationTolerance = Options.Defaults.OptimizationTolerance,
+            int iterationsToRemember = Options.Defaults.IterationsToRemember,
+            bool enforceNonNegativity = Options.Defaults.EnforceNonNegativity,
             Action<CalibratedModelParametersBase<LinearBinaryModelParameters,PlattCalibrator>> onFit = null)
         {
-            LbfgsStaticUtils.ValidateParams(label, features, weights, l1Weight, l2Weight, optimizationTolerance, memorySize, enoforceNoNegativity, onFit);
+            LbfgsStaticUtils.ValidateParams(label, features, weights, l1Regularization, l2Regularization, optimizationTolerance, iterationsToRemember, enforceNonNegativity, onFit);
 
             var rec = new TrainerEstimatorReconciler.BinaryClassifier(
                 (env, labelName, featuresName, weightsName) =>
                 {
                     var trainer = new LogisticRegression(env, labelName, featuresName, weightsName,
-                        l1Weight, l2Weight, optimizationTolerance, memorySize, enoforceNoNegativity);
+                        l1Regularization, l2Regularization, optimizationTolerance, iterationsToRemember, enforceNonNegativity);
 
                     if (onFit != null)
                         return trainer.WithOnFitDelegate(trans => onFit(trans.Model));
@@ -118,10 +118,10 @@ public static class LbfgsRegressionExtensions
         /// <param name="label">The label, or dependent variable.</param>
         /// <param name="features">The features, or independent variables.</param>
         /// <param name="weights">The optional example weights.</param>
-        /// <param name="enoforceNoNegativity">Enforce non-negative weights.</param>
-        /// <param name="l1Weight">Weight of L1 regularization term.</param>
-        /// <param name="l2Weight">Weight of L2 regularization term.</param>
-        /// <param name="memorySize">Memory size for <see cref="Microsoft.ML.Trainers.LogisticRegression"/>. Low=faster, less accurate.</param>
+        /// <param name="enforceNonNegativity">Enforce non-negative weights.</param>
+        /// <param name="l1Regularization">Weight of L1 regularization term.</param>
+        /// <param name="l2Regularization">Weight of L2 regularization term.</param>
+        /// <param name="iterationsToRemember">Memory size for <see cref="Microsoft.ML.Trainers.LogisticRegression"/>: the number of past positions and gradients L-BFGS remembers. Low=faster, less accurate.</param>
         /// <param name="optimizationTolerance">Threshold for optimizer convergence.</param>
         /// <param name="onFit">A delegate that is called every time the
         /// <see cref="Estimator{TInShape, TOutShape, TTransformer}.Fit(DataView{TInShape})"/> method is called on the
@@ -133,20 +133,20 @@ public static Scalar<float> PoissonRegression(this RegressionCatalog.RegressionT
             Scalar<float> label,
             Vector<float> features,
             Scalar<float> weights = null,
-            float l1Weight = Options.Defaults.L1Weight,
-            float l2Weight = Options.Defaults.L2Weight,
-            float optimizationTolerance = Options.Defaults.OptTol,
-            int memorySize = Options.Defaults.MemorySize,
-            bool enoforceNoNegativity = Options.Defaults.EnforceNonNegativity,
+            float l1Regularization = Options.Defaults.L1Regularization,
+            float l2Regularization = Options.Defaults.L2Regularization,
+            float optimizationTolerance = Options.Defaults.OptimizationTolerance,
+            int iterationsToRemember = Options.Defaults.IterationsToRemember,
+            bool enforceNonNegativity = Options.Defaults.EnforceNonNegativity,
             Action<PoissonRegressionModelParameters> onFit = null)
         {
-            LbfgsStaticUtils.ValidateParams(label, features, weights, l1Weight, l2Weight, optimizationTolerance, memorySize, enoforceNoNegativity, onFit);
+            LbfgsStaticUtils.ValidateParams(label, features, weights, l1Regularization, l2Regularization, optimizationTolerance, iterationsToRemember, enforceNonNegativity, onFit);
 
             var rec = new TrainerEstimatorReconciler.Regression(
                 (env, labelName, featuresName, weightsName) =>
                 {
                     var trainer = new PoissonRegression(env, labelName, featuresName, weightsName,
-                        l1Weight, l2Weight, optimizationTolerance, memorySize, enoforceNoNegativity);
+                        l1Regularization, l2Regularization, optimizationTolerance, iterationsToRemember, enforceNonNegativity);
 
                     if (onFit != null)
                         return trainer.WithOnFitDelegate(trans => onFit(trans.Model));
@@ -214,10 +214,10 @@ public static class LbfgsMulticlassExtensions
         /// <param name="label">The label, or dependent variable.</param>
         /// <param name="features">The features, or independent variables.</param>
         /// <param name="weights">The optional example weights.</param>
-        /// <param name="enoforceNoNegativity">Enforce non-negative weights.</param>
-        /// <param name="l1Weight">Weight of L1 regularization term.</param>
-        /// <param name="l2Weight">Weight of L2 regularization term.</param>
-        /// <param name="memorySize">Memory size for <see cref="Microsoft.ML.Trainers.LogisticRegression"/>. Low=faster, less accurate.</param>
+        /// <param name="enforceNonNegativity">Enforce non-negative weights.</param>
+        /// <param name="l1Regularization">Weight of L1 regularization term.</param>
+        /// <param name="l2Regularization">Weight of L2 regularization term.</param>
+        /// <param name="iterationsToRemember">Memory size for <see cref="Microsoft.ML.Trainers.LogisticRegression"/>: the number of past positions and gradients L-BFGS remembers. Low=faster, less accurate.</param>
         /// <param name="optimizationTolerance">Threshold for optimizer convergence.</param>
         /// <param name="onFit">A delegate that is called every time the
         /// <see cref="Estimator{TInShape, TOutShape, TTransformer}.Fit(DataView{TInShape})"/> method is called on the
@@ -230,20 +230,20 @@ public static (Vector<float> score, Key<uint, TVal> predictedLabel)
             Key<uint, TVal> label,
             Vector<float> features,
             Scalar<float> weights = null,
-            float l1Weight = Options.Defaults.L1Weight,
-            float l2Weight = Options.Defaults.L2Weight,
-            float optimizationTolerance = Options.Defaults.OptTol,
-            int memorySize = Options.Defaults.MemorySize,
-            bool enoforceNoNegativity = Options.Defaults.EnforceNonNegativity,
+            float l1Regularization = Options.Defaults.L1Regularization,
+            float l2Regularization = Options.Defaults.L2Regularization,
+            float optimizationTolerance = Options.Defaults.OptimizationTolerance,
+            int iterationsToRemember = Options.Defaults.IterationsToRemember,
+            bool enforceNonNegativity = Options.Defaults.EnforceNonNegativity,
             Action<MulticlassLogisticRegressionModelParameters> onFit = null)
         {
-            LbfgsStaticUtils.ValidateParams(label, features, weights, l1Weight, l2Weight, optimizationTolerance, memorySize, enoforceNoNegativity, onFit);
+            LbfgsStaticUtils.ValidateParams(label, features, weights, l1Regularization, l2Regularization, optimizationTolerance, iterationsToRemember, enforceNonNegativity, onFit);
 
             var rec = new TrainerEstimatorReconciler.MulticlassClassifier<TVal>(
                 (env, labelName, featuresName, weightsName) =>
                 {
                     var trainer = new MulticlassLogisticRegression(env, labelName, featuresName, weightsName,
-                         l1Weight, l2Weight, optimizationTolerance, memorySize, enoforceNoNegativity);
+                         l1Regularization, l2Regularization, optimizationTolerance, iterationsToRemember, enforceNonNegativity);
 
                     if (onFit != null)
                         return trainer.WithOnFitDelegate(trans => onFit(trans.Model));
@@ -303,19 +303,19 @@ internal static class LbfgsStaticUtils
         internal static void ValidateParams(PipelineColumn label,
             Vector<float> features,
             Scalar<float> weights = null,
-            float l1Weight = Options.Defaults.L1Weight,
-            float l2Weight = Options.Defaults.L2Weight,
-            float optimizationTolerance = Options.Defaults.OptTol,
-            int memorySize = Options.Defaults.MemorySize,
-            bool enoforceNoNegativity = Options.Defaults.EnforceNonNegativity,
+            float l1Regularization = Options.Defaults.L1Regularization,
+            float l2Regularization = Options.Defaults.L2Regularization,
+            float optimizationTolerance = Options.Defaults.OptimizationTolerance,
+            int iterationsToRemember = Options.Defaults.IterationsToRemember,
+            bool enforceNonNegativity = Options.Defaults.EnforceNonNegativity,
             Delegate onFit = null)
         {
             Contracts.CheckValue(label, nameof(label));
             Contracts.CheckValue(features, nameof(features));
-            Contracts.CheckParam(l2Weight >= 0, nameof(l2Weight), "Must be non-negative");
-            Contracts.CheckParam(l1Weight >= 0, nameof(l1Weight), "Must be non-negative");
+            Contracts.CheckParam(l2Regularization >= 0, nameof(l2Regularization), "Must be non-negative");
+            Contracts.CheckParam(l1Regularization >= 0, nameof(l1Regularization), "Must be non-negative");
             Contracts.CheckParam(optimizationTolerance > 0, nameof(optimizationTolerance), "Must be positive");
-            Contracts.CheckParam(memorySize > 0, nameof(memorySize), "Must be positive");
+            Contracts.CheckParam(iterationsToRemember > 0, nameof(iterationsToRemember), "Must be positive");
             Contracts.CheckValueOrNull(onFit);
         }
     }
diff --git a/test/BaselineOutput/Common/EntryPoints/core_manifest.json b/test/BaselineOutput/Common/EntryPoints/core_manifest.json
index d3e0180dbd..0f99642c54 100644
--- a/test/BaselineOutput/Common/EntryPoints/core_manifest.json
+++ b/test/BaselineOutput/Common/EntryPoints/core_manifest.json
@@ -13495,11 +13495,12 @@
           "Default": "Auto"
         },
         {
-          "Name": "ShowTrainingStats",
+          "Name": "ShowTrainingStatistics",
           "Type": "Bool",
           "Desc": "Show statistics of training examples.",
           "Aliases": [
-            "stat"
+            "stat",
+            "ShowTrainingStats"
           ],
           "Required": false,
           "SortOrder": 50.0,
@@ -13507,11 +13508,12 @@
           "Default": false
         },
         {
-          "Name": "L2Weight",
+          "Name": "L2Regularization",
           "Type": "Float",
           "Desc": "L2 regularization weight",
           "Aliases": [
-            "l2"
+            "l2",
+            "L2Weight"
           ],
           "Required": false,
           "SortOrder": 50.0,
@@ -13525,11 +13527,12 @@
           }
         },
         {
-          "Name": "L1Weight",
+          "Name": "L1Regularization",
           "Type": "Float",
           "Desc": "L1 regularization weight",
           "Aliases": [
-            "l1"
+            "l1",
+            "L1Weight"
           ],
           "Required": false,
           "SortOrder": 50.0,
@@ -13543,11 +13546,12 @@
           }
         },
         {
-          "Name": "OptTol",
+          "Name": "OptmizationTolerance",
           "Type": "Float",
           "Desc": "Tolerance parameter for optimization convergence. Low = slower, more accurate",
           "Aliases": [
-            "ot"
+            "ot",
+            "OptTol"
           ],
           "Required": false,
           "SortOrder": 50.0,
@@ -13562,11 +13566,12 @@
           }
         },
         {
-          "Name": "MemorySize",
+          "Name": "IterationsToRemember",
           "Type": "Int",
           "Desc": "Memory size for L-BFGS. Low=faster, less accurate",
           "Aliases": [
-            "m"
+            "m",
+            "MemorySize"
           ],
           "Required": false,
           "SortOrder": 50.0,
@@ -13594,11 +13599,12 @@
           "Default": false
         },
         {
-          "Name": "InitWtsDiameter",
+          "Name": "InitialWeightsDiameter",
           "Type": "Float",
           "Desc": "Init weights diameter",
           "Aliases": [
-            "initwts"
+            "initwts",
+            "InitWtsDiameter"
           ],
           "Required": false,
           "SortOrder": 140.0,
@@ -13612,11 +13618,12 @@
           }
         },
         {
-          "Name": "MaxIterations",
+          "Name": "NumberOfIterations",
           "Type": "Int",
           "Desc": "Maximum iterations.",
           "Aliases": [
-            "maxiter"
+            "maxiter",
+            "MaxIterations"
           ],
           "Required": false,
           "SortOrder": 150.0,
@@ -13629,11 +13636,12 @@
           }
         },
         {
-          "Name": "SgdInitializationTolerance",
+          "Name": "StochasticGradientDescentInitilaizationTolerance",
           "Type": "Float",
           "Desc": "Run SGD to initialize LR weights, converging to this tolerance",
           "Aliases": [
-            "sgd"
+            "sgd",
+            "SgdInitializationTolerance"
           ],
           "Required": false,
           "SortOrder": 150.0,
@@ -13665,11 +13673,12 @@
           "Default": true
         },
         {
-          "Name": "NumThreads",
+          "Name": "NumberOfThreads",
           "Type": "Int",
           "Desc": "Number of threads",
           "Aliases": [
-            "nt"
+            "nt",
+            "NumThreads"
           ],
           "Required": false,
           "SortOrder": 150.0,
@@ -13806,11 +13815,12 @@
           "Default": "Auto"
         },
         {
-          "Name": "ShowTrainingStats",
+          "Name": "ShowTrainingStatistics",
           "Type": "Bool",
           "Desc": "Show statistics of training examples.",
           "Aliases": [
-            "stat"
+            "stat",
+            "ShowTrainingStats"
           ],
           "Required": false,
           "SortOrder": 50.0,
@@ -13818,11 +13828,12 @@
           "Default": false
         },
         {
-          "Name": "L2Weight",
+          "Name": "L2Regularization",
           "Type": "Float",
           "Desc": "L2 regularization weight",
           "Aliases": [
-            "l2"
+            "l2",
+            "L2Weight"
           ],
           "Required": false,
           "SortOrder": 50.0,
@@ -13836,11 +13847,12 @@
           }
         },
         {
-          "Name": "L1Weight",
+          "Name": "L1Regularization",
           "Type": "Float",
           "Desc": "L1 regularization weight",
           "Aliases": [
-            "l1"
+            "l1",
+            "L1Weight"
           ],
           "Required": false,
           "SortOrder": 50.0,
@@ -13854,11 +13866,12 @@
           }
         },
         {
-          "Name": "OptTol",
+          "Name": "OptmizationTolerance",
           "Type": "Float",
           "Desc": "Tolerance parameter for optimization convergence. Low = slower, more accurate",
           "Aliases": [
-            "ot"
+            "ot",
+            "OptTol"
           ],
           "Required": false,
           "SortOrder": 50.0,
@@ -13873,11 +13886,12 @@
           }
         },
         {
-          "Name": "MemorySize",
+          "Name": "IterationsToRemember",
           "Type": "Int",
           "Desc": "Memory size for L-BFGS. Low=faster, less accurate",
           "Aliases": [
-            "m"
+            "m",
+            "MemorySize"
           ],
           "Required": false,
           "SortOrder": 50.0,
@@ -13905,11 +13919,12 @@
           "Default": false
         },
         {
-          "Name": "InitWtsDiameter",
+          "Name": "InitialWeightsDiameter",
           "Type": "Float",
           "Desc": "Init weights diameter",
           "Aliases": [
-            "initwts"
+            "initwts",
+            "InitWtsDiameter"
           ],
           "Required": false,
           "SortOrder": 140.0,
@@ -13923,11 +13938,12 @@
           }
         },
         {
-          "Name": "MaxIterations",
+          "Name": "NumberOfIterations",
           "Type": "Int",
           "Desc": "Maximum iterations.",
           "Aliases": [
-            "maxiter"
+            "maxiter",
+            "MaxIterations"
           ],
           "Required": false,
           "SortOrder": 150.0,
@@ -13940,11 +13956,12 @@
           }
         },
         {
-          "Name": "SgdInitializationTolerance",
+          "Name": "StochasticGradientDescentInitilaizationTolerance",
           "Type": "Float",
           "Desc": "Run SGD to initialize LR weights, converging to this tolerance",
           "Aliases": [
-            "sgd"
+            "sgd",
+            "SgdInitializationTolerance"
           ],
           "Required": false,
           "SortOrder": 150.0,
@@ -13976,11 +13993,12 @@
           "Default": true
         },
         {
-          "Name": "NumThreads",
+          "Name": "NumberOfThreads",
           "Type": "Int",
           "Desc": "Number of threads",
           "Aliases": [
-            "nt"
+            "nt",
+            "NumThreads"
           ],
           "Required": false,
           "SortOrder": 150.0,
@@ -14835,11 +14853,12 @@
           "Default": "Auto"
         },
         {
-          "Name": "L2Weight",
+          "Name": "L2Regularization",
           "Type": "Float",
           "Desc": "L2 regularization weight",
           "Aliases": [
-            "l2"
+            "l2",
+            "L2Weight"
           ],
           "Required": false,
           "SortOrder": 50.0,
@@ -14853,11 +14872,12 @@
           }
         },
         {
-          "Name": "L1Weight",
+          "Name": "L1Regularization",
           "Type": "Float",
           "Desc": "L1 regularization weight",
           "Aliases": [
-            "l1"
+            "l1",
+            "L1Weight"
           ],
           "Required": false,
           "SortOrder": 50.0,
@@ -14871,11 +14891,12 @@
           }
         },
         {
-          "Name": "OptTol",
+          "Name": "OptmizationTolerance",
           "Type": "Float",
           "Desc": "Tolerance parameter for optimization convergence. Low = slower, more accurate",
           "Aliases": [
-            "ot"
+            "ot",
+            "OptTol"
           ],
           "Required": false,
           "SortOrder": 50.0,
@@ -14890,11 +14911,12 @@
           }
         },
         {
-          "Name": "MemorySize",
+          "Name": "IterationsToRemember",
           "Type": "Int",
           "Desc": "Memory size for L-BFGS. Low=faster, less accurate",
           "Aliases": [
-            "m"
+            "m",
+            "MemorySize"
           ],
           "Required": false,
           "SortOrder": 50.0,
@@ -14922,11 +14944,12 @@
           "Default": false
         },
         {
-          "Name": "InitWtsDiameter",
+          "Name": "InitialWeightsDiameter",
           "Type": "Float",
           "Desc": "Init weights diameter",
           "Aliases": [
-            "initwts"
+            "initwts",
+            "InitWtsDiameter"
           ],
           "Required": false,
           "SortOrder": 140.0,
@@ -14940,11 +14963,12 @@
           }
         },
         {
-          "Name": "MaxIterations",
+          "Name": "NumberOfIterations",
           "Type": "Int",
           "Desc": "Maximum iterations.",
           "Aliases": [
-            "maxiter"
+            "maxiter",
+            "MaxIterations"
           ],
           "Required": false,
           "SortOrder": 150.0,
@@ -14957,11 +14981,12 @@
           }
         },
         {
-          "Name": "SgdInitializationTolerance",
+          "Name": "StochasticGradientDescentInitilaizationTolerance",
           "Type": "Float",
           "Desc": "Run SGD to initialize LR weights, converging to this tolerance",
           "Aliases": [
-            "sgd"
+            "sgd",
+            "SgdInitializationTolerance"
           ],
           "Required": false,
           "SortOrder": 150.0,
@@ -14993,11 +15018,12 @@
           "Default": true
         },
         {
-          "Name": "NumThreads",
+          "Name": "NumberOfThreads",
           "Type": "Int",
           "Desc": "Number of threads",
           "Aliases": [
-            "nt"
+            "nt",
+            "NumThreads"
           ],
           "Required": false,
           "SortOrder": 150.0,
diff --git a/test/Microsoft.ML.Benchmarks/KMeansAndLogisticRegressionBench.cs b/test/Microsoft.ML.Benchmarks/KMeansAndLogisticRegressionBench.cs
index fd806a5d0b..f57247d65e 100644
--- a/test/Microsoft.ML.Benchmarks/KMeansAndLogisticRegressionBench.cs
+++ b/test/Microsoft.ML.Benchmarks/KMeansAndLogisticRegressionBench.cs
@@ -40,7 +40,7 @@ public CalibratedModelParametersBase<LinearBinaryModelParameters, PlattCalibrato
                 .Append(ml.Clustering.Trainers.KMeans("Features"))
                 .Append(ml.Transforms.Concatenate("Features", "Features", "Score"))
                 .Append(ml.BinaryClassification.Trainers.LogisticRegression(
-                    new LogisticRegression.Options { EnforceNonNegativity = true, OptTol = 1e-3f, }));
+                    new LogisticRegression.Options { EnforceNonNegativity = true, OptmizationTolerance = 1e-3f, }));
 
             var model = estimatorPipeline.Fit(input);
             // Return the last model in the chain.
diff --git a/test/Microsoft.ML.Core.Tests/UnitTests/TestEntryPoints.cs b/test/Microsoft.ML.Core.Tests/UnitTests/TestEntryPoints.cs
index b32708754c..1636bda966 100644
--- a/test/Microsoft.ML.Core.Tests/UnitTests/TestEntryPoints.cs
+++ b/test/Microsoft.ML.Core.Tests/UnitTests/TestEntryPoints.cs
@@ -424,8 +424,8 @@ public void EntryPointCreateEnsemble()
                 var lrInput = new LogisticRegression.Options
                 {
                     TrainingData = data,
-                    L1Weight = (Single)0.1 * i,
-                    L2Weight = (Single)0.01 * (1 + i),
+                    L1Regularization = (Single)0.1 * i,
+                    L2Regularization = (Single)0.01 * (1 + i),
                     NormalizeFeatures = NormalizeOption.No
                 };
                 predictorModels[i] = LogisticRegression.TrainBinary(Env, lrInput).PredictorModel;
@@ -728,8 +728,8 @@ public void EntryPointPipelineEnsemble()
                 var lrInput = new LogisticRegression.Options
                 {
                     TrainingData = data,
-                    L1Weight = (Single)0.1 * i,
-                    L2Weight = (Single)0.01 * (1 + i),
+                    L1Regularization = (Single)0.1 * i,
+                    L2Regularization = (Single)0.01 * (1 + i),
                     NormalizeFeatures = NormalizeOption.Yes
                 };
                 predictorModels[i] = LogisticRegression.TrainBinary(Env, lrInput).PredictorModel;
@@ -989,8 +989,8 @@ public void EntryPointPipelineEnsembleText()
                 var lrInput = new LogisticRegression.Options
                 {
                     TrainingData = data,
-                    L1Weight = (Single)0.1 * i,
-                    L2Weight = (Single)0.01 * (1 + i),
+                    L1Regularization = (Single)0.1 * i,
+                    L2Regularization = (Single)0.01 * (1 + i),
                     NormalizeFeatures = NormalizeOption.Yes
                 };
                 predictorModels[i] = LogisticRegression.TrainBinary(Env, lrInput).PredictorModel;
@@ -1323,9 +1323,9 @@ public void EntryPointPipelineEnsembleGetSummary()
                     {
                         TrainingData = data,
                         NormalizeFeatures = NormalizeOption.Yes,
-                        NumThreads = 1,
-                        ShowTrainingStats = true,
-                        StdComputer = new ComputeLRTrainingStdThroughMkl()
+                        NumberOfThreads = 1,
+                        ShowTrainingStatistics = true,
+                        ComputeStandardDeviation = new ComputeLRTrainingStdThroughMkl()
                     };
                     predictorModels[i] = LogisticRegression.TrainBinary(Env, lrInput).PredictorModel;
                     var transformModel = new TransformModelImpl(Env, data, splitOutput.TrainData[i]);
@@ -3320,9 +3320,9 @@ public void EntryPointLinearPredictorSummary()
             {
                 TrainingData = dataView,
                 NormalizeFeatures = NormalizeOption.Yes,
-                NumThreads = 1,
-                ShowTrainingStats = true,
-                StdComputer = new ComputeLRTrainingStdThroughMkl()
+                NumberOfThreads = 1,
+                ShowTrainingStatistics = true,
+                ComputeStandardDeviation = new ComputeLRTrainingStdThroughMkl()
             };
             var model = LogisticRegression.TrainBinary(Env, lrInput).PredictorModel;
 
@@ -3330,8 +3330,8 @@ public void EntryPointLinearPredictorSummary()
             {
                 TrainingData = dataView,
                 NormalizeFeatures = NormalizeOption.Yes,
-                NumThreads = 1,
-                ShowTrainingStats = true
+                NumberOfThreads = 1,
+                ShowTrainingStatistics = true
             };
             var mcModel = LogisticRegression.TrainMultiClass(Env, mcLrInput).PredictorModel;
 
@@ -5621,4 +5621,4 @@ public void LoadEntryPointModel()
             }
         }
     }
-}
\ No newline at end of file
+}
diff --git a/test/Microsoft.ML.Functional.Tests/Evaluation.cs b/test/Microsoft.ML.Functional.Tests/Evaluation.cs
index fd16635d6e..df6448676a 100644
--- a/test/Microsoft.ML.Functional.Tests/Evaluation.cs
+++ b/test/Microsoft.ML.Functional.Tests/Evaluation.cs
@@ -94,7 +94,7 @@ public void TrainAndEvaluateBinaryClassificationWithCalibration()
             var pipeline = mlContext.Transforms.Text.FeaturizeText("Features", "SentimentText")
                 .AppendCacheCheckpoint(mlContext)
                 .Append(mlContext.BinaryClassification.Trainers.LogisticRegression(
-                    new LogisticRegression.Options { NumThreads = 1 }));
+                    new LogisticRegression.Options { NumberOfThreads = 1 }));
 
             // Train the model.
             var model = pipeline.Fit(data);
@@ -279,7 +279,7 @@ public void TrainAndEvaluateWithPrecisionRecallCurves()
             var pipeline = mlContext.Transforms.Text.FeaturizeText("Features", "SentimentText")
                 .AppendCacheCheckpoint(mlContext)
                 .Append(mlContext.BinaryClassification.Trainers.LogisticRegression(
-                    new LogisticRegression.Options { NumThreads = 1 }));
+                    new LogisticRegression.Options { NumberOfThreads = 1 }));
 
             // Train the model.
             var model = pipeline.Fit(data);
diff --git a/test/Microsoft.ML.Predictor.Tests/TestPredictors.cs b/test/Microsoft.ML.Predictor.Tests/TestPredictors.cs
index 796a32e42f..97e229965d 100644
--- a/test/Microsoft.ML.Predictor.Tests/TestPredictors.cs
+++ b/test/Microsoft.ML.Predictor.Tests/TestPredictors.cs
@@ -748,7 +748,7 @@ public void TestEnsembleCombiner()
                 {
                     FeatureColumnName = "Features",
                     LabelColumnName = DefaultColumnNames.Label,
-                    OptTol = 10e-4F,
+                    OptmizationTolerance = 10e-4F,
                     TrainingData = dataView,
                     NormalizeFeatures = NormalizeOption.No
                 }).PredictorModel,
@@ -756,7 +756,7 @@ public void TestEnsembleCombiner()
                 {
                     FeatureColumnName = "Features",
                     LabelColumnName = DefaultColumnNames.Label,
-                    OptTol = 10e-3F,
+                    OptmizationTolerance = 10e-3F,
                     TrainingData = dataView,
                     NormalizeFeatures = NormalizeOption.No
                 }).PredictorModel
@@ -785,7 +785,7 @@ public void TestMultiClassEnsembleCombiner()
                 {
                     FeatureColumnName = "Features",
                     LabelColumnName = DefaultColumnNames.Label,
-                    OptTol = 10e-4F,
+                    OptmizationTolerance = 10e-4F,
                     TrainingData = dataView,
                     NormalizeFeatures = NormalizeOption.No
                 }).PredictorModel,
@@ -793,7 +793,7 @@ public void TestMultiClassEnsembleCombiner()
                 {
                     FeatureColumnName = "Features",
                     LabelColumnName = DefaultColumnNames.Label,
-                    OptTol = 10e-3F,
+                    OptmizationTolerance = 10e-3F,
                     TrainingData = dataView,
                     NormalizeFeatures = NormalizeOption.No
                 }).PredictorModel
diff --git a/test/Microsoft.ML.StaticPipelineTesting/Training.cs b/test/Microsoft.ML.StaticPipelineTesting/Training.cs
index 85aa462334..0676656da5 100644
--- a/test/Microsoft.ML.StaticPipelineTesting/Training.cs
+++ b/test/Microsoft.ML.StaticPipelineTesting/Training.cs
@@ -618,7 +618,7 @@ public void PoissonRegression()
 
             var est = reader.MakeNewEstimator()
                 .Append(r => (r.label, score: catalog.Trainers.PoissonRegression(r.label, r.features, null,
-                                new PoissonRegression.Options { L2Weight = 2, EnforceNonNegativity = true, NumThreads = 1 },
+                                new PoissonRegression.Options { L2Regularization = 2, EnforceNonNegativity = true, NumberOfThreads = 1 },
                                 onFit: (p) => { pred = p; })));
 
             var pipe = reader.Append(est);
@@ -655,7 +655,7 @@ public void LogisticRegressionBinaryClassification()
 
             var est = reader.MakeNewEstimator()
                 .Append(r => (r.label, preds: catalog.Trainers.LogisticRegressionBinaryClassifier(r.label, r.features, null,
-                                    new LogisticRegression.Options { L1Weight = 10, NumThreads = 1 }, onFit: (p) => { pred = p; })));
+                                    new LogisticRegression.Options { L1Regularization = 10, NumberOfThreads = 1 }, onFit: (p) => { pred = p; })));
 
             var pipe = reader.Append(est);
 
@@ -695,7 +695,7 @@ public void MulticlassLogisticRegression()
                     r.label,
                     r.features,
                     null,
-                    new MulticlassLogisticRegression.Options { NumThreads = 1 },
+                    new MulticlassLogisticRegression.Options { NumberOfThreads = 1 },
                     onFit: p => pred = p)));
 
             var pipe = reader.Append(est);
diff --git a/test/Microsoft.ML.Tests/FeatureContributionTests.cs b/test/Microsoft.ML.Tests/FeatureContributionTests.cs
index 2db4dcb93c..8d319f3d2b 100644
--- a/test/Microsoft.ML.Tests/FeatureContributionTests.cs
+++ b/test/Microsoft.ML.Tests/FeatureContributionTests.cs
@@ -89,7 +89,7 @@ public void TestOnlineGradientDescentRegression()
         public void TestPoissonRegression()
         {
             TestFeatureContribution(ML.Regression.Trainers.PoissonRegression(
-                new PoissonRegression.Options { NumThreads = 1 }), GetSparseDataset(numberOfInstances: 100), "PoissonRegression");
+                new PoissonRegression.Options { NumberOfThreads = 1 }), GetSparseDataset(numberOfInstances: 100), "PoissonRegression");
         }
 
         [Fact]
diff --git a/test/Microsoft.ML.Tests/OnnxConversionTest.cs b/test/Microsoft.ML.Tests/OnnxConversionTest.cs
index 492a71a501..4e83b3aad4 100644
--- a/test/Microsoft.ML.Tests/OnnxConversionTest.cs
+++ b/test/Microsoft.ML.Tests/OnnxConversionTest.cs
@@ -376,7 +376,7 @@ public void MulticlassLogisticRegressionOnnxConversionTest()
 
             var pipeline = mlContext.Transforms.Normalize("Features").
                 Append(mlContext.Transforms.Conversion.MapValueToKey("Label")).
-                Append(mlContext.MulticlassClassification.Trainers.LogisticRegression(new MulticlassLogisticRegression.Options() { UseThreads = false }));
+                Append(mlContext.MulticlassClassification.Trainers.LogisticRegression(new MulticlassLogisticRegression.Options() { NumberOfThreads = 1 }));
 
             var model = pipeline.Fit(data);
             var transformedData = model.Transform(data);
diff --git a/test/Microsoft.ML.Tests/PermutationFeatureImportanceTests.cs b/test/Microsoft.ML.Tests/PermutationFeatureImportanceTests.cs
index fee083d368..26af1402a3 100644
--- a/test/Microsoft.ML.Tests/PermutationFeatureImportanceTests.cs
+++ b/test/Microsoft.ML.Tests/PermutationFeatureImportanceTests.cs
@@ -153,7 +153,7 @@ public void TestPfiBinaryClassificationOnDenseFeatures()
         {
             var data = GetDenseDataset(TaskType.BinaryClassification);
             var model = ML.BinaryClassification.Trainers.LogisticRegression(
-                new LogisticRegression.Options { NumThreads = 1 }).Fit(data);
+                new LogisticRegression.Options { NumberOfThreads = 1 }).Fit(data);
             var pfi = ML.BinaryClassification.PermutationFeatureImportance(model, data);
 
             // Pfi Indices:
@@ -191,7 +191,7 @@ public void TestPfiBinaryClassificationOnSparseFeatures()
         {
             var data = GetSparseDataset(TaskType.BinaryClassification);
             var model = ML.BinaryClassification.Trainers.LogisticRegression(
-                new LogisticRegression.Options { NumThreads = 1 }).Fit(data);
+                new LogisticRegression.Options { NumberOfThreads = 1 }).Fit(data);
             var pfi = ML.BinaryClassification.PermutationFeatureImportance(model, data);
 
             // Pfi Indices:
@@ -270,7 +270,7 @@ public void TestPfiMulticlassClassificationOnSparseFeatures()
         {
             var data = GetSparseDataset(TaskType.MulticlassClassification);
             var model = ML.MulticlassClassification.Trainers.LogisticRegression(
-                new MulticlassLogisticRegression.Options { MaxIterations = 1000 }).Fit(data);
+                new MulticlassLogisticRegression.Options { NumberOfIterations = 1000 }).Fit(data);
             var pfi = ML.MulticlassClassification.PermutationFeatureImportance(model, data);
 
             // Pfi Indices:
diff --git a/test/Microsoft.ML.Tests/TrainerEstimators/LbfgsTests.cs b/test/Microsoft.ML.Tests/TrainerEstimators/LbfgsTests.cs
index 0bb2c6b064..0e63507710 100644
--- a/test/Microsoft.ML.Tests/TrainerEstimators/LbfgsTests.cs
+++ b/test/Microsoft.ML.Tests/TrainerEstimators/LbfgsTests.cs
@@ -57,7 +57,7 @@ public void TestLogisticRegressionNoStats()
         {
             (IEstimator<ITransformer> pipe, IDataView dataView) = GetBinaryClassificationPipeline();
 
-            pipe = pipe.Append(ML.BinaryClassification.Trainers.LogisticRegression(new LogisticRegression.Options { ShowTrainingStats = true }));
+            pipe = pipe.Append(ML.BinaryClassification.Trainers.LogisticRegression(new LogisticRegression.Options { ShowTrainingStatistics = true }));
             var transformerChain = pipe.Fit(dataView) as TransformerChain<BinaryPredictionTransformer<CalibratedModelParametersBase<LinearBinaryModelParameters, PlattCalibrator>>>;
 
             var linearModel = transformerChain.LastTransformer.Model.SubModel as LinearBinaryModelParameters;
@@ -76,8 +76,8 @@ public void TestLogisticRegressionWithStats()
             pipe = pipe.Append(ML.BinaryClassification.Trainers.LogisticRegression(
                 new LogisticRegression.Options
                 {
-                    ShowTrainingStats = true,
-                    StdComputer = new ComputeLRTrainingStdThroughMkl(),
+                    ShowTrainingStatistics = true,
+                    ComputeStandardDeviation = new ComputeLRTrainingStdThroughMkl(),
                 }));
 
             var transformer = pipe.Fit(dataView) as TransformerChain<BinaryPredictionTransformer<CalibratedModelParametersBase<LinearBinaryModelParameters, PlattCalibrator>>>;