Binary LR samples using T4 templates #3099

Merged: 2 commits, Mar 28, 2019

86 changes: 0 additions & 86 deletions docs/samples/Microsoft.ML.Samples/Dynamic/LogisticRegression.cs

This file was deleted.

@@ -0,0 +1,97 @@
using System;
using System.Collections.Generic;
using System.Linq;
using Microsoft.ML.Data;
<# if (TrainerOptions != null) { #>
<#=OptionsInclude#>
<# } #>

namespace Microsoft.ML.Samples.Dynamic.Trainers.BinaryClassification
{
    public static class <#=ClassName#>
    {<#=Comments#>
        public static void Example()
        {
            // Create a new context for ML.NET operations. It can be used for exception tracking and logging,
            // as a catalog of available operations, and as the source of randomness.
            // Setting the seed to a fixed number in this example makes outputs deterministic.
            var mlContext = new MLContext(seed: 0);

            // Create a list of training data points.
            var dataPoints = GenerateRandomDataPoints(1000);

            // Convert the list of data points to an IDataView object, which is consumable by the ML.NET API.
            var trainingData = mlContext.Data.LoadFromEnumerable(dataPoints);

<# if (TrainerOptions == null) { #>
            // Define the trainer.
            var pipeline = mlContext.BinaryClassification.Trainers.<#=Trainer#>();
<# } else { #>
            // Define trainer options.
            var options = new <#=TrainerOptions#>;

            // Define the trainer.
            var pipeline = mlContext.BinaryClassification.Trainers.<#=Trainer#>(options);
<# } #>

            // Train the model.
            var model = pipeline.Fit(trainingData);

            // Create testing data. Use a different random seed to make it different from the training data.
            var testData = mlContext.Data.LoadFromEnumerable(GenerateRandomDataPoints(500, seed: 123));

            // Run the model on the test data set.
            var transformedTestData = model.Transform(testData);

            // Convert the IDataView object to a list.
            var predictions = mlContext.Data.CreateEnumerable<Prediction>(transformedTestData, reuseRowObject: false).ToList();

            // Look at the first 5 predictions.
            foreach (var p in predictions.Take(5))
                Console.WriteLine($"Label: {p.Label}, Prediction: {p.PredictedLabel}");

            <#=ExpectedOutputPerInstance#>
<# string Evaluator = IsCalibrated ? "Evaluate" : "EvaluateNonCalibrated"; #>

            // Evaluate the overall metrics.
            var metrics = mlContext.BinaryClassification.<#=Evaluator#>(transformedTestData);
            SamplesUtils.ConsoleUtils.PrintMetrics(metrics);

            <#=ExpectedOutput#>
        }

        private static IEnumerable<DataPoint> GenerateRandomDataPoints(int count, int seed = 0)
        {
            var random = new Random(seed);
            float randomFloat() => (float)random.NextDouble();
            for (int i = 0; i < count; i++)
            {
                var label = randomFloat() > 0.5f;
                yield return new DataPoint
                {
                    Label = label,
                    // Create random features that are correlated with the label.
                    // For data points with a false label, the feature values are slightly increased by adding a constant.
                    Features = Enumerable.Repeat(label, 50).Select(x => x ? randomFloat() : randomFloat() + <#=DataSepValue#>).ToArray()
                };
            }
        }

        // Example with a label and 50 feature values. A data set is a collection of such examples.
        private class DataPoint
        {
            public bool Label { get; set; }
            [VectorType(50)]
            public float[] Features { get; set; }
        }

        // Class used to capture predictions.
        private class Prediction
        {
            // Original label.
            public bool Label { get; set; }
            // Predicted label from the trainer.
            public bool PredictedLabel { get; set; }
        }
    }
}
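The IsCalibrated flag in this template only selects which evaluator call is emitted. For a sample whose include file sets IsCalibrated = false, the generated code would evaluate with the non-calibrated overload instead; a minimal sketch of those generated lines (not part of this diff) is:

// Emitted when IsCalibrated is false: evaluate without the calibrated-probability metrics
// (log loss, log-loss reduction, and entropy require probability outputs).
var metrics = mlContext.BinaryClassification.EvaluateNonCalibrated(transformedTestData);
SamplesUtils.ConsoleUtils.PrintMetrics(metrics);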
@@ -1,5 +1,4 @@
<#@ include file="TreeSamplesTemplate.ttinclude"#>

<#+
string ClassName="FastForest";
string Trainer = "FastForest";
@@ -1,5 +1,4 @@
<#@ include file="TreeSamplesTemplate.ttinclude"#>

<#+
string ClassName="FastForestWithOptions";
string Trainer = "FastForest";
@@ -1,5 +1,4 @@
<#@ include file="TreeSamplesTemplate.ttinclude"#>

<#+
string ClassName="FastTree";
string Trainer = "FastTree";
@@ -1,5 +1,4 @@
<#@ include file="TreeSamplesTemplate.ttinclude"#>

<#+
string ClassName="FastTreeWithOptions";
string Trainer = "FastTree";
@@ -0,0 +1,100 @@
using System;
using System.Collections.Generic;
using System.Linq;
using Microsoft.ML.Data;

namespace Microsoft.ML.Samples.Dynamic.Trainers.BinaryClassification
{
    public static class LbfgsLogisticRegression
    {
        public static void Example()
        {
            // Create a new context for ML.NET operations. It can be used for exception tracking and logging,
            // as a catalog of available operations, and as the source of randomness.
            // Setting the seed to a fixed number in this example makes outputs deterministic.
            var mlContext = new MLContext(seed: 0);

            // Create a list of training data points.
            var dataPoints = GenerateRandomDataPoints(1000);

            // Convert the list of data points to an IDataView object, which is consumable by the ML.NET API.
            var trainingData = mlContext.Data.LoadFromEnumerable(dataPoints);

            // Define the trainer.
            var pipeline = mlContext.BinaryClassification.Trainers.LbfgsLogisticRegression();

            // Train the model.
            var model = pipeline.Fit(trainingData);

            // Create testing data. Use a different random seed to make it different from the training data.
            var testData = mlContext.Data.LoadFromEnumerable(GenerateRandomDataPoints(500, seed: 123));

            // Run the model on the test data set.
            var transformedTestData = model.Transform(testData);

            // Convert the IDataView object to a list.
            var predictions = mlContext.Data.CreateEnumerable<Prediction>(transformedTestData, reuseRowObject: false).ToList();

            // Look at the first 5 predictions.
            foreach (var p in predictions.Take(5))
                Console.WriteLine($"Label: {p.Label}, Prediction: {p.PredictedLabel}");

            // Expected output:
            // Label: True, Prediction: True
            // Label: False, Prediction: True
            // Label: True, Prediction: True
            // Label: True, Prediction: True
            // Label: False, Prediction: False

            // Evaluate the overall metrics.
            var metrics = mlContext.BinaryClassification.Evaluate(transformedTestData);
            SamplesUtils.ConsoleUtils.PrintMetrics(metrics);

            // Expected output:
            // Accuracy: 0.88
            // AUC: 0.96
            // F1 Score: 0.87
            // Negative Precision: 0.90
            // Negative Recall: 0.87
            // Positive Precision: 0.86
            // Positive Recall: 0.89
            // Log Loss: 0.38
            // Log Loss Reduction: 0.62
            // Entropy: 1.00
        }

        private static IEnumerable<DataPoint> GenerateRandomDataPoints(int count, int seed = 0)
        {
            var random = new Random(seed);
            float randomFloat() => (float)random.NextDouble();
            for (int i = 0; i < count; i++)
            {
                var label = randomFloat() > 0.5f;
                yield return new DataPoint
                {
                    Label = label,
                    // Create random features that are correlated with the label.
                    // For data points with a false label, the feature values are slightly increased by adding a constant.
                    Features = Enumerable.Repeat(label, 50).Select(x => x ? randomFloat() : randomFloat() + 0.1f).ToArray()
                };
            }
        }

        // Example with a label and 50 feature values. A data set is a collection of such examples.
        private class DataPoint
        {
            public bool Label { get; set; }
            [VectorType(50)]
            public float[] Features { get; set; }
        }

        // Class used to capture predictions.
        private class Prediction
        {
            // Original label.
            public bool Label { get; set; }
            // Predicted label from the trainer.
            public bool PredictedLabel { get; set; }
        }
    }
}
@@ -0,0 +1,30 @@
<#@ include file="BinaryClassification.ttinclude"#>
<#+
string ClassName="LbfgsLogisticRegression";
string Trainer = "LbfgsLogisticRegression";
string TrainerOptions = null;
bool IsCalibrated = true;

string DataSepValue = "0.1f";
string OptionsInclude = "";
string Comments= "";

string ExpectedOutputPerInstance= @"// Expected output:
// Label: True, Prediction: True
// Label: False, Prediction: True
// Label: True, Prediction: True
// Label: True, Prediction: True
// Label: False, Prediction: False";

string ExpectedOutput = @"// Expected output:
// Accuracy: 0.88
// AUC: 0.96
// F1 Score: 0.87
// Negative Precision: 0.90
// Negative Recall: 0.87
// Positive Precision: 0.86
// Positive Recall: 0.89
// Log Loss: 0.38
// Log Loss Reduction: 0.62
// Entropy: 1.00";
#>
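Because TrainerOptions is null here, the template's options branch is skipped and the parameterless LbfgsLogisticRegression() overload is generated, as seen in the .cs file above. A companion with-options include would set TrainerOptions and OptionsInclude so the template emits an options object and the extra using directive. The following is only a sketch of that shape; the class name, options type, property names, and values are illustrative assumptions, not taken from this diff:

<#@ include file="BinaryClassification.ttinclude"#>
<#+
string ClassName = "LbfgsLogisticRegressionWithOptions";
string Trainer = "LbfgsLogisticRegression";

// Assumed options type and settings; the real sample may use different ones.
string TrainerOptions = @"LbfgsLogisticRegressionBinaryTrainer.Options
        {
            MaximumNumberOfIterations = 100,
            OptimizationTolerance = 1e-8f
        }";

bool IsCalibrated = true;

string DataSepValue = "0.1f";
// Using directive emitted because TrainerOptions is not null (assumed namespace).
string OptionsInclude = "using Microsoft.ML.Trainers;";
string Comments = "";

// Expected-output strings are omitted in this sketch; a real include would list them.
string ExpectedOutputPerInstance = @"// Expected output omitted in this sketch.";
string ExpectedOutput = @"// Expected output omitted in this sketch.";
#>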