diff --git a/docs/code/MlNetCookBook.md b/docs/code/MlNetCookBook.md
index 716e8fac12..727ed415e2 100644
--- a/docs/code/MlNetCookBook.md
+++ b/docs/code/MlNetCookBook.md
@@ -383,19 +383,18 @@ var metrics = mlContext.Regression.Evaluate(model.Transform(testData), labelColu
 
 Assuming that the model metrics look good to you, it's time to 'operationalize' the model. This is where ML.NET really shines: the `model` object you just built is ready for immediate consumption, it will apply all the same steps that it has 'learned' during training, and it can be persisted and reused in different environments.
 
-Here's what you do to save the model to a file, and reload it (potentially in a different context).
+Here's what you do to save the model as well as its input schema to a file, and reload it (potentially in a different context).
 
 ```csharp
-using (var stream = File.Create(modelPath))
-{
-    mlContext.Model.Save(model, stream);
-}
+// Saving and loading happens to transformers. We save the input schema with this model.
+mlContext.Model.Save(model, trainData.Schema, modelPath);
 
 // Potentially, the lines below can be in a different process altogether.
-ITransformer loadedModel;
-using (var stream = File.OpenRead(modelPath))
-    loadedModel = mlContext.Model.Load(stream);
+// When you load the model, it's a non-specific ITransformer. We also recover
+// the original schema.
+ITransformer loadedModel = mlContext.Model.Load(modelPath, out var schema);
 ```
+
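+You can also save to and load from an open `Stream` using the corresponding overloads of `mlContext.Model.Save` and `mlContext.Model.Load`.
+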
 ## How do I use the model to make one prediction?
 
 Since any ML.NET model is a transformer, you can of course use `model.Transform` to apply the model to the 'data view' and obtain predictions this way. 
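+
+For a single example, though, a prediction engine built from the loaded model is usually more convenient than `Transform`. Below is a minimal sketch; `HousingData` and `HousingPrediction` are hypothetical input/output classes standing in for your own types, and `loadedModel` and `schema` are the values returned by `mlContext.Model.Load` above.
+
+```csharp
+// HousingData and HousingPrediction are hypothetical input/output classes; substitute your own types.
+var engine = mlContext.Model.CreatePredictionEngine<HousingData, HousingPrediction>(loadedModel, schema);
+
+// Score a single in-memory example.
+HousingPrediction prediction = engine.Predict(new HousingData { Size = 2.5f });
+```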
@@ -1018,7 +1017,5 @@ using (var fs = File.Create(modelPath))
 newContext.ComponentCatalog.RegisterAssembly(typeof(CustomMappings).Assembly);
 
 // Now we can load the model.
-ITransformer loadedModel;
-using (var fs = File.OpenRead(modelPath))
-    loadedModel = newContext.Model.Load(fs);
+ITransformer loadedModel = newContext.Model.Load(modelPath, out var schema);
 ```
diff --git a/docs/code/experimental/MlNetCookBookStaticApi.md b/docs/code/experimental/MlNetCookBookStaticApi.md
index fcdb2c45ae..086e3b8e3b 100644
--- a/docs/code/experimental/MlNetCookBookStaticApi.md
+++ b/docs/code/experimental/MlNetCookBookStaticApi.md
@@ -396,18 +396,13 @@ This is where ML.NET really shines: the `model` object you just built is ready f
 Here's what you do to save the model to a file, and reload it (potentially in a different context).
 
 ```csharp
-using (var stream = File.Create(modelPath))
-{
-    // Saving and loading happens to 'dynamic' models, so the static typing is lost in the process.
-    mlContext.Model.Save(model.AsDynamic, stream);
-}
+// Saving and loading happens to 'dynamic' models, so the static typing is lost in the process.
+mlContext.Model.Save(model.AsDynamic, trainData.AsDynamic.Schema, modelPath);
 
 // Potentially, the lines below can be in a different process altogether.
 
 // When you load the model, it's a 'dynamic' transformer. 
-ITransformer loadedModel;
-using (var stream = File.OpenRead(modelPath))
-    loadedModel = mlContext.Model.Load(stream);
+ITransformer loadedModel = mlContext.Model.Load(modelPath, out var schema);
 ```
 
 ## How do I use the model to make one prediction?
diff --git a/src/Microsoft.ML.Data/Model/ModelOperationsCatalog.cs b/src/Microsoft.ML.Data/Model/ModelOperationsCatalog.cs
index dc4cb551d0..311fad8888 100644
--- a/src/Microsoft.ML.Data/Model/ModelOperationsCatalog.cs
+++ b/src/Microsoft.ML.Data/Model/ModelOperationsCatalog.cs
@@ -29,63 +29,60 @@ internal ModelOperationsCatalog(IHostEnvironment env)
         }
 
         /// <summary>
-        /// Save the model to the stream.
+        /// Save a transformer model and the loader used to create its input data to the stream.
         /// </summary>
-        /// <param name="model">The trained model to be saved.</param>
+        /// <param name="model">The trained model to be saved. Note that this can be <see langword="null"/>, as a shorthand
+        /// for an empty transformer chain. Upon loading with <see cref="LoadWithDataLoader(Stream, out IDataLoader{IMultiStreamSource})"/>
+        /// the returned value will be an empty <see cref="TransformerChain{TLastTransformer}"/>.</param>
+        /// <param name="loader">The loader that was used to create data to train the model.</param>
         /// <param name="stream">A writeable, seekable stream to save to.</param>
-        public void Save<TSource>(IDataLoader<TSource> model, Stream stream)
+        public void Save<TSource>(ITransformer model, IDataLoader<TSource> loader, Stream stream)
         {
-            _env.CheckValue(model, nameof(model));
+            _env.CheckValue(loader, nameof(loader));
+            _env.CheckValueOrNull(model);
             _env.CheckValue(stream, nameof(stream));
 
+            // For the sake of consistency of this API specifically, we always wrap the transformer being saved
+            // in a single-element transformer chain.
+            var chainedModel = model == null ? null : new TransformerChain<ITransformer>(model);
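+            // A null model yields a null chain here; per the documented contract, that stands in for the empty transformer chain.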
+            var compositeLoader = new CompositeDataLoader<TSource, ITransformer>(loader, chainedModel);
+
             using (var rep = RepositoryWriter.CreateNew(stream))
             {
-                ModelSaveContext.SaveModel(rep, model, null);
+                ModelSaveContext.SaveModel(rep, compositeLoader, null);
                 rep.Commit();
             }
         }
 
-        /// <summary>
-        /// Save the model to the file.
-        /// </summary>
-        /// <param name="model">The trained model to be saved.</param>
-        /// <param name="filePath">Path where model should be saved.</param>
-        public void Save<TSource>(IDataLoader<TSource> model, string filePath)
-        {
-            using (var stream = File.Create(filePath))
-                Save(model, stream);
-        }
-
-        /// <summary>
-        /// Save a transformer model and the loader used to create its input data to the stream.
-        /// </summary>
-        /// <param name="loader">The loader that was used to create data to train the model</param>
-        /// <param name="model">The trained model to be saved</param>
-        /// <param name="stream">A writeable, seekable stream to save to.</param>
-        public void Save<TSource>(IDataLoader<TSource> loader, ITransformer model, Stream stream) =>
-            Save(new CompositeDataLoader<TSource, ITransformer>(loader, new TransformerChain<ITransformer>(model)), stream);
-
         /// <summary>
         /// Save a transformer model and the loader used to create its input data to the file.
         /// </summary>
-        /// <param name="loader">The loader that was used to create data to train the model</param>
-        /// <param name="model">The trained model to be saved</param>
+        /// <param name="model">The trained model to be saved. Note that this can be <see langword="null"/>, as a shorthand
+        /// for an empty transformer chain. Upon loading with <see cref="LoadWithDataLoader(Stream, out IDataLoader{IMultiStreamSource})"/>
+        /// the returned value will be an empty <see cref="TransformerChain{TLastTransformer}"/>.</param>
+        /// <param name="loader">The loader that was used to create data to train the model.</param>
         /// <param name="filePath">Path where model should be saved.</param>
-        public void Save<TSource>(IDataLoader<TSource> loader, ITransformer model, string filePath)
+        public void Save<TSource>(ITransformer model, IDataLoader<TSource> loader, string filePath)
         {
+            _env.CheckValueOrNull(model);
+            _env.CheckValue(loader, nameof(loader));
+            _env.CheckNonEmpty(filePath, nameof(filePath));
+
             using (var stream = File.Create(filePath))
-                Save(loader, model, stream);
+                Save(model, loader, stream);
         }
 
         /// <summary>
         /// Save a transformer model and the schema of the data that was used to train it to the stream.
         /// </summary>
-        /// <param name="model">The trained model to be saved.</param>
-        /// <param name="inputSchema">The schema of the input to the transformer. This can be null.</param>
+        /// <param name="model">The trained model to be saved. Note that this can be <see langword="null"/>, as a shorthand
+        /// for an empty transformer chain. Upon loading with <see cref="Load(Stream, out DataViewSchema)"/> the returned value will
+        /// be an empty <see cref="TransformerChain{TLastTransformer}"/>.</param>
+        /// <param name="inputSchema">The schema of the input to the transformer. This can be <see langword="null"/>.</param>
         /// <param name="stream">A writeable, seekable stream to save to.</param>
         public void Save(ITransformer model, DataViewSchema inputSchema, Stream stream)
         {
-            _env.CheckValue(model, nameof(model));
+            _env.CheckValueOrNull(model);
             _env.CheckValueOrNull(inputSchema);
             _env.CheckValue(stream, nameof(stream));
 
@@ -100,11 +97,17 @@ public void Save(ITransformer model, DataViewSchema inputSchema, Stream stream)
         /// <summary>
         /// Save a transformer model and the schema of the data that was used to train it to the file.
         /// </summary>
-        /// <param name="model">The trained model to be saved.</param>
-        /// <param name="inputSchema">The schema of the input to the transformer. This can be null.</param>
+        /// <param name="model">The trained model to be saved. Note that this can be <see langword="null"/>, as a shorthand
+        /// for an empty transformer chain. Upon loading with <see cref="Load(Stream, out DataViewSchema)"/> the returned value will
+        /// be an empty <see cref="TransformerChain{TLastTransformer}"/>.</param>
+        /// <param name="inputSchema">The schema of the input to the transformer. This can be <see langword="null"/>.</param>
         /// <param name="filePath">Path where model should be saved.</param>
         public void Save(ITransformer model, DataViewSchema inputSchema, string filePath)
         {
+            _env.CheckValueOrNull(model);
+            _env.CheckValueOrNull(inputSchema);
+            _env.CheckNonEmpty(filePath, nameof(filePath));
+
             using (var stream = File.Create(filePath))
                 Save(model, inputSchema, stream);
         }
@@ -126,11 +129,11 @@ private void SaveInputSchema(DataViewSchema inputSchema, RepositoryWriter rep)
         }
 
         /// <summary>
-        /// Load the model and its input schema from the stream.
+        /// Load the model and its input schema from a stream.
         /// </summary>
         /// <param name="stream">A readable, seekable stream to load from.</param>
-        /// <param name="inputSchema">Will contain the input schema for the model. If the model was saved using older APIs
-        /// it may not contain an input schema, in this case <paramref name="inputSchema"/> will be null.</param>
+        /// <param name="inputSchema">Will contain the input schema for the model. If the model was saved without
+        /// any description of its input, there will be no input schema, in which case this parameter will be <see langword="null"/>.</param>
         /// <returns>The loaded model.</returns>
         public ITransformer Load(Stream stream, out DataViewSchema inputSchema)
         {
@@ -171,23 +174,67 @@ public ITransformer Load(Stream stream, out DataViewSchema inputSchema)
                         throw _env.Except(ex, "Could not load legacy format model");
                     }
                 }
-                if (dataLoader is CompositeDataLoader<IMultiStreamSource, ITransformer> composite)
-                {
-                    inputSchema = composite.Loader.GetOutputSchema();
-                    return composite.Transformer;
-                }
+                var transformer = DecomposeLoader(ref dataLoader);
                 inputSchema = dataLoader.GetOutputSchema();
-                return new TransformerChain<ITransformer>();
+                return transformer;
+            }
+        }
+
+        /// <summary>
+        /// Load the model and its input schema from a file.
+        /// </summary>
+        /// <param name="filePath">Path to a file where the model should be read from.</param>
+        /// <param name="inputSchema">Will contain the input schema for the model. If the model was saved without
+        /// any description of its input, there will be no input schema, in which case this parameter will be <see langword="null"/>.</param>
+        /// <returns>The loaded model.</returns>
+        public ITransformer Load(string filePath, out DataViewSchema inputSchema)
+        {
+            _env.CheckNonEmpty(filePath, nameof(filePath));
+
+            using (var stream = File.OpenRead(filePath))
+                return Load(stream, out inputSchema);
+        }
+
+        /// <summary>
+        /// Given a loader, try to "decompose" it into a source loader and its transformer, if any.
+        /// If necessary, an empty chain is created to stand in for the trivial transformation; this
+        /// method never returns <see langword="null"/>.
+        /// </summary>
+        private ITransformer DecomposeLoader(ref IDataLoader<IMultiStreamSource> loader)
+        {
+            _env.AssertValue(loader);
+
+            if (loader is CompositeDataLoader<IMultiStreamSource, ITransformer> composite)
+            {
+                loader = composite.Loader;
+                var chain = composite.Transformer;
+                // The save method corresponding to this load method wraps the input ITransformer in a
+                // single-element transformer chain. If the chain we find has exactly one element, we
+                // assume it was saved that way and return that single transformer.
+                var accessor = (ITransformerChainAccessor)chain;
+                if (accessor.Transformers.Length == 1)
+                    return accessor.Transformers[0];
+                // If the chain has some length other than 1 due to, say, a legacy model save, just return the
+                // chain itself. That cannot happen through the API above, since the chain it saves always has
+                // length one, but older APIs behaved differently, so we retain flexibility with those schemes.
+                // (Those schemes are by no means incorrect; they just are not what the API in this particular
+                // class specifically does.)
+                return chain;
             }
+            // Maybe we have no transformer stored. Rather than return null, we prefer to return the
+            // empty "trivial" transformer chain.
+            return new TransformerChain<ITransformer>();
         }
 
         /// <summary>
-        /// Load the model and its input schema from the stream.
+        /// Load a transformer model and a data loader model from a stream.
         /// </summary>
         /// <param name="stream">A readable, seekable stream to load from.</param>
-        /// <returns>A model of type <see cref="CompositeDataLoader{IMultiStreamSource, ITransformer}"/> containing the loader
-        /// and the transformer chain.</returns>
-        public IDataLoader<IMultiStreamSource> Load(Stream stream)
+        /// <param name="loader">The data loader from the model stream. Note that if there is no data loader,
+        /// this method will throw an exception. The scenario where no loader is stored in the stream should
+        /// be handled instead using the <see cref="Load(Stream, out DataViewSchema)"/> method.</param>
+        /// <returns>The transformer model from the model stream.</returns>
+        public ITransformer LoadWithDataLoader(Stream stream, out IDataLoader<IMultiStreamSource> loader)
         {
             _env.CheckValue(stream, nameof(stream));
 
@@ -195,33 +242,32 @@ public IDataLoader<IMultiStreamSource> Load(Stream stream)
             {
                 try
                 {
-                    ModelLoadContext.LoadModel<IDataLoader<IMultiStreamSource>, SignatureLoadModel>(_env, out var model, rep, null);
-                    return model;
+                    ModelLoadContext.LoadModel<IDataLoader<IMultiStreamSource>, SignatureLoadModel>(_env, out loader, rep, null);
+                    return DecomposeLoader(ref loader);
                 }
                 catch (Exception ex)
                 {
-                    throw _env.Except(ex, "Model does not contain an IDataLoader");
+                    throw _env.Except(ex, "Model does not contain an " + nameof(IDataLoader<IMultiStreamSource>) +
+                        ". Perhaps this was saved with an " + nameof(DataViewSchema) + ", or even no information on its input at all. " +
+                        "Consider using the " + nameof(Load) + " method instead.");
                 }
             }
         }
 
         /// <summary>
-        /// Load a transformer model and a data loader model from the stream.
+        /// Load a transformer model and a data loader model from a file.
         /// </summary>
-        /// <param name="stream">A readable, seekable stream to load from.</param>
-        /// <param name="loader">The data loader from the model stream.</param>
-        /// <returns>The transformer model from the model stream.</returns>
-        public ITransformer LoadWithDataLoader(Stream stream, out IDataLoader<IMultiStreamSource> loader)
+        /// <param name="filePath">Path to a file where the model should be read from.</param>
+        /// <param name="loader">The data loader from the model file. Note that if there is no data loader,
+        /// this method will throw an exception. The scenario where no loader is stored in the file should
+        /// be handled instead using the <see cref="Load(Stream, out DataViewSchema)"/> method.</param>
+        /// <returns>The transformer model from the model file.</returns>
+        public ITransformer LoadWithDataLoader(string filePath, out IDataLoader<IMultiStreamSource> loader)
         {
-            _env.CheckValue(stream, nameof(stream));
+            _env.CheckNonEmpty(filePath, nameof(filePath));
 
-            loader = Load(stream);
-            if (loader is CompositeDataLoader<IMultiStreamSource, ITransformer> composite)
-            {
-                loader = composite.Loader;
-                return composite.Transformer;
-            }
-            return new TransformerChain<ITransformer>();
+            using (var stream = File.OpenRead(filePath))
+                return LoadWithDataLoader(stream, out loader);
         }
 
         /// <summary>
diff --git a/test/Microsoft.ML.Functional.Tests/ModelLoading.cs b/test/Microsoft.ML.Functional.Tests/ModelLoading.cs
index 193ddedad5..4810ad2a09 100644
--- a/test/Microsoft.ML.Functional.Tests/ModelLoading.cs
+++ b/test/Microsoft.ML.Functional.Tests/ModelLoading.cs
@@ -8,7 +8,6 @@
 using Microsoft.ML.Calibrators;
 using Microsoft.ML.Data;
 using Microsoft.ML.RunTests;
-using Microsoft.ML.TestFramework;
 using Microsoft.ML.Trainers.FastTree;
 using Microsoft.ML.Transforms;
 using Xunit;
@@ -16,22 +15,12 @@
 
 namespace Microsoft.ML.Functional.Tests
 {
-    public partial class ModelLoadingTests : BaseTestClass
+    public partial class ModelLoadingTests : TestDataPipeBase
     {
-        private MLContext _ml;
-
         public ModelLoadingTests(ITestOutputHelper output) : base(output)
         {
         }
 
-        protected override void Initialize()
-        {
-            base.Initialize();
-
-            _ml = new MLContext(42);
-            _ml.AddStandardComponents();
-        }
-
         private class InputData
         {
             [LoadColumn(0)]
@@ -45,104 +34,133 @@ private class InputData
         public void LoadModelAndExtractPredictor()
         {
             var file = new MultiFileSource(GetDataPath(TestDatasets.adult.trainFilename));
-            var loader = _ml.Data.CreateTextLoader<InputData>(hasHeader: true, dataSample: file);
+            var loader = ML.Data.CreateTextLoader<InputData>(hasHeader: true, dataSample: file);
             var data = loader.Load(file);
 
             // Pipeline.
-            var pipeline = _ml.BinaryClassification.Trainers.Gam();
+            var pipeline = ML.BinaryClassification.Trainers.Gam();
             // Define the same pipeline starting with the loader.
-            var pipeline1 = loader.Append(_ml.BinaryClassification.Trainers.Gam());
-            
+            var pipeline1 = loader.Append(ML.BinaryClassification.Trainers.Gam());
+
             // Train.
             var transformerModel = pipeline.Fit(data);
             var compositeLoaderModel = pipeline1.Fit(file);
 
-            // Save and reload.
+            // Save and reload the "same" model with some differences in structure.
+
+            // In this case we are saving the transformer model, but *not* the loader, just the schema from that loader.
             string modelAndSchemaPath = GetOutputPath(FullTestName + "-model-schema.zip");
-            _ml.Model.Save(transformerModel, data.Schema, modelAndSchemaPath);
+            ML.Model.Save(transformerModel, data.Schema, modelAndSchemaPath);
+
+            // In this case we have combined the loader with the transformer model to form a "composite" loader, and are just
+            // saving that one loader to this file.
             string compositeLoaderModelPath = GetOutputPath(FullTestName + "-composite-model.zip");
-            _ml.Model.Save(compositeLoaderModel, compositeLoaderModelPath);
+            ML.Model.Save(null, compositeLoaderModel, compositeLoaderModelPath);
+
+            // In this case we are saving the transformer model, as well as the associated data loader.
             string loaderAndTransformerModelPath = GetOutputPath(FullTestName + "-loader-transformer.zip");
-            _ml.Model.Save(loader, transformerModel, loaderAndTransformerModelPath);
+            ML.Model.Save(transformerModel, loader, loaderAndTransformerModelPath);
 
             ITransformer loadedTransformerModel;
             IDataLoader<IMultiStreamSource> loadedCompositeLoader;
             ITransformer loadedTransformerModel1;
             using (var fs = File.OpenRead(modelAndSchemaPath))
-                loadedTransformerModel = _ml.Model.Load(fs, out var loadedSchema);
+                loadedTransformerModel = ML.Model.Load(fs, out var loadedSchema);
             using (var fs = File.OpenRead(compositeLoaderModelPath))
             {
                 // This model can be loaded either as a composite data loader,
                 // a transformer model + an input schema, or a transformer model + a data loader.
-                var t = _ml.Model.LoadWithDataLoader(fs, out IDataLoader<IMultiStreamSource> l);
-                var t1 = _ml.Model.Load(fs, out var s);
-                loadedCompositeLoader = _ml.Model.Load(fs);
+                var t = ML.Model.LoadWithDataLoader(fs, out loadedCompositeLoader);
+                // Note that this loads a second model from the same stream, which was opened only once.
+                // Reading twice from a single open stream is not really a requirement of the design or the
+                // API, but we test it here nonetheless; if this ever starts failing, we should not necessarily
+                // insist on it as a design requirement.
+                var t1 = ML.Model.Load(fs, out var s);
+
+                CheckSameSchemas(loadedCompositeLoader.GetOutputSchema(), s);
+                // We combined the GAM with the loader, so the remaining chain should just be empty.
+                Assert.Empty(Assert.IsType<TransformerChain<ITransformer>>(t));
+                Assert.Empty(Assert.IsType<TransformerChain<ITransformer>>(t1));
             }
             using (var fs = File.OpenRead(loaderAndTransformerModelPath))
             {
                 // This model can be loaded either as a composite data loader,
                 // a transformer model + an input schema, or a transformer model + a data loader.
-                var t = _ml.Model.Load(fs, out var s);
-                var c = _ml.Model.Load(fs);
-                loadedTransformerModel1 = _ml.Model.LoadWithDataLoader(fs, out IDataLoader<IMultiStreamSource> l);
+                var t = ML.Model.Load(fs, out var s);
+                CheckSameSchemas(loader.GetOutputSchema(), s);
+
+                loadedTransformerModel1 = ML.Model.LoadWithDataLoader(fs, out var l);
+            }
+
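+            // Local helper: asserts that the transformer is a single-feature prediction transformer whose
+            // calibrated model wraps GAM model parameters.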
+            void AssertIsGam(ITransformer trans)
+            {
+                Assert.IsType<GamBinaryModelParameters>(
+                    Assert.IsAssignableFrom<CalibratedModelParametersBase>(
+                        Assert.IsAssignableFrom<ISingleFeaturePredictionTransformer<object>>(trans).Model).SubModel);
             }
 
-            var gam = ((loadedTransformerModel as ISingleFeaturePredictionTransformer<object>).Model
-                as CalibratedModelParametersBase).SubModel
-                as GamBinaryModelParameters;
-            Assert.NotNull(gam);
-
-            gam = (((loadedCompositeLoader as CompositeDataLoader<IMultiStreamSource, ITransformer>).Transformer.LastTransformer
-                as ISingleFeaturePredictionTransformer<object>).Model
-                as CalibratedModelParametersBase).SubModel
-                as GamBinaryModelParameters;
-            Assert.NotNull(gam);
-
-            gam = (((loadedTransformerModel1 as TransformerChain<ITransformer>).LastTransformer
-                as ISingleFeaturePredictionTransformer<object>).Model
-                as CalibratedModelParametersBase).SubModel
-                as GamBinaryModelParameters;
-            Assert.NotNull(gam);
+            // In the case of the directly saved transformer model, the thing we loaded should itself be the result of fitting GAM.
+            AssertIsGam(loadedTransformerModel);
+
+            // This case is quite similar, even though here the model was saved alongside its loader rather than just an input schema.
+            AssertIsGam(loadedTransformerModel1);
+
+            // If we had combined the transformer with the loader, and then saved *that*, then the resulting loaded "model"
+            // will be empty (as tested above), but the loader itself will be a composite loader containing the result from
+            // fitting GAM as the sole item in its transformer chain.
+            var fromComposite = Assert.Single(Assert.IsType<TransformerChain<ITransformer>>(
+                Assert.IsType<CompositeDataLoader<IMultiStreamSource, ITransformer>>(loadedCompositeLoader).Transformer));
+            AssertIsGam(fromComposite);
+
+            Done();
         }
 
         [Fact]
         public void SaveAndLoadModelWithLoader()
         {
             var file = new MultiFileSource(GetDataPath(TestDatasets.adult.trainFilename));
-            var loader = _ml.Data.CreateTextLoader<InputData>(hasHeader: true, dataSample: file);
+            var loader = ML.Data.CreateTextLoader<InputData>(hasHeader: true, dataSample: file);
             var data = loader.Load(file);
 
             // Pipeline.
-            var pipeline = _ml.BinaryClassification.Trainers.Gam();
+            var pipeline = ML.BinaryClassification.Trainers.Gam();
 
             // Train.
             var model = pipeline.Fit(data);
 
             // Save and reload.
             string modelPath = GetOutputPath(FullTestName + "-model.zip");
-            _ml.Model.Save(loader, model, modelPath);
+            ML.Model.Save(model, loader, modelPath);
 
-            IDataLoader<IMultiStreamSource> loadedModel;
+            IDataLoader<IMultiStreamSource> loadedLoader;
             ITransformer loadedModelWithoutLoader;
+            ITransformer loadedModelWithLoader;
             DataViewSchema loadedSchema;
             using (var fs = File.OpenRead(modelPath))
             {
-                loadedModel = _ml.Model.Load(fs);
-                loadedModelWithoutLoader = _ml.Model.Load(fs, out loadedSchema);
+                loadedModelWithLoader = ML.Model.LoadWithDataLoader(fs, out loadedLoader);
+                Assert.IsAssignableFrom<ISingleFeaturePredictionTransformer<object>>(loadedModelWithLoader);
+                loadedModelWithoutLoader = ML.Model.Load(fs, out loadedSchema);
+                Assert.IsAssignableFrom<ISingleFeaturePredictionTransformer<object>>(loadedModelWithoutLoader);
+
+                CheckSameSchemas(loadedLoader.GetOutputSchema(), loadedSchema);
             }
 
-            // Without deserializing the loader from the model we lose the slot names.
-            data = _ml.Data.LoadFromEnumerable(new[] { new InputData() });
+            // When using a novel data source other than one derived from the loader, we will not have
+            // the slot names.
+            data = ML.Data.LoadFromEnumerable(new[] { new InputData() });
             data = loadedModelWithoutLoader.Transform(data);
-            Assert.True(!data.Schema["Features"].HasSlotNames());
+            Assert.False(data.Schema["Features"].HasSlotNames());
+            // When we plumb the loaded schema through the transformer though, we should have slot names.
+            var noLoaderTransformedSchema = loadedModelWithoutLoader.GetOutputSchema(loadedSchema);
+            Assert.True(noLoaderTransformedSchema["Features"].HasSlotNames());
 
-            data = loadedModel.Load(file);
+            data = loadedLoader.Load(file);
             Assert.True(data.Schema["Features"].HasSlotNames());
             VBuffer<ReadOnlyMemory<char>> slotNames = default;
             data.Schema["Features"].GetSlotNames(ref slotNames);
             var ageIndex = FindIndex(slotNames.GetValues(), "age");
-            var transformer = (loadedModel as CompositeDataLoader<IMultiStreamSource, ITransformer>).Transformer.LastTransformer;
-            var singleFeaturePredictionTransformer = transformer as ISingleFeaturePredictionTransformer<object>;
+            var singleFeaturePredictionTransformer = loadedModelWithLoader as ISingleFeaturePredictionTransformer<object>;
             Assert.NotNull(singleFeaturePredictionTransformer);
             var calibratedModelParameters = singleFeaturePredictionTransformer.Model as CalibratedModelParametersBase;
             Assert.NotNull(calibratedModelParameters);
@@ -156,30 +174,30 @@ public void SaveAndLoadModelWithLoader()
         public void LoadSchemaAndCreateNewData()
         {
             var file = new MultiFileSource(GetDataPath(TestDatasets.adult.trainFilename));
-            var loader = _ml.Data.CreateTextLoader<InputData>(hasHeader: true, dataSample: file);
+            var loader = ML.Data.CreateTextLoader<InputData>(hasHeader: true, dataSample: file);
             var data = loader.Load(file);
 
             // Pipeline.
-            var pipeline = _ml.Transforms.Normalize("Features");
+            var pipeline = ML.Transforms.Normalize("Features");
 
             // Train.
             var model = pipeline.Fit(data);
 
             // Save and reload.
             string modelPath = GetOutputPath(FullTestName + "-model.zip");
-            _ml.Model.Save(loader, model, modelPath);
+            ML.Model.Save(model, loader, modelPath);
 
             ITransformer loadedModel;
             DataViewSchema loadedSchema;
             using (var fs = File.OpenRead(modelPath))
-                loadedModel = _ml.Model.Load(fs, out loadedSchema);
+                loadedModel = ML.Model.Load(fs, out loadedSchema);
 
             // Without using the schema from the model we lose the slot names.
-            data = _ml.Data.LoadFromEnumerable(new[] { new InputData() });
+            data = ML.Data.LoadFromEnumerable(new[] { new InputData() });
             data = loadedModel.Transform(data);
             Assert.True(!data.Schema["Features"].HasSlotNames());
 
-            data = _ml.Data.LoadFromEnumerable(new[] { new InputData() }, loadedSchema);
+            data = ML.Data.LoadFromEnumerable(new[] { new InputData() }, loadedSchema);
             Assert.True(data.Schema["Features"].HasSlotNames());
         }
 
@@ -187,12 +205,12 @@ public void LoadSchemaAndCreateNewData()
         public void SaveTextLoaderAndLoad()
         {
             var file = new MultiFileSource(GetDataPath(TestDatasets.adult.trainFilename));
-            var loader = _ml.Data.CreateTextLoader<InputData>(hasHeader: true, dataSample: file);
+            var loader = ML.Data.CreateTextLoader<InputData>(hasHeader: true, dataSample: file);
 
             string modelPath = GetOutputPath(FullTestName + "-model.zip");
-            _ml.Model.Save(loader, modelPath);
+            ML.Model.Save(null, loader, modelPath);
 
-            Load(modelPath, out var loadedWithSchema, out var loadedSchema, out var loadedLoader,
+            Load(modelPath, out var loadedWithSchema, out var loadedSchema,
                 out var loadedWithLoader, out var loadedLoaderWithTransformer);
             Assert.True(loadedWithSchema is TransformerChain<ITransformer>);
             Assert.False((loadedWithSchema as TransformerChain<ITransformer>).Any());
@@ -200,7 +218,6 @@ public void SaveTextLoaderAndLoad()
                 loadedSchema.GetColumnOrNull("Label") != null
                 && loadedSchema.GetColumnOrNull("Features") != null
                 && loadedSchema["Features"].HasSlotNames());
-            Assert.True(loadedLoader is TextLoader);
             Assert.True(loadedWithLoader is TransformerChain<ITransformer>);
             Assert.False((loadedWithLoader as TransformerChain<ITransformer>).Any());
             Assert.True(loadedLoaderWithTransformer is TextLoader);
@@ -215,103 +232,104 @@ public void SaveTextLoaderAndLoad()
         public void SaveCompositeLoaderAndLoad()
         {
             var file = new MultiFileSource(GetDataPath(TestDatasets.adult.trainFilename));
-            var loader = _ml.Data.CreateTextLoader<InputData>(hasHeader: true, dataSample: file);
-            var composite = loader.Append(_ml.Transforms.Normalize("Features"));
-            var model = composite.Fit(file);
+            var loader = ML.Data.CreateTextLoader<InputData>(hasHeader: true, dataSample: file);
+            var composite = loader.Append(ML.Transforms.Normalize("Features"));
+            var loaderWithEmbeddedModel = composite.Fit(file);
 
             string modelPath = GetOutputPath(FullTestName + "-model.zip");
-            _ml.Model.Save(model, modelPath);
+            ML.Model.Save(null, loaderWithEmbeddedModel, modelPath);
 
-            Load(modelPath, out var loadedWithSchema, out var loadedSchema, out var loadedLoader,
+            Load(modelPath, out var loadedWithSchema, out var loadedSchema,
                 out var loadedWithLoader, out var loadedLoaderWithTransformer);
-            Assert.True(loadedWithSchema is TransformerChain<ITransformer>);
-            Assert.True((loadedWithSchema as TransformerChain<ITransformer>).Count() == 1);
-            Assert.True(loadedSchema.Count == 2 &&
-                loadedSchema.GetColumnOrNull("Label") != null
-                && loadedSchema.GetColumnOrNull("Features") != null
-                && loadedSchema["Features"].HasSlotNames());
-            Assert.True(loadedLoader is CompositeDataLoader<IMultiStreamSource, ITransformer>);
-            Assert.True(loadedWithLoader is TransformerChain<ITransformer>);
-            Assert.True((loadedWithLoader as TransformerChain<ITransformer>).Count() == 1);
-            Assert.True(loadedLoaderWithTransformer is TextLoader);
-            var schema = loadedLoaderWithTransformer.GetOutputSchema();
-            Assert.True(schema.Count == 2 &&
-                schema.GetColumnOrNull("Label") != null
-                && schema.GetColumnOrNull("Features") != null
-                && schema["Features"].HasSlotNames());
+            // Because we saved the transformer model as part of the composite loader, passing null for the
+            // model itself, the transformer we load back should be an empty transformer chain: the "model,"
+            // such as it is, has been folded into the loader.
+            Assert.Empty(Assert.IsType<TransformerChain<ITransformer>>(loadedWithSchema));
+            Assert.Empty(Assert.IsType<TransformerChain<ITransformer>>(loadedWithLoader));
+
+            var expectedSchema = loaderWithEmbeddedModel.GetOutputSchema();
+            Assert.Equal(3, expectedSchema.Count);
+            Assert.NotNull(expectedSchema.GetColumnOrNull("Label"));
+            Assert.NotNull(expectedSchema.GetColumnOrNull("Features"));
+            Assert.True(expectedSchema["Features"].HasSlotNames());
+
+            CheckSameSchemas(loaderWithEmbeddedModel.GetOutputSchema(), loadedSchema);
+            var schemaFromLoadedLoader = loadedLoaderWithTransformer.GetOutputSchema();
+            CheckSameSchemas(loaderWithEmbeddedModel.GetOutputSchema(), schemaFromLoadedLoader);
+
+            // The type of the loader itself should be a composite data loader, and its single transformer
+            // should be the normalizing transformer.
+            var compositeLoader = Assert.IsType<CompositeDataLoader<IMultiStreamSource, ITransformer>>(loadedLoaderWithTransformer);
+            var chainFromLoader = compositeLoader.Transformer;
+            Assert.IsType<NormalizingTransformer>(Assert.Single(chainFromLoader));
+
+            Done();
         }
 
         [Fact]
         public void SaveLoaderAndTransformerAndLoad()
         {
             var file = new MultiFileSource(GetDataPath(TestDatasets.adult.trainFilename));
-            var loader = _ml.Data.CreateTextLoader<InputData>(hasHeader: true, dataSample: file);
-            var estimator = _ml.Transforms.Normalize("Features");
-            var model = estimator.Fit(loader.Load(file));
+            var loader = ML.Data.CreateTextLoader<InputData>(hasHeader: true, dataSample: file);
+            var estimator = ML.Transforms.Normalize("Features");
+            var data = loader.Load(file);
+            var model = estimator.Fit(data);
+
+            // First get the input schema.
+            var expectedInputSchema = loader.GetOutputSchema();
+            Assert.Equal(2, expectedInputSchema.Count);
+            Assert.NotNull(expectedInputSchema.GetColumnOrNull("Label"));
+            Assert.NotNull(expectedInputSchema.GetColumnOrNull("Features"));
+            Assert.True(expectedInputSchema["Features"].HasSlotNames());
 
             string modelPath = GetOutputPath(FullTestName + "-model.zip");
-            _ml.Model.Save(loader, model, modelPath);
+            ML.Model.Save(model, loader, modelPath);
 
-            Load(modelPath, out var loadedWithSchema, out var loadedSchema, out var loadedLoader,
+            // Reload the loader and schema.
+            Load(modelPath, out var loadedWithSchema, out var loadedInputSchema,
                 out var loadedWithLoader, out var loadedLoaderWithTransformer);
-            Assert.True(loadedWithSchema is TransformerChain<ITransformer>);
-            Assert.True((loadedWithSchema as TransformerChain<ITransformer>).Count() == 1);
-            Assert.True(loadedSchema.Count == 2 &&
-                loadedSchema.GetColumnOrNull("Label") != null
-                && loadedSchema.GetColumnOrNull("Features") != null
-                && loadedSchema["Features"].HasSlotNames());
-            Assert.True(loadedLoader is CompositeDataLoader<IMultiStreamSource, ITransformer>);
-            Assert.True(loadedWithLoader is TransformerChain<ITransformer>);
-            Assert.True((loadedWithLoader as TransformerChain<ITransformer>).Count() == 1);
-            Assert.True(loadedLoaderWithTransformer is TextLoader);
-            var schema = loadedLoaderWithTransformer.GetOutputSchema();
-            Assert.True(schema.Count == 2 &&
-                schema.GetColumnOrNull("Label") != null
-                && schema.GetColumnOrNull("Features") != null
-                && schema["Features"].HasSlotNames());
+            Assert.IsType<NormalizingTransformer>(loadedWithSchema);
+            Assert.IsType<NormalizingTransformer>(loadedWithLoader);
+            Assert.IsType<TextLoader>(loadedLoaderWithTransformer);
+
+            CheckSameSchemas(expectedInputSchema, loadedInputSchema);
+            var reloadedLoaderInputSchema = loadedLoaderWithTransformer.GetOutputSchema();
+            CheckSameSchemas(expectedInputSchema, reloadedLoaderInputSchema);
+
+            Done();
         }
 
         [Fact]
         public void SaveTransformerAndSchemaAndLoad()
         {
             var file = new MultiFileSource(GetDataPath(TestDatasets.adult.trainFilename));
-            var loader = _ml.Data.CreateTextLoader<InputData>(hasHeader: true, dataSample: file);
-            var estimator = _ml.Transforms.Normalize("Features");
+            var loader = ML.Data.CreateTextLoader<InputData>(hasHeader: true, dataSample: file);
+            var estimator = ML.Transforms.Normalize("Features");
             var model = estimator.Fit(loader.Load(file));
 
             string modelPath = GetOutputPath(FullTestName + "-model.zip");
-            _ml.Model.Save(model, loader.GetOutputSchema(), modelPath);
+            ML.Model.Save(model, loader.GetOutputSchema(), modelPath);
 
-            Load(modelPath, out var loadedWithSchema, out var loadedSchema, out var loadedLoader,
+            Load(modelPath, out var loadedWithSchema, out var loadedSchema,
                 out var loadedWithLoader, out var loadedLoaderWithTransformer);
             Assert.True(loadedWithSchema is NormalizingTransformer);
             Assert.True(loadedSchema.Count == 2 &&
                 loadedSchema.GetColumnOrNull("Label") != null
                 && loadedSchema.GetColumnOrNull("Features") != null
                 && loadedSchema["Features"].HasSlotNames());
-            Assert.Null(loadedLoader);
             Assert.Null(loadedWithLoader);
             Assert.Null(loadedLoaderWithTransformer);
         }
 
         private void Load(string filename, out ITransformer loadedWithSchema, out DataViewSchema loadedSchema,
-            out IDataLoader<IMultiStreamSource> loadedLoader, out ITransformer loadedWithLoader,
-            out IDataLoader<IMultiStreamSource> loadedLoaderWithTransformer)
+            out ITransformer loadedWithLoader, out IDataLoader<IMultiStreamSource> loadedLoaderWithTransformer)
         {
             using (var fs = File.OpenRead(filename))
             {
+                loadedWithSchema = ML.Model.Load(fs, out loadedSchema);
                 try
                 {
-                    loadedLoader = _ml.Model.Load(fs);
-                }
-                catch (Exception)
-                {
-                    loadedLoader = null;
-                }
-                loadedWithSchema = _ml.Model.Load(fs, out loadedSchema);
-                try
-                {
-                    loadedWithLoader = _ml.Model.LoadWithDataLoader(fs, out loadedLoaderWithTransformer);
+                    loadedWithLoader = ML.Model.LoadWithDataLoader(fs, out loadedLoaderWithTransformer);
                 }
                 catch (Exception)
                 {
diff --git a/test/Microsoft.ML.Tests/Scenarios/Api/CookbookSamples/CookbookSamples.cs b/test/Microsoft.ML.Tests/Scenarios/Api/CookbookSamples/CookbookSamples.cs
index 30be894425..cce1de27b5 100644
--- a/test/Microsoft.ML.Tests/Scenarios/Api/CookbookSamples/CookbookSamples.cs
+++ b/test/Microsoft.ML.Tests/Scenarios/Api/CookbookSamples/CookbookSamples.cs
@@ -152,9 +152,7 @@ private void TrainRegression(string trainDataPath, string testDataPath, string m
             // Potentially, the lines below can be in a different process altogether.
 
             // When you load the model, it's a 'dynamic' transformer. 
-            ITransformer loadedModel;
-            using (var stream = File.OpenRead(modelPath))
-                loadedModel = mlContext.Model.Load(stream, out var schema);
+            ITransformer loadedModel = mlContext.Model.Load(modelPath, out var schema);
         }
 
         [Fact]
@@ -400,7 +398,7 @@ public void TrainOnAutoGeneratedData()
             // We apply our FastTree binary classifier to predict the 'HasChurned' label.
 
             var dynamicpipeline = mlContext.Transforms.Categorical.OneHotEncoding("DemographicCategory")
-                .Append(new ColumnConcatenatingEstimator (mlContext, "Features", "DemographicCategory", "LastVisits"))
+                .Append(new ColumnConcatenatingEstimator(mlContext, "Features", "DemographicCategory", "LastVisits"))
                 .AppendCacheCheckpoint(mlContext) // FastTree will benefit from caching data in memory.
                 .Append(mlContext.BinaryClassification.Trainers.FastTree("HasChurned", "Features", numberOfTrees: 20));
 
diff --git a/test/Microsoft.ML.Tests/Scenarios/Api/CookbookSamples/CookbookSamplesDynamicApi.cs b/test/Microsoft.ML.Tests/Scenarios/Api/CookbookSamples/CookbookSamplesDynamicApi.cs
index 614d6baf91..e2a71c27cd 100644
--- a/test/Microsoft.ML.Tests/Scenarios/Api/CookbookSamples/CookbookSamplesDynamicApi.cs
+++ b/test/Microsoft.ML.Tests/Scenarios/Api/CookbookSamples/CookbookSamplesDynamicApi.cs
@@ -120,15 +120,13 @@ private void TrainRegression(string trainDataPath, string testDataPath, string m
             // Calculate metrics of the model on the test data.
             var metrics = mlContext.Regression.Evaluate(model.Transform(testData), labelColumnName: "Target");
 
-            // Saving and loading happens to 'dynamic' models.
+            // Saving and loading happens to transformers. We save the input schema with this model.
             mlContext.Model.Save(model, trainData.Schema, modelPath);
 
             // Potentially, the lines below can be in a different process altogether.
-
-            // When you load the model, it's a 'dynamic' transformer. 
-            ITransformer loadedModel;
-            using (var stream = File.OpenRead(modelPath))
-                loadedModel = mlContext.Model.Load(stream, out var schema);
+            // When you load the model, it's a non-specific ITransformer. We also recover
+            // the original schema.
+            ITransformer loadedModel = mlContext.Model.Load(modelPath, out var schema);
         }
 
         [Fact]
@@ -529,9 +527,7 @@ private static void RunEndToEnd(MLContext mlContext, IDataView trainData, string
             newContext.ComponentCatalog.RegisterAssembly(typeof(CustomMappings).Assembly);
 
             // Now we can load the model.
-            ITransformer loadedModel;
-            using (var fs = File.OpenRead(modelPath))
-                loadedModel = newContext.Model.Load(fs, out var schema);
+            ITransformer loadedModel = newContext.Model.Load(modelPath, out var schema);
         }
 
         public static IDataView PrepareData(MLContext mlContext, IDataView data)
diff --git a/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/TrainSaveModelAndPredict.cs b/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/TrainSaveModelAndPredict.cs
index b934c57329..faea0c9f70 100644
--- a/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/TrainSaveModelAndPredict.cs
+++ b/test/Microsoft.ML.Tests/Scenarios/Api/Estimators/TrainSaveModelAndPredict.cs
@@ -38,10 +38,7 @@ public void TrainSaveModelAndPredict()
             ml.Model.Save(model, data.Schema, modelPath);
 
             // Load model.
-            ITransformer loadedModel;
-            DataViewSchema inputSchema;
-            using (var file = File.OpenRead(modelPath))
-                loadedModel = ml.Model.Load(file, out inputSchema);
+            var loadedModel = ml.Model.Load(modelPath, out var inputSchema);
 
             // Create prediction engine and test predictions.
             var engine = ml.Model.CreatePredictionEngine<SentimentData, SentimentPrediction>(loadedModel, inputSchema);