import tensorflow as tf
- from tensorflow.python.saved_model.builder import SavedModelBuilder
- from tensorflow.python.saved_model.signature_def_utils import build_signature_def
- from tensorflow.python.saved_model.signature_constants import REGRESS_METHOD_NAME
- from tensorflow.python.saved_model.tag_constants import TRAINING, SERVING
- from tensorflow.python.saved_model.utils import build_tensor_info

- x = tf.placeholder(tf.float32, name='x')
- y = tf.placeholder(tf.float32, name='y')

- w = tf.Variable(tf.random_uniform([1], -1.0, 1.0), name='w')
- b = tf.Variable(tf.zeros([1]), name='b')
- y_hat = tf.add(w * x, b, name="y_hat")
+ class LinearRegression(tf.Module):
+     def __init__(self, name=None):
+         super(LinearRegression, self).__init__(name=name)
+         self.w = tf.Variable(tf.random.uniform([1], -1.0, 1.0), name='w')
+         self.b = tf.Variable(tf.zeros([1]), name='b')
+         self.optimizer = tf.keras.optimizers.SGD(0.5)

- loss = tf.reduce_mean(tf.square(y_hat - y))
- optimizer = tf.train.GradientDescentOptimizer(0.5)
- train = optimizer.minimize(loss, name='train')
+     @tf.function
+     def __call__(self, x):
+         y_hat = self.w * x + self.b
+         return y_hat

- init = tf.variables_initializer(tf.global_variables(), name='init')
+     @tf.function
+     def get_w(self):
+         return {'output': self.w}
+
+     @tf.function
+     def get_b(self):
+         return {'output': self.b}
+
+     @tf.function
+     def train(self, x, y):
+         with tf.GradientTape() as tape:
+             y_hat = self(x)
+             loss = tf.reduce_mean(tf.square(y_hat - y))
+         grads = tape.gradient(loss, self.trainable_variables)
+         _ = self.optimizer.apply_gradients(zip(grads, self.trainable_variables))
+         return {'loss': loss}
+
+
+ model = LinearRegression()
+
+ # Get concrete functions to generate signatures
+ x = tf.TensorSpec([None], tf.float32, name='x')
+ y = tf.TensorSpec([None], tf.float32, name='y')
+
+ train = model.train.get_concrete_function(x, y)
+ w = model.get_w.get_concrete_function()
+ b = model.get_b.get_concrete_function()
+
+ signatures = {'train': train, 'w': w, 'b': b}

directory = 'examples/regression_savedmodel'
- builder = SavedModelBuilder(directory)
-
- with tf.Session(graph=tf.get_default_graph()) as sess:
-     sess.run(init)
-
-     signature_inputs = {
-         "x": build_tensor_info(x),
-         "y": build_tensor_info(y)
-     }
-     signature_outputs = {
-         "out": build_tensor_info(y_hat)
-     }
-     signature_def = build_signature_def(
-         signature_inputs, signature_outputs,
-         REGRESS_METHOD_NAME)
-     builder.add_meta_graph_and_variables(
-         sess, [TRAINING, SERVING],
-         signature_def_map={
-             REGRESS_METHOD_NAME: signature_def
-         },
-         assets_collection=tf.get_collection(tf.GraphKeys.ASSET_FILEPATHS))
-     builder.save(as_text=False)
+ tf.saved_model.save(model, directory, signatures=signatures)
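
With the model exported this way, the named signatures can be invoked directly on the loaded SavedModel. A minimal usage sketch, assuming the save above has run (the sample inputs are illustrative):

loaded = tf.saved_model.load(directory)
# Run one training step through the exported 'train' signature.
step = loaded.signatures['train'](x=tf.constant([1.0, 2.0, 3.0]),
                                  y=tf.constant([2.0, 4.0, 6.0]))
print(step['loss'])
# Read back the current parameters through the 'w' and 'b' signatures.
print(loaded.signatures['w']()['output'])
print(loaded.signatures['b']()['output'])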