@@ -36,8 +36,9 @@ Variable *Graph::createVariable(ElemKind T, llvm::ArrayRef<size_t> dims,
   return createVariable(FT, name, initKind, val);
 }
 
-ConvolutionNode *Graph::createConv(Node *input, size_t depth, size_t kernel,
-                                   size_t stride, size_t pad) {
+ConvolutionNode *Graph::createConv(llvm::StringRef name, Node *input,
+                                   size_t depth, size_t kernel, size_t stride,
+                                   size_t pad) {
   ShapeNHWC idim = ShapeNHWC(input->dims());
   assert(idim.w >= kernel && idim.h >= kernel &&
          "buffer too small for selected stride");
@@ -59,12 +60,13 @@ ConvolutionNode *Graph::createConv(Node *input, size_t depth, size_t kernel,
 
   auto OT = M_.uniqueType(ElemKind::FloatTy, outDims);
 
-  return addNode(new ConvolutionNode(input, OT, "Conv", filter, bias, kernel,
+  return addNode(new ConvolutionNode(input, OT, name, filter, bias, kernel,
                                      stride, pad, depth));
 }
 
-PoolNode *Graph::createPool(Node *input, PoolInst::OpKind kind, size_t kernel,
-                            size_t stride, size_t pad) {
+PoolNode *Graph::createPool(llvm::StringRef name, Node *input,
+                            PoolInst::OpKind kind, size_t kernel, size_t stride,
+                            size_t pad) {
   ShapeNHWC idim = ShapeNHWC(input->dims());
   assert(idim.w >= kernel && idim.h >= kernel &&
          "buffer too small for selected stride");
@@ -75,10 +77,11 @@ PoolNode *Graph::createPool(Node *input, PoolInst::OpKind kind, size_t kernel,
   auto OT = M_.uniqueType(ElemKind::FloatTy,
                           {idim.n, outSz.first, outSz.second, idim.c});
 
-  return addNode(new PoolNode(input, OT, "pool", kind, kernel, stride, pad));
+  return addNode(new PoolNode(input, OT, name, kind, kernel, stride, pad));
 }
 
-FullyConnectedNode *Graph::createFullyConnected(Node *input, size_t outDepth) {
+FullyConnectedNode *Graph::createFullyConnected(llvm::StringRef name,
+                                                Node *input, size_t outDepth) {
   TypeRef T = input->getType();
   auto idim = flattenCdr(input->dims());
 
@@ -91,35 +94,37 @@ FullyConnectedNode *Graph::createFullyConnected(Node *input, size_t outDepth) {
                            WeightVar::InitKind::Xavier, 0.1);
 
   auto OT = M_.uniqueType(T->getElementType(), {idim.first, outDepth});
-  return addNode(
-      new FullyConnectedNode(input, OT, "Fullyconnected", W, B, outDepth));
+  return addNode(new FullyConnectedNode(input, OT, name, W, B, outDepth));
 }
 
-ReluNode *Graph::createRELU(Node *input) {
-  return addNode(new ReluNode(input, "relu"));
+ReluNode *Graph::createRELU(llvm::StringRef name, Node *input) {
+  return addNode(new ReluNode(input, name));
 }
 
-SigmoidNode *Graph::createSigmoid(Node *input) {
-  return addNode(new SigmoidNode(input, "Sigmoid"));
+SigmoidNode *Graph::createSigmoid(llvm::StringRef name, Node *input) {
+  return addNode(new SigmoidNode(input, name));
 }
 
-TanhNode *Graph::createTanh(Node *input) {
-  return addNode(new TanhNode(input, "Tanh"));
+TanhNode *Graph::createTanh(llvm::StringRef name, Node *input) {
+  return addNode(new TanhNode(input, name));
 }
 
-SoftMaxNode *Graph::createSoftMax(Node *input, Node *selected) {
-  return addNode(new SoftMaxNode(input, "SoftMax", selected));
+SoftMaxNode *Graph::createSoftMax(llvm::StringRef name, Node *input,
+                                  Node *selected) {
+  return addNode(new SoftMaxNode(input, name, selected));
 }
 
-RegressionNode *Graph::createRegression(Node *input, Node *expected) {
-  return addNode(new RegressionNode(input, "Regression", expected));
+RegressionNode *Graph::createRegression(llvm::StringRef name, Node *input,
+                                        Node *expected) {
+  return addNode(new RegressionNode(input, name, expected));
 }
 
-ReshapeNode *Graph::createReshape(Node *input, llvm::ArrayRef<size_t> shape) {
-  return addNode(new ReshapeNode(input, "Reshape", shape));
+ReshapeNode *Graph::createReshape(llvm::StringRef name, Node *input,
+                                  llvm::ArrayRef<size_t> shape) {
+  return addNode(new ReshapeNode(input, name, shape));
 }
 
-TransposeNode *Graph::createTranspose(Node *input,
+TransposeNode *Graph::createTranspose(llvm::StringRef name, Node *input,
                                       llvm::ArrayRef<unsigned> shuffle) {
   std::vector<size_t> shape;
   auto dims = input->dims();
@@ -128,10 +133,11 @@ TransposeNode *Graph::createTranspose(Node *input,
   }
 
   auto NT = M_.uniqueType(input->getElementType(), shape);
-  return addNode(new TransposeNode(input, NT, "Transpose", shuffle));
+  return addNode(new TransposeNode(input, NT, name, shuffle));
 }
 
-ConcatNode *Graph::createConcat(llvm::ArrayRef<Node *> inputs,
+ConcatNode *Graph::createConcat(llvm::StringRef name,
+                                llvm::ArrayRef<Node *> inputs,
                                 unsigned dimension) {
   auto inDim = inputs[0]->dims();
 
@@ -146,10 +152,11 @@ ConcatNode *Graph::createConcat(llvm::ArrayRef<Node *> inputs,
   shape[dimension] *= inputs.size();
 
   auto NT = M_.uniqueType(inputs[0]->getElementType(), shape);
-  return addNode(new ConcatNode(inputs, NT, "Concat", dimension));
+  return addNode(new ConcatNode(inputs, NT, name, dimension));
 }
 
-BatchNormalizationNode *Graph::createBatchNormalization(Node *input,
+BatchNormalizationNode *Graph::createBatchNormalization(llvm::StringRef name,
+                                                        Node *input,
                                                         size_t channelIdx,
                                                         float epsilon,
                                                         float momentum) {
@@ -167,14 +174,14 @@ BatchNormalizationNode *Graph::createBatchNormalization(Node *input,
   auto *variance = createVariable(ElemKind::FloatTy, {channels}, "variance",
                                   WeightVar::InitKind::Broadcast, 0.0);
 
-  return addNode(new BatchNormalizationNode(input, "Norm", gamma, beta, mean,
-                                            variance, channelIdx, epsilon,
-                                            momentum));
+  return addNode(new BatchNormalizationNode(
+      input, name, gamma, beta, mean, variance, channelIdx, epsilon, momentum));
 }
 
 LocalResponseNormalizationNode *
-Graph::createLocalResponseNormalization(Node *input, size_t halfWindowSize,
-                                        float alpha, float beta, float k) {
+Graph::createLocalResponseNormalization(llvm::StringRef name, Node *input,
+                                        size_t halfWindowSize, float alpha,
+                                        float beta, float k) {
   auto Ty = input->getType();
   auto *scale =
       createVariable(Ty, "scale", WeightVar::InitKind::Broadcast, 0.0);
@@ -184,8 +191,8 @@ Graph::createLocalResponseNormalization(Node *input, size_t halfWindowSize,
       input, "LRN", scale, halfWindowSize, alpha, beta, k));
 }
 
-ArithmeticNode *Graph::createArithmetic(Node *LHS, Node *RHS,
-                                        ArithmeticInst::OpKind op) {
+ArithmeticNode *Graph::createArithmetic(llvm::StringRef name, Node *LHS,
+                                        Node *RHS, ArithmeticInst::OpKind op) {
   assert(LHS->dims() == RHS->dims() && "Invalid operand shapes");
   // The output tensor is of the same shape as the input tensor.
   return addNode(new ArithmeticNode("Arithmetic", LHS, RHS, op));
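A minimal usage sketch of the name-first create* signatures introduced above; this is not part of the commit. The graph G, the input and selected nodes, the layer sizes, and the PoolInst::OpKind::Max enumerator are illustrative assumptions, not taken from the diff.

  // Hypothetical builder calls against the new signatures (name comes first).
  // G is a Graph; input and selected are pre-existing nodes (assumed).
  auto *conv = G.createConv("conv1", input, /*depth=*/16, /*kernel=*/5,
                            /*stride=*/1, /*pad=*/2);
  auto *relu = G.createRELU("relu1", conv);
  auto *pool = G.createPool("pool1", relu, PoolInst::OpKind::Max, /*kernel=*/2,
                            /*stride=*/2, /*pad=*/0);
  auto *fc = G.createFullyConnected("fc1", pool, /*outDepth=*/10);
  auto *sm = G.createSoftMax("softmax", fc, selected);
  (void)sm; // the SoftMax node would feed training or inference downstream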