@@ -4,7 +4,8 @@
 #include "glow/Graph/Nodes.h"
 #include "glow/IR/IR.h"
 #include "glow/IR/IRBuilder.h"
-#include "glow/Support/Casting.h"
+
+#include "llvm/Support/Casting.h"
 
 #include <unordered_map>
 
@@ -44,7 +45,7 @@ struct IRGenVisitor : NodeVisitor {
   /// Saves the generated IR in \p v for the node \p N.
   void registerIR(Node *N, Value *v) {
     assert(!generatedNodes.count(N) && "Already generated code for this node");
-    assert((isa<AllocActivationInst>(v) || isa<WeightVar>(v)) &&
+    assert((llvm::isa<AllocActivationInst>(v) || llvm::isa<WeightVar>(v)) &&
            "Value operand must be a memory location");
     generatedNodes[N] = v;
     // Register the fact that we've lowered this variable to the new weight.
@@ -59,7 +60,7 @@ struct IRGenVisitor : NodeVisitor {
       glow_unreachable();
      break;
     case glow::Kinded::Kind::ConvolutionNodeKind: {
-      auto *C = cast<ConvolutionNode>(N);
+      auto *C = llvm::cast<ConvolutionNode>(N);
       auto *in = valueForNode(C->getInput());
       auto *filter = valueForNode(C->getFilter());
       auto *bias = valueForNode(C->getBias());
@@ -72,7 +73,7 @@ struct IRGenVisitor : NodeVisitor {
       break;
     }
     case glow::Kinded::Kind::PoolNodeKind: {
-      auto *P = cast<PoolNode>(N);
+      auto *P = llvm::cast<PoolNode>(N);
       auto *in = valueForNode(P->getInput());
       Instruction *V = nullptr;
       if (P->getMode() == PoolNode::Mode::Max) {
@@ -88,7 +89,7 @@ struct IRGenVisitor : NodeVisitor {
       break;
     }
     case glow::Kinded::Kind::FullyConnectedNodeKind: {
-      auto *FC = cast<FullyConnectedNode>(N);
+      auto *FC = llvm::cast<FullyConnectedNode>(N);
       auto *in = valueForNode(FC->getInput());
       auto *filter = valueForNode(FC->getFilter());
       auto *bias = valueForNode(FC->getBias());
@@ -99,29 +100,29 @@ struct IRGenVisitor : NodeVisitor {
       break;
     }
     case glow::Kinded::Kind::ReluNodeKind: {
-      auto *R = cast<ReluNode>(N);
+      auto *R = llvm::cast<ReluNode>(N);
       auto *V = builder_.createRELUOp(valueForNode(R->getInput()));
       V->setName(N->getName());
       registerIR(N, V->getDest());
 
       break;
     }
     case glow::Kinded::Kind::SigmoidNodeKind: {
-      auto *S = cast<SigmoidNode>(N);
+      auto *S = llvm::cast<SigmoidNode>(N);
       auto *V = builder_.createSigmoidOp(valueForNode(S->getInput()));
       V->setName(N->getName());
       registerIR(N, V->getDest());
       break;
     }
     case glow::Kinded::Kind::TanhNodeKind: {
-      auto *T = cast<TanhNode>(N);
+      auto *T = llvm::cast<TanhNode>(N);
       auto *V = builder_.createTanhOp(valueForNode(T->getInput()));
       V->setName(N->getName());
       registerIR(N, V->getDest());
       break;
     }
     case glow::Kinded::Kind::SoftMaxNodeKind: {
-      auto *SM = cast<SoftMaxNode>(N);
+      auto *SM = llvm::cast<SoftMaxNode>(N);
       auto *in = valueForNode(SM->getInput());
       auto *select = valueForNode(SM->getSelected());
       auto *V = builder_.createSoftMaxOp(in, select);
@@ -130,7 +131,7 @@ struct IRGenVisitor : NodeVisitor {
       break;
     }
     case glow::Kinded::Kind::RegressionNodeKind: {
-      auto *RR = cast<RegressionNode>(N);
+      auto *RR = llvm::cast<RegressionNode>(N);
       auto *in = valueForNode(RR->getInput());
       auto *expected = valueForNode(RR->getExpected());
       auto *V = builder_.createRegressionOp(in, expected);
@@ -139,23 +140,23 @@ struct IRGenVisitor : NodeVisitor {
       break;
     }
     case glow::Kinded::Kind::TransposeNodeKind: {
-      auto *TT = cast<TransposeNode>(N);
+      auto *TT = llvm::cast<TransposeNode>(N);
       auto *in = valueForNode(TT->getInput());
       auto *V = builder_.createTransposeOp(in, TT->getShuffle());
       V->setName(N->getName());
       registerIR(N, V->getDest());
       break;
     }
     case glow::Kinded::Kind::ReshapeNodeKind: {
-      auto *RS = cast<ReshapeNode>(N);
+      auto *RS = llvm::cast<ReshapeNode>(N);
       auto *in = valueForNode(RS->getInput());
       auto *V = builder_.createReshapeOp(in, RS->getDims());
       V->setName(N->getName());
       registerIR(N, V->getDest());
       break;
     }
     case glow::Kinded::Kind::ConcatNodeKind: {
-      auto *CC = cast<ConcatNode>(N);
+      auto *CC = llvm::cast<ConcatNode>(N);
       auto *LHS = valueForNode(CC->getLHS());
       auto *RHS = valueForNode(CC->getRHS());
       auto *V = builder_.createConcatOp(LHS, RHS, CC->getDim());
@@ -164,7 +165,7 @@ struct IRGenVisitor : NodeVisitor {
       break;
     }
     case glow::Kinded::Kind::BatchNormalizationNodeKind: {
-      auto *BN = cast<BatchNormalizationNode>(N);
+      auto *BN = llvm::cast<BatchNormalizationNode>(N);
       auto *in = valueForNode(BN->getInput());
       auto *beta = valueForNode(BN->getBias());
       auto *gamma = valueForNode(BN->getScale());
@@ -180,7 +181,7 @@ struct IRGenVisitor : NodeVisitor {
     }
 
     case glow::Kinded::Kind::LocalResponseNormalizationNodeKind: {
-      auto *LR = cast<LocalResponseNormalizationNode>(N);
+      auto *LR = llvm::cast<LocalResponseNormalizationNode>(N);
       auto *in = valueForNode(LR->getInput());
       auto *V = builder_.createLocalResponseNormalizationOp(
           in, LR->getHalfWindowSize(), LR->getAlpha(), LR->getBeta(),
@@ -190,7 +191,7 @@ struct IRGenVisitor : NodeVisitor {
       break;
     }
     case glow::Kinded::Kind::ArithmeticNodeKind: {
-      auto *AR = cast<ArithmeticNode>(N);
+      auto *AR = llvm::cast<ArithmeticNode>(N);
       auto *L = valueForNode(AR->getLHS());
       auto *R = valueForNode(AR->getRHS());
 
@@ -206,7 +207,7 @@ struct IRGenVisitor : NodeVisitor {
       break;
     }
     case glow::Kinded::Kind::SaveNodeKind: {
-      auto *R = cast<SaveNode>(N);
+      auto *R = llvm::cast<SaveNode>(N);
       auto *src = valueForNode(R->getInput());
       auto *dest = valueForNode(R->getOutput());
       auto *V = builder_.createCopyInst(dest, src);
@@ -215,7 +216,7 @@ struct IRGenVisitor : NodeVisitor {
     }
     case glow::Kinded::Kind::VariableNodeKind: {
       using MK = WeightVar::MutabilityKind;
-      auto *V = cast<Variable>(N);
+      auto *V = llvm::cast<Variable>(N);
       bool isConst = V->getInitKind() == Variable::InitKind::Extern;
       auto *W = builder_.createWeightVar(V->getType(), V->getName(),
                                          isConst ? MK::Constant : MK::Mutable);
@@ -259,7 +260,7 @@ void generateBackwardPass(Module &M) {
   for (auto I : instrs) {
     switch (I->getKind()) {
     case Kind::AllocActivationInstKind: {
-      auto *AC = cast<AllocActivationInst>(I);
+      auto *AC = llvm::cast<AllocActivationInst>(I);
       auto *N = new AllocActivationInst(AC->getName(), AC->getType());
       allocs.push_back(N);
       weightToGradMap[I] = N;
@@ -269,78 +270,82 @@ void generateBackwardPass(Module &M) {
       break;
     }
     case Kind::CopyInstKind: {
-      auto *CC = cast<CopyInst>(I);
+      auto *CC = llvm::cast<CopyInst>(I);
       auto *N = new CopyInst(CC->getName(), weightToGradMap[CC->getSrc()],
                              weightToGradMap[CC->getDest()]);
       toAppend.push_back(N);
       break;
     }
     case Kind::ConvolutionInstKind: {
-      toAppend.push_back(cast<ConvolutionInst>(I)->getGrad(weightToGradMap));
+      toAppend.push_back(
+          llvm::cast<ConvolutionInst>(I)->getGrad(weightToGradMap));
       break;
     }
     case Kind::PoolMaxInstKind: {
-      toAppend.push_back(cast<PoolMaxInst>(I)->getGrad(weightToGradMap));
+      toAppend.push_back(llvm::cast<PoolMaxInst>(I)->getGrad(weightToGradMap));
       break;
     }
     case Kind::PoolAvgInstKind: {
-      toAppend.push_back(cast<PoolAvgInst>(I)->getGrad(weightToGradMap));
+      toAppend.push_back(llvm::cast<PoolAvgInst>(I)->getGrad(weightToGradMap));
       break;
     }
     case Kind::FullyConnectedInstKind: {
-      toAppend.push_back(cast<FullyConnectedInst>(I)->getGrad(weightToGradMap));
+      toAppend.push_back(
+          llvm::cast<FullyConnectedInst>(I)->getGrad(weightToGradMap));
       break;
     }
     case Kind::BatchNormalizationInstKind: {
       toAppend.push_back(
-          cast<BatchNormalizationInst>(I)->getGrad(weightToGradMap));
+          llvm::cast<BatchNormalizationInst>(I)->getGrad(weightToGradMap));
       break;
     }
     case Kind::LocalResponseNormalizationInstKind: {
-      toAppend.push_back(
-          cast<LocalResponseNormalizationInst>(I)->getGrad(weightToGradMap));
+      toAppend.push_back(llvm::cast<LocalResponseNormalizationInst>(I)->getGrad(
+          weightToGradMap));
       break;
     }
     case Kind::SoftMaxInstKind: {
-      toAppend.push_back(cast<SoftMaxInst>(I)->getGrad(weightToGradMap));
+      toAppend.push_back(llvm::cast<SoftMaxInst>(I)->getGrad(weightToGradMap));
       break;
     }
     case Kind::RegressionInstKind: {
-      toAppend.push_back(cast<RegressionInst>(I)->getGrad(weightToGradMap));
-
+      toAppend.push_back(
+          llvm::cast<RegressionInst>(I)->getGrad(weightToGradMap));
       break;
     }
     case Kind::ElementAddInstKind: {
-      toAppend.push_back(cast<ElementAddInst>(I)->getGrad(weightToGradMap));
+      toAppend.push_back(
+          llvm::cast<ElementAddInst>(I)->getGrad(weightToGradMap));
       break;
     }
     case Kind::ElementMulInstKind: {
-      toAppend.push_back(cast<ElementMulInst>(I)->getGrad(weightToGradMap));
+      toAppend.push_back(
+          llvm::cast<ElementMulInst>(I)->getGrad(weightToGradMap));
       break;
     }
     case Kind::ReluInstKind: {
-      toAppend.push_back(cast<ReluInst>(I)->getGrad(weightToGradMap));
-
+      toAppend.push_back(llvm::cast<ReluInst>(I)->getGrad(weightToGradMap));
       break;
     }
     case Kind::SigmoidInstKind: {
-      toAppend.push_back(cast<SigmoidInst>(I)->getGrad(weightToGradMap));
+      toAppend.push_back(llvm::cast<SigmoidInst>(I)->getGrad(weightToGradMap));
       break;
     }
     case Kind::TanhInstKind: {
-      toAppend.push_back(cast<TanhInst>(I)->getGrad(weightToGradMap));
+      toAppend.push_back(llvm::cast<TanhInst>(I)->getGrad(weightToGradMap));
       break;
    }
     case Kind::ReshapeInstKind: {
-      toAppend.push_back(cast<ReshapeInst>(I)->getGrad(weightToGradMap));
+      toAppend.push_back(llvm::cast<ReshapeInst>(I)->getGrad(weightToGradMap));
       break;
     }
     case Kind::TransposeInstKind: {
-      toAppend.push_back(cast<TransposeInst>(I)->getGrad(weightToGradMap));
+      toAppend.push_back(
+          llvm::cast<TransposeInst>(I)->getGrad(weightToGradMap));
       break;
     }
     case Kind::ConcatInstKind: {
-      toAppend.push_back(cast<ConcatInst>(I)->getGrad(weightToGradMap));
+      toAppend.push_back(llvm::cast<ConcatInst>(I)->getGrad(weightToGradMap));
       break;
     }
     default:
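
For context on the idiom this change migrates to: llvm/Support/Casting.h provides isa<>, cast<>, and dyn_cast<>, which dispatch on a class's own kind tag through a classof() hook instead of C++ RTTI. Below is a minimal sketch of how a hierarchy opts in; Shape and Circle are hypothetical stand-ins for Glow's Kinded types, not code from this repository.

// Minimal sketch of LLVM-style RTTI (assumes LLVM headers are on the
// include path). Shape/Circle are illustrative placeholders only.
#include "llvm/Support/Casting.h"

struct Shape {
  enum class Kind { Circle, Square };
  explicit Shape(Kind k) : kind_(k) {}
  Kind getKind() const { return kind_; }

private:
  const Kind kind_;
};

struct Circle : public Shape {
  Circle() : Shape(Kind::Circle) {}
  // Hook that llvm::isa/cast/dyn_cast call to test the dynamic type.
  static bool classof(const Shape *s) { return s->getKind() == Kind::Circle; }
};

void demo(Shape *s) {
  if (llvm::isa<Circle>(s)) {          // type query only, no conversion
    auto *c = llvm::cast<Circle>(s);   // checked cast; asserts on mismatch
    (void)c;
  }
  if (auto *c = llvm::dyn_cast<Circle>(s)) { // returns nullptr on mismatch
    (void)c;
  }
}

Before this change the same names came unqualified from the local glow/Support/Casting.h; dropping that copy in favor of LLVM's header is why every call site in the hunks above now spells out the llvm:: namespace.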