Skip to content

Commit 88d7ee8

Browse files
authored
Merge pull request #52 from compnerd/rtti
Support: remove custom RTTI handling
2 parents 3708115 + 7d1bbe3 commit 88d7ee8

File tree

17 files changed

+141
-153
lines changed

17 files changed

+141
-153
lines changed

include/glow/Graph/Nodes.h

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -3,7 +3,8 @@
33

44
#include "glow/Base/Tensor.h"
55
#include "glow/Graph/Node.h"
6-
#include "glow/Support/Casting.h"
6+
7+
#include "llvm/Support/Casting.h"
78

89
namespace glow {
910

include/glow/IR/Instrs.h

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -4,9 +4,9 @@
44
#include "glow/Base/Type.h"
55
#include "glow/Graph/Nodes.h"
66
#include "glow/IR/IR.h"
7-
#include "glow/Support/Casting.h"
87

98
#include "llvm/ADT/ArrayRef.h"
9+
#include "llvm/Support/Casting.h"
1010

1111
namespace glow {
1212

include/glow/Importer/Caffe2.h

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -1,10 +1,10 @@
11
#ifndef GLOW_IMPORTER_CAFFE2_H
22
#define GLOW_IMPORTER_CAFFE2_H
33

4-
#include "llvm/ADT/ArrayRef.h"
5-
64
#include "glow/Graph/Graph.h"
7-
#include "glow/Support/Casting.h"
5+
6+
#include "llvm/ADT/ArrayRef.h"
7+
#include "llvm/Support/Casting.h"
88

99
#include <string>
1010
#include <unordered_map>

include/glow/Support/Casting.h

Lines changed: 0 additions & 27 deletions
This file was deleted.

src/glow/Graph/Graph.cpp

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -3,9 +3,10 @@
33
#include "glow/Graph/Graph.h"
44
#include "glow/Graph/Nodes.h"
55
#include "glow/IR/IR.h"
6-
#include "glow/Support/Casting.h"
76
#include "glow/Support/Support.h"
87

8+
#include "llvm/Support/Casting.h"
9+
910
#include <fstream>
1011
#include <iostream>
1112

@@ -274,7 +275,7 @@ struct DottyPrinterPass : NodeVisitor {
274275
std::string repr = escapeDottyString(N->getDebugDesc());
275276
os_ << "\tlabel = " + quote(repr) + "\n";
276277
os_ << "\tshape = \"record\"\n";
277-
if (isa<Variable>(N)) {
278+
if (llvm::isa<Variable>(N)) {
278279
os_ << "\tfillcolor=pink,style=filled\n";
279280
}
280281
os_ << "];\n\n";

src/glow/IR/IR.cpp

Lines changed: 6 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -2,9 +2,10 @@
22

33
#include "glow/IR/IR.h"
44
#include "glow/IR/Instrs.h"
5-
#include "glow/Support/Casting.h"
65
#include "glow/Support/Support.h"
76

7+
#include "llvm/Support/Casting.h"
8+
89
#include <fstream>
910
#include <iostream>
1011
#include <sstream>
@@ -58,7 +59,7 @@ void Instruction::verifyUseList() const {
5859

5960
void Instruction::verify() const {
6061
#define DEF_INSTR(CLASS, NAME) \
61-
if (auto *X = dyn_cast<const CLASS>(this)) \
62+
if (auto *X = llvm::dyn_cast<const CLASS>(this)) \
6263
X->verify();
6364
#define DEF_VALUE(CLASS, NAME)
6465
#include "AutoGenInstr.def"
@@ -120,10 +121,10 @@ Value *Module::getWeightForNode(const Node *V) const {
120121

121122
static void dumpIR(Value *V, std::ostream &out) {
122123
#define DEF_INSTR(CLASS, NAME) \
123-
if (const auto *X = dyn_cast<const CLASS>(V)) \
124+
if (const auto *X = llvm::dyn_cast<const CLASS>(V)) \
124125
return X->dump(out);
125126
#define DEF_VALUE(CLASS, NAME) \
126-
if (const auto *X = dyn_cast<const CLASS>(V)) \
127+
if (const auto *X = llvm::dyn_cast<const CLASS>(V)) \
127128
return X->dump(out);
128129
#include "AutoGenInstr.def"
129130
glow_unreachable();
@@ -132,7 +133,7 @@ static void dumpIR(Value *V, std::ostream &out) {
132133
bool Instruction::isInplaceOp(const Instruction *I, unsigned dstIdx,
133134
unsigned srcIdx) {
134135
#define DEF_INSTR(CLASS, NAME) \
135-
if (const auto *X = dyn_cast<const CLASS>(I)) \
136+
if (const auto *X = llvm::dyn_cast<const CLASS>(I)) \
136137
return X->isInplaceOp(dstIdx, srcIdx);
137138
#define DEF_VALUE(CLASS, NAME)
138139
#include "AutoGenInstr.def"

src/glow/IR/IRGen.cpp

Lines changed: 44 additions & 39 deletions
Original file line numberDiff line numberDiff line change
@@ -4,7 +4,8 @@
44
#include "glow/Graph/Nodes.h"
55
#include "glow/IR/IR.h"
66
#include "glow/IR/IRBuilder.h"
7-
#include "glow/Support/Casting.h"
7+
8+
#include "llvm/Support/Casting.h"
89

910
#include <unordered_map>
1011

@@ -44,7 +45,7 @@ struct IRGenVisitor : NodeVisitor {
4445
/// Saves the generated IR in \p v for the node \p N.
4546
void registerIR(Node *N, Value *v) {
4647
assert(!generatedNodes.count(N) && "Already generated code for this node");
47-
assert((isa<AllocActivationInst>(v) || isa<WeightVar>(v)) &&
48+
assert((llvm::isa<AllocActivationInst>(v) || llvm::isa<WeightVar>(v)) &&
4849
"Value operand must be a memory location");
4950
generatedNodes[N] = v;
5051
// Register the fact that we've lowered this variable to the new weight.
@@ -59,7 +60,7 @@ struct IRGenVisitor : NodeVisitor {
5960
glow_unreachable();
6061
break;
6162
case glow::Kinded::Kind::ConvolutionNodeKind: {
62-
auto *C = cast<ConvolutionNode>(N);
63+
auto *C = llvm::cast<ConvolutionNode>(N);
6364
auto *in = valueForNode(C->getInput());
6465
auto *filter = valueForNode(C->getFilter());
6566
auto *bias = valueForNode(C->getBias());
@@ -72,7 +73,7 @@ struct IRGenVisitor : NodeVisitor {
7273
break;
7374
}
7475
case glow::Kinded::Kind::PoolNodeKind: {
75-
auto *P = cast<PoolNode>(N);
76+
auto *P = llvm::cast<PoolNode>(N);
7677
auto *in = valueForNode(P->getInput());
7778
Instruction *V = nullptr;
7879
if (P->getMode() == PoolNode::Mode::Max) {
@@ -88,7 +89,7 @@ struct IRGenVisitor : NodeVisitor {
8889
break;
8990
}
9091
case glow::Kinded::Kind::FullyConnectedNodeKind: {
91-
auto *FC = cast<FullyConnectedNode>(N);
92+
auto *FC = llvm::cast<FullyConnectedNode>(N);
9293
auto *in = valueForNode(FC->getInput());
9394
auto *filter = valueForNode(FC->getFilter());
9495
auto *bias = valueForNode(FC->getBias());
@@ -99,29 +100,29 @@ struct IRGenVisitor : NodeVisitor {
99100
break;
100101
}
101102
case glow::Kinded::Kind::ReluNodeKind: {
102-
auto *R = cast<ReluNode>(N);
103+
auto *R = llvm::cast<ReluNode>(N);
103104
auto *V = builder_.createRELUOp(valueForNode(R->getInput()));
104105
V->setName(N->getName());
105106
registerIR(N, V->getDest());
106107

107108
break;
108109
}
109110
case glow::Kinded::Kind::SigmoidNodeKind: {
110-
auto *S = cast<SigmoidNode>(N);
111+
auto *S = llvm::cast<SigmoidNode>(N);
111112
auto *V = builder_.createSigmoidOp(valueForNode(S->getInput()));
112113
V->setName(N->getName());
113114
registerIR(N, V->getDest());
114115
break;
115116
}
116117
case glow::Kinded::Kind::TanhNodeKind: {
117-
auto *T = cast<TanhNode>(N);
118+
auto *T = llvm::cast<TanhNode>(N);
118119
auto *V = builder_.createTanhOp(valueForNode(T->getInput()));
119120
V->setName(N->getName());
120121
registerIR(N, V->getDest());
121122
break;
122123
}
123124
case glow::Kinded::Kind::SoftMaxNodeKind: {
124-
auto *SM = cast<SoftMaxNode>(N);
125+
auto *SM = llvm::cast<SoftMaxNode>(N);
125126
auto *in = valueForNode(SM->getInput());
126127
auto *select = valueForNode(SM->getSelected());
127128
auto *V = builder_.createSoftMaxOp(in, select);
@@ -130,7 +131,7 @@ struct IRGenVisitor : NodeVisitor {
130131
break;
131132
}
132133
case glow::Kinded::Kind::RegressionNodeKind: {
133-
auto *RR = cast<RegressionNode>(N);
134+
auto *RR = llvm::cast<RegressionNode>(N);
134135
auto *in = valueForNode(RR->getInput());
135136
auto *expected = valueForNode(RR->getExpected());
136137
auto *V = builder_.createRegressionOp(in, expected);
@@ -139,23 +140,23 @@ struct IRGenVisitor : NodeVisitor {
139140
break;
140141
}
141142
case glow::Kinded::Kind::TransposeNodeKind: {
142-
auto *TT = cast<TransposeNode>(N);
143+
auto *TT = llvm::cast<TransposeNode>(N);
143144
auto *in = valueForNode(TT->getInput());
144145
auto *V = builder_.createTransposeOp(in, TT->getShuffle());
145146
V->setName(N->getName());
146147
registerIR(N, V->getDest());
147148
break;
148149
}
149150
case glow::Kinded::Kind::ReshapeNodeKind: {
150-
auto *RS = cast<ReshapeNode>(N);
151+
auto *RS = llvm::cast<ReshapeNode>(N);
151152
auto *in = valueForNode(RS->getInput());
152153
auto *V = builder_.createReshapeOp(in, RS->getDims());
153154
V->setName(N->getName());
154155
registerIR(N, V->getDest());
155156
break;
156157
}
157158
case glow::Kinded::Kind::ConcatNodeKind: {
158-
auto *CC = cast<ConcatNode>(N);
159+
auto *CC = llvm::cast<ConcatNode>(N);
159160
auto *LHS = valueForNode(CC->getLHS());
160161
auto *RHS = valueForNode(CC->getRHS());
161162
auto *V = builder_.createConcatOp(LHS, RHS, CC->getDim());
@@ -164,7 +165,7 @@ struct IRGenVisitor : NodeVisitor {
164165
break;
165166
}
166167
case glow::Kinded::Kind::BatchNormalizationNodeKind: {
167-
auto *BN = cast<BatchNormalizationNode>(N);
168+
auto *BN = llvm::cast<BatchNormalizationNode>(N);
168169
auto *in = valueForNode(BN->getInput());
169170
auto *beta = valueForNode(BN->getBias());
170171
auto *gamma = valueForNode(BN->getScale());
@@ -180,7 +181,7 @@ struct IRGenVisitor : NodeVisitor {
180181
}
181182

182183
case glow::Kinded::Kind::LocalResponseNormalizationNodeKind: {
183-
auto *LR = cast<LocalResponseNormalizationNode>(N);
184+
auto *LR = llvm::cast<LocalResponseNormalizationNode>(N);
184185
auto *in = valueForNode(LR->getInput());
185186
auto *V = builder_.createLocalResponseNormalizationOp(
186187
in, LR->getHalfWindowSize(), LR->getAlpha(), LR->getBeta(),
@@ -190,7 +191,7 @@ struct IRGenVisitor : NodeVisitor {
190191
break;
191192
}
192193
case glow::Kinded::Kind::ArithmeticNodeKind: {
193-
auto *AR = cast<ArithmeticNode>(N);
194+
auto *AR = llvm::cast<ArithmeticNode>(N);
194195
auto *L = valueForNode(AR->getLHS());
195196
auto *R = valueForNode(AR->getRHS());
196197

@@ -206,7 +207,7 @@ struct IRGenVisitor : NodeVisitor {
206207
break;
207208
}
208209
case glow::Kinded::Kind::SaveNodeKind: {
209-
auto *R = cast<SaveNode>(N);
210+
auto *R = llvm::cast<SaveNode>(N);
210211
auto *src = valueForNode(R->getInput());
211212
auto *dest = valueForNode(R->getOutput());
212213
auto *V = builder_.createCopyInst(dest, src);
@@ -215,7 +216,7 @@ struct IRGenVisitor : NodeVisitor {
215216
}
216217
case glow::Kinded::Kind::VariableNodeKind: {
217218
using MK = WeightVar::MutabilityKind;
218-
auto *V = cast<Variable>(N);
219+
auto *V = llvm::cast<Variable>(N);
219220
bool isConst = V->getInitKind() == Variable::InitKind::Extern;
220221
auto *W = builder_.createWeightVar(V->getType(), V->getName(),
221222
isConst ? MK::Constant : MK::Mutable);
@@ -259,7 +260,7 @@ void generateBackwardPass(Module &M) {
259260
for (auto I : instrs) {
260261
switch (I->getKind()) {
261262
case Kind::AllocActivationInstKind: {
262-
auto *AC = cast<AllocActivationInst>(I);
263+
auto *AC = llvm::cast<AllocActivationInst>(I);
263264
auto *N = new AllocActivationInst(AC->getName(), AC->getType());
264265
allocs.push_back(N);
265266
weightToGradMap[I] = N;
@@ -269,78 +270,82 @@ void generateBackwardPass(Module &M) {
269270
break;
270271
}
271272
case Kind::CopyInstKind: {
272-
auto *CC = cast<CopyInst>(I);
273+
auto *CC = llvm::cast<CopyInst>(I);
273274
auto *N = new CopyInst(CC->getName(), weightToGradMap[CC->getSrc()],
274275
weightToGradMap[CC->getDest()]);
275276
toAppend.push_back(N);
276277
break;
277278
}
278279
case Kind::ConvolutionInstKind: {
279-
toAppend.push_back(cast<ConvolutionInst>(I)->getGrad(weightToGradMap));
280+
toAppend.push_back(
281+
llvm::cast<ConvolutionInst>(I)->getGrad(weightToGradMap));
280282
break;
281283
}
282284
case Kind::PoolMaxInstKind: {
283-
toAppend.push_back(cast<PoolMaxInst>(I)->getGrad(weightToGradMap));
285+
toAppend.push_back(llvm::cast<PoolMaxInst>(I)->getGrad(weightToGradMap));
284286
break;
285287
}
286288
case Kind::PoolAvgInstKind: {
287-
toAppend.push_back(cast<PoolAvgInst>(I)->getGrad(weightToGradMap));
289+
toAppend.push_back(llvm::cast<PoolAvgInst>(I)->getGrad(weightToGradMap));
288290
break;
289291
}
290292
case Kind::FullyConnectedInstKind: {
291-
toAppend.push_back(cast<FullyConnectedInst>(I)->getGrad(weightToGradMap));
293+
toAppend.push_back(
294+
llvm::cast<FullyConnectedInst>(I)->getGrad(weightToGradMap));
292295
break;
293296
}
294297
case Kind::BatchNormalizationInstKind: {
295298
toAppend.push_back(
296-
cast<BatchNormalizationInst>(I)->getGrad(weightToGradMap));
299+
llvm::cast<BatchNormalizationInst>(I)->getGrad(weightToGradMap));
297300
break;
298301
}
299302
case Kind::LocalResponseNormalizationInstKind: {
300-
toAppend.push_back(
301-
cast<LocalResponseNormalizationInst>(I)->getGrad(weightToGradMap));
303+
toAppend.push_back(llvm::cast<LocalResponseNormalizationInst>(I)->getGrad(
304+
weightToGradMap));
302305
break;
303306
}
304307
case Kind::SoftMaxInstKind: {
305-
toAppend.push_back(cast<SoftMaxInst>(I)->getGrad(weightToGradMap));
308+
toAppend.push_back(llvm::cast<SoftMaxInst>(I)->getGrad(weightToGradMap));
306309
break;
307310
}
308311
case Kind::RegressionInstKind: {
309-
toAppend.push_back(cast<RegressionInst>(I)->getGrad(weightToGradMap));
310-
312+
toAppend.push_back(
313+
llvm::cast<RegressionInst>(I)->getGrad(weightToGradMap));
311314
break;
312315
}
313316
case Kind::ElementAddInstKind: {
314-
toAppend.push_back(cast<ElementAddInst>(I)->getGrad(weightToGradMap));
317+
toAppend.push_back(
318+
llvm::cast<ElementAddInst>(I)->getGrad(weightToGradMap));
315319
break;
316320
}
317321
case Kind::ElementMulInstKind: {
318-
toAppend.push_back(cast<ElementMulInst>(I)->getGrad(weightToGradMap));
322+
toAppend.push_back(
323+
llvm::cast<ElementMulInst>(I)->getGrad(weightToGradMap));
319324
break;
320325
}
321326
case Kind::ReluInstKind: {
322-
toAppend.push_back(cast<ReluInst>(I)->getGrad(weightToGradMap));
323-
327+
toAppend.push_back(llvm::cast<ReluInst>(I)->getGrad(weightToGradMap));
324328
break;
325329
}
326330
case Kind::SigmoidInstKind: {
327-
toAppend.push_back(cast<SigmoidInst>(I)->getGrad(weightToGradMap));
331+
toAppend.push_back(llvm::cast<SigmoidInst>(I)->getGrad(weightToGradMap));
328332
break;
329333
}
330334
case Kind::TanhInstKind: {
331-
toAppend.push_back(cast<TanhInst>(I)->getGrad(weightToGradMap));
335+
toAppend.push_back(llvm::cast<TanhInst>(I)->getGrad(weightToGradMap));
332336
break;
333337
}
334338
case Kind::ReshapeInstKind: {
335-
toAppend.push_back(cast<ReshapeInst>(I)->getGrad(weightToGradMap));
339+
toAppend.push_back(llvm::cast<ReshapeInst>(I)->getGrad(weightToGradMap));
336340
break;
337341
}
338342
case Kind::TransposeInstKind: {
339-
toAppend.push_back(cast<TransposeInst>(I)->getGrad(weightToGradMap));
343+
toAppend.push_back(
344+
llvm::cast<TransposeInst>(I)->getGrad(weightToGradMap));
340345
break;
341346
}
342347
case Kind::ConcatInstKind: {
343-
toAppend.push_back(cast<ConcatInst>(I)->getGrad(weightToGradMap));
348+
toAppend.push_back(llvm::cast<ConcatInst>(I)->getGrad(weightToGradMap));
344349
break;
345350
}
346351
default:

0 commit comments

Comments (0)