include/glow/Graph/Nodes.h (3 changes: 2 additions, 1 deletion)

@@ -3,7 +3,8 @@
 
 #include "glow/Base/Tensor.h"
 #include "glow/Graph/Node.h"
-#include "glow/Support/Casting.h"
+
+#include "llvm/Support/Casting.h"
 
 namespace glow {
 
include/glow/IR/Instrs.h (2 changes: 1 addition, 1 deletion)

@@ -4,9 +4,9 @@
 #include "glow/Base/Type.h"
 #include "glow/Graph/Nodes.h"
 #include "glow/IR/IR.h"
-#include "glow/Support/Casting.h"
 
 #include "llvm/ADT/ArrayRef.h"
+#include "llvm/Support/Casting.h"
 
 namespace glow {
 
include/glow/Importer/Caffe2.h (6 changes: 3 additions, 3 deletions)

@@ -1,10 +1,10 @@
 #ifndef GLOW_IMPORTER_CAFFE2_H
 #define GLOW_IMPORTER_CAFFE2_H
 
-#include "llvm/ADT/ArrayRef.h"
-
 #include "glow/Graph/Graph.h"
-#include "glow/Support/Casting.h"
+
+#include "llvm/ADT/ArrayRef.h"
+#include "llvm/Support/Casting.h"
 
 #include <string>
 #include <unordered_map>
include/glow/Support/Casting.h (27 changes: 0 additions, 27 deletions)

This file was deleted.
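For readers skimming the diff: the deleted header supplied Glow's local isa/cast/dyn_cast wrappers, and every call site below now uses the equivalents from llvm/Support/Casting.h, spelled with an explicit llvm:: prefix. A minimal sketch of the LLVM-style RTTI contract those templates rely on follows; the Node/ConvNode hierarchy here is a hypothetical stand-in, not code from this PR.

```cpp
// Sketch: the LLVM-style RTTI contract behind llvm::isa/cast/dyn_cast.
// Node/ConvNode are hypothetical stand-ins for Glow's real classes.
#include "llvm/Support/Casting.h"

struct Node {
  enum class Kind { Conv, Relu };
  explicit Node(Kind k) : kind_(k) {}
  Kind getKind() const { return kind_; }

private:
  Kind kind_;
};

struct ConvNode : Node {
  ConvNode() : Node(Kind::Conv) {}
  // llvm::isa/cast/dyn_cast call classof() to test the dynamic type.
  static bool classof(const Node *N) { return N->getKind() == Kind::Conv; }
};

void inspect(Node *N) {
  if (llvm::isa<ConvNode>(N)) {              // type test only, no conversion
    auto *C = llvm::cast<ConvNode>(N);       // asserts if the type is wrong
    (void)C;
  }
  if (auto *C = llvm::dyn_cast<ConvNode>(N)) // returns nullptr on mismatch
    (void)C;
}
```

Because the dispatch goes through classof() rather than C++ RTTI, these casts compile down to a simple enum comparison.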

src/glow/Graph/Graph.cpp (5 changes: 3 additions, 2 deletions)

@@ -3,9 +3,10 @@
 #include "glow/Graph/Graph.h"
 #include "glow/Graph/Nodes.h"
 #include "glow/IR/IR.h"
-#include "glow/Support/Casting.h"
 #include "glow/Support/Support.h"
 
+#include "llvm/Support/Casting.h"
+
 #include <fstream>
 #include <iostream>
 
@@ -274,7 +275,7 @@ struct DottyPrinterPass : NodeVisitor {
     std::string repr = escapeDottyString(N->getDebugDesc());
     os_ << "\tlabel = " + quote(repr) + "\n";
     os_ << "\tshape = \"record\"\n";
-    if (isa<Variable>(N)) {
+    if (llvm::isa<Variable>(N)) {
       os_ << "\tfillcolor=pink,style=filled\n";
     }
     os_ << "];\n\n";
src/glow/IR/IR.cpp (11 changes: 6 additions, 5 deletions)

@@ -2,9 +2,10 @@
 
 #include "glow/IR/IR.h"
 #include "glow/IR/Instrs.h"
-#include "glow/Support/Casting.h"
 #include "glow/Support/Support.h"
 
+#include "llvm/Support/Casting.h"
+
 #include <fstream>
 #include <iostream>
 #include <sstream>
@@ -58,7 +59,7 @@ void Instruction::verifyUseList() const {
 
 void Instruction::verify() const {
 #define DEF_INSTR(CLASS, NAME)                                                 \
-  if (auto *X = dyn_cast<const CLASS>(this))                                   \
+  if (auto *X = llvm::dyn_cast<const CLASS>(this))                             \
     X->verify();
 #define DEF_VALUE(CLASS, NAME)
 #include "AutoGenInstr.def"
@@ -120,10 +121,10 @@ Value *Module::getWeightForNode(const Node *V) const {
 
 static void dumpIR(Value *V, std::ostream &out) {
 #define DEF_INSTR(CLASS, NAME)                                                 \
-  if (const auto *X = dyn_cast<const CLASS>(V))                                \
+  if (const auto *X = llvm::dyn_cast<const CLASS>(V))                          \
     return X->dump(out);
 #define DEF_VALUE(CLASS, NAME)                                                 \
-  if (const auto *X = dyn_cast<const CLASS>(V))                                \
+  if (const auto *X = llvm::dyn_cast<const CLASS>(V))                          \
     return X->dump(out);
 #include "AutoGenInstr.def"
   glow_unreachable();
@@ -132,7 +133,7 @@ static void dumpIR(Value *V, std::ostream &out) {
 bool Instruction::isInplaceOp(const Instruction *I, unsigned dstIdx,
                               unsigned srcIdx) {
 #define DEF_INSTR(CLASS, NAME)                                                 \
-  if (const auto *X = dyn_cast<const CLASS>(I))                                \
+  if (const auto *X = llvm::dyn_cast<const CLASS>(I))                          \
     return X->isInplaceOp(dstIdx, srcIdx);
 #define DEF_VALUE(CLASS, NAME)
 #include "AutoGenInstr.def"
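IR.cpp's verify/dump/isInplaceOp dispatchers are built by expanding AutoGenInstr.def: the file defines DEF_INSTR once, includes the .def file, and lets the preprocessor emit one llvm::dyn_cast test per instruction class. Below is a self-contained sketch of that X-macro pattern; FooInst/BarInst and the inline DEF_INSTR invocations are hypothetical (the real list lives in AutoGenInstr.def), and dynamic_cast stands in for llvm::dyn_cast so the sketch compiles without LLVM.

```cpp
// Sketch of the X-macro dispatch that #include "AutoGenInstr.def" drives.
// FooInst/BarInst and the inline DEF_INSTR calls are hypothetical; the real
// code uses llvm::dyn_cast, and the class list comes from the .def file.
#include <iostream>

struct Value {
  virtual ~Value() = default; // polymorphic so dynamic_cast works in the sketch
};
struct FooInst : Value {
  void dump() const { std::cout << "foo\n"; }
};
struct BarInst : Value {
  void dump() const { std::cout << "bar\n"; }
};

static void dumpIR(const Value *V) {
// One dynamic type test per instruction class; the first match returns.
#define DEF_INSTR(CLASS, NAME)                                                 \
  if (const auto *X = dynamic_cast<const CLASS *>(V))                          \
    return X->dump();
  DEF_INSTR(FooInst, foo)
  DEF_INSTR(BarInst, bar)
#undef DEF_INSTR
  // The real code falls through to glow_unreachable() if nothing matched.
}

int main() {
  FooInst F;
  BarInst B;
  dumpIR(&F); // prints "foo"
  dumpIR(&B); // prints "bar"
}
```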
src/glow/IR/IRGen.cpp (83 changes: 44 additions, 39 deletions)

@@ -4,7 +4,8 @@
 #include "glow/Graph/Nodes.h"
 #include "glow/IR/IR.h"
 #include "glow/IR/IRBuilder.h"
-#include "glow/Support/Casting.h"
+
+#include "llvm/Support/Casting.h"
 
 #include <unordered_map>
 
@@ -44,7 +45,7 @@ struct IRGenVisitor : NodeVisitor {
   /// Saves the generated IR in \p v for the node \p N.
   void registerIR(Node *N, Value *v) {
     assert(!generatedNodes.count(N) && "Already generated code for this node");
-    assert((isa<AllocActivationInst>(v) || isa<WeightVar>(v)) &&
+    assert((llvm::isa<AllocActivationInst>(v) || llvm::isa<WeightVar>(v)) &&
            "Value operand must be a memory location");
     generatedNodes[N] = v;
     // Register the fact that we've lowered this variable to the new weight.
@@ -59,7 +60,7 @@ struct IRGenVisitor : NodeVisitor {
      glow_unreachable();
      break;
    case glow::Kinded::Kind::ConvolutionNodeKind: {
-      auto *C = cast<ConvolutionNode>(N);
+      auto *C = llvm::cast<ConvolutionNode>(N);
      auto *in = valueForNode(C->getInput());
      auto *filter = valueForNode(C->getFilter());
      auto *bias = valueForNode(C->getBias());
@@ -72,7 +73,7 @@
      break;
    }
    case glow::Kinded::Kind::PoolNodeKind: {
-      auto *P = cast<PoolNode>(N);
+      auto *P = llvm::cast<PoolNode>(N);
      auto *in = valueForNode(P->getInput());
      Instruction *V = nullptr;
      if (P->getMode() == PoolNode::Mode::Max) {
@@ -88,7 +89,7 @@
      break;
    }
    case glow::Kinded::Kind::FullyConnectedNodeKind: {
-      auto *FC = cast<FullyConnectedNode>(N);
+      auto *FC = llvm::cast<FullyConnectedNode>(N);
      auto *in = valueForNode(FC->getInput());
      auto *filter = valueForNode(FC->getFilter());
      auto *bias = valueForNode(FC->getBias());
@@ -99,29 +100,29 @@
      break;
    }
    case glow::Kinded::Kind::ReluNodeKind: {
-      auto *R = cast<ReluNode>(N);
+      auto *R = llvm::cast<ReluNode>(N);
      auto *V = builder_.createRELUOp(valueForNode(R->getInput()));
      V->setName(N->getName());
      registerIR(N, V->getDest());
 
      break;
    }
    case glow::Kinded::Kind::SigmoidNodeKind: {
-      auto *S = cast<SigmoidNode>(N);
+      auto *S = llvm::cast<SigmoidNode>(N);
      auto *V = builder_.createSigmoidOp(valueForNode(S->getInput()));
      V->setName(N->getName());
      registerIR(N, V->getDest());
      break;
    }
    case glow::Kinded::Kind::TanhNodeKind: {
-      auto *T = cast<TanhNode>(N);
+      auto *T = llvm::cast<TanhNode>(N);
      auto *V = builder_.createTanhOp(valueForNode(T->getInput()));
      V->setName(N->getName());
      registerIR(N, V->getDest());
      break;
    }
    case glow::Kinded::Kind::SoftMaxNodeKind: {
-      auto *SM = cast<SoftMaxNode>(N);
+      auto *SM = llvm::cast<SoftMaxNode>(N);
      auto *in = valueForNode(SM->getInput());
      auto *select = valueForNode(SM->getSelected());
      auto *V = builder_.createSoftMaxOp(in, select);
@@ -130,7 +131,7 @@
      break;
    }
    case glow::Kinded::Kind::RegressionNodeKind: {
-      auto *RR = cast<RegressionNode>(N);
+      auto *RR = llvm::cast<RegressionNode>(N);
      auto *in = valueForNode(RR->getInput());
      auto *expected = valueForNode(RR->getExpected());
      auto *V = builder_.createRegressionOp(in, expected);
@@ -139,23 +140,23 @@
      break;
    }
    case glow::Kinded::Kind::TransposeNodeKind: {
-      auto *TT = cast<TransposeNode>(N);
+      auto *TT = llvm::cast<TransposeNode>(N);
      auto *in = valueForNode(TT->getInput());
      auto *V = builder_.createTransposeOp(in, TT->getShuffle());
      V->setName(N->getName());
      registerIR(N, V->getDest());
      break;
    }
    case glow::Kinded::Kind::ReshapeNodeKind: {
-      auto *RS = cast<ReshapeNode>(N);
+      auto *RS = llvm::cast<ReshapeNode>(N);
      auto *in = valueForNode(RS->getInput());
      auto *V = builder_.createReshapeOp(in, RS->getDims());
      V->setName(N->getName());
      registerIR(N, V->getDest());
      break;
    }
    case glow::Kinded::Kind::ConcatNodeKind: {
-      auto *CC = cast<ConcatNode>(N);
+      auto *CC = llvm::cast<ConcatNode>(N);
      auto *LHS = valueForNode(CC->getLHS());
      auto *RHS = valueForNode(CC->getRHS());
      auto *V = builder_.createConcatOp(LHS, RHS, CC->getDim());
@@ -164,7 +165,7 @@
      break;
    }
    case glow::Kinded::Kind::BatchNormalizationNodeKind: {
-      auto *BN = cast<BatchNormalizationNode>(N);
+      auto *BN = llvm::cast<BatchNormalizationNode>(N);
      auto *in = valueForNode(BN->getInput());
      auto *beta = valueForNode(BN->getBias());
      auto *gamma = valueForNode(BN->getScale());
@@ -180,7 +181,7 @@
    }
 
    case glow::Kinded::Kind::LocalResponseNormalizationNodeKind: {
-      auto *LR = cast<LocalResponseNormalizationNode>(N);
+      auto *LR = llvm::cast<LocalResponseNormalizationNode>(N);
      auto *in = valueForNode(LR->getInput());
      auto *V = builder_.createLocalResponseNormalizationOp(
          in, LR->getHalfWindowSize(), LR->getAlpha(), LR->getBeta(),
@@ -190,7 +191,7 @@
      break;
    }
    case glow::Kinded::Kind::ArithmeticNodeKind: {
-      auto *AR = cast<ArithmeticNode>(N);
+      auto *AR = llvm::cast<ArithmeticNode>(N);
      auto *L = valueForNode(AR->getLHS());
      auto *R = valueForNode(AR->getRHS());
 
@@ -206,7 +207,7 @@
      break;
    }
    case glow::Kinded::Kind::SaveNodeKind: {
-      auto *R = cast<SaveNode>(N);
+      auto *R = llvm::cast<SaveNode>(N);
      auto *src = valueForNode(R->getInput());
      auto *dest = valueForNode(R->getOutput());
      auto *V = builder_.createCopyInst(dest, src);
@@ -215,7 +216,7 @@
    }
    case glow::Kinded::Kind::VariableNodeKind: {
      using MK = WeightVar::MutabilityKind;
-      auto *V = cast<Variable>(N);
+      auto *V = llvm::cast<Variable>(N);
      bool isConst = V->getInitKind() == Variable::InitKind::Extern;
      auto *W = builder_.createWeightVar(V->getType(), V->getName(),
                                         isConst ? MK::Constant : MK::Mutable);
@@ -259,7 +260,7 @@ void generateBackwardPass(Module &M) {
  for (auto I : instrs) {
    switch (I->getKind()) {
    case Kind::AllocActivationInstKind: {
-      auto *AC = cast<AllocActivationInst>(I);
+      auto *AC = llvm::cast<AllocActivationInst>(I);
      auto *N = new AllocActivationInst(AC->getName(), AC->getType());
      allocs.push_back(N);
      weightToGradMap[I] = N;
@@ -269,78 +270,82 @@ void generateBackwardPass(Module &M) {
      break;
    }
    case Kind::CopyInstKind: {
-      auto *CC = cast<CopyInst>(I);
+      auto *CC = llvm::cast<CopyInst>(I);
      auto *N = new CopyInst(CC->getName(), weightToGradMap[CC->getSrc()],
                             weightToGradMap[CC->getDest()]);
      toAppend.push_back(N);
      break;
    }
    case Kind::ConvolutionInstKind: {
-      toAppend.push_back(cast<ConvolutionInst>(I)->getGrad(weightToGradMap));
+      toAppend.push_back(
+          llvm::cast<ConvolutionInst>(I)->getGrad(weightToGradMap));
      break;
    }
    case Kind::PoolMaxInstKind: {
-      toAppend.push_back(cast<PoolMaxInst>(I)->getGrad(weightToGradMap));
+      toAppend.push_back(llvm::cast<PoolMaxInst>(I)->getGrad(weightToGradMap));
      break;
    }
    case Kind::PoolAvgInstKind: {
-      toAppend.push_back(cast<PoolAvgInst>(I)->getGrad(weightToGradMap));
+      toAppend.push_back(llvm::cast<PoolAvgInst>(I)->getGrad(weightToGradMap));
      break;
    }
    case Kind::FullyConnectedInstKind: {
-      toAppend.push_back(cast<FullyConnectedInst>(I)->getGrad(weightToGradMap));
+      toAppend.push_back(
+          llvm::cast<FullyConnectedInst>(I)->getGrad(weightToGradMap));
      break;
    }
    case Kind::BatchNormalizationInstKind: {
      toAppend.push_back(
-          cast<BatchNormalizationInst>(I)->getGrad(weightToGradMap));
+          llvm::cast<BatchNormalizationInst>(I)->getGrad(weightToGradMap));
      break;
    }
    case Kind::LocalResponseNormalizationInstKind: {
-      toAppend.push_back(
-          cast<LocalResponseNormalizationInst>(I)->getGrad(weightToGradMap));
+      toAppend.push_back(llvm::cast<LocalResponseNormalizationInst>(I)->getGrad(
+          weightToGradMap));
      break;
    }
    case Kind::SoftMaxInstKind: {
-      toAppend.push_back(cast<SoftMaxInst>(I)->getGrad(weightToGradMap));
+      toAppend.push_back(llvm::cast<SoftMaxInst>(I)->getGrad(weightToGradMap));
      break;
    }
    case Kind::RegressionInstKind: {
-      toAppend.push_back(cast<RegressionInst>(I)->getGrad(weightToGradMap));
-
+      toAppend.push_back(
+          llvm::cast<RegressionInst>(I)->getGrad(weightToGradMap));
      break;
    }
    case Kind::ElementAddInstKind: {
-      toAppend.push_back(cast<ElementAddInst>(I)->getGrad(weightToGradMap));
+      toAppend.push_back(
+          llvm::cast<ElementAddInst>(I)->getGrad(weightToGradMap));
      break;
    }
    case Kind::ElementMulInstKind: {
-      toAppend.push_back(cast<ElementMulInst>(I)->getGrad(weightToGradMap));
+      toAppend.push_back(
+          llvm::cast<ElementMulInst>(I)->getGrad(weightToGradMap));
      break;
    }
    case Kind::ReluInstKind: {
-      toAppend.push_back(cast<ReluInst>(I)->getGrad(weightToGradMap));
-
+      toAppend.push_back(llvm::cast<ReluInst>(I)->getGrad(weightToGradMap));
      break;
    }
    case Kind::SigmoidInstKind: {
-      toAppend.push_back(cast<SigmoidInst>(I)->getGrad(weightToGradMap));
+      toAppend.push_back(llvm::cast<SigmoidInst>(I)->getGrad(weightToGradMap));
      break;
    }
    case Kind::TanhInstKind: {
-      toAppend.push_back(cast<TanhInst>(I)->getGrad(weightToGradMap));
+      toAppend.push_back(llvm::cast<TanhInst>(I)->getGrad(weightToGradMap));
      break;
    }
    case Kind::ReshapeInstKind: {
-      toAppend.push_back(cast<ReshapeInst>(I)->getGrad(weightToGradMap));
+      toAppend.push_back(llvm::cast<ReshapeInst>(I)->getGrad(weightToGradMap));
      break;
    }
    case Kind::TransposeInstKind: {
-      toAppend.push_back(cast<TransposeInst>(I)->getGrad(weightToGradMap));
+      toAppend.push_back(
+          llvm::cast<TransposeInst>(I)->getGrad(weightToGradMap));
      break;
    }
    case Kind::ConcatInstKind: {
-      toAppend.push_back(cast<ConcatInst>(I)->getGrad(weightToGradMap));
+      toAppend.push_back(llvm::cast<ConcatInst>(I)->getGrad(weightToGradMap));
      break;
    }
    default:
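One pattern worth noting in the IRGen changes: inside the switch the node's kind is already established by the case label, so the code uses llvm::cast, which only asserts on a mismatch, while the .def-driven dispatchers in IR.cpp probe unknown types with llvm::dyn_cast, which returns nullptr. A small sketch of that split, assuming the hypothetical Node/ConvNode hierarchy from the earlier sketch is in scope:

```cpp
// Sketch: cast<> when the kind is already known, dyn_cast<> when it is not.
// Assumes the hypothetical Node/ConvNode classes defined in the earlier sketch.
void dispatch(Node *N) {
  switch (N->getKind()) {
  case Node::Kind::Conv: {
    // The case label proves the dynamic type, so cast<> (assert-only) is safe.
    auto *C = llvm::cast<ConvNode>(N);
    (void)C;
    break;
  }
  default:
    // Type unknown here: dyn_cast<> branches on nullptr instead of asserting.
    if (auto *C = llvm::dyn_cast<ConvNode>(N))
      (void)C;
    break;
  }
}
```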