
Commit 46dcb19

shajrawi authored and facebook-github-bot committed
Support Backend Specific Verifiers (#3469)
Summary: Adds support for backend-specific verifiers to Glow. I removed the asserts in the backend-specific code that fired when executing Nodes whose validity depends on the backend, and added verifiers to take their place. There are two backend-specific verifiers: one that works on a `Function` and one that takes an `IRFunction`; this allows verification at different stages of the Glow pipeline. I also added two error types: one that indicates a verification error at initial IR generation (`COMPILE_UNSUPPORTED_IR_AFTER_GENERATE`) and one that indicates the faulty code appeared after IR optimizations (`COMPILE_UNSUPPORTED_IR_AFTER_OPTIMIZE`). I also refactored the code so that the graph optimizer no longer checks directly that all nodes are supported; that should not be its job, so it calls the verifier instead, and the base implementation of the `Function` verifier performs that check. Fixes #3450

Pull Request resolved: #3469

Test Plan: `ninja test`

Differential Revision: D17128493

Pulled By: shajrawi

fbshipit-source-id: d917bca05cf343f2395346fda48e54194fe41dc6
1 parent 82b6007 · commit 46dcb19
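
To illustrate the intended flow, here is a minimal sketch (not code from this change) of where the two verification hooks fire during compilation. The driver function below and its shape are hypothetical; only `Backend::verify(...)` and the two error-code names come from this commit.

#include "glow/Backend/Backend.h"

using namespace glow;

// Hypothetical driver: run both backend-specific verifiers (sketch only).
static bool verifyForBackend(const Backend &backend, const Function &F,
                             const IRFunction &IR) {
  // Graph-level check: after graph generation/optimization, before IRGen.
  // A failure at this stage surfaces as COMPILE_UNSUPPORTED_IR_AFTER_GENERATE.
  if (!backend.verify(F)) {
    return false;
  }
  // Instruction-level check: after IRGen and again after IR optimizations.
  // A failure in optimized IR surfaces as COMPILE_UNSUPPORTED_IR_AFTER_OPTIMIZE.
  return backend.verify(IR);
}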

16 files changed, +479 −64 lines changed


docs/Backends.md

Lines changed: 8 additions & 0 deletions
@@ -65,6 +65,14 @@ Additionally, there are virtual functions that backends can override:
   [below](#backend-specific-nodes-and-instructions-transformations) for more
   information.
 
+- `virtual bool verify(const Function &F) const;`
+
+  - Verifies that `Function &F` conforms to the backend-dependent graph constraints.
+
+- `virtual bool verify(const IRFunction &IR) const;`
+
+  - Verifies that `IRFunction &IR` conforms to the backend-specific constraints.
+
 - `virtual bool shouldLower(const Node *N) const;`
 
   - Allow the backend to prevent lowering for some `Node *N`. For example, if a
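
As an illustration of the first hook above, a backend could override the graph-level verifier roughly as follows. `MyBackend` and its float16 restriction are hypothetical; `Backend::verify()` (which runs `Function::verify()` plus the per-node `isOpSupported()` scan) and the `report()` helper are used the same way the Interpreter uses them in this change.

// Hypothetical backend-specific graph verifier (sketch only).
bool MyBackend::verify(const Function &F) const {
  // Keep the target-independent checks and the node-support scan.
  if (!Backend::verify(F)) {
    return false;
  }
  // Extra, backend-specific constraint (illustrative): no float16 results.
  for (const Node &N : F.getNodes()) {
    for (unsigned i = 0, e = N.getNumResults(); i < e; ++i) {
      if (N.getNthResult(i).getElementType() == ElemKind::Float16Ty) {
        report("MyBackend does not support float16 for: " +
               std::string(N.getKindName()));
        return false;
      }
    }
  }
  return true;
}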

include/glow/Backend/Backend.h

Lines changed: 23 additions & 0 deletions
@@ -98,6 +98,29 @@ class Backend {
   /// \returns whether the provided \p NI is supported by the backend.
   virtual bool isOpSupported(const NodeInfo &NI) const = 0;
 
+  /// \returns whether all nodes inside \p F are supported.
+  bool checkAllNodesSupported(const Function &F) const;
+
+  /// \returns whether the provided \p F conforms to the backend-dependent graph
+  /// constraints. Giving the backend an opportunity to check that everything
+  /// conforms to its specific restrictions by overriding this function. It is
+  /// highly recommended for backends to make their backend specific
+  /// verifications a super-set of target independent Function::verify() by
+  /// calling it in their overridden implementation. It is not a strict
+  /// requirement, of course, in case they diverge / the backend has a good
+  /// reason not to call Function::verify().
+  virtual bool verify(const Function &F) const;
+
+  /// \returns whether the provided \p IR conforms to the backend-dependent
+  /// graph constraints. Giving the backend an opportunity to check that
+  /// everything conforms to its specific restrictions by overriding this
+  /// function. It is highly recommended for backends to make their backend
+  /// specific verifications a super-set of target independent
+  /// IRFunction::verify() by calling it in their overridden implementation. It
+  /// is not a strict requirement, of course, in case they diverge / the backend
+  /// has a good reason not to call IRFunction::verify().
+  virtual bool verify(const IRFunction &IR) const;
+
   /// \returns true if the supplied Node \N should be lowered. By default, all
   /// Nodes are candidates for lowering.
   virtual bool shouldLower(const Node *N) const { return true; }
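
A backend that needs instruction-level checks can override the second hook in the same spirit. `MyBackend` and the specific restriction below are illustrative; the iteration pattern mirrors `Interpreter::verify(const IRFunction &)` from this commit.

// Hypothetical backend-specific IR verifier (sketch only).
bool MyBackend::verify(const IRFunction &IR) const {
  for (const auto &I : IR.getInstrs()) {
    // Illustrative constraint: this backend has no implementation of TopK.
    if (llvm::isa<TopKInst>(&I)) {
      report("MyBackend does not support the TopK instruction");
      return false;
    }
  }
  return true;
}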

include/glow/Backend/BackendUtils.h

Lines changed: 50 additions & 0 deletions
@@ -149,6 +149,28 @@ class RuntimeBundle {
 };
 } // namespace runtime
 
+/// Generates a struct named has_\p METHOD_NAME that looks for a method called
+/// \p METHOD_NAME inside of ClassName with return type ReturnType.
+#define CLASS_CONTAINS_METHOD(METHOD_NAME)                                     \
+  template <typename ClassName, typename ReturnType>                           \
+  struct has_##METHOD_NAME {                                                   \
+  private:                                                                     \
+    template <typename T>                                                      \
+    static constexpr auto check(T *) ->                                        \
+        typename std::is_same<decltype(std::declval<T>().METHOD_NAME()),       \
+                              ReturnType>::type;                               \
+    template <typename> static constexpr std::false_type check(...);           \
+    typedef decltype(check<ClassName>(0)) type;                                \
+                                                                               \
+  public:                                                                      \
+    static constexpr bool value = type::value;                                 \
+  };
+
+/// Use template meta-programming to check if typename ClassName contains
+/// getFusedActivation() method. Below generates a struct named
+/// has_getFusedActivation that looks for said method.
+CLASS_CONTAINS_METHOD(getFusedActivation)
+
 /// If \p PH is an output placeholder in the Function \p F,
 /// \returns true.
 /// This is determined by checking if the PH has a user which uses the PH as an
@@ -173,6 +195,34 @@ bool isOutput(const Placeholder *PH, const IRFunction &F);
 /// by the current function.
 bool isInput(const Placeholder *PH, const IRFunction &F);
 
+/// If \p N does not have fused activation \returns true.
+template <typename T,
+          std::enable_if_t<!has_getFusedActivation<T, FusedActivation>::value,
+                           int> = 0>
+bool checkNoFusion(const T &N) {
+  (void)N;
+  return true;
+}
+
+/// If \p N does not have fused activation \returns true.
+template <typename T,
+          std::enable_if_t<has_getFusedActivation<T, FusedActivation>::value,
+                           int> = 0>
+bool checkNoFusion(const T &N) {
+  if (N.getFusedActivation() != FusedActivation::NONE) {
+    report("Glow backend does not support fused Activations for: " +
+           std::string(N.getKindName()));
+    return false;
+  }
+  return true;
+}
+
+/// If \p N does not have fused activation \returns true.
+bool checkNoFusionForNode(const Node &N);
+
+/// If \p I does not have fused activation \returns true.
+bool checkNoFusionForInstr(const Instruction &I);
+
 /// Contains information for placeholder during allocation.
 struct PlaceholderInputOutputInfo {
   /// The placeholder address.
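
The detection idiom generated by `CLASS_CONTAINS_METHOD` can be seen in isolation with two hypothetical classes; nothing below is Glow code. The generated `has_*` trait is what lets the two `checkNoFusion` overloads above coexist, with `std::enable_if_t` selecting exactly one overload per type.

#include <cstdio>
#include <type_traits>
#include <utility>

// Hand-expanded version of CLASS_CONTAINS_METHOD for a method named flavor().
template <typename ClassName, typename ReturnType>
struct has_flavor {
private:
  template <typename T>
  static constexpr auto check(T *) ->
      typename std::is_same<decltype(std::declval<T>().flavor()),
                            ReturnType>::type;
  template <typename> static constexpr std::false_type check(...);
  typedef decltype(check<ClassName>(0)) type;

public:
  static constexpr bool value = type::value;
};

struct Plain {};                                    // no flavor()
struct Fancy { int flavor() const { return 42; } }; // has flavor()

// Chosen when the method is absent: nothing to inspect.
template <typename T, std::enable_if_t<!has_flavor<T, int>::value, int> = 0>
int describe(const T &) { return -1; }

// Chosen when the method is present: call it.
template <typename T, std::enable_if_t<has_flavor<T, int>::value, int> = 0>
int describe(const T &x) { return x.flavor(); }

int main() {
  std::printf("%d %d\n", describe(Plain{}), describe(Fancy{})); // prints: -1 42
  return 0;
}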

include/glow/Optimizer/GraphOptimizer/PassManager.h

Lines changed: 8 additions & 2 deletions
@@ -16,6 +16,8 @@
 #ifndef GLOW_OPTIMIZER_GRAPHOPTIMIZER_PASSMANAGER_H
 #define GLOW_OPTIMIZER_GRAPHOPTIMIZER_PASSMANAGER_H
 
+#include "glow/Backend/Backend.h"
+
 #include "glow/Optimizer/GraphOptimizer/CompilationContext.h"
 #include "glow/Optimizer/GraphOptimizer/FunctionPass.h"
 #include "glow/Optimizer/GraphOptimizer/FunctionPasses.h"
@@ -34,6 +36,9 @@ class FunctionPassManager : public Named {
   /// The pipeline of passes to run.
   FunctionPassPipeline pipeline_;
 
+  /// The Backend we have for backend-specific verification.
+  const Backend *backend_;
+
   /// The index of the current pass being executed in the pipeline.
   size_t passIdx_ = 0;
 
@@ -55,8 +60,9 @@ class FunctionPassManager : public Named {
                   const CompilationContext &cctx);
 
 public:
-  FunctionPassManager(llvm::StringRef name, FunctionPassPipeline pipeline)
-      : Named(name), pipeline_(pipeline), passIdx_(0) {}
+  FunctionPassManager(llvm::StringRef name, FunctionPassPipeline pipeline,
+                      const Backend *backend = nullptr)
+      : Named(name), pipeline_(pipeline), backend_(backend), passIdx_(0) {}
   ~FunctionPassManager() = default;
 
   /// Run the FunctionPassPipeline given the \ref pipeline_ and
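
With the extra parameter, callers that know their target can hand the backend to the pass manager so backend-specific verification can run during the pipeline, while existing call sites keep compiling thanks to the defaulted `nullptr`. A minimal usage sketch follows; the helper function is illustrative, and it assumes the pipeline factory used in `Backend.cpp` below is visible at the call site.

#include "glow/Optimizer/GraphOptimizer/PassManager.h"

using namespace glow;

// Illustrative only: construct a target-aware and a generic pass manager.
static void buildPassManagers(const Backend &backend) {
  FunctionPassManager targetAware("TargetAwareGraphOptz",
                                  backend.getOptimizationPipeline(), &backend);
  FunctionPassManager generic("GraphOptz",
                              createDefaultGraphOptimizationPassPipeline());
  (void)targetAware;
  (void)generic;
}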

include/glow/Support/Error.h

Lines changed: 8 additions & 0 deletions
@@ -89,6 +89,10 @@ class GlowErr final : public llvm::ErrorInfo<GlowErr> {
     MODEL_WRITER_INVALID_FILENAME,
     // Model writer cannot serialize graph to the file.
     MODEL_WRITER_SERIALIZATION_ERROR,
+    // Compilation error; IR unsupported after generation.
+    COMPILE_UNSUPPORTED_IR_AFTER_GENERATE,
+    // Compilation error; IR unsupported after optimization.
+    COMPILE_UNSUPPORTED_IR_AFTER_OPTIMIZE,
   };
 
   /// GlowErr is not convertable to std::error_code. This is included for
@@ -164,6 +168,10 @@ class GlowErr final : public llvm::ErrorInfo<GlowErr> {
       return "MODEL_WRITER_INVALID_FILENAME";
     case ErrorCode::MODEL_WRITER_SERIALIZATION_ERROR:
      return "MODEL_WRITER_SERIALIZATION_ERROR";
+    case ErrorCode::COMPILE_UNSUPPORTED_IR_AFTER_GENERATE:
+      return "COMPILE_UNSUPPORTED_IR_AFTER_GENERATE";
+    case ErrorCode::COMPILE_UNSUPPORTED_IR_AFTER_OPTIMIZE:
+      return "COMPILE_UNSUPPORTED_IR_AFTER_OPTIMIZE";
     };
 
     llvm_unreachable("unsupported ErrorCode");
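
The two codes are meant to distinguish which stage of the pipeline rejected the code. Below is a small illustrative helper (not part of this diff) that picks the right one, assuming the caller tracks whether the IR optimizer has already run and that `ErrorCode` is the public nested enum of `GlowErr` shown above.

// Illustrative only: choose the error code for a failed backend verification.
static GlowErr::ErrorCode unsupportedIRCode(bool afterIROptimizations) {
  return afterIROptimizations
             ? GlowErr::ErrorCode::COMPILE_UNSUPPORTED_IR_AFTER_OPTIMIZE
             : GlowErr::ErrorCode::COMPILE_UNSUPPORTED_IR_AFTER_GENERATE;
}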

lib/Backend/Backend.cpp

Lines changed: 22 additions & 0 deletions
@@ -157,6 +157,28 @@ void Backend::autoInstrument(TraceInfo &traceInfo, IRFunction *IR) const {
   IR->pushInstr(new TraceEventInst("end_trace", backingWeight, index));
 }
 
+bool Backend::checkAllNodesSupported(const Function &F) const {
+  bool allSupported = true;
+  for (const Node &N : F.getNodes()) {
+    if (!isOpSupported(N)) {
+      allSupported = false;
+      report("Unsupported node found while compiling Function " +
+             F.getName().str() + " for backend " + getBackendName() + ": " +
+             N.getDebugDesc());
+    }
+  }
+  return allSupported;
+}
+
+bool Backend::verify(const Function &F) const {
+  return F.verify() && checkAllNodesSupported(F);
+}
+
+bool Backend::verify(const IRFunction &IR) const {
+  (void)IR;
+  return true;
+}
+
 FunctionPassPipeline Backend::getOptimizationPipeline() const {
   return createDefaultGraphOptimizationPassPipeline();
 };

lib/Backend/BackendUtils.cpp

Lines changed: 39 additions & 0 deletions
@@ -349,6 +349,45 @@ bool isInput(const Placeholder *PH,
   return false;
 }
 
+/// If \p N does not have fused activation \returns true.
+bool checkNoFusionForNode(const Node &N) {
+#define DEF_NODE(CLASS, NAME)                                                  \
+  case Kinded::Kind::CLASS##Kind: {                                            \
+    const CLASS *CI = llvm::cast<CLASS>(&N);                                   \
+    return checkNoFusion(*CI);                                                 \
+    break;                                                                     \
+  }
+  switch (N.getKind()) {
+#include "glow/AutoGenNodes.def"
+  default:
+    llvm_unreachable("Invalid node.");
+  }
+  return true;
+}
+
+/// If \p I does not have fused activation \returns true.
+bool checkNoFusionForInstr(const Instruction &I) {
+#define DEF_VALUE(CLASS, NAME)
+#define DEF_INSTR(CLASS, NAME)                                                 \
+  case Kinded::Kind::CLASS##Kind: {                                            \
+    const CLASS *CI = llvm::cast<CLASS>(&I);                                   \
+    return checkNoFusion(*CI);                                                 \
+    break;                                                                     \
+  }
+#define DEF_BACKEND_SPECIFIC_INSTR(CLASS, NAME)                                \
+  case Kinded::Kind::CLASS##Kind: {                                            \
+    const CLASS *CI = llvm::cast<CLASS>(&I);                                   \
+    return checkNoFusion(*CI);                                                 \
+    break;                                                                     \
+  }
+  switch (I.getKind()) {
+#include "glow/AutoGenInstr.def"
+  default:
+    llvm_unreachable("Invalid instruction.");
+  }
+  return true;
+}
+
 template <typename FUN, typename ARR>
 ContiguousPlaceholders getContiguousPlaceHolder(const ARR &holders,
                                                 const FUN &F) {
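
The `#define DEF_NODE ... #include "glow/AutoGenNodes.def"` construct above is the classic X-macro pattern: the `.def` file is just a list of `DEF_NODE(Class, Name)` invocations, so whatever `DEF_NODE` expands to at the include point becomes one `case` per node kind. Below is a self-contained illustration with a hypothetical node list (none of this is Glow code).

#include <cstdio>

// Stand-in for glow/AutoGenNodes.def: one DEF_NODE(...) entry per node kind.
#define FOR_EACH_NODE                                                          \
  DEF_NODE(ConvolutionNode, "Convolution")                                     \
  DEF_NODE(ReluNode, "Relu")

enum Kind { kConvolutionNode, kReluNode };

// Each DEF_NODE expansion contributes one `case` to the switch, just as the
// #include of AutoGenNodes.def does inside checkNoFusionForNode above.
static const char *kindName(Kind k) {
#define DEF_NODE(CLASS, NAME)                                                  \
  case k##CLASS:                                                               \
    return NAME;
  switch (k) { FOR_EACH_NODE }
#undef DEF_NODE
  return "unknown";
}

int main() {
  std::printf("%s\n", kindName(kReluNode)); // prints: Relu
  return 0;
}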

lib/Backends/Interpreter/Interpreter.cpp

Lines changed: 109 additions & 0 deletions
@@ -22,6 +22,7 @@
 #include "glow/Graph/Graph.h"
 #include "glow/Graph/Nodes.h"
 #include "glow/IR/IR.h"
+#include "glow/IR/Instrs.h"
 #include "glow/Optimizer/IROptimizer/IROptimizer.h"
 
 using namespace glow;
@@ -467,6 +468,114 @@ bool Interpreter::isOpSupported(const NodeInfo &NI) const {
   }
 }
 
+/// Use template meta-programming to check if typename ClassName contains
+/// has_getLayout() method. Below generates a struct named has_getLayout that
+/// looks for said method.
+CLASS_CONTAINS_METHOD(getLayout)
+
+template <typename T, std::enable_if_t<
+                          !has_getLayout<T, ConvolutionLayout>::value, int> = 0>
+static bool checkLayout(const T &I) {
+  (void)I;
+  return true;
+}
+
+template <typename T,
+          std::enable_if_t<has_getLayout<T, ConvolutionLayout>::value, int> = 0>
+static bool checkLayout(const T &I) {
+  if (I.getLayout() != NHWC) {
+    report("Glow Interpreter supports only NHWC");
+    return false;
+  }
+  return true;
+}
+
+static bool checkLayoutForNode(const Node &N) {
+#define DEF_NODE(CLASS, NAME)                                                  \
+  case Kinded::Kind::CLASS##Kind: {                                            \
+    const CLASS *CI = llvm::cast<CLASS>(&N);                                   \
+    return checkLayout(*CI);                                                   \
+    break;                                                                     \
+  }
+  switch (N.getKind()) {
+#include "glow/AutoGenNodes.def"
+  default:
+    llvm_unreachable("Invalid instruction.");
+  }
+  return true;
+}
+
+bool Interpreter::verify(const Function &F) const {
+  if (!F.verify()) {
+    return false;
+  }
+  if (!checkAllNodesSupported(F)) {
+    return false;
+  }
+  for (const Node &N : F.getNodes()) {
+    if (!checkLayoutForNode(N)) {
+      return false;
+    }
+    if (!checkNoFusionForNode(N)) {
+      return false;
+    }
+    switch (N.getKind()) {
+    case Kinded::Kind::ChannelwiseQuantizedConvolutionNodeKind: {
+      auto *CQCI = llvm::cast<ChannelwiseQuantizedConvolutionNode>(&N);
+      if (!CQCI->getGroupwise()) {
+        report("Glow Interpreter does not support Non-groupwise variant");
+        return false;
+      }
+      continue;
+    }
+    default:
+      continue;
+    }
+  }
+  return true;
+}
+
+static bool checkLayoutForInstr(const Instruction &I) {
+#define DEF_VALUE(CLASS, NAME)
+#define DEF_INSTR(CLASS, NAME)                                                 \
+  case Kinded::Kind::CLASS##Kind: {                                            \
+    const CLASS *CI = llvm::cast<CLASS>(&I);                                   \
+    return checkLayout(*CI);                                                   \
+    break;                                                                     \
+  }
+#define DEF_BACKEND_SPECIFIC_INSTR(CLASS, NAME)
+  switch (I.getKind()) {
+#include "glow/AutoGenInstr.def"
+  default:
+    llvm_unreachable("Invalid instruction.");
+  }
+  return true;
+}
+
+bool Interpreter::verify(const IRFunction &IR) const {
+  for (const auto &I : IR.getInstrs()) {
+    if (!checkNoFusionForInstr(I)) {
+      return false;
+    }
+    if (!checkLayoutForInstr(I)) {
+      return false;
+    }
+    switch (I.getKind()) {
+    case Kinded::Kind::ChannelwiseQuantizedConvolutionInstKind: {
+      auto *CQCI = llvm::cast<ChannelwiseQuantizedConvolutionInst>(&I);
+      if (!CQCI->getGroupwise()) {
+        report("Glow Interpreter does not support Non-groupwise variant");
+        return false;
+      }
+      continue;
+    }
+    default:
+      continue;
+    }
+  }
+  return true;
+}
+
 bool Interpreter::shouldLower(const Node *N) const {
   switch (N->getKind()) {
   case Kinded::Kind::ConvolutionNodeKind:

lib/Backends/Interpreter/Interpreter.h

Lines changed: 3 additions & 0 deletions
@@ -49,6 +49,9 @@ class Interpreter final : public BackendUsingGlowIR {
 
   bool isOpSupported(const NodeInfo &NI) const override;
 
+  bool verify(const Function &F) const override;
+  bool verify(const IRFunction &IR) const override;
+
   bool shouldLower(const Node *N) const override;
 
   /// @}
