
[Placeholder] Allow executing Placeholders in the execution engine. #1597


Merged: 1 commit, Sep 9, 2018

include/glow/Backends/Backend.h (5 additions, 2 deletions)

@@ -39,9 +39,12 @@ class Backend {
   /// Dtor.
   virtual ~Backend() = default;
 
-  /// Generate code for input function \param IR.
+  /// Generate code for input function \param IR. \p placeholders is a list of
+  /// Placeholders that are mapped to the concrete input tensor for the
+  /// specific function.
   virtual std::unique_ptr<CompiledFunction>
-  compile(std::unique_ptr<IRFunction> IR) const = 0;
+  compile(std::unique_ptr<IRFunction> IR,
+          const PlaceholderMap &placeholders) const = 0;
 
   /// Save the bundle for \p IR for a later standalone execution
   /// in \p outputDir. Make \p networkName the function name for

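For backend authors the migration is mechanical: accept the extra parameter and thread it through, or accept it and simply not use it, as the OpenCL backend does below. A sketch of a forwarding override in the spirit of the MockCPUBackend change in this PR's tests (MyBackend and inner_ are illustrative names, and the remaining Backend overrides are elided):

    class MyBackend final : public Backend {
      std::unique_ptr<Backend> inner_;

    public:
      MyBackend() : inner_(createBackend(BackendKind::CPU)) {}

      std::unique_ptr<CompiledFunction>
      compile(std::unique_ptr<IRFunction> IR,
              const PlaceholderMap &placeholders) const override {
        // Forward the placeholder bindings to the wrapped backend.
        return inner_->compile(std::move(IR), placeholders);
      }

      bool isOpSupported(Kinded::Kind opKind, ElemKind elementTy) const override {
        return true;
      }
      // Other Backend overrides (save, etc.) elided for brevity.
    };
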
include/glow/Backends/CompiledFunction.h (8 additions, 0 deletions)

@@ -16,8 +16,16 @@
 #ifndef GLOW_BACKENDS_COMPILEDFUNCTION_H
 #define GLOW_BACKENDS_COMPILEDFUNCTION_H
 
+#include <unordered_map>
+
 namespace glow {
 
+class Placeholder;
+class Tensor;
+
+/// Maps placeholders to the tensors that back them.
+using PlaceholderMap = std::unordered_map<Placeholder *, Tensor *>;
+
 /// Interface for executing a compiled function.
 class CompiledFunction {
 public:

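A PlaceholderMap is just the pairing of each Placeholder with the caller-owned Tensor that backs it. A minimal sketch of building one from parallel lists, mirroring the zip loop that ExecutionEngine::compile gains later in this diff (buildPlaceholderMap is a hypothetical helper, not part of the PR):

    #include "glow/Backends/CompiledFunction.h"
    #include "llvm/ADT/ArrayRef.h"

    #include <cassert>

    using namespace glow;

    // Pair the i-th placeholder with the i-th tensor. The caller keeps
    // ownership of the tensors, which must outlive the compiled function.
    static PlaceholderMap buildPlaceholderMap(llvm::ArrayRef<Placeholder *> phs,
                                              llvm::ArrayRef<Tensor *> tensors) {
      assert(phs.size() == tensors.size() && "Mismatched placeholders/tensors");
      PlaceholderMap pmap;
      for (size_t i = 0, e = phs.size(); i < e; i++) {
        pmap[phs[i]] = tensors[i];
      }
      return pmap;
    }
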
include/glow/ExecutionEngine/ExecutionEngine.h (5 additions, 1 deletion)

@@ -67,7 +67,11 @@ class ExecutionEngine final {
 
   /// Optimize the graph, generate IR, optimize IR and compile it for a
   /// specific target. This method should be invoked before the run method.
-  void compile(CompilationMode mode, Function *F);
+  /// The placeholder variables in \p placeholders are mapped to the concrete
+  /// tensor values in the compiled instance of the function.
+  void compile(CompilationMode mode, Function *F,
+               llvm::ArrayRef<Placeholder *> placeholders = {},
+               llvm::ArrayRef<Tensor *> inputs = {});
 
   /// Save a bundle for a standalone execution. This method takes care of
   /// everything when preparing the bundle for saving. There is no need to

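Because both new parameters are defaulted, existing call sites keep compiling unchanged, while new callers can bind tensors at compile time. A minimal usage sketch, following the simplePlaceholderValue test added at the bottom of this PR (assumes the Interpreter backend; runWithBoundPlaceholder is an illustrative name):

    #include "glow/ExecutionEngine/ExecutionEngine.h"

    using namespace glow;

    void runWithBoundPlaceholder() {
      ExecutionEngine EE{BackendKind::Interpreter};
      auto &mod = EE.getModule();
      Function *F = mod.createFunction("main");

      // The caller owns the input tensor; the placeholder is a named slot.
      Tensor data{1.0, 2.0, 3.0, 4.0};
      auto *input = mod.createPlaceholder(ElemKind::FloatTy, {4}, "input");
      SaveNode *save = F->createSave("ret", input);

      // The i-th placeholder is backed by the i-th tensor.
      EE.compile(CompilationMode::Infer, F, {input}, {&data});
      EE.run();

      // After the run, the save target holds a copy of `data`.
      Tensor &result = save->getVariable()->getPayload();
      (void)result;
    }
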
lib/Backends/CPU/CPUBackend.cpp (2 additions, 1 deletion)

@@ -123,7 +123,8 @@ CPUBackend::createIRGen(IRFunction *IR,
 }
 
 std::unique_ptr<CompiledFunction>
-CPUBackend::compile(std::unique_ptr<IRFunction> IR) const {
+CPUBackend::compile(std::unique_ptr<IRFunction> IR,
+                    const PlaceholderMap &placeholders) const {
   AllocationsInfo allocationsInfo;
   std::unique_ptr<LLVMIRGen> irgen = createIRGen(IR.get(), allocationsInfo);
   irgen->initTargetMachine(target.empty() ? "" : target.getValue(),

lib/Backends/CPU/CPUBackend.h (2 additions, 1 deletion)

@@ -43,7 +43,8 @@ class CPUBackend : public Backend {
   ~CPUBackend() override = default;
 
   std::unique_ptr<CompiledFunction>
-  compile(std::unique_ptr<IRFunction> IR) const override;
+  compile(std::unique_ptr<IRFunction> IR,
+          const PlaceholderMap &placeholders) const override;
 
   void save(std::unique_ptr<IRFunction> IR, llvm::StringRef outputDir,
             llvm::StringRef networkName) const override;

lib/Backends/Interpreter/Interpreter.cpp (3 additions, 2 deletions)

@@ -24,8 +24,9 @@
 using namespace glow;
 
 std::unique_ptr<CompiledFunction>
-Interpreter::compile(std::unique_ptr<IRFunction> IR) const {
-  return llvm::make_unique<InterpreterFunction>(std::move(IR));
+Interpreter::compile(std::unique_ptr<IRFunction> IR,
+                     const PlaceholderMap &placeholders) const {
+  return llvm::make_unique<InterpreterFunction>(std::move(IR), placeholders);
 }
 
 bool Interpreter::isOpSupported(Kinded::Kind opKind, ElemKind elementTy) const {

lib/Backends/Interpreter/Interpreter.h (2 additions, 1 deletion)

@@ -35,7 +35,8 @@ class Interpreter final : public Backend {
   ~Interpreter() override = default;
 
   std::unique_ptr<CompiledFunction>
-  compile(std::unique_ptr<IRFunction> IR) const override;
+  compile(std::unique_ptr<IRFunction> IR,
+          const PlaceholderMap &placeholders) const override;
 
   bool isOpSupported(Kinded::Kind opKind, ElemKind elementTy) const override;
 

lib/Backends/Interpreter/InterpreterFunction.cpp (10 additions, 5 deletions)

@@ -24,17 +24,22 @@
 using namespace glow;
 
-InterpreterFunction::InterpreterFunction(std::unique_ptr<IRFunction> F)
+InterpreterFunction::InterpreterFunction(std::unique_ptr<IRFunction> F,
+                                         const PlaceholderMap &placeholders)
     : F_(std::move(F)) {
 
+  // Register the concrete tensors that back the placeholder tensors.
+  for (auto &ph : placeholders) {
+    auto *w = F_->getWeightForNode(ph.first);
+    assert(!externalTensors_.count(w) && "The tensor is already registered");
+    externalTensors_[w] = ph.second;
+  }
+
   for (auto &v : F_->getGraph()->getParent()->getVars()) {
     auto *w = F_->getWeightForNode(v);
     assert(!externalTensors_.count(w) && "The tensor is already registered");
     externalTensors_[w] = &v->getPayload();
   }
 
   for (auto *W : F_->getWeights()) {
     getOrCreateTensor(W);
   }
 }
 
 InterpreterFunction::~InterpreterFunction() {

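The constructor thus sets up two tiers of storage: externally registered tensors (placeholder-backed first, then module variables) and interpreter-owned tensors for everything else. A conceptual sketch of how getOrCreateTensor could consult that map, assuming an owned-tensor map named tensors_ next to externalTensors_ (the real implementation may differ in detail):

    // Conceptual sketch, not the literal Glow implementation.
    Tensor *InterpreterFunction::getOrCreateTensor(const Value *v) {
      // External storage registered in the constructor takes precedence.
      auto ext = externalTensors_.find(v);
      if (ext != externalTensors_.end()) {
        return ext->second;
      }
      // Otherwise, lazily allocate a tensor owned by this function.
      auto it = tensors_.find(v);
      if (it == tensors_.end()) {
        it = tensors_.emplace(v, new Tensor(v->getType())).first;
      }
      return it->second;
    }
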
lib/Backends/Interpreter/InterpreterFunction.h (2 additions, 1 deletion)

@@ -48,7 +48,8 @@ class InterpreterFunction final : public CompiledFunction {
   std::unordered_map<const Value *, Tensor *> externalTensors_;
 
 public:
-  InterpreterFunction(std::unique_ptr<IRFunction> F);
+  InterpreterFunction(std::unique_ptr<IRFunction> F,
+                      const PlaceholderMap &placeholders);
 
   /// \name CompiledFunction interface
   ///@{

lib/Backends/OpenCL/OpenCL.cpp (2 additions, 1 deletion)

@@ -1571,6 +1571,7 @@ cl_mem OpenCLFunction::allocDeviceBuffer(uint64_t size) {
 void OpenCLFunction::freeDeviceBuffer(cl_mem buf) { clReleaseMemObject(buf); }
 
 std::unique_ptr<CompiledFunction>
-OCLBackend::compile(std::unique_ptr<IRFunction> IR) const {
+OCLBackend::compile(std::unique_ptr<IRFunction> IR,
+                    const PlaceholderMap &placeholders) const {
   return llvm::make_unique<OpenCLFunction>(std::move(IR));
 }

lib/Backends/OpenCL/OpenCL.h (2 additions, 1 deletion)

@@ -171,7 +171,8 @@ class OCLBackend final : public Backend {
   ~OCLBackend() override = default;
 
   std::unique_ptr<CompiledFunction>
-  compile(std::unique_ptr<IRFunction> IR) const override;
+  compile(std::unique_ptr<IRFunction> IR,
+          const PlaceholderMap &placeholders) const override;
 
   bool transformPostLowering(Function *F, CompilationMode mode) const override;
 

lib/ExecutionEngine/ExecutionEngine.cpp (13 additions, 2 deletions)

@@ -169,8 +169,19 @@ std::unique_ptr<IRFunction> ExecutionEngine::generateIR(CompilationMode mode,
   return IR;
 }
 
-void ExecutionEngine::compile(CompilationMode mode, Function *F) {
-  function_ = backend_->compile(generateIR(mode, F));
+void ExecutionEngine::compile(CompilationMode mode, Function *F,
+                              llvm::ArrayRef<Placeholder *> placeholders,
+                              llvm::ArrayRef<Tensor *> inputs) {
+  PlaceholderMap pmap;
+  assert(placeholders.size() == inputs.size() &&
+         "Invalid number of placeholders");
+
+  for (size_t i = 0, e = placeholders.size(); i < e; i++) {
+    pmap[placeholders[i]] = inputs[i];
+  }
+
+  auto IR = generateIR(mode, F);
+  function_ = backend_->compile(std::move(IR), pmap);
 }
 
 void ExecutionEngine::save(CompilationMode mode, Function *F,

tests/unittests/BackendCorrectnessTest.cpp (5 additions, 3 deletions)

@@ -239,8 +239,9 @@ class MockCPUBackend : public Backend {
 public:
   MockCPUBackend() { backend_.reset(createBackend(BackendKind::CPU)); }
   std::unique_ptr<CompiledFunction>
-  compile(std::unique_ptr<IRFunction> IR) const override {
-    return backend_->compile(std::move(IR));
+  compile(std::unique_ptr<IRFunction> IR,
+          const PlaceholderMap &placeholders) const override {
+    return backend_->compile(std::move(IR), placeholders);
   }
   bool isOpSupported(Kinded::Kind opKind, ElemKind elementTy) const override {
     return true;

@@ -304,7 +305,8 @@ TEST_P(CPUOnly, dataParallelStackingTest) {
   }
 
   MockCPUBackend backend;
-  backend.compile(std::move(M))->execute();
+  PlaceholderMap empty;
+  backend.compile(std::move(M), empty)->execute();
   auto H = var->getHandle();
   EXPECT_EQ(H.at(0), 3);
   EXPECT_EQ(H.at(1), 4);

tests/unittests/BackendTest.cpp (17 additions, 1 deletion)

@@ -145,7 +145,8 @@ TEST_P(BackendTest, debugPrint) {
   IRBuilder(IR.get()).createDebugPrintInst("print", *IR->getWeights().begin());
 
   std::unique_ptr<Backend> backend(createBackend(GetParam()));
-  auto function = backend->compile(std::move(IR));
+  PlaceholderMap empty;
+  auto function = backend->compile(std::move(IR), empty);
   function->execute();
 }

@@ -176,6 +177,21 @@ TEST_P(BackendTest, decoupleCodegenFromGraph) {
   EXPECT_NEAR(HX.at({2}), 9, 1E-5);
 }
 
+/// Check that we can pass information to the execution engine using Placeholder
+/// variables and read it back using Save nodes (in variables).
+TEST(Placeholder, simplePlaceholderValue) {
+  Tensor data{99.0, 35.0, 2.0, 3.0};
+  ExecutionEngine EE{BackendKind::Interpreter};
+  auto &mod = EE.getModule();
+  Function *F = mod.createFunction("main");
+  auto *input = mod.createPlaceholder(ElemKind::FloatTy, {4}, "input");
+  SaveNode *S = F->createSave("ret", input);
+  EE.compile(CompilationMode::Infer, F, {input}, {&data});
+  EE.run();
+  auto &res = S->getVariable()->getPayload();
+  EXPECT_TRUE(res.isEqual(data));
+}
+
 INSTANTIATE_TEST_CASE_P(Interpreter, BackendTest,
                         ::testing::Values(BackendKind::Interpreter));

tests/unittests/BackendTestUtils.h (2 additions, 1 deletion)

@@ -26,7 +26,8 @@ class MockBackend : public Backend {
     void execute() override {}
   };
   std::unique_ptr<CompiledFunction>
-  compile(std::unique_ptr<IRFunction> IR) const override {
+  compile(std::unique_ptr<IRFunction> IR,
+          const PlaceholderMap &placeholders) const override {
    return llvm::make_unique<MockFunction>();
   }
   bool isOpSupported(Kinded::Kind opKind, ElemKind elementTy) const override {

tests/unittests/quantizationTest.cpp (3 additions, 2 deletions)

@@ -615,8 +615,9 @@ class MockQuantBackend : public Backend {
     backend_.reset(createBackend(BackendKind::Interpreter));
   }
   std::unique_ptr<CompiledFunction>
-  compile(std::unique_ptr<IRFunction> IR) const override {
-    return backend_->compile(std::move(IR));
+  compile(std::unique_ptr<IRFunction> IR,
+          const PlaceholderMap &placeholders) const override {
+    return backend_->compile(std::move(IR), placeholders);
   }
   bool isOpSupported(Kinded::Kind opKind, ElemKind elementTy) const override {
     if (opKind == Kinded::Kind::SoftMaxNodeKind ||