Skip to content

Create glow::Error and glow::Expected #3515

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Closed
wants to merge 1 commit into from
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion docs/GraphOptimizationPipeline.md
Original file line number Diff line number Diff line change
Expand Up @@ -86,7 +86,7 @@ Here we describe the API for `glow::optimizeFunction()` and how to use it in
different modes.

```
llvm::Error glow::optimizeFunction(Function *F, const Backend &B,
Error glow::optimizeFunction(Function *F, const Backend &B,
CompilationContext &cctx);
```

Expand Down
3 changes: 1 addition & 2 deletions examples/mnist.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -256,8 +256,7 @@ void testMNISTLoadAndTraining() {
inferMod.uniqueType(glow::ElemKind::FloatTy, {minibatchSize, 1, 28, 28});
const char *inputName = "data";

llvm::Error errPtr = llvm::Error::success();
MARK_ERR_CHECKED(errPtr);
Error errPtr = Error::empty();
// Load and compile LeNet MNIST model.
glow::Caffe2ModelLoader loader("lenet_mnist/predict_net.pb",
"lenet_mnist/init_net.pb", {inputName},
Expand Down
2 changes: 1 addition & 1 deletion examples/resnet-runtime.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -96,7 +96,7 @@ void dispatchClassify(unsigned int id, HostManager *hostManager,
std::promise<void> &finished) {
auto runid = hostManager->runNetwork(
"resnet50" + std::to_string(id), std::move(context),
[path, &returned, &finished](RunIdentifierTy runid, llvm::Error err,
[path, &returned, &finished](RunIdentifierTy runid, Error err,
std::unique_ptr<ExecutionContext> context) {
EXIT_ON_ERR(std::move(err));
auto *bindings = context->getPlaceholderBindings();
Expand Down
24 changes: 11 additions & 13 deletions examples/tracing-compare.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -90,18 +90,16 @@ std::future<void> addToDevice(unsigned int id, DeviceManager *device,
auto compilePromise = std::make_shared<std::promise<void>>();
auto future = compilePromise->get_future();

device->addNetwork(&module, functions,
[compilePromise, id](const Module *, llvm::Error err) {
if (err) {
llvm::errs() << "Failed to compile model for device "
<< id << ".\n";
EXIT_ON_ERR(std::move(err));
} else {
llvm::outs()
<< "Successfully added to Device " << id << ".\n";
}
compilePromise->set_value();
});
device->addNetwork(
&module, functions, [compilePromise, id](const Module *, Error err) {
if (err) {
llvm::errs() << "Failed to compile model for device " << id << ".\n";
EXIT_ON_ERR(std::move(err));
} else {
llvm::outs() << "Successfully added to Device " << id << ".\n";
}
compilePromise->set_value();
});

return future;
}
Expand Down Expand Up @@ -157,7 +155,7 @@ int main(int argc, char **argv) {

devices[i]->runFunction(
"resnet50", std::move(context),
[&promises, i](RunIdentifierTy, llvm::Error err,
[&promises, i](RunIdentifierTy, Error err,
std::unique_ptr<ExecutionContext> context) {
EXIT_ON_ERR(std::move(err));
promises[i].set_value(std::move(context));
Expand Down
2 changes: 1 addition & 1 deletion examples/training/resnet50/main.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -154,7 +154,7 @@ int main(int argc, char **argv) {

// Load ResNet model.
llvm::outs() << "Loading resnet50 model.\n";
llvm::Error errPtr = llvm::Error::success();
Error errPtr = Error::success();

// Loader has randomly initialized trainable weights.
Caffe2ModelLoader loader(resnet50Path + "/predict_net.pbtxt",
Expand Down
8 changes: 4 additions & 4 deletions include/glow/Backend/Backend.h
Original file line number Diff line number Diff line change
Expand Up @@ -51,7 +51,7 @@ class Backend {
/// Generate code for a vector of functions, \p functions. All compilations
/// use the same settings provided by \p opts. This allows the compiler to
/// support shared constants between functions.
virtual llvm::Expected<std::vector<std::unique_ptr<CompiledFunction>>>
virtual Expected<std::vector<std::unique_ptr<CompiledFunction>>>
compileFunctions(llvm::ArrayRef<Function *> functions,
BackendOptions &opts) const {
std::vector<std::unique_ptr<CompiledFunction>> compiledFunctions;
Expand All @@ -62,18 +62,18 @@ class Backend {
return resOrErr.takeError();
}
}
return llvm::Expected<std::vector<std::unique_ptr<CompiledFunction>>>(
return Expected<std::vector<std::unique_ptr<CompiledFunction>>>(
std::move(compiledFunctions));
}

virtual llvm::Expected<std::unique_ptr<CompiledFunction>>
virtual Expected<std::unique_ptr<CompiledFunction>>
compile(Function *F) const {
BackendOptions opts;
return compile(F, opts);
}

/// Generate code for input function \param F given settings in \p opts.
virtual llvm::Expected<std::unique_ptr<CompiledFunction>>
virtual Expected<std::unique_ptr<CompiledFunction>>
compile(Function *F, const BackendOptions &opts) const = 0;

/// Save the bundle for \p F for a later standalone execution in \p outputDir
Expand Down
4 changes: 2 additions & 2 deletions include/glow/Backend/CompiledFunction.h
Original file line number Diff line number Diff line change
Expand Up @@ -37,8 +37,8 @@ class CompiledFunction {
virtual ~CompiledFunction();
/// Execute the network and allocate Placeholder memory with given
/// \p bindings providing mapping between Placeholder and populated tensor.
/// \returns an llvm::Error if an error occurred during execution.
virtual llvm::Error execute(ExecutionContext *context) = 0;
/// \returns an Error if an error occurred during execution.
virtual Error execute(ExecutionContext *context) = 0;

/// Getter for the runtimeBundle.
runtime::RuntimeBundle &getRuntimeBundle() { return runtimeBundle_; }
Expand Down
15 changes: 6 additions & 9 deletions include/glow/Backends/DeviceManager.h
Original file line number Diff line number Diff line change
Expand Up @@ -32,11 +32,10 @@ namespace glow {
namespace runtime {

/// Callback signalling success/failure of evicting a function from a Device.
using EvictFunctionCBTy =
std::function<void(std::string functionName, llvm::Error)>;
using EvictFunctionCBTy = std::function<void(std::string functionName, Error)>;

/// Callback signalling success/failure of loading a Module onto a device.
using ReadyCBTy = std::function<void(const Module *, llvm::Error)>;
using ReadyCBTy = std::function<void(const Module *, Error)>;

/// Map of Function name -> CompiledFunction, used when loading a network onto a
/// device.
Expand Down Expand Up @@ -95,7 +94,7 @@ class DeviceManager {
generateDeviceConfigs(llvm::StringRef backendName);

/// Initialize the device.
virtual llvm::Error init() { return llvm::Error::success(); }
virtual Error init() { return Error::success(); }

/// Load the provided module into the device, readyCB will be called when
/// ready to use.
Expand All @@ -108,8 +107,8 @@ class DeviceManager {
/// up space on the device. \p evictCB will be called when the operation
/// is completed or attempted and failed.
virtual void evictNetwork(std::string functionName,
EvictFunctionCBTy evictCB = [](std::string,
llvm::Error) {}) = 0;
EvictFunctionCBTy evictCB = [](std::string, Error) {
}) = 0;

/// Execute the named Function in an already provided network on the device.
/// functionName must match the name of a function already added.
Expand All @@ -122,9 +121,7 @@ class DeviceManager {
runtime::ResultCBTy resultCB) = 0;

/// Stops execution and shuts down the Device.
virtual llvm::Error stop(bool block = true) {
return llvm::Error::success();
};
virtual Error stop(bool block = true) { return Error::success(); };

/// \returns the name of backend that powers this Device.
llvm::StringRef getBackendName() { return config_.backendName; }
Expand Down
9 changes: 5 additions & 4 deletions include/glow/Backends/DummyDeviceManager.h
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,7 @@

#include "glow/Backends/DeviceManager.h"
#include "glow/Runtime/RuntimeTypes.h"
#include "llvm/Support/FormatVariadic.h"

#include <atomic>

Expand Down Expand Up @@ -55,7 +56,7 @@ class DummyDeviceManager : public DeviceManager {
callback(
module,
MAKE_ERR(
GlowErr::ErrorCode::RUNTIME_NET_NOT_FOUND,
ErrorValue::ErrorCode::RUNTIME_NET_NOT_FOUND,
llvm::formatv("Function {0} not found", func.first).str()));
return;
}
Expand All @@ -69,15 +70,15 @@ class DummyDeviceManager : public DeviceManager {
}

// Fire the ready CB.
callback(module, llvm::Error::success());
callback(module, Error::success());
}

/// Remove (and delete) the provided function, freeing
/// up space on the device.
void evictNetwork(std::string functionName,
EvictFunctionCBTy evictCB) override {
functions_.erase(functionName);
evictCB(functionName, llvm::Error::success());
evictCB(functionName, Error::success());
}

/// Execute the named Function in an already provided network on the device.
Expand All @@ -92,7 +93,7 @@ class DummyDeviceManager : public DeviceManager {
if (funcIt == functions_.end()) {
callback(
0,
MAKE_ERR(GlowErr::ErrorCode::RUNTIME_NET_NOT_FOUND,
MAKE_ERR(ErrorValue::ErrorCode::RUNTIME_NET_NOT_FOUND,
llvm::formatv("Function {0} not found", functionName).str()),
std::move(context));
return 0;
Expand Down
8 changes: 4 additions & 4 deletions include/glow/Backends/QueueBackedDeviceManager.h
Original file line number Diff line number Diff line change
Expand Up @@ -37,11 +37,11 @@ class QueueBackedDeviceManager : public DeviceManager {
: DeviceManager(config), workThread_(1) {}

virtual ~QueueBackedDeviceManager() {
llvm::toString(stop(true)); // will join workThread_
ERR_TO_VOID(stop(true)); // will join workThread_
}

/// Initialize the device.
llvm::Error init() override { return llvm::Error::success(); }
Error init() override { return Error::success(); }

/// Load the provided module into the device, readyCB will be called when
/// ready to use
Expand Down Expand Up @@ -81,9 +81,9 @@ class QueueBackedDeviceManager : public DeviceManager {
}

/// Stops execution and shuts down the Device.
llvm::Error stop(bool block = true) override {
Error stop(bool block = true) override {
workThread_.stop(block);
return llvm::Error::success();
return Error::success();
}

protected:
Expand Down
2 changes: 1 addition & 1 deletion include/glow/ExecutionEngine/ExecutionEngine.h
Original file line number Diff line number Diff line change
Expand Up @@ -92,7 +92,7 @@ class ExecutionEngine final {
void clear();

/// \returns the DAG for the specified \p network.
llvm::Expected<runtime::DAG &> getDAG(llvm::StringRef network) {
Expected<runtime::DAG *> getDAG(llvm::StringRef network) {
return hostManager_->getNetworkDAG(network);
}

Expand Down
11 changes: 5 additions & 6 deletions include/glow/Exporter/CommonOperatorWriter.h
Original file line number Diff line number Diff line change
Expand Up @@ -29,15 +29,14 @@ template <typename Traits> class CommonOperatorWriter : public ProtobufWriter {
/// Declare pure virtual methods, one per each node kind.
/// Derived classes must implement all of them.
#define DEF_NODE(CLASS, NAME) \
virtual llvm::Error write##NAME(const CLASS *node, \
typename Traits::GraphProto &graph) = 0;
virtual Error write##NAME(const CLASS *node, \
typename Traits::GraphProto &graph) = 0;
#include "glow/AutoGenNodes.def"

/// Function invokes the corresponding virtual method according to \p node
/// type to serialize node information into \p graph (protobuf), reports
/// visited intermediate nodes through \p reporter, \returns llvm::Error.
llvm::Error writeOperator(const Node *node,
typename Traits::GraphProto &graph) {
/// visited intermediate nodes through \p reporter, \returns Error.
Error writeOperator(const Node *node, typename Traits::GraphProto &graph) {
switch (node->getKind()) {
#define DEF_NODE(CLASS, NAME) \
case glow::Kinded::Kind::CLASS##Kind: \
Expand All @@ -46,7 +45,7 @@ template <typename Traits> class CommonOperatorWriter : public ProtobufWriter {
default:
llvm_unreachable(
"Not reachable, values and instructions are not handled here");
return llvm::Error::success();
return Error::success();
}
}

Expand Down
14 changes: 7 additions & 7 deletions include/glow/Exporter/ONNXModelWriter.h
Original file line number Diff line number Diff line change
Expand Up @@ -55,12 +55,12 @@ class ONNXModelWriter : public CommonOperatorWriter<ONNX_TRAITS> {
ValueInfoType *valueProto);
/// Writes all inputs and outputs with operator name \p opName from give Node
/// \p node into protobuf \p proto.
static llvm::Error writeAllWithNode(const std::string &opName,
const Node *node, NodeType *proto);
static Error writeAllWithNode(const std::string &opName, const Node *node,
NodeType *proto);
/// Writes all inputs and outputs with operator name \p opName from give Node
/// \p node into created node protobuf using \p graph.
static llvm::Error writeAll(const std::string &opName, const Node *node,
GraphType &graph);
static Error writeAll(const std::string &opName, const Node *node,
GraphType &graph);
// Finds if uses of \p node have node with the provided \p kind.
static bool hasUsesOfKind(const Node *node, Kinded::Kind kind);

Expand All @@ -76,18 +76,18 @@ class ONNXModelWriter : public CommonOperatorWriter<ONNX_TRAITS> {
/// there otherwise if an error occurs it will abort.
ONNXModelWriter(const std::string &modelFilename, Function &F,
size_t irVersion, size_t opsetVersion,
llvm::Error *errPtr = nullptr, bool textMode = false);
Error *errPtr = nullptr, bool textMode = false);

private:
/// \returns error for the unexpected node kind.
static llvm::Error writeUnexpectedKind(const Node *node) {
static Error writeUnexpectedKind(const Node *node) {
RETURN_ERR(strFormat("Glow can not export node %s, unsupported kind: %s.",
node->getName().str().c_str(), node->getKindName()));
}

/// Declares overrides for all pure virtual methods declared in the base class.
#define DEF_NODE(CLASS, NAME) \
llvm::Error write##NAME(const CLASS *, GraphType &) override;
Error write##NAME(const CLASS *, GraphType &) override;
#include "glow/AutoGenNodes.def"
};

Expand Down
6 changes: 3 additions & 3 deletions include/glow/Exporter/ProtobufWriter.h
Original file line number Diff line number Diff line change
Expand Up @@ -32,16 +32,16 @@ class ProtobufWriter {
/// Output file stream.
std::ofstream ff_;

llvm::Error writeModel(const ::google::protobuf::Message &modelProto,
bool textMode = false);
Error writeModel(const ::google::protobuf::Message &modelProto,
bool textMode = false);

public:
/// Constructs a new ProtobufWriter object. It will write protobuf messages into
/// \p modelFilename using graph and constants from \p F.
/// If \p errPtr is not null then if an error occurs it will get assigned
/// there otherwise if an error occurs it will abort.
ProtobufWriter(const std::string &modelFilename, Function &F,
llvm::Error *errPtr = nullptr);
Error *errPtr = nullptr);
};

} // namespace glow
Expand Down
Loading