
feat(lite): fix typo

GitOrigin-RevId: 8c46aa3a30
release-1.7
Megvii Engine Team, 3 years ago
parent commit d02b0a08f7
20 changed files with 39 additions and 39 deletions
  1. imperative/src/impl/backward_graph_opt.cpp (+1 / -1)
  2. imperative/src/impl/op_def.cpp (+4 / -4)
  3. imperative/src/impl/ops/utility.cpp (+6 / -6)
  4. imperative/src/impl/proxy_graph.cpp (+2 / -2)
  5. imperative/src/impl/proxy_graph.h (+1 / -1)
  6. imperative/src/impl/proxy_graph_detail.cpp (+1 / -1)
  7. imperative/src/impl/subgraph.cpp (+2 / -2)
  8. imperative/src/impl/subgraph_detail.cpp (+3 / -3)
  9. imperative/src/include/megbrain/imperative/backward_graph_opt.h (+1 / -1)
  10. imperative/src/include/megbrain/imperative/graph_builder.h (+1 / -1)
  11. imperative/src/include/megbrain/imperative/op_def.h (+2 / -2)
  12. imperative/src/include/megbrain/imperative/ops/utility.h (+1 / -1)
  13. imperative/src/include/megbrain/imperative/proxy_graph_detail.h (+1 / -1)
  14. imperative/src/include/megbrain/imperative/subgraph.h (+5 / -5)
  15. imperative/src/include/megbrain/imperative/subgraph_detail.h (+2 / -2)
  16. imperative/src/test/backward_graph.cpp (+1 / -1)
  17. lite/pylite/pylite.md (+2 / -2)
  18. lite/pylite/test/test_global.py (+1 / -1)
  19. lite/pylite/test/test_network.py (+1 / -1)
  20. lite/pylite/test/test_network_cuda.py (+1 / -1)

imperative/src/impl/backward_graph_opt.cpp (+1 / -1)

@@ -16,7 +16,7 @@
using namespace mgb;
using namespace imperative;

-OptimizedBackwardGraphResult::OptimizedBackwardGraphResult(const EncodedSubraph& src)
+OptimizedBackwardGraphResult::OptimizedBackwardGraphResult(const EncodedSubgraph& src)
: input_has_grad(src.output_mask) {
if (src.graph.exprs.size() <= 1) {
// backward graph only contains a single op


imperative/src/impl/op_def.cpp (+4 / -4)

@@ -80,12 +80,12 @@ std::tuple<SmallVector<LogicalTensorDesc>, bool> OpDef::infer_output_attrs_falli
return def.trait()->infer_output_attrs_fallible(def, inputs);
}

-EncodedSubraph OpDef::make_backward_graph(
+EncodedSubgraph OpDef::make_backward_graph(
const OpDef& def,
const SmallVector<LogicalTensorDesc>& inputs,
const SmallVector<bool>& input_requires_grad,
const SmallVector<bool>& output_has_grad) {
-using BackwardGraphCache = OpMethResultCache<EncodedSubraph, SmallVector<bool>, SmallVector<bool>>;
+using BackwardGraphCache = OpMethResultCache<EncodedSubgraph, SmallVector<bool>, SmallVector<bool>>;
thread_local BackwardGraphCache cache;
decltype(cache)::key_t cache_key{const_cast<OpDef&>(def).shared_from_this(), inputs, {input_requires_grad, output_has_grad}};
auto iter = cache.find(cache_key);
@@ -100,10 +100,10 @@ std::vector<std::pair<const char*, std::string>> OpDef::props(
return def.trait()->props(def);
}

-EncodedSubraph OpDef::make_forward_graph(
+EncodedSubgraph OpDef::make_forward_graph(
const OpDef& def,
const SmallVector<LogicalTensorDesc>& inputs){
-using ForwardGraphCache = OpMethResultCache<EncodedSubraph, SmallVector<bool>, SmallVector<bool>>;
+using ForwardGraphCache = OpMethResultCache<EncodedSubgraph, SmallVector<bool>, SmallVector<bool>>;
thread_local ForwardGraphCache cache;
decltype(cache)::key_t cache_key{const_cast<OpDef&>(def).shared_from_this(), inputs};
auto iter = cache.find(cache_key);
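
Beyond the rename, this hunk passes through the memoization used for graph construction: results are kept in a thread_local OpMethResultCache keyed by the op and its inputs, so each thread builds a given backward graph at most once. A minimal Python sketch of the same thread-local cache idea (function and key names here are illustrative, not MegEngine API):

```python
import threading

# Each thread sees its own cache, mirroring the thread_local
# BackwardGraphCache / ForwardGraphCache above.
_tls = threading.local()

def cached_make_graph(op_key, inputs_key, build):
    cache = getattr(_tls, "cache", None)
    if cache is None:
        cache = _tls.cache = {}
    key = (op_key, inputs_key)
    if key not in cache:
        cache[key] = build()  # construct the graph only on a miss
    return cache[key]
```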


imperative/src/impl/ops/utility.cpp (+6 / -6)

@@ -182,11 +182,11 @@ OP_TRAIT_REG(Identity, Identity)

namespace { namespace subgraph {

-EncodedSubraph make_forward_graph(const OpDef& def, SmallVector<LogicalTensorDesc> inputs) {
-return EncodedSubraph::make(*def.cast_final_safe<SubgraphOp>().graph);
+EncodedSubgraph make_forward_graph(const OpDef& def, SmallVector<LogicalTensorDesc> inputs) {
+return EncodedSubgraph::make(*def.cast_final_safe<SubgraphOp>().graph);
}

-EncodedSubraph make_backward_graph(
+EncodedSubgraph make_backward_graph(
const OpDef& def,
const SmallVector<LogicalTensorDesc>& inputs,
const SmallVector<bool>& input_requires_grad,
@@ -199,7 +199,7 @@ EncodedSubraph make_backward_graph(
}
}
auto bgraph = subgraph_detail::make_backward_graph(def, inputs, input_requires_grad, output_has_grad);
-return EncodedSubraph::make_single(
+return EncodedSubgraph::make_single(
SubgraphOp::make(op.name + "Grad",
std::make_shared<Subgraph>(bgraph.graph)),
bgraph.input_mask, bgraph.output_mask);
@@ -430,7 +430,7 @@ std::tuple<SmallVector<MemoryDesc>, SmallVector<MemoryDesc>> infer_output_mem_de
return {};
}

-EncodedSubraph make_backward_graph(
+EncodedSubgraph make_backward_graph(
const OpDef& def,
const SmallVector<LogicalTensorDesc>& inputs,
const SmallVector<bool>& input_requires_grad,
@@ -452,7 +452,7 @@ EncodedSubraph make_backward_graph(
grad_outputs_has_grad, key);
}
auto compiled_op = CompiledOp::make(bgraph_op, op.gopt_level);
-auto encoded_graph = EncodedSubraph::make_single(compiled_op, backward_graph.input_mask, backward_graph.output_mask);
+auto encoded_graph = EncodedSubgraph::make_single(compiled_op, backward_graph.input_mask, backward_graph.output_mask);
return encoded_graph;
}



imperative/src/impl/proxy_graph.cpp (+2 / -2)

@@ -669,7 +669,7 @@ struct ProxyGraph::GradGraph {
cg::VarNode* grad;
};

-EncodedSubraph
+EncodedSubgraph
ProxyGraph::make_backward_graph(
const OpDef& opdef,
const SmallVector<LogicalTensorDesc>& input_descs,
@@ -704,7 +704,7 @@ ProxyGraph::make_backward_graph(
}
auto* gfunc = cg::lookup_grad_func(fwd->dyn_typeinfo());

-EncodedSubraph result;
+EncodedSubgraph result;
auto&& igraph = result.graph;

size_t nr_backward_graph_inputs = 0;


imperative/src/impl/proxy_graph.h (+1 / -1)

@@ -40,7 +40,7 @@ public:
const SmallVector<Tensor*>& outputs,
const SmallVector<Tensor*>& workspace);

-EncodedSubraph make_backward_graph(
+EncodedSubgraph make_backward_graph(
const OpDef& opdef,
const SmallVector<LogicalTensorDesc>& input_descs,
const SmallVector<bool>& input_requires_grad,


imperative/src/impl/proxy_graph_detail.cpp (+1 / -1)

@@ -113,7 +113,7 @@ void execute(const OpDef& def,
// return graph->infer_output_attrs_fallible(def, inputs);
// }

-EncodedSubraph
+EncodedSubgraph
make_backward_graph(const OpDef& def,
const SmallVector<LogicalTensorDesc>& inputs,
const SmallVector<bool>& input_requires_grad,


imperative/src/impl/subgraph.cpp (+2 / -2)

@@ -101,7 +101,7 @@ void Subgraph::replace_vars(
}
}

-std::string EncodedSubraph::repr() const {
+std::string EncodedSubgraph::repr() const {
std::string buffer;
buffer.push_back('|');
for (size_t i = 0; i < input_mask.size(); ++i) {
@@ -118,7 +118,7 @@ std::string EncodedSubraph::repr() const {
return buffer;
}

-size_t EncodedSubraph::hash() const {
+size_t EncodedSubgraph::hash() const {
return std::hash<std::string>{}(repr());
}
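
Note how hash() simply delegates to repr(): the structure is hashed through its canonical string encoding. A short illustrative Python sketch of that pattern (the mask rendering here is invented for illustration; only the hash-via-repr idea comes from the hunk):

```python
class Encoded:
    def __init__(self, input_mask):
        self.input_mask = input_mask

    def repr_(self):
        # canonical string form, loosely modelled on EncodedSubgraph::repr()
        return "|" + "".join("*" if m else "-" for m in self.input_mask)

    def __hash__(self):
        # hash the canonical string, as EncodedSubgraph::hash() does
        # via std::hash<std::string> over repr()
        return hash(self.repr_())
```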



imperative/src/impl/subgraph_detail.cpp (+3 / -3)

@@ -76,11 +76,11 @@ SmallVector<TensorPtr> apply_on_physical_tensor(
return outputs;
}

-static EncodedSubraph make_backward_graph_from_forward(
+static EncodedSubgraph make_backward_graph_from_forward(
const SmallVector<LogicalTensorDesc>& inputs,
const SmallVector<bool>& input_requires_grad,
const SmallVector<bool>& output_has_grad,
-EncodedSubraph forward_graph) {
+EncodedSubgraph forward_graph) {
using namespace std::placeholders;
using var_t = Subgraph::var_t;
using vars_t = Subgraph::vars_t;
@@ -149,7 +149,7 @@ static EncodedSubraph make_backward_graph_from_forward(
return backward_graph;
}

-EncodedSubraph make_backward_graph(
+EncodedSubgraph make_backward_graph(
const OpDef& def,
const SmallVector<LogicalTensorDesc>& inputs,
const SmallVector<bool>& input_requires_grad,


imperative/src/include/megbrain/imperative/backward_graph_opt.h (+1 / -1)

@@ -19,7 +19,7 @@ struct OptimizedBackwardGraphResult {
SmallVector<bool> save_for_backward;
SmallVector<bool> input_has_grad;

-OptimizedBackwardGraphResult(const EncodedSubraph& bgraph);
+OptimizedBackwardGraphResult(const EncodedSubgraph& bgraph);
};

} // namespace mgb::imperative

imperative/src/include/megbrain/imperative/graph_builder.h (+1 / -1)

@@ -29,7 +29,7 @@ class Subgraph::Builder {
using desc_t = TDesc;
using descs_t = SmallVector<TDesc>;
using infer_fn_t = std::function<descs_t(op_t, descs_t, size_t)>;
-using encoded_graph_t = EncodedSubraph;
+using encoded_graph_t = EncodedSubgraph;
using var_map_t = std::unordered_map<var_t, var_t>;
vars_t m_inputs;
SmallVector<std::pair<var_t, TensorPtr>> m_constants;


imperative/src/include/megbrain/imperative/op_def.h (+2 / -2)

@@ -87,7 +87,7 @@ public:
const SmallVector<TensorPtr>& inputs_tensors,
const SmallVector<MemoryDesc>& inputs_mems);

-static EncodedSubraph make_backward_graph(
+static EncodedSubgraph make_backward_graph(
const OpDef& def,
const SmallVector<LogicalTensorDesc>& inputs,
const SmallVector<bool>& input_requires_grad,
@@ -96,7 +96,7 @@ public:
static std::vector<std::pair<const char*, std::string>> props(
const OpDef& def);

-static EncodedSubraph make_forward_graph(
+static EncodedSubgraph make_forward_graph(
const OpDef& def,
const SmallVector<LogicalTensorDesc>& inputs);



imperative/src/include/megbrain/imperative/ops/utility.h (+1 / -1)

@@ -40,7 +40,7 @@ struct ShapeInfer final : OpDefImplBase<ShapeInfer> {
std::shared_ptr<OpDef> op;
SmallVector<CompNode> devices;
SmallVector<DType> dtypes;
-EncodedSubraph graph;
+EncodedSubgraph graph;
ShapeInfer() = default;
ShapeInfer(std::shared_ptr<OpDef> op, SmallVector<CompNode> devices,
SmallVector<DType> dtypes)


imperative/src/include/megbrain/imperative/proxy_graph_detail.h (+1 / -1)

@@ -38,7 +38,7 @@ void exec(const OpDef& def,
const SmallVector<TensorPtr>& inputs,
const SmallVector<TensorPtr>& outputs);

-EncodedSubraph
+EncodedSubgraph
make_backward_graph(const OpDef& def,
const SmallVector<LogicalTensorDesc>& inputs,
const SmallVector<bool>& input_requires_grad,


imperative/src/include/megbrain/imperative/subgraph.h (+5 / -5)

@@ -96,7 +96,7 @@ struct Subgraph {
bool operator==(const Subgraph& rhs) const;
};

-struct EncodedSubraph {
+struct EncodedSubgraph {
Subgraph graph;
SmallVector<bool> input_mask;
SmallVector<bool> output_mask;
@@ -146,8 +146,8 @@ struct EncodedSubraph {
return decoded_outputs;
}

-static EncodedSubraph make(Subgraph graph) {
-EncodedSubraph result;
+static EncodedSubgraph make(Subgraph graph) {
+EncodedSubgraph result;
result.input_mask = graph.gen_input_mask();
result.output_mask = graph.gen_output_mask();
graph.inputs = result.encode_inputs(graph.inputs);
@@ -156,11 +156,11 @@ struct EncodedSubraph {
return result;
}

-static EncodedSubraph make_single(
+static EncodedSubgraph make_single(
std::shared_ptr<OpDef> op,
SmallVector<bool> input_mask,
SmallVector<bool> output_mask) {
-EncodedSubraph result;
+EncodedSubgraph result;
result.input_mask = input_mask;
result.output_mask = output_mask;
Subgraph::var_t last_var = 0;


imperative/src/include/megbrain/imperative/subgraph_detail.h (+2 / -2)

@@ -24,7 +24,7 @@ apply_on_physical_tensor(const OpDef& def,
std::tuple<SmallVector<LogicalTensorDesc>, bool> infer_output_attrs_fallible(const OpDef& def,
const SmallVector<LogicalTensorDesc>& inputs);

-EncodedSubraph
+EncodedSubgraph
make_backward_graph(const OpDef& def,
const SmallVector<LogicalTensorDesc>& inputs,
const SmallVector<bool>& input_requires_grad,
@@ -35,7 +35,7 @@ apply_on_var_node(
const OpDef& def,
const VarNodeArray& inputs);

-EncodedSubraph make_backward_graph(
+EncodedSubgraph make_backward_graph(
const OpDef& def,
const SmallVector<LogicalTensorDesc>& inputs,
const SmallVector<bool>& input_requires_grad,


imperative/src/test/backward_graph.cpp (+1 / -1)

@@ -22,7 +22,7 @@ using namespace cg;
using namespace imperative;

template <typename T>
-T prepare_backward_graph_inputs(const EncodedSubraph& bg, const T& inputs,
+T prepare_backward_graph_inputs(const EncodedSubgraph& bg, const T& inputs,
const T& outputs, const T& grads) {
T ret;
size_t i = 0;


lite/pylite/pylite.md (+2 / -2)

@@ -143,7 +143,7 @@ LiteNetwork mainly provides users with model loading and running functionality. The model…
* Example of basic model loading and running on CPU
```
def test_network_basic():
source_dir = os.getenv("LITE_TEST_RESOUCE")
source_dir = os.getenv("LITE_TEST_RESOURCE")
input_data_path = os.path.join(source_dir, "input_data.npy")
# read input to input_data
input_data = np.load(input_data_path)
@@ -176,7 +176,7 @@ def test_network_basic():
* Using device memory as model input on CUDA requires configuring config and IO information when constructing the network
```
def test_network_device_IO():
source_dir = os.getenv("LITE_TEST_RESOUCE")
source_dir = os.getenv("LITE_TEST_RESOURCE")
input_data_path = os.path.join(source_dir, "input_data.npy")
model_path = os.path.join(source_dir, "shufflenet.mge")
# read input to input_data


lite/pylite/test/test_global.py (+1 / -1)

@@ -18,7 +18,7 @@ set_log_level(2)


class TestShuffleNet(unittest.TestCase):
source_dir = os.getenv("LITE_TEST_RESOUCE")
source_dir = os.getenv("LITE_TEST_RESOURCE")
input_data_path = os.path.join(source_dir, "input_data.npy")
correct_data_path = os.path.join(source_dir, "output_data.npy")
correct_data = np.load(correct_data_path).flatten()


lite/pylite/test/test_network.py (+1 / -1)

@@ -52,7 +52,7 @@ def test_network_io():


class TestShuffleNet(unittest.TestCase):
source_dir = os.getenv("LITE_TEST_RESOUCE")
source_dir = os.getenv("LITE_TEST_RESOURCE")
input_data_path = os.path.join(source_dir, "input_data.npy")
correct_data_path = os.path.join(source_dir, "output_data.npy")
model_path = os.path.join(source_dir, "shufflenet.mge")


lite/pylite/test/test_network_cuda.py (+1 / -1)

@@ -33,7 +33,7 @@ def require_cuda(ngpu=1):


class TestShuffleNetCuda(unittest.TestCase):
source_dir = os.getenv("LITE_TEST_RESOUCE")
source_dir = os.getenv("LITE_TEST_RESOURCE")
input_data_path = os.path.join(source_dir, "input_data.npy")
correct_data_path = os.path.join(source_dir, "output_data.npy")
model_path = os.path.join(source_dir, "shufflenet.mge")
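
The pylite.md examples and the three test files above all correct the same misspelled environment variable. A minimal sketch of the corrected lookup, assuming the resource directory contains input_data.npy and shufflenet.mge as the tests expect:

```python
import os
import numpy as np

# Resource directory for test assets; previously misspelled
# as LITE_TEST_RESOUCE.
source_dir = os.getenv("LITE_TEST_RESOURCE")
input_data = np.load(os.path.join(source_dir, "input_data.npy"))
model_path = os.path.join(source_dir, "shufflenet.mge")
```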

