GitOrigin-RevId: 10a6cccff9
tags/v0.3.2
@@ -10,6 +10,9 @@ find_package(Numpy REQUIRED) | |||||
find_package(SWIG REQUIRED) | find_package(SWIG REQUIRED) | ||||
set(SWIG_SRC src/swig/mgb.i) | set(SWIG_SRC src/swig/mgb.i) | ||||
set(CMAKE_SWIG_FLAGS -Wall -threads -py3 -modern -DSWIGWORDSIZE64) | set(CMAKE_SWIG_FLAGS -Wall -threads -py3 -modern -DSWIGWORDSIZE64) | ||||
# Propagate the JSON feature flag to SWIG so interface code can test it.
# Define it with an explicit value of 1 so that both `#ifdef MGB_ENABLE_JSON`
# and `#if MGB_ENABLE_JSON` style checks evaluate as enabled (the C++
# sources elsewhere in this change use the `#if` form).
if(MGB_ENABLE_JSON)
    set(CMAKE_SWIG_FLAGS ${CMAKE_SWIG_FLAGS} -DMGB_ENABLE_JSON=1)
endif()
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wno-unused-parameter") | set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -Wno-unused-parameter") | ||||
if(MGE_WITH_DISTRIBUTED) | if(MGE_WITH_DISTRIBUTED) | ||||
@@ -69,6 +69,15 @@ class CompGraph { | |||||
return reinterpret_cast<size_t>(&$self->get()); | return reinterpret_cast<size_t>(&$self->get()); | ||||
} | } | ||||
std::string get_dynamic_info() const { | |||||
#ifdef MGB_ENABLE_JSON | |||||
auto jsonstr = self->get().get_dynamic_info(); | |||||
return jsonstr->to_string(); | |||||
#else | |||||
return std::string(""); | |||||
#endif | |||||
} | |||||
std::string __repr__() const { | std::string __repr__() const { | ||||
auto &&graph = $self->get(); | auto &&graph = $self->get(); | ||||
return mgb::ssprintf("<CompGraph #%zu at %p>", graph.id(), &graph); | return mgb::ssprintf("<CompGraph #%zu at %p>", graph.id(), &graph); | ||||
@@ -0,0 +1,53 @@ | |||||
# -*- coding: utf-8 -*- | |||||
# MegEngine is Licensed under the Apache License, Version 2.0 (the "License") | |||||
# | |||||
# Copyright (c) 2014-2020 Megvii Inc. All rights reserved. | |||||
# | |||||
# Unless required by applicable law or agreed to in writing, | |||||
# software distributed under the License is distributed on an | |||||
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
import json | |||||
import numpy as np | |||||
import megengine.functional as F | |||||
from megengine import graph, tensor | |||||
def test_dynmaic_profiling():
    # NOTE(review): name typo ("dynmaic") kept so existing test selection by
    # name keeps working; consider renaming to test_dynamic_profiling.
    """Check that CompGraph.get_dynamic_info() reports newly created oprs.

    Runs a relu, snapshots the dynamic info, applies add_update, then
    verifies that exactly one new opr appears, that its output has the
    expected allocation size, and that the update happened in place
    (same device pointer as the graph's first opr, i.e. the input tensor).
    """
    sz = 16
    cg = graph.get_default_graph()

    x = tensor(np.arange(0, sz, dtype=np.float32))
    y = F.relu(x)

    info_before = cg.get_dynamic_info()
    # get_dynamic_info() returns "" when MegBrain was built without JSON
    # support (MGB_ENABLE_JSON off); nothing to check in that case.
    if info_before == "":
        return
    json_before = json.loads(info_before)

    F.add_update(x, y)
    json_after = json.loads(cg.get_dynamic_info())

    # Oprs present after add_update but not before.
    new_oprs = [opr for opr in json_after if opr not in json_before]
    assert len(new_oprs) == 1, "add_update operator should produce only one opr internally"

    dest_key = next(iter(new_oprs[0]))
    assert (
        new_oprs[0][dest_key]["output"][0]["memory"] == sz * 4
    ), "output of add_update operator has wrong allocated size"

    # check add_update is in place: its output must reuse the device pointer
    # of the opr with id "0" (the input tensor's producer).
    dest_ptr = new_oprs[0][dest_key]["output"][0]["dev_ptr"]
    found = any(
        opr["0"]["output"][0]["dev_ptr"] == dest_ptr
        for opr in json_before
        if "0" in opr
    )
    assert found, "add_update is not inplace"
@@ -726,4 +726,48 @@ std::string ComputingGraphImpl::VarReceiverInfo::to_string() const { | |||||
allow_empty_value); | allow_empty_value); | ||||
} | } | ||||
#if MGB_ENABLE_JSON | |||||
std::shared_ptr<json::Value> ComputingGraphImpl::get_dynamic_info() const { | |||||
auto make_var_json = [](VarNode* single_var) { | |||||
auto &&cur_mem_plan = single_var->mem_plan(); | |||||
if (cur_mem_plan.valid()) | |||||
return json::Object::make({ | |||||
{"name", json::String::make(single_var->name())}, | |||||
{"memory", json::Number::make(cur_mem_plan.chunk().size())}, | |||||
{"dev_ptr", json::NumberInt::make( | |||||
reinterpret_cast<size_t>(single_var->dev_tensor().raw_ptr()))} | |||||
}); | |||||
else | |||||
return json::Object::make({ | |||||
{"name", json::String::make(single_var->name())}, | |||||
{"memory", json::Null::make()}, | |||||
{"dev_ptr", json::Null::make()} | |||||
}); | |||||
}; | |||||
auto objlist = json::Array::make(); | |||||
for(auto &opri: m_opr_refkeeper){ | |||||
auto cur_opr = opri.get(); | |||||
auto objptr = json::Object::make(); | |||||
auto &&objbody = *objptr; | |||||
objbody["name"] = json::String::make(cur_opr->name()); | |||||
auto jvars = json::Array::make(); | |||||
for(auto &outputi: cur_opr->output()){ | |||||
jvars->add(make_var_json(outputi)); | |||||
} | |||||
objbody["output"] = jvars; | |||||
auto obj = json::Object::make({{std::to_string(cur_opr->id()), objptr}}); | |||||
objlist->add(obj); | |||||
} | |||||
return objlist; | |||||
} | |||||
#endif // MGB_ENABLE_JSON | |||||
// vim: syntax=cpp.doxygen foldmethod=marker foldmarker=f{{{,f}}} | // vim: syntax=cpp.doxygen foldmethod=marker foldmarker=f{{{,f}}} |
@@ -146,6 +146,10 @@ public: | |||||
return m_var_receiver.at(var); | return m_var_receiver.at(var); | ||||
} | } | ||||
#if MGB_ENABLE_JSON | |||||
std::shared_ptr<json::Value> get_dynamic_info() const override; | |||||
#endif | |||||
VarNode* find_var_by_id(size_t id) const override; | VarNode* find_var_by_id(size_t id) const override; | ||||
TopoSorter& topo_sorter() { return components().topo_sorter; } | TopoSorter& topo_sorter() { return components().topo_sorter; } | ||||
@@ -17,6 +17,10 @@ | |||||
#include "megbrain/graph/seq_comp_node_opt.h" | #include "megbrain/graph/seq_comp_node_opt.h" | ||||
#include "megbrain/utils/event.h" | #include "megbrain/utils/event.h" | ||||
#if MGB_ENABLE_JSON | |||||
#include "megbrain/utils/json.h" | |||||
#endif | |||||
namespace mgb { | namespace mgb { | ||||
namespace cg { | namespace cg { | ||||
@@ -171,6 +175,10 @@ class ComputingGraph : public std::enable_shared_from_this<ComputingGraph>, | |||||
virtual const VarReceiverInfo& var_receiver_in_current_comp_seq( | virtual const VarReceiverInfo& var_receiver_in_current_comp_seq( | ||||
const VarNode *var) const = 0; | const VarNode *var) const = 0; | ||||
#if MGB_ENABLE_JSON | |||||
virtual std::shared_ptr<json::Value> get_dynamic_info() const = 0; | |||||
#endif | |||||
/*! | /*! | ||||
* \brief find var node by its ID | * \brief find var node by its ID | ||||
* | * | ||||