@@ -384,7 +384,7 @@ def main(): | |||
parser.add_argument( | |||
"--dump-cpp-model", | |||
help="write a C++ model that can be loaded by " | |||
"megbrain/sdk/load-and-run; " | |||
"megbrain/lite/load_and_run; " | |||
"this implies --embed-input", | |||
) | |||
parser.add_argument( | |||
@@ -1,4 +0,0 @@ | |||
/load_and_run | |||
/data | |||
/*.gcda | |||
/*.gcno |
@@ -1,74 +0,0 @@ | |||
cc_library( | |||
name = "mgblar", | |||
copts = ["-std=c++14"], | |||
srcs = [ | |||
"src/mgblar.cpp", | |||
"src/json_loader.cpp", | |||
"src/text_table.cpp", | |||
], | |||
hdrs = [ | |||
"src/mgblar.h", | |||
"src/json_loader.h", | |||
"src/text_table.h", | |||
"src/npy.h", | |||
], | |||
features = if_opt([ | |||
"no_exceptions", | |||
"no_rtti", | |||
]), | |||
includes = ["src"], | |||
defines = ["MGB_ENABLE_FASTRUN=1"], | |||
deps = ["//brain/megbrain:sdk-test"], | |||
) | |||
cc_megvii_binary( | |||
name = "load_and_run", | |||
copts = ["-std=c++14"], | |||
srcs = ["main.cpp"], | |||
features = if_opt([ | |||
"no_exceptions", | |||
"no_rtti", | |||
]), | |||
internal_deps = [":mgblar"], | |||
visibility = ["//visibility:public"], | |||
) | |||
cc_megvii_shared_object( | |||
name = "load_and_run_shared", | |||
copts = ["-std=c++14"], | |||
srcs = ["main.cpp"], | |||
features = if_opt([ | |||
"no_exceptions", | |||
"no_rtti", | |||
]), | |||
internal_deps = [":mgblar"], | |||
syms = ["main"], | |||
) | |||
cc_megvii_binary( | |||
name = "json_loader_test", | |||
copts = ["-std=c++14"], | |||
srcs = ["test/json_loader_test.cpp"], | |||
internal_deps = [":mgblar"], | |||
) | |||
cc_library( | |||
name = "megbrain_ios_lar_lib", | |||
srcs = [ | |||
"src/mgblar.cpp", | |||
], | |||
hdrs = [ | |||
"src/mgblar.h", | |||
], | |||
copts = ["-DMGB_NO_MAIN=1"], | |||
features = if_opt([ | |||
"no_exceptions", | |||
"no_rtti", | |||
]), | |||
deps = ["//brain/megbrain:sdk-test"], | |||
) | |||
cc_megvii_static_library( | |||
name = "megbrain_ios_lar", | |||
deps = [":megbrain_ios_lar_lib"], | |||
) |
@@ -1,24 +0,0 @@ | |||
include_directories(src) | |||
file(GLOB_RECURSE SOURCES src/*.cpp main.cpp) | |||
add_executable(load_and_run ${SOURCES}) | |||
target_link_libraries(load_and_run megbrain megdnn ${MGE_CUDA_LIBS}) | |||
# load_and_run_depends_shared always for CI check, please do not delete | |||
if(BUILD_SHARED_LIBS) | |||
add_executable(load_and_run_depends_shared ${SOURCES}) | |||
target_link_libraries(load_and_run_depends_shared megengine) | |||
if(WIN32 OR MSVC) | |||
target_compile_definitions(load_and_run_depends_shared PRIVATE MGE_DLL_IMPORT_DATA) | |||
endif() | |||
endif() | |||
install(TARGETS load_and_run EXPORT ${MGE_EXPORT_TARGETS} RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR}) | |||
if(BUILD_SHARED_LIBS) | |||
install(TARGETS load_and_run_depends_shared EXPORT ${MGE_EXPORT_TARGETS} RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR}) | |||
endif() | |||
if(MGE_WITH_TEST) | |||
add_executable(json_loader_test test/json_loader_test.cpp src/json_loader.h src/json_loader.cpp) | |||
target_link_libraries(json_loader_test megbrain megdnn ${MGE_CUDA_LIBS}) | |||
endif() |
@@ -1,29 +0,0 @@ | |||
include ../../Makefile | |||
MACHINE := $(shell $(MGB_CXX) -dumpmachine) | |||
ifneq (, $(findstring android, $(MACHINE))) | |||
DEPS := $(MGB_LIB) ${MEGDNN_LIB} | |||
CXXFLAGS := $(MGB_CXXFLAGS) -fuse-ld=gold -Isrc | |||
LDFLAGS := -ldl -llog ${DEPS} | |||
else | |||
DEPS := $(MGB_LIB) ${MEGDNN_LIB} | |||
CXXFLAGS := $(MGB_CXXFLAGS) -fuse-ld=gold -Isrc | |||
LDFLAGS := -ldl ${DEPS} ${MGB_LDFLAGS} | |||
endif | |||
TARGETS := load_and_run | |||
all: $(TARGETS) | |||
ifneq (,$(findstring gcov,$(MGB_LDFLAGS))) | |||
LDFLAGS += --coverage | |||
endif | |||
load_and_run: main.cpp src/* $(DEPS) | |||
$(MGB_CXX) -o $@ main.cpp src/*.cpp $(CXXFLAGS) $(LDFLAGS) | |||
clean: | |||
rm -f $(TARGETS) | |||
.PHONY: all clean |
@@ -1,144 +0,0 @@ | |||
# Load and Run
Load a model and run it, for testing/debugging/profiling.
## Build
### Build with cmake
Build MegEngine from source following [README.md](../../README.md). This also produces the executable `load_and_run`, which loads a model and runs the test cases attached to it.
## Dump Model with Test Cases Using [dump_with_testcase_mge.py](dump_with_testcase_mge.py)
### Step 1
Dump the model by calling the Python API `megengine.jit.trace.dump()`.
### Step 2
Append the test cases to the dumped model using [dump_with_testcase_mge.py](dump_with_testcase_mge.py).
The basic usage of [dump_with_testcase_mge.py](dump_with_testcase_mge.py) is
```
python3 dump_with_testcase_mge.py model -d input_description -o model_with_testcases
```
where `model` is the file dumped in step 1, `input_description` describes the input data of the test cases, and `model_with_testcases` is the saved model with test cases.
`input_description` can be provided in the following ways:
1. In the format `var0:file0;var1:file1...`, meaning that `var0` should use
   image file `file0`, `var1` should use image `file1`, and so on. If there
   is only one input var, the var name can be omitted. This can be combined
   with the `--resize-input` option.
2. In the format `var0:#rand(min, max, shape...);var1:#rand(min, max)...`,
   meaning to fill the corresponding input vars with uniform random numbers
   in the range `[min, max)`, optionally overriding their shapes.
For more usage, run
```
python3 dump_with_testcase_mge.py --help
```
### Example
1. Obtain the model file by running [xornet.py](../xor-deploy/xornet.py).
2. Dump the file with test cases attached to the model.
```
python3 dump_with_testcase_mge.py xornet_deploy.mge -o xornet.mge -d "#rand(0.1, 0.8, 4, 2)"
```
3. Verify the correctness by running `load_and_run` on the target platform.
```
load_and_run xornet.mge
```
## Feed inputs to the dumped mge file with `load_and_run --input`
You can also use `--input` to set the model's inputs. This argument supports the following four formats:
1. PPM/PGM image file.
PPM/PGM is supported by OpenCV and is simple to parse; you can easily use `cv::imwrite` to generate one.
```
load_and_run model.mge --input "data:image.ppm"
```
`data` is the blob name and `image.ppm` is the file path; `:` separates the key and the value. Note that the quotes are necessary in the terminal.
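If you need to produce such a file, a minimal OpenCV sketch like the following works (file names are placeholders):
```
#include <opencv2/opencv.hpp>

int main() {
    cv::Mat img = cv::imread("input.jpg");  // any image OpenCV can decode
    if (img.empty()) return 1;
    cv::imwrite("image.ppm", img);          // the .ppm extension selects the PPM encoder
    return 0;
}
```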
2. npy file.
npy is the NumPy file format; here is a Python example:
```
import numpy as np
import cv2
mat = cv2.imread('file.jpg')
np.save('image.npy', mat)
arr = np.array([[[1.1, 1.2], [100, 200.0]]], dtype=np.float32)
np.save('bbox.npy', arr)
```
Then `load_and_run` the model:
```
load_and_run model.mge --input "data:image.npy;bbox:bbox.npy"
```
3. json format.
For the json format, you have to specify the data type and blob shape. Here is a Python example:
```
import numpy as np
import json
bbox = np.array([[[1.1, 1.2], [100, 200.0]]], dtype=np.float32)
obj = dict()
obj['shape'] = bbox.shape
obj['raw'] = bbox.flatten().tolist()
obj['type'] = str(bbox.dtype)
json_object = dict()
json_object['bbox'] = obj
with open('bbox.json', 'w') as f:
    f.write(json.dumps(json_object))
```
The json loader in `load_and_run` does not fully implement [RFC 7159](https://tools.ietf.org/html/rfc7159); it does not support `boolean` values or UTF string escapes, which are not needed during inference.
Now `load_and_run` the model with the json file:
```
load_and_run model.mge --input "data:image.npy;bbox:bbox.json"
```
Multiple key-value pairs can be separated with `;`.
4. plain string format.
You can also give the value directly:
```
load_and_run model.mge --input data:image.ppm --input "bbox:[0,0],[200.0,200.0]" --input "batchid:0"
```
1. The shape of `bbox` is `[1,2,2]` for `[0,0],[200.0,200.0]`. For convenience, the string parser adds an extra leading axis, so `bbox:0` corresponds to shape `[1]` and `bbox:[0]` means the shape is `[1,1]`, as illustrated below.
2. Since only `int32` and `float32` can be inferred from this format, don't forget the `.` for floating-point numbers.
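For reference, a sketch of how a few plain-string inputs are interpreted under the rules above (the dtype of the mixed case assumes the `.` promotes the whole tensor to `float32`):
```
--input "bbox:0"                     -> int32,   shape [1]
--input "bbox:[0]"                   -> int32,   shape [1, 1]
--input "bbox:[0,0],[200.0,200.0]"   -> float32, shape [1, 2, 2]
```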
@@ -1,25 +0,0 @@ | |||
/** | |||
* \file sdk/load-and-run/main.cpp | |||
* MegEngine is Licensed under the Apache License, Version 2.0 (the "License") | |||
* | |||
* Copyright (c) 2014-2021 Megvii Inc. All rights reserved. | |||
* | |||
* Unless required by applicable law or agreed to in writing, | |||
* software distributed under the License is distributed on an | |||
* "AS IS" BASIS, WITHOUT ARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |||
*/ | |||
#include "mgblar.h" | |||
#include "megbrain/common.h" | |||
int main(int argc, char **argv) { | |||
MGB_TRY { | |||
return mgb_load_and_run_main(argc, argv); | |||
} MGB_CATCH (std::exception &exc, { | |||
fprintf(stderr, "caught exception: %s\n", exc.what()); | |||
return -2; | |||
}) | |||
} | |||
// vim: syntax=cpp.doxygen foldmethod=marker foldmarker=f{{{,f}}} | |||
@@ -1,299 +0,0 @@ | |||
/** | |||
* \file sdk/load-and-run/src/json_loader.cpp | |||
* MegEngine is Licensed under the Apache License, Version 2.0 (the "License") | |||
* | |||
* Copyright (c) 2014-2021 Megvii Inc. All rights reserved. | |||
* | |||
* Unless required by applicable law or agreed to in writing, | |||
* software distributed under the License is distributed on an | |||
* "AS IS" BASIS, WITHOUT ARRANTIES OR CONDITIONS OF ANY KIND, either express or | |||
* implied. | |||
*/ | |||
#include "json_loader.h" | |||
using namespace mgb; | |||
template <typename T> | |||
T* JsonLoader::Value::safe_cast() { | |||
T* ptr = (T*)(this); | |||
if (nullptr == ptr) { | |||
fprintf(stderr, "cast ptr is null\n"); | |||
} | |||
return ptr; | |||
} | |||
std::unique_ptr<JsonLoader::Value>& JsonLoader::Value::operator[]( | |||
const std::string& key) { | |||
mgb_assert(Type::OBJECT == m_type); | |||
auto t = safe_cast<JsonLoader::ObjectValue>(); | |||
return t->m_obj.at(key); | |||
} | |||
std::unique_ptr<JsonLoader::Value>& JsonLoader::Value::operator[]( | |||
const size_t index) { | |||
mgb_assert(Type::ARRAY == m_type); | |||
auto t = safe_cast<JsonLoader::ArrayValue>(); | |||
return t->m_obj[index]; | |||
} | |||
std::map<std::string, std::unique_ptr<JsonLoader::Value>>& | |||
JsonLoader::Value::objects() { | |||
mgb_assert(Type::OBJECT == m_type); | |||
auto t = safe_cast<JsonLoader::ObjectValue>(); | |||
return t->m_obj; | |||
} | |||
size_t JsonLoader::Value::len() { | |||
if (Type::ARRAY == m_type) { | |||
auto t = safe_cast<JsonLoader::ArrayValue>(); | |||
return t->m_obj.size(); | |||
} else if (Type::OBJECT == m_type) { | |||
auto t = safe_cast<JsonLoader::ObjectValue>(); | |||
return t->m_obj.size(); | |||
} | |||
return 0; | |||
} | |||
megdnn::SmallVector<std::unique_ptr<JsonLoader::Value>>& | |||
JsonLoader::Value::array() { | |||
mgb_assert(Type::ARRAY == m_type); | |||
auto t = safe_cast<JsonLoader::ArrayValue>(); | |||
return t->m_obj; | |||
} | |||
double JsonLoader::Value::number() { | |||
mgb_assert(Type::NUMBER == m_type); | |||
auto t = safe_cast<JsonLoader::NumberValue>(); | |||
return t->value(); | |||
} | |||
std::string JsonLoader::Value::str() { | |||
if (Type::STRING == m_type) { | |||
auto t = safe_cast<StringValue>(); | |||
return t->value(); | |||
} | |||
return std::string(); | |||
} | |||
void JsonLoader::expect(char c) { | |||
mgb_assert(c == (*m_buf)); | |||
m_buf++; | |||
} | |||
void JsonLoader::skip_whitespace() { | |||
const char* p = m_buf; | |||
while (*p == ' ' || *p == '\t' || *p == '\n' || *p == '\r') { | |||
++p; | |||
} | |||
m_buf = p; | |||
} | |||
std::unique_ptr<JsonLoader::Value> JsonLoader::parse_object() { | |||
expect('{'); | |||
skip_whitespace(); | |||
std::unique_ptr<JsonLoader::Value> ret; | |||
JsonLoader::ObjectValue* pObject = new JsonLoader::ObjectValue(); | |||
if ('}' == *m_buf) { | |||
m_buf = m_buf + 1; | |||
ret.reset((JsonLoader::Value*)(pObject)); | |||
return ret; | |||
} | |||
while (true) { | |||
std::unique_ptr<JsonLoader::Value> key = parse_string(); | |||
if (m_state != State::OK) { | |||
return ret; | |||
} | |||
skip_whitespace(); | |||
if (':' != (*m_buf)) { | |||
m_state = State::MISS_COLON; | |||
return ret; | |||
} | |||
m_buf++; | |||
skip_whitespace(); | |||
std::unique_ptr<JsonLoader::Value> pVal = parse_value(); | |||
if (m_state != State::OK) { | |||
return ret; | |||
} | |||
        if (pObject->m_obj.find(key->str()) != pObject->m_obj.end()) {
m_state = State::KEY_NOT_UNIQUE; | |||
return ret; | |||
} | |||
pObject->m_obj.insert(std::make_pair(key->str(), std::move(pVal))); | |||
skip_whitespace(); | |||
if (',' == (*m_buf)) { | |||
m_buf++; | |||
skip_whitespace(); | |||
} else if ('}' == (*m_buf)) { | |||
m_buf++; | |||
break; | |||
} else { | |||
m_state = State::MISS_BRACE; | |||
break; | |||
} | |||
} | |||
ret.reset((JsonLoader::Value*)(pObject)); | |||
return ret; | |||
} | |||
std::unique_ptr<JsonLoader::Value> JsonLoader::parse_array() { | |||
expect('['); | |||
skip_whitespace(); | |||
std::unique_ptr<JsonLoader::Value> ret; | |||
JsonLoader::ArrayValue* pArray = new JsonLoader::ArrayValue(); | |||
if (']' == *m_buf) { | |||
m_buf = m_buf + 1; | |||
ret.reset((JsonLoader::Value*)(pArray)); | |||
return ret; | |||
} | |||
while (true) { | |||
std::unique_ptr<JsonLoader::Value> pVal = parse_value(); | |||
if (m_state != State::OK) { | |||
mgb_assert(0, "parse value failed during pase array"); | |||
return ret; | |||
} | |||
pArray->m_obj.emplace_back(pVal.get()); | |||
pVal.release(); | |||
skip_whitespace(); | |||
if (',' == *m_buf) { | |||
m_buf++; | |||
skip_whitespace(); | |||
} else if (']' == *m_buf) { | |||
m_buf++; | |||
break; | |||
} else { | |||
m_state = State::BAD_ARRAY; | |||
return ret; | |||
} | |||
} | |||
ret.reset((JsonLoader::Value*)(pArray)); | |||
return ret; | |||
} | |||
std::unique_ptr<JsonLoader::Value> JsonLoader::parse_string() { | |||
expect('\"'); | |||
std::unique_ptr<JsonLoader::Value> ret; | |||
JsonLoader::StringValue* pStr = new JsonLoader::StringValue(); | |||
const char* p = m_buf; | |||
while (true) { | |||
if (*p == '\"') { | |||
p++; | |||
break; | |||
} else { | |||
pStr->m_value += (*p); | |||
p++; | |||
} | |||
} | |||
m_buf = p; | |||
ret.reset((JsonLoader::Value*)(pStr)); | |||
return ret; | |||
} | |||
std::unique_ptr<JsonLoader::Value> JsonLoader::parse_number() { | |||
const char* p = m_buf; | |||
auto loop_digit = [this](const char*& p) { | |||
if (not std::isdigit(*p)) { | |||
m_state = State::BAD_DIGIT; | |||
return; | |||
} | |||
while (std::isdigit(*p)) { | |||
p++; | |||
} | |||
return; | |||
}; | |||
if (*p == '-') | |||
p++; | |||
if (*p == '0') | |||
p++; | |||
else { | |||
loop_digit(std::ref(p)); | |||
} | |||
if (*p == '.') { | |||
p++; | |||
loop_digit(std::ref(p)); | |||
} | |||
if (*p == 'e' || *p == 'E') { | |||
p++; | |||
if (*p == '+' || *p == '-') | |||
p++; | |||
loop_digit(std::ref(p)); | |||
} | |||
JsonLoader::NumberValue* pNum = new JsonLoader::NumberValue(); | |||
pNum->m_value = strtod(m_buf, nullptr); | |||
m_buf = p; | |||
std::unique_ptr<JsonLoader::Value> ret; | |||
ret.reset((JsonLoader::Value*)(pNum)); | |||
return ret; | |||
} | |||
std::unique_ptr<JsonLoader::Value> JsonLoader::parse_value() { | |||
switch (*m_buf) { | |||
case '[': | |||
return parse_array(); | |||
case '{': | |||
return parse_object(); | |||
case '\"': | |||
return parse_string(); | |||
case '\0': | |||
m_state = State::BAD_TYPE; | |||
break; | |||
default: | |||
return parse_number(); | |||
} | |||
return nullptr; | |||
} | |||
std::unique_ptr<JsonLoader::Value> JsonLoader::load(const char* content, | |||
const size_t size) { | |||
m_buf = content; | |||
skip_whitespace(); | |||
std::unique_ptr<JsonLoader::Value> value = parse_value(); | |||
skip_whitespace(); | |||
if (m_state != State::OK) { | |||
return nullptr; | |||
} | |||
mgb_assert(size == static_cast<size_t>(m_buf - content)); | |||
return value; | |||
} | |||
std::unique_ptr<JsonLoader::Value> JsonLoader::load(const char* path) { | |||
std::unique_ptr<std::FILE, void (*)(std::FILE*)> fin( | |||
std::fopen(path, "rb"), [](std::FILE* fp) { std::fclose(fp); }); | |||
mgb_assert(fin.get(), "failed to open %s: %s", path, strerror(errno)); | |||
std::fseek(fin.get(), 0, SEEK_END); | |||
const size_t size = ftell(fin.get()); | |||
std::fseek(fin.get(), 0, SEEK_SET); | |||
    std::unique_ptr<char, void (*)(void*)> buf(static_cast<char*>(malloc(size)), free);  // pair malloc with free
auto nr = std::fread(buf.get(), 1, size, fin.get()); | |||
mgb_assert(nr == size); | |||
return load(buf.get(), size); | |||
} |
@@ -1,184 +0,0 @@ | |||
/** | |||
* \file sdk/load-and-run/src/json_loader.h | |||
* MegEngine is Licensed under the Apache License, Version 2.0 (the "License") | |||
* | |||
* Copyright (c) 2014-2021 Megvii Inc. All rights reserved. | |||
* | |||
* Unless required by applicable law or agreed to in writing, | |||
* software distributed under the License is distributed on an | |||
* "AS IS" BASIS, WITHOUT ARRANTIES OR CONDITIONS OF ANY KIND, either express or | |||
* implied. | |||
*/ | |||
#pragma once | |||
#include <cctype> | |||
#include <fstream> | |||
#include <functional> | |||
#include <iostream> | |||
#include <map> | |||
#include <memory> | |||
#include "megbrain/common.h" | |||
#include "megdnn/thin/small_vector.h" | |||
namespace mgb { | |||
class JsonLoader { | |||
public: | |||
class Value { | |||
protected: | |||
enum struct Type : uint8_t { UNKNOWN, NUMBER, STRING, OBJECT, ARRAY }; | |||
Type m_type; | |||
public: | |||
template <typename T> | |||
T* safe_cast(); | |||
Value() { m_type = Type::UNKNOWN; } | |||
Value(Type type) : m_type(type) {} | |||
virtual ~Value() {} | |||
bool is_array() { return Type::ARRAY == m_type; } | |||
bool is_object() { return Type::OBJECT == m_type; } | |||
bool is_number() { return Type::NUMBER == m_type; } | |||
bool is_str() { return Type::STRING == m_type; } | |||
std::unique_ptr<Value>& operator[](const std::string& key); | |||
std::unique_ptr<Value>& operator[](const size_t index); | |||
std::map<std::string, std::unique_ptr<Value>>& objects(); | |||
size_t len(); | |||
megdnn::SmallVector<std::unique_ptr<Value>>& array(); | |||
double number(); | |||
std::string str(); | |||
}; | |||
void expect(char c); | |||
void skip_whitespace(); | |||
std::unique_ptr<Value> parse_object(); | |||
std::unique_ptr<Value> parse_array(); | |||
std::unique_ptr<Value> parse_string(); | |||
std::unique_ptr<Value> parse_number(); | |||
std::unique_ptr<Value> parse_value(); | |||
enum struct State : uint8_t { | |||
OK = 0, | |||
BAD_TYPE, | |||
BAD_DIGIT, | |||
BAD_ARRAY, | |||
MISS_COLON, | |||
MISS_BRACE, | |||
KEY_NOT_UNIQUE | |||
}; | |||
JsonLoader() { m_state = State::OK; } | |||
std::unique_ptr<Value> load(const char* content, const size_t size); | |||
std::unique_ptr<Value> load(const char* path); | |||
class NumberValue final : public Value { | |||
friend std::unique_ptr<Value> JsonLoader::parse_number(); | |||
double m_value; | |||
public: | |||
NumberValue() : Value(Type::NUMBER) {} | |||
double value() { return m_value; } | |||
}; | |||
class StringValue final : public Value { | |||
std::string m_value; | |||
public: | |||
StringValue() : Value(Type::STRING) {} | |||
std::string value() { return m_value; } | |||
friend std::unique_ptr<Value> JsonLoader::parse_string(); | |||
}; | |||
class ArrayValue final : public Value { | |||
megdnn::SmallVector<std::unique_ptr<Value>> m_obj; | |||
public: | |||
ArrayValue() : Value(Type::ARRAY) {} | |||
ArrayValue(ArrayValue& arr) : Value(arr) { | |||
m_obj.clear(); | |||
for (auto& item : arr.m_obj) { | |||
m_obj.emplace_back(item.get()); | |||
item.release(); | |||
} | |||
} | |||
ArrayValue(ArrayValue&& arr) : Value(arr) { | |||
m_obj.clear(); | |||
for (auto& item : arr.m_obj) { | |||
m_obj.emplace_back(item.get()); | |||
item.release(); | |||
} | |||
} | |||
friend std::unique_ptr<Value> JsonLoader::parse_array(); | |||
friend std::unique_ptr<JsonLoader::Value>& JsonLoader::Value:: | |||
operator[](const size_t index); | |||
friend megdnn::SmallVector<std::unique_ptr<JsonLoader::Value>>& | |||
JsonLoader::Value::array(); | |||
friend size_t JsonLoader::Value::len(); | |||
}; | |||
class ObjectValue final : public Value { | |||
std::map<std::string, std::unique_ptr<Value>> m_obj; | |||
public: | |||
ObjectValue() : Value(Type::OBJECT) {} | |||
ObjectValue(ObjectValue& arr) : Value(arr) { | |||
m_obj.clear(); | |||
for (auto itra = arr.m_obj.begin(); itra != arr.m_obj.end(); | |||
++itra) { | |||
m_obj.emplace( | |||
std::make_pair(itra->first, std::move(itra->second))); | |||
} | |||
} | |||
ObjectValue(ObjectValue&& arr) : Value(arr) { | |||
m_obj.clear(); | |||
for (auto itra = arr.m_obj.begin(); itra != arr.m_obj.end(); | |||
++itra) { | |||
m_obj.emplace( | |||
std::make_pair(itra->first, std::move(itra->second))); | |||
} | |||
} | |||
friend std::unique_ptr<Value> JsonLoader::parse_object(); | |||
friend std::unique_ptr<JsonLoader::Value>& JsonLoader::Value:: | |||
operator[](const std::string&); | |||
friend std::map<std::string, std::unique_ptr<JsonLoader::Value>>& | |||
JsonLoader::Value::objects(); | |||
friend size_t JsonLoader::Value::len(); | |||
}; | |||
private: | |||
const char* m_buf; | |||
State m_state; | |||
}; | |||
} // namespace mgb |
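For orientation only, a minimal sketch of how this loader API can be driven; the file name and keys are taken from the README example above and are not part of the original sources:
```
#include <cstdio>
#include "json_loader.h"

int main() {
    mgb::JsonLoader json;
    // parse a file such as the bbox.json produced in the README example
    auto root = json.load("bbox.json");
    if (!root || !root->is_object()) return 1;
    auto& bbox = (*root)["bbox"];                     // object lookup by key
    size_t ndim = (*bbox)["shape"]->len();            // length of the "shape" array
    double first = (*(*bbox)["raw"])[0]->number();    // first element of the flattened data
    std::printf("ndim=%zu raw[0]=%f\n", ndim, first);
    return 0;
}
```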
@@ -1,22 +0,0 @@ | |||
/** | |||
* \file sdk/load-and-run/src/mgblar.h | |||
* MegEngine is Licensed under the Apache License, Version 2.0 (the "License") | |||
* | |||
* Copyright (c) 2014-2021 Megvii Inc. All rights reserved. | |||
* | |||
* Unless required by applicable law or agreed to in writing, | |||
* software distributed under the License is distributed on an | |||
* "AS IS" BASIS, WITHOUT ARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |||
*/ | |||
#pragma once | |||
#ifdef __cplusplus | |||
extern "C" { | |||
#endif | |||
int mgb_load_and_run_main(int argc, char **argv); | |||
#ifdef __cplusplus | |||
} | |||
#endif | |||
// vim: syntax=cpp.doxygen foldmethod=marker foldmarker=f{{{,f}}} |
@@ -1,627 +0,0 @@ | |||
/* | |||
Copyright 2017 Leon Merten Lohse | |||
Permission is hereby granted, free of charge, to any person obtaining a copy | |||
of this software and associated documentation files (the "Software"), to deal | |||
in the Software without restriction, including without limitation the rights | |||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell | |||
copies of the Software, and to permit persons to whom the Software is | |||
furnished to do so, subject to the following conditions: | |||
The above copyright notice and this permission notice shall be included in | |||
all copies or substantial portions of the Software. | |||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR | |||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, | |||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE | |||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER | |||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, | |||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE | |||
SOFTWARE. | |||
*/ | |||
#ifndef NPY_H | |||
#define NPY_H | |||
#include <algorithm> | |||
#include <complex> | |||
#include <cstdint> | |||
#include <cstring> | |||
#include <fstream> | |||
#include <iostream> | |||
#include <regex> | |||
#include <sstream> | |||
#include <stdexcept> | |||
#include <string> | |||
#include <unordered_map> | |||
#include <vector> | |||
namespace npy { | |||
/* Compile-time test for byte order. | |||
If your compiler does not define these per default, you may want to define | |||
one of these constants manually. | |||
Defaults to little endian order. */ | |||
#if defined(__BYTE_ORDER) && __BYTE_ORDER == __BIG_ENDIAN || \ | |||
defined(__BIG_ENDIAN__) || defined(__ARMEB__) || \ | |||
defined(__THUMBEB__) || defined(__AARCH64EB__) || defined(_MIBSEB) || \ | |||
defined(__MIBSEB) || defined(__MIBSEB__) | |||
const bool big_endian = true; | |||
#else | |||
const bool big_endian = false; | |||
#endif | |||
const char magic_string[] = "\x93NUMPY"; | |||
const size_t magic_string_length = 6; | |||
const char little_endian_char = '<'; | |||
const char big_endian_char = '>'; | |||
const char no_endian_char = '|'; | |||
constexpr char host_endian_char = | |||
(big_endian ? big_endian_char : little_endian_char); | |||
/* npy array length */ | |||
typedef unsigned long int ndarray_len_t; | |||
inline void write_magic(std::ostream& ostream, unsigned char v_major = 1, | |||
unsigned char v_minor = 0) { | |||
ostream.write(magic_string, magic_string_length); | |||
ostream.put(v_major); | |||
ostream.put(v_minor); | |||
} | |||
inline void read_magic(std::istream& istream, unsigned char& v_major, | |||
unsigned char& v_minor) { | |||
char buf[magic_string_length + 2]; | |||
istream.read(buf, magic_string_length + 2); | |||
if (!istream) { | |||
fprintf(stderr, "io error: failed reading file"); | |||
} | |||
if (0 != std::memcmp(buf, magic_string, magic_string_length)) { | |||
fprintf(stderr, "this file does not have a valid npy format."); | |||
} | |||
v_major = buf[magic_string_length]; | |||
v_minor = buf[magic_string_length + 1]; | |||
} | |||
// typestring magic | |||
struct Typestring { | |||
private: | |||
char c_endian; | |||
char c_type; | |||
int len; | |||
public: | |||
inline std::string str() { | |||
const size_t max_buflen = 16; | |||
char buf[max_buflen]; | |||
std::sprintf(buf, "%c%c%u", c_endian, c_type, len); | |||
return std::string(buf); | |||
} | |||
Typestring(const std::vector<float>&) | |||
: c_endian{host_endian_char}, c_type{'f'}, len{sizeof(float)} {} | |||
Typestring(const std::vector<double>&) | |||
: c_endian{host_endian_char}, c_type{'f'}, len{sizeof(double)} {} | |||
Typestring(const std::vector<long double>&) | |||
: c_endian{host_endian_char}, | |||
c_type{'f'}, | |||
len{sizeof(long double)} {} | |||
Typestring(const std::vector<char>&) | |||
: c_endian{no_endian_char}, c_type{'i'}, len{sizeof(char)} {} | |||
Typestring(const std::vector<short>&) | |||
: c_endian{host_endian_char}, c_type{'i'}, len{sizeof(short)} {} | |||
Typestring(const std::vector<int>&) | |||
: c_endian{host_endian_char}, c_type{'i'}, len{sizeof(int)} {} | |||
Typestring(const std::vector<long>&) | |||
: c_endian{host_endian_char}, c_type{'i'}, len{sizeof(long)} {} | |||
Typestring(const std::vector<long long>&) | |||
: c_endian{host_endian_char}, c_type{'i'}, len{sizeof(long long)} {} | |||
Typestring(const std::vector<unsigned char>&) | |||
: c_endian{no_endian_char}, | |||
c_type{'u'}, | |||
len{sizeof(unsigned char)} {} | |||
Typestring(const std::vector<unsigned short>&) | |||
: c_endian{host_endian_char}, | |||
c_type{'u'}, | |||
len{sizeof(unsigned short)} {} | |||
Typestring(const std::vector<unsigned int>&) | |||
: c_endian{host_endian_char}, | |||
c_type{'u'}, | |||
len{sizeof(unsigned int)} {} | |||
Typestring(const std::vector<unsigned long>&) | |||
: c_endian{host_endian_char}, | |||
c_type{'u'}, | |||
len{sizeof(unsigned long)} {} | |||
Typestring(const std::vector<unsigned long long>&) | |||
: c_endian{host_endian_char}, | |||
c_type{'u'}, | |||
len{sizeof(unsigned long long)} {} | |||
Typestring(const std::vector<std::complex<float>>&) | |||
: c_endian{host_endian_char}, | |||
c_type{'c'}, | |||
len{sizeof(std::complex<float>)} {} | |||
Typestring(const std::vector<std::complex<double>>&) | |||
: c_endian{host_endian_char}, | |||
c_type{'c'}, | |||
len{sizeof(std::complex<double>)} {} | |||
Typestring(const std::vector<std::complex<long double>>&) | |||
: c_endian{host_endian_char}, | |||
c_type{'c'}, | |||
len{sizeof(std::complex<long double>)} {} | |||
}; | |||
inline void parse_typestring(std::string typestring) { | |||
std::regex re("'([<>|])([ifuc])(\\d+)'"); | |||
std::smatch sm; | |||
std::regex_match(typestring, sm, re); | |||
if (sm.size() != 4) { | |||
fprintf(stderr, "invalid typestring"); | |||
} | |||
} | |||
namespace pyparse { | |||
/** | |||
Removes leading and trailing whitespaces | |||
*/ | |||
inline std::string trim(const std::string& str) { | |||
const std::string whitespace = " \t"; | |||
auto begin = str.find_first_not_of(whitespace); | |||
if (begin == std::string::npos) | |||
return ""; | |||
auto end = str.find_last_not_of(whitespace); | |||
return str.substr(begin, end - begin + 1); | |||
} | |||
inline std::string get_value_from_map(const std::string& mapstr) { | |||
size_t sep_pos = mapstr.find_first_of(":"); | |||
if (sep_pos == std::string::npos) | |||
return ""; | |||
std::string tmp = mapstr.substr(sep_pos + 1); | |||
return trim(tmp); | |||
} | |||
/** | |||
Parses the string representation of a Python dict | |||
The keys need to be known and may not appear anywhere else in the data. | |||
*/ | |||
inline std::unordered_map<std::string, std::string> parse_dict( | |||
std::string in, std::vector<std::string>& keys) { | |||
std::unordered_map<std::string, std::string> map; | |||
if (keys.size() == 0) | |||
return map; | |||
in = trim(in); | |||
// unwrap dictionary | |||
if ((in.front() == '{') && (in.back() == '}')) | |||
in = in.substr(1, in.length() - 2); | |||
else { | |||
fprintf(stderr, "Not a Python dictionary."); | |||
} | |||
std::vector<std::pair<size_t, std::string>> positions; | |||
for (auto const& value : keys) { | |||
size_t pos = in.find("'" + value + "'"); | |||
if (pos == std::string::npos) { | |||
fprintf(stderr, "Missing %s key.", value.c_str()); | |||
} | |||
std::pair<size_t, std::string> position_pair{pos, value}; | |||
positions.push_back(position_pair); | |||
} | |||
// sort by position in dict | |||
std::sort(positions.begin(), positions.end()); | |||
for (size_t i = 0; i < positions.size(); ++i) { | |||
std::string raw_value; | |||
size_t begin{positions[i].first}; | |||
size_t end{std::string::npos}; | |||
std::string key = positions[i].second; | |||
if (i + 1 < positions.size()) | |||
end = positions[i + 1].first; | |||
raw_value = in.substr(begin, end - begin); | |||
raw_value = trim(raw_value); | |||
if (raw_value.back() == ',') | |||
raw_value.pop_back(); | |||
map[key] = get_value_from_map(raw_value); | |||
} | |||
return map; | |||
} | |||
/** | |||
Parses the string representation of a Python boolean | |||
*/ | |||
inline bool parse_bool(const std::string& in) { | |||
if (in == "True") | |||
return true; | |||
if (in == "False") | |||
return false; | |||
fprintf(stderr, "Invalid python boolan."); | |||
return false; | |||
} | |||
/** | |||
Parses the string representation of a Python str | |||
*/ | |||
inline std::string parse_str(const std::string& in) { | |||
if ((in.front() == '\'') && (in.back() == '\'')) | |||
return in.substr(1, in.length() - 2); | |||
fprintf(stderr, "Invalid python string."); | |||
return ""; | |||
} | |||
/** | |||
Parses the string representation of a Python tuple into a vector of its items
*/ | |||
inline std::vector<std::string> parse_tuple(std::string in) { | |||
std::vector<std::string> v; | |||
    const char separator = ',';
in = trim(in); | |||
if ((in.front() == '(') && (in.back() == ')')) | |||
in = in.substr(1, in.length() - 2); | |||
else { | |||
fprintf(stderr, "Invalid Python tuple."); | |||
} | |||
std::istringstream iss(in); | |||
    for (std::string token; std::getline(iss, token, separator);) {
v.push_back(token); | |||
} | |||
return v; | |||
} | |||
template <typename T> | |||
inline std::string write_tuple(const std::vector<T>& v) { | |||
if (v.size() == 0) | |||
return ""; | |||
std::ostringstream ss; | |||
if (v.size() == 1) { | |||
ss << "(" << v.front() << ",)"; | |||
} else { | |||
const std::string delimiter = ", "; | |||
// v.size() > 1 | |||
ss << "("; | |||
std::copy(v.begin(), v.end() - 1, | |||
std::ostream_iterator<T>(ss, delimiter.c_str())); | |||
ss << v.back(); | |||
ss << ")"; | |||
} | |||
return ss.str(); | |||
} | |||
inline std::string write_boolean(bool b) { | |||
if (b) | |||
return "True"; | |||
else | |||
return "False"; | |||
} | |||
} // namespace pyparse | |||
inline void parse_header(std::string header, std::string& descr) { | |||
/* | |||
The first 6 bytes are a magic string: exactly "\x93NUMPY".
The next 1 byte is an unsigned byte: the major version number of the file | |||
format, e.g. x01. The next 1 byte is an unsigned byte: the minor version | |||
number of the file format, e.g. x00. Note: the version of the file format | |||
is not tied to the version of the numpy package. The next 2 bytes form a | |||
little-endian unsigned short int: the length of the header data | |||
HEADER_LEN. The next HEADER_LEN bytes form the header data describing the | |||
array's format. It is an ASCII string which contains a Python literal | |||
expression of a dictionary. It is terminated by a newline ('\n') and
padded with spaces
('\x20') to make the total length of the magic string + 4 + HEADER_LEN be
evenly divisible by 16 for alignment purposes. The dictionary contains | |||
three keys: | |||
"descr" : dtype.descr | |||
An object that can be passed as an argument to the numpy.dtype() | |||
constructor to create the array's dtype. For repeatability and | |||
readability, this dictionary is formatted using pprint.pformat() so the | |||
keys are in alphabetic order. | |||
*/ | |||
// remove trailing newline | |||
if (header.back() != '\n') | |||
fprintf(stderr, "invalid header"); | |||
header.pop_back(); | |||
// parse the dictionary | |||
std::vector<std::string> keys{"descr"}; | |||
auto dict_map = npy::pyparse::parse_dict(header, keys); | |||
if (dict_map.size() == 0) | |||
fprintf(stderr, "invalid dictionary in header"); | |||
std::string descr_s = dict_map["descr"]; | |||
parse_typestring(descr_s); | |||
// remove | |||
descr = npy::pyparse::parse_str(descr_s); | |||
return; | |||
} | |||
inline void parse_header(std::string header, std::string& descr, | |||
bool& fortran_order, | |||
std::vector<ndarray_len_t>& shape) { | |||
/* | |||
The first 6 bytes are a magic string: exactly "\x93NUMPY".
The next 1 byte is an unsigned byte: the major version number of the file | |||
format, e.g. x01. The next 1 byte is an unsigned byte: the minor version | |||
number of the file format, e.g. x00. Note: the version of the file format | |||
is not tied to the version of the numpy package. The next 2 bytes form a | |||
little-endian unsigned short int: the length of the header data | |||
HEADER_LEN. The next HEADER_LEN bytes form the header data describing the | |||
array's format. It is an ASCII string which contains a Python literal | |||
expression of a dictionary. It is terminated by a newline ('\n') and
padded with spaces
('\x20') to make the total length of the magic string + 4 + HEADER_LEN be
evenly divisible by 16 for alignment purposes. The dictionary contains | |||
three keys: | |||
"descr" : dtype.descr | |||
An object that can be passed as an argument to the numpy.dtype() | |||
constructor to create the array's dtype. "fortran_order" : bool Whether | |||
the array data is Fortran-contiguous or not. Since Fortran-contiguous | |||
arrays are a common form of non-C-contiguity, we allow them to be written | |||
directly to disk for efficiency. "shape" : tuple of int The shape of the | |||
array. For repeatability and readability, this dictionary is formatted | |||
using pprint.pformat() so the keys are in alphabetic order. | |||
*/ | |||
// remove trailing newline | |||
if (header.back() != '\n') | |||
fprintf(stderr, "invalid header"); | |||
header.pop_back(); | |||
// parse the dictionary | |||
std::vector<std::string> keys{"descr", "fortran_order", "shape"}; | |||
auto dict_map = npy::pyparse::parse_dict(header, keys); | |||
if (dict_map.size() == 0) | |||
fprintf(stderr, "invalid dictionary in header"); | |||
std::string descr_s = dict_map["descr"]; | |||
std::string fortran_s = dict_map["fortran_order"]; | |||
std::string shape_s = dict_map["shape"]; | |||
// TODO: extract info from typestring | |||
parse_typestring(descr_s); | |||
// remove | |||
descr = npy::pyparse::parse_str(descr_s); | |||
// convert literal Python bool to C++ bool | |||
fortran_order = npy::pyparse::parse_bool(fortran_s); | |||
// parse the shape tuple | |||
auto shape_v = npy::pyparse::parse_tuple(shape_s); | |||
if (shape_v.size() == 0) | |||
fprintf(stderr, "invalid shape tuple in header"); | |||
for (auto item : shape_v) { | |||
ndarray_len_t dim = static_cast<ndarray_len_t>(std::stoul(item)); | |||
shape.push_back(dim); | |||
} | |||
} | |||
inline std::string write_header_dict(const std::string& descr, | |||
bool fortran_order, | |||
const std::vector<ndarray_len_t>& shape) { | |||
std::string s_fortran_order = npy::pyparse::write_boolean(fortran_order); | |||
std::string shape_s = npy::pyparse::write_tuple(shape); | |||
return "{'descr': '" + descr + "', 'fortran_order': " + s_fortran_order + | |||
", 'shape': " + shape_s + ", }"; | |||
} | |||
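// For illustration (derived from write_header_dict and write_tuple above):
//   write_header_dict("<f4", false, {2, 2})
//   -> "{'descr': '<f4', 'fortran_order': False, 'shape': (2, 2), }"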
inline void write_header(std::ostream& out, const std::string& descr, | |||
bool fortran_order, | |||
const std::vector<ndarray_len_t>& shape_v) { | |||
std::string header_dict = write_header_dict(descr, fortran_order, shape_v); | |||
size_t length = magic_string_length + 2 + 2 + header_dict.length() + 1; | |||
unsigned char version[2] = {1, 0}; | |||
if (length >= 255 * 255) { | |||
length = magic_string_length + 2 + 4 + header_dict.length() + 1; | |||
version[0] = 2; | |||
version[1] = 0; | |||
} | |||
size_t padding_len = 16 - length % 16; | |||
std::string padding(padding_len, ' '); | |||
// write magic | |||
write_magic(out, version[0], version[1]); | |||
// write header length | |||
if (version[0] == 1 && version[1] == 0) { | |||
char header_len_le16[2]; | |||
uint16_t header_len = static_cast<uint16_t>(header_dict.length() + | |||
padding.length() + 1); | |||
header_len_le16[0] = (header_len >> 0) & 0xff; | |||
header_len_le16[1] = (header_len >> 8) & 0xff; | |||
out.write(reinterpret_cast<char*>(header_len_le16), 2); | |||
} else { | |||
char header_len_le32[4]; | |||
uint32_t header_len = static_cast<uint32_t>(header_dict.length() + | |||
padding.length() + 1); | |||
header_len_le32[0] = (header_len >> 0) & 0xff; | |||
header_len_le32[1] = (header_len >> 8) & 0xff; | |||
header_len_le32[2] = (header_len >> 16) & 0xff; | |||
header_len_le32[3] = (header_len >> 24) & 0xff; | |||
out.write(reinterpret_cast<char*>(header_len_le32), 4); | |||
} | |||
out << header_dict << padding << '\n'; | |||
} | |||
inline std::string read_header(std::istream& istream) { | |||
    // check magic bytes and version number
unsigned char v_major, v_minor; | |||
read_magic(istream, v_major, v_minor); | |||
uint32_t header_length = 0; | |||
if (v_major == 1 && v_minor == 0) { | |||
char header_len_le16[2]; | |||
istream.read(header_len_le16, 2); | |||
header_length = (header_len_le16[0] << 0) | (header_len_le16[1] << 8); | |||
if ((magic_string_length + 2 + 2 + header_length) % 16 != 0) { | |||
// TODO: display warning | |||
} | |||
} else if (v_major == 2 && v_minor == 0) { | |||
char header_len_le32[4]; | |||
istream.read(header_len_le32, 4); | |||
header_length = (header_len_le32[0] << 0) | (header_len_le32[1] << 8) | | |||
(header_len_le32[2] << 16) | (header_len_le32[3] << 24); | |||
if ((magic_string_length + 2 + 4 + header_length) % 16 != 0) { | |||
// TODO: display warning | |||
} | |||
} else { | |||
fprintf(stderr, "unsupported file format version"); | |||
} | |||
auto buf_v = std::vector<char>(); | |||
buf_v.reserve(header_length); | |||
istream.read(buf_v.data(), header_length); | |||
std::string header(buf_v.data(), header_length); | |||
return header; | |||
} | |||
inline ndarray_len_t comp_size(const std::vector<ndarray_len_t>& shape) { | |||
ndarray_len_t size = 1; | |||
for (ndarray_len_t i : shape) | |||
size *= i; | |||
return size; | |||
} | |||
template <typename Scalar> | |||
inline void SaveArrayAsNumpy(const std::string& filename, bool fortran_order, | |||
unsigned int n_dims, const unsigned long shape[], | |||
const std::vector<Scalar>& data) { | |||
Typestring typestring_o(data); | |||
std::string typestring = typestring_o.str(); | |||
std::ofstream stream(filename, std::ofstream::binary); | |||
if (!stream) { | |||
fprintf(stderr, "io error: failed to open a file."); | |||
} | |||
std::vector<ndarray_len_t> shape_v(shape, shape + n_dims); | |||
write_header(stream, typestring, fortran_order, shape_v); | |||
auto size = static_cast<size_t>(comp_size(shape_v)); | |||
stream.write(reinterpret_cast<const char*>(data.data()), | |||
sizeof(Scalar) * size); | |||
} | |||
template <typename Scalar> | |||
inline void LoadArrayFromNumpy(const std::string& filename, | |||
std::vector<unsigned long>& shape, | |||
std::vector<Scalar>& data) { | |||
bool fortran_order; | |||
LoadArrayFromNumpy<Scalar>(filename, shape, fortran_order, data); | |||
} | |||
template <typename Scalar> | |||
inline void LoadArrayFromNumpy(const std::string& filename, | |||
std::vector<unsigned long>& shape, | |||
bool& fortran_order, std::vector<Scalar>& data) { | |||
std::ifstream stream(filename, std::ifstream::binary); | |||
if (!stream) { | |||
fprintf(stderr, "io error: failed to open a file."); | |||
} | |||
std::string header = read_header(stream); | |||
// parse header | |||
std::string typestr; | |||
parse_header(header, typestr, fortran_order, shape); | |||
// check if the typestring matches the given one | |||
Typestring typestring_o{data}; | |||
std::string expect_typestr = typestring_o.str(); | |||
if (typestr != expect_typestr) { | |||
fprintf(stderr, "formatting error: typestrings not matching"); | |||
} | |||
// compute the data size based on the shape | |||
auto size = static_cast<size_t>(comp_size(shape)); | |||
data.resize(size); | |||
// read the data | |||
stream.read(reinterpret_cast<char*>(data.data()), sizeof(Scalar) * size); | |||
} | |||
inline void LoadArrayFromNumpy(const std::string& filename, | |||
std::string& type_str, | |||
std::vector<ndarray_len_t>& shape, | |||
std::vector<int8_t>& data) { | |||
std::ifstream stream(filename, std::ifstream::binary); | |||
if (!stream) { | |||
fprintf(stderr, "io error: failed to open a file."); | |||
} | |||
std::string header = read_header(stream); | |||
bool fortran_order; | |||
// parse header | |||
parse_header(header, type_str, fortran_order, shape); | |||
// check if the typestring matches the given one | |||
std::string size_str = type_str.substr(type_str.size() - 1); | |||
size_t elem_size = atoi(size_str.c_str()); | |||
// compute the data size based on the shape | |||
auto byte_size = elem_size * static_cast<size_t>(comp_size(shape)); | |||
data.resize(byte_size); | |||
// read the data | |||
stream.read(reinterpret_cast<char*>(data.data()), byte_size); | |||
} | |||
} // namespace npy | |||
#endif // NPY_H |
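As a quick sanity check of the helpers above, a self-contained round-trip sketch (the file name is arbitrary):
```
#include <vector>
#include "npy.h"

int main() {
    std::vector<float> data{1.f, 2.f, 3.f, 4.f};
    const unsigned long shape[] = {2, 2};
    npy::SaveArrayAsNumpy("example.npy", /*fortran_order=*/false, 2, shape, data);

    std::vector<unsigned long> loaded_shape;
    std::vector<float> loaded;
    npy::LoadArrayFromNumpy("example.npy", loaded_shape, loaded);  // expects shape {2, 2}
    return (loaded.size() == 4) ? 0 : 1;
}
```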
@@ -1,108 +0,0 @@ | |||
/** | |||
* \file sdk/load-and-run/src/text_table.cpp | |||
* MegEngine is Licensed under the Apache License, Version 2.0 (the "License") | |||
* | |||
* Copyright (c) 2014-2021 Megvii Inc. All rights reserved. | |||
* | |||
* Unless required by applicable law or agreed to in writing, | |||
* software distributed under the License is distributed on an | |||
* "AS IS" BASIS, WITHOUT ARRANTIES OR CONDITIONS OF ANY KIND, either express or | |||
* implied. | |||
*/ | |||
#include "text_table.h" | |||
using namespace mgb; | |||
namespace { | |||
inline void mid(std::ostream& os, const std::string& str, size_t max_w) { | |||
size_t l = (max_w - str.length()) / 2 + str.length(); | |||
size_t r = max_w - l; | |||
os << std::setw(l) << std::right << str; | |||
if (r > 0) os << std::setw(r) << ' '; | |||
} | |||
inline size_t char_length(char c) { return c ? 1 : 0; } | |||
} // namespace | |||
void TextTable::adjuster_last_row() { | |||
if (m_rows.empty()) return; | |||
auto& row = m_rows.back(); | |||
if (row.params.horizontal == 0 or row.params.vertical == 0) { | |||
row.params.corner = 0; | |||
} | |||
if (row.params.horizontal != 0 && row.params.vertical != 0 && | |||
row.params.corner == 0) { | |||
row.params.corner = row.params.horizontal; | |||
} | |||
} | |||
void TextTable::show(std::ostream& os) { | |||
if (m_rows.empty()) return; | |||
auto& last_row = m_rows.front(); | |||
bool first = true; | |||
for (auto& row : m_rows) { | |||
auto& lrow = | |||
(last_row.values.size() * char_length(last_row.params.horizontal)) > | |||
(row.values.size() * char_length(row.params.horizontal)) | |||
? last_row | |||
: row; | |||
// line before row | |||
if (lrow.params.horizontal) { | |||
if (not first) os << std::endl; | |||
os << m_prefix; | |||
if (lrow.params.corner) os << lrow.params.corner; | |||
size_t skip_size = 0; | |||
// table name | |||
if (first) { | |||
os << m_name; | |||
skip_size = m_name.length(); | |||
} | |||
for (size_t i = 0; i < lrow.values.size(); ++i) { | |||
auto max_w = m_cols_max_w.at(i) + m_padding * 2; | |||
if (max_w + char_length(lrow.params.corner) <= skip_size) { | |||
skip_size = | |||
skip_size - max_w - char_length(lrow.params.corner); | |||
continue; | |||
} | |||
size_t rest = | |||
max_w + char_length(lrow.params.corner) - skip_size; | |||
skip_size = 0; | |||
if (rest > char_length(lrow.params.corner)) { | |||
os << std::string(rest - char_length(lrow.params.corner), | |||
lrow.params.horizontal); | |||
rest = char_length(lrow.params.corner); | |||
} | |||
if (rest > 0 && lrow.params.corner) os << lrow.params.corner; | |||
} | |||
} else if (first) { | |||
os << m_prefix << ' ' << m_name; | |||
} | |||
first = false; | |||
os << std::endl << m_prefix; | |||
if (row.params.vertical) os << row.params.vertical; | |||
// row | |||
for (size_t i = 0; i < row.values.size(); ++i) { | |||
auto& str = row.values.at(i); | |||
auto max_w = m_cols_max_w.at(i) + 2 * m_padding; | |||
if (row.params.align == Align::Mid) { | |||
mid(os, str, max_w); | |||
} else if (row.params.align == Align::Left) { | |||
os << std::setw(max_w) << std::left << str; | |||
} else { | |||
os << std::setw(max_w) << std::right << str; | |||
} | |||
if (row.params.vertical) os << row.params.vertical; | |||
} | |||
last_row = row; | |||
} | |||
if (last_row.params.horizontal) { | |||
os << std::endl << m_prefix; | |||
if (last_row.params.corner) os << last_row.params.corner; | |||
for (size_t i = 0; i < last_row.values.size(); ++i) { | |||
auto max_w = m_cols_max_w.at(i); | |||
std::string tmp(max_w + m_padding * 2, last_row.params.horizontal); | |||
os << tmp; | |||
if (last_row.params.corner) os << last_row.params.corner; | |||
} | |||
} | |||
} |
@@ -1,132 +0,0 @@ | |||
/** | |||
* \file sdk/load-and-run/src/text_table.h | |||
* MegEngine is Licensed under the Apache License, Version 2.0 (the "License") | |||
* | |||
* Copyright (c) 2014-2021 Megvii Inc. All rights reserved. | |||
* | |||
* Unless required by applicable law or agreed to in writing, | |||
* software distributed under the License is distributed on an | |||
* "AS IS" BASIS, WITHOUT ARRANTIES OR CONDITIONS OF ANY KIND, either express or | |||
* implied. | |||
*/ | |||
#pragma once | |||
#include <array> | |||
#include <iomanip> | |||
#include <ostream> | |||
#include <sstream> | |||
#include <string> | |||
#include <tuple> | |||
#include <type_traits> | |||
#include <vector> | |||
#include "megbrain/common.h" | |||
namespace mgb | |||
{ | |||
class TextTable { | |||
public: | |||
enum Level { Summary, Detail }; | |||
enum class Align : int { Left, Right, Mid }; | |||
explicit TextTable(const std::string& table_name) : m_name(table_name) {} | |||
TextTable& horizontal(char c) { | |||
m_row.params.horizontal = c; | |||
return *this; | |||
} | |||
TextTable& vertical(char c) { | |||
m_row.params.vertical = c; | |||
return *this; | |||
} | |||
TextTable& corner(char c) { | |||
m_row.params.corner = c; | |||
return *this; | |||
} | |||
TextTable& align(Align v) { | |||
m_row.params.align = v; | |||
return *this; | |||
} | |||
TextTable& padding(size_t w) { | |||
m_padding = w; | |||
return *this; | |||
} | |||
TextTable& prefix(const std::string& str) { | |||
m_prefix = str; | |||
return *this; | |||
} | |||
template <typename T> | |||
TextTable& add(const T& value) { | |||
m_row.values.emplace_back(value); | |||
if (m_cols_max_w.size() < m_row.values.size()) { | |||
m_cols_max_w.emplace_back(m_row.values.back().length()); | |||
} else { | |||
mgb_assert(m_row.values.size() >= 1); | |||
size_t i = m_row.values.size() - 1; | |||
m_cols_max_w[i] = | |||
std::max(m_cols_max_w[i], m_row.values.back().length()); | |||
} | |||
return *this; | |||
} | |||
template <typename T, typename std::enable_if<std::is_floating_point<T>::value, bool>::type = 0> | |||
TextTable& add(const T& value) { | |||
std::stringstream ss; | |||
ss << std::setiosflags(std::ios::fixed) << std::setprecision(2); | |||
ss << value; | |||
m_row.values.emplace_back(ss.str()); | |||
if (m_cols_max_w.size() < m_row.values.size()) { | |||
m_cols_max_w.emplace_back(m_row.values.back().length()); | |||
} else { | |||
mgb_assert(m_row.values.size() >= 1); | |||
size_t i = m_row.values.size() - 1; | |||
m_cols_max_w[i] = | |||
std::max(m_cols_max_w[i], m_row.values.back().length()); | |||
} | |||
return *this; | |||
} | |||
template <typename T, typename std::enable_if<std::is_integral<T>::value, bool>::type = 0> | |||
TextTable& add(const T& value) { | |||
m_row.values.emplace_back(std::to_string(value)); | |||
return *this; | |||
} | |||
void eor() { | |||
m_rows.emplace_back(m_row); | |||
adjuster_last_row(); | |||
m_row.values.clear(); | |||
} | |||
void reset() { | |||
m_row = {}; | |||
m_cols_max_w.clear(); | |||
m_padding = 0; | |||
m_rows.clear(); | |||
} | |||
void show(std::ostream& os); | |||
private: | |||
void adjuster_last_row(); | |||
std::string m_name; | |||
std::vector<size_t> m_cols_max_w; | |||
size_t m_padding = 0; | |||
std::string m_prefix = ""; | |||
struct Row { | |||
std::vector<std::string> values; | |||
struct Params { | |||
Align align = Align::Left; | |||
char horizontal = '-', vertical = '|', corner = '+'; | |||
} params; | |||
}; | |||
std::vector<Row> m_rows; | |||
Row m_row; | |||
}; | |||
inline std::ostream& operator<<(std::ostream& stream, TextTable& table) { | |||
table.show(stream); | |||
return stream; | |||
} | |||
} // namespace mgb |
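A minimal usage sketch of `TextTable` based on the interface above (row contents are made up):
```
#include <iostream>
#include "text_table.h"

int main() {
    mgb::TextTable table("bench");
    table.padding(1).horizontal('-').vertical('|').corner('+');
    table.align(mgb::TextTable::Align::Mid).add("op").add("time(ms)").eor();
    table.align(mgb::TextTable::Align::Left).add("conv0").add("3.14").eor();
    std::cout << table << std::endl;  // operator<< calls table.show(os)
    return 0;
}
```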
@@ -1,74 +0,0 @@ | |||
/** | |||
 * \file sdk/load-and-run/test/json_loader_test.cpp
* MegEngine is Licensed under the Apache License, Version 2.0 (the "License") | |||
* | |||
* Copyright (c) 2014-2021 Megvii Inc. All rights reserved. | |||
* | |||
* Unless required by applicable law or agreed to in writing, | |||
* software distributed under the License is distributed on an | |||
* "AS IS" BASIS, WITHOUT ARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |||
*/ | |||
#include <cfloat> | |||
#include <cstdint> | |||
#include <cstdio> | |||
#include <cmath> | |||
#include "../src/json_loader.h" | |||
using namespace mgb; | |||
void test_number(double real, std::string str) { | |||
JsonLoader json; | |||
auto root = json.load(str.data(), str.size()); | |||
mgb_assert(root->is_number()); | |||
mgb_assert(std::fabs(real - root->number()) <= DBL_EPSILON); | |||
} | |||
void test_string(std::string str, std::string json_str) { | |||
JsonLoader json; | |||
auto root = json.load(json_str.data(), json_str.size()); | |||
mgb_assert(root->is_str()); | |||
mgb_assert(str == root->str()); | |||
} | |||
void test_array(size_t num, std::string str) { | |||
JsonLoader json; | |||
auto root = json.load(str.data(), str.size()); | |||
mgb_assert(root->is_array()); | |||
mgb_assert(root->len() == num); | |||
} | |||
void test_object(size_t num, std::string str) { | |||
JsonLoader json; | |||
auto root = json.load(str.data(), str.size()); | |||
mgb_assert(root->is_object()); | |||
mgb_assert(root->len() == num); | |||
} | |||
int main() { | |||
test_number(1.0, "1.0"); | |||
test_number(1e10, "1e10"); | |||
test_number(0.2345678, "0.02345678e1"); | |||
test_number(-10086, "-1.0086E4"); | |||
test_number(1.7976931348623157e+308, | |||
"1.7976931348623157e+308"); // max double | |||
test_string("a", "\"a\""); | |||
test_string("\\table", "\"\\table\""); | |||
test_array(0, " [ ] "); | |||
test_array(4, " [ 0.1, 0.2,0.3, 1990 ] "); | |||
test_array(2, " [ 0.1, \"hello-world\"]"); | |||
test_array(3, " [ 0.1, \"hello-world\", [2.0, 33]]"); | |||
test_array(1, " [ [ [ [2020] ], [2021], [[2022]] ] ]"); | |||
test_object(0, " { } "); | |||
test_object(1, "{\"key1\": 2023}"); | |||
test_object(1, | |||
"{\"key1\": { \"key2\": { " | |||
"\"key3\": \"value\" } } }"); | |||
test_object(1, "{\"key1\":{\"key2\":{}}}"); | |||
printf("test passed\n"); | |||
return 0; | |||
} |