You cannot select more than 25 topics. Topics must start with a Chinese character, a letter or a number, can include dashes ('-') and can be up to 35 characters long.

test_dump_naming.py 9.3 kB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325
  1. # -*- coding: utf-8 -*-
  2. # MegEngine is Licensed under the Apache License, Version 2.0 (the "License")
  3. #
  4. # Copyright (c) 2014-2021 Megvii Inc. All rights reserved.
  5. #
  6. # Unless required by applicable law or agreed to in writing,
  7. # software distributed under the License is distributed on an
  8. # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  9. import io
  10. import numpy as np
  11. import pytest
  12. import megengine.functional as F
  13. import megengine.module as M
  14. import megengine.utils.comp_graph_tools as cgtools
  15. from megengine import Parameter, Tensor
  16. from megengine.core.tensor import megbrain_graph as G
  17. from megengine.jit.tracing import trace
  18. from megengine.quantization.quantize import quantize, quantize_qat
  19. from megengine.utils.naming import AutoNaming
  20. def _dump_and_load(func, symbolic, keep_opr_name=True):
  21. AutoNaming.clear()
  22. func = trace(func, symbolic=symbolic, capture_as_const=True)
  23. x = Tensor(np.ones(shape=(2, 3)))
  24. func(x).numpy()
  25. file = io.BytesIO()
  26. func.dump(
  27. file,
  28. optimize_for_inference=False,
  29. arg_names=("x",),
  30. keep_opr_name=keep_opr_name,
  31. keep_var_name=2,
  32. )
  33. file.seek(0)
  34. *_, outputs = G.load_graph(file)
  35. ops = cgtools.get_oprs_seq(outputs)
  36. return ops
  37. @pytest.mark.parametrize("symbolic", [False, True])
  38. def test_auto_naming(symbolic):
  39. class Simple(M.Module):
  40. def __init__(self, name):
  41. super().__init__()
  42. self.name = name
  43. def forward(self, x):
  44. return x + x
  45. m = Simple("simple")
  46. op = _dump_and_load(m, symbolic)[-1]
  47. assert op.name == "simple.ADD"
  48. assert op.outputs[0].name == "simple.ADD"
  49. @pytest.mark.parametrize("symbolic", [False, True])
  50. def test_user_named_tensor(symbolic):
  51. class Simple(M.Module):
  52. def __init__(self, name):
  53. super().__init__()
  54. self.name = name
  55. self.k = Parameter(1.0, name="k")
  56. def forward(self, x):
  57. x = x + x
  58. x.name = "o_x"
  59. return x
  60. m = Simple("simple")
  61. op = _dump_and_load(m, symbolic)[-1]
  62. assert op.name == "simple.ADD"
  63. assert op.outputs[0].name == "o_x"
  64. @pytest.mark.parametrize("symbolic", [False, True])
  65. def test_user_named_param(symbolic):
  66. class Simple(M.Module):
  67. def __init__(self, name):
  68. super().__init__()
  69. self.name = name
  70. self.k = Parameter(2.0, name="k")
  71. def forward(self, x):
  72. return self.k * x
  73. m = Simple("simple")
  74. op = _dump_and_load(m, symbolic)[-1]
  75. assert op.inputs[0].name == "x"
  76. assert op.inputs[1].name == "simple.k"
  77. @pytest.mark.parametrize("symbolic", [False, True])
  78. def test_without_module(symbolic):
  79. def f(x):
  80. return 2 * x
  81. op = _dump_and_load(f, symbolic)[-1]
  82. assert op.name == "MUL"
  83. @pytest.mark.parametrize("symbolic", [False, True])
  84. def test_ignore_top_module(symbolic):
  85. class Simple(M.Module):
  86. def forward(self, x):
  87. return x + x
  88. m = Simple()
  89. op = _dump_and_load(m, symbolic)[-1]
  90. assert op.name == "ADD"
  91. assert op.outputs[0].name == "ADD"
  92. @pytest.mark.parametrize("symbolic", [False, True])
  93. def test_with_submodule(symbolic):
  94. class Simple(M.Module):
  95. def __init__(self, name):
  96. super().__init__()
  97. self.name = name
  98. self.linear = M.Linear(3, 3)
  99. def forward(self, x):
  100. x = self.linear(x)
  101. return x
  102. m = Simple("simple")
  103. ops = _dump_and_load(m, symbolic)
  104. assert ops[-1].name == "simple.linear.ADD"
  105. assert ops[-2].name == "simple.linear.MatrixMul"
  106. assert ops[-1].outputs[0].name == "simple.linear.ADD"
  107. @pytest.mark.parametrize("symbolic", [False, True])
  108. def test_with_submodule_in_container(symbolic):
  109. class Simple(M.Module):
  110. def __init__(self, name):
  111. super().__init__()
  112. self.name = name
  113. self.l0 = [M.Linear(3, 3) for _ in range(2)]
  114. self.l1 = tuple(self.l0)
  115. self.l2 = dict(zip(["l2-0", "l2-1"], self.l0))
  116. def forward(self, x):
  117. for i in range(2):
  118. x = self.l0[i](x)
  119. x = self.l1[i](x)
  120. x = self.l2["l2-%d" % i](x)
  121. return x
  122. m = Simple("simple")
  123. ops = _dump_and_load(m, symbolic)
  124. assert ops[-1].outputs[0].name == "simple.l2.l2-1.ADD"
  125. assert ops[-1].name == "simple.l2.l2-1.ADD"
  126. assert ops[-2].name == "simple.l2.l2-1.MatrixMul"
  127. assert ops[-3].name == "simple.l1.1.ADD"
  128. assert ops[-4].name == "simple.l1.1.MatrixMul"
  129. assert ops[-5].name == "simple.l0.1.ADD"
  130. assert ops[-6].name == "simple.l0.1.MatrixMul"
  131. @pytest.mark.parametrize("symbolic", [False, True])
  132. def test_named_submodule(symbolic):
  133. class Simple(M.Module):
  134. def __init__(self, name):
  135. super().__init__()
  136. self.name = name
  137. self.linear = M.Linear(3, 3, name="x")
  138. def forward(self, x):
  139. x = self.linear(x)
  140. return x
  141. m = Simple("simple")
  142. ops = _dump_and_load(m, symbolic)
  143. assert ops[-1].name == "simple.x.ADD"
  144. assert ops[-2].name == "simple.x.MatrixMul"
  145. assert ops[-1].outputs[0].name == "simple.x.ADD"
  146. @pytest.mark.parametrize("symbolic", [False, True])
  147. def test_with_same_operators(symbolic):
  148. class Simple(M.Module):
  149. def __init__(self, name):
  150. super().__init__()
  151. self.name = name
  152. def forward(self, x):
  153. x = F.relu(x)
  154. x = F.relu(x)
  155. return x
  156. m = Simple("simple")
  157. ops = _dump_and_load(m, symbolic)
  158. assert ops[-1].name == "simple.RELU[1]"
  159. assert ops[-2].name == "simple.RELU[0]"
  160. @pytest.mark.parametrize("symbolic", [False, True])
  161. def test_not_keep_opr_name(symbolic):
  162. def f(x):
  163. return 2 * x
  164. op = _dump_and_load(f, symbolic, False)[-1]
  165. assert op.name == "MUL(x,const<2>[2])[4]"
  166. @pytest.mark.parametrize("tensor_name, var_name", [("data", "data"), (None, "arg_0")])
  167. def test_catch_input_name(tensor_name, var_name):
  168. def f(x):
  169. return 2 * x
  170. func = trace(f, symbolic=True, capture_as_const=True)
  171. x = Tensor(np.ones(shape=(2, 3)), name=tensor_name)
  172. func(x).numpy()
  173. file = io.BytesIO()
  174. func.dump(file, optimize_for_inference=False, keep_opr_name=True, keep_var_name=2)
  175. file.seek(0)
  176. *_, outputs = G.load_graph(file)
  177. op = cgtools.get_oprs_seq(outputs)[-1]
  178. assert op.inputs[0].name == var_name
  179. @pytest.mark.parametrize("symbolic", [False, True])
  180. def test_quantized_module_auto_naming(symbolic):
  181. class Simple(M.Module):
  182. def __init__(self, name):
  183. super().__init__(name=name)
  184. self.quant = M.QuantStub()
  185. self.linear = M.Linear(3, 3, bias=True)
  186. self.dequant = M.DequantStub()
  187. def forward(self, x):
  188. out = self.quant(x)
  189. out = self.linear(out)
  190. out = self.dequant(out)
  191. return out
  192. m = Simple("simple")
  193. quantize_qat(m)
  194. quantize(m)
  195. m.eval()
  196. ops = _dump_and_load(m, symbolic)
  197. ops_name = (
  198. "x",
  199. "simple.quant.TypeCvt",
  200. "simple.linear.MatrixMul",
  201. "simple.linear.ADD",
  202. "simple.linear.TypeCvt",
  203. "simple.dequant.TypeCvt",
  204. )
  205. for op, name in zip(ops, ops_name):
  206. assert op.name == name
  207. @pytest.mark.parametrize("symbolic", [False, True])
  208. def test_quantized_module_user_naming(symbolic):
  209. class Simple(M.Module):
  210. def __init__(self, name):
  211. super().__init__(name=name)
  212. self.quant = M.QuantStub()
  213. self.linear = M.Linear(3, 3, bias=True, name="user-linear")
  214. self.dequant = M.DequantStub()
  215. def forward(self, x):
  216. out = self.quant(x)
  217. out = self.linear(out)
  218. out = self.dequant(out)
  219. return out
  220. m = Simple("simple")
  221. quantize_qat(m)
  222. quantize(m)
  223. m.eval()
  224. ops = _dump_and_load(m, symbolic)
  225. ops_name = (
  226. "x",
  227. "simple.quant.TypeCvt",
  228. "simple.user-linear.MatrixMul",
  229. "simple.user-linear.ADD",
  230. "simple.user-linear.TypeCvt",
  231. "simple.dequant.TypeCvt",
  232. )
  233. for op, name in zip(ops, ops_name):
  234. assert op.name == name
  235. @pytest.mark.parametrize("symbolic", [False, True])
  236. def test_quantized_module_user_naming_param(symbolic):
  237. class Simple(M.Module):
  238. def __init__(self, name):
  239. super().__init__(name=name)
  240. self.quant = M.QuantStub()
  241. self.linear = M.Linear(3, 3, bias=True)
  242. self.dequant = M.DequantStub()
  243. self.linear.weight.name = "user-weight"
  244. self.linear.bias.name = "user-bias"
  245. def forward(self, x):
  246. out = self.quant(x)
  247. out = self.linear(out)
  248. out = self.dequant(out)
  249. return out
  250. m = Simple("simple")
  251. quantize_qat(m)
  252. quantize(m)
  253. m.eval()
  254. ops = _dump_and_load(m, symbolic)
  255. (matrix_mul_op,) = [op for op in ops if op.name == "simple.linear.MatrixMul"]
  256. for var in matrix_mul_op.inputs:
  257. assert var.name in ("simple.quant.TypeCvt", "simple.linear.user-weight")
  258. # WONTFIX: bias' name does not meet expectations because of astype operator after quantization

MegEngine 安装包中集成了使用 GPU 运行代码所需的 CUDA 环境,不用区分 CPU 和 GPU 版。 如果想要运行 GPU 程序,请确保机器本身配有 GPU 硬件设备并安装好驱动。 如果你想体验在云端 GPU 算力平台进行深度学习开发的感觉,欢迎访问 MegStudio 平台