You cannot select more than 25 topics. Topics must start with a Chinese character, a letter or a number, can include dashes ('-') and can be up to 35 characters long.

module_tracer.py 7.0 kB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278
  1. # MegEngine is Licensed under the Apache License, Version 2.0 (the "License")
  2. #
  3. # Copyright (c) 2014-2021 Megvii Inc. All rights reserved.
  4. #
  5. # Unless required by applicable law or agreed to in writing,
  6. # software distributed under the License is distributed on an
  7. # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
import collections
import collections.abc
import inspect

from .. import Tensor
from .. import functional as F
from ..core.tensor.array_method import ArrayMethodMixin
from ..module import Module
from ..module.qat import QATModule
# The tracer installed by ``set_active_module_tracer`` and read back via
# ``active_module_tracer``; ``None`` while no tracing is in progress.
_active_module_tracer = None

# Method names that ``Patcher.__init__`` patches on ``ArrayMethodMixin``
# (each one is wrapped with the tracer's ``wrap_fn``).
BUILTIN_ARRAY_METHOD = [
    "__lt__",
    "__le__",
    "__gt__",
    "__ge__",
    "__eq__",
    "__ne__",
    "__neg__",
    "__pos__",
    "__abs__",
    "__invert__",
    "__round__",
    "__floor__",
    "__ceil__",
    "__add__",
    "__sub__",
    "__mul__",
    "__matmul__",
    "__truediv__",
    "__floordiv__",
    "__mod__",
    "__pow__",
    "__lshift__",
    "__rshift__",
    "__and__",
    "__or__",
    "__xor__",
    "__radd__",
    "__rsub__",
    "__rmul__",
    "__rmatmul__",
    "__rtruediv__",
    "__rfloordiv__",
    "__rmod__",
    "__rpow__",
    "__rlshift__",
    "__rrshift__",
    "__rand__",
    "__ror__",
    "__rxor__",
    "__iadd__",
    "__isub__",
    "__imul__",
    "__imatmul__",
    "__itruediv__",
    "__ifloordiv__",
    "__imod__",
    "__ipow__",
    "__ilshift__",
    "__irshift__",
    "__iand__",
    "__ior__",
    "__ixor__",
    "transpose",
    "astype",
    "reshape",
    "_broadcast",
    "flatten",
    "sum",
    "prod",
    "min",
    "max",
    "mean",
    "__getitem__",
    "__setitem__",
]

# Tensor attribute/method names that, together with BUILTIN_ARRAY_METHOD,
# form the result of ``get_tensor_wrapable_method()``.
BUILTIN_TENSOR_WRAP_METHOD = [
    "T",
    "to",
    "size",
    "shape",
    "detach",
    "device",
    "dtype",
    "grad",
    "item",
    "ndim",
    "numpy",
    "qparams",
    "set_value",
    "reset_zero",
    "requires_grad",
    "_reset",
    "_isscalar",
    "_setscalar",
    "_tuple_shape",
    "_unsetscalar",
]
  103. def get_tensor_wrapable_method():
  104. return BUILTIN_TENSOR_WRAP_METHOD + BUILTIN_ARRAY_METHOD
  105. def active_module_tracer():
  106. return _active_module_tracer
  107. def set_active_module_tracer(tracer):
  108. global _active_module_tracer
  109. _active_module_tracer = tracer
  110. class module_tracer:
  111. # builtin types
  112. _opaque_types = set()
  113. _active_scopes = None
  114. def __init__(self, wrap_fn):
  115. self._active_scopes = []
  116. self.patcher = Patcher(wrap_fn)
  117. @classmethod
  118. def register_as_builtin(cls, mod):
  119. assert issubclass(mod, Module)
  120. cls._opaque_types.add(mod)
  121. return mod
  122. @classmethod
  123. def is_builtin(cls, mod):
  124. return type(mod) in cls._opaque_types
  125. def push_scope(self, scope):
  126. self._active_scopes.append(scope)
  127. def pop_scope(self):
  128. self._active_scopes.pop()
  129. def current_scope(self):
  130. if self._active_scopes:
  131. return self._active_scopes[-1]
  132. return None
  133. def top_scope(self):
  134. if self._active_scopes:
  135. return self._active_scopes[0]
  136. return None
  137. class NotExist:
  138. pass
  139. class PatchedFn:
  140. frame_dict = None
  141. name = None
  142. origin_fn = None
  143. def __init__(self, frame_dict, name):
  144. self.frame_dict = frame_dict
  145. self.name = name
  146. self.origin_fn = (
  147. self.frame_dict[name]
  148. if isinstance(frame_dict, collections.abc.Mapping)
  149. else getattr(frame_dict, name, NotExist)
  150. )
  151. def set_func(self, func):
  152. if isinstance(self.frame_dict, collections.abc.Mapping):
  153. self.frame_dict[self.name] = func
  154. else:
  155. if func is not NotExist:
  156. setattr(self.frame_dict, self.name, func)
  157. else:
  158. delattr(self.frame_dict, self.name)
  159. class Patcher:
  160. _builtin_functions = []
  161. _builtin_modules = [
  162. F,
  163. F.distributed,
  164. F.elemwise,
  165. F.inplace,
  166. F.loss,
  167. F.math,
  168. F.metric,
  169. F.nn,
  170. F.quantized,
  171. F.tensor,
  172. F.utils,
  173. F.vision,
  174. ]
  175. _builtin_methods = [
  176. Tensor,
  177. ArrayMethodMixin,
  178. ]
  179. def __init__(self, wrap_fn):
  180. self.patched_fn_ids = set()
  181. self.patched_fn = []
  182. self.visited_frames_ids = set()
  183. self.wrap_fn = wrap_fn
  184. for module in self._builtin_modules:
  185. self.patch_module(module)
  186. # some functions in F.nn are import from other module, and not in __all__
  187. self.auto_patch(F.nn.__dict__, False)
  188. for meth in BUILTIN_ARRAY_METHOD:
  189. self.patch_method(ArrayMethodMixin, meth, self.wrap_fn)
  190. self.patch_method(Tensor, "detach", self.wrap_fn)
  191. self.patch_method(Tensor, "__new__", self.wrap_fn)
  192. self.patch_method(QATModule, "_apply_fakequant_with_observer", self.wrap_fn)
  193. for i, j in self._builtin_functions:
  194. if id(i) not in self.visited_frames_ids:
  195. self.patch_function(i, j, self.wrap_fn)
  196. for m in module_tracer._opaque_types:
  197. self.auto_patch(getattr(getattr(m, "forward", m), "__globals__", {}))
  198. def patch_function(self, frame_dict, fn, wrap_fn):
  199. patched_fn = PatchedFn(frame_dict, fn)
  200. self.patched_fn_ids.add(id(patched_fn.origin_fn))
  201. patched_fn.set_func(wrap_fn(patched_fn.origin_fn))
  202. self.patched_fn.append(patched_fn)
  203. def patch_method(self, cls, name, wrap_fn):
  204. self.patch_function(cls, name, wrap_fn)
  205. def patch_cls(self, cls):
  206. import inspect
  207. if id(cls) not in self.visited_frames_ids:
  208. for k, v in cls.__dict__.items():
  209. if inspect.isfunction(v) and not k.startswith("_"):
  210. self.patch_function(cls, k, self.wrap_fn)
  211. self.visited_frames_ids.add(id(cls))
  212. def patch_module(self, module):
  213. import inspect
  214. if id(module.__dict__) not in self.visited_frames_ids:
  215. keys = (
  216. getattr(module, "__all__")
  217. if hasattr(module, "__all__")
  218. else module.__dict__.keys()
  219. )
  220. for k in keys:
  221. v = getattr(module, k)
  222. if inspect.isfunction(v) and not k.startswith("_"):
  223. self.patch_function(module.__dict__, k, self.wrap_fn)
  224. self.visited_frames_ids.add(id(module.__dict__))
  225. def auto_patch(self, frame_dict, check_frame_id=True):
  226. if id(frame_dict) not in self.visited_frames_ids or not check_frame_id:
  227. for k, v in frame_dict.items():
  228. if id(v) in self.patched_fn_ids:
  229. self.patch_function(frame_dict, k, self.wrap_fn)
  230. self.visited_frames_ids.add(id(frame_dict))
  231. def __enter__(self):
  232. return self
  233. def __exit__(self, type, vlaue, trace):
  234. while self.patched_fn:
  235. pf = self.patched_fn.pop()
  236. pf.set_func(pf.origin_fn)
  237. self.visited_frames_ids.clear()