
server.py 8.9 kB

# -*- coding: utf-8 -*-
# MegEngine is Licensed under the Apache License, Version 2.0 (the "License")
#
# Copyright (c) 2014-2021 Megvii Inc. All rights reserved.
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
import multiprocessing as mp
import threading
import time
from collections import defaultdict
from functools import partial
from socketserver import ThreadingMixIn
from xmlrpc.client import ServerProxy
from xmlrpc.server import SimpleXMLRPCServer

from ..core._imperative_rt.utils import create_mm_server
from ..utils.future import Future

class Methods:
    """
    Distributed Server Method.
    Used for exchanging information between distributed nodes.

    :param mm_server_port: multiple machine rpc server port.
    """

    def __init__(self, mm_server_port):
        self.lock = threading.Lock()
        self.mm_server_port = mm_server_port
        self.dict_is_grad = defaultdict(partial(Future, True))
        self.dict_remote_tracer = defaultdict(partial(Future, True))
        self.dict_pack_list = defaultdict(partial(Future, False))
        self.dict_barrier_counter = defaultdict(int)
        self.dict_barrier_event = defaultdict(threading.Event)
        self.user_dict = defaultdict(partial(Future, False))
        self.bcast_dict = {}
    def connect(self):
        """Method for checking connection success."""
        return True

    def get_mm_server_port(self):
        """Get multiple machine rpc server port."""
        return self.mm_server_port

    def set_is_grad(self, key, is_grad):
        """
        Mark whether a send/recv pair needs gradients by key.

        :param key: key to match send/recv op.
        :param is_grad: whether this op needs grad.
        """
        with self.lock:
            future = self.dict_is_grad[key]
        future.set(is_grad)
        return True

    def check_is_grad(self, key):
        """
        Check whether a send/recv pair needs gradients.

        :param key: key to match send/recv op.
        """
        with self.lock:
            future = self.dict_is_grad[key]
        ret = future.get()
        with self.lock:
            del self.dict_is_grad[key]
        return ret
    def set_remote_tracer(self, key, tracer_set):
        """
        Set tracer dict for tracing send/recv op.

        :param key: key to match send/recv op.
        :param tracer_set: valid tracer set.
        """
        with self.lock:
            future = self.dict_remote_tracer[key]
        future.set(tracer_set)
        return True

    def check_remote_tracer(self, key):
        """
        Get tracer dict for send/recv op.

        :param key: key to match send/recv op.
        """
        with self.lock:
            future = self.dict_remote_tracer[key]
        ret = future.get()
        with self.lock:
            del self.dict_remote_tracer[key]
        return ret
    def group_barrier(self, key, size):
        """
        A barrier that waits for all group members.

        :param key: group key to match each other.
        :param size: group size.
        """
        with self.lock:
            self.dict_barrier_counter[key] += 1
            counter = self.dict_barrier_counter[key]
            event = self.dict_barrier_event[key]
        if counter == size:
            del self.dict_barrier_counter[key]
            del self.dict_barrier_event[key]
            event.set()
        else:
            event.wait()
        return True
    def user_set(self, key, val):
        """Set user defined key-value pairs across processes."""
        with self.lock:
            future = self.user_dict[key]
        future.set(val)
        return True

    def user_get(self, key):
        """Get user defined key-value pairs across processes."""
        with self.lock:
            future = self.user_dict[key]
        return future.get()

    def bcast_val(self, val, key, size):
        """Broadcast a value under ``key``: the caller that passes a non-None
        ``val`` publishes it, the other ``size - 1`` callers receive it."""
        with self.lock:
            if key not in self.bcast_dict:
                self.bcast_dict[key] = [Future(False), size]
            arr = self.bcast_dict[key]
        if val is not None:
            arr[0].set(val)
            val = None
        else:
            val = arr[0].get()
        with self.lock:
            cnt = arr[1] - 1
            arr[1] = cnt
            if cnt == 0:
                del self.bcast_dict[key]
        return val

class ThreadXMLRPCServer(ThreadingMixIn, SimpleXMLRPCServer):
    pass


def _start_server(py_server_port, queue):
    """
    Start the python distributed server and the multiple machine server.

    :param py_server_port: python server port.
    :param queue: the chosen server ports are put in this queue; an exception is
        put there instead when the process fails.
    """
    try:
        mm_server_port = create_mm_server("0.0.0.0", 0)
        server = ThreadXMLRPCServer(
            ("0.0.0.0", py_server_port), logRequests=False, allow_none=True
        )
        server.register_instance(Methods(mm_server_port))
        _, py_server_port = server.server_address
        queue.put((py_server_port, mm_server_port))
        server.serve_forever()
    except Exception as e:
        queue.put(e)

class Server:
    """
    Distributed Server for distributed training.
    Should be running on the master node.

    :param port: python server port.
    """

    def __init__(self, port=0):
        q = mp.Queue()
        self.proc = mp.Process(target=_start_server, args=(port, q), daemon=True)
        self.proc.start()
        ret = q.get()
        if isinstance(ret, Exception):
            raise ret
        else:
            self.py_server_port, self.mm_server_port = ret

    def __del__(self):
        self.proc.terminate()

class Client:
    """
    Distributed Client for distributed training.

    :param master_ip: ip address of the master node.
    :param port: port of the server on the master node.
    """

    def __init__(self, master_ip, port):
        self.master_ip = master_ip
        self.port = port
        self.connect()
        self.bcast_dict = defaultdict(lambda: 0)

    def connect(self):
        """Check connection success, retrying until the server is reachable."""
        while True:
            try:
                self.proxy = ServerProxy(
                    "http://{}:{}".format(self.master_ip, self.port), allow_none=True
                )
                if self.proxy.connect():
                    break
            except Exception:
                time.sleep(1)

    def get_mm_server_port(self):
        """Get multiple machine server port."""
        return self.proxy.get_mm_server_port()
    def set_is_grad(self, key, is_grad):
        """
        Mark whether a send/recv pair needs gradients by key.

        :param key: key to match send/recv op.
        :param is_grad: whether this op needs grad.
        """
        self.proxy.set_is_grad(key, is_grad)

    def check_is_grad(self, key):
        """
        Check whether a send/recv pair needs gradients.

        :param key: key to match send/recv op.
        """
        return self.proxy.check_is_grad(key)

    def set_remote_tracer(self, key, tracer_set):
        """
        Set tracer dict for tracing send/recv op.

        :param key: key to match send/recv op.
        :param tracer_set: valid tracer set.
        """
        self.proxy.set_remote_tracer(key, tracer_set)

    def check_remote_tracer(self, key):
        """
        Get tracer dict for send/recv op.

        :param key: key to match send/recv op.
        """
        return self.proxy.check_remote_tracer(key)

    def group_barrier(self, key, size):
        """
        A barrier that waits for all group members.

        :param key: group key to match each other.
        :param size: group size.
        """
        self.proxy.group_barrier(key, size)

    def user_set(self, key, val):
        """Set user defined key-value pairs across processes."""
        return self.proxy.user_set(key, val)

    def user_get(self, key):
        """Get user defined key-value pairs across processes."""
        return self.proxy.user_get(key)

    def bcast_val(self, val, key, size):
        """Broadcast ``val`` among ``size`` callers sharing the same ``key``."""
        idx = self.bcast_dict[key] + 1
        self.bcast_dict[key] = idx
        key = key + "_bcast_" + str(idx)
        return self.proxy.bcast_val(val, key, size)

def main(port=0, verbose=True):
    mm_server_port = create_mm_server("0.0.0.0", 0)
    server = ThreadXMLRPCServer(("0.0.0.0", port), logRequests=verbose)
    server.register_instance(Methods(mm_server_port))
    _, port = server.server_address
    print("serving on port", port)
    server.serve_forever()


if __name__ == "__main__":
    import argparse

    ap = argparse.ArgumentParser()
    ap.add_argument("-p", "--port", type=int, default=0)
    ap.add_argument("-v", "--verbose", type=bool, default=True)
    args = ap.parse_args()
    main(port=args.port, verbose=args.verbose)
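
For reference, here is a minimal usage sketch of the Server/Client pair defined above. The import path megengine.distributed.server is an assumption (the file itself only shows relative imports), and the host name, keys, and values are illustrative only:

# Minimal sketch (assumed import path); single-machine illustration only.
import threading

from megengine.distributed.server import Server, Client

if __name__ == "__main__":
    server = Server(port=0)                        # master-side server on a free port
    client = Client("localhost", server.py_server_port)

    client.user_set("world_size", 2)               # publish a user-defined key/value pair
    assert client.user_get("world_size") == 2      # any connected process can read it back

    # group_barrier blocks until `size` participants call it with the same key;
    # with size=1 this single caller passes through immediately.
    client.group_barrier("init", 1)

    # bcast_val: among `size` participants using the same key, the caller passing a
    # non-None value publishes it and the rest receive it. Each participant needs its
    # own Client, since the per-client call counter is folded into the key.
    other = Client("localhost", server.py_server_port)
    received = []
    t = threading.Thread(target=lambda: received.append(other.bcast_val(None, "cfg", 2)))
    t.start()
    client.bcast_val({"lr": 0.1}, "cfg", 2)        # the sender's own call returns None
    t.join()
    assert received == [{"lr": 0.1}]

In real distributed training these calls are issued by MegEngine's launcher rather than by hand; the sketch only illustrates the key/value, barrier, and broadcast semantics of the RPC methods.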

The MegEngine installation package already bundles the CUDA environment needed to run code on a GPU, so there is no separate CPU or GPU build to choose from. If you want to run GPU programs, make sure the machine has GPU hardware and the corresponding driver installed. If you would like to try deep learning development on a cloud GPU computing platform, you are welcome to visit the MegStudio platform.
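
As a quick sanity check, something like the following can confirm whether the installed build actually sees a GPU; this is a small sketch and assumes megengine.is_cuda_available() is present in your installed version:

import megengine as mge

# True when the bundled CUDA environment finds a usable GPU on this machine.
print("GPU available:", mge.is_cuda_available())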