You cannot select more than 25 topics. Topics must start with a Chinese character, a letter or a number; they can include dashes ('-') and can be up to 35 characters long.

server.py 6.4 kB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209
  1. # -*- coding: utf-8 -*-
  2. # MegEngine is Licensed under the Apache License, Version 2.0 (the "License")
  3. #
  4. # Copyright (c) 2014-2020 Megvii Inc. All rights reserved.
  5. #
  6. # Unless required by applicable law or agreed to in writing,
  7. # software distributed under the License is distributed on an
  8. # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  9. import multiprocessing as mp
  10. import threading
  11. import time
  12. from collections import defaultdict
  13. from functools import partial
  14. from socketserver import ThreadingMixIn
  15. from xmlrpc.client import ServerProxy
  16. from xmlrpc.server import SimpleXMLRPCServer
  17. from ..core._imperative_rt.utils import create_mm_server
  18. from ..utils.future import Future
  19. from .util import get_free_ports
  20. class Methods:
  21. """Distributed Server Method.
  22. Used for exchange information between distributed nodes.
  23. :param mm_server_port: multiple machine rpc server port.
  24. """
  25. def __init__(self, mm_server_port):
  26. self.lock = threading.Lock()
  27. self.mm_server_port = mm_server_port
  28. self.dict_is_grad = defaultdict(partial(Future, True))
  29. self.dict_remote_tracer = defaultdict(partial(Future, True))
  30. self.dict_pack_list = defaultdict(partial(Future, False))
  31. self.dict_barrier_counter = defaultdict(int)
  32. self.dict_barrier_event = defaultdict(threading.Event)
  33. def connect(self):
  34. """Method for checking connection success."""
  35. return True
  36. def get_mm_server_port(self):
  37. """Get multiple machine rpc server port."""
  38. return self.mm_server_port
  39. def set_is_grad(self, key, is_grad):
  40. """Mark send/recv need gradiants by key.
  41. :param key: key to match send/recv op.
  42. :param is_grad: whether this op need grad.
  43. """
  44. with self.lock:
  45. future = self.dict_is_grad[key]
  46. future.set(is_grad)
  47. return True
  48. def check_is_grad(self, key):
  49. """Check whether send/recv need gradiants.
  50. :param key: key to match send/recv op.
  51. """
  52. with self.lock:
  53. future = self.dict_is_grad[key]
  54. ret = future.get()
  55. with self.lock:
  56. del self.dict_is_grad[key]
  57. return ret
  58. def set_remote_tracer(self, key, tracer_set):
  59. """Set tracer dict for tracing send/recv op.
  60. :param key: key to match send/recv op.
  61. :param tracer_set: valid tracer set.
  62. """
  63. with self.lock:
  64. future = self.dict_remote_tracer[key]
  65. future.set(tracer_set)
  66. return True
  67. def check_remote_tracer(self, key):
  68. """Get tracer dict for send/recv op.
  69. :param key: key to match send/recv op.
  70. """
  71. with self.lock:
  72. future = self.dict_remote_tracer[key]
  73. ret = future.get()
  74. with self.lock:
  75. del self.dict_remote_tracer[key]
  76. return ret
  77. def group_barrier(self, key, size):
  78. """A barrier wait for all group member.
  79. :param key: group key to match each other.
  80. :param size: group size.
  81. """
  82. with self.lock:
  83. self.dict_barrier_counter[key] += 1
  84. counter = self.dict_barrier_counter[key]
  85. event = self.dict_barrier_event[key]
  86. if counter == size:
  87. del self.dict_barrier_counter[key]
  88. del self.dict_barrier_event[key]
  89. event.set()
  90. else:
  91. event.wait()
  92. return True
class ThreadXMLRPCServer(ThreadingMixIn, SimpleXMLRPCServer):
    """XML-RPC server that handles each request in its own thread.

    Required because several ``Methods`` calls block (``group_barrier``
    waits on an ``Event``; the check_* methods wait on futures), so a
    single-threaded server would stall all other clients.
    """

    pass
  95. def start_server(py_server_port, mm_server_port):
  96. """Start python distributed server and multiple machine server.
  97. :param py_server_port: python server port.
  98. :param mm_server_port: multiple machine server port.
  99. """
  100. server = ThreadXMLRPCServer(("0.0.0.0", py_server_port), logRequests=False)
  101. server.register_instance(Methods(mm_server_port))
  102. server.serve_forever()
  103. class Server:
  104. """Distributed Server for distributed training.
  105. Should be running at master node.
  106. :param port: python server port.
  107. """
  108. def __init__(self, port):
  109. self.py_server_port = get_free_ports(1)[0] if port == 0 else port
  110. self.mm_server_port = create_mm_server("0.0.0.0", 0)
  111. self.proc = mp.Process(
  112. target=start_server,
  113. args=(self.py_server_port, self.mm_server_port),
  114. daemon=True,
  115. )
  116. self.proc.start()
  117. class Client:
  118. """Distributed Client for distributed training.
  119. :param master_ip: ip address of master node.
  120. :param port: port of server at master node.
  121. """
  122. def __init__(self, master_ip, port):
  123. self.master_ip = master_ip
  124. self.port = port
  125. self.connect()
  126. def connect(self):
  127. """Check connection success."""
  128. while True:
  129. try:
  130. self.proxy = ServerProxy(
  131. "http://{}:{}".format(self.master_ip, self.port)
  132. )
  133. if self.proxy.connect():
  134. break
  135. except:
  136. time.sleep(1)
  137. def get_mm_server_port(self):
  138. """Get multiple machine server port."""
  139. return self.proxy.get_mm_server_port()
  140. def set_is_grad(self, key, is_grad):
  141. """Mark send/recv need gradiants by key.
  142. :param key: key to match send/recv op.
  143. :param is_grad: whether this op need grad.
  144. """
  145. self.proxy.set_is_grad(key, is_grad)
  146. def check_is_grad(self, key):
  147. """Check whether send/recv need gradiants.
  148. :param key: key to match send/recv op.
  149. """
  150. return self.proxy.check_is_grad(key)
  151. def set_remote_tracer(self, key, tracer_set):
  152. """Set tracer dict for tracing send/recv op.
  153. :param key: key to match send/recv op.
  154. :param tracer_set: valid tracer set.
  155. """
  156. self.proxy.set_remote_tracer(key, tracer_set)
  157. def check_remote_tracer(self, key):
  158. """Get tracer dict for send/recv op.
  159. :param key: key to match send/recv op.
  160. """
  161. return self.proxy.check_remote_tracer(key)
  162. def group_barrier(self, key, size):
  163. """A barrier wait for all group member.
  164. :param key: group key to match each other.
  165. :param size: group size.
  166. """
  167. self.proxy.group_barrier(key, size)

MegEngine 安装包中集成了使用 GPU 运行代码所需的 CUDA 环境,不用区分 CPU 和 GPU 版。 如果想要运行 GPU 程序,请确保机器本身配有 GPU 硬件设备并安装好驱动。 如果你想体验在云端 GPU 算力平台进行深度学习开发的感觉,欢迎访问 MegStudio 平台