You cannot select more than 25 topics. Topics must start with a Chinese character, a letter or a number, can include dashes ('-'), and can be up to 35 characters long.

helper.py 1.7 kB

1234567891011121314151617181920212223242526272829303132333435363738394041424344454647484950515253545556575859606162636465
  1. # -*- coding: utf-8 -*-
  2. # MegEngine is Licensed under the Apache License, Version 2.0 (the "License")
  3. #
  4. # Copyright (c) 2014-2020 Megvii Inc. All rights reserved.
  5. #
  6. # Unless required by applicable law or agreed to in writing,
  7. # software distributed under the License is distributed on an
  8. # "AS IS" BASIS, WITHOUT ARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  9. import functools
  10. import multiprocessing as mp
  11. from typing import Callable
  12. from megengine.device import get_device_count
  13. from .functional import all_reduce_sum, broadcast
  14. from .group import WORLD, group_barrier, is_distributed
  15. def synchronized(func: Callable):
  16. """Decorator. Decorated function will synchronize when finished.
  17. Specifically, we use this to prevent data race during hub.load"""
  18. @functools.wraps(func)
  19. def wrapper(*args, **kwargs):
  20. if not is_distributed():
  21. return func(*args, **kwargs)
  22. ret = func(*args, **kwargs)
  23. group_barrier()
  24. return ret
  25. return wrapper
  26. def get_device_count_by_fork(device_type: str):
  27. q = mp.Queue()
  28. def worker(queue):
  29. num = get_device_count(device_type)
  30. queue.put(num)
  31. p = mp.Process(target=worker, args=(q,))
  32. p.start()
  33. p.join()
  34. return q.get()
  35. def bcast_params_(params, group):
  36. for p in params:
  37. p._reset(broadcast(p, group))
  38. class AllreduceCallback:
  39. def __init__(self, reduce_method, group=WORLD):
  40. self._reduce_method = reduce_method
  41. self._group = group
  42. def __call__(self, param, grad):
  43. ret = all_reduce_sum(grad, self._group)
  44. if self._reduce_method == "MEAN":
  45. ret = ret / self._group.size
  46. return ret
  47. make_allreduce_cb = AllreduceCallback

MegEngine 安装包中集成了使用 GPU 运行代码所需的 CUDA 环境,不用区分 CPU 和 GPU 版。 如果想要运行 GPU 程序,请确保机器本身配有 GPU 硬件设备并安装好驱动。 如果你想体验在云端 GPU 算力平台进行深度学习开发的感觉,欢迎访问 MegStudio 平台