__init__.py 14 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318
  1. from __future__ import annotations
  2. from typing import cast, Callable, Generic, List, Optional, Type, TypeVar, Union
  3. import torch
  4. __all__ = ['Future', 'collect_all', 'wait_all']
  5. T = TypeVar("T")
  6. S = TypeVar("S")
class _PyFutureMeta(type(torch._C.Future), type(Generic)):  # type: ignore[misc, no-redef]
    # Combined metaclass: ``Future`` inherits from both the pybind11-backed
    # ``torch._C.Future`` and ``typing.Generic``, whose metaclasses differ.
    # Deriving from both metaclasses here avoids a metaclass conflict at
    # class-creation time.
    pass
class Future(torch._C.Future, Generic[T], metaclass=_PyFutureMeta):
    r"""
    Wrapper around a ``torch._C.Future`` which encapsulates an asynchronous
    execution of a callable, e.g. :meth:`~torch.distributed.rpc.rpc_async`. It
    also exposes a set of APIs to add callback functions and set results.

    .. warning:: GPU support is a beta feature, subject to changes.
    """

    def __init__(self, *, devices: Optional[List[Union[int, str, torch.device]]] = None):
        r"""
        Create an empty unset ``Future``. If the future is intended to hold
        values containing CUDA tensors, (a superset of) their CUDA devices must
        be specified at construction. (This is only supported if
        ``torch.cuda.is_available()`` returns ``True``). This is needed to
        ensure proper CUDA stream synchronization. The child futures, returned
        by the ``then`` method, will inherit these devices.

        Args:
            devices(``List[Union[int, str, torch.device]]``, optional): the set
                of devices on which tensors contained in this future's value are
                allowed to reside and on which callbacks are allowed to operate.
        """
        if devices is None:
            devices = []
        # Normalize each entry (int index / str spec / torch.device) to a
        # torch.device before handing the list to the C++ constructor.
        super().__init__([torch.device(d) for d in devices])

    def done(self) -> bool:
        r"""
        Return ``True`` if this ``Future`` is done. A ``Future`` is done if it
        has a result or an exception.

        If the value contains tensors that reside on GPUs, ``Future.done()``
        will return ``True`` even if the asynchronous kernels that are
        populating those tensors haven't yet completed running on the device,
        because at such stage the result is already usable, provided one
        performs the appropriate synchronizations (see :meth:`wait`).
        """
        return super().done()

    def wait(self) -> T:
        r"""
        Block until the value of this ``Future`` is ready.

        If the value contains tensors that reside on GPUs, then an additional
        synchronization is performed with the kernels (executing on the device)
        which may be asynchronously populating those tensors. Such sync is
        non-blocking, which means that ``wait()`` will insert the necessary
        instructions in the current streams to ensure that further operations
        enqueued on those streams will be properly scheduled after the async
        kernels but, once that is done, ``wait()`` will return, even if those
        kernels are still running. No further synchronization is required when
        accessing and using the values, as long as one doesn't change streams.

        Returns:
            The value held by this ``Future``. If the function (callback or RPC)
            creating the value has thrown an error, this ``wait`` method will
            also throw an error.
        """
        return super().wait()

    def value(self) -> T:
        r"""
        Obtain the value of an already-completed future.

        This method should only be called after a call to :meth:`wait` has
        completed, or inside a callback function passed to :meth:`then`. In
        other cases this ``Future`` may not yet hold a value and calling
        ``value()`` could fail.

        If the value contains tensors that reside on GPUs, then this method will
        *not* perform any additional synchronization. This should be done
        beforehand, separately, through a call to :meth:`wait` (except within
        callbacks, for which it's already being taken care of by :meth:`then`).

        Returns:
            The value held by this ``Future``. If the function (callback or RPC)
            creating the value has thrown an error, this ``value()`` method will
            also throw an error.
        """
        return super().value()

    def then(self, callback: Callable[[Future[T]], S]) -> Future[S]:
        r"""
        Append the given callback function to this ``Future``, which will be run
        when the ``Future`` is completed. Multiple callbacks can be added to
        the same ``Future``, but the order in which they will be executed cannot
        be guaranteed (to enforce a certain order consider chaining:
        ``fut.then(cb1).then(cb2)``). The callback must take one argument, which
        is the reference to this ``Future``. The callback function can use the
        :meth:`value` method to get the value. Note that if this ``Future`` is
        already completed, the given callback will be run immediately inline.

        If the ``Future``'s value contains tensors that reside on GPUs, the
        callback might be invoked while the async kernels that are populating
        those tensors haven't yet finished executing on the device. However, the
        callback will be invoked with some dedicated streams set as current
        (fetched from a global pool) which will be synchronized with those
        kernels. Hence any operation performed by the callback on these tensors
        will be scheduled on the device after the kernels complete. In other
        words, as long as the callback doesn't switch streams, it can safely
        manipulate the result without any additional synchronization. This is
        similar to the non-blocking behavior of :meth:`wait`.

        Similarly, if the callback returns a value that contains tensors that
        reside on a GPU, it can do so even if the kernels that are producing
        these tensors are still running on the device, as long as the callback
        didn't change streams during its execution. If one wants to change
        streams, one must be careful to re-synchronize them with the original
        streams, that is, those that were current when the callback was invoked.

        Args:
            callback(``Callable``): a ``Callable`` that takes this ``Future`` as
                the only argument.

        Returns:
            A new ``Future`` object that holds the return value of the
            ``callback`` and will be marked as completed when the given
            ``callback`` finishes.

        .. note:: Note that if the callback function throws, either
            through the original future being completed with an exception and
            calling ``fut.wait()``, or through other code in the callback, the
            future returned by ``then`` will be marked appropriately with the
            encountered error. However, if this callback later completes
            additional futures, those futures are not marked as completed with
            an error and the user is responsible for handling completion/waiting
            on those futures independently.

        Example::
            >>> # xdoctest: +REQUIRES(env:TORCH_DOCTEST_FUTURES)
            >>> def callback(fut):
            ...     print(f"RPC return value is {fut.wait()}.")
            >>> fut = torch.futures.Future()
            >>> # The inserted callback will print the return value when
            >>> # receiving the response from "worker1"
            >>> cb_fut = fut.then(callback)
            >>> chain_cb_fut = cb_fut.then(
            ...     lambda x : print(f"Chained cb done. {x.wait()}")
            ... )
            >>> fut.set_result(5)
            RPC return value is 5.
            Chained cb done. None
        """
        # The C++ ``then`` returns an untyped Future; cast it so callers get
        # the callback's return type in the child future's type parameter.
        return cast(Future[S], super().then(callback))

    def add_done_callback(self, callback: Callable[[Future[T]], None]) -> None:
        r"""
        Append the given callback function to this ``Future``, which will be run
        when the ``Future`` is completed. Multiple callbacks can be added to
        the same ``Future``, but the order in which they will be executed cannot
        be guaranteed. The callback must take one argument, which is the
        reference to this ``Future``. The callback function can use the
        :meth:`value` method to get the value. Note that if this ``Future`` is
        already completed, the given callback will be run inline.

        We recommend that you use the :meth:`then` method as it provides a way
        to synchronize after your callback has completed. ``add_done_callback``
        can be cheaper if your callback does not return anything. But both
        :meth:`then` and ``add_done_callback`` use the same callback
        registration API under the hood.

        With respect to GPU tensors, this method behaves in the same way as
        :meth:`then`.

        Args:
            callback(``Callable``): a ``Callable`` that takes in one argument,
                which is the reference to this ``Future``.

        .. note:: Note that if the callback function throws, either
            through the original future being completed with an exception and
            calling ``fut.wait()``, or through other code in the callback,
            error handling must be carefully taken care of. For example, if
            this callback later completes additional futures, those futures are
            not marked as completed with an error and the user is responsible
            for handling completion/waiting on those futures independently.

        Example::
            >>> # xdoctest: +REQUIRES(env:TORCH_DOCTEST_FUTURES)
            >>> def callback(fut):
            ...     print("This will run after the future has finished.")
            ...     print(fut.wait())
            >>> fut = torch.futures.Future()
            >>> fut.add_done_callback(callback)
            >>> fut.set_result(5)
            This will run after the future has finished.
            5
        """
        super().add_done_callback(callback)

    def set_result(self, result: T) -> None:
        r"""
        Set the result for this ``Future``, which will mark this ``Future`` as
        completed and trigger all attached callbacks. Note that a ``Future``
        cannot be marked completed twice.

        If the result contains tensors that reside on GPUs, this method can be
        called even if the asynchronous kernels that are populating those
        tensors haven't yet completed running on the device, provided that the
        streams on which those kernels were enqueued are set as the current ones
        when this method is called. Put simply, it's safe to call this method
        immediately after launching those kernels, without any additional
        synchronization, as long as one doesn't change streams in between. This
        method will record events on all the relevant current streams and will
        use them to ensure proper scheduling for all the consumers of this
        ``Future``.

        Args:
            result (object): the result object of this ``Future``.

        Example::
            >>> # xdoctest: +REQUIRES(env:TORCH_DOCTEST_FUTURES)
            >>> import threading
            >>> import time
            >>> def slow_set_future(fut, value):
            ...     time.sleep(0.5)
            ...     fut.set_result(value)
            >>> fut = torch.futures.Future()
            >>> t = threading.Thread(
            ...     target=slow_set_future,
            ...     args=(fut, torch.ones(2) * 3)
            ... )
            >>> t.start()
            >>> print(fut.wait())
            tensor([3., 3.])
            >>> t.join()
        """
        super().set_result(result)

    def set_exception(self, result: T) -> None:
        r"""
        Set an exception for this ``Future``, which will mark this ``Future`` as
        completed with an error and trigger all attached callbacks. Note that
        when calling wait()/value() on this ``Future``, the exception set here
        will be raised inline.

        Args:
            result (BaseException): the exception for this ``Future``.

        Example::
            >>> # xdoctest: +REQUIRES(env:TORCH_DOCTEST_FUTURES)
            >>> fut = torch.futures.Future()
            >>> fut.set_exception(ValueError("foo"))
            >>> fut.wait()
            Traceback (most recent call last):
            ...
            ValueError: foo
        """
        # NOTE(review): validated with ``assert``, which is stripped under
        # ``python -O``; the docstring expects a BaseException instance.
        assert isinstance(result, Exception), f"{result} is of type {type(result)}, not an Exception."

        def raise_error(fut_result):
            raise fut_result

        # The exception is stored as the future's *result*; installing this
        # unwrap hook first makes wait()/value() re-raise it instead of
        # returning it. Order matters: the hook must be in place before
        # set_result completes the future and fires callbacks.
        super()._set_unwrap_func(raise_error)
        self.set_result(result)  # type: ignore[arg-type]
  230. def collect_all(futures: List[Future]) -> Future[List[Future]]:
  231. r"""
  232. Collects the provided :class:`~torch.futures.Future` objects into a single
  233. combined :class:`~torch.futures.Future` that is completed when all of the
  234. sub-futures are completed.
  235. Args:
  236. futures (list): a list of :class:`~torch.futures.Future` objects.
  237. Returns:
  238. Returns a :class:`~torch.futures.Future` object to a list of the passed
  239. in Futures.
  240. Example::
  241. >>> # xdoctest: +REQUIRES(env:TORCH_DOCTEST_FUTURES)
  242. >>> fut0 = torch.futures.Future()
  243. >>> fut1 = torch.futures.Future()
  244. >>> fut = torch.futures.collect_all([fut0, fut1])
  245. >>> fut0.set_result(0)
  246. >>> fut1.set_result(1)
  247. >>> fut_list = fut.wait()
  248. >>> print(f"fut0 result = {fut_list[0].wait()}")
  249. fut0 result = 0
  250. >>> print(f"fut1 result = {fut_list[1].wait()}")
  251. fut1 result = 1
  252. """
  253. return cast(Future[List[Future]], torch._C._collect_all(cast(List[torch._C.Future], futures)))
  254. def wait_all(futures: List[Future]) -> List:
  255. r"""
  256. Waits for all provided futures to be complete, and returns
  257. the list of completed values. If any of the futures encounters an error,
  258. the method will exit early and report the error not waiting for other
  259. futures to complete.
  260. Args:
  261. futures (list): a list of :class:`~torch.futures.Future` object.
  262. Returns:
  263. A list of the completed :class:`~torch.futures.Future` results. This
  264. method will throw an error if ``wait`` on any
  265. :class:`~torch.futures.Future` throws.
  266. """
  267. return [fut.wait() for fut in torch._C._collect_all(cast(List[torch._C.Future], futures)).wait()]