I am done

2024-10-30 22:14:35 +01:00
parent 720dc28c09
commit 40e2a747cf
36901 changed files with 5011519 additions and 0 deletions

torch/cuda/amp/__init__.py

@@ -0,0 +1,12 @@
from .autocast_mode import autocast, custom_bwd, custom_fwd
from .common import amp_definitely_not_available
from .grad_scaler import GradScaler


__all__ = [
    "amp_definitely_not_available",
    "autocast",
    "custom_bwd",
    "custom_fwd",
    "GradScaler",
]
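
These re-exports keep the deprecated torch.cuda.amp namespace importable while the shims below route everything to torch.amp. A minimal sketch of what that means for callers (the warning capture is illustrative and not part of this diff; enabled=False just keeps the snippet runnable without a GPU):

import warnings

from torch.cuda.amp import GradScaler  # deprecated namespace still imports

with warnings.catch_warnings(record=True) as caught:
    warnings.simplefilter("always")
    scaler = GradScaler(enabled=False)  # forwards to torch.amp.GradScaler("cuda", ...)

assert any(issubclass(w.category, FutureWarning) for w in caught)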

torch/cuda/amp/autocast_mode.py

@@ -0,0 +1,90 @@
# mypy: allow-untyped-defs
import functools
from typing import Any

from typing_extensions import deprecated

import torch

__all__ = ["autocast", "custom_fwd", "custom_bwd"]


class autocast(torch.amp.autocast_mode.autocast):
    r"""See :class:`torch.autocast`.

    ``torch.cuda.amp.autocast(args...)`` is deprecated. Please use ``torch.amp.autocast("cuda", args...)`` instead.
    """

    @deprecated(
        "`torch.cuda.amp.autocast(args...)` is deprecated. "
        "Please use `torch.amp.autocast('cuda', args...)` instead.",
        category=FutureWarning,
    )
    def __init__(
        self,
        enabled: bool = True,
        dtype: torch.dtype = torch.float16,
        cache_enabled: bool = True,
    ):
        if torch._jit_internal.is_scripting():
            self._enabled = enabled
            self.device = "cuda"
            self.fast_dtype = dtype
            return
        super().__init__(
            "cuda", enabled=enabled, dtype=dtype, cache_enabled=cache_enabled
        )

    def __enter__(self):
        if torch._jit_internal.is_scripting():
            return self
        return super().__enter__()

    # TODO: discuss a unified TorchScript-friendly API for autocast
    def __exit__(self, exc_type: Any, exc_val: Any, exc_tb: Any):  # type: ignore[override]
        if torch._jit_internal.is_scripting():
            return
        return super().__exit__(exc_type, exc_val, exc_tb)

    def __call__(self, func):
        if torch._jit_internal.is_scripting():
            return func
        return super().__call__(func)


# Preserved only for BC reasons
@deprecated(
    "`torch.cuda.amp.autocast_mode._cast(value, dtype)` is deprecated. "
    "Please use `torch.amp.autocast_mode._cast(value, 'cuda', dtype)` instead.",
    category=FutureWarning,
)
def _cast(value, dtype):
    return torch.amp.autocast_mode._cast(value, "cuda", dtype)


@deprecated(
    "`torch.cuda.amp.custom_fwd(args...)` is deprecated. "
    "Please use `torch.amp.custom_fwd(args..., device_type='cuda')` instead.",
    category=FutureWarning,
)
def custom_fwd(fwd=None, *, cast_inputs=None):
    """
    ``torch.cuda.amp.custom_fwd(args...)`` is deprecated. Please use
    ``torch.amp.custom_fwd(args..., device_type='cuda')`` instead.
    """
    return functools.partial(torch.amp.custom_fwd, device_type="cuda")(
        fwd=fwd, cast_inputs=cast_inputs
    )


@deprecated(
    "`torch.cuda.amp.custom_bwd(args...)` is deprecated. "
    "Please use `torch.amp.custom_bwd(args..., device_type='cuda')` instead.",
    category=FutureWarning,
)
def custom_bwd(bwd):
    """
    ``torch.cuda.amp.custom_bwd(args...)`` is deprecated. Please use
    ``torch.amp.custom_bwd(args..., device_type='cuda')`` instead.
    """
    return functools.partial(torch.amp.custom_bwd, device_type="cuda")(bwd)
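
Taken together, these shims map the old decorator spellings onto torch.amp, where device_type is an explicit keyword argument. A hedged migration sketch against the replacement API the deprecation messages point to, assuming a CUDA device is available (ScaledMM and its tensors are hypothetical, not part of this diff):

import torch

class ScaledMM(torch.autograd.Function):
    @staticmethod
    @torch.amp.custom_fwd(device_type="cuda", cast_inputs=torch.float32)  # was torch.cuda.amp.custom_fwd
    def forward(ctx, a, b):
        ctx.save_for_backward(a, b)
        return a @ b

    @staticmethod
    @torch.amp.custom_bwd(device_type="cuda")  # was torch.cuda.amp.custom_bwd
    def backward(ctx, grad):
        a, b = ctx.saved_tensors
        return grad @ b.t(), a.t() @ grad

x = torch.randn(4, 8, device="cuda", requires_grad=True)
w = torch.randn(8, 2, device="cuda", requires_grad=True)
with torch.amp.autocast("cuda"):  # was torch.cuda.amp.autocast()
    out = ScaledMM.apply(x, w)
out.sum().backward()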

torch/cuda/amp/common.py

@@ -0,0 +1,11 @@
# mypy: allow-untyped-defs
from importlib.util import find_spec

import torch

__all__ = ["amp_definitely_not_available"]


def amp_definitely_not_available():
    return not (torch.cuda.is_available() or find_spec("torch_xla"))
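
A short sketch of how this helper can gate AMP in a caller (the use_amp flag is a hypothetical consumer, not part of this diff):

from torch.cuda.amp.common import amp_definitely_not_available

# True only when neither CUDA nor torch_xla can be found, i.e. AMP cannot help.
use_amp = not amp_definitely_not_available()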

torch/cuda/amp/grad_scaler.py

@@ -0,0 +1,38 @@
from typing_extensions import deprecated

import torch

# We need to keep this unused import for BC reasons
from torch.amp.grad_scaler import OptState  # noqa: F401

__all__ = ["GradScaler"]


class GradScaler(torch.amp.GradScaler):
    r"""
    See :class:`torch.amp.GradScaler`.

    ``torch.cuda.amp.GradScaler(args...)`` is deprecated. Please use ``torch.amp.GradScaler("cuda", args...)`` instead.
    """

    @deprecated(
        "`torch.cuda.amp.GradScaler(args...)` is deprecated. "
        "Please use `torch.amp.GradScaler('cuda', args...)` instead.",
        category=FutureWarning,
    )
    def __init__(
        self,
        init_scale: float = 2.0**16,
        growth_factor: float = 2.0,
        backoff_factor: float = 0.5,
        growth_interval: int = 2000,
        enabled: bool = True,
    ) -> None:
        super().__init__(
            "cuda",
            init_scale=init_scale,
            growth_factor=growth_factor,
            backoff_factor=backoff_factor,
            growth_interval=growth_interval,
            enabled=enabled,
        )
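
For reference, a minimal training-step sketch against the replacement API named in the warning, assuming a CUDA device (the model, optimizer, and data are hypothetical placeholders):

import torch

device = "cuda"
model = torch.nn.Linear(16, 4).to(device)
optimizer = torch.optim.SGD(model.parameters(), lr=1e-2)
scaler = torch.amp.GradScaler(device)  # was torch.cuda.amp.GradScaler()

x = torch.randn(8, 16, device=device)
target = torch.randn(8, 4, device=device)

optimizer.zero_grad()
with torch.amp.autocast(device, dtype=torch.float16):
    loss = torch.nn.functional.mse_loss(model(x), target)
scaler.scale(loss).backward()  # scale the loss to avoid fp16 gradient underflow
scaler.step(optimizer)         # unscales grads; skips the step if infs/NaNs appear
scaler.update()                # adjusts the scale factor for the next iteration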