[misc] update pre-commit and run all files (#4752)

* [misc] update pre-commit

* [misc] run pre-commit

* [misc] remove useless configuration files

* [misc] ignore cuda for clang-format
Author: Hongxin Liu
Date: 2023-09-19 14:20:26 +08:00
Committed by: GitHub
Parent: 3c6b831c26
Commit: 079bf3cb26
1268 changed files with 50037 additions and 38444 deletions

@@ -39,6 +39,7 @@ class MixedPrecisionMixin(ABC):
                 return self.optim.zero_grad()
         ```
     """
+
     dtype: torch.dtype
 
     @abstractmethod
@@ -51,7 +52,6 @@ class MixedPrecisionMixin(ABC):
         Returns:
             Tensor: Loss value (possibly scaled).
         """
-        pass
 
     @abstractmethod
     def pre_backward_by_grad(self, tensor: Tensor, grad: Tensor) -> Tensor:
@@ -64,7 +64,6 @@ class MixedPrecisionMixin(ABC):
         Returns:
             Tensor: Gradient of the tensor (possibly scaled).
         """
-        pass
 
     @abstractmethod
    def should_skip_step(self) -> bool:
@@ -73,13 +72,10 @@ class MixedPrecisionMixin(ABC):
         Returns:
             bool: Whether to skip the step.
         """
-        pass
 
     @abstractmethod
     def pre_zero_grad(self) -> None:
-        """Called before zero_grad.
-        """
-        pass
+        """Called before zero_grad."""
 
     @abstractmethod
     def get_grad_div_scale(self) -> float:
@@ -88,4 +84,3 @@ class MixedPrecisionMixin(ABC):
         Returns:
             float: A divisor for gradient clipping or step.
         """
-        pass
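
For context, the hunks above touch the abstract `MixedPrecisionMixin` interface: after the redundant `pass` statements are stripped, each abstract method keeps only its docstring as the body. Below is a minimal sketch of how a concrete subclass might fill in that interface with a fixed loss scale; the class name `StaticScaleMixin`, the default scale of 1024, and the import path are illustrative assumptions, not the implementation shipped in this commit.

```python
# Minimal sketch of a concrete MixedPrecisionMixin (illustrative, not from this commit).
import torch
from torch import Tensor

# Assumed import path; adjust to wherever MixedPrecisionMixin lives in your checkout.
from colossalai.amp.naive_amp.mixed_precision_mixin import MixedPrecisionMixin


class StaticScaleMixin(MixedPrecisionMixin):
    """Hypothetical mixin that applies a constant loss scale."""

    dtype: torch.dtype = torch.float16

    def __init__(self, scale: float = 1024.0) -> None:
        self.scale = scale

    def pre_backward(self, loss: Tensor) -> Tensor:
        # Scale the loss so small fp16 gradients do not underflow.
        return loss * self.scale

    def pre_backward_by_grad(self, tensor: Tensor, grad: Tensor) -> Tensor:
        # Scale an externally supplied gradient the same way (pipeline-parallel path).
        return grad * self.scale

    def should_skip_step(self) -> bool:
        # A real implementation would check for inf/nan gradients here.
        return False

    def pre_zero_grad(self) -> None:
        # Nothing to reset for a static scale.
        pass

    def get_grad_div_scale(self) -> float:
        # The wrapping optimizer divides gradients by this before clipping/stepping.
        return self.scale
```

A wrapping optimizer would use such a mixin the way the class docstring's example shows: scale the loss in `backward()`, bail out of `step()` when `should_skip_step()` is true, and divide gradients by `get_grad_div_scale()` before clipping.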