[fx] added testing for all gpt variants (#1210)

* [fx] added testing for all gpt variants

* polish code

* polish code
Frank Lee
2022-07-06 14:03:13 +08:00
committed by GitHub
parent 189946c5c4
commit 2d13a45a3b
8 changed files with 136 additions and 72 deletions


@@ -1,8 +1,7 @@
 import operator
 import torch
 from torch.fx.proxy import Proxy, Attribute
-from typing import List, Union
-from torch.utils._pytree import PyTree
+from typing import List, Union, Any

 __all__ = ['ColoProxy']
@@ -14,34 +13,33 @@ class ColoProxy(Proxy):
     Usage:
         proxy = tracer.create_proxy(...)
-        proxy.meta_tensor = torch.empty(4, 2, device='meta')
+        proxy.meta_data = torch.empty(4, 2, device='meta')
         print(len(proxy)) # expect output 4
     """

     def __init__(self, *args, **kwargs):
         super().__init__(*args, **kwargs)
-        self._meta_tensor = None
+        self._meta_data = None

     @property
-    def meta_tensor(self):
-        return self._meta_tensor
+    def meta_data(self):
+        return self._meta_data

-    @meta_tensor.setter
-    def meta_tensor(self, tensor: Union[List[torch.Tensor], torch.Tensor]):
-
-        def _is_meta(item):
-            assert torch.is_tensor(item) and item.is_meta
-
-        torch.fx.node.map_aggregate(tensor, _is_meta)
-        self._meta_tensor = tensor
+    @meta_data.setter
+    def meta_data(self, data: Any):
+        self._meta_data = data

     @property
-    def has_meta_tensor(self):
-        return self.meta_tensor is not None
+    def has_meta_data(self):
+        return self._meta_data is not None

-    def _assert_has_meta(self):
-        assert self.has_meta_tensor, f'Meta tensor is not set for {self.node.name}'
+    def _assert_meta_data_is_tensor(self):
+        assert torch.is_tensor(
+            self._meta_data) and self._meta_data.is_meta, f'Meta data is not a meta tensor for {self.node.name}'
+
+    def _assert_has_meta_data(self):
+        assert self._meta_data, f'Meta data is not set for {self.node.name}'

     @property
     def device(self):
@@ -51,37 +49,37 @@ class ColoProxy(Proxy):
     @property
     def dtype(self):
-        self._assert_has_meta()
-        return self.meta_tensor.dtype
+        self._assert_meta_data_is_tensor()
+        return self.meta_data.dtype

     @property
     def shape(self):
-        self._assert_has_meta()
-        return self.meta_tensor.shape
+        self._assert_meta_data_is_tensor()
+        return self.meta_data.shape

     def dim(self):
-        self._assert_has_meta()
-        return self.meta_tensor.dim()
+        self._assert_meta_data_is_tensor()
+        return self.meta_data.dim()

     def size(self, dim: int = None):
-        self._assert_has_meta()
+        self._assert_meta_data_is_tensor()
         if dim:
-            return self.meta_tensor.size(dim=dim)
+            return self.meta_data.size(dim=dim)
         else:
             # size(dim=None) will trigger runtime error for meta tensor
-            return self.meta_tensor.size()
+            return self.meta_data.size()

     def __len__(self):
-        self._assert_has_meta()
-        return len(self.meta_tensor)
+        self._assert_has_meta_data()
+        return len(self.meta_data)

     def __bool__(self):
-        self._assert_has_meta()
-        return self.meta_tensor
+        self._assert_has_meta_data()
+        return self.meta_data

     def __getattr__(self, k):
-        if k == "metadata":
-            return self.meta_tensor
+        if k == "meta_data":
+            return self.__getattribute__(k)
         # note: not added to the graph yet, if this is a method call
         # we peephole optimize to the method invocation
         return Attribute(self, k)
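
For context, a minimal usage sketch (not part of this commit) of the renamed meta_data API. The module path and the bare placeholder-node setup below are assumptions made purely for illustration; during real tracing, proxies are created by the tracer via tracer.create_proxy(...).

import torch
from torch.fx import Graph
from colossalai.fx.proxy import ColoProxy  # assumed module path

# Build a bare node so a ColoProxy can be constructed standalone (illustration only).
graph = Graph()
node = graph.placeholder('x')
proxy = ColoProxy(node)

# Attach a meta tensor as the proxy's meta data (replaces the old meta_tensor attribute).
proxy.meta_data = torch.empty(4, 2, device='meta')

assert proxy.has_meta_data
print(proxy.shape)   # torch.Size([4, 2])
print(proxy.dtype)   # torch.float32
print(proxy.dim())   # 2
print(len(proxy))    # 4, matching the docstring example

Unlike the old meta_tensor setter, the new setter accepts arbitrary data; the tensor-specific accessors (dtype, shape, dim, size) now guard themselves through _assert_meta_data_is_tensor instead.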