From dec8fdb2292bb3d2e198b943820854b48f67e4bc Mon Sep 17 00:00:00 2001
From: CjhHa1
Date: Thu, 11 Apr 2024 10:12:31 +0800
Subject: [PATCH] [Inference] resolve rebase conflicts fix

---
 colossalai/inference/core/engine.py       | 2 +-
 colossalai/shardformer/layer/embedding.py | 1 -
 2 files changed, 1 insertion(+), 2 deletions(-)

diff --git a/colossalai/inference/core/engine.py b/colossalai/inference/core/engine.py
index 6ea555996..9e5228019 100644
--- a/colossalai/inference/core/engine.py
+++ b/colossalai/inference/core/engine.py
@@ -1,6 +1,6 @@
 import time
 from itertools import count
-from typing import Dict, List, Optional, Tuple, Union, Iterable
+from typing import Dict, List, Optional, Tuple, Union
 
 import numpy as np
 import torch
diff --git a/colossalai/shardformer/layer/embedding.py b/colossalai/shardformer/layer/embedding.py
index d081b2040..ab2503a60 100644
--- a/colossalai/shardformer/layer/embedding.py
+++ b/colossalai/shardformer/layer/embedding.py
@@ -175,7 +175,6 @@ class VocabParallelEmbedding1D(ParallelModule):
             he initializer of weight, defaults to normal initializer.
 
     The ``args`` and ``kwargs`` used in :class:``torch.nn.functional.embedding`` should contain:
-    ::
 
         max_norm (float, optional): If given, each embedding vector with norm larger than
             max_norm is renormalized to have norm max_norm. Note: this will modify weight in-place.