mirror of https://github.com/hpcaitech/ColossalAI.git
[pre-commit.ci] auto fixes from pre-commit.com hooks
for more information, see https://pre-commit.ci
This commit is contained in:
parent 624b5978bb
commit 6f61be451f
@@ -81,11 +81,11 @@ with gr.Blocks(css=CSS) as demo:
     )
     with gr.Row():
         btn = gr.UploadButton("📁", file_types=["file"], file_count="multiple", size="sm")
-        restart_btn = gr.Button(str("\u21BB"), elem_id="restart-btn", scale=1)
+        restart_btn = gr.Button(str("\u21bb"), elem_id="restart-btn", scale=1)
         txt = gr.Textbox(
             scale=8,
             show_label=False,
-            placeholder="Enter text and press enter, or use 📁 to upload files, click \u21BB to clear loaded files and restart chat",
+            placeholder="Enter text and press enter, or use 📁 to upload files, click \u21bb to clear loaded files and restart chat",
             container=True,
             autofocus=True,
         )
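The only change in this hunk is the case of the hex digits in the \u21BB escape. A minimal standalone check (not part of the commit) that the two spellings name the same character, so the restart button's label and the placeholder text are unaffected:

# Standalone sketch, assuming nothing beyond the Python standard library:
# the hex digits of a \u escape are case-insensitive, so this hook only
# changes source style, never the resulting string.
old_label = "\u21BB"  # CLOCKWISE OPEN CIRCLE ARROW, uppercase hex digits
new_label = "\u21bb"  # same code point, lowercase hex digits

assert old_label == new_label == "↻"
assert ord(new_label) == 0x21BB
print(f"button label unchanged: {new_label!r}")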
@@ -1,6 +1,6 @@
 """This code is adapted from Alpa
 https://github.com/alpa-projects/alpa/
-with some changes. """
+with some changes."""

 import multiprocessing
 import time
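The docstring hunks in this commit (here and below) all make the same whitespace fix: the stray space left before the closing triple quote is dropped. A short standalone comparison, not taken from the repo, of the two forms:

# Hypothetical module docstrings, shown only to illustrate the hook's effect.
before = """This code is adapted from Alpa
https://github.com/alpa-projects/alpa/
with some changes. """
after = """This code is adapted from Alpa
https://github.com/alpa-projects/alpa/
with some changes."""

assert before != after           # __doc__ differs by one trailing space
assert before.rstrip() == after  # nothing else changes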
@@ -1,6 +1,6 @@
 """This code is adapted from Alpa
 https://github.com/alpa-projects/alpa/
-with some changes. """
+with some changes."""

 import operator
 from dataclasses import dataclass
@@ -17,7 +17,7 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-""" PyTorch OpenMoE model."""
+"""PyTorch OpenMoE model."""
 import math
 from typing import List, Optional, Tuple, Union

@@ -1,6 +1,6 @@
 """This code is from NVIDIA apex:
 https://github.com/NVIDIA/apex
-with some changes. """
+with some changes."""

 import numbers

@@ -1,4 +1,4 @@
-""" adapted from https://github.com/jiaweizzhao/GaLore/blob/master/galore_torch/adamw8bit.py"""
+"""adapted from https://github.com/jiaweizzhao/GaLore/blob/master/galore_torch/adamw8bit.py"""

 import warnings
 from collections import defaultdict
@@ -1,4 +1,4 @@
-""" adapted from https://github.com/jiaweizzhao/GaLore/blob/master/galore_torch/adamw8bit.py"""
+"""adapted from https://github.com/jiaweizzhao/GaLore/blob/master/galore_torch/adamw8bit.py"""

 import warnings
 from typing import List
@@ -1,4 +1,4 @@
-""" PyTorch ChatGLM model. """
+"""PyTorch ChatGLM model."""

 from typing import List, Optional, Tuple

@@ -34,8 +34,8 @@ class PreTrainingDataset:
         self.do_whole_word_mask = do_whole_word_mask
         self.max_predictions_per_seq = max_predictions_per_seq
         self.vocab_words = list(tokenizer.vocab.keys())
-        self.rec = re.compile("[\u4E00-\u9FA5]")
-        self.whole_rec = re.compile("##[\u4E00-\u9FA5]")
+        self.rec = re.compile("[\u4e00-\u9fa5]")
+        self.whole_rec = re.compile("##[\u4e00-\u9fa5]")

         self.mlm_p = 0.15
         self.mlm_mask_p = 0.8
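As with the button label above, only the case of the hex escapes changes here; the character class still covers U+4E00 through U+9FA5, the common CJK Unified Ideographs block used for whole-word masking. A standalone check, not part of the diff:

import re

# The escapes are resolved by the Python parser before re.compile ever sees
# them, so both spellings produce byte-for-byte identical patterns.
rec_old = re.compile("[\u4E00-\u9FA5]")
rec_new = re.compile("[\u4e00-\u9fa5]")

assert rec_old.pattern == rec_new.pattern
assert rec_new.match("语") and rec_new.match("言")  # Chinese characters match
assert rec_new.match("a") is None                   # ASCII does not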
@@ -12,7 +12,7 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-""" PyTorch DeBERTa-v2 model."""
+"""PyTorch DeBERTa-v2 model."""

 import math
 from collections.abc import Sequence