
Commit

fix lint (#856)
* fix lint

* fix lint
HIT-cwh committed Jul 19, 2024
1 parent 27cf856 commit f49ac98
Showing 7 changed files with 17 additions and 17 deletions.
@@ -36,7 +36,7 @@
 use_varlen_attn = False
 
 # Data
-data_files = ['/root/ld/pull_request/xtuner/xtuner/configs/custom_dataset/pretrain/minicpm/pretrain.json']
+data_files = ['/path/to/json/file.json']
 max_length = 2048
 pack_to_max_length = True
 
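The two pretrain-config hunks above and below replace a developer-local path with a generic placeholder. As a quick sanity check that a custom JSON file will load, the hedged Python sketch below assumes the config hands data_files to datasets.load_dataset with the 'json' loader, as xtuner custom-dataset pretrain configs are commonly wired; the path is the placeholder from the diff, not a real file.

# Hedged sketch: confirm a custom pretrain JSON loads via datasets.load_dataset.
# Assumption: the config passes `data_files` to the 'json' loader.
from datasets import load_dataset

data_files = ['/path/to/json/file.json']  # placeholder path from the diff
ds = load_dataset('json', data_files=data_files, split='train')
print(len(ds), ds.column_names)  # e.g. a 'text' column for pretraining data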
@@ -36,7 +36,7 @@
 use_varlen_attn = False
 
 # Data
-data_files = ['/root/ld/pull_request/xtuner/xtuner/configs/custom_dataset/pretrain/minicpm/pretrain.json']
+data_files = ['/path/to/json/file.json']
 max_length = 2048
 pack_to_max_length = True
 
8 changes: 5 additions & 3 deletions xtuner/configs/minicpm/1_2b/minicpm_1b_dpo_qlora.py
@@ -1,13 +1,15 @@
 # Copyright (c) OpenMMLab. All rights reserved.
-from datasets import load_dataset
 import torch
+from datasets import load_dataset
 from mmengine.dataset import DefaultSampler
 from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook,
                             LoggerHook, ParamSchedulerHook)
 from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR
-from torch.optim import AdamW
-from transformers import AutoModelForCausalLM, AutoTokenizer,BitsAndBytesConfig
 from peft import LoraConfig
+from torch.optim import AdamW
+from transformers import (AutoModelForCausalLM, AutoTokenizer,
+                          BitsAndBytesConfig)
+
 from xtuner.dataset.collate_fns.preference_collate_fn import \
     preference_collate_fn
 from xtuner.dataset.preference_dataset import (build_preference_dataset,
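The QLoRA config above keeps BitsAndBytesConfig in its re-ordered transformers import. For context, the hedged sketch below shows the kind of 4-bit quantization setup that import typically serves in a QLoRA config; the quantization arguments and model id are illustrative assumptions, not values taken from this commit.

# Hedged sketch: typical 4-bit quantization wiring that motivates keeping the
# BitsAndBytesConfig import in a QLoRA config. All values are assumptions.
import torch
from transformers import AutoModelForCausalLM, BitsAndBytesConfig

quantization_config = BitsAndBytesConfig(
    load_in_4bit=True,                     # assumption: 4-bit weights
    bnb_4bit_compute_dtype=torch.float16,  # assumption: fp16 compute dtype
    bnb_4bit_quant_type='nf4')             # assumption: NF4 quantization
model = AutoModelForCausalLM.from_pretrained(
    'openbmb/MiniCPM-1B-sft-bf16',         # hypothetical model id
    quantization_config=quantization_config,
    trust_remote_code=True,
    torch_dtype=torch.float16)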
5 changes: 2 additions & 3 deletions xtuner/configs/minicpm/1_2b/minicpm_1b_lora_alpaca_zh_e3.py
@@ -7,8 +7,7 @@
 from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR
 from peft import LoraConfig
 from torch.optim import AdamW
-from transformers import (AutoModelForCausalLM, AutoTokenizer,
-                          BitsAndBytesConfig)
+from transformers import AutoModelForCausalLM, AutoTokenizer
 
 from xtuner.dataset import process_hf_dataset
 from xtuner.dataset.collate_fns import default_collate_fn
@@ -78,7 +77,7 @@
         pretrained_model_name_or_path=pretrained_model_name_or_path,
         trust_remote_code=True,
         torch_dtype=torch.float16,
-        ),
+    ),
     lora=dict(
         type=LoraConfig,
         r=64,
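The second hunk above shows the LoRA branch of the model dict with r=64. The hedged sketch below builds the equivalent peft object directly, to show what a lora=dict(type=LoraConfig, ...) entry ultimately constructs; only r=64 comes from the diff, and the remaining arguments are illustrative assumptions.

# Hedged sketch: the peft LoraConfig that a `lora=dict(type=LoraConfig, r=64, ...)`
# entry ultimately constructs. Only r=64 is taken from the diff above.
from peft import LoraConfig

lora_config = LoraConfig(
    r=64,                   # rank shown in the config diff
    lora_alpha=16,          # assumption: common companion value in similar configs
    lora_dropout=0.1,       # assumption
    bias='none',            # assumption
    task_type='CAUSAL_LM')  # assumption: causal-LM fine-tuning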
8 changes: 5 additions & 3 deletions xtuner/configs/minicpm/2b/minicpm_2b_dpo_qlora.py
@@ -1,13 +1,15 @@
 # Copyright (c) OpenMMLab. All rights reserved.
-from datasets import load_dataset
 import torch
+from datasets import load_dataset
 from mmengine.dataset import DefaultSampler
 from mmengine.hooks import (CheckpointHook, DistSamplerSeedHook, IterTimerHook,
                             LoggerHook, ParamSchedulerHook)
 from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR
-from torch.optim import AdamW
-from transformers import AutoModelForCausalLM, AutoTokenizer,BitsAndBytesConfig
 from peft import LoraConfig
+from torch.optim import AdamW
+from transformers import (AutoModelForCausalLM, AutoTokenizer,
+                          BitsAndBytesConfig)
+
 from xtuner.dataset.collate_fns.preference_collate_fn import \
     preference_collate_fn
 from xtuner.dataset.preference_dataset import (build_preference_dataset,
5 changes: 2 additions & 3 deletions xtuner/configs/minicpm/2b/minicpm_2b_lora_alpaca_zh_e3.py
@@ -7,8 +7,7 @@
 from mmengine.optim import AmpOptimWrapper, CosineAnnealingLR, LinearLR
 from peft import LoraConfig
 from torch.optim import AdamW
-from transformers import (AutoModelForCausalLM, AutoTokenizer,
-                          BitsAndBytesConfig)
+from transformers import AutoModelForCausalLM, AutoTokenizer
 
 from xtuner.dataset import process_hf_dataset
 from xtuner.dataset.collate_fns import default_collate_fn
@@ -78,7 +77,7 @@
         pretrained_model_name_or_path=pretrained_model_name_or_path,
         trust_remote_code=True,
         torch_dtype=torch.float16,
-        ),
+    ),
     lora=dict(
         type=LoraConfig,
         r=64,
4 changes: 1 addition & 3 deletions xtuner/utils/templates.py
@@ -130,9 +130,7 @@
         SYSTEM=('[INST] {system} [/INST]\n'),
         INSTRUCTION=('[INST] {input} [/INST]'),
         SEP='\n'),
-    minicpm=dict(
-        INSTRUCTION=('<用户> {input} <AI>'),
-        SEP='\n'),
+    minicpm=dict(INSTRUCTION=('<用户> {input} <AI>'), SEP='\n'),
     gemma=dict(
         # `system` field is extended by xtuner
         SYSTEM=('<start_of_turn>system\n{system}<end_of_turn>\n'),
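The templates.py change only collapses the minicpm entry onto a single line; its fields are unchanged. A minimal sketch of how those fields render a prompt, using only what the diff shows (the input string is made up):

# Hedged sketch: rendering a single-turn prompt with the minicpm template
# fields shown in the diff. The input string is made up.
minicpm = dict(INSTRUCTION='<用户> {input} <AI>', SEP='\n')

prompt = minicpm['INSTRUCTION'].format(input='How is the weather today?')
print(prompt)  # <用户> How is the weather today? <AI>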
