Commit

tuning embedding for qwen-7b
LZHgrla committed Oct 11, 2023
1 parent 5b543b3 commit 34dc327
Showing 18 changed files with 18 additions and 0 deletions.
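
Every changed file receives the same one-line addition: the LoRA configuration used for QLoRA fine-tuning of Qwen-7B now lists its target_modules explicitly. 'c_attn', 'c_proj', 'w1', and 'w2' cover Qwen-7B's attention and MLP projection layers, and 'wte' is the token embedding; including 'wte' is the "tuning embedding" part of the commit message, since it attaches LoRA weights to the embedding layer in addition to the linear projections. As an illustration only (not part of the commit), the dict-style block in each config corresponds to roughly the following standalone peft.LoraConfig, which XTuner instantiates from the config at runtime:

    from peft import LoraConfig

    # A minimal sketch of the LoRA settings these configs now describe.
    lora_config = LoraConfig(
        r=64,
        lora_alpha=16,
        # Qwen-7B module names: the attention/MLP projections plus the
        # token-embedding layer ('wte'), the new target added by this commit.
        target_modules=['c_attn', 'c_proj', 'w1', 'w2', 'wte'],
        lora_dropout=0.1,
        bias='none',
        task_type='CAUSAL_LM',
    )

The per-file diffs below show the identical addition in each of the 18 configs.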
xtuner/configs/qwen/qwen_7b/qwen_7b_qlora_alpaca_e3.py (1 addition, 0 deletions)
@@ -76,6 +76,7 @@
 type=LoraConfig,
 r=64,
 lora_alpha=16,
+target_modules=['c_attn', 'c_proj', 'w1', 'w2', 'wte'],
 lora_dropout=0.1,
 bias='none',
 task_type='CAUSAL_LM'))

@@ -78,6 +78,7 @@
 type=LoraConfig,
 r=64,
 lora_alpha=16,
+target_modules=['c_attn', 'c_proj', 'w1', 'w2', 'wte'],
 lora_dropout=0.1,
 bias='none',
 task_type='CAUSAL_LM'))

@@ -79,6 +79,7 @@
 type=LoraConfig,
 r=64,
 lora_alpha=16,
+target_modules=['c_attn', 'c_proj', 'w1', 'w2', 'wte'],
 lora_dropout=0.1,
 bias='none',
 task_type='CAUSAL_LM'))

xtuner/configs/qwen/qwen_7b/qwen_7b_qlora_alpaca_zh_e3.py (1 addition, 0 deletions)
@@ -76,6 +76,7 @@
 type=LoraConfig,
 r=64,
 lora_alpha=16,
+target_modules=['c_attn', 'c_proj', 'w1', 'w2', 'wte'],
 lora_dropout=0.1,
 bias='none',
 task_type='CAUSAL_LM'))

@@ -110,6 +110,7 @@
 type=LoraConfig,
 r=64,
 lora_alpha=16,
+target_modules=['c_attn', 'c_proj', 'w1', 'w2', 'wte'],
 lora_dropout=0.1,
 bias='none',
 task_type='CAUSAL_LM'))

@@ -80,6 +80,7 @@
 type=LoraConfig,
 r=64,
 lora_alpha=16,
+target_modules=['c_attn', 'c_proj', 'w1', 'w2', 'wte'],
 lora_dropout=0.1,
 bias='none',
 task_type='CAUSAL_LM'))

xtuner/configs/qwen/qwen_7b/qwen_7b_qlora_colorist_e5.py (1 addition, 0 deletions)
@@ -76,6 +76,7 @@
 type=LoraConfig,
 r=64,
 lora_alpha=16,
+target_modules=['c_attn', 'c_proj', 'w1', 'w2', 'wte'],
 lora_dropout=0.1,
 bias='none',
 task_type='CAUSAL_LM'))

xtuner/configs/qwen/qwen_7b/qwen_7b_qlora_lawyer_e3.py (1 addition, 0 deletions)
@@ -78,6 +78,7 @@
 type=LoraConfig,
 r=64,
 lora_alpha=16,
+target_modules=['c_attn', 'c_proj', 'w1', 'w2', 'wte'],
 lora_dropout=0.1,
 bias='none',
 task_type='CAUSAL_LM'))

xtuner/configs/qwen/qwen_7b/qwen_7b_qlora_medical_e1.py (1 addition, 0 deletions)
@@ -78,6 +78,7 @@
 type=LoraConfig,
 r=64,
 lora_alpha=16,
+target_modules=['c_attn', 'c_proj', 'w1', 'w2', 'wte'],
 lora_dropout=0.1,
 bias='none',
 task_type='CAUSAL_LM'))

@@ -76,6 +76,7 @@
 type=LoraConfig,
 r=64,
 lora_alpha=16,
+target_modules=['c_attn', 'c_proj', 'w1', 'w2', 'wte'],
 lora_dropout=0.1,
 bias='none',
 task_type='CAUSAL_LM'))

@@ -76,6 +76,7 @@
 type=LoraConfig,
 r=64,
 lora_alpha=16,
+target_modules=['c_attn', 'c_proj', 'w1', 'w2', 'wte'],
 lora_dropout=0.1,
 bias='none',
 task_type='CAUSAL_LM'))

@@ -75,6 +75,7 @@
 type=LoraConfig,
 r=64,
 lora_alpha=16,
+target_modules=['c_attn', 'c_proj', 'w1', 'w2', 'wte'],
 lora_dropout=0.1,
 bias='none',
 task_type='CAUSAL_LM'))

xtuner/configs/qwen/qwen_7b/qwen_7b_qlora_oasst1_512_e3.py (1 addition, 0 deletions)
@@ -76,6 +76,7 @@
 type=LoraConfig,
 r=64,
 lora_alpha=16,
+target_modules=['c_attn', 'c_proj', 'w1', 'w2', 'wte'],
 lora_dropout=0.1,
 bias='none',
 task_type='CAUSAL_LM'))

xtuner/configs/qwen/qwen_7b/qwen_7b_qlora_oasst1_e3.py (1 addition, 0 deletions)
@@ -76,6 +76,7 @@
 type=LoraConfig,
 r=64,
 lora_alpha=16,
+target_modules=['c_attn', 'c_proj', 'w1', 'w2', 'wte'],
 lora_dropout=0.1,
 bias='none',
 task_type='CAUSAL_LM'))

@@ -76,6 +76,7 @@
 type=LoraConfig,
 r=64,
 lora_alpha=16,
+target_modules=['c_attn', 'c_proj', 'w1', 'w2', 'wte'],
 lora_dropout=0.1,
 bias='none',
 task_type='CAUSAL_LM'))

xtuner/configs/qwen/qwen_7b/qwen_7b_qlora_openorca_e1.py (1 addition, 0 deletions)
@@ -76,6 +76,7 @@
 type=LoraConfig,
 r=64,
 lora_alpha=16,
+target_modules=['c_attn', 'c_proj', 'w1', 'w2', 'wte'],
 lora_dropout=0.1,
 bias='none',
 task_type='CAUSAL_LM'))

xtuner/configs/qwen/qwen_7b/qwen_7b_qlora_sql_e3.py (1 addition, 0 deletions)
@@ -80,6 +80,7 @@
 type=LoraConfig,
 r=64,
 lora_alpha=16,
+target_modules=['c_attn', 'c_proj', 'w1', 'w2', 'wte'],
 lora_dropout=0.1,
 bias='none',
 task_type='CAUSAL_LM'))

xtuner/configs/qwen/qwen_7b/qwen_7b_qlora_tiny_codes_e1.py (1 addition, 0 deletions)
@@ -80,6 +80,7 @@
 type=LoraConfig,
 r=64,
 lora_alpha=16,
+target_modules=['c_attn', 'c_proj', 'w1', 'w2', 'wte'],
 lora_dropout=0.1,
 bias='none',
 task_type='CAUSAL_LM'))

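As a quick sanity check outside XTuner (a minimal sketch, assuming the Hugging Face model id 'Qwen/Qwen-7B' and enough memory to load the weights; it skips the 4-bit quantization that the QLoRA configs apply), the same target_modules can be passed to PEFT directly, and print_trainable_parameters() then reports the LoRA parameters added to the embedding as well as to the projections:

    from peft import LoraConfig, get_peft_model
    from transformers import AutoModelForCausalLM

    # Qwen-7B ships custom modeling code, hence trust_remote_code=True.
    model = AutoModelForCausalLM.from_pretrained('Qwen/Qwen-7B', trust_remote_code=True)

    peft_model = get_peft_model(model, LoraConfig(
        r=64,
        lora_alpha=16,
        target_modules=['c_attn', 'c_proj', 'w1', 'w2', 'wte'],
        lora_dropout=0.1,
        bias='none',
        task_type='CAUSAL_LM'))

    # With 'wte' included, PEFT attaches LoRA weights to the token embedding too,
    # so the trainable-parameter count is slightly higher than with projections only.
    peft_model.print_trainable_parameters()

In practice these configs are launched through XTuner's own entry point (e.g. xtuner train qwen_7b_qlora_alpaca_e3), which also handles the dataset mapping and quantized model loading that this sketch omits.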
