asussome committed
Commit 78ce165
1 Parent(s): 7d21148

Training in progress, epoch 0

adapter_config.json CHANGED
@@ -1,7 +1,7 @@
 {
   "alpha_pattern": {},
   "auto_mapping": null,
-  "base_model_name_or_path": "TheBloke/Xwin-LM-7B-V0.1-GPTQ",
+  "base_model_name_or_path": "mistralai/Mistral-7B-Instruct-v0.2",
   "bias": "none",
   "fan_in_fan_out": false,
   "inference_mode": true,
@@ -20,8 +20,8 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
-    "q_proj",
-    "v_proj"
+    "v_proj",
+    "q_proj"
   ],
   "task_type": "CAUSAL_LM",
   "use_dora": false,
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:143e1bd7e3e30bbfb0c29bb9e99b4c881df3ceaf611dc62c85deb08b46a21188
-size 33571624
+oid sha256:9db85ac4c7f0bdb70b14f1602db8049f72bc290eb880e806266711be8f0d81d3
+size 27280152
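The adapter file shrank from 33,571,624 to 27,280,152 bytes, which is consistent with rank-16 fp32 LoRA weights on q_proj/v_proj: Mistral's grouped-query attention gives v_proj a smaller output dimension than the Llama-family Xwin base. A minimal loading sketch; the repo id below is a placeholder, since this page does not show it.

from peft import AutoPeftModelForCausalLM

# "asussome/example-adapter" is a hypothetical repo id; substitute the real one.
# This downloads and attaches the adapter on top of its recorded base model.
model = AutoPeftModelForCausalLM.from_pretrained("asussome/example-adapter")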
runs/May18_16-56-07_f71b21d14eff/events.out.tfevents.1716051373.f71b21d14eff.535.0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:2a766c0aa438f347b0aa58c583459a634fa8ef4731c542b25aa0632b2de2b51d
+size 5226
runs/May18_17-15-28_f71b21d14eff/events.out.tfevents.1716052533.f71b21d14eff.6348.0 ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ef8a6211630dae14a22ad6f53a8ad2b99c963bf94c1c25445d31c94969bed862
+size 5987
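Two new TensorBoard event files were added under runs/. A quick way to inspect them locally (which tags appear depends on what the Trainer logged):

from tensorboard.backend.event_processing.event_accumulator import EventAccumulator

# Load one of the new run directories and list its logged scalar tags.
acc = EventAccumulator("runs/May18_17-15-28_f71b21d14eff")
acc.Reload()
print(acc.Tags()["scalars"])  # e.g. train/loss, train/learning_rate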
tokenizer.json CHANGED
The diff for this file is too large to render. See raw diff
 
tokenizer.model CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:9e556afd44213b6bd1be2b850ebbbd98f5481437a8021afaf58ee7fb1818d347
-size 499723
+oid sha256:dadfd56d766715c61d2ef780a525ab43b8e6da4de6865bda3d95fdef5e134055
+size 493443
tokenizer_config.json CHANGED
@@ -27,14 +27,16 @@
       "special": true
     }
   },
+  "additional_special_tokens": [],
   "bos_token": "<s>",
+  "chat_template": "{{ bos_token }}{% for message in messages %}{% if (message['role'] == 'user') != (loop.index0 % 2 == 0) %}{{ raise_exception('Conversation roles must alternate user/assistant/user/assistant/...') }}{% endif %}{% if message['role'] == 'user' %}{{ '[INST] ' + message['content'] + ' [/INST]' }}{% elif message['role'] == 'assistant' %}{{ message['content'] + eos_token}}{% else %}{{ raise_exception('Only user and assistant roles are supported!') }}{% endif %}{% endfor %}",
   "clean_up_tokenization_spaces": false,
   "eos_token": "</s>",
-  "legacy": false,
-  "model_max_length": 4096,
+  "legacy": true,
+  "model_max_length": 1000000000000000019884624838656,
   "pad_token": "</s>",
-  "padding_side": "right",
   "sp_model_kwargs": {},
+  "spaces_between_special_tokens": false,
   "tokenizer_class": "LlamaTokenizer",
   "unk_token": "<unk>",
   "use_default_system_prompt": false
training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:e4c0f5d075432b6fba96fdb703ffe1593a1158b7ff6a51344c72e97b440e18b4
+oid sha256:a6cbe416fbcea976847d6201a7108e8e377ade9ed97f03b93229e9c4c4ce148d
 size 5048
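training_args.bin is the Trainer's pickled TrainingArguments object; only its hash changed, not its size. A sketch for inspecting it (transformers must be installed so the pickle can resolve the class):

import torch

# weights_only=False is required on newer torch versions to unpickle
# arbitrary objects such as TrainingArguments.
args = torch.load("training_args.bin", weights_only=False)
print(args.learning_rate, args.num_train_epochs)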