Vision-CAIR committed on
Commit b1b5382
1 Parent(s): cddb047

Delete checkpoints

checkpoints/config.json DELETED
@@ -1,83 +0,0 @@
- {
-   "_name_or_path": "/tmp/iopath_cache/manifold_cache/tree/users/shenx/finetune/09051611-cambrian_qwenvl_t576_ov",
-   "architectures": [
-     "CambrianQwenForCausalLM"
-   ],
-   "attention_bias": false,
-   "attention_dropout": 0.0,
-   "bos_token_id": 151643,
-   "connect_layer": 2,
-   "connector_depth": 3,
-   "connector_only": true,
-   "dino_threshold": 0.83,
-   "drop_threshold": 0.7,
-   "eos_token_id": 151645,
-   "frame_pos": false,
-   "freeze_mm_mlp_adapter": false,
-   "hidden_act": "silu",
-   "hidden_size": 3584,
-   "highres": true,
-   "highres_connect": false,
-   "image_aspect_ratio": "pad",
-   "image_position": 91,
-   "image_token_len": 144,
-   "initializer_range": 0.02,
-   "intermediate_size": 18944,
-   "is_st_sampler": false,
-   "lowres_token": 8,
-   "max_position_embeddings": 32768,
-   "max_window_layers": 28,
-   "mm_patch_merge_type": "flat",
-   "mm_projector_lr": null,
-   "mm_projector_type": "sva",
-   "mm_use_im_patch_token": false,
-   "mm_use_im_start_end": false,
-   "mm_vision_sampler_lr": null,
-   "mm_vision_select_feature": "patch",
-   "mm_vision_select_layer": -2,
-   "mm_vision_tower_aux_list": [
-     "siglip/CLIP-ViT-SO400M-14-384",
-     "facebook/dinov2-giant-res378"
-   ],
-   "mm_vision_tower_aux_token_len_list": [
-     576,
-     576
-   ],
-   "mm_vision_tower_lr": null,
-   "model_type": "cambrian_qwen",
-   "num_attention_heads": 28,
-   "num_hidden_layers": 28,
-   "num_key_value_heads": 4,
-   "num_of_vision_sampler_layers": 10,
-   "num_query_group": 1,
-   "pretraining_tp": 1,
-   "query_num_list": [
-     144
-   ],
-   "rms_norm_eps": 1e-06,
-   "rope_scaling": null,
-   "rope_theta": 1000000.0,
-   "sliding_window": null,
-   "spmd_debug": null,
-   "spmd_fsdp_sharding": null,
-   "spmd_mesh": null,
-   "start_of_vision_sampler_layers": 0,
-   "stride_of_vision_sampler_layers": 3,
-   "tie_word_embeddings": false,
-   "tokenizer_model_max_length": 10000,
-   "tokenizer_padding_side": "right",
-   "torch_dtype": "float32",
-   "transformers_version": "4.44.2",
-   "tune_mm_mlp_adapter": false,
-   "unfreeze_mm_vision_tower": false,
-   "use_cache": false,
-   "use_mm_proj": true,
-   "use_pos_skipping": false,
-   "use_sliding_window": false,
-   "vision_hidden_size": 1024,
-   "vision_tower_aux_token_len_list": [
-     576,
-     576
-   ],
-   "vocab_size": 152064
- }
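For reference, the deleted config.json describes a `cambrian_qwen` multimodal model: a Qwen2-7B-sized language backbone (hidden size 3584, 28 layers) with SigLIP and DINOv2 auxiliary vision towers. Below is a minimal sketch of inspecting such a config from a local copy of the file (the `checkpoints/` path is an assumption mirroring the layout this commit removes); note that `cambrian_qwen` is not a stock `transformers` model type, so loading it through `AutoConfig`/`AutoModel` would additionally require the repository's own modeling code.

```python
import json

# Hypothetical local path mirroring the file deleted by this commit.
with open("checkpoints/config.json") as f:
    cfg = json.load(f)

print(cfg["model_type"])                             # cambrian_qwen (custom architecture)
print(cfg["hidden_size"], cfg["num_hidden_layers"])  # 3584, 28 -> Qwen2-7B-sized backbone
print(cfg["mm_vision_tower_aux_list"])               # SigLIP SO400M-14-384 + DINOv2-giant towers
print(cfg["image_token_len"], cfg["lowres_token"])   # 144 image tokens, 8 low-res tokens
```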
 
checkpoints/merges.txt DELETED
The diff for this file is too large to render. See raw diff
 
checkpoints/pytorch_model.bin DELETED
@@ -1,3 +0,0 @@
- version https://git-lfs.github.com/spec/v1
- oid sha256:a2318caf68e95e3b06abc940d405ae04660e3f1c1441e1038e5f34c99c46a0df
- size 15343521646
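The deleted pytorch_model.bin was stored through Git LFS, so only the pointer above lived in the Git history; it records the SHA-256 (`oid`) and byte size of the roughly 15.3 GB weight file. A minimal sketch of verifying a downloaded copy against that pointer follows (the local path is an assumption):

```python
import hashlib
import os

path = "checkpoints/pytorch_model.bin"  # assumed local copy of the deleted weights
expected_oid = "a2318caf68e95e3b06abc940d405ae04660e3f1c1441e1038e5f34c99c46a0df"
expected_size = 15343521646  # bytes

h = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # hash in 1 MiB chunks
        h.update(chunk)

assert os.path.getsize(path) == expected_size, "size does not match the LFS pointer"
assert h.hexdigest() == expected_oid, "sha256 does not match the LFS pointer"
print("checkpoint matches the LFS pointer")
```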
 
checkpoints/special_tokens_map.json DELETED
@@ -1,20 +0,0 @@
- {
-   "additional_special_tokens": [
-     "<|im_start|>",
-     "<|im_end|>"
-   ],
-   "eos_token": {
-     "content": "<|im_end|>",
-     "lstrip": false,
-     "normalized": false,
-     "rstrip": false,
-     "single_word": false
-   },
-   "pad_token": {
-     "content": "<|endoftext|>",
-     "lstrip": false,
-     "normalized": false,
-     "rstrip": false,
-     "single_word": false
-   }
- }
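The special-tokens map above is what binds `<|im_end|>` to the EOS role and `<|endoftext|>` to padding when the tokenizer is loaded. A minimal check, assuming the tokenizer files are still available in a local directory (this commit removes them from the repo):

```python
from transformers import AutoTokenizer

# Assumed local directory containing the tokenizer files deleted by this commit.
tok = AutoTokenizer.from_pretrained("checkpoints")

print(tok.eos_token, tok.eos_token_id)  # <|im_end|> 151645
print(tok.pad_token, tok.pad_token_id)  # <|endoftext|> 151643
print(tok.additional_special_tokens)    # ['<|im_start|>', '<|im_end|>']
```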
 
checkpoints/tokenizer.json DELETED
The diff for this file is too large to render. See raw diff
 
checkpoints/tokenizer_config.json DELETED
@@ -1,53 +0,0 @@
- {
-   "add_prefix_space": false,
-   "added_tokens_decoder": {
-     "151643": {
-       "content": "<|endoftext|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "151644": {
-       "content": "<|im_start|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "151645": {
-       "content": "<|im_end|>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     },
-     "151646": {
-       "content": "<image>",
-       "lstrip": false,
-       "normalized": false,
-       "rstrip": false,
-       "single_word": false,
-       "special": true
-     }
-   },
-   "additional_special_tokens": [
-     "<|im_start|>",
-     "<|im_end|>"
-   ],
-   "bos_token": null,
-   "chat_template": "{% for message in messages %}{% if loop.first and messages[0]['role'] != 'system' %}{{ '<|im_start|>system\nYou are a helpful assistant.<|im_end|>\n' }}{% endif %}{{'<|im_start|>' + message['role'] + '\n' + message['content'] + '<|im_end|>' + '\n'}}{% endfor %}{% if add_generation_prompt %}{{ '<|im_start|>assistant\n' }}{% endif %}",
-   "clean_up_tokenization_spaces": false,
-   "eos_token": "<|im_end|>",
-   "errors": "replace",
-   "model_max_length": 32768,
-   "pad_token": "<|endoftext|>",
-   "padding_side": "right",
-   "processor_class": "LlavaProcessor",
-   "split_special_tokens": false,
-   "tokenizer_class": "Qwen2Tokenizer",
-   "unk_token": null
- }
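The `chat_template` above is the ChatML format used by Qwen2 tokenizers: it injects a default system prompt when none is given and wraps each turn in `<|im_start|>`/`<|im_end|>`. A minimal sketch of rendering it, again assuming a local copy of the deleted tokenizer files:

```python
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("checkpoints")  # assumed local copy

messages = [{"role": "user", "content": "Describe the video."}]
prompt = tok.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)
print(prompt)
# <|im_start|>system
# You are a helpful assistant.<|im_end|>
# <|im_start|>user
# Describe the video.<|im_end|>
# <|im_start|>assistant
```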
 
checkpoints/vocab.json DELETED
The diff for this file is too large to render. See raw diff