Upload Liquid AI x Weights & Biases JP Hackathon FT UnslothLoRALFM2 (uTdHPvFpcb4OjXwlu2dBEExRTEPnY21YxIxoR3sJ1lA)
Upload model trained with Unsloth 2x faster
base model: https://wandb.ai/liquid-ai/lfm2-wandb-tokyo-hackathon/weave/objects/UnslothLoRALFM2/versions/uTdHPvFpcb4OjXwlu2dBEExRTEPnY21YxIxoR3sJ1lA
- adapter_config.json +2 -2
- adapter_model.safetensors +1 -1
adapter_config.json CHANGED
@@ -5,7 +5,7 @@
     "parent_library": "transformers.models.lfm2.modeling_lfm2",
     "unsloth_fixed": true
   },
-  "base_model_name_or_path": "unsloth/LFM2-350M",
+  "base_model_name_or_path": "unsloth/LFM2-350M-unsloth-bnb-4bit",
   "bias": "none",
   "corda_config": null,
   "eva_config": null,
@@ -19,7 +19,7 @@
   "loftq_config": {},
   "lora_alpha": 16,
   "lora_bias": false,
-  "lora_dropout": 0,
+  "lora_dropout": 0.0,
   "megatron_config": null,
   "megatron_core": "megatron.core",
   "modules_to_save": null,
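The only functional change in adapter_config.json is `base_model_name_or_path`, which now points at the 4-bit bitsandbytes variant of LFM2-350M. A minimal sketch of how PEFT consumes that field when the adapter is loaded (the adapter repo id below is a placeholder, not this repo's actual id):

```python
# Sketch: loading this LoRA adapter with PEFT. Only the base model ids
# come from the diff above; the adapter repo id is hypothetical.
from peft import PeftConfig, PeftModel
from transformers import AutoModelForCausalLM, AutoTokenizer

adapter_id = "your-username/lfm2-350m-hackathon-lora"  # placeholder adapter repo

# PeftConfig reads adapter_config.json, including base_model_name_or_path,
# which after this commit resolves to the 4-bit bnb quantized base.
peft_config = PeftConfig.from_pretrained(adapter_id)
print(peft_config.base_model_name_or_path)  # "unsloth/LFM2-350M-unsloth-bnb-4bit"

# Load the base model named in the config, then attach the LoRA weights
# stored in adapter_model.safetensors on top of it.
base = AutoModelForCausalLM.from_pretrained(peft_config.base_model_name_or_path)
tokenizer = AutoTokenizer.from_pretrained(peft_config.base_model_name_or_path)
model = PeftModel.from_pretrained(base, adapter_id)
```

Note that the 4-bit base requires `bitsandbytes` to be installed; loading against the full-precision `unsloth/LFM2-350M` also works, since the LoRA tensors themselves are unchanged by the quantization of the base.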
adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
+oid sha256:3c0159e42b34a1891c1f1ed38956965cf1556602cb7d448250f665278a5634fb
 size 20073168
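The safetensors file itself is stored in Git LFS, so the diff only shows the pointer: a sha256 oid and the byte size. A minimal sketch of verifying a downloaded adapter_model.safetensors against that pointer, assuming a local file path:

```python
# Sketch: check a downloaded adapter_model.safetensors against the Git LFS
# pointer above. The hash and size come from the diff; the local path is assumed.
import hashlib
from pathlib import Path

EXPECTED_SHA256 = "3c0159e42b34a1891c1f1ed38956965cf1556602cb7d448250f665278a5634fb"
EXPECTED_SIZE = 20073168  # bytes, from the "size" line of the pointer

path = Path("adapter_model.safetensors")  # assumed download location

# Hash the file in 1 MiB chunks to avoid loading it all into memory.
digest = hashlib.sha256()
with path.open("rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        digest.update(chunk)

assert path.stat().st_size == EXPECTED_SIZE, "size mismatch with LFS pointer"
assert digest.hexdigest() == EXPECTED_SHA256, "sha256 mismatch with LFS pointer"
print("adapter_model.safetensors matches the LFS pointer")
```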