Instructions for using Curiousfox/outputs with supported libraries and notebooks.
- Libraries
- PEFT
How to use Curiousfox/outputs with PEFT:
```python
from peft import PeftModel
from transformers import AutoModelForSeq2SeqLM

# Load the base model, then attach the Curiousfox/outputs LoRA adapter on top of it.
base_model = AutoModelForSeq2SeqLM.from_pretrained("google/mt5-base")
model = PeftModel.from_pretrained(base_model, "Curiousfox/outputs")
```
- Transformers
How to use Curiousfox/outputs with Transformers:
```python
# Load model directly (loading a PEFT adapter repo this way requires the peft library).
from transformers import AutoModel

model = AutoModel.from_pretrained("Curiousfox/outputs", dtype="auto")
```
- Notebooks
- Google Colab
- Kaggle
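
Putting the snippets above together, a minimal end-to-end inference sketch might look like the following. This assumes the adapter is used for seq2seq generation on top of google/mt5-base; the input text and generation settings are placeholders, not taken from the repository:

```python
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM
from peft import PeftModel

# Tokenizer and base model come from the adapter's base checkpoint.
tokenizer = AutoTokenizer.from_pretrained("google/mt5-base")
base_model = AutoModelForSeq2SeqLM.from_pretrained("google/mt5-base")

# Attach the LoRA adapter and switch to inference mode.
model = PeftModel.from_pretrained(base_model, "Curiousfox/outputs")
model.eval()

# Placeholder input; replace with text matching the task the adapter was trained for.
inputs = tokenizer("Example input text", return_tensors="pt")
output_ids = model.generate(**inputs, max_new_tokens=64)
print(tokenizer.decode(output_ids[0], skip_special_tokens=True))
```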
Adapter configuration (`adapter_config.json`):

```json
{
  "alora_invocation_tokens": null,
  "alpha_pattern": {},
  "arrow_config": null,
  "auto_mapping": null,
  "base_model_name_or_path": "google/mt5-base",
  "bias": "none",
  "corda_config": null,
  "ensure_weight_tying": false,
  "eva_config": null,
  "exclude_modules": null,
  "fan_in_fan_out": false,
  "inference_mode": true,
  "init_lora_weights": true,
  "layer_replication": null,
  "layers_pattern": null,
  "layers_to_transform": null,
  "loftq_config": {},
  "lora_alpha": 128,
  "lora_bias": false,
  "lora_dropout": 0.1,
  "lora_ga_config": null,
  "megatron_config": null,
  "megatron_core": "megatron.core",
  "modules_to_save": null,
  "peft_type": "LORA",
  "peft_version": "0.19.1",
  "qalora_group_size": 16,
  "r": 64,
  "rank_pattern": {},
  "revision": null,
  "target_modules": [
    "q",
    "v"
  ],
  "target_parameters": null,
  "task_type": "SEQ_2_SEQ_LM",
  "trainable_token_indices": null,
  "use_bdlora": null,
  "use_dora": false,
  "use_qalora": false,
  "use_rslora": false
}
```
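
For reference, an equivalent `LoraConfig` can be reconstructed from the values above, for example when training a fresh adapter on the same base model. This is a sketch: only the fields shown in the JSON are set explicitly, and everything else is left at PEFT's defaults:

```python
from peft import LoraConfig, get_peft_model
from transformers import AutoModelForSeq2SeqLM

# Recreate the adapter configuration shown above (values copied from the JSON).
lora_config = LoraConfig(
    r=64,
    lora_alpha=128,
    lora_dropout=0.1,
    bias="none",
    target_modules=["q", "v"],      # attention query and value projections in mT5
    task_type="SEQ_2_SEQ_LM",
)

base_model = AutoModelForSeq2SeqLM.from_pretrained("google/mt5-base")
peft_model = get_peft_model(base_model, lora_config)
peft_model.print_trainable_parameters()  # sanity check: trainable vs. total parameters
```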