{
"model_type": "fingpt",
"architectures": [
"FinGPTForCausalLM"
],
"auto_map": {
"AutoModelForCausalLM": "fingpt.modeling_fingpt.FinGPTForCausalLM"
},
"base_model": "meta-llama/Llama-2-7b-hf",
"finetuning_method": "lora",
"tasks": [
"sentiment-analysis",
"relation-extraction",
"headline-classification",
"named-entity-recognition",
"question-answering",
"text-generation"
],
"languages": [
"en",
"zh"
],
"license": "mit",
"pipeline_tag": "text-generation"
}