---
# Model-card metadata for the RA-Mistral-7B fine-tune of Mistral-7B-Instruct.
base_model: mistralai/Mistral-7B-Instruct-v0.3

# Default generation parameters suggested for inference.
inference_settings:
  max_new_tokens: 150
  repetition_penalty: 1.1
  temperature: 0.7
  top_p: 0.9

library: transformers
model_id: mycholpath/RA-Mistral-7B
model_type: mistral
pipeline_tag: text-generation

tasks:
  - text-generation