# --- logging / runtime settings ---
log_dir: '../storage/logs'               # directory for application logs
log_chunk_dir: '../storage/logs/chunks'  # directory for per-chunk logs
device: 'cpu'                            # compute device; presumably 'cuda' also accepted — confirm with consumer

# --- retrieval / vector store settings ---
vectorstore:
  load_from_HF: true      # lowercase booleans per YAML 1.2 (was 'True')
  reparse_files: true     # re-ingest source files on startup — confirm with loader
  data_path: '../storage/data'
  url_file_path: '../storage/data/urls.txt'
  expand_urls: true
  db_option: 'RAGatouille'   # vector DB backend selector
  db_path: '../vectorstores'
  model: 'sentence-transformers/all-MiniLM-L6-v2'  # embedding model id
  search_top_k: 3         # number of hits returned per query
  score_threshold: 0.2    # minimum retrieval score — confirm scale against retriever

# --- FAISS backend settings (used when db_option selects FAISS) ---
faiss_params:
  index_path: 'vectorstores/faiss.index'
  index_type: 'Flat'      # FAISS index type
  index_dimension: 384    # matches all-MiniLM-L6-v2 embedding size
  index_nlist: 100        # presumably IVF cell count — unused by 'Flat'; confirm
  index_nprobe: 10        # presumably cells probed at query time (IVF only); confirm

# --- ColBERT / RAGatouille backend settings ---
colbert_params:
  index_name: "new_idx"   # identifier of the ColBERT index to build/load

# --- LLM settings ---
llm_params:
  llm_arch: 'langchain'       # orchestration framework
  use_history: true           # lowercase booleans per YAML 1.2 (was 'True')
  generate_follow_up: false
  memory_window: 3            # presumably past turns kept in memory — confirm units
  llm_style: 'Normal'
  llm_loader: 'gpt-4o-mini'   # model/loader selector
  openai_params:
    temperature: 0.7
  local_llm_params:
    temperature: 0.7
    repo_id: 'TheBloke/TinyLlama-1.1B-Chat-v1.0-GGUF'
    filename: 'tinyllama-1.1b-chat-v1.0.Q5_0.gguf'
    model_path: 'storage/models/tinyllama-1.1b-chat-v1.0.Q5_0.gguf'
  stream: false
  pdf_reader: 'gpt'           # PDF parsing backend selector

# --- chat logging settings ---
chat_logging:
  log_chat: true          # lowercase booleans per YAML 1.2 (was 'True')
  platform: 'literalai'   # logging platform selector
  callbacks: true

# --- document splitter settings ---
splitter_options:
  use_splitter: true      # lowercase booleans per YAML 1.2 (was 'True')
  split_by_token: true
  remove_leftover_delimiters: true
  remove_chunks: false
  chunking_mode: 'semantic'
  chunk_size: 300         # size per chunk (tokens, given split_by_token: true)
  chunk_overlap: 30
  chunk_separators: ["\n\n", "\n", " ", ""]
  front_chunks_to_remove: null
  last_chunks_to_remove: null
  # NOTE(review): switched to double quotes so \t/\n are a real tab/newline,
  # matching chunk_separators above; single quotes made them the literal
  # two-character strings backslash-t / backslash-n.
  delimiters_to_remove: ["\t", "\n", " ", " "]
|