{
  "léger": {
    "vision": {
      "model": "llava:34b-v1.6-fp16",
      "language": "en",
      "temperature": 0.2,
      "top_p": 0.95,
      "top_k": 40,
      "max_tokens": 1024
    },
    "translation": {
      "model": "mistral:latest",
      "language": "fr",
      "temperature": 0.1,
      "top_p": 0.95,
      "top_k": 40,
      "max_tokens": 1024
    },
    "summary": {
      "model": "mistral:latest",
      "language": "fr",
      "temperature": 0.2,
      "top_p": 0.95,
      "top_k": 40,
      "max_tokens": 1024
    },
    "rewriter": {
      "model": "mistral:latest",
      "language": "fr",
      "temperature": 0.3,
      "top_p": 0.95,
      "top_k": 40,
      "max_tokens": 1024
    }
  },
  "moyen": {
    "vision": {
      "model": "llava:34b-v1.6-fp16",
      "language": "en",
      "temperature": 0.2,
      "top_p": 0.95,
      "top_k": 40,
      "max_tokens": 1024
    },
    "translation": {
      "model": "qwen2.5:72b-instruct-q8_0",
      "language": "fr",
      "temperature": 0.1,
      "top_p": 0.95,
      "top_k": 40,
      "max_tokens": 1024
    },
    "summary": {
      "model": "deepseek-r1:70b-llama-distill-q8_0",
      "language": "fr",
      "temperature": 0.2,
      "top_p": 0.95,
      "top_k": 40,
      "max_tokens": 1024
    },
    "rewriter": {
      "model": "mistral:latest",
      "language": "fr",
      "temperature": 0.3,
      "top_p": 0.95,
      "top_k": 40,
      "max_tokens": 1024
    }
  },
  "avancé": {
    "vision": {
      "model": "llama3.2-vision:90b-instruct-q8_0",
      "language": "en",
      "temperature": 0.2,
      "top_p": 0.95,
      "top_k": 40,
      "max_tokens": 2048
    },
    "translation": {
      "model": "deepseek-r1:70b-llama-distill-q8_0",
      "language": "fr",
      "temperature": 0.1,
      "top_p": 0.95,
      "top_k": 40,
      "max_tokens": 2048
    },
    "summary": {
      "model": "deepseek-r1:70b-llama-distill-q8_0",
      "language": "fr",
      "temperature": 0.2,
      "top_p": 0.95,
      "top_k": 40,
      "max_tokens": 2048
    },
    "rewriter": {
      "model": "deepseek-r1:70b-llama-distill-q8_0",
      "language": "fr",
      "temperature": 0.3,
      "top_p": 0.95,
      "top_k": 40,
      "max_tokens": 2048
    }
  }
}