Gabriel382 committed on
Commit
3a8d286
·
1 Parent(s): 2166551

setting model manually

Browse files
Files changed (2) hide show
  1. app.py +17 -6
  2. requirements.txt +1 -1
app.py CHANGED
@@ -1,17 +1,28 @@
1
  import os
2
  from fastapi import FastAPI
3
  from pydantic import BaseModel
4
- from transformers import pipeline
5
 
6
- # ✅ Force Hugging Face to store model files in a writable directory
7
  os.environ["TRANSFORMERS_CACHE"] = "/app/cache"
8
 
9
- app = FastAPI()
 
 
 
 
 
 
 
 
 
 
10
 
11
- # Load translation model (French)
12
- translator = pipeline("translation", model="facebook/m2m100_418M")
 
 
13
 
14
- # Request model
15
  class TextRequest(BaseModel):
16
  text: str
17
 
 
1
  import os
2
  from fastapi import FastAPI
3
  from pydantic import BaseModel
4
+ from transformers import pipeline, AutoModelForSeq2SeqLM, AutoTokenizer
5
 
6
+ # ✅ Define a writable cache directory
7
  os.environ["TRANSFORMERS_CACHE"] = "/app/cache"
8
 
9
+ # ✅ Ensure the cache directory exists
10
+ os.makedirs("/app/cache", exist_ok=True)
11
+
12
+ # ✅ Hugging Face model
13
+ MODEL_NAME = "facebook/m2m100_418M"
14
+
15
+ # ✅ Download the model manually before usage
16
+ print("Downloading model...")
17
+ tokenizer = AutoTokenizer.from_pretrained(MODEL_NAME, cache_dir="/app/cache")
18
+ model = AutoModelForSeq2SeqLM.from_pretrained(MODEL_NAME, cache_dir="/app/cache")
19
+ translator = pipeline("translation", model=model, tokenizer=tokenizer)
20
 
21
+ print("Model loaded successfully!")
22
+
23
+ # ✅ FastAPI App
24
+ app = FastAPI()
25
 
 
26
  class TextRequest(BaseModel):
27
  text: str
28
 
requirements.txt CHANGED
@@ -1,5 +1,5 @@
 
1
  fastapi
2
  uvicorn
3
  transformers
4
  torch
5
- sentencepiece
 
1
+ sentencepiece
2
  fastapi
3
  uvicorn
4
  transformers
5
  torch