Hugging Face Space — diff view of app.py (changed in commit "Update app.py").
|
@@ -10,16 +10,18 @@ from pydantic import PrivateAttr
|
|
| 10 |
|
| 11 |
# Wrapper-Klasse für das deutsche GPT-2 Modell
|
| 12 |
class GermanGPT2(LLM):
|
| 13 |
-
_pipeline: any = PrivateAttr()
|
|
|
|
|
|
|
| 14 |
|
| 15 |
def __init__(self, max_new_tokens=128, temperature=0.7, **kwargs):
|
| 16 |
super().__init__(**kwargs)
|
| 17 |
-
self.
|
| 18 |
-
self.
|
| 19 |
self._pipeline = pipeline("text-generation", model="dbmdz/german-gpt2")
|
| 20 |
|
| 21 |
def _call(self, prompt, stop=None):
|
| 22 |
-
result = self._pipeline(prompt, max_length=self.
|
| 23 |
return result[0]["generated_text"]
|
| 24 |
|
| 25 |
@property
|
|
|
|
| 10 |
|
| 11 |
# Wrapper class for the German GPT-2 model.
class GermanGPT2(LLM):
    """LangChain LLM wrapper around a Hugging Face text-generation pipeline."""

    # Generation settings, stored as pydantic private attributes so the
    # pydantic base model does not treat them as validated fields.
    _max_new_tokens: int = PrivateAttr()
    _temperature: float = PrivateAttr()
    # NOTE(review): `any` here is the builtin function, not a type —
    # presumably `typing.Any` was intended; kept as-is since the file's
    # import block is not visible here. TODO confirm and switch to Any.
    _pipeline: any = PrivateAttr()
|
| 17 |
def __init__(self, max_new_tokens=128, temperature=0.7, **kwargs):
    """Create the wrapper and eagerly load the German GPT-2 pipeline.

    Args:
        max_new_tokens: generation length limit, stored for later ``_call``s.
        temperature: sampling temperature, stored for later ``_call``s.
        **kwargs: forwarded unchanged to the ``LLM`` base initializer.
    """
    # The pydantic base initializer must run before private attrs are set.
    super().__init__(**kwargs)
    # Load the model up front so the first generation call pays no extra cost.
    self._pipeline = pipeline("text-generation", model="dbmdz/german-gpt2")
    self._max_new_tokens = max_new_tokens
    self._temperature = temperature
|
| 22 |
|
| 23 |
def _call(self, prompt, stop=None):
    """Generate German text continuing ``prompt``.

    Args:
        prompt: input text; the model's continuation is appended to it.
        stop: optional list of stop strings; the output is truncated at the
            first occurrence of any of them after the prompt.

    Returns:
        The prompt plus the generated continuation (the pipeline's
        ``generated_text`` convention), truncated at the earliest stop
        sequence when ``stop`` is given.
    """
    # Bug fix: the stored limit is a *new token* budget, so pass it as
    # `max_new_tokens`. The previous `max_length` counted prompt tokens too,
    # so prompts near/over 128 tokens left little or no room to generate.
    result = self._pipeline(
        prompt,
        max_new_tokens=self._max_new_tokens,
        do_sample=True,
        temperature=self._temperature,
    )
    text = result[0]["generated_text"]
    # Honor the LangChain `stop` contract instead of silently ignoring it:
    # cut at the earliest stop string found in the generated portion.
    if stop:
        search_from = len(prompt)  # pipeline output echoes the prompt first
        cut = len(text)
        for token in stop:
            i = text.find(token, search_from)
            if i != -1 and i < cut:
                cut = i
        text = text[:cut]
    return text
|
| 26 |
|
| 27 |
@property
|