Vik Paruchuri
committed on
Commit
·
1202336
1
Parent(s):
5471d0c
Fix vertex
Browse files- README.md +4 -2
- marker/services/vertex.py +5 -1
README.md
CHANGED
|
@@ -317,8 +317,10 @@ All output formats will return a metadata dictionary, with the following fields:
|
|
| 317 |
When running with the `--use_llm` flag, you have a choice of services you can use:
|
| 318 |
|
| 319 |
- `Gemini` - this will use the Gemini developer API by default. You'll need to pass `--gemini_api_key` to configuration.
|
| 320 |
-
- `Google Vertex` - this will use vertex, which can be more reliable. You'll need to pass `--vertex_project_id
|
| 321 |
-
- `Ollama` - this will use local models. You can configure `--ollama_base_url` and `--ollama_model`. To use it, set `--llm_service=marker.services.
|
|
|
|
|
|
|
| 322 |
|
| 323 |
# Internals
|
| 324 |
|
|
|
|
| 317 |
When running with the `--use_llm` flag, you have a choice of services you can use:
|
| 318 |
|
| 319 |
- `Gemini` - this will use the Gemini developer API by default. You'll need to pass `--gemini_api_key` to configuration.
|
| 320 |
+
- `Google Vertex` - this will use vertex, which can be more reliable. You'll need to pass `--vertex_project_id`. To use it, set `--llm_service=marker.services.vertex.GoogleVertexService`.
|
| 321 |
+
- `Ollama` - this will use local models. You can configure `--ollama_base_url` and `--ollama_model`. To use it, set `--llm_service=marker.services.ollama.OllamaService`.
|
| 322 |
+
|
| 323 |
+
These services may have additional optional configuration as well - you can see it by viewing the classes.
|
| 324 |
|
| 325 |
# Internals
|
| 326 |
|
marker/services/vertex.py
CHANGED
|
@@ -12,7 +12,11 @@ class GoogleVertexService(BaseGeminiService):
|
|
| 12 |
vertex_location: Annotated[
|
| 13 |
str,
|
| 14 |
"Google Cloud Location for Vertex AI.",
|
| 15 |
-
] =
|
|
|
|
|
|
|
|
|
|
|
|
|
| 16 |
|
| 17 |
def get_google_client(self, timeout: int = 60):
|
| 18 |
return genai.Client(
|
|
|
|
| 12 |
vertex_location: Annotated[
|
| 13 |
str,
|
| 14 |
"Google Cloud Location for Vertex AI.",
|
| 15 |
+
] = "us-central1"
|
| 16 |
+
gemini_model_name: Annotated[
|
| 17 |
+
str,
|
| 18 |
+
"The name of the Google model to use for the service."
|
| 19 |
+
] = "gemini-1.5-flash-002"
|
| 20 |
|
| 21 |
def get_google_client(self, timeout: int = 60):
|
| 22 |
return genai.Client(
|