Natwar committed on
Commit
1a39287
·
verified ·
1 Parent(s): 4237d47

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +35 -10
app.py CHANGED
@@ -24,17 +24,28 @@ def install_package(package, version=None):
24
  raise
25
 
26
 
27
- # Required packages — ORDER MATTERS:
28
- # tokenizers must be pinned before transformers so pip does not upgrade it.
29
- # transformers 5.x dropped the "summarization" pipeline task entirely,
30
- # so we pin to 4.46.3 (last v4 release) which still has it.
31
- # tokenizers 0.21.4 is required by transformers 4.46.3 and has
32
- # pre-built abi3 wheels that work on Python 3.13 without Rust compilation.
 
 
 
 
 
 
 
 
 
 
 
 
33
  required_packages = {
34
  "gradio": None,
35
  "torch": None,
36
- "tokenizers": "0.21.4", # must come before transformers; abi3 wheel works on Py3.13
37
- "transformers": "4.46.3", # last v4 release; still supports the summarization pipeline
38
  }
39
 
40
  for package, version in required_packages.items():
@@ -44,7 +55,22 @@ for package, version in required_packages.items():
44
  except ImportError:
45
  install_package(package, version)
46
 
47
- # Import packages
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
48
  import gradio as gr
49
  import torch
50
  from transformers import pipeline
@@ -97,7 +123,6 @@ def summarize_text(text, model_name, summary_length, num_beams):
97
 
98
  try:
99
  global summarizer
100
- # Reload pipeline on CPU whenever the model changes
101
  summarizer = pipeline("summarization", model=model_name, device=-1)
102
 
103
  length_mapping = {
 
24
  raise
25
 
26
 
27
+ def force_install(package_spec):
28
+ """Install a package unconditionally, overriding whatever version is present."""
29
+ print(f"Force-installing {package_spec}...")
30
+ try:
31
+ subprocess.check_call([
32
+ sys.executable, "-m", "pip", "install", "--no-cache-dir", package_spec
33
+ ])
34
+ except subprocess.CalledProcessError as e:
35
+ print(f"Failed to force-install {package_spec}: {e}")
36
+ raise
37
+
38
+
39
+ # Phase 1 — install missing packages.
40
+ # Notes:
41
+ # - tokenizers is NOT pre-pinned here; transformers 4.46.3 pulls tokenizers 0.20.3
42
+ # which already ships native cp313 wheels, so no Rust compilation is needed.
43
+ # - transformers is pinned to 4.46.3 (last v4 release) because v5 dropped the
44
+ # "summarization" pipeline task entirely.
45
  required_packages = {
46
  "gradio": None,
47
  "torch": None,
48
+ "transformers": "4.46.3",
 
49
  }
50
 
51
  for package, version in required_packages.items():
 
55
  except ImportError:
56
  install_package(package, version)
57
 
58
+ # Phase 2 — fix the huggingface_hub version AFTER transformers has run.
59
+ #
60
+ # Problem: transformers 4.46.3 requires huggingface-hub<1.0, so pip picks
61
+ # the latest <1.0 release (currently 0.36.x). Starting around hub 0.30,
62
+ # get_session() returns an httpx.Client when httpx is present on the system.
63
+ # transformers' own hub.py calls get_session().head(..., allow_redirects=...),
64
+ # which is a requests-style kwarg that httpx rejects with:
65
+ # TypeError: Client.head() got an unexpected keyword argument 'allow_redirects'
66
+ #
67
+ # Fix: force hub back to 0.28.1 — the last release that uses requests (not httpx)
68
+ # for get_session(), while still satisfying:
69
+ # - transformers 4.46.3 requirement: >=0.23.2, <1.0 ✓
70
+ # - gradio requirement: >=0.28.1 ✓
71
+ force_install("huggingface_hub==0.28.1")
72
+
73
+ # Now safe to import everything
74
  import gradio as gr
75
  import torch
76
  from transformers import pipeline
 
123
 
124
  try:
125
  global summarizer
 
126
  summarizer = pipeline("summarization", model=model_name, device=-1)
127
 
128
  length_mapping = {