ariG23498 HF Staff committed on
Commit
488ec1b
·
verified ·
1 Parent(s): d8f0a18

Upload lightonai_LightOnOCR-1B-1025_1.py with huggingface_hub

Browse files
Files changed (1) hide show
  1. lightonai_LightOnOCR-1B-1025_1.py +91 -0
lightonai_LightOnOCR-1B-1025_1.py ADDED
@@ -0,0 +1,91 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
# /// script
# requires-python = ">=3.12"
# dependencies = [
#     "torch",
#     "torchvision",
#     "transformers",
#     "diffusers",
#     "sentence-transformers",
#     "accelerate",
#     "peft",
#     "slack-sdk",
# ]
# ///
"""Smoke-test the lightonai/LightOnOCR-1B-1025 model on a sample OCR image.

Runs a single generation over a public test image, records success or the
failure traceback in `lightonai_LightOnOCR-1B-1025_1.txt`, alerts a Slack
channel on failure, and always uploads the result file to the
`model-metadata/code_execution_files` dataset repo.
"""

from io import BytesIO

import requests
import torch
from PIL import Image

try:
    # Model setup was missing entirely in the original script, so every run
    # raised NameError on `processor`/`model`/`device` and took the failure
    # path. NOTE(review): assumes the standard Transformers auto classes can
    # load this checkpoint — confirm against the model card.
    from transformers import AutoModelForVision2Seq, AutoProcessor

    model_id = "lightonai/LightOnOCR-1B-1025"
    device = "cuda" if torch.cuda.is_available() else "cpu"
    processor = AutoProcessor.from_pretrained(model_id)
    model = AutoModelForVision2Seq.from_pretrained(
        model_id, torch_dtype=torch.bfloat16
    ).to(device)

    # Get a test image
    image_url = "https://jeroen.github.io/images/testocr.png"
    response = requests.get(image_url)
    response.raise_for_status()  # fail fast on a bad download instead of on a corrupt image
    image = Image.open(BytesIO(response.content)).convert("RGB")

    # `display(image)` was notebook-only (undefined in a plain script); dropped.

    # Run inference
    messages = [{"role": "user", "content": [{"type": "image"}]}]
    text = processor.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)

    inputs = processor(text=[text], images=[image], return_tensors="pt").to(device)
    # Model weights are bfloat16; the processor emits float32 pixel values.
    inputs["pixel_values"] = inputs["pixel_values"].to(torch.bfloat16)

    outputs = model.generate(
        **inputs,
        max_new_tokens=1024,
    )

    # Decode only the newly generated tokens, not the echoed prompt.
    input_length = inputs['input_ids'].shape[1]
    generated_text = processor.tokenizer.decode(outputs[0, input_length:], skip_special_tokens=True)

    print("============\n")
    print(generated_text)
    with open('lightonai_LightOnOCR-1B-1025_1.txt', 'w', encoding='utf-8') as f:
        f.write('Everything was good in lightonai_LightOnOCR-1B-1025_1.txt')
except Exception as e:
    import os
    import traceback

    # Write the failure report FIRST so the artifact exists for the `finally`
    # upload even if the Slack call below also fails (e.g. SLACK_TOKEN unset).
    with open('lightonai_LightOnOCR-1B-1025_1.txt', 'a', encoding='utf-8') as f:
        f.write('''```CODE:
# Get a test image
image_url = "https://jeroen.github.io/images/testocr.png"
response = requests.get(image_url)
image = Image.open(BytesIO(response.content)).convert("RGB")

display(image)

# Run inference
messages = [{"role": "user", "content": [{"type": "image"}]}]
text = processor.apply_chat_template(messages, tokenize=False, add_generation_prompt=True)

inputs = processor(text=[text], images=[image], return_tensors="pt").to(device)
inputs["pixel_values"] = inputs["pixel_values"].to(torch.bfloat16)

outputs = model.generate(
    **inputs,
    max_new_tokens=1024,
)

input_length = inputs['input_ids'].shape[1]
generated_text = processor.tokenizer.decode(outputs[0, input_length:], skip_special_tokens=True)

print("============\n")
print(generated_text)
```

ERROR:
''')
        traceback.print_exc(file=f)

    from slack_sdk import WebClient
    client = WebClient(token=os.environ['SLACK_TOKEN'])
    client.chat_postMessage(
        channel='#exp-slack-alerts',
        text='Problem in <https://huggingface.co/datasets/model-metadata/code_execution_files/blob/main/lightonai_LightOnOCR-1B-1025_1.txt|lightonai_LightOnOCR-1B-1025_1.txt>',
    )
finally:
    # Always publish the result file, whether the run succeeded or failed.
    from huggingface_hub import upload_file
    upload_file(
        path_or_fileobj='lightonai_LightOnOCR-1B-1025_1.txt',
        repo_id='model-metadata/code_execution_files',
        path_in_repo='lightonai_LightOnOCR-1B-1025_1.txt',
        repo_type='dataset',
    )