Chris4K committed on
Commit
dc96452
·
verified ·
1 Parent(s): d911e01

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +48 -54
app.py CHANGED
@@ -2,32 +2,27 @@ import os
2
  import json
3
  import gradio as gr
4
  from transformers import Tool
5
- #from transformers.tools.base import push_to_hub
6
- from transformers.utils import TransformersHub
7
-
8
 
9
  def generate_files(title="Text Generation Tool", emoji="🌖", colorFrom="blue", colorTo="blue",
10
  sdk="gradio", sdk_version="4.3.0", app_file="app.py", pinned=False,
11
  tags=["tool"], tool_name="text_generator", tool_description="This is a tool that chats with a user. "
12
  "It takes an input named `prompt` which contains a system_role, user_message, context and history. "
13
  "It returns a text message."):
14
- # Convert tags to a list if it's not already
15
- #if not isinstance(tags, list):
16
- # tags = [tags]
17
 
18
  # Generate readme content
19
  readme_content = '''## readme
20
- title: {}
21
- emoji: {}
22
- colorFrom: {}
23
- colorTo: {}
24
- sdk: {}
25
- sdk_version: {}
26
- app_file: {}
27
- pinned: false
28
- tags:
29
- - tool
30
- '''.format(title, emoji, colorFrom, colorTo, sdk, sdk_version, app_file)
31
 
32
  # Generate tool config JSON content
33
  tool_config = {
@@ -39,64 +34,63 @@ def generate_files(title="Text Generation Tool", emoji="🌖", colorFrom="blue",
39
 
40
  # Generate app.py content
41
  app_py_content = '''from transformers.tools.base import launch_gradio_demo
42
- from {} import {}Tool
43
- launch_gradio_demo({}Tool)
44
- '''.format(tool_name, tool_name.capitalize(), tool_name.capitalize())
45
 
46
  # Generate requirements.txt content
47
  requirements_content = '''transformers>=4.29.0
48
- # diffusers
49
- accelerate
50
- torch
51
- '''
52
 
53
  # Generate text_generator.py content
54
  text_generator_py_content = '''import os
55
- from transformers import pipeline
56
- class {}Tool(Tool):
57
- name = "{}"
58
- description = (
59
- "{}"
60
- )
61
- inputs = ["text"]
62
- outputs = ["text"]
63
- def __call__(self, prompt: str):
64
- token = os.environ['hf']
65
- text_generator = pipeline(model="microsoft/Orca-2-13b", token=token)
66
- generated_text = text_generator(prompt, max_length=500, num_return_sequences=1, temperature=0.7)
67
- print(generated_text)
68
- return generated_text
69
- '''.format(tool_name.capitalize(), tool_name, tool_description)
70
-
71
- os.makedirs("new", exist_ok=True)
 
72
 
73
  # Write content to files
74
- with open("new/README.md", "w") as readme_file:
75
  readme_file.write(readme_content)
76
 
77
- with open("new/tool_config.json", "w") as tool_config_file:
78
  tool_config_file.write(tool_config_json)
79
 
80
- with open("new/app.py", "w") as app_py_file:
81
  app_py_file.write(app_py_content)
82
 
83
- with open("new/requirements.txt", "w") as requirements_file:
84
  requirements_file.write(requirements_content)
85
 
86
- with open("new/{}.py".format(tool_name), "w") as text_generator_py_file:
87
  text_generator_py_file.write(text_generator_py_content)
88
 
89
- # Upload the files to the Hugging Face Hub
90
- hub = TransformersHub(organization="Chris4K")
91
- hub.create(
92
- model_id=tool_name,
93
- repo_path="./new/",
94
- commit_message="Upload {} tool".format(tool_name),
95
- private=False
96
  )
97
 
98
  # Return the generated files for download
99
- return "README.md", "tool_config.json", "app.py", "requirements.txt", "{}.py".format(tool_name)
100
 
101
 
102
  # Define the inputs for the Gradio interface
 
2
  import json
3
  import gradio as gr
4
  from transformers import Tool
5
+ from huggingface_hub import upload_folder
 
 
6
 
7
  def generate_files(title="Text Generation Tool", emoji="🌖", colorFrom="blue", colorTo="blue",
8
  sdk="gradio", sdk_version="4.3.0", app_file="app.py", pinned=False,
9
  tags=["tool"], tool_name="text_generator", tool_description="This is a tool that chats with a user. "
10
  "It takes an input named `prompt` which contains a system_role, user_message, context and history. "
11
  "It returns a text message."):
 
 
 
12
 
13
  # Generate readme content
14
  readme_content = '''## readme
15
+ title: {}
16
+ emoji: {}
17
+ colorFrom: {}
18
+ colorTo: {}
19
+ sdk: {}
20
+ sdk_version: {}
21
+ app_file: {}
22
+ pinned: false
23
+ tags:
24
+ - tool
25
+ '''.format(title, emoji, colorFrom, colorTo, sdk, sdk_version, app_file)
26
 
27
  # Generate tool config JSON content
28
  tool_config = {
 
34
 
35
  # Generate app.py content
36
  app_py_content = '''from transformers.tools.base import launch_gradio_demo
37
+ from {} import {}Tool
38
+ launch_gradio_demo({}Tool)
39
+ '''.format(tool_name, tool_name.capitalize(), tool_name.capitalize())
40
 
41
  # Generate requirements.txt content
42
  requirements_content = '''transformers>=4.29.0
43
+ # diffusers
44
+ accelerate
45
+ torch
46
+ '''
47
 
48
  # Generate text_generator.py content
49
  text_generator_py_content = '''import os
50
+ from transformers import pipeline
51
+ class {}Tool(Tool):
52
+ name = "{}"
53
+ description = (
54
+ "{}"
55
+ )
56
+ inputs = ["text"]
57
+ outputs = ["text"]
58
+ def __call__(self, prompt: str):
59
+ token = os.environ['hf']
60
+ text_generator = pipeline(model="microsoft/Orca-2-13b", token=token)
61
+ generated_text = text_generator(prompt, max_length=500, num_return_sequences=1, temperature=0.7)
62
+ print(generated_text)
63
+ return generated_text
64
+ '''.format(tool_name.capitalize(), tool_name, tool_description)
65
+
66
+ # Create a new folder for the tool
67
+ os.makedirs(tool_name, exist_ok=True)
68
 
69
  # Write content to files
70
+ with open(f"{tool_name}/README.md", "w") as readme_file:
71
  readme_file.write(readme_content)
72
 
73
+ with open(f"{tool_name}/tool_config.json", "w") as tool_config_file:
74
  tool_config_file.write(tool_config_json)
75
 
76
+ with open(f"{tool_name}/app.py", "w") as app_py_file:
77
  app_py_file.write(app_py_content)
78
 
79
+ with open(f"{tool_name}/requirements.txt", "w") as requirements_file:
80
  requirements_file.write(requirements_content)
81
 
82
+ with open(f"{tool_name}/{tool_name}.py", "w") as text_generator_py_file:
83
  text_generator_py_file.write(text_generator_py_content)
84
 
85
+ # Upload the folder to the Hugging Face Hub
86
+ upload_folder(
87
+ folder_path=tool_name,
88
+ repo_id="your_username/your_repo_name",
89
+ repo_type="space"
 
 
90
  )
91
 
92
  # Return the generated files for download
93
+ return "README.md", "tool_config.json", "app.py", "requirements.txt", f"{tool_name}.py"
94
 
95
 
96
  # Define the inputs for the Gradio interface