Spaces:
Runtime error
Runtime error
File size: 3,550 Bytes
3a60826 1331a68 3a60826 3ea080b 3a60826 3ea080b 41877f0 3ea080b 41877f0 3ea080b 0213fd5 3ea080b |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 |
# from transformers import pipeline, Conversation
# import gradio as gr
# import os
# from getpass import getpass
# model = os.getenv('bigcode/starcoder')
# chatbot = pipeline(task="text-generation")
# message_list = []
# response_list = []
# def YourCoder_chatbot(message, history):
# python_code_examples = f"""
# ---------------------
# Example 1: Code Snippet
# def calculate_average(numbers):
# total = 0
# for number in numbers:
# total += number
# average = total / len(numbers)
# return average
# Code Review: Consider using the sum() function to calculate the total sum of the numbers
# instead of manually iterating over the list.
# This would make the code more concise and efficient.
# ---------------------
# Example 2: Code Snippet
# def find_largest_number(numbers):
# largest_number = numbers[0]
# for number in numbers:
# if number > largest_number:
# largest_number = number
# return largest_number
# Code Review: Refactor the code using the max() function to find the largest number in the list.
# This would simplify the code and improve its readability.
# ---------------------
# """
# prompt = f"""
# I will provide you with code snippets,
# and you will review them for potential issues and suggest improvements.
# Please focus on providing concise and actionable feedback, highlighting areas
# that could benefit from refactoring, optimization, or bug fixes.
# Your feedback should be constructive and aim to enhance the overall quality and maintainability of the code.
# Please avoid providing explanations for your suggestions unless specifically requested. Instead, focus on clearly identifying areas for improvement and suggesting alternative approaches or solutions.
# Few good examples of Python code output between #### separator:
# ####
# {python_code_examples}
# ####
# Code Snippet is shared below, delimited with triple backticks:
# ```
# {message}
# ```
# """
# conversation = chatbot(prompt)
# return conversation[0]['generated_text']
# chatbot = gr.ChatInterface(YourCoder_chatbot, title="YourCoder Chatbot", description="Enter piece of code to generate a code review!")
# chatbot.launch()
# import gradio as gr
# # def YourCoder_chatbot(message, history):
# # gr.load("models/bigcode/starcoder")
# # chatbot = gr.ChatInterface(YourCoder_chatbot, title="YourCoder Chatbot", description="Enter piece of code to generate a code review!")
# chatbot = gr.Interface(fn=gr.load("models/bigcode/starcoder"), inputs=[gr.Textbox(label="Insert Code Snippet",lines=5)],
# outputs=[gr.Textbox(label="Review Here",lines=8)],
# title="Code Reviewer"
# )
# # gr.load("models/bigcode/starcoder").launch()
# chatbot.launch()
#####################
import os
import gradio as gr
from transformers import pipeline
# Read the Hugging Face access token from the environment. This was
# commented out in a previous revision while the line below still used
# `token`, causing a NameError (the Space's "Runtime error") at import time.
token = os.getenv("HUGGINGFACE_TOKEN")
if token is None:
    raise ValueError("Hugging Face token is not set in the environment variables.")
# Load the model from the Hugging Face Model Hub with authentication.
# NOTE(review): `use_auth_token` is deprecated in recent transformers
# releases in favor of `token=` — confirm the pinned version before renaming.
generator = pipeline('text-generation', model='bigcode/starcoder', use_auth_token=token)
# Prediction callback used by the Gradio interface.
def generate_text(prompt):
    """Return the model's continuation of *prompt* (at most 50 tokens total)."""
    generations = generator(prompt, max_length=50)
    first = generations[0]
    return first['generated_text']
# Create the Gradio interface: one free-form text box in, generated text out.
iface = gr.Interface(
    fn=generate_text,
    inputs="text",
    outputs="text",
)
# Launch the app (blocking call; serves the web UI).
# Note: a stray trailing "|" after this call in the captured file was
# scrape residue and a syntax error — removed.
iface.launch()