|
|
import os |
|
|
import re |
|
|
import sys |
|
|
import logging |
|
|
import nest_asyncio |
|
|
|
|
|
|
|
|
import panel as pn |
|
|
import tiktoken |
|
|
import chromadb |
|
|
|
|
|
from llama_index.core import ( |
|
|
Settings, |
|
|
VectorStoreIndex, |
|
|
PromptTemplate, |
|
|
PromptHelper, |
|
|
StorageContext |
|
|
) |
|
|
from llama_index.core.text_splitter import SentenceSplitter |
|
|
from llama_index.llms.openai import OpenAI |
|
|
from llama_index.embeddings.huggingface import HuggingFaceEmbedding |
|
|
from llama_index.readers.web import SimpleWebPageReader |
|
|
from llama_index.vector_stores.chroma import ChromaVectorStore |
|
|
|
|
|
# Patch the already-running event loop so llama-index's async calls can be
# awaited inside Panel/Jupyter, which run their own loop.
nest_asyncio.apply()

# Log-line format shared by every logger this app configures (see get_logger).
FORMAT = "%(asctime)s | %(levelname)s | %(name)s | %(message)s"
|
|
|
|
|
@pn.cache
def get_logger(name, format_=FORMAT, level=logging.INFO):
    """Return a logger that writes formatted records to stdout.

    The result is memoized by ``pn.cache``, so asking for the same *name*
    again reuses the already-configured logger instead of stacking handlers.

    Parameters:
        name: logger name passed to ``logging.getLogger``.
        format_: record format string (defaults to the module-wide FORMAT).
        level: minimum level the logger emits (defaults to INFO).
    """
    configured = logging.getLogger(name)

    # Drop any handlers left over from a previous configuration.
    configured.handlers.clear()

    # StreamHandler defaults to stderr; route it to stdout explicitly.
    stdout_handler = logging.StreamHandler(sys.stdout)
    stdout_handler.setFormatter(logging.Formatter(format_))
    configured.addHandler(stdout_handler)

    # Stop records from bubbling up to the root logger (avoids double output).
    configured.propagate = False
    configured.setLevel(level)

    configured.info("Logger successfully configured")
    return configured
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# Load the code-editor extension; widgets stretch horizontally by default.
pn.extension("codeeditor", sizing_mode="stretch_width")

TTL = 1800  # cache time-to-live in seconds (30 minutes)
ACCENT = "#2EB872"  # accent color for the Fast template
THEME = pn.config.theme  # active Panel theme at startup

# Logos / links used for chat avatars and the sidebar "powered by" section.
CHAT_GPT_LOGO = "https://upload.wikimedia.org/wikipedia/commons/thumb/0/04/ChatGPT_logo.svg/512px-ChatGPT_logo.svg.png"
CHAT_GPT_URL = "https://chat.openai.com/"
LLAMA_INDEX_LOGO = "https://asset.brandfetch.io/id6a4s3gXI/idncpUsO_z.jpeg"
LLAMA_INDEX_URL = "https://www.llamaindex.ai/"

# OpenAI model name used for both the LLM and the tokenizer below.
LLM_VERSION = "gpt-3.5-turbo-1106"

# Chat avatars: ChatGPT logo for the assistant, llama emoji for the user.
pn.chat.ChatMessage.default_avatars.update(
    {
        "assistant": CHAT_GPT_LOGO,
        "user": "🦙",
    }
)
pn.chat.ChatMessage.show_reaction_icons = False
|
|
|
|
|
# Sidebar markdown describing the app; f-string interpolates the model name.
EXPLANATION = f"""
## ScaleUp - (Level up your Python abilities)
---

**ScaleUp** is a powerful Python coding assistant app that leverages `OpenAI` and `LlamaIndex` to provide an interactive,
AI-powered learning experience.

It acts as a virtual mentor, offering expert guidance, contextually relevant responses, and an integrated code editor for writing and testing Python code.

### Key Features:

- **Expert Python Guidance**: Get insightful and accurate answers to your Python queries.
- **Interactive Code Editor**: Write and test your code, with suggestions and code snippets from the AI.
- **Context-Aware Responses**: Responses are tailored based on your provided information and a comprehensive knowledge base.
- **Streaming Responses**: Receive real-time, up-to-date responses as the AI generates them.

## OpenAI GPT
---
We are using the OpenAI `{LLM_VERSION}` to power the coding assistant.

## Getting Started
---

Ask your Python-related questions, share your code snippets, or request guidance on specific topics.

The AI will respond with detailed explanations, code examples, and insightful suggestions to help you learn and improve your Python skills.
"""
|
|
|
|
|
# Text-QA prompt template. llama-index substitutes {context_str} with the
# retrieved document chunks and {query_str} with the user's question.
SYSTEM_PROMPT = (
    "You are an expert Python developer with years of experience writing Python code and teaching Python to other programmers. "
    "You have vast experience mentoring people who are learning Python. "
    "I want you to be my mentor while I learn Python myself. "
    "Your goal is to provide insightful, accurate, and concise answers to questions in this domain. "
    "When generating code, please explicitly state the sources you reference.\n\n"
    "Here is some context related to the query:\n"
    "-----------------------------------------\n"
    "{context_str}\n"
    "-----------------------------------------\n"
    "Considering the above information, please respond to the following inquiry with detailed references to applicable principles, "
    "libraries, design patterns, or debugging methodology where appropriate:\n\n"
    "Question: {query_str}\n\n"
    "Answer succinctly, and ensure your response is understandable to someone with extreme enthusiasm to learn Python programming."
)
|
|
|
|
|
|
|
|
# Web pages scraped at startup (see load_data) to build the retrieval index.
URLS = [
    "https://thewhitetulip.gitbook.io/py",
    "https://docs.python.org/3/tutorial/",
    "https://awesomepython.org/",
    "https://awesome-python.com/",
]
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# Template combining a user's request with the editor's current code.
# NOTE(review): not referenced anywhere visible in this file — presumably
# reserved for a feature that sends the editor content along with the chat
# message; confirm before removing.
USER_CONTENT_FORMAT = """
Request:
{content}
Code:
```python
{code}
```
""".strip()

# Code shown in the editor before the AI has produced any snippet.
DEFAULT_CODE_EXAMPLE = """
print("Hello World")
""".strip()
|
|
|
|
|
|
|
|
# Markdown list of sample prompts rendered in the "Example Questions" tab.
# Plain string — it interpolates nothing, so the previous f-prefix was
# unnecessary (ruff F541). Also fixes the "Fizbuzz" typo and removes a
# duplicated "palindrome" entry.
EXAMPLE_QUESTIONS = """
## Python Programming Questions

### Basic

- Write a Python function to find the maximum of three numbers.
- Write a Python program to reverse a string.
- Write a Python program to check if a given number is prime or not.
- Write a Python program to find the factorial of a number.
- Write a Python program to check if a string is a palindrome or not.
- Write a Python program to find the largest number in a list.
- Write a Python program to find the sum of all numbers in a list.
- Write a Python program to find the second largest number in a list.
- Write a Python program to remove duplicates from a list.
- Write a Python program to implement a simple calculator.
- Write a Python program to find the Fibonacci sequence up to a given number.
- Write a Python program to solve the FizzBuzz algorithm in the most simple way you can think of.

### Advanced

- Write a Python program to sort a list of dictionaries by a specific value.
- Write a Python program to implement a binary search algorithm.
- Write a Python program to implement a merge sort algorithm.
- Write a Python program to implement a linked list data structure.
- Write a Python program to implement a binary tree data structure.
- Implement an LRU (Least Recently Used) Cache.
- Write a function to check if a binary tree is balanced.
- Implement a stack using two queues.
- Write a function to calculate the factorial of a number recursively.
- Implement a depth-first search (DFS) algorithm to traverse a graph.

"""
|
|
|
|
|
def _powered_by():
    """Returns a component describing the frameworks powering the chat ui."""
    logo_kwargs = dict(height=40, sizing_mode="fixed", margin=(0, 10))
    llama_logo = pn.pane.Image(LLAMA_INDEX_LOGO, link_url=LLAMA_INDEX_URL, **logo_kwargs)
    gpt_logo = pn.pane.Image(CHAT_GPT_LOGO, link_url=CHAT_GPT_URL, **logo_kwargs)
    heading = pn.pane.Markdown("### AI Powered By", margin=(10, 5, 10, 0))
    return pn.Column(
        heading,
        pn.Row(llama_logo, gpt_logo, align="center"),
    )
|
|
|
|
|
# Model stack: OpenAI chat LLM, local HuggingFace embeddings, and a
# sentence-based splitter for chunking the scraped pages.
llm = OpenAI(temperature=0.1, model=LLM_VERSION, max_tokens=512)
embed_model = HuggingFaceEmbedding(model_name="BAAI/bge-small-en-v1.5")
text_splitter = SentenceSplitter(chunk_size=1024, chunk_overlap=20)

# Controls how retrieved context is packed into the model's context window.
prompt_helper = PromptHelper(
    context_window=4096,
    num_output=256,
    chunk_overlap_ratio=0.1,
    chunk_size_limit=None,
)

# Register everything on llama-index's global Settings singleton so all
# downstream components (index, engines) pick up the same models/tokenizer
# without explicit wiring. The tokenizer must match LLM_VERSION for
# accurate token counting.
Settings.llm = llm
Settings.embed_model = embed_model
Settings.tokenizer = tiktoken.encoding_for_model(LLM_VERSION).encode
Settings.text_splitter = text_splitter
Settings.prompt_helper = prompt_helper
|
|
|
|
|
def load_data(data=None):
    """Fetch the source web pages and build a Chroma-backed vector index.

    Parameters:
        data: list of URLs to ingest; defaults to the module-level ``URLS``.
            (Uses a ``None`` sentinel instead of the mutable module list as
            a default argument.)

    Returns:
        A ``VectorStoreIndex`` over the fetched documents, backed by an
        in-memory (ephemeral) Chroma collection.
    """
    if data is None:
        data = URLS

    reader = SimpleWebPageReader(html_to_text=True)
    documents = reader.load_data(data)

    logging.info("index creating with `%d` documents", len(documents))

    # EphemeralClient: the collection lives only for this process — the
    # index is rebuilt from the web on every app start.
    chroma_client = chromadb.EphemeralClient()
    chroma_collection = chroma_client.get_or_create_collection("python-data")
    vector_store = ChromaVectorStore(chroma_collection=chroma_collection)
    storage_context = StorageContext.from_defaults(vector_store=vector_store)

    return VectorStoreIndex.from_documents(
        documents, storage_context=storage_context, embed_model=embed_model
    )
|
|
|
|
|
|
|
|
def initialize_query_engine(index):
    """Build a one-shot query engine over *index* using the system QA prompt."""
    qa_template = PromptTemplate(SYSTEM_PROMPT)
    return index.as_query_engine(text_qa_template=qa_template, similarity_top_k=3)
|
|
|
|
|
|
|
|
def build_chat_engine(index):
    """Build a streaming, context-mode chat engine over *index*.

    Uses the module-level SYSTEM_PROMPT as the text-QA template so retrieved
    context is injected into every turn.
    """
    qa_template = PromptTemplate(SYSTEM_PROMPT)
    return index.as_chat_engine(
        chat_mode="context",
        text_qa_template=qa_template,
        verbose=True,
        streaming=True,
    )
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# --- Application wiring -----------------------------------------------------

logger = get_logger(name="app")

index = load_data()

# Reuse the build_chat_engine factory instead of duplicating its
# configuration inline (the original repeated the template / as_chat_engine
# call verbatim here).
chat_engine = build_chat_engine(index)

# The original called os.getenv('OPENAI_API_KEY') and discarded the result;
# surface a clear warning instead, since every LLM call depends on the key.
if not os.getenv("OPENAI_API_KEY"):
    logger.warning("OPENAI_API_KEY is not set; OpenAI requests will fail.")
|
|
|
|
|
async def generate_response(
    contents: str,
    user: str,
    instance: pn.chat.ChatInterface,
):
    """Stream the LLM's answer for *contents* into the chat interface.

    Yields the accumulating response text so the ChatInterface renders it
    incrementally. After streaming finishes, the first fenced ```python
    block in the answer (if any) is copied into the code editor.

    Parameters:
        contents: the user's message text.
        user: the sender's display name (unused here, required by Panel).
        instance: the ChatInterface invoking this callback (unused here).
    """
    response = await chat_engine.astream_chat(contents)
    text = ""
    async for token in response.async_response_gen():
        text += token
        yield text

    # Guard the extraction: the original indexed [0] unconditionally, which
    # raised IndexError whenever the reply contained no python code block.
    code_blocks = re.findall(r"```python\n(.*)\n```", text, re.DOTALL)
    if code_blocks:
        code_editor.value = code_blocks[0]
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
# Chat UI wired to the streaming callback above. callback_exception="verbose"
# renders callback tracebacks in the chat feed, easing debugging.
chat_interface = pn.chat.ChatInterface(
    callback=generate_response,
    show_send=True,
    show_rerun=False,
    show_undo=True,
    show_clear=True,
    show_button_name=True,
    sizing_mode="stretch_both",
    callback_exception="verbose"
)

# Show the system prompt as the opening message; respond=False means this
# send does not trigger the LLM callback.
chat_interface.send(
    SYSTEM_PROMPT,
    user="System",
    respond=False
)
|
|
|
|
|
# Editor where AI-generated snippets are placed (see generate_response).
code_editor = pn.widgets.CodeEditor(
    value=DEFAULT_CODE_EXAMPLE,
    language="python",
    sizing_mode="stretch_both",
)

# Static markdown column of sample prompts.
question_layout = pn.Column(
    EXAMPLE_QUESTIONS,
    sizing_mode="stretch_width"
)

# Right-hand side: code editor and example questions as tabs.
tabs_layout = pn.Tabs(
    ("Code", code_editor),
    ("Example Questions", question_layout),
    sizing_mode = "stretch_both",
)

# Main area: chat on the left, tabs on the right.
component = pn.Row(
    chat_interface,
    tabs_layout,
    sizing_mode="stretch_both"
)
|
|
|
|
|
|
|
|
# App shell: FastListTemplate with the explanation text and framework logos
# in the sidebar, and the chat/editor row as the main content.
template = pn.template.FastListTemplate(
    title="ScaleUp Code Assistant 🐍",
    sidebar=[
        EXPLANATION,
        _powered_by(),
    ],
    main=[component],
    main_layout=None,
    accent=ACCENT,
)

# Mark the template servable so `panel serve <file>` renders the app.
template.servable()
|
|
|
|
|
|
|
|
|
|
|
|