SatyamPrakash09 committed on
Commit
fa5049f
·
verified ·
1 Parent(s): d2acdbb

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +29 -14
app.py CHANGED
import os

import gradio as gr
from dotenv import load_dotenv
from langchain import LLMChain, PromptTemplate
from langchain.memory import ConversationBufferMemory
from langchain_google_genai import ChatGoogleGenerativeAI

# Pull variables from a local .env file (no-op when the file is absent).
load_dotenv()

# NOTE(review): this version used `os` without importing it, raising
# NameError at startup — `import os` added above. Also guard against
# os.getenv returning None, which would make the assignment raise TypeError.
_api_key = os.getenv("GOOGLE_API_KEY")
if _api_key is None:
    raise RuntimeError(
        "GOOGLE_API_KEY is not set; add it to the environment or a .env file."
    )
os.environ["GOOGLE_API_KEY"] = _api_key

# Create LLM instance (temperature=0 for deterministic replies).
llm = ChatGoogleGenerativeAI(model="gemini-2.5-flash", temperature=0)
# Persona prompt: the chain substitutes {chat_history} (from memory) and
# {user_message} (from the caller) before sending the text to the model.
template = """As an adventurous and globetrotting college student named Onix, you're constantly on the lookout for new cultures, experiences, and breathtaking landscapes. You've visited numerous countries, immersing yourself in local traditions, and you're always eager to swap travel stories and offer tips on exciting destinations
{chat_history}
User: {user_message}
Chatbot:"""

prompt = PromptTemplate(
    input_variables=["chat_history", "user_message"],
    template=template,
)

# Buffer memory keyed to match the {chat_history} placeholder above.
memory = ConversationBufferMemory(memory_key="chat_history")

llm_chain = LLMChain(llm=llm, prompt=prompt, memory=memory)
def get_text_response(user_message, history):
    """Gradio ChatInterface callback: return the bot's reply to *user_message*.

    ``history`` is accepted to satisfy the ChatInterface signature but is
    deliberately unused — LangChain's ConversationBufferMemory already
    tracks the dialogue inside ``llm_chain``.
    """
    reply = llm_chain.predict(user_message=user_message)
    return reply
# Assemble the chat UI around the response callback.
demo = gr.ChatInterface(
    fn=get_text_response,
    examples=[
        "How are you doing?",
        "What are your interests?",
        "Which places do you like to visit?",
    ],
    type='messages',
)

if __name__ == "__main__":
    demo.launch()
 
 
 
from langchain import LLMChain, PromptTemplate
from langchain.memory import ConversationBufferMemory
from langchain_google_genai import ChatGoogleGenerativeAI
import os
from dotenv import load_dotenv

# Pull variables from a local .env file (no-op when the file is absent).
load_dotenv()

# Fail fast with a clear message: the original assignment would raise an
# opaque `TypeError: str expected, not None` when the key is missing.
_api_key = os.getenv("GOOGLE_API_KEY")
if _api_key is None:
    raise RuntimeError(
        "GOOGLE_API_KEY is not set; add it to the environment or a .env file."
    )
os.environ["GOOGLE_API_KEY"] = _api_key
# Construct the model unconditionally: instantiation does not hit the network,
# and the chain built later needs `llm` to exist. The previous version created
# it inside the try block, so a failure there was swallowed and `llm` was left
# undefined, causing a NameError at `LLMChain(llm=llm, ...)` later on.
# (The duplicate `from langchain_google_genai import ChatGoogleGenerativeAI`
# was removed — it is already imported at the top of the file.)
llm = ChatGoogleGenerativeAI(model="gemini-2.5-flash", temperature=0)

# Optional startup smoke test: confirm the Gemini API is actually reachable.
try:
    response = llm.invoke("Hello Gemini, can you hear me?")
    print("✅ API is working!")
    print("Response:", response.content)
except Exception as e:  # best-effort check — the app still starts either way
    print("❌ API Error:", str(e))
# System prompt: the chain substitutes {chat_history} (from memory) and
# {user_message} (from the caller) before sending the text to the model.
template = """You are an advanced code reviewer, vulnerability scanner, and secure coding assistant. Your task is to analyze any code provided, regardless of the programming language, and automatically adapt your responses to the language and style of the user. First, explain what the code does in simple terms, then review it for quality issues, inefficiencies, bad practices, or redundant logic. Next, perform a thorough security audit by identifying vulnerabilities such as injection flaws, unvalidated inputs, insecure API usage, weak authentication, hard-coded secrets, race conditions, or other exploitable patterns. For each vulnerability, provide a risk level (Low, Medium, High), explain how it could be exploited, and give actionable recommendations or alternative code snippets that follow secure coding best practices. Also suggest improvements to make the code more readable, scalable, and maintainable, following the conventions of the detected language. Always treat the code as if it were part of a production system and prioritize security, clarity, and performance in your suggestions.
{chat_history}
User: {user_message}
Chatbot:"""

prompt = PromptTemplate(
    input_variables=["chat_history", "user_message"], template=template
)

# Buffer memory keyed to match the {chat_history} placeholder above.
# (The redundant mid-file `from langchain.memory import ConversationBufferMemory`
# was removed — the name is already imported at the top of the file.)
memory = ConversationBufferMemory(memory_key="chat_history")

llm_chain = LLMChain(
    llm=llm,
    prompt=prompt,
    memory=memory,
)
def get_text_response(user_message, history):
    """Chat callback for gr.ChatInterface: return the model's reply.

    The ``history`` argument is ignored on purpose — LangChain's
    ConversationBufferMemory attached to ``llm_chain`` already tracks
    the conversation internally.
    """
    return llm_chain.predict(user_message=user_message)
# Assemble the chat UI around the response callback.
demo = gr.ChatInterface(
    get_text_response,
    examples=[
        "How are you doing?",
        # typo fix in user-facing example: "vunerability" -> "vulnerability"
        "What is a code vulnerability?",
        "What happens if a code is not secure?",
    ],
    type='messages',
)

if __name__ == "__main__":
    demo.launch()