# docter / app.py
# Hugging Face Space file-header residue (kept as a comment so the file parses):
# uploaded by meraj12, commit 921faeb (verified), message "Update app.py".
import http.client
import os

import streamlit as st
# Function to make an API request to Scrapeless
def get_user_data():
try:
# Create HTTPS connection to Scrapeless API
conn = http.client.HTTPSConnection("api.scrapeless.com")
# Set up headers, such as the authorization token if required
headers = {
# Uncomment and replace with your API key if needed:
'Authorization': 'Bearer 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'
}
# Send a GET request to the Scrapeless API
conn.request("GET", "/api/v1/me", "", headers)
# Get the response from the API
res = conn.getresponse()
if res.status == 200:
data = res.read()
return data.decode("utf-8")
else:
return f"Error: {res.status} - {res.reason}"
except Exception as e:
return f"An error occurred: {str(e)}"
# Streamlit UI
st.title("Scrapeless API Data")
# Trigger the API call when a button is pressed
if st.button("Get User Data"):
result = get_user_data()
st.write(result) # Display the result in Streamlit
import streamlit as st
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer
import os
# Set up Groq API Key (you should ideally use environment variables or a secrets manager for production)
groq_api_key = "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"
# Load the GPT-2 model and tokenizer
model_name = "gpt2"
model = AutoModelForCausalLM.from_pretrained(model_name)
tokenizer = AutoTokenizer.from_pretrained(model_name)
# Ensure pad_token_id is set (if not, use eos_token_id)
if tokenizer.pad_token_id is None:
tokenizer.pad_token_id = tokenizer.eos_token_id
def get_medical_recommendations(disease):
# Tokenize input and convert it into tensor
inputs = tokenizer.encode(f"Give medical precautions for: {disease}", return_tensors="pt")
# Create attention mask
attention_mask = inputs.ne(tokenizer.pad_token_id).long() # Check for non-padding tokens
# Generate recommendations
outputs = model.generate(inputs, attention_mask=attention_mask, pad_token_id=tokenizer.eos_token_id, max_length=200, num_return_sequences=1, no_repeat_ngram_size=2)
# Decode and return the response
response = tokenizer.decode(outputs[0], skip_special_tokens=True)
return response
import requests
def get_doctors_from_foursquare(location):
client_id = "YOUR_CLIENT_ID"
client_secret = "YOUR_CLIENT_SECRET"
url = f"https://api.foursquare.com/v2/venues/search?query=doctor&near={location}&client_id={client_id}&client_secret={client_secret}&v=20230220"
response = requests.get(url)
data = response.json()
doctors = []
for venue in data['response']['venues']:
name = venue['name']
address = venue['location']['address']
doctors.append(f"{name} - {address}")
if not doctors:
return ["No doctors found in this location."]
return doctors
# Streamlit UI
st.title("Medical Disease Recommendations & Doctor Finder")
# Get disease input
disease = st.text_input("Enter your disease:")
if disease:
recommendations = get_medical_recommendations(disease)
st.subheader("Medical Recommendations")
st.write(recommendations)
# Get location input
location = st.text_input("Enter your location to find doctors:")
if location:
doctors = find_doctors_in_location(location)
st.subheader("Doctors in your location")
st.write(doctors)