index
int64 | repo_name
string | branch_name
string | path
string | content
string | import_graph
string |
|---|---|---|---|---|---|
44,729,242
|
HobbesE/Study-Buddy-Finder
|
refs/heads/main
|
/crud.py
|
"""Crud Operations for Study Buddy finder"""
from model import Student, Personal, Attendence, StudySession, Comment, Resource, connect_to_db, db
import random
from sqlalchemy import update
def create_student(username, password, email, first_name, last_name, cohort_name, cohort_year, icon_url):
    """Persist a new Student row and return it."""
    # Keyword arguments make the field/column mapping explicit and
    # order-independent.
    new_student = Student(
        username=username,
        password=password,
        first_name=first_name,
        last_name=last_name,
        email=email,
        cohort_name=cohort_name,
        cohort_year=cohort_year,
        icon_url=icon_url,
    )
    db.session.add(new_student)
    db.session.commit()
    return new_student
def choose_icon():
    """Return a random animal-avatar path for a new student."""
    # Same 39 icons, in the same order, as the original hand-written list.
    animals = (
        "bat", "bear", "bee", "bug", "bird", "butterfly", "camel", "cat",
        "cheetah", "coala", "cow", "crocodile", "dinosaur", "dog",
        # "dolphin" is intentionally excluded (commented out in the original).
        "dove", "duck", "eagle", "elephant", "flamingo", "fox", "frog",
        "giraffe", "gorilla", "horse", "kangoroo", "leopard", "lion",
        "monkey", "mouse", "panda", "parrot", "penguin", "sheep", "snake",
        "squirrel", "tiger", "turtle", "wolf", "zebra",
    )
    icons = ["static/Creative-Tail-Animal-%s.svg.png" % name for name in animals]
    return random.choice(icons)
def create_personal_info(pronouns, location, goals, past_roles, github, linkedin, spotify, instagram):
    """Persist a student's editable personal details and return the row."""
    record = Personal(
        pronouns=pronouns,
        location=location,
        goals=goals,
        past_roles=past_roles,
        github=github,
        linkedin=linkedin,
        spotify=spotify,
        instagram=instagram,
    )
    db.session.add(record)
    db.session.commit()
    return record
def edit_personal_info(user_id, pronouns, location, goals, past_roles, github, linkedin, spotify, instagram):
    """Update the Personal row belonging to *user_id* with new values.

    Bug fix: the original referenced an undefined name ``personal_info``
    (NameError on every call) and built an UPDATE statement that was never
    executed or committed.  Returns the execution result.
    """
    stmt = (
        update(Personal)
        # assumes Personal carries a user_id FK column — TODO confirm in model.py
        .where(Personal.user_id == user_id)
        .values(
            pronouns=pronouns,
            location=location,
            goals=goals,
            past_roles=past_roles,
            github=github,
            linkedin=linkedin,
            spotify=spotify,
            instagram=instagram,
        )
    )
    result = db.session.execute(stmt)
    db.session.commit()
    return result
def attend(study_session_id, user_id):
    """Record that *user_id* attends a study session.

    Returns the new Attendence row, or None when the user already has a
    record for this session or is the session's creator.
    """
    study_session = get_study_session_by_id(study_session_id)
    creator_id = study_session.creator.user_id
    already_attending = Attendence.query.filter(
        Attendence.study_session_id == study_session_id,
        Attendence.user_id == user_id,
    ).first()
    if already_attending or user_id == creator_id:
        return None
    new_record = Attendence(study_session_id=study_session_id, user_id=user_id)
    db.session.add(new_record)
    db.session.commit()
    return new_record
def create_study_session(participant, proposed_time, topic, capacity, prerequisites, creator):
    """Persist a new StudySession and return it."""
    new_session = StudySession(
        participant=participant,
        proposed_time=proposed_time,
        topic=topic,
        capacity=capacity,
        prerequisites=prerequisites,
        creator=creator,
    )
    db.session.add(new_session)
    db.session.commit()
    return new_session
# def create_topic(topic_description, topic_title):
# topic=Topic(
# topic_description=topic_description,
# topic_title=topic_title
# )
# db.session.add(topic)
# db.session.commit()
# return topic
def get_user_by_email(email):
    """Return the Student with the given email, or None if no match."""
    return Student.query.filter_by(email=email).first()
def get_study_sessions():
    """Return all study sessions as a list (empty list if none exist)."""
    return StudySession.query.all()
def get_roster_list():
    """Return one attendee roster (list of Students) per study session, in
    the same order that get_study_sessions() yields the sessions."""
    return [
        take_attendence(study_session.study_session_id)
        for study_session in get_study_sessions()
    ]
def get_user_study_sessions(student_obj):
    """Return the StudySessions the given Student has joined as a participant.

    Sessions the student *created* are reachable via the
    ``Student.study_sessions`` backref; they were never merged into the
    return value in the original (the ``extend`` was commented out), so
    they are intentionally excluded here as well.

    Cleanup: removed the unused ``created_sessions`` local and the large
    block of commented-out exploratory code.
    """
    joined = Attendence.query.filter_by(user_id=student_obj.user_id).all()
    user_study_sessions = []
    for attendance in joined:
        # One lookup per attendance record, preserving attendance order.
        session_row = StudySession.query.filter_by(
            study_session_id=attendance.study_session_id
        ).first()
        user_study_sessions.append(session_row)
    return user_study_sessions
def get_study_session_by_id(study_session_id):
    """Return the StudySession with the given primary key (None if absent)."""
    # Query.get is a primary-key lookup and may serve from the identity map.
    return StudySession.query.get(study_session_id)
def take_attendence(study_session_id):
    """Return the unique Students attending the given study session.

    Duplicate Attendence rows for the same username are collapsed so each
    student appears at most once, preserving first-seen order.

    Cleanup: removed the leftover debug ``print`` calls and dead comment
    scaffolding; username de-duplication now uses a set (O(1) membership)
    instead of scanning a list.
    """
    study_session = get_study_session_by_id(study_session_id)
    attendees = Attendence.query.filter_by(
        study_session_id=study_session.study_session_id
    ).all()
    student_objects = []
    seen_usernames = set()
    for attendee in attendees:
        student = get_participant(attendee.user_id)
        if student.username not in seen_usernames:
            seen_usernames.add(student.username)
            student_objects.append(student)
    return student_objects
def get_participant(user_id):
    """Return the Student with the given primary key (None if absent).

    Despite the original docstring, this returns the full Student object,
    not just a username — callers access ``.username`` themselves.
    """
    return Student.query.get(user_id)
def create_comment(comment, study_session_id, user_id):
    """Persist a comment posted on a study-session page and return it."""
    row = Comment(
        comment=comment,
        study_session_id=study_session_id,
        user_id=user_id,
    )
    db.session.add(row)
    db.session.commit()
    return row
def get_comments(study_session_id):
    """Return a list of single-entry dicts, each mapping the commenting
    Student object to that comment's text, for one study session."""
    comments = Comment.query.filter(
        Comment.study_session_id == study_session_id
    ).all()
    return [{get_participant(c.user_id): c.comment} for c in comments]
def create_resource(resource, description, study_session_id, user_id):
    """Persist a resource shared on a study-session page and return it."""
    row = Resource(
        resource=resource,
        description=description,
        study_session_id=study_session_id,
        user_id=user_id,
    )
    db.session.add(row)
    db.session.commit()
    return row
def get_resources(study_session_id):
    """Return a list of single-entry dicts, each mapping the posting
    Student object to that resource's link/text, for one study session."""
    resources = Resource.query.filter(
        Resource.study_session_id == study_session_id
    ).all()
    return [{get_participant(r.user_id): r.resource} for r in resources]
def is_user_signed_in():
    """ Check if user is signed in """
    # BUG(review): ``session`` is never imported in this module — calling
    # this raises NameError.  It presumably refers to flask.session; either
    # add ``from flask import session`` at the top of crud.py or move this
    # helper into server.py where the Flask imports live.
    return session.get("signed_in_user_id") is not None
# def get_participants_for_study_session(target_user_id):
# participants_for_study_sessions = StudySession.query.filter_by(participant_id=target_user_id)
# return participants_for_study_sessions
# if __name__ == '__main__':
# from server import app
# connect_to_db(app)
|
{"/seed_database.py": ["/crud.py", "/model.py", "/server.py"], "/server.py": ["/model.py", "/crud.py"], "/crud.py": ["/model.py"]}
|
44,818,717
|
Raj181101/HomeScreenPvtltd
|
refs/heads/master
|
/customer_store/views.py
|
from django.shortcuts import render
from rest_framework.serializers import ModelSerializer, SerializerMethodField
from django.views.decorators.csrf import csrf_exempt, csrf_protect
from django.views.decorators.http import require_POST, require_GET
from django.http import HttpResponse, JsonResponse
from .models import *
# Create your views here.
class CustomerSerializer(ModelSerializer):
    """Serializes a Customer plus a flattened list of their bookmarks."""

    bookmarks = SerializerMethodField()

    class Meta:
        model = Customer
        depth = 1
        fields = ('id', 'name', 'email', 'mobile', 'langitude', 'latitude', 'bookmarks')

    def get_bookmarks(self, customer):
        """Return [{title, url, source_name}, ...] from the customer's most
        recent CustomerBookmark, or [] when the customer has none.

        Bug fix: the original dereferenced ``.last().bookmarks`` unguarded
        and raised AttributeError for customers without any CustomerBookmark.
        """
        latest = customer.customerbookmark_set.last()
        if latest is None:
            return []
        return [
            {
                'title': bookmark.title,
                'url': bookmark.url,
                'source_name': bookmark.source_name,
            }
            for bookmark in latest.bookmarks.all()
        ]
class BookmarkSerializer(ModelSerializer):
    """Serializes a Bookmark plus a summary of its owning customer."""

    customer = SerializerMethodField()

    class Meta:
        model = Bookmark
        depth = 1
        fields = ('id', 'title', 'url', 'source_name', 'customer')

    def get_customer(self, bookmark):
        """Return {id, name, email, mobile} for the bookmark's owner, or {}.

        Bug fix: ``customerbookmark_set.last()`` can be None, and the
        CustomerBookmark.customer FK is nullable (on_delete=SET_NULL), so
        both are guarded instead of raising AttributeError.
        """
        link = bookmark.customerbookmark_set.last()
        if link is None or link.customer is None:
            return {}
        customer = link.customer
        return {
            'id': customer.id,
            'name': customer.name,
            'email': customer.email,
            'mobile': customer.mobile,
        }
@require_GET
@csrf_exempt
def customer(request, id):
    '''
    Return the serialized customer with the given primary key.

    url = 'http://localhost:8000/api/customer/2/'
    '''
    # Bug fixes: ``response`` was unbound on the error path (UnboundLocalError
    # at the JsonResponse), the error payload was assigned to an unused
    # ``data`` variable, and ``ValidationError`` was never imported (NameError
    # inside the except clause).
    status = False
    response = ["Invalid UID/id"]
    try:
        providers = Customer.objects.filter(id=id)
        response = CustomerSerializer(providers, many=True).data
        status = True
    except Exception:
        status = False
        response = ["Invalid UID/id"]
    return JsonResponse({
        'status': status,
        'data': response
    })
@require_GET
@csrf_exempt
def all_customer(request):
    """Return every active (status=True) customer, serialized.

    Bug fixes mirror ``customer``: ``response`` is now always bound, the
    error payload is actually returned instead of being stored in an unused
    ``data`` variable, and the undefined ``ValidationError`` name is gone.
    """
    status = False
    response = ["Invalid UID/id"]
    try:
        providers = Customer.objects.filter(status=True)
        response = CustomerSerializer(providers, many=True).data
        status = True
    except Exception:
        status = False
        response = ["Invalid UID/id"]
    return JsonResponse({
        'status': status,
        'data': response
    })
@require_GET
@csrf_exempt
def api_browse(request):
    '''
    url = 'http://localhost:8000/api/browse/?id=2',
    url = 'http://localhost:8000/api/browse/?source_name=Python',
    url = 'http://localhost:8000/api/browse/?title=Python',
    url = 'http://localhost:8000/api/browse/?startdate=2020-09-01&enddate=2020-09-05',
    url = 'http://localhost:8000/api/browse/?latitude=77.5946&longitude=12.9716',
    url = 'http://localhost:8000/api/browse/?sort_by=false/true',
    '''
    # Multi-filter search endpoint.  The filter blocks below are independent
    # ``if`` statements (not elif), and each one overwrites ``response``/
    # ``status``/``message`` — so when several parameters are supplied, the
    # LAST matching block wins.  Preserved as-is; restructuring would change
    # observable behavior.
    status = False
    response = ["No Data Found On Your Search!."]
    message = 'Success'
    id = request.GET.get('id')  # NOTE: shadows the ``id`` builtin inside this view
    source_name = request.GET.get('source_name')
    title = request.GET.get('title')
    ##Search on Date Range
    startdate = request.GET.get('startdate')
    enddate = request.GET.get('enddate')
    ##Search on Location of Latitude and Langitude
    latitude = request.GET.get('latitude')
    longitude = request.GET.get('longitude')
    ##Search on Sorting from low to high or high to low
    sort_by = request.GET.get('sort_by')
    if id or source_name or title or startdate or enddate or latitude or longitude or sort_by:
        if id:
            # Exact primary-key lookup; returns customer data (with bookmarks).
            try:
                customers = Customer.objects.filter(id=id)
                if customers:
                    response = CustomerSerializer(customers, many=True).data
                    status = True
                else:
                    status = False
                    message = 'No Data Matched With Your Search ID !. Please Provide Valid ID'
            except Exception as e:
                status = False
                message = str(e)
        if source_name:
            # Case-insensitive substring match on the bookmark's source.
            try:
                bookmarks = Bookmark.objects.filter(source_name__icontains = source_name)
                if bookmarks:
                    response = BookmarkSerializer(bookmarks, many=True).data
                    status = True
                else:
                    status = False
                    message = 'No Data Matched With Your Search !. Please Provide Valid SourceName'
            except Exception as e:
                status = False
                message = str(e)
        if title:
            # Case-insensitive substring match on the bookmark title.
            try:
                bookmarks = Bookmark.objects.filter(title__icontains = title)
                if bookmarks:
                    response = BookmarkSerializer(bookmarks, many=True).data
                    status = True
                else:
                    status = False
                    message = 'No Data Matched With Your Search !. Please Provide Valid Title'
            except Exception as e:
                status = False
                message = str(e)
        if startdate and enddate:
            # Inclusive created_date range; both bounds must be supplied.
            try:
                bookmarks = Bookmark.objects.filter(created_date__range = [startdate,enddate])
                if bookmarks:
                    response = BookmarkSerializer(bookmarks, many=True).data
                    status = True
                else:
                    status = False
                    message = 'No Data Matched With Your Search !. Please Provide Valid Dates'
            except Exception as e:
                status = False
                message = str(e)
        if latitude and longitude:
            # Find customers at the exact coordinates, then all bookmarks
            # reachable through their CustomerBookmark join rows.
            # (``langitude`` is the model's misspelled longitude field.)
            try:
                customers = Customer.objects.filter(latitude=latitude,langitude=longitude)
                if customers:
                    ids = [bookmark.id for customer in customers for bookmark in customer.customerbookmark_set.all()]
                    bookmarks = Bookmark.objects.filter(id__in=ids)
                    response = BookmarkSerializer(bookmarks, many=True).data
                    status = True
                else:
                    status = False
                    message = 'No Data Matched With Your Search ID !. Please Provide Valid Latitude and Longitude'
            except Exception as e:
                status = False
                message = str(e)
        if sort_by and sort_by.lower() == 'true':
            # sort_by=true: newest bookmarks first.
            try:
                bookmarks = Bookmark.objects.all().order_by('-created_date')
                if bookmarks:
                    response = BookmarkSerializer(bookmarks, many=True).data
                    status = True
                else:
                    status = False
                    message = 'No Data Matched With Your Search !. '
            except Exception as e:
                status = False
                message = str(e)
        elif sort_by and sort_by.lower() == 'false':
            # sort_by=false: oldest bookmarks first.
            try:
                bookmarks = Bookmark.objects.all().order_by('created_date')
                if bookmarks:
                    response = BookmarkSerializer(bookmarks, many=True).data
                    status = True
                else:
                    status = False
                    message = 'No Data Matched With Your Search !.'
            except Exception as e:
                status = False
                message = str(e)
    else:
        # No recognized query parameter at all.
        message = 'Fails'
    return JsonResponse({
        'status' : status,
        'message' : message,
        'data' : response
    })
@require_POST
@csrf_exempt
def bookmark_create(request):
    """
    Create a Bookmark from POSTed fields and attach it to the customer's
    most recent CustomerBookmark set.

    url = http://127.0.0.1:8000/api/create/
    data = {
        'title':'Python',
        'url':'https://www.djangoproject.com/'
        'source_name':'djangoteam',
        'customer_id':1,
    }
    """
    success = False
    response = {}
    title = request.POST.get('title')
    url = request.POST.get('url')
    source_name = request.POST.get('source_name')
    customer_id = request.POST.get('customer_id')
    if not (title and url and source_name and customer_id):
        # One or more required fields missing.
        response['message'] = 'Please Provide Valid Data'
    else:
        try:
            if Customer.objects.filter(id=customer_id):
                try:
                    new_bookmark = Bookmark.objects.create(title=title, url=url, source_name=source_name)
                    owner = Customer.objects.filter(id=customer_id).last()
                    link = owner.customerbookmark_set.last()
                    link.bookmarks.add(new_bookmark)
                    link.save()
                    response['message'] = 'Bookmark Created Succesfully'
                    success = True
                except Exception as e:
                    response['message'] = str(e)
            else:
                response['message'] = 'Customer Does Not Exists With Provided ID,Please Provide Valid ID'
        except Exception as e:
            response['message'] = str(e)
    return JsonResponse({
        'success': success,
        'data': response
    })
|
{"/customer_store/views.py": ["/customer_store/models.py"]}
|
44,818,718
|
Raj181101/HomeScreenPvtltd
|
refs/heads/master
|
/customer_proj/urls.py
|
from django.contrib import admin
from django.urls import path
from customer_store import views as views
# URL routing: read-only customer endpoints plus bookmark creation.
urlpatterns = [
    path('admin/', admin.site.urls),  # Django admin site
    path('api/customer/<int:id>/', views.customer, name='customer'),  # single customer by pk
    path('api/customers/', views.all_customer, name='all_customer'),  # all active customers
    path('api/browse/', views.api_browse, name='api_browse'),  # multi-filter search
    path('api/create/', views.bookmark_create, name='bookmark_create'),  # POST: create bookmark
]
|
{"/customer_store/views.py": ["/customer_store/models.py"]}
|
44,818,719
|
Raj181101/HomeScreenPvtltd
|
refs/heads/master
|
/customer_store/models.py
|
from django.db import models
from django.core.validators import RegexValidator
from django.utils import timezone
from django.utils.text import slugify
from PIL import Image
# Create your models here.
class Customer(models.Model):
    """An end user with contact details and an optional geolocation."""

    name = models.CharField(max_length=48, blank=True, null=True)
    # NOTE(review): password is stored as plain text — should be hashed.
    password = models.CharField(max_length=50, blank=True, null=True)
    email = models.EmailField(blank=True, null=True)
    # Optional country code, then exactly 10 digits.
    mobile = models.CharField(validators=[
        RegexValidator(
            regex=r'^(\+\d{1,3}[- ]?)?\d{10}$',
            message="Invalid Number")], max_length=15)
    profile_pic = models.ImageField(upload_to='profile_pics', blank=True, null=True)
    # Bug fix: auto_now and auto_now_add were swapped — created_date was
    # rewritten on every save while modified_date never changed after insert.
    created_date = models.DateTimeField(auto_now_add=True)
    modified_date = models.DateTimeField(auto_now=True)
    status = models.BooleanField(default=True)
    # 'langitude' is a typo for longitude, kept because views/serializers
    # reference the field by this exact name.
    langitude = models.CharField(max_length=100, blank=True, null=True)
    latitude = models.CharField(max_length=100, blank=True, null=True)

    def __str__(self):
        # name is nullable; avoid TypeError in admin/shell listings.
        return self.name or ''
class Bookmark(models.Model):
    """A saved link whose slug is re-derived from the title on every save."""

    title = models.CharField(max_length=400)
    # NOTE(review): slug is unique=True but slugify(title) collides for
    # identical titles, which would raise IntegrityError — confirm intent.
    slug = models.SlugField(max_length=500, blank=True, null=True, unique=True)
    url = models.URLField(max_length=600, blank=True, null=True)
    source_name = models.CharField(max_length=250)
    # Bug fix: auto_now and auto_now_add were swapped — created_date was
    # rewritten on every save while modified_date never changed after insert.
    created_date = models.DateTimeField(auto_now_add=True)
    modified_date = models.DateTimeField(auto_now=True)

    def save(self, *args, **kwargs):
        # Regenerate the slug from the current title before persisting.
        self.slug = slugify(str(self.title), allow_unicode=True)
        super().save(*args, **kwargs)

    def __str__(self):
        return self.title
class CustomerBookmark(models.Model):
    """Join object linking a Customer to their set of Bookmarks."""

    customer = models.ForeignKey(Customer, on_delete=models.SET_NULL, blank=True, null=True)
    bookmarks = models.ManyToManyField(Bookmark, blank=True)
    total_bookmarks = models.PositiveIntegerField()

    def __str__(self):
        # Bug fix: customer is nullable (on_delete=SET_NULL) and
        # Customer.name is nullable too; the original raised
        # AttributeError/TypeError after a customer was deleted.
        if self.customer and self.customer.name:
            return self.customer.name
        return ''
|
{"/customer_store/views.py": ["/customer_store/models.py"]}
|
44,944,375
|
theresachan/OnuslesPaigie
|
refs/heads/main
|
/Forms.py
|
from wtforms.fields import *
from wtforms.fields.html5 import EmailField
from wtforms import Form, StringField, TextAreaField, validators, SelectField, IntegerField
class CreateEntryForm(Form):
    """Staff expense-entry form: cost category, amount, optional description."""
    cost_category = StringField('Cost Category', [validators.Length(min=1, max=150), validators.DataRequired()])
    expenses = StringField('Expenses', [validators.Length(min=1, max=150), validators.DataRequired()])
    description = TextAreaField('Description', [validators.Optional()])
class CheckoutForm(Form):
    """Checkout form: contact info, shipping address, optional card details."""
    first_name = StringField('First Name', [validators.DataRequired()])
    last_name = StringField('Last Name', [validators.DataRequired()])
    email = EmailField('Email', [validators.DataRequired()])
    # NOTE(review): IntegerField drops leading zeros — phone and postal code
    # are better modelled as StringFields with a regexp validator.
    phone = IntegerField('Phone Number', [validators.DataRequired()])
    country = SelectField('Country', [validators.DataRequired()],
                          choices=[('', 'Select'), ('S', 'Singapore'), ('AF', 'Afghanistan')], default='')
    address1 = StringField('Address Line 1', [validators.DataRequired()])
    address2 = StringField('Address Line 2', [validators.DataRequired()])
    postal_code = IntegerField('Postal Code', [validators.DataRequired()])
    # NOTE(review): card number / CVV are collected as plain form fields —
    # make sure these are never persisted or logged (PCI DSS).
    card_name = StringField("Card Name", [validators.Optional()])
    card_number = IntegerField("Card Number", [validators.Optional()])
    expiry_date = DateTimeField("Expiry Date", [validators.Optional()])
    cvv = IntegerField("CVV", [validators.Optional()])
    remember_me = BooleanField("Remember Me")
|
{"/__init__.py": ["/Forms.py", "/Entry.py", "/Returns.py", "/Order.py"]}
|
44,944,376
|
theresachan/OnuslesPaigie
|
refs/heads/main
|
/Order.py
|
class Order:
    """A customer order keyed by a class-wide auto-incrementing id.

    Ids come from the shared ``count_id`` counter, so they restart from 1
    whenever the process does.
    """

    count_id = 0  # shared sequence for order ids

    def __init__(self, email, address1, address2):
        Order.count_id += 1
        self.__order_id = Order.count_id
        self.__email = email
        self.__address1 = address1
        self.__address2 = address2

    # --- order_id ---
    def get_order_id(self):
        return self.__order_id

    def set_order_id(self, order_id):
        self.__order_id = order_id

    # --- email ---
    def get_email(self):
        return self.__email

    def set_email(self, email):
        self.__email = email

    # --- address lines ---
    def get_address1(self):
        return self.__address1

    def set_address1(self, address1):
        self.__address1 = address1

    def get_address2(self):
        return self.__address2

    def set_address2(self, address2):
        self.__address2 = address2
|
{"/__init__.py": ["/Forms.py", "/Entry.py", "/Returns.py", "/Order.py"]}
|
44,944,377
|
theresachan/OnuslesPaigie
|
refs/heads/main
|
/Entry.py
|
class Entry:
    """An expense entry keyed by a class-wide auto-incrementing id.

    Ids come from the shared ``count_id`` counter, so they restart from 1
    whenever the process does.
    """

    count_id = 0  # shared sequence for entry ids

    def __init__(self, cost_category, expenses, description):
        Entry.count_id += 1
        self.__entry_id = Entry.count_id
        self.__cost_category = cost_category
        self.__expenses = expenses
        self.__description = description

    # --- entry_id ---
    def get_entry_id(self):
        return self.__entry_id

    def set_entry_id(self, entry_id):
        self.__entry_id = entry_id

    # --- cost_category ---
    def get_cost_category(self):
        return self.__cost_category

    def set_cost_category(self, cost_category):
        self.__cost_category = cost_category

    # --- expenses ---
    def get_expenses(self):
        return self.__expenses

    def set_expenses(self, expenses):
        self.__expenses = expenses

    # --- description ---
    def get_description(self):
        return self.__description

    def set_description(self, description):
        self.__description = description
|
{"/__init__.py": ["/Forms.py", "/Entry.py", "/Returns.py", "/Order.py"]}
|
44,944,378
|
theresachan/OnuslesPaigie
|
refs/heads/main
|
/__init__.py
|
from flask import Flask, render_template, request, redirect, url_for, session
from Forms import CreateEntryForm, CheckoutForm
import shelve, Entry, Order
app = Flask(__name__)
app.secret_key = 'any_random_string'
@app.route('/')
def home():
    """Render the landing page."""
    return render_template('home.html')
@app.route("/aboutus")
def blog():
    """Render the About Us page.

    NOTE(review): the endpoint function is named ``blog`` but serves
    /aboutus — templates must use ``url_for('blog')``.
    """
    return render_template('about.html')
@app.route("/cart")
def cart():
    """Render the shopping-cart page."""
    return render_template('cart.html')
@app.route("/checkout", methods=["GET", "POST"])
def checkout():
    """Render the checkout form; on a valid POST, persist a new Order.

    Fixes vs. original: the bare ``except:`` is narrowed to KeyError (the
    only expected failure — a brand-new shelf with no 'Orders' key), the
    error message now names orders.db instead of storage.db, and the debug
    read-back/print block is removed.
    """
    create_checkout_form = CheckoutForm(request.form)
    if request.method == 'POST' and create_checkout_form.validate():
        orders_dict = {}
        db = shelve.open('orders.db', 'c')
        try:
            orders_dict = db['Orders']
        except KeyError:
            print("Error in retrieving Orders from orders.db.")
        order = Order.Order(create_checkout_form.email.data,
                            create_checkout_form.address1.data,
                            create_checkout_form.address2.data)
        orders_dict[order.get_order_id()] = order
        db['Orders'] = orders_dict
        db.close()
        return redirect(url_for('retrieve_order'))
    return render_template('checkout.html', title="Onusles - Checkout", form=create_checkout_form)
@app.route("/contact")
def contact():
    """Render the contact page."""
    return render_template('contact.html')
@app.route("/shopgrid")
def shopgrid():
    """Render the shop grid (product listing) page."""
    return render_template('shop_grid.html')
@app.route("/order")
def retrieve_order():
    """List every stored Order.

    Bug fix: the original indexed ``db['Orders']`` unguarded and crashed
    with KeyError when no order had ever been stored; ``.get`` with an
    empty-dict default renders an empty listing instead.
    """
    db = shelve.open('orders.db', 'c')
    orders_dict = db.get('Orders', {})
    db.close()
    orders_list = list(orders_dict.values())
    return render_template('retrieveOrder.html', count=len(orders_list), orders_list=orders_list)
@app.route("/staff", methods=['GET', 'POST'])
def staff():
    """Render the entry-creation form; on a valid POST, persist a new Entry.

    Fixes vs. original: the bare ``except:`` is narrowed to KeyError (the
    only expected failure — a brand-new shelf with no 'Entries' key) and
    the debug read-back/print block is removed.
    """
    create_entry_form = CreateEntryForm(request.form)
    if request.method == 'POST' and create_entry_form.validate():
        entries_dict = {}
        db = shelve.open('storage.db', 'c')
        try:
            entries_dict = db['Entries']
        except KeyError:
            print("Error in retrieving Entries from storage.db.")
        entry = Entry.Entry(create_entry_form.cost_category.data,
                            create_entry_form.expenses.data,
                            create_entry_form.description.data)
        entries_dict[entry.get_entry_id()] = entry
        db['Entries'] = entries_dict
        db.close()
        session['entry_created'] = entry.get_cost_category()
        return redirect(url_for('retrieve_entries'))
    return render_template("staff.html", form=create_entry_form)
@app.route('/retrieveEntry')
def retrieve_entries():
    """List every stored expense Entry.

    Bug fix: opening with 'r' crashed on a fresh install (no storage.db
    yet), and indexing ``db['Entries']`` crashed when the key was absent;
    'c' + ``.get`` renders an empty listing in both cases.
    """
    db = shelve.open('storage.db', 'c')
    entries_dict = db.get('Entries', {})
    db.close()
    entries_list = list(entries_dict.values())
    return render_template('retrieveEntries.html', count=len(entries_list), entries_list=entries_list)
@app.route('/updateEntry/<int:id>/', methods=['GET','POST'])
def update_entry(id):
    """Edit an existing expense entry.

    GET pre-populates the form from storage.db; a valid POST writes the
    mutated entry back and redirects to the listing page.
    """
    update_entry_form = CreateEntryForm(request.form)
    if request.method == 'POST' and update_entry_form.validate():
        entries_dict = {}
        db = shelve.open('storage.db', 'w')
        entries_dict = db['Entries']
        entry = entries_dict.get(id)
        # Mutate the stored Entry in place, then reassign the whole dict —
        # shelve only persists on key assignment.
        entry.set_cost_category(update_entry_form.cost_category.data)
        entry.set_expenses(update_entry_form.expenses.data)
        entry.set_description(update_entry_form.description.data)
        db['Entries'] = entries_dict
        db.close()
        session['entry_updated'] = entry.get_cost_category()
        return redirect(url_for('retrieve_entries'))
    else:
        entries_dict = {}
        db = shelve.open('storage.db', 'r')
        entries_dict = db['Entries']
        db.close()
        entry = entries_dict.get(id)
        # Pre-fill the form with the entry's current values.
        # NOTE(review): ``entry`` is None for an unknown id — this raises
        # AttributeError (500) rather than a 404.
        update_entry_form.cost_category.data = entry.get_cost_category()
        update_entry_form.expenses.data = entry.get_expenses()
        update_entry_form.description.data = entry.get_description()
        return render_template('updateEntry.html', form=update_entry_form)
@app.route('/deleteEntry/<int:id>', methods=['POST'])
def delete_entry(id):
    """Remove entry *id* from storage.db, note it in the session, redirect."""
    db = shelve.open('storage.db', 'w')
    entries_dict = db['Entries']
    removed = entries_dict.pop(id)
    # Reassign the whole dict so shelve persists the removal.
    db['Entries'] = entries_dict
    db.close()
    session['entry_deleted'] = removed.get_cost_category()
    return redirect(url_for('retrieve_entries'))
@app.errorhandler(404)
def page_not_found(e):
    """Render the custom 404 page with a 404 status code."""
    return render_template('error404.html'), 404
if __name__ == '__main__':
app.run()
|
{"/__init__.py": ["/Forms.py", "/Entry.py", "/Returns.py", "/Order.py"]}
|
45,013,982
|
dlehdwncy2/malcraw
|
refs/heads/main
|
/malware_crawler.py
|
import filetype
from bs4 import BeautifulSoup
import sys
import hashlib
import re
import os
from multiprocessing import Process, current_process ,Queue, Pool
import socket
import datetime
import time
import requests
import urllib
import urllib.request
import zipfile
import json
import hashlib
import shutil
from requests import get
from pyunpack import Archive
from urllib.request import Request, urlopen
def parse(url):
    """Fetch *url* with a browser-like User-Agent and return its parsed
    BeautifulSoup tree (lxml parser)."""
    req = urllib.request.Request(url)
    req.add_header('User-Agent', 'Mozilla/4.0 (compatible; MSIE 8.0; Windows NT 5.1)')
    return BeautifulSoup(urllib.request.urlopen(req), "lxml")
##########################################
# Working directories for downloaded samples.  makedirs(exist_ok=True)
# replaces the original exists()-then-makedirs pairs: idempotent and free
# of the check-then-create race.
temp_paths = r'E:\sample\crawler\temp'
daily_malware_paths = r'E:\sample\crawler\daily'
daily_malware_paths_temp = r'E:\sample\crawler\daily\temp'
daily_malware_paths_temp2 = r'E:\sample\crawler\daily\temp2'
file_type_path = r'E:\sample\crawler\malware\filetype'
others_paths = r'E:\sample\crawler\malware\filetype\others'
for _path in (temp_paths, daily_malware_paths, daily_malware_paths_temp,
              daily_malware_paths_temp2, file_type_path, others_paths):
    os.makedirs(_path, exist_ok=True)
##########################################
def malc0de():
    """Poll the malc0de RSS feed forever, downloading each referenced sample
    and filing it under file_type_path by detected file type.

    NOTE(review): the bare ``except:`` clauses swallow every error
    (including KeyboardInterrupt), and the loop never terminates.
    """
    while True:
        soup=parse('http://malc0de.com/rss')
        # Skip the first <description> (feed header); the rest carry URLs.
        mlc=[row for row in soup('description')][1:]
        mlc_sites = list()
        for row in mlc:
            # Second whitespace-separated token holds the URL; strip commas.
            # (The re.sub here replaces '&' with '&' — a no-op as written;
            # presumably it was meant to unescape '&amp;' — TODO confirm.)
            site = re.sub('&', '&', str(row).split()[1]).replace(',', '')
            mlc_sites.append(site)
        for url in mlc_sites:
            if not re.match('http', url):url = 'http://' + url
            try:
                response=get(url)
                malware_file_data = response.content
            except:continue
            # Content is normally bytes; fall back to encoding a str.
            try:sha256=hashlib.sha256(malware_file_data).hexdigest()
            except:sha256=hashlib.sha256(malware_file_data.encode()).hexdigest()
            temp_file_name_full_path=os.path.join(temp_paths,sha256)
            if not os.path.isfile(temp_file_name_full_path):
                with open(temp_file_name_full_path,'wb') as file_handle:file_handle.write(malware_file_data)
            try:kind = filetype.guess(temp_file_name_full_path)
            except OSError:continue
            if kind is None:
                # Unknown type: bucket by the URL's trailing "extension" when
                # it looks plausible (< 6 chars), else into the shared others dir.
                if len(url.split('.')[-1])<6:
                    extension=os.path.join(file_type_path,url.split('.')[-1])
                    if not os.path.exists(extension): os.makedirs(extension)
                    others_file_name_full_path=os.path.join(extension,sha256)
                else:
                    others_file_name_full_path=os.path.join(others_paths,sha256)
                shutil.move(temp_file_name_full_path,others_file_name_full_path)
                continue
            else:
                # Known type: file under its detected extension directory.
                extension_paths=os.path.join(file_type_path,kind.extension)
                extension_file_name_full_path=os.path.join(extension_paths,sha256)
                if not os.path.exists(extension_paths):os.makedirs(extension_paths)
                shutil.move(temp_file_name_full_path,extension_file_name_full_path)
                continue
def dasmalwerk():
    """Poll DAS MALWERK for samples, unpack zip archives (password
    'infected'), and file everything under file_type_path by detected type.

    Bug fixes vs. the original:
      * ``others_paths`` was assigned inside the function in one branch,
        which made *every* read of that name in this function local — the
        other branches raised UnboundLocalError.  The local now has its own
        name (``ext_dir``) so the module-level fallback directory is used
        as intended.
      * ``filetype.guess`` can return None; the original dereferenced
        ``kind.extension`` unguarded and crashed on unknown file types.

    NOTE(review): every file unpacked from an archive is stored under the
    *zip's* sha256, so multi-file archives overwrite each other's copies —
    confirm whether per-file hashes were intended.
    """
    while True:
        time.sleep(100)
        url = "https://das-malwerk.herokuapp.com/"
        soup = parse(url)
        links = soup.select("tbody > tr > td > a")
        for link in links:
            malware_url = link.attrs['href']
            if 'virustotal' in malware_url:
                continue  # analysis link, not a sample
            try:
                response = get(malware_url)
                malware_file_data = response.content
            except:
                continue
            # Content is normally bytes; fall back to encoding a str.
            try:
                sha256 = hashlib.sha256(malware_file_data).hexdigest()
            except:
                sha256 = hashlib.sha256(malware_file_data.encode()).hexdigest()
            temp_file_name_full_path = os.path.join(temp_paths, sha256)
            if not os.path.isfile(temp_file_name_full_path):
                with open(temp_file_name_full_path, 'wb') as file_handle:
                    file_handle.write(malware_file_data)
            kind = filetype.guess(temp_file_name_full_path)
            if kind is not None and kind.extension == 'zip':
                zFile = zipfile.ZipFile(temp_file_name_full_path)
                zFile.setpassword(b'infected')
                unpack_file_full_path_list = []
                for zfile_name in zFile.namelist():
                    # Extract into the temp dir under the archive-internal name.
                    zFile.extract(zfile_name, temp_paths)
                    unpack_file_full_path_list.append(os.path.join(temp_paths, zfile_name))
                zFile.close()
                os.remove(temp_file_name_full_path)
                for unpack_file_full_path in unpack_file_full_path_list:
                    try:
                        kind = filetype.guess(unpack_file_full_path)
                    except OSError:
                        continue
                    if kind is None:
                        if len(url.split('.')[-1]) < 4:
                            ext_dir = os.path.join(file_type_path, url.split('.')[-1])
                            if not os.path.exists(ext_dir):
                                os.makedirs(ext_dir)
                            others_file_name_full_path = os.path.join(ext_dir, sha256)
                        else:
                            others_file_name_full_path = os.path.join(others_paths, sha256)
                        shutil.copy(unpack_file_full_path, others_file_name_full_path)
                    else:
                        extension_paths = os.path.join(file_type_path, kind.extension)
                        extension_file_name_full_path = os.path.join(extension_paths, sha256)
                        if not os.path.exists(extension_paths):
                            os.makedirs(extension_paths)
                        shutil.copy(unpack_file_full_path, extension_file_name_full_path)
            else:
                # Not a zip (or type unknown): file the download itself.
                try:
                    kind = filetype.guess(temp_file_name_full_path)
                except OSError:
                    continue
                if kind is None:
                    others_file_name_full_path = os.path.join(others_paths, sha256)
                    shutil.copy(temp_file_name_full_path, others_file_name_full_path)
                else:
                    extension_paths = os.path.join(file_type_path, kind.extension)
                    extension_file_name_full_path = os.path.join(extension_paths, sha256)
                    if not os.path.exists(extension_paths):
                        os.makedirs(extension_paths)
                    shutil.copy(temp_file_name_full_path, extension_file_name_full_path)
def urlhaus():
request_header = {
'Accept': 'text/html, application/xhtml+xml, image/jxr, */*',
'Accept-Language': 'ko-KR',
'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64; Trident/7.0; rv:11.0) like Gecko',
'Accept-Encoding': 'gzip, deflate',
'Connection': 'close'}
while True:
url_list="https://urlhaus.abuse.ch/browse/page/0","https://urlhaus.abuse.ch/browse/page/1"
default_path="https://urlhaus.abuse.ch"
for url in url_list:
resp = requests.get(url,headers=request_header)
soup=parse(resp.content)
#time.sleep(3)
links = soup.select("tbody > tr > td > a")
for link in links:
href=link.attrs['href']
if 'url' in href:
child_url_link=default_path+href
#print(child_url_link)
time.sleep(3)
child_soup=parse(child_url_link)
links = child_soup.select("tbody > tr > td > span")[0]
malware_url=links.string
#print(malware_url)
try:
response = get(malware_url)
malware_file_data = response.content
except:
continue
try:sha256 = hashlib.sha256(malware_file_data).hexdigest()
except:sha256 = hashlib.sha256(malware_file_data.encode()).hexdigest()
temp_file_name_full_path=os.path.join(temp_paths,sha256)
if not os.path.isfile(temp_file_name_full_path):
with open(temp_file_name_full_path,'wb') as file_handle:file_handle.write(malware_file_data)
try:kind = filetype.guess(temp_file_name_full_path)
except OSError:continue
if kind is None:
if len(url.split('.')[-1])<4:
ext_path=os.path.join(file_type_path,url.split('.')[-1])
print(ext_path)
if not os.path.exists(ext_path): os.makedirs(ext_path)
others_file_name_full_path=os.path.join(ext_path,sha256)
else:
others_file_name_full_path=os.path.join(others_paths,sha256)
shutil.move(temp_file_name_full_path,others_file_name_full_path)
continue
else:
extension_paths=os.path.join(file_type_path,kind.extension)
print(extension_paths)
extension_file_name_full_path=os.path.join(extension_paths,sha256)
if not os.path.exists(extension_paths):os.makedirs(extension_paths)
shutil.move(temp_file_name_full_path,extension_file_name_full_path)
continue
'''
try:kind = filetype.guess(rename_file_full_path2)
except OSError:continue
if kind is None:
other_file_name_full_path2=os.path.join(file_type_path,file_sha256_hash2)
shutil.move(rename_file_full_path2,other_file_name_full_path2)
continue
else:
extension_paths2=os.path.join(file_type_path,kind.extension)
extension_file_name_full_path2=os.path.join(extension_paths2,file_sha256_hash2)
if not os.path.exists(extension_paths2):os.makedirs(extension_paths2)
shutil.move(rename_file_full_path2,extension_file_name_full_path2)
continue
'''
def bazaar():
ZIP_PASSWORD = b"infected"
while True:
default_path="https://mb-api.abuse.ch/downloads/"
soup=parse(default_path)
links = soup.select("table > tr > td > a")
time_ago = datetime.datetime.now() - datetime.timedelta(days = 1)
time_ago = time_ago.strftime("%Y-%m-%d")
daily_file_list=os.listdir(daily_malware_paths)
for link in links:
href=link.attrs['href']
if len(href)<3:continue
#if time_ago!=href.split('.')[0]:continue
if href.split('.') in daily_file_list:continue
child_url_link=default_path+href
try:
response = get(child_url_link)
malware_file_data = response.content
except:
continue
daily_file_name_full_path=os.path.join(daily_malware_paths,href)
#print(daily_file_name_full_path)
if not os.path.isfile(daily_file_name_full_path):
with open(daily_file_name_full_path,'wb') as file_handle:file_handle.write(malware_file_data)
#pyunpack.Archive(daily_file_name_full_path+'.zip').extractall(daily_file_name_full_path)
malware_daily_zip = zipfile.ZipFile(daily_file_name_full_path)
malware_daily_zip.setpassword(ZIP_PASSWORD)
malware_daily_zip.extractall(daily_malware_paths_temp)
malware_daily_zip.close()
extract_file_list=[os.path.join(daily_malware_paths_temp,filename) for filename in os.listdir(daily_malware_paths_temp)]
for file_full_path in extract_file_list:
try:
Archive(file_full_path).extractall(daily_malware_paths_temp2)
print(file_full_path)
extract_file_list2=[os.path.join(daily_malware_paths_temp2,filename) for filename in os.listdir(daily_malware_paths_temp2)]
for file_full_path2 in extract_file_list2:
file_data2=open(file_full_path2,'rb').read()
file_sha256_hash2=hashlib.sha256(file_data2).hexdigest()
rename_file_full_path2=os.path.join(daily_malware_paths_temp2,file_sha256_hash2)
os.rename(file_full_path2,rename_file_full_path2)
try:kind = filetype.guess(rename_file_full_path2)
except OSError:continue
if kind is None:
other_file_name_full_path2=os.path.join(file_type_path,file_sha256_hash2)
shutil.move(rename_file_full_path2,other_file_name_full_path2)
continue
else:
extension_paths2=os.path.join(file_type_path,kind.extension)
extension_file_name_full_path2=os.path.join(extension_paths2,file_sha256_hash2)
if not os.path.exists(extension_paths2):os.makedirs(extension_paths2)
shutil.move(rename_file_full_path2,extension_file_name_full_path2)
continue
except ValueError:
print(file_full_path)
file_data=open(file_full_path,'rb').read()
file_sha256_hash=hashlib.sha256(file_data).hexdigest()
if '.' in file_full_path:
try:
rename_file_full_path=os.path.join(daily_malware_paths_temp,file_sha256_hash)
os.rename(file_full_path,rename_file_full_path)
try:kind = filetype.guess(rename_file_full_path)
except OSError:continue
if kind is None:
other_file_name_full_path=os.path.join(file_type_path,file_sha256_hash)
shutil.move(rename_file_full_path,other_file_name_full_path)
continue
else:
extension_paths=os.path.join(file_type_path,kind.extension)
extension_file_name_full_path=os.path.join(extension_paths,file_sha256_hash)
if not os.path.exists(extension_paths):os.makedirs(extension_paths)
shutil.move(rename_file_full_path,extension_file_name_full_path)
continue
except FileExistsError:
os.remove(file_full_path)
"""
malc0de_proc = Process(target=malc0de, args=())
수집대상 URL
https://urlhaus.abuse.ch/browse/
http://www.virusign.com/ -> 회원가입 필요 ㅄ
http://contagiominidump.blogspot.com/2018/ -> 다운로드 후 unzip 필요
https://dasmalwerk.eu/ -> 다운로드 후 unzip 필요 -> 패스워드 infected
"""
if __name__=="__main__":
# start Multi Process
#malc0de()
urlhaus()
bazaar()
'''
dasmalwerk_proc = Process(target=dasmalwerk, args=())
dasmalwerk_proc.start()
urlhaus_proc = Process(target=urlhaus, args=())
urlhaus_proc.start()
malc0de_proc=Process(target=malc0de,args=())
malc0de_proc.start()
bazaar_proc = Process(target=bazaar, args=())
bazaar_proc.start()
'''
|
{"/unzip_manager.py": ["/craw_comm.py"]}
|
45,055,091
|
auckyaisy/kpuvsmala
|
refs/heads/main
|
/pemilu/views.py
|
from django.contrib.auth import authenticate, login, logout
from django.shortcuts import render
from django.http import HttpResponse, HttpResponseRedirect
from django.urls import reverse
def index(request):
return render(request, "pemilu/landingpage.html")
# if not request.user.is_authenticated:
# return HttpResponseRedirect(reverse("login"))
# return render(request, "pemilu/index.html")
def login_view(request):
if request.method == "POST":
username = request.POST["username"]
password = request.POST["password"]
user = authenticate(username=username, password=password)
if user is not None:
login(request, user)
return HttpResponseRedirect(reverse("index"))
else:
return render(request, "pemilu/login.html", {
"message": "Tidak Ditemukan"
})
return render(request, "pemilu/login.html")
def logout_view(request):
logout(request)
return render(request, "pemilu/login.html", {
"message": "Keluar"
})
def peserta(request):
return render(request, "pemilu/peserta.html")
def calon(request):
return render(request, "pemilu/calon.html")
def verifikasi(request):
return render(request, "pemilu/vote.html")
def vote(request):
return render(request, "pemilu/voting.html")
def loaderio(request):
return render(request, "pemilu/loaderio-a4d352d8a6f3faa6ff162f58d006c23f.txt")
|
{"/pemilu/views.py": ["/pemilu/models.py"], "/data.py": ["/pemilu/models.py"], "/asd.py": ["/pemilu/models.py"], "/pemilu/admin.py": ["/pemilu/models.py"]}
|
45,055,092
|
auckyaisy/kpuvsmala
|
refs/heads/main
|
/pemilu/urls.py
|
from django.urls import path
from . import views
urlpatterns = [
path('', views.index, name='index'),
path('login', views.login_view, name='login'),
path("logout", views.logout_view, name="logout"),
path("peserta", views.peserta, name="peserta"),
path("calon", views.calon, name="calon"),
path("verifikasi", views.verifikasi, name="verifikasi"),
path("vote", views.vote, name="vote"),
path("loaderio-a4d352d8a6f3faa6ff162f58d006c23f/", views.loaderio, name="loaderio")
]
|
{"/pemilu/views.py": ["/pemilu/models.py"], "/data.py": ["/pemilu/models.py"], "/asd.py": ["/pemilu/models.py"], "/pemilu/admin.py": ["/pemilu/models.py"]}
|
45,112,050
|
so89bur/LetsGo
|
refs/heads/main
|
/app/utils.py
|
from datetime import datetime
def send_datetime_to_client(dt):
UTC_OFFSET_TIMEDELTA = datetime.now() - datetime.utcnow()
if dt and isinstance(dt, datetime):
return (dt + UTC_OFFSET_TIMEDELTA).strftime("%d.%m.%y %H:%M:%S")
return None
|
{"/first_start.py": ["/app/__init__.py", "/app/models.py"], "/app/api/inst_search.py": ["/app/__init__.py", "/app/utils.py", "/app/api/__init__.py"], "/app/api/places.py": ["/app/__init__.py", "/app/utils.py", "/app/api/__init__.py", "/app/models.py"], "/app/__init__.py": ["/config.py"], "/app/models.py": ["/app/__init__.py"], "/app/api/bloggers.py": ["/app/__init__.py", "/app/utils.py", "/app/api/__init__.py", "/app/models.py"], "/app/api/__init__.py": ["/app/__init__.py", "/app/models.py"], "/app/api/trips.py": ["/app/__init__.py", "/app/utils.py", "/app/api/__init__.py", "/app/models.py"], "/app/api/hashtags.py": ["/app/__init__.py", "/app/utils.py", "/app/api/__init__.py", "/app/models.py"], "/app/api/settings.py": ["/app/__init__.py", "/app/utils.py", "/app/api/__init__.py", "/app/models.py"], "/app/routes.py": ["/app/__init__.py"]}
|
45,112,051
|
so89bur/LetsGo
|
refs/heads/main
|
/app/api/inst_search.py
|
import glob
import os
import time
import json
from instabot import Bot
from instaloader import Instaloader, Profile, Hashtag
from flask import request
from app import app, db
from app.utils import send_datetime_to_client
from app.api import support
class INSTGRMSearch(object):
DEF_LOC = 1000.0
def __init__(self, keyword):
self.users = {}
self.users_names = []
self.list_of_request_keys = []
self.keyword = keyword
self.users_infos = {}
self.bot, self.loader = self.__set_login_instagram__()
self.__set_default_location__()
def __set_default_location__(self):
# Для локации установим дефолтные координаты
for attr in ['s_lat', 'u_lat', 'z_lng', 'v_lng']:
setattr(self, attr, self.DEF_LOC)
return
def __set_login_instagram__(self):
# очистка данных и авторизация пользователя
cookie_del = glob.glob("config/*cookie.json")
if len(cookie_del) > 0:
os.remove(cookie_del[0])
bot = Bot(max_likes_to_like=1000000, min_likes_to_like=10)
bot.login(username='hackaton_test1', password='456321Zx', is_threaded=True)
loader = Instaloader()
USER = 'hackaton_test1'
PASSWORD = '456321Zx'
loader.login(USER, PASSWORD)
return bot, loader
def generate_list_of_request_keys(self):
# Генерируем топ запросы
self.list_of_request_keys = [
'%sпутешествие' % self.keyword,
'%s контент' % self.keyword,
'%s путешествия' % self.keyword,
'%s красивая' % self.keyword,
'%s блогер' % self.keyword,
'блогер %s' % self.keyword,
'%s блог' % self.keyword,
'блог %s' % self.keyword,
'%s путешественник' % self.keyword
]
return
def set_keyword(self, keyword):
self.keyword = keyword
return
def set_location(self, s_lat, u_lat, z_lng, v_lng):
#координаты, относительно которых будут определяться нужные блогеры
self.s_lat = float(s_lat)
self.u_lat = float(u_lat)
self.z_lng = float(z_lng)
self.v_lng = float(v_lng)
return
def set_users_by_request_key(self, request_key):
# получаем юзеров по ключу
_users_ = []
_structure_ = self.bot.get_topsearch(request_key)
for user in _structure_['users']:
_users_.append(user.get('user', {}).get('username'))
return _users_
def start_search(self, max_posts, min_followers):
# TODO старт поиска по хэштегам
# self.users = self.get_users_of_posts(self.keyword, max_posts)
# старт по топовым выдачам
self.generate_list_of_request_keys()
for request_key in self.list_of_request_keys:
users = self.set_users_by_request_key(request_key)
self.request_users_infos(users, min_followers)
return
def request_users_infos(self, users, min_followers):
# функция запрашивает сведения о пользователях-блогерах
time.sleep(15)
for username in users:
profile = Profile.from_username(self.loader.context, username)
time.sleep(1)
if profile.followers > min_followers:
self.users_infos[username] = {
'user_name': username,
'full_name': profile.full_name,
'followers': profile.followers,
'is_business_account': profile.is_business_account,
'profile_pic_url': profile.profile_pic_url
}
with open('data23.txt', 'w') as f:
json.dump(self.users_infos, f, ensure_ascii=False)
return
def get_users_of_posts(self, hastag, max_posts):
# функция возвращает множество юзеров
_users_ = set()
posts_of_hashtag = self.get_posts_of_hashtag(hastag, max_posts)
for url_post in posts_of_hashtag:
_request_media_info_ = self.bot.get_media_info(url_post)
if len(_request_media_info_) > 0:
media_info = _request_media_info_[0]
location = media_info.get('location', {})
lng = float(location.get('lng', self.DEF_LOC))
lat = float(location.get('lat', self.DEF_LOC))
_user_name_ = media_info.get('user').get('username')
if lng == self.DEF_LOC or lat == self.DEF_LOC:
# исключаем посты без локаций,
# ответственный тревелблогер локацию ставит всегда
...
elif lat > self.u_lat and lat < self.s_lat:
if lng < self.v_lng and lng > self.z_lng:
_users_.add(_user_name_)
else:
# Пост не входит в локации
...
return _users_
def get_posts_of_hashtag(self, hastag, max_posts):
return self.bot.get_total_hashtag_medias(hastag, amount=max_posts)
def get_frequent_hashtags(self, list_of_hashtags):
hashtags_humbers = {}
number_hashtags = {}
for item in list_of_hashtags:
if item in number_hashtags:
hashtags_humbers[item] = hashtags_humbers[item] + 1
else:
hashtags_humbers[item] = 1
for hashtag, number in hashtags_humbers.items():
if number in number_hashtags:
number_hashtags[number].append(hashtag)
else:
number_hashtags[number] = [hashtag]
_sorted_keys_ = sorted(list(number_hashtags.keys()), reverse=True)
_popular_ = []
for number in _sorted_keys_:
for hashtag in number_hashtags[number]:
_popular_.append(hashtag)
if len(_popular_) > 10:
return _popular_
return _popular_
def get_bloger_info_by_username(self, username):
loader = Instaloader()
profile = Profile.from_username(loader.context, username)
posts = profile.get_posts()
likes_count = 0.0
posts_count = 0.0
comments_count = 0.0
list_of_hashtags = []
for post in posts:
posts_count += 1.0
likes_count += post.likes
comments_count += post.comments
list_of_hashtags += post.caption_hashtags
coef = ((likes_count + comments_count) / (posts_count * profile.followers) ) * 100.0
# _popular_ = self.get_frequent_hashtags(list_of_hashtags)
return {
'username': username,
'posts_count': posts_count,
'likes_count': likes_count,
'comments_count': comments_count,
'coef': coef,
# '_popular_': _popular_
}
@support
@app.route('/api/v1/bloger', methods=['GET'])
def get_bloger_info():
_instgrm_ = INSTGRMSearch()
bloger_info = _instgrm_.get_bloger_info_by_username(username='kristina_azman')
return json.dumps({
'success': True,
'result': bloger_info
})
if __name__ == '__main__':
# создание соединения
_instgrm_ = INSTGRMSearch(keyword='самара')
# установка координат города
_instgrm_.set_location(
s_lat='54.41',
u_lat='51.470',
z_lng='47.55',
v_lng='52.35'
)
# # поиск блогеров и создание бд
_instgrm_.start_search(max_posts=100, min_followers=100)
_instgrm_.get_bloger_info_by_username(user_name='kristina_azman')
|
{"/first_start.py": ["/app/__init__.py", "/app/models.py"], "/app/api/inst_search.py": ["/app/__init__.py", "/app/utils.py", "/app/api/__init__.py"], "/app/api/places.py": ["/app/__init__.py", "/app/utils.py", "/app/api/__init__.py", "/app/models.py"], "/app/__init__.py": ["/config.py"], "/app/models.py": ["/app/__init__.py"], "/app/api/bloggers.py": ["/app/__init__.py", "/app/utils.py", "/app/api/__init__.py", "/app/models.py"], "/app/api/__init__.py": ["/app/__init__.py", "/app/models.py"], "/app/api/trips.py": ["/app/__init__.py", "/app/utils.py", "/app/api/__init__.py", "/app/models.py"], "/app/api/hashtags.py": ["/app/__init__.py", "/app/utils.py", "/app/api/__init__.py", "/app/models.py"], "/app/api/settings.py": ["/app/__init__.py", "/app/utils.py", "/app/api/__init__.py", "/app/models.py"], "/app/routes.py": ["/app/__init__.py"]}
|
45,112,052
|
so89bur/LetsGo
|
refs/heads/main
|
/app/api/places.py
|
import json
from flask import request
from app import app, db
from app.utils import send_datetime_to_client
from app.api import support
from app.models import Place
def prepare_place(instance):
return {
'id': instance.id,
'name': instance.name,
'lat': instance.lat,
'lon': instance.lon,
'routes_count': len(instance.RoutePlaces),
}
@support
@app.route('/api/v1/place', methods=['POST'])
def new_place():
data = prepare_form_data(request.form)
instance = Place()
db.session.add(instance)
for property_name, property_value in data.items():
if property_value:
setattr(instance, property_name, property_value)
db.session.add(instance)
db.session.commit()
return json.dumps({'success': True})
@support
@app.route('/api/v1/place/<id>', methods=['GET'])
def get_place(id):
result = None
instance = Place.query.filter_by(id=id).first()
if instance:
result = prepare_trip(instance)
if result:
return json.dumps({
'success': True,
'result': result
})
else:
return json.dumps({'success': False})
@support
@app.route('/api/v1/places', methods=['GET'])
def get_palces():
start = request.args.get('start', default=0, type=int)
limit = request.args.get('limit', default=20, type=int)
order_by = request.args.get('order_by', default='id', type=str)
order_type = request.args.get('order_type', default='asc', type=str)
filter_prop = request.args.get('filter_prop', default='name')
filter_value = request.args.get('filter_value', default=None)
search_query = request.args.get('search_query', default=None)
items = []
if search_query:
search_value = '%{}%'.format(search_query).lower()
query = Place.query.filter(Place.name.ilike(search_value))
elif filter_value and filter_prop:
query = Place.query.filter(getattr(Place, filter_prop) == filter_value)
else:
query = Place.query
if order_type == 'asc':
order_info = getattr(Place, order_by).asc()
else:
order_info = getattr(Place, order_by).desc()
for item in query.order_by(order_info).slice(start, start + limit).all():
items.append(prepare_place(item))
return json.dumps({
'success': True,
'total': Place.query.count(),
'result': items
})
@support
@app.route('/api/v1/place/<id>', methods=['PUT'])
def edit_place(id):
data = prepare_form_data(request.form)
if id:
instance = Place.query.filter_by(id=id).first()
for property_name, property_value in data.items():
if property_value:
instance[property_name] = property_value
db.session.add(instance)
db.session.commit()
return json.dumps({'success': True})
else:
return json.dumps({'success': False})
@support
@app.route('/api/v1/place/<id>', methods=['DELETE'])
def remove_place(id):
if id:
Place.query.filter_by(id=id).first().delete()
db.session.commit()
return json.dumps({'success': True})
else:
return json.dumps({'success': False})
|
{"/first_start.py": ["/app/__init__.py", "/app/models.py"], "/app/api/inst_search.py": ["/app/__init__.py", "/app/utils.py", "/app/api/__init__.py"], "/app/api/places.py": ["/app/__init__.py", "/app/utils.py", "/app/api/__init__.py", "/app/models.py"], "/app/__init__.py": ["/config.py"], "/app/models.py": ["/app/__init__.py"], "/app/api/bloggers.py": ["/app/__init__.py", "/app/utils.py", "/app/api/__init__.py", "/app/models.py"], "/app/api/__init__.py": ["/app/__init__.py", "/app/models.py"], "/app/api/trips.py": ["/app/__init__.py", "/app/utils.py", "/app/api/__init__.py", "/app/models.py"], "/app/api/hashtags.py": ["/app/__init__.py", "/app/utils.py", "/app/api/__init__.py", "/app/models.py"], "/app/api/settings.py": ["/app/__init__.py", "/app/utils.py", "/app/api/__init__.py", "/app/models.py"], "/app/routes.py": ["/app/__init__.py"]}
|
45,112,053
|
so89bur/LetsGo
|
refs/heads/main
|
/app/__init__.py
|
from flask import Flask
from config import Config
from flask_sqlalchemy import SQLAlchemy
from flask_migrate import Migrate
from flask_cors import CORS
ATTEMPTS_NUMBER = 10
app = Flask(__name__,
static_folder="../../static",
template_folder=".",
)
CORS(app, resources={r"*": {"origins": "*"}})
app.config.from_object(Config)
db = SQLAlchemy(app)
migrate = Migrate(app, db)
from app import routes, models, api
|
{"/first_start.py": ["/app/__init__.py", "/app/models.py"], "/app/api/inst_search.py": ["/app/__init__.py", "/app/utils.py", "/app/api/__init__.py"], "/app/api/places.py": ["/app/__init__.py", "/app/utils.py", "/app/api/__init__.py", "/app/models.py"], "/app/__init__.py": ["/config.py"], "/app/models.py": ["/app/__init__.py"], "/app/api/bloggers.py": ["/app/__init__.py", "/app/utils.py", "/app/api/__init__.py", "/app/models.py"], "/app/api/__init__.py": ["/app/__init__.py", "/app/models.py"], "/app/api/trips.py": ["/app/__init__.py", "/app/utils.py", "/app/api/__init__.py", "/app/models.py"], "/app/api/hashtags.py": ["/app/__init__.py", "/app/utils.py", "/app/api/__init__.py", "/app/models.py"], "/app/api/settings.py": ["/app/__init__.py", "/app/utils.py", "/app/api/__init__.py", "/app/models.py"], "/app/routes.py": ["/app/__init__.py"]}
|
45,112,054
|
so89bur/LetsGo
|
refs/heads/main
|
/migrations/versions/92a33a24a3ae_.py
|
"""empty message
Revision ID: 92a33a24a3ae
Revises:
Create Date: 2021-08-21 17:03:13.105991
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '92a33a24a3ae'
down_revision = None
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('blogger',
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
sa.Column('username', sa.String(length=40), nullable=True),
sa.Column('full_name', sa.String(length=500), nullable=True),
sa.Column('followers', sa.Integer(), nullable=True),
sa.Column('count_likes', sa.Integer(), nullable=True),
sa.Column('count_comments', sa.Integer(), nullable=True),
sa.Column('count_posts', sa.Integer(), nullable=True),
sa.Column('er', sa.Float(), nullable=True),
sa.Column('profile_pic_url', sa.String(length=500), nullable=True),
sa.Column('is_business_account', sa.Boolean(), nullable=True),
sa.Column('public', sa.Boolean(), nullable=True),
sa.Column('verify', sa.Boolean(), nullable=True),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('username')
)
op.create_table('hashtag',
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
sa.Column('name', sa.String(length=40), nullable=True),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('name')
)
op.create_table('invitation_info',
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
sa.Column('label', sa.String(length=500), nullable=True),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('label')
)
op.create_table('media',
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
sa.Column('name', sa.String(length=40), nullable=True),
sa.Column('type', sa.String(length=100), nullable=True),
sa.Column('src', sa.String(length=100), nullable=True),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('name'),
sa.UniqueConstraint('src'),
sa.UniqueConstraint('type')
)
op.create_table('place',
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
sa.Column('name', sa.String(length=255), nullable=True),
sa.Column('lat', sa.Float(), nullable=True),
sa.Column('lon', sa.Float(), nullable=True),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('name')
)
op.create_table('route',
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
sa.Column('name', sa.String(length=40), nullable=True),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('name')
)
op.create_table('settings',
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
sa.Column('key', sa.String(length=40), nullable=True),
sa.Column('value', sa.String(length=100), nullable=True),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('key'),
sa.UniqueConstraint('value')
)
op.create_table('status_trip',
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
sa.Column('name', sa.String(length=40), nullable=True),
sa.Column('label', sa.String(length=40), nullable=True),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('label'),
sa.UniqueConstraint('name')
)
op.create_table('type_media',
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
sa.Column('label', sa.String(length=40), nullable=True),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('label')
)
op.create_table('user',
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
sa.Column('login', sa.String(length=40), nullable=True),
sa.Column('pass_hash', sa.String(length=128), nullable=True),
sa.Column('registered', sa.DateTime(), nullable=False),
sa.Column('updated', sa.DateTime(), nullable=True),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('login')
)
op.create_table('post',
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
sa.Column('blogger_id', sa.Integer(), nullable=True),
sa.Column('name', sa.String(length=200), nullable=True),
sa.Column('date', sa.DateTime(), nullable=True),
sa.Column('count_likes', sa.Integer(), nullable=True),
sa.Column('count_comments', sa.Integer(), nullable=True),
sa.Column('deleted', sa.Boolean(), nullable=True),
sa.Column('audience_coverage', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['blogger_id'], ['blogger.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_table('route_place',
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
sa.Column('order', sa.Integer(), nullable=True),
sa.Column('place_id', sa.Integer(), nullable=True),
sa.Column('route_id', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['place_id'], ['place.id'], ),
sa.ForeignKeyConstraint(['route_id'], ['route.id'], ),
sa.PrimaryKeyConstraint('id')
)
op.create_table('trip',
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
sa.Column('route_id', sa.Integer(), nullable=True),
sa.Column('invitation_text', sa.String(length=500), nullable=True),
sa.Column('status_trip_id', sa.Integer(), nullable=True),
sa.Column('name', sa.String(length=40), nullable=True),
sa.Column('date', sa.DateTime(), nullable=True),
sa.Column('min_count_folowers', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(['route_id'], ['route.id'], ),
sa.ForeignKeyConstraint(['status_trip_id'], ['status_trip.id'], ),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('name')
)
op.create_table('blogger_post',
sa.Column('blogger_id', sa.Integer(), nullable=False),
sa.Column('post_id', sa.Integer(), nullable=False),
sa.ForeignKeyConstraint(['blogger_id'], ['blogger.id'], ),
sa.ForeignKeyConstraint(['post_id'], ['post.id'], ),
sa.PrimaryKeyConstraint('blogger_id', 'post_id')
)
op.create_table('blogger_trip',
sa.Column('blogger_id', sa.Integer(), nullable=False),
sa.Column('trip_id', sa.Integer(), nullable=False),
sa.ForeignKeyConstraint(['blogger_id'], ['blogger.id'], ),
sa.ForeignKeyConstraint(['trip_id'], ['trip.id'], ),
sa.PrimaryKeyConstraint('blogger_id', 'trip_id')
)
op.create_table('hashtag_post',
sa.Column('hashtag_id', sa.Integer(), nullable=False),
sa.Column('post_id', sa.Integer(), nullable=False),
sa.ForeignKeyConstraint(['hashtag_id'], ['hashtag.id'], ),
sa.ForeignKeyConstraint(['post_id'], ['post.id'], ),
sa.PrimaryKeyConstraint('hashtag_id', 'post_id')
)
op.create_table('hashtag_trip',
sa.Column('hashtag_id', sa.Integer(), nullable=False),
sa.Column('trip_id', sa.Integer(), nullable=False),
sa.ForeignKeyConstraint(['hashtag_id'], ['hashtag.id'], ),
sa.ForeignKeyConstraint(['trip_id'], ['trip.id'], ),
sa.PrimaryKeyConstraint('hashtag_id', 'trip_id')
)
op.create_table('post_trip',
sa.Column('post_id', sa.Integer(), nullable=False),
sa.Column('trip_id', sa.Integer(), nullable=False),
sa.ForeignKeyConstraint(['post_id'], ['post.id'], ),
sa.ForeignKeyConstraint(['trip_id'], ['trip.id'], ),
sa.PrimaryKeyConstraint('post_id', 'trip_id')
)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_table('post_trip')
op.drop_table('hashtag_trip')
op.drop_table('hashtag_post')
op.drop_table('blogger_trip')
op.drop_table('blogger_post')
op.drop_table('trip')
op.drop_table('route_place')
op.drop_table('post')
op.drop_table('user')
op.drop_table('type_media')
op.drop_table('status_trip')
op.drop_table('settings')
op.drop_table('route')
op.drop_table('place')
op.drop_table('media')
op.drop_table('invitation_info')
op.drop_table('hashtag')
op.drop_table('blogger')
# ### end Alembic commands ###
|
{"/first_start.py": ["/app/__init__.py", "/app/models.py"], "/app/api/inst_search.py": ["/app/__init__.py", "/app/utils.py", "/app/api/__init__.py"], "/app/api/places.py": ["/app/__init__.py", "/app/utils.py", "/app/api/__init__.py", "/app/models.py"], "/app/__init__.py": ["/config.py"], "/app/models.py": ["/app/__init__.py"], "/app/api/bloggers.py": ["/app/__init__.py", "/app/utils.py", "/app/api/__init__.py", "/app/models.py"], "/app/api/__init__.py": ["/app/__init__.py", "/app/models.py"], "/app/api/trips.py": ["/app/__init__.py", "/app/utils.py", "/app/api/__init__.py", "/app/models.py"], "/app/api/hashtags.py": ["/app/__init__.py", "/app/utils.py", "/app/api/__init__.py", "/app/models.py"], "/app/api/settings.py": ["/app/__init__.py", "/app/utils.py", "/app/api/__init__.py", "/app/models.py"], "/app/routes.py": ["/app/__init__.py"]}
|
45,112,055
|
so89bur/LetsGo
|
refs/heads/main
|
/app/models.py
|
import jwt
from app import db, app
from werkzeug.security import generate_password_hash, check_password_hash
from datetime import datetime, timedelta
class DictMixin(object):
def toDict(self):
return {c.name: str(getattr(self, c.name)) for c in self.__table__.columns}
class BaseMixin(DictMixin):
created = db.Column(db.DateTime, default=datetime.utcnow)
updated = db.Column(db.DateTime, onupdate=datetime.utcnow)
blogger_trip = db.Table('blogger_trip',
db.Column('blogger_id', db.Integer, db.ForeignKey('blogger.id'),
primary_key=True),
db.Column('trip_id', db.Integer, db.ForeignKey('trip.id'),
primary_key=True)
)
blogger_post = db.Table('blogger_post',
db.Column('blogger_id', db.Integer, db.ForeignKey('blogger.id'),
primary_key=True),
db.Column('post_id', db.Integer, db.ForeignKey('post.id'),
primary_key=True)
)
post_trip = db.Table('post_trip',
db.Column('post_id', db.Integer, db.ForeignKey('post.id'),
primary_key=True),
db.Column('trip_id', db.Integer, db.ForeignKey('trip.id'),
primary_key=True)
)
hashtag_trip = db.Table('hashtag_trip',
db.Column('hashtag_id', db.Integer, db.ForeignKey('hashtag.id'),
primary_key=True),
db.Column('trip_id', db.Integer, db.ForeignKey('trip.id'),
primary_key=True)
)
hashtag_post = db.Table('hashtag_post',
db.Column('hashtag_id', db.Integer, db.ForeignKey('hashtag.id'),
primary_key=True),
db.Column('post_id', db.Integer, db.ForeignKey('post.id'),
primary_key=True)
)
class Blogger(DictMixin, db.Model):
id = db.Column(db.Integer, primary_key=True, autoincrement=True)
username = db.Column(db.String(40), unique=True)
full_name = db.Column(db.String(500))
followers = db.Column(db.Integer)
count_likes = db.Column(db.Integer)
count_comments = db.Column(db.Integer)
count_posts = db.Column(db.Integer)
er = db.Column(db.Float)
profile_pic_url = db.Column(db.String(500))
is_business_account = db.Column(db.Boolean)
public = db.Column(db.Boolean)
verify = db.Column(db.Boolean)
Posts = db.relationship('Post', back_populates='Blogger')
class Trip(DictMixin, db.Model):
    """A travel trip linking bloggers, posts, hashtags and a route."""
    id = db.Column(db.Integer, primary_key=True, autoincrement=True)
    route_id = db.Column(db.Integer, db.ForeignKey('route.id'))
    invitation_text = db.Column(db.String(500))
    status_trip_id = db.Column(db.Integer, db.ForeignKey('status_trip.id'))
    name = db.Column(db.String(40), unique=True)
    date = db.Column(db.DateTime)
    # (sic) "folowers" typo is baked into the schema; renaming would need
    # a migration.
    min_count_folowers = db.Column(db.Integer)
    Bloggers = db.relationship('Blogger', secondary=blogger_trip,
                               backref=db.backref('Trips'))
    Hashtags = db.relationship('Hashtag', secondary=hashtag_trip,
                               backref=db.backref('Trips'))
    Posts = db.relationship('Post', secondary=post_trip,
                            backref=db.backref('Trips'))
    Route = db.relationship('Route', back_populates='Trips')
    Status = db.relationship('StatusTrip', back_populates='Trips')
class StatusTrip(DictMixin, db.Model):
    """Reference table of trip statuses (machine name + display label)."""
    id = db.Column(db.Integer, primary_key=True, autoincrement=True)
    name = db.Column(db.String(40), unique=True)
    label = db.Column(db.String(40), unique=True)
    Trips = db.relationship('Trip', back_populates='Status')
class InvitationInfo(DictMixin, db.Model):
    """Reference table of reusable invitation texts for trips."""
    id = db.Column(db.Integer, primary_key=True, autoincrement=True)
    label = db.Column(db.String(500), unique=True)
class TypeMedia(DictMixin, db.Model):
    """Reference table of media kinds (seeded with 'photo'/'video')."""
    id = db.Column(db.Integer, primary_key=True, autoincrement=True)
    label = db.Column(db.String(40), unique=True)
class Route(DictMixin, db.Model):
    """A named route: an ordered list of places (see RoutePlace.order)."""
    id = db.Column(db.Integer, primary_key=True, autoincrement=True)
    name = db.Column(db.String(40), unique=True)
    RoutePlaces = db.relationship('RoutePlace', back_populates='Route')
    Trips = db.relationship('Trip', back_populates='Route')
class RoutePlace(DictMixin, db.Model):
    """Association object between Route and Place carrying the stop order."""
    id = db.Column(db.Integer, primary_key=True, autoincrement=True)
    # Position of this place within the route.
    order = db.Column(db.Integer)
    place_id = db.Column(db.Integer, db.ForeignKey('place.id'))
    route_id = db.Column(db.Integer, db.ForeignKey('route.id'))
    Place = db.relationship('Place', back_populates='RoutePlaces')
    Route = db.relationship('Route', back_populates='RoutePlaces')
class Hashtag(DictMixin, db.Model):
    """A hashtag; linked to trips/posts via the association tables above."""
    id = db.Column(db.Integer, primary_key=True, autoincrement=True)
    name = db.Column(db.String(40), unique=True)
class Settings(DictMixin, db.Model):
    """Key/value application settings (see app/api/settings.py).

    NOTE(review): ``value`` is unique, so two keys cannot share a value --
    confirm that constraint is intentional.
    """
    id = db.Column(db.Integer, primary_key=True, autoincrement=True)
    key = db.Column(db.String(40), unique=True)
    value = db.Column(db.String(100), unique=True)
class Media(DictMixin, db.Model):
    """A stored media asset (name, type, source path/URL)."""
    id = db.Column(db.Integer, primary_key=True, autoincrement=True)
    name = db.Column(db.String(40), unique=True)
    type = db.Column(db.String(100), unique=True)
    src = db.Column(db.String(100), unique=True)
class Place(DictMixin, db.Model):
    """A geographic point of interest (lat/lon in decimal degrees)."""
    id = db.Column(db.Integer, primary_key=True, autoincrement=True)
    name = db.Column(db.String(255), unique=True)
    lat = db.Column(db.Float)
    lon = db.Column(db.Float)
    RoutePlaces = db.relationship('RoutePlace', back_populates='Place')
class User(DictMixin, db.Model):
    """Application login account (distinct from scraped Blogger rows)."""
    id = db.Column(db.Integer, primary_key=True, autoincrement=True)
    login = db.Column(db.String(40), unique=True)
    # werkzeug password hash -- plaintext is never stored.
    pass_hash = db.Column(db.String(128))
    registered = db.Column(db.DateTime, nullable=False)
    updated = db.Column(db.DateTime, onupdate=datetime.utcnow)

    def __init__(self, **kwargs):
        # BUG FIX: the original __init__ took no arguments and skipped
        # super().__init__(), so the standard SQLAlchemy keyword
        # constructor (User(login=...)) raised TypeError.  Accepting
        # **kwargs stays backward-compatible with the old bare User() call.
        super().__init__(**kwargs)
        if self.registered is None:
            # NOTE(review): naive local time, while the audit columns use
            # utcnow -- confirm which timezone convention is intended.
            self.registered = datetime.now()

    def set_password(self, password):
        """Hash ``password`` and store the hash."""
        self.pass_hash = generate_password_hash(password)

    def check_password(self, password):
        """Return True if ``password`` matches the stored hash."""
        return check_password_hash(self.pass_hash, password)
class Post(db.Model, DictMixin):
    """A blogger's post with engagement counters."""
    id = db.Column(db.Integer, primary_key=True, autoincrement=True)
    blogger_id = db.Column(db.Integer, db.ForeignKey('blogger.id'))
    name = db.Column(db.String(200))
    date = db.Column(db.DateTime)
    count_likes = db.Column(db.Integer)
    count_comments = db.Column(db.Integer)
    # Soft-delete flag; rows are kept in the table.
    deleted = db.Column(db.Boolean)
    audience_coverage = db.Column(db.Integer)
    Blogger = db.relationship('Blogger', back_populates='Posts')
    Hashtags = db.relationship('Hashtag', secondary=hashtag_post,
                               backref=db.backref('Posts'))
|
{"/first_start.py": ["/app/__init__.py", "/app/models.py"], "/app/api/inst_search.py": ["/app/__init__.py", "/app/utils.py", "/app/api/__init__.py"], "/app/api/places.py": ["/app/__init__.py", "/app/utils.py", "/app/api/__init__.py", "/app/models.py"], "/app/__init__.py": ["/config.py"], "/app/models.py": ["/app/__init__.py"], "/app/api/bloggers.py": ["/app/__init__.py", "/app/utils.py", "/app/api/__init__.py", "/app/models.py"], "/app/api/__init__.py": ["/app/__init__.py", "/app/models.py"], "/app/api/trips.py": ["/app/__init__.py", "/app/utils.py", "/app/api/__init__.py", "/app/models.py"], "/app/api/hashtags.py": ["/app/__init__.py", "/app/utils.py", "/app/api/__init__.py", "/app/models.py"], "/app/api/settings.py": ["/app/__init__.py", "/app/utils.py", "/app/api/__init__.py", "/app/models.py"], "/app/routes.py": ["/app/__init__.py"]}
|
45,112,056
|
so89bur/LetsGo
|
refs/heads/main
|
/app/api/bloggers.py
|
import json
from flask import request
from app import app, db
from app.utils import send_datetime_to_client
from app.api import support
from app.models import Trip, Blogger, Post, Hashtag
def prepare_blogger(instance):
    """Serialize a Blogger row into a plain dict for a JSON response."""
    fields = (
        'id', 'username', 'full_name', 'count_likes', 'count_comments',
        'count_posts', 'followers', 'er', 'is_business_account',
        'verify', 'profile_pic_url',
    )
    return {name: getattr(instance, name) for name in fields}
@support
@app.route('/api/v1/bloggers', methods=['GET'])
def get_bloggers():
    """List bloggers with paging, ordering, filtering and username search.

    NOTE(review): ``@support`` is applied above ``@app.route``, so Flask
    registers the unwrapped function and the retry/rollback wrapper never
    runs for this route -- confirm the intended decorator order (the same
    pattern appears in every api module).
    """
    start = request.args.get('start', default=0, type=int)
    limit = request.args.get('limit', default=20, type=int)
    order_by = request.args.get('order_by', default='id', type=str)
    order_type = request.args.get('order_type', default='asc', type=str)
    # NOTE(review): the default 'name' is not a Blogger column either --
    # presumably clients always pass filter_prop explicitly; verify.
    filter_prop = request.args.get('filter_prop', default='name')
    filter_value = request.args.get('filter_value', default=None)
    search_query = request.args.get('search_query', default=None)
    if search_query:
        search_value = '%{}%'.format(search_query).lower()
        # BUG FIX: Blogger has no ``name`` column (see app/models.py), so
        # the original search raised AttributeError. Search by username.
        query = Blogger.query.filter(Blogger.username.ilike(search_value))
    elif filter_value and filter_prop:
        query = Blogger.query.filter(getattr(Blogger, filter_prop) == filter_value)
    else:
        query = Blogger.query
    if order_type == 'asc':
        order_info = getattr(Blogger, order_by).asc()
    else:
        order_info = getattr(Blogger, order_by).desc()
    items = [
        prepare_blogger(item)
        for item in query.order_by(order_info).slice(start, start + limit).all()
    ]
    return json.dumps({
        'success': True,
        'total': Blogger.query.count(),
        'result': items
    })
|
{"/first_start.py": ["/app/__init__.py", "/app/models.py"], "/app/api/inst_search.py": ["/app/__init__.py", "/app/utils.py", "/app/api/__init__.py"], "/app/api/places.py": ["/app/__init__.py", "/app/utils.py", "/app/api/__init__.py", "/app/models.py"], "/app/__init__.py": ["/config.py"], "/app/models.py": ["/app/__init__.py"], "/app/api/bloggers.py": ["/app/__init__.py", "/app/utils.py", "/app/api/__init__.py", "/app/models.py"], "/app/api/__init__.py": ["/app/__init__.py", "/app/models.py"], "/app/api/trips.py": ["/app/__init__.py", "/app/utils.py", "/app/api/__init__.py", "/app/models.py"], "/app/api/hashtags.py": ["/app/__init__.py", "/app/utils.py", "/app/api/__init__.py", "/app/models.py"], "/app/api/settings.py": ["/app/__init__.py", "/app/utils.py", "/app/api/__init__.py", "/app/models.py"], "/app/routes.py": ["/app/__init__.py"]}
|
45,112,057
|
so89bur/LetsGo
|
refs/heads/main
|
/app/api/__init__.py
|
import json
from flask import request
from functools import wraps
from app import db
from app.models import User
# Number of times a wrapped view is retried before giving up.
# BUG FIX: the original referenced ATTEMPTS_NUMBER without defining it,
# so every wrapped call raised NameError into the outer handler.
ATTEMPTS_NUMBER = 3


def support(func):
    """Decorate an API view: retry transient failures, roll back the DB
    session on error, and always release the session afterwards.

    Returns a JSON ``{'success': False}`` body when every attempt fails.
    """
    @wraps(func)  # ``wraps`` was imported but never used; preserves __name__
    def func_wrapper(*args, **kwargs):
        try:
            # Retry a few times; failures are assumed transient.
            for _ in range(ATTEMPTS_NUMBER):
                try:
                    return func(*args, **kwargs)
                except Exception:
                    continue
            # All attempts failed (the original fell through and returned
            # None here): roll back and report failure explicitly.
            db.session.rollback()
            return json.dumps({'success': False})
        except Exception:
            db.session.rollback()
            return json.dumps({'success': False})
        finally:
            # BUG FIX: the original called ``db.close()``; the SQLAlchemy
            # extension object has no ``close`` -- release the scoped
            # session instead.
            db.session.close()
    return func_wrapper
from . import trips, bloggers, inst_search, hashtags, settings, places
|
{"/first_start.py": ["/app/__init__.py", "/app/models.py"], "/app/api/inst_search.py": ["/app/__init__.py", "/app/utils.py", "/app/api/__init__.py"], "/app/api/places.py": ["/app/__init__.py", "/app/utils.py", "/app/api/__init__.py", "/app/models.py"], "/app/__init__.py": ["/config.py"], "/app/models.py": ["/app/__init__.py"], "/app/api/bloggers.py": ["/app/__init__.py", "/app/utils.py", "/app/api/__init__.py", "/app/models.py"], "/app/api/__init__.py": ["/app/__init__.py", "/app/models.py"], "/app/api/trips.py": ["/app/__init__.py", "/app/utils.py", "/app/api/__init__.py", "/app/models.py"], "/app/api/hashtags.py": ["/app/__init__.py", "/app/utils.py", "/app/api/__init__.py", "/app/models.py"], "/app/api/settings.py": ["/app/__init__.py", "/app/utils.py", "/app/api/__init__.py", "/app/models.py"], "/app/routes.py": ["/app/__init__.py"]}
|
45,112,058
|
so89bur/LetsGo
|
refs/heads/main
|
/first_start.py
|
import json
from random import randint, choice
import random
import math
from datetime import datetime
from app import app, db
from app.models import Trip, Post, Blogger, Hashtag, TypeMedia, StatusTrip, \
InvitationInfo, Place
# Origin for the random demo coordinates (see create_random_point).
BASE_LAT = 0
BASE_LON = 0
# Invitation texts (Russian: "We would like to invite you to take part in
# a travel trip").
INVITATIONS = [
    'Хотим пригласить вас принять участие в травел-поездке'
]
# Trip status code -> Russian display label ("New" / "In progress" / "Finished").
TRIP_STATUSES = {
    'new': 'Новая',
    'inprogress': 'В процессе',
    'finished': 'Завершена',
}
# Seed hashtags (Russian: "travel_blogger", "beautiful_samara").
HASHTAGS = ['травел_блогер', 'красивая_самара']
def clear_data():
    # NOTE(review): ``data`` is not defined anywhere in this module, so
    # calling this raises NameError. It looks like a leftover from an
    # earlier revision -- confirm before use or remove.
    for key in data.keys():
        data[key].clear()
def create_random_point(x0, y0, distance):
    """Return a random point within ``distance`` metres of (x0, y0).

    Uses the uniform-disc sampling trick (sqrt of a uniform radius) and
    converts metres to degrees with ~111300 m per degree.
    NOTE(review): ``math.cos(y0)`` is applied with y0 in degrees, not
    radians -- confirm that is intended.
    """
    radius_deg = distance / 111300
    u = random.uniform(0, 1)
    v = random.uniform(0, 1)
    w = radius_deg * math.sqrt(u)
    angle = 2 * math.pi * v
    dx = (w * math.cos(angle)) / math.cos(y0)
    dy = w * math.sin(angle)
    return {
        "lat": round(x0 + dx, 5),
        "lon": round(y0 + dy, 5),
    }
def write_hashtags():
    """Insert one Hashtag row per name in HASHTAGS."""
    db.session.add_all(Hashtag(name=name) for name in HASHTAGS)
    db.session.commit()
def write_places():
    """Seed 100 randomly-placed demo Place rows around the base point."""
    for index in range(100):
        point = create_random_point(BASE_LAT, BASE_LON, 100000)
        db.session.add(Place(
            name='place {}'.format(index),
            lat=point['lat'],
            lon=point['lon'],
        ))
    db.session.commit()
def write_posts():
    """Seed 100 demo Post rows with randomized engagement counters."""
    for index in range(100):
        db.session.add(Post(
            name='Post {}'.format(index),
            date=datetime.now(),
            count_likes=randint(5000, 10000),
            count_comments=randint(100, 5000),
            deleted=False,
            audience_coverage=randint(0, 100),
        ))
    db.session.commit()
def write_bloggers():
    """Seed Blogger rows from app/data.json + app/other.json, attaching a
    random sample of posts and trips to each."""
    # Use context managers: the original leaked both file handles.
    with open('app/data.json') as profile_file:
        file_data = json.load(profile_file)
    with open('app/other.json') as statistic_file:
        static_data = json.load(statistic_file)
    # Hoist the full lists: the original re-ran ``query.all()`` for every
    # single random pick (25 queries per blogger).
    all_posts = Post.query.all()
    all_trips = Trip.query.all()
    for username, info in static_data.items():
        blogger = Blogger()
        blogger.username = username
        profile = file_data.get(username, {})
        blogger.full_name = profile.get('full_name')
        blogger.followers = profile.get('followers')
        blogger.is_business_account = profile.get('is_business_account')
        blogger.profile_pic_url = profile.get('profile_pic_url')
        blogger.count_likes = info['likes_count']
        blogger.count_comments = info['comments_count']
        blogger.count_posts = info['posts_count']
        blogger.er = info['coef']
        blogger.public = bool(randint(0, 1))
        blogger.verify = bool(randint(0, 1))
        # Draw up to 20 distinct posts (choice() may repeat, so dedupe).
        random_posts = []
        seen_posts = set()
        for _ in range(20):
            post = choice(all_posts)
            if post.id not in seen_posts:
                random_posts.append(post)
                seen_posts.add(post.id)
        # Draw up to 5 distinct trips the same way.
        random_trips = []
        seen_trips = set()
        for _ in range(5):
            trip = choice(all_trips)
            if trip.id not in seen_trips:
                random_trips.append(trip)
                seen_trips.add(trip.id)
        # The original assigned blogger.Posts twice; once is enough.
        blogger.Posts = random_posts
        blogger.Trips = random_trips
        db.session.add(blogger)
    db.session.commit()
def write_trips():
    """Seed 20 demo Trip rows, each with a random invitation text."""
    # Hoisted: the original ran ``InvitationInfo.query.all()`` once per trip.
    invitations = InvitationInfo.query.all()
    for index in range(20):
        trip = Trip()
        trip.name = 'Поздка {}'.format(index)  # (sic) typo kept -- seed data only
        trip.date = datetime.now()
        trip.invitation_text = choice(invitations).label
        db.session.add(trip)
    db.session.commit()
def write_base_data():
    """Seed reference tables: media types, trip statuses, invitation texts."""
    for name in ('photo', 'video'):
        db.session.add(TypeMedia(label=name))
        # Committed per item, matching the original behaviour.
        db.session.commit()
    for name, label in TRIP_STATUSES.items():
        db.session.add(StatusTrip(name=name, label=label))
    for label in INVITATIONS:
        db.session.add(InvitationInfo(label=label))
    db.session.commit()
def reset_db():
    """Drop and recreate every table -- destroys all existing data."""
    db.drop_all()
    db.create_all()
if __name__ == '__main__':
    # Rebuild the schema, then seed in dependency order: reference data
    # and standalone rows first, bloggers last (they link posts/trips).
    reset_db()
    write_base_data()
    write_hashtags()
    write_places()
    write_posts()
    write_trips()
    write_bloggers()
|
{"/first_start.py": ["/app/__init__.py", "/app/models.py"], "/app/api/inst_search.py": ["/app/__init__.py", "/app/utils.py", "/app/api/__init__.py"], "/app/api/places.py": ["/app/__init__.py", "/app/utils.py", "/app/api/__init__.py", "/app/models.py"], "/app/__init__.py": ["/config.py"], "/app/models.py": ["/app/__init__.py"], "/app/api/bloggers.py": ["/app/__init__.py", "/app/utils.py", "/app/api/__init__.py", "/app/models.py"], "/app/api/__init__.py": ["/app/__init__.py", "/app/models.py"], "/app/api/trips.py": ["/app/__init__.py", "/app/utils.py", "/app/api/__init__.py", "/app/models.py"], "/app/api/hashtags.py": ["/app/__init__.py", "/app/utils.py", "/app/api/__init__.py", "/app/models.py"], "/app/api/settings.py": ["/app/__init__.py", "/app/utils.py", "/app/api/__init__.py", "/app/models.py"], "/app/routes.py": ["/app/__init__.py"]}
|
45,112,059
|
so89bur/LetsGo
|
refs/heads/main
|
/app/api/trips.py
|
import json
from flask import request
from app import app, db
from app.utils import send_datetime_to_client
from app.api import support
from app.models import Trip, Blogger, Post, Hashtag
def prepare_trip(instance):
    """Serialize a Trip (with bloggers, hashtags, posts and route) for JSON."""
    prepared_bloggers = [
        {
            'id': blogger.id,
            'username': blogger.username,
            'full_name': blogger.full_name,
            'followers': blogger.followers,
            'verify': blogger.verify,
            'profile_pic_url': blogger.profile_pic_url,
            'is_business_account': blogger.is_business_account,
        }
        for blogger in instance.Bloggers
    ]
    prepared_hashtags = [
        {
            # BUG FIX: the original emitted ``blogger.id`` here (the stale
            # loop variable from the bloggers loop), so every hashtag got
            # the wrong id -- and it raised NameError when a trip had
            # hashtags but no bloggers.
            'id': hashtag.id,
            'name': hashtag.name,
        }
        for hashtag in instance.Hashtags
    ]
    prepared_posts = [
        {'id': post.id, 'name': post.name}
        for post in instance.Posts
    ]
    prepared_route = None
    if instance.Route:
        prepared_route = {
            'id': instance.Route.id,
            'name': instance.Route.name,
        }
    return {
        'id': instance.id,
        'name': instance.name,
        'date': send_datetime_to_client(instance.date),
        'invitation_text': instance.invitation_text,
        'Bloggers': prepared_bloggers,
        'Posts': prepared_posts,
        'Hashtags': prepared_hashtags,
        'Route': prepared_route,
    }
@support
@app.route('/api/v1/trip/attach/blogger', methods=['POST'])
def attach_blogger():
    """Attach a blogger to a trip unless it is already attached."""
    payload = json.loads(request.data)
    trip_id = payload.get('trip_id', None)
    blogger_id = payload.get('blogger_id', None)
    trip = Trip.query.filter_by(id=trip_id).first()
    if trip:
        already_attached = any(b.id == blogger_id for b in trip.Bloggers)
        if not already_attached:
            new_blogger = Blogger.query.filter_by(id=blogger_id).first()
            if new_blogger:
                trip.Bloggers.append(new_blogger)
                db.session.commit()
    return json.dumps({'success': True})
@support
@app.route('/api/v1/trip/detach/blogger', methods=['POST'])
def detach_blogger():
    """Remove a blogger from a trip's blogger list (no-op if absent)."""
    payload = json.loads(request.data)
    trip_id = payload.get('trip_id', None)
    blogger_id = payload.get('blogger_id', None)
    trip = Trip.query.filter_by(id=trip_id).first()
    if trip:
        # Rebuild the association list without the detached blogger.
        trip.Bloggers = [b for b in trip.Bloggers if b.id != blogger_id]
        db.session.commit()
    return json.dumps({'success': True})
@support
@app.route('/api/v1/trip', methods=['POST'])
def new_trip():
    """Create a Trip from submitted form data."""
    # NOTE(review): ``prepare_form_data`` is not imported in this module
    # (only ``send_datetime_to_client`` is) -- confirm where it lives and
    # import it, otherwise this view raises NameError.
    data = prepare_form_data(request.form)
    instance = Trip()
    # Only set attributes for which a non-empty value was posted.
    for property_name, property_value in data.items():
        if property_value:
            setattr(instance, property_name, property_value)
    # The original added the instance to the session twice; once suffices.
    db.session.add(instance)
    db.session.commit()
    return json.dumps({'success': True})
@support
@app.route('/api/v1/trip/<id>', methods=['GET'])
def get_trip(id):
    """Return one serialized trip by primary key, or success=False."""
    instance = Trip.query.filter_by(id=id).first()
    if instance is None:
        return json.dumps({'success': False})
    return json.dumps({
        'success': True,
        'result': prepare_trip(instance)
    })
@support
@app.route('/api/v1/trips', methods=['GET'])
def get_trips():
    """List trips with paging, ordering, filtering and name search."""
    args = request.args
    start = args.get('start', default=0, type=int)
    limit = args.get('limit', default=20, type=int)
    order_by = args.get('order_by', default='id', type=str)
    order_type = args.get('order_type', default='asc', type=str)
    filter_prop = args.get('filter_prop', default='name')
    filter_value = args.get('filter_value', default=None)
    search_query = args.get('search_query', default=None)
    # Build the base query: name search takes precedence over filtering.
    if search_query:
        pattern = '%{}%'.format(search_query).lower()
        query = Trip.query.filter(Trip.name.ilike(pattern))
    elif filter_value and filter_prop:
        query = Trip.query.filter(getattr(Trip, filter_prop) == filter_value)
    else:
        query = Trip.query
    column = getattr(Trip, order_by)
    order_info = column.asc() if order_type == 'asc' else column.desc()
    page = query.order_by(order_info).slice(start, start + limit).all()
    items = [prepare_trip(item) for item in page]
    return json.dumps({
        'success': True,
        'total': Trip.query.count(),
        'result': items
    })
@support
@app.route('/api/v1/trip/<id>', methods=['PUT'])
def edit_trip(id):
    """Update an existing trip's attributes from submitted form data."""
    # NOTE(review): ``prepare_form_data`` is not imported in this module --
    # same issue as in new_trip().
    data = prepare_form_data(request.form)
    if id:
        instance = Trip.query.filter_by(id=id).first()
        if instance is None:
            # Guard: the original crashed on an unknown id.
            return json.dumps({'success': False})
        for property_name, property_value in data.items():
            if property_value:
                # BUG FIX: the original used item assignment
                # (``instance[prop] = value``); SQLAlchemy models do not
                # support it. Use setattr, matching new_trip().
                setattr(instance, property_name, property_value)
        db.session.add(instance)
        db.session.commit()
        return json.dumps({'success': True})
    else:
        return json.dumps({'success': False})
@support
@app.route('/api/v1/trip/<id>', methods=['DELETE'])
def remove_trip(id):
    """Delete a trip by primary key."""
    if id:
        instance = Trip.query.filter_by(id=id).first()
        if instance is None:
            # Guard: the original dereferenced .first() unconditionally.
            return json.dumps({'success': False})
        # BUG FIX: model instances have no ``.delete()`` method; the
        # original ``...first().delete()`` raised AttributeError. Delete
        # through the session instead.
        db.session.delete(instance)
        db.session.commit()
        return json.dumps({'success': True})
    else:
        return json.dumps({'success': False})
|
{"/first_start.py": ["/app/__init__.py", "/app/models.py"], "/app/api/inst_search.py": ["/app/__init__.py", "/app/utils.py", "/app/api/__init__.py"], "/app/api/places.py": ["/app/__init__.py", "/app/utils.py", "/app/api/__init__.py", "/app/models.py"], "/app/__init__.py": ["/config.py"], "/app/models.py": ["/app/__init__.py"], "/app/api/bloggers.py": ["/app/__init__.py", "/app/utils.py", "/app/api/__init__.py", "/app/models.py"], "/app/api/__init__.py": ["/app/__init__.py", "/app/models.py"], "/app/api/trips.py": ["/app/__init__.py", "/app/utils.py", "/app/api/__init__.py", "/app/models.py"], "/app/api/hashtags.py": ["/app/__init__.py", "/app/utils.py", "/app/api/__init__.py", "/app/models.py"], "/app/api/settings.py": ["/app/__init__.py", "/app/utils.py", "/app/api/__init__.py", "/app/models.py"], "/app/routes.py": ["/app/__init__.py"]}
|
45,112,060
|
so89bur/LetsGo
|
refs/heads/main
|
/app/api/hashtags.py
|
import json
from flask import request
from app import app, db
from app.utils import send_datetime_to_client
from app.api import support
from app.models import Trip, Blogger, Post, Hashtag
def prepare_hashtag(instance):
    """Serialize a Hashtag row (with usage counts) for a JSON response."""
    return dict(
        id=instance.id,
        name=instance.name,
        trips_count=len(instance.Trips),
        posts_count=len(instance.Posts),
    )
@support
@app.route('/api/v1/hashtags', methods=['GET'])
def get_hashtags():
    """List hashtags with paging, ordering, filtering and name search."""
    args = request.args
    start = args.get('start', default=0, type=int)
    limit = args.get('limit', default=20, type=int)
    order_by = args.get('order_by', default='id', type=str)
    order_type = args.get('order_type', default='asc', type=str)
    filter_prop = args.get('filter_prop', default='name')
    filter_value = args.get('filter_value', default=None)
    search_query = args.get('search_query', default=None)
    # Search takes precedence over exact-match filtering.
    if search_query:
        pattern = '%{}%'.format(search_query).lower()
        query = Hashtag.query.filter(Hashtag.name.ilike(pattern))
    elif filter_value and filter_prop:
        query = Hashtag.query.filter(getattr(Hashtag, filter_prop) == filter_value)
    else:
        query = Hashtag.query
    column = getattr(Hashtag, order_by)
    order_info = column.asc() if order_type == 'asc' else column.desc()
    page = query.order_by(order_info).slice(start, start + limit).all()
    items = [prepare_hashtag(item) for item in page]
    return json.dumps({
        'success': True,
        'total': Hashtag.query.count(),
        'result': items
    })
|
{"/first_start.py": ["/app/__init__.py", "/app/models.py"], "/app/api/inst_search.py": ["/app/__init__.py", "/app/utils.py", "/app/api/__init__.py"], "/app/api/places.py": ["/app/__init__.py", "/app/utils.py", "/app/api/__init__.py", "/app/models.py"], "/app/__init__.py": ["/config.py"], "/app/models.py": ["/app/__init__.py"], "/app/api/bloggers.py": ["/app/__init__.py", "/app/utils.py", "/app/api/__init__.py", "/app/models.py"], "/app/api/__init__.py": ["/app/__init__.py", "/app/models.py"], "/app/api/trips.py": ["/app/__init__.py", "/app/utils.py", "/app/api/__init__.py", "/app/models.py"], "/app/api/hashtags.py": ["/app/__init__.py", "/app/utils.py", "/app/api/__init__.py", "/app/models.py"], "/app/api/settings.py": ["/app/__init__.py", "/app/utils.py", "/app/api/__init__.py", "/app/models.py"], "/app/routes.py": ["/app/__init__.py"]}
|
45,112,061
|
so89bur/LetsGo
|
refs/heads/main
|
/get_blogers.py
|
import glob
import os
from instabot import Bot

# One-off scraping script: log an instabot session in and pull all media
# for a hashtag so blogger accounts can be harvested.
#
# SECURITY NOTE(review): the Instagram credentials below are hard-coded in
# plaintext -- move them to environment variables or the Settings table.

# instabot refuses to start with a stale session cookie; remove it first.
# BUG FIX: the original indexed ``cookie_del[0]`` unconditionally and
# crashed with IndexError when no cookie file existed.
cookie_del = glob.glob("config/*cookie.json")
if cookie_del:
    os.remove(cookie_del[0])

bot = Bot(max_likes_to_like=1000000, min_likes_to_like=10)
bot.login(username='hackaton_test', password='456321Zx')

# username -> like count, filled by the (currently disabled) filtering pass.
accounts = {}

# Pull every media item tagged with the Russian hashtag "самара" (Samara).
hastag = u'самара'
twony_last_medias = bot.get_total_hashtag_medias(hastag)
print("twony_last_medias", len(twony_last_medias))

# The large blocks of commented-out experiments (instaloader engagement
# calculation, follower scraping, geotag media, caption keyword filtering)
# were removed as dead code; recover them from VCS history if needed.
|
{"/first_start.py": ["/app/__init__.py", "/app/models.py"], "/app/api/inst_search.py": ["/app/__init__.py", "/app/utils.py", "/app/api/__init__.py"], "/app/api/places.py": ["/app/__init__.py", "/app/utils.py", "/app/api/__init__.py", "/app/models.py"], "/app/__init__.py": ["/config.py"], "/app/models.py": ["/app/__init__.py"], "/app/api/bloggers.py": ["/app/__init__.py", "/app/utils.py", "/app/api/__init__.py", "/app/models.py"], "/app/api/__init__.py": ["/app/__init__.py", "/app/models.py"], "/app/api/trips.py": ["/app/__init__.py", "/app/utils.py", "/app/api/__init__.py", "/app/models.py"], "/app/api/hashtags.py": ["/app/__init__.py", "/app/utils.py", "/app/api/__init__.py", "/app/models.py"], "/app/api/settings.py": ["/app/__init__.py", "/app/utils.py", "/app/api/__init__.py", "/app/models.py"], "/app/routes.py": ["/app/__init__.py"]}
|
45,112,062
|
so89bur/LetsGo
|
refs/heads/main
|
/config.py
|
import os
basedir = os.path.abspath(os.path.dirname(__file__))
class Config(object):
    """Base Flask configuration shared by all environments.

    SECURITY NOTE(review): the database credentials and the CSRF secret
    are hard-coded here -- load them from the environment like SECRET_KEY.
    """
    # Falls back to a known value when the env var is unset (dev only).
    SECRET_KEY = os.environ.get('SECRET_KEY') or 'you-will-never-guess'
    CSRF_ENABLED = True
    WTF_CSRF_SECRET_KEY = 'dsofpkoasodksap'
    SQLALCHEMY_DATABASE_URI = 'postgresql://admin:superpassword@localhost:5432/posteach'
    CACHE_TYPE = "simple"
    CACHE_DEFAULT_TIMEOUT = 300
    # Reject uploads larger than 16 MiB.
    MAX_CONTENT_LENGTH = 16 * 1024 * 1024
    # Idiom fix: set literal instead of set([...]).
    ALLOWED_EXTENSIONS = {'txt', 'pdf', 'png', 'jpg', 'jpeg', 'gif'}
    SQLALCHEMY_TRACK_MODIFICATIONS = False
class ProductionConfig(Config):
    """Production settings: debugging disabled."""
    DEBUG = False
class DevelopConfig(Config):
    """Development settings: Flask debugger and unminified assets."""
    DEBUG = True
    ASSETS_DEBUG = True
|
{"/first_start.py": ["/app/__init__.py", "/app/models.py"], "/app/api/inst_search.py": ["/app/__init__.py", "/app/utils.py", "/app/api/__init__.py"], "/app/api/places.py": ["/app/__init__.py", "/app/utils.py", "/app/api/__init__.py", "/app/models.py"], "/app/__init__.py": ["/config.py"], "/app/models.py": ["/app/__init__.py"], "/app/api/bloggers.py": ["/app/__init__.py", "/app/utils.py", "/app/api/__init__.py", "/app/models.py"], "/app/api/__init__.py": ["/app/__init__.py", "/app/models.py"], "/app/api/trips.py": ["/app/__init__.py", "/app/utils.py", "/app/api/__init__.py", "/app/models.py"], "/app/api/hashtags.py": ["/app/__init__.py", "/app/utils.py", "/app/api/__init__.py", "/app/models.py"], "/app/api/settings.py": ["/app/__init__.py", "/app/utils.py", "/app/api/__init__.py", "/app/models.py"], "/app/routes.py": ["/app/__init__.py"]}
|
45,112,063
|
so89bur/LetsGo
|
refs/heads/main
|
/app/api/settings.py
|
import json
from flask import request
from app import app, db
from app.utils import send_datetime_to_client
from app.api import support
from app.models import Settings
# The only settings keys the API will read or write.
# SECURITY NOTE(review): save_settings_item() below stores these values
# verbatim, so the Instagram password ends up in plaintext in the
# Settings table -- confirm that is acceptable.
SETTINGS_KEYS = [
    'instagram_login',
    'instagram_password',
]
def get_settings_item(key, default=None):
    """Return the stored value for ``key``, or ``default`` when absent."""
    item = Settings.query.filter(Settings.key == key).first()
    return default if item is None else item.value
def save_settings_item(key, value):
    """Create or update the Settings row for ``key`` and commit."""
    # Reuse the existing row when present, otherwise start a fresh one.
    item = Settings.query.filter(Settings.key == key).first() or Settings(key=key)
    item.value = value
    db.session.add(item)
    db.session.commit()
@support
@app.route('/api/v1/settings', methods=['GET'])
def get_settings():
    """Return every known settings key (empty string when unset)."""
    return json.dumps({key: get_settings_item(key, "") for key in SETTINGS_KEYS})
@support
@app.route('/api/v1/settings', methods=['POST'])
def set_settings():
    """Persist posted values for the known settings keys."""
    # Guard: request.json is None when no JSON body was sent.
    data = request.json or {}
    for key in SETTINGS_KEYS:
        # Only touch keys actually present so a partial payload cannot
        # raise KeyError (the original indexed ``data[key]`` directly).
        if key in data:
            save_settings_item(key, data[key])
    return json.dumps({})
|
{"/first_start.py": ["/app/__init__.py", "/app/models.py"], "/app/api/inst_search.py": ["/app/__init__.py", "/app/utils.py", "/app/api/__init__.py"], "/app/api/places.py": ["/app/__init__.py", "/app/utils.py", "/app/api/__init__.py", "/app/models.py"], "/app/__init__.py": ["/config.py"], "/app/models.py": ["/app/__init__.py"], "/app/api/bloggers.py": ["/app/__init__.py", "/app/utils.py", "/app/api/__init__.py", "/app/models.py"], "/app/api/__init__.py": ["/app/__init__.py", "/app/models.py"], "/app/api/trips.py": ["/app/__init__.py", "/app/utils.py", "/app/api/__init__.py", "/app/models.py"], "/app/api/hashtags.py": ["/app/__init__.py", "/app/utils.py", "/app/api/__init__.py", "/app/models.py"], "/app/api/settings.py": ["/app/__init__.py", "/app/utils.py", "/app/api/__init__.py", "/app/models.py"], "/app/routes.py": ["/app/__init__.py"]}
|
45,112,064
|
so89bur/LetsGo
|
refs/heads/main
|
/app/routes.py
|
import os
from flask import render_template, send_file, send_from_directory
from app import app
@app.route('/favicon.png')
def send_favicon():
    """Serve the favicon from the static directory."""
    return send_file('../static/favicon.png')
@app.route('/img/<path:path>')
def send_img(path):
    """Serve an image asset from static/img."""
    return send_from_directory('../static/img', path)
@app.route('/css/<path:path>')
def send_css(path):
    """Serve a stylesheet from static/css."""
    return send_from_directory('../static/css', path)
@app.route('/fonts/<path:path>')
def send_fonts(path):
    """Serve a font file from static/fonts."""
    return send_from_directory('../static/fonts', path)
@app.route('/js/<path:path>')
def send_js(path):
    """Serve a script from static/js."""
    return send_from_directory('../static/js', path)
@app.route('/', defaults={'path': ''}, methods=['GET'])
@app.route('/<path:path>', methods=['GET'])
def index(path):
    """SPA catch-all: every non-asset GET returns the client index page."""
    return render_template("index.html")
|
{"/first_start.py": ["/app/__init__.py", "/app/models.py"], "/app/api/inst_search.py": ["/app/__init__.py", "/app/utils.py", "/app/api/__init__.py"], "/app/api/places.py": ["/app/__init__.py", "/app/utils.py", "/app/api/__init__.py", "/app/models.py"], "/app/__init__.py": ["/config.py"], "/app/models.py": ["/app/__init__.py"], "/app/api/bloggers.py": ["/app/__init__.py", "/app/utils.py", "/app/api/__init__.py", "/app/models.py"], "/app/api/__init__.py": ["/app/__init__.py", "/app/models.py"], "/app/api/trips.py": ["/app/__init__.py", "/app/utils.py", "/app/api/__init__.py", "/app/models.py"], "/app/api/hashtags.py": ["/app/__init__.py", "/app/utils.py", "/app/api/__init__.py", "/app/models.py"], "/app/api/settings.py": ["/app/__init__.py", "/app/utils.py", "/app/api/__init__.py", "/app/models.py"], "/app/routes.py": ["/app/__init__.py"]}
|
45,130,403
|
AntonellaCatanzano/github_proyecto_jaguarette_polotic
|
refs/heads/master
|
/JAGUARETTEKAA/urls.py
|
from django.urls import path
from .views import home, contacto, acerca_de, productos, login, agregar_producto, buscar_producto, modificar_producto, eliminar_producto, carrito
# We import a view function called "home" (plus the other views above).
# This file defines the URL routes for the app.
# Route table: maps each URL pattern to the view imported above. The two
# patterns taking <int:producto_id> operate on a single product row.
urlpatterns = [
    path('', home , name="index"),
    path('acerca_de/', acerca_de , name="acerca_de"),
    path('contacto/', contacto , name="contacto"),
    path('productos/', productos , name="productos"),
    path('agregar_producto/', agregar_producto , name="agregar_producto"),
    path('busqueda_producto/', buscar_producto , name="buscar_producto"),
    path('modificar_producto/<int:producto_id>/', modificar_producto , name="modificar_producto"),
    path('eliminar_producto/<int:producto_id>/', eliminar_producto , name="eliminar_producto"),
    path('carrito/', carrito , name="carrito"),
]
|
{"/JAGUARETTEKAA/urls.py": ["/JAGUARETTEKAA/views.py"], "/JAGUARETTEKAA/forms.py": ["/JAGUARETTEKAA/models.py"], "/JAGUARETTEKAA/views.py": ["/JAGUARETTEKAA/forms.py", "/JAGUARETTEKAA/models.py"]}
|
45,152,622
|
harshvy373/CAAS
|
refs/heads/master
|
/goodies/register.py
|
import tkinter as tk
import tkinter.font as tkFont
from tkinter import ttk
from ttkthemes import ThemedStyle
from functools import partial
from db_class import *
import socket
from getmac import get_mac_address as gma
#from login import *
def validate(nam):
    """Debug helper: echo the current value of a tkinter variable/entry."""
    value = nam.get()
    print(value)
def validate_registration(name, username, password, cpassword, typ, clas, ques, ans):
    """Validate the registration form and insert a new row into `users`.

    All arguments are Tk variable holders (StringVar) read via .get().
    Feedback is written into the module-level `Message` label; on success the
    module-level entry/combobox widgets are cleared.

    SECURITY NOTE(review): both queries are built by string concatenation, so
    any field containing a quote breaks them and the form is open to SQL
    injection -- db_class's find/insert should take parameterized queries.
    The password also appears to be stored in plain text.
    """
    if name.get()=="" or username.get()=="" or password.get()=="" or cpassword.get()=="" or typ.get()=="" or typ.get()=="Select" or clas.get()=="" or clas.get()=="Select" or ques.get()=="" or ques.get()=="Select" or ans.get()=="":
        # A field is empty or a combobox is still on its "Select" placeholder.
        Message["text"] = "Enter all the details !"
        Message["fg"] = "#d10606"
        Message.place(x=0,y=510,width=140,height=30)
    elif password.get()!=cpassword.get():
        Message["text"] = "Password and Confirm Password doesn't match !"
        Message["fg"] = "#d10606"
        Message.place(x=2,y=510,width=300,height=30)
    else:
        q="select * from users where username='"+username.get()+"';"
        s=find(q)
        if s==1:
            # find() returning 1 presumably means the username matched a row --
            # confirm against db_class.
            Message["text"] = "Username already exists !"
            Message["fg"] = "#d10606"
            Message.place(x=0,y=510,width=170,height=30)
        else:
            # Record the registering machine's identity alongside the account.
            hostname = socket.gethostname()
            IPAddr = socket.gethostbyname(hostname)
            mac_add=gma()
            details = "Hostname => "+hostname+", IP Address => "+IPAddr+", MAC Address => "+mac_add
            q="insert into users values('"+name.get()+"','"+username.get()+"','"+password.get()+"','"+typ.get()+"','"+clas.get()+"','"+ques.get()+"','"+ans.get()+"','"+details+"');"
            insert(q)
            # Reset the form after a successful insert.
            name_entry.delete(0,"end")
            username_entry.delete(0,"end")
            acc_select.set('')
            class_select.set('')
            ques_select.set('')
            password_entry.delete(0,"end")
            cpassword_entry.delete(0,"end")
            sans_entry.delete(0,"end")
            Message["text"] = "Registered successfully !"
            Message["fg"] = "#008000"
            Message.place(x=0,y=510,width=170,height=30)
# ---------------------------------------------------------------------------
# Registration window: builds the form (labels, entries, comboboxes, button)
# and wires the REGISTER button to validate_registration above.  Widgets are
# module level because the validator reads and clears them directly.
# ---------------------------------------------------------------------------
root = tk.Tk()
root.iconbitmap(default='favicon.ico')
#app = App(root)
#setting title
root.title("CASS | Register")
#setting window size
width=400
height=550
screenwidth = root.winfo_screenwidth()
screenheight = root.winfo_screenheight()
# Center the fixed-size window on the screen.
alignstr = '%dx%d+%d+%d' % (width, height, (screenwidth - width) / 2, (screenheight - height) / 2)
root.geometry(alignstr)
root.resizable(width=False, height=False)
Heading=tk.Label(root)
ft = tkFont.Font(family='Arial',size=11)
Heading["font"] = ft
Heading["fg"] = "#333333"
Heading["justify"] = "center"
Heading["text"] = "Enter the details for registration"
Heading.place(x=90,y=10,width=203,height=30)
#name
name_label=tk.Label(root)
ft = tkFont.Font(family='Arial',size=10)
name_label["font"] = ft
name_label["fg"] = "#333333"
name_label["justify"] = "left"
name_label["text"] = "Name"
name_label.place(x=-2,y=60,width=70,height=25)
name = tk.StringVar()
name_entry=tk.Entry(root,textvariable=name)
name_entry["borderwidth"] = "1px"
ft = tkFont.Font(family='Arial',size=10)
name_entry["font"] = ft
name_entry["fg"] = "#333333"
name_entry["justify"] = "left"
name_entry.place(x=160,y=60,width=212,height=30)
#username
username_label=tk.Label(root)
ft = tkFont.Font(family='Arial',size=10)
username_label["font"] = ft
username_label["fg"] = "#333333"
username_label["justify"] = "left"
username_label["text"] = "Username"
username_label.place(x=10,y=110,width=70,height=25)
username = tk.StringVar()
username_entry=tk.Entry(root,textvariable=username)
username_entry["borderwidth"] = "1px"
ft = tkFont.Font(family='Arial',size=10)
username_entry["font"] = ft
username_entry["fg"] = "#333333"
username_entry["justify"] = "left"
username_entry.place(x=160,y=110,width=211,height=30)
#password (masked with '*')
password_label=tk.Label(root)
ft = tkFont.Font(family='Arial',size=10)
password_label["font"] = ft
password_label["fg"] = "#333333"
password_label["justify"] = "left"
password_label["text"] = "Password"
password_label.place(x=10,y=160,width=70,height=25)
password = tk.StringVar()
password_entry=tk.Entry(root,textvariable=password,show="*")
password_entry["borderwidth"] = "1px"
ft = tkFont.Font(family='Arial',size=10)
password_entry["font"] = ft
password_entry["fg"] = "#333333"
password_entry["justify"] = "left"
password_entry.place(x=160,y=160,width=213,height=30)
#cpassword (confirmation, also masked)
cpassword_label=tk.Label(root)
ft = tkFont.Font(family='Arial',size=10)
cpassword_label["font"] = ft
cpassword_label["fg"] = "#333333"
cpassword_label["justify"] = "left"
cpassword_label["text"] = "Confirm Password"
cpassword_label.place(x=8,y=210,width=122,height=30)
cpassword = tk.StringVar()
cpassword_entry=tk.Entry(root,textvariable=cpassword,show="*")
cpassword_entry["borderwidth"] = "1px"
ft = tkFont.Font(family='Arial',size=10)
cpassword_entry["font"] = ft
cpassword_entry["fg"] = "#333333"
cpassword_entry["justify"] = "left"
cpassword_entry.place(x=160,y=210,width=213,height=30)
#acc_type (read-only combobox; "Select" is the placeholder the validator rejects)
type_label=tk.Label(root)
ft = tkFont.Font(family='Arial',size=10)
type_label["font"] = ft
type_label["fg"] = "#333333"
type_label["justify"] = "left"
type_label["text"] = "Type"
type_label.place(x=-3,y=260,width=70,height=25)
acc_type = tk.StringVar()
acc_select=ttk.Combobox(root,textvariable=acc_type,state="readonly")
acc_select["values"]=('Select','Admin','Teacher','Student')
ft = tkFont.Font(family='Arial',size=10)
acc_select["font"] = ft
acc_select["background"] = "#ffffff"
acc_select["justify"] = "left"
acc_select.place(x=160,y=260,width=213,height=30)
#class
class_label=tk.Label(root)
ft = tkFont.Font(family='Arial',size=10)
class_label["font"] = ft
class_label["fg"] = "#333333"
class_label["justify"] = "left"
class_label["text"] = "Class"
class_label.place(x=0,y=310,width=70,height=25)
class_type = tk.StringVar()
class_select=ttk.Combobox(root,textvariable=class_type,state="readonly")
class_select["values"]=('Select','XII-A','XII-B','XII-C')
ft = tkFont.Font(family='Arial',size=10)
class_select["font"] = ft
class_select["background"] = "#ffffff"
class_select["justify"] = "left"
class_select.place(x=160,y=310,width=213,height=30)
#sques (security question, used by the forgot-password flow)
sques_label=tk.Label(root)
ft = tkFont.Font(family='Arial',size=10)
sques_label["font"] = ft
sques_label["fg"] = "#333333"
sques_label["justify"] = "left"
sques_label["text"] = "Security Question"
sques_label.place(x=10,y=360,width=122,height=30)
sec_ques = tk.StringVar()
ques_select=ttk.Combobox(root,textvariable=sec_ques,state="readonly")
ques_select["values"]=('Select','DOB','DOA')
ft = tkFont.Font(family='Arial',size=10)
ques_select["font"] = ft
ques_select["background"] = "#ffffff"
ques_select["justify"] = "left"
ques_select.place(x=160,y=360,width=213,height=30)
#s_ans
sans_label=tk.Label(root)
ft = tkFont.Font(family='Arial',size=10)
sans_label["font"] = ft
sans_label["fg"] = "#333333"
sans_label["justify"] = "left"
sans_label["text"] = "Security Answer"
sans_label.place(x=10,y=410,width=112,height=30)
sec_ans = tk.StringVar()
sans_entry=tk.Entry(root,textvariable=sec_ans)
sans_entry["borderwidth"] = "1px"
ft = tkFont.Font(family='Arial',size=10)
sans_entry["font"] = ft
sans_entry["fg"] = "#333333"
sans_entry["justify"] = "left"
sans_entry.place(x=160,y=410,width=212,height=30)
#register_button
register_button=tk.Button(root)
register_button["bg"] = "#525458"
ft = tkFont.Font(family='Arial',size=10)
register_button["font"] = ft
register_button["fg"] = "#ffffff"
register_button["justify"] = "center"
register_button["text"] = "REGISTER"
register_button.place(x=10,y=470,width=364,height=30)
# Bind the form variables now that they all exist.  NOTE: this rebinds the
# module name `validate_registration` to the partial, shadowing the function.
validate_registration = partial(validate_registration, name, username, password, cpassword, acc_type, class_type, sec_ques, sec_ans)
register_button["command"] = validate_registration
#message label the validator writes success/error feedback into
Message=tk.Label(root)
ft = tkFont.Font(family='Arial',size=10)
Message["font"] = ft
Message["fg"] = "#008000"
Message["justify"] = "left"
Message.place(x=0,y=510,width=170,height=30)
root.mainloop()
|
{"/scan_webcam.py": ["/db_class.py"], "/admin_takeattendance.py": ["/db_class.py", "/admin.py", "/admin_generateids.py", "/admin_impexp.py", "/admin_viewattendance.py", "/forgot.py"], "/forgot.py": ["/db_class.py"], "/admin.py": ["/admin_generateids.py", "/admin_impexp.py", "/admin_takeattendance.py", "/admin_viewattendance.py", "/forgot.py"], "/admin_generateids.py": ["/table_class.py", "/db_class.py", "/single_id.py", "/double_id.py", "/admin.py", "/admin_impexp.py", "/admin_takeattendance.py", "/admin_viewattendance.py", "/forgot.py"], "/admin_impexp.py": ["/db_class.py", "/table_class.py", "/report_print2.py", "/admin.py", "/admin_generateids.py", "/admin_takeattendance.py", "/admin_viewattendance.py", "/forgot.py"], "/admin_viewattendance.py": ["/table_class.py", "/report_print2.py", "/admin.py", "/admin_generateids.py", "/admin_impexp.py", "/admin_takeattendance.py", "/forgot.py"], "/goodies/register.py": ["/db_class.py"], "/login.py": ["/db_class.py", "/admin.py"]}
|
45,152,623
|
harshvy373/CAAS
|
refs/heads/master
|
/table_class.py
|
from tkinter import *
from pandastable import Table, TableModel
from tkinter import messagebox
# Module-level holders: `final` keeps the DataFrame currently on display so
# the clipboard callback can reach it; both start as empty-string placeholders.
df,final="",""
def copy():
    """Ask the user for a format and copy the displayed DataFrame to the clipboard.

    Relies on the module-level `final` set by draw_Tableee; calling it before
    a table has been drawn would hit a string, not a DataFrame.
    """
    a=messagebox.askquestion("Copy","Do you want to copy in excel format?")
    if a=="yes":
        final.to_clipboard(excel = True)
    else:
        final.to_clipboard(excel = False)
    # Confirmation shown for either choice.
    messagebox.showinfo("Copy","Copied to clipboard")
def draw_Tableee(dataframe):
    """Open a window showing `dataframe` in a read-only pandastable grid,
    with a button that copies it to the clipboard.

    NOTE(review): only `final` is declared global, so `df` here is a *local*
    name -- the module-level `df` is never updated.  copy() reads `final`,
    which is what keeps this working.
    """
    global final
    df=final=dataframe
    root=Tk()
    root.title("Table View")
    root.geometry('600x400+200+100')
    frame = Frame(root)
    frame.pack(fill=BOTH,expand=1)
    #df = TableModel.getSampleData()
    table = pt = Table(frame, dataframe=df,showtoolbar=False, showstatusbar=False,editable=False, enable_menus=False)
    pt.show()
    button2=Button(frame,text="Copy to Clipboard",command=copy)
    button2["bg"] = "#333333"
    button2["fg"] = "#ffffff"
    button2.grid(row=4,column=1)
    frame.pack()
    root.mainloop()
#draw_Table("d")
|
{"/scan_webcam.py": ["/db_class.py"], "/admin_takeattendance.py": ["/db_class.py", "/admin.py", "/admin_generateids.py", "/admin_impexp.py", "/admin_viewattendance.py", "/forgot.py"], "/forgot.py": ["/db_class.py"], "/admin.py": ["/admin_generateids.py", "/admin_impexp.py", "/admin_takeattendance.py", "/admin_viewattendance.py", "/forgot.py"], "/admin_generateids.py": ["/table_class.py", "/db_class.py", "/single_id.py", "/double_id.py", "/admin.py", "/admin_impexp.py", "/admin_takeattendance.py", "/admin_viewattendance.py", "/forgot.py"], "/admin_impexp.py": ["/db_class.py", "/table_class.py", "/report_print2.py", "/admin.py", "/admin_generateids.py", "/admin_takeattendance.py", "/admin_viewattendance.py", "/forgot.py"], "/admin_viewattendance.py": ["/table_class.py", "/report_print2.py", "/admin.py", "/admin_generateids.py", "/admin_impexp.py", "/admin_takeattendance.py", "/forgot.py"], "/goodies/register.py": ["/db_class.py"], "/login.py": ["/db_class.py", "/admin.py"]}
|
45,152,624
|
harshvy373/CAAS
|
refs/heads/master
|
/login.py
|
import tkinter as tk
import tkinter.font as tkFont
import tkinter.ttk as ttk
from ttkthemes import ThemedStyle
from functools import partial
from db_class import *
from admin import *
def validateLogin(username,password):
    """Check the entered credentials against the users table and open the
    home screen matching the account type.

    Args:
        username, password: tk.StringVar holders bound to the entry widgets.

    SECURITY NOTE(review): the query is built by string concatenation, so the
    login form is vulnerable to SQL injection -- `select` (from db_class)
    should take a parameterized query instead.
    """
    q="select type,name from users where username='"+username.get()+"' and password='"+password.get()+"';"
    s=select(q)
    try:
        if s[0]=="Admin":
            root.destroy()
            admin_start(s[1])
        elif s[0]=="Teacher":
            root.destroy()
            print("Teacher")
        elif s[0]=="Student":
            root.destroy()
            print("Student")
    except:
        # Bare except: any failure (no matching row so s is unsubscriptable,
        # or admin_start raising) is reported as bad credentials -- this also
        # hides real bugs.
        Message["text"]="Wrong credentials !"
# ---------------------------------------------------------------------------
# Login window: username/password form whose "L O G I N" button invokes
# validateLogin above.  Widgets are module level so the validator can reach
# `root` and `Message`.
# ---------------------------------------------------------------------------
root = tk.Tk()
root.iconbitmap(default='goodies/favicon.ico')
style = ThemedStyle(root)
#setting title
root.title("CASS | Login")
#setting window size
width=390
height=220
screenwidth = root.winfo_screenwidth()
screenheight = root.winfo_screenheight()
# Center the fixed-size window on the screen.
alignstr = '%dx%d+%d+%d' % (width, height, (screenwidth - width) / 2, (screenheight - height) / 2)
root.geometry(alignstr)
root.resizable(width=False, height=False)
GLabel_898=tk.Label(root)
ft = tkFont.Font(family='Arial Bold',size=11)
GLabel_898["font"] = ft
GLabel_898["fg"] = "#000000"
GLabel_898["justify"] = "center"
GLabel_898["text"] = "Enter your details to login"
GLabel_898.place(x=90,y=10,width=197,height=30)
# Username label + entry
GLabel_692=tk.Label(root)
ft = tkFont.Font(family='Arial',size=11)
GLabel_692["font"] = ft
GLabel_692["fg"] = "#000000"
GLabel_692["justify"] = "left"
GLabel_692["text"] = "Username"
GLabel_692.place(x=20,y=50,width=70,height=30)
username = tk.StringVar()
GLineEdit_526=tk.Entry(root, textvariable=username)
GLineEdit_526["borderwidth"] = "1px"
ft = tkFont.Font(family='Arial',size=11)
GLineEdit_526["font"] = ft
GLineEdit_526["fg"] = "#000000"
GLineEdit_526["justify"] = "left"
GLineEdit_526.place(x=150,y=50,width=212,height=30)
# Password label + masked entry
GLabel_232=tk.Label(root)
ft = tkFont.Font(family='Arial',size=11)
GLabel_232["font"] = ft
GLabel_232["fg"] = "#000000"
GLabel_232["justify"] = "left"
GLabel_232["text"] = "Password"
GLabel_232.place(x=20,y=100,width=70,height=25)
password = tk.StringVar()
GLineEdit_165=tk.Entry(root, textvariable=password, show='*')
GLineEdit_165["borderwidth"] = "1px"
ft = tkFont.Font(family='Arial',size=11)
GLineEdit_165["font"] = ft
GLineEdit_165["fg"] = "#000000"
GLineEdit_165["justify"] = "left"
GLineEdit_165.place(x=150,y=100,width=212,height=30)
GButton_819=tk.Button(root)
GButton_819["bg"] = "#333333"
ft = tkFont.Font(family='Arial Bold',size=11)
GButton_819["font"] = ft
GButton_819["fg"] = "#ffffff"
GButton_819["justify"] = "center"
GButton_819["text"] = "L O G I N"
GButton_819.place(x=20,y=150,width=345,height=30)
# Bind the form variables.  NOTE: this rebinds the module name `validateLogin`
# to the partial, shadowing the function above.
validateLogin = partial(validateLogin, username, password)
GButton_819["command"] = validateLogin
# Error-message label written by validateLogin on failure.
Message=tk.Label(root)
ft = tkFont.Font(family='Arial',size=10)
Message["font"] = ft
Message["fg"] = "#d10606"
Message["text"] = ""
Message.place(x=4,y=190,width=150,height=30)
root.mainloop()
|
{"/scan_webcam.py": ["/db_class.py"], "/admin_takeattendance.py": ["/db_class.py", "/admin.py", "/admin_generateids.py", "/admin_impexp.py", "/admin_viewattendance.py", "/forgot.py"], "/forgot.py": ["/db_class.py"], "/admin.py": ["/admin_generateids.py", "/admin_impexp.py", "/admin_takeattendance.py", "/admin_viewattendance.py", "/forgot.py"], "/admin_generateids.py": ["/table_class.py", "/db_class.py", "/single_id.py", "/double_id.py", "/admin.py", "/admin_impexp.py", "/admin_takeattendance.py", "/admin_viewattendance.py", "/forgot.py"], "/admin_impexp.py": ["/db_class.py", "/table_class.py", "/report_print2.py", "/admin.py", "/admin_generateids.py", "/admin_takeattendance.py", "/admin_viewattendance.py", "/forgot.py"], "/admin_viewattendance.py": ["/table_class.py", "/report_print2.py", "/admin.py", "/admin_generateids.py", "/admin_impexp.py", "/admin_takeattendance.py", "/forgot.py"], "/goodies/register.py": ["/db_class.py"], "/login.py": ["/db_class.py", "/admin.py"]}
|
45,185,007
|
Nicolas1st/ClassAssignments
|
refs/heads/master
|
/receiver.py
|
import paho.mqtt.client as mqtt
def on_connect(client, userdata, flags, rc):
    """MQTT connect callback: report the result code, subscribe to the test
    topic, and raise the module's connection flag."""
    print(f'Connected to {rc}')
    topic = "topic/test"
    client.subscribe(topic)
    client.connection_flag = True
def on_disconnect(client, userdata, flags, rc):
    """MQTT disconnect callback: report whether the drop was expected, then
    release the subscription and clear the connection flag."""
    print('Unexpected disconnection.' if rc != 0 else 'Disconnected.')
    client.unsubscribe("topic/test")
    client.connection_flag = False
def on_message(client, userdata, msg):
    """MQTT message callback: disconnect when a (case-insensitive) "stop"
    payload arrives.

    Fix: the original compared `payload.decode().lower()` to "Stop" -- a
    lower-cased string can never equal a capitalized literal, so the stop
    command never triggered.  Compare to "stop" instead.
    """
    if msg.payload.decode().lower() == "stop":
        client.disconnect()
# ---------------------------------------------------------------------------
# Script entry: connect to a user-supplied broker and stay alive until the
# client disconnects (on_message disconnects on a "stop" payload).
#
# Fix: the original initialised `client.connected`, but the callbacks above
# set `client.connection_flag`, so `while client.connected: pass` fell
# through immediately and the script exited at once.  Wait on the attribute
# the callbacks actually maintain, and sleep instead of hot-spinning.
# ---------------------------------------------------------------------------
import time

ip_address = input('What is the ip address of the mqtt broker you want to connect to? ')
client = mqtt.Client()
client.connection_flag = False
# Register callbacks before starting the network loop so no event is missed.
client.on_connect = on_connect
client.on_disconnect = on_disconnect
client.on_message = on_message
client.connect(ip_address, 1883, 60)
client.loop_start()
while not client.connection_flag:
    time.sleep(0.1)  # wait for on_connect to fire
while client.connection_flag:
    time.sleep(0.1)  # stay alive until on_disconnect clears the flag
client.disconnect()
client.loop_stop()
|
{"/ZombiTown.py": ["/SunCloudStuff.py"], "/publisher.py": ["/SunCloudStuff.py"]}
|
45,233,982
|
ProMostafa/sn3a
|
refs/heads/main
|
/backend/service/admin.py
|
from django.contrib import admin
from .models import Order, Services, SubServices, OrderProducts, Product
# Register your models here.
# Expose the service-app models in the Django admin site.
admin.site.register(Order)
admin.site.register(Services)
admin.site.register(SubServices)
# NOTE(review): OrderProducts is imported but never registered -- confirm
# whether it should also appear in the admin.
admin.site.register(Product)
|
{"/backend/service/serializers.py": ["/backend/service/models.py"], "/backend/service/views.py": ["/backend/service/models.py", "/backend/service/serializers.py"], "/backend/service/admin.py": ["/backend/service/models.py"], "/backend/account/serializers.py": ["/backend/account/models.py"], "/backend/account/urls.py": ["/backend/account/views.py"], "/backend/account/views.py": ["/backend/account/models.py", "/backend/account/serializers.py"], "/backend/account/models.py": ["/backend/account/validations.py"], "/backend/service/urls.py": ["/backend/service/views.py"]}
|
45,233,983
|
ProMostafa/sn3a
|
refs/heads/main
|
/backend/account/migrations/0001_initial.py
|
# Generated by Django 3.1.4 on 2020-12-10 14:34
import account.validations
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated initial migration: creates the custom account.User table."""

    initial = True
    dependencies = [
    ]
    operations = [
        migrations.CreateModel(
            name='User',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('password', models.CharField(max_length=128, verbose_name='password')),
                ('last_login', models.DateTimeField(blank=True, null=True, verbose_name='last login')),
                ('email', models.EmailField(max_length=255, unique=True, verbose_name='email address')),
                ('username', models.CharField(max_length=255, unique=True)),
                ('first_name', models.CharField(blank=True, max_length=60)),
                ('last_name', models.CharField(blank=True, max_length=60)),
                ('avatar', models.ImageField(blank=True, upload_to='avatar/')),
                ('address', models.CharField(max_length=1024)),
                ('phone', models.CharField(max_length=11, validators=[account.validations.phone_validation])),
                ('date_of_creation', models.DateField(auto_now_add=True)),
                ('is_active', models.BooleanField(default=True)),
                ('job', models.CharField(choices=[('None', 'None'), ('سباك', 'سباك'), ('نجار', 'نجار'), ('كهربائى', 'كهربائى'), ('مبيض محاره', 'مبيض محاره')], default='None', max_length=50)),
                ('available', models.BooleanField()),
                ('is_admin', models.BooleanField(default=False)),
                ('is_superuser', models.BooleanField(default=False)),
                ('is_stuff', models.BooleanField(default=False)),
                ('is_technical', models.BooleanField(default=False)),
            ],
            options={
                'abstract': False,
            },
        ),
    ]
|
{"/backend/service/serializers.py": ["/backend/service/models.py"], "/backend/service/views.py": ["/backend/service/models.py", "/backend/service/serializers.py"], "/backend/service/admin.py": ["/backend/service/models.py"], "/backend/account/serializers.py": ["/backend/account/models.py"], "/backend/account/urls.py": ["/backend/account/views.py"], "/backend/account/views.py": ["/backend/account/models.py", "/backend/account/serializers.py"], "/backend/account/models.py": ["/backend/account/validations.py"], "/backend/service/urls.py": ["/backend/service/views.py"]}
|
45,233,984
|
ProMostafa/sn3a
|
refs/heads/main
|
/backend/account/migrations/0006_auto_20210102_0045.py
|
# Generated by Django 3.1.4 on 2021-01-02 00:45
from django.db import migrations
class Migration(migrations.Migration):
    """Auto-generated schema change: drops the `is_technical` flag and the
    `job` choice field from account.User."""

    dependencies = [
        ('account', '0005_auto_20210102_0010'),
    ]
    operations = [
        migrations.RemoveField(
            model_name='user',
            name='is_technical',
        ),
        migrations.RemoveField(
            model_name='user',
            name='job',
        ),
    ]
|
{"/backend/service/serializers.py": ["/backend/service/models.py"], "/backend/service/views.py": ["/backend/service/models.py", "/backend/service/serializers.py"], "/backend/service/admin.py": ["/backend/service/models.py"], "/backend/account/serializers.py": ["/backend/account/models.py"], "/backend/account/urls.py": ["/backend/account/views.py"], "/backend/account/views.py": ["/backend/account/models.py", "/backend/account/serializers.py"], "/backend/account/models.py": ["/backend/account/validations.py"], "/backend/service/urls.py": ["/backend/service/views.py"]}
|
45,233,985
|
ProMostafa/sn3a
|
refs/heads/main
|
/backend/service/migrations/0001_initial.py
|
# Generated by Django 3.1.4 on 2020-12-30 16:06
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated initial migration for the service app: the service
    catalogue (Services/SubServices/Product), orders with their join tables
    and pictures, and customer-to-technical ratings."""

    initial = True
    dependencies = [
        # Order/Rating FKs point at whatever AUTH_USER_MODEL is configured.
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]
    operations = [
        migrations.CreateModel(
            name='Order',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('status', models.BooleanField(default=False)),
                ('create_at', models.DateField(auto_now_add=True)),
                ('description', models.TextField(blank=True)),
                ('date', models.DateField()),
                ('total_cost', models.FloatField()),
                ('customer', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='customer_order', to=settings.AUTH_USER_MODEL)),
            ],
        ),
        migrations.CreateModel(
            name='Services',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('type', models.CharField(max_length=100)),
                ('description', models.TextField()),
                ('image', models.ImageField(upload_to='service/')),
            ],
        ),
        migrations.CreateModel(
            name='SubServices',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=100)),
                ('description', models.TextField()),
                ('image', models.ImageField(upload_to='sub_services/')),
                ('cost', models.FloatField()),
                ('service', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='service.services')),
            ],
        ),
        migrations.CreateModel(
            name='Product',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=255)),
                ('cost', models.FloatField()),
                ('image', models.ImageField(upload_to='products/')),
                ('category', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='product_category', to='service.services')),
            ],
        ),
        migrations.CreateModel(
            name='OrderSubService',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('order', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='service.order')),
                ('sub_service', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='sub_services_order', to='service.subservices')),
            ],
        ),
        migrations.CreateModel(
            name='OrderProducts',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('order', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='service.order')),
                ('product', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='service.product')),
            ],
        ),
        migrations.CreateModel(
            name='OrderPictures',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('pictures', models.ImageField(upload_to='order/')),
                ('order', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='service.order')),
            ],
        ),
        migrations.AddField(
            model_name='order',
            name='service',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='service_order', to='service.services'),
        ),
        migrations.AddField(
            model_name='order',
            name='technical',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='technical_order', to=settings.AUTH_USER_MODEL),
        ),
        migrations.CreateModel(
            name='Rating',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('rate', models.IntegerField(choices=[(1, 1), (2, 2), (3, 3), (4, 4), (5, 5)])),
                ('customer', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='customer_rate', to=settings.AUTH_USER_MODEL)),
                ('order', models.ForeignKey(blank=True, on_delete=django.db.models.deletion.CASCADE, related_name='customer_rate_order', to='service.order')),
                ('technical', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='technical_rate', to=settings.AUTH_USER_MODEL)),
            ],
            options={
                'unique_together': {('customer', 'technical')},
                'index_together': {('customer', 'technical')},
            },
        ),
    ]
|
{"/backend/service/serializers.py": ["/backend/service/models.py"], "/backend/service/views.py": ["/backend/service/models.py", "/backend/service/serializers.py"], "/backend/service/admin.py": ["/backend/service/models.py"], "/backend/account/serializers.py": ["/backend/account/models.py"], "/backend/account/urls.py": ["/backend/account/views.py"], "/backend/account/views.py": ["/backend/account/models.py", "/backend/account/serializers.py"], "/backend/account/models.py": ["/backend/account/validations.py"], "/backend/service/urls.py": ["/backend/service/views.py"]}
|
45,233,986
|
ProMostafa/sn3a
|
refs/heads/main
|
/backend/account/serializers.py
|
from django.contrib.auth.password_validation import validate_password
from django.contrib.auth import password_validation
from django.utils.translation import gettext_lazy as _
from django.utils.http import urlsafe_base64_encode, urlsafe_base64_decode
from django.contrib.auth.tokens import PasswordResetTokenGenerator
from django.contrib.sites.shortcuts import get_current_site
from django.urls import reverse
from django.utils.encoding import smart_bytes, force_str, smart_str, DjangoUnicodeDecodeError
from rest_framework.exceptions import AuthenticationFailed
from rest_framework.authtoken.models import Token
from rest_framework import serializers
from .models import User
class UserSerializer(serializers.ModelSerializer):
    """Serializes account.User; the password is accepted on input but never
    echoed back (write_only)."""
    class Meta:
        model = User
        fields = '__all__'
        extra_kwargs = {'password': {'write_only': True, 'required': True}}
    def create(self, validated_data):
        """Create the user through the manager (presumably hashes the
        password -- confirm create_user in account.models) and issue its
        DRF auth token."""
        user = User.objects.create_user(
            email=validated_data['email'],
            username=validated_data['username'],
            phone=validated_data['phone'],
            address=validated_data['address'],
            password=validated_data['password'],
        )
        # if validated_data['is_technical']:
        #     user.is_technical = True
        #     user.save()
        # very important Hint:
        # Django not create Token When create user for this you must create token for every user register
        Token.objects.create(user=user)
        return user
class ChangePasswordSerializer(serializers.Serializer):
    """Payload for the change-password endpoint: the current password plus
    the new one entered twice."""
    old_password = serializers.CharField(required=True)
    new_password = serializers.CharField(required=True)
    confirm_new_password = serializers.CharField(required=True)

    def validate(self, data):
        """Reject the payload when the two new-password fields disagree."""
        passwords_match = data['new_password'] == data['confirm_new_password']
        if not passwords_match:
            raise serializers.ValidationError(
                {'confirm_new_password': _("The two password fields didn't match.")}
            )
        return data

    def validate_new_password(self, value):
        """Run Django's configured password validators on the new password."""
        validate_password(value)
        return value
class EmailVerificationSerializer(serializers.Serializer):
    """Carries the token string from the e-mail verification link.

    NOTE(review): this is a plain Serializer, so the Meta below (model/fields)
    has no effect -- only the explicit `token` field is used.
    """
    token = serializers.CharField(max_length=600)
    class Meta:
        model = User
        fields = ['token']
class ResetPassowrdByEmailSerializer(serializers.Serializer):
    """Request body for starting a password reset.

    NOTE(review): the class name misspells 'Password'; kept as-is because
    views/urls import it by this name.
    """
    email = serializers.EmailField(min_length=2)
    class Meta:
        fields = ['email']
class SetNewPasswordSeriliazer(serializers.Serializer):
    """Completes a password reset: checks the uid64/token pair from the reset
    link and stores the new password.

    (Class name misspells 'Serializer'; kept because views import it by this
    name.)

    Raises:
        AuthenticationFailed: when the uid cannot be decoded, the user does
        not exist, or the token check fails.
    """
    password = serializers.CharField(min_length=2, max_length=20, write_only=True)
    token = serializers.CharField(min_length=2, write_only=True)
    uid64 = serializers.CharField(min_length=2, write_only=True)
    class Meta:
        fields=['password','token','uid64']
    def validate(self, attrs):
        password = attrs.get('password')
        token = attrs.get('token')
        uid64 = attrs.get('uid64')
        try:
            # uid64 is the urlsafe-base64-encoded primary key from the link.
            user_id = force_str(urlsafe_base64_decode(uid64))
            user = User.objects.get(id=user_id)
        except Exception as err:
            # Same message/status the original produced, but chain the cause
            # instead of swallowing it.
            raise AuthenticationFailed('The reset link is invalid', 401) from err
        if not PasswordResetTokenGenerator().check_token(user, token):
            raise AuthenticationFailed('The reset link is invalid', 401)
        user.set_password(password)
        user.save()
        # The original ended with an unreachable `return super().validate(attrs)`
        # after this return -- removed.
        return user
class SendMessageToAdminSeriliazer(serializers.Serializer):
    """Contact-form payload forwarded to the site admin.

    NOTE(review): the class name misspells 'Serializer'; kept as-is because
    views import it by this name.
    """
    email = serializers.EmailField(min_length=2)
    subject = serializers.CharField(max_length=1024, min_length=5)
    # Rendered as a textarea in DRF's browsable API.
    message = serializers.CharField(style={'base_template': 'textarea.html'})
    class Meta:
        fields = ['email', 'subject', 'message']
|
{"/backend/service/serializers.py": ["/backend/service/models.py"], "/backend/service/views.py": ["/backend/service/models.py", "/backend/service/serializers.py"], "/backend/service/admin.py": ["/backend/service/models.py"], "/backend/account/serializers.py": ["/backend/account/models.py"], "/backend/account/urls.py": ["/backend/account/views.py"], "/backend/account/views.py": ["/backend/account/models.py", "/backend/account/serializers.py"], "/backend/account/models.py": ["/backend/account/validations.py"], "/backend/service/urls.py": ["/backend/service/views.py"]}
|
45,233,987
|
ProMostafa/sn3a
|
refs/heads/main
|
/backend/account/urls.py
|
from django.urls import path, include
from django.conf import settings
from django.conf.urls.static import static
from .views import UserViewSet, UpdatePasswordView,\
    PasswordTokenCheck, RestPasswordByEmailView, VerifyEmail, SetNewPasswordView,\
    SendMessageToAdmin
from rest_framework.routers import DefaultRouter
# The router generates the standard CRUD routes for /users/.
router = DefaultRouter()
router.register('users', UserViewSet)
urlpatterns = [
    path('', include(router.urls)),
    # path('get_all_technical/', get_all_technical, name='get_all_technical'),
    path('send_message/', SendMessageToAdmin.as_view(), name='send_message'),
    path('email_verify/', VerifyEmail.as_view(), name='email_verify'),
    path('change_password/', UpdatePasswordView.as_view(), name='change_password'),
    # Password-reset flow: request by email, confirm the uid/token pair from
    # the mailed link, then submit the new password.
    path('reset_password/', RestPasswordByEmailView.as_view(), name='reset_password'),
    path('password_reset_confirm/<uid64>/<token>/', PasswordTokenCheck.as_view(), name='password_reset_confirm'),
    path('password_reset_complete/',SetNewPasswordView.as_view(), name='password_reset_complete')
]
# Serve uploaded media through Django only during development.
if settings.DEBUG:
    urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
|
{"/backend/service/serializers.py": ["/backend/service/models.py"], "/backend/service/views.py": ["/backend/service/models.py", "/backend/service/serializers.py"], "/backend/service/admin.py": ["/backend/service/models.py"], "/backend/account/serializers.py": ["/backend/account/models.py"], "/backend/account/urls.py": ["/backend/account/views.py"], "/backend/account/views.py": ["/backend/account/models.py", "/backend/account/serializers.py"], "/backend/account/models.py": ["/backend/account/validations.py"], "/backend/service/urls.py": ["/backend/service/views.py"]}
|
45,233,988
|
ProMostafa/sn3a
|
refs/heads/main
|
/backend/service/models.py
|
from django.db import models
from django.core.validators import MaxLengthValidator,MinLengthValidator
from account.models import User , Services
# Create your models here.
# class Services(models.Model):
# type = models.CharField(max_length=100)
# description = models.TextField()
# image = models.ImageField(upload_to='service/')
#
# def __str__(self):
# return f"Service Category: {self.type}"
# class ServicePicture(models.Model):
# service = models.ForeignKey(Services, on_delete=models.CASCADE)
# picture = models.ImageField(upload_to='services/')
class SubServices(models.Model):
    """A concrete offering under a parent Services category, with its price."""
    service = models.ForeignKey(Services, on_delete=models.CASCADE)
    name = models.CharField(max_length=100)
    description = models.TextField()
    image = models.ImageField(upload_to='sub_services/')
    cost = models.FloatField()
    def __str__(self):
        return f"Service Category: {self.service.type}, Service Name: {self.name}"
class Product(models.Model):
    """A purchasable product belonging to a Services category."""
    category = models.ForeignKey(Services, on_delete=models.CASCADE, related_name='product_category')
    name = models.CharField(max_length=255)
    cost = models.FloatField()
    image = models.ImageField(upload_to='products/')
    # def __str__(self):
    #     return f"category: {self.category.name} , product name:{self.name}"
class Order(models.Model):
    """A customer's booking of a technical for a service on a given date."""
    customer = models.ForeignKey(User, related_name='customer_order', on_delete=models.CASCADE)
    technical = models.ForeignKey(User, related_name='technical_order', on_delete=models.CASCADE)
    service = models.ForeignKey(Services, related_name='service_order', on_delete=models.CASCADE)
    # Defaults to False on creation -- presumably flipped when the order is
    # completed/accepted; confirm against the views.
    status = models.BooleanField(default=False)
    create_at = models.DateField(auto_now_add=True)
    description = models.TextField(blank=True)
    date = models.DateField()
    total_cost = models.FloatField()
class OrderPictures(models.Model):
    """An image attached to an order (one row per picture)."""
    order = models.ForeignKey(Order, on_delete=models.CASCADE)
    pictures = models.ImageField(upload_to='order/')
class OrderSubService(models.Model):
    """Join table: which sub-services an order includes."""
    order = models.ForeignKey(Order, on_delete=models.CASCADE)
    sub_service = models.ForeignKey(SubServices, related_name='sub_services_order', on_delete=models.CASCADE)
class OrderProducts(models.Model):
    """Join table: a product included in an order."""
    order = models.ForeignKey(Order, on_delete=models.CASCADE)
    product = models.ForeignKey(Product,on_delete=models.CASCADE)
class Rating(models.Model):
    """A 1-5 star rating a customer gives a technical; one per pair."""
    customer = models.ForeignKey(User, related_name='customer_rate', on_delete=models.CASCADE)
    technical = models.ForeignKey(User, related_name='technical_rate', on_delete=models.CASCADE)
    order = models.ForeignKey(Order, related_name='customer_rate_order', on_delete=models.CASCADE, blank=True)
    rate = models.IntegerField(choices=[
        (1, 1),
        (2, 2),
        (3, 3),
        (4, 4),
        (5, 5),
    ])
    class Meta:
        # One rating per (customer, technical) pair; views update it in place.
        unique_together = (('customer', 'technical'),)
        # when ordering data
        # NOTE(review): index_together is deprecated in modern Django
        # (removed in 5.1) — migrate to Meta.indexes when upgrading.
        index_together = (('customer', 'technical'),)
|
{"/backend/service/serializers.py": ["/backend/service/models.py"], "/backend/service/views.py": ["/backend/service/models.py", "/backend/service/serializers.py"], "/backend/service/admin.py": ["/backend/service/models.py"], "/backend/account/serializers.py": ["/backend/account/models.py"], "/backend/account/urls.py": ["/backend/account/views.py"], "/backend/account/views.py": ["/backend/account/models.py", "/backend/account/serializers.py"], "/backend/account/models.py": ["/backend/account/validations.py"], "/backend/service/urls.py": ["/backend/service/views.py"]}
|
45,233,989
|
ProMostafa/sn3a
|
refs/heads/main
|
/backend/account/views.py
|
from django.utils.http import urlsafe_base64_encode, urlsafe_base64_decode
from django.contrib.auth.tokens import PasswordResetTokenGenerator
from django.contrib.sites.shortcuts import get_current_site
from django.urls import reverse
from django.utils.encoding import smart_bytes, force_str, smart_str, DjangoUnicodeDecodeError
from rest_framework import viewsets, status
from rest_framework.response import Response
from rest_framework.views import APIView
from rest_framework.permissions import AllowAny, IsAuthenticated
from rest_framework.decorators import action
from rest_framework.authentication import TokenAuthentication
from rest_framework_simplejwt.tokens import RefreshToken
from django.template import Context
from django.template.loader import render_to_string, get_template
from django.core.mail import send_mail
from rest_framework.authtoken.models import Token
from rest_framework.decorators import api_view
from .models import User
from .utils import Util
from .serializers import UserSerializer, ChangePasswordSerializer,\
ResetPassowrdByEmailSerializer, SetNewPasswordSeriliazer, SendMessageToAdminSeriliazer
import jwt
from decouple import config
# Create your views here.
class UserViewSet(viewsets.ModelViewSet):
    """CRUD for users plus technical-listing and current-user endpoints.

    Registration (`create`) deactivates the new account and emails an
    activation link containing a JWT access token.
    """
    queryset = User.objects.all()
    serializer_class = UserSerializer
    permission_classes = (AllowAny,)
    def create(self, request, *args, **kwargs):
        """Register a user, mark inactive, and send the activation email."""
        user = request.data
        serializer = self.serializer_class(data=user)
        serializer.is_valid(raise_exception=True)
        serializer.save()
        user_data = serializer.data
        # Re-fetch the saved user so we can flip is_active off until verified.
        user = User.objects.get(email=user_data['email'])
        user.is_active = False
        user.save()
        # JWT access token doubles as the email-verification token.
        token = RefreshToken.for_user(user).access_token
        current_site = get_current_site(request)
        # relative_link = reverse('email_verify')
        # adsurl = f'http://{current_site}{relative_link}?token={str(token)}'
        # relative_link = reverse('email_verify')
        # NOTE(review): hard-coded Angular dev-server URL — move to settings.
        adsurl = f'http://localhost:4200/activate_account?token={str(token)}'
        # email_body = f'Hi {user.username} Use link below to verify your email \n {adsurl}'
        # data = {'email_body': email_body, 'to_email': user.email, 'email_subject': 'Verify Your Account'}
        # Util.send_email(data)
        ctx = {
            'user': user.username,
            'adsurl': adsurl
        }
        message = get_template('activate.html').render(ctx)
        data = {'email_body': message, 'to_email':user.email, 'email_subject': 'Verify Your Account'}
        # NOTE(review): debug print leaks the email body (incl. token) to stdout.
        print(data)
        Util.send_html_email(data)
        # response ={
        #     'message': 'user created',
        #     'user': user_data
        # }
        return Response(user_data, status=status.HTTP_201_CREATED)
    @action(detail=False, methods=['GET'])
    def get_all_technical(self, request, pk=None):
        """List every user flagged as a technical."""
        # technicals = User.objects.filter(~Q(job=None))
        technicals = User.objects.filter(is_technical=True)
        serializer = UserSerializer(technicals, many=True)
        # response = {'message': 'get all technicals', 'result': serializer.data}
        return Response(serializer.data, status=status.HTTP_200_OK)
    @action(detail=True, methods=['GET'])
    def get_technical_with_job(self, request, pk=None):
        """List technicals whose job (Services FK) matches *pk*."""
        technicals = User.objects.filter(technical_job=pk)
        print(technicals)
        print(pk)
        serializer = UserSerializer(technicals, many=True)
        return Response(serializer.data, status=status.HTTP_200_OK)
    @action(detail=False, methods=['GET'],
            authentication_classes=[TokenAuthentication],
            permission_classes=[IsAuthenticated])
    def get_user(self, request, pk=None):
        """Return the profile of the user owning the auth token."""
        user = Token.objects.get(key=request.auth.key).user
        serializer = self.serializer_class(user, many=False)
        return Response(serializer.data, status=status.HTTP_200_OK)
# @api_view(['GET'])
# def get_all_technical(request):
# """
# An endpoint for get_all_technical.
# """
# if request.method == "GET":
# technicals = User.objects.filter(is_technical=True)
# serializer = UserSerializer(technicals, many=True)
# # response = {'message': 'get all technicals', 'result': serializer.data}
# return Response(serializer.data, status=status.HTTP_200_OK)
class VerifyEmail(APIView):
    """Activate an account from the JWT token embedded in the email link."""
    def get(self, request):
        token = request.GET.get('token')
        try:
            # NOTE(review): PyJWT >= 2.0 requires algorithms=['HS256'] here —
            # this call shape only works on PyJWT 1.x; confirm pinned version.
            data = jwt.decode(token, config('SECRET_KEY'))
            user = User.objects.get(id=data['user_id'])
            user.is_active = True
            user.save()
            return Response({'message': 'Successfully activated Account'},status=status.HTTP_200_OK)
        # if jwt (JSon Web Token )token ExpiredSignatureError generate new token ****** not do untill now
        except jwt.ExpiredSignatureError as err:
            return Response({'error': 'Activation link Expired'}, status=status.HTTP_400_BAD_REQUEST)
        except jwt.exceptions.DecodeError as err:
            return Response({'error': 'Invalid token'}, status=status.HTTP_400_BAD_REQUEST)
class UpdatePasswordView(APIView):
    """
    An endpoint for changing password.

    PUT with {old_password, new_password}; verifies the old password
    before hashing and storing the new one.
    """
    authentication_classes = (TokenAuthentication,)
    permission_classes = (IsAuthenticated, )
    def get_object(self, queryset=None):
        # The object being updated is always the authenticated user.
        return self.request.user
    def put(self, request, *args, **kwargs):
        self.object = self.get_object()
        serializer = ChangePasswordSerializer(data=request.data)
        if serializer.is_valid():
            # Check old password
            old_password = serializer.data.get("old_password")
            if not self.object.check_password(old_password):
                return Response({"old_password": ["Wrong password."]},
                                status=status.HTTP_400_BAD_REQUEST)
            # set_password also hashes the password that the user will get
            self.object.set_password(serializer.data.get("new_password"))
            self.object.save()
            response = {
                'message': 'Password changed'
            }
            return Response(response, status=status.HTTP_200_OK)
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
class RestPasswordByEmailView(APIView):
    """Start a password reset: email the user a tokenized reset link.

    POST {email}. Responds 200 whether with a success note or, when the
    address is unknown, 400 with a failure note.
    """
    def post(self, request):
        serializer = ResetPassowrdByEmailSerializer(data=request.data)
        if serializer.is_valid():
            email = request.data['email']
            if User.objects.filter(email=email).exists():
                user = User.objects.get(email=email)
                # uid is base64-encoded so it survives URL transport.
                uid64 = urlsafe_base64_encode(smart_bytes(user.id))
                token = PasswordResetTokenGenerator().make_token(user)
                current_site = get_current_site(
                    request=request).domain
                relative_link = reverse(
                    'password_reset_confirm', kwargs={'uid64': uid64, 'token': token})
                # Frontend (Angular dev server) consumes the token/uid pair.
                # NOTE(review): hard-coded host — move to settings.
                adsurl = f'http://localhost:4200/ResetPasswordConfirm?token={str(token)}&uid64={str(uid64)}'
                ctx = {
                    'user': 'customer',
                    'adsurl': adsurl
                }
                message = get_template('reset_password.html').render(ctx)
                # Fixed user-facing copy: "Rest" -> "Reset", grammar corrected;
                # also removed the debug print of the raw email address.
                data = {'email_body': message, 'to_email': email, 'email_subject': 'Reset your password'}
                Util.send_html_email(data)
                return Response({'success': 'We have sent you a link to reset your password'}, status=status.HTTP_200_OK)
            return Response({'fail': 'This email is not registered'}, status=status.HTTP_400_BAD_REQUEST)
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
class PasswordTokenCheck(APIView):
    """Validate a password-reset uid/token pair before showing the reset form."""
    def get(self, request, uid64, token):
        try:
            # Renamed from `id` to avoid shadowing the builtin.
            user_id = smart_str(urlsafe_base64_decode(uid64))
            user = User.objects.get(id=user_id)
            if not PasswordResetTokenGenerator().check_token(user, token):
                return Response({'error': 'Token is not valid, please request a new one'},status=status.HTTP_401_UNAUTHORIZED)
            return Response({'success': True, 'message': 'Credentials Valid', 'uid64':uid64, 'token': token},status= status.HTTP_200_OK)
        except (DjangoUnicodeDecodeError, User.DoesNotExist, ValueError):
            # Previously only DjangoUnicodeDecodeError was caught, so a forged
            # uid64 pointing at a missing user (or a non-integer decode result)
            # produced an unhandled exception / 500 instead of a clean 401.
            return Response({'error': 'Token is not valid, please request a new one'}, status=status.HTTP_401_UNAUTHORIZED)
class SetNewPasswordView(APIView):
    """Finalize a password reset; the serializer performs the actual update."""
    def patch(self, request):
        payload = SetNewPasswordSeriliazer(data=request.data)
        payload.is_valid(raise_exception=True)
        body = {'success': True, 'message': 'Password reset successfully'}
        return Response(body, status=status.HTTP_200_OK)
class SendMessageToAdmin(APIView):
    """Contact-form endpoint: forwards a visitor's message to the admin inbox."""
    def post(self, request):
        serializer = SendMessageToAdminSeriliazer(data=request.data)
        if serializer.is_valid():
            message = request.data['message']
            subject = request.data['subject']
            email = request.data['email']
            data = {'email_body': message, 'to_email': email, 'email_subject': subject}
            admin = ['promostafaeladawy@gmail.com',]
            # NOTE(review): using the visitor's address as from_email is often
            # rejected by SMTP providers (spoofing) — confirm deliverability.
            send_mail(subject=subject, message=message, from_email=email, recipient_list=admin)
            return Response({'success': True, 'message': 'Message send successfully'}, status=status.HTTP_200_OK)
        return Response({'error': serializer.errors, 'message': 'Message not send '}, status=status.HTTP_400_BAD_REQUEST)
|
{"/backend/service/serializers.py": ["/backend/service/models.py"], "/backend/service/views.py": ["/backend/service/models.py", "/backend/service/serializers.py"], "/backend/service/admin.py": ["/backend/service/models.py"], "/backend/account/serializers.py": ["/backend/account/models.py"], "/backend/account/urls.py": ["/backend/account/views.py"], "/backend/account/views.py": ["/backend/account/models.py", "/backend/account/serializers.py"], "/backend/account/models.py": ["/backend/account/validations.py"], "/backend/service/urls.py": ["/backend/service/views.py"]}
|
45,233,990
|
ProMostafa/sn3a
|
refs/heads/main
|
/backend/account/validations.py
|
import re
from django.core.exceptions import ValidationError
def phone_validation(val):
    """Validate an Egyptian mobile number: exactly 11 digits with an
    010/011/012/015 prefix.

    Raises:
        ValidationError: when *val* is not a valid number.
    """
    # fullmatch anchors both ends of the pattern; the original re.match only
    # checked a prefix, so over-long inputs like '010123456789xx' passed.
    if not re.fullmatch(r'0(10|12|11|15)[0-9]{8}', str(val)):
        raise ValidationError('Enter Valid Egypt Phone Number')
# def password_validation(password1, password2):
# # Check that the two password entries match
# if password1 and password2 and password1 != password2:
# raise ValidationError("Passwords don't match")
# return password2
|
{"/backend/service/serializers.py": ["/backend/service/models.py"], "/backend/service/views.py": ["/backend/service/models.py", "/backend/service/serializers.py"], "/backend/service/admin.py": ["/backend/service/models.py"], "/backend/account/serializers.py": ["/backend/account/models.py"], "/backend/account/urls.py": ["/backend/account/views.py"], "/backend/account/views.py": ["/backend/account/models.py", "/backend/account/serializers.py"], "/backend/account/models.py": ["/backend/account/validations.py"], "/backend/service/urls.py": ["/backend/service/views.py"]}
|
45,233,991
|
ProMostafa/sn3a
|
refs/heads/main
|
/backend/account/models.py
|
from django.db import models
# from service.models import Services as Job
# import service.models.Services
from django.contrib.auth.models import (BaseUserManager, AbstractBaseUser)
from .validations import phone_validation
from datetime import datetime
# from service.models import Rating
# Legacy choices for technical jobs (Arabic labels: plumber, carpenter,
# electrician, plasterer); superseded by the Services FK on User.technical_job.
JOBS = [
    ('None', 'None'),
    ('سباك', 'سباك'),
    ('نجار', 'نجار'),
    ('كهربائى', 'كهربائى'),
    ('مبيض محاره', 'مبيض محاره'),
]
# Create your models here.
# for avoid circular import
class Services(models.Model):
    """Service category. Declared here (not imported from the service app)
    to avoid a circular import between the account and service apps."""
    type = models.CharField(max_length=100)
    description = models.TextField()
    image = models.ImageField(upload_to='service/')
    def __str__(self):
        return f"Service Category: {self.type}"
class UserManager(BaseUserManager):
    """Manager providing email-keyed user and superuser creation."""
    def create_user(self, email, username, phone, address, password=None):
        """
        Create and save a User with the given email, username, phone,
        address and password (hashed via set_password).
        """
        # if not email or not username or not phone or not address:
        #     raise ValueError("User Must Have All Required Data ?")
        user = self.model(
            email=self.normalize_email(email),
            username=username,
            phone=phone,
            address=address
        )
        user.set_password(password)
        user.save(using=self._db)
        return user
    def create_superuser(self, email, username, phone, address, password=None):
        """Create a regular user, then elevate it to admin.

        NOTE(review): only is_admin is set — is_superuser/is_stuff stay
        False; confirm whether that is intended for this permission model.
        """
        user = self.create_user(
            email,
            username=username,
            phone=phone,
            password=password,
            address=address
        )
        user.is_admin = True
        user.save(using=self._db)
        return user
class User(AbstractBaseUser):
    """Custom user: customers by default, technicals when is_technical=True.

    Login is by email (USERNAME_FIELD); username, phone and address are
    required at creation.
    """
    email = models.EmailField(
        verbose_name='email address',
        max_length=255,
        unique=True,
    )
    username = models.CharField(max_length=255, unique=True)
    first_name = models.CharField(max_length=60, blank=True, null=True)
    last_name = models.CharField(max_length=60, blank=True, null=True)
    avatar = models.ImageField(upload_to='avatar/', blank=True, null=True)
    address = models.CharField(max_length=1024)
    # Egyptian mobile number, validated in validations.phone_validation.
    phone = models.CharField(
        max_length=11,
        null=False,
        validators=[phone_validation]
    )
    date_of_creation = models.DateField(auto_now_add=True)
    # Set False at registration until the email activation link is used.
    is_active = models.BooleanField(default=True)
    # for technical Account
    is_technical = models.BooleanField(default=False)
    available = models.BooleanField(default=True)
    description = models.TextField(null=True, blank=True)
    # NOTE(review): default='' is an odd default for a FK (expects a pk or
    # None) — confirm; null=True already allows "no job".
    technical_job = models.ForeignKey(Services, on_delete=models.CASCADE, null=True, blank=True, default='')
    # for manage users
    is_admin = models.BooleanField(default=False)
    is_superuser = models.BooleanField(default=False)
    # NOTE(review): likely a typo for "is_staff" — the property below is the
    # one Django admin actually consults.
    is_stuff = models.BooleanField(default=False)
    objects = UserManager()
    # using for login
    # USERNAME_FIELD = 'username or email'
    USERNAME_FIELD = 'email'
    REQUIRED_FIELDS = ['username', 'phone', 'address']
    def __str__(self):
        if self.is_technical:
            return f"Technical Username: {self.username}"
        return f"Customer Username: {self.username}"
    # def no_of_rating(self):
    #     if self.is_technical:
    #         rating = Rating.objects.filter(technical=self)
    #         return len(rating)
    #     return 0
    #
    # def avg_rating(self):
    #     if self.is_technical:
    #         sum =0
    #         ratings = Rating.objects.filter(technical=self)
    #         for rating in ratings:
    #             sum += rating.stars
    #         if len(ratings) > 0:
    #             return sum/len(ratings)
    #         else:
    #             return 0
    #     return 0
    def has_perm(self, perm, obj=None):
        "Does the user have a specific permission?"
        # Simplest possible answer: Yes, always
        return True
    def has_module_perms(self, app_label):
        "Does the user have permissions to view the app `app_label`?"
        # Simplest possible answer: Yes, always
        return True
    @property
    def is_staff(self):
        "Is the user a member of staff?"
        # Simplest possible answer: All admins are staff
        return self.is_admin
|
{"/backend/service/serializers.py": ["/backend/service/models.py"], "/backend/service/views.py": ["/backend/service/models.py", "/backend/service/serializers.py"], "/backend/service/admin.py": ["/backend/service/models.py"], "/backend/account/serializers.py": ["/backend/account/models.py"], "/backend/account/urls.py": ["/backend/account/views.py"], "/backend/account/views.py": ["/backend/account/models.py", "/backend/account/serializers.py"], "/backend/account/models.py": ["/backend/account/validations.py"], "/backend/service/urls.py": ["/backend/service/views.py"]}
|
45,233,992
|
ProMostafa/sn3a
|
refs/heads/main
|
/backend/account/migrations/0005_auto_20210102_0010.py
|
# Generated by Django 3.1.4 on 2021-01-02 00:10
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated migration: adds the account-local Services model and the
    User.technical_job FK. Do not edit after it has been applied."""
    dependencies = [
        ('account', '0004_user_description'),
    ]
    operations = [
        migrations.CreateModel(
            name='Services',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('type', models.CharField(max_length=100)),
                ('description', models.TextField()),
                ('image', models.ImageField(upload_to='service/')),
            ],
        ),
        migrations.AddField(
            model_name='user',
            name='technical_job',
            field=models.ForeignKey(blank=True, default='', null=True, on_delete=django.db.models.deletion.CASCADE, to='account.services'),
        ),
    ]
|
{"/backend/service/serializers.py": ["/backend/service/models.py"], "/backend/service/views.py": ["/backend/service/models.py", "/backend/service/serializers.py"], "/backend/service/admin.py": ["/backend/service/models.py"], "/backend/account/serializers.py": ["/backend/account/models.py"], "/backend/account/urls.py": ["/backend/account/views.py"], "/backend/account/views.py": ["/backend/account/models.py", "/backend/account/serializers.py"], "/backend/account/models.py": ["/backend/account/validations.py"], "/backend/service/urls.py": ["/backend/service/views.py"]}
|
45,233,993
|
ProMostafa/sn3a
|
refs/heads/main
|
/backend/service/views.py
|
from django.shortcuts import render
from django.contrib.auth.decorators import login_required
from account.models import User
from .models import Services, SubServices, Order, Rating, OrderPictures,\
Product, OrderSubService, OrderProducts
from .serializers import ServicesSerializer, SubServicesSerializer,\
OrderSerializer, RatingSerializer, ProductSerializer
from rest_framework import viewsets, status
from rest_framework.response import Response
from django.http import JsonResponse
from rest_framework.decorators import action
from rest_framework.permissions import AllowAny, IsAuthenticated
from rest_framework.authentication import TokenAuthentication
from rest_framework.views import APIView
# Create your views here.
class ServiceViewSet(viewsets.ModelViewSet):
    """CRUD for service categories plus a sub-service lookup action."""
    queryset = Services.objects.all()
    serializer_class = ServicesSerializer
    permission_classes = (AllowAny, )
    @action(detail=True, methods=['GET'])
    def get_sub_service(self, request, pk=None):
        """List every SubServices row belonging to service *pk*."""
        sub_services = SubServices.objects.filter(service=pk)
        serializer = SubServicesSerializer(sub_services, many=True)
        return Response(serializer.data, status=status.HTTP_200_OK)
class SubServicesViewSet(viewsets.ModelViewSet):
    """CRUD for sub-services plus the order-placement action."""
    queryset = SubServices.objects.all()
    serializer_class = SubServicesSerializer
    permission_classes = (AllowAny, )
    @action(detail=False, methods=['POST'],
            authentication_classes=[TokenAuthentication],
            permission_classes=[IsAuthenticated])
    def apply_order(self, request, pk=None):
        """Create an Order (with optional images, sub-services, products)
        for the authenticated customer."""
        serializer = OrderSerializer(data=request.data)
        if serializer.is_valid():
            customer = request.user
            service = Services.objects.get(id=request.data['service'])
            technical = User.objects.get(id=request.data['technical'])
            # Order is built manually (not serializer.save()) so the customer
            # comes from the auth token, not the request body.
            customer_order = Order.objects.create(
                customer=customer,
                service=service,
                technical=technical,
                date=request.data['date'],
                total_cost=request.data['total_cost']
            )
            #
            # check for image order
            if 'description' in request.data:
                customer_order.description=request.data['description']
                customer_order.save()
            # check for image order
            for image in request.FILES.getlist('images'):
                OrderPictures.objects.create(order=customer_order, pictures=image)
            # check for sub services in order
            # NOTE(review): assumes 'sub_services' and 'products' keys are
            # always present — a request without them raises KeyError; confirm
            # the frontend always sends both (possibly empty).
            for sub_service in request.data['sub_services']:
                sub_service = SubServices.objects.get(id=sub_service)
                OrderSubService.objects.create(order=customer_order, sub_service=sub_service)
            # check for products in order
            for product in request.data['products']:
                product = Product.objects.get(id=product)
                OrderProducts.objects.create(order=customer_order, product=product)
            serializer = OrderSerializer(customer_order, many=False)
            response = {'message': 'Order Created', 'result': serializer.data}
            return Response(response, status=status.HTTP_200_OK)
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
    # @action(detail=True, methods=['POST'],
    #         authentication_classes=[TokenAuthentication],
    #         permission_classes=[IsAuthenticated])
    # def apply_order(self, request, pk=None):
    #     if 'technical' in request.data and 'date' in request.data and 'total_cost' in request.data and 'service' in request.data:
    #         customer = request.user
    #         service = Services.objects.get(id=request.data['service'])
    #         technical = User.objects.get(id=request.data['technical'])
    #         date = request.data['date']
    #         total_cost = request.data['total_cost']
    #         customer_order = Order.objects.create(
    #             customer=customer, service=service,
    #             technical=technical, date=date,
    #             total_cost=total_cost
    #         )
    #         customer_order.save()
    #         for image in request.FILES.getlist('images'):
    #             OrderPictures.objects.create(order=customer_order, pictures=image)
    #         serializer = OrderSerializer(customer_order, many=False)
    #         response = {'message': 'Order Created', 'result': serializer.data}
    #         return Response(response, status=status.HTTP_200_OK)
    #     else:
    #         response = {'message': 'You Need to provide All Requiring Data'}
    #         return Response(response, status=status.HTTP_400_BAD_REQUEST)
class CustomerOrder(viewsets.ModelViewSet):
    """Order endpoints for the authenticated customer."""
    queryset = Order.objects.all()
    serializer_class = OrderSerializer
    authentication_classes = (TokenAuthentication,)
    permission_classes = (IsAuthenticated,)
    @action(detail=True, methods=['POST'])
    def rate_technical_job(self, request, pk=None):
        """Create or update this customer's rating of the order's technical.

        Rating is unique per (customer, technical), so an existing row is
        updated in place; otherwise a new one is created.
        """
        if 'rate' in request.data:
            customer = request.user
            order = Order.objects.get(id=pk)
            rate = request.data['rate']
            try:
                rating = Rating.objects.get(customer=customer, technical=order.technical)
                rating.rate = rate
                rating.save()
                serializer=RatingSerializer(rating,many=False)
                response = {'message':'Rating Updated','result':serializer.data}
                return Response(response, status=status.HTTP_200_OK)
            except Rating.DoesNotExist:
                # The original bare `except:` swallowed *every* error (DB
                # failures, typos) and blindly created a new rating; only the
                # missing-rating case should fall through to creation.
                rating = Rating.objects.create(customer=customer, technical=order.technical, order=order, rate=rate)
                serializer = RatingSerializer(rating,many=False)
                response = {'message': 'Rating Create', 'result': serializer.data}
                return Response(response, status=status.HTTP_200_OK)
        else:
            response = {'message': 'You Need to provide technical rate'}
            return Response(response, status=status.HTTP_400_BAD_REQUEST)
    @action(detail=False, methods=['GET'])
    def get_all_customer_orders(self, request, pk=None):
        """List every order placed by the requesting customer."""
        orders = Order.objects.filter(customer=request.user)
        serializer = OrderSerializer(orders, many=True)
        return Response(serializer.data, status=status.HTTP_200_OK)
class ProductView(viewsets.ModelViewSet):
    """CRUD for products plus a per-category listing action."""
    queryset = Product.objects.all()
    serializer_class = ProductSerializer
    @action(detail=True, methods=['GET'])
    def get_products(self, request, pk=None):
        """Return every product whose category id equals *pk*."""
        matching = Product.objects.filter(category=pk)
        payload = ProductSerializer(matching, many=True).data
        return Response(payload, status=status.HTTP_200_OK)
|
{"/backend/service/serializers.py": ["/backend/service/models.py"], "/backend/service/views.py": ["/backend/service/models.py", "/backend/service/serializers.py"], "/backend/service/admin.py": ["/backend/service/models.py"], "/backend/account/serializers.py": ["/backend/account/models.py"], "/backend/account/urls.py": ["/backend/account/views.py"], "/backend/account/views.py": ["/backend/account/models.py", "/backend/account/serializers.py"], "/backend/account/models.py": ["/backend/account/validations.py"], "/backend/service/urls.py": ["/backend/service/views.py"]}
|
45,233,994
|
ProMostafa/sn3a
|
refs/heads/main
|
/backend/service/urls.py
|
from django.urls import path, include
from django.conf import settings
from django.conf.urls.static import static
from .views import ServiceViewSet, SubServicesViewSet, CustomerOrder, ProductView
from rest_framework.routers import DefaultRouter
# DRF router wires each viewset to standard list/detail routes plus its
# custom @action endpoints.
router = DefaultRouter()
router.register('services', ServiceViewSet)
router.register('subservices', SubServicesViewSet)
router.register('customerorders', CustomerOrder)
router.register('products', ProductView)
urlpatterns = [
    path('', include(router.urls)),
]
# Serve uploaded media directly only during development.
if settings.DEBUG:
    urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
|
{"/backend/service/serializers.py": ["/backend/service/models.py"], "/backend/service/views.py": ["/backend/service/models.py", "/backend/service/serializers.py"], "/backend/service/admin.py": ["/backend/service/models.py"], "/backend/account/serializers.py": ["/backend/account/models.py"], "/backend/account/urls.py": ["/backend/account/views.py"], "/backend/account/views.py": ["/backend/account/models.py", "/backend/account/serializers.py"], "/backend/account/models.py": ["/backend/account/validations.py"], "/backend/service/urls.py": ["/backend/service/views.py"]}
|
45,233,995
|
ProMostafa/sn3a
|
refs/heads/main
|
/backend/service/serializers.py
|
from rest_framework import serializers
from .models import Services, SubServices , Order , Rating, Product
from account.models import User
class ServicesSerializer(serializers.ModelSerializer):
    """Full-field serializer for service categories."""
    class Meta:
        model = Services
        fields = '__all__'
class SubServicesSerializer(serializers.ModelSerializer):
    """Full-field serializer for sub-services."""
    class Meta:
        model = SubServices
        fields = '__all__'
class OrderSerializer(serializers.ModelSerializer):
    """Order serializer; 'customer' is intentionally excluded because the
    view fills it from the authenticated request user."""
    class Meta:
        model = Order
        fields = ['technical', 'service', 'status', 'create_at','description','date','total_cost']
    # def create(self, validated_data):
    #     service = Services.objects.get(id=validated_data['service'])
    #     technical = User.objects.get(id=validated_data['technical'])
    #     order = Order.objects.create(
    #         technical=technical,
    #         service=service,
    #         date=validated_data['date'],
    #         total_cost=validated_data['total_cost'],
    #     )
    #     return order
    # def validate(self, attrs):
    #     try:
    #         technical = attrs.get('technical')
    #         service = attrs.get('service')
    #         date = attrs.get('date')
    #         total_cost = attrs.get('total_cost')
    #         return attrs
    #     except Exception as err:
    #         raise ValueError('some of requiring data is missing')
class RatingSerializer(serializers.ModelSerializer):
    """Full-field serializer for customer-to-technical ratings."""
    class Meta:
        model = Rating
        fields = '__all__'
class ProductSerializer(serializers.ModelSerializer):
    """Full-field serializer for products."""
    class Meta:
        model = Product
        fields = '__all__'
# class OrderDataSerializers(serializers.Serializer):
|
{"/backend/service/serializers.py": ["/backend/service/models.py"], "/backend/service/views.py": ["/backend/service/models.py", "/backend/service/serializers.py"], "/backend/service/admin.py": ["/backend/service/models.py"], "/backend/account/serializers.py": ["/backend/account/models.py"], "/backend/account/urls.py": ["/backend/account/views.py"], "/backend/account/views.py": ["/backend/account/models.py", "/backend/account/serializers.py"], "/backend/account/models.py": ["/backend/account/validations.py"], "/backend/service/urls.py": ["/backend/service/views.py"]}
|
45,293,055
|
amb8489/music_DB
|
refs/heads/master
|
/connection.py
|
import psycopg2
from dbinfo import info
connection = None  # lazily-initialized module-wide psycopg2 connection
def connect():
    """Open the shared PostgreSQL connection using dbinfo credentials."""
    global connection
    connection = psycopg2.connect(
        host=info["host"],
        database=info["database"],
        user=info["user"],
        password=info["password"]
    )
def get_connection():
    """Return the shared DB connection, opening it on first use."""
    if connection is None:
        connect()
    return connection
def close_connection():
    """Close and clear the shared connection (no-op when not connected).

    Bug fix: the original function lacked `global connection`; because it
    assigned `connection = None` at the end, Python treated `connection` as a
    local variable and the very first `if connection is not None` raised
    UnboundLocalError on every call.
    """
    global connection
    if connection is not None:
        connection.close()
        connection = None
def add_songs(number_of_song_files_to_add=None):
    """Load song metadata from ./songs/*.txt into the database.

    Each data line holds "<sep>"-separated fields:
    title, artist, length, album, year, genre, uid.

    Args:
        number_of_song_files_to_add: stop after this many files; None means
            process every file.  (The original code assigned the undefined
            name `ø` here, which raised NameError on the first call.)
    """
    import os
    directory = 'songs'
    sep = "<sep>"
    # genre-name -> genreid mapping used by the (disabled) insert statements
    genres = {"rap": 0, "pop": 1, "country": 2, "R&B": 3, "rock": 4, "alternative": 5, "indie": 6}
    album_hist_seen = {}
    conn = get_connection()
    cur = conn.cursor()
    files_done = 0
    rows_done = 0
    try:
        for filename in os.listdir(directory):
            if number_of_song_files_to_add is not None and files_done == number_of_song_files_to_add:
                return
            files_done += 1
            if not filename.endswith(".txt"):
                continue
            path = os.path.join(directory, filename)
            with open(path, "r") as f:
                # Bug fix: the original mixed `for line in f` with f.readline(),
                # which silently skipped every other line. Iterate directly.
                for line in f:
                    song_data = line.strip().split(sep)
                    # Bug fix: all 7 fields are indexed below, but the original
                    # guard only required 5, risking IndexError on short rows.
                    if len(song_data) < 7:
                        continue
                    title, artist, duration, album, year, gen, Uid = song_data[:7]
                    # DB columns cap artist/album at 100 chars.
                    artist = artist[:100]
                    album = album[:100]
                    if len(title) < 50:
                        # The bulk INSERT statements (song, songgenre, artist,
                        # songartist, album, albumcontains) were commented out
                        # in the original; see VCS history to restore them.
                        rows_done += 1
                        if rows_done > 9388:
                            return
                        conn.commit()
    finally:
        # Bug fix: the cursor used to leak on every early return.
        cur.close()
|
{"/website/auth.py": ["/connection.py"], "/website/views.py": ["/connection.py"]}
|
45,293,056
|
amb8489/music_DB
|
refs/heads/master
|
/main.py
|
# aaron berghash
# greg mockler
# tanner bradford
# ranen mirot
"""
main.py is the main class for the application
"""
from website import create_app
app = create_app()  # Flask application built by the website package factory
if __name__ == "__main__":
    app.run(debug=True)  # dev server only; disable debug=True in production
|
{"/website/auth.py": ["/connection.py"], "/website/views.py": ["/connection.py"]}
|
45,293,057
|
amb8489/music_DB
|
refs/heads/master
|
/website/views.py
|
from flask import Blueprint, render_template, request, session
from datetime import datetime
from connection import get_connection
views = Blueprint('views', __name__)
# the home page
@views.route("/")
def home():
    """Render the public landing page.

    :return: homepage render template
    """
    return render_template("home.html")
# the user page
# the user page
@views.route("/userpage")
def userpage():
    """
    Show the userpage for the logged-in user.

    :return: render template
    """
    # getting and saving new data
    # NOTE(review): this read is immediately overwritten below (dead store),
    # but it raises KeyError when 'user_data' is missing from the query
    # string — confirm whether that side effect is intended.
    user_data = request.args['user_data']
    user_data = session['user_data']
    session['user_data'] = user_data
    user_data["searched_friend"] = "None"
    return render_template("userpage.html", user_data=user_data)
@views.route('/addtoplaylist/', methods=['POST', 'GET'])
def add_song_to_playlist():
    """Add a song — or an entire album — to one of the user's playlists.

    POST form fields:
      songid: "<sep>"-packed string; element 1 ("True"/"False") selects the
              album branch, element 0 is the song id (or album name).
      currentplaylist: name of the target collection.
    :return: render template
    """
    if request.method == 'GET':
        return render_template('userpage.html')
    # Bug fix: read the session *before* any DB work. The original bound
    # user_data inside the try block, so an early failure left it unbound and
    # the final render raised NameError instead of rendering.
    user_data = session['user_data']
    try:
        conn = get_connection()
        cur = conn.cursor()
        userid = user_data['id']
        playlistname = request.form['currentplaylist']
        songid = request.form["songid"].split("<sep>")
        add_album = songid[1]
        # Resolve the target playlist's collection id (shared by both branches).
        sql = "select collectionid " \
              "from collection " \
              "where name = %s and userid = %s"
        cur.execute(sql, (playlistname, userid))
        playlistid = cur.fetchone()
        if add_album == "True":
            albumInfo = songid
            # Look up the album id via album name + artist name.
            sql = "select album.albumid " \
                  "from album " \
                  "inner join albumcontains on album.albumid = albumcontains.albumid " \
                  "and album.albumname = %s " \
                  "inner join song on albumcontains.songid = song.songid " \
                  "inner join songartist on song.songid = songartist.songid " \
                  "inner join artist on songartist.artistid = artist.artistid " \
                  "and artist.artistname = %s"
            cur.execute(sql, (albumInfo[0], albumInfo[2]))
            albumID = cur.fetchone()
            sql = "insert into collectionalbum(collectionid,albumid)" \
                  "values(%s, %s) on conflict do nothing"
            cur.execute(sql, (playlistid, albumID))
        else:
            sql = "insert into collectionsong(collectionid,songid)" \
                  "values(%s, %s) on conflict do nothing"
            cur.execute(sql, (playlistid, songid[0]))
            # Flags steer which panel userpage.html shows after the add.
            user_data["explore"] = True
            user_data["myAlbums"] = False
        conn.commit()
        cur.close()
        session['user_data'] = user_data
    except Exception:
        # Best-effort, matching the original bare except: a failed insert
        # leaves the playlist unchanged.
        # NOTE(review): log the exception instead of discarding it.
        pass
    return render_template('userpage.html', user_data=user_data)
@views.route('/deletefromplaylist/', methods=['POST', 'GET'])
def delete_song_from_playlist():
    """
    function to delete song from playlist
    :return: render template
    """
    if request.method == 'GET':
        return render_template('userpage.html')
    if request.method == 'POST':
        # pull the cached profile and the song picked on the form
        user_data = session['user_data']
        userid = user_data["id"]
        songid = request.form["songid"]
        playlistname = user_data["current_playlist_name"]
        conn = get_connection()
        cur = conn.cursor()
        # resolve the playlist (collection) id for this user
        lookup_sql = ("select collectionid "
                      "from collection "
                      "where name = %s and userid = %s")
        cur.execute(lookup_sql, (playlistname, userid))
        playlistid = cur.fetchone()
        # drop the song/playlist association
        delete_sql = ("delete from collectionsong "
                      "where collectionid = %s and songid = %s")
        cur.execute(delete_sql, (playlistid, songid))
        conn.commit()
        cur.close()
        # return to the playlists view
        user_data["explore"] = False
        user_data["myAlbums"] = True
        session['user_data'] = user_data
        return render_template('userpage.html', user_data=user_data)
@views.route('/deletealbumfromplaylist/', methods=['POST', 'GET'])
def delete_album_from_playlist():
    """
    Remove an album from the user's current playlist.

    Reads the album name from the form, resolves its id within the
    current playlist, and deletes the collectionalbum link.
    :return: render template
    """
    if request.method == 'GET':
        return render_template('userpage.html')
    if request.method == 'POST':
        # getting form data
        user_data = session['user_data']
        userid = user_data["id"]
        albumname = request.form["album"]
        playlistname = user_data["current_playlist_name"]
        conn = get_connection()
        cur = conn.cursor()
        # resolve the playlist (collection) id for this user
        sql = "select collectionid " \
              "from collection " \
              "where name = %s and userid = %s"
        cur.execute(sql, (playlistname, userid))
        playlistid = cur.fetchone()
        # BUG FIX: the album name was hard-coded to 'Over My Head' and the
        # query contained no placeholders even though two parameters were
        # passed, so psycopg2 raised on every request. Parameterize on the
        # submitted album name and restrict to the current playlist.
        sql = "select album.albumid " \
              "from album " \
              "inner join collectionalbum c2 " \
              "on album.albumid = c2.albumid " \
              "and album.albumname = %s " \
              "where c2.collectionid = %s"
        cur.execute(sql, (albumname, playlistid))
        albumid = cur.fetchone()
        sql = "delete from collectionalbum " \
              "where collectionid = %s and albumid = %s"
        cur.execute(sql, (playlistid, albumid))
        conn.commit()
        cur.close()
        # return to the playlists view
        user_data["explore"] = False
        user_data["myAlbums"] = True
        session['user_data'] = user_data
        return render_template('userpage.html', user_data=user_data)
@views.route('/removeplaylist/', methods=['POST', 'GET'])
def remove_playlist():
    """
    remove a playlist from the database
    :return: the render template
    """
    if request.method == 'GET':
        return render_template('userpage.html')
    if request.method == 'POST':
        rmplaylist_name = request.form["rmplaylist"]
        user_data = session['user_data']
        # mirror the removal in the cached session profile
        user_data["playlist_name"].remove(rmplaylist_name)
        user_data["current_playlist_length"] = 0
        user_data["current_playlist_number"] = 0
        conn = get_connection()
        cur = conn.cursor()
        # BUG FIX: scope the lookup to the logged-in user; matching by
        # name alone could resolve (and delete) another user's playlist.
        sql = "SELECT collectionid FROM collection WHERE name = %s AND userid = %s"
        cur.execute(sql, (rmplaylist_name, user_data["id"]))
        collectionID = cur.fetchone()[0]
        # remove song and album links before the collection row itself
        sql = "DELETE FROM collectionsong WHERE collectionid = %s "
        cur.execute(sql, (collectionID,))
        # BUG FIX: album links were never removed, leaving orphaned
        # collectionalbum rows (or a failing foreign key) behind.
        sql = "DELETE FROM collectionalbum WHERE collectionid = %s "
        cur.execute(sql, (collectionID,))
        sql = "DELETE FROM collection WHERE collectionid = %s "
        cur.execute(sql, (collectionID,))
        user_data["current_playlist"] = []
        conn.commit()
        cur.close()
        user_data["explore"] = False
        user_data["myAlbums"] = True
        user_data["num_of_costom_playlist"] = str(len(user_data["playlist_name"]))
        session['user_data'] = user_data
        return render_template('userpage.html', user_data=user_data)
@views.route('/getplaylist/', methods=['POST', 'GET'])
def get_playlist():
    """
    get a user the playlist requested
    :return: render template
    """
    if request.method == 'GET':
        return render_template('userpage.html')
    if request.method == 'POST':
        requested = request.form["playlist"]
        user_data = session['user_data']
        owner_id = user_data["id"]
        conn = get_connection()
        cur = conn.cursor()
        # every song stored directly in this playlist
        song_sql = (" SELECT songid,title,length FROM song WHERE songid IN "
                    "(SELECT songid FROM collectionsong WHERE collectionid IN "
                    "(SELECT collectionid FROM collection where name = %s AND userid = %s)) ")
        cur.execute(song_sql, (requested, owner_id))
        songs = cur.fetchall()
        # the playlist's collection id, needed for the album lookup below
        id_sql = "SELECT collectionid FROM collection WHERE name = %s AND userid = %s"
        cur.execute(id_sql, (requested, owner_id))
        collectionid = cur.fetchone()
        # every album attached to this playlist
        album_sql = ("SELECT albumname "
                     "FROM album "
                     "WHERE albumid "
                     "IN (SELECT albumid FROM collectionalbum WHERE collectionid = %s)")
        cur.execute(album_sql, (collectionid,))
        user_data["current_albums"] = [row[0] for row in cur.fetchall()]
        # cache the playlist plus its summary stats for the template
        user_data["current_playlist"] = songs
        user_data["current_playlist_name"] = requested
        user_data["current_playlist_length"] = round(sum(song[2] for song in songs) / 60, 2)
        user_data["current_playlist_number"] = len(songs)
        user_data["myAlbums"] = True
        user_data["explore"] = False
        session['user_data'] = user_data
        return render_template('userpage.html', user_data=user_data)
@views.route('/makenewplaylists/', methods=['POST', 'GET'])
def make_new_playlist():
    """
    function to make a new empty playlist
    :return: render template
    """
    if request.method == 'GET':
        return render_template('userpage.html')
    if request.method == 'POST':
        user_data = session['user_data']
        new_playlist_name = request.form["playlist_name"]
        owner_id = user_data["id"]
        # create the collection row and read back its generated id
        conn = get_connection()
        cur = conn.cursor()
        create_sql = ("insert into collection(name,userid) "
                      "values(%s, %s) RETURNING collectionid")
        cur.execute(create_sql, (new_playlist_name, owner_id))
        playlistID = cur.fetchone()[0]
        conn.commit()
        cur.close()
        # mirror the new playlist into the cached session profile
        user_data["playlist_name"].append(new_playlist_name)
        print(user_data["playlist_name"])
        user_data["new_playlist_id"] = playlistID
        user_data["explore"] = True
        user_data["myAlbums"] = False
        user_data["num_of_costom_playlist"] = str(len(user_data["playlist_name"]))
        session['user_data'] = user_data
        return render_template('userpage.html', user_data=user_data)
@views.route('/renamecollection/', methods=['POST', 'GET'])
def rename_collection():
    """
    route to rename a collection
    :return: render template
    """
    if request.method == 'GET':
        return render_template('login.html')
    if request.method == 'POST':
        user_data = session['user_data']
        old_name = user_data['current_playlist_name']
        new_name = request.form['new_name']
        owner_id = user_data["id"]
        conn = get_connection()
        cur = conn.cursor()
        # rename the collection row in place
        rename_sql = ("update collection "
                      "set name = %s "
                      "where name = %s and userid = %s")
        cur.execute(rename_sql, (new_name, old_name, owner_id))
        user_data["current_playlist_name"] = new_name
        conn.commit()
        # refresh the cached playlist-name list from the database
        names_sql = "SELECT ALL name FROM collection where userid = %s"
        cur.execute(names_sql, (user_data["id"],))
        user_data["playlist_name"] = [row[0] for row in cur.fetchall()]
        cur.close()
        session['user_data'] = user_data
        return render_template('userpage.html', user_data=user_data)
@views.route('/searchedsong/', methods=['POST', 'GET'])
def searched_song():
    """
    Search the catalogue for songs by title, genre, album or artist.

    Reads the search text, filter column, result limit and sort order
    from the form, runs one parameterized query, attaches the current
    user's play count to every returned song, and caches the result in
    the session for the template.
    :return: render template
    """
    if request.method == 'GET':
        return render_template('userpage.html')
    if request.method == 'POST':
        # getting form data
        form_data = request.form
        search_text = form_data["song_name"]
        filter_selected = form_data['options']   # which column to search by
        amount_of_songs = form_data['amount']    # max rows to show
        sort_by = form_data['sort']
        user_data = session['user_data']
        user_id = user_data["id"]
        conn = get_connection()
        cur = conn.cursor()
        # ORDER BY fragment chosen from the sort selector (default: release date)
        sort_map = {
            "song": " order by song.title, artist.artistname",
            "genre": " order by genre.genrename",
            "artist": " order by artist.artistname",
        }
        sort_sql = sort_map.get(sort_by, " order by song.releasedate")
        # The four original queries were identical except for where the
        # search predicate lived inside the join chain; for inner joins
        # that is equivalent to a single WHERE clause, so build one query.
        filter_map = {
            "title": "song.title = %s",
            "genre": "genre.genrename = %s",
            "album": "album.albumname = %s",
        }
        where_clause = filter_map.get(filter_selected, "artist.artistname = %s")
        sql = ("select song.songid, song.title, song.length, artist.artistname, "
               "album.albumname, genre.genrename, song.releasedate "
               "from song inner join songartist on song.songid = songartist.songid "
               "inner join artist on songartist.artistid = artist.artistid "
               "inner join albumcontains on song.songid = albumcontains.songid "
               "inner join album on albumcontains.albumid = album.albumid "
               "inner join songgenre on song.songid = songgenre.songid "
               "inner join genre on songgenre.genreid = genre.genreid "
               "left outer join userplayssong on song.songid = userplayssong.songid "
               "and userplayssong.userid = %s "
               "where " + where_clause + sort_sql)
        cur.execute(sql, (user_id, search_text))
        result = cur.fetchall()
        if result:
            # trim to the requested number of rows
            if len(result) > int(amount_of_songs):
                result = result[:int(amount_of_songs)]
            # BUG FIX: the play count used to be computed once for the first
            # hit and copied onto every row; compute it per song instead.
            count_sql = ("select count(songid) from userplayssong "
                         "where userid = %s and songid = %s")
            with_counts = []
            for row in result:
                cur.execute(count_sql, (user_id, row[0]))
                play_count = cur.fetchone()[0]
                with_counts.append(row + (play_count,))
            user_data["searched_songs"] = with_counts
            user_data["searched_song_error"] = "None"
        else:
            user_data["searched_song_error"] = "no song found!"
            user_data["searched_songs"] = "None"
        user_data["explore"] = True
        user_data["myAlbums"] = False
        session['user_data'] = user_data
        cur.close()
        return render_template('userpage.html', user_data=user_data)
@views.route('/followuser/', methods=['POST', 'GET'])
def follow_user():
    """
    function to follow another user
    :return: render template
    """
    if request.method == 'GET':
        return render_template('login.html')
    if request.method == 'POST':
        form_data = request.form
        user_data = session['user_data']
        user_id = user_data["id"]
        conn = get_connection()
        cur = conn.cursor()
        # resolve the id of the previously searched user
        lookup = ("select userid "
                  "from useraccount "
                  "where username = %s")
        cur.execute(lookup, (user_data["searched_friend"],))
        searched_user_id = cur.fetchone()[0]
        # insert the follow edge; RETURNING tells us whether it was new
        insert = ("insert into userfollows(useridfollower, useridfollowing)"
                  " values(%s, %s) on conflict do nothing returning null")
        cur.execute(insert, (user_id, searched_user_id))
        inserted = cur.fetchone()
        if inserted:
            user_data["following"].append(user_data["searched_friend"].strip())
            session.modified = True
            # refresh both counters from the database
            q_following = ("select count(useridfollower) "
                           "from userfollows "
                           "where useridfollower = %s")
            cur.execute(q_following, (user_id,))
            num_following = cur.fetchone()[0]
            q_followers = ("select count(useridfollowing) "
                           "from userfollows "
                           "where useridfollowing = %s")
            cur.execute(q_followers, (user_id,))
            num_followers = cur.fetchone()[0]
            user_data["num_followers"] = num_followers
            user_data["num_following"] = num_following
            user_data['error'] = None
        else:
            user_data['error'] = 'You already follow this user'
        # clear the pending search either way
        user_data["searched_friend"] = "None"
        conn.commit()
        cur.close()
        user_data["explore"] = False
        session['user_data'] = user_data
        return render_template('userpage.html', user_data=user_data)
@views.route('/unfollowuser/', methods=['POST', 'GET'])
def unfollow_user():
    """
    function to unfollow another user
    :return: render template
    """
    if request.method == 'GET':
        return render_template('login.html')
    if request.method == 'POST':
        form_data = request.form
        user_data = session['user_data']
        user_id = user_data["id"]
        # nothing to do unless the user actually follows someone
        if int(user_data["num_following"]) > 0:
            conn = get_connection()
            cur = conn.cursor()
            # look up the id of the account being unfollowed
            lookup = ("select userid "
                      "from useraccount "
                      "where username = %s")
            cur.execute(lookup, (form_data["usr"],))
            target_id = cur.fetchone()[0]
            # drop the follow edge
            remove = ("DELETE FROM userfollows "
                      "WHERE useridfollower = %s and useridfollowing = %s")
            cur.execute(remove, (user_id, target_id))
            user_data["following"].remove(form_data["usr"].strip())
            session.modified = True
            # recompute both follower counters from the database
            q_following = ("select count(useridfollower) "
                           "from userfollows "
                           "where useridfollower = %s")
            cur.execute(q_following, (user_data["id"],))
            num_following = cur.fetchone()[0]
            q_followers = ("select count(useridfollowing) "
                           "from userfollows "
                           "where useridfollowing = %s")
            cur.execute(q_followers, (user_data["id"],))
            num_followers = cur.fetchone()[0]
            user_data["num_followers"] = num_followers
            user_data["num_following"] = num_following
            conn.commit()
            cur.close()
            session['user_data'] = user_data
            return render_template('userpage.html', user_data=user_data)
        return render_template('userpage.html', user_data=user_data)
@views.route('/searchusers/', methods=['POST', 'GET'])
def search_users():
    """
    function to find a user by email
    :return: render template
    """
    if request.method == 'GET':
        return render_template('login.html')
    if request.method == 'POST':
        email = request.form["usr_email"]
        user_data = session['user_data']
        # look the address up in the account table
        conn = get_connection()
        cur = conn.cursor()
        query = ("select username "
                 "from useraccount "
                 "where email = %s")
        cur.execute(query, (email.strip(),))
        match = cur.fetchone()
        user_data["searched_friend"] = "None"
        # if user not found
        # TODO: show a message that says user not found
        if match:
            user_data["searched_friend"] = match[0]
            user_data["explore"] = False
            user_data['error'] = None
        session['user_data'] = user_data
        cur.close()
        return render_template('userpage.html', user_data=user_data)
@views.route('/playentirealbum/', methods=['POST', 'GET'])
def play_album():
    """
    Record one play for every song on the album containing the given song.

    The song id comes from the form; the album is resolved through
    albumcontains and one userplayssong row is inserted per track.
    :return: render template
    """
    if request.method == 'GET':
        return render_template('login.html')
    if request.method == 'POST':
        songID = int(request.form["songid"])
        user_data = session['user_data']
        userid = user_data['id']
        conn = get_connection()
        cur = conn.cursor()
        # every song that shares an album with the submitted song
        sql = "SELECT songid from albumcontains WHERE albumid = (SELECT albumid FROM albumcontains " \
              "WHERE songid = %s)"
        cur.execute(sql, (songID,))
        album_songs = cur.fetchall()
        insert_sql = "insert into userplayssong(userid, songid, dateplayed) " \
                     "values(%s, %s, %s)"
        for row in album_songs:
            cur.execute(insert_sql, (userid, int(row[0]), datetime.now()))
        # one commit for the whole batch instead of one per track
        conn.commit()
        cur.close()
        # BUG FIX: only bump the displayed play count for songs that are
        # actually on this album; previously every searched song was bumped.
        album_ids = {int(row[0]) for row in album_songs}
        for i, song in enumerate(user_data['searched_songs']):
            if int(song[0]) in album_ids:
                user_data['searched_songs'][i] = song[0:7] + (song[7] + 1,)
        session['user_data'] = user_data
        return render_template('userpage.html', user_data=user_data)
@views.route('/playsong/', methods=['POST', 'GET'])
def play_song():
    """
    Record a single play of a song for the logged-in user.

    Inserts a userplayssong row, maintains the per-artist play counter,
    refreshes the user's top-10 artists, and bumps the displayed play
    count of the matching search result.
    :return: render template
    """
    if request.method == 'GET':
        return render_template('login.html')
    if request.method == 'POST':
        songid = request.form["songid"]
        user_data = session['user_data']
        userid = user_data['id']
        conn = get_connection()
        cur = conn.cursor()
        # log the play itself
        sql = "insert into userplayssong(userid, songid, dateplayed) " \
              "values(%s, %s, %s)"
        cur.execute(sql, (userid, int(songid), datetime.now()))
        conn.commit()
        sql = "SELECT artistid FROM songartist WHERE songid = %s"
        cur.execute(sql, (int(songid),))
        ARTISTID = cur.fetchone()[0]
        sql = "SELECT playcount FROM artist_play_counts WHERE userid = %s and artistid = %s"
        cur.execute(sql, (userid, ARTISTID))
        row = cur.fetchone()
        # BUG FIX: fetchone() returns a tuple, which used to be passed
        # straight into the insert; unwrap it (and seed new artists at 1).
        count = 1 if row is None else row[0] + 1
        sql = "insert into artist_play_counts(userid, artistid, playcount)"\
              "values(%s, %s, %s) on conflict(artistid) do update " \
              "set playcount = artist_play_counts.playcount + 1"
        cur.execute(sql, (userid, ARTISTID, count))
        conn.commit()
        # rebuild the user's top-10 artists from the play counters
        sql = "SELECT playcount,artistid FROM artist_play_counts WHERE userid = %s"
        cur.execute(sql, (userid,))
        artist_play_counts = sorted(cur.fetchall(), reverse=True)[:10]
        user_data["top10artists"] = []
        for artistid in [pair[1] for pair in artist_play_counts]:
            sql = "SELECT artistname from artist where artistid = %s"
            cur.execute(sql, (artistid,))
            user_data["top10artists"].append(cur.fetchone())
        cur.close()
        user_data["top5genre"] = ["rap", "pop", "country", "R&B", "rock"]
        user_data["explore"] = True
        user_data["myAlbums"] = False
        # bump the on-screen play count of the song that was just played
        for i, song in enumerate(user_data['searched_songs']):
            if int(song[0]) == int(songid):
                user_data['searched_songs'][i] = song[0:7] + (song[7] + 1,)
        session['user_data'] = user_data
        return render_template('userpage.html', user_data=user_data)
@views.route('/playcollection/', methods=['POST', 'GET'])
def play_collection():
    """
    route to play a collection of songs
    :return: render template
    """
    if request.method == 'GET':
        return render_template('login.html')
    if request.method == 'POST':
        user_data = session['user_data']
        userid = user_data['id']
        songs = user_data["current_playlist"]
        print(songs)
        conn = get_connection()
        cur = conn.cursor()
        insert_sql = ("insert into userplayssong(userid, songid) "
                      "values(%s, %s)")
        # record one play per track in the current playlist
        for entry in songs:
            cur.execute(insert_sql, (userid, int(entry[0])))
            conn.commit()
        cur.close()
        # return to the playlists view
        user_data["explore"] = False
        user_data["myAlbums"] = True
        session['user_data'] = user_data
        return render_template('userpage.html', user_data=user_data)
|
{"/website/auth.py": ["/connection.py"], "/website/views.py": ["/connection.py"]}
|
45,293,058
|
amb8489/music_DB
|
refs/heads/master
|
/website/auth.py
|
from datetime import datetime
import random
from flask import Blueprint, render_template, request, redirect, url_for, session
from connection import get_connection
auth = Blueprint('auth', __name__)
# new user sign up page
@auth.route("/signup", methods=['POST', 'GET'])
def signup():
    """
    signup signs a new user up
    :return: the template render
    """
    if request.method == 'GET':
        return render_template('signup.html')
    if request.method == 'POST':
        form_data = request.form
        # validate the submitted credentials and create the account
        user_data, allowed, error = confirm_new_account(form_data)
        if allowed:
            # seed the defaults a brand-new profile starts with
            user_data["num_followers"] = "0"
            user_data["num_following"] = "0"
            user_data["new_playlist"] = []
            user_data["top5genre"] = ["rap", "pop", "country", "R&B", "rock"]
            user_data["playlist_name"] = []
            user_data.update(form_data)
            # cache the profile in the session and go to the user page
            session['user_data'] = user_data
            return redirect(url_for('views.userpage', user_data=user_data))
        return render_template('signup.html', error=error)
def confirm_new_account(form_data):
    """
    Validate signup data and, if valid, create the account in the database.

    :param form_data: the data entered into the signup form (username,
        firstName, lastName, emailAddress, password)
    :return: tuple (user_data dict, success bool, error message string)
    """
    user_data = {}
    error = ''
    success = False
    # if any part of user data is empty
    for key in form_data:
        if form_data[key] == "":
            error = 'please input a valid {}'.format(key)
            return user_data, success, error
    # username already exists
    if username_taken(form_data["username"]):
        error = "username already taken"
        return user_data, success, error
    # email already exists
    if email_taken(form_data["emailAddress"]):
        error = "email already taken"
        return user_data, success, error
    # setting up new user in db
    success = True
    # NOTE(review): this uses Python's builtin hash() (not a password hash)
    # and the result is never stored below — the INSERT persists the
    # plaintext password. Consider a real KDF (e.g. hashlib/bcrypt) and
    # updating confirm_login() to match.
    user_data['passwordHash'] = hash(form_data['password'])
    user_data['following'] = []
    user_data.update(form_data)
    # place new user in db
    conn = get_connection()
    cur = conn.cursor()
    sql = "insert into useraccount(username, firstname, lastname, email, password, creationdate, lastaccess)" \
          " values(%s, %s, %s, %s, %s, %s, %s)"
    cur.execute(sql, (user_data["username"], user_data["firstName"], user_data["lastName"], user_data["emailAddress"],
                      user_data["password"], datetime.now(), datetime.now()))
    # getting that users data from db (same transaction, so the uncommitted
    # insert above is visible to this select)
    sql = "select email, creationdate, lastaccess, userid " \
          "from useraccount " \
          "where username = %s"
    cur.execute(sql, (form_data["username"],))
    result = cur.fetchone()
    # caching user data
    user_data = {"username": form_data["username"], "emailAddress": result[0], "creationDate": result[1],
                 "lastAccess": result[2], "searched_friend": "None", "num_followers": 0,
                 "num_following": 0, "id": result[3], 'following': []}
    conn.commit()
    cur.close()
    user_data["num_of_costom_playlist"] = "0"
    user_data["top10artists"] = []
    return user_data, success, error
def email_taken(email):
    """
    check if an email is already taken
    :param email: the email address string
    :return: True if taken, else False
    """
    conn = get_connection()
    cur = conn.cursor()
    query = ("select 1 from useraccount "
             "where email = %s")
    cur.execute(query, (email,))
    row = cur.fetchone()
    cur.close()
    # a row means some account already uses this address
    return row is not None
def username_taken(username):
    """
    checks if a username is already taken
    :param username: the username string
    :return: True if taken else False
    """
    conn = get_connection()
    cur = conn.cursor()
    query = ("select 1 from useraccount "
             "where username = %s")
    cur.execute(query, (username,))
    row = cur.fetchone()
    cur.close()
    # a row means the name is already claimed
    return row is not None
def getUserData(form_data):
    """
    Build the full session profile for a user who just logged in.

    Loads account info, follower/following counts, playlists, top-10
    artists, top-5 genres and song recommendations from the database.
    :param form_data: the login form (needs form_data["username"])
    :return: dict of user data to cache in the session
    """
    conn = get_connection()
    cur = conn.cursor()
    sql = "select email, creationdate, lastaccess, userid " \
          "from useraccount " \
          "where username = %s"
    cur.execute(sql, (form_data["username"],))
    result = cur.fetchone()
    sql = "select count(useridfollower) " \
          "from userfollows " \
          "where useridfollower = %s"
    cur.execute(sql, (result[3],))
    num_following = cur.fetchone()[0]
    sql = "select count(useridfollowing) " \
          "from userfollows " \
          "where useridfollowing = %s"
    cur.execute(sql, (result[3],))
    num_followers = cur.fetchone()[0]
    # caching user data
    user_data = {"username": form_data["username"], "emailAddress": result[0], "creationDate": result[1],
                 "lastAccess": result[2], "searched_friend": "None", "num_followers": num_followers,
                 "num_following": num_followers if False else num_following, "id": result[3], 'following': []}
    user_data["num_following"] = num_following
    user_data["new_playlist"] = []
    # usernames of everyone this user follows
    sql = "SELECT useridfollowing" \
          " FROM userfollows" \
          " WHERE useridfollower = %s"
    cur.execute(sql, (user_data["id"],))
    result = cur.fetchall()
    if len(result) > 0:
        sql = "SELECT username " \
              "FROM useraccount " \
              "WHERE userid IN %s"
        cur.execute(sql, (tuple(result),))
        result = cur.fetchall()
        user_data['following'] = [name[0] for name in result]
    # the user's playlists
    sql = "SELECT name FROM collection where userid = %s"
    cur.execute(sql, (user_data["id"],))
    all_playlists = cur.fetchall()
    for each in all_playlists:
        user_data[each[0]] = ''
    user_data["playlist_name"] = [name[0] for name in all_playlists]
    user_data["num_of_costom_playlist"] = str(len(user_data["playlist_name"]))
    # top-10 artists by this user's play counters
    sql = "SELECT playcount,artistid FROM artist_play_counts WHERE userid = %s"
    cur.execute(sql, (user_data["id"],))
    artist_play_counts = sorted(cur.fetchall(), reverse=True)[:10]
    user_data["top10artists"] = []
    for artistid in [pair[1] for pair in artist_play_counts]:
        sql = "SELECT artistname from artist where artistid = %s"
        cur.execute(sql, (artistid,))
        user_data["top10artists"].append(cur.fetchone())
    # genres of the most played songs (site-wide).
    # BUG FIX: the query has no placeholder, but a parameter used to be
    # bound anyway, which made psycopg2 raise on every login.
    sql = "SELECT songid from userplayssong WHERE userid > 0"
    cur.execute(sql)
    songids = cur.fetchall()
    if len(songids) > 20:
        songids = songids[:20]
    songs = []
    for songid in songids:
        sql = "select count(songid) from userplayssong WHERE songid = %s"
        cur.execute(sql, (songid[0],))
        songs.append((cur.fetchone()[0], songid[0]))
    songs = [song[1] for song in sorted(songs, reverse=True)]
    top5genre = []
    already_have = set()
    for songid in songs:
        sql = "SELECT genrename FROM genre WHERE genreid IN (SELECT genreid "\
              "FROM songgenre WHERE songid = %s)"
        cur.execute(sql, (songid,))
        gen = cur.fetchone()[0]
        if gen not in already_have:
            top5genre.append(gen)
            already_have.add(gen)
        if len(top5genre) == 5:
            break
    # pad with defaults up to five genres.
    # BUG FIX: 'appened' typo crashed here whenever fewer than five genres
    # were found in the play history; also skip genres already present.
    i = 0
    genres = {0: "rap", 1: "pop", 2: "country", 3: "R&B", 4: "rock", 5: "alternative", 6: "indie"}
    while len(top5genre) < 5:
        if genres[i] not in top5genre:
            top5genre.append(genres[i])
        i += 1
    user_data["top5genre"] = list(top5genre)
    ## RECOMMENDATIONS ##
    # top 50 most played songs of the last month
    rec = []
    sql = "select songid "\
          "from userplayssong "\
          "where dateplayed >= date_trunc('month', current_date - interval '1' month) "\
          "group by songid "\
          "order by count(*) desc"
    cur.execute(sql)
    song_ids = cur.fetchall()
    if len(song_ids) > 50:
        song_ids = song_ids[:50]
    for songid in song_ids:
        sql = "SELECT title FROM song WHERE songid = %s"
        cur.execute(sql, (songid[0],))
        rec.append(cur.fetchall()[0])
    user_data["top50bymonth"] = rec
    # top 50 songs played by the people this user follows
    sql = "select useridfollowing from userfollows where useridfollower = %s"
    cur.execute(sql, (user_data["id"],))
    following_ids = tuple(each[0] for each in cur.fetchall())
    top_songs = []
    # BUG FIX: an empty follow list used to build "in ()", a SQL syntax error
    if following_ids:
        percent_s = ", ".join(["%s"] * len(following_ids))
        sql = "select songid from userplayssong where userid in (" + percent_s + \
              ") group by songid order by count(songid) desc"
        cur.execute(sql, following_ids)
        song_ids = [row[0] for row in cur.fetchall()]
        if len(song_ids) > 50:
            song_ids = song_ids[:50]
        if song_ids:
            percent_s = ", ".join(["%s"] * len(song_ids))
            sql = "select title from song where songid in (" + percent_s + ")"
            cur.execute(sql, song_ids)
            top_songs = cur.fetchall()
    user_data["top50byfriends"] = top_songs
    # -------recommend--------
    # one song from each of the user's top five genres
    rec = []
    genreids = {"rap": 0, "pop": 1, "country": 2, "R&B": 3, "rock": 4, "alternative": 5, "indie": 6}
    for genre in top5genre:
        gid = genreids.get(genre)
        if gid is None:
            # genre came from the database and has no id mapping here
            continue
        sql = "SELECT title FROM song WHERE songid IN (SELECT songid from "\
              "songgenre WHERE genreid = %s)"
        cur.execute(sql, (gid,))
        similar_songs = cur.fetchall()
        # BUG FIX: randint(0, len(...)) could index one past the end
        if similar_songs:
            rec.append(random.choice(similar_songs)[0])
    # plus up to ten songs the user's friends play a lot
    already_have = set()
    for _ in range(0, 10):
        if not user_data["top50byfriends"]:
            break
        sng = random.choice(user_data["top50byfriends"])
        if sng not in already_have:
            rec.append(sng[0])
            already_have.add(sng)
    user_data["recommend"] = rec
    cur.close()
    return user_data
def confirm_login(form_data):
    """
    Check the submitted credentials against the useraccount table.

    On success, also stamps the account's lastaccess time.
    :param form_data: the login form (needs "username" and "password")
    :return: True if the credentials match an account, else False
    """
    # check db credentials exists
    username = form_data["username"]
    password = form_data["password"]
    conn = get_connection()
    cur = conn.cursor()
    # NOTE(review): this compares the password in plaintext, which implies
    # passwords are stored unhashed — consider hashing (see signup flow).
    sql = "select 1 from useraccount " \
          "where username = %s and password = %s"
    cur.execute(sql, (username, password))
    result = cur.fetchone()
    # if credentials don't exist
    if result is None:
        return False
    # if good login set user last log in in db
    sql = "update useraccount" \
          " set lastaccess = %s" \
          " where username = %s"
    cur.execute(sql, (datetime.now(), username))
    conn.commit()
    cur.close()
    return True
# sign in page
@auth.route("/login", methods=['POST', 'GET'])
def login():
    """
    log a user in
    :return: the render template
    """
    if request.method == 'GET':
        return render_template('login.html')
    if request.method == 'POST':
        form_data = request.form
        # wrong credentials: bounce back to the login page with a message
        if not confirm_login(form_data):
            return render_template('login.html',
                                   error="username or password is incorrect")
        # good credentials: load the profile and cache it in the session
        user_data = getUserData(form_data)
        session['user_data'] = user_data
        return render_template('userpage.html', user_data=user_data)
|
{"/website/auth.py": ["/connection.py"], "/website/views.py": ["/connection.py"]}
|
45,303,256
|
R-Ramana/Time-Series-Analysis
|
refs/heads/main
|
/Source Code/main.py
|
import eval_model
from sklearn.model_selection import train_test_split
import sys
import matplotlib.pyplot as plt
import matplotlib as mpl
import pandas as pd
import numpy as np
import csv
import statsmodels.tsa.api as smt
import statsmodels as sm
# to ignore warnings thrown by deprecated models of statmodels to use updated ones
import warnings
warnings.filterwarnings("ignore")
import metrics
import data_models as dm
from eval_model import model_metrics
from datetime import datetime, timedelta
from sklearn.linear_model import LinearRegression
from pylab import rcParams
def main_fn(file_name, col_name, num_hours, split_date):
    """
    Run the time-series analysis and forecasting pipeline on one column.

    :param file_name: path to a CSV file with a 'timestamp' column
    :param col_name: name of the value column to analyse and forecast
    :param num_hours: forecast horizon, in hours
    :param split_date: timestamp separating training data from test data
    """
    # Extra settings
    seed = 42
    np.random.seed(seed)
    plt.style.use('bmh')
    mpl.rcParams['axes.labelsize'] = 14
    mpl.rcParams['xtick.labelsize'] = 12
    mpl.rcParams['ytick.labelsize'] = 12
    mpl.rcParams['text.color'] = 'k'
    # Data Input: index the frame by its timestamp column
    df = pd.read_csv(file_name, parse_dates=['timestamp'])
    df.set_index('timestamp', inplace=True)
    # Decompose data (kept for exploratory runs)
    #result = dm.decompose(df, col_name, 8760, 0)
    # Fit a linear regression model to identify trend
    plt.figure(figsize=(15, 7))
    layout = (3, 2)
    pm_ax = plt.subplot2grid(layout, (0, 0), colspan=2)
    mv_ax = plt.subplot2grid(layout, (1, 0), colspan=2)
    fit_ax = plt.subplot2grid(layout, (2, 0), colspan=2)
    #pm_ax.plot(result.trend)
    pm_ax.set_title("Automatic decomposed trend")
    mm = df[col_name].rolling(24).mean()
    #mv_ax.plot(mm)
    mv_ax.set_title("Moving average 24 steps")
    # time index (0..n-1) as the single regression feature
    X = np.arange(len(df[col_name])).reshape(-1, 1)
    y = df[col_name].values
    model = LinearRegression()
    # BUG FIX: the model used to be fitted on np.isnan(X)/np.isnan(y)
    # boolean masks instead of the data itself; fit on the rows where
    # the target is actually defined.
    valid = ~np.isnan(y)
    model.fit(X[valid], y[valid])
    # calculate trend over the full time index
    trend = model.predict(X)
    #fit_ax.plot(trend)
    fit_ax.set_title("Trend fitted by linear regression")
    # Looking for weekly seasonality
    resample = df.resample('W')
    weekly_mean = resample.mean()
    plt.figure()
    weekly_mean[col_name].plot(label='Weekly mean')
    plt.title("Resampled series to weekly mean values")
    plt.legend()
    # Split training and test data for verification
    df_training = df.loc[df.index <= split_date]
    df_testcase = df.loc[df.index > split_date]
    resultsDict = {}
    predictionsDict = {}
    # fit the autoregressive model; other models are available via the
    # same dm.training_forecast interface (SES, HWES, ARIMA, SARIMAX, Auto)
    #dm.training_forecast("SES", col_name, resultsDict, predictionsDict, df, df_training, df_testcase)
    #dm.training_forecast("HWES", col_name, resultsDict, predictionsDict, df, df_training, df_testcase)
    dm.training_forecast("AR", col_name, resultsDict, predictionsDict, df, df_training, df_testcase)
    #dm.training_forecast("ARIMA", col_name, resultsDict, predictionsDict, df, df_training, df_testcase)
    #dm.training_forecast("SARIMAX", col_name, resultsDict, predictionsDict, df, df_training, df_testcase)
    #dm.training_forecast("Auto", col_name, resultsDict, predictionsDict, df, df_training, df_testcase)
    # persist results, then forecast num_hours ahead with the AR model
    dm.pickles(resultsDict, predictionsDict)
    dm.forecast('AR', col_name, resultsDict, predictionsDict, df, num_hours)
|
{"/main.py": ["/data_models.py"], "/Source Code/main.py": ["/data_models.py"], "/Source Code/CPPS.py": ["/main.py"]}
|
45,303,257
|
R-Ramana/Time-Series-Analysis
|
refs/heads/main
|
/Source Code/CPPS.py
|
# Driver script: configure the dataset and kick off the analysis pipeline.
from main import main_fn

# Input CSV and the column to analyse.
file_name = 'data/Singapore_2015_hourly.csv'
col_name = 'Singapore Temperature [2 m elevation corrected]'
# Forecast horizon (hours) and the train/test split boundary.
num_hours = 24
split_date = '2020-09-01T00:00:00'

main_fn(file_name=file_name, col_name=col_name,
        num_hours=num_hours, split_date=split_date)

# Notes:
# CNA news by 2030 --> reduce consumption, make use of renewable energy etc
# Forecast for application planning (solar power usage)
# Methodology of the various forecast algorithms
# How forecasting is being done (generic format) --> seasonality, trend etc.
|
{"/main.py": ["/data_models.py"], "/Source Code/main.py": ["/data_models.py"], "/Source Code/CPPS.py": ["/main.py"]}
|
45,303,258
|
R-Ramana/Time-Series-Analysis
|
refs/heads/main
|
/Source Code/data_models.py
|
import matplotlib.pyplot as plt
import pandas as pd
from pylab import rcParams
import metrics
import pickle
from statsmodels.graphics.tsaplots import plot_acf, plot_pacf
from statsmodels.tsa.stattools import acf, pacf
from statsmodels.tsa.seasonal import seasonal_decompose
from statsmodels.tsa.holtwinters import SimpleExpSmoothing, ExponentialSmoothing
from statsmodels.tsa.ar_model import AR
from statsmodels.tsa.arima.model import ARIMA
from statsmodels.tsa.statespace.sarimax import SARIMAX
from pmdarima.arima import auto_arima
# progress bar
from tqdm import tqdm, tqdm_notebook
def decompose(df, column_name, end_value, start_value):
    """Seasonally decompose a slice of *column_name* and plot the result."""
    rcParams['figure.figsize'] = 18, 8
    plt.figure(num=None, figsize=(50, 20), dpi=80, facecolor='w', edgecolor='k')
    window = df[column_name][start_value:end_value]
    # Additive decomposition with a 24-step (daily) period.
    decomposition = seasonal_decompose(window.values, model='additive', period=24)
    decomposition.plot()
    plt.show()
    return decomposition
def calc_pacf(df, column_name):
    """Plot the partial autocorrelation of *column_name* over 40 lags."""
    plot_pacf(df[column_name], lags=40)
    plt.show()
def calc_acf(df, column_name):
    """Plot the autocorrelation of *column_name* over 40 lags."""
    plot_acf(df[column_name], lags=40)
    plt.show()
def download(df, name):
    """Write *df* to data/training/<name>.csv for offline inspection.

    Uses forward slashes, which file APIs accept on both Windows and
    POSIX; the original 'data\\training\\' literal only worked on Windows.
    """
    filepath = 'data/training/' + name + '.csv'
    df.to_csv(filepath)
def training_forecast(model_name, column_name, resultsDict, predictionsDict, df, df_training, df_testcase):
    """Walk-forward forecast over the test window, one step at a time.

    For each test timestamp, refit *model_name* on all data up to that
    point and predict the next value.  Evaluation metrics and raw
    predictions are stored under ``model_name`` in ``resultsDict`` and
    ``predictionsDict``; the predictions are also saved to
    data/models/<model_name>.csv and plotted against the originals.
    """
    yhat = list()
    for t in tqdm(range(len(df_testcase[column_name]))):
        # Expanding training window: everything up to the current step.
        temp_train = df[:len(df_training)+t]
        if model_name == "SES":
            model = SimpleExpSmoothing(temp_train[column_name])
        elif model_name == "HWES":
            model = ExponentialSmoothing(temp_train[column_name])
        elif model_name == "AR":
            model = AR(temp_train[column_name])
        elif model_name == "ARIMA":
            model = ARIMA(temp_train[column_name], order=(2, 0, 1))
        elif model_name == "SARIMAX":
            model = SARIMAX(temp_train[column_name], order=(
                1, 0, 0), seasonal_order=(0, 0, 0, 3))
        model_fit = model.fit()
        # BUG FIX: the original tested `model_name == "SES" or "HWES"`,
        # which is always truthy ("HWES" is a non-empty string), so every
        # model took the smoothing branch and dynamic=False was never used.
        if model_name in ("SES", "HWES"):
            predictions = model_fit.predict(start=len(temp_train), end=len(temp_train))
        else:
            predictions = model_fit.predict(start=len(temp_train), end=len(temp_train), dynamic=False)
        yhat = yhat + [predictions]
    yhat = pd.concat(yhat)
    resultsDict[model_name] = metrics.evaluate(
        df_testcase[column_name], yhat.values)
    predictionsDict[model_name] = yhat.values
    plt.plot(df_testcase[column_name].values, label='Original')
    plt.plot(yhat.values, color='red', label=model_name + ' predicted')
    plt.legend()
    pd.DataFrame(yhat, columns=[model_name]).to_csv('data/models/' + model_name + '.csv')
# def diff_arima(df, column_name):
# hours = 24
# differenced = df[column_name].diff()
# plot_acf(differenced, lags=40)
# plt.show()
# plot_pacf(differenced, lags=40)
# plt.show()
# # differenced = difference(df, column_name, 1)
# print(differenced)
#calc_acf(differenced, column_name)
#calc_pacf(differenced, column_name)
# invert differenced value
# def inverse_difference(history, yhat, interval=1):
# return yhat + history[-interval]
# multi-step out-of-sample forecast
# forecast = model_fit.forecast(steps=24)
# invert the differenced forecast to something usable
# history = [x for x in X]
# hour = 1
# for yhat in forecast:
# inverted = inverse_difference(history, yhat, hours_in_a_day)
# print('Hour %d: %f' % (hour, inverted))
# history.append(inverted)
# hour += 1
# create a differenced series
# interval = difference = 1
# def difference(df, col_name, interval):
# for i in range(interval, len(df[col_name])):
# difference = df[i] - df[i - interval]
# return difference
# Walk throught the test data, training and predicting 1 day ahead for all the test data
# index = len(df_training)
# yhat = list()
# for t in tqdm(range(len(df_test.pollution_today))):
# temp_train = air_pollution[:len(df_training)+t]
# model = SARIMAX(temp_train.pollution_today, order=(
# 1, 0, 0), seasonal_order=(0, 0, 0, 3))
# model_fit = model.fit(disp=False)
# predictions = model_fit.predict(
# start=len(temp_train), end=len(temp_train), dynamic=False)
# yhat = yhat + [predictions]
# yhat = pd.concat(yhat)
# resultsDict['SARIMAX'] = evaluate(df_test.pollution_today, yhat.values)
# predictionsDict['SARIMAX'] = yhat.values
# plt.plot(df_test.pollution_today.values, label='Original')
# plt.plot(yhat.values, color='red', label='SARIMAX')
# plt.legend()
def forecast(model_name, column_name, resultsDict, predictionsDict, df, num_hours):
    """Refit *model_name* repeatedly and forecast *num_hours* steps ahead.

    Concatenated predictions are written to
    data/<model>_<hours>_<column>_forecast.csv.

    NOTE(review): ``df[:len(df)+t]`` always slices the entire frame, so
    every iteration refits on identical data — presumably intended to be
    an expanding window; confirm against the original design.
    """
    yhat = list()
    # The statsmodels fitters cannot handle NaNs; zero-fill first.
    df.fillna(value=0, inplace=True)
    for t in tqdm(range(len(df[column_name]))):
        temp_train = df[:len(df)+t]
        if model_name == "SES":
            model = SimpleExpSmoothing(temp_train[column_name])
        elif model_name == "HWES":
            model = ExponentialSmoothing(temp_train[column_name])
        elif model_name == "AR":
            model = AR(temp_train[column_name])
        elif model_name == "ARIMA":
            model = ARIMA(temp_train[column_name], order=(1, 1, 0))
        elif model_name == "SARIMAX":
            model = SARIMAX(temp_train[column_name], order=(
                1, 0, 0), seasonal_order=(0, 0, 0, 3))
        model_fit = model.fit()
        start_index = len(temp_train)
        end_index = start_index + num_hours
        # BUG FIX: `model_name == "SES" or "HWES"` is always truthy, so the
        # dynamic=False branch below was unreachable in the original.
        if model_name in ("SES", "HWES"):
            predictions = model_fit.predict(start=start_index, end=end_index)
        else:
            predictions = model_fit.predict(start=start_index, end=end_index, dynamic=False)
        yhat = yhat + [predictions]
    yhat = pd.concat(yhat)
    pd.DataFrame(yhat, columns=[model_name]).to_csv('data/' + model_name + '_' + str(num_hours) + '_' + column_name + '_forecast.csv')
def pickles(resultsDict, predictionsDict):
    """Persist scores and predictions to pickle files, then reload both.

    The reload acts as a sanity check that the files round-trip cleanly.
    """
    payloads = {
        'scores.pickle': resultsDict,
        'predictions.pickle': predictionsDict,
    }
    for path, payload in payloads.items():
        with open(path, 'wb') as handle:
            pickle.dump(payload, handle, protocol=pickle.HIGHEST_PROTOCOL)
    # Round-trip both files to confirm they were written readably.
    for path in payloads:
        with open(path, 'rb') as handle:
            pickle.load(handle)
|
{"/main.py": ["/data_models.py"], "/Source Code/main.py": ["/data_models.py"], "/Source Code/CPPS.py": ["/main.py"]}
|
45,303,259
|
R-Ramana/Time-Series-Analysis
|
refs/heads/main
|
/Source Code/eval_model.py
|
import matplotlib.pyplot as plt
import pandas as pd
def model_metrics(resultsDict):
    """Plot per-model mae/rmse/mape/r2 bar charts and return the best model.

    Saves the 2x2 comparison figure to metrics.png and delegates the
    final model selection to chooseBest().
    """
    df = pd.DataFrame.from_dict(resultsDict)
    # One stable colour per model name, shared by all four panels.
    pallette = plt.cm.get_cmap("tab20c", len(df.columns))
    colors = [pallette(i) for i in range(len(df.columns))]
    color_dict = dict(zip(df.columns, colors))
    fig = plt.figure(figsize=(20, 15))
    # (metric row, ascending sort?, panel title) for each subplot.
    panels = [
        ("mae", True, "MAE"),
        ("rmse", True, "RMSE"),
        ("mape", True, "MAPE"),
        ("r2", False, "R2"),  # r2 is higher-is-better, so sort descending
    ]
    for position, (metric, ascending, title) in enumerate(panels, start=1):
        fig.add_subplot(2, 2, position)
        ordered = df.loc[metric].sort_values(ascending=ascending)
        ordered.plot(
            kind="bar",
            colormap="Paired",
            color=[color_dict.get(name, "#333333") for name in ordered.index],
        )
        plt.legend()
        plt.title(title)
    plt.tight_layout()
    plt.savefig("metrics.png")
    plt.show()
    return chooseBest(resultsDict)
def chooseBest(resultsDict):
    """Vote across metrics and return the name of the best model.

    Every model starts with one point; the model with the lowest mae,
    the lowest rmse and the HIGHEST r2 each earn one extra point, and
    the model with the most points wins (ties go to the first model).

    BUG FIX: the original credited the *minimum* r2, but r2 is a
    higher-is-better metric (model_metrics sorts its R2 panel
    descending for exactly that reason).
    """
    key_list = list(resultsDict.keys())
    count = dict.fromkeys(resultsDict.keys(), 1)
    mae = [resultsDict.get(k).get('mae') for k in key_list]
    rmse = [resultsDict.get(k).get('rmse') for k in key_list]
    r2 = [resultsDict.get(k).get('r2') for k in key_list]
    # Lower is better for the error metrics, higher is better for r2.
    count[key_list[mae.index(min(mae))]] += 1
    count[key_list[rmse.index(min(rmse))]] += 1
    count[key_list[r2.index(max(r2))]] += 1
    best_model = max(count, key=count.get)
    return best_model
def find_min(model_list, key_list, count):
    """Award one point in *count* to the model with the smallest metric.

    Mutates *count* in place; models missing from *count* are ignored.
    """
    # list.index(min(...)) keeps the earliest minimum, matching the
    # original strict-< scan.
    smallest_at = model_list.index(min(model_list))
    winner = str(key_list[smallest_at])
    if winner in count:
        count[winner] += 1
|
{"/main.py": ["/data_models.py"], "/Source Code/main.py": ["/data_models.py"], "/Source Code/CPPS.py": ["/main.py"]}
|
45,348,487
|
dbcli/litecli
|
refs/heads/main
|
/tests/test_dbspecial.py
|
from litecli.packages.completion_engine import suggest_type
from test_completion_engine import sorted_dicts
from litecli.packages.special.utils import format_uptime
from litecli.packages.special.utils import check_if_sqlitedotcommand
def test_import_first_argument():
    """Completion for `.import`: first arg is a file, second a table."""
    cases = {
        ".import ": 1,
        ".import ./da": 1,
        ".import ./data.csv ": 2,
        ".import ./data.csv t": 2,
        ".import ./data.csv `t": 2,
        '.import ./data.csv "t': 2,
    }
    for text, arg_idx in cases.items():
        expected = (
            [{"type": "file_name"}]
            if arg_idx == 1
            else [{"type": "table", "schema": []}]
        )
        assert suggest_type(text, text) == expected
def test_u_suggests_databases():
    """The \\u command should complete database names."""
    text = "\\u "
    assert sorted_dicts(suggest_type(text, text)) == sorted_dicts(
        [{"type": "database"}]
    )
def test_describe_table():
    """\\dt should complete tables, views and schemas."""
    expected = [
        {"type": "table", "schema": []},
        {"type": "view", "schema": []},
        {"type": "schema"},
    ]
    assert sorted_dicts(suggest_type("\\dt", "\\dt ")) == sorted_dicts(expected)
def test_list_or_show_create_tables():
    """\\dt+ should offer the same completions as \\dt."""
    expected = [
        {"type": "table", "schema": []},
        {"type": "view", "schema": []},
        {"type": "schema"},
    ]
    assert sorted_dicts(suggest_type("\\dt+", "\\dt+ ")) == sorted_dicts(expected)
def test_format_uptime():
    """format_uptime renders seconds as human-readable d/h/m/s text."""
    for seconds, expected in [
        (59, "59 sec"),
        (120, "2 min 0 sec"),
        (54890, "15 hours 14 min 50 sec"),
        (598244, "6 days 22 hours 10 min 44 sec"),
        (522600, "6 days 1 hour 10 min 0 sec"),
    ]:
        assert format_uptime(seconds) == expected
def test_indexes():
    """.indexes should complete tables, views and schemas."""
    expected = [
        {"type": "table", "schema": []},
        {"type": "view", "schema": []},
        {"type": "schema"},
    ]
    assert sorted_dicts(suggest_type(".indexes", ".indexes ")) == sorted_dicts(expected)
def test_check_if_sqlitedotcommand():
    """Dot-commands are detected case-insensitively; non-strings are not."""
    for command, expected in [
        (".tables", True),
        (".BiNarY", True),
        ("binary", False),
        (234, False),
        (".changes test! test", True),
        ("NotDotcommand", False),
    ]:
        assert check_if_sqlitedotcommand(command) == expected
|
{"/tests/test_dbspecial.py": ["/litecli/packages/special/utils.py"], "/litecli/packages/special/iocommands.py": ["/litecli/packages/special/__init__.py", "/litecli/packages/special/main.py", "/litecli/packages/special/favoritequeries.py", "/litecli/packages/special/utils.py"], "/litecli/packages/special/dbcommands.py": ["/litecli/__init__.py", "/litecli/packages/special/__init__.py", "/litecli/packages/special/utils.py", "/litecli/packages/special/main.py"], "/litecli/encodingutils.py": ["/litecli/compat.py"], "/tests/utils.py": ["/litecli/main.py"], "/tests/test_main.py": ["/litecli/main.py", "/litecli/packages/special/main.py"], "/tests/test_parseutils.py": ["/litecli/packages/parseutils.py"], "/litecli/sqlexecute.py": ["/litecli/packages/special/utils.py"], "/litecli/sqlcompleter.py": ["/litecli/packages/parseutils.py", "/litecli/packages/special/iocommands.py", "/litecli/packages/filepaths.py"], "/tests/test_smart_completion_public_schema_only.py": ["/litecli/sqlcompleter.py"], "/litecli/config.py": ["/litecli/__init__.py"], "/litecli/packages/special/main.py": ["/litecli/packages/special/__init__.py"], "/litecli/main.py": ["/litecli/packages/special/main.py", "/litecli/sqlcompleter.py", "/litecli/clitoolbar.py", "/litecli/sqlexecute.py", "/litecli/clibuffer.py", "/litecli/config.py", "/litecli/encodingutils.py", "/litecli/lexer.py", "/litecli/__init__.py", "/litecli/packages/filepaths.py"], "/litecli/packages/filepaths.py": ["/litecli/encodingutils.py"]}
|
45,348,488
|
dbcli/litecli
|
refs/heads/main
|
/litecli/packages/special/iocommands.py
|
from __future__ import unicode_literals
import os
import re
import locale
import logging
import subprocess
import shlex
from io import open
from time import sleep
import click
import sqlparse
from configobj import ConfigObj
from . import export
from .main import special_command, NO_QUERY, PARSED_QUERY
from .favoritequeries import FavoriteQueries
from .utils import handle_cd_command
from litecli.packages.prompt_utils import confirm_destructive_query
# Module-level state shared by the special commands below.
use_expanded_output = False  # expanded (vertical) result rendering toggle
PAGER_ENABLED = True  # whether results are piped through a pager
tee_file = None  # open file object for the `tee` command, or None
once_file = written_to_once_file = None  # pending .once target and its written flag
favoritequeries = FavoriteQueries(ConfigObj())  # replaced via set_favorite_queries()
@export
def set_favorite_queries(config):
    """Replace the module-wide favorites store with one backed by *config*."""
    global favoritequeries
    favoritequeries = FavoriteQueries(config)
@export
def set_pager_enabled(val):
    """Set the module-wide flag that controls paging of query results."""
    global PAGER_ENABLED
    PAGER_ENABLED = val
@export
def is_pager_enabled():
    """Return True when query results should be piped through the pager."""
    return PAGER_ENABLED
@export
@special_command(
    "pager",
    "\\P [command]",
    "Set PAGER. Print the query results via PAGER.",
    arg_type=PARSED_QUERY,
    aliases=("\\P",),
    case_sensitive=True,
)
def set_pager(arg, **_):
    """Enable the pager, optionally pointing $PAGER at a new command."""
    if arg:
        os.environ["PAGER"] = arg
        msg = "PAGER set to %s." % arg
    elif "PAGER" in os.environ:
        msg = "PAGER set to %s." % os.environ["PAGER"]
    else:
        # This uses click's default per echo_via_pager.
        msg = "Pager enabled."
    set_pager_enabled(True)
    return [(None, None, None, msg)]
@export
@special_command(
    "nopager",
    "\\n",
    "Disable pager, print to stdout.",
    arg_type=NO_QUERY,
    aliases=("\\n",),
    case_sensitive=True,
)
def disable_pager():
    """Turn the pager off and report the change to the user."""
    set_pager_enabled(False)
    return [(None, None, None, "Pager disabled.")]
@export
def set_expanded_output(val):
    """Set the flag that renders results in expanded (vertical) format."""
    global use_expanded_output
    use_expanded_output = val
@export
def is_expanded_output():
    """Return True when expanded (vertical) output is active."""
    return use_expanded_output
# Module logger; not referenced elsewhere in this file's visible code.
_logger = logging.getLogger(__name__)
@export
def editor_command(command):
    """Return True if *command* invokes the external editor.

    The \\e marker may appear at either end: `\\e filename` or
    `SELECT * FROM \\e`.
    """
    stripped = command.strip()
    return stripped.startswith("\\e") or stripped.endswith("\\e")
@export
def get_filename(sql):
    """Extract the filename from a `\\e filename` command, if any."""
    if not sql.strip().startswith("\\e"):
        return None
    _, _, filename = sql.partition(" ")
    return filename.strip() or None
@export
def get_editor_query(sql):
    """Get the query part of an editor command.

    Repeatedly strips a leading or trailing ``\\e`` marker.  A plain
    ``.strip('\\e')`` would strip *characters*, eating a trailing "e"
    from the query itself (e.g. "...style\\e" -> "...styl").

    BUG FIX: the pattern is now a raw string; the original plain string
    relied on the invalid escape sequence "\\e", which raises a
    SyntaxWarning (and eventually an error) on modern Python.
    """
    sql = sql.strip()
    pattern = re.compile(r"(^\\e|\\e$)")
    while pattern.search(sql):
        sql = pattern.sub("", sql)
    return sql
@export
def open_external_editor(filename=None, sql=None):
    """Open external editor, wait for the user to type in their query, return
    the query.

    :param filename: optional string whose first whitespace-separated token
        is used as the file to edit.
    :param sql: optional partial query used to pre-populate the buffer.
    :return: tuple (query, message); message is an error string or None.
    """
    message = None
    filename = filename.strip().split(" ", 1)[0] if filename else None
    sql = sql or ""
    MARKER = "# Type your query above this line.\n"
    # Populate the editor buffer with the partial sql (if available) and a
    # placeholder comment.
    query = click.edit(
        "{sql}\n\n{marker}".format(sql=sql, marker=MARKER),
        filename=filename,
        extension=".sql",
    )
    if filename:
        try:
            # When editing an on-disk file, read the query back from disk.
            with open(filename, encoding="utf-8") as f:
                query = f.read()
        except IOError:
            message = "Error reading file: %s." % filename
    if query is not None:
        # Keep only the text above the marker.
        query = query.split(MARKER, 1)[0].rstrip("\n")
    else:
        # Don't return None for the caller to deal with.
        # Empty string is ok.
        query = sql
    return (query, message)
@special_command(
    "\\f",
    "\\f [name [args..]]",
    "List or execute favorite queries.",
    arg_type=PARSED_QUERY,
    case_sensitive=True,
)
def execute_favorite_query(cur, arg, verbose=False, **_):
    """Execute (or, with no argument, list) favorite queries.

    Yields (title, rows, headers, status) tuples.
    """
    if arg == "":
        for result in list_favorite_queries():
            yield result
        # BUG FIX: without this return, execution fell through to the
        # lookup below and also yielded a spurious "No favorite query: "
        # message after the listing.
        return
    # Parse out favorite name and optional substitution parameters.
    name, _, arg_str = arg.partition(" ")
    args = shlex.split(arg_str)
    query = favoritequeries.get(name)
    if query is None:
        message = "No favorite query: %s" % (name)
        yield (None, None, None, message)
    elif "?" in query:
        # DB-API "?" placeholders: let the cursor do the substitution.
        for sql in sqlparse.split(query):
            sql = sql.rstrip(";")
            title = "> %s" % (sql) if verbose else None
            cur.execute(sql, args)
            if cur.description:
                headers = [x[0] for x in cur.description]
                yield (title, cur, headers, None)
            else:
                yield (title, None, None, None)
    else:
        # $1..$N-style placeholders: substitute textually before executing.
        query, arg_error = subst_favorite_query_args(query, args)
        if arg_error:
            yield (None, None, None, arg_error)
        else:
            for sql in sqlparse.split(query):
                sql = sql.rstrip(";")
                title = "> %s" % (sql) if verbose else None
                cur.execute(sql)
                if cur.description:
                    headers = [x[0] for x in cur.description]
                    yield (title, cur, headers, None)
                else:
                    yield (title, None, None, None)
def list_favorite_queries():
    """List of all favorite queries.

    Returns (title, rows, headers, status)."""
    headers = ["Name", "Query"]
    rows = [(name, favoritequeries.get(name)) for name in favoritequeries.list()]
    status = "" if rows else "\nNo favorite queries found." + favoritequeries.usage
    return [("", rows, headers, status)]
def subst_favorite_query_args(query, args):
    """Replace positional parameters ($1...$N or ?) in *query*.

    Returns ``[query, None]`` on success, or ``[None, error_message]``
    when there are too many arguments for the placeholders, or
    placeholders left unsubstituted.
    """
    for idx, val in enumerate(args):
        shell_subst_var = "$" + str(idx + 1)
        question_subst_var = "?"
        if shell_subst_var in query:
            query = query.replace(shell_subst_var, val)
        elif question_subst_var in query:
            query = query.replace(question_subst_var, val, 1)
        else:
            return [
                None,
                "Too many arguments.\nQuery does not have enough place holders to substitute.\n"
                + query,
            ]
    # BUG FIX: raw string.  The original "\\?|\\$\d+" relied on the invalid
    # escape sequence "\d", which is a SyntaxWarning on modern Python.
    match = re.search(r"\?|\$\d+", query)
    if match:
        return [
            None,
            "missing substitution for " + match.group(0) + " in query:\n " + query,
        ]
    return [query, None]
@special_command("\\fs", "\\fs name query", "Save a favorite query.")
def save_favorite_query(arg, **_):
    """Save a new favorite query.
    Returns (title, rows, headers, status)"""
    usage = "Syntax: \\fs name query.\n\n" + favoritequeries.usage
    if not arg:
        return [(None, None, None, usage)]
    name, _, query = arg.partition(" ")
    if name and query:
        favoritequeries.save(name, query)
        return [(None, None, None, "Saved.")]
    # Either the name or the query is missing: re-show usage with an error.
    return [(None, None, None, usage + "Err: Both name and query are required.")]
@special_command("\\fd", "\\fd [name]", "Delete a favorite query.")
def delete_favorite_query(arg, **_):
    """Delete an existing favorite query."""
    if not arg:
        usage = "Syntax: \\fd name.\n\n" + favoritequeries.usage
        return [(None, None, None, usage)]
    return [(None, None, None, favoritequeries.delete(arg))]
@special_command("system", "system [command]", "Execute a system shell command.")
def execute_system_command(arg, **_):
    """Execute a system shell command."""
    usage = "Syntax: system [command].\n"
    if not arg:
        return [(None, None, None, usage)]
    try:
        command = arg.strip()
        # `cd` must be handled in-process, or it would only affect a child.
        if command.startswith("cd"):
            ok, error_message = handle_cd_command(arg)
            return [(None, None, None, "" if ok else error_message)]
        process = subprocess.Popen(
            arg.split(" "), stdout=subprocess.PIPE, stderr=subprocess.PIPE
        )
        output, error = process.communicate()
        response = error if error else output
        # subprocess returns bytes; decode to str for display.
        if isinstance(response, bytes):
            response = response.decode(locale.getpreferredencoding(False))
        return [(None, None, None, response)]
    except OSError as e:
        return [(None, None, None, "OSError: %s" % e.strerror)]
def parseargfile(arg):
    """Parse a `[-o] filename` argument for the tee/once commands.

    Returns keyword arguments for open(); ``-o`` selects overwrite ("w")
    mode, otherwise the file is appended to ("a").  Raises TypeError
    when no filename is given.
    """
    if arg.startswith("-o "):
        mode, filename = "w", arg[3:]
    else:
        mode, filename = "a", arg
    if not filename:
        raise TypeError("You must provide a filename.")
    return {"file": os.path.expanduser(filename), "mode": mode}
@special_command(
    "tee",
    "tee [-o] filename",
    "Append all results to an output file (overwrite using -o).",
)
def set_tee(arg, **_):
    """Open the tee output file; later results are mirrored via write_tee()."""
    global tee_file
    try:
        tee_file = open(**parseargfile(arg))
    except (IOError, OSError) as e:
        # Re-raise with a friendlier, single-line message for the prompt.
        raise OSError("Cannot write to file '{}': {}".format(e.filename, e.strerror))
    return [(None, None, None, "")]
@export
def close_tee():
    """Close the tee output file, if one is open, and clear the handle."""
    global tee_file
    if tee_file:
        tee_file.close()
        tee_file = None
@special_command("notee", "notee", "Stop writing results to an output file.")
def no_tee(arg, **_):
    """Stop mirroring results to the tee file."""
    close_tee()
    return [(None, None, None, "")]
@export
def write_tee(output):
    """Mirror *output* plus a newline to the tee file, if tee is active."""
    global tee_file
    if tee_file:
        click.echo(output, file=tee_file, nl=False)
        click.echo("\n", file=tee_file, nl=False)
        # Flush so the file tracks the session even if it ends abruptly.
        tee_file.flush()
@special_command(
    ".once",
    "\\o [-o] filename",
    "Append next result to an output file (overwrite using -o).",
    aliases=("\\o", "\\once"),
)
def set_once(arg, **_):
    """Remember the output target for the next result only.

    The file itself is opened lazily by write_once(), not here.
    """
    global once_file
    once_file = parseargfile(arg)
    return [(None, None, None, "")]
@export
def write_once(output):
    """Write *output* to the pending .once file, if one is set.

    Opens the target lazily and flags it written so that
    unset_once_if_written() can clear it afterwards.
    """
    global once_file, written_to_once_file
    if output and once_file:
        try:
            f = open(**once_file)
        except (IOError, OSError) as e:
            # Drop the target so a bad path isn't retried for every result.
            once_file = None
            raise OSError(
                "Cannot write to file '{}': {}".format(e.filename, e.strerror)
            )
        with f:
            click.echo(output, file=f, nl=False)
            click.echo("\n", file=f, nl=False)
        written_to_once_file = True
@export
def unset_once_if_written():
    """Unset the once file, if it has been written to."""
    global once_file, written_to_once_file
    # Both the target and the flag are cleared together.
    if written_to_once_file:
        once_file = written_to_once_file = None
@special_command(
    "watch",
    "watch [seconds] [-c] query",
    "Executes the query every [seconds] seconds (by default 5).",
)
def watch_query(arg, **kwargs):
    """Repeatedly execute a query, yielding result tuples each iteration.

    BUG FIX: the original ended this generator with ``raise
    StopIteration``; under PEP 479 (Python 3.7+) that escapes the
    generator as a RuntimeError.  Generators must finish with ``return``.
    """
    usage = """Syntax: watch [seconds] [-c] query.
    * seconds: The interval at the query will be repeated, in seconds.
    By default 5.
    * -c: Clears the screen between every iteration.
"""
    if not arg:
        yield (None, None, None, usage)
        return
    seconds = 5
    clear_screen = False
    statement = None
    while statement is None:
        arg = arg.strip()
        if not arg:
            # Oops, we parsed all the arguments without finding a statement
            yield (None, None, None, usage)
            return
        (current_arg, _, arg) = arg.partition(" ")
        try:
            seconds = float(current_arg)
            continue
        except ValueError:
            pass
        if current_arg == "-c":
            clear_screen = True
            continue
        statement = "{0!s} {1!s}".format(current_arg, arg)
    destructive_prompt = confirm_destructive_query(statement)
    if destructive_prompt is False:
        click.secho("Wise choice!")
        return
    elif destructive_prompt is True:
        click.secho("Your call!")
    cur = kwargs["cur"]
    sql_list = [
        (sql.rstrip(";"), "> {0!s}".format(sql)) for sql in sqlparse.split(statement)
    ]
    old_pager_enabled = is_pager_enabled()
    while True:
        if clear_screen:
            click.clear()
        try:
            # Somewhere in the code the pager its activated after every yield,
            # so we disable it in every iteration
            set_pager_enabled(False)
            for (sql, title) in sql_list:
                cur.execute(sql)
                if cur.description:
                    headers = [x[0] for x in cur.description]
                    yield (title, cur, headers, None)
                else:
                    yield (title, None, None, None)
            sleep(seconds)
        except KeyboardInterrupt:
            # This prints the Ctrl-C character in its own line, which prevents
            # to print a line with the cursor positioned behind the prompt
            click.secho("", nl=True)
            return
        finally:
            set_pager_enabled(old_pager_enabled)
|
{"/tests/test_dbspecial.py": ["/litecli/packages/special/utils.py"], "/litecli/packages/special/iocommands.py": ["/litecli/packages/special/__init__.py", "/litecli/packages/special/main.py", "/litecli/packages/special/favoritequeries.py", "/litecli/packages/special/utils.py"], "/litecli/packages/special/dbcommands.py": ["/litecli/__init__.py", "/litecli/packages/special/__init__.py", "/litecli/packages/special/utils.py", "/litecli/packages/special/main.py"], "/litecli/encodingutils.py": ["/litecli/compat.py"], "/tests/utils.py": ["/litecli/main.py"], "/tests/test_main.py": ["/litecli/main.py", "/litecli/packages/special/main.py"], "/tests/test_parseutils.py": ["/litecli/packages/parseutils.py"], "/litecli/sqlexecute.py": ["/litecli/packages/special/utils.py"], "/litecli/sqlcompleter.py": ["/litecli/packages/parseutils.py", "/litecli/packages/special/iocommands.py", "/litecli/packages/filepaths.py"], "/tests/test_smart_completion_public_schema_only.py": ["/litecli/sqlcompleter.py"], "/litecli/config.py": ["/litecli/__init__.py"], "/litecli/packages/special/main.py": ["/litecli/packages/special/__init__.py"], "/litecli/main.py": ["/litecli/packages/special/main.py", "/litecli/sqlcompleter.py", "/litecli/clitoolbar.py", "/litecli/sqlexecute.py", "/litecli/clibuffer.py", "/litecli/config.py", "/litecli/encodingutils.py", "/litecli/lexer.py", "/litecli/__init__.py", "/litecli/packages/filepaths.py"], "/litecli/packages/filepaths.py": ["/litecli/encodingutils.py"]}
|
45,348,489
|
dbcli/litecli
|
refs/heads/main
|
/tests/test_clistyle.py
|
# -*- coding: utf-8 -*-
"""Test the litecli.clistyle module."""
import pytest
from pygments.style import Style
from pygments.token import Token
from litecli.clistyle import style_factory
@pytest.mark.skip(reason="incompatible with new prompt toolkit")
def test_style_factory():
    """Test that a Pygments Style class is created."""
    header = "bold underline #ansired"
    style = style_factory("default", {"Token.Output.Header": header})
    assert isinstance(style(), Style)
    assert Token.Output.Header in style.styles
    assert style.styles[Token.Output.Header] == header
@pytest.mark.skip(reason="incompatible with new prompt toolkit")
def test_style_factory_unknown_name():
    """Test that an unrecognized name will not throw an error."""
    unknown = style_factory("foobar", {})
    assert isinstance(unknown(), Style)
|
{"/tests/test_dbspecial.py": ["/litecli/packages/special/utils.py"], "/litecli/packages/special/iocommands.py": ["/litecli/packages/special/__init__.py", "/litecli/packages/special/main.py", "/litecli/packages/special/favoritequeries.py", "/litecli/packages/special/utils.py"], "/litecli/packages/special/dbcommands.py": ["/litecli/__init__.py", "/litecli/packages/special/__init__.py", "/litecli/packages/special/utils.py", "/litecli/packages/special/main.py"], "/litecli/encodingutils.py": ["/litecli/compat.py"], "/tests/utils.py": ["/litecli/main.py"], "/tests/test_main.py": ["/litecli/main.py", "/litecli/packages/special/main.py"], "/tests/test_parseutils.py": ["/litecli/packages/parseutils.py"], "/litecli/sqlexecute.py": ["/litecli/packages/special/utils.py"], "/litecli/sqlcompleter.py": ["/litecli/packages/parseutils.py", "/litecli/packages/special/iocommands.py", "/litecli/packages/filepaths.py"], "/tests/test_smart_completion_public_schema_only.py": ["/litecli/sqlcompleter.py"], "/litecli/config.py": ["/litecli/__init__.py"], "/litecli/packages/special/main.py": ["/litecli/packages/special/__init__.py"], "/litecli/main.py": ["/litecli/packages/special/main.py", "/litecli/sqlcompleter.py", "/litecli/clitoolbar.py", "/litecli/sqlexecute.py", "/litecli/clibuffer.py", "/litecli/config.py", "/litecli/encodingutils.py", "/litecli/lexer.py", "/litecli/__init__.py", "/litecli/packages/filepaths.py"], "/litecli/packages/filepaths.py": ["/litecli/encodingutils.py"]}
|
45,348,490
|
dbcli/litecli
|
refs/heads/main
|
/litecli/packages/special/dbcommands.py
|
from __future__ import unicode_literals, print_function
import csv
import logging
import os
import sys
import platform
import shlex
from sqlite3 import ProgrammingError
from litecli import __version__
from litecli.packages.special import iocommands
from litecli.packages.special.utils import format_uptime
from .main import special_command, RAW_QUERY, PARSED_QUERY, ArgumentMissing
log = logging.getLogger(__name__)
@special_command(
    ".tables",
    "\\dt",
    "List tables.",
    arg_type=PARSED_QUERY,
    case_sensitive=True,
    aliases=("\\dt",),
)
def list_tables(cur, arg=None, arg_type=PARSED_QUERY, verbose=False):
    """List table and view names, optionally filtered by prefix *arg*.

    Returns a single (title, rows, headers, status) tuple in a list.
    """
    if arg:
        # Prefix match; sqlite's internal sqlite_* objects are excluded.
        args = ("{0}%".format(arg),)
        query = """
            SELECT name FROM sqlite_master
            WHERE type IN ('table','view') AND name LIKE ? AND name NOT LIKE 'sqlite_%'
            ORDER BY 1
        """
    else:
        args = tuple()
        query = """
            SELECT name FROM sqlite_master
            WHERE type IN ('table','view') AND name NOT LIKE 'sqlite_%'
            ORDER BY 1
        """
    log.debug(query)
    cur.execute(query, args)
    tables = cur.fetchall()
    status = ""
    if cur.description:
        headers = [x[0] for x in cur.description]
    else:
        return [(None, None, None, "")]
    # if verbose and arg:
    #     query = "SELECT sql FROM sqlite_master WHERE name LIKE ?"
    #     log.debug(query)
    #     cur.execute(query)
    #     status = cur.fetchone()[1]
    return [(None, tables, headers, status)]
@special_command(
    ".schema",
    ".schema[+] [table]",
    "The complete schema for the database or a single table",
    arg_type=PARSED_QUERY,
    case_sensitive=True,
)
def show_schema(cur, arg=None, **_):
    """Show the CREATE statements for the whole database or one object."""
    if arg:
        params = (arg,)
        query = """
            SELECT sql FROM sqlite_master
            WHERE name==?
            ORDER BY tbl_name, type DESC, name
        """
    else:
        params = tuple()
        query = """
            SELECT sql FROM sqlite_master
            ORDER BY tbl_name, type DESC, name
        """
    log.debug(query)
    cur.execute(query, params)
    schema_rows = cur.fetchall()
    if not cur.description:
        return [(None, None, None, "")]
    column_names = [column[0] for column in cur.description]
    return [(None, schema_rows, column_names, "")]
@special_command(
    ".databases",
    ".databases",
    "List databases.",
    arg_type=RAW_QUERY,
    case_sensitive=True,
    aliases=("\\l",),
)
def list_databases(cur, **_):
    """List attached databases via ``PRAGMA database_list``."""
    query = "PRAGMA database_list"
    log.debug(query)
    cur.execute(query)
    if not cur.description:
        return [(None, None, None, "")]
    column_names = [column[0] for column in cur.description]
    # The live cursor (not fetched rows) is handed back to the caller.
    return [(None, cur, column_names, "")]
@special_command(
    ".indexes",
    ".indexes [tablename]",
    "List indexes.",
    arg_type=PARSED_QUERY,
    case_sensitive=True,
    aliases=("\\di",),
)
def list_indexes(cur, arg=None, arg_type=PARSED_QUERY, verbose=False):
    """List indexes, optionally restricted to tables matching prefix *arg*."""
    if arg:
        params = ("{0}%".format(arg),)
        query = """
            SELECT name FROM sqlite_master
            WHERE type = 'index' AND tbl_name LIKE ? AND name NOT LIKE 'sqlite_%'
            ORDER BY 1
        """
    else:
        params = tuple()
        query = """
            SELECT name FROM sqlite_master
            WHERE type = 'index' AND name NOT LIKE 'sqlite_%'
            ORDER BY 1
        """
    log.debug(query)
    cur.execute(query, params)
    index_rows = cur.fetchall()
    if not cur.description:
        return [(None, None, None, "")]
    column_names = [column[0] for column in cur.description]
    return [(None, index_rows, column_names, "")]
@special_command(
    ".status",
    "\\s",
    "Show current settings.",
    arg_type=RAW_QUERY,
    aliases=("\\s",),
    case_sensitive=True,
)
def status(cur, **_):
    """Report client version, the current database file and the pager.

    Returns a single result tuple whose status field is the whole
    multi-line report.
    """
    # Create output buffers.
    footer = []
    footer.append("--------------")
    # Output the litecli client information.
    implementation = platform.python_implementation()
    version = platform.python_version()
    client_info = []
    client_info.append("litecli {0},".format(__version__))
    client_info.append("running on {0} {1}".format(implementation, version))
    footer.append(" ".join(client_info))
    # Path of the file backing the "main" database.
    query = "SELECT file from pragma_database_list() where name = 'main';"
    log.debug(query)
    cur.execute(query)
    # Robustness: guard against an empty result set before indexing,
    # and fall back to "" when the file column is NULL.
    row = cur.fetchone()
    db = row[0] if row else None
    if db is None:
        db = ""
    footer.append("Current database: " + db)
    # Resolve which pager would be used for output.
    if iocommands.is_pager_enabled():
        pager = os.environ.get("PAGER", "System default")
    else:
        pager = "stdout"
    # Fix: add the missing space after the colon so this line matches the
    # "Current database: " formatting above.
    footer.append("Current pager: " + pager)
    footer.append("--------------")
    return [(None, None, "", "\n".join(footer))]
@special_command(
    ".load",
    ".load path",
    "Load an extension library.",
    arg_type=PARSED_QUERY,
    case_sensitive=True,
)
def load_extension(cur, arg, **_):
    """Load a sqlite extension from the single path given in *arg*.

    Raises TypeError when *arg* does not contain exactly one path.
    """
    parts = shlex.split(arg)
    if len(parts) != 1:
        raise TypeError(".load accepts exactly one path")
    (path,) = parts
    connection = cur.connection
    connection.enable_load_extension(True)
    connection.load_extension(path)
    return [(None, None, None, "")]
@special_command(
    "describe",
    "\\d [table]",
    "Description of a table",
    arg_type=PARSED_QUERY,
    case_sensitive=True,
    aliases=("\\d", "desc"),
)
def describe(cur, arg, **_):
    """Describe a table's columns via ``PRAGMA table_info``.

    Raises ArgumentMissing when no table name is given.
    """
    if not arg:
        raise ArgumentMissing("Table name required.")
    # PRAGMA arguments cannot be bound as ? parameters, so the identifier
    # must be interpolated. Quote it and double any embedded quotes so odd
    # table names can't break the statement or inject SQL. (Also removes
    # the previously-unused `args` tuple.)
    query = 'PRAGMA table_info("{}")'.format(arg.replace('"', '""'))
    log.debug(query)
    cur.execute(query)
    tables = cur.fetchall()
    status = ""
    if cur.description:
        headers = [x[0] for x in cur.description]
    else:
        return [(None, None, None, "")]
    return [(None, tables, headers, status)]
@special_command(
    ".import",
    ".import filename table",
    "Import data from filename into an existing table",
    arg_type=PARSED_QUERY,
    case_sensitive=True,
)
def import_file(cur, arg=None, **_):
    """Bulk-load a delimited text file into an existing table.

    Rows whose column count does not match the table are reported on
    stderr and skipped; matching rows are inserted inside a single
    BEGIN/COMMIT transaction.
    """

    def split(s):
        # this is a modification of shlex.split function, just to make it support '`',
        # because table name might contain '`' character.
        lex = shlex.shlex(s, posix=True)
        lex.whitespace_split = True
        lex.commenters = ""
        lex.quotes += "`"
        return list(lex)

    args = split(arg)
    log.debug("[arg = %r], [args = %r]", arg, args)
    if len(args) != 2:
        raise TypeError("Usage: .import filename table")
    filename, table = args
    # Column count of the target table drives both row validation and the
    # number of ? placeholders in the INSERT template below.
    cur.execute('PRAGMA table_info("%s")' % table)
    ncols = len(cur.fetchall())
    insert_tmpl = 'INSERT INTO "%s" VALUES (?%s)' % (table, ",?" * (ncols - 1))
    with open(filename, "r") as csvfile:
        # Guess delimiter/quoting from the first 1 KB, then rewind.
        dialect = csv.Sniffer().sniff(csvfile.read(1024))
        csvfile.seek(0)
        reader = csv.reader(csvfile, dialect)
        cur.execute("BEGIN")
        ninserted, nignored = 0, 0
        for i, row in enumerate(reader):
            if len(row) != ncols:
                print(
                    "%s:%d expected %d columns but found %d - ignored"
                    % (filename, i, ncols, len(row)),
                    file=sys.stderr,
                )
                nignored += 1
                continue
            cur.execute(insert_tmpl, row)
            ninserted += 1
        cur.execute("COMMIT")
    status = "Inserted %d rows into %s" % (ninserted, table)
    if nignored > 0:
        status += " (%d rows are ignored)" % nignored
    return [(None, None, None, status)]
|
{"/tests/test_dbspecial.py": ["/litecli/packages/special/utils.py"], "/litecli/packages/special/iocommands.py": ["/litecli/packages/special/__init__.py", "/litecli/packages/special/main.py", "/litecli/packages/special/favoritequeries.py", "/litecli/packages/special/utils.py"], "/litecli/packages/special/dbcommands.py": ["/litecli/__init__.py", "/litecli/packages/special/__init__.py", "/litecli/packages/special/utils.py", "/litecli/packages/special/main.py"], "/litecli/encodingutils.py": ["/litecli/compat.py"], "/tests/utils.py": ["/litecli/main.py"], "/tests/test_main.py": ["/litecli/main.py", "/litecli/packages/special/main.py"], "/tests/test_parseutils.py": ["/litecli/packages/parseutils.py"], "/litecli/sqlexecute.py": ["/litecli/packages/special/utils.py"], "/litecli/sqlcompleter.py": ["/litecli/packages/parseutils.py", "/litecli/packages/special/iocommands.py", "/litecli/packages/filepaths.py"], "/tests/test_smart_completion_public_schema_only.py": ["/litecli/sqlcompleter.py"], "/litecli/config.py": ["/litecli/__init__.py"], "/litecli/packages/special/main.py": ["/litecli/packages/special/__init__.py"], "/litecli/main.py": ["/litecli/packages/special/main.py", "/litecli/sqlcompleter.py", "/litecli/clitoolbar.py", "/litecli/sqlexecute.py", "/litecli/clibuffer.py", "/litecli/config.py", "/litecli/encodingutils.py", "/litecli/lexer.py", "/litecli/__init__.py", "/litecli/packages/filepaths.py"], "/litecli/packages/filepaths.py": ["/litecli/encodingutils.py"]}
|
45,348,491
|
dbcli/litecli
|
refs/heads/main
|
/litecli/lexer.py
|
from pygments.lexer import inherit
from pygments.lexers.sql import MySqlLexer
from pygments.token import Keyword
class LiteCliLexer(MySqlLexer):
    """Extends SQLite lexer to add keywords."""

    # Extra keywords layered onto MySqlLexer's "root" state; `inherit`
    # keeps all of the parent lexer's existing rules after ours.
    tokens = {"root": [(r"\brepair\b", Keyword), (r"\boffset\b", Keyword), inherit]}
|
{"/tests/test_dbspecial.py": ["/litecli/packages/special/utils.py"], "/litecli/packages/special/iocommands.py": ["/litecli/packages/special/__init__.py", "/litecli/packages/special/main.py", "/litecli/packages/special/favoritequeries.py", "/litecli/packages/special/utils.py"], "/litecli/packages/special/dbcommands.py": ["/litecli/__init__.py", "/litecli/packages/special/__init__.py", "/litecli/packages/special/utils.py", "/litecli/packages/special/main.py"], "/litecli/encodingutils.py": ["/litecli/compat.py"], "/tests/utils.py": ["/litecli/main.py"], "/tests/test_main.py": ["/litecli/main.py", "/litecli/packages/special/main.py"], "/tests/test_parseutils.py": ["/litecli/packages/parseutils.py"], "/litecli/sqlexecute.py": ["/litecli/packages/special/utils.py"], "/litecli/sqlcompleter.py": ["/litecli/packages/parseutils.py", "/litecli/packages/special/iocommands.py", "/litecli/packages/filepaths.py"], "/tests/test_smart_completion_public_schema_only.py": ["/litecli/sqlcompleter.py"], "/litecli/config.py": ["/litecli/__init__.py"], "/litecli/packages/special/main.py": ["/litecli/packages/special/__init__.py"], "/litecli/main.py": ["/litecli/packages/special/main.py", "/litecli/sqlcompleter.py", "/litecli/clitoolbar.py", "/litecli/sqlexecute.py", "/litecli/clibuffer.py", "/litecli/config.py", "/litecli/encodingutils.py", "/litecli/lexer.py", "/litecli/__init__.py", "/litecli/packages/filepaths.py"], "/litecli/packages/filepaths.py": ["/litecli/encodingutils.py"]}
|
45,348,492
|
dbcli/litecli
|
refs/heads/main
|
/litecli/encodingutils.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from litecli.compat import PY2
# Version-neutral type aliases so the rest of the package can refer to a
# single set of names for bytes/str/text on both Python 2 and 3.
if PY2:
    binary_type = str
    string_types = basestring
    text_type = unicode
else:
    binary_type = bytes
    string_types = str
    text_type = str
def unicode2utf8(arg):
    """Encode a text string to UTF-8 bytes.

    Only acts on Python 2; on Python 3 (or non-text input) *arg* is
    returned unchanged.
    """
    needs_encoding = PY2 and isinstance(arg, text_type)
    return arg.encode("utf-8") if needs_encoding else arg
def utf8tounicode(arg):
    """Decode UTF-8 bytes to a text string.

    Only acts on Python 2; on Python 3 (or non-bytes input) *arg* is
    returned unchanged.
    """
    needs_decoding = PY2 and isinstance(arg, binary_type)
    return arg.decode("utf-8") if needs_decoding else arg
|
{"/tests/test_dbspecial.py": ["/litecli/packages/special/utils.py"], "/litecli/packages/special/iocommands.py": ["/litecli/packages/special/__init__.py", "/litecli/packages/special/main.py", "/litecli/packages/special/favoritequeries.py", "/litecli/packages/special/utils.py"], "/litecli/packages/special/dbcommands.py": ["/litecli/__init__.py", "/litecli/packages/special/__init__.py", "/litecli/packages/special/utils.py", "/litecli/packages/special/main.py"], "/litecli/encodingutils.py": ["/litecli/compat.py"], "/tests/utils.py": ["/litecli/main.py"], "/tests/test_main.py": ["/litecli/main.py", "/litecli/packages/special/main.py"], "/tests/test_parseutils.py": ["/litecli/packages/parseutils.py"], "/litecli/sqlexecute.py": ["/litecli/packages/special/utils.py"], "/litecli/sqlcompleter.py": ["/litecli/packages/parseutils.py", "/litecli/packages/special/iocommands.py", "/litecli/packages/filepaths.py"], "/tests/test_smart_completion_public_schema_only.py": ["/litecli/sqlcompleter.py"], "/litecli/config.py": ["/litecli/__init__.py"], "/litecli/packages/special/main.py": ["/litecli/packages/special/__init__.py"], "/litecli/main.py": ["/litecli/packages/special/main.py", "/litecli/sqlcompleter.py", "/litecli/clitoolbar.py", "/litecli/sqlexecute.py", "/litecli/clibuffer.py", "/litecli/config.py", "/litecli/encodingutils.py", "/litecli/lexer.py", "/litecli/__init__.py", "/litecli/packages/filepaths.py"], "/litecli/packages/filepaths.py": ["/litecli/encodingutils.py"]}
|
45,348,493
|
dbcli/litecli
|
refs/heads/main
|
/litecli/packages/parseutils.py
|
from __future__ import print_function
import re
import sqlparse
from sqlparse.sql import IdentifierList, Identifier, Function
from sqlparse.tokens import Keyword, DML, Punctuation
# Patterns used by last_word() to pick the trailing token of a line.
# All patterns are raw strings; "all_punctuations" was previously a plain
# string, which triggers an invalid-escape-sequence warning for `\s` on
# modern Python.
cleanup_regex = {
    # This matches only alphanumerics and underscores.
    "alphanum_underscore": re.compile(r"(\w+)$"),
    # This matches everything except spaces, parens, colon, and comma
    "many_punctuations": re.compile(r"([^():,\s]+)$"),
    # This matches everything except spaces, parens, colon, comma, and period
    "most_punctuations": re.compile(r"([^\.():,\s]+)$"),
    # This matches everything except a space.
    "all_punctuations": re.compile(r"([^\s]+)$"),
}
def last_word(text, include="alphanum_underscore"):
    """Return the trailing word of *text* per the *include* character class.

    An empty string, or text ending in whitespace, yields ''.

    >>> last_word('abc def')
    'def'
    >>> last_word('abc ')
    ''
    >>> last_word('bac $def', include='most_punctuations')
    '$def'
    >>> last_word('bac::def', include='most_punctuations')
    'def'
    """
    if not text or text[-1].isspace():
        return ""
    match = cleanup_regex[include].search(text)
    return match.group(0) if match else ""
# This code is borrowed from sqlparse example script.
# <url>
def is_subselect(parsed):
    """Return True when *parsed* is a token group opening a DML statement."""
    if not parsed.is_group:
        return False
    dml_starters = ("SELECT", "INSERT", "UPDATE", "CREATE", "DELETE")
    return any(
        token.ttype is DML and token.value.upper() in dml_starters
        for token in parsed.tokens
    )
def extract_from_part(parsed, stop_at_punctuation=True):
    """Yield the tokens that follow a FROM/INTO/UPDATE/TABLE/JOIN keyword.

    Narrows a parsed statement down to the region that can contain table
    names, recursing into sub-selects; used by extract_tables().
    """
    tbl_prefix_seen = False
    for item in parsed.tokens:
        if tbl_prefix_seen:
            if is_subselect(item):
                # Recurse into a nested select and surface its tokens too.
                for x in extract_from_part(item, stop_at_punctuation):
                    yield x
            elif stop_at_punctuation and item.ttype is Punctuation:
                return
            # An incomplete nested select won't be recognized correctly as a
            # sub-select. eg: 'SELECT * FROM (SELECT id FROM user'. This causes
            # the second FROM to trigger this elif condition resulting in a
            # `return`. So we need to ignore the keyword if the keyword
            # FROM.
            # Also 'SELECT * FROM abc JOIN def' will trigger this elif
            # condition. So we need to ignore the keyword JOIN and its variants
            # INNER JOIN, FULL OUTER JOIN, etc.
            elif (
                item.ttype is Keyword
                and (not item.value.upper() == "FROM")
                and (not item.value.upper().endswith("JOIN"))
            ):
                return
            else:
                yield item
        elif (
            item.ttype is Keyword or item.ttype is Keyword.DML
        ) and item.value.upper() in ("COPY", "FROM", "INTO", "UPDATE", "TABLE", "JOIN"):
            tbl_prefix_seen = True
        # 'SELECT a, FROM abc' will detect FROM as part of the column list.
        # So this check here is necessary.
        elif isinstance(item, IdentifierList):
            for identifier in item.get_identifiers():
                if identifier.ttype is Keyword and identifier.value.upper() == "FROM":
                    tbl_prefix_seen = True
                    break
def extract_table_identifiers(token_stream):
    """yields tuples of (schema_name, table_name, table_alias)"""
    for item in token_stream:
        if isinstance(item, IdentifierList):
            for identifier in item.get_identifiers():
                # Sometimes Keywords (such as FROM ) are classified as
                # identifiers which don't have the get_real_name() method.
                try:
                    schema_name = identifier.get_parent_name()
                    real_name = identifier.get_real_name()
                except AttributeError:
                    continue
                if real_name:
                    yield (schema_name, real_name, identifier.get_alias())
        elif isinstance(item, Identifier):
            real_name = item.get_real_name()
            schema_name = item.get_parent_name()
            if real_name:
                yield (schema_name, real_name, item.get_alias())
            else:
                # No parsable real name; fall back to the raw name, using
                # it as its own alias when none is present.
                name = item.get_name()
                yield (None, name, item.get_alias() or name)
        elif isinstance(item, Function):
            # e.g. table-valued functions: name doubles as the alias.
            yield (None, item.get_name(), item.get_name())
# extract_tables is inspired from examples in the sqlparse lib.
def extract_tables(sql):
    """Extract the table names from an SQL statement.

    Returns a list of (schema, table, alias) tuples.
    """
    statements = sqlparse.parse(sql)
    if not statements:
        return []
    first_statement = statements[0]
    # INSERT statements must stop looking for tables at the sign of first
    # Punctuation. eg: INSERT INTO abc (col1, col2) VALUES (1, 2)
    # abc is the table name, but if we don't stop at the first lparen, then
    # we'll identify abc, col1 and col2 as table names.
    is_insert = first_statement.token_first().value.lower() == "insert"
    token_stream = extract_from_part(first_statement, stop_at_punctuation=is_insert)
    return list(extract_table_identifiers(token_stream))
def find_prev_keyword(sql):
    """Find the last sql keyword in an SQL statement

    Returns the value of the last keyword, and the text of the query with
    everything after the last keyword stripped
    """
    if not sql.strip():
        return None, ""
    parsed = sqlparse.parse(sql)[0]
    flattened = list(parsed.flatten())
    # Logical operators don't count as "the" keyword for completion
    # purposes, so they are skipped while scanning backwards.
    logical_operators = ("AND", "OR", "NOT", "BETWEEN")
    for t in reversed(flattened):
        if t.value == "(" or (
            t.is_keyword and (t.value.upper() not in logical_operators)
        ):
            # Find the location of token t in the original parsed statement
            # We can't use parsed.token_index(t) because t may be a child token
            # inside a TokenList, in which case token_index thows an error
            # Minimal example:
            # p = sqlparse.parse('select * from foo where bar')
            # t = list(p.flatten())[-3] # The "Where" token
            # p.token_index(t) # Throws ValueError: not in list
            idx = flattened.index(t)
            # Combine the string values of all tokens in the original list
            # up to and including the target keyword token t, to produce a
            # query string with everything after the keyword token removed
            text = "".join(tok.value for tok in flattened[: idx + 1])
            return t, text
    return None, ""
def query_starts_with(query, prefixes):
    """Check if the query starts with any item from *prefixes*."""
    lowered_prefixes = {prefix.lower() for prefix in prefixes}
    stripped = sqlparse.format(query.lower(), strip_comments=True)
    if not stripped:
        return False
    first_token = stripped.split()[0]
    return first_token in lowered_prefixes
def queries_start_with(queries, prefixes):
    """Check if any query in *queries* starts with an item from *prefixes*."""
    return any(
        query and query_starts_with(query, prefixes)
        for query in sqlparse.split(queries)
    )
def is_destructive(queries):
    """Return True when any of the statements in *queries* is destructive."""
    destructive_keywords = ("drop", "shutdown", "delete", "truncate", "alter")
    return queries_start_with(queries, destructive_keywords)
if __name__ == "__main__":
    # Ad-hoc manual check: parse an intentionally incomplete nested select.
    sql = "select * from (select t. from tabl t"
    print(extract_tables(sql))
|
{"/tests/test_dbspecial.py": ["/litecli/packages/special/utils.py"], "/litecli/packages/special/iocommands.py": ["/litecli/packages/special/__init__.py", "/litecli/packages/special/main.py", "/litecli/packages/special/favoritequeries.py", "/litecli/packages/special/utils.py"], "/litecli/packages/special/dbcommands.py": ["/litecli/__init__.py", "/litecli/packages/special/__init__.py", "/litecli/packages/special/utils.py", "/litecli/packages/special/main.py"], "/litecli/encodingutils.py": ["/litecli/compat.py"], "/tests/utils.py": ["/litecli/main.py"], "/tests/test_main.py": ["/litecli/main.py", "/litecli/packages/special/main.py"], "/tests/test_parseutils.py": ["/litecli/packages/parseutils.py"], "/litecli/sqlexecute.py": ["/litecli/packages/special/utils.py"], "/litecli/sqlcompleter.py": ["/litecli/packages/parseutils.py", "/litecli/packages/special/iocommands.py", "/litecli/packages/filepaths.py"], "/tests/test_smart_completion_public_schema_only.py": ["/litecli/sqlcompleter.py"], "/litecli/config.py": ["/litecli/__init__.py"], "/litecli/packages/special/main.py": ["/litecli/packages/special/__init__.py"], "/litecli/main.py": ["/litecli/packages/special/main.py", "/litecli/sqlcompleter.py", "/litecli/clitoolbar.py", "/litecli/sqlexecute.py", "/litecli/clibuffer.py", "/litecli/config.py", "/litecli/encodingutils.py", "/litecli/lexer.py", "/litecli/__init__.py", "/litecli/packages/filepaths.py"], "/litecli/packages/filepaths.py": ["/litecli/encodingutils.py"]}
|
45,348,494
|
dbcli/litecli
|
refs/heads/main
|
/tests/test_sqlexecute.py
|
# coding=UTF-8
import os
import pytest
from utils import run, dbtest, set_expanded_output, is_expanded_output
from sqlite3 import OperationalError, ProgrammingError
def assert_result_equal(
    result,
    title=None,
    rows=None,
    headers=None,
    status=None,
    auto_status=True,
    assert_contains=False,
):
    """Assert that an sqlexecute.run() result matches the expected values.

    When *status* is omitted and *auto_status* is on, a "N row(s) in set"
    status is derived from *rows*. With *assert_contains*, each truthy
    expected field only needs to be contained in the first result entry.
    """
    if status is None and auto_status and rows:
        row_count = len(rows)
        plural = "s" if row_count > 1 else ""
        status = "{} row{} in set".format(row_count, plural)
    expected = {"title": title, "rows": rows, "headers": headers, "status": status}
    if not assert_contains:
        # Exact match against a single-result list.
        assert result == [expected]
        return
    # Loose containment match on each provided field.
    for key, value in expected.items():
        if value:
            assert value in result[0][key]
@dbtest
def test_conn(executor):
    """Smoke test: create/insert/select round-trips a text value."""
    run(executor, """create table test(a text)""")
    run(executor, """insert into test values('abc')""")
    results = run(executor, """select * from test""")
    assert_result_equal(results, headers=["a"], rows=[("abc",)])
@dbtest
def test_bools(executor):
    """Boolean columns come back as integers (sqlite has no bool type)."""
    run(executor, """create table test(a boolean)""")
    run(executor, """insert into test values(1)""")
    results = run(executor, """select * from test""")
    assert_result_equal(results, headers=["a"], rows=[(1,)])
@dbtest
def test_binary(executor):
    """Control characters stored in a BLOB column round-trip unchanged."""
    run(executor, """create table foo(blb BLOB NOT NULL)""")
    run(executor, """INSERT INTO foo VALUES ('\x01\x01\x01\n')""")
    results = run(executor, """select * from foo""")
    expected = "\x01\x01\x01\n"
    assert_result_equal(results, headers=["blb"], rows=[(expected,)])
## Failing in Travis for some unknown reason.
# @dbtest
# def test_table_and_columns_query(executor):
# run(executor, "create table a(x text, y text)")
# run(executor, "create table b(z text)")
# assert set(executor.tables()) == set([("a",), ("b",)])
# assert set(executor.table_columns()) == set([("a", "x"), ("a", "y"), ("b", "z")])
@dbtest
def test_database_list(executor):
    """The default 'main' database is present in the database list."""
    databases = executor.databases()
    assert "main" in list(databases)
@dbtest
def test_invalid_syntax(executor):
    """Malformed SQL raises OperationalError mentioning a syntax error."""
    with pytest.raises(OperationalError) as excinfo:
        run(executor, "invalid syntax!")
    assert "syntax error" in str(excinfo.value)
@dbtest
def test_invalid_column_name(executor):
    """Selecting an unknown column raises a 'no such column' error."""
    with pytest.raises(OperationalError) as excinfo:
        run(executor, "select invalid command")
    assert "no such column: invalid" in str(excinfo.value)
@dbtest
def test_unicode_support_in_output(executor):
    """Non-ASCII values stored in the DB come back intact."""
    run(executor, "create table unicodechars(t text)")
    run(executor, u"insert into unicodechars (t) values ('é')")
    # See issue #24, this raises an exception without proper handling
    results = run(executor, u"select * from unicodechars")
    assert_result_equal(results, headers=["t"], rows=[(u"é",)])
@dbtest
def test_invalid_unicode_values_dont_choke(executor):
    """Invalid UTF-8 bytes in a text column are surfaced as an escape."""
    run(executor, "create table unicodechars(t text)")
    # \xc3 is not a valid utf-8 char. But we can insert it into the database
    # which can break querying if not handled correctly.
    run(executor, u"insert into unicodechars (t) values (cast(x'c3' as text))")
    results = run(executor, u"select * from unicodechars")
    assert_result_equal(results, headers=["t"], rows=[("\\xc3",)])
@dbtest
def test_multiple_queries_same_line(executor):
    """Two ';'-separated statements produce two independent result sets."""
    results = run(executor, "select 'foo'; select 'bar'")
    expected = [
        {
            "title": None,
            "headers": ["'foo'"],
            "rows": [(u"foo",)],
            "status": "1 row in set",
        },
        {
            "title": None,
            "headers": ["'bar'"],
            "rows": [(u"bar",)],
            "status": "1 row in set",
        },
    ]
    assert expected == results
@dbtest
def test_multiple_queries_same_line_syntaxerror(executor):
    """A bad second statement on the same line still raises."""
    with pytest.raises(OperationalError) as excinfo:
        run(executor, "select 'foo'; invalid syntax")
    assert "syntax error" in str(excinfo.value)
@dbtest
def test_favorite_query(executor):
    """Save (\\fs), execute verbosely (\\f+) and delete (\\fd) a favorite."""
    set_expanded_output(False)
    run(executor, "create table test(a text)")
    run(executor, "insert into test values('abc')")
    run(executor, "insert into test values('def')")
    results = run(executor, "\\fs test-a select * from test where a like 'a%'")
    assert_result_equal(results, status="Saved.")
    results = run(executor, "\\f+ test-a")
    assert_result_equal(
        results,
        title="> select * from test where a like 'a%'",
        headers=["a"],
        rows=[("abc",)],
        auto_status=False,
    )
    results = run(executor, "\\fd test-a")
    assert_result_equal(results, status="test-a: Deleted")
@dbtest
def test_bind_parameterized_favorite_query(executor):
    """Favorites with ? placeholders bind positional args; arity mismatches raise."""
    set_expanded_output(False)
    run(executor, "create table test(name text, id integer)")
    run(executor, "insert into test values('def', 2)")
    run(executor, "insert into test values('two words', 3)")
    results = run(executor, "\\fs q_param select * from test where name=?")
    assert_result_equal(results, status="Saved.")
    results = run(executor, "\\f+ q_param def")
    assert_result_equal(
        results,
        title="> select * from test where name=?",
        headers=["name", "id"],
        rows=[("def", 2)],
        auto_status=False,
    )
    # Quoted argument containing a space binds as one value.
    results = run(executor, "\\f+ q_param 'two words'")
    assert_result_equal(
        results,
        title="> select * from test where name=?",
        headers=["name", "id"],
        rows=[("two words", 3)],
        auto_status=False,
    )
    # Too few / too many bind arguments both raise ProgrammingError.
    with pytest.raises(ProgrammingError):
        results = run(executor, "\\f+ q_param")
    with pytest.raises(ProgrammingError):
        results = run(executor, "\\f+ q_param 1 2")
@dbtest
def test_verbose_feature_of_favorite_query(executor):
    """\\f runs a favorite silently; \\f+ additionally shows the substituted query as title.

    NOTE(review): largely overlaps test_shell_parameterized_favorite_query
    (same $1 favorite) — possibly a copy-paste; confirm intent.
    """
    set_expanded_output(False)
    run(executor, "create table test(a text, id integer)")
    run(executor, "insert into test values('abc', 1)")
    run(executor, "insert into test values('def', 2)")
    results = run(executor, "\\fs sh_param select * from test where id=$1")
    assert_result_equal(results, status="Saved.")
    results = run(executor, "\\f sh_param 1")
    assert_result_equal(
        results,
        title=None,
        headers=["a", "id"],
        rows=[("abc", 1)],
        auto_status=False,
    )
    results = run(executor, "\\f+ sh_param 1")
    assert_result_equal(
        results,
        title="> select * from test where id=1",
        headers=["a", "id"],
        rows=[("abc", 1)],
        auto_status=False,
    )
@dbtest
def test_shell_parameterized_favorite_query(executor):
    """$N-style favorites substitute shell args; missing/extra args give status messages."""
    set_expanded_output(False)
    run(executor, "create table test(a text, id integer)")
    run(executor, "insert into test values('abc', 1)")
    run(executor, "insert into test values('def', 2)")
    results = run(executor, "\\fs sh_param select * from test where id=$1")
    assert_result_equal(results, status="Saved.")
    results = run(executor, "\\f+ sh_param 1")
    assert_result_equal(
        results,
        title="> select * from test where id=1",
        headers=["a", "id"],
        rows=[("abc", 1)],
        auto_status=False,
    )
    # Missing substitution reports an error in the status (no exception).
    results = run(executor, "\\f+ sh_param")
    assert_result_equal(
        results,
        title=None,
        headers=None,
        rows=None,
        status="missing substitution for $1 in query:\n  select * from test where id=$1",
    )
    # Extra arguments likewise surface a status-level error.
    results = run(executor, "\\f+ sh_param 1 2")
    assert_result_equal(
        results,
        title=None,
        headers=None,
        rows=None,
        status="Too many arguments.\nQuery does not have enough place holders to substitute.\nselect * from test where id=1",
    )
@dbtest
def test_favorite_query_multiple_statement(executor):
    """A favorite containing two statements yields one result set per statement."""
    set_expanded_output(False)
    run(executor, "create table test(a text)")
    run(executor, "insert into test values('abc')")
    run(executor, "insert into test values('def')")
    results = run(
        executor,
        "\\fs test-ad select * from test where a like 'a%'; "
        "select * from test where a like 'd%'",
    )
    assert_result_equal(results, status="Saved.")
    results = run(executor, "\\f+ test-ad")
    expected = [
        {
            "title": "> select * from test where a like 'a%'",
            "headers": ["a"],
            "rows": [("abc",)],
            "status": None,
        },
        {
            "title": "> select * from test where a like 'd%'",
            "headers": ["a"],
            "rows": [("def",)],
            "status": None,
        },
    ]
    assert expected == results
    results = run(executor, "\\fd test-ad")
    assert_result_equal(results, status="test-ad: Deleted")
@dbtest
def test_favorite_query_expanded_output(executor):
    """Trailing \\G on a favorite invocation switches on expanded output."""
    set_expanded_output(False)
    run(executor, """create table test(a text)""")
    run(executor, """insert into test values('abc')""")
    results = run(executor, "\\fs test-ae select * from test")
    assert_result_equal(results, status="Saved.")
    results = run(executor, "\\f+ test-ae \G")
    assert is_expanded_output() is True
    assert_result_equal(
        results,
        title="> select * from test",
        headers=["a"],
        rows=[("abc",)],
        auto_status=False,
    )
    # Reset the global flag so later tests are unaffected.
    set_expanded_output(False)
    results = run(executor, "\\fd test-ae")
    assert_result_equal(results, status="test-ae: Deleted")
@dbtest
def test_special_command(executor):
    """\\? lists special commands; spot-check that 'quit' appears."""
    results = run(executor, "\\?")
    assert_result_equal(
        results,
        rows=("quit", "\\q", "Quit."),
        headers="Command",
        assert_contains=True,
        auto_status=False,
    )
@dbtest
def test_cd_command_without_a_folder_name(executor):
    """`system cd` with no argument reports a friendly status, not an error."""
    results = run(executor, "system cd")
    assert_result_equal(results, status="No folder name was provided.")
@dbtest
def test_system_command_not_found(executor):
    """An unknown system command surfaces the OSError in the status."""
    results = run(executor, "system xyz")
    assert_result_equal(
        results, status="OSError: No such file or directory", assert_contains=True
    )
@dbtest
def test_system_command_output(executor):
    """stdout of a system command is returned as the result status."""
    test_dir = os.path.abspath(os.path.dirname(__file__))
    test_file_path = os.path.join(test_dir, "test.txt")
    results = run(executor, "system cat {0}".format(test_file_path))
    assert_result_equal(results, status="litecli is awesome!\n")
@dbtest
def test_cd_command_current_dir(executor):
    """`system cd <dir>` changes the process working directory."""
    test_path = os.path.abspath(os.path.dirname(__file__))
    run(executor, "system cd {0}".format(test_path))
    assert os.getcwd() == test_path
    # Restore the previous working directory for subsequent tests.
    run(executor, "system cd ..")
@dbtest
def test_unicode_support(executor):
    """Non-ASCII literals in queries round-trip through the executor."""
    results = run(executor, u"SELECT '日本語' AS japanese;")
    assert_result_equal(results, headers=["japanese"], rows=[(u"日本語",)])
@dbtest
def test_timestamp_null(executor):
    """NULL in a timestamp column is returned as Python None."""
    run(executor, """create table ts_null(a timestamp null)""")
    run(executor, """insert into ts_null values(null)""")
    results = run(executor, """select * from ts_null""")
    assert_result_equal(results, headers=["a"], rows=[(None,)])
@dbtest
def test_datetime_null(executor):
    """NULL in a datetime column is returned as Python None."""
    run(executor, """create table dt_null(a datetime null)""")
    run(executor, """insert into dt_null values(null)""")
    results = run(executor, """select * from dt_null""")
    assert_result_equal(results, headers=["a"], rows=[(None,)])
@dbtest
def test_date_null(executor):
    """NULL in a date column is returned as Python None."""
    run(executor, """create table date_null(a date null)""")
    run(executor, """insert into date_null values(null)""")
    results = run(executor, """select * from date_null""")
    assert_result_equal(results, headers=["a"], rows=[(None,)])
@dbtest
def test_time_null(executor):
    """NULL in a time column is returned as Python None."""
    run(executor, """create table time_null(a time null)""")
    run(executor, """insert into time_null values(null)""")
    results = run(executor, """select * from time_null""")
    assert_result_equal(results, headers=["a"], rows=[(None,)])
|
{"/tests/test_dbspecial.py": ["/litecli/packages/special/utils.py"], "/litecli/packages/special/iocommands.py": ["/litecli/packages/special/__init__.py", "/litecli/packages/special/main.py", "/litecli/packages/special/favoritequeries.py", "/litecli/packages/special/utils.py"], "/litecli/packages/special/dbcommands.py": ["/litecli/__init__.py", "/litecli/packages/special/__init__.py", "/litecli/packages/special/utils.py", "/litecli/packages/special/main.py"], "/litecli/encodingutils.py": ["/litecli/compat.py"], "/tests/utils.py": ["/litecli/main.py"], "/tests/test_main.py": ["/litecli/main.py", "/litecli/packages/special/main.py"], "/tests/test_parseutils.py": ["/litecli/packages/parseutils.py"], "/litecli/sqlexecute.py": ["/litecli/packages/special/utils.py"], "/litecli/sqlcompleter.py": ["/litecli/packages/parseutils.py", "/litecli/packages/special/iocommands.py", "/litecli/packages/filepaths.py"], "/tests/test_smart_completion_public_schema_only.py": ["/litecli/sqlcompleter.py"], "/litecli/config.py": ["/litecli/__init__.py"], "/litecli/packages/special/main.py": ["/litecli/packages/special/__init__.py"], "/litecli/main.py": ["/litecli/packages/special/main.py", "/litecli/sqlcompleter.py", "/litecli/clitoolbar.py", "/litecli/sqlexecute.py", "/litecli/clibuffer.py", "/litecli/config.py", "/litecli/encodingutils.py", "/litecli/lexer.py", "/litecli/__init__.py", "/litecli/packages/filepaths.py"], "/litecli/packages/filepaths.py": ["/litecli/encodingutils.py"]}
|
45,348,495
|
dbcli/litecli
|
refs/heads/main
|
/tests/utils.py
|
# -*- coding: utf-8 -*-
import os
import time
import signal
import platform
import multiprocessing
from contextlib import closing
import sqlite3
import pytest
from litecli.main import special
# Database file used by the test-suite; override via PYTEST_DATABASE.
DATABASE = os.getenv("PYTEST_DATABASE", "test.sqlite3")
def db_connection(dbname=":memory:"):
    """Open a sqlite3 connection in autocommit mode (isolation_level=None)."""
    return sqlite3.connect(database=dbname, isolation_level=None)
# Probe once at import time whether a sqlite connection can be made; the
# `dbtest` marker skips DB-backed tests when it cannot.
try:
    db_connection()
    CAN_CONNECT_TO_DB = True
except Exception as ex:
    CAN_CONNECT_TO_DB = False
dbtest = pytest.mark.skipif(
    not CAN_CONNECT_TO_DB, reason="Error creating sqlite connection"
)
def create_db(dbname):
    """Best-effort creation of a scratch database.

    NOTE(review): sqlite has no CREATE/DROP DATABASE statements, so these
    always fail and are swallowed — presumably kept for API parity with
    other dbcli test-suites; *dbname* is unused. TODO confirm.
    """
    with closing(db_connection().cursor()) as cur:
        try:
            cur.execute("""DROP DATABASE IF EXISTS _test_db""")
            cur.execute("""CREATE DATABASE _test_db""")
        except Exception:
            # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
            # are no longer swallowed; best-effort behavior is preserved.
            pass
def drop_tables(dbname):
    """Best-effort teardown counterpart of create_db().

    NOTE(review): DROP DATABASE is not valid sqlite and always fails
    silently; *dbname* is unused. TODO confirm.
    """
    with closing(db_connection().cursor()) as cur:
        try:
            cur.execute("""DROP DATABASE IF EXISTS _test_db""")
        except Exception:
            # Narrowed from a bare `except:`; best-effort behavior kept.
            pass
def run(executor, sql, rows_as_list=True):
    """Run *sql* through *executor* and collect each result as a dict.

    With *rows_as_list* (default), truthy row iterables are materialized
    into lists so tests can compare them directly.
    """
    collected = []
    for title, rows, headers, status in executor.run(sql):
        if rows_as_list and rows:
            rows = list(rows)
        collected.append(
            {"title": title, "rows": rows, "headers": headers, "status": status}
        )
    return collected
def set_expanded_output(is_expanded):
    """Pass-through for the tests."""
    # Delegates to the global flag in litecli.main's `special` module.
    return special.set_expanded_output(is_expanded)
def is_expanded_output():
"""Pass-through for the tests."""
return special.is_expanded_output()
def send_ctrl_c_to_pid(pid, wait_seconds):
    """Sends a Ctrl-C like signal to the given `pid` after `wait_seconds`
    seconds."""
    time.sleep(wait_seconds)
    system_name = platform.system()
    if system_name == "Windows":
        # Windows has no SIGINT delivery to arbitrary processes;
        # CTRL_C_EVENT is the closest equivalent.
        os.kill(pid, signal.CTRL_C_EVENT)
    else:
        os.kill(pid, signal.SIGINT)
def send_ctrl_c(wait_seconds):
    """Create a process that sends a Ctrl-C like signal to the current process
    after `wait_seconds` seconds.

    Returns the `multiprocessing.Process` created.
    """
    # A separate process (not a thread) is used so the signal is delivered
    # asynchronously while the current process may be blocked.
    ctrl_c_process = multiprocessing.Process(
        target=send_ctrl_c_to_pid, args=(os.getpid(), wait_seconds)
    )
    ctrl_c_process.start()
    return ctrl_c_process
|
{"/tests/test_dbspecial.py": ["/litecli/packages/special/utils.py"], "/litecli/packages/special/iocommands.py": ["/litecli/packages/special/__init__.py", "/litecli/packages/special/main.py", "/litecli/packages/special/favoritequeries.py", "/litecli/packages/special/utils.py"], "/litecli/packages/special/dbcommands.py": ["/litecli/__init__.py", "/litecli/packages/special/__init__.py", "/litecli/packages/special/utils.py", "/litecli/packages/special/main.py"], "/litecli/encodingutils.py": ["/litecli/compat.py"], "/tests/utils.py": ["/litecli/main.py"], "/tests/test_main.py": ["/litecli/main.py", "/litecli/packages/special/main.py"], "/tests/test_parseutils.py": ["/litecli/packages/parseutils.py"], "/litecli/sqlexecute.py": ["/litecli/packages/special/utils.py"], "/litecli/sqlcompleter.py": ["/litecli/packages/parseutils.py", "/litecli/packages/special/iocommands.py", "/litecli/packages/filepaths.py"], "/tests/test_smart_completion_public_schema_only.py": ["/litecli/sqlcompleter.py"], "/litecli/config.py": ["/litecli/__init__.py"], "/litecli/packages/special/main.py": ["/litecli/packages/special/__init__.py"], "/litecli/main.py": ["/litecli/packages/special/main.py", "/litecli/sqlcompleter.py", "/litecli/clitoolbar.py", "/litecli/sqlexecute.py", "/litecli/clibuffer.py", "/litecli/config.py", "/litecli/encodingutils.py", "/litecli/lexer.py", "/litecli/__init__.py", "/litecli/packages/filepaths.py"], "/litecli/packages/filepaths.py": ["/litecli/encodingutils.py"]}
|
45,348,496
|
dbcli/litecli
|
refs/heads/main
|
/tests/test_main.py
|
import os
from collections import namedtuple
from textwrap import dedent
from tempfile import NamedTemporaryFile
import shutil
import click
from click.testing import CliRunner
from litecli.main import cli, LiteCli
from litecli.packages.special.main import COMMANDS as SPECIAL_COMMANDS
from utils import dbtest, run
# Resolve paths relative to this test file so the suite works from any CWD.
test_dir = os.path.abspath(os.path.dirname(__file__))
project_dir = os.path.dirname(test_dir)
# Shared config giving every CLI invocation deterministic settings.
default_config_file = os.path.join(project_dir, "tests", "liteclirc")
# Baseline argv for CliRunner invocations; "_test_db" is the database arg.
CLI_ARGS = ["--liteclirc", default_config_file, "_test_db"]
@dbtest
def test_execute_arg(executor):
    """-e/--execute runs the given SQL non-interactively and prints it."""
    run(executor, "create table test (a text)")
    run(executor, 'insert into test values("abc")')
    sql = "select * from test;"
    runner = CliRunner()
    # Short option.
    result = runner.invoke(cli, args=CLI_ARGS + ["-e", sql])
    assert result.exit_code == 0
    assert "abc" in result.output
    # Long option must behave identically.
    result = runner.invoke(cli, args=CLI_ARGS + ["--execute", sql])
    assert result.exit_code == 0
    assert "abc" in result.output
    expected = "a\nabc\n"
    assert expected in result.output


@dbtest
def test_execute_arg_with_table(executor):
    """--table renders -e output as an ASCII table."""
    run(executor, "create table test (a text)")
    run(executor, 'insert into test values("abc")')
    sql = "select * from test;"
    runner = CliRunner()
    result = runner.invoke(cli, args=CLI_ARGS + ["-e", sql] + ["--table"])
    expected = "+-----+\n| a |\n+-----+\n| abc |\n+-----+\n"
    assert result.exit_code == 0
    assert expected in result.output


@dbtest
def test_execute_arg_with_csv(executor):
    """--csv renders -e output as quoted CSV."""
    run(executor, "create table test (a text)")
    run(executor, 'insert into test values("abc")')
    sql = "select * from test;"
    runner = CliRunner()
    result = runner.invoke(cli, args=CLI_ARGS + ["-e", sql] + ["--csv"])
    expected = '"a"\n"abc"\n'
    assert result.exit_code == 0
    assert expected in "".join(result.output)
@dbtest
def test_batch_mode(executor):
    """Statements piped on stdin run in batch mode with bare output."""
    run(executor, """create table test(a text)""")
    run(executor, """insert into test values('abc'), ('def'), ('ghi')""")
    sql = "select count(*) from test;\n" "select * from test limit 1;"
    runner = CliRunner()
    result = runner.invoke(cli, args=CLI_ARGS, input=sql)
    assert result.exit_code == 0
    assert "count(*)\n3\na\nabc\n" in "".join(result.output)


@dbtest
def test_batch_mode_table(executor):
    """-t renders batch output as ASCII tables, one per statement."""
    run(executor, """create table test(a text)""")
    run(executor, """insert into test values('abc'), ('def'), ('ghi')""")
    sql = "select count(*) from test;\n" "select * from test limit 1;"
    runner = CliRunner()
    result = runner.invoke(cli, args=CLI_ARGS + ["-t"], input=sql)
    expected = dedent(
        """\
        +----------+
        | count(*) |
        +----------+
        | 3 |
        +----------+
        +-----+
        | a |
        +-----+
        | abc |
        +-----+"""
    )
    assert result.exit_code == 0
    assert expected in result.output


@dbtest
def test_batch_mode_csv(executor):
    """--csv batch output is quoted CSV; embedded newlines stay quoted."""
    run(executor, """create table test(a text, b text)""")
    run(executor, """insert into test (a, b) values('abc', 'de\nf'), ('ghi', 'jkl')""")
    sql = "select * from test;"
    runner = CliRunner()
    result = runner.invoke(cli, args=CLI_ARGS + ["--csv"], input=sql)
    expected = '"a","b"\n"abc","de\nf"\n"ghi","jkl"\n'
    assert result.exit_code == 0
    assert expected in "".join(result.output)
def test_help_strings_end_with_periods():
    """Every click Option on the CLI must carry help text ending in a period."""
    options = (p for p in cli.params if isinstance(p, click.core.Option))
    for option in options:
        assert hasattr(option, "help")
        assert option.help.endswith(".")
def output(monkeypatch, terminal_size, testdata, explicit_pager, expect_pager):
    """Drive LiteCli.output() with stubbed terminal/executor objects and
    assert, via the patched click functions, whether the pager was used.

    The text must come out unchanged either way.
    """
    global clickoutput
    clickoutput = ""
    m = LiteCli(liteclirc=default_config_file)

    class TestOutput:
        # Stand-in for the prompt output object; reports a fixed size.
        def get_size(self):
            size = namedtuple("Size", "rows columns")
            size.columns, size.rows = terminal_size
            return size

    class TestExecute:
        # Minimal SQLExecute stub with the attributes LiteCli touches.
        host = "test"
        user = "test"
        dbname = "test"
        port = 0

        def server_type(self):
            return ["test"]

    class PromptBuffer:
        output = TestOutput()

    m.prompt_app = PromptBuffer()
    m.sqlexecute = TestExecute()
    m.explicit_pager = explicit_pager

    def echo_via_pager(s):
        # Reached only when LiteCli decided to page.
        assert expect_pager
        global clickoutput
        clickoutput += s

    def secho(s):
        # Reached only when LiteCli printed directly.
        assert not expect_pager
        global clickoutput
        clickoutput += s + "\n"

    monkeypatch.setattr(click, "echo_via_pager", echo_via_pager)
    monkeypatch.setattr(click, "secho", secho)
    m.output(testdata)
    if clickoutput.endswith("\n"):
        clickoutput = clickoutput[:-1]
    # Regardless of the route taken, the text itself must be unchanged.
    assert clickoutput == "\n".join(testdata)
def test_conditional_pager(monkeypatch):
    """Pager kicks in when output overflows the terminal or was explicitly
    configured; the `nopager`/`pager` special commands override both."""
    testdata = "Lorem ipsum dolor sit amet consectetur adipiscing elit sed do".split(
        " "
    )
    # User didn't set pager, output doesn't fit screen -> pager
    output(
        monkeypatch,
        terminal_size=(5, 10),
        testdata=testdata,
        explicit_pager=False,
        expect_pager=True,
    )
    # User didn't set pager, output fits screen -> no pager
    output(
        monkeypatch,
        terminal_size=(20, 20),
        testdata=testdata,
        explicit_pager=False,
        expect_pager=False,
    )
    # User manually configured pager, output doesn't fit screen -> pager
    output(
        monkeypatch,
        terminal_size=(5, 10),
        testdata=testdata,
        explicit_pager=True,
        expect_pager=True,
    )
    # User manually configured pager, output fit screen -> pager
    output(
        monkeypatch,
        terminal_size=(20, 20),
        testdata=testdata,
        explicit_pager=True,
        expect_pager=True,
    )

    # `nopager` special command disables paging even for overflowing output.
    SPECIAL_COMMANDS["nopager"].handler()
    output(
        monkeypatch,
        terminal_size=(5, 10),
        testdata=testdata,
        explicit_pager=False,
        expect_pager=False,
    )
    # Restore the global pager state for subsequent tests.
    SPECIAL_COMMANDS["pager"].handler("")
def test_reserved_space_is_integer():
    """Make sure that reserved space is returned as an integer."""

    def stub_terminal_size():
        return (5, 5)

    old_func = shutil.get_terminal_size
    shutil.get_terminal_size = stub_terminal_size
    try:
        lc = LiteCli()
        assert isinstance(lc.get_reserved_space(), int)
    finally:
        # Bug fix: restore the real implementation even when the assertion
        # (or LiteCli construction) fails, so later tests never see the
        # 5x5 stub.
        shutil.get_terminal_size = old_func
@dbtest
def test_import_command(executor):
    """`.import <file> <table>` loads CSV rows, visible via --csv output."""
    data_file = os.path.join(project_dir, "tests", "data", "import_data.csv")
    run(executor, """create table tbl1(one varchar(10), two smallint)""")

    # execute
    run(executor, """.import %s tbl1""" % data_file)

    # verify
    sql = "select * from tbl1;"
    runner = CliRunner()
    result = runner.invoke(cli, args=CLI_ARGS + ["--csv"], input=sql)
    # Bug fix: the expected header previously read `one","two"` -- missing
    # its opening double quote -- and only passed because of the substring
    # check.  Pin the fully quoted CSV instead.
    expected = '"one","two"\n"t1","11"\n"t2","22"\n'
    assert result.exit_code == 0
    assert expected in "".join(result.output)
def test_startup_commands(executor):
    """Startup commands from the config file are parsed into a list.

    NOTE(review): despite taking *executor*, this test is not @dbtest-marked
    and only checks config parsing; executing the commands is still TODO
    (see the comment below).
    """
    m = LiteCli(liteclirc=default_config_file)
    assert m.startup_commands['commands'] == ['create table startupcommands(a text)', "insert into startupcommands values('abc')"]

    # implement tests on executions of the startupcommands
|
{"/tests/test_dbspecial.py": ["/litecli/packages/special/utils.py"], "/litecli/packages/special/iocommands.py": ["/litecli/packages/special/__init__.py", "/litecli/packages/special/main.py", "/litecli/packages/special/favoritequeries.py", "/litecli/packages/special/utils.py"], "/litecli/packages/special/dbcommands.py": ["/litecli/__init__.py", "/litecli/packages/special/__init__.py", "/litecli/packages/special/utils.py", "/litecli/packages/special/main.py"], "/litecli/encodingutils.py": ["/litecli/compat.py"], "/tests/utils.py": ["/litecli/main.py"], "/tests/test_main.py": ["/litecli/main.py", "/litecli/packages/special/main.py"], "/tests/test_parseutils.py": ["/litecli/packages/parseutils.py"], "/litecli/sqlexecute.py": ["/litecli/packages/special/utils.py"], "/litecli/sqlcompleter.py": ["/litecli/packages/parseutils.py", "/litecli/packages/special/iocommands.py", "/litecli/packages/filepaths.py"], "/tests/test_smart_completion_public_schema_only.py": ["/litecli/sqlcompleter.py"], "/litecli/config.py": ["/litecli/__init__.py"], "/litecli/packages/special/main.py": ["/litecli/packages/special/__init__.py"], "/litecli/main.py": ["/litecli/packages/special/main.py", "/litecli/sqlcompleter.py", "/litecli/clitoolbar.py", "/litecli/sqlexecute.py", "/litecli/clibuffer.py", "/litecli/config.py", "/litecli/encodingutils.py", "/litecli/lexer.py", "/litecli/__init__.py", "/litecli/packages/filepaths.py"], "/litecli/packages/filepaths.py": ["/litecli/encodingutils.py"]}
|
45,348,497
|
dbcli/litecli
|
refs/heads/main
|
/litecli/packages/special/utils.py
|
import os
import subprocess
def handle_cd_command(arg):
    """Handles a `cd` shell command by calling python's os.chdir.

    Returns a ``(success, message)`` tuple; *message* is ``None`` on
    success and an error string otherwise.
    """
    pieces = arg.split("cd ")
    target = pieces[-1] if len(pieces) > 1 else None
    if not target:
        return False, "No folder name was provided."
    try:
        os.chdir(target)
        # Echo the new working directory, mirroring interactive shells.
        subprocess.call(["pwd"])
    except OSError as exc:
        return False, exc.strerror
    return True, None
def format_uptime(uptime_in_seconds):
    """Format number of seconds into human-readable string.

    :param uptime_in_seconds: The server uptime in seconds.
    :returns: A human-readable string representing the uptime.

    >>> uptime = format_uptime('56892')
    >>> print(uptime)
    15 hours 48 min 12 sec
    """
    minutes, seconds = divmod(int(uptime_in_seconds), 60)
    hours, minutes = divmod(minutes, 60)
    days, hours = divmod(hours, 24)

    parts = []
    for amount, label in (
        (days, "days"),
        (hours, "hours"),
        (minutes, "min"),
        (seconds, "sec"),
    ):
        # Skip leading zero units (e.g. "0 days 0 hours ...").
        if amount == 0 and not parts:
            continue
        # Singularize "days"/"hours" when the amount is exactly 1.
        if amount == 1 and label.endswith("s"):
            label = label[:-1]
        parts.append("{0} {1}".format(amount, label))

    return " ".join(parts)
# Closed set of SQLite shell dot-commands (see `.help` in the sqlite3 CLI).
# Hoisted to module level as a frozenset so the list is built once and
# membership tests are O(1) instead of rebuilding a 60+-element list per call.
_SQLITE3_DOT_COMMANDS = frozenset((
    '.archive', '.auth', '.backup', '.bail', '.binary', '.cd', '.changes',
    '.check', '.clone', '.connection', '.databases', '.dbconfig', '.dbinfo',
    '.dump', '.echo', '.eqp', '.excel', '.exit', '.expert', '.explain',
    '.filectrl', '.fullschema', '.headers', '.help', '.import', '.imposter',
    '.indexes', '.limit', '.lint', '.load', '.log', '.mode', '.nonce',
    '.nullvalue', '.once', '.open', '.output', '.parameter', '.print',
    '.progress', '.prompt', '.quit', '.read', '.recover', '.restore',
    '.save', '.scanstats', '.schema', '.selftest', '.separator', '.session',
    '.sha3sum', '.shell', '.show', '.stats', '.system', '.tables',
    '.testcase', '.testctrl', '.timeout', '.timer', '.trace', '.vfsinfo',
    '.vfslist', '.vfsname', '.width',
))


def check_if_sqlitedotcommand(command):
    """Does a check if the command supplied is in the list of SQLite dot commands.

    :param command: A command (str) supplied from the user
    :returns: True/False
    """
    if not isinstance(command, str):
        # Non-string input (None, ints, ...) is never a dot command.
        return False
    # Only the first whitespace-separated token identifies the command;
    # comparison is case-insensitive.
    return command.split(' ', 1)[0].lower() in _SQLITE3_DOT_COMMANDS
|
{"/tests/test_dbspecial.py": ["/litecli/packages/special/utils.py"], "/litecli/packages/special/iocommands.py": ["/litecli/packages/special/__init__.py", "/litecli/packages/special/main.py", "/litecli/packages/special/favoritequeries.py", "/litecli/packages/special/utils.py"], "/litecli/packages/special/dbcommands.py": ["/litecli/__init__.py", "/litecli/packages/special/__init__.py", "/litecli/packages/special/utils.py", "/litecli/packages/special/main.py"], "/litecli/encodingutils.py": ["/litecli/compat.py"], "/tests/utils.py": ["/litecli/main.py"], "/tests/test_main.py": ["/litecli/main.py", "/litecli/packages/special/main.py"], "/tests/test_parseutils.py": ["/litecli/packages/parseutils.py"], "/litecli/sqlexecute.py": ["/litecli/packages/special/utils.py"], "/litecli/sqlcompleter.py": ["/litecli/packages/parseutils.py", "/litecli/packages/special/iocommands.py", "/litecli/packages/filepaths.py"], "/tests/test_smart_completion_public_schema_only.py": ["/litecli/sqlcompleter.py"], "/litecli/config.py": ["/litecli/__init__.py"], "/litecli/packages/special/main.py": ["/litecli/packages/special/__init__.py"], "/litecli/main.py": ["/litecli/packages/special/main.py", "/litecli/sqlcompleter.py", "/litecli/clitoolbar.py", "/litecli/sqlexecute.py", "/litecli/clibuffer.py", "/litecli/config.py", "/litecli/encodingutils.py", "/litecli/lexer.py", "/litecli/__init__.py", "/litecli/packages/filepaths.py"], "/litecli/packages/filepaths.py": ["/litecli/encodingutils.py"]}
|
45,348,498
|
dbcli/litecli
|
refs/heads/main
|
/tests/test_parseutils.py
|
import pytest
from litecli.packages.parseutils import (
extract_tables,
query_starts_with,
queries_start_with,
is_destructive,
)
# extract_tables() returns one (schema, table, alias) triple per table
# referenced by the statement; None marks a missing schema or alias.
def test_empty_string():
    tables = extract_tables("")
    assert tables == []


def test_simple_select_single_table():
    tables = extract_tables("select * from abc")
    assert tables == [(None, "abc", None)]


def test_simple_select_single_table_schema_qualified():
    tables = extract_tables("select * from abc.def")
    assert tables == [("abc", "def", None)]


def test_simple_select_multiple_tables():
    tables = extract_tables("select * from abc, def")
    assert sorted(tables) == [(None, "abc", None), (None, "def", None)]


def test_simple_select_multiple_tables_schema_qualified():
    tables = extract_tables("select * from abc.def, ghi.jkl")
    assert sorted(tables) == [("abc", "def", None), ("ghi", "jkl", None)]


def test_simple_select_with_cols_single_table():
    tables = extract_tables("select a,b from abc")
    assert tables == [(None, "abc", None)]


def test_simple_select_with_cols_single_table_schema_qualified():
    tables = extract_tables("select a,b from abc.def")
    assert tables == [("abc", "def", None)]


def test_simple_select_with_cols_multiple_tables():
    tables = extract_tables("select a,b from abc, def")
    assert sorted(tables) == [(None, "abc", None), (None, "def", None)]


def test_simple_select_with_cols_multiple_tables_with_schema():
    tables = extract_tables("select a,b from abc.def, def.ghi")
    assert sorted(tables) == [("abc", "def", None), ("def", "ghi", None)]


# Partially-typed statements (as seen mid-completion) must still parse.
def test_select_with_hanging_comma_single_table():
    tables = extract_tables("select a, from abc")
    assert tables == [(None, "abc", None)]


def test_select_with_hanging_comma_multiple_tables():
    tables = extract_tables("select a, from abc, def")
    assert sorted(tables) == [(None, "abc", None), (None, "def", None)]


def test_select_with_hanging_period_multiple_tables():
    tables = extract_tables("SELECT t1. FROM tabl1 t1, tabl2 t2")
    assert sorted(tables) == [(None, "tabl1", "t1"), (None, "tabl2", "t2")]


def test_simple_insert_single_table():
    tables = extract_tables('insert into abc (id, name) values (1, "def")')

    # sqlparse mistakenly assigns an alias to the table
    # assert tables == [(None, 'abc', None)]
    assert tables == [(None, "abc", "abc")]


@pytest.mark.xfail
def test_simple_insert_single_table_schema_qualified():
    tables = extract_tables('insert into abc.def (id, name) values (1, "def")')
    assert tables == [("abc", "def", None)]


def test_simple_update_table():
    tables = extract_tables("update abc set id = 1")
    assert tables == [(None, "abc", None)]


def test_simple_update_table_with_schema():
    tables = extract_tables("update abc.def set id = 1")
    assert tables == [("abc", "def", None)]


def test_join_table():
    tables = extract_tables("SELECT * FROM abc a JOIN def d ON a.id = d.num")
    assert sorted(tables) == [(None, "abc", "a"), (None, "def", "d")]


def test_join_table_schema_qualified():
    tables = extract_tables("SELECT * FROM abc.def x JOIN ghi.jkl y ON x.id = y.num")
    assert tables == [("abc", "def", "x"), ("ghi", "jkl", "y")]


def test_join_as_table():
    tables = extract_tables("SELECT * FROM my_table AS m WHERE m.a > 5")
    assert tables == [(None, "my_table", "m")]
def test_query_starts_with():
    query = "USE test;"
    assert query_starts_with(query, ("use",)) is True

    query = "DROP DATABASE test;"
    assert query_starts_with(query, ("use",)) is False


def test_query_starts_with_comment():
    # Leading comments must be skipped when matching the first keyword.
    query = "# comment\nUSE test;"
    assert query_starts_with(query, ("use",)) is True


def test_queries_start_with():
    # True when ANY statement in the batch starts with one of the keywords.
    sql = "# comment\n" "show databases;" "use foo;"
    assert queries_start_with(sql, ("show", "select")) is True
    assert queries_start_with(sql, ("use", "drop")) is True
    assert queries_start_with(sql, ("delete", "update")) is False


def test_is_destructive():
    sql = "use test;\n" "show databases;\n" "drop database foo;"
    assert is_destructive(sql) is True
|
{"/tests/test_dbspecial.py": ["/litecli/packages/special/utils.py"], "/litecli/packages/special/iocommands.py": ["/litecli/packages/special/__init__.py", "/litecli/packages/special/main.py", "/litecli/packages/special/favoritequeries.py", "/litecli/packages/special/utils.py"], "/litecli/packages/special/dbcommands.py": ["/litecli/__init__.py", "/litecli/packages/special/__init__.py", "/litecli/packages/special/utils.py", "/litecli/packages/special/main.py"], "/litecli/encodingutils.py": ["/litecli/compat.py"], "/tests/utils.py": ["/litecli/main.py"], "/tests/test_main.py": ["/litecli/main.py", "/litecli/packages/special/main.py"], "/tests/test_parseutils.py": ["/litecli/packages/parseutils.py"], "/litecli/sqlexecute.py": ["/litecli/packages/special/utils.py"], "/litecli/sqlcompleter.py": ["/litecli/packages/parseutils.py", "/litecli/packages/special/iocommands.py", "/litecli/packages/filepaths.py"], "/tests/test_smart_completion_public_schema_only.py": ["/litecli/sqlcompleter.py"], "/litecli/config.py": ["/litecli/__init__.py"], "/litecli/packages/special/main.py": ["/litecli/packages/special/__init__.py"], "/litecli/main.py": ["/litecli/packages/special/main.py", "/litecli/sqlcompleter.py", "/litecli/clitoolbar.py", "/litecli/sqlexecute.py", "/litecli/clibuffer.py", "/litecli/config.py", "/litecli/encodingutils.py", "/litecli/lexer.py", "/litecli/__init__.py", "/litecli/packages/filepaths.py"], "/litecli/packages/filepaths.py": ["/litecli/encodingutils.py"]}
|
45,348,499
|
dbcli/litecli
|
refs/heads/main
|
/litecli/__init__.py
|
# Single source of truth for the package version; setup.py parses this line
# with a regex instead of importing the package.
__version__ = "1.9.0"
|
{"/tests/test_dbspecial.py": ["/litecli/packages/special/utils.py"], "/litecli/packages/special/iocommands.py": ["/litecli/packages/special/__init__.py", "/litecli/packages/special/main.py", "/litecli/packages/special/favoritequeries.py", "/litecli/packages/special/utils.py"], "/litecli/packages/special/dbcommands.py": ["/litecli/__init__.py", "/litecli/packages/special/__init__.py", "/litecli/packages/special/utils.py", "/litecli/packages/special/main.py"], "/litecli/encodingutils.py": ["/litecli/compat.py"], "/tests/utils.py": ["/litecli/main.py"], "/tests/test_main.py": ["/litecli/main.py", "/litecli/packages/special/main.py"], "/tests/test_parseutils.py": ["/litecli/packages/parseutils.py"], "/litecli/sqlexecute.py": ["/litecli/packages/special/utils.py"], "/litecli/sqlcompleter.py": ["/litecli/packages/parseutils.py", "/litecli/packages/special/iocommands.py", "/litecli/packages/filepaths.py"], "/tests/test_smart_completion_public_schema_only.py": ["/litecli/sqlcompleter.py"], "/litecli/config.py": ["/litecli/__init__.py"], "/litecli/packages/special/main.py": ["/litecli/packages/special/__init__.py"], "/litecli/main.py": ["/litecli/packages/special/main.py", "/litecli/sqlcompleter.py", "/litecli/clitoolbar.py", "/litecli/sqlexecute.py", "/litecli/clibuffer.py", "/litecli/config.py", "/litecli/encodingutils.py", "/litecli/lexer.py", "/litecli/__init__.py", "/litecli/packages/filepaths.py"], "/litecli/packages/filepaths.py": ["/litecli/encodingutils.py"]}
|
45,348,500
|
dbcli/litecli
|
refs/heads/main
|
/litecli/compat.py
|
# -*- coding: utf-8 -*-
"""Platform and Python version compatibility support."""
import sys

# True on CPython 2.x / 3.x respectively (checked via the major version).
PY2 = sys.version_info[0] == 2
PY3 = sys.version_info[0] == 3
# True on native Windows or Cygwin.
WIN = sys.platform in ("win32", "cygwin")
|
{"/tests/test_dbspecial.py": ["/litecli/packages/special/utils.py"], "/litecli/packages/special/iocommands.py": ["/litecli/packages/special/__init__.py", "/litecli/packages/special/main.py", "/litecli/packages/special/favoritequeries.py", "/litecli/packages/special/utils.py"], "/litecli/packages/special/dbcommands.py": ["/litecli/__init__.py", "/litecli/packages/special/__init__.py", "/litecli/packages/special/utils.py", "/litecli/packages/special/main.py"], "/litecli/encodingutils.py": ["/litecli/compat.py"], "/tests/utils.py": ["/litecli/main.py"], "/tests/test_main.py": ["/litecli/main.py", "/litecli/packages/special/main.py"], "/tests/test_parseutils.py": ["/litecli/packages/parseutils.py"], "/litecli/sqlexecute.py": ["/litecli/packages/special/utils.py"], "/litecli/sqlcompleter.py": ["/litecli/packages/parseutils.py", "/litecli/packages/special/iocommands.py", "/litecli/packages/filepaths.py"], "/tests/test_smart_completion_public_schema_only.py": ["/litecli/sqlcompleter.py"], "/litecli/config.py": ["/litecli/__init__.py"], "/litecli/packages/special/main.py": ["/litecli/packages/special/__init__.py"], "/litecli/main.py": ["/litecli/packages/special/main.py", "/litecli/sqlcompleter.py", "/litecli/clitoolbar.py", "/litecli/sqlexecute.py", "/litecli/clibuffer.py", "/litecli/config.py", "/litecli/encodingutils.py", "/litecli/lexer.py", "/litecli/__init__.py", "/litecli/packages/filepaths.py"], "/litecli/packages/filepaths.py": ["/litecli/encodingutils.py"]}
|
45,348,501
|
dbcli/litecli
|
refs/heads/main
|
/setup.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import ast
from io import open
import re
from setuptools import setup, find_packages
# Extract __version__ from litecli/__init__.py with a regex instead of
# importing the package (importing would require its dependencies to be
# installed before setup runs).
_version_re = re.compile(r"__version__\s+=\s+(.*)")

with open("litecli/__init__.py", "rb") as f:
    version = str(
        # literal_eval strips the quotes from the matched RHS safely.
        ast.literal_eval(_version_re.search(f.read().decode("utf-8")).group(1))
    )
def open_file(filename):
    """Open and read the file *filename*, returning its text content."""
    # Bug fix: read as UTF-8 explicitly.  The previous call relied on the
    # locale default encoding (e.g. cp1252 on Windows), which can fail on
    # non-ASCII characters in README.md.
    with open(filename, encoding="utf-8") as f:
        return f.read()
# Long description displayed on the PyPI project page.
readme = open_file("README.md")

# Runtime dependencies; the version floors are the oldest tested releases.
install_requirements = [
    "click >= 4.1",
    "Pygments>=1.6",
    "prompt_toolkit>=3.0.3,<4.0.0",
    "sqlparse",
    "configobj >= 5.0.5",
    "cli_helpers[styles] >= 2.2.1",
]
# Package metadata; `version` and `readme` are computed above.
setup(
    name="litecli",
    author="dbcli",
    author_email="litecli-users@googlegroups.com",
    license="BSD",
    version=version,
    url="https://github.com/dbcli/litecli",
    packages=find_packages(),
    # Ship the default config file and AUTHORS list inside the package.
    package_data={"litecli": ["liteclirc", "AUTHORS"]},
    description="CLI for SQLite Databases with auto-completion and syntax "
    "highlighting.",
    long_description=readme,
    long_description_content_type="text/markdown",
    install_requires=install_requirements,
    # cmdclass={"test": test, "lint": lint},
    entry_points={
        # Installs the `litecli` console command.
        "console_scripts": ["litecli = litecli.main:cli"],
        "distutils.commands": ["lint = tasks:lint", "test = tasks:test"],
    },
    classifiers=[
        "Intended Audience :: Developers",
        "License :: OSI Approved :: BSD License",
        "Operating System :: Unix",
        "Programming Language :: Python",
        "Programming Language :: Python :: 3",
        "Programming Language :: Python :: 3.7",
        "Programming Language :: Python :: 3.8",
        "Programming Language :: Python :: 3.9",
        "Programming Language :: Python :: 3.10",
        "Programming Language :: SQL",
        "Topic :: Database",
        "Topic :: Database :: Front-Ends",
        "Topic :: Software Development",
        "Topic :: Software Development :: Libraries :: Python Modules",
    ],
)
|
{"/tests/test_dbspecial.py": ["/litecli/packages/special/utils.py"], "/litecli/packages/special/iocommands.py": ["/litecli/packages/special/__init__.py", "/litecli/packages/special/main.py", "/litecli/packages/special/favoritequeries.py", "/litecli/packages/special/utils.py"], "/litecli/packages/special/dbcommands.py": ["/litecli/__init__.py", "/litecli/packages/special/__init__.py", "/litecli/packages/special/utils.py", "/litecli/packages/special/main.py"], "/litecli/encodingutils.py": ["/litecli/compat.py"], "/tests/utils.py": ["/litecli/main.py"], "/tests/test_main.py": ["/litecli/main.py", "/litecli/packages/special/main.py"], "/tests/test_parseutils.py": ["/litecli/packages/parseutils.py"], "/litecli/sqlexecute.py": ["/litecli/packages/special/utils.py"], "/litecli/sqlcompleter.py": ["/litecli/packages/parseutils.py", "/litecli/packages/special/iocommands.py", "/litecli/packages/filepaths.py"], "/tests/test_smart_completion_public_schema_only.py": ["/litecli/sqlcompleter.py"], "/litecli/config.py": ["/litecli/__init__.py"], "/litecli/packages/special/main.py": ["/litecli/packages/special/__init__.py"], "/litecli/main.py": ["/litecli/packages/special/main.py", "/litecli/sqlcompleter.py", "/litecli/clitoolbar.py", "/litecli/sqlexecute.py", "/litecli/clibuffer.py", "/litecli/config.py", "/litecli/encodingutils.py", "/litecli/lexer.py", "/litecli/__init__.py", "/litecli/packages/filepaths.py"], "/litecli/packages/filepaths.py": ["/litecli/encodingutils.py"]}
|
45,348,502
|
dbcli/litecli
|
refs/heads/main
|
/litecli/sqlexecute.py
|
import logging
import sqlite3
import uuid
from contextlib import closing
from sqlite3 import OperationalError
from litecli.packages.special.utils import check_if_sqlitedotcommand
import sqlparse
import os.path
from .packages import special
_logger = logging.getLogger(__name__)
# FIELD_TYPES = decoders.copy()
# FIELD_TYPES.update({
# FIELD_TYPE.NULL: type(None)
# })
class SQLExecute(object):
    """Owns the sqlite3 connection for litecli and executes user input,
    dispatching between special (backslash/dot) commands and plain SQL."""

    # One row per attached database: (seq, name, file).
    databases_query = """
    PRAGMA database_list
    """

    # Names of user tables and views (system `sqlite_*` objects excluded).
    tables_query = """
    SELECT name
    FROM sqlite_master
    WHERE type IN ('table','view') AND name NOT LIKE 'sqlite_%'
    ORDER BY 1
    """

    # One (table, column) row per column of every user table/view, used to
    # feed the auto-completer.
    table_columns_query = """
    SELECT m.name as tableName, p.name as columnName
    FROM sqlite_master m
    LEFT OUTER JOIN pragma_table_info((m.name)) p ON m.name <> p.name
    WHERE m.type IN ('table','view') AND m.name NOT LIKE 'sqlite_%'
    ORDER BY tableName, columnName
    """

    indexes_query = """
    SELECT name
    FROM sqlite_master
    WHERE type = 'index' AND name NOT LIKE 'sqlite_%'
    ORDER BY 1
    """

    # NOTE(review): INFORMATION_SCHEMA does not exist in SQLite (this query
    # looks inherited from mycli); functions() below would raise if called.
    functions_query = '''SELECT ROUTINE_NAME FROM INFORMATION_SCHEMA.ROUTINES
    WHERE ROUTINE_TYPE="FUNCTION" AND ROUTINE_SCHEMA = "%s"'''

    def __init__(self, database):
        # Path of the database file; falsy means "not connected yet".
        self.dbname = database
        self._server_type = None
        self.connection_id = None
        self.conn = None
        if not database:
            # Stay disconnected; a later `.open`/`use` triggers connect().
            _logger.debug("Database is not specified. Skip connection.")
            return
        self.connect()

    def connect(self, database=None):
        """Open (or re-open) the connection to *database* or self.dbname."""
        db = database or self.dbname
        _logger.debug("Connection DB Params: \n" "\tdatabase: %r", database)
        db_name = os.path.expanduser(db)
        db_dir_name = os.path.dirname(os.path.abspath(db_name))
        # sqlite3.connect silently creates a missing file but errors
        # cryptically on a missing directory -- fail early instead.
        if not os.path.exists(db_dir_name):
            raise Exception("Path does not exist: {}".format(db_dir_name))

        # isolation_level=None puts sqlite3 in autocommit mode.
        conn = sqlite3.connect(database=db_name, isolation_level=None)
        # Decode TEXT as UTF-8, escaping undecodable bytes instead of raising.
        conn.text_factory = lambda x: x.decode("utf-8", "backslashreplace")
        if self.conn:
            self.conn.close()

        self.conn = conn
        # Update them after the connection is made to ensure that it was a
        # successful connection.
        self.dbname = db
        # retrieve connection id
        self.reset_connection_id()

    def run(self, statement):
        """Execute the sql in the database and return the results. The results
        are a list of tuples. Each tuple has 4 values
        (title, rows, headers, status).
        """
        # Remove spaces and EOL
        statement = statement.strip()
        if not statement:  # Empty string
            yield (None, None, None, None)

        # Split the sql into separate queries and run each one.
        # Unless it's saving a favorite query, in which case we
        # want to save them all together.
        if statement.startswith("\\fs"):
            components = [statement]
        else:
            components = sqlparse.split(statement)

        for sql in components:
            # Remove spaces, eol and semi-colons.
            sql = sql.rstrip(";")

            # \G is treated specially since we have to set the expanded output.
            if sql.endswith("\\G"):
                special.set_expanded_output(True)
                sql = sql[:-2].strip()

            # Without a connection, only commands that can establish one
            # (or exit/help) are allowed through.
            if not self.conn and not (
                sql.startswith(".open")
                or sql.lower().startswith("use")
                or sql.startswith("\\u")
                or sql.startswith("\\?")
                or sql.startswith("\\q")
                or sql.startswith("help")
                or sql.startswith("exit")
                or sql.startswith("quit")
            ):
                _logger.debug(
                    "Not connected to database. Will not run statement: %s.", sql
                )
                raise OperationalError("Not connected to database.")
                # yield ('Not connected to database', None, None, None)
                # return

            cur = self.conn.cursor() if self.conn else None
            try:  # Special command
                _logger.debug("Trying a dbspecial command. sql: %r", sql)
                for result in special.execute(cur, sql):
                    yield result
            except special.CommandNotFound:  # Regular SQL
                # Unsupported sqlite3-shell dot commands get a friendly
                # message instead of a SQL syntax error.
                if check_if_sqlitedotcommand(sql):
                    yield ('dot command not implemented', None, None, None)
                else:
                    _logger.debug("Regular sql statement. sql: %r", sql)
                    cur.execute(sql)
                    yield self.get_result(cur)

    def get_result(self, cursor):
        """Get the current result's data from the cursor."""
        title = headers = None

        # cursor.description is not None for queries that return result sets,
        # e.g. SELECT.
        if cursor.description is not None:
            headers = [x[0] for x in cursor.description]
            status = "{0} row{1} in set"
            # Materialize the rows so len() works and the cursor is drained.
            cursor = list(cursor)
            rowcount = len(cursor)
        else:
            _logger.debug("No rows in result.")
            status = "Query OK, {0} row{1} affected"
            # rowcount is -1 when not applicable to the statement type.
            rowcount = 0 if cursor.rowcount == -1 else cursor.rowcount
            cursor = None

        status = status.format(rowcount, "" if rowcount == 1 else "s")

        return (title, cursor, headers, status)

    def tables(self):
        """Yields table names"""
        with closing(self.conn.cursor()) as cur:
            _logger.debug("Tables Query. sql: %r", self.tables_query)
            cur.execute(self.tables_query)
            for row in cur:
                yield row

    def table_columns(self):
        """Yields column names"""
        with closing(self.conn.cursor()) as cur:
            _logger.debug("Columns Query. sql: %r", self.table_columns_query)
            cur.execute(self.table_columns_query)
            for row in cur:
                yield row

    def databases(self):
        """Yields names of attached databases; nothing when disconnected."""
        if not self.conn:
            return

        with closing(self.conn.cursor()) as cur:
            _logger.debug("Databases Query. sql: %r", self.databases_query)
            for row in cur.execute(self.databases_query):
                # Column 1 of PRAGMA database_list is the database name.
                yield row[1]

    def functions(self):
        """Yields tuples of (schema_name, function_name)

        NOTE(review): relies on functions_query, which is not valid SQLite;
        expect an OperationalError if this is ever invoked.
        """
        with closing(self.conn.cursor()) as cur:
            _logger.debug("Functions Query. sql: %r", self.functions_query)
            cur.execute(self.functions_query % self.dbname)
            for row in cur:
                yield row

    def show_candidates(self):
        # NOTE(review): self.show_candidates_query is never defined on this
        # class (likely leftover from mycli) -- calling this method raises
        # AttributeError before the DatabaseError handling below can apply.
        with closing(self.conn.cursor()) as cur:
            _logger.debug("Show Query. sql: %r", self.show_candidates_query)
            try:
                cur.execute(self.show_candidates_query)
            except sqlite3.DatabaseError as e:
                _logger.error("No show completions due to %r", e)
                yield ""
            else:
                for row in cur:
                    yield (row[0].split(None, 1)[-1],)

    def server_type(self):
        # Fixed identifier consumed by the toolbar/prompt rendering.
        self._server_type = ("sqlite3", "3")
        return self._server_type

    def get_connection_id(self):
        if not self.connection_id:
            self.reset_connection_id()
        return self.connection_id

    def reset_connection_id(self):
        # Remember current connection id
        _logger.debug("Get current connection id")
        # res = self.run('select connection_id()')
        # sqlite has no connection_id(); a random UUID stands in for one.
        self.connection_id = uuid.uuid4()
        # for title, cur, headers, status in res:
        #     self.connection_id = cur.fetchone()[0]
        _logger.debug("Current connection id: %s", self.connection_id)
|
{"/tests/test_dbspecial.py": ["/litecli/packages/special/utils.py"], "/litecli/packages/special/iocommands.py": ["/litecli/packages/special/__init__.py", "/litecli/packages/special/main.py", "/litecli/packages/special/favoritequeries.py", "/litecli/packages/special/utils.py"], "/litecli/packages/special/dbcommands.py": ["/litecli/__init__.py", "/litecli/packages/special/__init__.py", "/litecli/packages/special/utils.py", "/litecli/packages/special/main.py"], "/litecli/encodingutils.py": ["/litecli/compat.py"], "/tests/utils.py": ["/litecli/main.py"], "/tests/test_main.py": ["/litecli/main.py", "/litecli/packages/special/main.py"], "/tests/test_parseutils.py": ["/litecli/packages/parseutils.py"], "/litecli/sqlexecute.py": ["/litecli/packages/special/utils.py"], "/litecli/sqlcompleter.py": ["/litecli/packages/parseutils.py", "/litecli/packages/special/iocommands.py", "/litecli/packages/filepaths.py"], "/tests/test_smart_completion_public_schema_only.py": ["/litecli/sqlcompleter.py"], "/litecli/config.py": ["/litecli/__init__.py"], "/litecli/packages/special/main.py": ["/litecli/packages/special/__init__.py"], "/litecli/main.py": ["/litecli/packages/special/main.py", "/litecli/sqlcompleter.py", "/litecli/clitoolbar.py", "/litecli/sqlexecute.py", "/litecli/clibuffer.py", "/litecli/config.py", "/litecli/encodingutils.py", "/litecli/lexer.py", "/litecli/__init__.py", "/litecli/packages/filepaths.py"], "/litecli/packages/filepaths.py": ["/litecli/encodingutils.py"]}
|
45,348,503
|
dbcli/litecli
|
refs/heads/main
|
/litecli/clibuffer.py
|
from __future__ import unicode_literals
from prompt_toolkit.enums import DEFAULT_BUFFER
from prompt_toolkit.filters import Condition
from prompt_toolkit.application import get_app
def cli_is_multiline(cli):
    """Return a prompt_toolkit Condition that is True while the current
    input still needs more lines (i.e. should NOT be submitted yet)."""

    @Condition
    def cond():
        # Text of the default input buffer at the moment the key binding
        # evaluates this condition.
        doc = get_app().layout.get_buffer_by_name(DEFAULT_BUFFER).document
        if not cli.multi_line:
            return False
        else:
            # Keep accepting lines until the text ends in a recognized
            # terminator (see _multiline_exception).
            return not _multiline_exception(doc.text)

    return cond
def _multiline_exception(text):
orig = text
text = text.strip()
# Multi-statement favorite query is a special case. Because there will
# be a semicolon separating statements, we can't consider semicolon an
# EOL. Let's consider an empty line an EOL instead.
if text.startswith("\\fs"):
return orig.endswith("\n")
return (
text.startswith("\\") # Special Command
or text.endswith(";") # Ended with a semi-colon
or text.endswith("\\g") # Ended with \g
or text.endswith("\\G") # Ended with \G
or (text == "exit") # Exit doesn't need semi-colon
or (text == "quit") # Quit doesn't need semi-colon
or (text == ":q") # To all the vim fans out there
or (text == "") # Just a plain enter without any text
)
|
{"/tests/test_dbspecial.py": ["/litecli/packages/special/utils.py"], "/litecli/packages/special/iocommands.py": ["/litecli/packages/special/__init__.py", "/litecli/packages/special/main.py", "/litecli/packages/special/favoritequeries.py", "/litecli/packages/special/utils.py"], "/litecli/packages/special/dbcommands.py": ["/litecli/__init__.py", "/litecli/packages/special/__init__.py", "/litecli/packages/special/utils.py", "/litecli/packages/special/main.py"], "/litecli/encodingutils.py": ["/litecli/compat.py"], "/tests/utils.py": ["/litecli/main.py"], "/tests/test_main.py": ["/litecli/main.py", "/litecli/packages/special/main.py"], "/tests/test_parseutils.py": ["/litecli/packages/parseutils.py"], "/litecli/sqlexecute.py": ["/litecli/packages/special/utils.py"], "/litecli/sqlcompleter.py": ["/litecli/packages/parseutils.py", "/litecli/packages/special/iocommands.py", "/litecli/packages/filepaths.py"], "/tests/test_smart_completion_public_schema_only.py": ["/litecli/sqlcompleter.py"], "/litecli/config.py": ["/litecli/__init__.py"], "/litecli/packages/special/main.py": ["/litecli/packages/special/__init__.py"], "/litecli/main.py": ["/litecli/packages/special/main.py", "/litecli/sqlcompleter.py", "/litecli/clitoolbar.py", "/litecli/sqlexecute.py", "/litecli/clibuffer.py", "/litecli/config.py", "/litecli/encodingutils.py", "/litecli/lexer.py", "/litecli/__init__.py", "/litecli/packages/filepaths.py"], "/litecli/packages/filepaths.py": ["/litecli/encodingutils.py"]}
|
45,348,504
|
dbcli/litecli
|
refs/heads/main
|
/litecli/packages/special/favoritequeries.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
class FavoriteQueries(object):
    """Named, persisted shortcuts for frequently used queries, stored in
    the config under the ``favorite_queries`` section."""

    section_name = "favorite_queries"

    usage = """
Favorite Queries are a way to save frequently used queries
with a short name.
Examples:

    # Save a new favorite query.
    > \\fs simple select * from abc where a is not Null;

    # List all favorite queries.
    > \\f
    ╒════════╤═══════════════════════════════════════╕
    │ Name   │ Query                                 │
    ╞════════╪═══════════════════════════════════════╡
    │ simple │ SELECT * FROM abc where a is not NULL │
    ╘════════╧═══════════════════════════════════════╛

    # Run a favorite query.
    > \\f simple
    ╒════════╤════════╕
    │ a      │ b      │
    ╞════════╪════════╡
    │ 日本語 │ 日本語 │
    ╘════════╧════════╛

    # Delete a favorite query.
    > \\fd simple
    simple: Deleted
"""

    def __init__(self, config):
        # config behaves like a dict and supports .write() for persistence.
        self.config = config

    def list(self):
        """Return the mapping of saved queries (empty list if none)."""
        return self.config.get(self.section_name, [])

    def get(self, name):
        """Return the query saved under *name*, or None."""
        queries = self.config.get(self.section_name, {})
        return queries.get(name)

    def save(self, name, query):
        """Store *query* under *name* and persist the config."""
        if self.section_name not in self.config:
            self.config[self.section_name] = {}
        section = self.config[self.section_name]
        section[name] = query
        self.config.write()

    def delete(self, name):
        """Remove *name*, persist, and return a status message."""
        section = self.config.get(self.section_name, {})
        try:
            del section[name]
        except KeyError:
            return "%s: Not Found." % name
        self.config.write()
        return "%s: Deleted" % name
|
{"/tests/test_dbspecial.py": ["/litecli/packages/special/utils.py"], "/litecli/packages/special/iocommands.py": ["/litecli/packages/special/__init__.py", "/litecli/packages/special/main.py", "/litecli/packages/special/favoritequeries.py", "/litecli/packages/special/utils.py"], "/litecli/packages/special/dbcommands.py": ["/litecli/__init__.py", "/litecli/packages/special/__init__.py", "/litecli/packages/special/utils.py", "/litecli/packages/special/main.py"], "/litecli/encodingutils.py": ["/litecli/compat.py"], "/tests/utils.py": ["/litecli/main.py"], "/tests/test_main.py": ["/litecli/main.py", "/litecli/packages/special/main.py"], "/tests/test_parseutils.py": ["/litecli/packages/parseutils.py"], "/litecli/sqlexecute.py": ["/litecli/packages/special/utils.py"], "/litecli/sqlcompleter.py": ["/litecli/packages/parseutils.py", "/litecli/packages/special/iocommands.py", "/litecli/packages/filepaths.py"], "/tests/test_smart_completion_public_schema_only.py": ["/litecli/sqlcompleter.py"], "/litecli/config.py": ["/litecli/__init__.py"], "/litecli/packages/special/main.py": ["/litecli/packages/special/__init__.py"], "/litecli/main.py": ["/litecli/packages/special/main.py", "/litecli/sqlcompleter.py", "/litecli/clitoolbar.py", "/litecli/sqlexecute.py", "/litecli/clibuffer.py", "/litecli/config.py", "/litecli/encodingutils.py", "/litecli/lexer.py", "/litecli/__init__.py", "/litecli/packages/filepaths.py"], "/litecli/packages/filepaths.py": ["/litecli/encodingutils.py"]}
|
45,348,505
|
dbcli/litecli
|
refs/heads/main
|
/litecli/clitoolbar.py
|
from __future__ import unicode_literals
from prompt_toolkit.key_binding.vi_state import InputMode
from prompt_toolkit.enums import EditingMode
from prompt_toolkit.application import get_app
def create_toolbar_tokens_func(cli, show_fish_help):
    """
    Return a function that generates the toolbar tokens.

    :param cli: the LiteCli app object; .multi_line, .prompt_app and
        .completion_refresher are read on every redraw.
    :param show_fish_help: zero-arg callable; True when the fish-style
        autosuggestion hint should be shown.
    """

    def get_toolbar_tokens():
        result = []
        result.append(("class:bottom-toolbar", " "))

        if cli.multi_line:
            result.append(
                ("class:bottom-toolbar", " (Semi-colon [;] will end the line) ")
            )

        if cli.multi_line:
            result.append(("class:bottom-toolbar.on", "[F3] Multiline: ON "))
        else:
            result.append(("class:bottom-toolbar.off", "[F3] Multiline: OFF "))

        if cli.prompt_app.editing_mode == EditingMode.VI:
            # Fix: style class was misspelled "botton-toolbar", so the
            # Vi-mode indicator never received the bottom-toolbar styling
            # used by every other segment here.
            result.append(
                ("class:bottom-toolbar.on", "Vi-mode ({})".format(_get_vi_mode()))
            )

        if show_fish_help():
            result.append(
                ("class:bottom-toolbar", " Right-arrow to complete suggestion")
            )

        if cli.completion_refresher.is_refreshing():
            result.append(("class:bottom-toolbar", " Refreshing completions..."))

        return result

    return get_toolbar_tokens
def _get_vi_mode():
    """Get the current vi mode for display."""
    # Map prompt_toolkit's vi input modes to one-letter indicators;
    # REPLACE and REPLACE_SINGLE share "R".
    return {
        InputMode.INSERT: "I",
        InputMode.NAVIGATION: "N",
        InputMode.REPLACE: "R",
        InputMode.INSERT_MULTIPLE: "M",
        InputMode.REPLACE_SINGLE: "R",
    }[get_app().vi_state.input_mode]
|
{"/tests/test_dbspecial.py": ["/litecli/packages/special/utils.py"], "/litecli/packages/special/iocommands.py": ["/litecli/packages/special/__init__.py", "/litecli/packages/special/main.py", "/litecli/packages/special/favoritequeries.py", "/litecli/packages/special/utils.py"], "/litecli/packages/special/dbcommands.py": ["/litecli/__init__.py", "/litecli/packages/special/__init__.py", "/litecli/packages/special/utils.py", "/litecli/packages/special/main.py"], "/litecli/encodingutils.py": ["/litecli/compat.py"], "/tests/utils.py": ["/litecli/main.py"], "/tests/test_main.py": ["/litecli/main.py", "/litecli/packages/special/main.py"], "/tests/test_parseutils.py": ["/litecli/packages/parseutils.py"], "/litecli/sqlexecute.py": ["/litecli/packages/special/utils.py"], "/litecli/sqlcompleter.py": ["/litecli/packages/parseutils.py", "/litecli/packages/special/iocommands.py", "/litecli/packages/filepaths.py"], "/tests/test_smart_completion_public_schema_only.py": ["/litecli/sqlcompleter.py"], "/litecli/config.py": ["/litecli/__init__.py"], "/litecli/packages/special/main.py": ["/litecli/packages/special/__init__.py"], "/litecli/main.py": ["/litecli/packages/special/main.py", "/litecli/sqlcompleter.py", "/litecli/clitoolbar.py", "/litecli/sqlexecute.py", "/litecli/clibuffer.py", "/litecli/config.py", "/litecli/encodingutils.py", "/litecli/lexer.py", "/litecli/__init__.py", "/litecli/packages/filepaths.py"], "/litecli/packages/filepaths.py": ["/litecli/encodingutils.py"]}
|
45,348,506
|
dbcli/litecli
|
refs/heads/main
|
/litecli/sqlcompleter.py
|
from __future__ import print_function
from __future__ import unicode_literals
import logging
from re import compile, escape
from collections import Counter
from prompt_toolkit.completion import Completer, Completion
from .packages.completion_engine import suggest_type
from .packages.parseutils import last_word
from .packages.special.iocommands import favoritequeries
from .packages.filepaths import parse_path, complete_path, suggest_path
_logger = logging.getLogger(__name__)
class SQLCompleter(Completer):
keywords = [
"ABORT",
"ACTION",
"ADD",
"AFTER",
"ALL",
"ALTER",
"ANALYZE",
"AND",
"AS",
"ASC",
"ATTACH",
"AUTOINCREMENT",
"BEFORE",
"BEGIN",
"BETWEEN",
"BIGINT",
"BLOB",
"BOOLEAN",
"BY",
"CASCADE",
"CASE",
"CAST",
"CHARACTER",
"CHECK",
"CLOB",
"COLLATE",
"COLUMN",
"COMMIT",
"CONFLICT",
"CONSTRAINT",
"CREATE",
"CROSS",
"CURRENT",
"CURRENT_DATE",
"CURRENT_TIME",
"CURRENT_TIMESTAMP",
"DATABASE",
"DATE",
"DATETIME",
"DECIMAL",
"DEFAULT",
"DEFERRABLE",
"DEFERRED",
"DELETE",
"DETACH",
"DISTINCT",
"DO",
"DOUBLE PRECISION",
"DOUBLE",
"DROP",
"EACH",
"ELSE",
"END",
"ESCAPE",
"EXCEPT",
"EXCLUSIVE",
"EXISTS",
"EXPLAIN",
"FAIL",
"FILTER",
"FLOAT",
"FOLLOWING",
"FOR",
"FOREIGN",
"FROM",
"FULL",
"GLOB",
"GROUP",
"HAVING",
"IF",
"IGNORE",
"IMMEDIATE",
"IN",
"INDEX",
"INDEXED",
"INITIALLY",
"INNER",
"INSERT",
"INSTEAD",
"INT",
"INT2",
"INT8",
"INTEGER",
"INTERSECT",
"INTO",
"IS",
"ISNULL",
"JOIN",
"KEY",
"LEFT",
"LIKE",
"LIMIT",
"MATCH",
"MEDIUMINT",
"NATIVE CHARACTER",
"NATURAL",
"NCHAR",
"NO",
"NOT",
"NOTHING",
"NULL",
"NULLS FIRST",
"NULLS LAST",
"NUMERIC",
"NVARCHAR",
"OF",
"OFFSET",
"ON",
"OR",
"ORDER BY",
"OUTER",
"OVER",
"PARTITION",
"PLAN",
"PRAGMA",
"PRECEDING",
"PRIMARY",
"QUERY",
"RAISE",
"RANGE",
"REAL",
"RECURSIVE",
"REFERENCES",
"REGEXP",
"REINDEX",
"RELEASE",
"RENAME",
"REPLACE",
"RESTRICT",
"RIGHT",
"ROLLBACK",
"ROW",
"ROWS",
"SAVEPOINT",
"SELECT",
"SET",
"SMALLINT",
"TABLE",
"TEMP",
"TEMPORARY",
"TEXT",
"THEN",
"TINYINT",
"TO",
"TRANSACTION",
"TRIGGER",
"UNBOUNDED",
"UNION",
"UNIQUE",
"UNSIGNED BIG INT",
"UPDATE",
"USING",
"VACUUM",
"VALUES",
"VARCHAR",
"VARYING CHARACTER",
"VIEW",
"VIRTUAL",
"WHEN",
"WHERE",
"WINDOW",
"WITH",
"WITHOUT",
]
functions = [
"ABS",
"AVG",
"CHANGES",
"CHAR",
"COALESCE",
"COUNT",
"CUME_DIST",
"DATE",
"DATETIME",
"DENSE_RANK",
"GLOB",
"GROUP_CONCAT",
"HEX",
"IFNULL",
"INSTR",
"JSON",
"JSON_ARRAY",
"JSON_ARRAY_LENGTH",
"JSON_EACH",
"JSON_EXTRACT",
"JSON_GROUP_ARRAY",
"JSON_GROUP_OBJECT",
"JSON_INSERT",
"JSON_OBJECT",
"JSON_PATCH",
"JSON_QUOTE",
"JSON_REMOVE",
"JSON_REPLACE",
"JSON_SET",
"JSON_TREE",
"JSON_TYPE",
"JSON_VALID",
"JULIANDAY",
"LAG",
"LAST_INSERT_ROWID",
"LENGTH",
"LIKELIHOOD",
"LIKELY",
"LOAD_EXTENSION",
"LOWER",
"LTRIM",
"MAX",
"MIN",
"NTILE",
"NULLIF",
"PERCENT_RANK",
"PRINTF",
"QUOTE",
"RANDOM",
"RANDOMBLOB",
"RANK",
"REPLACE",
"ROUND",
"ROW_NUMBER",
"RTRIM",
"SOUNDEX",
"SQLITE_COMPILEOPTION_GET",
"SQLITE_COMPILEOPTION_USED",
"SQLITE_OFFSET",
"SQLITE_SOURCE_ID",
"SQLITE_VERSION",
"STRFTIME",
"SUBSTR",
"SUM",
"TIME",
"TOTAL",
"TOTAL_CHANGES",
"TRIM",
]
def __init__(self, supported_formats=(), keyword_casing="auto"):
    """Initialize completer state.

    :param supported_formats: iterable of table-format names offered
        for 'table_format' suggestions.
    :param keyword_casing: "upper", "lower" or "auto"; any other value
        falls back to "auto".
    """
    # Fix: super(self.__class__, ...) recurses infinitely if this class
    # is ever subclassed; name the class explicitly (py2-compatible).
    super(SQLCompleter, self).__init__()
    self.reserved_words = set()
    for x in self.keywords:
        self.reserved_words.update(x.split())
    # Identifiers matching this pattern need no backtick quoting.
    # Fix: raw string -- "\$" is an invalid escape in a plain string.
    self.name_pattern = compile(r"^[_a-z][_a-z0-9\$]*$")

    self.special_commands = []
    self.table_formats = supported_formats
    if keyword_casing not in ("upper", "lower", "auto"):
        keyword_casing = "auto"
    self.keyword_casing = keyword_casing
    self.reset_completions()
def escape_name(self, name):
    """Backtick-quote *name* when it is not a plain identifier or when
    it collides with a reserved word or built-in function name."""
    if not name:
        return name
    needs_quoting = (
        not self.name_pattern.match(name)
        or name.upper() in self.reserved_words
        or name.upper() in self.functions
    )
    return "`%s`" % name if needs_quoting else name
def unescape_name(self, name):
    """Unquote a string by stripping one pair of surrounding double
    quotes, if present."""
    if name and name.startswith('"') and name.endswith('"'):
        return name[1:-1]
    return name
def escaped_names(self, names):
    """Apply escape_name() to every name in *names*, returning a list."""
    return list(map(self.escape_name, names))
def extend_special_commands(self, special_commands):
    """Register special (client-side) command names for completion."""
    # Special commands are not part of all_completions since they can only
    # be at the beginning of a line.
    self.special_commands.extend(special_commands)
def extend_database_names(self, databases):
    """Add names offered for 'database' suggestions."""
    self.databases.extend(databases)
def extend_keywords(self, additional_keywords):
    """Add extra keywords to completion.

    NOTE(review): ``keywords`` is a class-level list, so this mutates
    shared state across all SQLCompleter instances.
    """
    self.keywords.extend(additional_keywords)
    self.all_completions.update(additional_keywords)
def extend_schemata(self, schema):
if schema is None:
return
metadata = self.dbmetadata["tables"]
metadata[schema] = {}
# dbmetadata.values() are the 'tables' and 'functions' dicts
for metadata in self.dbmetadata.values():
metadata[schema] = {}
self.all_completions.update(schema)
def extend_relations(self, data, kind):
    """Extend metadata for tables or views.

    Names are escaped (backtick-quoted when necessary) before being
    stored under the current dbname.

    :param data: list of (rel_name, ) tuples
    :param kind: either 'tables' or 'views'
    :return:
    """
    # 'data' is a generator object. It can throw an exception while being
    # consumed. This could happen if the user has launched the app without
    # specifying a database name. This exception must be handled to prevent
    # crashing.
    try:
        data = [self.escaped_names(d) for d in data]
    except Exception:
        data = []

    # dbmetadata['tables'][$schema_name][$table_name] should be a list of
    # column names. Default to an asterisk
    metadata = self.dbmetadata[kind]
    for relname in data:
        try:
            metadata[self.dbname][relname[0]] = ["*"]
        except KeyError:
            # Schema was never registered via extend_schemata().
            _logger.error(
                "%r %r listed in unrecognized schema %r",
                kind,
                relname[0],
                self.dbname,
            )
        self.all_completions.add(relname[0])
def extend_columns(self, column_data, kind):
    """Extend column metadata.

    Column names are escaped and appended to the relation's existing
    column list (seeded with "*" by extend_relations()).

    :param column_data: list of (rel_name, column_name) tuples
    :param kind: either 'tables' or 'views'
    :return:
    """
    # 'column_data' is a generator object. It can throw an exception while
    # being consumed. This could happen if the user has launched the app
    # without specifying a database name. This exception must be handled to
    # prevent crashing.
    try:
        column_data = [self.escaped_names(d) for d in column_data]
    except Exception:
        column_data = []

    metadata = self.dbmetadata[kind]
    for relname, column in column_data:
        # NOTE(review): a relation not registered by extend_relations()
        # would raise KeyError here -- confirm callers always register
        # relations first.
        metadata[self.dbname][relname].append(column)
        self.all_completions.add(column)
def extend_functions(self, func_data):
    """Register user-defined function names for the current dbname."""
    # 'func_data' is a generator object. It can throw an exception while
    # being consumed. This could happen if the user has launched the app
    # without specifying a database name. This exception must be handled to
    # prevent crashing.
    try:
        func_data = [self.escaped_names(d) for d in func_data]
    except Exception:
        func_data = []

    # dbmetadata['functions'][$schema_name][$function_name] should return
    # function metadata.
    metadata = self.dbmetadata["functions"]

    for func in func_data:
        # No function metadata is tracked yet; only the name matters.
        metadata[self.dbname][func[0]] = None
        self.all_completions.add(func[0])
def set_dbname(self, dbname):
    """Set the active database name, used as the schema key for all
    metadata lookups."""
    self.dbname = dbname
def reset_completions(self):
    """Drop all learned metadata, keeping only the built-in keywords
    and function names."""
    self.databases = []
    self.dbname = ""
    # kind -> schema name -> object name -> columns / metadata
    self.dbmetadata = {"tables": {}, "views": {}, "functions": {}}
    self.all_completions = set(self.keywords + self.functions)
@staticmethod
def find_matches(
    text,
    collection,
    start_only=False,
    fuzzy=True,
    casing=None,
    punctuations="most_punctuations",
):
    """Find completion matches for the given text.

    Given the user's input text and a collection of available
    completions, find completions matching the last word of the
    text.

    If `start_only` is True, the text will match an available
    completion only at the beginning. Otherwise, a completion is
    considered a match if the text appears anywhere within it.

    yields prompt_toolkit Completion instances for any matches found
    in the collection of available completions.
    """
    last = last_word(text, include=punctuations)
    text = last.lower()

    completions = []

    if fuzzy:
        # Subsequence ("fuzzy") match: every char of `text` must appear
        # in order. Candidates are ranked by (match length, match start)
        # so tighter, earlier matches sort first.
        regex = ".*?".join(map(escape, text))
        pat = compile("(%s)" % regex)
        for item in sorted(collection):
            r = pat.search(item.lower())
            if r:
                completions.append((len(r.group()), r.start(), item))
    else:
        # Plain substring match; limiting the search window to len(text)
        # turns it into a startswith match when start_only is set.
        match_end_limit = len(text) if start_only else None
        for item in sorted(collection):
            match_point = item.lower().find(text, 0, match_end_limit)
            if match_point >= 0:
                completions.append((len(text), match_point, item))

    if casing == "auto":
        # Follow the casing of the last character the user typed.
        casing = "lower" if last and last[-1].islower() else "upper"

    def apply_case(kw):
        if casing == "upper":
            return kw.upper()
        return kw.lower()

    return (
        Completion(z if casing is None else apply_case(z), -len(text))
        for x, y, z in sorted(completions)
    )
def get_completions(self, document, complete_event):
    """Completer entry point: return Completions for the cursor position.

    suggest_type() decides which kinds of completion apply (column,
    table, keyword, ...); each kind is then resolved against the
    collected metadata via find_matches().
    """
    word_before_cursor = document.get_word_before_cursor(WORD=True)
    completions = []
    suggestions = suggest_type(document.text, document.text_before_cursor)

    for suggestion in suggestions:
        _logger.debug("Suggestion type: %r", suggestion["type"])

        if suggestion["type"] == "column":
            tables = suggestion["tables"]
            _logger.debug("Completion column scope: %r", tables)
            scoped_cols = self.populate_scoped_cols(tables)
            if suggestion.get("drop_unique"):
                # drop_unique is used for 'tb11 JOIN tbl2 USING (...'
                # which should suggest only columns that appear in more than
                # one table
                scoped_cols = [
                    col
                    for (col, count) in Counter(scoped_cols).items()
                    if count > 1 and col != "*"
                ]
            cols = self.find_matches(word_before_cursor, scoped_cols)
            completions.extend(cols)

        elif suggestion["type"] == "function":
            # suggest user-defined functions using substring matching
            funcs = self.populate_schema_objects(suggestion["schema"], "functions")
            user_funcs = self.find_matches(word_before_cursor, funcs)
            completions.extend(user_funcs)

            # suggest hardcoded functions using startswith matching only if
            # there is no schema qualifier. If a schema qualifier is
            # present it probably denotes a table.
            # eg: SELECT * FROM users u WHERE u.
            if not suggestion["schema"]:
                predefined_funcs = self.find_matches(
                    word_before_cursor,
                    self.functions,
                    start_only=True,
                    fuzzy=False,
                    casing=self.keyword_casing,
                )
                completions.extend(predefined_funcs)

        elif suggestion["type"] == "table":
            tables = self.populate_schema_objects(suggestion["schema"], "tables")
            tables = self.find_matches(word_before_cursor, tables)
            completions.extend(tables)

        elif suggestion["type"] == "view":
            views = self.populate_schema_objects(suggestion["schema"], "views")
            views = self.find_matches(word_before_cursor, views)
            completions.extend(views)

        elif suggestion["type"] == "alias":
            aliases = suggestion["aliases"]
            aliases = self.find_matches(word_before_cursor, aliases)
            completions.extend(aliases)

        elif suggestion["type"] == "database":
            dbs = self.find_matches(word_before_cursor, self.databases)
            completions.extend(dbs)

        elif suggestion["type"] == "keyword":
            # Keywords match by prefix only, with configured casing.
            keywords = self.find_matches(
                word_before_cursor,
                self.keywords,
                start_only=True,
                fuzzy=False,
                casing=self.keyword_casing,
                punctuations="many_punctuations",
            )
            completions.extend(keywords)

        elif suggestion["type"] == "special":
            special = self.find_matches(
                word_before_cursor,
                self.special_commands,
                start_only=True,
                fuzzy=False,
                punctuations="many_punctuations",
            )
            completions.extend(special)

        elif suggestion["type"] == "favoritequery":
            queries = self.find_matches(
                word_before_cursor,
                favoritequeries.list(),
                start_only=False,
                fuzzy=True,
            )
            completions.extend(queries)

        elif suggestion["type"] == "table_format":
            formats = self.find_matches(
                word_before_cursor, self.table_formats, start_only=True, fuzzy=False
            )
            completions.extend(formats)

        elif suggestion["type"] == "file_name":
            file_names = self.find_files(word_before_cursor)
            completions.extend(file_names)

    return completions
def find_files(self, word):
    """Yield matching directory or file names.

    :param word: the (possibly partial) path typed so far
    :return: iterable of prompt_toolkit Completion
    """
    # parse_path's first element (the base path) is not needed here.
    _, last_path, position = parse_path(word)
    paths = suggest_path(word)
    for name in sorted(paths):
        suggestion = complete_path(name, last_path)
        if suggestion:
            yield Completion(suggestion, position)
def populate_scoped_cols(self, scoped_tbls):
    """Find all columns in a set of scoped_tables

    :param scoped_tbls: list of (schema, table, alias) tuples
    :return: list of column names
    """
    columns = []
    meta = self.dbmetadata

    for tbl in scoped_tbls:
        # A fully qualified schema.relname reference or default_schema
        # DO NOT escape schema names.
        schema = tbl[0] or self.dbname
        relname = tbl[1]
        escaped_relname = self.escape_name(tbl[1])

        # We don't know if schema.relname is a table or view. Since
        # tables and views cannot share the same name, we can check one
        # at a time
        try:
            columns.extend(meta["tables"][schema][relname])

            # Table exists, so don't bother checking for a view
            continue
        except KeyError:
            try:
                # Retry with the escaped form (e.g. `select`).
                columns.extend(meta["tables"][schema][escaped_relname])

                # Table exists, so don't bother checking for a view
                continue
            except KeyError:
                pass

        # NOTE(review): views are not retried with the escaped name,
        # unlike tables -- confirm this asymmetry is intentional.
        try:
            columns.extend(meta["views"][schema][relname])
        except KeyError:
            pass

    return columns
def populate_schema_objects(self, schema, obj_type):
    """Returns list of tables or functions for a (optional) schema"""
    target = schema or self.dbname
    schema_objects = self.dbmetadata[obj_type].get(target)
    if schema_objects is None:
        # schema doesn't exist
        return []
    return schema_objects.keys()
|
{"/tests/test_dbspecial.py": ["/litecli/packages/special/utils.py"], "/litecli/packages/special/iocommands.py": ["/litecli/packages/special/__init__.py", "/litecli/packages/special/main.py", "/litecli/packages/special/favoritequeries.py", "/litecli/packages/special/utils.py"], "/litecli/packages/special/dbcommands.py": ["/litecli/__init__.py", "/litecli/packages/special/__init__.py", "/litecli/packages/special/utils.py", "/litecli/packages/special/main.py"], "/litecli/encodingutils.py": ["/litecli/compat.py"], "/tests/utils.py": ["/litecli/main.py"], "/tests/test_main.py": ["/litecli/main.py", "/litecli/packages/special/main.py"], "/tests/test_parseutils.py": ["/litecli/packages/parseutils.py"], "/litecli/sqlexecute.py": ["/litecli/packages/special/utils.py"], "/litecli/sqlcompleter.py": ["/litecli/packages/parseutils.py", "/litecli/packages/special/iocommands.py", "/litecli/packages/filepaths.py"], "/tests/test_smart_completion_public_schema_only.py": ["/litecli/sqlcompleter.py"], "/litecli/config.py": ["/litecli/__init__.py"], "/litecli/packages/special/main.py": ["/litecli/packages/special/__init__.py"], "/litecli/main.py": ["/litecli/packages/special/main.py", "/litecli/sqlcompleter.py", "/litecli/clitoolbar.py", "/litecli/sqlexecute.py", "/litecli/clibuffer.py", "/litecli/config.py", "/litecli/encodingutils.py", "/litecli/lexer.py", "/litecli/__init__.py", "/litecli/packages/filepaths.py"], "/litecli/packages/filepaths.py": ["/litecli/encodingutils.py"]}
|
45,348,507
|
dbcli/litecli
|
refs/heads/main
|
/tests/test_smart_completion_public_schema_only.py
|
# coding: utf-8
from __future__ import unicode_literals
import pytest
from mock import patch
from prompt_toolkit.completion import Completion
from prompt_toolkit.document import Document
# Table -> columns fixture data. Includes a reserved-word table name
# ("select") and a non-ASCII one ("réveillé") to exercise escaping.
metadata = {
    "users": ["id", "email", "first_name", "last_name"],
    "orders": ["id", "ordered_date", "status"],
    "select": ["id", "insert", "ABC"],
    "réveillé": ["id", "insert", "ABC"],
}
@pytest.fixture
def completer():
    """Return a SQLCompleter primed with the module-level metadata."""
    import litecli.sqlcompleter as sqlcompleter

    comp = sqlcompleter.SQLCompleter()
    tables, columns = [], []
    for table, cols in metadata.items():
        tables.append((table,))
        columns.extend([(table, col) for col in cols])
    comp.set_dbname("test")
    comp.extend_schemata("test")
    comp.extend_relations(tables, kind="tables")
    comp.extend_columns(columns, kind="tables")
    return comp
@pytest.fixture
def complete_event():
    """Dummy CompleteEvent stand-in; the completer ignores it."""
    from mock import Mock

    return Mock()
def test_empty_string_completion(completer, complete_event):
    """With no input, every keyword is suggested in sorted order."""
    text = ""
    position = 0
    result = list(
        completer.get_completions(
            Document(text=text, cursor_position=position), complete_event
        )
    )
    assert list(map(Completion, sorted(completer.keywords))) == result
def test_select_keyword_completion(completer, complete_event):
    """A keyword prefix completes to the matching keyword."""
    text = "SEL"
    position = len("SEL")
    result = completer.get_completions(
        Document(text=text, cursor_position=position), complete_event
    )
    assert list(result) == list([Completion(text="SELECT", start_position=-3)])
def test_table_completion(completer, complete_event):
    """All tables are suggested after FROM; reserved/non-identifier
    names appear backtick-escaped."""
    text = "SELECT * FROM "
    position = len(text)
    result = completer.get_completions(
        Document(text=text, cursor_position=position), complete_event
    )
    assert list(result) == list(
        [
            Completion(text="`réveillé`", start_position=0),
            Completion(text="`select`", start_position=0),
            Completion(text="orders", start_position=0),
            Completion(text="users", start_position=0),
        ]
    )
def test_function_name_completion(completer, complete_event):
    """A prefix matches both function (MAX) and keyword (MATCH) names."""
    text = "SELECT MA"
    position = len("SELECT MA")
    result = completer.get_completions(
        Document(text=text, cursor_position=position), complete_event
    )
    assert list(result) == list(
        [
            Completion(text="MAX", start_position=-2),
            Completion(text="MATCH", start_position=-2),
        ]
    )
def test_suggested_column_names(completer, complete_event):
    """Suggest column and function names when selecting from table."""
    text = "SELECT from users"
    position = len("SELECT ")
    result = list(
        completer.get_completions(
            Document(text=text, cursor_position=position), complete_event
        )
    )
    # Expected order: columns, functions, table name, then keywords.
    assert result == list(
        [
            Completion(text="*", start_position=0),
            Completion(text="email", start_position=0),
            Completion(text="first_name", start_position=0),
            Completion(text="id", start_position=0),
            Completion(text="last_name", start_position=0),
        ]
        + list(map(Completion, completer.functions))
        + [Completion(text="users", start_position=0)]
        + list(map(Completion, sorted(completer.keywords)))
    )
def test_suggested_column_names_in_function(completer, complete_event):
    """Suggest only column names inside a function call's parentheses."""
    text = "SELECT MAX( from users"
    position = len("SELECT MAX(")
    result = completer.get_completions(
        Document(text=text, cursor_position=position), complete_event
    )
    assert list(result) == list(
        [
            Completion(text="*", start_position=0),
            Completion(text="email", start_position=0),
            Completion(text="first_name", start_position=0),
            Completion(text="id", start_position=0),
            Completion(text="last_name", start_position=0),
        ]
    )
def test_suggested_column_names_with_table_dot(completer, complete_event):
    """Suggest column names after a table name and dot."""
    text = "SELECT users. from users"
    position = len("SELECT users.")
    result = list(
        completer.get_completions(
            Document(text=text, cursor_position=position), complete_event
        )
    )
    assert result == list(
        [
            Completion(text="*", start_position=0),
            Completion(text="email", start_position=0),
            Completion(text="first_name", start_position=0),
            Completion(text="id", start_position=0),
            Completion(text="last_name", start_position=0),
        ]
    )
def test_suggested_column_names_with_alias(completer, complete_event):
    """Suggest column names after a table alias and dot."""
    text = "SELECT u. from users u"
    position = len("SELECT u.")
    result = list(
        completer.get_completions(
            Document(text=text, cursor_position=position), complete_event
        )
    )
    assert result == list(
        [
            Completion(text="*", start_position=0),
            Completion(text="email", start_position=0),
            Completion(text="first_name", start_position=0),
            Completion(text="id", start_position=0),
            Completion(text="last_name", start_position=0),
        ]
    )
def test_suggested_multiple_column_names(completer, complete_event):
    """Suggest columns, functions, alias and keywords after a comma in
    the select list."""
    text = "SELECT id, from users u"
    position = len("SELECT id, ")
    result = list(
        completer.get_completions(
            Document(text=text, cursor_position=position), complete_event
        )
    )
    assert result == list(
        [
            Completion(text="*", start_position=0),
            Completion(text="email", start_position=0),
            Completion(text="first_name", start_position=0),
            Completion(text="id", start_position=0),
            Completion(text="last_name", start_position=0),
        ]
        + list(map(Completion, completer.functions))
        + [Completion(text="u", start_position=0)]
        + list(map(Completion, sorted(completer.keywords)))
    )
def test_suggested_multiple_column_names_with_alias(completer, complete_event):
    """Suggest column names after alias-dot in a multi-column select."""
    text = "SELECT u.id, u. from users u"
    position = len("SELECT u.id, u.")
    result = list(
        completer.get_completions(
            Document(text=text, cursor_position=position), complete_event
        )
    )
    assert result == list(
        [
            Completion(text="*", start_position=0),
            Completion(text="email", start_position=0),
            Completion(text="first_name", start_position=0),
            Completion(text="id", start_position=0),
            Completion(text="last_name", start_position=0),
        ]
    )
def test_suggested_multiple_column_names_with_dot(completer, complete_event):
    """Suggest column names after table-dot in a multi-column select."""
    text = "SELECT users.id, users. from users u"
    position = len("SELECT users.id, users.")
    result = list(
        completer.get_completions(
            Document(text=text, cursor_position=position), complete_event
        )
    )
    assert result == list(
        [
            Completion(text="*", start_position=0),
            Completion(text="email", start_position=0),
            Completion(text="first_name", start_position=0),
            Completion(text="id", start_position=0),
            Completion(text="last_name", start_position=0),
        ]
    )
def test_suggested_aliases_after_on(completer, complete_event):
    """Suggest the join aliases after ON."""
    text = "SELECT u.name, o.id FROM users u JOIN orders o ON "
    position = len("SELECT u.name, o.id FROM users u JOIN orders o ON ")
    result = list(
        completer.get_completions(
            Document(text=text, cursor_position=position), complete_event
        )
    )
    assert result == list(
        [Completion(text="o", start_position=0), Completion(text="u", start_position=0)]
    )
def test_suggested_aliases_after_on_right_side(completer, complete_event):
    """Suggest the join aliases on the right side of an ON equality."""
    text = "SELECT u.name, o.id FROM users u JOIN orders o ON o.user_id = "
    position = len("SELECT u.name, o.id FROM users u JOIN orders o ON o.user_id = ")
    result = list(
        completer.get_completions(
            Document(text=text, cursor_position=position), complete_event
        )
    )
    assert result == list(
        [Completion(text="o", start_position=0), Completion(text="u", start_position=0)]
    )
def test_suggested_tables_after_on(completer, complete_event):
    """Without aliases, suggest the joined table names after ON."""
    text = "SELECT users.name, orders.id FROM users JOIN orders ON "
    position = len("SELECT users.name, orders.id FROM users JOIN orders ON ")
    result = list(
        completer.get_completions(
            Document(text=text, cursor_position=position), complete_event
        )
    )
    assert result == list(
        [
            Completion(text="orders", start_position=0),
            Completion(text="users", start_position=0),
        ]
    )
def test_suggested_tables_after_on_right_side(completer, complete_event):
    """Without aliases, suggest the joined table names on the right side
    of an ON equality."""
    text = "SELECT users.name, orders.id FROM users JOIN orders ON orders.user_id = "
    position = len(
        "SELECT users.name, orders.id FROM users JOIN orders ON orders.user_id = "
    )
    result = list(
        completer.get_completions(
            Document(text=text, cursor_position=position), complete_event
        )
    )
    assert list(result) == list(
        [
            Completion(text="orders", start_position=0),
            Completion(text="users", start_position=0),
        ]
    )
def test_table_names_after_from(completer, complete_event):
    """All (escaped where needed) table names follow FROM."""
    text = "SELECT * FROM "
    position = len("SELECT * FROM ")
    result = list(
        completer.get_completions(
            Document(text=text, cursor_position=position), complete_event
        )
    )
    assert list(result) == list(
        [
            Completion(text="`réveillé`", start_position=0),
            Completion(text="`select`", start_position=0),
            Completion(text="orders", start_position=0),
            Completion(text="users", start_position=0),
        ]
    )
def test_auto_escaped_col_names(completer, complete_event):
    """Columns of a reserved-word table are suggested with reserved
    column names ("insert", "ABC") backtick-escaped."""
    text = "SELECT from `select`"
    position = len("SELECT ")
    result = list(
        completer.get_completions(
            Document(text=text, cursor_position=position), complete_event
        )
    )
    assert (
        result
        == [
            Completion(text="*", start_position=0),
            Completion(text="`ABC`", start_position=0),
            Completion(text="`insert`", start_position=0),
            Completion(text="id", start_position=0),
        ]
        + list(map(Completion, completer.functions))
        + [Completion(text="`select`", start_position=0)]
        + list(map(Completion, sorted(completer.keywords)))
    )
def test_un_escaped_table_names(completer, complete_event):
    """Non-ASCII table names that need no quoting are suggested verbatim."""
    text = "SELECT from réveillé"
    doc = Document(text=text, cursor_position=len("SELECT "))
    got = list(completer.get_completions(doc, complete_event))
    expected = [
        Completion(text="*", start_position=0),
        Completion(text="`ABC`", start_position=0),
        Completion(text="`insert`", start_position=0),
        Completion(text="id", start_position=0),
    ]
    expected.extend(Completion(func) for func in completer.functions)
    expected.append(Completion(text="réveillé", start_position=0))
    expected.extend(Completion(kw) for kw in sorted(completer.keywords))
    assert got == expected
def dummy_list_path(dir_name):
    """Fake directory listing backed by a tiny in-memory tree.

    Returns the entries of *dir_name*, or an empty list for unknown paths.
    """
    tree = {
        "/": ["dir1", "file1.sql", "file2.sql"],
        "/dir1": ["subdir1", "subfile1.sql", "subfile2.sql"],
        "/dir1/subdir1": ["lastfile.sql"],
    }
    return tree.get(dir_name, [])
@patch("litecli.packages.filepaths.list_path", new=dummy_list_path)
@pytest.mark.parametrize(
    "text,expected",
    [
        ("source ", [(".", 0), ("..", 0), ("/", 0), ("~", 0)]),
        ("source /", [("dir1", 0), ("file1.sql", 0), ("file2.sql", 0)]),
        ("source /dir1/", [("subdir1", 0), ("subfile1.sql", 0), ("subfile2.sql", 0)]),
        ("source /dir1/subdir1/", [("lastfile.sql", 0)]),
    ],
)
def test_file_name_completion(completer, complete_event, text, expected):
    """`source <path>` completes file names from the (patched) filesystem."""
    doc = Document(text=text, cursor_position=len(text))
    got = list(completer.get_completions(doc, complete_event))
    assert got == [Completion(txt, pos) for txt, pos in expected]
|
{"/tests/test_dbspecial.py": ["/litecli/packages/special/utils.py"], "/litecli/packages/special/iocommands.py": ["/litecli/packages/special/__init__.py", "/litecli/packages/special/main.py", "/litecli/packages/special/favoritequeries.py", "/litecli/packages/special/utils.py"], "/litecli/packages/special/dbcommands.py": ["/litecli/__init__.py", "/litecli/packages/special/__init__.py", "/litecli/packages/special/utils.py", "/litecli/packages/special/main.py"], "/litecli/encodingutils.py": ["/litecli/compat.py"], "/tests/utils.py": ["/litecli/main.py"], "/tests/test_main.py": ["/litecli/main.py", "/litecli/packages/special/main.py"], "/tests/test_parseutils.py": ["/litecli/packages/parseutils.py"], "/litecli/sqlexecute.py": ["/litecli/packages/special/utils.py"], "/litecli/sqlcompleter.py": ["/litecli/packages/parseutils.py", "/litecli/packages/special/iocommands.py", "/litecli/packages/filepaths.py"], "/tests/test_smart_completion_public_schema_only.py": ["/litecli/sqlcompleter.py"], "/litecli/config.py": ["/litecli/__init__.py"], "/litecli/packages/special/main.py": ["/litecli/packages/special/__init__.py"], "/litecli/main.py": ["/litecli/packages/special/main.py", "/litecli/sqlcompleter.py", "/litecli/clitoolbar.py", "/litecli/sqlexecute.py", "/litecli/clibuffer.py", "/litecli/config.py", "/litecli/encodingutils.py", "/litecli/lexer.py", "/litecli/__init__.py", "/litecli/packages/filepaths.py"], "/litecli/packages/filepaths.py": ["/litecli/encodingutils.py"]}
|
45,348,508
|
dbcli/litecli
|
refs/heads/main
|
/tasks.py
|
# -*- coding: utf-8 -*-
"""Common development tasks for setup.py to use."""
import re
import subprocess
import sys
from setuptools import Command
from setuptools.command.test import test as TestCommand
class BaseCommand(Command, object):
    """The base command for project tasks."""

    user_options = []

    default_cmd_options = ("verbose", "quiet", "dry_run")

    def __init__(self, *args, **kwargs):
        super(BaseCommand, self).__init__(*args, **kwargs)
        self.verbose = False

    def initialize_options(self):
        """Override the distutils abstract method."""
        pass

    def finalize_options(self):
        """Override the distutils abstract method."""
        # Distutils uses incrementing integers for verbosity.
        self.verbose = bool(self.verbose)

    def call_and_exit(self, cmd, shell=True):
        """Run the *cmd* and exit with the proper exit code."""
        sys.exit(subprocess.call(cmd, shell=shell))

    def call_in_sequence(self, cmds, shell=True):
        """Run multiple commands in a row, exiting if one fails.

        BUGFIX: the original compared the exit status to 1 exactly, so a
        command failing with any other non-zero status (e.g. 2, or a
        signal-death status) did not stop the sequence. Any non-zero
        status now aborts.
        """
        for cmd in cmds:
            if subprocess.call(cmd, shell=shell) != 0:
                sys.exit(1)

    def apply_options(self, cmd, options=()):
        """Apply command-line options to the *cmd* template string."""
        for option in self.default_cmd_options + options:
            cmd = self.apply_option(cmd, option, active=getattr(self, option, False))
        return cmd

    def apply_option(self, cmd, option, active=True):
        """Apply a command-line option.

        Replaces ``{option:text}`` placeholders in *cmd*: the text is kept
        when *active* is true, removed otherwise.
        """
        return re.sub(
            r"{{{}\:(?P<option>[^}}]*)}}".format(option),
            r"\g<option>" if active else "",
            cmd,
        )
class lint(BaseCommand):
    """Task that checks (or fixes) formatting with black."""

    description = "check code using black (and fix violations)"

    user_options = [("fix", "f", "fix the violations in place")]

    def initialize_options(self):
        """Set the default options."""
        self.fix = False

    def finalize_options(self):
        pass

    def run(self):
        # Without --fix, black runs in --check mode and only reports.
        base = "black" if self.fix else "black --check"
        sys.exit(subprocess.call(base + " .", shell=True))
class test(TestCommand):
    """Task that runs the unit test suite with pytest."""

    user_options = [("pytest-args=", "a", "Arguments to pass to pytest")]

    def initialize_options(self):
        TestCommand.initialize_options(self)
        self.pytest_args = ""

    def run_tests(self):
        """Run pytest over the tests directory and exit with its status."""
        status = subprocess.call("pytest tests " + self.pytest_args, shell=True)
        sys.exit(status)
# class test(BaseCommand):
# """Run the test suites for this project."""
# description = "run the test suite"
# user_options = [
# ("all", "a", "test against all supported versions of Python"),
# ("coverage", "c", "measure test coverage"),
# ]
# unit_test_cmd = (
# "py.test{quiet: -q}{verbose: -v}{dry_run: --setup-only}"
# "{coverage: --cov-report= --cov=litecli}"
# )
# # cli_test_cmd = 'behave{quiet: -q}{verbose: -v}{dry_run: -d} test/features'
# test_all_cmd = "tox{verbose: -v}{dry_run: --notest}"
# coverage_cmd = "coverage combine && coverage report"
# def initialize_options(self):
# """Set the default options."""
# self.all = False
# self.coverage = False
# super(test, self).initialize_options()
# def run(self):
# """Run the test suites."""
# if self.all:
# cmd = self.apply_options(self.test_all_cmd)
# self.call_and_exit(cmd)
# else:
# cmds = (
# self.apply_options(self.unit_test_cmd, ("coverage",)),
# # self.apply_options(self.cli_test_cmd)
# )
# if self.coverage:
# cmds += (self.apply_options(self.coverage_cmd),)
# self.call_in_sequence(cmds)
|
{"/tests/test_dbspecial.py": ["/litecli/packages/special/utils.py"], "/litecli/packages/special/iocommands.py": ["/litecli/packages/special/__init__.py", "/litecli/packages/special/main.py", "/litecli/packages/special/favoritequeries.py", "/litecli/packages/special/utils.py"], "/litecli/packages/special/dbcommands.py": ["/litecli/__init__.py", "/litecli/packages/special/__init__.py", "/litecli/packages/special/utils.py", "/litecli/packages/special/main.py"], "/litecli/encodingutils.py": ["/litecli/compat.py"], "/tests/utils.py": ["/litecli/main.py"], "/tests/test_main.py": ["/litecli/main.py", "/litecli/packages/special/main.py"], "/tests/test_parseutils.py": ["/litecli/packages/parseutils.py"], "/litecli/sqlexecute.py": ["/litecli/packages/special/utils.py"], "/litecli/sqlcompleter.py": ["/litecli/packages/parseutils.py", "/litecli/packages/special/iocommands.py", "/litecli/packages/filepaths.py"], "/tests/test_smart_completion_public_schema_only.py": ["/litecli/sqlcompleter.py"], "/litecli/config.py": ["/litecli/__init__.py"], "/litecli/packages/special/main.py": ["/litecli/packages/special/__init__.py"], "/litecli/main.py": ["/litecli/packages/special/main.py", "/litecli/sqlcompleter.py", "/litecli/clitoolbar.py", "/litecli/sqlexecute.py", "/litecli/clibuffer.py", "/litecli/config.py", "/litecli/encodingutils.py", "/litecli/lexer.py", "/litecli/__init__.py", "/litecli/packages/filepaths.py"], "/litecli/packages/filepaths.py": ["/litecli/encodingutils.py"]}
|
45,348,509
|
dbcli/litecli
|
refs/heads/main
|
/litecli/config.py
|
import errno
import shutil
import os
import platform
from os.path import expanduser, exists, dirname
from configobj import ConfigObj
def config_location():
    """Return the directory that holds the litecli config file.

    Preference order: $XDG_CONFIG_HOME, the Windows per-user AppData
    directory, then ~/.config/litecli/.
    """
    xdg_home = os.environ.get("XDG_CONFIG_HOME")
    if xdg_home is not None:
        return "%s/litecli/" % expanduser(xdg_home)
    if platform.system() == "Windows":
        return os.getenv("USERPROFILE") + "\\AppData\\Local\\dbcli\\litecli\\"
    return expanduser("~/.config/litecli/")
def load_config(usr_cfg, def_cfg=None):
    """Load the default config and layer the user's config on top of it."""
    user_path = expanduser(usr_cfg)
    cfg = ConfigObj()
    cfg.merge(ConfigObj(def_cfg, interpolation=False))
    cfg.merge(ConfigObj(user_path, interpolation=False, encoding="utf-8"))
    # Writes (e.g. upgrade_config) go back to the user's file.
    cfg.filename = user_path
    return cfg
def ensure_dir_exists(path):
    """Create the parent directory of *path* if it does not exist yet."""
    parent = expanduser(dirname(path))
    try:
        os.makedirs(parent)
    except OSError as exc:
        if exc.errno == errno.EEXIST:
            # Already present -- fine (py2's makedirs has no exist_ok arg).
            return
        raise
def write_default_config(source, destination, overwrite=False):
    """Copy *source* to *destination*, skipping existing files unless asked."""
    dest = expanduser(destination)
    if exists(dest) and not overwrite:
        return
    ensure_dir_exists(dest)
    shutil.copyfile(source, dest)
def upgrade_config(config, def_config):
    """Merge the defaults into the user's config file and rewrite it."""
    load_config(config, def_config).write()
def get_config(liteclirc_file=None):
    """Locate the user's config file (creating it if missing) and load it."""
    from litecli import __file__ as package_root

    package_root = os.path.dirname(package_root)
    user_config = liteclirc_file or "%sconfig" % config_location()
    default_config = os.path.join(package_root, "liteclirc")
    write_default_config(default_config, user_config)
    return load_config(user_config, default_config)
|
{"/tests/test_dbspecial.py": ["/litecli/packages/special/utils.py"], "/litecli/packages/special/iocommands.py": ["/litecli/packages/special/__init__.py", "/litecli/packages/special/main.py", "/litecli/packages/special/favoritequeries.py", "/litecli/packages/special/utils.py"], "/litecli/packages/special/dbcommands.py": ["/litecli/__init__.py", "/litecli/packages/special/__init__.py", "/litecli/packages/special/utils.py", "/litecli/packages/special/main.py"], "/litecli/encodingutils.py": ["/litecli/compat.py"], "/tests/utils.py": ["/litecli/main.py"], "/tests/test_main.py": ["/litecli/main.py", "/litecli/packages/special/main.py"], "/tests/test_parseutils.py": ["/litecli/packages/parseutils.py"], "/litecli/sqlexecute.py": ["/litecli/packages/special/utils.py"], "/litecli/sqlcompleter.py": ["/litecli/packages/parseutils.py", "/litecli/packages/special/iocommands.py", "/litecli/packages/filepaths.py"], "/tests/test_smart_completion_public_schema_only.py": ["/litecli/sqlcompleter.py"], "/litecli/config.py": ["/litecli/__init__.py"], "/litecli/packages/special/main.py": ["/litecli/packages/special/__init__.py"], "/litecli/main.py": ["/litecli/packages/special/main.py", "/litecli/sqlcompleter.py", "/litecli/clitoolbar.py", "/litecli/sqlexecute.py", "/litecli/clibuffer.py", "/litecli/config.py", "/litecli/encodingutils.py", "/litecli/lexer.py", "/litecli/__init__.py", "/litecli/packages/filepaths.py"], "/litecli/packages/filepaths.py": ["/litecli/encodingutils.py"]}
|
45,348,510
|
dbcli/litecli
|
refs/heads/main
|
/litecli/packages/special/__init__.py
|
# Populated by the @export decorator below; names listed here form the
# package's public API.
__all__ = []
def export(defn):
    """Decorator to explicitly mark functions that are exposed in a lib."""
    name = defn.__name__
    globals()[name] = defn
    __all__.append(name)
    return defn
from . import dbcommands
from . import iocommands
|
{"/tests/test_dbspecial.py": ["/litecli/packages/special/utils.py"], "/litecli/packages/special/iocommands.py": ["/litecli/packages/special/__init__.py", "/litecli/packages/special/main.py", "/litecli/packages/special/favoritequeries.py", "/litecli/packages/special/utils.py"], "/litecli/packages/special/dbcommands.py": ["/litecli/__init__.py", "/litecli/packages/special/__init__.py", "/litecli/packages/special/utils.py", "/litecli/packages/special/main.py"], "/litecli/encodingutils.py": ["/litecli/compat.py"], "/tests/utils.py": ["/litecli/main.py"], "/tests/test_main.py": ["/litecli/main.py", "/litecli/packages/special/main.py"], "/tests/test_parseutils.py": ["/litecli/packages/parseutils.py"], "/litecli/sqlexecute.py": ["/litecli/packages/special/utils.py"], "/litecli/sqlcompleter.py": ["/litecli/packages/parseutils.py", "/litecli/packages/special/iocommands.py", "/litecli/packages/filepaths.py"], "/tests/test_smart_completion_public_schema_only.py": ["/litecli/sqlcompleter.py"], "/litecli/config.py": ["/litecli/__init__.py"], "/litecli/packages/special/main.py": ["/litecli/packages/special/__init__.py"], "/litecli/main.py": ["/litecli/packages/special/main.py", "/litecli/sqlcompleter.py", "/litecli/clitoolbar.py", "/litecli/sqlexecute.py", "/litecli/clibuffer.py", "/litecli/config.py", "/litecli/encodingutils.py", "/litecli/lexer.py", "/litecli/__init__.py", "/litecli/packages/filepaths.py"], "/litecli/packages/filepaths.py": ["/litecli/encodingutils.py"]}
|
45,348,511
|
dbcli/litecli
|
refs/heads/main
|
/litecli/packages/special/main.py
|
from __future__ import unicode_literals
import logging
from collections import namedtuple
from . import export
log = logging.getLogger(__name__)

# Values for SpecialCommand.arg_type: what the registered handler expects.
NO_QUERY = 0
PARSED_QUERY = 1
RAW_QUERY = 2

# Registry record describing one special command.
SpecialCommand = namedtuple(
    "SpecialCommand",
    [
        "handler",
        "command",
        "shortcut",
        "description",
        "arg_type",
        "hidden",
        "case_sensitive",
    ],
)

# Maps command name (lowercased unless case sensitive) to its SpecialCommand.
COMMANDS = {}
@export
class ArgumentMissing(Exception):
    """Raised when a special command lacks a required argument."""
@export
class CommandNotFound(Exception):
    """Raised when the requested special command is not registered."""
@export
def parse_special_command(sql):
    """Split *sql* into ``(command, verbose, argument)``.

    A ``+`` anywhere in the command word requests verbose output and is
    stripped from the returned command name.
    """
    command, _, arg = sql.partition(" ")
    verbose = "+" in command
    command = command.strip().replace("+", "")
    return (command, verbose, arg.strip())
@export
def special_command(
    command,
    shortcut,
    description,
    arg_type=PARSED_QUERY,
    hidden=False,
    case_sensitive=False,
    aliases=(),
):
    """Decorator flavor of register_special_command()."""

    def decorator(handler):
        register_special_command(
            handler,
            command,
            shortcut,
            description,
            arg_type,
            hidden,
            case_sensitive,
            aliases,
        )
        return handler

    return decorator
@export
def register_special_command(
    handler,
    command,
    shortcut,
    description,
    arg_type=PARSED_QUERY,
    hidden=False,
    case_sensitive=False,
    aliases=(),
):
    """Add *handler* to COMMANDS; aliases are registered as hidden entries."""
    key = command if case_sensitive else command.lower()
    COMMANDS[key] = SpecialCommand(
        handler, command, shortcut, description, arg_type, hidden, case_sensitive
    )
    for alias in aliases:
        alias_key = alias if case_sensitive else alias.lower()
        # Aliases are hidden so the help table only lists the main name.
        COMMANDS[alias_key] = SpecialCommand(
            handler,
            command,
            shortcut,
            description,
            arg_type,
            case_sensitive=case_sensitive,
            hidden=True,
        )
@export
def execute(cur, sql):
    """Execute a special command and return the results. If the special command
    is not supported a KeyError will be raised.
    """
    command, verbose, arg = parse_special_command(sql)

    if command not in COMMANDS and command.lower() not in COMMANDS:
        raise CommandNotFound

    try:
        special_cmd = COMMANDS[command]
    except KeyError:
        # Fall back to the case-insensitive entry, but refuse if the
        # registered command demands an exact-case match.
        special_cmd = COMMANDS[command.lower()]
        if special_cmd.case_sensitive:
            raise CommandNotFound("Command not found: %s" % command)

    if special_cmd.arg_type == NO_QUERY:
        return special_cmd.handler()
    if special_cmd.arg_type == PARSED_QUERY:
        return special_cmd.handler(cur=cur, arg=arg, verbose=verbose)
    if special_cmd.arg_type == RAW_QUERY:
        return special_cmd.handler(cur=cur, query=sql)
@special_command(
    "help", "\\?", "Show this help.", arg_type=NO_QUERY, aliases=("\\?", "?")
)
def show_help():  # All the parameters are ignored.
    """Build a (command, shortcut, description) table of visible commands."""
    headers = ["Command", "Shortcut", "Description"]
    rows = [
        (entry.command, entry.shortcut, entry.description)
        for _, entry in sorted(COMMANDS.items())
        if not entry.hidden
    ]
    return [(None, rows, headers, None)]
@special_command(".exit", "\\q", "Exit.", arg_type=NO_QUERY, aliases=("\\q", "exit"))
@special_command("quit", "\\q", "Quit.", arg_type=NO_QUERY)
def quit(*_args):
    # Registered for ".exit"/"quit": raising EOFError makes the REPL loop
    # terminate the session the same way Ctrl-D does.
    raise EOFError
@special_command(
    "\\e",
    "\\e",
    "Edit command with editor (uses $EDITOR).",
    arg_type=NO_QUERY,
    case_sensitive=True,
)
@special_command(
    "\\G",
    "\\G",
    "Display current query results vertically.",
    arg_type=NO_QUERY,
    case_sensitive=True,
)
def stub():
    # NOTE(review): these commands appear to be intercepted before dispatch
    # (registration here presumably only adds help entries) -- if this
    # handler is ever reached, it fails loudly. Confirm against the caller.
    raise NotImplementedError
|
{"/tests/test_dbspecial.py": ["/litecli/packages/special/utils.py"], "/litecli/packages/special/iocommands.py": ["/litecli/packages/special/__init__.py", "/litecli/packages/special/main.py", "/litecli/packages/special/favoritequeries.py", "/litecli/packages/special/utils.py"], "/litecli/packages/special/dbcommands.py": ["/litecli/__init__.py", "/litecli/packages/special/__init__.py", "/litecli/packages/special/utils.py", "/litecli/packages/special/main.py"], "/litecli/encodingutils.py": ["/litecli/compat.py"], "/tests/utils.py": ["/litecli/main.py"], "/tests/test_main.py": ["/litecli/main.py", "/litecli/packages/special/main.py"], "/tests/test_parseutils.py": ["/litecli/packages/parseutils.py"], "/litecli/sqlexecute.py": ["/litecli/packages/special/utils.py"], "/litecli/sqlcompleter.py": ["/litecli/packages/parseutils.py", "/litecli/packages/special/iocommands.py", "/litecli/packages/filepaths.py"], "/tests/test_smart_completion_public_schema_only.py": ["/litecli/sqlcompleter.py"], "/litecli/config.py": ["/litecli/__init__.py"], "/litecli/packages/special/main.py": ["/litecli/packages/special/__init__.py"], "/litecli/main.py": ["/litecli/packages/special/main.py", "/litecli/sqlcompleter.py", "/litecli/clitoolbar.py", "/litecli/sqlexecute.py", "/litecli/clibuffer.py", "/litecli/config.py", "/litecli/encodingutils.py", "/litecli/lexer.py", "/litecli/__init__.py", "/litecli/packages/filepaths.py"], "/litecli/packages/filepaths.py": ["/litecli/encodingutils.py"]}
|
45,348,512
|
dbcli/litecli
|
refs/heads/main
|
/litecli/main.py
|
from __future__ import unicode_literals
from __future__ import print_function
import os
import sys
import traceback
import logging
import threading
from time import time
from datetime import datetime
from io import open
from collections import namedtuple
from sqlite3 import OperationalError
import shutil
from cli_helpers.tabular_output import TabularOutputFormatter
from cli_helpers.tabular_output import preprocessors
import click
import sqlparse
from prompt_toolkit.completion import DynamicCompleter
from prompt_toolkit.enums import DEFAULT_BUFFER, EditingMode
from prompt_toolkit.shortcuts import PromptSession, CompleteStyle
from prompt_toolkit.styles.pygments import style_from_pygments_cls
from prompt_toolkit.document import Document
from prompt_toolkit.filters import HasFocus, IsDone
from prompt_toolkit.formatted_text import ANSI
from prompt_toolkit.layout.processors import (
HighlightMatchingBracketProcessor,
ConditionalProcessor,
)
from prompt_toolkit.lexers import PygmentsLexer
from prompt_toolkit.history import FileHistory
from prompt_toolkit.auto_suggest import AutoSuggestFromHistory
from .packages.special.main import NO_QUERY
from .packages.prompt_utils import confirm, confirm_destructive_query
from .packages import special
from .sqlcompleter import SQLCompleter
from .clitoolbar import create_toolbar_tokens_func
from .clistyle import style_factory, style_factory_output
from .sqlexecute import SQLExecute
from .clibuffer import cli_is_multiline
from .completion_refresher import CompletionRefresher
from .config import config_location, ensure_dir_exists, get_config
from .key_bindings import cli_bindings
from .encodingutils import utf8tounicode, text_type
from .lexer import LiteCliLexer
from .__init__ import __version__
from .packages.filepaths import dir_path_exists
import itertools
# Silence click's py2-era unicode_literals warning.
click.disable_unicode_literals_warning = True

# Query tuples are used for maintaining history
Query = namedtuple("Query", ["query", "successful", "mutating"])

# Absolute path of the directory containing this module.
PACKAGE_ROOT = os.path.abspath(os.path.dirname(__file__))
class LiteCli(object):
default_prompt = "\\d> "
max_len_prompt = 45
    def __init__(
        self,
        sqlexecute=None,
        prompt=None,
        logfile=None,
        auto_vertical_output=False,
        warn=None,
        liteclirc=None,
    ):
        """Wire up config, formatter, logging, completer and special commands.

        :param sqlexecute: pre-built SQLExecute connection (or None).
        :param prompt: prompt format override from the command line.
        :param logfile: open file object for the audit log (or None).
        :param auto_vertical_output: force vertical output for wide results.
        :param warn: overrides the configured destructive_warning when not None.
        :param liteclirc: path to an alternative config file.
        """
        self.sqlexecute = sqlexecute
        self.logfile = logfile

        # Load config.
        c = self.config = get_config(liteclirc)

        self.multi_line = c["main"].as_bool("multi_line")
        self.key_bindings = c["main"]["key_bindings"]
        special.set_favorite_queries(self.config)
        self.formatter = TabularOutputFormatter(format_name=c["main"]["table_format"])
        self.formatter.litecli = self
        self.syntax_style = c["main"]["syntax_style"]
        self.less_chatty = c["main"].as_bool("less_chatty")
        self.show_bottom_toolbar = c["main"].as_bool("show_bottom_toolbar")
        self.cli_style = c["colors"]
        self.output_style = style_factory_output(self.syntax_style, self.cli_style)
        self.wider_completion_menu = c["main"].as_bool("wider_completion_menu")
        self.autocompletion = c["main"].as_bool("autocompletion")
        c_dest_warning = c["main"].as_bool("destructive_warning")
        # CLI flag wins over the config file when explicitly given.
        self.destructive_warning = c_dest_warning if warn is None else warn
        self.login_path_as_host = c["main"].as_bool("login_path_as_host")

        # read from cli argument or user config file
        self.auto_vertical_output = auto_vertical_output or c["main"].as_bool(
            "auto_vertical_output"
        )

        # Audit log: open the configured file lazily; a failed open is
        # remembered as False so queries can warn that they were not logged.
        if self.logfile is None and "audit_log" in c["main"]:
            try:
                self.logfile = open(os.path.expanduser(c["main"]["audit_log"]), "a")
            except (IOError, OSError):
                self.echo(
                    "Error: Unable to open the audit log file. Your queries will not be logged.",
                    err=True,
                    fg="red",
                )
                self.logfile = False

        # Load startup commands.
        try:
            self.startup_commands = c["startup_commands"]
        except KeyError:
            # Redundant given load_config() merging in the standard config,
            # but kept so older user config files without the section work.
            self.startup_commands = None

        self.completion_refresher = CompletionRefresher()

        self.logger = logging.getLogger(__name__)
        self.initialize_logging()

        prompt_cnf = self.read_my_cnf_files(["prompt"])["prompt"]
        # Precedence: CLI arg > config-file prompt > [main] prompt > default.
        self.prompt_format = (
            prompt or prompt_cnf or c["main"]["prompt"] or self.default_prompt
        )
        self.prompt_continuation_format = c["main"]["prompt_continuation"]
        keyword_casing = c["main"].get("keyword_casing", "auto")

        self.query_history = []

        # Initialize completer.
        self.completer = SQLCompleter(
            supported_formats=self.formatter.supported_formats,
            keyword_casing=keyword_casing,
        )
        self._completer_lock = threading.Lock()

        # Register custom special commands.
        self.register_special_commands()

        self.prompt_app = None
def register_special_commands(self):
special.register_special_command(
self.change_db,
".open",
".open",
"Change to a new database.",
aliases=("use", "\\u"),
)
special.register_special_command(
self.refresh_completions,
"rehash",
"\\#",
"Refresh auto-completions.",
arg_type=NO_QUERY,
aliases=("\\#",),
)
special.register_special_command(
self.change_table_format,
".mode",
"\\T",
"Change the table format used to output results.",
aliases=("tableformat", "\\T"),
case_sensitive=True,
)
special.register_special_command(
self.execute_from_file,
".read",
"\\. filename",
"Execute commands from file.",
case_sensitive=True,
aliases=("\\.", "source"),
)
special.register_special_command(
self.change_prompt_format,
"prompt",
"\\R",
"Change prompt format.",
aliases=("\\R",),
case_sensitive=True,
)
def change_table_format(self, arg, **_):
try:
self.formatter.format_name = arg
yield (None, None, None, "Changed table format to {}".format(arg))
except ValueError:
msg = "Table format {} not recognized. Allowed formats:".format(arg)
for table_type in self.formatter.supported_formats:
msg += "\n\t{}".format(table_type)
yield (None, None, None, msg)
def change_db(self, arg, **_):
if arg is None:
self.sqlexecute.connect()
else:
self.sqlexecute.connect(database=arg)
self.refresh_completions()
yield (
None,
None,
None,
'You are now connected to database "%s"' % (self.sqlexecute.dbname),
)
def execute_from_file(self, arg, **_):
if not arg:
message = "Missing required argument, filename."
return [(None, None, None, message)]
try:
with open(os.path.expanduser(arg), encoding="utf-8") as f:
query = f.read()
except IOError as e:
return [(None, None, None, str(e))]
if self.destructive_warning and confirm_destructive_query(query) is False:
message = "Wise choice. Command execution stopped."
return [(None, None, None, message)]
return self.sqlexecute.run(query)
def change_prompt_format(self, arg, **_):
"""
Change the prompt format.
"""
if not arg:
message = "Missing required argument, format."
return [(None, None, None, message)]
self.prompt_format = self.get_prompt(arg)
return [(None, None, None, "Changed prompt format to %s" % arg)]
    def initialize_logging(self):
        """Configure the 'litecli' logger from the [main] config section.

        log_file == "default" resolves to the config directory; log_level
        == "NONE" disables logging via a no-op handler.
        """
        log_file = self.config["main"]["log_file"]
        if log_file == "default":
            log_file = config_location() + "log"
        ensure_dir_exists(log_file)
        log_level = self.config["main"]["log_level"]

        # Map config strings to stdlib logging levels.
        level_map = {
            "CRITICAL": logging.CRITICAL,
            "ERROR": logging.ERROR,
            "WARNING": logging.WARNING,
            "INFO": logging.INFO,
            "DEBUG": logging.DEBUG,
        }

        # Disable logging if value is NONE by switching to a no-op handler
        # Set log level to a high value so it doesn't even waste cycles getting called.
        if log_level.upper() == "NONE":
            handler = logging.NullHandler()
            log_level = "CRITICAL"
        elif dir_path_exists(log_file):
            handler = logging.FileHandler(log_file)
        else:
            # Destination directory is unusable; warn and leave logging off.
            self.echo(
                'Error: Unable to open the log file "{}".'.format(log_file),
                err=True,
                fg="red",
            )
            return

        formatter = logging.Formatter(
            "%(asctime)s (%(process)d/%(threadName)s) "
            "%(name)s %(levelname)s - %(message)s"
        )

        handler.setFormatter(formatter)
        root_logger = logging.getLogger("litecli")
        root_logger.addHandler(handler)
        root_logger.setLevel(level_map[log_level.upper()])

        logging.captureWarnings(True)

        root_logger.debug("Initializing litecli logging.")
        root_logger.debug("Log file %r.", log_file)
def read_my_cnf_files(self, keys):
"""
Reads a list of config files and merges them. The last one will win.
:param files: list of files to read
:param keys: list of keys to retrieve
:returns: tuple, with None for missing keys.
"""
cnf = self.config
sections = ["main"]
def get(key):
result = None
for sect in cnf:
if sect in sections and key in cnf[sect]:
result = cnf[sect][key]
return result
return {x: get(x) for x in keys}
    def connect(self, database=""):
        """Open the SQLExecute connection for *database*.

        Falls back to the configured database when none is given; exits
        the process with status 1 when connecting fails.
        """
        cnf = {"database": None}
        cnf = self.read_my_cnf_files(cnf.keys())

        # Fall back to config values only if user did not specify a value.
        database = database or cnf["database"]

        # Connect to the database.
        def _connect():
            self.sqlexecute = SQLExecute(database)

        try:
            _connect()
        except Exception as e:  # Connecting to a database could fail.
            self.logger.debug("Database connection failed: %r.", e)
            self.logger.error("traceback: %r", traceback.format_exc())
            self.echo(str(e), err=True, fg="red")
            exit(1)
    def handle_editor_command(self, text):
        """Editor command is any query that is prefixed or suffixed by a '\\e'.

        The reason for a while loop is because a user might edit a query
        multiple times. For eg:

        "select * from \\e"<enter> to edit it in vim, then come
        back to the prompt with the edited query "select * from
        blah where q = 'abc'\\e" to edit it again.

        :param text: Document
        :return: Document
        :raises RuntimeError: when opening the external editor fails.
        """
        while special.editor_command(text):
            filename = special.get_filename(text)
            query = special.get_editor_query(text) or self.get_last_query()
            sql, message = special.open_external_editor(filename, sql=query)
            if message:
                # Something went wrong. Raise an exception and bail.
                raise RuntimeError(message)
            while True:
                try:
                    # Re-prompt with the edited SQL pre-filled.
                    text = self.prompt_app.prompt(default=sql)
                    break
                except KeyboardInterrupt:
                    # Ctrl-C during the re-prompt clears the pre-filled SQL.
                    sql = ""
                    continue
        return text
    def run_cli(self):
        """Run the interactive REPL: prompt, execute, display, repeat.

        Builds the prompt_toolkit session, runs any configured startup
        commands, then loops on one_iteration() until EOFError (Ctrl-D
        or the quit special command) ends the session.
        """
        iterations = 0
        sqlexecute = self.sqlexecute
        logger = self.logger
        self.configure_pager()
        self.refresh_completions()

        history_file = config_location() + "history"
        if dir_path_exists(history_file):
            history = FileHistory(history_file)
        else:
            history = None
            self.echo(
                'Error: Unable to open the history file "{}". '
                "Your query history will not be saved.".format(history_file),
                err=True,
                fg="red",
            )

        key_bindings = cli_bindings(self)

        if not self.less_chatty:
            print("Version:", __version__)
            print("Mail: https://groups.google.com/forum/#!forum/litecli-users")
            print("GitHub: https://github.com/dbcli/litecli")
            # print("Home: https://litecli.com")

        def get_message():
            # Render the prompt; fall back to the short default when the
            # rendered default prompt would be too wide.
            prompt = self.get_prompt(self.prompt_format)
            if (
                self.prompt_format == self.default_prompt
                and len(prompt) > self.max_len_prompt
            ):
                prompt = self.get_prompt("\\d> ")
            prompt = prompt.replace("\\x1b", "\x1b")
            return ANSI(prompt)

        def get_continuation(width, line_number, is_soft_wrap):
            # Pad continuation lines so they align under the prompt.
            continuation = " " * (width - 1) + " "
            return [("class:continuation", continuation)]

        def show_suggestion_tip():
            # Only show the toolbar tip during the first two iterations.
            return iterations < 2

        def one_iteration(text=None):
            # Read one command (unless *text* was passed in for a retry),
            # execute it and render the results.
            if text is None:
                try:
                    text = self.prompt_app.prompt()
                except KeyboardInterrupt:
                    return

                special.set_expanded_output(False)

                try:
                    text = self.handle_editor_command(text)
                except RuntimeError as e:
                    logger.error("sql: %r, error: %r", text, e)
                    logger.error("traceback: %r", traceback.format_exc())
                    self.echo(str(e), err=True, fg="red")
                    return

            if not text.strip():
                return

            if self.destructive_warning:
                destroy = confirm_destructive_query(text)
                if destroy is None:
                    pass  # Query was not destructive. Nothing to do here.
                elif destroy is True:
                    self.echo("Your call!")
                else:
                    self.echo("Wise choice!")
                    return

            # Keep track of whether or not the query is mutating. In case
            # of a multi-statement query, the overall query is considered
            # mutating if any one of the component statements is mutating
            mutating = False

            try:
                logger.debug("sql: %r", text)

                special.write_tee(self.get_prompt(self.prompt_format) + text)
                if self.logfile:
                    self.logfile.write("\n# %s\n" % datetime.now())
                    self.logfile.write(text)
                    self.logfile.write("\n")

                successful = False
                start = time()
                res = sqlexecute.run(text)
                self.formatter.query = text
                successful = True
                result_count = 0
                for title, cur, headers, status in res:
                    logger.debug("headers: %r", headers)
                    logger.debug("rows: %r", cur)
                    logger.debug("status: %r", status)
                    # Confirm before dumping very large result sets.
                    threshold = 1000
                    if is_select(status) and cur and cur.rowcount > threshold:
                        self.echo(
                            "The result set has more than {} rows.".format(threshold),
                            fg="red",
                        )
                        if not confirm("Do you want to continue?"):
                            self.echo("Aborted!", err=True, fg="red")
                            break

                    if self.auto_vertical_output:
                        max_width = self.prompt_app.output.get_size().columns
                    else:
                        max_width = None

                    formatted = self.format_output(
                        title, cur, headers, special.is_expanded_output(), max_width
                    )

                    t = time() - start
                    try:
                        if result_count > 0:
                            self.echo("")
                        try:
                            self.output(formatted, status)
                        except KeyboardInterrupt:
                            pass
                        self.echo("Time: %0.03fs" % t)
                    except KeyboardInterrupt:
                        pass

                    start = time()
                    result_count += 1
                    mutating = mutating or is_mutating(status)
                special.unset_once_if_written()
            except EOFError as e:
                raise e
            except KeyboardInterrupt:
                # get last connection id
                connection_id_to_kill = sqlexecute.connection_id
                logger.debug("connection id to kill: %r", connection_id_to_kill)
                # Restart connection to the database
                sqlexecute.connect()
                try:
                    for title, cur, headers, status in sqlexecute.run(
                        "kill %s" % connection_id_to_kill
                    ):
                        status_str = str(status).lower()
                        if status_str.find("ok") > -1:
                            logger.debug(
                                "cancelled query, connection id: %r, sql: %r",
                                connection_id_to_kill,
                                text,
                            )
                            self.echo("cancelled query", err=True, fg="red")
                except Exception as e:
                    self.echo(
                        "Encountered error while cancelling query: {}".format(e),
                        err=True,
                        fg="red",
                    )
            except NotImplementedError:
                self.echo("Not Yet Implemented.", fg="yellow")
            except OperationalError as e:
                logger.debug("Exception: %r", e)
                # NOTE(review): 2003/2006/2013 look like MySQL "lost
                # connection" codes inherited from mycli -- confirm they
                # apply to sqlite3.OperationalError at all.
                if e.args[0] in (2003, 2006, 2013):
                    logger.debug("Attempting to reconnect.")
                    self.echo("Reconnecting...", fg="yellow")
                    try:
                        sqlexecute.connect()
                        logger.debug("Reconnected successfully.")
                        one_iteration(text)
                        return  # OK to just return, cuz the recursion call runs to the end.
                    except OperationalError as e:
                        logger.debug("Reconnect failed. e: %r", e)
                        self.echo(str(e), err=True, fg="red")
                        # If reconnection failed, don't proceed further.
                        return
                else:
                    logger.error("sql: %r, error: %r", text, e)
                    logger.error("traceback: %r", traceback.format_exc())
                    self.echo(str(e), err=True, fg="red")
            except Exception as e:
                logger.error("sql: %r, error: %r", text, e)
                logger.error("traceback: %r", traceback.format_exc())
                self.echo(str(e), err=True, fg="red")
            else:
                # Refresh the table names and column names if necessary.
                if need_completion_refresh(text):
                    self.refresh_completions(reset=need_completion_reset(text))
            finally:
                if self.logfile is False:
                    self.echo("Warning: This query was not logged.", err=True, fg="red")
            query = Query(text, successful, mutating)
            self.query_history.append(query)

        get_toolbar_tokens = create_toolbar_tokens_func(self, show_suggestion_tip)
        if self.wider_completion_menu:
            complete_style = CompleteStyle.MULTI_COLUMN
        else:
            complete_style = CompleteStyle.COLUMN
        if not self.autocompletion:
            complete_style = CompleteStyle.READLINE_LIKE

        with self._completer_lock:
            if self.key_bindings == "vi":
                editing_mode = EditingMode.VI
            else:
                editing_mode = EditingMode.EMACS

            self.prompt_app = PromptSession(
                lexer=PygmentsLexer(LiteCliLexer),
                reserve_space_for_menu=self.get_reserved_space(),
                message=get_message,
                prompt_continuation=get_continuation,
                bottom_toolbar=get_toolbar_tokens if self.show_bottom_toolbar else None,
                complete_style=complete_style,
                input_processors=[
                    ConditionalProcessor(
                        processor=HighlightMatchingBracketProcessor(chars="[](){}"),
                        filter=HasFocus(DEFAULT_BUFFER) & ~IsDone(),
                    )
                ],
                tempfile_suffix=".sql",
                completer=DynamicCompleter(lambda: self.completer),
                history=history,
                auto_suggest=AutoSuggestFromHistory(),
                complete_while_typing=True,
                multiline=cli_is_multiline(self),
                style=style_factory(self.syntax_style, self.cli_style),
                include_default_pygments_style=False,
                key_bindings=key_bindings,
                enable_open_in_editor=True,
                enable_system_prompt=True,
                enable_suspend=True,
                editing_mode=editing_mode,
                search_ignore_case=True,
            )

        def startup_commands():
            # Run the commands from the [startup_commands] config section,
            # echoing each command and its results (errors are reported but
            # do not stop the remaining commands).
            if self.startup_commands:
                if "commands" in self.startup_commands:
                    for command in self.startup_commands['commands']:
                        try:
                            res = sqlexecute.run(command)
                        except Exception as e:
                            click.echo(command)
                            self.echo(str(e), err=True, fg="red")
                        else:
                            click.echo(command)
                            for title, cur, headers, status in res:
                                if title == 'dot command not implemented':
                                    self.echo("The SQLite dot command '" + command.split(' ', 1)[0]+"' is not yet implemented.", fg="yellow")
                                else:
                                    output = self.format_output(title, cur, headers)
                                    for line in output:
                                        self.echo(line)
                else:
                    self.echo("Could not read commands. The startup commands needs to be formatted as: \n commands = 'command1', 'command2', ...", fg="yellow")

        try:
            startup_commands()
        except Exception as e:
            self.echo("Could not execute all startup commands: \n"+str(e), fg="yellow")

        try:
            while True:
                one_iteration()
                iterations += 1
        except EOFError:
            special.close_tee()
            if not self.less_chatty:
                self.echo("Goodbye!")
def log_output(self, output):
    """Mirror *output* into the audit log file, when audit logging is on."""
    if not self.logfile:
        return
    click.echo(utf8tounicode(output), file=self.logfile)
def echo(self, s, **kwargs):
    """Write *s* to stdout via click.secho, mirroring it to the audit log.

    All keyword arguments are forwarded to click.secho().
    """
    self.log_output(s)
    click.secho(s, **kwargs)
def get_output_margin(self, status=None):
    """Number of screen rows reserved for the prompt, completion menu,
    footer, and the optional status/timing message."""
    prompt_newlines = self.get_prompt(self.prompt_format).count("\n")
    margin = self.get_reserved_space() + prompt_newlines + 2
    if status:
        # One row for the status line itself plus its embedded newlines.
        margin += 1 + status.count("\n")
    return margin
def output(self, output, status=None):
    """Output text to stdout or a pager command.

    The status text is not outputted to pager or files.

    The message will be logged in the audit log, if enabled. The
    message will be written to the tee file, if enabled. The
    message will be written to the output file, if enabled.
    """
    if output:
        # Terminal geometry decides whether results still fit on screen
        # or must be handed to the pager.
        size = self.prompt_app.output.get_size()
        margin = self.get_output_margin(status)

        fits = True
        buf = []
        output_via_pager = self.explicit_pager and special.is_pager_enabled()
        for i, line in enumerate(output, 1):
            self.log_output(line)
            special.write_tee(line)
            special.write_once(line)

            if fits or output_via_pager:
                # buffering
                buf.append(line)
                # A line wider than the terminal, or more lines than fit
                # below the prompt/footer, means the output "doesn't fit".
                if len(line) > size.columns or i > (size.rows - margin):
                    fits = False
                    if not self.explicit_pager and special.is_pager_enabled():
                        # doesn't fit, use pager
                        output_via_pager = True

                    if not output_via_pager:
                        # doesn't fit, flush buffer
                        for line in buf:
                            click.secho(line)
                        buf = []
            else:
                click.secho(line)

        if buf:
            if output_via_pager:
                # sadly click.echo_via_pager doesn't accept generators
                click.echo_via_pager("\n".join(buf))
            else:
                for line in buf:
                    click.secho(line)

    if status:
        self.log_output(status)
        click.secho(status)
def configure_pager(self):
    """Set up the external pager from the environment and config files."""
    # Provide sane defaults for less if they are empty.
    if not os.environ.get("LESS"):
        os.environ["LESS"] = "-RXF"

    cnf = self.read_my_cnf_files(["pager", "skip-pager"])
    if cnf["pager"]:
        # A pager named explicitly in the config takes precedence.
        special.set_pager(cnf["pager"])
        self.explicit_pager = True
    else:
        self.explicit_pager = False

    # "skip-pager" in the config, or enable_pager = False, disables the
    # pager entirely.
    if cnf["skip-pager"] or not self.config["main"].as_bool("enable_pager"):
        special.disable_pager()
def refresh_completions(self, reset=False):
    """Rebuild the auto-completion vocabulary in a background thread.

    :param reset: when True, clear the existing completions first
                  (used after switching databases).
    :return: a single result tuple announcing the background refresh.
    """
    if reset:
        with self._completer_lock:
            self.completer.reset_completions()
    self.completion_refresher.refresh(
        self.sqlexecute,
        self._on_completions_refreshed,
        {
            "supported_formats": self.formatter.supported_formats,
            "keyword_casing": self.completer.keyword_casing,
        },
    )

    return [
        (None, None, None, "Auto-completion refresh started in the background.")
    ]
def _on_completions_refreshed(self, new_completer):
"""Swap the completer object in cli with the newly created completer."""
with self._completer_lock:
self.completer = new_completer
if self.prompt_app:
# After refreshing, redraw the CLI to clear the statusbar
# "Refreshing completions..." indicator
self.prompt_app.app.invalidate()
def get_completions(self, text, cursor_positition):
    """Thread-safely delegate a completion lookup to the active completer.

    ``cursor_positition`` keeps its historical (misspelled) name so any
    keyword callers keep working.
    """
    document = Document(text=text, cursor_position=cursor_positition)
    with self._completer_lock:
        return self.completer.get_completions(document, None)
def get_prompt(self, string):
    """Expand prompt escape sequences in *string*.

    Supported: \\d (database), \\f (database basename), \\n (newline),
    and strftime-style date/time codes (\\D, \\m, \\P, \\R, \\r, \\s),
    plus \\_ for a space.
    """
    self.logger.debug("Getting prompt")
    dbname = self.sqlexecute.dbname or "(none)"
    now = datetime.now()
    # Ordered pairs: substitution order matches the historical behavior.
    substitutions = (
        ("\\d", dbname),
        ("\\f", os.path.basename(dbname)),
        ("\\n", "\n"),
        ("\\D", now.strftime("%a %b %d %H:%M:%S %Y")),
        ("\\m", now.strftime("%M")),
        ("\\P", now.strftime("%p")),
        ("\\R", now.strftime("%H")),
        ("\\r", now.strftime("%I")),
        ("\\s", now.strftime("%S")),
        ("\\_", " "),
    )
    for token, value in substitutions:
        string = string.replace(token, value)
    return string
def run_query(self, query, new_line=True):
    """Execute *query* and print every result set to stdout."""
    for title, cur, headers, status in self.sqlexecute.run(query):
        self.formatter.query = query
        for line in self.format_output(title, cur, headers):
            click.echo(line, nl=new_line)
def format_output(self, title, cur, headers, expanded=False, max_width=None):
    """Format a query result for display.

    :param title: optional title line emitted before the data.
    :param cur: cursor/iterable of result rows (may be falsy).
    :param headers: column headers.
    :param expanded: force vertical (one field per line) output.
    :param max_width: when set, re-format vertically if the first
                      rendered line is wider than this.
    :return: iterator of output lines.
    """
    expanded = expanded or self.formatter.format_name == "vertical"
    output = []

    output_kwargs = {
        "dialect": "unix",
        "disable_numparse": True,
        "preserve_whitespace": True,
        "preprocessors": (preprocessors.align_decimals,),
        "style": self.output_style,
    }

    if title:  # Only print the title if it's not None.
        output = itertools.chain(output, [title])

    if cur:
        column_types = None
        if hasattr(cur, "description"):

            def get_col_type(col):
                # col_type = FIELD_TYPES.get(col[1], text_type)
                # return col_type if type(col_type) is type else text_type
                return text_type

            column_types = [get_col_type(col) for col in cur.description]

        if max_width is not None:
            # Materialize the cursor so it can be consumed twice: once to
            # measure the rendered width, once to re-format vertically.
            cur = list(cur)

        formatted = self.formatter.format_output(
            cur,
            headers,
            format_name="vertical" if expanded else None,
            column_types=column_types,
            **output_kwargs
        )
        if isinstance(formatted, (text_type)):
            formatted = formatted.splitlines()
        formatted = iter(formatted)

        # Peek at the first line to measure the table width, then put it
        # back in front of the stream.
        first_line = next(formatted)
        formatted = itertools.chain([first_line], formatted)

        if (
            not expanded
            and max_width
            and headers
            and cur
            and len(first_line) > max_width
        ):
            # Too wide for the terminal: re-format the same rows
            # vertically instead.
            formatted = self.formatter.format_output(
                cur,
                headers,
                format_name="vertical",
                column_types=column_types,
                **output_kwargs
            )
            if isinstance(formatted, (text_type)):
                formatted = iter(formatted.splitlines())

        output = itertools.chain(output, formatted)

    return output
def get_reserved_space(self):
    """Lines reserved for the completion menu: 45% of the terminal
    height, capped at 8 rows."""
    _, height = shutil.get_terminal_size()
    return min(int(round(height * 0.45)), 8)
def get_last_query(self):
    """Return the text of the most recent query, or None if none were run."""
    if not self.query_history:
        return None
    return self.query_history[-1][0]
@click.command()
@click.option("-V", "--version", is_flag=True, help="Output litecli's version.")
@click.option("-D", "--database", "dbname", help="Database to use.")
@click.option(
    "-R",
    "--prompt",
    "prompt",
    help='Prompt format (Default: "{0}").'.format(LiteCli.default_prompt),
)
@click.option(
    "-l",
    "--logfile",
    type=click.File(mode="a", encoding="utf-8"),
    help="Log every query and its results to a file.",
)
@click.option(
    "--liteclirc",
    default=config_location() + "config",
    help="Location of liteclirc file.",
    type=click.Path(dir_okay=False),
)
@click.option(
    "--auto-vertical-output",
    is_flag=True,
    help="Automatically switch to vertical output mode if the result is wider than the terminal width.",
)
@click.option(
    "-t", "--table", is_flag=True, help="Display batch output in table format."
)
@click.option("--csv", is_flag=True, help="Display batch output in CSV format.")
@click.option(
    "--warn/--no-warn", default=None, help="Warn before running a destructive query."
)
@click.option("-e", "--execute", type=str, help="Execute command and quit.")
@click.argument("database", default="", nargs=1)
def cli(
    database,
    dbname,
    version,
    prompt,
    logfile,
    auto_vertical_output,
    table,
    csv,
    warn,
    execute,
    liteclirc,
):
    """A SQLite terminal client with auto-completion and syntax highlighting.

    \b
    Examples:
      - litecli lite_database
    """
    if version:
        print("Version:", __version__)
        sys.exit(0)

    litecli = LiteCli(
        prompt=prompt,
        logfile=logfile,
        auto_vertical_output=auto_vertical_output,
        warn=warn,
        liteclirc=liteclirc,
    )

    # Choose which ever one has a valid value.
    database = database or dbname

    litecli.connect(database)

    litecli.logger.debug("Launch Params: \n" "\tdatabase: %r", database)

    # --execute argument: run the query in batch mode and quit.
    if execute:
        try:
            if csv:
                litecli.formatter.format_name = "csv"
            elif not table:
                litecli.formatter.format_name = "tsv"

            litecli.run_query(execute)
            exit(0)
        except Exception as e:
            click.secho(str(e), err=True, fg="red")
            exit(1)

    if sys.stdin.isatty():
        # Interactive terminal: start the REPL.
        litecli.run_cli()
    else:
        # Batch mode: read all of stdin as SQL, then try to reattach the
        # controlling TTY so confirmation prompts can still be answered.
        stdin = click.get_text_stream("stdin")
        stdin_text = stdin.read()

        try:
            sys.stdin = open("/dev/tty")
        except (FileNotFoundError, OSError):
            litecli.logger.warning("Unable to open TTY as stdin.")

        if (
            litecli.destructive_warning
            and confirm_destructive_query(stdin_text) is False
        ):
            exit(0)
        try:
            new_line = True

            if csv:
                litecli.formatter.format_name = "csv"
            elif not table:
                litecli.formatter.format_name = "tsv"

            litecli.run_query(stdin_text, new_line=new_line)
            exit(0)
        except Exception as e:
            click.secho(str(e), err=True, fg="red")
            exit(1)
def need_completion_refresh(queries):
    """Return True if *queries* contains a statement that changes database
    metadata (alter/create/drop) or switches databases, meaning the
    auto-completion vocabulary must be refreshed.
    """
    refresh_triggers = ("alter", "create", "use", "\\r", "\\u", "connect", "drop")
    for query in sqlparse.split(queries):
        try:
            first_token = query.split()[0]
        except IndexError:
            # Empty statement (e.g. a stray ";"): skip it instead of
            # aborting the scan of the remaining statements, which the
            # old broad `except Exception: return False` did.
            continue
        if first_token.lower() in refresh_triggers:
            return True
    # Explicit False instead of the previous implicit None fall-through.
    return False
def need_completion_reset(queries):
    """Determine if the statement is a database switch such as 'use' or '\\u'.

    When a database is changed the existing completions must be reset before
    we start the completion refresh for the new database.
    """
    for query in sqlparse.split(queries):
        try:
            first_token = query.split()[0]
        except IndexError:
            # Empty statement: keep scanning instead of aborting early.
            continue
        if first_token.lower() in ("use", "\\u"):
            return True
    # Explicit False instead of the previous implicit None fall-through.
    return False
def is_mutating(status):
    """Return True when the status line begins with a data- or
    schema-mutating keyword."""
    if not status:
        return False
    mutating_keywords = {
        "insert", "update", "delete", "alter", "create",
        "drop", "replace", "truncate", "load",
    }
    first_word = status.split(None, 1)[0].lower()
    return first_word in mutating_keywords
def is_select(status):
    """Return True when the first word of *status* is 'select'."""
    if not status:
        return False
    first_word = status.split(None, 1)[0]
    return first_word.lower() == "select"
if __name__ == "__main__":
cli()
|
{"/tests/test_dbspecial.py": ["/litecli/packages/special/utils.py"], "/litecli/packages/special/iocommands.py": ["/litecli/packages/special/__init__.py", "/litecli/packages/special/main.py", "/litecli/packages/special/favoritequeries.py", "/litecli/packages/special/utils.py"], "/litecli/packages/special/dbcommands.py": ["/litecli/__init__.py", "/litecli/packages/special/__init__.py", "/litecli/packages/special/utils.py", "/litecli/packages/special/main.py"], "/litecli/encodingutils.py": ["/litecli/compat.py"], "/tests/utils.py": ["/litecli/main.py"], "/tests/test_main.py": ["/litecli/main.py", "/litecli/packages/special/main.py"], "/tests/test_parseutils.py": ["/litecli/packages/parseutils.py"], "/litecli/sqlexecute.py": ["/litecli/packages/special/utils.py"], "/litecli/sqlcompleter.py": ["/litecli/packages/parseutils.py", "/litecli/packages/special/iocommands.py", "/litecli/packages/filepaths.py"], "/tests/test_smart_completion_public_schema_only.py": ["/litecli/sqlcompleter.py"], "/litecli/config.py": ["/litecli/__init__.py"], "/litecli/packages/special/main.py": ["/litecli/packages/special/__init__.py"], "/litecli/main.py": ["/litecli/packages/special/main.py", "/litecli/sqlcompleter.py", "/litecli/clitoolbar.py", "/litecli/sqlexecute.py", "/litecli/clibuffer.py", "/litecli/config.py", "/litecli/encodingutils.py", "/litecli/lexer.py", "/litecli/__init__.py", "/litecli/packages/filepaths.py"], "/litecli/packages/filepaths.py": ["/litecli/encodingutils.py"]}
|
45,348,513
|
dbcli/litecli
|
refs/heads/main
|
/litecli/packages/filepaths.py
|
# -*- coding: utf-8
from __future__ import unicode_literals
from litecli.encodingutils import text_type
import os
def list_path(root_dir):
    """List directory contents if *root_dir* is a directory.

    :param root_dir: str
    :return: list of entry names (empty when not a directory)
    """
    if not os.path.isdir(root_dir):
        return []
    return os.listdir(root_dir)
def complete_path(curr_dir, last_dir):
    """Return the path to complete that matches the last entered component.

    If the last entered component is ~, the expanded path would not
    match, so every candidate is joined with it instead.  Returns None
    when *curr_dir* does not extend *last_dir*.

    :param curr_dir: str
    :param last_dir: str
    :return: str or None
    """
    if not last_dir or curr_dir.startswith(last_dir):
        return curr_dir
    if last_dir == "~":
        return os.path.join(last_dir, curr_dir)
    return None
def parse_path(root_dir):
    """Split *root_dir* into head and last component for the completer.

    Also return the (negative) position where the last component starts.

    :param root_dir: str path
    :return: tuple of (string, string, int)
    """
    if not root_dir:
        return "", "", 0
    base_dir, last_dir = os.path.split(root_dir)
    position = -len(last_dir) if last_dir else 0
    return base_dir, last_dir, position
def suggest_path(root_dir):
    """List all files and subdirectories in a directory.

    If the directory is not specified, suggest the filesystem root, the
    user directory, and the current and parent directories.

    :param root_dir: string: directory to list
    :return: list
    """
    if not root_dir:
        # Return a real list (the old code returned a lazy ``map``,
        # which is exhausted after one pass) so every call site gets a
        # re-iterable sequence, matching list_path()'s return type.
        defaults = (os.path.abspath(os.sep), "~", os.curdir, os.pardir)
        return [text_type(p) for p in defaults]

    if "~" in root_dir:
        root_dir = text_type(os.path.expanduser(root_dir))
    if not os.path.exists(root_dir):
        # Fall back to the deepest existing parent.
        root_dir, _ = os.path.split(root_dir)
    return list_path(root_dir)
def dir_path_exists(path):
    """Check if the directory path exists for a given file.

    For example, for a file ``/home/user/.cache/litecli/log``, check that
    ``/home/user/.cache/litecli`` exists.

    :param str path: The file path.
    :return: Whether or not the directory path exists.
    """
    parent = os.path.dirname(path)
    return os.path.exists(parent)
|
{"/tests/test_dbspecial.py": ["/litecli/packages/special/utils.py"], "/litecli/packages/special/iocommands.py": ["/litecli/packages/special/__init__.py", "/litecli/packages/special/main.py", "/litecli/packages/special/favoritequeries.py", "/litecli/packages/special/utils.py"], "/litecli/packages/special/dbcommands.py": ["/litecli/__init__.py", "/litecli/packages/special/__init__.py", "/litecli/packages/special/utils.py", "/litecli/packages/special/main.py"], "/litecli/encodingutils.py": ["/litecli/compat.py"], "/tests/utils.py": ["/litecli/main.py"], "/tests/test_main.py": ["/litecli/main.py", "/litecli/packages/special/main.py"], "/tests/test_parseutils.py": ["/litecli/packages/parseutils.py"], "/litecli/sqlexecute.py": ["/litecli/packages/special/utils.py"], "/litecli/sqlcompleter.py": ["/litecli/packages/parseutils.py", "/litecli/packages/special/iocommands.py", "/litecli/packages/filepaths.py"], "/tests/test_smart_completion_public_schema_only.py": ["/litecli/sqlcompleter.py"], "/litecli/config.py": ["/litecli/__init__.py"], "/litecli/packages/special/main.py": ["/litecli/packages/special/__init__.py"], "/litecli/main.py": ["/litecli/packages/special/main.py", "/litecli/sqlcompleter.py", "/litecli/clitoolbar.py", "/litecli/sqlexecute.py", "/litecli/clibuffer.py", "/litecli/config.py", "/litecli/encodingutils.py", "/litecli/lexer.py", "/litecli/__init__.py", "/litecli/packages/filepaths.py"], "/litecli/packages/filepaths.py": ["/litecli/encodingutils.py"]}
|
45,357,227
|
KiboNaku/EE461L_Project
|
refs/heads/master
|
/base/routes.py
|
from models.user import RentRecord
from flask import Flask, jsonify, request
from base import app, db
from models.user import User, RentRecord
from models.project import Project
from models.hardware import Hardware
import json
import jwt
import datetime
from functools import wraps
def token_required(function):
    """Decorator: require a valid JWT in the request's JSON body.

    Expects the payload to carry a "token" key; decodes it with the app
    secret and passes the decoded claims to the view as ``token_data``.
    Responds 403 when the token is missing or invalid.
    """
    @wraps(function)
    def decorated(*args, **kwargs):
        # .get() on a defaulted dict avoids the KeyError/TypeError (and
        # resulting 500) the old request.get_json()["token"] raised when
        # the client sent no body or omitted the token.
        payload = request.get_json() or {}
        token = payload.get("token")
        if not token:
            app.logger.debug("Token is missing")
            return jsonify({'error': 'Token is missing!'}), 403
        try:
            data = jwt.decode(token, app.config['SECRET_KEY'])
        except Exception as e:
            # %s formatting: the old debug("Token is invalid", e) passed
            # the exception as a stray logging argument.
            app.logger.debug("Token is invalid: %s", e)
            return jsonify({'error': 'Token is invalid!'}), 403
        return function(*args, **kwargs, token_data=data)
    return decorated
@app.route("/api/validate-token", methods=["POST"])
@token_required
def validate_token(token_data):
r_val = {"email": None, "success": 0, "error": None}
return r_val
@app.route("/api/register", methods=["POST"])
def register():
# # initialize return value
r_val = {"email": None, "success": 0, "error": None}
# # get user info
# validator, user = User.get_json()
record = request.get_json()
exist_email = User.objects(email=record['email']).first()
exist_username = User.objects(username=record['username']).first()
if not exist_email and not exist_username:
user = User(
username=record["username"],
email=record["email"],
password=record["password"],
)
user.save()
else:
r_val["success"] = -1
if exist_email:
r_val["error"] = "An account with the email already exists."
else:
r_val["error"] = "An account with the username already exists."
return r_val
@app.route("/api/login", methods=["POST"])
def login():
# initialize return value
r_val = {"success": 0, "error": None, "token": None}
# # get args from front end
record = json.loads(request.data)
user = User.objects(email=record['email']).first()
if not user:
r_val["success"] = -1
r_val["error"] = "Invalid email. No account exists."
else:
if record["password"] != user["password"]:
r_val["success"] = -1
r_val["error"] = "Invalid email and password combination."
else:
token = jwt.encode(
{
'user': user["username"],
'exp': datetime.datetime.utcnow() + datetime.timedelta(days=1)
},
app.config['SECRET_KEY'])
r_val["token"] = token.decode('UTF-8')
return r_val
@app.route("/api/logout", methods=["POST"])
def logout():
# TODO: consider making a list of blacklisted tokens for logged out users
pass
@app.route("/api/fetch-project", methods=["GET"])
def fetch_project():
project_list = Project.objects()
projects = []
for project in project_list:
projects.append(project.to_json())
result = {"projects": projects}
return result
@app.route("/api/join-project", methods=["POST"])
@token_required
def join_project(token_data):
r_val = {"success": 0, "error": None}
data = json.loads(request.data)
user = User.objects(username=token_data['user']).first()
project_request = data.get("project")
project = Project.objects(id=project_request['id']).first()
if not project:
r_val["success"] = -1
r_val["error"] = "No project found on query."
elif not user:
r_val["success"] = -1
r_val["error"] = "No user found."
else:
project.update(add_to_set__contributors=[user])
user.update(add_to_set__contributed_projects=[project])
return r_val
@app.route("/api/fetch-hardware", methods=["GET"])
def fetch_hardware():
hardware_list = Hardware.objects()
hardware_sets = []
for hardware in hardware_list:
hardware_sets.append(hardware.to_json())
result = jsonify({"HWSets": hardware_sets})
return result
@app.route("/api/fetch-user-hardware", methods=["POST"])
@token_required
def fetch_user_hardware(token_data):
r_val = {"error": None, "rented_hardware": []}
user = User.objects(username=token_data['user']).first()
rented = user.rented_hardware
for r in rented:
r_val["rented_hardware"].append(
r.to_json()
)
return r_val
@app.route("/api/fetch-user-projects", methods=["POST"])
@token_required
def fetch_user_projects(token_data):
r_val = {"error": None, "owned_projects": [], "contr_projects": []}
user = User.objects(username=token_data['user']).first()
owned_projects = user.owned_projects
contr_projects = user.contributed_projects
for project in owned_projects:
r_val["owned_projects"].append(project.to_json())
for project in contr_projects:
r_val["contr_projects"].append(project.to_json())
return r_val
@app.route("/api/rent-hardware", methods=["POST"])
@token_required
def rent_hardware(token_data):
first = True
hardware_list = Hardware.objects()
r_val = {"success": 0, "error": None, "data": ""}
wanted_hardware = request.get_json()["hardware"]
user = User.objects(username=token_data['user']).first()
r_val["data"] = "User " + str(user.username) + " rented the following hardware:"
if user:
if(enough_available_hardware(wanted_hardware)):
for ware in range(5):
check = "HWSet" + str(ware + 1)
if(int(wanted_hardware[check]) > 0):
if not first:
r_val["data"] += ", " + wanted_hardware[check] + " of " + check
else:
r_val["data"] += " " + wanted_hardware[check] + " of " + check
first = False
hw = Hardware.objects(hardware_name=check).first()
old_record = RentRecord.objects(hardware=hw.pk, user=user.pk).first()
print(old_record)
if old_record != None:
old_value = old_record.amount
old_record.update(set__amount=old_value + int(wanted_hardware[check]))
old_record.save()
else:
today = datetime.date.today()
expiration = datetime.date(today.year + (today.month + 6)//12, (today.month + 6) % 12, today.day)
record = RentRecord(
user=user.to_dbref(),
hardware=Hardware.objects(hardware_name=check).first().to_dbref(),
amount=wanted_hardware[check],
date_rented=today,
date_expired=expiration
)
record.save()
user.update(add_to_set__rented_hardware=[record.to_dbref()])
hwset = Hardware.objects(hardware_name=check).first()
hwset.update(set__available_count=hardware_list[ware].available_count - int(wanted_hardware[check]))
hwset.reload()
else:
r_val["success"] = -1
r_val["error"] = "You cannot rent more hardware than is currently available."
return r_val
return r_val
def enough_available_hardware(hardware):
    """Return True when every requested amount fits within the matching
    set's available count.  Assumes exactly five sets named HWSet1..HWSet5."""
    hardware_list = Hardware.objects()
    return all(
        int(hardware["HWSet" + str(i + 1)]) <= int(hardware_list[i].available_count)
        for i in range(5)
    )
@app.route("/api/return-hardware", methods=["POST"])
@token_required
def return_hardware(token_data):
return_hardware = request.get_json()["hardware"]
hardware_list = Hardware.objects()
r_val = { "success": 0, "error": None }
user = User.objects(username=token_data['user']).first()
if user:
user_hw = get_user_hw(user)
for hardware in return_hardware:
if int(return_hardware[hardware]) > 0:
if int(return_hardware[hardware]) <= user_hw[int(get_hardware_digit(hardware))-1]:
hardware_list[int(get_hardware_digit(hardware))-1].update(set__available_count=
hardware_list[int(get_hardware_digit(hardware))-1]
.available_count + int(return_hardware[hardware]))
hw = Hardware.objects(hardware_name=hardware).first()
record = RentRecord.objects(hardware=hw.pk, user=user.pk).first()
record.update(set__amount=user_hw[int(get_hardware_digit(hardware))-1] - int(return_hardware[hardware]))
record.reload()
else:
r_val["success"] = -1
r_val["error"] = "You cannot return more hardware than you own."
return r_val
return r_val
def get_user_hw(user):
    """Tally the amounts a user currently rents, indexed by HWSet number.

    Returns a five-slot list where slot i holds the total rented from
    HWSet(i+1).
    """
    totals = [0] * 5
    for record in user.rented_hardware:
        digit = get_hardware_digit(record.hardware.hardware_name)
        totals[int(digit) - 1] += record.amount
    return totals
def get_hardware_digit(r):
    """Return the first digit character found in *r*, or the int 0 when
    there is none (callers wrap the result in int(), so both work)."""
    return next((ch for ch in r if ch.isdigit()), 0)
@app.route("/api/add-project", methods=["POST"])
@token_required
def add_project(token_data):
r_val = {"error": None}
project = request.get_json()["project"]
user = User.objects(username=token_data['user']).first()
if user:
project = Project(
project_name=project["name"],
owner=user.to_dbref(),
description=project["description"],
tags=project["tags"]
)
# TODO: add hardware references and find total cost
project.total_cost = 0
project.save()
user.update(add_to_set__owned_projects=[project.to_dbref()])
return r_val
else:
app.logger.debug("Username is invalid. Could not add project.")
r_val["error"] = "Username is invalid"
return r_val, 403
@app.route("/api/user-info", methods=["POST"])
@token_required
def user_info(token_data):
user_request = token_data["user"]
user = User.objects(username=user_request).first()
return {"user": user.to_json()}
@app.route("/api/fetch-project-info", methods=["POST"])
def fetch_project_info():
project_id = request.get_json()["project_id"]
project = Project.objects(pk=project_id).first()
return {"project": project.to_json()}
|
{"/models/user.py": ["/base/__init__.py"], "/test_backend.py": ["/base/routes.py", "/models/user.py", "/models/hardware.py", "/models/project.py", "/base/__init__.py"], "/models/hardware.py": ["/base/__init__.py"], "/models/__init__.py": ["/models/hardware.py", "/models/project.py", "/models/user.py"], "/models/dataset.py": ["/base/__init__.py"], "/app.py": ["/base/__init__.py", "/base/routes.py", "/models/hardware.py"], "/base/routes.py": ["/models/user.py", "/base/__init__.py", "/models/project.py", "/models/hardware.py", "/models/dataset.py"], "/models/project.py": ["/base/__init__.py"], "/add_dummy_hardware.py": ["/models/__init__.py"]}
|
45,357,228
|
KiboNaku/EE461L_Project
|
refs/heads/master
|
/app.py
|
from base import app
from base.routes import *
from models.hardware import Hardware
import os
# from flask import Flask, render_template,send_from_directory,request, jsonify, make_response
# from flask_cors import CORS
# if __name__ == "__main__":
# app.run(debug=True, port=5000)
if __name__ == "__main__":
app.run(host='0.0.0.0')
# hardware = Hardware(
# hardware_name="HWSet1",
# price_per_unit="20",
# total_count="10",
# available_count="10",
# extra_time_rate="30"
# )
# hardware.save()
# hardware = Hardware(
# hardware_name="HWSet2",
# price_per_unit="10",
# total_count="25",
# available_count="25",
# extra_time_rate="15"
# )
# hardware.save()
# hardware = Hardware(
# hardware_name="HWSet3",
# price_per_unit="50",
# total_count="5",
# available_count="5",
# extra_time_rate="60"
# )
# hardware.save()
# hardware = Hardware(
# hardware_name="HWSet4",
# price_per_unit="15",
# total_count="15",
# available_count="15",
# extra_time_rate="20"
# )
# hardware.save()
# hardware = Hardware(
# hardware_name="HWSet5",
# price_per_unit="5",
# total_count="50",
# available_count="50",
# extra_time_rate="6"
# )
# hardware.save()
|
{"/models/user.py": ["/base/__init__.py"], "/test_backend.py": ["/base/routes.py", "/models/user.py", "/models/hardware.py", "/models/project.py", "/base/__init__.py"], "/models/hardware.py": ["/base/__init__.py"], "/models/__init__.py": ["/models/hardware.py", "/models/project.py", "/models/user.py"], "/models/dataset.py": ["/base/__init__.py"], "/app.py": ["/base/__init__.py", "/base/routes.py", "/models/hardware.py"], "/base/routes.py": ["/models/user.py", "/base/__init__.py", "/models/project.py", "/models/hardware.py", "/models/dataset.py"], "/models/project.py": ["/base/__init__.py"], "/add_dummy_hardware.py": ["/models/__init__.py"]}
|
45,363,998
|
telemahos/flask_fin_app
|
refs/heads/main
|
/config.py
|
"""Flask configuration."""
TESTING = True
DEBUG = True
FLASK_ENV = 'development'
SECRET_KEY = 'GDtfDCFYjD'
|
{"/application.py": ["/forms.py"]}
|
45,363,999
|
telemahos/flask_fin_app
|
refs/heads/main
|
/application.py
|
import os
from cs50 import SQL
# import sqlite3
from flask import Flask, flash, jsonify, redirect, render_template, request, session, url_for
from datetime import datetime, date
from flask_session import Session
from tempfile import mkdtemp
from werkzeug.exceptions import default_exceptions, HTTPException, InternalServerError
from werkzeug.security import check_password_hash, generate_password_hash
from helpers import apology, login_required, usd
# lookup,
from forms import IncomeForm, OutcomeForm
# Configure application
app = Flask(__name__)
# Pull TESTING/DEBUG/FLASK_ENV/SECRET_KEY from config.py.
app.config.from_pyfile('config.py')

# Ensure templates are auto-reloaded
app.config["TEMPLATES_AUTO_RELOAD"] = True

# Ensure responses aren't cached
@app.after_request
def after_request(response):
    """Disable client-side caching on every response."""
    headers = response.headers
    headers["Cache-Control"] = "no-cache, no-store, must-revalidate"
    headers["Expires"] = 0
    headers["Pragma"] = "no-cache"
    return response
# Custom filter
# app.jinja_env.filters["usd"] = usd

# Configure session to use filesystem (instead of signed cookies);
# session files go into a fresh temp directory per process.
app.config["SESSION_FILE_DIR"] = mkdtemp()
app.config["SESSION_PERMANENT"] = False
app.config["SESSION_TYPE"] = "filesystem"
Session(app)

# Configure CS50 Library to use SQLite database
# db = SQL("sqlite:///finance.db")
db = SQL("sqlite:///test.db")
@app.route("/")
@login_required
def index():
# Get user account Info
user_id = session["user_id"]
greeting = "HALLO from Flask"
today = date.today()
the_date = today.strftime("%B %d, %Y")
numRows = db.execute('SELECT COUNT(*) FROM (SELECT * FROM user)')
rows = db.execute("SELECT * FROM user WHERE id = :user_id", user_id=user_id)
print(numRows)
# If no transaction are yet made
if not rows:
return render_template("index.html")
else:
for row in rows:
username = row["username"]
return render_template("index.html", user_id=user_id, username=username, the_date=the_date, greeting=greeting)
# POST INCOME
@app.route("/post-income", methods=["GET","POST"])
@login_required
def post_income():
today = date.today()
# today = "2021-03-11"
the_date = today.strftime("%B %d, %Y")
form = IncomeForm()
numRows = db.execute('SELECT COUNT(*) FROM (SELECT * FROM income WHERE date = :today)', today=today)
rows = db.execute("SELECT * FROM income WHERE date = :today", today=today)
print(numRows)
# There is no Rows with this date
if request.method == "GET":
if not rows:
# print("6666")
print("not ROW")
return render_template("post-income.html", form=form, the_date=the_date)
# Else theere is a POST Method to Collect the new Income form data an inert it to database
else:
print("0000")
z_count = request.form.get("z_count")
early_income = request.form.get("early_income")
late_income = request.form.get("late_income")
notes = request.form.get("notes")
if z_count or early_income:
print("1111")
# Insert If there is no income yet at this day
db.execute("INSERT INTO income (date, z_count, early_income, late_income, notes) VALUES (:date, :z_count, :early_income, :late_income, :notes)", date=today, z_count=z_count, early_income=early_income, late_income=late_income, notes=notes)
return redirect("/")
""" Income Form """
if form.validate_on_submit():
return redirect(url_for("index"))
return render_template("post-income.html", form=form)
@app.route("/edit-income", methods=["GET","POST"])
@login_required
def edit_income():
today = date.today()
# today = "2021-03-11"
the_date = today.strftime("%B %d, %Y")
form = IncomeForm()
numRows = db.execute('SELECT COUNT(*) FROM (SELECT * FROM income WHERE date = :today)', today=today)
rows = db.execute("SELECT * FROM income WHERE date = :today", today=today)
print(numRows)
# There is no Rows with this date
if not rows:
print("6666")
print("not ROW")
return render_template("income.html", form=form, the_date=the_date)
# If there is a row, colect the row data and send them into the form
else:
print("7777")
for row in rows:
row_id = row["id"]
today = row["date"]
z_count = row["z_count"]
early_income = row["early_income"]
late_income = row["late_income"]
notes = row["notes"]
# Send all requested data to the income form
if request.method == "GET":
# print("8888")
return render_template("income.html", today=today, z_count=z_count, early_income=early_income, late_income=late_income, notes=notes, form=form)
# Else theere is a POST Method to Collect the new Income form data an inert it to database
else:
# print("0000")
z_count = request.form.get("z_count")
early_income = request.form.get("early_income")
late_income = request.form.get("late_income")
notes = request.form.get("notes")
if not rows:
# print("1111")
# Insert If there is no income yet at this day
db.execute("INSERT INTO income (date, z_count, early_income, late_income, notes) VALUES (:date, :z_count, :early_income, :late_income, :notes)", date=today, z_count=z_count, early_income=early_income, late_income=late_income, notes=notes)
return redirect("/")
else:
# for row in rows:
# print("2222")
# Else update the day entry with new data
sql_update_query = """UPDATE income SET z_count = ? WHERE id = ? """
db.execute(sql_update_query, z_count, row_id)
return redirect("/")
""" Income Form """
if form.validate_on_submit():
return redirect(url_for("index"))
return render_template("income.html", form=form)
@app.route("/outcome")
@login_required
def outcome():
    """Render the outcome (expenses) page for the logged-in user."""
    _user = session["user_id"]  # read the session so a missing login surfaces
    return render_template("outcome.html")
@app.route("/staff")
@login_required
def staff():
    """Render the staff management page."""
    _user = session["user_id"]  # read the session so a missing login surfaces
    return render_template("staff.html")
@app.route("/login", methods=["GET", "POST"])
def login():
    """Log user in."""
    # Drop any previously logged-in user.
    session.clear()

    # GET (link click / redirect): just show the login form.
    if request.method != "POST":
        return render_template("login.html")

    username = request.form.get("username")
    password = request.form.get("password")

    # Both credentials are mandatory.
    if not username:
        return apology("must provide username", 403)
    if not password:
        return apology("must provide password", 403)

    # Look the user up by username.
    rows = db.execute("SELECT * FROM user WHERE username = :username",
                      username=username)

    # Exactly one match whose stored hash verifies, otherwise reject.
    if len(rows) != 1 or not check_password_hash(rows[0]["hash"], password):
        return apology("invalid username and/or password", 403)

    # Remember which user has logged in, then go home.
    session["user_id"] = rows[0]["id"]
    return redirect("/")
@app.route("/logout")
def logout():
    """Log the user out and send them back to the login screen."""
    session.clear()  # forget the user_id (and everything else in the session)
    return redirect("/")
def errorhandler(e):
    """Render any error through the apology page."""
    # Non-HTTP exceptions are reported as a generic 500.
    err = e if isinstance(e, HTTPException) else InternalServerError()
    return apology(err.name, err.code)
# Register errorhandler() for every standard HTTP error code so any
# aborted request is rendered through apology() instead of Flask's
# default error page.
for code in default_exceptions:
    app.errorhandler(code)(errorhandler)
# User can add cash in his account
# @app.route("/cash", methods=["GET", "POST"])
# @login_required
# def cash():
# """Add Cash in your account"""
# user_id = session["user_id"]
# # Get the user cash ammount
# cash_row = db.execute("SELECT * FROM budget WHERE user_id = :user_id", user_id=user_id)
# cash = 0
# for i in cash_row:
# cash = i["cash"]
# print("cash", cash)
# if request.method == "GET":
# return render_template("cash.html", cash=cash)
# else:
# new_cash = request.form.get("cash")
# cash = round(float(cash) + float(new_cash), 2)
# # Update the cash amount from user, in the budget table
# sql_update_query = """UPDATE budget SET cash = ? WHERE user_id = ?"""
# data = (cash, user_id)
# db.execute(sql_update_query, data)
# flash("CASH ADDED!")
# return redirect("/")
|
{"/application.py": ["/forms.py"]}
|
45,364,000
|
telemahos/flask_fin_app
|
refs/heads/main
|
/forms.py
|
from flask_wtf import FlaskForm
from wtforms import StringField, TextField, SubmitField, TextAreaField, DateField, BooleanField
from wtforms.validators import DataRequired, Length, ValidationError
class IncomeForm(FlaskForm):
    """Daily income entry form.

    The date is required; the monetary fields are free-text strings
    (conversion/validation happens in the view).  Labels are Greek.
    """
    date = StringField("Ημερομηνία", [DataRequired()])  # entry date
    z_count = StringField("Z")  # Z-report (cash register) counter
    early_income = StringField("Πρωϊνός Τζίρος")  # morning turnover
    late_income = StringField("Βραδυνός Τζίρος")  # evening turnover
    # Notes must be at least 4 characters when provided.
    notes = TextAreaField("Παρατηρήσεις", [Length(min=4, message=("Start a Note!"))] )
    submit = SubmitField("Καταχώρηση")
class OutcomeForm(FlaskForm):
    """Expense (outcome) entry form; monetary/percentage fields are free-text.

    Labels are Greek; field names are referenced by the templates.
    """
    date = DateField("Ημερομηνία")  # expense date
    description = TextAreaField("Περιγραφή")  # description
    invoice_num = StringField("Αρ. Τιμολογίου")  # invoice number
    total_cost = StringField("Συνολικό Κόστος")  # total cost
    extra_cost = StringField("Επιπλέον Κοστολόγιο")  # additional cost
    tax_percent = StringField("ΦΠΑ Ποσοστό")  # VAT percentage
    tax_percent2 = StringField("Επιπλέον ΦΠΑ Ποσοστό")  # additional VAT percentage
    supplier = StringField("Προμηθευτής")  # supplier
    staff_id = StringField("Εργαζόμενος")  # employee
    # Expense-category flags (fixed / purchases / salaries / insurance / misc).
    is_fix_cost = BooleanField("Πάγια έξοδα")
    is_purchase_cost = BooleanField("Έξοδα αγορών")
    is_salary_cost = BooleanField("Έξοδα Προσωπικού")
    is_insurance_cost = BooleanField("Έξοδα Ασφάλειας/ΙΚΑ")
    is_misc_cost = BooleanField("Διάφορα Έξοδα")
    # NOTE(review): "paymeny_way" is a typo for "payment_way", but renaming
    # would break templates that reference it — kept as-is.
    paymeny_way = StringField("Τρόπος Πήρωμής")  # payment method
    is_paid = StringField("Είναι πληρωμένο;")  # paid yet?
    notes = TextAreaField("Παρατηρήσεις", [Length(min=4, message=("Start a Note!"))] )
|
{"/application.py": ["/forms.py"]}
|
45,396,806
|
lea98/search_books
|
refs/heads/main
|
/helpers/general.py
|
def match_author(lista, author=None, title=None):
    """Filter *lista* (dicts with "author" and "title" keys) by the criteria.

    With both given, an item must match both; with only one, that one
    decides.  *author* matches by membership in the item's "author" list,
    *title* by exact equality.
    """
    if author and title:
        return [item for item in lista
                if author in item["author"] and item["title"] == title]
    if title:
        return [item for item in lista if item["title"] == title]
    return [item for item in lista if author in item["author"]]
|
{"/app.py": ["/beautifulsoup_bookstores/znanje.py", "/helpers/general.py", "/selenium_bookstores/knjiga.py", "/views/__init__.py", "/helpers/models.py"], "/views/edit_oglas.py": ["/helpers/models.py"], "/views/register.py": ["/helpers/models.py"], "/views/dashboard.py": ["/helpers/models.py", "/views/add_oglas.py"], "/views/oglasnik.py": ["/helpers/models.py"], "/views/delete_oglas.py": ["/helpers/models.py"], "/views/login.py": ["/helpers/models.py"], "/views/handle_data.py": ["/helpers/database.py"], "/helpers/database.py": ["/beautifulsoup_bookstores/znanje.py", "/helpers/general.py", "/helpers/models.py", "/selenium_bookstores/knjiga.py", "/selenium_bookstores/mozaik.py"], "/views/add_oglas.py": ["/helpers/models.py"]}
|
45,396,807
|
lea98/search_books
|
refs/heads/main
|
/helpers/models.py
|
from flask_login import UserMixin
from flask_sqlalchemy import SQLAlchemy
from datetime import datetime
db = SQLAlchemy()  # shared SQLAlchemy instance; bound to the app in app.py
# Association table for the many-to-many Books <-> Authors relationship.
book_authors = db.Table(
    "book_authors",
    db.Column("book_id", db.Integer, db.ForeignKey("books.id")),
    db.Column("author_id", db.Integer, db.ForeignKey("authors.id")),
)
class Authors(db.Model):
    """An author; linked to Books through the book_authors table."""
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(200))
    def __repr__(self):
        return '<Authors %r>' % self.id
class Books(db.Model):
    """A book; many-to-many with Authors, one-to-many with Offers."""
    id = db.Column(db.Integer, primary_key=True)
    title = db.Column(db.String(200))
    # Many-to-many link to Authors via the book_authors association table.
    book_authors_connection = db.relationship(
        "Authors", secondary=book_authors, backref=db.backref("auth", lazy="dynamic")
    )
    # One-to-many: every shop offer found for this book.
    offer = db.relationship("Offers", backref=db.backref("off"))
    def __repr__(self):
        return '<Books %r>' % self.id
class Offers(db.Model):
    """A single shop offer for a book; the offer URL is the primary key."""
    link = db.Column(db.String(200), primary_key=True)
    price = db.Column(db.String(200))  # stored as scraped text, not numeric
    book_id = db.Column(db.Integer, db.ForeignKey("books.id"))
    pages_id = db.Column(db.Integer, db.ForeignKey("pages.id"))  # which shop
    date_added = db.Column(db.DateTime, default=datetime.utcnow)
    def __repr__(self):
        return "<Offers %r>" % self.link
class Pages(db.Model):
    """A bookshop site that gets scraped (rows are seeded in app.py)."""
    id = db.Column(db.Integer, primary_key=True)
    link = db.Column(db.String(200))
    name = db.Column(db.String(200))
    image = db.Column(db.String(200))  # logo filename
    offer = db.relationship("Offers", backref=db.backref("offp"))
    def __repr__(self):
        return "<Pages %r>" % self.id
class Oglasi(db.Model):
    """A classified ad ("oglas"); stored in the separate 'oglasnik' bind."""
    __bind_key__ = "oglasnik"
    id = db.Column(db.Integer, primary_key=True)
    title = db.Column(db.String(30))
    price = db.Column(db.String(10))
    user_id = db.Column(db.Integer, db.ForeignKey("users.id"))  # the poster
    body = db.Column(db.String(300))
    date_created = db.Column(db.DateTime, default=datetime.utcnow)
    img_url = db.Column(db.String(30))  # uploaded image filename
    def __repr__(self):
        return "<Oglasi %r>" % self.id
class Users(db.Model, UserMixin):
    """An account for the classifieds section; 'oglasnik' bind."""
    __bind_key__ = "oglasnik"
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(30))
    email = db.Column(db.String(30))
    username = db.Column(db.String(30))
    password = db.Column(db.String(120))  # werkzeug password hash, not plaintext
    ogl = db.relationship("Oglasi", backref=db.backref("ogl_user"))
    def __repr__(self):
        return '<Users %r>' % self.id
|
{"/app.py": ["/beautifulsoup_bookstores/znanje.py", "/helpers/general.py", "/selenium_bookstores/knjiga.py", "/views/__init__.py", "/helpers/models.py"], "/views/edit_oglas.py": ["/helpers/models.py"], "/views/register.py": ["/helpers/models.py"], "/views/dashboard.py": ["/helpers/models.py", "/views/add_oglas.py"], "/views/oglasnik.py": ["/helpers/models.py"], "/views/delete_oglas.py": ["/helpers/models.py"], "/views/login.py": ["/helpers/models.py"], "/views/handle_data.py": ["/helpers/database.py"], "/helpers/database.py": ["/beautifulsoup_bookstores/znanje.py", "/helpers/general.py", "/helpers/models.py", "/selenium_bookstores/knjiga.py", "/selenium_bookstores/mozaik.py"], "/views/add_oglas.py": ["/helpers/models.py"]}
|
45,396,808
|
lea98/search_books
|
refs/heads/main
|
/views/scraper.py
|
from flask import Blueprint, render_template
bp = Blueprint("scraper", __name__)  # registered by app.py
@bp.route("/scraper", methods=["GET", "POST"])
def scraper():
    """Render the book-scraper search page (its form posts to /handle_data)."""
    return render_template("scraper.html")
|
{"/app.py": ["/beautifulsoup_bookstores/znanje.py", "/helpers/general.py", "/selenium_bookstores/knjiga.py", "/views/__init__.py", "/helpers/models.py"], "/views/edit_oglas.py": ["/helpers/models.py"], "/views/register.py": ["/helpers/models.py"], "/views/dashboard.py": ["/helpers/models.py", "/views/add_oglas.py"], "/views/oglasnik.py": ["/helpers/models.py"], "/views/delete_oglas.py": ["/helpers/models.py"], "/views/login.py": ["/helpers/models.py"], "/views/handle_data.py": ["/helpers/database.py"], "/helpers/database.py": ["/beautifulsoup_bookstores/znanje.py", "/helpers/general.py", "/helpers/models.py", "/selenium_bookstores/knjiga.py", "/selenium_bookstores/mozaik.py"], "/views/add_oglas.py": ["/helpers/models.py"]}
|
45,396,809
|
lea98/search_books
|
refs/heads/main
|
/selenium_bookstores/mozaik.py
|
from selenium import webdriver
from selenium.common.exceptions import NoSuchElementException
from selenium.webdriver.chrome.options import Options
from selenium.webdriver.common.by import By
import os
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.support.wait import WebDriverWait
def get_currency(product):
    """Return the price text of *product*, normalising NBSP to a space.

    Discounted products carry their current price inside <ins>; fall back
    to the plain price <span> when no discount markup is present.
    """
    try:
        # Discounted price.  (The original located this element twice —
        # once to probe, once to read — a single lookup suffices.)
        element = product.find_element_by_xpath(".//p[@class='price']//ins/span")
    except NoSuchElementException:
        # Regular, non-discounted price.
        element = product.find_element_by_xpath(".//p[@class='price']/span")
    return element.text.replace("\xa0", " ")
def get_authors(product, index):
    """Return the author names of the *index*-th (1-based) book card."""
    anchors = product.find_elements_by_xpath(
        f"(//div[@class='books-container']/div[contains(@class,'book')])[{index}]//div[@class='author']//a"
    )
    return [anchor.text for anchor in anchors]
def form_text_to_send(task_title, task_cont):
    """Build the string typed into the site's search box.

    Joins content and title (content first) and appends ENTER to submit.
    Returns "" when both parts are empty — the original returned [],
    which was inconsistent with the str returned on every other path
    (both are falsy, so callers checking truthiness are unaffected).
    """
    if task_title and task_cont:
        return task_cont + " " + task_title + Keys.ENTER
    if task_title:
        return task_title + Keys.ENTER
    if task_cont:
        return task_cont + Keys.ENTER
    return ""
def mozaik(auth_name, book_title):
    """Scrape mozaik-knjiga.hr search results for the given author/title.

    Returns a list of dicts with price, author list, title, link and
    page id 3 (this store's id in the Pages table).
    """
    options = Options()
    options.add_argument("--headless")
    options.add_argument(
        "user-agent=Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.101 Safari/537.36"
    )
    options.add_argument("--disable-dev-shm-usage")
    options.add_argument("--no-sandbox")
    # Chrome binary / chromedriver paths come from the (Heroku) environment.
    options.binary_location = os.environ.get("GOOGLE_CHROME_BIN")
    # LOCAL TESTING - change executeble path
    # driver_path = r"C:\Users\LeaBratić\Desktop\chromedriver.exe"
    driver = webdriver.Chrome(
        executable_path=os.environ.get("CHROMEDRIVER_PATH"), options=options
    )
    # BUGFIX: the original leaked the browser process on the early
    # `return []` (and on any exception) because driver.quit() was only
    # reached at the end; quit in a finally instead.
    try:
        driver.minimize_window()
        # --| Parse or automation
        url = "https://mozaik-knjiga.hr/"
        driver.get(url)
        WebDriverWait(driver, 10).until(
            EC.presence_of_element_located((By.XPATH, "//input[@class='pretrazi-knjige']"))
        )
        text_to_send = form_text_to_send(auth_name, book_title)
        input_element = driver.find_element_by_xpath("//input[@class='pretrazi-knjige']")
        input_element.send_keys(text_to_send)
        WebDriverWait(driver, 10).until(
            EC.presence_of_element_located((By.XPATH, "//h1[contains(text(),'Rezultati')]"))
        )
        try:
            # NOTE(review): find_elements returns [] rather than raising,
            # so this except appears to be dead; kept for safety.
            all_books = driver.find_elements_by_xpath(
                "//div[@class='books-container']/div[contains(@class,'book')]"
            )
        except NoSuchElementException:
            return []
        lista = []
        for index, one_book in enumerate(all_books):
            lista.append(
                {
                    "price": get_currency(one_book),
                    "author": get_authors(one_book, index + 1),
                    "title": one_book.find_element_by_xpath(
                        ".//div[@class='title']//a"
                    ).get_attribute("title"),
                    "link": one_book.find_element_by_xpath(
                        ".//div[@class='title']//a"
                    ).get_attribute("href"),
                    "page": 3,
                }
            )
        return lista
    finally:
        driver.quit()
|
{"/app.py": ["/beautifulsoup_bookstores/znanje.py", "/helpers/general.py", "/selenium_bookstores/knjiga.py", "/views/__init__.py", "/helpers/models.py"], "/views/edit_oglas.py": ["/helpers/models.py"], "/views/register.py": ["/helpers/models.py"], "/views/dashboard.py": ["/helpers/models.py", "/views/add_oglas.py"], "/views/oglasnik.py": ["/helpers/models.py"], "/views/delete_oglas.py": ["/helpers/models.py"], "/views/login.py": ["/helpers/models.py"], "/views/handle_data.py": ["/helpers/database.py"], "/helpers/database.py": ["/beautifulsoup_bookstores/znanje.py", "/helpers/general.py", "/helpers/models.py", "/selenium_bookstores/knjiga.py", "/selenium_bookstores/mozaik.py"], "/views/add_oglas.py": ["/helpers/models.py"]}
|
45,396,810
|
lea98/search_books
|
refs/heads/main
|
/beautifulsoup_bookstores/znanje.py
|
import time
from bs4 import BeautifulSoup
import requests
def form_url_from_title_and_auth(task_cont, task_title):
    """Join the non-empty search terms and make them query-string friendly.

    When both are given they are joined with a single space (content
    first); spaces are then replaced by '+'.
    """
    if task_cont and task_title:
        combined = task_cont + " " + task_title
    elif task_cont:
        combined = task_cont
    else:
        combined = task_title
    return combined.replace(" ", "+")
def znanje(auth_name, auth_title):
    """Scrape znanje.hr search results for the given author/title.

    Returns a list of dicts with price, author list, title, link and
    page id 2 (this store's id in the Pages table).
    """
    hdr = {
        "Accept": "text/html,application/xhtml+xml,*/*",
        "user-agent": "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/48.0.2564.116 Safari/537.36",
    }
    to_url = form_url_from_title_and_auth(auth_name, auth_title)
    url = f"https://znanje.hr/pretraga?query={to_url}"
    page = requests.get(url, headers=hdr)
    time.sleep(1)  # brief pause; presumably to go easy on the site
    soup = BeautifulSoup(page.content, "html.parser")
    # data = soup.findAll('div', attrs={'class','product-card'})
    # NOTE(review): attrs is passed a *set*, not a dict; this appears to
    # rely on BeautifulSoup's class matching against the set members and
    # does select the intended elements — a dict {'class': ...} would be
    # clearer.  Confirm before changing.
    price = soup.findAll("h4", attrs={"class", "product-price"})
    title_meta = soup.findAll("h3", attrs={"class", "product-title"})
    authors_meta = soup.findAll("p", attrs={"class", "product-author"})
    lista = []
    for (i, j, k) in zip(price, authors_meta, title_meta):
        lista.append(
            {
                # first token of the cleaned price text (drops the currency)
                "price": i.text.replace("\n", "").strip().split(" ")[0],
                # comma-separated author string -> list of names
                "author": j.findChildren("a")[0]
                .text.replace("\n", "")
                .strip()
                .split(", "),
                "title": k.findChildren("span")[0].text.strip(),
                "link": f"https://znanje.hr/{k.findChildren('a')[0]['href']}",
                "page": 2,
            }
        )
    return lista
|
{"/app.py": ["/beautifulsoup_bookstores/znanje.py", "/helpers/general.py", "/selenium_bookstores/knjiga.py", "/views/__init__.py", "/helpers/models.py"], "/views/edit_oglas.py": ["/helpers/models.py"], "/views/register.py": ["/helpers/models.py"], "/views/dashboard.py": ["/helpers/models.py", "/views/add_oglas.py"], "/views/oglasnik.py": ["/helpers/models.py"], "/views/delete_oglas.py": ["/helpers/models.py"], "/views/login.py": ["/helpers/models.py"], "/views/handle_data.py": ["/helpers/database.py"], "/helpers/database.py": ["/beautifulsoup_bookstores/znanje.py", "/helpers/general.py", "/helpers/models.py", "/selenium_bookstores/knjiga.py", "/selenium_bookstores/mozaik.py"], "/views/add_oglas.py": ["/helpers/models.py"]}
|
45,396,811
|
lea98/search_books
|
refs/heads/main
|
/views/edit_oglas.py
|
import os
from flask import Blueprint, render_template, current_app, url_for, request
from flask_login import login_required
from flask_wtf import FlaskForm
from werkzeug.utils import redirect, secure_filename
from datetime import datetime
from wtforms import StringField, TextAreaField, FileField
from helpers.models import db, Oglasi
bp = Blueprint("edit_oglas", __name__)  # registered by app.py
class OglasFormEdit(FlaskForm):
    """Edit form for an ad; a field left empty keeps the current value."""
    title = StringField("New title")
    body = TextAreaField("New text")
    img_url = FileField("")  # optional replacement image
    price = StringField("New price")
@bp.route("/edit_oglas/<string:id>", methods=["GET", "POST"])
@login_required
def edit_oglas(id):
    """Edit an existing ad: any non-empty submitted field overwrites the old value.

    NOTE(review): there is no check that the ad belongs to the logged-in
    user, and an unknown id leaves `oglas` as None (attribute assignment
    would then raise) — confirm whether both are acceptable.
    """
    form = OglasFormEdit()
    oglas = db.session.query(Oglasi).filter_by(id=id).first()
    if request.method == "POST" and form.validate():
        db.session.commit()  # NOTE(review): commits before any change is made — looks redundant
        title = form.title.data
        body = form.body.data
        price = form.price.data
        # Only overwrite fields the user actually filled in.
        if price:
            oglas.price = price
        if title:
            oglas.title = title
        if body:
            oglas.body = body
        if form.img_url.data:
            f = form.img_url.data
            filename = secure_filename(form.img_url.data.filename)
            # Prefix with minutes+seconds to reduce filename collisions.
            filename = datetime.strftime(datetime.now(), "%M%S") + filename
            f.save(os.path.join(current_app.config["UPLOADED_IMAGES_DEST"], filename))
            oglas.img_url = filename
        if price or title or body or form.img_url.data:
            # Bump the timestamp only when something actually changed.
            oglas.date_created = datetime.now().replace(microsecond=0)
        db.session.commit()
        return redirect(url_for("dashboard.dashboard"))
    return render_template("edit_oglas.html", form=form, oglas=oglas)
|
{"/app.py": ["/beautifulsoup_bookstores/znanje.py", "/helpers/general.py", "/selenium_bookstores/knjiga.py", "/views/__init__.py", "/helpers/models.py"], "/views/edit_oglas.py": ["/helpers/models.py"], "/views/register.py": ["/helpers/models.py"], "/views/dashboard.py": ["/helpers/models.py", "/views/add_oglas.py"], "/views/oglasnik.py": ["/helpers/models.py"], "/views/delete_oglas.py": ["/helpers/models.py"], "/views/login.py": ["/helpers/models.py"], "/views/handle_data.py": ["/helpers/database.py"], "/helpers/database.py": ["/beautifulsoup_bookstores/znanje.py", "/helpers/general.py", "/helpers/models.py", "/selenium_bookstores/knjiga.py", "/selenium_bookstores/mozaik.py"], "/views/add_oglas.py": ["/helpers/models.py"]}
|
45,396,812
|
lea98/search_books
|
refs/heads/main
|
/selenium_bookstores/knjiga.py
|
from selenium import webdriver
from bs4 import BeautifulSoup
from selenium.webdriver.chrome.options import Options
import os
# --| Setup
def knjiga(task_cont, task_title):
    """Scrape knjiga.hr search results for the given author/title.

    Returns a list of dicts with price, author list, title, link and
    page id 5 (this store's id in the Pages table).
    """
    options = Options()
    options.add_argument("--headless")
    options.add_argument(
        "user-agent=Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.101 Safari/537.36"
    )
    # LOCAL TESTING - change executable_path
    # driver_path = r"C:\Users\LeaBratić\Desktop\chromedriver.exe"
    options.add_argument("--disable-dev-shm-usage")
    options.add_argument("--no-sandbox")
    # Chrome binary / chromedriver paths come from the (Heroku) environment.
    options.binary_location = os.environ.get("GOOGLE_CHROME_BIN")
    driver = webdriver.Chrome(
        executable_path=os.environ.get("CHROMEDRIVER_PATH"), options=options
    )
    driver.minimize_window()
    # --| Parse or automation
    # Build the search query: content + title joined with '+'.
    # NOTE(review): driver.quit() below is not in a finally, so an exception
    # anywhere in this function leaks the browser process — consider try/finally.
    if task_cont and task_title:
        to_url = (task_cont + " " + task_title).replace(" ", "+")
    elif task_cont:
        to_url = task_cont.replace(" ", "+")
    else:
        to_url = task_title.replace(" ", "+")
    url = f"https://knjiga.hr/?s={to_url}&post_type=product"
    driver.get(url)
    driver.implicitly_wait(1)
    soup = BeautifulSoup(driver.page_source, "lxml")
    # NOTE(review): attrs is passed a *set*; this appears to rely on
    # BeautifulSoup matching classes against the set members and does
    # select the intended elements — a dict {'class': ...} would be clearer.
    price = soup.findAll("span", attrs={"class", "woocommerce-Price-amount amount"})
    title_meta = soup.findAll("h2", attrs={"class", "woocommerce-loop-product__title"})
    authors_meta = soup.findAll("div", attrs={"class", "author"})
    links = soup.findAll("div", attrs={"class", "author-and-title-wrapper"})
    lista = []
    for (i, j, k, s) in zip(price, authors_meta, title_meta, links):
        lista.append(
            {
                # first token of the NBSP-normalised price (drops the currency)
                "price": i.findChildren("bdi")[0]
                .text.replace("\xa0", " ")
                .strip()
                .split(" ")[0],
                "author": j.text.replace("\n", "").strip().split(", "),
                # collapse runs of whitespace inside the title
                "title": " ".join((k.text.replace("\n", "").strip()).split()),
                "link": s.findChildren("a")[0]["href"],
                "page": 5,
            }
        )
    driver.quit()
    return lista
|
{"/app.py": ["/beautifulsoup_bookstores/znanje.py", "/helpers/general.py", "/selenium_bookstores/knjiga.py", "/views/__init__.py", "/helpers/models.py"], "/views/edit_oglas.py": ["/helpers/models.py"], "/views/register.py": ["/helpers/models.py"], "/views/dashboard.py": ["/helpers/models.py", "/views/add_oglas.py"], "/views/oglasnik.py": ["/helpers/models.py"], "/views/delete_oglas.py": ["/helpers/models.py"], "/views/login.py": ["/helpers/models.py"], "/views/handle_data.py": ["/helpers/database.py"], "/helpers/database.py": ["/beautifulsoup_bookstores/znanje.py", "/helpers/general.py", "/helpers/models.py", "/selenium_bookstores/knjiga.py", "/selenium_bookstores/mozaik.py"], "/views/add_oglas.py": ["/helpers/models.py"]}
|
45,396,813
|
lea98/search_books
|
refs/heads/main
|
/app.py
|
import os
from flask import Flask
from flask_login import (
LoginManager,
)
from flask_bootstrap import Bootstrap
from flask_uploads import configure_uploads, IMAGES, UploadSet
from views import blueprints
LOGOS_FOLDER = os.path.join("static", "logos")  # shop logo images
UPLOADS_FOLDER = os.path.join("static", "uploads")  # user-uploaded ad images
app = Flask(__name__) # setup app, name referencing this file
app.config["SECRET_KEY"] = os.environ.get("SECRET_KEY")
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
bootstrap = Bootstrap(app)
from helpers.models import Users,db,Pages
# # LOCAL TESTING
# app.config[
#     "SQLALCHEMY_DATABASE_URI"
# ] = "postgresql://postgres:books1234@localhost/bookscraper"
# app.config["SQLALCHEMY_BINDS"] = {
#     "oglasnik": "postgresql://postgres:books1234@localhost/oglasnik"
# }
# Heroku hands out postgres:// URLs while SQLAlchemy expects postgresql://.
# NOTE(review): .replace() rewrites *every* occurrence of "postgres" in the
# URL (e.g. inside a password), and .get() returns None when the variable
# is missing, which raises AttributeError here — confirm both are acceptable.
DATABASE_URL = os.environ.get('DATABASE_URL').replace('postgres', 'postgresql')
HEROKU_POSTGRESQL_CHARCOAL_URL = os.environ.get('HEROKU_POSTGRESQL_CHARCOAL_URL').replace('postgres', 'postgresql')
app.config["SQLALCHEMY_DATABASE_URI"] = DATABASE_URL
app.config['SQLALCHEMY_BINDS'] = {
    'oglasnik': HEROKU_POSTGRESQL_CHARCOAL_URL,
}
app.config["UPLOAD_FOLDER"] = LOGOS_FOLDER
app.config["UPLOADED_IMAGES_DEST"] = UPLOADS_FOLDER
images = UploadSet("images", IMAGES)
configure_uploads(app, images)
# app.config.from_object(__name__)
# Register every view blueprint collected in views/__init__.py.
for blue in blueprints:
    app.register_blueprint(blue.bp)
login_manager = LoginManager()
login_manager.init_app(app)
login_manager.login_view = "login"
db.init_app(app)
with app.app_context():
    db.create_all()
    db.session.commit()
    # Seed the Pages table with the five bookshop sites on first run.
    if not (db.session.execute("select count(*) from pages").first())[0]:
        page1 = Pages(id=1,link="https://www.barnesandnoble.com/",name="Barnes & Noble",image="barnesandnoble.jpg")
        page2 = Pages(id=2,link="https://znanje.hr/",name="Znanje",image="znanje.jpg")
        page3 = Pages(id=3,link="https://mozaik-knjiga.hr/",name="Mozaik Knjiga",image="mozaik.jpg")
        page4 = Pages(id=4,link="https://www.ljevak.hr/",name="Ljevak",image="ljevak.jpg")
        page5 = Pages(id=5,link="https://knjiga.hr/",name="Knjiga",image="knjiga.jpg")
        db.session.add(page1)
        db.session.add(page2)
        db.session.add(page3)
        db.session.add(page4)
        db.session.add(page5)
        db.session.commit()
@login_manager.user_loader
def load_user(id):
    """Flask-Login callback: load a user by the id stored in the session."""
    return Users.query.get(int(id))
if __name__ == "__main__":
    app.run(debug=True)
|
{"/app.py": ["/beautifulsoup_bookstores/znanje.py", "/helpers/general.py", "/selenium_bookstores/knjiga.py", "/views/__init__.py", "/helpers/models.py"], "/views/edit_oglas.py": ["/helpers/models.py"], "/views/register.py": ["/helpers/models.py"], "/views/dashboard.py": ["/helpers/models.py", "/views/add_oglas.py"], "/views/oglasnik.py": ["/helpers/models.py"], "/views/delete_oglas.py": ["/helpers/models.py"], "/views/login.py": ["/helpers/models.py"], "/views/handle_data.py": ["/helpers/database.py"], "/helpers/database.py": ["/beautifulsoup_bookstores/znanje.py", "/helpers/general.py", "/helpers/models.py", "/selenium_bookstores/knjiga.py", "/selenium_bookstores/mozaik.py"], "/views/add_oglas.py": ["/helpers/models.py"]}
|
45,396,814
|
lea98/search_books
|
refs/heads/main
|
/views/register.py
|
from flask import Blueprint, render_template, url_for
from flask_wtf import FlaskForm
from werkzeug.security import generate_password_hash
from werkzeug.utils import redirect
from wtforms import StringField, PasswordField
from wtforms.validators import InputRequired, Email, Length
from helpers.models import Users, db
bp = Blueprint("register", __name__)  # registered by app.py
class RegisterForm(FlaskForm):
    """Sign-up form; all fields required, lengths bounded to match the model."""
    email = StringField(
        "Email",
        validators=[InputRequired(), Email(message="Invalid email"), Length(max=50)],
    )
    username = StringField(
        "Username", validators=[InputRequired(), Length(min=3, max=15)]
    )
    name = StringField("Name", validators=[InputRequired(), Length(min=3, max=15)])
    password = PasswordField(
        "Password", validators=[InputRequired(), Length(min=3, max=50)]
    )
@bp.route("/register", methods=["GET", "POST"])
def register():
    """Create a new account after checking username/email uniqueness."""
    form = RegisterForm()
    if form.validate_on_submit():
        # Reject duplicate usernames.
        exists_user_username = (
            db.session.query(Users).filter(Users.username == form.username.data).first()
            is not None
        )
        if exists_user_username:
            return render_template(
                "register.html", form=form, message="Username already exists"
            )
        # Reject duplicate e-mail addresses.
        exists_user_mail = (
            db.session.query(Users).filter(Users.email == form.email.data).first()
            is not None
        )
        if exists_user_mail:
            return render_template(
                "register.html", form=form, message="Email already exists"
            )
        # Hash only once both uniqueness checks pass (the original hashed
        # before checking, doing the expensive work even for rejected
        # submissions).
        hashed_password = generate_password_hash(form.password.data, method="sha256")
        new_user = Users(
            username=form.username.data,
            email=form.email.data,
            password=hashed_password,
            name=form.name.data,
        )
        db.session.add(new_user)
        db.session.commit()
        return redirect(url_for("login.login"))
    return render_template("register.html", form=form)
|
{"/app.py": ["/beautifulsoup_bookstores/znanje.py", "/helpers/general.py", "/selenium_bookstores/knjiga.py", "/views/__init__.py", "/helpers/models.py"], "/views/edit_oglas.py": ["/helpers/models.py"], "/views/register.py": ["/helpers/models.py"], "/views/dashboard.py": ["/helpers/models.py", "/views/add_oglas.py"], "/views/oglasnik.py": ["/helpers/models.py"], "/views/delete_oglas.py": ["/helpers/models.py"], "/views/login.py": ["/helpers/models.py"], "/views/handle_data.py": ["/helpers/database.py"], "/helpers/database.py": ["/beautifulsoup_bookstores/znanje.py", "/helpers/general.py", "/helpers/models.py", "/selenium_bookstores/knjiga.py", "/selenium_bookstores/mozaik.py"], "/views/add_oglas.py": ["/helpers/models.py"]}
|
45,396,815
|
lea98/search_books
|
refs/heads/main
|
/views/dashboard.py
|
from flask import Blueprint, render_template
from flask_login import login_required, current_user
from sqlalchemy import desc
from helpers.models import Oglasi, db
from views.add_oglas import OglasForm
bp = Blueprint("dashboard", __name__)
@bp.route("/dashboard")
@login_required
def dashboard():
    """Show the logged-in user's own ads, newest first."""
    form = OglasForm()
    columns = db.session.query(
        Oglasi.id,
        Oglasi.title,
        Oglasi.price,
        Oglasi.body,
        Oglasi.img_url,
        Oglasi.date_created,
    )
    # Restrict to the current user's ads, newest first.
    ads = (
        columns.filter(Oglasi.user_id == current_user.id)
        .order_by(desc(Oglasi.date_created))
        .all()
    )
    return render_template("dashboard.html", oglasi_list=ads, form=form)
|
{"/app.py": ["/beautifulsoup_bookstores/znanje.py", "/helpers/general.py", "/selenium_bookstores/knjiga.py", "/views/__init__.py", "/helpers/models.py"], "/views/edit_oglas.py": ["/helpers/models.py"], "/views/register.py": ["/helpers/models.py"], "/views/dashboard.py": ["/helpers/models.py", "/views/add_oglas.py"], "/views/oglasnik.py": ["/helpers/models.py"], "/views/delete_oglas.py": ["/helpers/models.py"], "/views/login.py": ["/helpers/models.py"], "/views/handle_data.py": ["/helpers/database.py"], "/helpers/database.py": ["/beautifulsoup_bookstores/znanje.py", "/helpers/general.py", "/helpers/models.py", "/selenium_bookstores/knjiga.py", "/selenium_bookstores/mozaik.py"], "/views/add_oglas.py": ["/helpers/models.py"]}
|
45,396,816
|
lea98/search_books
|
refs/heads/main
|
/views/oglasnik.py
|
from flask import Blueprint, render_template
from sqlalchemy import desc
from helpers.models import Oglasi, db, Users
bp = Blueprint("oglasnik", __name__)
@bp.route("/oglasnik", methods=["GET", "POST"])
def oglasnik():
    """Public listing of all ads (with the poster's e-mail), newest first."""
    ads = (
        db.session.query(
            Oglasi.title,
            Oglasi.body,
            Oglasi.price,
            Oglasi.date_created,
            Oglasi.img_url,
            Users.email,
        )
        .join(Users, Users.id == Oglasi.user_id)
        .order_by(desc(Oglasi.date_created))
        .all()
    )
    return render_template("oglasnik.html", oglasi_list=ads)
|
{"/app.py": ["/beautifulsoup_bookstores/znanje.py", "/helpers/general.py", "/selenium_bookstores/knjiga.py", "/views/__init__.py", "/helpers/models.py"], "/views/edit_oglas.py": ["/helpers/models.py"], "/views/register.py": ["/helpers/models.py"], "/views/dashboard.py": ["/helpers/models.py", "/views/add_oglas.py"], "/views/oglasnik.py": ["/helpers/models.py"], "/views/delete_oglas.py": ["/helpers/models.py"], "/views/login.py": ["/helpers/models.py"], "/views/handle_data.py": ["/helpers/database.py"], "/helpers/database.py": ["/beautifulsoup_bookstores/znanje.py", "/helpers/general.py", "/helpers/models.py", "/selenium_bookstores/knjiga.py", "/selenium_bookstores/mozaik.py"], "/views/add_oglas.py": ["/helpers/models.py"]}
|
45,396,817
|
lea98/search_books
|
refs/heads/main
|
/views/delete_oglas.py
|
import os
from flask import Blueprint, current_app, url_for
from flask_login import login_required
from werkzeug.utils import redirect
from helpers.models import db, Oglasi
bp = Blueprint("delete_oglas", __name__)
@bp.route("/delete_oglas/<string:id>", methods=["POST"])
@login_required
def delete_oglas(id):
    """Delete the ad with *id* together with its uploaded image.

    NOTE(review): any logged-in user can delete any ad — there is no check
    that the ad belongs to the current user; confirm whether that is intended.
    """
    filename = db.session.query(Oglasi.img_url).filter(Oglasi.id == id).first()
    # BUGFIX: the original crashed (500) when the id matched no row, the ad
    # had no image, or the file was already gone from disk.
    if filename is not None and filename[0]:
        path = os.path.join(current_app.config["UPLOADED_IMAGES_DEST"], filename[0])
        try:
            os.remove(path)
        except FileNotFoundError:
            pass  # image already removed; still delete the DB row
    Oglasi.query.filter_by(id=id).delete()
    db.session.commit()
    return redirect(url_for("dashboard.dashboard"))
|
{"/app.py": ["/beautifulsoup_bookstores/znanje.py", "/helpers/general.py", "/selenium_bookstores/knjiga.py", "/views/__init__.py", "/helpers/models.py"], "/views/edit_oglas.py": ["/helpers/models.py"], "/views/register.py": ["/helpers/models.py"], "/views/dashboard.py": ["/helpers/models.py", "/views/add_oglas.py"], "/views/oglasnik.py": ["/helpers/models.py"], "/views/delete_oglas.py": ["/helpers/models.py"], "/views/login.py": ["/helpers/models.py"], "/views/handle_data.py": ["/helpers/database.py"], "/helpers/database.py": ["/beautifulsoup_bookstores/znanje.py", "/helpers/general.py", "/helpers/models.py", "/selenium_bookstores/knjiga.py", "/selenium_bookstores/mozaik.py"], "/views/add_oglas.py": ["/helpers/models.py"]}
|
45,396,818
|
lea98/search_books
|
refs/heads/main
|
/views/login.py
|
import flask
from flask import Blueprint, url_for, render_template, request
from flask_login import login_user
from flask_wtf import FlaskForm
from werkzeug.utils import redirect
from werkzeug.security import check_password_hash
from urllib.parse import urlparse, urljoin
from helpers.models import Users
from wtforms import StringField, PasswordField
from wtforms.validators import InputRequired, Length
bp = Blueprint("login", __name__)  # registered by app.py
class LoginForm(FlaskForm):
    """Login form; both fields required, lengths bounded to match the model."""
    username = StringField(
        "Username", validators=[InputRequired(), Length(min=3, max=15)]
    )
    password = PasswordField(
        "Password", validators=[InputRequired(), Length(min=3, max=50)]
    )
@bp.route("/login", methods=["GET", "POST"])
def login():
    """Authenticate a user and redirect to the dashboard (or ?next=...)."""
    form = LoginForm()
    if not form.validate_on_submit():
        return render_template("login.html", form=form, invalid_pass=False)

    user = Users.query.filter_by(username=form.username.data).first()
    if user is None or not check_password_hash(user.password, form.password.data):
        # Unknown username and wrong password are reported identically.
        return render_template("login.html", form=form, invalid_pass=True)

    login_user(user, remember=True)  # maybe add option
    next = flask.request.args.get("next")
    # Only follow same-host redirect targets (open-redirect protection).
    if not is_safe_url(next):
        return flask.abort(400)
    return redirect(next or url_for("dashboard.dashboard"))
def is_safe_url(target):
    """Return True when *target* resolves to this host over http(s)."""
    host = urlparse(request.host_url)
    candidate = urlparse(urljoin(request.host_url, target))
    same_host = host.netloc == candidate.netloc
    return candidate.scheme in ("http", "https") and same_host
|
{"/app.py": ["/beautifulsoup_bookstores/znanje.py", "/helpers/general.py", "/selenium_bookstores/knjiga.py", "/views/__init__.py", "/helpers/models.py"], "/views/edit_oglas.py": ["/helpers/models.py"], "/views/register.py": ["/helpers/models.py"], "/views/dashboard.py": ["/helpers/models.py", "/views/add_oglas.py"], "/views/oglasnik.py": ["/helpers/models.py"], "/views/delete_oglas.py": ["/helpers/models.py"], "/views/login.py": ["/helpers/models.py"], "/views/handle_data.py": ["/helpers/database.py"], "/helpers/database.py": ["/beautifulsoup_bookstores/znanje.py", "/helpers/general.py", "/helpers/models.py", "/selenium_bookstores/knjiga.py", "/selenium_bookstores/mozaik.py"], "/views/add_oglas.py": ["/helpers/models.py"]}
|
45,396,819
|
lea98/search_books
|
refs/heads/main
|
/views/handle_data.py
|
import copy
from flask import Blueprint, render_template, url_for, request
from werkzeug.utils import redirect
from helpers.database import check_database, live_scraping
bp = Blueprint("handle_data", __name__)
@bp.route("/handle_data", methods=["POST", "GET"])
def handle_data():
    """Handle the scraper search form.

    "Check DB" looks the query up in the local database; the other submit
    button scrapes the shops live.  For display, each author list is
    joined into one string and numeric page ids become shop URLs.
    """
    if request.method == "GET":
        return redirect(url_for("scraper.scraper"))

    task_title = request.form.get("title")
    task_author = request.form.get("author")

    if request.form.get("submit_button") == "Check DB":
        new_lista = check_database(task_author, task_title)
        if new_lista:
            return render_template("index.html", lista=new_lista)
        # Nothing stored: offer the live-scrape button.
        return render_template(
            "index.html",
            lista=new_lista,
            author_name=task_author,
            book_title=task_title,
            show_button=True,
        )

    new_lista = live_scraping(task_author, task_title)
    # Work on a deep copy so the scraped data itself stays untouched.
    change_list = copy.deepcopy(new_lista)
    page_urls = {
        2: "https://znanje.hr/",
        5: "https://knjiga.hr/",
        3: "https://mozaik-knjiga.hr/",
    }
    for entry in change_list:
        if "author" in entry:
            entry["author"] = ", ".join(map(str, entry["author"]))
        if entry.get("page") in page_urls:
            entry["page"] = page_urls[entry["page"]]
    return render_template("index.html", lista=change_list)
|
{"/app.py": ["/beautifulsoup_bookstores/znanje.py", "/helpers/general.py", "/selenium_bookstores/knjiga.py", "/views/__init__.py", "/helpers/models.py"], "/views/edit_oglas.py": ["/helpers/models.py"], "/views/register.py": ["/helpers/models.py"], "/views/dashboard.py": ["/helpers/models.py", "/views/add_oglas.py"], "/views/oglasnik.py": ["/helpers/models.py"], "/views/delete_oglas.py": ["/helpers/models.py"], "/views/login.py": ["/helpers/models.py"], "/views/handle_data.py": ["/helpers/database.py"], "/helpers/database.py": ["/beautifulsoup_bookstores/znanje.py", "/helpers/general.py", "/helpers/models.py", "/selenium_bookstores/knjiga.py", "/selenium_bookstores/mozaik.py"], "/views/add_oglas.py": ["/helpers/models.py"]}
|
45,396,820
|
lea98/search_books
|
refs/heads/main
|
/helpers/database.py
|
import os
from datetime import datetime, timedelta
from flask import after_this_request, current_app
from beautifulsoup_bookstores.znanje import znanje
from helpers.general import match_author
from helpers.models import Books, Pages, db
from selenium_bookstores.knjiga import knjiga
from selenium_bookstores.mozaik import mozaik
def check_database(task_author, task_title):
    """Search the local DB for cached offers matching author and/or title.

    Runs one of three raw-SQL queries depending on which of the two search
    terms was supplied, drops offers older than the freshness window, and
    returns a list of display-ready dicts (price, author string, title,
    absolute link, store page URL, store logo path).  Returns [] when both
    terms are empty or nothing matches.
    """
    new_lista = []
    # NOTE(review): book_exists is computed but never used below.
    book_exists = db.session.query(Books.id).filter(Books.title == task_title).all()
    # Empty string doubles as a "no query was run" sentinel for `if result:`.
    result = ""
    if task_title and task_author:
        # Both terms given: offers for books with this exact title AND author.
        result = db.session.execute(
            """SELECT offers.link, offers.price, offers.book_id, offers.pages_id,offers.date_added
        FROM offers
        INNER JOIN books ON offers.book_id=books.id Where books.id IN (SELECT b.id
        FROM books b
        JOIN book_authors ba ON b.id = ba.book_id
        JOIN authors a ON ba.author_id = a.id
        WHERE a.name = :autname AND b.title = :title);""",
            {"autname": task_author, "title": task_title}
        )
    if not task_title and not task_author:
        # Nothing to search for; result stays the "" sentinel.
        pass
    elif not task_title:
        # Author only: every offer for any book by that author.
        result = db.session.execute(
            """SELECT offers.link, offers.price, offers.book_id, offers.pages_id,offers.date_added
        FROM offers
        INNER JOIN books ON offers.book_id=books.id WHERE books.id IN (SELECT b.id
        FROM books b
        JOIN book_authors ba ON b.id = ba.book_id
        JOIN authors a ON ba.author_id = a.id
        WHERE a.name = :autname
        );""",
            {"autname": task_author}
        )
    elif not task_author:
        # Title only: every offer for books with that exact title.
        result = db.session.execute(
            """SELECT offers.link, offers.price, offers.book_id, offers.pages_id, offers.date_added
        FROM offers
        INNER JOIN books ON offers.book_id=books.id Where books.id IN (SELECT b.id
        FROM books b
        JOIN book_authors ba ON b.id = ba.book_id
        JOIN authors a ON ba.author_id = a.id
        WHERE b.title = :title);""",
            {"title": task_title}
        )
    if result:
        offers = result.mappings().all()
        for offer in offers:
            # Normalise date_added to a datetime: depending on the driver it
            # may come back as a "YYYY-MM-DD ..." string instead of datetime.
            check_date = (
                datetime.strptime(offer.date_added.split(" ")[0], "%Y-%m-%d")
                if isinstance(offer.date_added, str)
                else offer.date_added
            )  # TODO: unify date handling at insert time instead
            # Freshness window: skip offers older than 500 weeks (~9.6 years,
            # i.e. effectively "keep everything" — presumably a dev setting).
            if check_date <= (datetime.now() - timedelta(weeks=500)):
                continue
            book_full_name = (
                db.session.query(Books.title)
                .filter(Books.id == offer["book_id"])
                .first()
            )
            # NOTE(review): the `JOIN books ON book_authors.book_id = :bookid`
            # condition looks wrong (it joins every books row); the GROUP BY
            # collapses the resulting duplicates — verify the intent.
            authors_for_book = db.session.execute(
                """SELECT authors.name
            FROM authors
            JOIN book_authors ON authors.id = book_authors.author_id
            JOIN books ON book_authors.book_id = :bookid GROUP BY authors.name""",
                {"bookid": offer["book_id"]}
            )
            authors_to_display = ", ".join(
                i["name"] for i in (authors_for_book.mappings().all())
            )
            # Pages row supplies the store's base URL and its logo filename.
            page_link = (
                db.session.query(Pages.link, Pages.image)
                .filter(Pages.id == offer["pages_id"])
                .first()
            )
            new_lista.append(
                {
                    "price": offer["price"],
                    "author": authors_to_display,
                    "title": book_full_name[0],
                    # offers.link is stored relative to the store base URL.
                    "link": page_link[0] + offer["link"],
                    "page": page_link[0],
                    "page_logo": os.path.join(
                        current_app.config["UPLOAD_FOLDER"], page_link[1]
                    ),
                }
            )
    return new_lista
def live_scraping(task_author, task_title):
    """Scrape all three bookstores live and schedule persisting the results.

    Scrapes znanje.hr, knjiga.hr and mozaik-knjiga.hr, filters each result
    set with match_author, tags every record with its store logo path, and
    registers an after-request hook that upserts the combined list into the
    books/authors/book_authors/offers tables.  Returns the combined list of
    offer dicts (the DB write happens only after the response is built).
    """
    new_lista = []
    znanje_list = znanje(task_title, task_author)
    znanje_list = match_author(znanje_list, task_author, task_title)
    if znanje_list:
        for item in znanje_list:
            item["page_logo"] = os.path.join(
                current_app.config["UPLOAD_FOLDER"], "znanje.jpg"
            )
    knjiga_list = knjiga(task_title, task_author)
    knjiga_list = match_author(knjiga_list, task_author, task_title)
    if knjiga_list:
        for item in knjiga_list:
            item["page_logo"] = os.path.join(
                current_app.config["UPLOAD_FOLDER"], "knjiga.jpg"
            )
    mozaik_list = mozaik(task_title, task_author)
    mozaik_list = match_author(mozaik_list, task_author, task_title)
    if mozaik_list:
        for item in mozaik_list:
            item["page_logo"] = os.path.join(
                current_app.config["UPLOAD_FOLDER"], "mozaik.jpg"
            )
    new_lista = mozaik_list + knjiga_list + znanje_list

    # Deferred persistence: runs once the HTTP response for this request has
    # been produced, so scraping latency is not doubled by DB writes.
    @after_this_request
    def save_to_db_after_scraping(response):
        for item in new_lista:
            # Returns the existing books.id when title+authors already match.
            exists_in = check_if_exists_in_table(item)
            if not exists_in:
                # New book: insert it and each of its authors (reusing author
                # rows that already exist), then wire up book_authors.
                book_id = db.session.execute("insert into books values (DEFAULT,:titles) RETURNING id;",{'titles':item["title"]})
                book_id_num = book_id.first()[0]
                for auth in item["author"]:
                    auth_is_there_list = db.session.execute(
                        "select id from authors where name = :autname;",
                        {"autname": auth}
                    ).fetchone()
                    if not auth_is_there_list:
                        auth_id_num = db.session.execute("insert into authors values (DEFAULT,:authname) RETURNING id;",{'authname':auth}).first()[0]
                    else:
                        auth_id_num = auth_is_there_list[0]
                    db.session.execute(
                        "insert into book_authors values (:bookid,:autid);",
                        {"bookid": book_id_num, "autid": auth_id_num}
                    )
            else:
                book_id_num = exists_in
            # Offers store links relative to the store base URL; strip all
            # three known prefixes (only one will match any given link).
            new_link = (
                item["link"]
                .replace("https://mozaik-knjiga.hr/", "")
                .replace("https://znanje.hr/", "")
                .replace("https://knjiga.hr/", "")
            )
            # Upsert keyed on offers.link: refresh price and timestamp when
            # the same offer is scraped again.
            db.session.execute(
                """INSERT INTO offers (link,price,book_id,pages_id,date_added)
                VALUES (:newlink,:price,:bookid,:page,:dateadd)
                ON CONFLICT (link) DO UPDATE SET (price, date_added) = (:price,:dateadd);""",
                {
                    "newlink": new_link,
                    "price": item["price"],
                    "bookid": book_id_num,
                    # NOTE(review): assumes item["page"] still holds the
                    # numeric Pages id at this point — confirm against caller.
                    "page": item["page"],
                    "dateadd": datetime.utcnow(),
                }
            )
        db.session.commit()
        return response
    return new_lista
def check_if_exists_in_table(item):
    """Return the books.id of a cached book matching *item*, or False.

    A candidate matches when its title equals item["title"] and EVERY one of
    its stored authors appears in item["author"] (a list of author names).

    Bug fix: the previous version returned from inside the inner author loop,
    accepting a candidate as soon as its FIRST author matched; now all stored
    authors must be present before the id is returned.
    """
    all_books_with_that_name = db.session.execute(
        "select id from books where title = :title", {"title": item["title"]}
    )
    for book_row in list(all_books_with_that_name):
        book_authors_match = db.session.execute(
            "select author_id from book_authors where book_id = :bookid",
            {"bookid": book_row[0]}
        )
        stored_author_rows = list(book_authors_match)
        if not stored_author_rows:
            # No author rows at all: cannot confirm a match (preserves the
            # original behavior of rejecting author-less candidates).
            continue
        for author_row in stored_author_rows:
            authors_match = db.session.execute(
                "select name from authors where id = :autname",
                {"autname": author_row[0]}
            )
            if list(authors_match)[0][0] not in item["author"]:
                break  # one stored author is missing -> reject this candidate
        else:
            # Loop completed without break: every stored author was found.
            return book_row[0]
    return False
|
{"/app.py": ["/beautifulsoup_bookstores/znanje.py", "/helpers/general.py", "/selenium_bookstores/knjiga.py", "/views/__init__.py", "/helpers/models.py"], "/views/edit_oglas.py": ["/helpers/models.py"], "/views/register.py": ["/helpers/models.py"], "/views/dashboard.py": ["/helpers/models.py", "/views/add_oglas.py"], "/views/oglasnik.py": ["/helpers/models.py"], "/views/delete_oglas.py": ["/helpers/models.py"], "/views/login.py": ["/helpers/models.py"], "/views/handle_data.py": ["/helpers/database.py"], "/helpers/database.py": ["/beautifulsoup_bookstores/znanje.py", "/helpers/general.py", "/helpers/models.py", "/selenium_bookstores/knjiga.py", "/selenium_bookstores/mozaik.py"], "/views/add_oglas.py": ["/helpers/models.py"]}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.