index int64 | repo_name string | branch_name string | path string | content string | import_graph string |
|---|---|---|---|---|---|
69,592 | seaquamrobotics/seahawk-scouter | refs/heads/master | /manage.py | #!/usr/bin/env python
import csv
import sys
import textwrap
import dbutils
import sqlite3
import requests
from server import db_name, current_tournament_id, current_year
def add_team(*args):
    """CLI handler: register one team in the currently configured tournament."""
    usage = textwrap.dedent("""
    Usage: ./manage.py addteam [team_id]
    Adds a team to the current tournament.
    Arguments:
        team_id: VRC team number to add
    """).strip()
    # No team supplied -> show usage and exit with an error status.
    if not args:
        print(usage)
        sys.exit(-1)
    team_id = args[0]
    db = sqlite3.connect(db_name)
    try:
        dbutils.add_team_to_tournament(db, current_tournament_id, team_id)
    except ValueError as err:
        # dbutils reports problems (e.g. duplicate team) via ValueError.
        print(err)
        sys.exit(-1)
    print("Successfully added team: %s" % team_id)
def dump_csv(*args):
    """CLI handler: write a tournament's scouting reports to stdout as CSV.

    Bug fix: the header row contains a ``tournament_id`` column but the
    SELECT never returned one, so every data row was one column short of
    the header. The tournament id is now prepended to each data row.
    """
    doc = textwrap.dedent("""
    Usage: ./manage.py dumpcsv [optional tournament_id]
    Dumps all scouting data for a given tournament ID to a CSV file.
    """).strip()
    if len(args) == 0:
        tournament_id = current_tournament_id
    elif args[0].lower() == "help":
        print(doc)
        sys.exit(0)
    else:
        tournament_id = args[0]
    db = sqlite3.connect(db_name)
    try:
        c = db.cursor()
        c.execute(
            'SELECT team_name, color, side, auton_score, auton_high_flags, auton_low_flags, auton_high_caps, '
            'auton_low_caps, auton_park, driver_score, driver_high_flags, driver_low_flags, driver_high_caps, '
            'driver_low_caps, driver_park, note FROM Reports WHERE tournament_id=?', (str(tournament_id),))
        robot_data = c.fetchall()
        c.close()
    finally:
        db.close()  # always release the database handle
    csvwriter = csv.writer(sys.stdout, delimiter=',', quotechar='|', quoting=csv.QUOTE_MINIMAL)
    csvwriter.writerow(
        ["tournament_id", "team_name", "color", "side", "auton_score", "auton_high_flags", "auton_low_flags",
         "auton_high_caps", "auton_low_caps", "auton_park", "driver_score", "driver_high_flags", "driver_low_flags",
         "driver_high_caps", "driver_low_caps", "driver_park", "note"])
    for row in robot_data:
        # Prepend the tournament id so data rows line up with the header.
        csvwriter.writerow((tournament_id,) + tuple(row))
def import_tournament(*args):
    """CLI handler: create a tournament entry from a CSV file of team numbers.

    Bug fix: ``tournament_id`` is a *string* when it comes from the command
    line, so the old ``"RE-VRC-%d-%d"`` format raised TypeError for that
    path; ``%s`` handles both the integer default and string arguments.
    """
    doc = textwrap.dedent("""
    Usage: ./manage.py importtournament [csv file] [tournament id]
    Imports tournament team data from a csv file
    """).strip()
    if len(args) == 2:
        csv_path = args[0]
        tournament_id = args[1]
    elif len(args) == 1:
        csv_path = args[0]
        tournament_id = current_tournament_id
    else:
        print(doc)
        sys.exit(0)
    # Resolve the human-readable tournament name via the vexdb API.
    print("Looking for tournament %s on vexdb..." % tournament_id)
    resp = requests.get("http://api.vexdb.io/v1/get_events?sku=RE-VRC-%s-%s"
                        % (current_year, tournament_id)).json()
    if resp["size"] > 0:
        name = resp["result"][0]["name"]
        print("Found tournament: %s" % name)
    else:
        print("Couldn't find tournament!")
        sys.exit(-1)
    with open(csv_path, 'r') as file:
        reader = csv.reader(file)
        next(reader)  # skip the header row
        team_list = [row[0] for row in reader]
    teams = " ".join(team_list)
    db = sqlite3.connect(db_name)
    dbutils.create_tournament(db, dbutils.Tournament(
        tournament_id=tournament_id,
        tournament_name=name,
        team_list=teams
    ))
    print("Added %d teams to the database." % len(team_list))
if __name__ == "__main__":
    doc = textwrap.dedent("""
    Usage: ./manage.py [command]
    Management tasks for the scouting database.
    """).strip()
    # Sub-command name -> handler function.
    commands = {
        "addteam": add_team,
        "dumpcsv": dump_csv,
        "importtournament": import_tournament,
    }
    # Require at least one argument (the sub-command).
    if len(sys.argv) <= 1:
        print(doc)
        sys.exit(-1)
    handler = commands.get(sys.argv[1])
    if handler is None:
        # Unknown command: show usage (no error exit, as before).
        print(doc)
    else:
        handler(*sys.argv[2:])
| {"/manage.py": ["/dbutils.py", "/server.py"], "/server.py": ["/dbutils.py"]} |
69,593 | seaquamrobotics/seahawk-scouter | refs/heads/master | /server.py | #TODO:
# -Skills scouter
# -Detailed robot info
# -Robot photo upload/viewer
# -Advanced auton scouter (could be used for skills)
# -Back buttons
from flask import Flask, Markup, render_template, request, g
from bs4 import BeautifulSoup
from datetime import datetime
import requests
import sqlite3
import time
import os
import dbutils
# Configuration
current_tournament_id = 5643  # vexdb event id of the tournament being scouted
current_year = 18  # two-digit season year used in vexdb SKUs (RE-VRC-<year>-<id>)
db_name = 'vex_turning_point' #os.environ['DB_NAME']
# Translation table that deletes whitespace and punctuation (used on
# submitted team names and reporter IPs).
clean = str.maketrans('', '', """ ^$#@~`&;:|{()}[]<>+=!?.,\/*-_"'""")
# Milder table for free-text notes: strips markup-ish characters but keeps
# spaces and basic punctuation.
sanitize = str.maketrans('', '', """^~`;:|{()}[]+=\*_"'""")
app = Flask(__name__)
@app.before_first_request
def get_db():
    """Return the request-scoped SQLite connection, opening it on first use."""
    # One connection per request context, cached on Flask's `g` object.
    db = getattr(g, "db", None)
    if db is None:
        db = g.db = sqlite3.connect(db_name)
    return db
@app.teardown_appcontext
def teardown(exception):
    """Close the request-scoped database connection, if one was opened."""
    db = getattr(g, "db", None)
    if db is not None:
        db.close()
def get_tournament_info(tournament_id):
    """Scrape the competing team list and event name from vexdb.io.

    NOTE(review): depends on vexdb's current HTML layout; the parsing
    will need rewriting if the site changes.
    """
    url = 'https://vexdb.io/events/view/RE-VRC-' + str(current_year) + '-' + str(tournament_id)
    page = BeautifulSoup(requests.get(url).text, 'lxml')
    # Team numbers live in <td class="number"> cells; join them with
    # single spaces (equivalent to the old append-and-trim loop).
    teams = ' '.join(cell.get_text() for cell in page.find_all('td', attrs={'class': 'number'}))
    # The first <h2> on the page is the tournament name.
    tournament_name = page.find('h2').get_text()
    return teams, tournament_name
def get_unscouted_robots(tournament_id):
    """Return the tournament's teams that have no scouting report yet.

    Result order matches the tournament's team list. Replaces the old
    O(teams x reports) nested scan with one pass over the reports and a
    set for membership tests.
    """
    teams = dbutils.get_tournament_by_id(get_db(), tournament_id).team_list.split(" ")
    team_set = set(teams)
    # Teams that appear in at least one report for this tournament.
    scouted = {r.team_name
               for r in dbutils.get_reports_for_tournament(get_db(), tournament_id)
               if r.team_name in team_set}
    return [t for t in teams if t not in scouted]
def compress_reports(tournament_id):
    """Aggregate all scouting reports of a tournament into one row per robot.

    Each row is ``[team, best_driver, avg_driver, best_auton, avg_auton,
    notes_html, report_count]`` with robots in first-appearance order.
    Replaces the old per-robot rescan of the full report list (O(n*m))
    with a single grouping pass.
    """
    grouped = {}  # team_name -> list of its reports, insertion-ordered
    for report in dbutils.get_reports_for_tournament(get_db(), tournament_id):
        grouped.setdefault(report.team_name, []).append(report)
    compressed = []
    for robot, reports in grouped.items():
        drive_scores = [int(r.driver_score) for r in reports]
        auton_scores = [int(r.auton_score) for r in reports]
        # Concatenate the non-empty notes in report order, one per line.
        notes = ''.join('~ ' + r.note + '<br>' for r in reports if r.note)
        count = len(reports)
        # Scores are computed as sums of non-negative counts in /scouting,
        # so max() matches the old best-starts-at-zero behavior.
        compressed.append([robot,
                           max(drive_scores), int(sum(drive_scores) / count),
                           max(auton_scores), int(sum(auton_scores) / count),
                           notes, count])
    return compressed
def robot_power(robot_stats):
    """Score a compiled stats row; a higher value means a stronger robot.

    Sums every column after the team name, skipping the notes column
    (relative index 4 in a compressed row).
    """
    NOTES_OFFSET = 4
    return sum(int(value)
               for offset, value in enumerate(robot_stats[1:])
               if offset != NOTES_OFFSET)
def reverse_bubble_sort(collection):
    """Return a new list of rows sorted by descending robot power.

    Replaces the old hand-written O(n^2) bubble sort with Python's
    O(n log n) Timsort. Sorting ascending with a stable sort and then
    reversing reproduces the original algorithm's result exactly,
    including the order of equal-power rows.
    """
    return sorted(collection, key=robot_power)[::-1]
@app.route('/') # Home page
def index():
    """Render the landing page for the currently configured tournament."""
    current = dbutils.get_tournament_by_id(get_db(), current_tournament_id)
    return render_template('index.html',
                           current_tournament_name=current.tournament_name,
                           current_tournament_id=current_tournament_id,
                           status="Seaquam Robotics Scouting")
@app.route('/scouting', methods=['POST', 'GET']) # Scouting submission page
def scouting():
    """Render the scouting form (GET) or record a submitted match report (POST).

    Scoring follows the form's rules: high flags/caps are worth 2 points,
    low flags/caps 1, 'alliance' park 3 and 'high' driver park 6.
    """
    #TODO: Store diffent autonomous positions
    if request.method == 'POST':
        tournament = dbutils.get_tournament_by_id(get_db(), current_tournament_id)
        tournament_name = tournament.tournament_name
        valid_teams = tournament.team_list.split()
        auton_score = 0
        score = 0
        # Strip punctuation/whitespace from the submitted team number.
        team_name = request.form['team'].translate(clean).upper()
        if team_name in valid_teams:
            print("Report submitted for " + team_name)
            color = request.form.get('color', '')
            side = request.form.get('side', '')
            auton_park_string = request.form.get('auton_park', '')
            driver_park_string = request.form.get('driver_park', '')
            # Record who submitted, keyed by (sanitized) remote address.
            reporter_ip = request.environ['REMOTE_ADDR'].translate(clean)
            auton_low_flags = int(request.form['auton_low_flags'])
            auton_high_flags = int(request.form['auton_high_flags'])
            auton_low_caps = int(request.form['auton_low_caps'])
            auton_high_caps = int(request.form['auton_high_caps'])
            auton_park = 0
            # 'alliance' park scores 3 in autonomous; anything else scores 0.
            if auton_park_string == 'alliance':
                auton_park = 3
            elif auton_park_string == 'none':
                auton_park = 0
            auton_score += auton_low_flags + auton_high_flags * 2 + auton_low_caps + auton_high_caps * 2 + auton_park
            print(auton_score)
            driver_low_flags = int(request.form['driver_low_flags'])
            driver_high_flags = int(request.form['driver_high_flags'])
            driver_low_caps = int(request.form['driver_low_caps'])
            driver_high_caps = int(request.form['driver_high_caps'])
            driver_park = 0
            # 'high' park scores 6 in driver control, 'alliance' scores 3.
            if driver_park_string == 'high':
                driver_park = 6
            elif driver_park_string == 'alliance':
                driver_park = 3
            else:
                driver_park = 0
            driver_score = driver_low_flags + driver_high_flags * 2 + driver_low_caps + driver_high_caps * 2 + driver_park
            print(driver_score)
            # Notes keep spaces but drop markup-ish characters.
            note = request.form['notes'].translate(sanitize)
            report_info = dbutils.Report(
                reporter_ip=reporter_ip,
                team_name=team_name,
                color=color,
                side=side,
                auton_score=auton_score,
                auton_high_flags=auton_high_flags,
                auton_low_flags=auton_low_flags,
                auton_high_caps=auton_high_caps,
                auton_low_caps=auton_low_caps,
                auton_park=auton_park,
                driver_score=driver_score,
                driver_high_flags=driver_high_flags,
                driver_low_flags=driver_low_flags,
                driver_high_caps=driver_high_caps,
                driver_low_caps=driver_low_caps,
                driver_park=driver_park,
                note=note,
                timestamp=int(time.time()))
            dbutils.create_report(get_db(), current_tournament_id, report_info)
        else:
            if len(team_name) == 0:
                team_name = "NULL"
            # Reject reports for teams not registered at this tournament.
            return render_template('index.html', current_tournament_name=tournament_name, current_tournament_id=current_tournament_id, status=Markup('<span class="error">Error:</span> Invalid team name: ' + team_name + " not found in list of participating robots."))
        return render_template('index.html', current_tournament_name=tournament_name, current_tournament_id=current_tournament_id, status="Scouting report sent successfully")
    elif request.method == 'GET':
        return render_template('scouting.html')
@app.route('/data/<int:tournament_id>') # Compiled scouting reports page
def data(tournament_id):
    """Render the aggregated scouting table for a tournament.

    Fixes: the inner column loop no longer shadows the outer rank
    counter ``i``, and the closing ``</div>`` is only emitted when the
    matching "unscouted" ``<div>`` was actually opened.
    """
    tournament_name = dbutils.get_tournament_by_id(get_db(), tournament_id).tournament_name
    robots_data = ''
    for rank, row in enumerate(reverse_bubble_sort(compress_reports(tournament_id))):
        robots_data += '<tr><td>' + str(rank + 1) + '</td>'
        for col, cell in enumerate(row):
            if col == 0:
                # The team name links to its autonomous history page.
                robots_data += '<td><a href="../autonomous/' + str(cell) + '">' + str(cell) + '</a></td>'
            else:
                robots_data += '<td>' + str(cell) + '</td>'
        robots_data += '</tr>'
    robots_data_html = Markup(robots_data)
    unscouted_robots = get_unscouted_robots(tournament_id)
    if unscouted_robots:
        unscouted = '<h2>Not Yet Scouted:</h2><div class="unscouted">'
        for team in unscouted_robots:
            unscouted += team + ' '
        unscouted += '</div>'
    else:
        unscouted = '<h2>All Robots Scouted</h2>'
    unscouted_html = Markup(unscouted)
    return render_template('data.html', tournament_name=tournament_name, data=robots_data_html, unscouted=unscouted_html)
@app.route('/tournaments')
def tournaments():
    """List every stored tournament as a link to its data page."""
    links = [
        '<a class="box2 bluebg" href="data/' + str(t.tournament_id) + '">' + t.tournament_name + '</a>'
        for t in dbutils.get_all_tournaments(get_db())
    ]
    return render_template('past_tournaments.html', tournaments=Markup(''.join(links)))
@app.route('/autonomous/<string:team_name>') # Show all autonomous attempt details for a specified team
def autonomous(team_name):
    """Show every autonomous attempt recorded for one team this tournament.

    Bug fix: the points column previously printed ``r.tournament_id``;
    it now shows the report's autonomous score, which is what the
    " Points" label describes.
    """
    # Lookup tables replace the old if/elif chains; unknown values fall
    # back to the same defaults as before.
    color_classes = {'red': 'redteam ', 'blue': 'blueteam '}
    side_labels = {'right': 'RIGHT', 'left': 'LEFT'}
    autonomous_reports = ''
    for r in dbutils.get_reports_for_tournament(get_db(), current_tournament_id, team_name):
        classes = color_classes.get(r.color, 'noteam ')
        side = side_labels.get(r.side, '?????')
        autonomous_reports += ('<div class="' + classes + 'box2"><span class="left">'
                               + datetime.fromtimestamp(r.timestamp).strftime('%I:%M %p')
                               + '</span>' + str(r.auton_score) + ' Points <span class="right">'
                               + side + ' TILE</span></div>')
    return render_template('autonomous.html', team_name=team_name.upper(),
                           autonomous_reports=Markup(autonomous_reports))
@app.route("/upload", methods=['POST', 'GET'])
def upload():
    """Render the robot-photo upload page (upload handling not implemented)."""
    return render_template('upload.html')
    # TODO: actually accept the files; the stub below was never finished.
    #uploaded_files = flask.request.files.getlist("file[]")
    #print(uploaded_files)
    #return "Your files have been uploaded"
#@app.route('/delete') # Page for deleting incorrect scouting reports
#ef delete():
# reporter_ip = request.environ['REMOTE_ADDR'].translate(clean)
# reports_html = ''
# c.execute('SELECT team_name, auton_score, driver_score, highest_stack, notes, time_stamp FROM Reports WHERE reporter_ip=' + str(reporter_ip))
# for row in c.fetchall():
# reports_html += '<div class="box2 bluebg">' + str(row[0]) + '</div>'
#
# return render_template('delete.html', reports=reports_html)
@app.route('/agenda')
def agenda():
    """Render the static event-agenda page."""
    return render_template('agenda.html')
@app.errorhandler(404) # Error 404 page
def page_not_found(e):
    """Serve the custom 404 page together with the proper HTTP status."""
    return render_template('404.html'), 404
if __name__ == '__main__':
    # csv_output(current_tournament_id)
    # Create tables if they do not already exist
    #c.execute('SHOW TABLES')
    # Db connection object for setting up
    db = sqlite3.connect(db_name)
    dbutils.create_db_tables(db)
    # If current tournament does not exist in Tournaments table then add it
    if dbutils.get_tournament_by_id(db, current_tournament_id) is None:
        # First run for this tournament: scrape its teams and name from vexdb.
        teams, tournament_name = get_tournament_info(current_tournament_id)
        tournament_info = dbutils.Tournament(
            tournament_id=current_tournament_id,
            tournament_name=tournament_name,
            team_list=teams
        )
        # Make new tournament entry
        dbutils.create_tournament(db, tournament_info)
    # NOTE(review): debug=True and binding 0.0.0.0 are fine for an event LAN
    # but must not be exposed to the open internet.
    app.run(debug=True, host='0.0.0.0', port=8000)
| {"/manage.py": ["/dbutils.py", "/server.py"], "/server.py": ["/dbutils.py"]} |
69,594 | sj43/hackricecryptogame | refs/heads/master | /GameFunctions.py | import sys
from PySide2.QtWidgets import QApplication, QPushButton, QListWidget
from PySide2.QtCore import QFile, QObject, Signal, Slot
from classes.Player import *
from classes.Community import *
from classes.Information import *
# from Crypto.Parse import *
# from Crypto.Market import *
class GameFunctions(QObject):
    """Game logic: player actions plus the monthly income/expense cycle.

    Bug fixes in this revision:
    * ``player_get_loan`` *subtracted* the monthly loan cost from
      ``payments``, which made ``pay_loans`` pay the player instead of
      charging them; it now adds it.
    * ``sell_assets`` removed sold properties from the *investment* list
      and mutated both lists while iterating them (skipping elements);
      it now iterates over copies and removes from the correct list.
    * ``update_assets`` removed matured fixed deposits from the list it
      was iterating, and its growth clamp ``(-0.5, 0.5)`` rejected
      virtually every multiplier (growth centres on ~1.05), freezing all
      stock values.
    """
    # Signals consumed by the UI windows.
    max_loan_amount = Signal(float, float)
    see_loan = Signal()
    see_property = Signal()
    see_investment = Signal()
    show_payment = Signal(float)

    def __init__(self, startDate, parent=None):
        super(GameFunctions, self).__init__(parent)
        self.date = startDate
        # salary, livingExpenses, savings, card, credit, payments
        self.player = Player(100000.0 / 12.0, 500, 100000, 5000, 700, 0)
        # interestRate, growthGDP
        self.economy = Economy(0.05, 5.0)
        self.bank = Bank()

    @Slot(int)
    def player_buy_crypto(self, choiceCrypto):
        # TODO: wire up the crypto market (Crypto.Parse / Crypto.Market):
        # cryptoInfo = get_crypto(choiceCrypto)
        # self.player.assets.add_cryptocurrency(cryptoInfo)
        # self.player.savings -= cryptoInfo[1]
        print("buy")

    @Slot(int)
    def player_sell_crypto(self, choiceCrypto):
        # TODO: wire up the crypto market (see player_buy_crypto).
        print("sell")

    @Slot()
    def player_ask_property(self):
        """Ask the UI to show the property-purchase dialog."""
        self.see_property.emit()

    @Slot(int)
    def player_buy_property(self, choiceProperty):
        """Buy the chosen property and deduct its price from savings."""
        propertyInfo = get_property(choiceProperty)
        self.player.assets.add_property(propertyInfo)
        self.player.savings -= propertyInfo[1]

    @Slot()
    def player_ask_investment(self):
        """Ask the UI to show the investment dialog."""
        self.see_investment.emit()

    @Slot(int)
    def player_make_investment(self, choiceInvestment):
        """Buy the chosen investment and deduct its price from savings."""
        investmentInfo = get_investment(choiceInvestment)
        self.player.assets.add_investment(investmentInfo)
        self.player.savings -= investmentInfo[1]

    @Slot()
    def player_ask_loan(self):
        """Publish the bank's current offer and open the loan dialog."""
        maxLoanAmount = self.bank.howMuchCanILoan(self.player.salary, self.player.credit)
        loanInterest = self.bank.getLoanInterest(self.player.credit)
        self.max_loan_amount.emit(maxLoanAmount, loanInterest)
        self.see_loan.emit()

    @Slot(float)
    def player_get_loan(self, choicePercent):
        """Take out ``choicePercent`` of the maximum loan.

        Credits the principal to savings and adds the monthly interest
        cost to the player's recurring payments (bug fix: this was
        previously subtracted).
        """
        loanAmount = choicePercent * self.bank.howMuchCanILoan(self.player.salary, self.player.credit)
        loanInterest = self.bank.getLoanInterest(self.player.credit)
        self.player.savings += loanAmount
        self.player.payments += (loanInterest / 12.0) * loanAmount

    def get_income(self):
        """Add the monthly salary to savings."""
        self.player.savings += self.player.salary
        # TODO: also collect income from crypto/investment/property assets.

    def pay_living_expenses(self):
        """Show the monthly living-expense bill; payment happens via slot."""
        self.show_payment.emit(self.player.livingExpenses)

    @Slot(int)
    def choice_living_expenses(self, choicePayment):
        # 1 = pay by credit card, 2 = pay from savings.
        if choicePayment == 1:
            self.player.card -= self.player.livingExpenses
        elif choicePayment == 2:
            self.player.savings -= self.player.livingExpenses

    def sell_assets(self, paymentLeft):
        """Liquidate assets (investments first) until ``paymentLeft`` is covered.

        Iterates over snapshots of the asset lists because the originals
        are mutated inside the loops, and removes sold properties from
        the property list (both were bugs). Returns the leftover balance:
        positive if the liquidation covered more than the debt.
        """
        for investmentAsset in list(self.player.assets.investment):
            if paymentLeft <= 0:
                break
            paymentLeft -= investmentAsset.investmentValue
            self.player.assets.investment.remove(investmentAsset)
        for propertyAsset in list(self.player.assets.property):
            if paymentLeft <= 0:
                break
            paymentLeft -= propertyAsset.propertyValue
            self.player.assets.property.remove(propertyAsset)
        return -paymentLeft

    def pay_loans(self):
        """Charge the monthly loan payments, liquidating assets if needed."""
        paymentLeft = self.player.payments
        self.show_payment.emit(paymentLeft)
        self.player.savings -= paymentLeft
        if self.player.savings < 0:
            # Not enough cash: sell assets to cover the shortfall.
            self.player.savings = self.sell_assets(-self.player.savings)

    def pay_card(self):
        """Show the outstanding credit-card amount (limit appears to be 5000)."""
        self.show_payment.emit(5000 - self.player.card)

    @Slot(int)
    def choice_card(self, choiceCard):
        # 1 = repay the card balance in full, 2 = pay only 1.5% interest.
        paymentLeft = 5000 - self.player.card
        if choiceCard == 1:
            self.player.savings -= paymentLeft
            if self.player.savings < 0:
                self.player.savings = self.sell_assets(-self.player.savings)
            # NOTE(review): resetting card to 0 after a *full* repayment looks
            # suspicious -- 5000 is treated as the available allowance
            # elsewhere; confirm the intended meaning of ``card``.
            self.player.card = 0
        elif choiceCard == 2:
            self.player.savings -= paymentLeft * 0.015
            if self.player.savings < 0:
                self.player.savings = self.sell_assets(-self.player.savings)

    def update_salary(self):
        """Apply the monthly raise (~0.25%/month, about 3%/year)."""
        self.player.salary *= 1.0025

    def update_credit(self):
        """Update the credit score and clamp it to the standard 300-850 range."""
        self.player.credit += self.bank.updateCreditScore(self.player.card)
        if self.player.credit < 300:
            self.player.credit = 300
        if self.player.credit > 850:
            self.player.credit = 850

    def update_assets(self):
        """Apply monthly growth/decay to investments and properties."""
        # Iterate a snapshot: matured fixed deposits are removed below.
        for investmentAsset in list(self.player.assets.investment):
            if "stock" in investmentAsset.name:
                growth = 1.0 + (random.gauss(self.economy.growthGDP, 5) / 100)
                # Bug fix: the old bounds (-0.5, 0.5) rejected nearly every
                # multiplier. Discard only extreme swings beyond +/-50%.
                if (growth < 0.5) or (growth > 1.5):
                    growth = 1.0
                investmentAsset.investmentValue *= growth
            if "fixed" in investmentAsset.name:
                if investmentAsset.fixedCount == 0:
                    self.player.assets.investment.remove(investmentAsset)
        for propertyAsset in self.player.assets.property:
            if "estate" in propertyAsset.name:
                # Real estate appreciates with the monthly interest rate.
                propertyAsset.propertyValue *= (1 + self.economy.interestRate / 12.0)
            if "vehicle" in propertyAsset.name:
                # Vehicles depreciate 2% per month.
                propertyAsset.propertyValue *= 0.98

    def update_netWorth(self):
        """Recompute and cache the player's net worth."""
        self.player.netWorth = self.player.compute_net_worth()
| {"/GameFunctions.py": ["/classes/Player.py", "/classes/Community.py", "/classes/Information.py"], "/main.py": ["/GameInstance.py"], "/classes/Player.py": ["/classes/Community.py", "/classes/Information.py"], "/GameInstance.py": ["/windows/OptionWindows.py", "/GameFunctions.py"]} |
69,595 | sj43/hackricecryptogame | refs/heads/master | /windows/OptionWindows.py | import sys
from PySide2.QtUiTools import QUiLoader
from PySide2.QtWidgets import QApplication, QPushButton, QListWidget
from PySide2.QtCore import QFile, QObject, Signal, Slot
class CryptoWindow(QObject):
    """Buy/sell cryptocurrency dialog loaded from a Qt Designer .ui file.

    Emits ``buy_crypto(n)`` / ``sell_crypto(n)`` with the 1-based index
    of the chosen coin and hides itself. The twelve button handlers are
    kept (their names are part of the wiring) but now share one helper
    instead of twelve copies of the same two lines.
    """
    buy_crypto = Signal(int)
    sell_crypto = Signal(int)

    # Number of buy/sell button pairs defined in the .ui file.
    NUM_CHOICES = 6

    def __init__(self, ui_file, parent=None):
        super(CryptoWindow, self).__init__(parent)
        self.window = QUiLoader().load(ui_file)
        self.extract_buttons()
        self.connect_signals()
        self.window.hide()

    def extract_buttons(self):
        """Look up the named buttons created by the .ui file."""
        self.back_button = self.window.findChild(QPushButton, 'back_button')
        for i in range(1, self.NUM_CHOICES + 1):
            setattr(self, 'buy_button_%d' % i,
                    self.window.findChild(QPushButton, 'buy_button_%d' % i))
            setattr(self, 'sell_button_%d' % i,
                    self.window.findChild(QPushButton, 'sell_button_%d' % i))

    def connect_signals(self):
        """Wire each button to its (same-named) handler method."""
        self.back_button.clicked.connect(self.close_window)
        for i in range(1, self.NUM_CHOICES + 1):
            getattr(self, 'buy_button_%d' % i).clicked.connect(
                getattr(self, 'buy_crypto_%d' % i))
            getattr(self, 'sell_button_%d' % i).clicked.connect(
                getattr(self, 'sell_crypto_%d' % i))

    def open_window(self):
        self.window.show()

    def close_window(self):
        self.window.hide()

    def _choose(self, signal, choice):
        # Shared tail of every handler: emit the choice, close the dialog.
        signal.emit(choice)
        self.window.hide()

    def buy_crypto_1(self):
        self._choose(self.buy_crypto, 1)

    def buy_crypto_2(self):
        self._choose(self.buy_crypto, 2)

    def buy_crypto_3(self):
        self._choose(self.buy_crypto, 3)

    def buy_crypto_4(self):
        self._choose(self.buy_crypto, 4)

    def buy_crypto_5(self):
        self._choose(self.buy_crypto, 5)

    def buy_crypto_6(self):
        self._choose(self.buy_crypto, 6)

    def sell_crypto_1(self):
        self._choose(self.sell_crypto, 1)

    def sell_crypto_2(self):
        self._choose(self.sell_crypto, 2)

    def sell_crypto_3(self):
        self._choose(self.sell_crypto, 3)

    def sell_crypto_4(self):
        self._choose(self.sell_crypto, 4)

    def sell_crypto_5(self):
        self._choose(self.sell_crypto, 5)

    def sell_crypto_6(self):
        self._choose(self.sell_crypto, 6)
class PropertyWindow(QObject):
    """Property-purchase dialog: emits ``buy_property(choice)`` and closes.

    Choice ids 1-3 are real estate (apartment/house/penthouse), 4-6 are
    vehicles (economy/middle/luxury), matching classes.Information.
    """
    buy_property = Signal(int)

    # .ui button object names, in choice-id order (1-based).
    _BUTTON_NAMES = (
        'estate_apartment_button',
        'estate_house_button',
        'estate_penthouse_button',
        'vehicle_economy_button',
        'vehicle_middle_button',
        'vehicle_luxury_button',
    )

    def __init__(self, ui_file, parent=None):
        super(PropertyWindow, self).__init__(parent)
        self.window = QUiLoader().load(ui_file)
        self.extract_buttons()
        self.connect_signals()
        self.window.hide()

    def extract_buttons(self):
        """Look up the named buttons created by the .ui file."""
        self.back_button = self.window.findChild(QPushButton, 'back_button')
        for name in self._BUTTON_NAMES:
            setattr(self, name, self.window.findChild(QPushButton, name))

    def connect_signals(self):
        self.back_button.clicked.connect(self.close_window)
        handlers = (self.buy_estate_apartment, self.buy_estate_house,
                    self.buy_estate_penthouse, self.buy_vehicle_economy,
                    self.buy_vehicle_middle, self.buy_vehicle_luxury)
        for name, handler in zip(self._BUTTON_NAMES, handlers):
            getattr(self, name).clicked.connect(handler)

    def open_window(self):
        self.window.show()

    def close_window(self):
        self.window.hide()

    def _buy(self, choice):
        # Shared tail of every handler: emit the choice, close the dialog.
        self.buy_property.emit(choice)
        self.window.hide()

    def buy_estate_apartment(self):
        self._buy(1)

    def buy_estate_house(self):
        self._buy(2)

    def buy_estate_penthouse(self):
        self._buy(3)

    def buy_vehicle_economy(self):
        self._buy(4)

    def buy_vehicle_middle(self):
        self._buy(5)

    def buy_vehicle_luxury(self):
        self._buy(6)
class InvestmentWindow(QObject):
    """Investment dialog: emits ``make_investment(choice)`` and closes.

    Choice ids 1-3 are stocks (low/avg/high risk), 4-6 are fixed
    deposits (3/6/12 months), matching classes.Information.
    """
    make_investment = Signal(int)

    # .ui button object names, in choice-id order (1-based).
    _BUTTON_NAMES = (
        'stock_low_button',
        'stock_avg_button',
        'stock_high_button',
        'fixed_3_button',
        'fixed_6_button',
        'fixed_12_button',
    )

    def __init__(self, ui_file, parent=None):
        super(InvestmentWindow, self).__init__(parent)
        self.window = QUiLoader().load(ui_file)
        self.extract_buttons()
        self.connect_signals()
        self.window.hide()

    def extract_buttons(self):
        """Look up the named buttons created by the .ui file."""
        self.back_button = self.window.findChild(QPushButton, 'back_button')
        for name in self._BUTTON_NAMES:
            setattr(self, name, self.window.findChild(QPushButton, name))

    def connect_signals(self):
        self.back_button.clicked.connect(self.close_window)
        handlers = (self.buy_stock_low, self.buy_stock_avg, self.buy_stock_high,
                    self.buy_fixed_3, self.buy_fixed_6, self.buy_fixed_12)
        for name, handler in zip(self._BUTTON_NAMES, handlers):
            getattr(self, name).clicked.connect(handler)

    def open_window(self):
        self.window.show()

    def close_window(self):
        self.window.hide()

    def _invest(self, choice):
        # Shared tail of every handler: emit the choice, close the dialog.
        self.make_investment.emit(choice)
        self.window.hide()

    def buy_stock_low(self):
        self._invest(1)

    def buy_stock_avg(self):
        self._invest(2)

    def buy_stock_high(self):
        self._invest(3)

    def buy_fixed_3(self):
        self._invest(4)

    def buy_fixed_6(self):
        self._invest(5)

    def buy_fixed_12(self):
        self._invest(6)
class BankWindow(QObject):
    """Loan dialog: shows the bank's offer and emits the chosen fraction.

    Bug fix: ``get_loan`` was declared ``Signal(int)`` although the
    handlers emit the fractions 1.0 / 0.3 / 0.1 (and the consumer is
    ``Slot(float)`` in GameFunctions); as an int signal, 0.3 and 0.1
    were truncated to 0. It is now declared as a float signal.
    """
    get_loan = Signal(float)

    def __init__(self, ui_file, parent=None):
        super(BankWindow, self).__init__(parent)
        self.window = QUiLoader().load(ui_file)
        self.extract_buttons()
        self.connect_signals()
        self.window.hide()

    def extract_buttons(self):
        """Look up the widgets created by the .ui file."""
        self.interest_list = self.window.findChild(QListWidget, 'interest_list')
        self.back_button = self.window.findChild(QPushButton, 'back_button')
        self.loan_100_button = self.window.findChild(QPushButton, 'loan_100_button')
        self.loan_30_button = self.window.findChild(QPushButton, 'loan_30_button')
        self.loan_10_button = self.window.findChild(QPushButton, 'loan_10_button')

    def connect_signals(self):
        self.back_button.clicked.connect(self.close_window)
        self.loan_100_button.clicked.connect(self.loan_100)
        self.loan_30_button.clicked.connect(self.loan_30)
        self.loan_10_button.clicked.connect(self.loan_10)

    def open_window(self):
        self.window.show()

    def close_window(self):
        self.window.hide()

    def set_loan_amount(self, maxLoanAmount, loanInterest):
        """Refresh the info list with the bank's current offer."""
        self.interest_list.clear()
        self.interest_list.addItem("Max Loan Amount: " + str(maxLoanAmount))
        self.interest_list.addItem("Loan Interest: " + str(loanInterest))

    def _loan(self, fraction):
        # Emit the chosen fraction of the maximum loan, then close.
        self.get_loan.emit(fraction)
        self.window.hide()

    def loan_100(self):
        self._loan(1.0)

    def loan_30(self):
        self._loan(0.3)

    def loan_10(self):
        self._loan(0.1)
class MainWindow(QObject):
    """Main game window: navigation to the sub-dialogs plus payment choices.

    Bug fix: ``display_fee_payment`` passed its argument straight to
    ``QListWidget.addItem``, which requires a string; the payments are
    emitted as floats (``show_payment = Signal(float)`` in
    GameFunctions), so the value is now converted with ``str``.
    """
    open_crypto = Signal()
    open_property = Signal()
    open_investment = Signal()
    open_bank = Signal()
    living_expenses = Signal(int)
    card_repay = Signal(int)
    end_turn_signal = Signal()

    def __init__(self, ui_file, parent=None):
        super(MainWindow, self).__init__(parent)
        self.window = QUiLoader().load(ui_file)
        self.extract_buttons()
        self.connect_signals()
        self.window.show()
        self.fee_payment_list = self.window.findChild(QListWidget, 'fee_payment')

    def extract_buttons(self):
        """Look up the named buttons created by the .ui file."""
        self.bank_button = self.window.findChild(QPushButton, 'BankButton')
        self.crypto_button = self.window.findChild(QPushButton, 'CryptoButton')
        self.property_button = self.window.findChild(QPushButton, 'PropertyButton')
        self.investment_button = self.window.findChild(QPushButton, 'InvestmentButton')
        self.end_turn_button = self.window.findChild(QPushButton, 'end_turn')
        self.living_card_button = self.window.findChild(QPushButton, 'living_card')
        self.living_savings_button = self.window.findChild(QPushButton, 'living_savings')
        self.card_repay_button = self.window.findChild(QPushButton, 'card_repay')
        self.card_notrepay_button = self.window.findChild(QPushButton, 'card_notrepay')

    def connect_signals(self):
        self.bank_button.clicked.connect(self.open_bank_window)
        self.crypto_button.clicked.connect(self.open_crypto_window)
        self.property_button.clicked.connect(self.open_property_window)
        self.investment_button.clicked.connect(self.open_investment_window)
        self.living_card_button.clicked.connect(self.living_expenses_card)
        self.living_savings_button.clicked.connect(self.living_expenses_savings)
        self.card_repay_button.clicked.connect(self.credit_card_repay)
        self.card_notrepay_button.clicked.connect(self.credit_card_notrepay)
        self.end_turn_button.clicked.connect(self.end_turn)

    def open_window(self):
        self.window.show()

    def close_window(self):
        self.window.hide()

    def open_bank_window(self):
        self.open_bank.emit()

    def open_crypto_window(self):
        self.open_crypto.emit()

    def open_property_window(self):
        self.open_property.emit()

    def open_investment_window(self):
        self.open_investment.emit()

    def living_expenses_card(self):
        # 1 = pay living expenses by credit card.
        self.living_expenses.emit(1)

    def living_expenses_savings(self):
        # 2 = pay living expenses from savings.
        self.living_expenses.emit(2)

    def credit_card_repay(self):
        # 1 = repay the card balance in full.
        self.card_repay.emit(1)

    def credit_card_notrepay(self):
        # 2 = pay interest only.
        self.card_repay.emit(2)

    def display_fee_payment(self, payment):
        # addItem requires a string; payments arrive as floats.
        self.fee_payment_list.addItem(str(payment))

    def end_turn(self):
        self.end_turn_signal.emit()
| {"/GameFunctions.py": ["/classes/Player.py", "/classes/Community.py", "/classes/Information.py"], "/main.py": ["/GameInstance.py"], "/classes/Player.py": ["/classes/Community.py", "/classes/Information.py"], "/GameInstance.py": ["/windows/OptionWindows.py", "/GameFunctions.py"]} |
69,596 | sj43/hackricecryptogame | refs/heads/master | /main.py | from GameInstance import *
# Entry point: construct the game (2019 -- presumably the starting year;
# confirm against GameInstance's constructor).
GameInstance(2019)
## Beta version
| {"/GameFunctions.py": ["/classes/Player.py", "/classes/Community.py", "/classes/Information.py"], "/main.py": ["/GameInstance.py"], "/classes/Player.py": ["/classes/Community.py", "/classes/Information.py"], "/GameInstance.py": ["/windows/OptionWindows.py", "/GameFunctions.py"]} |
69,597 | sj43/hackricecryptogame | refs/heads/master | /classes/Player.py | import sys
from PySide2.QtWidgets import QApplication, QPushButton
from PySide2.QtCore import QObject, Signal, Slot
import random
from classes.Community import *
from classes.Information import *
class Property:
    """A large asset (real estate or vehicle) owned by the player.

    The asset category is encoded in ``name`` ("estate"/"vehicle"),
    mirroring the entries in classes.Information.
    """

    def __init__(self, name, propertyValue):
        self.name = name
        self.propertyValue = propertyValue

    def income(self):
        """Return the asset's monthly income.

        Bug fix: names matching neither category used to fall through
        and implicitly return None, which would break any arithmetic on
        the result; they now yield 0.
        """
        if "estate" in self.name:
            # Real estate yields 1% of its value per month.
            return self.propertyValue * 0.01
        if "vehicle" in self.name:
            # Vehicles yield 2% of their value per month.
            return self.propertyValue * 0.02
        return 0
class Investment:
    """A financial investment: a stock position or a fixed-term deposit.

    The kind is encoded in ``name`` ("stock_low/avg/high" or
    "fixed_3/6/12"), mirroring the entries in classes.Information.
    """

    def __init__(self, name, investmentValue, fixedCount=0):
        self.name = name
        self.investmentValue = investmentValue
        # Bug fix: the fixedCount parameter used to be ignored and the
        # attribute was never set for names without a 3/6/12 marker
        # (e.g. stocks), causing AttributeError later; default to the
        # argument so every instance has the attribute.
        self.fixedCount = fixedCount
        if "3" in self.name:
            self.fixedCount = 3
        if "6" in self.name:
            self.fixedCount = 6
        if "12" in self.name:
            self.fixedCount = 12
        self.fixedInterest = 0.002

    def income(self):
        """Return the monthly income.

        Stocks yield a random gain/loss scaled by risk. Fixed deposits
        return the sentinel -1 until maturity, then principal plus the
        accrued interest. Bug fix: unrecognized names previously fell
        through and returned None; they now yield 0.
        """
        if "stock" in self.name:
            if "low" in self.name:
                return self.investmentValue * random.uniform(-0.002, 0.004)
            if "avg" in self.name:
                return self.investmentValue * random.uniform(-0.004, 0.008)
            if "high" in self.name:
                return self.investmentValue * random.uniform(-0.008, 0.016)
        if "fixed" in self.name:
            self.fixedCount -= 1
            self.fixedInterest += 0.0001
            if self.fixedCount == 0:
                # Matured: pay out principal plus accumulated interest.
                return self.investmentValue * (1 + self.fixedInterest)
            else:
                # Still locked: sentinel value, no income yet.
                return -1
        return 0
class CryptoCurrency:
    """A named cryptocurrency holding and its current value.

    Plain data holder; no behavior of its own.
    """

    def __init__(self, name, currencyValue):
        self.name = name
        self.currencyValue = currencyValue
class PlayerAssets:
    """Container grouping a player's holdings by asset class."""

    def __init__(self):
        # One list per asset class; all start empty.
        self.property = []
        self.investment = []
        self.cryptocurrency = []

    def add_property(self, propertyInfo):
        # propertyInfo is a (name, value) pair.
        name, value = propertyInfo[0], propertyInfo[1]
        self.property.append(Property(name, value))

    def add_investment(self, investmentInfo):
        # investmentInfo is a (name, value) pair.
        name, value = investmentInfo[0], investmentInfo[1]
        self.investment.append(Investment(name, value))

    def add_cryptocurrency(self, cryptoAsset):
        # Crypto assets arrive pre-constructed, so store them as-is.
        self.cryptocurrency.append(cryptoAsset)
class Player:
    """A player's finances: cash flows, credit standing and assets."""

    def __init__(self, salary, livingExpenses, savings, card, credit, payments):
        self.salary = salary
        self.livingExpenses = livingExpenses
        self.savings = savings
        self.card = card
        self.credit = credit
        self.payments = payments
        self.assets = PlayerAssets()   # starts empty
        self.netWorth = self.compute_net_worth()
        self.my_quest = None           # active Quest, if any

    def compute_net_worth(self):
        """Return savings plus the face value of every owned asset."""
        total = self.savings
        total += sum(p.propertyValue for p in self.assets.property)
        total += sum(i.investmentValue for i in self.assets.investment)
        total += sum(c.currencyValue for c in self.assets.cryptocurrency)
        return total

    def set_quest(self, quest):
        """Attach the player's current quest."""
        self.my_quest = quest
| {"/GameFunctions.py": ["/classes/Player.py", "/classes/Community.py", "/classes/Information.py"], "/main.py": ["/GameInstance.py"], "/classes/Player.py": ["/classes/Community.py", "/classes/Information.py"], "/GameInstance.py": ["/windows/OptionWindows.py", "/GameFunctions.py"]} |
69,598 | sj43/hackricecryptogame | refs/heads/master | /classes/Information.py | def get_property(choiceProperty):
return property_choices[choiceProperty - 1]
def get_investment(choiceInvestment):
    """Return the (name, value) pair for a 1-based investment menu choice."""
    return investment_choices[choiceInvestment - 1]


# Choices: (label, price) pairs; the label encodes the asset kind.
property_choices = (
    ("estate_apartment", 150000),
    ("estate_house", 300000),
    ("estate_penthouse", 600000),
    ("vehicle_economy", 20000),
    ("vehicle_middle", 50000),
    ("vehicle_luxury", 100000),
)

investment_choices = (
    ("stock_low", 1000),
    ("stock_avg", 1000),
    ("stock_high", 1000),
    ("fixed_3", 1000),
    ("fixed_6", 1000),
    ("fixed_12", 1000),
)
# Quests
# Maps quest id (1-8) -> (description shown to the player, time limit);
# the limit is added to the quest's start month to get the deadline,
# 0 meaning "within the current turn".  Consumed by classes/quest.py.
quests = {1: ("The economy is booming! It’s a chance for you to move to a new house! Purchase a new house. (within 3 months)", 3),
          2: ("You got into an accident! You must purchase a new one (within this month)", 0),
          3: ("You got robbed!!! You lose 5 percent of your money (instantly). Exchange cryptocurrencies to make up for your loss. (within 4 months)", 4),
          4: ("Are you confident in exchanging cryptocurrencies and making money? Increase your net value by 100000 (within 6 turns)", 6),
          5: ("Show me that you can become a millionaire. Increase your net value by $1000000 by exchanging cryptocurrencies (within 15 turns)", 15),
          6: ("I know you want more $$. Loan 3x salary from bank. (by end of this turn)", 0),
          7: ("Increase your credit score by 100. (within 10 turns)", 10),
          8: ("Stock market crashed!! You will lose all stocks. RIP. Recover your loss. (within 15 turns)", 15)}
"""
# Vehicle
Bicycle = Community.Vehicle("Bicycle", 300, vehicleMonthlyPayment(300, Community.Economy.get_interestRate()))
EconomyClassVehicle = Community.Vehicle("EconomyClassVehicle", 20000, vehicleMonthlyPayment(20000, Community.Economy.get_interestRate()))
MiddleClassVehicle = Community.Vehicle("MiddleClassVehicle", 50000, vehicleMonthlyPayment(50000, Community.Economy.get_interestRate()))
LuxuryClassVehicle = Community.Vehicle("LuxuryClassVehicle", 100000, vehicleMonthlyPayment(100000, Community.Economy.get_interestRate()))
SuperClassVehicle = Community.Vehicle("SuperClassVehicle", 200000, vehicleMonthlyPayment(200000, Community.Economy.get_interestRate()))
# RealEstate
Hut = Community.Realestate("Hut", 5000, mortgageFormula(5000, Community.Economy.get_interestRate()))
Garage = Community.Realestate("Garage", 30000, mortgageFormula(30000, Community.Economy.get_interestRate()))
OneBedroomApt = Community.Realestate("OneBedroomApt", 100000, mortgageFormula(100000, Community.Economy.get_interestRate()))
TwoBedroomApt = Community.Realestate("TwoBedroomApt", 170000, mortgageFormula(170000, Community.Economy.get_interestRate()))
House = Community.Realestate("House", 300000, mortgageFormula(300000, Community.Economy.get_interestRate()))
Penthouse = Community.Realestate("Penthouse", 500000, mortgageFormula(500000, Community.Economy.get_interestRate()))
Mansion = Community.Realestate("Penthouse", 1000000, mortgageFormula(1000000, Community.Economy.get_interestRate()))
#stocks -- NEED TO CALL the method "return_result" !
low_risk_low_return_50000 = Community.Stock(50000, "low")
avg_risk_avg_return_50000 = Community.Stock(50000, "avg")
high_risk_high_return_50000 = Community.Stock(50000, "high")
low_risk_low_return_100000 = Community.Stock(100000, "low")
avg_risk_avg_return_100000 = Community.Stock(100000, "avg")
high_risk_high_return_100000 = Community.Stock(100000, "high")
low_risk_low_return_500000 = Community.Stock(500000, "low")
avg_risk_avg_return_500000 = Community.Stock(500000, "avg")
high_risk_high_return_500000 = Community.Stock(500000, "high")
low_risk_low_return_1000000 = Community.Stock(1000000, "low")
avg_risk_avg_return_1000000 = Community.Stock(1000000, "avg")
high_risk_high_return_1000000 = Community.Stock(1000000, "high")
# Fixed Saving -- Whatkind_initialamount_years
fixed_50000_2 = Community.FixedSaving(50000, 2, 0.005)
fixed_50000_5 = Community.FixedSaving(50000, 5, 0.007)
fixed_50000_10 = Community.FixedSaving(50000, 10, 0.01)
fixed_100000_2 = Community.FixedSaving(100000, 2, 0.005)
fixed_100000_5 = Community.FixedSaving(100000, 5, 0.007)
fixed_100000_10 = Community.FixedSaving(100000, 10, 0.01)
fixed_500000_2 = Community.FixedSaving(500000, 2, 0.005)
fixed_500000_5 = Community.FixedSaving(500000, 5, 0.007)
fixed_500000_10 = Community.FixedSaving(500000, 10, 0.01)
fixed_1000000_2 = Community.FixedSaving(1000000, 2, 0.005)
fixed_1000000_5 = Community.FixedSaving(1000000, 5, 0.007)
fixed_1000000_10 = Community.FixedSaving(1000000, 10, 0.01)
# Choices
property_choices = {"Vehicle":{Bicycle, EconomyClassVehicle, MiddleClassVehicle, LuxuryClassVehicle, SuperClassVehicle},
"RealEstate":{Hut, Garage, OneBedroomApt, TwoBedroomApt, House, Penthouse, Mansion}}
stock_choices = {low_risk_low_return_50000,
avg_risk_avg_return_50000,
high_risk_high_return_50000,
low_risk_low_return_100000,
avg_risk_avg_return_100000,
high_risk_high_return_100000,
low_risk_low_return_500000,
avg_risk_avg_return_500000,
high_risk_high_return_500000,
low_risk_low_return_1000000,
avg_risk_avg_return_1000000,
high_risk_high_return_1000000}
fixedsaving_choices = {fixed_50000_2,
fixed_50000_5,
fixed_50000_10,
fixed_100000_2,
fixed_100000_5,
fixed_100000_10,
fixed_500000_2,
fixed_500000_5,
fixed_500000_10,
fixed_1000000_2,
fixed_1000000_5,
fixed_1000000_10}
"""
| {"/GameFunctions.py": ["/classes/Player.py", "/classes/Community.py", "/classes/Information.py"], "/main.py": ["/GameInstance.py"], "/classes/Player.py": ["/classes/Community.py", "/classes/Information.py"], "/GameInstance.py": ["/windows/OptionWindows.py", "/GameFunctions.py"]} |
class Quest:
    """A timed challenge issued to the player.

    NOTE(review): this module uses `random` and the `quests` table from
    classes/Information.py, but the file shows no imports for either;
    they must be provided by the importer (add explicit imports).
    """

    def __init__(self, date, player=None):
        """Create a quest starting at month `date`.

        `player` is optional for backward compatibility (the original
        called the snapshot step with no player at all, which could
        never work); when given, the quest's success target is
        snapshotted from the player's current state.
        """
        self.start_month = date
        self.end_month = -1
        self.success = False
        self.whichquest = -1
        self.quest_string = None
        self.info_to_check = None
        # Bug fix: these were called as bare names (NameError) -- they
        # are methods and need `self`.
        self.generate_quest_randomly()
        if player is not None:
            self.start_info_check(player)

    def generate_quest_randomly(self):
        """Pick one of quests 1..8 at random and compute the deadline."""
        # Bug fix: `random(1, 9)` called the module object; randint was meant.
        self.whichquest = random.randint(1, 8)
        self.quest_string, time_constraint = quests[self.whichquest]
        self.end_month = self.start_month + time_constraint

    def start_info_check(self, player):
        """Snapshot the value this quest will be judged against.

        Some quests also apply their immediate effect here (the robbery
        deducts savings; the market crash wipes all investments).
        """
        q = self.whichquest
        if q in (1, 2):
            # Own one more property than currently owned.
            # Bug fix: `player.asset` -> `player.assets`.
            self.info_to_check = len(player.assets.property) + 1
        elif q == 3:
            # Robbery: lose 5% of savings now, recover to the old level.
            lost = 0.05 * player.savings
            self.info_to_check = player.savings
            # Bug fix: the original subtracted from a local copy of the
            # savings value, which had no effect on the player.
            player.savings -= lost
        elif q == 4:
            self.info_to_check = player.netWorth + 100000
        elif q == 5:
            self.info_to_check = player.netWorth + 1000000
        elif q == 6:
            self.info_to_check = player.payments - 3 * player.salary
        elif q == 7:
            self.info_to_check = player.credit + 100
        else:
            # Market crash: remember savings, wipe all investments.
            self.info_to_check = player.savings
            del player.assets.investment[:]

    def check_quest_overall_success(self, current_date, player):
        """At the deadline, evaluate the quest and record the outcome.

        Returns True/False at the deadline, None before it.
        """
        if current_date == self.end_month:
            # Bug fix: missing `self.` on the call, and the result was
            # never returned to the caller.
            self.success = self.check_quest_requirements_success(player)
            return self.success
        return None

    def check_quest_requirements_success(self, player):
        """Return True when the snapshotted target has been met.

        Bug fixes vs the original: the per-quest helper methods were
        defined twice with identical names (the second set silently
        shadowed the first) and none of them returned its comparison;
        the logic is now inlined with explicit returns.
        """
        q = self.whichquest
        if q in (1, 2):
            return self.info_to_check <= len(player.assets.property)
        if q == 3:
            return self.info_to_check <= player.savings
        if q in (4, 5):
            return self.info_to_check <= player.netWorth
        if q == 6:
            # NOTE(review): this direction requires payments to *drop* by
            # 3x salary, which looks inverted for a "take a loan" quest --
            # kept as in the original; confirm the intended semantics.
            return self.info_to_check >= player.payments
        if q == 7:
            return self.info_to_check <= player.credit
        return self.info_to_check <= player.savings
| {"/GameFunctions.py": ["/classes/Player.py", "/classes/Community.py", "/classes/Information.py"], "/main.py": ["/GameInstance.py"], "/classes/Player.py": ["/classes/Community.py", "/classes/Information.py"], "/GameInstance.py": ["/windows/OptionWindows.py", "/GameFunctions.py"]} |
69,600 | sj43/hackricecryptogame | refs/heads/master | /classes/Community.py | import sys
from PySide2.QtWidgets import QApplication, QPushButton
from PySide2.QtCore import QObject, Signal, Slot
import random
class Economy:
    """Market-wide parameters: interest rate and GDP growth."""

    def __init__(self, interestRate, growthGDP):
        self.interestRate = interestRate
        self.growthGDP = growthGDP

    def set_interestRate(self, newAmount):
        """Replace the current interest rate."""
        self.interestRate = newAmount

    def set_growthGDP(self, newAmount):
        """Replace the current GDP growth figure."""
        self.growthGDP = newAmount
class Bank:
    """Stateless banking rules: credit scoring and loan terms."""

    @staticmethod
    def _credit_tier(credit):
        # 0 = declined; 1..3 = progressively better terms.
        if credit >= 720:
            return 3
        if credit >= 680:
            return 2
        if credit >= 640:
            return 1
        return 0

    def updateCreditScore(self, card):
        """Return the credit-score delta implied by the card balance."""
        for ceiling, delta in ((1000, -100), (3000, -50), (4500, -20), (5000, 0)):
            if card < ceiling:
                return delta
        return 20

    def howMuchCanILoan(self, salary, credit):
        """Max loan as a multiple of salary, or -1 when credit is too low."""
        tier = self._credit_tier(credit)
        return tier * salary if tier else -1

    def getLoanInterest(self, credit):
        """
        FICO credit score ranges from 300 to 850.
        less than 660 is poor, over 660 is good, and 800 is excellent.

        Returns the loan interest rate for the given score, or -1 when
        the score is too low to qualify.
        """
        rates = {3: 0.11, 2: 0.14, 1: 0.19}
        return rates.get(self._credit_tier(credit), -1)
| {"/GameFunctions.py": ["/classes/Player.py", "/classes/Community.py", "/classes/Information.py"], "/main.py": ["/GameInstance.py"], "/classes/Player.py": ["/classes/Community.py", "/classes/Information.py"], "/GameInstance.py": ["/windows/OptionWindows.py", "/GameFunctions.py"]} |
69,601 | sj43/hackricecryptogame | refs/heads/master | /test/test_window_2.py | import sys
from PySide2.QtUiTools import QUiLoader
from PySide2.QtWidgets import QApplication, QPushButton, QListWidget, QMainWindow, QWidget
from PySide2.QtCore import QFile, QObject, Signal, Slot
class MainWindow(QObject):
    # Signals re-emitted by the main window: open a sub-window, or
    # report a player choice (amount as int).
    open_bank = Signal()
    open_crypto = Signal()
    open_property = Signal()
    open_investment = Signal()
    living_expenses = Signal(int)
    card_repay = Signal(int)

    def __init__(self, ui_file, parent=None):
        """Load the Qt Designer .ui file, show it, and run the app.

        NOTE(review): this constructor creates the QApplication and
        blocks in app.exec_(), then calls sys.exit() -- nothing after
        MainWindow(...) in the caller will ever run.
        """
        super(MainWindow, self).__init__(parent)
        app = QApplication(sys.argv)
        # Rebinds the `ui_file` argument from a path string to a QFile.
        ui_file = QFile(ui_file)
        ui_file.open(QFile.ReadOnly)
        loader = QUiLoader()
        self.window = loader.load(ui_file)
        ui_file.close()
        self.window.show()
        sys.exit(app.exec_())
class PropertyWindow(QMainWindow):
    # Emitted with the index of the property the player buys.
    buy_property = Signal(int)

    def __init__(self, ui_file, parent=None):
        # Stub: the .ui loading below is disabled (string literal, not code).
        super(PropertyWindow, self).__init__(parent)
        """
        app = QApplication(sys.argv)
        window = QUiLoader().load("property_window.ui")
        window.show()
        sys.exit(app.exec_())
        """


# Manual smoke test: loads and shows the main window (blocks until closed).
MainWindow("main_window.ui")
| {"/GameFunctions.py": ["/classes/Player.py", "/classes/Community.py", "/classes/Information.py"], "/main.py": ["/GameInstance.py"], "/classes/Player.py": ["/classes/Community.py", "/classes/Information.py"], "/GameInstance.py": ["/windows/OptionWindows.py", "/GameFunctions.py"]} |
69,602 | sj43/hackricecryptogame | refs/heads/master | /GameInstance.py | from windows.OptionWindows import *
from GameFunctions import *
# from classes.quest import *
# from Crypto.market import *
class GameInstance(GameFunctions):
    """Top-level game object: creates the Qt windows, wires their
    signals to the GameFunctions slots, and runs the event loop.

    NOTE(review): __init__ blocks in app.exec_() and then calls
    sys.exit(), so statements after GameInstance(...) never run.
    """
    def __init__(self, startDate):
        GameFunctions.__init__(self, startDate)
        app = QApplication(sys.argv)
        # One window per game screen, loaded from Qt Designer .ui files.
        self.main_window = MainWindow("windows/main_window.ui")
        self.crypto_window = CryptoWindow('windows/crypto_window.ui')
        self.property_window = PropertyWindow('windows/property_window.ui')
        self.investment_window = InvestmentWindow('windows/investment_window.ui')
        self.bank_window = BankWindow('windows/bank_window.ui')
        self.connect_signals_and_slots()
        sys.exit(app.exec_())

    def connect_signals_and_slots(self):
        """Wire window signals to game logic and game signals to windows."""
        # Main-window buttons -> open the corresponding sub-window/action.
        self.main_window.open_crypto.connect(self.crypto_window.open_window)
        self.main_window.open_property.connect(self.player_ask_property)
        self.main_window.open_investment.connect(self.player_ask_investment)
        self.main_window.open_bank.connect(self.player_ask_loan)
        # Sub-window actions -> player transactions.
        self.crypto_window.buy_crypto.connect(self.player_buy_crypto)
        self.crypto_window.sell_crypto.connect(self.player_sell_crypto)
        self.property_window.buy_property.connect(self.player_buy_property)
        self.investment_window.make_investment.connect(self.player_make_investment)
        self.bank_window.get_loan.connect(self.player_get_loan)
        # Game logic -> window updates.
        self.max_loan_amount.connect(self.bank_window.set_loan_amount)
        self.see_loan.connect(self.bank_window.open_window)
        self.see_property.connect(self.property_window.open_window)
        self.see_investment.connect(self.investment_window.open_window)
        # Monthly player choices from the main window.
        self.main_window.living_expenses.connect(self.choice_living_expenses)
        self.main_window.card_repay.connect(self.choice_card)
        # self.show_payment.connect(self.main_window.display_fee_payment())
        self.main_window.end_turn_signal.connect(self.end_turn)

    def player_action(self):
        # Placeholder; not implemented yet.
        pass

    def get_date(self):
        """Return the current in-game date."""
        return self.date

    @Slot()
    def end_turn(self):
        """Advance the game one turn: apply income/fees and refresh the UI."""
        # update player info
        self.get_income()
        # show quest here (1. only when there is no existing quest, 2. not always create a quest)
        """
        if self.my_quest == None:
            if random.randint(0, 100) < 20:
                self.my_quest = Quest(self.date)
        else: # check for existing quests success
            if self.date == self.my_quest.end_month:
                if self.my_quest.check_quest_overall_success(self.date, self.player):
                    pass # reward (scoring system)
                else:
                    self.my_quest = None
        """
        self.pay_living_expenses()
        self.pay_loans()
        self.pay_card()
        self.update_salary()
        self.update_credit()
        self.update_assets()
        self.update_netWorth()
        # update community info
        # update crypto info
        # update screen info
        self.update_screen()

    def update_screen(self):
        """Rebuild the fee/payment list widget from the player's state."""
        self.main_window.fee_payment_list.clear()
        self.main_window.fee_payment_list.addItem("salary: ")
        self.main_window.fee_payment_list.addItem(str(self.player.salary))
        self.main_window.fee_payment_list.addItem("")
        self.main_window.fee_payment_list.addItem("living expenses: ")
        self.main_window.fee_payment_list.addItem(str(self.player.livingExpenses))
        self.main_window.fee_payment_list.addItem("")
        self.main_window.fee_payment_list.addItem("savings: ")
        self.main_window.fee_payment_list.addItem(str(self.player.savings))
        self.main_window.fee_payment_list.addItem("")
        self.main_window.fee_payment_list.addItem("card: ")
        self.main_window.fee_payment_list.addItem(str(self.player.card))
        self.main_window.fee_payment_list.addItem("")
        self.main_window.fee_payment_list.addItem("credit: ")
        self.main_window.fee_payment_list.addItem(str(self.player.credit))
        self.main_window.fee_payment_list.addItem("")
        self.main_window.fee_payment_list.addItem("payments: ")
        self.main_window.fee_payment_list.addItem(str(self.player.payments))
        self.main_window.fee_payment_list.addItem("")
        self.main_window.fee_payment_list.addItem("assets: ")
        for investmentAsset in self.player.assets.investment:
            self.main_window.fee_payment_list.addItem(str(investmentAsset.name))
        for propertyAsset in self.player.assets.property:
            self.main_window.fee_payment_list.addItem(str(propertyAsset.name))
        self.main_window.fee_payment_list.addItem("")
        self.main_window.fee_payment_list.addItem("net worth: ")
        self.main_window.fee_payment_list.addItem(str(self.player.compute_net_worth()))
| {"/GameFunctions.py": ["/classes/Player.py", "/classes/Community.py", "/classes/Information.py"], "/main.py": ["/GameInstance.py"], "/classes/Player.py": ["/classes/Community.py", "/classes/Information.py"], "/GameInstance.py": ["/windows/OptionWindows.py", "/GameFunctions.py"]} |
69,603 | sj43/hackricecryptogame | refs/heads/master | /test/test_signal.py | import sys
from PySide2.QtWidgets import QApplication, QPushButton
from PySide2.QtCore import QObject, Signal, Slot
# Minimal PySide2 signal/slot demo.
app = QApplication(sys.argv)


# Define a slot that receives a string.
@Slot(str)
def say_some_words(words):
    print(words)


class Communicate(QObject):
    # create a new signal on the fly and name it 'speak'
    speak = Signal(str)


# Bug fix: the original constructed Communicate() twice and discarded
# the first instance; one is enough.
someone = Communicate()
# connect signal and slot
someone.speak.connect(say_some_words)
# emit 'speak' signal
someone.speak.emit("Hello everybody!")
app.exec_()
69,604 | sj43/hackricecryptogame | refs/heads/master | /Crypto/Parsing.py | import csv
# Supported cryptocurrencies and the CSV-file abbreviation for each.
crypto_to_data = {"Bitcoin": "btc",
                  "Dash": "dash",
                  "Ethereum": "eth",
                  "Litecoin": "ltc",
                  "Tether": "usdt",
                  "Stellar": "xlm",
                  "Ripple": "xrp"}


def read(cryptoname):
    """
    Given a name of one of the 7 cryptocurrency, return a dictionary corresponding to it,
    with dates as keys and price for that date as the values.

    Reads "<abbrev>.csv" from the working directory and keeps only rows
    for the first day of a month ("YYYY/M/1"), keyed by "YYYY/M".
    """
    # Bug fix (idiom): the original shadowed the builtins `dict` and
    # `str` and never closed the file on error; use clear names + `with`.
    prices = {}
    filename = crypto_to_data[cryptoname] + ".csv"
    with open(filename, newline='') as csvfile:
        for row in csv.DictReader(csvfile):
            if row['date'].endswith("/1"):
                prices[row['date'][:-2]] = row['price(USD)']
    return prices
# Build the full dataset at import time: crypto name -> {month: price}.
# NOTE(review): requires the per-currency CSV files (btc.csv, ...) to be
# present in the current working directory.
finaldict = {}
for item in crypto_to_data:
    # Create our dataset for the price of each cryptocurrency at the first day of any given month
    finaldict[item] = read(item)
print(finaldict)
| {"/GameFunctions.py": ["/classes/Player.py", "/classes/Community.py", "/classes/Information.py"], "/main.py": ["/GameInstance.py"], "/classes/Player.py": ["/classes/Community.py", "/classes/Information.py"], "/GameInstance.py": ["/windows/OptionWindows.py", "/GameFunctions.py"]} |
69,605 | sj43/hackricecryptogame | refs/heads/master | /Crypto/Market.py | from hackricecryptogame.Crypto.Parsing import *
class Market:
    """Snapshot of the crypto market for one month.

    Holds a date string, a name -> price mapping (`collection`) and a
    name -> last-month-gain mapping (`monthly`); prices are looked up in
    the module-level `finaldict` parsed from the CSV data.
    """

    def __init__(self, date, collection, monthly_gain):
        self.date = date
        self.collection = collection
        self.monthly = monthly_gain

    def getdate(self):
        return self.date

    def getcollection(self):
        return self.collection

    def getprice(self, cryptoname):
        """Print the current price of `cryptoname`, if known."""
        if cryptoname not in self.collection:
            print(cryptoname + " not found!!")
            return
        print("The price of " + cryptoname + "is " + self.collection[cryptoname])

    def getgrowth(self, cryptoname):
        """Print last month's growth of `cryptoname`, if known."""
        if cryptoname not in self.monthly:
            print(cryptoname + " not found!!")
            return
        print("The growth of " + cryptoname + " last month is " + self.monthly[cryptoname])

    def update(self, newdate):
        """Advance to `newdate`, refreshing prices (-1 when no data)."""
        self.date = newdate
        for name in finaldict:
            self.collection[name] = finaldict[name].get(newdate, -1)

    def updatemarket(self, newdate):
        """Advance one month and recompute the percentage gains."""
        previous = dict(self.collection)
        self.update(newdate)
        for name in self.collection:
            old_price = float(previous[name])
            if old_price > 0.0:
                pct = round((float(self.collection[name]) / old_price - 1) * 100, 2)
                self.monthly[name] = str(pct) + "%"
            else:
                # No usable previous price (missing data marker).
                self.monthly[name] = None
def datechange(olddate):
    """
    To update the current date by switching to the next month
    :param olddate: the previous date in the format YYYY/MM or YYYY/M
    :return: the new date in the same format
    """
    if olddate.endswith("12"):
        # December rolls over to January of the next year.
        return str(int(olddate[0:4]) + 1) + "/1"
    if olddate[-2] == "/":
        # Single-digit month.
        return olddate[:-1] + str(int(olddate[-1]) + 1)
    if olddate[-3] == "/":
        # Double-digit month (10 or 11).
        return olddate[:-2] + str(int(olddate[-2:]) + 1)
    return "Formatting Error"
# A simulation of how the market would react when the game is run:
# starts at 2015/10 and advances the market 20 months, printing the
# prices and monthly gains each turn.
turn = 20
Current = Market("2015/10", {}, {})
Current.update("2015/10")
while (turn > 0):
    newdate = datechange(Current.date)
    Current.updatemarket(newdate)
    print(Current.date)
    print(Current.collection)
    print(Current.monthly)
    turn-=1
| {"/GameFunctions.py": ["/classes/Player.py", "/classes/Community.py", "/classes/Information.py"], "/main.py": ["/GameInstance.py"], "/classes/Player.py": ["/classes/Community.py", "/classes/Information.py"], "/GameInstance.py": ["/windows/OptionWindows.py", "/GameFunctions.py"]} |
69,608 | akinom/dspace-rest | refs/heads/master | /tests/test_dspace_login.py | import unittest
import dspace
import xml.etree.ElementTree as ET
URL = 'https://demo.dspace.org'
REST = '/rest'
ADMIN_EMAIL = 'dspacedemo+admin@gmail.com'
PWD = 'dspace'
class TestDSpaceRestLogin(unittest.TestCase):
    """Login tests against the public DSpace demo server (network required)."""

    def setUp(self):
        self.api = dspace.Api(URL, REST)

    def test_login(self):
        # Successful login: the session authenticates and user() echoes the email.
        user = self.api.login(ADMIN_EMAIL, PWD)
        self.assertTrue(self.api.authenticated())
        self.assertTrue(user == self.api.user())
        self.assertTrue(user == ADMIN_EMAIL)

    def test_login_failure(self):
        # Wrong password: login returns None and the session stays anonymous.
        user = self.api.login(ADMIN_EMAIL, PWD + "no-its-not")
        self.assertFalse(self.api.authenticated())
        self.assertFalse(self.api.user())
        self.assertTrue(user == self.api.user())
if __name__ == '__main__':
unittest.main()
| {"/tests/test_dspace_login.py": ["/dspace/__init__.py"], "/examples/walk_collection.py": ["/dspace/__init__.py"], "/tests/test_dspace_rest.py": ["/dspace/__init__.py"], "/examples/item_metadata.py": ["/dspace/__init__.py"], "/dspace/__init__.py": ["/dspace/rest.py"]} |
69,609 | akinom/dspace-rest | refs/heads/master | /examples/walk_collection.py | import dspace
URL = 'https://dataspace.princeton.edu'
REST = '/rest'
HANDLE = '88435/dsp01x920g025r'
API = dspace.Api(URL, REST)
def list_items(api, items):
    """Print a tab-separated table of basic fields for each item.

    `api` is unused but kept for signature compatibility with callers.
    """
    props = ['type', 'handle', 'lastModified', 'name']
    header = "#" + "\t".join(props)
    print(header)
    for item in items:
        print("\t".join(item.find(p).text for p in props))
    print(header)
# Resolve the collection by handle and print a listing of its items.
col = API.handle(HANDLE)
list_items(API, API.items(col))
| {"/tests/test_dspace_login.py": ["/dspace/__init__.py"], "/examples/walk_collection.py": ["/dspace/__init__.py"], "/tests/test_dspace_rest.py": ["/dspace/__init__.py"], "/examples/item_metadata.py": ["/dspace/__init__.py"], "/dspace/__init__.py": ["/dspace/rest.py"]} |
69,610 | akinom/dspace-rest | refs/heads/master | /dspace/rest.py | import requests
import xml.etree.ElementTree as ET
# Maps an object's <type> text to its REST path segment.
TYPE_TO_PATH = {
    'community' : 'communities',
    'collection' : 'collections',
    'item' : 'items'
}


class Api:
    """Thin client for the DSpace REST API (XML responses)."""

    def __init__(self, url, rest):
        self.user_email = None  # set after a successful login()
        self.cookies = {}       # holds the JSESSIONID cookie once logged in
        self.url = url          # server base URL
        self.root = rest        # REST path prefix, e.g. '/rest'

    def user(self):
        """Email of the logged-in user, or None."""
        return self.user_email

    def authenticated(self):
        """Ask the server whether the current session cookie is valid."""
        r = self._get("/status", {})
        result = ET.fromstring(r.text)
        auth = result.find('authenticated')
        return auth.text.upper() == 'TRUE'

    def login(self, user_mail, pwd):
        """Start a new session; return the email on success, else None."""
        self.cookies = {}
        self.user_email = None
        if (user_mail):
            r = requests.post(self.url + self.root + "/login", data = {'email' : user_mail, 'password' : pwd})
            if (r.status_code == 200 and 'Set-Cookie' in r.headers):
                # Extract the JSESSIONID session cookie from the header.
                cookies = r.headers['Set-Cookie'].split('; ')
                sessionids = list(filter(lambda x : x.startswith('JSESSIONID'), cookies))
                if sessionids and len(sessionids):
                    k,v = sessionids[0].split('=')
                    self.cookies[k] = v
                    self.user_email = user_mail
        return self.user_email

    def handle(self, hdl):
        """Resolve a handle to its XML object, or None when not found."""
        r = self._get("/handle/" + hdl, {})
        if (r.text):
            return ET.fromstring(r.text)
        return None

    def topCommunities(self):
        """Iterate the server's top-level communities."""
        r = self._get("/communities/top-communities", {} )
        return ET.fromstring(r.text).iter('community')

    def communities(self, comm, params={}):
        """Iterate sub-communities of a community (empty for other types).

        NOTE(review): the mutable default `params` is shared across calls
        and DSpaceObjIter mutates it (adds limit/offset) -- confirm this
        is intended.
        """
        if (comm.tag != 'community'):
            return iter([])
        return self._get_iter(comm, 'community', params)

    def collections(self, comm, params={}):
        """Iterate collections of a community (empty for other types)."""
        if (comm.tag != 'community'):
            return iter([])
        return self._get_iter(comm, 'collection', params)

    def items(self, coll, params = {}):
        """Iterate items of a collection (empty for other types)."""
        if (coll.tag != 'collection'):
            return iter([])
        return self._get_iter(coll, 'item', params)

    def get(self, type, id, params={}):
        """Fetch one object by type name ('community'/'collection'/'item') and id."""
        return self.get_path("/%s/%s" % (TYPE_TO_PATH[type], id))

    def get_path(self, path, params=[]):
        """GET an arbitrary REST path and parse the XML response."""
        if path and path[-1] == "/":
            path = path[:-1]
        r = self._get(path, params)
        return ET.fromstring(r.text)

    def path(self, obj):
        """REST path of an already-fetched object, e.g. '/items/42'."""
        return "/%s/%s" % (TYPE_TO_PATH[obj.find('type').text], obj.find('id').text)

    def _get_iter(self, parent, child, params):
        # Build the paged child-listing path, e.g. /communities/5/collections.
        type = parent.find('type').text
        id = parent.find('id').text
        path = "/%s/%s/%s" % (TYPE_TO_PATH[type], id, TYPE_TO_PATH[child])
        return DSpaceObjIter(self, path, child, params)

    def _get(self, path, params):
        # Low-level GET; prints each request as a debugging aid.
        path = self.root + path
        headers = { 'Accept' : 'application/xml, application/json, */*'}
        print(("GET: %s " % path) + str(params))
        r = requests.get(self.url + path, params=params, cookies= self.cookies, headers=headers)
        return r
class DSpaceObjIter:
    """Iterator over paged REST results.

    Fetches `path` one page at a time (driven by the limit/offset
    entries of `params`) and yields child elements matching `select`.
    Note: the caller's `params` dict is mutated in place as paging
    advances, matching the original behavior.
    """

    def __init__(self, api, path, select, params):
        self.api = api
        self.path = path
        self.select = select
        params.setdefault('limit', 100)
        params.setdefault('offset', 0)
        self.params = params
        self._set_iter()

    def __iter__(self):
        return self

    def __next__(self):
        try:
            return next(self.itr)
        except StopIteration:
            # Current page exhausted: fetch the next page and retry once.
            # An empty page lets StopIteration propagate, ending iteration.
            self.params['offset'] += self.params['limit']
            self._set_iter()
            return next(self.itr)

    def _set_iter(self):
        response = self.api._get(self.path, self.params)
        self.itr = ET.fromstring(response.text).iter(self.select)
| {"/tests/test_dspace_login.py": ["/dspace/__init__.py"], "/examples/walk_collection.py": ["/dspace/__init__.py"], "/tests/test_dspace_rest.py": ["/dspace/__init__.py"], "/examples/item_metadata.py": ["/dspace/__init__.py"], "/dspace/__init__.py": ["/dspace/rest.py"]} |
69,611 | akinom/dspace-rest | refs/heads/master | /tests/test_dspace_rest.py | import unittest
import dspace
import xml.etree.ElementTree as ET
URL = 'https://dataspace.princeton.edu'
REST = '/rest'
SAMPLE_COMMUNITY_NAME = 'Princeton Plasma Physics Laboratory'
SAMPLE_HANDLE = {'community': '88435/dsp01pz50gz45g',
'collection' : '88435/dsp01x920g025r',
'item' : '88435/dsp01765373814' }
class TestDSpaceRest(unittest.TestCase):
    """Read-only integration tests against the live DataSpace REST API
    (network access required; see the SAMPLE_* constants above)."""

    def setUp(self):
        self.api = dspace.Api(URL, REST)

    def test_get_slash(self):
        """ this does not come back with xml """
        r = self.api._get("/", {})
        self.assertTrue(r.status_code == 200)

    def test_existing_handles(self ):
        # Each configured handle must resolve to an object of the expected type.
        for tp in SAMPLE_HANDLE.keys():
            hdl = SAMPLE_HANDLE[tp]
            obj = self.api.handle(hdl)
            type = obj.find('type').text
            self.assertTrue(type in dspace.rest.TYPE_TO_PATH.keys(),
                            "unexpected type value %s for handle %s" % (type, hdl));
            self.assertTrue(type == tp,
                            "type value in SAMPLE_HANDLE config %s for %s does not match type of returned object (%s)" % (tp, hdl, type));

    def test_non_existing_handle(self):
        # Unknown handles resolve to None.
        obj = self.api.handle("XXX/YYY")
        self.assertTrue(obj == None)

    def test_path(self):
        # Round-trip: handle -> path -> same object.
        for tp in SAMPLE_HANDLE.keys():
            obj = self.api.handle(SAMPLE_HANDLE[tp])
            same = self.api.get_path(self.api.path(obj))
            self.assertTrue(ET.tostring(obj) == ET.tostring(same))

    def test_top_communities(self):
        tops = self.api.topCommunities()
        found_community_with_SAMPLE_NAME = False
        for c in tops:
            self.assertTrue(c.find('type').text == 'community')
            name = c.find('name').text
            found_community_with_SAMPLE_NAME = found_community_with_SAMPLE_NAME or (name == SAMPLE_COMMUNITY_NAME)
        self.assertTrue(found_community_with_SAMPLE_NAME)

    def test_sub_communities(self):
        com = self.find_top_community_by_name(SAMPLE_COMMUNITY_NAME)
        self.assertTrue(com, "can't find community with name %s" % SAMPLE_COMMUNITY_NAME)
        sub_com = self.api.communities(com)
        n = 0
        for s in sub_com:
            self.assertTrue(s.find('type').text == 'community')
            n = n + 1
        self.assertTrue(n > 0, "expected subcommunities in %s" % (SAMPLE_COMMUNITY_NAME))

    def test_sub_community_on_invalid_obj(self):
        # communities() yields nothing for items and collections.
        for tp in ['item', 'collection']:
            obj = self.api.handle(SAMPLE_HANDLE[tp])
            self.assertTrue(obj)
            sub = self.api.communities(obj)
            self.assertTrue(len(list(sub)) == 0, '%ss have no sub communities' % tp)

    def test_collections_in_com(self):
        com = self.api.handle(SAMPLE_HANDLE['community'])
        self.assertTrue(com, "can't find community %s" % SAMPLE_HANDLE['community'])
        sub_coll = self.api.collections(com)
        n = 0
        for c in sub_coll:
            self.assertTrue(c.find('type').text == 'collection')
            n = n + 1
        self.assertTrue(n > 0, "expected collections in %s" % (SAMPLE_COMMUNITY_NAME))

    def test_collection_on_invalid_obj(self):
        # collections() yields nothing for items and collections.
        for tp in ['item', 'collection']:
            obj = self.api.handle(SAMPLE_HANDLE[tp])
            self.assertTrue(obj)
            sub = self.api.collections(obj)
            self.assertTrue(len(list(sub)) == 0, '%ss have no collections' % tp)

    def test_items_in_collection(self):
        obj = self.api.handle(SAMPLE_HANDLE['collection'])
        lst = self.api.items(obj)
        n = 0
        for c in lst:
            self.assertTrue(c.find('type').text == 'item')
            n = n + 1
        self.assertTrue(n > 0, "expected items in %s" % (SAMPLE_HANDLE['collection']))

    def test_iter_inner_loop(self):
        # Paging must be transparent: the item count is independent of page size.
        obj = self.api.handle(SAMPLE_HANDLE['collection'])
        nitems100 = len(list(self.api.items(obj, params = { 'limit' : 100})))
        self.assertTrue(nitems100 > 2 , "this is only a good test if collections has more than 2 items")
        nitems2 = len(list(self.api.items(obj, params = { 'limit' :2})))
        self.assertTrue(nitems2 == nitems100)

    def test_items_on_invalid_obj(self):
        # items() yields nothing for items and communities.
        for tp in ['item', 'community']:
            obj = self.api.handle(SAMPLE_HANDLE[tp])
            self.assertTrue(obj)
            lst = self.api.items(obj)
            self.assertTrue(len(list(lst)) == 0, '%ss have no items' % tp)

    def test_item_expand_metadata(self):
        obj = self.api.handle(SAMPLE_HANDLE['item'])
        item = self.api.get_path(self.api.path(obj), params = { 'expand' : 'metadata'})
        # test that there is at least one metadata element
        next(item.iter('metadata'))

    def find_top_community_by_name(self, com_name):
        # Helper: linear scan of top communities by display name.
        tops = self.api.topCommunities()
        for c in tops:
            name = c.find('name').text
            if name == com_name:
                return c
        return None
if __name__ == '__main__':
unittest.main()
| {"/tests/test_dspace_login.py": ["/dspace/__init__.py"], "/examples/walk_collection.py": ["/dspace/__init__.py"], "/tests/test_dspace_rest.py": ["/dspace/__init__.py"], "/examples/item_metadata.py": ["/dspace/__init__.py"], "/dspace/__init__.py": ["/dspace/rest.py"]} |
69,612 | akinom/dspace-rest | refs/heads/master | /examples/item_metadata.py | import dspace
URL = 'https://dataspace.princeton.edu'
REST = '/rest'
HANDLE = '88435/dsp01x920g025r'
API = dspace.Api(URL, REST)
def get_first_metadata_value(item, field_name):
    """Return the first metadata value stored under field_name, or '' if absent."""
    # Re-fetch the item so the response includes its metadata entries.
    expanded = API.get_path(API.path(item), params= {'expand' : 'metadata' })
    for entry in expanded.iter('metadata'):
        if entry.find('key').text == field_name:
            return entry.find('value').text
    return ''
col = API.handle(HANDLE)
# Fetch at most one item from the collection; items() yields lazily.
# Renamed from 'iter' so the builtin of the same name is not shadowed.
item_iterator = API.items(col, params = {'limit' : 1})
item = next(item_iterator)
print(item.find('handle').text)
print(get_first_metadata_value(item, 'dc.date.accessioned'))
| {"/tests/test_dspace_login.py": ["/dspace/__init__.py"], "/examples/walk_collection.py": ["/dspace/__init__.py"], "/tests/test_dspace_rest.py": ["/dspace/__init__.py"], "/examples/item_metadata.py": ["/dspace/__init__.py"], "/dspace/__init__.py": ["/dspace/rest.py"]} |
69,613 | akinom/dspace-rest | refs/heads/master | /dspace/__init__.py | __all__ = ['rest']
from .rest import * | {"/tests/test_dspace_login.py": ["/dspace/__init__.py"], "/examples/walk_collection.py": ["/dspace/__init__.py"], "/tests/test_dspace_rest.py": ["/dspace/__init__.py"], "/examples/item_metadata.py": ["/dspace/__init__.py"], "/dspace/__init__.py": ["/dspace/rest.py"]} |
69,614 | perlman/vizrelay | refs/heads/master | /settings.py | '''
Settings (default) management for viz redirection service.
'''
from argschema import ArgSchema, ArgSchemaParser
from argschema.fields import Boolean, Nested, Str, Int, Float, Dict
from argschema.schemas import DefaultSchema
from argschema.utils import args_to_dict, smart_merge
import marshmallow as mm
DEFAULT_SETTINGS = {}
class NeuroglancerDefaultOptions(DefaultSchema):
    """Viewer-wide Neuroglancer options (blend mode and panel layout)."""
    blend = Str(default="default",
                validate=mm.validate.OneOf(['default', 'additive']),
                description="Default blend mode (default or additive)",
                required=False)
    # Fixed: the original OneOf list contained 'yz-3d' twice; the duplicate
    # was almost certainly meant to be 'xz-3d', which is now accepted too
    # (accepting an extra valid layout is backward-compatible).
    layout = Str(validate=mm.validate.OneOf(['xy',
                                             'yz',
                                             'xy-3d',
                                             'yz-3d',
                                             'xz-3d',
                                             '4panel',
                                             '3d']),
                 description="default layout")
class NeuroglancerLayerOptions(DefaultSchema):
    """Options merged into every Neuroglancer layer that gets created."""
    opacity = Float(validate=mm.validate.Range(0, 1),
                    description="default opacity of layers")
    blend = Str(default="default",
                validate=mm.validate.OneOf(['default', 'additive']),
                description="Blend mode for this each layer created",
                required=False)
    # GLSL shader source applied to each layer
    shader = Str(description="shader to use")
class NeuroglancerSchema(DefaultSchema):
    """Neuroglancer endpoint plus the viewer- and layer-level option groups."""
    base_url = Str(default="https://neuroglancer-demo.appspot.com/",
                   description="Neuroglancer URL", required=False)
    options = Nested(NeuroglancerDefaultOptions, default={})
    layer_options = Nested(NeuroglancerLayerOptions, default={})
class RenderSchema(DefaultSchema):
    """Options controlling how render data sources are built and queried.

    NOTE(review): these fields pass 'help=' where the other schemas use
    'description=' -- confirm the installed marshmallow/argschema version
    tolerates that keyword.
    """
    protocol = Str(
        default="http",
        help="Protocol to connect to render with (http or https)",
        required=False)
    port = Int(default=80, required=False)
    encoding = Str(
        default="jpg",
        help="Encoding option for the neuroglancer render datasource (jpg or raw16)",
        required=False)
    all_channels = Boolean(default=False,
                           help="Use Render API to query for and load all channels",
                           required=False)
    alt_render = Str(
        default="",
        help="Alternate render host to use for vizrelay API calls [to work in Docker]",
        required=False)
    enable_one_channel = Boolean(default=False,
                                 help="Enable only one of the channels",
                                 required=False)
    channel_name_shader_sub = Dict(default={},
                                   help="Dictionary of CHANNEL_NAME : { SUB_NAME : SUB_VALUE }",
                                   required=False)
class VizRelaySchema(ArgSchema):
    """Top-level settings schema: neuroglancer + render option groups."""
    neuroglancer = Nested(NeuroglancerSchema, required=False, default={})
    render = Nested(RenderSchema, required=False, default={})
def add_defaults(args):
    """Merge *args* into the module-wide DEFAULT_SETTINGS dict.

    NOTE(review): assumes argschema's smart_merge mutates its first
    argument in place -- confirm, since the return value is discarded here.
    """
    smart_merge(DEFAULT_SETTINGS, args)
def get_settings(query_args):
    """Build a validated settings object: defaults overlaid by query args.

    NOTE(review): like add_defaults, this relies on smart_merge mutating
    'd' in place -- confirm against the argschema version in use.
    """
    d = {}
    smart_merge(d, DEFAULT_SETTINGS)
    smart_merge(d, args_to_dict(query_args, schema=VizRelaySchema()))
    return ArgSchemaParser(input_data=d, schema_type=VizRelaySchema, args=[])
| {"/main.py": ["/settings.py"]} |
69,615 | perlman/vizrelay | refs/heads/master | /main.py | #!/usr/bin/env python3
import argparse
import json
import pprint
import os
import settings
import argschema
import requests
from flask import Flask, request, redirect
app = Flask(__name__)
# Config file path; overridable via the RELAY_CONFIG_FILE env var,
# defaulting to config.json next to this file.
app.config['RELAY_CONFIG_FILE'] = os.environ.get(
    "RELAY_CONFIG_FILE",
    os.path.join(os.path.dirname(os.path.realpath(__file__)), "config.json")
)
# Inline JSON config; overrides/extends the file-based defaults.
app.config['RELAY_CONFIG_JSON'] = os.environ.get(
    "RELAY_CONFIG_JSON",
    "{}")
@app.before_first_request
def setup():
    """Load default settings once, before the first request is served.

    NOTE(review): both config keys are always set above, so both branches
    run; before_first_request was removed in Flask 2.3 -- confirm the
    deployed Flask version.
    """
    if 'RELAY_CONFIG_FILE' in app.config:
        settings.add_defaults(json.load(open(app.config['RELAY_CONFIG_FILE'])))
    if 'RELAY_CONFIG_JSON' in app.config:
        settings.add_defaults(json.loads(app.config['RELAY_CONFIG_JSON']))
@app.route("/")
def main():
    """Render the currently-effective settings as a small HTML page."""
    mod = settings.get_settings(request.args)
    result = "<html><head></head><body>"
    result += "<h2>Current settings</h2>"
    result += "<pre>" + pprint.PrettyPrinter(indent=4).pformat(
        mod.args) + "</pre>"
    # Fixed: the page previously ended at </body> without closing </html>.
    result += "</body></html>"
    return result
# Sample URL http://ibs-forrestc-ux1:8002/render/ibs-forrestc-ux1.corp.alleninstitute.org/Forrest/H16_03_005_HSV_HEF1AG65_R2An15dTom/ACQGephyrin/
@app.route("/render/<server>/<owner>/<project>/<stack>/",
           defaults={'channel': None})
@app.route("/render/<server>/<owner>/<project>/<stack>/<channel>/")
def render(server, owner, project, stack, channel):
    """Redirect (303) to a Neuroglancer URL for the given render stack.

    When render.all_channels is set, the render API is queried for the
    stack's channel list and one image layer is built per channel;
    otherwise a single layer is built for the requested stack/channel.
    """
    config = settings.get_settings(request.args).args
    if config['render']['all_channels']:
        # Check for all available channels
        # http://localhost:8080/render-ws/v1/owner/E/project/C/stack/S2_RoughAligned
        # alt_render lets API calls target a different host than the one
        # embedded in the generated Neuroglancer source (e.g. inside Docker).
        if config['render']['alt_render']:
            apiserver = config['render']['alt_render']
        else:
            apiserver = server
        stack_info_url = "{0}://{1}/render-ws/v1/owner/{2}/project/{3}/stack/{4}".format(
            config['render']['protocol'], apiserver, owner, project, stack)
        stack_info = requests.get(stack_info_url).json()
        params = {'layers' : {}}
        if len(stack_info["stats"]["channelNames"]) > 0:
            channelNames = sorted(stack_info["stats"]["channelNames"])
            # NOTE(review): this loop rebinds the 'channel' route argument.
            for channel in stack_info["stats"]["channelNames"]:
                render_params = [owner, project, stack]
                if channel:
                    render_params.append(channel)
                render_source = "render://{0}://{1}/{2}?encoding={3}".format(
                    config['render']['protocol'], server,
                    '/'.join(render_params), config['render']['encoding'])
                layer = {'type': 'image', 'source': render_source}
                # enable_one_channel: only the alphabetically first channel stays visible
                if config['render']['enable_one_channel'] and channel != channelNames[0]:
                    layer['visible'] = False
                layer = argschema.utils.smart_merge(layer,
                                                    config['neuroglancer']['layer_options'])
                if 'shader' in layer:
                    # Apply per-channel shader substitutions, then the '__default__' set
                    if channel in config['render']['channel_name_shader_sub']:
                        for k, v in config['render']['channel_name_shader_sub'][channel].items():
                            layer['shader'] = layer['shader'].replace(k,v)
                    if '__default__' in config['render']['channel_name_shader_sub']:
                        for k, v in config['render']['channel_name_shader_sub']['__default__'].items():
                            layer['shader'] = layer['shader'].replace(k,v)
                params['layers'][channel] = layer
            params = argschema.utils.smart_merge(params, config['neuroglancer']['options'])
            # NOTE(review): unlike the fallback path below, this branch does
            # not replace double quotes with single quotes in the URL --
            # confirm whether that inconsistency is intentional.
            params_json = json.dumps(params, separators=(',', ':'), sort_keys=True)
            new_url = "{0}/#!{1}".format(config['neuroglancer']['base_url'],
                                         params_json)
            return redirect(new_url, code=303)
        else:
            pass # Default to use the regular code path (below) when there are no channels
    # Single-layer fallback path (all_channels off, or stack has no channels).
    render_params = [ owner, project, stack]
    if channel:
        render_params.append(channel)
    render_source = "render://{0}://{1}/{2}?encoding={3}".format(
        config['render']['protocol'], server,
        '/'.join(render_params), config['render']['encoding'])
    params = {}
    layer = {'type': 'image', 'source': render_source}
    layer = argschema.utils.smart_merge(layer,
                                        config['neuroglancer']
                                        ['layer_options'])
    params['layers'] = {stack: layer}
    params = argschema.utils.smart_merge(params,
                                         config['neuroglancer']['options'])
    params_json = json.dumps(params, separators=(',', ':'))
    new_url = "{0}/#!{1}".format(config['neuroglancer']['base_url'],
                                 params_json)
    new_url = new_url.replace('"', "'")
    return redirect(new_url, code=303)
if __name__ == "__main__":
    # Command-line entry point: host/port for the development server.
    cli = argparse.ArgumentParser()
    cli.add_argument("-p", "--port", action="store", type=int, default=5000)
    cli.add_argument("--host", action="store", type=str, default="0.0.0.0")
    opts = cli.parse_args()
    app.run(host=opts.host, port=opts.port)
| {"/main.py": ["/settings.py"]} |
69,622 | J-HOVELAQUE/tank | refs/heads/master | /Tank2.0.py | #! /usr/bin/python3.5
# -*- coding:utf-8 -*
from tkinter import *
from bombarde_prot import *
from obus_prot import *
from terrain_prot import *
#from explosion import *
from random import randrange
from time import sleep
import winsound
class JeuDeTank(object):
    """Main game object: window, terrain, players and turn management."""

    def __init__(self, largeur=1200, hauteur=800, nb_joueurs=2):
        """Build the UI, the terrain and the players, then enter the Tk loop."""
        self.hauteur = hauteur  # Size of the play area
        self.largeur = largeur
        self.nb_joueurs = nb_joueurs
        self.root = Tk()  # Main interface window
        self.aire = Canvas(self.root, width=self.largeur, height=self.hauteur)  # Game canvas
        # NOTE(review): background image path and its 600x400 anchor are
        # hard-coded; this assumes a 1200x800 canvas -- confirm for other sizes.
        self.font = PhotoImage(file='IMG_1495.gif')
        self.aire.create_image(600,400,image=self.font)
        self.aire.grid(row=1, column=1, columnspan=4)  # Lay out the interface
        self.terrain = Terrain(self.aire, self.largeur, self.hauteur)  # Build the terrain
        self.creation_des_joueurs(self.nb_joueurs)  # Instantiate the players
        self.changement_de_joueur()  # Bind the controls to the active player
        if self.terrain.vent < 0:
            direction_vent = "<-"
        elif self.terrain.vent > 0:
            direction_vent = "->"
        elif self.terrain.vent == 0:
            direction_vent = " "
        self.info = Label(text="Vent :\n {} {}".format(direction_vent, abs(self.terrain.vent)))
        self.info.grid(row=2, column=2, columnspan=2)
        self.joueur1.info.grid(row=2, column=1)
        self.joueur2.info.grid(row=2, column=4)
        self.root.mainloop()

    def creation_des_joueurs(self, nb_joueurs):
        """Instantiate the players at random, sufficiently separated positions."""
        valide = False
        self.liste_des_joueurs = []
        while valide is False:
            pos_joueur_1 = randrange(10,self.largeur-10)
            pos_joueur_2 = randrange(10,self.largeur-10)
            if abs(pos_joueur_1 - pos_joueur_2) > 200:  # Keep the two players far enough apart
                valide = True
        self.joueur1 = Bombarde(self.aire, self.terrain, pos_joueur_1, self.terrain.skyline[pos_joueur_1][1], color='brown', name="joueur_1")
        self.liste_des_joueurs.append(self.joueur1)
        self.joueur2 = Bombarde(self.aire, self.terrain,pos_joueur_2 , self.terrain.skyline[pos_joueur_2][1], color='blue', name="joueur_2")
        self.liste_des_joueurs.append(self.joueur2)
        self.player_select = 1

    def replacer_joueurs(self):
        """Move the existing players to fresh random positions for a new round."""
        valide = False
        self.liste_des_joueurs = []
        while valide is False:
            pos_joueur_1 = randrange(10,self.largeur-10)
            pos_joueur_2 = randrange(10,self.largeur-10)
            if abs(pos_joueur_1 - pos_joueur_2) > 200:  # Keep the two players far enough apart
                valide = True
        self.joueur1.replacer(self.terrain, pos_joueur_1, self.terrain.skyline[pos_joueur_1][1])
        self.liste_des_joueurs.append(self.joueur1)
        self.joueur2.replacer(self.terrain,pos_joueur_2 , self.terrain.skyline[pos_joueur_2][1])
        self.liste_des_joueurs.append(self.joueur2)
        self.joueur1.maj_info()
        self.joueur2.maj_info()
        self.player_select = 1

    def tirer(self, *event):
        """Fire the active player's cannon and schedule the hit check."""
        winsound.Beep(1000,200)
        self.root.unbind("<Return>")  # Forbid firing two shots at once
        self.projectile = self.liste_des_joueurs[self.player_select].tirer()
        self.root.after(2000,self.bilan)

    def changement_de_joueur(self):
        """Advance to the next player and rebind the key commands to them."""
        self.player_select += 1
        if self.player_select >= len(self.liste_des_joueurs):
            self.player_select = 0
        # Rebind the controls for the now-active player
        self.root.bind("<Left>",self.liste_des_joueurs[self.player_select].augmenter_angle)
        self.root.bind("<Right>",self.liste_des_joueurs[self.player_select].diminuer_angle)
        self.root.bind("<Up>",self.liste_des_joueurs[self.player_select].augmenter_force)
        self.root.bind("<Down>",self.liste_des_joueurs[self.player_select].diminuer_force)
        self.root.bind("<Return>",self.tirer)
        # Clear the previous shot's trail
        for traine in self.aire.find_withtag("trainée"):
            self.aire.delete(traine)

    def new_game(self):
        """Reset the board for a new round, keeping the players and scores."""
        self.aire.delete(ALL)
        self.aire.create_image(600,400,image=self.font)
        self.terrain = Terrain(self.aire, self.largeur, self.hauteur)  # Fresh terrain
        self.replacer_joueurs()
        self.changement_de_joueur()  # Rebind the controls
        if self.terrain.vent < 0:
            direction_vent = "<-"
        elif self.terrain.vent > 0:
            direction_vent = "->"
        elif self.terrain.vent == 0:
            direction_vent = " "
        self.info = Label(text="Vent :\n {} {}".format(direction_vent, abs(self.terrain.vent)))
        self.info.grid(row=2, column=2, columnspan=2)
        self.joueur1.info.grid(row=2, column=1)
        self.joueur2.info.grid(row=2, column=4)

    def bilan(self):
        """Check whether a tank was destroyed after a shot; end round or pass turn."""
        # Fixed: iterate over a copy -- the original removed elements from the
        # list it was iterating, which can skip a tank hit by the same shot.
        for tank in list(self.liste_des_joueurs):
            if tank.chassis in self.projectile.contours():
                winsound.Beep(100,1000)
                self.liste_des_joueurs.remove(tank)
                tank.destroy()
        if len(self.liste_des_joueurs) == 1:
            self.liste_des_joueurs[0].score += 1
            self.new_game()
        else:
            self.changement_de_joueur()
# Launch a full game when executed directly (blocks in the Tk main loop).
if __name__ == "__main__":
    jeu = JeuDeTank()
| {"/Tank2.0.py": ["/bombarde_prot.py", "/obus_prot.py", "/terrain_prot.py"], "/bombarde_prot.py": ["/obus_prot.py", "/terrain_prot.py"], "/obus_prot.py": ["/terrain_prot.py"]} |
69,623 | J-HOVELAQUE/tank | refs/heads/master | /bombarde_prot.py | #! /usr/bin/python3.5
# -*- coding:utf-8 -*
from tkinter import *
from obus_prot import *
import math
from time import sleep
from terrain_prot import *
class Bombarde(object):
    """A player's tank: drawn body plus cannon, with adjustable angle and power."""

    def __init__(self, aire, terrain, x, y, score = 0, angle=90,name="player 1", color='dark red'):
        self.aire = aire  # Canvas the tank is drawn on
        self.x, self.y = x, y  # Coordinates of the tank centre
        self.angle = angle  # Initial cannon angle (degrees)
        self.color = color  # Tank colour
        self.force = 75  # Initial firing power
        self.name = name
        self.broken = False  # Set True once destroy() has run
        self.score = score  # Number of rounds won
        self.terrain = terrain  # Terrain the tank sits on
        # Body: half-disc of radius 15 centred on (x, y)
        self.chassis = self.aire.create_arc(self.x-15, self.y-15, self.x+15, self.y+15, start=0, extent=180,fill=self.color,tag='tank')
        # Cannon: 30-pixel segment oriented by self.angle
        self.canon = self.aire.create_line(self.x, self.y,\
            self.x + 30*math.cos(math.radians(self.angle)), self.y - 30*math.sin(math.radians(self.angle)),\
            width=3,fill=self.color,tag='tank')
        self.info = Label(text="Angle : {}\nPuissance : {}\n\nScore : {}".format(self.angle, self.force, self.score), fg=self.color)

    def augmenter_angle(self, *event):
        """Increase the cannon angle (capped at 180 degrees)."""
        if self.angle < 180:
            self.angle += 1
            self.aire.coords(self.canon,self.x,self.y,self.x+30*math.cos(math.radians(self.angle)),self.y-30*math.sin(math.radians(self.angle)))
            self.maj_info()

    def diminuer_angle(self, *event):
        """Decrease the cannon angle (floored at 0 degrees)."""
        if self.angle > 0:
            self.angle -= 1
            self.aire.coords(self.canon,self.x,self.y,self.x+30*math.cos(math.radians(self.angle)),self.y-30*math.sin(math.radians(self.angle)))
            self.maj_info()

    def augmenter_force(self, *event):
        """Increase the firing power."""
        self.force += 1
        self.maj_info()

    def diminuer_force(self, *event):
        """Decrease the firing power."""
        self.force -= 1
        self.maj_info()

    def tirer(self, *event):
        """Fire a shell with the current angle/power; returns the Projectile."""
        projectile = Projectile(self.aire, self.terrain, self.x, self.y, self.angle, self.force)
        return projectile

    def destroy(self):
        """Remove the tank from the canvas and mark it as broken."""
        self.aire.delete(self.chassis)
        self.aire.delete(self.canon)
        self.broken = True

    def maj_info(self):
        """Refresh the angle/power/score label shown for this player."""
        self.info.configure(text="Angle : {}\nPuissance : {}\n\nScore : {}".format(self.angle, self.force, self.score))

    def replacer(self, terrain, x, y, angle = 45):
        """Redraw the tank at (x, y) on a new terrain, resetting angle and power.

        NOTE(review): the 'angle' parameter is ignored -- the angle is always
        reset to 90; confirm whether the parameter should be honoured.
        """
        self.aire.delete(self.chassis)
        self.aire.delete(self.canon)
        self.angle = 90
        self.force = 75
        self.terrain = terrain
        self.x, self.y = x, y  # Coordinates of the tank centre
        self.chassis = self.aire.create_arc(self.x-15, self.y-15, self.x+15, self.y+15, start=0, extent=180,fill=self.color,tag='tank')
        self.canon = self.aire.create_line(self.x, self.y,\
            self.x + 30*math.cos(math.radians(self.angle)), self.y - 30*math.sin(math.radians(self.angle)),\
            width=3,fill=self.color,tag='tank')
class JoueurDeTank(object):
    """Thin wrapper owning a Bombarde for one player."""
    def __init__(self, aire, terrain, x, y, angle=45,name="player 1", color='dark red'):
        # Fixed: the constructor arguments are now forwarded to the tank;
        # previously angle/name/color were ignored and hard-coded values used.
        self.tank = Bombarde(aire, terrain, x, y, angle=angle, name=name, color=color)
if __name__ == "__main__":
    # Manual demo: one tank on a generated terrain, driven with the arrow keys.
    test = Tk()
    aire = Canvas(test, height= 800, width=1400)
    aire.pack()
    # Fixed: Bombarde requires a Terrain as second argument; the demo
    # previously called Bombarde(aire, 500, 800) and crashed on startup.
    terrain = Terrain(aire, 1400, 800)
    tank = Bombarde(aire, terrain, 500, 800)
    test.bind("<Left>",tank.augmenter_angle)
    test.bind("<Right>",tank.diminuer_angle)
    test.bind("<Up>",tank.augmenter_force)
    test.bind("<Down>",tank.diminuer_force)
    test.bind("<Return>",tank.tirer)
    test.mainloop()
69,624 | J-HOVELAQUE/tank | refs/heads/master | /obus_prot.py | #! /usr/bin/python3.5
# -*- coding : utf -8 -*
"""Tentative de définir un projectile en tant que classe"""
from tkinter import *
from terrain_prot import *
import winsound
import math
class Projectile(object):
    """A shell fired by a tank: ballistic flight, impact detection and explosion."""

    def __init__(self, surface, terrain, x0, y0, angle, puissance, gravite = 0.4, vent = 0, vitesse = 5, rayon_explosion=20):
        self.etat = False  # True while the shell is in flight
        self.x0 = x0  # Position of the firing tank
        self.y0 = y0
        self.aire = surface  # Attached canvas
        self.terrain = terrain  # Environment the shell flies through
        # Everything the shell can collide with (ground + tanks)
        self.obstacles = self.aire.find_withtag("terrain") + self.aire.find_withtag("tank")
        self.angle = angle  # Angle of the firing cannon
        self.force = puissance/10  # Firing power (scaled down)
        self.g = gravite  # Gravity of the planet
        # NOTE(review): the 'vent' parameter is ignored; the wind always
        # comes from the terrain -- confirm the parameter can be dropped.
        self.vent = self.terrain.vent  # Wind strength
        self.vitesse = vitesse  # Animation speed of the shot
        self.evolution = True  # Whether the shell is still progressing
        self.progression = 0  # Progress of the explosion animation
        self.rayon = rayon_explosion
        self.t = 0  # Start time
        # Trajectory origin: the tip of the cannon
        self.curve = [(self.x0 + 30*math.cos(math.radians(self.angle)), self.y0 - 30*math.sin(math.radians(self.angle)))]
        # Starting position of the shell
        self.x, self.y = self.x0 + 30*math.cos(math.radians(self.angle)), self.y0 - 30*math.sin(math.radians(self.angle))
        self.trajectoire_recursive(self.g, self.vent, self.vitesse)  # Fire!

    def trajectoire_recursive(self, g=0.1, vent=0, vitesse_tir=5):
        """Draw the shell's trajectory point by point, based on:
        - the cannon angle (self.angle)
        - the firing power (self.force)
        - the gravity (g)
        - the wind strength (vent) (ideally between -10 and 10)
        The display speed (vitesse_tir) is also taken into account.
        """
        self.etat = True
        angle = math.radians(self.angle)  # Convert the cannon angle to radians
        vent = self.vent/300  # Scale the wind down
        # Equations of motion of the trajectory at time t
        x = (vent * (self.t**2) / 2) + (self.force * math.cos(angle) * self.t)
        y = - ((g * (self.t**2)) / 2) + (self.force * math.sin(angle) * self.t)
        self.y = -y + (self.y0 - 30*math.sin(angle))  # Current shell position
        self.x = x + (self.x0 + 30*math.cos(angle))
        self.curve.append((self.x,self.y))  # Keep drawing the trajectory
        self.aire.create_line(self.curve, fill='red', smooth=True, tag="trainée")
        del self.curve[0]
        for obs in self.obstacles:  # Explode the shell on contact with an obstacle
            if obs in self.aire.find_overlapping(self.x,self.y,self.x,self.y):
                # t == 0 overlaps the firing tank itself, so ignore it
                if self.t != 0:
                    self.explosion()
                    self.evolution = False
        self.t += 1  # Move to the next time step
        if self.t < 1000 and self.evolution is True:  # Keep flying while conditions hold
            self.aire.after(self.vitesse , self.trajectoire_recursive)
        else:
            self.etat = False

    def explosion(self, vitesse=10):
        """Animate the explosion until it reaches the target radius."""
        self.aire.create_oval\
            (self.x-self.progression,self.y-self.progression,self.x+self.progression,self.y+self.progression,\
            fill='red',tag='explosion')
        self.progression += 1
        if self.progression < self.rayon:  # Check whether the explosion is finished
            self.aire.after(vitesse,self.explosion)
        else:
            self.aire.after(200,self.effacer_explosion)

    def effacer_explosion(self):
        """Erase the explosion and carve the terrain accordingly."""
        for cercle in self.aire.find_withtag("explosion"):
            self.aire.delete(cercle)
        self.terrain.destruction_circulaire(int(self.x),self.rayon)
        winsound.Beep(100,1000)

    def contours(self):
        """Return the canvas items within the blast radius around the impact."""
        return self.aire.find_overlapping(self.x-self.rayon,self.y-self.rayon,self.x+self.rayon,self.y+self.rayon)
| {"/Tank2.0.py": ["/bombarde_prot.py", "/obus_prot.py", "/terrain_prot.py"], "/bombarde_prot.py": ["/obus_prot.py", "/terrain_prot.py"], "/obus_prot.py": ["/terrain_prot.py"]} |
69,625 | J-HOVELAQUE/tank | refs/heads/master | /terrain_prot.py | #! /usr/bin/python3.5
# -*- coding : utf-8 -*
"""Tentative de création d'une classe pour générer un terrain de jeu aléatoire"""
from tkinter import *
from random import randrange,choice
import math
class Terrain(object):
    """Procedurally generated landscape drawn as a green polygon on a canvas."""

    def __init__(self, surface, largeur, hauteur):
        self.largeur = largeur
        self.hauteur = hauteur
        self.vent = randrange(-10,10)  # Wind strength for this round
        self.aire = surface  # Attach to the game canvas
        self.skyline = [(0,hauteur//2)]  # Horizon line and its starting point
        self.x = 0  # Generation cursor, advanced piece by piece
        self.y = hauteur//2
        # The available terrain pieces:
        self.element = [self.generer_pente_asc,self.generer_pente_desc,self.generer_plateau]
        self.generation()  # Run the procedural generation

    def generation(self):
        """Append randomly chosen terrain pieces until the full width is covered."""
        while self.x < self.largeur:
            choice(self.element)(randrange(50,200),randrange(1,5))
        self.landscape = self.aire.create_polygon(self.skyline, self.largeur,self.hauteur,0,self.hauteur,fill='green',smooth=0, tag='terrain')

    def generer_pente_asc(self,longueur=200,pente=5):
        """Rising slope of the given length and steepness."""
        while longueur > 0:
            self.x += 1
            self.y -= randrange(0,5)*(pente/10)
            self.skyline.append((self.x,self.y))
            longueur -= 1

    def generer_pente_desc(self,longueur=200,pente=5):
        """Descending slope of the given length and steepness."""
        while longueur > 0:
            self.x += 1
            self.y += randrange(0,5)*(pente/10)
            self.skyline.append((self.x,self.y))
            longueur -= 1

    def generer_plateau(self,longueur=200,pente=5):
        """Flat stretch of the given length (pente is unused here)."""
        while longueur > 0:
            self.x += 1
            self.skyline.append((self.x,self.y))
            longueur -= 1

    def destruction_circulaire(self, x0=200, r=20):
        """Carve a circular crater of radius r centred at x=x0."""
        gabarit = []
        for x in self.skyline[x0-r:x0+r]:
            newy = r**2 - (x[0]-x0)**2
            newy = math.sqrt(newy)
            newy = self.skyline[x0][1]+newy
            trou = (x[0],newy)
            gabarit.append(trou)
        self.skyline[x0-r:x0+r] = gabarit
        self.aire.delete(self.landscape)
        # Fixed: redraw with self.largeur/self.hauteur instead of the
        # hard-coded 1200x800, so non-default sizes stay consistent with
        # generation().
        self.landscape = self.aire.create_polygon(self.skyline, self.largeur,self.hauteur,0,self.hauteur,fill='green',smooth=0,tag='terrain')

    def detruire(self, x0=100, x1=200):
        """Lower the horizon by 50 pixels between x0 and x1."""
        while x0 < x1:
            self.skyline[x0] = (self.skyline[x0][0],self.skyline[x0][1]+50)
            x0 += 1
        self.aire.delete(self.landscape)
        # Fixed: hard-coded 1200x800 replaced by the instance dimensions.
        # NOTE(review): unlike the other redraws, this one carries no
        # 'terrain' tag -- confirm whether collision detection needs it.
        self.landscape = self.aire.create_polygon(self.skyline, self.largeur,self.hauteur,0,self.hauteur,fill='green',smooth=0)

    def casser(self,*event):
        """Keyboard callback: blow a default crater (for manual testing)."""
        self.destruction_circulaire()
# Manual demo: generate a terrain; Return blows a crater at the default spot.
if __name__ == "__main__":
    t = Tk()
    aire = Canvas(t, width=1200, height=800)
    landscape = Terrain(aire,1200,800)
    t.bind("<Return>",landscape.casser)
    aire.pack()
    t.mainloop()
| {"/Tank2.0.py": ["/bombarde_prot.py", "/obus_prot.py", "/terrain_prot.py"], "/bombarde_prot.py": ["/obus_prot.py", "/terrain_prot.py"], "/obus_prot.py": ["/terrain_prot.py"]} |
69,629 | Jacobylee/Distributional-Semantics-Takes-the-SAT | refs/heads/main | /word_vector_model.py | import numpy as np
import scipy.linalg as scipy_linalg
from random import choice
class WordVectorModel:
    """Answers synonym / analogy multiple-choice questions with word vectors.

    Loads a whitespace-separated embedding file (word followed by floats,
    one word per line) into a matrix, then scores answer options by cosine
    similarity (flag=True) or negative Euclidean distance (flag=False).
    """

    def __init__(self, db_filename, questions, flag=True):
        """Load the vector file and keep the question set.

        db_filename: path to the embedding file
        questions:   list of question dicts (see synonym()/analogy())
        flag:        True -> cosine scoring, False -> Euclidean scoring
        """
        print("generating model...", end="")
        dsm = []
        index = {}
        line_point = 0
        with open(db_filename) as f:
            for line in f:
                index[line.split()[0]] = line_point
                vector = list(map(float, line.split()[1:]))
                dsm.append(np.array(vector))
                line_point += 1
        # (redundant f.close() inside the 'with' block removed)
        print("Done!")
        # NOTE(review): np.mat is deprecated in modern NumPy; kept because the
        # row-extraction code below relies on matrix semantics -- confirm.
        self.dsm = np.mat(dsm)
        self.index = index
        self.q_sets = questions
        self.unknown = []   # questions containing out-of-vocabulary words
        self.results = []   # (question, selected answer) pairs
        self.flag = flag

    @staticmethod
    def euclidean(v1, v2):
        """Euclidean distance between two equal-length vectors.

        Raises ValueError on a length mismatch.  (Fixed: the original
        *returned* the ValueError class instead of raising it.)
        """
        if len(v1) != len(v2):
            raise ValueError("vectors have different dimensions")
        return scipy_linalg.norm(v1-v2)

    @staticmethod
    def cosine(v1, v2):
        """Cosine similarity between two equal-length vectors.

        Raises ValueError on a length mismatch.  (Fixed: the original
        *returned* the ValueError class instead of raising it.)
        """
        if len(v1) != len(v2):
            raise ValueError("vectors have different dimensions")
        return np.dot(v1, v2) / (scipy_linalg.norm(v1) * scipy_linalg.norm(v2))

    def synonym(self):
        """Answer the synonym questions, logging each one to synonym.txt.

        Question format: [{word: set(option, ...)}, ...].  Both the word and
        each option appear to carry a 3-character prefix that is stripped
        with [3:] -- TODO confirm against the question generator.
        """
        with open("synonym.txt", "w") as f:
            for mc_question in self.q_sets:
                token = list(mc_question.keys())[0]
                target = token[3:]
                if target in self.index.keys():
                    f.write("---------------------\n")
                    f.write("Question: %s \n" % token)
                    target_vector = np.array(self.dsm[self.index[target]])[0]
                    option_scores = {}
                    for option in list(mc_question.values())[0]:
                        opt = option[3:]
                        f.write("Selection: %s \n" % option)
                        if opt in self.index.keys():
                            opt_vector = np.array(self.dsm[self.index[opt]])[0]
                            if self.flag:
                                option_scores[self.cosine(target_vector, opt_vector)] = option  # cosine
                            else:
                                # Euclidean negated so max() picks the nearest option
                                option_scores[-self.euclidean(target_vector, opt_vector)] = option
                        else:
                            # Unknown option: score it below any real option
                            option_scores[-100] = option
                    selected = option_scores[max(option_scores.keys())]
                    self.results.append((token, selected))
                else:
                    self.unknown.append(token)

    def analogy(self):
        """Answer the SAT analogy questions; returns the accuracy in [0, 1].

        Question format: {(word1, word2, question_number, answer_index):
        [(opt1a, opt1b), ...]}.  The question pair's difference vector is
        compared against each option pair's difference vector.
        """
        tp = 0
        fp = 0
        for as_question in self.q_sets:
            word1 = list(as_question.keys())[0][0]
            word2 = list(as_question.keys())[0][1]
            question_num = list(as_question.keys())[0][2]
            answer = list(as_question.values())[0][list(as_question.keys())[0][3]]
            if word1 in self.index.keys() and word2 in self.index.keys():
                word1_vector = np.array(self.dsm[self.index[word1]])[0]
                word2_vector = np.array(self.dsm[self.index[word2]])[0]
                word_diff = word1_vector-word2_vector
                option_scores = {}
                for option in list(as_question.values())[0]:
                    option1 = option[0]
                    option2 = option[1]
                    if option1 in self.index.keys() and option2 in self.index.keys():
                        option1_vector = np.array(self.dsm[self.index[option1]])[0]
                        option2_vector = np.array(self.dsm[self.index[option2]])[0]
                        option_diff = option1_vector-option2_vector
                        if self.flag:
                            option_scores[self.cosine(word_diff, option_diff)] = option  # cosine
                        else:
                            option_scores[-self.euclidean(word_diff, option_diff)] = option  # euclidean, negated
                    else:
                        option_scores[-100] = (option[0], option[1])
                selected = option_scores[max(option_scores.keys())]
                self.results.append((question_num, selected))
            else:
                # Out-of-vocabulary question words: fall back to a random guess
                self.unknown.append((word1, word2))
                selected = choice(list(as_question.values())[0])
            if answer == selected:
                tp += 1
            else:
                fp += 1
        return tp/(fp+tp)
| {"/synonym_detection_SAT.py": ["/word_vector_model.py"]} |
69,630 | Jacobylee/Distributional-Semantics-Takes-the-SAT | refs/heads/main | /dsm.py | import numpy as np
from nltk import bigrams
import scipy.linalg as scipy_linalg
def generate_co_matrix(filename):
    """Build a smoothed co-occurrence matrix and its PPMI transform.

    Reads the whitespace-tokenised corpus at *filename*, counts symmetric
    adjacent-word co-occurrences, applies add-one smoothing after scaling
    counts by 10, and computes positive pointwise mutual information.

    Returns:
        (co_matrix, ppmi, term_index) where term_index maps word -> row/col.
    """
    # Read the corpus (with-block fixes the original's unclosed file handle).
    with open(filename) as fh:
        data = fh.readlines()
    # Collect the vocabulary and adjacent-word pairs per line.
    bi_grams = []
    voca = []
    for line in data:
        tokens = line.split()
        voca += tokens
        # stdlib equivalent of nltk.bigrams: consecutive token pairs
        bi_grams += list(zip(tokens, tokens[1:]))
    vocas = list(set(voca))
    # term index
    term_index = {word: i for i, word in enumerate(vocas)}
    # Symmetric co-occurrence counts
    co_matrix = np.zeros((len(vocas), len(vocas)), np.float64)
    for first, second in bi_grams:
        co_matrix[term_index[first]][term_index[second]] += 1
        co_matrix[term_index[second]][term_index[first]] += 1
    # Smooth: scale by 10, then add one everywhere
    co_matrix = (co_matrix * 10) + 1
    # PPMI = max(log(p(w,c) / (p(w) * p(c))), 0)
    ppmi = np.zeros((len(vocas), len(vocas)), np.float64)
    all_count = np.sum(co_matrix)
    # Hoisted: the original recomputed the row/column sums inside the inner
    # loop, making the PPMI step O(n^3) instead of O(n^2).
    row_prob = [sum(co_matrix[w]) / all_count for w in range(len(co_matrix))]
    col_prob = [sum(co_matrix[:, c]) / all_count for c in range(len(co_matrix))]
    for word in range(len(co_matrix)):
        for context in range(len(co_matrix[word])):
            pwc = co_matrix[word][context] / all_count
            ppmi[word][context] = max(np.log(pwc / (row_prob[word] * col_prob[context])), 0)
    return co_matrix, ppmi, term_index
def euclidean(word1, word2):
    """Return the Euclidean distance between two equal-length vectors.

    Raises:
        ValueError: if the vectors differ in length.  (Fixed: the original
        printed a message and implicitly returned None.)
    """
    if len(word1) != len(word2):
        raise ValueError("Vectors have different dimension")
    return scipy_linalg.norm(word1-word2)
# Parse the corpus once and unpack all three results.  The original called
# generate_co_matrix three times, re-reading the file and recomputing both
# matrices for each indexed element.
co, PPMI, index = generate_co_matrix("dist_sim_data.txt")
print("Index:", index)
print("co-occurrence matrix")
print(co)
print("PPMI matrix:")
print(PPMI)
print("dogs co-occurrence: ", co[index["dogs"]])
print("dogs PPMI: ", PPMI[index["dogs"]])
print("-------distance------")
print("women and men: ", euclidean(PPMI[index["women"]], PPMI[index["men"]]))
print("women and dogs: ", euclidean(PPMI[index["women"]], PPMI[index["dogs"]]))
print("men and dogs: ", euclidean(PPMI[index["men"]], PPMI[index["dogs"]]))
print("feed and like: ", euclidean(PPMI[index["feed"]], PPMI[index["like"]]))
print("feed and bite: ", euclidean(PPMI[index["feed"]], PPMI[index["bite"]]))
print("like and bite: ", euclidean(PPMI[index["like"]], PPMI[index["bite"]]))
print("---------SVD---------")
U, E, Vt = scipy_linalg.svd(PPMI, full_matrices=False)
U = np.matrix(U)  # compute U
E = np.matrix(np.diag(E))  # compute E
Vt = np.matrix(Vt)  # compute Vt = conjugate transpose of V
V = Vt.T  # compute V = conjugate transpose of Vt
print("U:")
print(U)
print("E:")
print(E)
print("V:")
print(V)
print("U·E·V")
print(np.around(np.dot(np.dot(U, E), Vt), decimals=8))
print("---------3D---------")
# Project the PPMI rows onto the first three right-singular vectors.
reduced_PPMI = np.around(PPMI * V[:, 0:3], decimals=8)
print("reduced_PPMI:")
print(reduced_PPMI)
print("women and men: ", euclidean(reduced_PPMI[index["women"]], reduced_PPMI[index["men"]]))
print("women and dogs: ", euclidean(reduced_PPMI[index["women"]], reduced_PPMI[index["dogs"]]))
print("men and dogs: ", euclidean(reduced_PPMI[index["men"]], reduced_PPMI[index["dogs"]]))
print("feed and like: ", euclidean(reduced_PPMI[index["feed"]], reduced_PPMI[index["like"]]))
print("feed and bite: ", euclidean(reduced_PPMI[index["feed"]], reduced_PPMI[index["bite"]]))
print("like and bite: ", euclidean(reduced_PPMI[index["like"]], reduced_PPMI[index["bite"]]))
| {"/synonym_detection_SAT.py": ["/word_vector_model.py"]} |
69,631 | Jacobylee/Distributional-Semantics-Takes-the-SAT | refs/heads/main | /synonym_detection_SAT.py | from collections import defaultdict
from random import choice, sample
from word_vector_model import WordVectorModel
# transfer txt to dictionary
def multiple_choice_database(filename):
    """Parse the synonym file into {verb: [synonym, ...]}, skipping the header."""
    data_base = defaultdict(list)
    lines = open(filename).readlines()
    for line in lines[1:]:
        fields = line.split()
        verb, candidate = fields[0], fields[1]
        # '0' marks "no synonym"; also skip candidates already recorded
        if candidate != '0' and candidate not in data_base[verb]:
            data_base[verb].append(candidate)
    return data_base
# yield question from dictionary(synonym)
def question_generator(database, k):
    """Yield k multiple-choice questions: {verb: {1 synonym + 4 distractors}}.

    database: {verb: [synonyms]}; k: number of questions to generate.
    """
    verbs = list(database.keys())
    # Pool of every word seen in the database (verbs and all synonyms).
    all_words = set(verbs + sum(list(database.values()), []))
    for _ in range(k):  # (dead 'i += 1' that rebound the loop variable removed)
        question = defaultdict(set)
        verb = choice(verbs)
        # One correct synonym...
        question[verb].add(choice(database[verb]))
        # ...plus four distractors drawn from words that are NOT synonyms.
        # Fixed: sample from a list -- random.sample no longer accepts sets
        # on Python 3.11+.
        distractors = [w for w in all_words if w not in database[verb]]
        question[verb].update(sample(distractors, 4))
        yield question
# yield question from txt file(analogy)
def sat_analogy(filename):
    """Yield SAT analogy questions parsed from the package file.

    Each yielded value is {(word1, word2, question_number, answer_index):
    [(choice_a, choice_b), ...]} with five answer pairs per question.

    NOTE(review): the parser assumes a strict 7-record layout per question
    (question pair, 5 choice pairs, answer letter) and skips lines starting
    with '#', '190', 'ML:' or 'KS' -- confirm against the data file format.
    """
    data_base = []
    start_token = ['#', '190', 'ML:', 'KS']
    # Maps the answer letter to an index into the choice list
    char_index = {'a': 0, 'b': 1, 'c': 2, 'd': 3, 'e': 4}
    with open(filename) as f:
        for line in f:
            line_list = line.split()
            if len(line_list) != 0:
                if line_list[0] in start_token:
                    continue
                else:
                    data_base.append(line_list)
    sat_questions = {}
    sat_selections = {}
    sat_answers = {}
    # Every 7 consecutive records form one question block.
    for lst in range(len(data_base)//7):
        sat_questions[lst] = data_base[lst*7]
        sat_answers[lst] = data_base[lst*7+6]
        sat_choice = []
        for choices in data_base[lst*7+1:lst*7+6]:
            sat_choice.append((choices[0], choices[1]))
        sat_selections[lst] = sat_choice
    for sat in sat_questions:
        question = (sat_questions[sat][0], sat_questions[sat][1], sat, char_index[sat_answers[sat][0]])
        yield {question: sat_selections[sat]}
def accuracy(result, db):
    """Fraction of (word, guess) pairs whose guess is a known synonym.

    *result* is an iterable of pairs; a pair counts as correct when its
    second element appears in ``db[first element]``. Returned rounded to
    two decimal places.
    """
    correct = sum(1 for pair in result if pair[1] in db[pair[0]])
    return round(correct / len(result), 2)
if __name__ == "__main__":
    # Q1: synonym detection -------------------------------------------------
    # Build the verb -> synonyms database from the COMPOSES synonym file.
    synonym_db = multiple_choice_database("EN_syn_verb.txt")
    # Generate 1000 five-option multiple-choice questions.
    synonym_sets = [q for q in question_generator(synonym_db, 1000)]
    # flag=True selects cosine similarity, flag=False euclidean distance
    # (see WordVectorModel in word_vector_model.py).
    composes_with_cosine = WordVectorModel("EN-wform.w.2.ppmi.svd.500.rcv_vocab.txt", synonym_sets, flag=True)  # cosine
    composes_with_cosine.synonym()
    cc_result = accuracy(composes_with_cosine.results, synonym_db)
    composes_with_euclidean = WordVectorModel("EN-wform.w.2.ppmi.svd.500.rcv_vocab.txt", synonym_sets, flag=False)  # euclidean
    composes_with_euclidean.synonym()
    ce_result = accuracy(composes_with_euclidean.results, synonym_db)
    word2vec_with_cosine = WordVectorModel("GoogleNews-vectors-rcv_vocab.txt", synonym_sets, flag=True)  # cosine
    word2vec_with_cosine.synonym()
    wc_result = accuracy(word2vec_with_cosine.results, synonym_db)
    word2vec_with_euclidean = WordVectorModel("GoogleNews-vectors-rcv_vocab.txt", synonym_sets, flag=False)  # euclidean
    word2vec_with_euclidean.synonym()
    we_result = accuracy(word2vec_with_euclidean.results, synonym_db)
    # Accuracy table: rows = embedding model, columns = similarity measure.
    print("accuracy cos euclidean")
    print("COMPOSES", cc_result, "  ", ce_result)
    print("word2vec", wc_result, "  ", we_result)
    # Q2: SAT analogy --------------------------------------------------------
    # Build analogy questions from the SAT package file.
    analogy_sets = [i for i in sat_analogy("SAT-package-V3.txt")]
    analogy_composes_cosine = WordVectorModel("EN-wform.w.2.ppmi.svd.500.rcv_vocab.txt", analogy_sets, flag=True)  # cosine
    print("SAT analogy (COMPOSES cosine) Accuracy: %.2f " % analogy_composes_cosine.analogy())
    analogy_composes_euclidean = WordVectorModel("EN-wform.w.2.ppmi.svd.500.rcv_vocab.txt", analogy_sets, flag=False)  # euclidean
    print("SAT analogy (COMPOSES euclidean) Accuracy: %.2f " % analogy_composes_euclidean.analogy())
    analogy_word2vec_cosine = WordVectorModel("GoogleNews-vectors-rcv_vocab.txt", analogy_sets, flag=True)  # cosine
    print("SAT analogy (word2vector cosine) Accuracy: %.2f " % analogy_word2vec_cosine.analogy())
    analogy_word2vec_euclidean = WordVectorModel("GoogleNews-vectors-rcv_vocab.txt", analogy_sets, flag=False)  # euclidean
    print("SAT analogy (word2vector euclidean) Accuracy: %.2f " % analogy_word2vec_euclidean.analogy())
| {"/synonym_detection_SAT.py": ["/word_vector_model.py"]} |
69,632 | Anushree-Prakash/AMAZON_POM | refs/heads/master | /resources/data/constant_variable.py | URL ="https://www.amazon.in/"
CHROME_PATH = "./resources/drivers/chromedriver"   # chromedriver binary for Chrome runs
FIREFOX_PATH = "./resources/drivers/geckodriver"   # geckodriver binary for Firefox runs
EXCEL_PATH = "./resources/data/amazon.xlsx"        # data-driven test workbook
69,633 | Anushree-Prakash/AMAZON_POM | refs/heads/master | /sources/utilities/properties.py | from pyjavaproperties import Properties
from sources.utilities import globals
import sys
class ReadConfig:
    """Static helpers that read the project's .properties configuration.

    All accessors reload the file via get_properties(); keys are looked up
    with pyjavaproperties' getProperty().
    """

    @staticmethod
    def get_properties():
        """Load and return the Properties object from globals.PROPERTIES_PATH."""
        prop = Properties()
        # Close the handle after parsing — the original passed open(...)
        # directly into load() and leaked the file object.
        with open(globals.PROPERTIES_PATH, mode='r') as config_file:
            prop.load(config_file)
        return prop

    @staticmethod
    def get_url():
        """Application URL under test (key "URL")."""
        prop = ReadConfig.get_properties()
        return prop.getProperty("URL")

    @staticmethod
    def get_browser():
        """Browser name to launch (key "Browser")."""
        prop = ReadConfig.get_properties()
        return prop.getProperty("Browser")

    @staticmethod
    def get_implicit_wait():
        """Implicit wait in seconds (key "I_wait"), as int."""
        prop = ReadConfig.get_properties()
        return int(prop.getProperty("I_wait"))

    @staticmethod
    def get_explicit_wait():
        """Explicit wait in seconds (key "e_wait"), as int."""
        prop = ReadConfig.get_properties()
        return int(prop.getProperty("e_wait"))

    @staticmethod
    def write_to_report():
        """Write environment details into the Allure results directory."""
        prop = ReadConfig.get_properties()
        prop.setProperty("Browser Name", ReadConfig.get_browser())
        prop.setProperty("Application URL", ReadConfig.get_url())
        prop.setProperty("Python version", str(sys.version))
        # NOTE(review): sys.getwindowsversion() only exists on Windows —
        # this method will raise AttributeError elsewhere; confirm intent.
        prop.setProperty("Platform", str(sys.getwindowsversion()))
        # The original opened this file and never closed it.
        with open(globals.ALLURE_RESULTS + "environment.properties", mode='w') as out:
            prop.store(out)
69,634 | Anushree-Prakash/AMAZON_POM | refs/heads/master | /test_scripts/test_cases/test_amazon.py | from sources.pages.home_page_search_product import HomePageSearchProduct as searchProd
from sources.pages.add_to_cart import AddToCart
from sources.generic_utilities.webdriver_factory import WebDriverFactory
# Workflow facade: search for a product on the home page and add it to the cart.
class Amazon(WebDriverFactory):
    """High-level test flow combining the page objects."""

    def __init__(self,driver):
        super().__init__(driver)
        self.driver =driver

    def searchProduct(self,name):
        """Search *name*, open the matching product, and add it to the cart.

        Each step constructs a fresh page object bound to the same driver.
        """
        searchProd(self.driver).SearchBox(name)
        searchProd(self.driver).click_on_product()
        AddToCart(self.driver).click_on_add_to_cart()

    # def add_to_cart(self):
    #     AddToCart
| {"/test_scripts/test_cases/test_amazon.py": ["/sources/pages/home_page_search_product.py", "/sources/pages/add_to_cart.py"], "/test_runner.py": ["/test_scripts/test_cases/test_amazon.py"], "/sources/pages/add_to_cart.py": ["/sources/pages/home_page_search_product.py"]} |
69,635 | Anushree-Prakash/AMAZON_POM | refs/heads/master | /test_runner.py | from test_scripts.test_cases.test_amazon import Amazon
from sources.utilities import excel
import pytest
@pytest.mark.usefixtures("oneTimeSetUp")
class Test_AmazonProj():
    """Pytest suite driving the Amazon search / add-to-cart flow."""

    @pytest.fixture(autouse=True)
    def classSetUp(self):
        # Fresh flow wrapper per test; self.driver is presumably injected by
        # the oneTimeSetUp fixture (defined in conftest) — TODO confirm.
        self.hs = Amazon(self.driver)

    # @pytest.mark.run(order=1)
    def test_tc_001_add_product_and_verify(self):
        # Product name is data-driven from the Excel workbook.
        product_name = excel.get_value("Cart", "TC_002", "ProductName")
        self.hs.searchProduct(product_name)
        # success_message = excel.get_value("Cart", "TC_002", "SuccessMessage")
        # success_message_after_deleting = excel.get_value("Cart", "TC_002", "SuccessMessageAfterDelete")
"""
OUTPUT:
py.test -v -s test_runner.py --browser firefox
py.test -v -s test_runner.py --browser chrome
TO GENERATE HTML REPORT :
PREREQUISITE:
pip install pytest-HTML
TO GENERATE A REPORT :
py.test -x test_runner.py --browser chrome --html=Amazon_searchProduct.html
"""
| {"/test_scripts/test_cases/test_amazon.py": ["/sources/pages/home_page_search_product.py", "/sources/pages/add_to_cart.py"], "/test_runner.py": ["/test_scripts/test_cases/test_amazon.py"], "/sources/pages/add_to_cart.py": ["/sources/pages/home_page_search_product.py"]} |
69,636 | Anushree-Prakash/AMAZON_POM | refs/heads/master | /sources/pages/add_to_cart.py | '''
@author : anushree
@email : anu@gmail.com
@date : 18/12/2019
'''
from sources.generic_utilities.generic_methods import GenericMethods
from sources.utilities import custom_logger as cl
import logging
from sources.pages.home_page_search_product import HomePageSearchProduct
class AddToCart(GenericMethods):
    """
    Page object for the product page: clicks Amazon's "Add to Cart" button.
    """
    log = cl.customLogger(logging.DEBUG)
    # Element id locator of the add-to-cart button.
    __add_to_cart_button = "add-to-cart-button"

    # def __add_to_cart(self):
    #     return self.getElement("add-to-cart-button",locatorType="id")

    def __init__(self, driver):
        # GenericMethods is initialised with the webdriver via super().
        super().__init__(driver)
        self.driver = driver
        # NOTE(review): this instance is created and immediately discarded —
        # presumably leftover code; confirm whether it is needed at all.
        HomePageSearchProduct(driver)

    def click_on_add_to_cart(self):
        """Switch to the product's child window and click add-to-cart."""
        self.switch_to_child_window(self.driver)
        self.elementClick(self.__add_to_cart_button)
        self.timeSleep()
| {"/test_scripts/test_cases/test_amazon.py": ["/sources/pages/home_page_search_product.py", "/sources/pages/add_to_cart.py"], "/test_runner.py": ["/test_scripts/test_cases/test_amazon.py"], "/sources/pages/add_to_cart.py": ["/sources/pages/home_page_search_product.py"]} |
69,637 | Anushree-Prakash/AMAZON_POM | refs/heads/master | /sources/pages/home_page_search_product.py | '''
@author : anushree
@email : anu@gmail.com
@date : 18/12/2019
'''
import time
from lib2to3.pgen2 import driver
from selenium.webdriver.common.by import By
from sources.generic_utilities.generic_methods import GenericMethods
from sources.utilities import custom_logger as cl
import logging
class HomePageSearchProduct(GenericMethods):
    """
    Page object for the Amazon home page: search a product and open it.
    """
    log = cl.customLogger(logging.DEBUG)

    def __init__(self, driver):
        # GenericMethods is initialised with the webdriver via super().
        super().__init__(driver)
        self.driver = driver
        # Element id of the home-page search input.
        self.search_Product_Name="twotabsearchtextbox"

    def SearchBox(self,productName):
        """Type *productName* into the search box and submit the search."""
        # sendKeys / elementClick are inherited from GenericMethods.
        self.sendKeys(productName,self.search_Product_Name)
        # self.elementClick("//span[@id='nav-search-submit-text']/../input",locatorType='xpath')
        self.elementClick("//input[@value='Go']", "xpath")
        time.sleep(3)  # NOTE(review): fixed sleep; an explicit wait would be more robust

    def click_on_product(self):
        """Open the product from the result list.

        NOTE(review): the product title is hard-coded in the xpath, so this
        only works for that specific search result.
        """
        # self.switch_to_child_window(self.driver)
        self.elementClick("(//span[text()='Apple iPhone XR (64GB) - Black'])[2]","xpath")
        self.timeSleep()
| {"/test_scripts/test_cases/test_amazon.py": ["/sources/pages/home_page_search_product.py", "/sources/pages/add_to_cart.py"], "/test_runner.py": ["/test_scripts/test_cases/test_amazon.py"], "/sources/pages/add_to_cart.py": ["/sources/pages/home_page_search_product.py"]} |
69,638 | Thenewprogramming/paccatcher | refs/heads/master | /PacCatcher.py | import pygame
import Game
import inputbox2
"""
This is the mainmenu class of the game.
"""
def init():
    """Run the main-menu loop: pick server/client, collect name/ip, start the game."""
    global quitgame
    pygame.display.set_caption("PacCatcher")
    quitgame = False
    name = ""
    ip = ""
    name_input = None
    ip_input = None
    isclient = None  # None until the user clicks the server or client button
    color_server = color_client = (255, 0, 0)
    serverBtn = pygame.Rect((50, 50), (100, 25))
    clientBtn = pygame.Rect((250, 50), (100, 25))
    startBtn = pygame.Rect((300, 320), (90, 70))
    while not quitgame:
        # Poll the focused input boxes; update() returns (text, done) where
        # done becomes True once RETURN was pressed in that box.
        if name_input != None:
            if name_input.getFocus():
                name, clear_name = name_input.update()
                if clear_name:
                    name_input = None
                    clear_name = False
        if ip_input != None:
            if ip_input.getFocus():
                ip, clear_ip = ip_input.update()
                if clear_ip:
                    ip_input = None
                    clear_ip = False
        for event in pygame.event.get():
            if event.type == pygame.QUIT:
                exitgame()
            if event.type == pygame.MOUSEBUTTONDOWN:
                # 1x1 rect at the cursor keeps all hit tests as colliderect calls.
                mouserect = pygame.Rect(pygame.mouse.get_pos(), (1, 1))
                if mouserect.colliderect(clientBtn):
                    # Client mode needs both a player name and a server ip.
                    name_input = inputbox2.inputbox(screen, "Your name", [screen.get_width() / 2, 100], (255, 255, 255), (0, 0, 0))
                    ip_input = inputbox2.inputbox(screen, "Servers ip", [screen.get_width() / 2, 200], (255, 255, 255), (0, 0, 0))
                    isclient = True
                elif mouserect.colliderect(serverBtn):
                    # Server mode only needs a name.
                    name_input = inputbox2.inputbox(screen, "Your name", [screen.get_width() / 2, 100], (255, 255, 255), (0, 0, 0))
                    ip_input = None
                    isclient = False
                elif mouserect.colliderect(startBtn):
                    print("Here we should start the game :P\nInfo: " + str(isclient) + " " + str(ip) + " " + str(name))
                    startgame(isclient, ip, name)
                elif name_input != None:
                    # Clicking a box moves keyboard focus to it.
                    if mouserect.colliderect(name_input.getRect()):
                        name_input.setFocus(True)
                        if isclient and ip_input != None:
                            ip_input.setFocus(False)
                elif ip_input != None:
                    if mouserect.colliderect(ip_input.getRect()):
                        ip_input.setFocus(True)
                        if name_input != None:
                            name_input.setFocus(False)
            if event.type == pygame.KEYDOWN:
                if event.key == pygame.K_ESCAPE:
                    exitgame()
        # Redraw the whole menu every frame at 30 FPS.
        screen.fill((255, 255, 255))
        fontobject = pygame.font.Font(None, 30)
        pygame.draw.rect(screen, color_client, clientBtn)
        pygame.draw.rect(screen, color_server, serverBtn)
        pygame.draw.rect(screen, (255, 0, 0), startBtn)
        screen.blit(fontobject.render("server", True, (0, 0, 0)), (60, 55))
        screen.blit(fontobject.render("client", True, (0, 0, 0)), (260, 55))
        screen.blit(fontobject.render("Start", True, (0, 0, 0)), (322, 345))
        if name_input != None:
            name_input.draw()
        if ip_input != None:
            ip_input.draw()
        pygame.display.update()
        clock.tick(30)
def exitgame():
    """Signal the menu loop in init() to terminate."""
    global quitgame
    quitgame = True
def startgame(isclient, ip, name):
    """Hand control to the actual game (Game.init) with the menu selections."""
    print("This is the ip: " + ip)
    Game.init(isclient, ip, name, screen, clock)
if __name__ == "__main__":
    # Bootstrap pygame, create the shared window and clock, run the menu loop.
    pygame.init()
    screen = pygame.display.set_mode((400, 400))
    clock = pygame.time.Clock()
    init()
| {"/PacCatcher.py": ["/Game.py", "/inputbox2.py"], "/Game.py": ["/Client.py", "/Server.py"]} |
69,639 | Thenewprogramming/paccatcher | refs/heads/master | /inputbox2.py | # by Timothy Downs, inputbox2.0 written for my map editor
# This program needs a little cleaning up
# It ignores the shift key
# And, for reasons of my own, this program converts "-" to "_"
# A program to get user input, allowing backspace etc
# shown in a box in the middle of the screen
# Called by:
# import inputbox
# answer = inputbox.ask(screen, "Your name")
#
# Only near the center of the screen is blitted to
import pygame.font
import pygame.event
import pygame.draw
class inputbox():
    """Single-line on-screen text input box rendered onto a pygame surface."""

    def __init__(self, screen, question, pos, color_back, color_text):
        pygame.font.init()
        self.current_string = []   # typed characters, one list entry each
        self.string = ""           # only ever used as the ''.join separator
        self.screen = screen
        self.question = question   # prompt label shown before the typed text
        self.pos = pos             # (x, y) centre of the box
        self.color_back = color_back
        self.color_text = color_text
        self.message = self.question + ": "
        self.focus = False         # whether keystrokes are routed to this box

    def update(self):
        """Consume one pending keypress.

        Returns (text, done): done is True with the joined text once RETURN
        is pressed; otherwise ("", False).
        """
        inkey = self.get_key()
        if inkey != None:
            if inkey == pygame.K_BACKSPACE:
                self.current_string = self.current_string[0:-1]
            elif inkey == pygame.K_RETURN:
                return self.string.join(self.current_string), True
            elif inkey == pygame.K_MINUS:
                # minus is deliberately stored as underscore (see file header)
                self.current_string.append("_")
            elif inkey <= 127:
                # plain ASCII keycode; the shift key is ignored
                self.current_string.append(chr(inkey))
        self.message = self.question + ": " + self.string.join(self.current_string)
        return "", False

    def get_key(self):
        """Return the first pending KEYDOWN keycode, or None.

        NOTE(review): events drained before a KEYDOWN is found are only
        re-posted when NO keypress is present; when one is found, the events
        read before it are silently dropped — confirm this is acceptable.
        """
        events = []
        for event in pygame.event.get():
            events.append(event)
            if event.type == pygame.KEYDOWN:
                return event.key
        for event in events:
            pygame.event.post(event)

    def getRect(self):
        """Bounding rectangle of the box, for mouse hit tests."""
        return pygame.Rect((self.pos[0]) - 100, (self.pos[1]) - 10, 200, 20)

    def setFocus(self, focus):
        self.focus = focus

    def getFocus(self):
        return self.focus

    def draw(self):
        """Render background, border and the prompt text, then flip the display."""
        fontobject = pygame.font.Font(None, 18)
        pygame.draw.rect(self.screen, self.color_back,
                       ((self.pos[0]) - 100,
                        (self.pos[1]) - 10,
                        200, 20), 0)
        pygame.draw.rect(self.screen, self.color_text,
                       ((self.pos[0]) - 102,
                        (self.pos[1]) - 12,
                        204, 24), 1)
        if len(self.message) != 0:
            self.screen.blit(fontobject.render(self.message, 2, self.color_text),
                    ((self.pos[0]) - 100, (self.pos[1]) - 5))
        pygame.display.flip()
| {"/PacCatcher.py": ["/Game.py", "/inputbox2.py"], "/Game.py": ["/Client.py", "/Server.py"]} |
69,640 | Thenewprogramming/paccatcher | refs/heads/master | /Game.py | import pygame
import Client
from Server import server
def init(isclient, serverip, name, screen, clock):
    """Enter the game after the menu: connect as client or start the server.

    *serverip* may be "host", "host:port", or empty. An omitted or
    non-numeric port falls back to 12345, the default shared by
    Client.connect() and Server.server(). (Previously the parsed port was
    discarded entirely and the dead fallback constant was 12341, which
    matched neither peer's default.)
    """
    screen.fill((0, 0, 0))
    pygame.display.update()
    host, _, port_text = serverip.partition(":")
    port = int(port_text) if port_text.isdigit() else 12345
    if isclient:
        Client.connect(host, port)
        Client.ready(name)
    else:
        Server = server(port)
    # Placeholder main loop: the server busy-waits until all clients joined,
    # printing the connected names; clients currently just spin.
    while True:
        if not isclient:
            if Server.isdone():
                break
            for thing in Server.connected:
                print(thing.name)
| {"/PacCatcher.py": ["/Game.py", "/inputbox2.py"], "/Game.py": ["/Client.py", "/Server.py"]} |
69,641 | Thenewprogramming/paccatcher | refs/heads/master | /Client.py | import socket
def connect(ip, port=12345):
    """Open a TCP connection to the game server; kept in module globals."""
    global HOST, PORT, s
    HOST = ip
    PORT = port
    s = socket.socket()
    s.connect((HOST, PORT))
def sendMsg(msg):
    """Send *msg* over the open socket, CRLF-terminated (requires connect())."""
    s.send((msg + "\r\n").encode())
def ready(name):
    """Announce this player as ready, sending their *name* to the server."""
    sendMsg("READY/" + name)
def recv():
    """Block until up to 1024 bytes arrive and return them decoded as text."""
    return s.recv(1024).decode()
if __name__ == "__main__":
    # Manual smoke test: connect locally, send one message, echo replies forever.
    connect("localhost")
    sendMsg("bladibladibla")
    while True:
        print(recv())
| {"/PacCatcher.py": ["/Game.py", "/inputbox2.py"], "/Game.py": ["/Client.py", "/Server.py"]} |
69,642 | Thenewprogramming/paccatcher | refs/heads/master | /PacMan.py | import pygame
import os
class Ghost(pygame.sprite.Sprite):
    """Animated pac-man-style sprite cycling through the pcman*.png frames."""

    # Class-level defaults; real values are assigned per instance in __init__.
    color = None
    score = None
    name = None
    speed = None  # can be boosted when potion-like items are added
    image = None
    whichimage = 1  # animation frame counter, cycles 1..17

    def __init__(self, color, score, name, speed):
        pygame.sprite.Sprite.__init__(self)
        self.color = color
        self.score = score
        self.name = name
        self.pos = (200, 200)
        # Idiom fix: "not speed == None" replaced with an identity check;
        # the class-level default is kept when no speed is supplied.
        if speed is not None:
            self.speed = speed
        self.image = pygame.image.load(os.path.join("img", 'pcman1.png'))
        self.rect = self.image.get_rect()
        print (self.rect)  # NOTE(review): debug leftover; consider removing

    def setcolor(self, color):
        """Replace the sprite's colour."""
        self.color = color

    def getpos(self):
        """Return the current (x, y) position."""
        return self.pos

    def update(self, pos):
        """Advance the chomp animation by one frame.

        NOTE(review): *pos* is coerced to ints but never stored or used —
        presumably it should update self.pos; confirm against callers.
        """
        pos = (int(pos[0]), int(pos[1]))
        # Map the frame counter onto pcman1..pcman4; a counter of exactly 10
        # keeps the previous image unchanged.
        if self.whichimage < 2:
            self.image = pygame.image.load(os.path.join("img", 'pcman1.png'))
        elif self.whichimage < 10:
            self.image = pygame.image.load(os.path.join("img", 'pcman' + str(int(self.whichimage / 2)) + '.png'))
        elif self.whichimage > 10:
            self.image = pygame.image.load(os.path.join("img", 'pcman' + str(int((self.whichimage - 8) / 2)) + '.png'))
        if (self.whichimage < 18):
            self.whichimage += 1
        else:
            self.whichimage = 1
| {"/PacCatcher.py": ["/Game.py", "/inputbox2.py"], "/Game.py": ["/Client.py", "/Server.py"]} |
69,643 | Thenewprogramming/paccatcher | refs/heads/master | /Server.py | from socket import socket
from threading import Thread
class server():
    """Accepts up to three game clients, each handled on its own thread."""

    def __init__(self, port=12345):
        self.s = s = socket()
        s.bind(("", port))
        self.connected = []   # serveclient instances, one per accepted peer
        self.stop = False
        # Accepting happens on a background thread so the constructor returns.
        self.thread = Thread(target=self.listen)
        self.thread.start()

    def __call__(self, val):
        """Return the val-th connected client, or None when out of range."""
        try:
            return self.connected[val]
        except IndexError:
            return None

    def isdone(self):
        """True once three clients have connected and accepting stopped."""
        return self.stop

    def listen(self):
        """Accept loop (background thread); stops after the third client."""
        print("Starting server...")
        self.s.listen(3)
        while not self.stop:
            conn, addr = self.s.accept()
            # NOTE(review): the local name shadows the class name here.
            server = serveclient(conn, addr)
            self.connected += [server]
            if len(self.connected) >= 3:
                self.stop = True
class serveclient():
    """Per-connection handler: reads slash-delimited commands on its own thread."""

    def __init__(self, conn, addr):
        self.conn = conn
        self.addr = addr
        self.stop = False
        self.name = None   # set once the client sends "READY/<name>"
        self.thread = Thread(target=self.handle)
        self.thread.start()

    def handle(self):
        """Receive loop; exits when the peer disconnects (empty recv)."""
        while not self.stop:
            data = self.conn.recv(1024).decode()
            if not data:
                print(self.addr, "Disconnected")
                break
            print(self.addr, "sent:", data)
            data = data.split("/")
            if data[0] == "READY":
                # NOTE(review): the client terminates messages with CRLF, so
                # the stored name likely keeps a trailing "\r\n" — confirm.
                self.name = data[1]
                self.conn.send("Got it!\r\n".encode())
if __name__ == "__main__":
    # Manual smoke test: run a server, busy-wait until three clients join,
    # then print their names.
    serv = server()
    while True:
        if serv.isdone():
            break
    for thing in serv.connected:
        print(thing.name)
| {"/PacCatcher.py": ["/Game.py", "/inputbox2.py"], "/Game.py": ["/Client.py", "/Server.py"]} |
69,652 | dokzlo13/polyrasp | refs/heads/master | /worker/app/deferred.py | import os
from celery import Celery, group
from celery.schedules import crontab
from datetime import datetime, timedelta
import locale
from .shared.model import context_model
from .shared.model import Studiesdata, Userdata
from .shared.timeworks import timeout_has_passed, get_weeks_range, convert_concat_day_and_lesson, strf_list
from .collection import collect_groups, collect_faculties, collect_rasp, get_teachers, get_teacher_rasp
locale.setlocale(locale.LC_ALL, ('RU','UTF8'))
# Environment-driven configuration.
# Fix: the original CELERY_BROKER_URL line ended with a stray trailing comma,
# which turned the value into a 1-tuple instead of a string before it was
# handed to Celery(broker=...).
CELERY_BROKER_URL = os.environ.get('CELERY_BROKER_URL', 'redis://localhost:6379')
CELERY_RESULT_BACKEND = os.environ.get('CELERY_RESULT_BACKEND', 'redis://localhost:6379')
MONGO_CONNECTION = os.environ.get('MONGO_CONNECTION', 'mongodb://localhost:27017/')
MONGO_DB = os.environ.get('MONGO_DB', 'raspisator')
# Model factories bound to the configured MongoDB instance.
UserStandalone = context_model(Userdata, MONGO_CONNECTION, MONGO_DB)
StudiesStandalone = context_model(Studiesdata, MONGO_CONNECTION, MONGO_DB)
app = Celery(broker=CELERY_BROKER_URL,
             backend=CELERY_RESULT_BACKEND,
             )
app.conf.timezone = 'UTC'
RENEW_TIMEOUT = 60 * 30      # seconds between refreshes of one subscription
INITIAL_WEEKS_DEPTH = 10     # weeks fetched on a subscription's first sync
WEEKS_DEPTH = 2              # weeks fetched on periodic re-sync
# NOTIFY_DELAY = 4
UNLINK_DELAY = 60.0*20       # seconds between unused-subscription sweeps
@app.on_after_configure.connect
def setup_periodic_tasks(sender, **kwargs):
    """Register the recurring celery jobs once the app is configured."""
    # Full re-sync of every subscription, at twice the per-sub refresh timeout.
    sender.add_periodic_task(RENEW_TIMEOUT*2, get_all_subscibtions_data.s(), name='Collect all subscriptions data every hour')
    # Periodic cleanup of subscriptions nobody references any more.
    sender.add_periodic_task(UNLINK_DELAY, unlink_non_used_subs.s(), name='Remove unused subscriptions')
    # sender.add_periodic_task(30.0, test.s('world'), expires=10)
    # Rebuild the faculty/group schema during hour 3.
    # NOTE(review): crontab(hour=3) leaves minute='*', so this fires every
    # minute of that hour — crontab(hour=3, minute=0) was probably intended.
    sender.add_periodic_task(
        crontab(hour=3),
        get_groups_schema.s(),
    )
def merge_dictionaries(dict1, dict2):
    """Combine two dicts; values present in both are joined with ``+``.

    Works for any value type that supports ``+`` (lists concatenate,
    numbers add). Neither input is mutated.
    """
    merged = {}
    for key, value in dict1.items():
        merged[key] = value + dict2[key] if key in dict2 else value
    for key, value in dict2.items():
        merged.setdefault(key, value)
    return merged
@app.task(name='deferred.get_groups_schema')
def get_groups_schema():
    """Rebuild the faculties/groups schema from the remote timetable site.

    Purges the stored schema first, then re-fetches every faculty and its
    groups. Returns the stored counts.
    """
    with StudiesStandalone(purge_schema=['faculties', 'groups']) as s:
        faculties_data = collect_faculties()
        s.update_faculties(faculties_data)
        groups_total = 0
        for facult in faculties_data:
            groups_data = collect_groups(facult['id'])
            if groups_data:
                for gr in groups_data:
                    groups_total += 1
                    # remember which faculty each group belongs to
                    gr.update({'facultie': facult['id']})
                s.update_groups(groups_data)
    return {'faculties': len(faculties_data), 'groups': groups_total}
def collect_lessons_data(facult, id_, params=None):
    """Fetch one timetable for a group and flatten it to a lesson list.

    Each lesson dict is enriched in place: its 'time_start'/'time_end'
    strings are combined with the day's date and a 'weekday' field is
    copied from the enclosing day entry.

    Returns the flat list of lesson dicts, or None when the fetch failed.
    """
    current_rasp = collect_rasp(facult, id_, params=params)
    if current_rasp is None:  # idiom fix: was "== None"; also explicit return
        return None
    for rasp in current_rasp:
        if rasp == []:
            continue
        weekday = datetime.strptime(rasp['date'], '%Y-%m-%d')
        for lesson in rasp['lessons']:
            lesson['time_start'] = convert_concat_day_and_lesson(lesson['time_start'], weekday)
            lesson['time_end'] = convert_concat_day_and_lesson(lesson['time_end'], weekday)
            lesson['weekday'] = rasp['weekday']
    # Flatten: keep only the lessons, dropping the per-week wrappers.
    return [lesson for rasp in current_rasp for lesson in rasp['lessons']]
@app.task
def process_sub(sub, force=False, initial=False):
    """Fetch and cache lessons for one subscription dict.

    Skips work unless *force* is set or RENEW_TIMEOUT has elapsed since the
    subscription's last refresh. *initial* widens the fetch window to
    INITIAL_WEEKS_DEPTH weeks (otherwise WEEKS_DEPTH). Returns a dict of
    per-subscription update counters plus a 'timeout_passed' flag.
    """
    updates = {'timeout_passed':False}
    with StudiesStandalone() as s, \
            UserStandalone() as u:
        if not force and not timeout_has_passed(sub, RENEW_TIMEOUT):
            # print('Timeout isn\'t passed')
            return updates
        updates[sub['id']] = {}
        updates['timeout_passed'] = True
        # [None] selects the current week; the rest are explicit week dates.
        if initial:
            weeks = [None] + strf_list(get_weeks_range(INITIAL_WEEKS_DEPTH))
        else:
            weeks = [None] + strf_list(get_weeks_range(WEEKS_DEPTH))
        for week in weeks:
            lessons = collect_lessons_data(sub['facultie'], sub['id'], params={'date': week} if week != None else {})
            if not lessons:
                continue
            upd = s.check_add_lessons(lessons, sub_id=str(sub['_id']))
            # Accumulate counters across all fetched weeks.
            updates[sub['id']] = merge_dictionaries(updates[sub['id']], upd)
        u.update_subscription_acces_time(sub['_id'])
    return updates
@app.task(name='deferred.get_all_subscibtions_data')
def get_all_subscibtions_data(force=False):
    """Fan out process_sub for every stored subscription (celery group)."""
    with UserStandalone() as u:
        res = group([process_sub.s(i, force=force) for i in u.get_all_subs(string_id=True)]).delay()
    return res
@app.task(name='deferred.get_subscribtion')
def get_subscribtion(sub_id, initial=False):
    """Refresh a single subscription by its string id (async via process_sub)."""
    with UserStandalone() as u:
        res = process_sub.delay(u.get_sub_by_string_id(sub_id=sub_id, string_id=True), initial=initial)
    return res
@app.task(name='deferred.get_user_subscribtion')
def get_user_subscribtion(tel_user):
    """Refresh every subscription belonging to telegram user *tel_user*."""
    with UserStandalone() as u:
        res = group([process_sub.s(i) for i in u.get_subscriptions(tel_user=tel_user, string_id=True)]).delay()
        # updates = process_subs(s, u, u.get_subscriptions(tel_user=tel_user))
    return res
@app.task(name='deferred.get_teacher_search')
def get_teacher_search(name):
    """Async wrapper around the teacher-by-name site search."""
    return get_teachers(name)
@app.task(name='deferred.get_teacher_lessons')
def get_teacher_lessons(id_, params=None):
    """Async wrapper fetching one teacher's schedule payload."""
    return get_teacher_rasp(id_, params=params)
@app.task(name='deferred.unlink_non_used_subs')
def unlink_non_used_subs():
    """Delete subscriptions with no subscribers plus their cached lessons.

    Returns the counts of removed lessons and subscriptions.
    """
    with StudiesStandalone() as s, \
            UserStandalone() as u:
        unused = u.get_unused_subscriptions()
        lessons_rem = s.remove_lessons_by_subscriptions([un['_id'] for un in unused])
        subs_rem = u.delete_unused_subscriptions()
    return {'lessons': lessons_rem, 'subscriptions': subs_rem}
@app.task(name='deferred.purge_subscription_timeouts')
def purge_subscription_timeouts():
    """Clear subscription timeout bookkeeping (delegates to the user model).

    Returns the raw MongoDB operation result.
    """
    with UserStandalone() as u:
        return u.purge_subscription_timeouts().raw_result
@app.task(name='deferred.notify_users')
def notify_users():
    """Work in progress: emit upcoming lessons for users with notify enabled.

    Currently only prints the next three days of lessons per subscription;
    no messages are actually delivered yet.
    """
    with StudiesStandalone() as s, \
            UserStandalone() as u:
        for user_sub_set in u.get_all_users_subscription_settings():
            for sub, setting in user_sub_set.items():
                if setting['notify']:
                    print(s.get_lessons_by_subscription_in_range(sub, datetime.now(), datetime.now()+timedelta(days=3)))
    # sub, settings = list(sub_set.items())
    #
    # print(sub)
    # print(type(settings))
    # if settings['notify']:
    #     print(s.get_lessons_by_subscription_in_time(sub, datetime.now(), timedelta(days=10)))
| {"/worker/app/deferred.py": ["/worker/app/collection.py"], "/raspisator/app/handlers/inline.py": ["/raspisator/app/handlers/core.py", "/raspisator/app/markups.py", "/raspisator/app/templates.py"], "/raspisator/app/handlers/__init__.py": ["/raspisator/app/handlers/command.py", "/raspisator/app/handlers/aliases.py", "/raspisator/app/handlers/inline.py"], "/worker/app/__init__.py": ["/worker/app/deferred.py"], "/raspisator/app/markups.py": ["/raspisator/app/templates.py"], "/raspisator/app/chains.py": ["/raspisator/app/templates.py"], "/raspisator/app/handlers/command.py": ["/raspisator/app/handlers/core.py", "/raspisator/app/markups.py", "/raspisator/app/templates.py", "/raspisator/app/chains.py", "/raspisator/app/dialogs.py"], "/raspisator/app/__init__.py": ["/raspisator/app/bot.py"], "/raspisator/app/handlers/aliases.py": ["/raspisator/app/handlers/command.py"], "/raspisator/app/bot.py": ["/raspisator/app/worker.py", "/raspisator/app/templates.py", "/raspisator/app/handlers/__init__.py", "/raspisator/app/cache.py", "/raspisator/app/markups.py"], "/raspisator/app/dialogs.py": ["/raspisator/app/templates.py", "/raspisator/app/markups.py", "/raspisator/app/chains.py", "/raspisator/app/worker.py"]} |
69,653 | dokzlo13/polyrasp | refs/heads/master | /worker/app/collection.py |
import re
import json
import requests
from lxml import etree
TIMEOUT = 5
def parse_react_init(element):
    """Extract the JSON assigned to window.__INITIAL_STATE__ in a script tag.

    *element* is expected to expose the script text via ``.text``. Returns
    the decoded object, or None when *element* is None, has no text, or the
    text does not contain the expected assignment (the original crashed
    with AttributeError on unmatched markup).
    """
    if element is None:
        return None
    data = element.text
    if data is None:
        return None
    data = data.replace('\n', '')
    match = re.match(r' window\.__INITIAL_STATE__ = (.*);', data)
    if match is None:
        return None
    return json.loads(match.group(1))
def collect_element_from_page(page, xpath, params=None, retries=5):
    """Download *page* and return the first node matching *xpath*.

    Retries up to *retries* times on request timeouts. Returns None when
    the page cannot be fetched, answers with an error status, or contains
    no matching element.
    """
    url = page  # keep the URL; the original shadowed it with the parsed tree
    while retries > 0:
        try:
            response = requests.get(url, params=params, timeout=TIMEOUT)
        except requests.Timeout:
            print('Request timeout for {0}'.format(url))
            retries -= 1
        else:
            if not response.ok:
                return None
            tree = etree.HTML(response.text)
            matches = tree.xpath(xpath)
            if len(matches) > 0:
                return matches[0]
            # Page parsed fine but the element is absent. Retrying the same
            # URL cannot help; the original fell through here WITHOUT
            # decrementing retries and looped forever.
            return None
    return None
def collect_json(page, xpath, params=None, retries=5):
    """Fetch *page*, locate the script node at *xpath*, and decode its
    window.__INITIAL_STATE__ payload (None on any failure)."""
    script_node = collect_element_from_page(page, xpath, params, retries)
    return parse_react_init(script_node)
def collect_faculties():
    """Return the faculty list scraped from the timetable front page.

    Returns None implicitly when the page or payload is unavailable.
    """
    data = collect_json('http://ruz.spbstu.ru/', '/html/body/script[1]')
    if data:
        return data['faculties']['data']
def collect_groups(faculty_id):
    """Return the group list for one faculty, or None on failure."""
    data = collect_json('http://ruz.spbstu.ru/faculty/{0}/groups'.format(faculty_id), '/html/body/script[1]')
    if data:
        return data['groups']['data'][str(faculty_id)]
def collect_rasp(faculty_id, group_id, params=None):
    """Return the lessons payload for one group, or None on failure.

    *params* may carry e.g. {'date': 'YYYY-MM-DD'} to select a week
    (see collect_lessons_data in the worker's deferred module).
    """
    data = collect_json('http://ruz.spbstu.ru/faculty/{0}/groups/{1}'.format(faculty_id, group_id),
                        '/html/body/script[1]', params=params)
    # pprint(data)
    if data:
        return data['lessons']['data'][str(group_id)]
def get_teachers(query):
    """Search teachers by name on the site; returns the result list or None."""
    data = collect_json('http://ruz.spbstu.ru/search/teacher?', '/html/body/script[1]', params={'q': query})
    if data:
        return data['searchTeacher']['data']
def get_teacher_rasp(teacher_id, params=None):
    """Return the schedule payload for one teacher, or None on failure."""
    teacher_id = str(teacher_id)
    data = collect_json('http://ruz.spbstu.ru/teachers/' + teacher_id, '/html/body/script[1]', params=params)
    if data:
        return data['teacherSchedule']['data'][teacher_id]
| {"/worker/app/deferred.py": ["/worker/app/collection.py"], "/raspisator/app/handlers/inline.py": ["/raspisator/app/handlers/core.py", "/raspisator/app/markups.py", "/raspisator/app/templates.py"], "/raspisator/app/handlers/__init__.py": ["/raspisator/app/handlers/command.py", "/raspisator/app/handlers/aliases.py", "/raspisator/app/handlers/inline.py"], "/worker/app/__init__.py": ["/worker/app/deferred.py"], "/raspisator/app/markups.py": ["/raspisator/app/templates.py"], "/raspisator/app/chains.py": ["/raspisator/app/templates.py"], "/raspisator/app/handlers/command.py": ["/raspisator/app/handlers/core.py", "/raspisator/app/markups.py", "/raspisator/app/templates.py", "/raspisator/app/chains.py", "/raspisator/app/dialogs.py"], "/raspisator/app/__init__.py": ["/raspisator/app/bot.py"], "/raspisator/app/handlers/aliases.py": ["/raspisator/app/handlers/command.py"], "/raspisator/app/bot.py": ["/raspisator/app/worker.py", "/raspisator/app/templates.py", "/raspisator/app/handlers/__init__.py", "/raspisator/app/cache.py", "/raspisator/app/markups.py"], "/raspisator/app/dialogs.py": ["/raspisator/app/templates.py", "/raspisator/app/markups.py", "/raspisator/app/chains.py", "/raspisator/app/worker.py"]} |
69,654 | dokzlo13/polyrasp | refs/heads/master | /raspisator/app/handlers/inline.py |
import inspect
from datetime import datetime
from .core import HandleMiddleware
from ..markups import create_group_settings_markup, gen_groups_settings_markup, \
create_week_inline, create_month_back_inline, create_calendar_inline, \
gen_groups_choice_markup
from ..templates import selected_group_message, lessons_template, ParseMode, Messages
from ..shared.timeworks import next_weekday, last_weekday, next_month, last_month
class InlineParser(HandleMiddleware):
    """Dispatch inline-keyboard callbacks by name prefix.

    Callback data has the shape "<prefix>-<method>-<arg1>-<arg2>...".
    __call__ strips the subclass prefix, finds the public method whose name
    matches the next segment, and invokes it with the remaining
    '-'-separated parts as positional string arguments.
    """
    __prefix__ = None

    def _add_handlers(self):
        # The base parser registers nothing; routing happens in __call__.
        pass

    def __call__(self, call):
        # Idiom fix (== None -> is None) and typo fix ("Wrnog" -> "Wrong").
        if self.__prefix__ is None:
            raise ValueError('Wrong prefix for inline parser')
        call.data = call.data[len(self.__prefix__) + 1:]
        for method in inspect.getmembers(self, predicate=inspect.ismethod):
            if method[0].startswith('_'):
                continue
            if not call.data:
                return
            if call.data.startswith(method[0]):
                args = call.data[len(method[0]) + 1:].split('-')
                # print('User "{0}" call inline "{1}" args={2}'.format(call.from_user.id, self.__prefix__+'-'+method[0], args))
                return method[1](call, *args)

    def _get_user_lessons_by_date(self, uid, date, markup=True):
        """Render the lessons text for *uid*'s current group on *date*."""
        lessons = []
        sub_id = self.cache.get_user_curr_gr(uid)
        for sub in self.u.get_subscriptions(tel_user=uid, sub_id=sub_id):
            lessons.append(self.s.get_lessons_in_day(sub["id"], date))
        if all([lesson == [] for lesson in lessons]):
            return Messages.no_schedule_on_date
        # Return the first non-empty day's rendering.
        for lesson in lessons:
            if lesson == []:
                continue
            return lessons_template(lesson, markup)

    def same_message(self, remote, uid, date):
        """True when *remote* equals the freshly rendered lessons text
        (compared word-by-word, ignoring whitespace differences)."""
        local = self._get_user_lessons_by_date(uid, date, markup=False)
        return local.split() == remote.split()
class SettingsInline(InlineParser):
__prefix__ = 'settings'
def subscription(self, call, *args):
sub_id = args[0]
sub, info = self.u.get_user_subscription_settings(call.from_user.id, sub_id)
markup = create_group_settings_markup(sub['name'], sub_id, info)
self.bot.edit_message_text(chat_id=call.message.chat.id, message_id=call.message.message_id,
reply_markup=markup, text="Управление подпиской")
def push(self, call, *args):
sub_id = call.data[5:]
sub, info = self.u.change_notification_state(call.from_user.id, sub_id)
markup = create_group_settings_markup(sub['name'], sub_id, info)
self.bot.edit_message_text(chat_id=call.message.chat.id, message_id=call.message.message_id,
reply_markup=markup, text="Управление подпиской")
def unsub(self, call, *args):
self.u.delete_subscription(call.from_user.id, args[0])
subs = list(self.u.get_subscriptions(tel_user=call.from_user.id))
self.bot.answer_callback_query(call.id, text=Messages.removed_group())
self.bot.edit_message_text(chat_id=call.message.chat.id, message_id=call.message.message_id,
reply_markup=gen_groups_settings_markup(subs), text=Messages.please_select_group)
def groupinfo(self, call, *args):
sub_id = args[0]
sub, info = self.u.get_user_subscription_settings(call.from_user.id, sub_id)
markup = create_group_settings_markup(sub['name'], str(sub['_id']), info)
text = selected_group_message(sub)
self.bot.edit_message_text(chat_id=call.message.chat.id, message_id=call.message.message_id,
reply_markup=markup, text=text, parse_mode=ParseMode.MARKDOWN)
def groupdefault(self, call, *args):
sub_id = args[0]
if self.u.get_user_default_group(call.from_user.id) == sub_id:
self.bot.answer_callback_query(call.id, text=Messages.already_default_group)
return
else:
self.u.set_user_default_group(call.from_user.id, sub_id)
sub, info = self.u.get_user_subscription_settings(call.from_user.id, sub_id)
markup = create_group_settings_markup(sub['name'], sub_id, info)
self.bot.answer_callback_query(call.id, text=Messages.setted_default_group)
self.bot.edit_message_text(chat_id=call.message.chat.id, message_id=call.message.message_id,
reply_markup=markup, text="Управление подпиской")
def back(self, call, *args):
    """Return to the list of the user's subscribed groups."""
    user_id = call.from_user.id
    groups = list(self.u.get_subscriptions(tel_user=user_id))
    self.bot.edit_message_text(chat_id=call.message.chat.id,
                               message_id=call.message.message_id,
                               reply_markup=gen_groups_settings_markup(groups),
                               text=Messages.please_select_group)
class DialogClose(InlineParser):
    """Inline handler (callback prefix ``dialog``) that closes a dialog by
    deleting the message it lives in."""
    __prefix__ = 'dialog'

    def close(self, call, *args):
        """Delete the message the pressed inline button is attached to."""
        message = call.message
        self.bot.delete_message(chat_id=message.chat.id, message_id=message.message_id)
class WeekSwithcer(InlineParser):
    """Inline handlers for the week-view keyboard (callback prefix ``week``).

    Relies on ``same_message`` and ``_get_user_lessons_by_date`` provided by
    the ``InlineParser`` base class (defined elsewhere in this file), and on
    the Redis-backed ``cache`` for the week currently shown to each user.
    """
    __prefix__ = 'week'

    def _create_week_inline(self, uid, date):
        # Build the week keyboard for the user's currently selected group.
        gr = self.cache.get_user_curr_gr(uid)
        sub, _ = self.u.get_user_subscription_settings(uid, gr)
        week_markup = create_week_inline(date, sub['name'])
        return week_markup

    def respond_mock(self, call, markup):
        # if call.message.text != Messages.select_date:
        # Redraw the date-picker prompt with the given keyboard, then
        # acknowledge the callback so the client stops its spinner.
        self.bot.edit_message_text(Messages.select_date, call.from_user.id, call.message.message_id,
                                   reply_markup=markup)
        self.bot.answer_callback_query(call.id, text="")

    def current(self, call, *args):
        # Re-show the keyboard for the week stored in the cache.
        # NOTE(review): saved_date may be None if the cache was never
        # primed -- confirm callers always set it first.
        saved_date = self.cache.get_user_week(call.from_user.id)
        markup = self._create_week_inline(call.from_user.id, saved_date)
        self.respond_mock(call, markup)

    def next(self, call, *args):
        # Advance the cached week to the following Monday and redraw.
        saved_date = self.cache.get_user_week(call.from_user.id)
        if (saved_date is not None):
            next_w = next_weekday(saved_date, 0)
            self.cache.set_user_week(call.from_user.id, next_w)
            markup = self._create_week_inline(call.from_user.id, next_w)
            self.respond_mock(call, markup)

    def previous(self, call, *args):
        # Step the cached week back to the preceding Monday and redraw.
        saved_date = self.cache.get_user_week(call.from_user.id)
        if (saved_date is not None):
            last_w = last_weekday(saved_date, 0)
            self.cache.set_user_week(call.from_user.id, last_w)
            markup = self._create_week_inline(call.from_user.id, last_w)
            self.respond_mock(call, markup)

    def day(self, call, *args):
        # Show lessons for the day encoded in the callback data
        # (``YYYY.MM.DD``), unless the message already shows them.
        uid = call.from_user.id
        date = datetime.strptime(args[0], "%Y.%m.%d")
        saved_date = self.cache.get_user_week(uid)
        if not self.same_message(call.message.text, uid, date):
            self.bot.edit_message_text(self._get_user_lessons_by_date(uid, date),
                                       uid, call.message.message_id,
                                       reply_markup=self._create_week_inline(uid, saved_date),
                                       parse_mode=ParseMode.MARKDOWN,
                                       )
        self.bot.answer_callback_query(call.id, text="")
class CalendarDialog(InlineParser):
    """Inline handlers for the month-calendar keyboard (prefix ``calendar``).

    The cache stores the (year, month) pair currently displayed per user.
    """
    __prefix__ = 'calendar'

    def _create_calendar_inline(self, uid, shown):
        # `shown` is the cached (year, month) pair currently displayed.
        gr = self.cache.get_user_curr_gr(uid)
        sub, _ = self.u.get_user_subscription_settings(uid, gr)
        cal_markup = create_calendar_inline(*shown, sub['name'])
        return cal_markup

    def current(self, call, *args):
        # Redraw the calendar for the cached month.
        # NOTE(review): `shown` may be None if the cache was never primed --
        # confirm callers always set it first.
        shown = self.cache.get_user_cal(call.from_user.id)
        markup = self._create_calendar_inline(call.from_user.id, shown)
        self.respond_mock(call, markup)

    def respond_mock(self, call, markup):
        # if call.message.text != Messages.select_date:
        # Redraw the date-picker prompt and acknowledge the callback.
        self.bot.edit_message_text(Messages.select_date, call.from_user.id, call.message.message_id,
                                   reply_markup=markup)
        self.bot.answer_callback_query(call.id, text="")

    def next(self, call, *args):
        # Move the cached month one forward and redraw.
        saved_date = self.cache.get_user_cal(call.from_user.id)
        if (saved_date is not None):
            next_m = next_month(*saved_date)
            self.cache.set_user_cal(call.from_user.id, next_m)
            markup = self._create_calendar_inline(call.from_user.id, next_m)
            self.respond_mock(call, markup)

    def previous(self, call, *args):
        # Move the cached month one back and redraw.
        saved_date = self.cache.get_user_cal(call.from_user.id)
        if (saved_date is not None):
            last_m = last_month(*saved_date)
            self.cache.set_user_cal(call.from_user.id, last_m)
            markup = self._create_calendar_inline(call.from_user.id, last_m)
            self.respond_mock(call, markup)

    def day(self, call, *args):
        # Show lessons for the clicked day of the cached (year, month).
        saved_date = self.cache.get_user_cal(call.from_user.id)
        if (saved_date is not None):
            uid = call.from_user.id
            day = args[0]
            date = datetime(int(saved_date[0]), int(saved_date[1]), int(day), 0, 0, 0)
            if not self.same_message(call.message.text, uid, date):
                self.bot.edit_message_text(self._get_user_lessons_by_date(uid, date),
                                           uid, call.message.message_id,
                                           reply_markup=create_month_back_inline(date),
                                           parse_mode=ParseMode.MARKDOWN)
            self.bot.answer_callback_query(call.id, text="")
class CurrrentGroupSwitcher(InlineParser):
    """Inline handlers (prefix ``change_group``) letting the user pick which
    subscribed group the week/calendar views display."""
    __prefix__ = 'change_group'

    def _create_changegroup_markup(self, uid, *back_to):
        # `back_to` parts are re-joined into the callback data of the back
        # button (e.g. ("week", "current") -> "week-current").
        back_to = '-'.join(back_to)
        subs = self.u.get_user_subscription_settings(uid)
        cached = self.cache.get_user_curr_gr(uid)
        markup = gen_groups_choice_markup(subs, back_to, cached)
        return markup

    def _respond_mock(self, call, markup):
        # Show the group-choice prompt and acknowledge the callback.
        self.bot.edit_message_text(Messages.please_select_current_group, call.from_user.id, call.message.message_id,
                                   reply_markup=markup)
        self.bot.answer_callback_query(call.id, text="")

    def init(self, call, *args):
        # Entry point: the remaining callback parts name the view to return to.
        self._respond_mock(call, self._create_changegroup_markup(call.from_user.id, *args))

    def select(self, call, *args):
        # args[0] is the chosen group id; the rest is the back-target.
        to_select = args[0]
        curr = self.cache.get_user_curr_gr(call.from_user.id)
        if curr == to_select:
            self.bot.answer_callback_query(call.id, text=Messages.already_current_group)
            return
        self.cache.set_user_curr_gr(call.from_user.id, to_select)
        self.bot.answer_callback_query(call.id, text=Messages.group_select_succeed)
        self._respond_mock(call, self._create_changegroup_markup(call.from_user.id, *args[1:]))
class InlineHandlers(HandleMiddleware):
    """Registers every inline (callback-query) parser group on the bot."""

    def __init__(self, bot, usersmodel=None, studiesmodel=None, celery=None, cache=None, *, debug=False):
        # Keep the shared services so each parser can be constructed with
        # the same context in _add_handler().
        self.context = (bot, usersmodel, studiesmodel, celery, cache)
        super().__init__(bot, usersmodel, studiesmodel, celery, cache, debug=debug)

    @staticmethod
    def alias_filter(cls):
        # Build a predicate matching callback data by the parser's prefix.
        def fun(call):
            return call.data.startswith(cls.__prefix__)
        return fun

    def _add_handler(self, parser, **kwargs):
        # NOTE(review): relies on telebot's private _build_handler_dict API --
        # confirm compatibility when upgrading the library. `kwargs` is
        # currently unused.
        handler_dict = self.bot._build_handler_dict(parser(*self.context), func=self.alias_filter(parser))
        self.bot.add_callback_query_handler(handler_dict)

    def _add_handlers(self):
        # Register all inline parser groups (called once by the base __init__).
        self._add_handler(DialogClose)
        self._add_handler(SettingsInline)
        self._add_handler(WeekSwithcer)
        self._add_handler(CalendarDialog)
        self._add_handler(CurrrentGroupSwitcher)
| {"/worker/app/deferred.py": ["/worker/app/collection.py"], "/raspisator/app/handlers/inline.py": ["/raspisator/app/handlers/core.py", "/raspisator/app/markups.py", "/raspisator/app/templates.py"], "/raspisator/app/handlers/__init__.py": ["/raspisator/app/handlers/command.py", "/raspisator/app/handlers/aliases.py", "/raspisator/app/handlers/inline.py"], "/worker/app/__init__.py": ["/worker/app/deferred.py"], "/raspisator/app/markups.py": ["/raspisator/app/templates.py"], "/raspisator/app/chains.py": ["/raspisator/app/templates.py"], "/raspisator/app/handlers/command.py": ["/raspisator/app/handlers/core.py", "/raspisator/app/markups.py", "/raspisator/app/templates.py", "/raspisator/app/chains.py", "/raspisator/app/dialogs.py"], "/raspisator/app/__init__.py": ["/raspisator/app/bot.py"], "/raspisator/app/handlers/aliases.py": ["/raspisator/app/handlers/command.py"], "/raspisator/app/bot.py": ["/raspisator/app/worker.py", "/raspisator/app/templates.py", "/raspisator/app/handlers/__init__.py", "/raspisator/app/cache.py", "/raspisator/app/markups.py"], "/raspisator/app/dialogs.py": ["/raspisator/app/templates.py", "/raspisator/app/markups.py", "/raspisator/app/chains.py", "/raspisator/app/worker.py"]} |
69,655 | dokzlo13/polyrasp | refs/heads/master | /raspisator/app/handlers/__init__.py | from .command import CommandHandlers
from .aliases import CommandsAliases
from .inline import InlineHandlers | {"/worker/app/deferred.py": ["/worker/app/collection.py"], "/raspisator/app/handlers/inline.py": ["/raspisator/app/handlers/core.py", "/raspisator/app/markups.py", "/raspisator/app/templates.py"], "/raspisator/app/handlers/__init__.py": ["/raspisator/app/handlers/command.py", "/raspisator/app/handlers/aliases.py", "/raspisator/app/handlers/inline.py"], "/worker/app/__init__.py": ["/worker/app/deferred.py"], "/raspisator/app/markups.py": ["/raspisator/app/templates.py"], "/raspisator/app/chains.py": ["/raspisator/app/templates.py"], "/raspisator/app/handlers/command.py": ["/raspisator/app/handlers/core.py", "/raspisator/app/markups.py", "/raspisator/app/templates.py", "/raspisator/app/chains.py", "/raspisator/app/dialogs.py"], "/raspisator/app/__init__.py": ["/raspisator/app/bot.py"], "/raspisator/app/handlers/aliases.py": ["/raspisator/app/handlers/command.py"], "/raspisator/app/bot.py": ["/raspisator/app/worker.py", "/raspisator/app/templates.py", "/raspisator/app/handlers/__init__.py", "/raspisator/app/cache.py", "/raspisator/app/markups.py"], "/raspisator/app/dialogs.py": ["/raspisator/app/templates.py", "/raspisator/app/markups.py", "/raspisator/app/chains.py", "/raspisator/app/worker.py"]} |
69,656 | dokzlo13/polyrasp | refs/heads/master | /raspisator/app/templates.py | from jinja2 import Template
from emoji import emojize
def emoj(string: str) -> str:
    """Expand ``:alias:`` emoji shortcodes in *string* into unicode emoji."""
    rendered = emojize(string, use_aliases=True)
    return rendered
# Map Russian menu labels to the internal codes used by the schedule backend.
kind_mapper = {'Бакалавриат': 0, 'Магистратура': 1, 'Специалитет': 2}
type_mapper = {'Очная': 'common', 'Заочная': 'distance', 'Очно-Заочная': 'evening'}
level_mapper = {'1 Курс': 1, '2 Курс': 2, '3 Курс': 3, '4 Курс': 4, '5 Курс': 5, '6 Курс': 6}
# Confirmation button for saving a chosen group.
group_checkout_mapper = {emoj(':white_check_mark: Сохранить группу'): 1}
# Main-menu reply-keyboard button labels, keyed by action id.
main_menu = {'nearest': emoj(':fire: Скоро'),
             'week': emoj(':mega: Неделя'),
             'cal': emoj(':calendar: Месяц'),
             'settings': emoj(':wrench: Настройки'),
             'subs': emoj(':books: Группы'),
             'renew': emoj(':arrows_counterclockwise: Обновить'),
             'plan': emoj(':mag: Поиск'),
             }
# Buttons for the groups sub-menu.
groups_menu = {
    'add': emoj(':pencil: Добавить группу'),
    'groupset': emoj(':wrench: Настроить группы')
}
# Buttons for the search sub-menu.
search_menu = {
    'teacher': emoj(':ok_woman: Поиск по преподавателю'),
}
main_menu_button = emoj(':house: Главное меню')
back_button = emoj(':arrow_backward: Назад')
# Markdown body of the welcome message (emoji aliases expanded in Messages.welcome).
_main_menu_msg = "Добро пожаловать!\n" \
                 ":fire: - *Ближайшие пары*\n" \
                 ":mega: - *Расписание на неделю*\n" \
                 ":calendar: - *Расписание на месяц*\n\n" \
                 ":mag: - *Поиск расписаний*\n" \
                 ":books: - *Управление Вашими группами*\n" \
                 ":arrows_counterclockwise: - *Обновить расписание*\n"
class Messages:
    """User-facing message texts and small text helpers (Russian UI strings)."""
    no_schedule_on_date = "Извините, расписаний для этого дня не найдено!"
    select_date = "Пожалуста, выберите день:"
    no_schedule = "Извините, активных расписаний для Вас не найдено!\n Попробуйте добавить группу /add"
    faculties_unaviable = "Извините, на данный момент нет информации о расписаниях, попробуйте позже!"
    schedule_will_be_updated = '*Информация о вашем расписании будет обновлена!*'
    welcome = emoj(_main_menu_msg)
    what_to_do = "*Что необходимо сделать?*"
    hello = "Привет! Проверь список своих подписок. Для изменения настроек используй /subs"
    settings = "Настройки групп"
    please_select_group = "Пожалуйста, выберите группу"
    please_select_current_group = "Пожалуйста, выберите группу для получения информации:"
    already_default_group = emoj(":white_check_mark: Группа уже по-умолчанию")
    already_current_group = emoj(":white_check_mark: Группа уже выбрана")
    setted_default_group = emoj(":white_check_mark: Группа выбрана по-умолчанию")
    group_select_succeed = emoj(":white_check_mark: Группа для показа информации выбрана")
    # strftime patterns for human-readable dates.
    time_template = "%A, %d %B %Y"
    teacher_time_template = emoj(":calendar: %A, %d %B %Y")

    @staticmethod
    def teacher_date_templ(date):
        # Calendar-emoji prefixed date line for teacher search results.
        return emoj(":calendar: {0}".format(date.strftime(Messages.time_template)))

    @staticmethod
    def schedule_for(date):
        # Bold "schedule for <date>" heading (Telegram Markdown).
        return "*Расписание на {0}:*".format(date.strftime(Messages.time_template))

    @staticmethod
    def removed_group(removed_group=None):
        # Confirmation shown after unsubscribing; names the group when known.
        if removed_group:
            return emoj(':no_entry_sign: Группа {0} удалена из ваших подписок!'.format(removed_group))
        else:
            return emoj(':no_entry_sign: Группа удалена из ваших подписок!')
class ParseMode(object):
    """Telegram message parse-mode constants."""

    # Markdown-formatted message text.
    MARKDOWN = 'Markdown'
    # HTML-formatted message text.
    HTML = 'HTML'
def get_teacher_short(teacher_data):
    """Return ``"First M. L."`` (first name plus middle/last-name initials)
    for a teacher record, or an empty string for a falsy record."""
    if not teacher_data:
        return ''
    return "{first_name} {middle_name[0]}. {last_name[0]}.".format(**teacher_data)
def lessons_template(data, markup=True):
    """Render a list of lesson records into a human-readable message.

    *data* is a list of lesson dicts with ``time_start``/``time_end``
    datetimes, ``subject``, ``typeObj``, ``auditories`` and optional
    ``groups``, ``teachers``, ``additional_info``.  When *markup* is true
    the text uses Telegram Markdown (bold/italic).  Emoji aliases are
    expanded at the end via ``emoj``.
    """
    text = ""
    # TODO: Add groups to text, if group list different
    # Header: the date of the first lesson (assumes all lessons share a day).
    text += "{% if data[0] %}\n:calendar: "
    text += "*{{ data[0]['time_start'].strftime('%A, %d %B') }}* \n" if markup \
        else "{{ data[0]['time_start'].strftime('%A, %d %B') }} \n"
    text += "{% endif %}"
    # Per-lesson body: groups, extra info, subject, type, time and location.
    text += "{% for lesson in data %}"
    text += "{% if lesson['groups'] %}\n:two_men_holding_hands: {% endif %}"
    text += "{% for group in lesson['groups']%}"
    text += "{% if group['name'] %}"
    text += "*{{ group['name'] }}*, " if markup else "{{ group['name'] }}, "
    text += "{% endif %}"
    text += "{% endfor %}"
    text += "{% if lesson['additional_info'] %}\n:information_source: {{lesson['additional_info']}}{% endif %}"
    text += "\n:pencil: *{{ lesson['subject']}}*" if markup else "\n:pencil: {{ lesson['subject']}}"
    text += "\n:mag_right: _{{ lesson['typeObj']['name'] }}_ " if markup else "\n:mag_right: {{ lesson['typeObj']['name'] }} "
    text+= ":clock10: {{ lesson['time_start'].strftime('%H:%M') }}-{{lesson['time_end'].strftime('%H:%M')}}"
    text += "\n:school: {{ lesson['auditories'][0]['building']['abbr'] }}, {{ lesson['auditories'][0]['name'] }}"
    text+= "{% if lesson['teachers'] %} ({{ short_fio(lesson['teachers'][0]) }}){% endif %}"
    # text += "{{ day['addr'] }}, {{ day['room'] }} ({{ day['teacher'] }})\n\n"
    text += "\n{% endfor %}"
    t = Template(text)
    # Expose the teacher-name shortener inside the template.
    t.globals['short_fio'] = get_teacher_short
    message = emoj(t.render(data=data))
    return message
def short_group(sub):
    """Render a one-line emoji summary (kind / type / level / name) of a
    subscription record."""
    data = sub.copy()
    # The mappers translate labels -> codes; invert them to display labels.
    kind_names = {code: label for label, code in kind_mapper.items()}
    type_names = {code: label for label, code in type_mapper.items()}
    data['type'] = type_names.get(data['type'])
    data['kind'] = kind_names.get(data['kind'])
    template = Template(
        "{% if data['kind'] %}:mortar_board: {{ data['kind'] }} {% endif %}"
        "{% if data['type'] %}:pencil2:{{ data['type'] }} {% endif %}"
        ":books: {{ data['level']}} "
        ":school_satchel: {{ data['name'] }}"
    )
    return emojize(template.render(data=data), use_aliases=True)
def selected_group_message(data, facult=None, use_intro=True):
    """Render a multi-line Markdown description of a group record.

    *data* is a group/subscription dict (``name``, ``level`` and optionally
    ``spec``, ``kind``, ``type``); *facult* is an optional faculty dict with
    ``name``/``abbr``.  When *use_intro* is true the text is prefixed with a
    "you selected" header.
    """
    data = data.copy()
    if facult:
        facult = facult.copy()
    # Invert the label->code mappers back to display labels.
    km = dict((v, k) for k, v in kind_mapper.items())
    tm = dict((v, k) for k, v in type_mapper.items())
    data['type'] = tm.get(data['type'])
    data['kind'] = km.get(data['kind'])
    if use_intro:
        text = "*Вы выбрали* :mag:\n"
    else:
        text = '\n'
    # Each optional field contributes a line only when present.
    text += "{% if facult['name'] %}:school: *Институт :*{{facult['name']}} ({{ facult['abbr']}})\n{% endif %}"
    text += "{% if data['spec'] %}:telescope: *Специальность :*\n{{ data['spec'] }}\n{% endif %}"
    text += "{% if data['kind'] %}:mortar_board: *Квалификация :* {{ data['kind'] }}\n{% endif %}"
    text += "{% if data['type'] %}:pencil2: *Форма обучения :* {{ data['type'] }}\n{% endif %}"
    text += ":books: *Курс:* {{ data['level']}}\n"
    text += ":school_satchel: *Группа:* {{ data['name'] }}"
    t = Template(text)
    message = emojize(t.render(data=data, facult=facult), use_aliases=True)
    return message
| {"/worker/app/deferred.py": ["/worker/app/collection.py"], "/raspisator/app/handlers/inline.py": ["/raspisator/app/handlers/core.py", "/raspisator/app/markups.py", "/raspisator/app/templates.py"], "/raspisator/app/handlers/__init__.py": ["/raspisator/app/handlers/command.py", "/raspisator/app/handlers/aliases.py", "/raspisator/app/handlers/inline.py"], "/worker/app/__init__.py": ["/worker/app/deferred.py"], "/raspisator/app/markups.py": ["/raspisator/app/templates.py"], "/raspisator/app/chains.py": ["/raspisator/app/templates.py"], "/raspisator/app/handlers/command.py": ["/raspisator/app/handlers/core.py", "/raspisator/app/markups.py", "/raspisator/app/templates.py", "/raspisator/app/chains.py", "/raspisator/app/dialogs.py"], "/raspisator/app/__init__.py": ["/raspisator/app/bot.py"], "/raspisator/app/handlers/aliases.py": ["/raspisator/app/handlers/command.py"], "/raspisator/app/bot.py": ["/raspisator/app/worker.py", "/raspisator/app/templates.py", "/raspisator/app/handlers/__init__.py", "/raspisator/app/cache.py", "/raspisator/app/markups.py"], "/raspisator/app/dialogs.py": ["/raspisator/app/templates.py", "/raspisator/app/markups.py", "/raspisator/app/chains.py", "/raspisator/app/worker.py"]} |
69,657 | dokzlo13/polyrasp | refs/heads/master | /shared/timeworks.py |
from datetime import datetime, timedelta
def timeout_has_passed(sub, renew_time):
    """Return True when *sub* is due for a schedule refresh.

    Parameters:
        sub: subscription dict; ``sub['upd_time']`` is the last update
            moment, either a ``datetime`` or an ISO-like string
            (``%Y-%m-%dT%H:%M:%S`` with optional ``.%f``).  A missing or
            None value means "never updated", which is immediately due.
        renew_time: refresh interval in seconds.

    Returns:
        bool: True when more than *renew_time* seconds have elapsed.
    """
    date = sub.get('upd_time')
    if date is None:
        return True
    if isinstance(date, str):
        # Stored timestamps may or may not carry a microseconds part.
        try:
            date = datetime.strptime(date, "%Y-%m-%dT%H:%M:%S.%f")
        except ValueError:
            date = datetime.strptime(date, "%Y-%m-%dT%H:%M:%S")
    # Bug fix: timedelta.seconds ignores the .days component (it wraps at
    # 24h), so intervals longer than a day were never detected as expired.
    # total_seconds() is the full elapsed time.
    return (datetime.now() - date).total_seconds() > renew_time
def next_weekday(date, weekday):
    """Return the first date strictly after *date* falling on *weekday*
    (Monday == 0 ... Sunday == 6)."""
    ahead = (weekday - date.weekday()) % 7
    if ahead == 0:
        # Same weekday: jump a full week forward, never return *date* itself.
        ahead = 7
    return date + timedelta(days=ahead)
def last_weekday(date, weekday):
    """Return the last date strictly before *date* falling on *weekday*
    (Monday == 0 ... Sunday == 6)."""
    behind = (weekday - date.weekday()) % -7
    if behind == 0:
        # Same weekday: jump a full week back, never return *date* itself.
        behind = -7
    return date + timedelta(days=behind)
def next_month(year, month):
    """Return the (year, month) pair following the given month."""
    if month == 12:
        return year + 1, 1
    return year, month + 1
def last_month(year, month):
    """Return the (year, month) pair preceding the given month."""
    if month == 1:
        return year - 1, 12
    return year, month - 1
def get_mondays_ahead(amount):
    """Return the next *amount* Mondays after today, nearest first."""
    mondays = []
    cursor = datetime.now()
    for _ in range(amount):
        cursor = next_weekday(cursor, 0)
        mondays.append(cursor)
    return mondays
def get_mondays_behind(amount):
    """Return the previous *amount* Mondays before today, nearest first."""
    mondays = []
    cursor = datetime.now()
    for _ in range(amount):
        cursor = last_weekday(cursor, 0)
        mondays.append(cursor)
    return mondays
def get_weeks_range(depth):
    """Return a sorted list of Mondays spanning *depth* weeks behind
    (plus the current week's Monday) and *depth* weeks ahead of today."""
    # depth + 1 behind so the current week's Monday is included.
    mondays = get_mondays_behind(depth + 1) + get_mondays_ahead(depth)
    return sorted(mondays)
def strf_list(datetime_list):
    """Format every datetime in the list as a ``YYYY-MM-DD`` string."""
    return list(map(lambda dat: dat.strftime('%Y-%m-%d'), datetime_list))
def convert_concat_day_and_lesson(lesson: str, weekday: datetime) -> datetime:
    """Combine a lesson start time (``"HH:MM"``) with the calendar day of
    *weekday* into a single datetime (seconds/microseconds zeroed)."""
    clock = datetime.strptime(lesson, '%H:%M')
    return datetime(weekday.year, weekday.month, weekday.day, clock.hour, clock.minute)
def full_week(date):
    """Yield the seven days of the Monday-based week containing *date*,
    starting with Monday.

    Bug fix: ``isocalendar()`` numbers weekdays from Monday == 1, so the
    offset back to Monday is ``dow - 1`` days.  The original subtracted
    ``dow`` days for any non-Monday date, landing on the previous Sunday
    (e.g. a Wednesday started the week two days back instead of one).
    """
    _, _, dow = date.isocalendar()
    start_date = date - timedelta(days=dow - 1)
    for delta in map(timedelta, range(7)):
        yield start_date + delta
69,658 | dokzlo13/polyrasp | refs/heads/master | /raspisator/app/handlers/core.py |
from abc import ABCMeta, abstractmethod
class HandleMiddleware(metaclass=ABCMeta):
    """Base class wiring a handler group to the shared application services.

    Subclasses implement :meth:`_add_handlers`, which the constructor calls
    once to register their handlers on the bot.  The *debug* flag is
    accepted for subclass use and is not stored here.
    """

    def __init__(self, bot, usersmodel=None, studiesmodel=None, celery=None, cache=None, *, debug=False):
        self.bot = bot
        self.u = usersmodel
        self.s = studiesmodel
        self.broker = celery
        self.cache = cache
        self._add_handlers()

    @abstractmethod
    def _add_handlers(self):
        """Register this handler group's callbacks on the bot."""

    def log_wrapper(self, fun):
        """Wrap a message handler; the logging call is currently disabled."""
        def decor(message):
            # print("User {0} send {1}".format(message.from_user.id, message.text))
            return fun(message)
        return decor
| {"/worker/app/deferred.py": ["/worker/app/collection.py"], "/raspisator/app/handlers/inline.py": ["/raspisator/app/handlers/core.py", "/raspisator/app/markups.py", "/raspisator/app/templates.py"], "/raspisator/app/handlers/__init__.py": ["/raspisator/app/handlers/command.py", "/raspisator/app/handlers/aliases.py", "/raspisator/app/handlers/inline.py"], "/worker/app/__init__.py": ["/worker/app/deferred.py"], "/raspisator/app/markups.py": ["/raspisator/app/templates.py"], "/raspisator/app/chains.py": ["/raspisator/app/templates.py"], "/raspisator/app/handlers/command.py": ["/raspisator/app/handlers/core.py", "/raspisator/app/markups.py", "/raspisator/app/templates.py", "/raspisator/app/chains.py", "/raspisator/app/dialogs.py"], "/raspisator/app/__init__.py": ["/raspisator/app/bot.py"], "/raspisator/app/handlers/aliases.py": ["/raspisator/app/handlers/command.py"], "/raspisator/app/bot.py": ["/raspisator/app/worker.py", "/raspisator/app/templates.py", "/raspisator/app/handlers/__init__.py", "/raspisator/app/cache.py", "/raspisator/app/markups.py"], "/raspisator/app/dialogs.py": ["/raspisator/app/templates.py", "/raspisator/app/markups.py", "/raspisator/app/chains.py", "/raspisator/app/worker.py"]} |
69,659 | dokzlo13/polyrasp | refs/heads/master | /raspisator/run.py |
# Entry point: keep the bot polling forever, restarting after transient errors.
# bot.infinity_polling(timeout=10, none_stop=True)
# bot.polling(timeout=10)
import sys
import time
import traceback

from telebot import logger, apihelper

from app import bot

# bot.polling(timeout=40, interval=0, none_stop=False)
apihelper.CONNECT_TIMEOUT = 15

# updates = bot.get_updates(offset=(bot.last_update_id + 1), timeout=timeout)
# bot.process_new_updates(updates)
while True:
    try:
        bot.polling(timeout=30, interval=1, none_stop=False)
    except KeyboardInterrupt:
        # Graceful shutdown on Ctrl-C.
        bot.stop_bot()
        # Fix: use sys.exit() instead of the interactive-only builtin exit().
        sys.exit(0)
    except Exception as e:
        # Top-level resilience boundary: log the failure, print the
        # traceback and retry after a short pause.
        logger.error("Error polling info: \"%s\"", e)
        traceback.print_exc()
        time.sleep(1)
69,660 | dokzlo13/polyrasp | refs/heads/master | /raspisator/app/cache.py |
from redis import Redis
from datetime import timedelta, datetime
import json
def _week(uid):
return 'week-{0}'.format(uid)
def _cal(uid):
return 'cal-{0}'.format(uid)
def _gr(uid):
return 'group-{0}'.format(uid)
class Cache:
    """Thin Redis-backed store for per-user UI state: the week being
    browsed, the calendar month shown and the currently selected group."""

    def __init__(self, redis: Redis):
        self.r = redis

    def set_user_week(self, user_id, week_monday):
        """Remember the Monday of the week the user is browsing."""
        return self.r.set(_week(user_id), week_monday.strftime("%Y-%m-%d %H:%M"))

    def get_user_week(self, user_id):
        """Return the cached week datetime, or None when absent/unreadable."""
        try:
            raw = self.r.get(_week(user_id))
            if not raw:
                return None
            parsed = datetime.strptime(raw.decode('utf-8'), "%Y-%m-%d %H:%M")
        except TypeError:
            return None
        return parsed

    def set_user_cal(self, user_id, cal):
        """Remember the (year, month) pair shown in the calendar view."""
        return self.r.set(_cal(user_id), json.dumps(cal))

    def get_user_cal(self, user_id):
        """Return the cached calendar selection (as a list), or None."""
        try:
            raw = self.r.get(_cal(user_id))
            if not raw:
                return None
            parsed = json.loads(raw)
        except TypeError:
            return None
        return parsed

    def set_user_curr_gr(self, user_id, group):
        """Remember the user's currently selected group id."""
        return self.r.set(_gr(user_id), group)

    def get_user_curr_gr(self, user_id):
        """Return the cached current-group id as str, or None."""
        try:
            raw = self.r.get(_gr(user_id))
            if not raw:
                return None
            parsed = raw.decode('utf-8')
        except TypeError:
            return None
        return parsed
| {"/worker/app/deferred.py": ["/worker/app/collection.py"], "/raspisator/app/handlers/inline.py": ["/raspisator/app/handlers/core.py", "/raspisator/app/markups.py", "/raspisator/app/templates.py"], "/raspisator/app/handlers/__init__.py": ["/raspisator/app/handlers/command.py", "/raspisator/app/handlers/aliases.py", "/raspisator/app/handlers/inline.py"], "/worker/app/__init__.py": ["/worker/app/deferred.py"], "/raspisator/app/markups.py": ["/raspisator/app/templates.py"], "/raspisator/app/chains.py": ["/raspisator/app/templates.py"], "/raspisator/app/handlers/command.py": ["/raspisator/app/handlers/core.py", "/raspisator/app/markups.py", "/raspisator/app/templates.py", "/raspisator/app/chains.py", "/raspisator/app/dialogs.py"], "/raspisator/app/__init__.py": ["/raspisator/app/bot.py"], "/raspisator/app/handlers/aliases.py": ["/raspisator/app/handlers/command.py"], "/raspisator/app/bot.py": ["/raspisator/app/worker.py", "/raspisator/app/templates.py", "/raspisator/app/handlers/__init__.py", "/raspisator/app/cache.py", "/raspisator/app/markups.py"], "/raspisator/app/dialogs.py": ["/raspisator/app/templates.py", "/raspisator/app/markups.py", "/raspisator/app/chains.py", "/raspisator/app/worker.py"]} |
69,661 | dokzlo13/polyrasp | refs/heads/master | /worker/app/__init__.py | from . import shared
from .deferred import app | {"/worker/app/deferred.py": ["/worker/app/collection.py"], "/raspisator/app/handlers/inline.py": ["/raspisator/app/handlers/core.py", "/raspisator/app/markups.py", "/raspisator/app/templates.py"], "/raspisator/app/handlers/__init__.py": ["/raspisator/app/handlers/command.py", "/raspisator/app/handlers/aliases.py", "/raspisator/app/handlers/inline.py"], "/worker/app/__init__.py": ["/worker/app/deferred.py"], "/raspisator/app/markups.py": ["/raspisator/app/templates.py"], "/raspisator/app/chains.py": ["/raspisator/app/templates.py"], "/raspisator/app/handlers/command.py": ["/raspisator/app/handlers/core.py", "/raspisator/app/markups.py", "/raspisator/app/templates.py", "/raspisator/app/chains.py", "/raspisator/app/dialogs.py"], "/raspisator/app/__init__.py": ["/raspisator/app/bot.py"], "/raspisator/app/handlers/aliases.py": ["/raspisator/app/handlers/command.py"], "/raspisator/app/bot.py": ["/raspisator/app/worker.py", "/raspisator/app/templates.py", "/raspisator/app/handlers/__init__.py", "/raspisator/app/cache.py", "/raspisator/app/markups.py"], "/raspisator/app/dialogs.py": ["/raspisator/app/templates.py", "/raspisator/app/markups.py", "/raspisator/app/chains.py", "/raspisator/app/worker.py"]} |
69,662 | dokzlo13/polyrasp | refs/heads/master | /raspisator/app/markups.py | import inspect
import sys
import calendar
from telebot import types
from .templates import main_menu, emoj, main_menu_button, back_button, search_menu, groups_menu
from .templates import lessons_template, short_group
from .shared.timeworks import full_week
def gen_dict_markup(mapper, back=True):
    """Build a reply keyboard with one button per key of *mapper*, followed
    by the main-menu button and (optionally) a back button."""
    markup = types.ReplyKeyboardMarkup(row_width=1, resize_keyboard=True)
    for label in mapper:
        markup.add(label)
    markup.add(main_menu_button)
    if back:
        markup.add(back_button)
    return markup
def gen_list_markup(list_, key=None, back=True):
    """Build a reply keyboard with one button per item of *list_*.

    When *key* is truthy each item is treated as a mapping and
    ``item[key]`` is used as the button label.  Always appends the
    main-menu button and, when *back* is true, a back button.
    """
    markup = types.ReplyKeyboardMarkup(row_width=1, resize_keyboard=True)
    for item in list_:
        label = item[key] if key else item
        markup.add(str(label))
    markup.add(main_menu_button)
    if back:
        markup.add(back_button)
    return markup
def gen_search_menu_markup():
    """Reply keyboard for the search menu: teacher search plus main menu."""
    teacher_btn = types.KeyboardButton(search_menu['teacher'])
    menu_btn = types.KeyboardButton(main_menu_button)
    markup = types.ReplyKeyboardMarkup(row_width=2, resize_keyboard=True)
    markup.row(teacher_btn)
    markup.row(menu_btn)
    return markup
def gen_main_menu_markup():
    """Reply keyboard for the main menu: schedule views on the first row,
    search/groups/refresh on the second."""
    markup = types.ReplyKeyboardMarkup(row_width=2, resize_keyboard=True)
    top_actions = ('nearest', 'week', 'cal')
    bottom_actions = ('plan', 'subs', 'renew')
    markup.row(*(types.KeyboardButton(main_menu[action]) for action in top_actions))
    markup.row(*(types.KeyboardButton(main_menu[action]) for action in bottom_actions))
    return markup
def gen_inline_groups_markup(subs, lessons):
    """Build inline-query results: one article per subscription whose
    content is the rendering of its nearest lesson (or a fallback note)."""
    results = []
    for sub, lesson in zip(subs, lessons):
        if lesson:
            content = lessons_template([lesson], markup=False)
        else:
            content = 'Нет информации о ближайшей паре'
        article = types.InlineQueryResultArticle(
            str(sub['_id']), short_group(sub),
            types.InputTextMessageContent(content))
        results.append(article)
    return results
def gen_groups_settings_markup(subs):
    """Inline keyboard listing the user's groups (one per row) between a
    header row and a close button.  Accepts a single record or a list."""
    if not isinstance(subs, (list, tuple)):
        subs = [subs]
    markup = types.InlineKeyboardMarkup(row_width=1)
    buttons = [types.InlineKeyboardButton('Ваши группы:', callback_data="settings-ignore")]
    for gr in subs:
        buttons.append(types.InlineKeyboardButton(
            gr['name'], callback_data='settings-subscription-' + str(gr['_id'])))
    buttons.append(types.InlineKeyboardButton('Закрыть', callback_data="dialog-close"))
    markup.add(*buttons)
    return markup
def gen_groups_choice_markup(subs, back_to=None, cached=None):
    """Inline keyboard for picking the "current" group.

    Parameters:
        subs: iterable of ``(group, settings)`` pairs; ``group`` carries
            ``name`` and ``_id``, ``settings`` at least a ``default`` flag.
        back_to: callback data for the back button.
        cached: id of the group currently selected in the cache; when given
            it takes precedence over the per-group ``default`` flag for
            deciding which entry gets the check mark.
    """
    markup = types.InlineKeyboardMarkup(row_width=1)
    buttons = []
    # Renamed the loop variable from `set`, which shadowed the builtin;
    # also collapsed the duplicated check-mark branches.
    for gr, settings in subs:
        if cached:
            selected = str(gr['_id']) == cached
        else:
            selected = settings['default']
        mark = ':white_check_mark:' if selected else ':white_medium_square:'
        name = emoj('{0} {1}'.format(mark, gr['name']))
        buttons.append(types.InlineKeyboardButton(
            name, callback_data='change_group-select-{0}-{1}'.format(str(gr['_id']), back_to)))
    buttons.append(types.InlineKeyboardButton(emoj(":arrow_backward: Назад"), callback_data=str(back_to)))
    markup.add(*buttons)
    return markup
def gen_groups_settings_info():
    """Reply keyboard for the groups section: configure, add, main menu."""
    markup = types.ReplyKeyboardMarkup(row_width=1)
    for label in (groups_menu['groupset'], groups_menu['add'], main_menu_button):
        markup.row(types.KeyboardButton(label))
    return markup
def create_group_settings_markup(name, sub_id, sub_state):
    """Inline keyboard for managing one subscription: delete/info on the
    first row, default-toggle/placeholder on the second, then back."""
    delete_btn = types.InlineKeyboardButton(emoj(':no_entry_sign: Удалить'),
                                            callback_data='settings-unsub-' + sub_id)
    info_btn = types.InlineKeyboardButton(emoj(':information_source: ' + name),
                                          callback_data="settings-groupinfo-" + sub_id)
    # Check mark reflects whether this group is currently the default one.
    default_label = ':white_check_mark: По-умолчанию' if sub_state['default'] \
        else ':white_medium_square: По-умолчанию'
    default_btn = types.InlineKeyboardButton(emoj(default_label),
                                             callback_data='settings-groupdefault-' + sub_id)
    todo_btn = types.InlineKeyboardButton(emoj(":x: В разработке :x:"),
                                          callback_data='settings-back')
    back_btn = types.InlineKeyboardButton(emoj(":arrow_backward: Назад"),
                                          callback_data='settings-back')
    markup = types.InlineKeyboardMarkup(row_width=2)
    markup.row(delete_btn, info_btn)
    markup.row(default_btn, todo_btn)
    markup.row(back_btn)
    return markup
def create_calendar_inline(year, month, current_group=None):
    """Build the month-view inline keyboard: a header row, a weekday row,
    the day grid and a navigation row (prev / close / group switch / next).

    *current_group* is the label for the group-switch button.
    """
    markup = types.InlineKeyboardMarkup()
    #First row - Month and Year
    row=[]
    row.append(types.InlineKeyboardButton(calendar.month_name[month]+" "+str(year),callback_data="ignore"))
    markup.row(*row)
    #Second row - Week Days
    week_days=["Пн","Вт","Ср","Чт","Пт","Сб","Вс"]
    row=[]
    for day in week_days:
        row.append(types.InlineKeyboardButton(day,callback_data="ignore"))
    markup.row(*row)
    my_calendar = calendar.monthcalendar(year, month)
    for week in my_calendar:
        row=[]
        for day in week:
            if(day==0):
                # monthcalendar pads out-of-month cells with 0 -> blank button.
                row.append(types.InlineKeyboardButton(" ",callback_data="ignore"))
            else:
                row.append(types.InlineKeyboardButton(str(day),callback_data="calendar-day-"+str(day)))
        markup.row(*row)
    #Last row - Buttons
    row=[]
    row.append(types.InlineKeyboardButton(emoj(":arrow_backward:"), callback_data="calendar-previous"))
    row.append(types.InlineKeyboardButton("Закрыть",callback_data="dialog-close"))
    row.append(types.InlineKeyboardButton(
        current_group, callback_data="change_group-init-calendar-current"
    ))
    row.append(types.InlineKeyboardButton(emoj(":arrow_forward:"), callback_data="calendar-next"))
    markup.row(*row)
    return markup
def create_month_back_inline(date):
    """Single-row inline keyboard shown under a day's schedule: back to the
    calendar, a date label and a close button."""
    back_btn = types.InlineKeyboardButton(emoj(":arrow_backward:"), callback_data="calendar-current")
    label_btn = types.InlineKeyboardButton(date.strftime("%A, %d %B %Y"), callback_data="ignore")
    close_btn = types.InlineKeyboardButton("Закрыть", callback_data="dialog-close")
    markup = types.InlineKeyboardMarkup()
    markup.row(back_btn, label_btn, close_btn)
    return markup
def create_week_inline(date, current_group=None):
    """Inline keyboard for a week view: one button per weekday, a caption
    row with the week's date range and the active group, plus navigation."""
    current_group = current_group or ''
    markup = types.InlineKeyboardMarkup()
    week = list(full_week(date))
    # Row of weekday shortcuts, each carrying its exact date.
    markup.row(*[
        types.InlineKeyboardButton(day.strftime("%a"), callback_data="week-day-" + day.strftime("%Y.%m.%d"))
        for day in week
    ])
    # Caption row: date range plus the currently selected group.
    caption = '{0} {1}-{2}'.format(emoj(':date:'), week[0].strftime('%d %b'), week[-1].strftime('%d %b'))
    markup.row(
        types.InlineKeyboardButton(caption, callback_data="ignore"),
        types.InlineKeyboardButton(current_group, callback_data="change_group-init-week-current"),
    )
    # Navigation row.
    markup.row(
        types.InlineKeyboardButton(emoj(":arrow_backward:"), callback_data="week-previous"),
        types.InlineKeyboardButton("Закрыть", callback_data="dialog-close"),
        types.InlineKeyboardButton(emoj(":arrow_forward:"), callback_data="week-next"),
    )
    return markup
# Export only the functions defined in this module (not names imported
# from elsewhere) when the module is star-imported.
__all__ = [m[0] for m in inspect.getmembers(sys.modules[__name__], inspect.isfunction)
           if m[1].__module__ == inspect.getmodule(sys.modules[__name__]).__name__]
| {"/worker/app/deferred.py": ["/worker/app/collection.py"], "/raspisator/app/handlers/inline.py": ["/raspisator/app/handlers/core.py", "/raspisator/app/markups.py", "/raspisator/app/templates.py"], "/raspisator/app/handlers/__init__.py": ["/raspisator/app/handlers/command.py", "/raspisator/app/handlers/aliases.py", "/raspisator/app/handlers/inline.py"], "/worker/app/__init__.py": ["/worker/app/deferred.py"], "/raspisator/app/markups.py": ["/raspisator/app/templates.py"], "/raspisator/app/chains.py": ["/raspisator/app/templates.py"], "/raspisator/app/handlers/command.py": ["/raspisator/app/handlers/core.py", "/raspisator/app/markups.py", "/raspisator/app/templates.py", "/raspisator/app/chains.py", "/raspisator/app/dialogs.py"], "/raspisator/app/__init__.py": ["/raspisator/app/bot.py"], "/raspisator/app/handlers/aliases.py": ["/raspisator/app/handlers/command.py"], "/raspisator/app/bot.py": ["/raspisator/app/worker.py", "/raspisator/app/templates.py", "/raspisator/app/handlers/__init__.py", "/raspisator/app/cache.py", "/raspisator/app/markups.py"], "/raspisator/app/dialogs.py": ["/raspisator/app/templates.py", "/raspisator/app/markups.py", "/raspisator/app/chains.py", "/raspisator/app/worker.py"]} |
69,663 | dokzlo13/polyrasp | refs/heads/master | /raspisator/app/worker.py |
import os

from celery import Celery
from redis import Redis

# Connection URLs come from the environment, with local-Redis fallbacks.
# Bug fix: the original CELERY_BROKER_URL line ended with a stray comma,
# turning the value into a 1-tuple instead of the URL string Celery expects.
CELERY_BROKER_URL = os.environ.get('CELERY_BROKER_URL', 'redis://localhost:6379/0')
CELERY_RESULT_BACKEND = os.environ.get('CELERY_RESULT_BACKEND', 'redis://localhost:6379/1')
REDIS_CACHE = os.environ.get('REDIS_CACHE', 'redis://localhost:6379/2')

# Celery client used by the bot process to enqueue deferred worker tasks.
celery = Celery(broker=CELERY_BROKER_URL,
                backend=CELERY_RESULT_BACKEND,
                )

# Shared Redis connection used as the bot-side cache backend.
redis = Redis.from_url(REDIS_CACHE)
69,664 | dokzlo13/polyrasp | refs/heads/master | /raspisator/app/chains.py |
from functools import partial
from telebot import types
from types import FunctionType
from .templates import main_menu_button, back_button
class Retry(Exception):
    """Raised by a dialog step handler to re-ask the current question.

    The step wrapper in :func:`decor` catches it and sends ``str(exc)``
    back to the user as the retry prompt.
    """
    pass
class NoMarkupError(Exception):
    """Raised when the next dialog step has no reply markup to render."""
    pass
def decor(method=None):
    # If called without method, we've been called with optional arguments.
    # We return a decorator with the optional arguments filled in.
    # Next time round we'll be decorating method.
    if method is None:
        return partial(decor)

    class wrapper:
        """Stateful wrapper around a single dialog step handler.

        Instances are chained by :class:`Dialog` (set_next/set_previous) and
        registered as telebot next-step handlers.  The wrapped ``method``
        receives ``(bot, message, **kwargs)`` and returns the kwargs for the
        next step; it may raise :class:`Retry` to re-ask the question.
        """

        def __init__(self, method):
            self.method = method
            self.main_menu = None             # abort/finish handler, injected via set_menu()
            self.bot = None                   # telebot instance, injected via set_bot()
            self.markup = StaticMarkup(None)  # reply markup shown on Retry
            self._next = None                 # next step in the chain
            self._previous = None             # previous step (for the "back" button)
            self.globals = {}                 # kwargs merged into every call
            self.description = method.__doc__  # step docstring doubles as the user prompt

        def __call__(self, message=None, **kwargs):
            # "Back" button: re-send the previous step's prompt and re-register it.
            if message.text == back_button:
                if not self._previous:
                    return self.main_menu(message)
                if self._previous.markup:  # always truthy: set_markup falls back to StaticMarkup(None)
                    self.bot.send_message(message.chat.id, self._previous.description, reply_markup=self._previous.markup.body)
                    self.bot.register_next_step_handler_by_chat_id(message.chat.id, self._previous, **kwargs)
                    return
            if message.text == main_menu_button:
                return self.main_menu(message)
            # 2-nd time submited text\markup
            # if self.markup and self.description:
            #     self.bot.send_message(message.chat.id, self.description, reply_markup=self.markup)
            try:
                # Dialog-wide globals are merged into the per-step kwargs on every call.
                kwargs = {**kwargs, **self.globals}
                kwargs = self.method(self.bot, message, **kwargs)
            except Retry as r:
                # Step rejected the input: prompt again with the same markup.
                print('Retry was raised in "{0}"'.format(self.description))
                self.bot.send_message(message.chat.id, str(r), reply_markup=self.markup.body)
                self.bot.register_next_step_handler_by_chat_id(message.chat.id, self, **kwargs)
                return
            if not self._next:
                # Last step of the chain - fall back to the main menu.
                return self.main_menu(message)
            if not self._next.markup:
                raise NoMarkupError("No markup aviable for step")
            if isinstance(self._next.markup, DynamicMarkup):
                # A step may hand markup to its successor via the special
                # 'next_step_markup' kwarg; otherwise an already-set body is reused.
                next_mkp = kwargs.get('next_step_markup')
                if next_mkp:
                    self._next.markup.set_body(next_mkp)
                    del kwargs['next_step_markup']
                elif self._next.markup.body != None:
                    pass
                else:
                    raise NoMarkupError("No markup aviable for step")
                self.bot.send_message(message.chat.id, self._next.description, reply_markup=self._next.markup.body)
            else:
                # NOTE(review): StaticMarkup subclasses DynamicMarkup, so this
                # branch looks unreachable - confirm before relying on it.
                self.bot.send_message(message.chat.id, 'Ашипка', reply_markup=types.ReplyKeyboardRemove(selective=False))
            self.bot.register_next_step_handler_by_chat_id(message.chat.id, self._next, **kwargs)

        def set_bot(self, bot):
            self.bot = bot

        def set_next(self, handler):
            self._next = handler

        def set_previous(self, handler):
            self._previous = handler

        def set_menu(self, handler):
            self.main_menu = handler

        def set_markup(self, markup):
            # None is normalized so self.markup is always a usable markup holder.
            self.markup = markup if markup != None else StaticMarkup(None)

        def set_globals(self, globals):
            self.globals.update(globals)

    return wrapper(method)
class Dialog:
    """Linear multi-step conversation for telebot.

    Steps are plain functions wrapped by :func:`decor`; each step's return
    value becomes the kwargs of the next step.  ``main`` is the handler the
    dialog falls back to when it finishes or the user aborts.
    """

    def __init__(self, handlers=None, main=None, globals=None):
        """
        :param handlers: optional iterable of step functions, added in order
        :param main: optional main-menu handler (abort/finish target)
        :param globals: dict of kwargs injected into every step call
        """
        self.main = None
        self._chain = []
        self.bot = None
        # Bug fix: globals must be assigned *before* the add_step() loop,
        # because add_step() reads self.globals.  Previously
        # Dialog(handlers=[...], globals={...}) raised AttributeError.
        self.globals = globals or {}
        if handlers:
            for h in handlers:
                self.add_step(h)
        if main:
            self.set_main_handler(main)

    def set_main_handler(self, handler: FunctionType):
        """Set the abort/finish handler and propagate it to existing steps."""
        self.main = handler
        for step in self._chain:
            step.set_menu(handler)

    def add_step(self, step_handler: FunctionType, markup=None):
        """Append a step to the chain and wire its prev/next links."""
        decorated = decor(step_handler)
        if self.globals:
            decorated.set_globals(self.globals)
        if self.main is not None:
            decorated.set_menu(self.main)
        if len(self._chain) > 0:
            decorated.set_previous(self._chain[-1])
        decorated.set_markup(markup)
        self._chain.append(decorated)
        if len(self._chain) > 1:
            self._chain[-2].set_next(self._chain[-1])

    def register_in_bot(self, bot):
        """Inject the bot instance into the dialog and every step."""
        self.bot = bot
        for step in self._chain:
            step.set_bot(bot)

    def start(self, message):
        """Send the first step's prompt and register it as the next handler."""
        self.bot.send_message(message.chat.id, self._chain[0].description, reply_markup=self._chain[0].markup.body)
        self.bot.register_next_step_handler_by_chat_id(message.chat.id, self._chain[0])
class DynamicMarkup:
    """Markup holder whose body can be (re)assigned after creation."""

    def __init__(self):
        # Start empty; a step fills the body in later via set_body().
        self.set_body(None)

    def set_body(self, markup):
        """Replace the stored markup body."""
        self.body = markup
class StaticMarkup(DynamicMarkup):
    """Markup holder initialised once with a fixed body."""

    def __init__(self, body):
        # Assign directly; set_body() is a plain attribute write.
        self.body = body
#
# def handle_init(message):
# d = Dialog()
# d.set_main_handler(handle_main_menu)
# d.add_step(test1_handle)
# d.add_step(test2_handle, gen_list_markup([1, 2, 3]))
# d.add_step(test3_handle, DynamicMarkup())
# d.add_step(test4_handle, gen_list_markup(['Сохранить', 'Отменить']))
# d.register_in_bot(bot)
# return d.chain(message)
| {"/worker/app/deferred.py": ["/worker/app/collection.py"], "/raspisator/app/handlers/inline.py": ["/raspisator/app/handlers/core.py", "/raspisator/app/markups.py", "/raspisator/app/templates.py"], "/raspisator/app/handlers/__init__.py": ["/raspisator/app/handlers/command.py", "/raspisator/app/handlers/aliases.py", "/raspisator/app/handlers/inline.py"], "/worker/app/__init__.py": ["/worker/app/deferred.py"], "/raspisator/app/markups.py": ["/raspisator/app/templates.py"], "/raspisator/app/chains.py": ["/raspisator/app/templates.py"], "/raspisator/app/handlers/command.py": ["/raspisator/app/handlers/core.py", "/raspisator/app/markups.py", "/raspisator/app/templates.py", "/raspisator/app/chains.py", "/raspisator/app/dialogs.py"], "/raspisator/app/__init__.py": ["/raspisator/app/bot.py"], "/raspisator/app/handlers/aliases.py": ["/raspisator/app/handlers/command.py"], "/raspisator/app/bot.py": ["/raspisator/app/worker.py", "/raspisator/app/templates.py", "/raspisator/app/handlers/__init__.py", "/raspisator/app/cache.py", "/raspisator/app/markups.py"], "/raspisator/app/dialogs.py": ["/raspisator/app/templates.py", "/raspisator/app/markups.py", "/raspisator/app/chains.py", "/raspisator/app/worker.py"]} |
69,665 | dokzlo13/polyrasp | refs/heads/master | /raspisator/app/handlers/command.py |
import inspect
from datetime import datetime
from .core import HandleMiddleware
from ..markups import gen_main_menu_markup, gen_groups_settings_info, gen_list_markup, gen_dict_markup,\
gen_search_menu_markup, gen_groups_settings_markup, create_week_inline, create_calendar_inline
from ..templates import Messages, ParseMode, selected_group_message, group_checkout_mapper, lessons_template
from ..chains import Dialog, StaticMarkup, DynamicMarkup
# DIALOGS
from ..dialogs import handle_facultie_group_selection, handle_group_kind, handle_group_type, \
handle_group_level, handle_group, handle_group_commit
from ..dialogs import handle_teacher_name, handle_teacher_selection, handle_teacher_date
# Module-level state (chat -> shown week, presumably); appears unused in
# this module - TODO confirm before removing.
current_shown_weeks = {}
class CommandHandlers(HandleMiddleware):
    """Handlers for the bot's slash-commands.

    Every public ``<name>_handler`` method is registered as command
    ``/<name>`` by ``_add_handlers``.  Collaborators (``self.bot``,
    ``self.u`` user model, ``self.s`` studies model, ``self.broker``,
    ``self.cache``) are presumably injected by ``HandleMiddleware`` -
    confirm in handlers/core.py.
    """

    def _add_handler(self, f, **kwargs):
        # Register f as a raw telebot message handler.
        handler_dict = self.bot._build_handler_dict(f, **kwargs)
        self.bot.add_message_handler(handler_dict)

    def _add_handlers(self):
        # Auto-register every public method as the command of the same
        # name minus the "_handler" suffix.
        for method in inspect.getmembers(self, predicate=inspect.ismethod):
            if method[0].startswith('_'):
                continue
            # print('METHOD', method)
            self._add_handler(self.log_wrapper(method[1]), commands=[method[0].replace('_handler', '')],
                              content_types=['text'],
                              )

    def _init_user(self, message):
        # Ensure the sender exists in the DB; return (db_user_id, subscriptions).
        username = message.from_user.username if message.from_user.username else message.from_user.first_name
        user = self.u.create_or_get_user(message.from_user.id, username)
        subs = list(self.u.get_subscriptions(db_user=user))
        return user, subs

    def start_handler(self, message):
        """/start: greet returning users with their subs, otherwise begin group setup."""
        _, subs = self._init_user(message)
        if subs:
            self.bot.send_message(message.chat.id, text=Messages.hello)
            return self.subs_handler(message)
        else:
            return self.add_handler(message)

    def subs_handler(self, message):
        """/subs: list the user's subscribed groups."""
        _, subs = self._init_user(message)
        if not subs:
            self.bot.send_message(message.chat.id, Messages.no_schedule,
                                  parse_mode=ParseMode.MARKDOWN)
            return
        text = ''
        for gr in subs:
            text += selected_group_message(gr, use_intro=False) + '\n'
        self.bot.send_message(message.chat.id, text=text,
                              parse_mode=ParseMode.MARKDOWN,
                              reply_markup=gen_groups_settings_info())

    def add_handler(self, message):
        """/add: run the multi-step dialog that subscribes the user to a group."""
        faculties = self.s.get_faculties_names()
        if not faculties:
            # Schema not imported yet - trigger the background fetch and bail out.
            self.bot.send_message(message.chat.id, Messages.faculties_unaviable)
            self.broker.send_task('deferred.get_groups_schema')
            return
        d = Dialog(globals={'m': self.s, 'u': self.u})
        d.set_main_handler(self.main_handler)
        d.add_step(handle_facultie_group_selection, markup=StaticMarkup(gen_list_markup(faculties)))
        d.add_step(handle_group_kind, markup=DynamicMarkup())
        d.add_step(handle_group_type, markup=DynamicMarkup())
        d.add_step(handle_group_level, markup=DynamicMarkup())
        d.add_step(handle_group, markup=DynamicMarkup())
        d.add_step(handle_group_commit, markup=StaticMarkup(gen_dict_markup(group_checkout_mapper)))
        d.register_in_bot(self.bot)
        return d.start(message)

    def main_handler(self, message):
        """/main: show the main menu."""
        markup = gen_main_menu_markup()
        self.bot.send_message(message.chat.id, Messages.welcome, reply_markup=markup, parse_mode=ParseMode.MARKDOWN)

    def plan_handler(self, message):
        """/plan: show the schedule-search menu."""
        self.bot.send_message(message.chat.id, Messages.what_to_do,
                              reply_markup=gen_search_menu_markup(), parse_mode=ParseMode.MARKDOWN)

    def nearest_handler(self, message):
        """/nearest: send the next upcoming lesson for every subscription."""
        user, subs = self._init_user(message)
        if not subs:
            self.bot.send_message(message.chat.id, Messages.no_schedule, reply_markup=gen_main_menu_markup())
            return
        lessons = []
        for sub in subs:
            lessons.append(self.s.get_nearest_lesson(sub['id']))
        if not all(lessons):
            # At least one group has no upcoming lesson stored.
            self.bot.send_message(message.chat.id, Messages.no_schedule, reply_markup=gen_main_menu_markup())
            return
        for lesson in lessons:
            msg = lessons_template([lesson])
            self.bot.send_message(message.chat.id, msg, parse_mode=ParseMode.MARKDOWN, )

    def renew_handler(self, message):
        """/renew: enqueue a background refresh of the user's schedules."""
        resp = self.broker.send_task('deferred.get_user_subscribtion', args=[message.from_user.id])
        # resp = get_user_subscribtion.delay(message.from_user.id)
        self.bot.send_message(message.chat.id, Messages.schedule_will_be_updated,
                              reply_markup=gen_main_menu_markup(), parse_mode=ParseMode.MARKDOWN)

    def groupset_handler(self, message):
        """/groupset: open the per-group settings menu."""
        _, subs = self._init_user(message)
        self.bot.send_message(message.chat.id, text=Messages.settings,
                              reply_markup=gen_groups_settings_markup(subs))

    def cal_handler(self, message):
        """/cal: show the month-calendar picker for the user's default group."""
        now = datetime.now()  # Current date
        date = (now.year, now.month)
        self.cache.set_user_cal(message.from_user.id, date)
        default_group = self.u.get_user_default_group(message.from_user.id)
        self.cache.set_user_curr_gr(message.from_user.id, default_group)
        sub, _ = self.u.get_user_subscription_settings(
            message.from_user.id,
            default_group
        )
        markup = create_calendar_inline(now.year, now.month, sub['name'])
        self.bot.send_message(message.chat.id, Messages.select_date, reply_markup=markup)

    def teacher_handler(self, message):
        """/teacher: dialog that looks up a schedule by teacher name."""
        d = Dialog(globals={'m': self.s, 'u': self.u})
        d.set_main_handler(self.main_handler)
        d.add_step(handle_teacher_name)
        d.add_step(handle_teacher_selection, markup=DynamicMarkup())
        d.add_step(handle_teacher_date, markup=DynamicMarkup())
        d.register_in_bot(self.bot)
        return d.start(message)

    def week_handler(self, message):
        """/week: show the week picker for the user's default group."""
        self.cache.set_user_week(message.from_user.id, datetime.now())
        default_group = self.u.get_user_default_group(message.from_user.id)
        self.cache.set_user_curr_gr(message.from_user.id, default_group)
        sub, _ = self.u.get_user_subscription_settings(
            message.from_user.id,
            default_group
        )
        week_markup = create_week_inline(datetime.now(), sub['name'])
        self.bot.send_message(message.chat.id, Messages.select_date, reply_markup=week_markup)
| {"/worker/app/deferred.py": ["/worker/app/collection.py"], "/raspisator/app/handlers/inline.py": ["/raspisator/app/handlers/core.py", "/raspisator/app/markups.py", "/raspisator/app/templates.py"], "/raspisator/app/handlers/__init__.py": ["/raspisator/app/handlers/command.py", "/raspisator/app/handlers/aliases.py", "/raspisator/app/handlers/inline.py"], "/worker/app/__init__.py": ["/worker/app/deferred.py"], "/raspisator/app/markups.py": ["/raspisator/app/templates.py"], "/raspisator/app/chains.py": ["/raspisator/app/templates.py"], "/raspisator/app/handlers/command.py": ["/raspisator/app/handlers/core.py", "/raspisator/app/markups.py", "/raspisator/app/templates.py", "/raspisator/app/chains.py", "/raspisator/app/dialogs.py"], "/raspisator/app/__init__.py": ["/raspisator/app/bot.py"], "/raspisator/app/handlers/aliases.py": ["/raspisator/app/handlers/command.py"], "/raspisator/app/bot.py": ["/raspisator/app/worker.py", "/raspisator/app/templates.py", "/raspisator/app/handlers/__init__.py", "/raspisator/app/cache.py", "/raspisator/app/markups.py"], "/raspisator/app/dialogs.py": ["/raspisator/app/templates.py", "/raspisator/app/markups.py", "/raspisator/app/chains.py", "/raspisator/app/worker.py"]} |
69,666 | dokzlo13/polyrasp | refs/heads/master | /raspisator/app/__init__.py | from . import shared
from .bot import bot | {"/worker/app/deferred.py": ["/worker/app/collection.py"], "/raspisator/app/handlers/inline.py": ["/raspisator/app/handlers/core.py", "/raspisator/app/markups.py", "/raspisator/app/templates.py"], "/raspisator/app/handlers/__init__.py": ["/raspisator/app/handlers/command.py", "/raspisator/app/handlers/aliases.py", "/raspisator/app/handlers/inline.py"], "/worker/app/__init__.py": ["/worker/app/deferred.py"], "/raspisator/app/markups.py": ["/raspisator/app/templates.py"], "/raspisator/app/chains.py": ["/raspisator/app/templates.py"], "/raspisator/app/handlers/command.py": ["/raspisator/app/handlers/core.py", "/raspisator/app/markups.py", "/raspisator/app/templates.py", "/raspisator/app/chains.py", "/raspisator/app/dialogs.py"], "/raspisator/app/__init__.py": ["/raspisator/app/bot.py"], "/raspisator/app/handlers/aliases.py": ["/raspisator/app/handlers/command.py"], "/raspisator/app/bot.py": ["/raspisator/app/worker.py", "/raspisator/app/templates.py", "/raspisator/app/handlers/__init__.py", "/raspisator/app/cache.py", "/raspisator/app/markups.py"], "/raspisator/app/dialogs.py": ["/raspisator/app/templates.py", "/raspisator/app/markups.py", "/raspisator/app/chains.py", "/raspisator/app/worker.py"]} |
69,667 | dokzlo13/polyrasp | refs/heads/master | /raspisator/app/handlers/aliases.py | import inspect
from .command import CommandHandlers
class CommandsAliases:
    """Registers plain-text (button) aliases for CommandHandlers' commands.

    Each mapper maps a command-method name prefix (e.g. ``'subs'``) to the
    button text that should trigger the same handler.
    """

    @staticmethod
    def alias_filter(value):
        """Build a telebot ``func`` filter matching an exact message text."""
        def fun(message):
            return message.text == value
        return fun

    def log_wrapper(self, fun):
        """Hook point for logging alias invocations (currently pass-through)."""
        def decor(message):
            # print("User {0} send {1}".format(message.from_user.id, message.text))
            return fun(message)
        return decor

    def __init__(self, command_handlers: CommandHandlers, *mappers: dict):
        """
        :param command_handlers: handler collection to scan for ``*_handler`` methods
        :param mappers: one or more alias dicts; keys must not repeat across them
        :raises ValueError: if no mapper is given
        :raises KeyError: if two mappers share a key
        """
        if len(mappers) < 1:
            raise ValueError('At least one alias mapper must be provided')
        elif len(mappers) == 1:
            self.mapper = mappers[0]
        else:
            map_set = mappers[0].keys()
            for m in mappers[1:]:
                if any(map_set & m.keys()):
                    raise KeyError('Commands alias mappers cant have repeating keys!'
                                   ' Repeated: "{0}" from {1}'.format(map_set & m.keys(), m))
                map_set = map_set | m.keys()
            # Bug fix: merge into a fresh dict instead of calling
            # mappers[0].update(...), which mutated the caller's dictionary.
            self.mapper = {}
            for m in mappers:
                self.mapper.update(m)
        for method in inspect.getmembers(command_handlers, predicate=inspect.ismethod):
            if method[0].startswith('_'):
                continue
            found_alias = None
            # Last matching prefix wins (original behavior preserved).
            for key in self.mapper:
                if method[0].startswith(key):
                    # name, func, alias
                    found_alias = method[0], method[1], self.mapper[key]
            if found_alias:
                print('Alias found for "{0}" -> "{2}"'.format(*found_alias), )
                command_handlers._add_handler(found_alias[1],
                                              commands=None,
                                              func=self.log_wrapper(self.alias_filter(found_alias[2])),
                                              content_types=['text'],
                                              regexp=None
                                              )
            else:
                print('Alias NOT found for "{0}"'.format(method[0]))
| {"/worker/app/deferred.py": ["/worker/app/collection.py"], "/raspisator/app/handlers/inline.py": ["/raspisator/app/handlers/core.py", "/raspisator/app/markups.py", "/raspisator/app/templates.py"], "/raspisator/app/handlers/__init__.py": ["/raspisator/app/handlers/command.py", "/raspisator/app/handlers/aliases.py", "/raspisator/app/handlers/inline.py"], "/worker/app/__init__.py": ["/worker/app/deferred.py"], "/raspisator/app/markups.py": ["/raspisator/app/templates.py"], "/raspisator/app/chains.py": ["/raspisator/app/templates.py"], "/raspisator/app/handlers/command.py": ["/raspisator/app/handlers/core.py", "/raspisator/app/markups.py", "/raspisator/app/templates.py", "/raspisator/app/chains.py", "/raspisator/app/dialogs.py"], "/raspisator/app/__init__.py": ["/raspisator/app/bot.py"], "/raspisator/app/handlers/aliases.py": ["/raspisator/app/handlers/command.py"], "/raspisator/app/bot.py": ["/raspisator/app/worker.py", "/raspisator/app/templates.py", "/raspisator/app/handlers/__init__.py", "/raspisator/app/cache.py", "/raspisator/app/markups.py"], "/raspisator/app/dialogs.py": ["/raspisator/app/templates.py", "/raspisator/app/markups.py", "/raspisator/app/chains.py", "/raspisator/app/worker.py"]} |
69,668 | dokzlo13/polyrasp | refs/heads/master | /shared/model.py |
from bson.objectid import ObjectId
import collections
import hashlib
from pymongo import MongoClient
from datetime import timedelta, datetime
class Userdata:
    """Mongo-backed access layer for bot users and their subscriptions.

    Collections: ``users`` (one document per Telegram user, holding
    subscription ObjectIds plus per-subscription settings) and
    ``subscriptions`` (one document per subscribed group, shared between
    users).
    """

    def __init__(self, db):
        self.users = db.get_collection('users')
        self.subscriptions = db.get_collection('subscriptions')

    def create_or_get_user(self, tele_user_id, user_name):
        """Return the _id of the user document, creating it on first contact."""
        user = self.users.find_one({'uid': tele_user_id, "name": user_name})
        if user:
            return user['_id']
        else:
            return self.users.insert_one({'uid': tele_user_id,
                                          'name': user_name,
                                          'subscription': [],
                                          'settings': {'default_group': None},
                                          'subscription_settings': []
                                          }).inserted_id

    def add_subscription(self, tel_user, message_chat_id, sub_body):
        """Attach a group subscription to a user, reusing the shared
        subscription document when the group is already known.  The first
        subscription a user adds becomes their default group."""
        # sub.update({'notification': True})
        existed_sub = self.get_sub_by_group_id(sub_body['id'])
        if not existed_sub:
            sub = self.subscriptions.insert_one(sub_body).inserted_id
        else:
            sub = existed_sub['_id']
        # Per-user settings attached to this subscription.
        settings = {
            'chat': message_chat_id,
            'notify': True,
            'default': False
        }
        settings.update({'id': sub})
        self.users.update({'uid': tel_user},
                          {'$addToSet': {'subscription': sub}})
        self.users.update({'uid': tel_user},
                          {'$push': {'subscription_settings': settings}},
                          # {'$set': {'subscription_settings.' + str(sub): settings}},
                          )
        # Select first added group as default
        if self.get_user_default_group(tel_user) == None:
            self.set_user_default_group(tel_user, sub)
        return sub

    def get_user_by_tel_id(self, user_id):
        return self.users.find_one({'uid': int(user_id)})

    def get_sub_by_group_id(self, group_id):
        # Lookup by the external (site) group id, not the Mongo _id.
        return self.subscriptions.find_one({'id': int(group_id)})

    def get_sub_by_string_id(self, sub_id, string_id=False):
        # string_id=True converts the ObjectId to str for serialization.
        raw = self.subscriptions.find_one({'_id': ObjectId(sub_id)})
        if raw and string_id:
            raw.update({'_id': str(raw['_id'])})
        return raw

    def get_all_subs(self, string_id=False):
        """Yield every subscription document (optionally with str _id)."""
        raw = self.subscriptions.find()
        if string_id:
            for item in raw:
                item.update({'_id': str(item['_id'])})
                yield item
        else:
            yield from raw

    def update_subscription_acces_time(self, sub_id):
        # Touch the subscription's refresh timestamp.
        self.subscriptions.update({'_id': ObjectId(sub_id)},
                                  {'$set': {'upd_time': datetime.now()}})

    def delete_subscription(self, tel_user, sub_id):
        """Detach a subscription from a user and re-pick the default group."""
        self.users.update({'uid': int(tel_user)},
                          {'$pull': {'subscription': ObjectId(sub_id)}})
        self.users.update({'uid': int(tel_user)},
                          {"$pull": {'subscription_settings': {"id": ObjectId(sub_id)}}})
        if self.get_user_default_group(tel_user) == str(sub_id):
            # If user has another group (only one) - select this group as default
            aviable = list(self.get_subscriptions(tel_user=tel_user))
            if len(aviable) == 1:
                print('SET TO ', aviable[0]['_id'])
                self.set_user_default_group(tel_user, aviable[0]['_id'])
            else:
                print('UNSET ALL')
                self.unset_default_groups(tel_user)

    def get_user_subscription_settings(self, tel_user=None, sub_id=None):
        """Return (subscription, settings) for one sub, or a zip of pairs
        for all of the user's subs; falls back to ([], {}) when nothing
        matches."""
        subs = list(self.get_subscriptions(tel_user=tel_user, sub_id=sub_id))
        if not subs:
            return [], {}
        if sub_id:
            q = [
                {"$unwind": "$subscription_settings"},
                {"$match": {'uid': int(tel_user), "subscription_settings.id": subs[0]['_id']}},
                {"$project": {"subscription_settings": 1, '_id': 0}}
            ]
        else:
            q = [
                {"$match": {'uid': int(tel_user)}},
                {"$project": {"subscription_settings": 1, '_id': 0}}
            ]
        try:
            user_sub_settings = next(self.users.aggregate(q))['subscription_settings']
        except StopIteration:
            return [], {}
        except KeyError:
            return [], {}
        else:
            if sub_id:
                return subs[0], user_sub_settings
            else:
                return zip(subs, user_sub_settings)

    def change_notification_state(self, tel_user, sub_id):
        """Toggle the notify flag for one subscription; return the updated pair."""
        sub, settings = self.get_user_subscription_settings(tel_user, sub_id)
        self.users.update({"uid": int(tel_user), "subscription_settings.id": ObjectId(sub_id)},
                          {"$set": {
                              "subscription_settings.$.notify": not settings['notify']
                          }})
        settings.update({"notify": not settings['notify']})
        return sub, settings

    def get_subscriptions(self, *, tel_user=None, db_user=None, sub_id=None, string_id=False):
        """Yield the subscription documents a user is attached to.

        NOTE(review): when the user is missing, the bare ``yield`` below
        makes this generator produce a single ``None`` (so ``list(...)`` is
        ``[None]``, not ``[]``) - confirm whether callers rely on that.
        """
        query = [
            {'$match': {'uid': int(tel_user)}} if tel_user else {'$match': {'_id': db_user}},
            {'$lookup':
                 {
                     'from': 'subscriptions',
                     'localField': 'subscription',
                     "foreignField": "_id",
                     'as': 'subscription'
                 }
             },
            {'$project': {'subscription': 1, '_id': 0}},
        ]
        if sub_id:
            query.append({"$unwind": "$subscription"})
            query.append({'$match': {'subscription._id': ObjectId(sub_id)}})
        raw = []
        subs = self.users.aggregate(query)
        try:
            subs = next(subs)
        except StopIteration:
            yield
        else:
            # $unwind yields a single document, otherwise a list.
            if isinstance(subs['subscription'], (list, tuple)):
                raw = subs['subscription']
            else:
                raw = [subs['subscription']]
        if string_id:
            for item in raw:
                item.update({'_id': str(item['_id'])})
                yield item
        else:
            yield from raw

    def get_all_users_subscribes(self):
        # Flat list of every subscription ObjectId referenced by any user.
        data = []
        raw = self.users.find({}, {'subscription': 1, '_id': 0})
        for item in raw:
            data.extend(item['subscription'])
        return data

    def get_all_users_subscription_settings(self):
        # Flat list of every per-user subscription settings entry.
        data = []
        raw = self.users.find({}, {"subscription_settings": 1, '_id': 0})
        for item in raw:
            data.extend(item["subscription_settings"])
        return data

    def get_unused_subscriptions(self):
        # Subscriptions no user references anymore.
        return list(self.subscriptions.find({'_id': {"$nin": self.get_all_users_subscribes()}}))

    def delete_unused_subscriptions(self):
        return self.subscriptions.remove({'_id': {'$in': [i['_id'] for i in self.get_unused_subscriptions()]}})

    def purge_subscription_timeouts(self):
        # Reset every refresh timestamp so all subscriptions look stale.
        return self.subscriptions.update_many({}, {'$set': {'upd_time': datetime.min}})

    def get_user_default_group(self, tel_user):
        """Return the default subscription id as str, or None."""
        try:
            default = next(self.users.find({'uid': int(tel_user)}))['settings']['default_group']
        except StopIteration:
            return None
        else:
            if default == None:
                return None
            else:
                return str(default)

    def unset_default_groups(self, tel_user):
        """Clear the default-group pointer and all per-sub default flags."""
        self.users.update({'uid': int(tel_user)},
                          {'$set': {'settings.default_group': None}},
                          )
        self.users.update({"uid": int(tel_user)},
                          {"$set": {"subscription_settings.$[].default": False}})

    def set_user_default_group(self, tel_user, sub_id):
        """Make sub_id the user's single default group."""
        self.unset_default_groups(tel_user)
        self.users.update({'uid': int(tel_user)},
                          {'$set': {'settings.default_group': ObjectId(sub_id)}},
                          )
        self.users.update({'uid': int(tel_user), 'subscription_settings.id': ObjectId(sub_id)},
                          {'$set': {'subscription_settings.$.default': True}},
                          )
class Studiesdata:
    """Mongo-backed access layer for faculties, groups and lessons."""

    def __init__(self, db):
        self.db = db
        self.faculties = db.get_collection('faculties')
        self.groups = db.get_collection('groups')
        self.lessons = db.get_collection('lessons')

    def update_faculties(self, data):
        """Bulk-insert faculty documents."""
        return self.faculties.insert_many(data)

    def update_groups(self, data):
        """Bulk-insert group documents."""
        return self.groups.insert_many(data)

    def get_faculties_names(self):
        """Return the names of all known faculties."""
        faculties = self.faculties.aggregate([{'$project': {'name': 1, '_id': 0}}])
        return [i['name'] for i in faculties]

    def get_facultie_by_facultie_name(self, fac_name):
        return self.faculties.find_one({"name": fac_name})

    def get_facult_by_react_id(self, fac_id):
        # Accepts either a single external id or a list of them.
        if isinstance(fac_id, (list, tuple)):
            return self.faculties.find({'id': {'$in': fac_id}})
        else:
            return self.faculties.find_one({'id': fac_id})

    def get_group_by_name(self, group_name):
        return self.groups.find_one({'name': group_name})

    def get_groups_by(self, type_=None, fac_id=None, level=None, kind=None):
        """Filter groups by any combination of type / faculty / level / kind."""
        query = {}
        if type_:
            query.update({'type': type_})
        if fac_id:
            query.update({'facultie': fac_id})
        if level:
            query.update({'level': level})
        if kind:
            query.update({'kind': kind})
        return list(self.groups.find(query))

    def add_lessons(self, data):
        return self.lessons.insert_many(data)

    def check_add_lessons(self, data, sub_id=None, checksums_check=True, matches_check=True):
        """Insert lessons, de-duplicating by content checksum and by
        (time slot, group, weekday) match.

        :return: dict with 'new', 'same' and 'updated' counters
        """
        counter_same = 0
        counter_update = 0
        checksum_cleared_data = []
        for item in data:
            checksum = sha256(gen_checkstring(item))
            item['checksum'] = checksum
            item['upd_time'] = datetime.now()
            if sub_id:
                item['sub_id'] = sub_id
            if checksums_check:
                existed = self.lessons.find_one({'checksum': checksum})
                if existed:
                    # Identical lesson already stored - just refresh its timestamp.
                    counter_same += 1
                    self.lessons.update({'_id': existed['_id']}, {'$set': {'upd_time': datetime.now()}})
                    continue
                else:
                    checksum_cleared_data.append(item)
            else:
                checksum_cleared_data.append(item)
        clear_data = []
        for item in checksum_cleared_data:
            if matches_check:
                existed = self.lessons.find_one({
                    "time_start": item["time_start"],
                    "time_end": item["time_end"],
                    "groups.id": {'$in': [gr['id'] for gr in item["groups"]]},
                    "weekday": item["weekday"]
                })
                if existed:
                    # Same slot, changed content - replace the stored document.
                    self.lessons.update({'_id': existed['_id']}, item)
                    counter_update += 1
                    data.remove(item)
                else:
                    clear_data.append(item)
            else:
                clear_data.append(item)
        inserted = 0
        if clear_data:
            # Bug fix: insert only the de-duplicated lessons.  The original
            # inserted the whole `data` list, re-adding documents already
            # counted as 'same' above and skewing the 'new' counter.
            inserted = len(self.add_lessons(clear_data).inserted_ids)
        return {'new': inserted, 'same': counter_same, 'updated': counter_update}

    def get_lessons_in_day(self, group_id: int, day: datetime):
        """All lessons of a group starting within [day, day + 1 day]."""
        return list(self.lessons.find({'groups.id': group_id,
                                       '$and': [{"time_start": {'$gte': day}},
                                                {"time_start": {'$lte': day + timedelta(days=1)}}]
                                       }))

    def get_nearest_lesson(self, group_id, delta=None):
        """First upcoming lesson of a group within *delta* (default 7 days)."""
        delta = delta or timedelta(days=7)
        return self.lessons.find_one({'groups.id': group_id,
                                      "time_start": {'$gte': datetime.now(), '$lt': datetime.now() + delta}},
                                     {'checksum': 0, '_id': 0},)

    def get_lessons_by_subscription_by_delta(self, sub_id, date, delta):
        """Lessons of a subscription starting within date +/- delta."""
        q = {"sub_id": str(sub_id)}
        q.update({'$and':
                      [{"time_start": {'$gte': date - delta}},
                       {"time_start": {'$lte': date + delta}}]
                  })
        return list(self.lessons.find(q))

    def get_lessons_by_subscription_in_range(self, sub_id, from_, to):
        """Lessons of a subscription starting within [from_, to]."""
        q = {"sub_id": str(sub_id)}
        q.update({'$and':
                      [{"time_start": {'$gte': from_}},
                       {"time_start": {'$lte': to}}]
                  })
        return list(self.lessons.find(q))

    def remove_lessons_by_subscriptions(self, sub_ids: list):
        """Delete every lesson belonging to the given subscription ids."""
        return self.lessons.remove({'sub_id': {'$in': [str(sub_id) for sub_id in sub_ids]}})
def gen_checkstring(dict_: dict) -> str:
    """Flatten *dict_* into a deterministic string for checksumming.

    Keys are visited in sorted order.  Nested dicts (including dicts found
    inside lists/tuples) recurse; ints are stringified; datetimes use the
    fixed '%Y%m%d%H%M%S' format; strings are appended verbatim.  Values of
    any other type contribute nothing.
    """
    parts = []
    for _key, value in sorted(dict_.items()):
        if isinstance(value, (list, tuple)):
            # Only dict elements of sequences participate in the checksum.
            parts.extend(gen_checkstring(sub) for sub in value if isinstance(sub, dict))
        elif isinstance(value, dict):
            parts.append(gen_checkstring(value))
        elif isinstance(value, int):
            parts.append(str(value))
        elif isinstance(value, datetime):
            parts.append(value.strftime('%Y%m%d%H%M%S'))
        elif isinstance(value, str):
            parts.append(value)
    return ''.join(parts)
def sha256(w):
    """Return the hex-encoded SHA-256 digest of the UTF-8 encoding of *w*."""
    return hashlib.sha256(w.encode('utf-8')).hexdigest()
def context_model(model, connection, db):
    """Build a context-manager class that opens a Mongo connection, wraps
    database *db* in *model*, and closes the connection on exit.

    :param model: model class taking a pymongo database (e.g. Userdata)
    :param connection: MongoDB connection URL
    :param db: database name
    """
    class context_model:
        def __init__(self, **kwargs):
            self._kwargs = kwargs

        def __enter__(self):
            self.conn = MongoClient(connection)
            # Bug fix: the original used hasattr() on a dict, which checks
            # attributes rather than keys, so this branch could never run.
            if self._kwargs.get('purge_schema'):
                # NOTE(review): drop_database() is called with what look
                # like *collection* names - confirm intent before relying
                # on this branch.
                for collection in self._kwargs['purge_schema']:
                    self.conn.drop_database(collection)
            self.model = model(self.conn.get_database(db))
            return self.model

        def __exit__(self, exc_type, exc_val, exc_tb):
            del self.model
            self.conn.close()

    return context_model
| {"/worker/app/deferred.py": ["/worker/app/collection.py"], "/raspisator/app/handlers/inline.py": ["/raspisator/app/handlers/core.py", "/raspisator/app/markups.py", "/raspisator/app/templates.py"], "/raspisator/app/handlers/__init__.py": ["/raspisator/app/handlers/command.py", "/raspisator/app/handlers/aliases.py", "/raspisator/app/handlers/inline.py"], "/worker/app/__init__.py": ["/worker/app/deferred.py"], "/raspisator/app/markups.py": ["/raspisator/app/templates.py"], "/raspisator/app/chains.py": ["/raspisator/app/templates.py"], "/raspisator/app/handlers/command.py": ["/raspisator/app/handlers/core.py", "/raspisator/app/markups.py", "/raspisator/app/templates.py", "/raspisator/app/chains.py", "/raspisator/app/dialogs.py"], "/raspisator/app/__init__.py": ["/raspisator/app/bot.py"], "/raspisator/app/handlers/aliases.py": ["/raspisator/app/handlers/command.py"], "/raspisator/app/bot.py": ["/raspisator/app/worker.py", "/raspisator/app/templates.py", "/raspisator/app/handlers/__init__.py", "/raspisator/app/cache.py", "/raspisator/app/markups.py"], "/raspisator/app/dialogs.py": ["/raspisator/app/templates.py", "/raspisator/app/markups.py", "/raspisator/app/chains.py", "/raspisator/app/worker.py"]} |
69,669 | dokzlo13/polyrasp | refs/heads/master | /raspisator/app/bot.py | import os
import locale
import telebot
import logging
from pymongo import MongoClient
from .shared.model import Studiesdata, Userdata
from .shared.timeworks import next_month, last_month
from .worker import celery, redis
from .templates import ParseMode, Messages, main_menu, groups_menu, search_menu, main_menu_button
from .handlers import CommandHandlers, CommandsAliases, InlineHandlers
from .cache import Cache
# --- Module-level bot wiring (runs on import): logging, Telegram bot, Mongo, handlers. ---
logger = telebot.logger
# Russian locale so any locale-aware date formatting in messages renders in Russian.
locale.setlocale(locale.LC_ALL, ('RU','UTF8'))
logger.setLevel(logging.DEBUG if os.environ.get('DEBUG', '0') == '1'
                else logging.INFO)  # Outputs debug messages to console.
# Runtime configuration is taken from environment variables.
BOT_TOKEN = os.environ.get('BOT_TOKEN', None)
MONGO_CONNECTION = os.environ.get('MONGO_CONNECTION', 'mongodb://localhost:27017/')
MONGO_DB = os.environ.get('MONGO_DB', 'raspisator')
# threaded=False: message handlers run sequentially in the polling thread.
bot = telebot.TeleBot(token=BOT_TOKEN, threaded=False)
logger.warning('Initalizing bot with token: {0}'.format("<SANTINIZED>" if BOT_TOKEN != None else "<EMPTY>"))
conn = MongoClient(MONGO_CONNECTION)
# server_info() round-trips to the server, so this doubles as a connectivity check.
logger.warning("Database connected" if 'ok' in conn.server_info() and
               conn.server_info()['ok'] == 1.0 else "Database connection failed!")
db = conn.get_database(MONGO_DB)
# Data-access models and the redis-backed cache shared by all handlers.
studiesmodel = Studiesdata(db)
usersmodel = Userdata(db)
cache = Cache(redis)
# Register command, alias and inline handlers on the bot instance (registration
# happens as a side effect of constructing these objects).
handlers = CommandHandlers(bot,
                 usersmodel=usersmodel,
                 studiesmodel=studiesmodel,
                 celery=celery,
                 cache=cache)
CommandsAliases(handlers,
                search_menu,
                main_menu,
                groups_menu,
                {'main': main_menu_button})
InlineHandlers(bot,
                 usersmodel=usersmodel,
                 studiesmodel=studiesmodel,
                 celery=celery,
                 cache=cache)
## INLINE QUERY HANDLE NEAREST PAIR
from .markups import gen_inline_groups_markup
@bot.inline_handler(lambda query: query.query == '')
def query_text(inline_query):
    """Answer an empty inline query with the user's subscribed groups and their nearest lessons."""
    subscriptions = usersmodel.get_subscriptions(tel_user=inline_query.from_user.id)
    nearest_lessons = []
    for subscription in subscriptions:
        nearest_lessons.append(studiesmodel.get_nearest_lesson(subscription['id']))
    markup = gen_inline_groups_markup(subscriptions, nearest_lessons)
    bot.answer_inline_query(inline_query.id, markup)
# @bot.callback_query_handler(func=lambda call: call.data.startswith('change-group-'))
# def callback_chande(call):
# call.data = call.data[13:]
# if call.data.startswith('init-'):
# call.data = call.data[5:]
# markup = create_group_change_markup(groups_aviable, return_to=call.data)
# bot.edit_message_text(Messages.select_group_to_show, call.from_user.id, call.message.message_id,
# reply_markup=markup)
# bot.answer_callback_query(call.id, text="")
# if call.data.startswith('select-'):
# call.data = call.data[7:]
# group_id, back_to = call.data[:24], call.data[25:]
# usersmodel.set_user_current_group(call.from_user.id, group_id)
# # CHECK HERE WHERE TO BACK
# # TODO: Create callbacks commands parser
## SERVICE COMMANDS
@bot.message_handler(func=lambda message: message.text == 'update-database-schema')
def _(message):
    """Service text command: enqueue a groups-schema refresh on the worker."""
    task = celery.send_task('deferred.get_groups_schema')
    bot.send_message(message.chat.id, 'Schema will be updated! Task: "{0}"'.format(str(task)))
@bot.message_handler(func=lambda message: message.text == 'purge-unused-subs')
def _(message):
    """Service text command: enqueue removal of subscriptions no user is linked to."""
    task = celery.send_task('deferred.unlink_non_used_subs')
    bot.send_message(message.chat.id, 'Unused subs will be removed! Task: "{0}"'.format(str(task)))
@bot.message_handler(func=lambda message: message.text == 'purge-timeouts')
def _(message):
    """Service text command: enqueue purging of subscription timeouts."""
    task = celery.send_task('deferred.purge_subscription_timeouts')
    bot.send_message(message.chat.id, 'Timeouts purged! "{0}"'.format(str(task)))
69,670 | dokzlo13/polyrasp | refs/heads/master | /raspisator/app/dialogs.py | import inspect
import sys
from datetime import datetime
from telebot import types
from .templates import ParseMode, Messages
from .templates import selected_group_message, lessons_template
from .templates import level_mapper, type_mapper, kind_mapper, group_checkout_mapper
from .markups import *
from .chains import Retry
from .shared.timeworks import convert_concat_day_and_lesson
from .worker import celery
def handle_facultie_group_selection(bot, message, **kwargs):
    # Dialog-chain step: resolve the institute the user typed, validate that it
    # has groups, and advance to the qualification-kind selection step.
    # NOTE(review): the bare docstring below appears to double as the user-facing
    # prompt consumed by the chain framework — do not translate or change it
    # without checking chains.py.
    "Выберите институт:"
    bot.send_chat_action(message.chat.id, 'typing')
    m = kwargs.get('m')  # studies model injected into the chain via kwargs
    facult = m.get_facultie_by_facultie_name(message.text)
    if not facult:
        # Retry re-asks the current step with the given message.
        raise Retry("В данном институте нет групп, выберите другой институт")
    # bot.send_message(message.chat.id, "Ищу группы для {0}".format(facult['abbr']),
    #                  reply_markup=types.ReplyKeyboardRemove())
    groups = m.get_groups_by(fac_id=facult['id'])
    if not groups:  # groups are only validated here; the list itself is not carried forward
        raise Retry("В данном институте нет групп, выберите другой институт")
    # Keyboard for the next step plus state carried through kwargs.
    kwargs.update({'next_step_markup': gen_dict_markup(kind_mapper)})
    kwargs.update(dict(facult_id=facult['id']))
    return kwargs
def handle_group_kind(bot, message, **kwargs):
    "Выберите квалификацию:"
    # Dialog-chain step: map the chosen qualification, check groups exist for it,
    # then advance to study-form selection. The docstring doubles as the prompt.
    model = kwargs.get('m')
    selected_kind = kind_mapper.get(message.text)
    matching_groups = model.get_groups_by(fac_id=kwargs.get('facult_id'),
                                          kind=selected_kind)
    if not matching_groups:
        raise Retry('Нет групп с такой квалификацией!')
    kwargs['next_step_markup'] = gen_dict_markup(type_mapper)
    kwargs['kind'] = selected_kind
    return kwargs
def handle_group_type(bot, message, **kwargs):
    "Выберите форму обучения:"
    # Dialog-chain step: map the chosen study form, check groups exist for it,
    # then advance to course-level selection. The docstring doubles as the prompt.
    model = kwargs.get('m')
    selected_type = type_mapper.get(message.text)
    matching_groups = model.get_groups_by(fac_id=kwargs.get('facult_id'),
                                          kind=kwargs.get('kind'),
                                          type_=selected_type)
    if not matching_groups:
        raise Retry('Нет групп с такой формой обучения!')
    kwargs['next_step_markup'] = gen_dict_markup(level_mapper)
    kwargs['type_'] = selected_type
    return kwargs
def handle_group_level(bot, message, **kwargs):
    "Выберите курс:"
    # Dialog-chain step: map the chosen course level and offer the matching group
    # names as the next keyboard. The docstring doubles as the prompt.
    model = kwargs.get('m')
    chosen_level = level_mapper.get(message.text)
    level_groups = model.get_groups_by(fac_id=kwargs.get('facult_id'),
                                       kind=kwargs.get('kind'),
                                       type_=kwargs.get('type_'),
                                       level=chosen_level)
    if not level_groups:
        raise Retry("Нет групп для данного курса")
    kwargs['next_step_markup'] = gen_list_markup(level_groups, 'name')
    return kwargs
def handle_group(bot, message, **kwargs):
    # Dialog-chain step: look up the chosen group, send its summary card to the
    # chat, and stash the group record for the confirmation step.
    "Выберите группу:"
    m = kwargs.get('m')
    group = m.get_group_by_name(message.text)
    if not group:
        raise Retry("Для данной группы нет расписания, выберите другую группу")
    facult = m.get_facult_by_react_id(group["facultie"])
    # Markdown card describing the selected group and its institute.
    text = selected_group_message(group, facult)
    bot.send_message(message.chat.id, text=text, parse_mode=ParseMode.MARKDOWN)
    kwargs.update(dict(group=group))
    return kwargs
def handle_group_commit(bot, message, **kwargs):
    # Final dialog step: on a positive confirmation, persist the subscription and
    # trigger the initial timetable fetch on the worker. Any other reply is
    # silently ignored (no Retry, no return value).
    "Подвердите выбор:"
    u = kwargs.get('u')  # users model injected into the chain via kwargs
    group = kwargs.get('group')
    confirm = group_checkout_mapper.get(message.text)
    if confirm:
        sub = u.add_subscription(message.from_user.id, message.chat.id, group)
        # initial=True: worker downloads the timetable for the brand-new subscription.
        celery.send_task('deferred.get_subscribtion', args=[str(sub)], kwargs={'initial':True})
        text = 'Ваша группа добавлена в список подписок!\nПросмотреть все подписки можно командой /subs' \
               '\n*Информация о расписании скоро появится!*'
        bot.send_message(message.chat.id, text=text, parse_mode=ParseMode.MARKDOWN,
                         reply_markup=types.ReplyKeyboardRemove(selective=False))
def handle_teacher_name(bot, message, **kwargs):
    # Dialog-chain step: run a teacher-name search on the worker and block
    # (up to 10 s) for the results, then offer them as the next keyboard.
    "Введите имя преподавателя:"
    result = celery.send_task('deferred.get_teacher_search', args=[message.text])
    bot.send_chat_action(message.chat.id, 'typing')
    # result = get_teacher_search.delay(message.text)
    bot.send_message(message.chat.id, 'Произвожу поиск...')
    # Synchronous wait on the celery result; raises on timeout.
    result = result.wait(timeout=10)
    if not result:
        raise Retry('Поиск не дал результатов! Введите другой запрос, или вернитесь в меню /main')
    kwargs.update({'next_step_markup': gen_list_markup(result, 'full_name')})
    kwargs.update({'teachers': result})
    return kwargs
def handle_teacher_selection(bot, message, **kwargs):
    "Выберите преподавателя из списка:"
    # Dialog-chain step: match the replied name against the previously found
    # teachers, fetch that teacher's schedule from the worker (blocking up to
    # 10 s) and offer the available dates as the next keyboard.
    result = None
    teachers = kwargs.get('teachers')
    for teacher in teachers:
        if teacher['full_name'] == message.text:
            result = celery.send_task('deferred.get_teacher_lessons', args=[teacher['id']])
            bot.send_chat_action(message.chat.id, 'typing')
    # Fix: previously a reply that matched no teacher fell through to
    # result.wait() with result still None, raising AttributeError. Re-ask instead.
    if result is None:
        raise Retry('Выберите преподавателя из списка!')
    result = result.wait(timeout=10)
    if not result:
        raise Retry('Для этого преподавателя нет расписания!')
    dates_list = []
    for item in result:
        dates_list.append(Messages.teacher_date_templ(datetime.strptime(item['date'], '%Y-%m-%d')))
    kwargs.update({'next_step_markup': gen_list_markup(dates_list)})
    kwargs.update({'teacher_rasp': result})
    return kwargs
def handle_teacher_date(bot, message, **kwargs):
    # Dialog-chain step: find the schedule entry matching the date the user
    # picked and render that day's lessons as a Markdown message.
    "Выберите необходимую дату:"
    bot.send_chat_action(message.chat.id, 'typing')
    teacher_rasp = kwargs.get('teacher_rasp')
    lessons = []
    for rasp in teacher_rasp:
        if rasp == []:
            continue
        weekday = datetime.strptime(rasp['date'], '%Y-%m-%d')
        try:
            # assumes the date keyboard labels were built with
            # Messages.teacher_time_template — TODO confirm against templates.py
            recieved_date = datetime.strptime(message.text, Messages.teacher_time_template)
        except ValueError:
            raise Retry('Нет расписания на этот день')
        if recieved_date == weekday:
            for lesson in rasp['lessons']:
                # Combine the day's date with the lesson's start/end times.
                lesson['time_start'] = convert_concat_day_and_lesson(lesson['time_start'], weekday)
                lesson['time_end'] = convert_concat_day_and_lesson(lesson['time_end'], weekday)
                lesson['weekday'] = rasp['weekday']
                lessons.append(lesson)
    # use only lessons, without weeks info
    if not lessons:
        raise Retry('Нет расписания на этот день')
    result = lessons_template(lessons)
    bot.send_message(message.chat.id, result, parse_mode=ParseMode.MARKDOWN)
    return kwargs
# Export only the handler functions *defined in this module* (imported names are
# excluded by the __module__ check), so `from .dialogs import *` pulls in just
# the chain-step handlers.
__all__ = [m[0] for m in inspect.getmembers(sys.modules[__name__], inspect.isfunction)
           if m[1].__module__ == inspect.getmodule(sys.modules[__name__]).__name__]
| {"/worker/app/deferred.py": ["/worker/app/collection.py"], "/raspisator/app/handlers/inline.py": ["/raspisator/app/handlers/core.py", "/raspisator/app/markups.py", "/raspisator/app/templates.py"], "/raspisator/app/handlers/__init__.py": ["/raspisator/app/handlers/command.py", "/raspisator/app/handlers/aliases.py", "/raspisator/app/handlers/inline.py"], "/worker/app/__init__.py": ["/worker/app/deferred.py"], "/raspisator/app/markups.py": ["/raspisator/app/templates.py"], "/raspisator/app/chains.py": ["/raspisator/app/templates.py"], "/raspisator/app/handlers/command.py": ["/raspisator/app/handlers/core.py", "/raspisator/app/markups.py", "/raspisator/app/templates.py", "/raspisator/app/chains.py", "/raspisator/app/dialogs.py"], "/raspisator/app/__init__.py": ["/raspisator/app/bot.py"], "/raspisator/app/handlers/aliases.py": ["/raspisator/app/handlers/command.py"], "/raspisator/app/bot.py": ["/raspisator/app/worker.py", "/raspisator/app/templates.py", "/raspisator/app/handlers/__init__.py", "/raspisator/app/cache.py", "/raspisator/app/markups.py"], "/raspisator/app/dialogs.py": ["/raspisator/app/templates.py", "/raspisator/app/markups.py", "/raspisator/app/chains.py", "/raspisator/app/worker.py"]} |
69,750 | tszumowski/unsafe_defaults_example | refs/heads/master | /b.py | print("b.py: At the top before imports")
from defaults import defaults
print("b.py: Finished Imports.")
def print_normal():
    # Print the shared `defaults` dict exactly as this module currently sees it.
    print("b.py: print_normal(): {}".format(defaults))
def print_mod():
    # Demonstrates the pitfall this repo illustrates: mutating the imported
    # module-level dict is visible to every other importer of `defaults`.
    print("b.py: print_mod(): {}".format(defaults))
    defaults["foo"] = "override!"
    print("b.py: print_mod(): {}".format(defaults))
| {"/b.py": ["/defaults.py"], "/a.py": ["/defaults.py", "/b.py"]} |
69,751 | tszumowski/unsafe_defaults_example | refs/heads/master | /a.py | print("a.py: At the top before imports")
from defaults import defaults
import b
print("a.py: Finished Imports.")
# Demo driver: shows that b.print_mod()'s mutation of the shared `defaults`
# dict leaks into this module too — both modules import the same dict object.
print("a.py: defaults: {}".format(defaults))
print("a.py: Calling b.py")
b.print_normal()
b.print_mod()
b.print_normal()
print("a.py: defaults: {}".format(defaults))
| {"/b.py": ["/defaults.py"], "/a.py": ["/defaults.py", "/b.py"]} |
69,752 | tszumowski/unsafe_defaults_example | refs/heads/master | /defaults.py | print("defaults.py: !!! Inside defaults.py !!!")
defaults = {
"foo": "bar"
} | {"/b.py": ["/defaults.py"], "/a.py": ["/defaults.py", "/b.py"]} |
69,753 | leoneo7/rawyou | refs/heads/master | /api/models.py | from django.db import models
from django.contrib.auth.models import User
from datetime import datetime
class Emotion(models.Model):
    """A named emotion that Track rows can reference."""
    # Short label, e.g. an emotion name up to 10 characters.
    emotion_name = models.CharField(max_length=10)
    def __str__(self):
        return u'%s' % (self.emotion_name)
class Track(models.Model):
    """A time span during which a user recorded a given emotion."""
    # NOTE(review): ForeignKey without on_delete targets Django < 2.0; left as-is
    # to stay compatible with the project's Django version.
    user = models.ForeignKey(User)
    emotion = models.ForeignKey(Emotion)
    start_time = models.DateTimeField(default=datetime.now)
    end_time = models.DateTimeField(default=datetime.now)
    def __str__(self):
        # Fix: the model has no `track_id` attribute, so __str__ raised
        # AttributeError (e.g. in the admin list view); use the primary key.
        return u'%s' % (self.pk)
| {"/api/serializer.py": ["/api/models.py"], "/rawyou/urls.py": ["/api/urls.py"], "/api/views.py": ["/api/models.py", "/api/serializer.py"], "/api/admin.py": ["/api/models.py"], "/api/urls.py": ["/api/views.py"]} |
69,754 | leoneo7/rawyou | refs/heads/master | /api/serializer.py | from rest_framework import serializers
from django.contrib.auth.models import User
from .models import Emotion, Track
class UserSerializer(serializers.ModelSerializer):
    """Serializer for Django auth users (username + password)."""
    class Meta:
        model = User
        fields = ('username', 'password')
        # Security fix: without write_only the (hashed) password was echoed
        # back in every API response for the users endpoint.
        extra_kwargs = {'password': {'write_only': True}}
class EmotionSerializer(serializers.ModelSerializer):
    """Read/write serializer exposing only the emotion label."""
    class Meta:
        model = Emotion
        fields = ('emotion_name',)
class TrackSerializer(serializers.ModelSerializer):
    """Serializer for Track rows; `user` is attached server-side, not by the client."""
    class Meta:
        model = Track
        fields = ('emotion', 'start_time', 'end_time')
69,755 | leoneo7/rawyou | refs/heads/master | /rawyou/urls.py | from django.conf.urls import url, include
from rest_framework.authtoken import views
from django.contrib import admin
from api.urls import router as api_router
# URL routes: the admin site, the DRF-routed API, and DRF token authentication.
urlpatterns = [
    url(r'^admin/', admin.site.urls),
    url(r'^api/', include(api_router.urls)),
    # POST username/password here to obtain a DRF auth token.
    url(r'^api-token-auth/', views.obtain_auth_token),
]
| {"/api/serializer.py": ["/api/models.py"], "/rawyou/urls.py": ["/api/urls.py"], "/api/views.py": ["/api/models.py", "/api/serializer.py"], "/api/admin.py": ["/api/models.py"], "/api/urls.py": ["/api/views.py"]} |
69,756 | leoneo7/rawyou | refs/heads/master | /api/views.py | import django_filters
from rest_framework import viewsets, filters, generics
from django.contrib.auth.models import User
from .models import Emotion, Track
from .serializer import UserSerializer, EmotionSerializer, TrackSerializer
from rest_framework import permissions
from api.permissions import IsOwnerOrReadOnly
class UserViewSet(viewsets.ModelViewSet):
    """CRUD endpoints for Django auth users."""
    queryset = User.objects.all()
    serializer_class = UserSerializer
class EmotionViewSet(viewsets.ModelViewSet):
    """CRUD endpoints for the Emotion lookup table."""
    queryset = Emotion.objects.all()
    serializer_class = EmotionSerializer
class TrackViewSet(viewsets.ModelViewSet):
    """CRUD for Track rows, scoped to the requesting user's own data."""
    serializer_class = TrackSerializer
    permission_classes = (permissions.IsAuthenticatedOrReadOnly,
                          IsOwnerOrReadOnly,)
    def get_queryset(self):
        """Authenticated users see only their own tracks; anonymous requests see all."""
        queryset = Track.objects.all()
        user = self.request.user
        # Fix: `str(user) is not "AnonymousUser"` compared string *identity*,
        # which depends on interning (and is a SyntaxWarning on CPython 3.8+).
        # Compare by value instead; consider user.is_authenticated going forward.
        if str(user) != "AnonymousUser":
            queryset = queryset.filter(user=user)
        return queryset
    def perform_create(self, serializer):
        # Attach the requesting user as the owner of the new track.
        serializer.save(user=self.request.user)
| {"/api/serializer.py": ["/api/models.py"], "/rawyou/urls.py": ["/api/urls.py"], "/api/views.py": ["/api/models.py", "/api/serializer.py"], "/api/admin.py": ["/api/models.py"], "/api/urls.py": ["/api/views.py"]} |
69,757 | leoneo7/rawyou | refs/heads/master | /api/admin.py | from django.contrib import admin
from .models import Emotion, Track
@admin.register(Emotion)
class EmotionAdmin(admin.ModelAdmin):
    """Default admin for Emotion (renamed so it no longer shadows the model import)."""
    pass
@admin.register(Track)
class TrackAdmin(admin.ModelAdmin):
    """Default admin for Track (renamed so it no longer shadows the model import)."""
    pass
| {"/api/serializer.py": ["/api/models.py"], "/rawyou/urls.py": ["/api/urls.py"], "/api/views.py": ["/api/models.py", "/api/serializer.py"], "/api/admin.py": ["/api/models.py"], "/api/urls.py": ["/api/views.py"]} |
69,758 | leoneo7/rawyou | refs/heads/master | /api/urls.py | from rest_framework import routers
from .views import UserViewSet, EmotionViewSet, TrackViewSet
# DRF router exposing the API viewsets (mounted under /api/ by the project urls).
router = routers.DefaultRouter()
router.register(r'users', UserViewSet)
router.register(r'emotions', EmotionViewSet)
router.register(r'tracks', TrackViewSet, 'Track') | {"/api/serializer.py": ["/api/models.py"], "/rawyou/urls.py": ["/api/urls.py"], "/api/views.py": ["/api/models.py", "/api/serializer.py"], "/api/admin.py": ["/api/models.py"], "/api/urls.py": ["/api/views.py"]} |
69,759 | georgebirch/yolov3-overhead | refs/heads/main | /train_test_split_sun_elev.py | import argparse
import time
from sys import platform
from gb_utils import *
from pathlib import Path
from datetime import datetime
parser = argparse.ArgumentParser()
# Get data configuration
parser.add_argument('--images', type=str, default='train_images/5.tif', help='path to images')
parser.add_argument('--sym_path', type=str, default='symlinks')
parser.add_argument('--matfile_name', type=str, default='test')
# Fix: these are numeric thresholds, but `type=str` meant CLI overrides arrived
# as strings while the defaults stayed numeric — inconsistent downstream.
parser.add_argument('--sun_thres', type=float, default=35,
                    help='sun elevation threshold (degrees) for the train/valid split')
parser.add_argument('--valid_ratio', type=float, default=0.1,
                    help='fraction of data held out for validation')
opt = parser.parse_args()
print(opt)
def create(opt):
    # Build four .mat label files (two train, two validation splits partitioned
    # on sun elevation) for the configured images; helpers come from gb_utils.
    # class_map = [0, 0, 1, 1, 2, 2]
    class_map = [0, 1, 2]  # identity mapping over the 3 classes
    root_paths = [opt.images]  # NOTE(review): unused — paths_to_df is called with [opt.images] directly
    dets_df, stats_df = paths_to_df([opt.images])
    stats_df = paths_to_symlinks([opt.images], opt.sym_path, stats_df)
    # Annotate each image with its sun elevation for the split below.
    stats_df = get_sun_elev(stats_df)
    t1, t2, v1, v2 = train_test_split(dets_df, stats_df, opt.sun_thres, opt.valid_ratio)
    dfs = (t1, t2, v1, v2)
    for i, df in enumerate(dfs):
        matfile_path = 'london_'+str(opt.matfile_name)+'_'+str(i)+'.mat'
        print('Writing ',matfile_path)
        df_to_mat(df, matfile_path, class_map)
create(opt)
| {"/detect_loop.py": ["/utils/gb_utils.py"], "/buffer_movement_counts.py": ["/gb_scoring.py"]} |
69,760 | georgebirch/yolov3-overhead | refs/heads/main | /create_training_matfile.py | from gb_utils import *
from pathlib import Path
def create():
    # Build a single training .mat file from hard-coded dataset directories
    # (paths_to_df / df_to_mat come from gb_utils via the star import).
    # Class mapping. Must be a list with the same length as the number of classes that were stored in the yolo-style .txt label files.
    # class_map can be used to amalgamate certain classes.
    class_map = [0, 1, 2, 3, 4, 5, 6, 7, 8] # Leave classes unchanged.
    # List of parent directories within which images and labels are stored in their own named subdirectories.
    root_paths = [ \
        # "/mnt/Data/AQM_training/london_RGB_8bit_ACOMP_30cm",
        "/content/drive/My Drive/George_Birchenough/04_London_Datasets/london_8bit_combined/london_8bit_DRA-ACOMP_30cm",
        "/content/drive/My Drive/George_Birchenough/04_London_Datasets/london_8bit_combined/london_8bit_DRA-ACOMP_50cm"]
    # Name of the subdiretories within each of the above directories, containing yolo style labels in .txt files.
    label_dirname = 'labels_9_class'
    # Name of the subdiretories within each of the above directories, containing .tif images with identical names as above labels.
    img_dirname = 'images_RGB'
    # Path in which to save the resulting matfile.
    matfile_path = "/home/george/xview-yolov3/utils/test_set_001.mat"
    dets_df, stats_df = paths_to_df(root_paths, label_dirname, img_dirname)
    df_tuple = (dets_df, stats_df)
    df_to_mat(df_tuple, matfile_path, class_map)
create()
| {"/detect_loop.py": ["/utils/gb_utils.py"], "/buffer_movement_counts.py": ["/gb_scoring.py"]} |
69,761 | georgebirch/yolov3-overhead | refs/heads/main | /gb_scoring.py | from pathlib import Path
import scipy.io
import numpy as np
import pandas as pd
import itertools
import re
import os
import cv2
import glob
import shutil
import matplotlib.pyplot as plt
from matplotlib.collections import PatchCollection
from matplotlib.patches import Rectangle
import rasterio as rio
import geopandas as gpd
import shapely as sp
from rasterio.warp import transform_bounds
import pyproj
def img_roads_intersect (stats_df, road_buffer_shp_path ):
    """
    Intersect the target image bounds (the georeferenced extend of each image) with the road buffer to generate a geodataframe containing only relevant road buffer data.
    """
    print('Reading road buffer shapefile into geodataframe...')
    buffer_gdf = gpd.read_file(road_buffer_shp_path)
    # Use the buffer's CRS as the common reference for all geometries below.
    buffer_crs = buffer_gdf.crs.to_epsg()
    print('Using CRS : ', buffer_crs, ' for road buffer.')
    img_paths = stats_df['img_paths'].tolist()
    ldn_gdf = gpd.GeoDataFrame(geometry=[], crs=buffer_gdf.crs)
    # print('Warning, reading img_paths from glob, not stats_df')
    # imgs_path = '/content/drive/My Drive/George_Birchenough/04_London_Datasets/london_8bit_combined/london_8bit_ACOMP_30cm/images_RGB'
    # img_paths = Path(imgs_path).rglob('*.tif')
    print('Reading image bounds into geodataframe and setting CRS ...')
    for img_path in img_paths:
        with rio.open(img_path) as src:
            # Reproject each image's bounding box from the image CRS into the buffer CRS.
            bounds = transform_bounds(src.crs, rio.crs.CRS.from_epsg(buffer_crs), src.bounds[0], src.bounds[1], src.bounds[2], src.bounds[3])
            geom = sp.geometry.box(*bounds)
            ldn_gdf.loc[img_path, 'geometry'] = geom
    # Keep only the attribute columns we care about (if present).
    buffer_gdf = buffer_gdf.loc[:, buffer_gdf.columns.intersection(['geometry', 'LEGEND', 'DESC_TERM', 'ROADNATURE', 'layer'])]
    ldn_gdf.reset_index(drop=True, inplace=True)
    print('Intersecting image bounds with road buffer to reduce computing time when intersecting bboxes..')
    buffer_img_intersect = gpd.overlay(ldn_gdf, buffer_gdf, how='intersection')
    return buffer_img_intersect
def dfs_to_gdfs(dfs, stats_df, buffer_crs = 'EPSG:32630'):
    """
    For a given dataframe with ground truths/detections in yolo detection format, including the image paths, return geodataframe.
    """
    print('Using CRS : ', buffer_crs, ' dataset geodataframes.')
    img_paths = stats_df['img_paths'].tolist()
    # print('Warning, reading img_paths from glob, not stats_df')
    # imgs_path = '/content/drive/My Drive/George_Birchenough/04_London_Datasets/london_8bit_combined/london_8bit_ACOMP_30cm/images_RGB'
    # img_paths = Path(imgs_path).rglob('*.tif')
    gdf_out = pd.DataFrame()
    for img_path in img_paths:
        img_name = str ( Path( img_path ).stem )
        # Rows of this image only; df is georeferenced in place by df_to_gdf,
        # so keep an untouched copy (df_in) for the output attributes.
        df = dfs.loc[ dfs['chip'] == img_name ].reset_index(drop=True).copy()
        df_in = df.copy()
        gdf_in = df_to_gdf(img_path, df, buffer_crs)
        gdf = gpd.GeoDataFrame(df_in, geometry = gdf_in.geometry)
        gdf_out = pd.concat([gdf_out, gdf])
    gdf_out.reset_index(drop=True, inplace = True)
    return gdf_out
def df_to_gdf(img_path, df, buffer_crs):
    """ For a given image, and its corresponding labels in a dataframe, output a geodataframe.
    """
    # NOTE(review): when df is non-empty but img_path is None this function
    # falls through and implicitly returns None — confirm callers never hit that.
    if df.shape[0] == 0:
        return gpd.GeoDataFrame()
    elif img_path is not None:
        with rio.open(img_path) as src:
            crs = src.crs
            # Convert pixel bbox corners to map coordinates via the image transform.
            df.xmin, df.ymin = rio.transform.xy(src.transform, df.ymin, df.xmin)
            df.xmax, df.ymax = rio.transform.xy(src.transform, df.ymax, df.xmax)
        gdf = gpd.GeoDataFrame( df,
            geometry=[sp.geometry.Polygon([(r.xmin, r.ymin), (r.xmax, r.ymin), (r.xmax, r.ymax), (r.xmin, r.ymax)]) for r in
            df.itertuples()], crs=crs)
        # Reproject into the common buffer CRS.
        gdf = gdf.to_crs(buffer_crs)
        return gdf
def gdf_to_shps(gdf, outputs_path):
    """Write one ESRI shapefile per image ('chip') contained in *gdf* into *outputs_path*."""
    out_dir = Path(outputs_path)
    out_dir.mkdir(exist_ok=True, parents=True)
    for chip_name, chip_rows in gdf.groupby('chip'):
        chip_rows.to_file((out_dir / chip_name).with_suffix('.shp'))
def dets_dfs_to_shps(dets_dfs, stats_df, outputs_path=None, crs=None):
    """
    For a given folder of yolo detections, and a matfile containing the details of the images including the image paths,
    loop through the paths and run yolo_to_shp.
    """
    print("Writing shapefiles...")
    Path(outputs_path).mkdir(exist_ok=True, parents=True)
    img_paths = stats_df['img_paths'].tolist()
    for img_path in img_paths:
        # One output shapefile per image, named after the image stem.
        out_path = Path(outputs_path, Path(img_path).stem).with_suffix('.shp')
        img_name = str ( Path( img_path ).stem )
        dets_df = dets_dfs.loc[ dets_dfs['chip'] == img_name ].reset_index().copy()
        dets_df_to_shps(img_path, dets_df, out_path, crs)
def dets_df_to_shps(img_path, dets_df, out_path, crs=None):
    """Georeference one image's detections and write them to *out_path* as a shapefile.

    Pixel bbox corners are converted to map coordinates via the image's affine
    transform; scoring columns (class, confidence, true_pos, IoU) are carried over.
    """
    if dets_df.shape[0] == 0:
        # Fix: previously `return ValueError` handed the exception *class* back
        # to the (ignoring) caller; an empty frame simply has nothing to write.
        return
    if img_path is not None:
        with rio.open(img_path) as src:
            crs = src.crs
            dets_df.xmin, dets_df.ymin = rio.transform.xy(src.transform, dets_df.ymin, dets_df.xmin)
            dets_df.xmax, dets_df.ymax = rio.transform.xy(src.transform, dets_df.ymax, dets_df.xmax)
        dets_gdf = gpd.GeoDataFrame(
            geometry=[sp.geometry.Polygon([(r.xmin, r.ymin), (r.xmax, r.ymin), (r.xmax, r.ymax), (r.xmin, r.ymax)]) for r in
                      dets_df.itertuples()], crs=crs)
        dets_gdf['object_class_id'] = dets_df['class_id']
        dets_gdf['confidence'] = dets_df['confidence']
        dets_gdf['true_pos'] = dets_df['true_pos']
        dets_gdf['IoU'] = dets_df['IoU']
        dets_gdf.to_file(out_path)
def yolo_dets_to_df(yolos_path):
    """Load every yolo-format detection .txt under *yolos_path* into one dataframe."""
    print("Loading detections into a dataframe ...")
    combined = pd.DataFrame()
    for txt_file in Path(yolos_path).glob('*.txt'):
        combined = pd.concat([combined, yolo_det_to_df(Path(txt_file))])
    return combined
def yolo_det_to_df(yolo_path, headers=('xmin', 'ymin', 'xmax', 'ymax', 'class_id', 'confidence', 'det_id')):
    """Read one yolo-format detection .txt into a dataframe.

    Columns follow *headers*; a 'chip' column holding the file stem is appended.
    Fix: the default *headers* was a mutable list (shared across calls) — it is
    now a tuple, converted per call for read_csv.
    """
    try:
        dets_df = pd.read_csv(yolo_path, names=list(headers), delim_whitespace=True)
    except FileNotFoundError:
        # NOTE(review): kept for compatibility — callers receive the exception
        # *class* instead of a dataframe; consider raising instead.
        return FileNotFoundError
    dets_df['chip'] = Path(yolo_path).stem
    return dets_df
def vectorised_iou(bboxes1, bboxes2):
    """Pairwise IoU between two arrays of [xmin, ymin, xmax, ymax] boxes.

    Boxes are treated as inclusive pixel ranges (hence the +1 terms).
    Returns an N x M matrix for inputs of length N and M.
    """
    ax1, ay1, ax2, ay2 = np.split(bboxes1, 4, axis=1)
    bx1, by1, bx2, by2 = np.split(bboxes2, 4, axis=1)
    # Corners of every pairwise intersection rectangle (broadcast N x M).
    ix1 = np.maximum(ax1, bx1.T)
    iy1 = np.maximum(ay1, by1.T)
    ix2 = np.minimum(ax2, bx2.T)
    iy2 = np.minimum(ay2, by2.T)
    # Clamp at zero so disjoint boxes contribute no intersection area.
    inter = np.maximum(ix2 - ix1 + 1, 0) * np.maximum(iy2 - iy1 + 1, 0)
    area_a = (ax2 - ax1 + 1) * (ay2 - ay1 + 1)
    area_b = (bx2 - bx1 + 1) * (by2 - by1 + 1)
    return inter / (area_a + area_b.T - inter)
def get_true_pos( dets_df, targs_df,iou_thres ):
    """ Given dataframe for detections, targets (ground truths) and iou threshold, return dataframes with true positives highlighted.

    Adds to both frames: true_pos (0/1), true_pos_id (links a detection to its
    matched ground truth) and IoU (rounded to 2 dp). Matching is greedy in
    descending detection confidence, at most one ground truth per detection.
    """
    print("Comparing each BBox from detections, with each Bbox from ground-truths, IoU Threshold = ",iou_thres)
    dets_df2=pd.DataFrame() # Initialise dataframes for storing new data.
    targs_df2=pd.DataFrame()
    grouped = targs_df.groupby('chip')
    iterator=0
    for i, (name, group) in enumerate(grouped):
        # print("Image ",i+1," of ", len(grouped))
        gt_df = targs_df.loc[ targs_df['chip'] == name, : ].copy() # Ground truth data from the targets .mat file, for the current image.
        gt_df.reset_index(inplace=True)
        p_df = dets_df.loc[ dets_df['chip'] == name, : ].sort_values('confidence', ascending=False).copy() # Detection data, in descending confidence.
        p_df.reset_index(inplace=True)
        # print(p_df.head())
        bboxes_p = np.array(p_df.loc[ : , ['xmin', 'ymin', 'xmax', 'ymax'] ] )
        bboxes_gt = np.array(gt_df.loc[ :, ['xmin', 'ymin', 'xmax', 'ymax'] ] )
        iou = vectorised_iou (bboxes_p, bboxes_gt) # Compute iou matrix.
        # Find values in the IoU matrix which are above the threshold. Store the indices in an array. Row[0] is the indice of the prediction, row[1] is the ground truth.
        inds = np.array(np.nonzero(iou>iou_thres)).transpose()
        print("True pos with IoU over ", iou_thres, " found = ", len(inds))
        # Fix: `len(inds) is not 0` tested object *identity* against the int 0,
        # which only works via CPython small-int interning; use a value comparison.
        if len(inds) != 0:
            inds = pd.DataFrame(inds).drop_duplicates(0,keep='first').drop_duplicates(1,keep='first') # Drop duplicate values, where a ground truth is attributed to multiple predictins. Keep the more confident prediction.
            # print(" Dropped duplicates, now = ",len(inds))
            for j in range(len(inds)):
                iterator +=1
                p_df.loc[ inds.iloc[j,0].tolist() , 'true_pos' ] = 1 # Assign true_pos value of 1 in the dataframes for each true_pos indice
                gt_df.loc[ inds.iloc[j,1].tolist() , 'true_pos' ] = 1
                p_df.loc[ inds.iloc[j,0].tolist() , 'true_pos_id' ] = iterator # Assign unique value to each true_pos value to link predictions to ground truths
                gt_df.loc[ inds.iloc[j,1].tolist() , 'true_pos_id' ] = iterator
                p_df.loc[ inds.iloc[j,0].tolist() , 'IoU' ] = np.around( iou[inds.iloc[j,0],inds.iloc[j,1]], decimals = 2 ) # Assign IoU value to each prediction true_pos.
                gt_df.loc[ inds.iloc[j,1].tolist() , 'IoU' ] = np.around( iou[inds.iloc[j,0],inds.iloc[j,1]], decimals = 2 ) # Same IoU for matching grount truth pair.
            dets_df2 = pd.concat([dets_df2, p_df]) # Add each image back into a main dataframe.
            targs_df2 = pd.concat([targs_df2, gt_df])
        else:
            dets_df2 = pd.concat([dets_df2, p_df]) # If there are no true_pos, still add the image back into the dataframe.
            targs_df2 = pd.concat([targs_df2, gt_df])
    dets_df = dets_df2.set_index('index').sort_index().fillna(0) # Sort and index dataframes as they were before, and fill NaNs with 0s.
    targs_df = targs_df2.set_index('index').sort_index().fillna(0)
    return dets_df, targs_df
def AP_images(dets_df, targs_df, stats_df, opt):
    """Given detections and targets dataframes, with true positive counts, compute average precision via trapezium-rule integration of the cumulative recall/precision graph.
    Return list of average precision, precision and recall for each image.
    """
    # NOTE(review): an image with fewer than 2 detections never assigns x2/y2,
    # so precision.append(y2) below would raise NameError (or reuse a stale
    # value from the previous image) — confirm inputs always have >= 2 dets.
    print(" Computing integrals to attain Average Precision ... ")
    average_precision=[]
    precision=[]
    recall=[]
    dataset=[]
    chips=[]
    gt_count=[]
    det_count=[]
    true_pos_count=[]
    move_stats_df = pd.DataFrame()
    # Loop through the images within the dataset.
    grouped = targs_df.groupby('chip')
    for i, (name, group) in enumerate(grouped):
        # print("Image ",i+1," of ", len(grouped))
        gt_df = targs_df.loc[ targs_df['chip'] == name, : ].copy() # Ground truth data from the targets .mat file, for the current image.
        p_df = dets_df.loc[ dets_df['chip'] == name, : ].sort_values('confidence', ascending=False).copy() # Detection data, in descending confidence.
        p_df.reset_index(inplace=True)
        grd_true = len(gt_df)
        cum_rec=[]
        cum_pre=[]
        d_integral=[]
        # Loop through each prediction in order of descending confidence.
        # (this inner `i` intentionally shadows the outer image index)
        for i, pred in enumerate(p_df.index.tolist()):
            # Compute cumulative number of true-pos.
            cum_true = sum( p_df.loc[0:pred, 'true_pos'] )
            # Compute cumulative precision = number of true_pos / number of predictions so far.
            cum_pre.append(cum_true/(i+1))
            # Compute cumulative recall = number of true_pos so far / total number of ground truths.
            cum_rec.append(cum_true/grd_true)
            if i == 0: # Initialise values for integration.
                y1 = cum_pre[0]
                x1 = cum_rec[0]
            if i > 0:
                # Integration of { precision = f(recall) } d (recall) , via trapezium rule.
                y2 = cum_pre[i] # Height of right edge of trapezium
                x2 = cum_rec[i] # Lower right corner of trapezium
                dx = x2 - x1 # Width of trapezium
                d_integral.append(0.5 * (y1 + y2) * dx) # Area of trapezium
                y1 = y2 # Height of left edge of trapezium
                x1 = x2 # Lower left corner of trapezium
        average_precision.append(sum(d_integral))
        precision.append(y2) # Final values of cumulative variables is the actual variable.
        recall.append(x2)
        gt_count.append(grd_true)
        det_count.append(len(p_df))
        true_pos_count.append(cum_true)
        chips.append(name) # Store name of image in a parallel list.
        dataset_name = stats_df.loc[stats_df.index == name, 'img_paths'].tolist()[0].split('/')[-3] # Work out which dataset the image came from.
        dataset.append(dataset_name)
        # Get movement stats
        gt_df = targs_df.loc[ targs_df['chip'] == name, : ].copy() # Ground truth data from the targets .mat file, for the current image.
        p_df = dets_df.loc[ dets_df['chip'] == name, : ].copy()
        move_stats_df_right = get_moving_true_pos(p_df, gt_df, opt)
        move_stats_df = pd.concat([move_stats_df, move_stats_df_right])
    # Store all the computed information in a dataframe and return it.
    scores_dict = {'Dataset': dataset, \
                   'Average Precision': average_precision, \
                   'Precision': precision, \
                   'Recall': recall, \
                   'object_count': gt_count, \
                   'detection_count': det_count, \
                   'gt_count': gt_count, \
                   'true_pos_count': true_pos_count, \
                   }
    score_df = pd.DataFrame(scores_dict, index = chips)
    # Convert columns to multiindex, with zeros as the upper level, to preserve move_stats_df multiindex levels when joining the dataframes:
    score_df.columns = [np.zeros(len(score_df.columns)), score_df.columns]
    move_stats_df.index = chips
    score_df = pd.concat([score_df, move_stats_df], axis = 1)
    return score_df
def AP_aggregate(dets_df, targs_df, opt):
    """Compute precision/recall/average-precision for the entire test set.

    Average precision is obtained via trapezium-rule integration of the
    cumulative precision-vs-recall curve, walking the detections in order of
    descending confidence (same scheme as the per-image AP computation, but
    over every image at once).

    Args:
        dets_df: detections with 'confidence' and 'true_pos' columns.
        targs_df: ground-truth boxes; only its length is used here, and it is
            forwarded to get_moving_true_pos for the movement statistics.
        opt: options namespace (targets path, classes, ...).

    Returns:
        A single-row dataframe (indexed by the targets-file stem) holding the
        aggregate scores joined with the movement-class counts. Columns use a
        two-level index so they align with the per-image scores dataframe.
    """
    gt_df = targs_df.copy()
    p_df = dets_df.sort_values('confidence', ascending=False).copy()
    p_df.reset_index(inplace=True)
    grd_true = len(gt_df)  # Total number of ground truths in targets matfile.
    cum_rec = []
    cum_pre = []
    d_integral = []
    # Loop through each prediction in order of descending confidence.
    for i, pred in enumerate(p_df.index.tolist()):
        # Cumulative number of true positives so far (.loc slice is inclusive).
        cum_true = sum(p_df.loc[0:pred, 'true_pos'])
        # Cumulative precision = number of true_pos / number of predictions so far.
        cum_pre.append(cum_true / (i + 1))
        # Cumulative recall = number of true_pos so far / total ground truths.
        # BUGFIX: guard against an empty targets file (ZeroDivisionError).
        cum_rec.append(cum_true / grd_true if grd_true else 0.0)
        if i == 0:  # Initialise values for integration.
            y1 = cum_pre[0]
            x1 = cum_rec[0]
        else:
            # Integration of { precision = f(recall) } d(recall), trapezium rule.
            y2 = cum_pre[i]  # Height of right edge of trapezium
            x2 = cum_rec[i]  # Lower right corner of trapezium
            dx = x2 - x1  # Width of trapezium
            d_integral.append(0.5 * (y1 + y2) * dx)  # Area of trapezium
            y1 = y2  # Carry forward as height of the next left edge
            x1 = x2  # Carry forward as the next lower left corner
    # Final values of the cumulative series are the overall precision/recall.
    # BUGFIX: with zero or one prediction the original read y2/x2 before
    # assignment and raised NameError; fall back to defined values instead.
    if len(p_df) == 0:
        cum_true = 0
        precision, recall = 0.0, 0.0
    elif len(p_df) == 1:
        precision, recall = cum_pre[0], cum_rec[0]
    else:
        precision, recall = y2, x2
    average_precision = sum(d_integral)  # sum([]) == 0 when < 2 predictions
    gt_count = grd_true
    true_pos_count = cum_true
    move_stats_df = get_moving_true_pos(dets_df, targs_df, opt)
    score = {'Dataset': 'Aggregate',
             'Average Precision': average_precision,
             'Precision': precision,
             'Recall': recall,
             'object_count': len(targs_df),
             'detection_count': len(dets_df),
             'gt_count': gt_count,
             'true_pos_count': true_pos_count,
             }
    score_df = pd.DataFrame(score, index=[Path(opt.targets).stem])
    # Zeros as the upper column level keep the multiindex compatible with the
    # movement-stats columns when the two frames are joined.
    score_df.columns = [np.zeros(len(score_df.columns)), score_df.columns]
    # (Removed a leftover debug print of move_stats_df here.)
    move_stats_df.index = score_df.index
    score_df = pd.concat([score_df, move_stats_df], axis=1)
    return score_df
def create_dfs(targets_path):
    """
    Load ground-truth annotations from a .mat targets file.

    Returns:
        targs_df: one row per bounding box, with columns
            ['class_id', 'xmin', 'ymin', 'xmax', 'ymax', 'img_id', 'chip'].
        stats_df: one row per unique image (indexed by chip name), carrying the
            image path and the number of ground-truth objects in that image.
    """
    mat = scipy.io.loadmat(targets_path, squeeze_me=True)
    bbox_columns = ['class_id', 'xmin', 'ymin', 'xmax', 'ymax']
    targs_df = pd.DataFrame(mat['targets'], columns=bbox_columns)
    targs_df['img_id'] = mat['id'].astype(int)
    targs_df['chip'] = mat['chips']
    # Per-image table: image paths keyed by unique chip name.
    stats_df = pd.DataFrame([mat['img_paths'], mat['uchips']]).transpose()
    stats_df = stats_df.rename(columns={0: 'img_paths', 1: 'uchips'}).set_index('uchips')
    # Attach the ground-truth box count for each image.
    per_image_counts = targs_df.groupby('chip').chip.count()
    stats_df = pd.concat([stats_df, per_image_counts], axis=1).rename(columns={'chip': 'object_count'})
    return targs_df, stats_df
def get_scores( dets_df, targs_df, stats_df, out_dir, opt, buffer_type='None' ):
    """
    Compute per-image and aggregate precision/recall/AP scores, annotate them
    with the run parameters, print them, and save them as
    <targets-stem>.txt in out_dir. Returns the combined scores dataframe
    (aggregate row first, then one row per image).
    """
    print(" Computing integrals to attain Average Precision ... ")
    aggregate_scores = AP_aggregate(dets_df, targs_df, opt)
    per_image_scores = AP_images(dets_df, targs_df, stats_df, opt)
    scores_df = pd.concat([aggregate_scores, per_image_scores])
    # Ancillary run parameters recorded alongside every row.
    run_info = (
        ('weights', Path(opt.weights).stem),
        ('nms-thres', opt.nms_thres),
        ('conf_thres', opt.conf_thres),
        ('name', Path(opt.targets).stem),
        ('iou-thres', opt.iou_thres),
        ('buffer_type', buffer_type),
        ('classes', opt.classes),
    )
    for column, value in run_info:
        scores_df[column] = value
    print(scores_df.to_string())
    scores_df.to_csv(Path(out_dir, Path(opt.targets).stem).with_suffix('.txt'))
    return scores_df
def get_total_true_pos(dets_gdf, targs_gdf):
    """
    Count true positives, detections and ground truths (including 'ghost'
    partners) for road-buffered frames, and derive precision/recall.

    The union of the unique 'true_pos_id's from both frames is used so that
    every true pair is counted when either box touches a road buffer, even if
    its partner does not. true_pos_id == 0 marks negatives and is excluded.

    Returns:
        (total_true_pos, total_dets, total_gts, precision, recall)
    """
    det_ids = dets_gdf.true_pos_id.unique()  # Includes the 0 sentinel (negatives)
    targ_ids = targs_gdf.true_pos_id.unique()  # Includes the 0 sentinel (negatives)
    all_ids = np.unique(np.concatenate((det_ids, targ_ids), axis=0))
    total_true_pos = len(all_ids) - 1  # -1 removes the id-0 sentinel.
    # A true pair may have only one side inside the buffer; the missing side
    # was dropped during the intersection and must be counted back in as a
    # 'ghost': one ghost detection per unmatched ground truth and vice versa.
    ghost_dets = total_true_pos - (len(det_ids) - 1)  # -1: det_ids includes 0
    ghost_gts = total_true_pos - (len(targ_ids) - 1)  # -1: targ_ids includes 0
    total_dets = len(dets_gdf) + ghost_dets
    total_gts = len(targs_gdf) + ghost_gts
    prec = total_true_pos / total_dets
    rec = total_true_pos / total_gts
    return total_true_pos, total_dets, total_gts, prec, rec
def get_road_scores(road_dets, road_targs, stats_df, buffer_type, out_dir, opt):
    """
    Compute recall and precision for the road-buffered geodataframes (Average
    Precision is not supported here; that column is filled with 'NaN').

    Produces a scores dataframe with one row per image plus an 'Aggregate'
    row, prints it, and saves it as <targets-stem>_<buffer_type>.txt.

    Args:
        road_dets / road_targs: detections / ground truths already intersected
            with a road-buffer layer; must carry 'true_pos', 'true_pos_id' and
            'chip' columns.
        stats_df: per-image stats indexed by chip name, with 'img_paths'.
        buffer_type: label of the buffer layer (e.g. 'major', 'minor').
        out_dir: directory for the output .txt file.
        opt: options namespace (targets, weights, thresholds, classes).
    """
    # The buffer intersection duplicates a bbox whenever it touches more than
    # one road polygon; de-duplicate both frames first.
    # True positives: each true_pos_id must appear only once.
    pos_dets = road_dets.loc[road_dets['true_pos'] == 1].drop_duplicates('true_pos_id').copy()
    # Negatives: the original unique index was inherited into column[0] during
    # the overlay, so de-duplicate on that column.
    neg_dets = road_dets.loc[road_dets['true_pos'] == 0].drop_duplicates(road_dets.columns[0]).copy()
    road_dets = pd.concat([pos_dets, neg_dets]).reset_index(drop=True)
    # Same process for targets.
    pos_targs = road_targs.loc[road_targs['true_pos'] == 1].drop_duplicates('true_pos_id').copy()
    # BUGFIX: negatives were previously de-duplicated on road_dets.columns[0];
    # use the targets frame's own first column.
    neg_targs = road_targs.loc[road_targs['true_pos'] == 0].drop_duplicates(road_targs.columns[0]).copy()
    road_targs = pd.concat([pos_targs, neg_targs]).reset_index(drop=True)
    precision = []  # Per-image accumulators.
    recall = []
    chips = []
    dataset = []
    average_precision = []
    object_count = []
    det_count = []
    gt_count = []
    true_pos_count = []
    move_stats_df = pd.DataFrame()
    ## Loop through the images within the dataset. ##
    for name, _group in road_targs.groupby('chip'):
        dets_gdf = road_dets.loc[road_dets['chip'] == name, :].copy()  # Detections for this image.
        targs_gdf = road_targs.loc[road_targs['chip'] == name, :].copy()  # Ground truths for this image.
        total_true_pos, total_dets, total_gts, prec, rec = get_total_true_pos(dets_gdf, targs_gdf)
        precision.append(prec)
        recall.append(rec)
        average_precision.append('NaN')  # No support for the integral in road buffers yet.
        chips.append(name)
        # Work out which dataset the image came from (3rd path component from the end).
        dataset_name = stats_df.loc[stats_df.index == name, 'img_paths'].tolist()[0].split('/')[-3]
        dataset.append(dataset_name)
        object_count.append(len(targs_gdf))  # Ground truths inside the road buffer.
        det_count.append(total_dets)  # Detections (including 'ghost' children).
        gt_count.append(total_gts)  # Ground truths (including 'ghost' parents).
        true_pos_count.append(total_true_pos)  # True positives (union of dets+gts).
        # Movement-attribution stats for this image.
        move_stats_df = pd.concat([move_stats_df, get_moving_true_pos(dets_gdf, targs_gdf, opt)])
    # Store the stats for each image in a dataframe.
    # BUGFIX: 'object_count' previously repeated the dataset-wide total on
    # every image row; use the per-image counts collected above instead.
    scores_dict_1 = {'Dataset': dataset,
                     'Average Precision': average_precision,
                     'Precision': precision,
                     'Recall': recall,
                     'object_count': object_count,
                     'detection_count': det_count,
                     'gt_count': gt_count,
                     'true_pos_count': true_pos_count,
                     }
    scores_df = pd.DataFrame(scores_dict_1, index=chips)
    move_stats_df.index = chips
    ## Now compute stats for the aggregate test dataset. ##
    total_true_pos, total_dets, total_gts, agg_precision, agg_recall = get_total_true_pos(road_dets, road_targs)
    # Single-row dataframe for the aggregate scores, joined above the
    # per-image rows. ('NaN' spelling made consistent with the per-image rows.)
    scores_dict_2 = {'Dataset': 'Aggregate',
                     'Average Precision': 'NaN',
                     'Precision': agg_precision,
                     'Recall': agg_recall,
                     'object_count': len(road_targs),
                     'detection_count': total_dets,
                     'gt_count': total_gts,
                     'true_pos_count': total_true_pos,
                     }
    score_agg_df = pd.DataFrame(scores_dict_2, index=[(Path(opt.targets).stem + '_' + buffer_type)])
    move_stats_agg_df = get_moving_true_pos(road_dets, road_targs, opt)
    move_stats_agg_df.index = [(Path(opt.targets).stem + '_' + buffer_type)]
    scores_df = pd.concat([score_agg_df, scores_df])
    move_stats_df = pd.concat([move_stats_agg_df, move_stats_df])
    # Zeros as the upper column level preserve the movement-stats multiindex on join.
    scores_df.columns = [np.zeros(len(scores_df.columns)), scores_df.columns]
    scores_df = pd.concat([scores_df, move_stats_df], axis=1)
    # Add ancillary information from the detect.py parser.
    scores_df['weights'] = Path(opt.weights).stem
    scores_df['nms-thres'] = opt.nms_thres
    scores_df['conf_thres'] = opt.conf_thres
    scores_df['name'] = (Path(opt.targets).stem + '_' + buffer_type)
    scores_df['iou-thres'] = opt.iou_thres
    scores_df['buffer_type'] = buffer_type
    scores_df['classes'] = opt.classes
    # Print and save data, appending the filename with the buffer_type for
    # easy identification.
    print(scores_df.to_string())
    scores_df.to_csv(Path(out_dir, Path(opt.targets).stem + '_' + buffer_type).with_suffix('.txt'))
    return scores_df
def get_moving_true_pos(dets_df, targs_df, opt):
    """
    Count ground truths, detections, true detections, and true detections
    with correct movement attribution, per movement class.

    The numeric class_id is mapped to a movement label ('parked'/'static'/
    'moving') according to opt.classes; true detections and true targets are
    then aligned on their shared true_pos_id to decide whether the movement
    label was predicted correctly.

    Side effects: adds 'movement_class' and 'true_movement' columns to both
    dets_df and targs_df in place.

    Returns:
        A one-row dataframe with a two-level column index: level 0 in
        {'Ground Truth', 'Detection', 'True Detection',
        'True Detection with True Movement'}; level 1 in
        {'Parked', 'Static', 'Moving'}. If opt.classes has no known movement
        map, a zero-filled row with the same columns is returned.
        (BUGFIX: the original returned a (0, 0, 0, 0) tuple in that case,
        which crashed every caller that pd.concat's the result.)
    """
    # Two-level output columns: 4 count kinds x 3 movement classes.
    kind_level = np.repeat(['Ground Truth', 'Detection', 'True Detection',
                            'True Detection with True Movement'], 3)
    move_level = np.tile(['Parked', 'Static', 'Moving'], 4)
    # class_id -> movement label, per supported class count.
    class_maps = {
        6: ['static', 'moving'] * 3,
        9: ['parked', 'static', 'moving'] * 3,
        3: ['parked', 'static', 'moving'],
    }
    if opt.classes not in class_maps:
        print('Number of classes has no movement class map. Returning 0 for movement stats.')
        zero_df = pd.DataFrame(columns=[kind_level, move_level])
        zero_df.loc[0] = [0] * 12
        return zero_df
    # Map each numeric class id onto its movement label, in place.
    for num, movement_class in enumerate(class_maps[opt.classes]):
        dets_df.loc[dets_df['class_id'] == num, 'movement_class'] = movement_class
        targs_df.loc[targs_df['class_id'] == num, 'movement_class'] = movement_class
    # Align true detections and true targets on their shared true_pos_id, and
    # mark whether the predicted movement label matches the ground truth.
    true_dets = dets_df.loc[dets_df['true_pos'] == 1].copy().reset_index().set_index('true_pos_id').sort_index()
    true_targs = targs_df.loc[targs_df['true_pos'] == 1].copy().reset_index().set_index('true_pos_id').sort_index()
    true_dets['true_movement'] = true_dets.eq(true_targs).movement_class
    true_targs['true_movement'] = true_dets.eq(true_targs).movement_class
    # Restore the original row indices and write the flags back in place.
    true_dets = true_dets.reset_index(drop=False).set_index('index')
    true_targs = true_targs.reset_index(drop=False).set_index('index')
    dets_df.loc[true_dets.index, 'true_movement'] = true_dets['true_movement']
    targs_df.loc[true_targs.index, 'true_movement'] = true_targs['true_movement']
    # Counts in output order: ground truths, detections, true detections, ...
    counts = []
    for frame in (targs_df, dets_df, true_dets):
        for label in ('parked', 'static', 'moving'):
            counts.append(sum(frame.movement_class == label))
    # ... then true detections whose movement label is also correct
    # (NaN true_movement on non-true-positives compares False, as intended).
    for label in ('parked', 'static', 'moving'):
        counts.append(sum(dets_df.loc[dets_df['movement_class'] == label].true_movement == True))
    df = pd.DataFrame(columns=[kind_level, move_level])
    df.loc[0] = counts
    return df
| {"/detect_loop.py": ["/utils/gb_utils.py"], "/buffer_movement_counts.py": ["/gb_scoring.py"]} |
69,762 | georgebirch/yolov3-overhead | refs/heads/main | /detect_loop.py | from detect import detect, ConvNetb
import time
from sys import platform
from models import *
from utils.datasets import *
from utils.utils import *
from pathlib import Path
from datetime import datetime
from utils.gb_utils import *
from utils.gb_scoring import *
# Path to a csv file to store the outputs of this script in. A fresh,
# timestamped file is created every time this script is run.
results_path = str('./results_' + datetime.now().strftime('%Y-%m-%d_%H-%M-%S') + '.csv')
# Parameter grids swept by the detection loop below: NMS threshold,
# confidence threshold, and model weight checkpoints.
nms_thres_list = [0.1, 0.2, 0.3, 0.4, 0.5]
conf_thres_list = [0.990, 0.992, 0.994, 0.996, 0.998]
weights_list = ['/mnt/Data/AQM_training/01_development/weights/backup_9_class_london_10folds_2_200.pt', '/mnt/Data/AQM_training/01_development/weights/backup_9_class_london_10folds_2_400.pt', '/mnt/Data/AQM_training/01_development/weights/backup_9_class_london_10folds_2_600.pt', \
    '/mnt/Data/AQM_training/01_development/weights/backup_9_class_london_10folds_2_800.pt', '/mnt/Data/AQM_training/01_development/weights/latest_9_class_london_10folds_2.pt']
# 'weights/backup_9_class_london_10folds_2_200.pt', 'weights/backup_9_class_london_10folds_2_400.pt', 'weights/backup_9_class_london_10folds_2_600.pt', 'weights/backup_9_class_london_10folds_2_800.pt', 'weights/latest_9_class_london_10folds_2.pt' ]
# Path to the targets matfile - careful that this does not contain imagery
# used to create any of the above weights files (held-out test set).
targets_path = 'utils/london_10folds_9class_test_2.mat'
# Create dataframes from the contents of the matfile.
targs_df, stats_df = create_dfs(targets_path)
# Paths to each road-buffer shapefile (major/minor roads, 0.9-quantile and
# mean-width buffers).
major_roads_buffer_shp_path = '/mnt/server/AQM/AQM/01_Vehicle_Detection/Europe/London/02_City_Data/02_Roads/02_Buffered/02_Quantile_Derived_Buffers/combined_buffer_q_0.9_MajorRoads.shp'
minor_roads_buffer_shp_path = '/mnt/server/AQM/AQM/01_Vehicle_Detection/Europe/London/02_City_Data/02_Roads/02_Buffered/02_Quantile_Derived_Buffers/combined_buffer_q_0.9_Minor_unclipped.shp'
roads_buffer_q_90_shp_path = '/mnt/server/AQM/AQM/01_Vehicle_Detection/Europe/London/02_City_Data/02_Roads/02_Buffered/02_Quantile_Derived_Buffers/combined_buffer_q_0.9_unclipped.shp'
roads_buffer_mean_shp_path = '/mnt/server/AQM/AQM/01_Vehicle_Detection/Europe/London/02_City_Data/02_Roads/02_Buffered/02_Quantile_Derived_Buffers/0.5/measured_buffer_mean.shp'
# Intersect each road buffer with the boundaries of each image, to reduce
# time when overlaying bounding boxes.
major_buffer_img_intersect = img_roads_intersect ( stats_df, major_roads_buffer_shp_path )
minor_buffer_img_intersect = img_roads_intersect ( stats_df, minor_roads_buffer_shp_path )
buffer_q_90_img_intersect = img_roads_intersect ( stats_df, roads_buffer_q_90_shp_path )
buffer_mean_img_intersect = img_roads_intersect ( stats_df, roads_buffer_mean_shp_path )
# Create a geodataframe from the ground truth objects, with a unique ID per bbox.
targs_gdf = dfs_to_gdfs(targs_df, stats_df, major_buffer_img_intersect.crs)
targs_gdf['bbox_id'] = targs_gdf.index
# Overlay the ground truth bboxes onto the road buffers and drop any
# duplicates arising when a bbox touches more than one road buffer polygon.
road_targs_maj = gpd.overlay(targs_gdf, major_buffer_img_intersect, how='intersection').drop_duplicates('bbox_id')
road_targs_min = gpd.overlay(targs_gdf, minor_buffer_img_intersect, how='intersection').drop_duplicates('bbox_id')
road_targs_q90 = gpd.overlay(targs_gdf, buffer_q_90_img_intersect, how='intersection').drop_duplicates('bbox_id')
road_targs_mean = gpd.overlay(targs_gdf, buffer_mean_img_intersect, how='intersection').drop_duplicates('bbox_id')
class Options:
    """
    Container for detection parameters, mimicking the argparse namespace that
    detect.py would normally build from the command line.
    """
    def __init__(self, targets, weights, conf_thres, nms_thres, classes, cfg, names):
        """Store the sweep parameters plus fixed defaults as instance attributes."""
        self.__dict__.update(
            # CLI-equivalent arguments for this run.
            targets=targets,
            weights=weights,
            conf_thres=conf_thres,
            nms_thres=nms_thres,
            classes=classes,
            cfg=cfg,
            names=names,
            # Fixed defaults shared by every run of the parameter sweep.
            source=targets,
            batch_size=1,
            output='./output',
            img_size=32 * 51,
            iou_thres=0.25,
            plot_flag=False,
            secondary_classifier=False,
        )
# Sweep every combination of NMS threshold, confidence threshold and weights
# checkpoint, scoring each run against both the un-buffered and the four
# road-buffered target sets.
for nms_thres in nms_thres_list:
    for conf_thres in conf_thres_list:
        for i, weights in enumerate(weights_list):
            kwargs1 = {'conf_thres':conf_thres, 'nms_thres':nms_thres, 'weights':weights, 'targets':targets_path, }
            kwargs2 = {'cfg':'cfg/c9_a30symmetric.cfg', 'names':'data/ldn_9.names', 'classes':9 }
            opt = Options(**kwargs1, **kwargs2)
            # opt is an object in the same format as opt in detect.py
            print('Commencing detections with the following parameters:')
            print(kwargs1)
            torch.cuda.empty_cache()
            out_dir = detect(opt) # Run detections as normal
            torch.cuda.empty_cache()
            # Gather detections into a dataframe and convert into geodataframe.
            dets_df = yolo_dets_to_df(out_dir)
            dets_df.reset_index(inplace=True, drop=True)
            dets_gdf = dfs_to_gdfs(dets_df, stats_df, major_buffer_img_intersect.crs)
            # Overlay GT and detections bboxes to assign IoU, true_pos, and a unique true_pos_id.
            true_dets_gdf, true_targs_gdf = get_true_pos(dets_gdf, targs_gdf, opt.iou_thres)
            # Compute all relevant statistics (including movement attribution)
            # and gather them in a 'scores' dataframe.
            scores_df = get_scores( true_dets_gdf, true_targs_gdf, stats_df, out_dir, opt )
            scores_df.loc[:, 'buffer_type'] = 'None'
            # Create new dataframes for the road buffered ground truths, including the true_pos information.
            # NOTE(review): these assignments rebind the module-level road_targs_*
            # frames, so later iterations filter against the previous iteration's
            # result — safe only if the bbox_id sets are stable across runs; confirm.
            road_targs_maj = true_targs_gdf.loc[ true_targs_gdf.bbox_id.isin(road_targs_maj.bbox_id) ]
            road_targs_min = true_targs_gdf.loc[ true_targs_gdf.bbox_id.isin(road_targs_min.bbox_id) ]
            road_targs_q90 = true_targs_gdf.loc[ true_targs_gdf.bbox_id.isin(road_targs_q90.bbox_id) ]
            road_targs_mean = true_targs_gdf.loc[ true_targs_gdf.bbox_id.isin(road_targs_mean.bbox_id) ]
            print('Overlaying detections onto each road buffer...')
            # Create new dataframes for the road buffered detections, including the true_pos information.
            road_dets_maj = gpd.overlay(true_dets_gdf, major_buffer_img_intersect, how='intersection')
            road_dets_min = gpd.overlay(true_dets_gdf, minor_buffer_img_intersect, how='intersection')
            road_dets_q90 = gpd.overlay(true_dets_gdf, buffer_q_90_img_intersect, how='intersection')
            road_dets_mean = gpd.overlay(true_dets_gdf, buffer_mean_img_intersect, how='intersection')
            # Compute all relevant statistics for the road buffered dataframes, including movement information.
            scores_df_maj = get_road_scores( road_dets_maj, road_targs_maj, stats_df, 'major', out_dir, opt )
            scores_df_min = get_road_scores( road_dets_min, road_targs_min, stats_df, 'minor', out_dir, opt )
            scores_df_q90 = get_road_scores( road_dets_q90, road_targs_q90, stats_df, '0.9 quantile', out_dir, opt )
            scores_df_mean = get_road_scores( road_dets_mean, road_targs_mean, stats_df, 'mean', out_dir, opt )
            # Join all statistics/counts into a single df:
            scores_all = pd.concat( [ scores_df, scores_df_maj, scores_df_min, scores_df_q90, scores_df_mean ] )
            # Add Datetime (the detection output folder is named by datetime).
            scores_all.loc[:, 'datetime'] = out_dir.split('/')[-1]
            # 'epochs' is the last '_'-separated token of the weights stem —
            # this might not be an actual epoch count.
            scores_all.loc[:, 'epochs'] = str(Path(opt.weights).stem).split('_')[-1]
            # Save scores into the detection folder.
            scores_all.to_csv(Path( out_dir, Path(opt.targets).stem + '_all' ).with_suffix('.txt'))
            # Append the main results file so that all results from every loop are stored in one place.
            with open(results_path, 'a') as f:
                scores_all.to_csv(f, header=f.tell()==0)
            # Output the shapefiles in a parallel folder to the .txt files; these are
            # identified in the main results spreadsheet via the datetime column.
            # NOTE(review): '\\shps' is a Windows path separator — verify on POSIX.
            gdf_to_shps(dets_gdf, out_dir+'\\shps' )
| {"/detect_loop.py": ["/utils/gb_utils.py"], "/buffer_movement_counts.py": ["/gb_scoring.py"]} |
69,763 | georgebirch/yolov3-overhead | refs/heads/main | /utils/gb_utils.py | from pathlib import Path
import itertools
import re
import os
import pandas as pd
import numpy as np
import cv2
import scipy.io
import glob
import shutil
from astropy.coordinates import get_sun, AltAz, EarthLocation
from astropy.time import Time
from pathlib import Path
import rasterio as rio
import geopandas as gpd
import pandas as pd
import shapely as sp
def shps_to_gdf(shapefiles_path, crs = 32630 ):
    """Collect every .shp under *shapefiles_path* into a single GeoDataFrame.

    Rows with missing geometry are dropped (with a warning), everything is
    re-projected to *crs* (default EPSG:32630), each row is tagged with its
    source filename in 'chip', and the text labels in 'Class'/'Move_2' are
    mapped to a numeric 'yolo_class' id:
        0: small parked   1: small static   2: small moving
        3: medium parked  4: medium static  5: medium moving
        6: large parked   7: large static   8: large moving
    """
    gdf = gpd.GeoDataFrame()
    for shp in Path(shapefiles_path).rglob('*.shp'):
        shp_path = Path(shp)
        raw = gpd.read_file(shp_path)
        if raw.crs == None:
            print('Warning, naive CRS')
        clean = raw.loc[raw.geometry.dropna().index]
        dropped = len(raw) - len(clean)
        if dropped:
            print(shp_path, ': Warning, NaNs present in geometry, removed ', dropped, ' rows.')
        clean = clean.to_crs(crs)
        clean['chip'] = shp_path.stem
        gdf = pd.concat([gdf, clean])
    # (size, movement) label pairs enumerate to the class ids listed above.
    label_pairs = itertools.product(['small', 'medium', 'large'], ['parked', 'static', 'moving'])
    for class_id, (size, movement) in enumerate(label_pairs):
        gdf.loc[(gdf['Class'] == size) & (gdf['Move_2'] == movement), 'yolo_class'] = class_id
    gdf.reset_index(drop=True, inplace=True)
    return gdf
def shps_to_yolos(shapefiles_path, imgs_path, outputs_path):
    """For a folder of shapefiles and a folder of matching images, convert
    each shapefile's labels to a yolo-format text file in *outputs_path*.

    The matching image may carry a different order number or band tag, so it
    is located by regex on the shared datetime and tile capture groups.
    Shapefiles with zero or more than one candidate image are skipped.
    """
    for shp in Path(shapefiles_path).glob('*.shp'):
        shp_path = Path(shp)
        print('shp_path:', shp_path)
        # Capture groups of interest from the shp name (datetime & tiles).
        pattern = re.compile('([^_]*)-[^_]*-[^_]*_[^_]*_[^_]*_[^_]*_(.*).shp')
        groups = pattern.findall(shp_path.name)[0]
        pattern_to_find = re.compile('' + groups[0] + '-[^_]*-[^_]*_[^_]*_[^_]*_[^_]*_' + groups[1] + '.tif')
        matches = [f for f in os.listdir(imgs_path) if pattern_to_find.match(f)]
        if len(matches) > 1:
            # BUGFIX: the original printed this warning but then processed the
            # first match anyway ('continue' only continued the file scan).
            print("More than one match! skipping")
            continue
        if not matches:
            print("No matches! Skipping")
            continue
        img_path = Path(imgs_path, matches[0])
        print('img_path:', img_path)
        out_path = Path(outputs_path, img_path.stem).with_suffix('.txt')
        print('out_path:', out_path)
        shp_to_yolo(shp, img_path, out_path)
def shp_to_yolo(shapefile_path, img_path, output_path):
    """Convert one shapefile of labelled boxes into a yolo-format .txt label file.

    Args:
        shapefile_path: shapefile with 'Class' (size) and 'Move_2' (movement)
            attributes and polygon geometry for each object.
        img_path: geo-referenced image (.tif) the labels belong to; its affine
            transform and pixel dimensions convert world coords to relative ones.
        output_path: destination .txt path (its parent directory is created);
            must be a pathlib.Path (``.parent`` is accessed on it).

    Writes one line per object: 'class centre_x centre_y width height', with
    all coordinates as relative floats in [0, 1].
    """
    # Get img dimensions and affine transform from the raster metadata.
    with rio.open(img_path) as src:
        dims = src.meta['width'], src.meta['height']
        transform = src.meta['transform']
    # Read shapefile into dataframe
    gdf = gpd.read_file(shapefile_path)
    # Convert classes to unique ID integers
    # 0: small parked
    # 1: small static
    # 2: small moving
    # 3: medium parked
    # 4: medium static
    # 5: medium moving
    # 6: large parked
    # 7: large static
    # 8: large moving
    for i, combo in enumerate(list(itertools.product(['small', 'medium', 'large'], ['parked', 'static', 'moving']))):
        gdf.loc[(gdf['Class'] == combo[0]) & (gdf['Move_2'] == combo[1]), 'yolo_class'] = i
    # Convert geometry to yolo format:
    # coords as relative floats between 0-1.
    # NOTE(review): float_maxy is derived from bounds.miny (and float_miny from
    # bounds.maxy) — presumably because the raster's row axis is inverted
    # (transform[4] negative, origin at top-left); confirm against the imagery.
    gdf['float_minx'] = ((gdf.loc[:, 'geometry'].bounds.minx - transform[2]) / transform[0]) / dims[0]
    gdf['float_maxy'] = ((gdf.loc[:, 'geometry'].bounds.miny - transform[5]) / transform[4]) / dims[1]
    gdf['float_maxx'] = ((gdf.loc[:, 'geometry'].bounds.maxx - transform[2]) / transform[0]) / dims[0]
    gdf['float_miny'] = ((gdf.loc[:, 'geometry'].bounds.maxy - transform[5]) / transform[4]) / dims[1]
    # Clamp boxes that spill over the image edges into [0, 1].
    gdf.loc[gdf['float_minx'] < 0, 'float_minx'] = 0
    gdf.loc[gdf['float_maxx'] > 1, 'float_maxx'] = 1
    gdf.loc[gdf['float_miny'] < 0, 'float_miny'] = 0
    gdf.loc[gdf['float_maxy'] > 1, 'float_maxy'] = 1
    # coords of centre, width, height (yolo's native box representation)
    gdf['width'] = gdf.loc[:, 'float_maxx'] - gdf.loc[:, 'float_minx']
    gdf['height'] = gdf.loc[:, 'float_maxy'] - gdf.loc[:, 'float_miny']
    gdf['centre_x'] = gdf.loc[:, 'float_minx'] + gdf.loc[:, 'width'] / 2
    gdf['centre_y'] = gdf.loc[:, 'float_miny'] + gdf.loc[:, 'height'] / 2
    # Write to output file
    Path(output_path.parent).mkdir(exist_ok=True)
    with open(output_path, 'w') as dest:
        np.savetxt(dest, gdf.loc[:, ('yolo_class', 'centre_x', 'centre_y', 'width', 'height')], '%g')
    print('Done')
def yolos_to_shps(yolos_path, images_path=None, outputs_path=None, crs=None, headers=['xmin', 'ymin', 'xmax', 'ymax', 'class_id', 'confidence']):
    """
    Convert every yolo detection .txt in *yolos_path* to a shapefile in
    *outputs_path*, geo-referencing each against the same-named .tif found in
    *images_path* (see yolo_to_shp). *headers* is accepted for interface
    compatibility; yolo_to_shp applies its own matching default.
    """
    Path(outputs_path).mkdir(exist_ok=True, parents=True)
    for txt_file in Path(yolos_path).glob('*.txt'):
        txt_path = Path(txt_file)
        image_path = Path(images_path, txt_path.stem).with_suffix('.tif')
        shp_path = Path(outputs_path, txt_path.stem).with_suffix('.shp')
        yolo_to_shp(txt_path, image_path, shp_path, crs)
def yolos_df_to_shps(yolos_path, targets_path, outputs_path=None, crs=None, headers=['xmin', 'ymin', 'xmax', 'ymax', 'class_id', 'confidence']):
    """
    For every image listed in the targets .mat file, convert its yolo
    detection .txt (looked up by stem in *yolos_path*) into a shapefile in
    *outputs_path* via yolo_to_shp. *headers* is accepted for interface
    compatibility; yolo_to_shp applies its own matching default.
    """
    Path(outputs_path).mkdir(exist_ok=True, parents=True)
    image_paths = np.array(scipy.io.loadmat(targets_path, squeeze_me=True)['img_paths'])
    for image_path in image_paths:
        stem = Path(image_path).stem
        txt_path = Path(yolos_path, stem).with_suffix('.txt')
        shp_path = Path(outputs_path, stem).with_suffix('.shp')
        yolo_to_shp(txt_path, image_path, shp_path, crs)
def yolo_to_shp(yolo_path, image_path=None, out_path=None, crs=None, headers=['xmin', 'ymin', 'xmax', 'ymax', 'class_id', 'confidence']):
    """
    Convert one yolo detection .txt (plus its geo-referenced .tif) into a
    shapefile of the detection bboxes.

    Args:
        yolo_path: detection text file with columns given by *headers*.
        image_path: matching .tif; when given, its CRS and transform are used
            to convert pixel coords to world coords (overriding *crs*).
        out_path: destination .shp; defaults to yolo_path with a .shp suffix.
        crs: EPSG code (int) or CRS string, used when no image is supplied.

    Returns:
        None. Missing or empty detection files are skipped quietly so callers
        can loop over many files (best-effort behaviour kept from before —
        BUGFIX: the original returned exception *classes* on these paths).

    Raises:
        ValueError: if neither *crs* nor *image_path* is provided.
    """
    if crs is None and image_path is None:
        # BUGFIX: previously returned the AttributeError class instead of raising.
        raise ValueError('Either crs or image_path must be provided')
    if crs is not None:
        if isinstance(crs, int):
            crs = rio.crs.CRS.from_epsg(crs)
        else:
            crs = rio.crs.CRS.from_string(crs)
    try:
        dets_df = pd.read_csv(yolo_path, names=headers, delim_whitespace=True)
    except FileNotFoundError:
        # Best-effort skip: some images simply have no detections file.
        return None
    if dets_df.shape[0] == 0:
        # Nothing to write for an empty detections file.
        return None
    if image_path is not None:
        with rio.open(image_path) as src:
            crs = src.crs
            # Convert pixel coordinates to world coordinates via the raster transform.
            dets_df.xmin, dets_df.ymin = rio.transform.xy(src.transform, dets_df.ymin, dets_df.xmin)
            dets_df.xmax, dets_df.ymax = rio.transform.xy(src.transform, dets_df.ymax, dets_df.xmax)
    dets_gdf = gpd.GeoDataFrame(
        geometry=[sp.geometry.Polygon([(r.xmin, r.ymin), (r.xmax, r.ymin), (r.xmax, r.ymax), (r.xmin, r.ymax)]) for r in
                  dets_df.itertuples()], crs=crs)
    dets_gdf['object_class_id'] = dets_df['class_id']
    dets_gdf['confidence'] = dets_df['confidence']
    filename = Path(yolo_path).with_suffix('.shp').name
    if out_path is None:
        dets_gdf.to_file(Path(Path(yolo_path).parent, filename))
    else:
        dets_gdf.to_file(out_path)
def get_sun_elev(stats_df):
    """
    Add 'datetime' (parsed from the chip index, the part before the first '-')
    and 'sun_elev' (the sun's altitude in degrees over London at that time)
    columns to stats_df, in place. Returns stats_df for convenience.
    """
    stats_df['datetime'] = [chip.split('-')[0] for chip in stats_df.index.tolist()]
    london = EarthLocation.of_address('London')
    # One astronomy computation per unique acquisition time, not per image.
    for stamp, _group in stats_df.groupby('datetime'):
        obstime = Time.strptime(stamp, "%y%b%d%H%M%S")
        frame = AltAz(obstime=obstime, location=london)
        sun_altaz = get_sun(obstime).transform_to(frame)
        stats_df.loc[stats_df['datetime'] == stamp, 'sun_elev'] = sun_altaz.alt.degree
    return stats_df
def train_test_split_sun_elev(dets_df, stats_df, elev_thres, valid_ratio):
    """
    Split images into 'winter'/'summer' by sun elevation (below/above
    *elev_thres*), hold out 2x *valid_ratio* of each season as validation,
    and build two season-based folds:
      fold 1 excludes the winter validation images from training,
      fold 2 excludes the summer validation images from training.
    Adds 'season' and 'dataset' columns to stats_df in place. Returns
    (train1, train2, valid1, valid2), each a (dets, stats) tuple.
    """
    print("Splitting up the dataset to create summer, winter based validation and training sets")
    winter_mask = stats_df['sun_elev'] < elev_thres
    summer_mask = stats_df['sun_elev'] > elev_thres
    stats_df.loc[winter_mask, 'season'] = 'winter'
    stats_df.loc[summer_mask, 'season'] = 'summer'
    # Hold out twice the validation ratio from each season (fixed seed for
    # reproducibility).
    winter_valid = stats_df.loc[winter_mask, :].sample(frac=valid_ratio * 2, random_state=1)
    summer_valid = stats_df.loc[summer_mask, :].sample(frac=valid_ratio * 2, random_state=1)
    stats_df.loc[winter_valid.index, 'dataset'] = 'valid'
    stats_df.loc[summer_valid.index, 'dataset'] = 'valid'
    stats_df.fillna(value={'dataset': 'train'}, inplace=True)
    # Fold 1: winter validation images held out of training.
    winter_holdout = stats_df.loc[(stats_df['season'] == 'winter') & (stats_df['dataset'] == 'valid'), :].index
    train_stats_1 = stats_df.drop(winter_holdout).copy()
    valid_stats_1 = stats_df.drop(train_stats_1.index).copy()
    # Fold 2: summer validation images held out of training.
    summer_holdout = stats_df.loc[(stats_df['season'] == 'summer') & (stats_df['dataset'] == 'valid'), :].index
    train_stats_2 = stats_df.drop(summer_holdout).copy()
    valid_stats_2 = stats_df.drop(train_stats_2.index).copy()
    # Partition the detections to follow their images.
    train_dets_1 = dets_df[dets_df['chip'].isin(train_stats_1.index)].copy()
    valid_dets_1 = dets_df.drop(train_dets_1.index).copy()
    train_dets_2 = dets_df[dets_df['chip'].isin(train_stats_2.index)]
    valid_dets_2 = dets_df.drop(train_dets_2.index)
    return (train_dets_1, train_stats_1), (train_dets_2, train_stats_2), \
           (valid_dets_1, valid_stats_1), (valid_dets_2, valid_stats_2)
def train_test_split(dets_df, stats_df, valid_ratio):
    """
    Randomly split the images into training and validation sets, holding out
    *valid_ratio* of stats_df rows (fixed seed for reproducibility). Adds a
    'dataset' column to stats_df in place.

    Returns:
        ((train_dets, train_stats), (valid_dets, valid_stats)).

    BUGFIX: the original referenced undefined train_dets_2/train_stats_2/
    valid_dets_2/valid_stats_2 (copy-pasted from the sun-elevation variant)
    and always raised NameError; there is only one split here, so a single
    (train, valid) pair is returned.
    """
    print("Splitting up the dataset into validation and training sets, in a 10:90 ratio, randomly")
    valid = stats_df.sample(frac=valid_ratio, random_state=1)
    stats_df.loc[valid.index, 'dataset'] = 'valid'
    stats_df.fillna(value={'dataset': 'train'}, inplace=True)
    train_stats = stats_df.drop(stats_df.loc[stats_df['dataset'] == 'valid', :].index).copy()
    valid_stats = stats_df.drop(train_stats.index).copy()
    # Partition the detections to follow their images.
    train_dets = dets_df[dets_df['chip'].isin(train_stats.index)].copy()
    valid_dets = dets_df.drop(train_dets.index).copy()
    return (train_dets, train_stats), (valid_dets, valid_stats)
def kfolds(stats_df, dets_df, k_folds):
    """Take a N-img dataframe and return k_folds train/test splits, randomly
    sampled, as dictionaries of stats and dets dataframes keyed by
    'train_<i>' / 'test_<i>'.

    Detections (dets_df) are attached to a split by matching their 'chip'
    column against the split's 'uchip' image names. random_state is fixed so
    the folds are reproducible.
    """
    from sklearn.model_selection import KFold
    df = stats_df.copy().reset_index()
    kf = KFold(n_splits=k_folds, shuffle=True, random_state=2)
    kf.get_n_splits()
    df_full = pd.DataFrame()
    # Tag every row with the fold it belongs to, then stack all folds.
    # (enumerate replaces the original manual i += 1 counter.)
    for i, (train_index, test_index) in enumerate(kf.split(df), start=1):
        df_train = df.loc[train_index, :].copy()
        df_train.loc[:, 'dataset'] = 'train_' + str(i)
        df_test = df.loc[test_index, :].copy()
        df_test.loc[:, 'dataset'] = 'test_' + str(i)
        df_full = pd.concat([df_full, df_train, df_test])
    stats_dict = {}
    dets_dict = {}
    # One dict entry per fold tag; groupby already yields each fold's rows,
    # so the original re-selection via .loc (and the unused `names` tuple /
    # enumerate counter) is dropped.
    for name, group in df_full.groupby('dataset'):
        stats_dict[name] = group.copy()
        dets_dict[name] = dets_df[dets_df['chip'].isin(group.loc[:, 'uchip'])].copy()
    return dets_dict, stats_dict
# def paths_to_df(root_paths, headers=['class_id', 'height', 'width', 'centre_x', 'centre_y']):
# print("Warning! Using incorrect yolo label format specific to London_combined_608 tiles!")
def paths_to_df(root_paths, label_dirname, img_dirname, headers=['class_id', 'centre_x', 'centre_y', 'width', 'height']):
    """
    For a list of directory paths, each containing named subdirectories with
    images and yolo-style labels, build one detections dataframe (one row per
    bounding box) and one stats dataframe (one row per image) covering every
    root path.
    """
    all_dets = pd.DataFrame(columns=headers)
    all_stats = pd.DataFrame()
    for root in root_paths:
        print("Loading images and labels from ", root, "...")
        dets_part, stats_part = yolos_to_df(os.path.join(root, label_dirname),
                                            os.path.join(root, img_dirname))
        all_dets = pd.concat([all_dets, dets_part])
        all_stats = pd.concat([all_stats, stats_part])
    # Give every image a unique sequential number, offset by 5000.
    all_stats.loc[:, 'image_numbers'] = np.arange(len(all_stats)) + 5000
    all_dets = all_dets.reset_index(drop=True)
    all_stats.set_index('uchip', inplace=True)
    # Drop any rows where the class is not valid.
    all_dets = all_dets.drop(all_dets.loc[all_dets.class_id.isna()].index)
    return all_dets, all_stats
# def yolos_to_df(yolos_path, images_path, headers=['class_id', 'height', 'width', 'centre_x', 'centre_y']): # From 608 tiles - incorrect symlinks!
# print("Warning! Using incorrect yolo label format specific to London_combined_608 tiles!")
def yolos_to_df(yolos_path, images_path, headers=['class_id', 'centre_x', 'centre_y', 'width', 'height']):  # Normal label mapping
    """For parallel image/label folders, pair each yolo .txt label with its
    same-named .tif image and accumulate the per-file frames from yolo_to_df.

    Label files that are missing, unreadable or empty are skipped instead of
    aborting the scan. (The original wrapped only the pure Path construction
    in a bare `except:` — which cannot fail — and then crashed unpacking
    yolo_to_df's failure return.)
    """
    from pathlib import Path
    dets_df1 = pd.DataFrame(columns=headers)
    stats_df1 = pd.DataFrame()
    for yolo_path in Path(yolos_path).rglob('*.txt'):
        yolo_path = Path(yolo_path)
        img_path = Path(images_path, yolo_path.stem).with_suffix('.tif')
        try:
            result = yolo_to_df(yolo_path, img_path)
        except (FileNotFoundError, ValueError) as e:
            print(' skipping', yolo_path, ':', e)
            continue
        # yolo_to_df historically signalled failure via its return value
        # rather than raising; treat anything but a 2-tuple as a skip.
        if not isinstance(result, tuple):
            continue
        dets_df2, stats_df2 = result
        # pd.concat silently ignores None members, so (None, None) results
        # from yolo_to_df fall through harmlessly here.
        dets_df1 = pd.concat([dets_df1, dets_df2])
        stats_df1 = pd.concat([stats_df1, stats_df2])
    return dets_df1, stats_df1
def yolo_to_df(yolo_path, image_path, headers=['class_id', 'centre_x', 'centre_y', 'width', 'height']):
    """Load one yolo-format label file (and its image) into dataframes.

    Returns (dets_df, stats_df) on success. If the label file is missing or
    contains no boxes, returns (None, None) — pd.concat in the caller
    ignores None, so such files are skipped. (The original returned the
    exception *class* itself, a single object, which broke the caller's
    two-value unpacking; an empty-but-existing file raised an uncaught
    pandas EmptyDataError.) If image_path is None, the image-derived columns
    cannot be computed and None is returned, matching the original
    fall-through behaviour.
    """
    from pathlib import Path
    try:
        # delim_whitespace= is deprecated in pandas; sep=r'\s+' is the
        # documented equivalent.
        dets_df = pd.read_csv(str(yolo_path), names=headers, sep=r'\s+')
    except (FileNotFoundError, pd.errors.EmptyDataError):
        return None, None
    if dets_df.shape[0] == 0:
        return None, None
    elif image_path is not None:
        img = cv2.imread(str(image_path))
        height, width, channels = np.shape(img)
        # yolo boxes are normalised (0-1) centre/size; convert to pixel corners.
        dets_df['xmin'] = (dets_df.centre_x - dets_df.width / 2) * width
        dets_df['xmax'] = (dets_df.centre_x + dets_df.width / 2) * width
        dets_df['ymin'] = (dets_df.centre_y - dets_df.height / 2) * height
        dets_df['ymax'] = (dets_df.centre_y + dets_df.height / 2) * height
        # Clamp boxes to the image bounds.
        dets_df.loc[dets_df['xmin'] < 0, 'xmin'] = 0
        dets_df.loc[dets_df['ymin'] < 0, 'ymin'] = 0
        dets_df.loc[dets_df['xmax'] > width, 'xmax'] = width
        dets_df.loc[dets_df['ymax'] > height, 'ymax'] = height
        dets_df['chip'] = uchip = Path(image_path).stem
        stats = np.zeros((1, 12))  # BGR mean and std, HSV mean and std
        hsv = cv2.cvtColor(img, cv2.COLOR_BGR2HSV).astype(np.float32)
        for j in range(3):
            stats[0, j + 0] = img[:, :, j].astype(np.float32).mean()
            stats[0, j + 3] = img[:, :, j].astype(np.float32).std()
            stats[0, j + 6] = hsv[:, :, j].mean()
            stats[0, j + 9] = hsv[:, :, j].std()
        stats_df = pd.DataFrame(stats, columns=['bmean', 'gmean', 'rmean',
                                                'bstd', 'gstd', 'rstd',
                                                'hmean', 'smean', 'vmean',
                                                'hstd', 'sstd', 'vstd'])
        stats_df['height'] = height
        stats_df['width'] = width
        stats_df['uchip'] = uchip
        stats_df['img_path'] = str(image_path)
        return dets_df, stats_df
def dfs_to_mat(dets_dict, stats_dict, matfile_dir_path, class_map, test_name):
    """Pack each kfolds split into a (dets, stats) tuple and write one .mat
    file per fold ('<test_name>_<fold>.mat') via df_to_mat."""
    for fold in dets_dict:
        fold_name = test_name + '_' + fold
        print('Now creating fold name: ', fold_name)
        target = os.path.join(matfile_dir_path, fold_name + '.mat')
        df_to_mat((dets_dict[fold], stats_dict[fold]), target, class_map)
def df_to_mat(df_tuple, matfile_path, class_map):
    """Take dataframe outputs from paths_to_df and write matfile for input to
    xview-yolov3 model.

    df_tuple is (dets_df, stats_df); stats_df is re-indexed in place by
    'uchip'. Returns (uchips, image_numbers) for the images written.
    """
    dets_df, stats_df = df_tuple
    print("Loaded ", len(dets_df['chip'][:]), " objects." )
    # Index image rows by their unique chip name (mutates the caller's frame).
    stats_df.set_index('uchip', inplace=True)
    # Discard out-of-bounds boxes:
    dets_df = dets_df.loc[ (dets_df['centre_x'] > 0) & (dets_df['centre_y'] > 0) , :]
    dets_df = dets_df.loc[ (dets_df['centre_x'] < 1) & (dets_df['centre_y'] < 1) , :]
    # NOTE(review): these two filters use centre +/- the FULL width/height
    # rather than half of it — confirm the extra-aggressive trim is intended.
    dets_df = dets_df.loc[ (dets_df['centre_x'] - dets_df['width'] > 0) &
                           (dets_df['centre_x'] + dets_df['width'] < 1) , :]
    dets_df = dets_df.loc[ (dets_df['centre_y'] - dets_df['height'] > 0) &
                           (dets_df['centre_y'] + dets_df['height'] < 1) , :]
    print("Removed out of bounds objects. Now left with ", len(dets_df['chip'][:]), " objects." )
    print("Pull out box coordinates in xview-yolov3 format...")
    # Positional columns 5:9 are xmin, xmax, ymin, ymax (added by yolo_to_df,
    # assuming the column order has been preserved through concat/split).
    coords = np.array([dets_df.iloc[i,5:9] for i in range(len(dets_df.index))]).astype(int)
    # Swap columns 1 and 2 so the order becomes xmin, ymin, xmax, ymax.
    coords[:,[1,2]]=coords[:,[2,1]]
    print("Create N_obj long list of image names for each bbox...")
    chips = np.array(dets_df['chip'][:])
    print("Create N_img long list of image dims..")
    shapes = np.array([ stats_df['height'], stats_df['width'] ]).transpose()
    print("Create N_img x 12 sized array of image stats...")
    # First 12 stats columns are the BGR/HSV mean/std written by yolo_to_df.
    stats = np.array( [ stats_df.iloc[i,:12] for i in range(len(stats_df.index)) ])
    print("Create N_img long list of unique chip names...")
    uchips = np.array( stats_df.index )
    # print("Create N_img long list of image sympaths...")
    # sympaths = np.array(stats_df.loc[:,'sympaths'])
    print("Create N_img long list of image actual paths...")
    img_paths = np.array(stats_df.loc[:,'img_path'])
    print("Create N_obj long list of class ID...")
    classes = np.array(dets_df['class_id']).astype(int)
    print("Create xview-yolov3 style array of obj class and bbox dims...")
    targets = np.vstack( (classes,coords.T)).T
    print("Update targets and classes")
    # Remap class ids through class_map: old id k (list position) -> new id v.
    newArray = np.copy(targets)
    for k, v in enumerate(class_map):
        newArray[targets[:,0]==k,0] = v
    targets = newArray
    classes = targets[:,0]
    chips_id = np.zeros(len(chips)).astype(int) # N_obj long list of numerical chip_id for each bbox
    print("Create N-img long list of numerical img number ...")
    image_numbers = np.array(stats_df.loc[:,'image_numbers'])
    # Broadcast each image's number to all of its boxes.
    for i, uchip in enumerate(uchips):
        bool_i = ( chips == uchip )
        chips_id[bool_i] = image_numbers[i]
    print("Create N_img list of image weights, constant in value, normalised for sum=1...")
    n_uchips = len(uchips)
    image_weights = np.zeros(n_uchips) + 1/n_uchips # Constant image weights
    print("Create class_mu, class_sigma, class_cov ... ")
    # Box heights/widths in pixels (coords are xmin, ymin, xmax, ymax here).
    # NOTE(review): .astype binds tighter than '-', so only the subtrahend is
    # cast to float32; the numeric result is the same as casting the
    # difference, but confirm (a - b).astype(...) wasn't intended.
    h = coords[:,3]-coords[:,1].astype('float32')
    w = coords[:,2]-coords[:,0].astype('float32')
    Area = np.log(w*h)
    aspect_ratio = np.log(w/h)
    uc = np.unique(classes)
    n_uc = len(uc)
    class_mu = np.zeros((n_uc,4))
    class_sigma = np.zeros((n_uc,4))
    class_cov = np.zeros((n_uc,4,4))
    # Per-class log-space statistics of box width/height/area/aspect-ratio.
    for i in range(n_uc):
        j = classes==uc[i]
        wj = np.log(w[j])
        hj = np.log(h[j])
        aj = Area[j]
        arj = aspect_ratio[j]
        data = [wj, hj, aj, arj]
        class_mu[i,:] = np.mean(data,1)
        class_sigma[i,:] = np.std(data,1)
        class_cov[i,:,:] = np.cov(data)
    # Cast to the dtypes the downstream model loader expects.
    targets = targets.astype('float32')
    chips_id = chips_id.astype('float32')
    image_numbers = image_numbers.astype('uint16')
    image_weights = image_weights.astype('<f8')
    print("Save matfile at ", matfile_path, "...")
    scipy.io.savemat(matfile_path,
                     {'coords': coords, 'chips': chips, 'classes': classes, 'shapes': shapes,
                      'stats': stats, 'uchips': uchips, 'targets': targets,
                      'image_numbers':image_numbers, 'image_weights':image_weights,
                      'id':chips_id, 'wh': [w,h], 'class_sigma':class_sigma,
                      'class_mu':class_mu, 'class_cov':class_cov, 'class_map':class_map, 'img_paths':img_paths })
    return uchips, image_numbers
def paths_to_symlinks(paths, sym_path, stats_df):
    """
    Create a directory of numerically named symlinks (<image_number>.tif) to
    the images listed in stats_df, and record the link paths in a 'sympaths'
    column on stats_df (which is also returned).

    Any existing contents of sym_path are removed first. The `paths`
    argument is unused here but kept for call-site compatibility.
    """
    # Start from an empty directory. ignore_errors=True covers the first run
    # when sym_path does not yet exist (the original did a redundant
    # makedirs / rmtree / makedirs dance for the same effect).
    shutil.rmtree(sym_path, ignore_errors=True)
    os.makedirs(sym_path, exist_ok=True)
    img_numbers = stats_df.loc[:, 'image_numbers']
    img_paths = np.array(stats_df.loc[:, 'img_path'])
    newpaths = []
    for img_path, img_number in zip(img_paths, img_numbers):
        newpath = os.path.join(sym_path, str(img_number) + '.tif')
        print(img_path)
        print(newpath)
        os.symlink(img_path, newpath)
        newpaths.append(newpath)
    stats_df.loc[:, 'sympaths'] = newpaths
    print("Symlinks created: ", len(newpaths))
    return stats_df
| {"/detect_loop.py": ["/utils/gb_utils.py"], "/buffer_movement_counts.py": ["/gb_scoring.py"]} |
69,764 | georgebirch/yolov3-overhead | refs/heads/main | /buffer_movement_counts.py | yolos_path = '/mnt/Data/AQM_training/01_development/labels_9_class'
# Input locations: converted images and the buffered road-network shapefile.
images_path = '/mnt/Data/AQM_training/london_RGB_8bit_DRA-ACOMP_50cm/images'
road_buffer_shp_path = '/mnt/server/AQM/AQM/01_Vehicle_Detection/Europe/London/02_City_Data/02_Roads/02_Buffered/02_Measured_Buffers/combined_buffer_q_0.9.shp'
# Star imports bring in yolos_to_df, dfs_to_gdfs, np, gpd, etc. used below.
from gb_utils import *
from gb_scoring import *
def append_results(df):
    """Append df (rendered as CSV, header included) to the running results file."""
    results_path = './all_test_results_01.csv'
    with open(results_path, 'a') as sink:
        df.to_csv(sink)
# --- Load ground-truth yolo labels and per-image stats ---
dets_df, stats_df = yolos_to_df(yolos_path, images_path)
print('Build DF with ', len(dets_df), ' objects, within ', len(stats_df), ' images')
dets_df = dets_df.dropna().copy()
dets_df.reset_index(drop=True, inplace=True)
print('Removed Nans. Left with ', len(dets_df), ' objects, within ', len(stats_df), ' images')
print('Rearranging classes into parked, static, moving.')
# Collapse the 9 source classes into 3 movement classes by position:
# ids {0,3,6}->0, {1,4,7}->1, {2,5,8}->2.
class_map = [0, 1, 2, 0, 1, 2, 0, 1, 2]
# NOTE(review): newArray is never used below — dead variable.
newArray = np.zeros(len(dets_df))
for k, v in enumerate(class_map):
    dets_df.loc[ dets_df['class_id']==k, 'class_id_3_moving' ] = v
# --- Intersect detection boxes with the buffered road network ---
buffer_gdf = gpd.read_file(road_buffer_shp_path)
buffer_crs = buffer_gdf.crs.to_epsg()
print(' Creating Geodataframes from Dataframes using crs from road buffer : ')
stats_df['img_paths'] = stats_df.img_path.tolist()
dets_gdf = dfs_to_gdfs(dets_df, stats_df, buffer_crs)
dets_gdf.reset_index(drop=True, inplace=True)
print('Overlaying boxes onto road buffer.')
road_dets = gpd.overlay(dets_gdf, buffer_gdf, how='intersection')
print('We now have this many objects: ', len(road_dets))
print ( 'This many are unique objects: ', len(road_dets['index'].unique()) )
print('Computing area of each intersected box.')
road_dets['box_area'] = road_dets.area
print('Removing duplicate boxes keeping the one with largest area')
print ( 'After .area, This many are unique objects: ', len(road_dets['index'].unique()) )
# Sort by intersecting area, then by buffer size, largest first.
road_dets = road_dets.sort_values(['box_area', 'max_buffer'], ascending = [False, False])
# Drop duplicates from the inherited 'index' so that each GT box is only
# represented once (the first after sorting, i.e. largest intersection).
road_dets = road_dets.drop_duplicates('index', keep = 'first')
print('After drop duplicates: ',len(road_dets) )
# --- Counts per road class ('LEGEND') x movement class ---
dfg = road_dets.groupby(['LEGEND', 'class_id_3_moving']).size()
dfg = dfg.reset_index().set_index('LEGEND')
dfg = dfg.set_index([dfg.index, 'class_id_3_moving'])[0].unstack()
dfg.columns = ['parked', 'static', 'moving']
print('Saving CSV.')
append_results(dfg)
# Repeat but with only major/minor labels ('layer'):
road_dets = road_dets.sort_values(['box_area', 'layer'], ascending = [False, True])
dfg = road_dets.groupby(['layer', 'class_id_3_moving']).size()
dfg = dfg.reset_index().set_index('layer')
dfg = dfg.set_index([dfg.index, 'class_id_3_moving'])[0].unstack()
dfg.columns = ['parked', 'static', 'moving']
append_results(dfg)
# --- Same two analyses with box centroids instead of full polygons ---
dets_gdf.geometry = dets_gdf.geometry.centroid
print('Overlaying box centroids onto road buffer.')
road_dets = gpd.overlay(dets_gdf, buffer_gdf, how='intersection')
print('We now have this many objects: ', len(road_dets))
print ( 'This many are unique objects: ', len(road_dets['index'].unique()) )
# Sort by buffer size, largest first (centroids have no area to rank by).
road_dets = road_dets.sort_values(['max_buffer'], ascending = False)
# Drop duplicates from the inherited 'index' so each GT is counted once.
road_dets = road_dets.drop_duplicates('index', keep = 'first')
print('After drop duplicates: ',len(road_dets) )
# Counts per road class x movement class.
dfg = road_dets.groupby(['LEGEND', 'class_id_3_moving']).size()
dfg = dfg.reset_index().set_index('LEGEND')
dfg = dfg.set_index([dfg.index, 'class_id_3_moving'])[0].unstack()
dfg.columns = ['parked', 'static', 'moving']
print('Saving CSV.')
append_results(dfg)
# Repeat but with only major/minor labels:
road_dets = road_dets.sort_values(['layer'], ascending = True)
dfg = road_dets.groupby(['layer', 'class_id_3_moving']).size()
dfg = dfg.reset_index().set_index('layer')
dfg = dfg.set_index([dfg.index, 'class_id_3_moving'])[0].unstack()
dfg.columns = ['parked', 'static', 'moving']
append_results(dfg)
| {"/detect_loop.py": ["/utils/gb_utils.py"], "/buffer_movement_counts.py": ["/gb_scoring.py"]} |
69,765 | georgebirch/yolov3-overhead | refs/heads/main | /train_test_split.py | from gb_utils import *
from pathlib import Path
def create(class_map=None, root_paths=None, label_dirname='labels_9_class',
           img_dirname='images_RGB', test_name='london_10folds_9class',
           k=10, matfile_dir_path="/home/george/xview-yolov3/utils"):
    """Build k-fold train/test .mat files for the xview-yolov3 model.

    All settings default to the original hard-coded configuration, so
    ``create()`` with no arguments behaves exactly as before; callers can now
    override any of them.

    class_map -- list as long as the number of classes in the yolo .txt
        labels; can be used to amalgamate classes (default leaves them as-is).
    root_paths -- parent directories whose named subdirectories hold images
        and labels.
    label_dirname / img_dirname -- subdirectory names for .txt labels and
        identically named .tif images.
    test_name -- prefix for each fold's matfile pair
        (<test_name>_train_1.mat, <test_name>_test_1.mat, ...).
    k -- number of folds (k=10 produces 20 matfiles in total).
    matfile_dir_path -- directory in which to store the kfolds matfiles.
    """
    # None defaults avoid mutable default arguments for the two lists.
    if class_map is None:
        class_map = [0, 1, 2, 3, 4, 5, 6, 7, 8]  # Leave classes unchanged.
    if root_paths is None:
        root_paths = [
            # "/mnt/Data/AQM_training/london_RGB_8bit_ACOMP_30cm",
            "/content/drive/My Drive/George_Birchenough/04_London_Datasets/london_8bit_combined/london_8bit_DRA-ACOMP_30cm",
            "/content/drive/My Drive/George_Birchenough/04_London_Datasets/london_8bit_combined/london_8bit_DRA-ACOMP_50cm"]
    dets_df, stats_df = paths_to_df(root_paths, label_dirname, img_dirname)
    dets_dict, stats_dict = kfolds(stats_df, dets_df, k)
    dfs_to_mat(dets_dict, stats_dict, matfile_dir_path, class_map, test_name)
create()
| {"/detect_loop.py": ["/utils/gb_utils.py"], "/buffer_movement_counts.py": ["/gb_scoring.py"]} |
69,797 | iuliagalatan/Gomoku | refs/heads/main | /appCoord.py | from config.Settings import Settings
class App:
    """Application bootstrap: reads the settings file, builds the configured
    UI and hands control over to it."""

    @staticmethod
    def start():
        # Settings decides (from settings.properties) which repository and
        # UI implementation to instantiate; the UI then runs the app.
        Settings("settings.properties").config().start()
App.start()
| {"/appCoord.py": ["/config/Settings.py"], "/config/Settings.py": ["/service/GameServ.py", "/ui/gui.py", "/ui/console.py"], "/ui/gui.py": ["/service/GameServ.py"]} |
69,798 | iuliagalatan/Gomoku | refs/heads/main | /config/Settings.py | from domain.Board import *
from service.GameServ import Game
from ui.gui import GUI
from ui.console import *
class Settings:
    """Reads a key=value .properties file and wires up the application
    objects (board, game service, UI) it describes."""

    def __init__(self, configFile):
        # Path of the properties file; parsed lazily by readSettings().
        self.__config_file = configFile
        self.__settings = {}

    def readSettings(self):
        """Parse the properties file into the internal settings dict.

        Lines without '=' are ignored; keys and values are whitespace
        trimmed. Note: only the text between the first two '='-separated
        fields is kept, so values may not themselves contain '='.
        (The original also built an unused local `settings` dict — removed.)
        """
        with open(self.__config_file, "r") as f:
            lines = f.read().split("\n")
        for line in lines:
            setting = line.split("=")
            if len(setting) > 1:
                self.__settings[setting[0].strip()] = setting[1].strip()

    def config(self):
        """Build and return the UI named by the settings ('console' or 'gui').

        The game objects are only constructed for the 'in-memory'
        repository; returns None if the 'ui' key matches neither option.
        """
        self.readSettings()
        board = None
        computer = None
        game = None
        if self.__settings['repository'] == "in-memory":
            board = Board()
            computer = Computer()
            game = Game(board, computer)
        ui = None
        if self.__settings['ui'] == "console":
            ui = UI(game)
        if self.__settings['ui'] == 'gui':
            ui = GUI(game)
        return ui
| {"/appCoord.py": ["/config/Settings.py"], "/config/Settings.py": ["/service/GameServ.py", "/ui/gui.py", "/ui/console.py"], "/ui/gui.py": ["/service/GameServ.py"]} |
69,799 | iuliagalatan/Gomoku | refs/heads/main | /service/GameServ.py | from domain.Board import *
class Game():
    """Gomoku game service: applies the human player's moves and chooses the
    computer's replies on the shared board.

    'X' marks the player, 'O' the computer. The board/computer collaborators
    come from domain.Board (star-imported above); the board is expected to
    provide move/check2/onboard/is_won/is_tie and a `board` grid attribute.
    """
    def __init__(self, board, computer):
        self._board = board
        self._computer = computer
    @property
    def board(self):
        return self._board
    @property
    def computer(self):
        return self._computer
    def move_player(self, x, y):
        # Place the player's 'X'; Board.move is expected to validate the cell.
        self.board.move(x, y, 'X')
    def move_computer(self, x, y):
        """Pick and play the computer's reply to the player's move at (x, y).

        Strategy: block/extend a player three-in-row if one is found, else
        try the two_two heuristic, else sample random squares until check2
        accepts one. Returns the (X, Y) actually played.
        """
        X, Y = self.three_in_row(x, y)
        if X == -1 and Y == -1:
            ok = False
            X, Y = self.two_two(x, y)
            if X == -1 and Y == -1:
                ok = False
                while ok == False:
                    X, Y = self.computer.move_random(x, y)
                    # Debug output of the candidate square.
                    print(X)
                    print(Y)
                    ok = self.board.check2(X, Y)
        self.board.move(X, Y, 'O')
        return (X, Y)
    def three_in_row(self, x, y):
        """Look for three player marks in a row through (x, y) and return a
        free square that extends/blocks that line, or (-1, -1) if none.

        coordi/coordj enumerate the 8 compass directions.
        """
        coordi = [-1, -1, 0, 1, 1, 1, 0, -1]
        coordj = [0, 1, 1, 1, 0, -1, -1, -1]
        for q in range(0, 8):
            ok = True
            xi = x
            yi = y
            # Walk two steps in direction q; the line holds only if both
            # cells are on the board and hold the player's value (1).
            for p in range(0, 2):
                xi = xi + coordi[q]
                yi = yi + coordj[q]
                if self.board.onboard(xi, yi) == False:
                    ok = False
                else:
                    if self.board.board[xi][yi] != 1:
                        ok = False
            if ok == True:
                # Prefer the square beyond the line's far end, otherwise the
                # square just behind (x, y).
                if(self.board.check2(xi+coordi[q], yi+coordj[q])):
                    return xi+coordi[q], yi+coordj[q]
                elif self.board.check2(x-coordi[q], y-coordj[q]):
                    return x-coordi[q], y-coordj[q]
        return -1, -1
    def two_two(self, x, y):
        """Look for an empty neighbour of (x, y) that is flanked on opposite
        sides by player marks; return it, or (-1, -1) if none exists.

        NOTE(review): the emptiness test indexes board[x+di][y+dj] *before*
        calling onboard(), so near the board edges this can wrap around via
        negative indices or raise IndexError — confirm against Board and
        consider swapping the two operands of the `and`.
        """
        Xs = -1
        Ys = -1
        coordi = [-1, -1, 0, 1, 1, 1, 0, -1]
        coordj = [0, 1, 1, 1, 0, -1, -1, -1]
        for l in range(0, 8):
            if self.board.board[x+coordi[l]][y+coordj[l]] == 0 and self.board.onboard(x+coordi[l], y+coordj[l]):
                xs = x+coordi[l]
                ys = y+coordj[l]
                for q in range(0, 8):
                    ok = True
                    xi = xs
                    yi = ys
                    xj = xs
                    yj = ys
                    # range(0, 1) runs exactly once: only the immediate
                    # neighbours on both sides of the candidate are examined.
                    for p in range(0, 1):
                        xi = xi + coordi[q]
                        yi = yi + coordj[q]
                        xj = xj - coordi[q]
                        yj = yj - coordj[q]
                        if self.board.onboard(xi, yi) == False or self.board.onboard(xj, yj) == False:
                            ok = False
                        else:
                            if self.board.board[xi][yi] != 1 or self.board.board[xj][yj] != 1:
                                ok = False
                    if ok == True:
                        # Remember the latest qualifying square (loop continues).
                        Xs = xs
                        Ys = ys
        return Xs, Ys
    def is_won(self):
        # Delegate to the board; normalises the result to a plain bool.
        ok = self.board.is_won()
        if ok:
            return True
        else:
            return False
    def is_tie(self):
        ok = self.board.is_tie()
        return ok
    def is_over(self):
        # Game ends on either a tie or a win.
        return self.board.is_tie() or self.board.is_won()
| {"/appCoord.py": ["/config/Settings.py"], "/config/Settings.py": ["/service/GameServ.py", "/ui/gui.py", "/ui/console.py"], "/ui/gui.py": ["/service/GameServ.py"]} |
69,800 | iuliagalatan/Gomoku | refs/heads/main | /ui/gui.py | from tkinter import *
import time
import sys
from PIL import Image, ImageTk, ImageOps
from service.GameServ import Game
class GUI:
    """Tkinter front-end for the Gomoku game.

    Renders a 15x15 board as tiled images on a canvas, forwards player
    clicks to the game service and draws the computer's replies.
    """

    def __init__(self, game):
        self._game = game
        self.root = Tk()
        self.root.title('Gomoku')
        self.root.resizable(width = False, height = False) # the main window can't be resized
        self.fgColor = 'red'
        self.bgColor = '#ccccff'
        self.frame = Frame(self.root, bg = self.bgColor)
        self.frame.grid(row = 0, column = 0, sticky="nsew")
        self.root.grid_rowconfigure(0, weight = 1)
        self.root.grid_columnconfigure(0, weight = 1)
        self.root.grid_columnconfigure(1, weight = 1)
        self.c = Canvas(self.frame, width = 600, height = 600, borderwidth = 1, bg = self.bgColor)
        self.btnStart = Button(self.frame, text = "Start", bg = self.bgColor, fg = self.fgColor, command = self.start_game)
        self.btnStop = Button(self.frame, text ='Exit', bg = self.bgColor, fg = self.fgColor, command = self.stop_game)
        self.lblMessage = Label(self.frame, bg = self.bgColor, fg = self.fgColor, text = 'Start the game')
        self.c.grid(row = 0, column = 0, columnspan = 2, sticky = "nsew")
        self.lblMessage.grid(row = 1, column = 0, columnspan = 2, sticky = "nsew")
        self.btnStart.grid(row = 2, column = 0, sticky = "new")
        self.btnStop.grid(row = 2, column = 1, sticky = "new")
        self.ROWS = 15
        self.COLS = 15
        # Canvas image ids and their PhotoImage objects; references are kept
        # on self so Tk's images are not garbage collected.
        self.tiles = [[None for col in range(self.COLS)] for row in range(self.ROWS)]
        self.photoImg = [[None for col in range(self.COLS)] for row in range(self.ROWS)]
        self.c.update()
        self.col_width = self.c.winfo_width() / self.COLS
        self.row_height = self.c.winfo_height() / self.ROWS
        self.c.bind("<Button-1>", self.next_move)
        self.create_table()
        self.player = True
        self.is_running = False

    @property
    def game(self):
        return self._game

    def start(self):
        # Enter the Tk event loop; all further activity is event-driven.
        self.root.mainloop()

    def start_game(self):
        """'Start' button handler: enable click handling."""
        self.lblMessage.configure(text = "Player's move:")
        self.is_running = True

    def stop_game(self):
        """'Exit' button handler: terminate the process."""
        sys.exit()

    def create_table(self):
        """Fill the canvas with one background-coloured tile per cell."""
        width = int(self.col_width)
        height = int(self.row_height)
        for row in range(self.ROWS):
            for col in range(self.COLS):
                img = Image.new('RGB', (width, height), (204, 204, 255))
                # Image.ANTIALIAS was removed in Pillow 10; LANCZOS is the
                # documented replacement (same filter).
                img = img.resize((width, height), Image.LANCZOS)
                self.photoImg[row][col] = ImageTk.PhotoImage(ImageOps.expand(img, border = 1, fill = 'red'))
                self.tiles[row][col] = self.c.create_image(col*self.col_width, row*self.row_height, anchor = NW, image = self.photoImg[row][col])
        self.c.update()

    def draw_image(self, row, col, image_file):
        """Replace the tile at (row, col) with the image loaded from image_file."""
        width = int(self.col_width)
        height = int(self.row_height)
        self.c.delete(self.photoImg[row][col])
        # See create_table: LANCZOS replaces the removed Image.ANTIALIAS.
        img = Image.open(image_file).resize((width, height), Image.LANCZOS)
        self.photoImg[row][col] = ImageTk.PhotoImage(ImageOps.expand(img, border = 1, fill = 'red'))
        self.c.delete(self.tiles[row][col])
        self.tiles[row][col] = self.c.create_image(col*self.col_width, row*self.row_height, anchor = NW, image = self.photoImg[row][col])
        self.c.update()

    def next_move(self, event):
        """Canvas click handler: play the player's move, then the computer's."""
        if self.is_running == False:
            return None
        # Map the pixel click position to a board cell.
        col = int(event.x//self.col_width)
        row = int(event.y//self.row_height)
        if self._game.is_over() == False:
            self.lblMessage.configure(text = "Computer's move:")
            self.draw_image(row, col, 'resources/x.png')
            try:
                self._game.move_player(row, col)
                time.sleep(0.5)
                if self._game.is_over() == True:
                    self.lblMessage.configure(text = 'Game Over: tie or player won')
                    self.is_running = False
                    return None
            except Exception as e:
                # Invalid move: report it; the tile was already drawn above.
                print(e)
        if self._game.is_over() == False:
            (row, col) = self._game.move_computer(row, col)
            self.lblMessage.configure(text = "Player's move:")
            self.draw_image(row, col, 'resources/0.png')
            if self._game.is_over() == True:
                self.lblMessage.configure(text = 'Game Over: tie or computer won')
                self.is_running = False
                return None
| {"/appCoord.py": ["/config/Settings.py"], "/config/Settings.py": ["/service/GameServ.py", "/ui/gui.py", "/ui/console.py"], "/ui/gui.py": ["/service/GameServ.py"]} |
69,801 | iuliagalatan/Gomoku | refs/heads/main | /ui/console.py | from service import *
import time
class UI:
    """Console front-end for the Gomoku game: alternates reading the human
    player's coordinates from stdin with the computer's replies until the
    game service reports the game is over."""
    def __init__(self, game):
        # Game service (service.GameServ.Game) driving the rules.
        self._game = game
    @property
    def game(self):
        return self._game
    def start(self):
        """Run the interactive game loop until is_over() is true."""
        # True while it is the human player's turn.
        player = True
        while self._game.is_over() == False:
            if player == True:
                x = int(input('coord x: '))
                y = int(input('coord y: '))
                try:
                    self._game.move_player(x, y)
                    player = False
                except Exception as e:
                    # Invalid move: report it and let the player retry.
                    print(e)
            else:
                time.sleep(0.5)
                # Computer responds relative to the player's last (x, y).
                self._game.move_computer(x, y)
                player = True
                print('Computers move:')
            # NOTE(review): placement is ambiguous in the source — the board
            # may originally print only after the computer's move; confirm.
            print(self.game.board)
        # The loop exits right after a move, so the side that just moved is
        # the (possible) winner.
        if player == True:
            print('tie or computer won')
        else:
            print('tie or player won')
| {"/appCoord.py": ["/config/Settings.py"], "/config/Settings.py": ["/service/GameServ.py", "/ui/gui.py", "/ui/console.py"], "/ui/gui.py": ["/service/GameServ.py"]} |
69,842 | MFry/HighVolumeAutomationTesting | refs/heads/master | /specialist.py | __author__ = 'Michal'
from string import Template
import subprocess, shutil, logging, os, time, datetime, glob
# Interface between the generator and the specialist: drives the application
# under test (VLC / videoLAN) and provides several conversion options —
# convert a song to a specified format and back again.
specLogger = logging.getLogger('Manager.specialist')
# File types this module knows how to convert (by extension).
handledConv = ['.mp3','.wav']
# Characters that must be stripped from filenames before shelling out to VLC.
unhandledSpecChar = ["'", ',', '_']
optionsMP3 = ['128','160','192','320'] #options for MP3 output bit rate
# Filled in by setPaths()/init(): VLC install dir and test-files dir.
VLCpath= ''
testPath= ''
#TODO: A bug occurs when a file name ends with "test "
#NOTE: Files that may have an extension more than three chars (or less) long will not work at the moment
def init(VLC, testFilesLoc):
    """Record the VLC and test-file locations, then sanitize every test file
    of a handled type (mp3/wav) so later shell commands are safe."""
    setPaths(VLC, testFilesLoc)
    for extension in handledConv:
        matching = glob.glob(testPath + '/*' + extension)
        sanitizeFiles(matching)
def setPaths(VLC, testFilesLoc):
    '''
    Store the working directories in the module-level globals.

    @param VLC -- Path to a VLC folder
    @param testFilesLoc -- Path to the folder which contains all of your test files
    '''
    global VLCpath, testPath
    log = logging.getLogger('Manager.specialist.setPaths')
    VLCpath = VLC
    log.debug('{:1}[PATH]: VLC: {}'.format('', VLC))
    testPath = testFilesLoc
    log.debug('{:1}[PATH] Test Files: {}'.format('', testFilesLoc))
def convertToWAVE(fileName, codec='s16l', channels='2', outputBitRate='128', sampleRate='48000', type='wav'):
    '''
    Given a filename this function will attempt to call VLC and convert it into a WAVE format
    @param fileName -- The name of the file to be converted (with extension)
    @param codec -- the codec used to do the conversion (VLC command line parameter)
    @param channels -- (VLC command line parameter)
    @param outputBitRate -- (VLC command line parameter)
    @param sampleRate -- For the purposes of this particular oracle implementation, we should always keep it at 48kHz (VLC command line parameter)
    @param type -- output mux/container (VLC command line parameter)
    Returns the name of the converted output file (left in testPath).
    '''
    # fileName[:-4] strips the 3-char extension + dot (see module NOTE above).
    outputSongName = getOutputName(fileName[:-4], '.wav')
    # VLC is driven from its own folder, so copy the input next to it first.
    shutil.copy2(testPath+'/'+fileName,VLCpath+'/'+fileName)
    #adding -vvv after dummy creates no cmd screen but dumps everything to stderr
    t = Template ('vlc -I dummy $song ":sout=#transcode{acodec=$codec,channels=$channels,ab=$outputBitRate,samplerate=$sampleRate}:std{access=file,mux=$type,dst=$outputSongName}" vlc://quit')
    command = t.substitute(song='"'+fileName+'"', codec=codec, channels=channels, outputBitRate=outputBitRate,
    sampleRate=sampleRate,type=type, outputSongName=outputSongName)
    print (command)
    # shell=True because the command carries its own quoting; cwd is VLC's dir.
    p = subprocess.Popen(command, cwd=VLCpath, shell=True)
    stdout, stderr = p.communicate()
    #TODO: log stderr and stdout
    #clean up: remove the temp copy, move the converted file back to testPath.
    os.remove(VLCpath+'/'+fileName)
    shutil.move(VLCpath+'/'+outputSongName,testPath+'/'+outputSongName)
    return outputSongName
def convertToMP3(fileName,codec='mpga',outputBitRate='192'):
    """
    Given a filename this function will attempt to convert it (via VLC) to MP3
    with the given codec and output bit rate. Returns the name of the
    converted output file (left in testPath).
    @param outputBitRate -- one of 128, 160, 192, 320 (see optionsMP3)
    INFO: Uncompressed audio as stored on an audio-CD has a bit rate of 1,411.2 kbit/s,
    so the bitrates 128, 160 and 192 kbit/s represent compression ratios of approximately 11:1, 9:1 and 7:1 respectively.
    Non-standard bit rates up to 640 kbit/s can be achieved with the LAME encoder and the freeformat option,
    although few MP3 players can play those files. According to the ISO standard, decoders are only required to be able to decode streams up to 320 kbit/s.
    """
    # fileName[:-4] strips the 3-char extension + dot (see module NOTE above).
    outputSongName = getOutputName(fileName[:-4], '.mp3')
    # VLC is driven from its own folder, so copy the input next to it first.
    shutil.copy2(testPath+'/'+fileName,VLCpath+'/'+fileName)
    t = Template('vlc -I dummy $song ":sout=#transcode{acodec=$codec,ab=$outputBitRate}:std{dst=$outputSongName,access=file}" vlc://quit')
    command = t.substitute(song='"'+fileName+'"', codec=codec, outputBitRate=outputBitRate, outputSongName=outputSongName)
    print (command)
    # shell=True because the command carries its own quoting; cwd is VLC's dir.
    p = subprocess.Popen(command, cwd=VLCpath, shell=True)
    stdout, stderr = p.communicate()
    #log stderr and stdout
    #clean up: remove the temp copy, move the converted file back to testPath.
    os.remove(VLCpath+'/'+fileName)
    shutil.move(VLCpath+'/'+outputSongName,testPath+'/'+outputSongName)
    return outputSongName
def getFunc(funcName):
    """Resolve a conversion-function name to the callable itself; returns
    None for any unrecognised name."""
    if funcName == 'convertToMP3':
        return convertToMP3
    if funcName == 'convertToWAVE':
        return convertToWAVE
    return None
def getOutputName(song, type):
    """Build a non-colliding output name '<song> test <n><type>' where n is
    one more than the highest existing test number for this song/type in
    testPath.

    @param song -- base song name without extension
    @param type -- extension including the dot, e.g. '.wav'
    """
    files = glob.glob(testPath+'/*'+type)
    found = []
    for file in files:
        if song in file:
            # Strip the directory prefix from the glob result.
            # NOTE(review): find('\\')+2 skips one character *beyond* the
            # backslash, whereas cleanFile below uses +1 — confirm which
            # offset is correct for these paths.
            candidate = file[file.find('\\')+2:]
            if 'test' in candidate:
                found.append(candidate)
    testNum = findNextTestNumber(found, type)
    outName = ''
    if 'test' in song:
        # Song already carries a 'test N' suffix: replace just the number.
        # (See module TODO: breaks when the name ends with "test ".)
        outName = song[:song.find('test ')+'test '.__len__()]+str(testNum)+type
    else:
        outName = song+' test '+ str(testNum)+type
    return(outName)
def findNextTestNumber(list, type):
    """Return one more than the highest test number found in the given names.

    Each name is expected to contain 'test <n>' immediately before the
    extension given by `type` (e.g. "song test 3.mp3"); names lacking that
    pattern will raise ValueError. Returns 1 for an empty list.
    (Parameter names are kept for call-site compatibility even though they
    shadow the `list`/`type` builtins.)
    """
    highest = 0
    for file in list:
        # Digits between 'test ' and the extension.
        curNum = int(file[file.find('test ') + len('test '):file.find(type)])
        if curNum > highest:
            highest = curNum
    return highest + 1
def sanitizeFiles(files):
    """Run cleanFile over every path and return the sanitized names.
    (Append-loop replaced with the equivalent list comprehension.)"""
    return [cleanFile(file) for file in files]
#TODO: Make it work for arbitrary number of special chars
def cleanFile(fileName):
    """Remove the first occurrence of each unhandled special character from
    the file's name, renaming the file on disk (inside testPath) if anything
    changed. Returns the (possibly unchanged) sanitized name.
    """
    logger = logging.getLogger('Manager.specialist.cleanFile')
    # Strip the directory prefix left by glob (Windows-style separator).
    fileName = fileName[fileName.find('\\')+1:]
    out = fileName
    sanitized = False
    for specChar in unhandledSpecChar:
        if specChar in fileName:
            sanitized = True
            # Only the first occurrence of each character is removed
            # (see the TODO above).
            out = removeSpecialChar(out, specChar)
    if sanitized:
        logger.info('{:1}[Sanitized] file {} rename to {}'.format('', fileName, out))
        os.rename(testPath+'/'+fileName, testPath+'/'+out)
    return out
def removeSpecialChar(name, char):
    """Return name with the first occurrence of char removed.

    If char is absent, name is returned unchanged. (The original slicing
    corrupted the string in that case: find() returning -1 made it drop the
    last character and append the whole name again.)
    """
    idx = name.find(char)
    if idx == -1:
        return name
    return name[:idx] + name[idx + 1:]
def prep(refSong, testSong):
    '''
    Attempts to clean up the songs, by changing the db gain and resampling the songs to improve the accuracy of the oracle.
    Shells out to the AFsp tools (CompAudio / ResampAudio / CopyAudio) with
    testPath as the working directory; both songs are modified in place.
    NOTE: Proper resampling within the library that the oracle uses does not work, even using their sample code
    this may be the lack of understanding of the code or a bug
    '''
    logger = logging.getLogger('Manager.specialist.prep')
    #make testSong into a tempFile
    #clean it up
    # Compare the two files; the gain and sample counts are scraped from
    # CompAudio's textual report below.
    command = 'CompAudio "'+ refSong +'" "' + testSong + '"'
    p = subprocess.Popen(command, cwd=testPath, shell=True, stdout=subprocess.PIPE)
    stdout, stderr = p.communicate()
    if stderr is None:
        pass
    else:
        logger.error(stderr)
    output = str(stdout)
    # Text between 'File B = ' and ')\r\n Seg.' is the reported dB gain.
    dbGain = output[output.find('File B = ')+'File B = '.__len__(): output.find(')\\r\\n Seg.')]
    samplesNum = []
    samples = output.split('Number of samples : ')
    for samp in samples:
        if ' (' in samp:
            offset = samp.find(' (')
            samplesNum.append(samp[:offset])
    logger.debug('{:5}[Sample Rates]: {} [db Gain]: {}'.format('', samplesNum, dbGain))
    #db gain of one implies that the file does not need to be adjusted, through testing
    # adjusting the db improves accuracy of the oracle
    # NOTE(review): dbGain is a *string* slice here, so `dbGain != 1`
    # compares str to int and is always True — likely float(dbGain) != 1
    # was intended; confirm.
    if dbGain != 1:
        # Apply the measured gain to testSong via ResampAudio (in place,
        # through a temp file).
        tempName = 'tempf.wav'
        os.rename(testPath+'/'+testSong, testPath+'/'+tempName)
        command = 'ResampAudio -i 1 -g ' + str(dbGain) + ' "' + tempName + '" "' + testSong + '"'
        p = subprocess.Popen(command, cwd=testPath, shell=True, stdout=subprocess.PIPE)
        stdout, stderr = p.communicate()
        if stderr is None:
            pass
        else:
            logger.error(stderr)
        #print(str(stdout))
        os.remove(testPath+'/'+tempName)
    # different sampling rates significantly distorts the accuracy of the oracle
    # NOTE(review): samplesNum holds *strings*, so this is a lexicographic
    # comparison, not a numeric one — confirm int() conversion wasn't intended.
    if samplesNum[0] > samplesNum[1]:
        logger.debug('{:5}[Resampling]: {}'.format('', refSong))
        # Truncate refSong to the first samplesNum[0] samples via CopyAudio.
        tempName = 'tempf.wav'
        os.rename(testPath+'/'+refSong, testPath+'/'+tempName)
        command = 'CopyAudio -l 0:' + str(samplesNum[0]) + ' "' + tempName + '" "' + refSong + '"'
        p = subprocess.Popen(command, cwd=testPath, shell=True, stdout=subprocess.PIPE)
        stdout, stderr = p.communicate()
        if stderr is None:
            pass
        else:
            logger.error(stderr)
        #print(str(stdout))
        os.remove(testPath+'/'+tempName)
69,843 | MFry/HighVolumeAutomationTesting | refs/heads/master | /fileManager.py | __author__ = 'Michal'
# This module takes a set of files for testing, moves them and renames them so that they can be easily accessed.
testFileLoc = ''
testData = []
def init(testPath):
    """Record the directory that holds the files under test.

    Bug fix: the original assigned to a function-local ``testFileLoc``,
    so the module-level setting was never updated and stayed ''.

    @param testPath -- directory containing the test files
    """
    global testFileLoc
    testFileLoc = testPath
def convertTestingItems():
'''
Converts all items under test to a specific naming convention so that we do not get any collisions
or break the script that needs to run via the command console. This module has a somewhat stronger coupling with
''' | {"/generator.py": ["/specialist.py", "/stateExpert.py", "/oracle.py"], "/manager.py": ["/specialist.py", "/bob.py", "/stateExpert.py", "/generator.py", "/oracle.py"], "/stateExpert.py": ["/specialist.py"], "/oracle.py": ["/specialist.py"]} |
69,844 | MFry/HighVolumeAutomationTesting | refs/heads/master | /generator.py | __author__ = 'Michal'
import random, logging, specialist, stateExpert, os, oracle
seed = None
tests = None
testFilePath = ''
#statistics
convRun = 0
#Given the knowledge of the specialist generate the tests
#Output the test generated
def init(path, s=42):
    """Configure the generator: seed the RNG and record the test-data directory.

    @param path -- directory that will hold generated test data
    @param s -- deterministic seed for the random module (default 42)
    """
    global seed, tests, testFilePath
    log = logging.getLogger("Manager.generator.set")
    seed, testFilePath = s, path
    random.seed(seed)
    log.info("{:8}[Seed]: {}".format('', seed))
    log.info("{:8}[Tests]: {}".format('', tests))
#TODO: Add control to how many tests we wish to run
def runTests(songs):
    """Convert each song exactly once, in random order, consuming *songs*.

    For every song a conversion is chosen via stateExpert.getTest and the
    matching specialist function is invoked; *songs* is emptied as a side
    effect and the list of generated file names is returned.

    @param songs -- list of song paths (may include a leading 'dir\\\\' prefix)
    @return list of file names produced by the specialist conversions
    """
    logger = logging.getLogger('Manager.generator.runTests')
    songsGen = []
    while songs:
        incCount()
        # Pop the chosen element up front so it is indexed only once
        # (resolves the old TODO about the convoluted triple lookup).
        song = songs.pop(random.randint(0, len(songs) - 1))
        testPos = stateExpert.getTest(song)
        name = song[song.find('\\') + 1:]  # strip the directory prefix
        logger.debug('{:2}[Conversion] #{:4} [Song:] {}'.format('', convRun, name))
        songsGen.append(specialist.getFunc(testPos[0][0])(name))
    return songsGen
def cleanUp(songs):
    """Delete every file named in *songs* from the test-data directory.

    @param songs -- list of file names to delete; the list is emptied as a
                    side effect (each name is popped before removal).
    """
    logger = logging.getLogger('Manager.generator.cleanUp')
    while songs:  # idiomatic emptiness test instead of __len__() > 0
        toDel = songs.pop()
        logger.debug('{:3}[Deletion] [{}] from path {}'.format('', toDel, testFilePath))
        os.remove(testFilePath + '/' + toDel)
def findRef(song):
    """Map a generated test-file name back to its reference recording.

    Generated names contain the marker 'test '; everything before the
    character preceding the marker, plus the oracle's reference extension,
    names the original file.  Names without the marker are already
    references and are returned unchanged.
    """
    at = song.find('test ')
    if at == -1:
        return song
    return song[:at - 1] + oracle.refType
def incCount():
    """Bump the conversion counter and return the new total.

    Returning the count (previously None) matches oracle.incCount's
    behaviour; existing callers that ignore the return value are unaffected.
    """
    global convRun
    convRun += 1
    return convRun
def sanitizeFiles(files):
cleanFiles = []
for file in files:
cleanFiles.append(specialist.cleanFile(file))
return cleanFiles | {"/generator.py": ["/specialist.py", "/stateExpert.py", "/oracle.py"], "/manager.py": ["/specialist.py", "/bob.py", "/stateExpert.py", "/generator.py", "/oracle.py"], "/stateExpert.py": ["/specialist.py"], "/oracle.py": ["/specialist.py"]} |
69,845 | MFry/HighVolumeAutomationTesting | refs/heads/master | /manager.py | __author__ = 'Michal'
from optparse import OptionParser
import inspect, specialist, logging, bob, stateExpert, generator, random, oracle, glob
# The manager is the point of contact between the user and the rest of the modules
# it directly
logName = ''
logHandle = None
#logging set up
def init():
    """Bootstrap the framework: config, logging, specialist, tests, generator.

    Statement order matters: the config must be loaded before the log file
    name is known, and the 'Manager' logger must exist before
    bob.testConfig() starts reporting through it.
    """
    global logHandle, logName
    bob.getConfigData('config.txt')   # read paths and log file name from config.txt
    logName = bob.logFileName
    # logging set up: single file handler on the root 'Manager' logger
    logger = logging.getLogger("Manager")
    logger.setLevel(logging.DEBUG)
    logHandle = logging.FileHandler(logName)
    formatting = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
    logHandle.setFormatter(formatting)
    logger.addHandler(logHandle)
    # check bob paths
    logger.info('LOGGING STARTED')
    bob.testConfig()                              # sanity-check the configured paths
    specialist.init(bob.VLCpath, bob.testPath)
    tests = getTests()                            # discover convertToXXX functions
    mp3Opt = specialist.optionsMP3
    stateExpert.setTests(tests['WAVE'], tests['MP3'], mp3Opt)
    generator.init(bob.testPath)
    # TODO: Bug, if we have multiple ['] we only sanitize it once
    bob.updateTestData()
def getTests():
    """Discover the conversion tests exposed by the specialist module.

    By convention every conversion is a module-level function named
    ``convertToXXX``; the returned dict maps the target format suffix
    (e.g. 'MP3', 'WAVE') to the function name.

    Returns:
        dict: format suffix -> specialist function name.
    """
    loggerT = logging.getLogger("Manager.getTests")
    testsAvail = {}
    for name, data in inspect.getmembers(specialist):
        if name == '__builtins__':
            continue
        if not inspect.isfunction(data):
            continue
        # Match on the function *name* instead of its repr(): repr() also
        # contains the memory address and qualified path, which made the
        # old substring test fragile.
        if name.startswith('convert'):
            testsAvail[name[name.find('To') + 2:]] = name  # keys become MP3, WAVE, ...
            loggerT.debug('{:11} [Specialist]: Function {} :{}'.format('', name, repr(data)))
    return testsAvail
def runBase():
    """Delegate baseline generation to the generator module.

    NOTE(review): ``generator.runBaseLine`` is not defined in the generator
    module visible here — confirm it exists, otherwise this call raises
    AttributeError.
    """
    generator.runBaseLine()
def sanitizeFiles(files):
    """Return a new list with every name passed through specialist.cleanFile."""
    return [specialist.cleanFile(entry) for entry in files]
def testAll(fileFormat):
    """Run the oracle over every previously generated test file of *fileFormat*.

    Files whose name contains 'test ' are treated as generated copies; each
    is compared against the reference recording derived via generator.findRef.
    """
    generated = []
    for song in glob.glob(bob.testPath + '/*' + fileFormat):
        if 'test ' in song:
            print(song)
            generated.append(song)
    for test in generated:
        test = test[test.find('\\') + 1:]   # strip the directory prefix
        ref = generator.findRef(test)
        ref = ref[ref.find('\\') + 1:]
        oracle.compare(ref, test)
# --- script entry: this whole section runs on import/execution of the module ---
init()
# First pass converts every source song once; the intermediate results are
# converted again so the oracle can compare a twice-converted copy against
# its original reference.
songsComp = generator.runTests(bob.testData)
toTest = generator.runTests(songsComp)
print(songsComp)
generator.cleanUp(songsComp)   # the intermediate files are no longer needed
res = []
for test in toTest:
    test = test[test.find('\\')+1:]   # strip the directory prefix
    ref = generator.findRef(test)
    ref = ref[ref.find('\\')+1:]
    res.append(oracle.compare(ref,test))
#generator.cleanUp(toTest)
#generator.cleanUp(songsComp)
res.append(oracle.compare('A Ja Tzo Saritsa.wav', 'A Ja Tzo SaritsaBAD.wav')) #testing bad data
oracle.resultStats(res)
| {"/generator.py": ["/specialist.py", "/stateExpert.py", "/oracle.py"], "/manager.py": ["/specialist.py", "/bob.py", "/stateExpert.py", "/generator.py", "/oracle.py"], "/stateExpert.py": ["/specialist.py"], "/oracle.py": ["/specialist.py"]} |
69,846 | MFry/HighVolumeAutomationTesting | refs/heads/master | /stateExpert.py | __author__ = 'Michal'
import specialist, logging
#interface between generator and the specialist
tests = {}
def setTests(WAVTest, MP3Test, MP3Options):
    """Register the available conversion tests with this module.

    @param WAVTest - function performing the WAV conversion
    @param MP3Test - function performing the MP3 conversion
    @param MP3Options - tweakable conversion parameters (e.g. bitrate, codec)
    """
    global tests
    log = logging.getLogger('Manager.stateExpert.setTests')
    # Mutate the shared dict in place so existing references stay valid.
    tests.update({'mp3Tests': MP3Test, 'mp3Op': MP3Options, 'wavTests': WAVTest})
    log.debug("[Tests]: {}".format(tests))
def getTest(file):
    """Return the tests and options applicable to *file*, based on extension.

    @param file -- name of the file we may wish to test
    @return a tuple of two lists:
            1. functions that can be run on this file type
            2. parameters that vary each function

    Improvements over the original:
    - extension extracted directly (no double string reversal)
    - comparison is case-insensitive ('.WAV' now works)
    - unsupported extensions raise ValueError instead of crashing with
      UnboundLocalError
    """
    global tests
    logger = logging.getLogger('Manager.stateExpert.getTest')
    dot = file.rfind('.')
    songType = file[dot:].lower() if dot != -1 else ''
    if songType == '.wav':
        out = [tests['mp3Tests'], []], [tests['mp3Op'], []]
    elif songType == '.mp3':
        out = [tests['wavTests'], tests['mp3Tests']], [tests['mp3Op'], []]
    else:
        raise ValueError('Unsupported file type: {!r}'.format(file))
    logger.debug(out)
    return out
| {"/generator.py": ["/specialist.py", "/stateExpert.py", "/oracle.py"], "/manager.py": ["/specialist.py", "/bob.py", "/stateExpert.py", "/generator.py", "/oracle.py"], "/stateExpert.py": ["/specialist.py"], "/oracle.py": ["/specialist.py"]} |
69,847 | MFry/HighVolumeAutomationTesting | refs/heads/master | /oracle.py | __author__ = 'Michal'
import subprocess, numpy, logging, specialist
# This module is used to interface with an oracle from the TSP Lab
# (url http://www-mmsp.ece.mcgill.ca/documents/Software/Packages/AFsp/AFsp.html)
# Currently, because of the weakness of the oracle and the lack of mathematical sophistication
# the false negative rate is rather high.
testPath = '' #location of folder containing test files
answer = 'Objective Difference Grade:' #string we are looking for
refType = '.wav'
testsRun = 0
sensitivity = 1
def init(path):
    """Remember the directory that holds the oracle's test files.

    Bug fix: the original assigned a function-local ``testPath``, so the
    module-level setting was never updated and stayed ''.
    """
    global testPath
    testPath = path
def testRun():
    """Smoke test: compare test1.wav against itself to confirm PQevalAudio
    is installed and reachable from the configured test directory.
    """
    command = 'PQevalAudio "' + 'test1.wav' + '" "' + 'test1.wav' + '"'
    proc = subprocess.Popen(command, shell=True, cwd=testPath, stdout=subprocess.PIPE)
    raw, _ = proc.communicate()
    print(findResults(raw))
def compareToSelf(song):
    '''
    Establishes a baseline of what is considered a perfect score by comparing a song against itself
    (self-comparison yields the best score the oracle can produce).
    '''
    return compare(song,song)
def compare(reference, test):
    '''
    @param reference -- The original "untouched" song (should be a .wav at 48kHz)
    @param test -- The copy of the song which was modified through a conversion (or synthetic means to create a "bad" song (static noise, drop in volume, ect.)
    Compares two songs which need to be .wav at 48000Khz (for accuracy), this will attempt to isolate an identical sample size and account for db variation
    @return the value within the string outputed by PQevalAudio, more information about the value can be found here (http://www-mmsp.ece.mcgill.ca/documents/Software/Packages/AFsp/PQevalAudio.html)
    '''
    # Normalise gain / sample counts first so PQevalAudio compares like with like.
    specialist.prep(reference, test)
    testNum = incCount()
    logger = logging.getLogger('Manager.oracle.compare')
    logger.debug('{:6}[PQevalAudio] Reference: {} test file {}'.format('', reference,test))
    # NOTE(review): shell=True with interpolated file names is fine for trusted
    # local test data but unsafe if names ever come from untrusted input.
    p = subprocess.Popen('PQevalAudio "'+reference +'" "'+ test +'"', shell=True, cwd=testPath, stdout=subprocess.PIPE)
    output, stderr = p.communicate()
    # Only stdout is piped, so stderr is always None here unless Popen changes.
    if stderr is None:
        pass
    else:
        logger.error(stderr)
    res = findResults(output)
    print(res)
    logger.info('{:7}[PQevalAudio] [Test]#: {} [Results]: {}'.format('', testNum, res))
    return res
def findResults(output):
    """Extract the numeric grade from PQevalAudio's raw output.

    @param output -- bytes (or str) emitted by PQevalAudio; the grade follows
                     the 'Objective Difference Grade:' marker and runs until
                     the first backslash of the str()-encoded line ending.

    NOTE(review): if the marker is absent, find() returns -1 and the slice is
    garbage — callers currently rely on the marker always being present.
    """
    output = str(output)
    # len() instead of the dunder call; the +1 skips the space after the colon.
    output = output[output.find(answer) + len(answer) + 1:]
    return output[:output.find('\\')]
def incCount():
    '''
    Bump the count of evaluated test cases and hand back the new total.
    @return -- the current number of evaluated tests
    '''
    global testsRun
    testsRun = testsRun + 1
    return testsRun
def resultStats(res):
    '''
    Log statistics for the numeric scores produced by compare().
    Flags any score at or below average - sensitivity * standard deviation
    (scores are negative; more negative means worse).

    @param res -- list of score strings convertible to float
    @return the number of flagged ("possible bug") results; previously None,
            so existing callers that ignore the return are unaffected.
    '''
    results = [float(result) for result in res]
    stdDev = numpy.std(results)
    avg = numpy.average(results)
    logger = logging.getLogger('Manager.oracle.resultStats')
    logger.info('{:3}[Statistical Information]: '.format(''))
    logger.info('{:3}[Data]: {} '.format('', str(res)))
    logger.info('{:3}[Total]: {} '.format('', str(len(res))))
    logger.info('{:3}[Average]: {}'.format('', avg))
    logger.info('{:3}[Standard Deviation]: {}'.format('', stdDev))
    # Hoist the loop-invariant threshold out of the scan.
    threshold = avg - sensitivity * stdDev
    problems = sum(1 for result in results if result <= threshold)
    #TODO: Create a dictionary of keys -> results and store the names to give more information
    logger.info('{:3}[Possible Bugs]: {}'.format('', problems))
    return problems
| {"/generator.py": ["/specialist.py", "/stateExpert.py", "/oracle.py"], "/manager.py": ["/specialist.py", "/bob.py", "/stateExpert.py", "/generator.py", "/oracle.py"], "/stateExpert.py": ["/specialist.py"], "/oracle.py": ["/specialist.py"]} |
69,848 | MFry/HighVolumeAutomationTesting | refs/heads/master | /bob.py | __author__ = 'Michal'
# Initializes the basics and does some basic sanity checks to see whether the parameters set make sense.
import glob, os, sys, logging
fileFormat = "/*.wav"
testData = []
VLCpath=''
testPath=''
testDataType=''
logFileName=''
#read the user defined parameters
#Folder with test data
#File Format
#Seed
params = ['files' , 'vlc', 'testDataType', 'logFileName']
def getConfigData(fileName):
    '''
    Given a config file bob will set up the necessary paths for VLC, test data and the log.
    Each line is expected to be key=value; lines without '=' are skipped.

    @param fileName -- Config file that will be loaded
    '''
    try:
        # with-statement guarantees the handle is closed (the original leaked it)
        with open(fileName, 'r') as handle:
            config = handle.read()
    except FileNotFoundError as err:
        # message previously said "Log file", which was misleading
        print("Config file {} could not be found".format(err), file=sys.stderr)
        sys.exit(1)
    for line in config.split('\n'):
        key, sep, value = line.partition('=')
        if sep:  # skip blank/malformed lines instead of feeding garbage to update()
            update(key, value)
def update(toUpdate, data):
    '''
    Sets the module path/config variable named by the key *toUpdate* to *data*.
    Unknown keys are silently ignored.
    '''
    # TODO: Log the set paths
    # TODO: Improve error handling, tell the user what was not found
    key_to_global = {
        'vlc': 'VLCpath',            # VLC install directory
        'files': 'testPath',         # directory holding the test data
        'testDataType': 'testDataType',
        'logFileName': 'logFileName',
    }
    target = key_to_global.get(toUpdate)
    if target is not None:
        globals()[target] = data
#TODO: Improve error handling, tell the user what was not found
def testConfig():
    '''
    Ensures that the paths provided are at least real and contain the data
    types declared earlier; caches the discovered test files in testData.
    '''
    global testData
    logger = logging.getLogger('Manager.bob.testConfig')
    # Both directories get the same found/not-found treatment.
    checks = (
        (VLCpath, "VLC directory not found!"),
        (testPath, "Test file directory not found!"),
    )
    for directory, missing_msg in checks:
        if os.path.isdir(directory):
            logger.info("{:7}Directory {} found.".format('', directory))
        else:
            logger.error(missing_msg)
    testData = glob.glob(testPath + '/*' + testDataType)
    if testData:
        logger.info("{:7}Test files count: {}".format('', len(testData)))
    else:
        logger.error("No test files found")
def updateTestData():
    '''
    Re-scan the test directory and cache the list of test files in testData.
    Logs the count, or an error when the scan yields nothing.
    '''
    global testData
    logger = logging.getLogger('Manager.bob.updateTestData')
    testData = glob.glob(testPath + '/*' + testDataType)
    if testData:  # idiomatic emptiness check instead of __len__() > 0
        logger.info("{:3}[Updated] Test files count {}".format('', len(testData)))
    else:
        logger.error("No test files found")
'''
#Unit test
getConfigData('config3.txt')
print (testPath)
print (VLCpath)
print (testDataType)
print (logFileName)
testConfig()
'''
| {"/generator.py": ["/specialist.py", "/stateExpert.py", "/oracle.py"], "/manager.py": ["/specialist.py", "/bob.py", "/stateExpert.py", "/generator.py", "/oracle.py"], "/stateExpert.py": ["/specialist.py"], "/oracle.py": ["/specialist.py"]} |
69,863 | smichaelshal/game-vue | refs/heads/main | /django/drf/core/migrations/0007_auto_20201027_1817.py | # Generated by Django 3.1.2 on 2020-10-27 18:17
from django.db import migrations
class Migration(migrations.Migration):
    """Auto-generated schema change: rename Musician.first_name to 'number'
    and drop the 'instrument' and 'last_name' fields."""

    dependencies = [
        ('core', '0006_auto_20201027_1813'),
    ]

    operations = [
        migrations.RenameField(
            model_name='musician',
            old_name='first_name',
            new_name='number',
        ),
        migrations.RemoveField(
            model_name='musician',
            name='instrument',
        ),
        migrations.RemoveField(
            model_name='musician',
            name='last_name',
        ),
    ]
| {"/django/drf/core/views.py": ["/django/drf/core/models.py"], "/django/drf/core/admin.py": ["/django/drf/core/models.py"], "/django/drf/core/urls.py": ["/django/drf/core/views.py"]} |
69,864 | smichaelshal/game-vue | refs/heads/main | /django/drf/core/migrations/0019_auto_20201028_0649.py | # Generated by Django 3.1.2 on 2020-10-28 06:49
from django.db import migrations
class Migration(migrations.Migration):
    """Auto-generated schema change: drop the Life and NumberRange models
    (they are re-introduced in a later migration)."""

    dependencies = [
        ('core', '0018_life_numberrange'),
    ]

    operations = [
        migrations.DeleteModel(
            name='Life',
        ),
        migrations.DeleteModel(
            name='NumberRange',
        ),
    ]
| {"/django/drf/core/views.py": ["/django/drf/core/models.py"], "/django/drf/core/admin.py": ["/django/drf/core/models.py"], "/django/drf/core/urls.py": ["/django/drf/core/views.py"]} |
69,865 | smichaelshal/game-vue | refs/heads/main | /django/drf/core/migrations/0009_delete_album.py | # Generated by Django 3.1.2 on 2020-10-27 18:22
from django.db import migrations
class Migration(migrations.Migration):
    """Auto-generated schema change: drop the Album model."""

    dependencies = [
        ('core', '0008_question'),
    ]

    operations = [
        migrations.DeleteModel(
            name='Album',
        ),
    ]
| {"/django/drf/core/views.py": ["/django/drf/core/models.py"], "/django/drf/core/admin.py": ["/django/drf/core/models.py"], "/django/drf/core/urls.py": ["/django/drf/core/views.py"]} |
69,866 | smichaelshal/game-vue | refs/heads/main | /django/drf/core/models.py | from django.db import models
from django.contrib.auth.models import User
from django.dispatch import receiver
from django.urls import reverse
from django_rest_passwordreset.signals import reset_password_token_created
from django.core.mail import send_mail
@receiver(reset_password_token_created)
def password_reset_token_created(sender, instance, reset_password_token, *args, **kwargs):
    """Email the reset link when django_rest_passwordreset creates a token.

    NOTE(review): the reset URL is hard-coded to one Heroku deployment, and
    the message body includes the username — confirm both are intended.
    """
    email_plaintext_message = f"{'https://djangovueattackapplice3.herokuapp.com/reset-password-new'}?token={reset_password_token.key}"
    send_mail(
        # title:
        "Password Reset for {title} {usernames}".format(title="Some website title", usernames=reset_password_token.user.username),
        # message:
        f'{email_plaintext_message} \n {reset_password_token.user.username}',
        # from:
        "noreply@somehost.local",
        # to:
        [reset_password_token.user.email]
    )
    # print("reset_password_token.user", reset_password_token.user.username)
class Life(models.Model):
    """Per-user hit-point counter for the attack game (one row per user)."""
    # remaining lives; decremented by LifeAPI.post, initialised to 10 on registration
    number = models.IntegerField()
    user = models.OneToOneField(User, on_delete=models.CASCADE)

    def __str__(self):
        return f'{self.user} {self.number}'
| {"/django/drf/core/views.py": ["/django/drf/core/models.py"], "/django/drf/core/admin.py": ["/django/drf/core/models.py"], "/django/drf/core/urls.py": ["/django/drf/core/views.py"]} |
69,867 | smichaelshal/game-vue | refs/heads/main | /django/drf/core/views.py | from django.shortcuts import render
# Create your views here.
from django.contrib.auth import authenticate
from django.views.decorators.csrf import csrf_exempt
from rest_framework.authtoken.models import Token
from rest_framework.decorators import api_view, permission_classes
from rest_framework.permissions import AllowAny
from rest_framework.status import (
HTTP_400_BAD_REQUEST,
HTTP_404_NOT_FOUND,
HTTP_200_OK
)
from rest_framework.response import Response
from rest_framework import generics
from .serializers import UserSerializer, RegisterSerializer
from rest_framework import serializers
from .models import Life
from django.contrib.auth.models import User
from rest_framework import status
from .serializers import ChangePasswordSerializer
from rest_framework.permissions import IsAuthenticated
@csrf_exempt
@api_view(["POST"])
@permission_classes((AllowAny,))
def login(request):
    """Issue a DRF auth token for valid credentials.

    Rejects the request when credentials are missing or wrong, or when the
    user already holds a token (i.e. is already logged in).
    """
    username = request.data.get("username")
    password = request.data.get("password")
    if username is None or password is None:
        return Response({'msg': 'Please provide both username and password'},
                        status=HTTP_400_BAD_REQUEST)
    user = authenticate(username=username, password=password)
    if not user:
        return Response({'msg': 'Username or password is incorrect.'},
                        status=HTTP_400_BAD_REQUEST)
    # exists() avoids materialising the queryset just to count it
    if Token.objects.filter(user=user).exists():
        return Response({'msg': 'The user is already logged in.'},
                        status=HTTP_400_BAD_REQUEST)
    token, _ = Token.objects.get_or_create(user=user)
    return Response({'token': token.key},
                    status=HTTP_200_OK)
@csrf_exempt
@api_view(["GET"])
def sample_api(request):
    """Echo the caller's raw token back (debug/sample endpoint).

    NOTE(review): assumes an 'Authorization: Token <key>' header is present;
    a missing/malformed header raises and yields a 500 — confirm this is
    debug-only.
    """
    data = {'sample_data': request.headers["Authorization"].split(' ')[1]}
    # [Authorization]
    return Response(data, status=HTTP_200_OK)
class RegisterAPI(generics.GenericAPIView):
    """Open registration endpoint: creates the user plus its Life row (10 lives)."""
    serializer_class = RegisterSerializer
    permission_classes = []

    def post(self, request, *args, **kwargs):
        serializer = self.get_serializer(data=request.data)
        serializer.is_valid(raise_exception=True)
        password = request.data['password']
        password2 = request.data['password2']
        if password != password2:
            # fixed typo in the user-facing message ("Passowrd")
            raise serializers.ValidationError({'msg': 'Password must match.'})
        user = serializer.save()
        # every new player starts with 10 lives
        tempLife = Life(user=user, number=10)
        tempLife.save()
        return Response({"msg": "You have successfully registered"})
class LogoutAPI(generics.GenericAPIView):
    """Deletes the caller's auth token, ending the session."""

    def post(self, request, *args, **kwargs):
        token_key = request.headers["Authorization"].split(' ')[1]
        # get() instead of .all().filter(...)[0]; raises if the token is unknown
        user = Token.objects.get(key=token_key).user
        user.auth_token.delete()
        # fixed copy-paste: this endpoint logs the user *out*
        return Response({"msg": "User logged out successfully."})
from rest_framework import status
from rest_framework import generics
from rest_framework.response import Response
from django.contrib.auth.models import User
from .serializers import ChangePasswordSerializer
from rest_framework.permissions import IsAuthenticated
class ChangePasswordView(generics.UpdateAPIView):
    """
    An endpoint for changing password.

    The acting user is resolved from the Authorization token rather than
    request.user (no authentication class is enforced here).
    """
    serializer_class = ChangePasswordSerializer
    model = User
    # permission_classes = (IsAuthenticated,)

    def get_object(self, queryset=None):
        return self.request.user

    def update(self, request, *args, **kwargs):
        token_key = request.headers["Authorization"].split(' ')[1]
        # resolve the user from the token (get() instead of .all().filter(...)[0]);
        # the original assigned self.object twice — the first value was dead.
        self.object = Token.objects.get(key=token_key).user
        serializer = self.get_serializer(data=request.data)
        if serializer.is_valid():
            # Removed debug print statements — one of them wrote the
            # submitted old password to stdout.
            if not self.object.check_password(serializer.data.get("old_password")):
                return Response({"old_password": ["Wrong password."]}, status=HTTP_400_BAD_REQUEST)
            # set_password hashes the new password before it is stored
            self.object.set_password(serializer.data.get("new_password"))
            self.object.save()
            response = {
                'status': 'success',
                'code': HTTP_200_OK,
                'message': 'Password updated successfully',
                'data': []
            }
            return Response(response)
        return Response(serializer.errors, status=HTTP_400_BAD_REQUEST)
# ----------------------------------------------------------------------------------------
class LifeAPI(generics.GenericAPIView):
    """Game endpoints: POST attacks another player, GET lists everyone's lives."""

    def post(self, request, *args, **kwargs):
        token_key = request.headers["Authorization"].split(' ')[1]
        # resolving the token authenticates the caller (raises if invalid);
        # get() replaces the old .all().filter(...)[0] pattern
        Token.objects.get(key=token_key)
        target = User.objects.get(username=request.data["toattack"])
        life = Life.objects.get(user=target)
        if life.number != 0:  # never drop below zero
            life.number -= 1
            life.save()
        return Response({"life": life.number})

    def get(self, request, *args, **kwargs):
        token_key = request.headers["Authorization"].split(' ')[1]
        Token.objects.get(key=token_key)
        # map username -> remaining lives for every player
        return Response({life.user.username: life.number for life in Life.objects.all()})
# return Response({"msg": "User logged in successfully."})
# Token.objects.all().filter(key="34ac5679ff32fa927660d471cc585635f12f7a07")[0].user
| {"/django/drf/core/views.py": ["/django/drf/core/models.py"], "/django/drf/core/admin.py": ["/django/drf/core/models.py"], "/django/drf/core/urls.py": ["/django/drf/core/views.py"]} |
69,868 | smichaelshal/game-vue | refs/heads/main | /django/drf/core/migrations/0014_auto_20201028_0628.py | # Generated by Django 3.1.2 on 2020-10-28 06:28
import django.core.validators
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated schema change: constrain Life.number to 0..10 with a
    default of 10."""

    dependencies = [
        ('core', '0013_auto_20201027_1844'),
    ]

    operations = [
        migrations.AlterField(
            model_name='life',
            name='number',
            field=models.IntegerField(default=10, validators=[django.core.validators.MaxValueValidator(10), django.core.validators.MinValueValidator(0)]),
        ),
    ]
| {"/django/drf/core/views.py": ["/django/drf/core/models.py"], "/django/drf/core/admin.py": ["/django/drf/core/models.py"], "/django/drf/core/urls.py": ["/django/drf/core/views.py"]} |
69,869 | smichaelshal/game-vue | refs/heads/main | /django/drf/core/migrations/0005_auto_20201027_1807.py | # Generated by Django 3.1.2 on 2020-10-27 18:07
from django.db import migrations
class Migration(migrations.Migration):
    """Auto-generated schema change: rename NumberRange.number to 'numbers'."""

    dependencies = [
        ('core', '0004_numberrange'),
    ]

    operations = [
        migrations.RenameField(
            model_name='numberrange',
            old_name='number',
            new_name='numbers',
        ),
    ]
| {"/django/drf/core/views.py": ["/django/drf/core/models.py"], "/django/drf/core/admin.py": ["/django/drf/core/models.py"], "/django/drf/core/urls.py": ["/django/drf/core/views.py"]} |
69,870 | smichaelshal/game-vue | refs/heads/main | /django/drf/core/migrations/0011_auto_20201027_1824.py | # Generated by Django 3.1.2 on 2020-10-27 18:24
from django.db import migrations
class Migration(migrations.Migration):
    """Auto-generated schema change: detach Question from User, then drop the
    Album, Musician and Question models entirely."""

    dependencies = [
        ('core', '0010_album'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='question',
            name='user',
        ),
        migrations.DeleteModel(
            name='Album',
        ),
        migrations.DeleteModel(
            name='Musician',
        ),
        migrations.DeleteModel(
            name='Question',
        ),
    ]
| {"/django/drf/core/views.py": ["/django/drf/core/models.py"], "/django/drf/core/admin.py": ["/django/drf/core/models.py"], "/django/drf/core/urls.py": ["/django/drf/core/views.py"]} |
69,871 | smichaelshal/game-vue | refs/heads/main | /django/drf/core/admin.py | from django.contrib import admin
# from .models import Life, NumberRange
# admin.site.register(NumberRange)
from .models import Life
admin.site.register(Life)
| {"/django/drf/core/views.py": ["/django/drf/core/models.py"], "/django/drf/core/admin.py": ["/django/drf/core/models.py"], "/django/drf/core/urls.py": ["/django/drf/core/views.py"]} |
69,872 | smichaelshal/game-vue | refs/heads/main | /django/drf/core/migrations/0013_auto_20201027_1844.py | # Generated by Django 3.1.2 on 2020-10-27 18:44
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated schema change: drop NumberRange's implicit 'id' column
    and make 'number' the auto-incrementing primary key."""

    dependencies = [
        ('core', '0012_life_numberrange'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='numberrange',
            name='id',
        ),
        migrations.AlterField(
            model_name='numberrange',
            name='number',
            field=models.AutoField(primary_key=True, serialize=False),
        ),
    ]
| {"/django/drf/core/views.py": ["/django/drf/core/models.py"], "/django/drf/core/admin.py": ["/django/drf/core/models.py"], "/django/drf/core/urls.py": ["/django/drf/core/views.py"]} |
69,873 | smichaelshal/game-vue | refs/heads/main | /django/drf/core/urls.py | from django.urls import path, include
from .views import RegisterAPI, LogoutAPI, ChangePasswordView, login
from .views import sample_api, LifeAPI
urlpatterns = [
path('api/register/', RegisterAPI.as_view(), name='register'),
path('api/login', login),
path('api/logout/', LogoutAPI.as_view(), name='logout'),
path('api/change-password/', ChangePasswordView.as_view(), name='change-password'),
path('api/password_reset/', include('django_rest_passwordreset.urls', namespace='password_reset')),
path('api/sampleapi', sample_api),
path('api/life/', LifeAPI.as_view(), name='life'),
] | {"/django/drf/core/views.py": ["/django/drf/core/models.py"], "/django/drf/core/admin.py": ["/django/drf/core/models.py"], "/django/drf/core/urls.py": ["/django/drf/core/views.py"]} |
69,896 | Cho1s/Site1 | refs/heads/master | /taskmanager/main/migrations/0004_categorie_description.py | # Generated by Django 3.2.6 on 2021-09-01 17:59
from django.db import migrations, models
import django.utils.timezone
class Migration(migrations.Migration):
    """Auto-generated schema change: add a required 'description' text field
    to Categorie (temporary timezone.now default used only to backfill
    existing rows; preserve_default=False removes it afterwards)."""

    dependencies = [
        ('main', '0003_auto_20210901_2023'),
    ]

    operations = [
        migrations.AddField(
            model_name='categorie',
            name='description',
            field=models.TextField(default=django.utils.timezone.now, max_length=100, verbose_name='Описание'),
            preserve_default=False,
        ),
    ]
| {"/taskmanager/main/views.py": ["/taskmanager/main/models.py"], "/taskmanager/main/admin.py": ["/taskmanager/main/models.py"]} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.