index
int64 | repo_name
string | branch_name
string | path
string | content
string | import_graph
string |
|---|---|---|---|---|---|
1,755
|
Redent0r/Libra
|
refs/heads/master
|
/master_admin.py
|
### std lib ###
import sys
import sqlite3
import time
import os
### PyQt4 ###
from PyQt4 import QtCore, QtGui, QtSql
### GUIs ###
from gui_inventory import Ui_MainWindow as InventoryGui
from gui_purchase import Ui_Dialog as PurchaseGui
from gui_sale import Ui_Dialog as SaleGui
from gui_client import Ui_Dialog as ClientGui
from gui_modify import Ui_Dialog as ModifyGui
from gui_move import Ui_Dialog as MoveGui
from gui_client_modify import Ui_Dialog as ClientModifyGui
import mec_inventory#, stresstest
class Inventory (QtGui.QMainWindow, InventoryGui):
    """Main application window.

    Shows the inventory, purchases, sales and clients tables (QSqlQueryModel
    instances behind case-insensitive QSortFilterProxyModel filters) together
    with the balance views, all backed by one SQLite database file.  The same
    file is opened twice: once through QtSql for the view models and once
    through sqlite3 for the mec_inventory business logic.
    """
    ### constants ###
    useNas = False ### change this to use nas
    DB_LOCATION = ".libra.db" # database file path

    def __init__ (self, parent=None):
        """Build the UI, open the database and wire up every widget signal."""
        start = time.time()
        ### sets up visual gui ###
        QtGui.QMainWindow.__init__(self, parent) # explicit parent keeps the object hierarchy intact on exit
        self.setupUi(self)
        self.setAttribute(QtCore.Qt.WA_DeleteOnClose) # release the window object when closed
        ### Database connection, used by the QSqlQueryModel instances ###
        self.db = QtSql.QSqlDatabase.addDatabase('QSQLITE')
        self.db.setDatabaseName(self.DB_LOCATION)
        self.db.open()
        ### Table models ###
        self.mdlClients = QtSql.QSqlQueryModel()
        self.mdlPurchases = QtSql.QSqlQueryModel()
        self.mdlSales = QtSql.QSqlQueryModel()
        self.mdlInventory = QtSql.QSqlQueryModel()
        # balance models
        self.mdlPurchasesBal = QtSql.QSqlQueryModel()
        self.mdlSalesBal = QtSql.QSqlQueryModel()
        ### sort filter proxy models ###
        self.proxyInventory = QtGui.QSortFilterProxyModel()
        self.proxyInventory.setSourceModel(self.mdlInventory)
        self.proxyPurchases = QtGui.QSortFilterProxyModel()
        self.proxyPurchases.setSourceModel(self.mdlPurchases)
        self.proxySales = QtGui.QSortFilterProxyModel()
        self.proxySales.setSourceModel(self.mdlSales)
        self.proxyClients = QtGui.QSortFilterProxyModel()
        self.proxyClients.setSourceModel(self.mdlClients)
        # balance proxies
        self.proxyPurchasesBal = QtGui.QSortFilterProxyModel()
        self.proxyPurchasesBal.setSourceModel(self.mdlPurchasesBal)
        self.proxySalesBal = QtGui.QSortFilterProxyModel()
        self.proxySalesBal.setSourceModel(self.mdlSalesBal)
        ### proxy filter parameters ###
        self.proxyInventory.setFilterCaseSensitivity(QtCore.Qt.CaseInsensitive) # case insensitive
        self.proxyPurchases.setFilterCaseSensitivity(QtCore.Qt.CaseInsensitive)
        self.proxySales.setFilterCaseSensitivity(QtCore.Qt.CaseInsensitive)
        self.proxyClients.setFilterCaseSensitivity(QtCore.Qt.CaseInsensitive)
        # balance proxies
        self.proxyPurchasesBal.setFilterCaseSensitivity(QtCore.Qt.CaseInsensitive)
        self.proxySalesBal.setFilterCaseSensitivity(QtCore.Qt.CaseInsensitive)
        ### setting models to tables ###
        self.tblInventory.setModel(self.proxyInventory)
        self.tblPurchases.setModel(self.proxyPurchases)
        self.tblSales.setModel(self.proxySales)
        self.tblClients.setModel(self.proxyClients)
        # balance tables
        self.tblPurchasesBal.setModel(self.proxyPurchasesBal)
        self.tblSalesBal.setModel(self.proxySalesBal)
        ### Actions functionality ###
        self.actionRefresh.triggered.connect(self.refreshTables)
        self.actionPurchase.triggered.connect(self.action_purchase)
        self.actionSale.triggered.connect(self.action_sale)
        self.actionClient.triggered.connect(self.action_client)
        self.btnModifyInventory.clicked.connect(self.modify_inventory)
        self.btnMove.clicked.connect(self.move_item)
        self.btnRemovePurchase.clicked.connect(self.remove_purchase)
        self.btnRemoveSale.clicked.connect(self.reverse_sale)
        self.btnSettle.clicked.connect(self.settle_debt)
        self.btnRemoveClient.clicked.connect(self.remove_client)
        self.btnModifyClient.clicked.connect(self.modify_client)
        self.leditInventory.textEdited.connect(lambda: self.search(self.leditInventory.text(), self.proxyInventory))
        self.leditPurchases.textEdited.connect(lambda: self.search(self.leditPurchases.text(), self.proxyPurchases))
        self.leditSales.textEdited.connect(lambda: self.search(self.leditSales.text(), self.proxySales))
        self.leditClients.textEdited.connect(lambda: self.search(self.leditClients.text(), self.proxyClients))
        self.cmboxInventory.activated.connect(lambda: self.combo_box_changed(self.cmboxInventory, self.proxyInventory))
        self.cmboxPurchases.activated.connect(lambda: self.combo_box_changed(self.cmboxPurchases, self.proxyPurchases))
        self.cmboxSales.activated.connect(lambda: self.combo_box_changed(self.cmboxSales, self.proxySales))
        self.cmboxClients.activated.connect(lambda: self.combo_box_changed(self.cmboxClients, self.proxyClients))
        self.radioHistoric.toggled.connect(lambda: self.set_balance(self.radioHistoric))
        self.radioAnnual.toggled.connect(lambda: self.set_balance(self.radioAnnual))
        self.radioMonthly.toggled.connect(lambda: self.set_balance(self.radioMonthly))
        self.dateAnnual.dateChanged.connect(lambda: self.set_balance(self.radioAnnual))
        self.dateMonthly.dateChanged.connect(lambda: self.set_balance(self.radioMonthly))
        self.calBalance.selectionChanged.connect(self.calendar_changed)
        self.calBalance.showToday()
        ### Creates tables if they do not exist, for mec_inventory ###
        self.conn = sqlite3.connect(self.DB_LOCATION)
        self.c = self.conn.cursor()
        mec_inventory.create_tables(self.conn, self.c)
        ########################## STRESSS TESTTTTTT ################################
        #stresstest.test_entries(self.conn, self.c, 10)
        #stresstest.test_entries(self.conn, self.c, 100)
        #stresstest.test_entries(self.conn, self.c, 250)
        #stresstest.test_entries(self.conn, self.c, 500)
        #stresstest.test_entries(self.conn, self.c, 1000)
        ################################################################################
        self.set_balance(self.radioHistoric)
        self.refreshTables()
        ### header labels; the same lists feed the per-tab search combo boxes ###
        headers = ["Code", "Name", "Group", "Available Quantity", "Unit Cost",
                   "Suggested Price", "Minimum Quantity", "Maximum Quantity", "Category"]
        for i in range(len(headers)):
            self.mdlInventory.setHeaderData(i, QtCore.Qt.Horizontal, headers[i])
        self.cmboxInventory.addItems(headers) # add headers to combo box
        headers = ["Date", "Transaction", "Code", "Name", "Group", "Quantity", "Vendor",
                   "Unit Cost", "Total Cost", "Category"]
        for i in range(len(headers)):
            self.mdlPurchases.setHeaderData(i, QtCore.Qt.Horizontal, headers[i])
        self.cmboxPurchases.addItems(headers)
        headers = ["Date", "Transaction", "Code", "Name", "Group", "Quantity", "Unit Price",
                   "Total Price", "Client", "Pay"]
        for i in range(len(headers)):
            self.mdlSales.setHeaderData(i, QtCore.Qt.Horizontal, headers[i])
        self.cmboxSales.addItems(headers)
        headers = ["ID", "Name", "Invested", "Debt",
                   "E-mail", "Phone", "Cellphone"]
        for i in range(len(headers)):
            self.mdlClients.setHeaderData(i, QtCore.Qt.Horizontal, headers[i])
        self.cmboxClients.addItems(headers)
        # balance table headers
        headers = ["Date", "Transaction", "Code", "Quantity", "Total Cost"]
        for i in range(len(headers)):
            self.mdlPurchasesBal.setHeaderData(i, QtCore.Qt.Horizontal, headers[i])
        headers = ["Date", "Transaction", "Code", "Quantity", "Total Price"]
        for i in range(len(headers)):
            self.mdlSalesBal.setHeaderData(i, QtCore.Qt.Horizontal, headers[i])
        ### table uniform stretch ###
        self.tblInventory.horizontalHeader().setResizeMode(QtGui.QHeaderView.Interactive)
        self.tblPurchases.horizontalHeader().setResizeMode(QtGui.QHeaderView.Interactive)
        self.tblSales.horizontalHeader().setResizeMode(QtGui.QHeaderView.Interactive)
        self.tblClients.horizontalHeader().setResizeMode(QtGui.QHeaderView.Interactive)
        # balance stretch
        self.tblBalance.horizontalHeader().setResizeMode(QtGui.QHeaderView.Stretch)
        self.tblBalance.verticalHeader().setResizeMode(QtGui.QHeaderView.Stretch)
        self.tblPurchasesBal.horizontalHeader().setResizeMode(QtGui.QHeaderView.Interactive)
        self.tblSalesBal.horizontalHeader().setResizeMode(QtGui.QHeaderView.Interactive)
        end = time.time()
        print("constructor time: " + str(end - start))

    def refreshTables(self):
        """Re-run every table query so all views reflect the current DB state."""
        start = time.time()
        self.mdlInventory.setQuery("""SELECT code, name, groupx, avail, costUni, priceUniSug,
            stockmin, stockmax, category FROM Inventory""", self.db)
        self.mdlPurchases.setQuery("""SELECT dat, trans, code, name, groupx, quantity,
            provider, costUni, costItems, category FROM Entries""", self.db)
        self.mdlSales.setQuery("""SELECT dat, trans, code, name, groupx, quantity, priceUni,
            priceItems, client, payment FROM Outs""", self.db)
        self.mdlClients.setQuery("""SELECT identification, name, money_invested, debt,
            mail, num, cel FROM Clients""", self.db)
        # balance tables
        self.mdlPurchasesBal.setQuery(""" SELECT dat, trans, code, quantity, costItems
            FROM Entries """, self.db)
        self.mdlSalesBal.setQuery("""SELECT dat, trans, code, quantity,
            priceItems FROM Outs""", self.db)
        end = time.time()
        print("refresh time: " + str(end - start))

    def settle_debt(self):
        """Mark the selected credit sale as paid, after user confirmation."""
        index = self.tblSales.selectionModel().selectedRows()
        if index:
            row = int(index[0].row()) # selected row
            code = self.proxySales.data(self.proxySales.index(row, 1)) # col 0 = date, col 1 = transaction code
            msgbox = QtGui.QMessageBox(QtGui.QMessageBox.Icon(4), "Settle",
                                       "Are you sure you wish to settle\n"
                                       "the debt generated by sale number: " + code + "?", parent=self)
            btnYes = msgbox.addButton("Yes", QtGui.QMessageBox.ButtonRole(0)) # yes
            btnNo = msgbox.addButton("No", QtGui.QMessageBox.ButtonRole(1)) # no
            msgbox.exec_()
            if msgbox.clickedButton() == btnYes:
                mec_inventory.paid(self.conn, self.c, code)
                QtGui.QMessageBox.information(self, 'Message', "The debt generated by sale number: " + code +
                                              "\nhas been settled successfully")
                self.refreshTables()
        else:
            QtGui.QMessageBox.information(self, 'Message', "Please select the sale by\n" +
                                          "credit you wish to settle")

    def calendar_changed(self):
        """Show the daily balance for the date picked in the calendar widget."""
        start = time.time()
        self.radioDaily.setChecked(True)
        date = str(self.calBalance.selectedDate().toPyDate()) # ISO "YYYY-MM-DD"
        self.search(date, self.proxyPurchasesBal)
        self.search(date, self.proxySalesBal)
        items = mec_inventory.calc_bal_day(self.c, date[0:4], date[5:7], date[8:10])
        self.tblBalance.setItem(0, 2, QtGui.QTableWidgetItem('$ {0:.2f}'.format(items[2]))) # cash sales
        self.tblBalance.setItem(1, 2, QtGui.QTableWidgetItem('$ {0:.2f}'.format(items[3]))) # credit sales
        self.tblBalance.setItem(2, 2, QtGui.QTableWidgetItem('$ {0:.2f}'.format(items[1]))) # total income
        self.tblBalance.setItem(3, 1, QtGui.QTableWidgetItem('$ {0:.2f}'.format(items[0]))) # cost
        self.tblBalance.setItem(4, 1, QtGui.QTableWidgetItem('$ {0:.2f}'.format(items[5]))) # tax
        self.tblBalance.setItem(5, 2, QtGui.QTableWidgetItem('$ {0:.2f}'.format(items[6]))) # profit
        if items[0] != 0: # avoid division by zero when there was no cost
            self.tblBalance.setItem(6, 2, QtGui.QTableWidgetItem('% {0:.2f}'.format(items[6]/items[0] * 100)))
        else:
            self.tblBalance.setItem(6, 2, QtGui.QTableWidgetItem('% 0.00'))
        end = time.time()
        print("cal: " + str(end - start))

    def set_balance(self, radioButton):
        """Fill the balance table for the period selected by *radioButton*
        (historic, annual or monthly) and filter the balance detail tables."""
        start = time.time()
        if radioButton.isChecked():
            items = []
            if radioButton == self.radioHistoric:
                self.search("", self.proxyPurchasesBal)
                self.search("", self.proxySalesBal)
                items = mec_inventory.calc_bal_his(self.c)
                # [totalCost, totalPrice, cash, credit, totalIncome, totalTax, totalProfit]
            elif radioButton == self.radioAnnual:
                date = str(self.dateAnnual.date().toPyDate())
                self.search(date[0:4], self.proxyPurchasesBal)
                self.search(date[0:4], self.proxySalesBal)
                items = mec_inventory.calc_bal_year(self.c, date[0:4])
                # [totalCost, totalPrice, cash, credit, totalIncome, totalTax, totalProfit]
            else: # monthly radio button
                date = str(self.dateMonthly.date().toPyDate())
                self.search((date[0:4] + "-" + date[5:7]), self.proxyPurchasesBal)
                self.search((date[0:4] + "-" + date[5:7]), self.proxySalesBal)
                items = mec_inventory.calc_bal_mes(self.c, date[0:4], date[5:7])
                # [totalCost, totalPrice, cash, credit, totalIncome, totalTax, totalProfit]
            self.tblBalance.setItem(0, 2, QtGui.QTableWidgetItem('$ {0:.2f}'.format(items[2])))
            self.tblBalance.setItem(1, 2, QtGui.QTableWidgetItem('$ {0:.2f}'.format(items[3])))
            self.tblBalance.setItem(2, 2, QtGui.QTableWidgetItem('$ {0:.2f}'.format(items[1])))
            self.tblBalance.setItem(3, 1, QtGui.QTableWidgetItem('$ {0:.2f}'.format(items[0])))
            self.tblBalance.setItem(4, 1, QtGui.QTableWidgetItem('$ {0:.2f}'.format(items[5])))
            self.tblBalance.setItem(5, 2, QtGui.QTableWidgetItem('$ {0:.2f}'.format(items[6])))
            if items[0] != 0: # avoid division by zero when there was no cost
                self.tblBalance.setItem(6, 2, QtGui.QTableWidgetItem('% {0:.2f}'.format(items[6]/items[0] * 100)))
            else:
                self.tblBalance.setItem(6, 2, QtGui.QTableWidgetItem('% 0.00'))
        end = time.time()
        print("bal: " + str(end - start))

    def combo_box_changed(self, comboBox, proxy):
        """Point *proxy*'s filter at the column selected in *comboBox*."""
        proxy.setFilterKeyColumn(comboBox.currentIndex())

    def search(self, text, proxy):
        """Filter *proxy* to rows whose filter column starts with *text*."""
        proxy.setFilterRegExp("^" + text)

    def move_item(self):
        """Open the Move dialog for the item selected in the inventory table."""
        index = self.tblInventory.selectionModel().selectedRows() ### list of indexes
        if index:
            row = int(index[0].row()) # selected row
            code = self.proxyInventory.data(self.proxyInventory.index(row, 0))
            group = self.proxyInventory.data(self.proxyInventory.index(row, 2))
            available = self.proxyInventory.data(self.proxyInventory.index(row, 3))
            move = Move(code, available, group, self)
            move.show()
        else:
            QtGui.QMessageBox.information(self, 'Message', "Please select the \n" +
                                          "item you wish to move")

    def modify_inventory(self):
        """Open the ModifyInventory dialog for the selected inventory item."""
        index = self.tblInventory.selectionModel().selectedRows() ### list of indexes
        if index:
            row = int(index[0].row()) # selected row
            code = self.proxyInventory.data(self.proxyInventory.index(row, 0))
            group = self.proxyInventory.data(self.proxyInventory.index(row, 2))
            modifyInventory = ModifyInventory(code, group, self)
            modifyInventory.show()
            self.tblInventory.clearSelection() # clear choice
        else:
            QtGui.QMessageBox.information(self, 'Message', "Please select the \n" +
                                          "item you wish to modify")

    def remove_client(self):
        """Delete the selected client after user confirmation."""
        index = self.tblClients.selectionModel().selectedRows()
        if index:
            row = int(index[0].row()) # selected row
            name = self.proxyClients.data(self.proxyClients.index(row, 1)) # col 1 = client name
            msgbox = QtGui.QMessageBox(QtGui.QMessageBox.Icon(4), "Delete",
                                       "Are you sure you want to delete: " + name + "?", parent=self)
            btnYes = msgbox.addButton("Yes", QtGui.QMessageBox.ButtonRole(0)) # yes
            btnNo = msgbox.addButton("No", QtGui.QMessageBox.ButtonRole(1)) # no
            msgbox.exec_()
            if msgbox.clickedButton() == btnYes:
                if mec_inventory.del_client_name(self.conn, self.c, name):
                    self.refreshTables() # refresh
                    QtGui.QMessageBox.information(self, 'Message', "The client: " + name +
                                                  "\nhas been deleted successfully") # fixed typo "sucessfully"
                else:
                    QtGui.QMessageBox.critical(self, 'Error', 'An unexpected error has occurred.\n'+
                                               'Please try again.')
                self.tblClients.clearSelection() # clear choice
        else:
            QtGui.QMessageBox.information(self, 'Message', "Please select the \n" +
                                          "client you wish to delete")

    def modify_client(self):
        """Open the ModifyClient dialog for the selected client."""
        index = self.tblClients.selectionModel().selectedRows()
        if index:
            row = int(index[0].row()) # selected row
            name = self.proxyClients.data(self.proxyClients.index(row, 1)) # col 1 = client name
            modifyClient = ModifyClient(name, self)
            modifyClient.show()
        else:
            QtGui.QMessageBox.information(self, 'Message', "Please select the \n" +
                                          "client you wish to modify")

    def remove_purchase(self):
        """Delete the selected purchase record after user confirmation.

        Inventory quantities are NOT adjusted automatically (see message)."""
        index = self.tblPurchases.selectionModel().selectedRows()
        if index:
            row = int(index[0].row()) # selected row
            code = self.proxyPurchases.data(self.proxyPurchases.index(row, 1)) # col 1 = transaction code
            msgbox = QtGui.QMessageBox(QtGui.QMessageBox.Icon(4), "Delete",
                                       "Are you sure you want to delete purchase\n"
                                       " number: " + code + "?", parent=self)
            btnYes = msgbox.addButton("Yes", QtGui.QMessageBox.ButtonRole(0)) # yes
            btnNo = msgbox.addButton("No", QtGui.QMessageBox.ButtonRole(1)) # no
            msgbox.exec_()
            if msgbox.clickedButton() == btnYes:
                if mec_inventory.del_general(self.conn, self.c, code):
                    self.refreshTables() # refresh
                    QtGui.QMessageBox.information(self, 'Message', "Purchase number: " + code +
                                                  "\nhas been deleted successfully.\n" +
                                                  "Inventory must be reduced manually")
                else:
                    QtGui.QMessageBox.critical(self, 'Error', 'An unexpected error has occurred.\n'+
                                               'Please try again.')
                self.tblPurchases.clearSelection() # clear choice
        else:
            QtGui.QMessageBox.information(self, 'Message', "Please select the\n" +
                                          "purchase that you want to delete")

    def reverse_sale(self):
        """Reverse (delete) the selected sale record after user confirmation."""
        index = self.tblSales.selectionModel().selectedRows()
        if index:
            row = int(index[0].row()) # selected row
            code = self.proxySales.data(self.proxySales.index(row, 1)) # col 1 = transaction code
            # fixed: messages below used to say "purchase" even though this acts on sales
            msgbox = QtGui.QMessageBox(QtGui.QMessageBox.Icon(4), "Reverse",
                                       "Are you sure you want to reverse\n"
                                       "sale number: " + code + "?", parent=self)
            btnYes = msgbox.addButton("Yes", QtGui.QMessageBox.ButtonRole(0)) # yes
            btnNo = msgbox.addButton("No", QtGui.QMessageBox.ButtonRole(1)) # no
            msgbox.exec_()
            if msgbox.clickedButton() == btnYes:
                if mec_inventory.del_general(self.conn, self.c, code):
                    self.refreshTables() # refresh
                    QtGui.QMessageBox.information(self, 'Message', "Sale number: " + code +
                                                  "\nhas been reversed successfully")
                else:
                    QtGui.QMessageBox.critical(self, 'Error', 'An unexpected error has occurred.\n'+
                                               'Please try again.')
                self.tblSales.clearSelection() # clear choice
        else:
            QtGui.QMessageBox.warning(self, 'Message', "Please select the\n" +
                                      "sale you want to reverse")

    def action_client(self):
        """Open the new-client dialog."""
        client = Client(self)
        client.show()

    def action_sale(self):
        """Open the new-sale dialog."""
        sale = Sale(self)
        sale.show()

    def action_purchase(self):
        """Open the new-purchase dialog."""
        purchase = Purchase(self)
        purchase.show()

    def closeEvent(self, event):
        """Confirm exit; on yes, close both database handles before leaving."""
        msgbox = QtGui.QMessageBox(QtGui.QMessageBox.Icon(4), "Warning",
                                   "Are you sure you want to exit?", parent=self)
        btnYes = msgbox.addButton("Yes", QtGui.QMessageBox.ButtonRole(0)) # yes
        btnNo = msgbox.addButton("No", QtGui.QMessageBox.ButtonRole(1)) # no
        msgbox.exec_()
        if msgbox.clickedButton() == btnYes:
            self.db.close()
            self.c.close()
            self.conn.close()
            event.accept()
        else:
            event.ignore()
class Purchase(QtGui.QDialog, PurchaseGui):
    """Dialog used to register a purchase (stock entry) into the inventory."""

    def __init__ (self, parent=None):
        """Set up the form, borrow the DB connection from the parent window and
        populate the code/category combo boxes from existing inventory rows."""
        QtGui.QDialog.__init__(self, parent)
        self.setupUi(self)
        ### functionality ###
        self.btnAdd.clicked.connect(self.add)
        self.btnUndo.clicked.connect(self.undo)
        self.spnboxMargin.valueChanged.connect(self.margin_changed)
        self.spnboxPrice.valueChanged.connect(self.price_changed)
        self.spnboxCost.valueChanged.connect(self.cost_changed)
        ### connection, from parent #######
        self.conn = self.parent().conn
        self.c = self.parent().c
        ### category combo box config ###
        self.cmboxCategory.addItems(mec_inventory.unique(self.c, "category", "Inventory"))
        self.cmboxCategory.completer().setCompletionMode(QtGui.QCompleter.PopupCompletion)
        ### code combo box ###
        self.cmBoxCode.addItems(mec_inventory.unique(self.c, "code", "Inventory"))
        self.cmBoxCode.completer().setCompletionMode(QtGui.QCompleter.PopupCompletion)
        self.cmBoxCode.setEditText("")
        self.cmBoxCode.activated.connect(self.code_return)
        self.cmboxGroup.activated.connect(self.group_return)
        self.code = "" # last code queried; avoids reloading groups for a repeated selection

    def cost_changed(self):
        """Reset margin and price whenever the unit cost is edited."""
        self.spnboxMargin.setValue(0)
        self.spnboxPrice.setValue(0)

    def price_changed(self):
        """Recompute the margin (%) from the current cost and price."""
        cost = self.spnboxCost.value()
        if cost > 0: # avoid division by zero
            price = self.spnboxPrice.value()
            margin = (price/cost - 1) * 100
            self.spnboxMargin.setValue(margin)

    def margin_changed(self):
        """Recompute the price from the current cost and margin (%)."""
        margin = self.spnboxMargin.value()
        cost = self.spnboxCost.value()
        price = cost * (1 + margin/100)
        self.spnboxPrice.setValue(price)

    def code_return(self):
        """When a code is picked, reload its group list and prefill the form."""
        code = self.cmBoxCode.currentText()
        if self.code != code:
            self.cmboxGroup.clear()
            # NOTE(review): arguments spelled "group"/"inventory" here but
            # "groupx"/"Inventory" elsewhere in this file -- confirm against mec_inventory
            self.cmboxGroup.addItems(mec_inventory.unique(self.c, "group", "inventory", "code", code))
            self.code = code
            self.group_return()

    def group_return(self):
        """Prefill the form from the DB record of the current code/group pair."""
        code = self.cmBoxCode.currentText()
        group = self.cmboxGroup.currentText()
        query = mec_inventory.query_add(self.c, code, group) ### temp error
        if query:
            self.leditName.setText(query[0]) # name
            self.spnboxCost.setValue(query[1]) # cost
            self.spnboxPrice.setValue(query[2]) # suggested price
            self.cmboxCategory.setEditText(query[3]) # category
            self.spnBoxMin.setValue(query[4]) # min stock
            self.spnBoxMax.setValue(query[5]) # max stock
            self.price_changed()
        else:
            QtGui.QMessageBox.information(self, 'Message', ' No previous records of this code have\n'+
                                          'been found. New records will be created.')

    def undo(self):
        """Reset every field of the form to its default value."""
        self.leditName.clear()
        self.spnboxCost.setValue(0)
        self.spnBoxQuantity.setValue(1)
        self.spnboxMargin.setValue(0)
        self.spnboxPrice.setValue(0)
        self.cmboxCategory.clearEditText()
        self.cmboxGroup.clearEditText()
        self.leditVendor.clear()
        self.spnBoxMin.setValue(1)
        self.spnBoxMax.setValue(100)
        self.cmBoxCode.clearEditText()

    def add(self):
        """Validate the form and store the purchase after user confirmation."""
        code = self.cmBoxCode.currentText()
        name = self.leditName.text().capitalize()
        if code != "" and name != "":
            msgbox = QtGui.QMessageBox(QtGui.QMessageBox.Icon(4), "Purchase",
                                       "Are you sure you want to\n"
                                       "store this purchase?", parent=self)
            btnYes = msgbox.addButton("Yes", QtGui.QMessageBox.ButtonRole(0)) # yes
            btnNo = msgbox.addButton("No", QtGui.QMessageBox.ButtonRole(1)) # no
            msgbox.exec_()
            if msgbox.clickedButton() == btnYes:
                start = time.time()
                cost = self.spnboxCost.value()
                price = self.spnboxPrice.value()
                quantity = self.spnBoxQuantity.value()
                group = self.cmboxGroup.currentText()
                cat = self.cmboxCategory.currentText().capitalize()
                vendor = self.leditVendor.text().capitalize()
                stockMin = self.spnBoxMin.value()
                stockMax = self.spnBoxMax.value()
                ### store the entry ###
                successful = mec_inventory.add_item_entry(self.conn, self.c, code, name,
                    quantity, vendor, cost, price, group, cat, stockMin, stockMax)
                if successful:
                    self.parent().refreshTables()
                    self.undo() # this has to go after refresh
                    QtGui.QMessageBox.information(self, 'Message', 'This purchase has been\n'+
                                                  'registered successfully') # fixed typo "regstered"
                    self.close()
                else:
                    QtGui.QMessageBox.critical(self, 'Error', 'An unexpected error occurred.\n'+
                                               'Please try again')
                end = time.time()
                print("compra time: " + str(end-start))
        elif code == "":
            QtGui.QMessageBox.warning(self, 'Warning', 'Please enter a code')
        else: # name == ""
            QtGui.QMessageBox.warning(self, 'Warning', 'Please enter a name')
class Sale(QtGui.QDialog, SaleGui):
    """Dialog used to build a shopping cart and register a sale.

    Keeps two parallel structures: a QStandardItemModel for display
    (self.model / tblItems) and self.abstractTable, a list of per-line lists
    [code, quantity, taxRate, unitPrice, discountFraction, "CRE"/"DEB",
    client, group] consumed by mec_inventory.shopping_cart.
    """

    def __init__(self, parent=None):
        """Set up the form, the cart models and a mini inventory browser."""
        QtGui.QDialog.__init__(self, parent)
        self.setupUi(self)
        ### functionality ###
        self.btnInsert.clicked.connect(self.add)
        self.btnUndo.clicked.connect(self.undo)
        self.btnConfirm.clicked.connect(self.confirm)
        self.btnDelete.clicked.connect(self.delete_entry)
        self.spnboxPrice.valueChanged.connect(self.price_changed)
        self.spnBoxMargin.valueChanged.connect(self.margin_changed)
        self.spnBoxQuantity.valueChanged.connect(self.quantity_changed)
        self.tblInventory.clicked.connect(self.table_clicked)
        ### client combo box config ###
        self.cmboxClient.setModel(self.parent().mdlClients)
        self.cmboxClient.setModelColumn(1)
        self.cmboxClient.completer().setCompletionMode(QtGui.QCompleter.PopupCompletion)
        self.cmboxClient.setEditText("")
        ### cart table ###
        self.model = QtGui.QStandardItemModel()
        self.model.setColumnCount(5)
        header = ["Code", "Name", "Item Price", "Quantity", "Total Price"]
        self.model.setHorizontalHeaderLabels(header)
        self.tblItems.setModel(self.model)
        ### abstract table / list of lists, mirrors the cart rows ###
        self.abstractTable = []
        ### mini inventory browser ###
        self.mdlInventory = QtSql.QSqlQueryModel()
        self.proxyInventory = QtGui.QSortFilterProxyModel()
        self.proxyInventory.setSourceModel(self.mdlInventory)
        self.tblInventory.setModel(self.proxyInventory)
        self.refresh_inventory()
        header = ["Code", "Name", "Available", "Group"]
        for i in range(len(header)):
            self.mdlInventory.setHeaderData(i, QtCore.Qt.Horizontal, header[i])
        self.cmboxInventory.addItems(header) # add headers to combo box
        self.tblInventory.horizontalHeader().setResizeMode(QtGui.QHeaderView.Interactive)
        # search functionality
        self.cmboxInventory.activated.connect(self.combo_box_changed)
        self.leditInventory.textChanged.connect(lambda: self.search(self.leditInventory.text()))
        self.proxyInventory.setFilterCaseSensitivity(QtCore.Qt.CaseInsensitive) # case insensitive
        ### sqlite3 connection, from parent ###
        self.conn = self.parent().conn
        self.c = self.parent().c

    def combo_box_changed(self):
        """Point the inventory filter at the column chosen in the combo box."""
        self.proxyInventory.setFilterKeyColumn(self.cmboxInventory.currentIndex())

    def search(self, text):
        """Filter the mini inventory to rows starting with *text*."""
        self.proxyInventory.setFilterRegExp("^" + text)

    def refresh_inventory(self):
        """Re-run the mini inventory query (uses the parent's QtSql handle)."""
        self.mdlInventory.setQuery("""SELECT code, name, avail, groupx
            FROM Inventory""", self.parent().db) # uses parent connection

    def table_clicked(self):
        """Prefill the sale line fields from the clicked inventory row."""
        self.spnBoxQuantity.setValue(1) # reset quantity
        index = self.tblInventory.selectionModel().selectedRows() ### list of indexes
        if not index: # fixed: a click with no row selection used to raise IndexError
            return
        row = int(index[0].row()) # selected row
        code = self.proxyInventory.data(self.proxyInventory.index(row, 0))
        group = self.proxyInventory.data(self.proxyInventory.index(row, 3))
        query = mec_inventory.query_sale(self.c, code, group)
        if query:
            self.leditCode.setText(code) # arg
            self.leditName.setText(query[0])
            self.leditGroup.setText(group)
            self.spnboxPrice.setValue(query[1])
            self.spnboxCost.setValue(query[2])
            self.price_changed()
        else:
            QtGui.QMessageBox.critical(self, 'Error', "An unexpected error has occurred.\n" +
                                       "Please try again")
            self.refresh_inventory()

    def margin_changed(self):
        """Recompute the unit price from cost and margin, then the line total."""
        price = (1 + (self.spnBoxMargin.value() / 100)) * self.spnboxCost.value()
        self.spnboxPrice.setValue(price)
        self.quantity_changed()

    def quantity_changed(self):
        """Recompute the line total (unit price * quantity)."""
        priceTotalItem = self.spnboxPrice.value() * self.spnBoxQuantity.value()
        self.spnBoxTotalItemPrice.setValue(priceTotalItem)

    def refreshTotals(self):
        """Recompute subtotal, taxes, discounts and grand total from the cart."""
        if self.abstractTable:
            taxes = 0.0
            discounts = 0.0
            subtotal = 0.0
            for line in self.abstractTable:
                taxes += line[2] * line[3] * line[1] # tax * price * quantity
                discounts += (1 + line[2]) * line [3] * line[4] * line[1] # (1 + tax) * price * discount * quantity
                subtotal += line[3] * line[1] # price * quantity
            self.spnBoxSubtotal.setValue(subtotal)
            self.spnBoxTaxT.setValue(taxes)
            self.spnBoxDiscountT.setValue(discounts)
            self.spnBoxGrandTotal.setValue(subtotal + taxes - discounts)
        else:
            self.spnBoxSubtotal.setValue(0)
            self.spnBoxTaxT.setValue(0)
            self.spnBoxDiscountT.setValue(0)
            self.spnBoxGrandTotal.setValue(0)

    def delete_entry(self):
        """Remove the selected line from the cart and refresh the totals."""
        index = self.tblItems.selectionModel().selectedRows() ### list of indexes
        if (index):
            row = int(index[0].row()) # selected row
            self.model.removeRow(row)
            del self.abstractTable[row] # deletes from abstract table
            # fixed: the client box used to be re-enabled whenever row 0 was
            # removed, even with other cart lines still pending; unlock only
            # once the cart is empty
            if not self.abstractTable:
                self.cmboxClient.setEnabled(True)
            self.refreshTotals()
            self.tblItems.clearSelection()
        else:
            QtGui.QMessageBox.information(self, 'Message', 'Please select the line\n' +
                                          'you wish to remove')

    def price_changed(self):
        """Recompute the margin from cost and price, then the line total."""
        if self.spnboxCost.value() > 0: # avoid division by zero
            margin = (self.spnboxPrice.value() / self.spnboxCost.value()) * 100 - 100
            self.spnBoxMargin.setValue(margin) # sets margin
        self.quantity_changed()

    def undo (self):
        """Reset the sale-line fields to their default values."""
        self.leditCode.clear()
        self.leditName.clear()
        self.leditGroup.clear()
        self.spnboxCost.setValue(0)
        self.spnboxPrice.setValue(0)
        self.spnBoxQuantity.setValue(1)
        self.spnBoxMargin.setValue(0)
        self.spnboxDiscount.setValue(0)
        self.chkBoxItbms.setChecked(True)
        self.chkBoxCredit.setChecked(False)
        self.spnBoxTotalItemPrice.setValue(0.00)

    def add(self):
        """Validate the current line and append it to the shopping cart."""
        ### table view ###
        code = self.leditCode.text()
        if code != "":
            client = self.cmboxClient.currentText()
            quantity = self.spnBoxQuantity.value()
            group = self.leditGroup.text()
            error = mec_inventory.sale_valid(self.c, code, client, quantity, group) # returns list of errors
            if not error:
                ### shopping cart table ###
                line = []
                line.append(QtGui.QStandardItem(self.leditCode.text()))
                line.append(QtGui.QStandardItem(self.leditName.text()))
                line.append(QtGui.QStandardItem(self.spnboxPrice.text()))
                line.append(QtGui.QStandardItem(self.spnBoxQuantity.text()))
                line.append(QtGui.QStandardItem(self.spnBoxTotalItemPrice.text()))
                self.model.appendRow(line)
                ### abstract table ###
                line = []
                line.append(self.leditCode.text()) # 0: code
                line.append(quantity) # 1: quantity
                line.append(float(0.07 if self.chkBoxItbms.isChecked() else 0.0)) # 2: tax rate
                line.append(self.spnboxPrice.value()) # 3: unit price
                line.append(self.spnboxDiscount.value() / 100) # 4: discount as a fraction
                line.append("CRE" if self.chkBoxCredit.isChecked() else "DEB") # 5: payment type
                line.append(self.cmboxClient.currentText()) # 6: client
                line.append(self.leditGroup.text()) # 7: group
                self.abstractTable.append(line)
                self.refreshTotals()
                self.undo()
                self.cmboxClient.setEnabled(False) # lock the client while the cart has lines
            elif 3 in error: # error code for missing client
                QtGui.QMessageBox.information(self, 'Message', 'No previous records of this client\n' +
                                              'have been found. Please create it')
                newClient = Client(self.parent())
                newClient.leditName.setText(client)
                newClient.show()
            elif 2 in error: # error code for insufficient stock
                QtGui.QMessageBox.warning(self, 'Warning', 'The item quantity you wish to sell\n' +
                                          'is not available in your inventory')
            else:
                QtGui.QMessageBox.critical(self, 'Error', 'An unexpected error has occurred.\n' +
                                           'Please try again')
                self.refresh_inventory()
        else: # code == ""
            QtGui.QMessageBox.warning(self, 'Error', 'Please select\n' +
                                      'an inventory item')

    def confirm(self):
        """Store the whole cart as one sale after user confirmation."""
        if self.abstractTable:
            msgbox = QtGui.QMessageBox(QtGui.QMessageBox.Icon(4), "Sell",
                                       "Are you sure you\n"
                                       "want to make this sale?", parent=self)
            btnYes = msgbox.addButton("Yes", QtGui.QMessageBox.ButtonRole(0)) # yes
            btnNo = msgbox.addButton("No", QtGui.QMessageBox.ButtonRole(1)) # no
            msgbox.exec_()
            if msgbox.clickedButton() == btnYes:
                start = time.time()
                if mec_inventory.shopping_cart(self.conn, self.c, self.abstractTable):
                    self.parent().refreshTables()
                    del self.abstractTable [:]
                    for i in range(self.model.rowCount()): # empty the cart view
                        self.model.removeRow(0)
                    self.refreshTotals()
                    self.cmboxClient.clearEditText()
                    self.undo()
                    self.cmboxClient.setEnabled(True)
                    end = time.time()
                    print("time venta: " + str(end - start))
                    QtGui.QMessageBox.information(self, 'Message', 'The transaction has been\n'+
                                                  'registered successfully')
                else:
                    QtGui.QMessageBox.critical(self, 'Error', 'An unexpected error has occurred.\n' +
                                               'Please try again')
                self.refresh_inventory() # regardless of success
        else:
            QtGui.QMessageBox.warning(self, 'Warning', 'Please insert an item\n' +
                                      'to be sold')
class Client(QtGui.QDialog, ClientGui):
    """Dialog used to register a new client in the database."""

    def __init__(self, parent=None):
        """Set up the form, install phone-number validators and borrow the
        database connection from the parent window."""
        QtGui.QDialog.__init__(self, parent)
        self.setupUi(self)
        ### functionality ###
        self.btnUndo.clicked.connect(self.undo)
        self.btnAdd.clicked.connect(self.anadir)
        ### validators: digits, dashes and parentheses only ###
        phone_pattern = QtCore.QRegExp("^[0-9-()]*$")
        phone_validator = QtGui.QRegExpValidator(phone_pattern)
        for field in (self.leditPhone, self.leditCellphone, self.leditFax):
            field.setValidator(phone_validator)
        ### connection, from parent ###
        self.conn = self.parent().conn
        self.c = self.parent().c

    def anadir(self):
        """Validate the form and store the new client after confirmation."""
        name = self.leditName.text().title()
        if name == "":
            QtGui.QMessageBox.warning(self, 'Warning', 'Please insert a name')
            return
        confirm = QtGui.QMessageBox(QtGui.QMessageBox.Icon(4), "Add Client",
                                    "Are you sure you want to\n"
                                    "add this client?", parent=self)
        yes_button = confirm.addButton("Yes", QtGui.QMessageBox.ButtonRole(0))
        confirm.addButton("No", QtGui.QMessageBox.ButtonRole(1))
        confirm.exec_()
        if confirm.clickedButton() != yes_button:
            return
        start = time.time()
        client_id = self.leditID.text()
        phone = self.leditPhone.text()
        cellphone = self.leditCellphone.text()
        address = self.leditAddress.text().capitalize()
        email = self.leditEmail.text()
        fax = self.leditFax.text()
        stored = mec_inventory.add_client(self.conn, self.c, client_id, name,
                                          email, phone, cellphone, fax, address)
        if stored:
            self.parent().refreshTables()
            self.undo()
            QtGui.QMessageBox.information(self, 'Message', 'The client has been\n'
                                          'added successfully')
        else:
            QtGui.QMessageBox.warning(self, 'Error', 'The client that you are trying\n'
                                      'to add already exists')
        end = time.time()
        print("time cliente: " + str(end - start))

    def undo(self):
        """Clear every input field of the form."""
        for field in (self.leditName, self.leditID, self.leditPhone,
                      self.leditCellphone, self.leditAddress, self.leditFax,
                      self.leditEmail):
            field.clear()
class ModifyInventory(QtGui.QDialog, ModifyGui):
    """Dialog for editing an existing inventory item (stock, price, cost...).

    Fix: the profit-margin computations previously divided by ``self.cost``
    unconditionally (in ``__init__`` and ``undo``), raising ZeroDivisionError
    for items stored with a zero cost. They now go through ``_margin_for``,
    matching the ``cost > 0`` guard already used in ``price_changed``.
    """

    def __init__(self, code, group, parent=None):
        QtGui.QDialog.__init__(self, parent)
        self.setupUi(self)
        # parent connection
        self.conn = self.parent().conn
        self.c = self.parent().c
        self.leditCode.setText(code)
        self.cmboxGroup.addItem(group)
        self.cmboxGroup.addItem("Global")
        items = mec_inventory.query_modify(self.c, code, group)
        # Returns [available, suggested unit price, unit cost, category,
        #          stock min, stock max, name]
        if items:
            self.available = items[0]
            self.price = items[1]
            self.cost = items[2]
            self.category = items[3]
            self.min = items[4]
            self.max = items[5]
            self.name = items[6]
            self.spnboxAvailable.setValue(self.available)
            self.spnboxPrice.setValue(self.price)
            self.spnboxCost.setValue(self.cost)
            self.cmboxCategory.setEditText(self.category)
            self.spnboxMin.setValue(self.min)
            self.spnboxMax.setValue(self.max)
            self.leditName.setText(self.name)
            self.spnboxMargin.setValue(self._margin_for(self.price, self.cost))
        ### functionality ###
        self.btnModify.clicked.connect(self.modify_inventory)
        self.btnUndo.clicked.connect(self.undo)
        self.spnboxMargin.valueChanged.connect(self.margin_changed)
        self.spnboxPrice.valueChanged.connect(self.price_changed)
        self.spnboxCost.valueChanged.connect(self.cost_changed)

    @staticmethod
    def _margin_for(price, cost):
        """Profit margin in percent; 0 when cost is 0 (avoids ZeroDivisionError)."""
        return ((price / cost) - 1) * 100 if cost else 0

    def modify_inventory(self):
        """Ask for confirmation, then persist the edited fields."""
        msgbox = QtGui.QMessageBox(QtGui.QMessageBox.Icon(4), "Modify",
                                   "Are you sure you want\n"
                                   "to modify this item?", parent=self)
        btnYes = msgbox.addButton("Yes", QtGui.QMessageBox.ButtonRole(0))  # yes
        msgbox.addButton("No", QtGui.QMessageBox.ButtonRole(1))  # no
        msgbox.exec_()
        if msgbox.clickedButton() == btnYes:
            start = time.time()
            code = self.leditCode.text()
            name = self.leditName.text()
            cost = self.spnboxCost.value()
            price = self.spnboxPrice.value()
            available = self.spnboxAvailable.value()
            group = self.cmboxGroup.currentText()
            cat = self.cmboxCategory.currentText().capitalize()
            stockMin = self.spnboxMin.value()
            stockMax = self.spnboxMax.value()
            ### modifying ###
            mec_inventory.modify(self.conn, self.c, code, group,
                                 available, price, cat, stockMin, stockMax, cost, name)
            self.parent().refreshTables()
            QtGui.QMessageBox.information(self, 'Message', 'The modification has been\n'+
                                          'registered successfully')
            self.close()
            end = time.time()
            print("modificar time: " + str(end-start))

    def cost_changed(self):
        # Editing the cost invalidates the derived margin and price.
        self.spnboxMargin.setValue(0)
        self.spnboxPrice.setValue(0)

    def price_changed(self):
        # Recompute the margin from the new price (only when cost is non-zero).
        cost = self.spnboxCost.value()
        if cost > 0:
            price = self.spnboxPrice.value()
            self.spnboxMargin.setValue((price/cost - 1) * 100)

    def margin_changed(self):
        # Recompute the price from the new margin.
        margin = self.spnboxMargin.value()
        cost = self.spnboxCost.value()
        self.spnboxPrice.setValue(cost * (1 + margin/100))

    def undo(self):
        """Restore every field to the values originally loaded from the database."""
        self.leditName.setText(self.name)
        self.spnboxCost.setValue(self.cost)
        self.spnboxAvailable.setValue(self.available)
        self.spnboxMargin.setValue(self._margin_for(self.price, self.cost))
        self.spnboxPrice.setValue(self.price)
        self.cmboxCategory.setEditText(self.category)
        self.cmboxGroup.setCurrentIndex(0)
        self.spnboxMin.setValue(self.min)
        self.spnboxMax.setValue(self.max)
class ModifyClient(QtGui.QDialog, ClientModifyGui):
    """Dialog for editing an existing client, looked up by name.

    Fix: the confirmation box was titled "Add Client" (copy-paste from the
    Client dialog); it now reads "Modify Client" to match the action.
    """

    def __init__(self, name, parent=None):
        QtGui.QDialog.__init__(self, parent)
        self.setupUi(self)
        self.leditName.setText(name)
        # functionality
        self.btnUndo.clicked.connect(self.undo)
        self.btnModify.clicked.connect(self.modify)
        ### validators ###
        regexpPhone = QtCore.QRegExp("^[0-9-()]*$")  # 0-9 or - or ()
        phoneVal = QtGui.QRegExpValidator(regexpPhone)
        self.leditPhone.setValidator(phoneVal)
        self.leditCellphone.setValidator(phoneVal)
        self.leditFax.setValidator(phoneVal)
        ### connection, from parent ###
        self.conn = self.parent().conn
        self.c = self.parent().c
        info = mec_inventory.query_client(self.c, name)
        if info:
            # Cache the stored values so undo() can restore them later.
            self.id = info[0]
            self.email = info[1]
            self.phone = info[2]
            self.cel = info[3]
            self.fax = info[4]
            self.address = info[5]
            self.leditName.setText(name)
            self.leditID.setText(self.id)
            self.leditEmail.setText(self.email)
            self.leditPhone.setText(self.phone)
            self.leditCellphone.setText(self.cel)
            self.leditFax.setText(self.fax)
            self.leditAddress.setText(self.address)
        else:
            # Lookup failed: warn and close rather than show an empty form.
            QtGui.QMessageBox.warning(self, 'Error','An unexpected error has occurred.\n'+
                                      'Please try again')
            self.close()

    def modify(self):
        """Ask for confirmation, then persist the edited client fields."""
        msgbox = QtGui.QMessageBox(QtGui.QMessageBox.Icon(4), "Modify Client",
                                   "Are you sure you want\n"
                                   "to modify this client?", parent=self)
        btnYes = msgbox.addButton("Yes", QtGui.QMessageBox.ButtonRole(0))  # yes
        msgbox.addButton("No", QtGui.QMessageBox.ButtonRole(1))  # no
        msgbox.exec_()
        if msgbox.clickedButton() == btnYes:
            start = time.time()
            name = self.leditName.text()
            client_id = self.leditID.text()  # renamed from `id` (shadowed builtin)
            phone = self.leditPhone.text()
            cell = self.leditCellphone.text()
            address = self.leditAddress.text().capitalize()
            email = self.leditEmail.text()
            fax = self.leditFax.text()
            mec_inventory.modify_client(self.conn, self.c, name, client_id,
                                        email, phone, cell, fax, address)
            self.parent().refreshTables()
            QtGui.QMessageBox.information(self, 'Message', 'The client has been\n'+
                                          'modified successfully')
            self.close()
            end = time.time()
            print("time mod cliente: " + str(end - start))

    def undo(self):
        """Restore all editable fields to the values loaded from the database."""
        self.leditID.setText(self.id)
        self.leditPhone.setText(self.phone)
        self.leditCellphone.setText(self.cel)
        self.leditAddress.setText(self.address)
        self.leditFax.setText(self.fax)
        self.leditEmail.setText(self.email)
class Move(QtGui.QDialog, MoveGui):
    """Dialog for moving stock of one item from its current group to another.

    Fix: the confirmation box was titled "Sell" (copy-paste from the sale
    dialog); it now reads "Move". A leftover debug print was also removed.
    """

    def __init__(self, code, available, group, parent=None):
        QtGui.QDialog.__init__(self, parent)
        self.setupUi(self)
        # Reuse the parent window's database connection and cursor.
        self.conn = self.parent().conn
        self.c = self.parent().c
        self.leditCode.setText(code)
        # Cannot move more units than are available in the source group.
        self.spnboxQuantity.setMaximum(available)
        self.leditFromGroup.setText(str(group))
        # Offer every other group that already stocks this code as a target.
        self.cmboxToGroup.addItems(mec_inventory.unique(self.c, "groupx", "inventory", "code", code))
        self.cmboxToGroup.removeItem(self.cmboxToGroup.findText(group))
        self.btnConfirm.clicked.connect(self.confirm)

    def confirm(self):
        """Ask for confirmation, then perform the move and refresh the tables."""
        msgbox = QtGui.QMessageBox(QtGui.QMessageBox.Icon(4), "Move",
                                   "Are you sure you want to\n"
                                   "move this item?", parent=self)
        btnYes = msgbox.addButton("Yes", QtGui.QMessageBox.ButtonRole(0))  # yes
        msgbox.addButton("No", QtGui.QMessageBox.ButtonRole(1))  # no
        msgbox.exec_()
        if msgbox.clickedButton() == btnYes:
            code = self.leditCode.text()
            quantity = self.spnboxQuantity.value()
            fromGroup = self.leditFromGroup.text()
            toGroup = self.cmboxToGroup.currentText()
            mec_inventory.move(self.conn, self.c, code, fromGroup, toGroup, quantity)
            self.parent().refreshTables()
            QtGui.QMessageBox.information(self, 'Message', 'The operation has been \n'+
                                          'made successfully')
            self.close()
##################### starts everything #############################################
# Application entry point: builds the Qt application and shows the main window.
if __name__ == "__main__":
    app = QtGui.QApplication(sys.argv)
    inventory = Inventory() # TODO: remove this once startup is gated on the NAS check below
    inventory.show() # i.e. if startup is to be conditioned on the NAS location
    # Alternative startup, kept for reference: only open the window when the
    # shared NAS database folder is reachable, otherwise warn the user.
    # if os.path.isdir("\\\\NASPAREDES\\db"):
    #     inventario = Inventario()
    #     inventario.show()
    # else:
    #     widget = QtGui.QWidget()
    #     QtGui.QMessageBox.warning( widget, 'Error de conexin', 'Necesitamos que este conectado a\n' +
    #                                'la red wifi')
    sys.exit(app.exec_())
|
{"/master_admin.py": ["/gui_inventory.py", "/gui_purchase.py", "/gui_sale.py", "/gui_client.py", "/gui_move.py", "/mec_inventory.py"], "/main_login.py": ["/gui_login.py", "/master_admin.py", "/mec_login.py"]}
|
1,756
|
Redent0r/Libra
|
refs/heads/master
|
/gui_login.py
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'gui_login.ui'
#
# Created by: PyQt4 UI code generator 4.12.1
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
def _fromUtf8(s):
return s
try:
_encoding = QtGui.QApplication.UnicodeUTF8
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig)
class Ui_Dialog(object):
def setupUi(self, Dialog):
Dialog.setObjectName(_fromUtf8("Dialog"))
Dialog.resize(172, 150)
Dialog.setMinimumSize(QtCore.QSize(172, 150))
Dialog.setMaximumSize(QtCore.QSize(172, 150))
font = QtGui.QFont()
font.setFamily(_fromUtf8("Arial"))
font.setPointSize(10)
font.setStyleStrategy(QtGui.QFont.NoAntialias)
Dialog.setFont(font)
icon = QtGui.QIcon()
icon.addPixmap(QtGui.QPixmap(_fromUtf8(":/icons/resources/access-512.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
Dialog.setWindowIcon(icon)
self.verticalLayout = QtGui.QVBoxLayout(Dialog)
self.verticalLayout.setObjectName(_fromUtf8("verticalLayout"))
self.label = QtGui.QLabel(Dialog)
font = QtGui.QFont()
font.setPointSize(17)
font.setBold(True)
font.setWeight(75)
self.label.setFont(font)
self.label.setAlignment(QtCore.Qt.AlignCenter)
self.label.setObjectName(_fromUtf8("label"))
self.verticalLayout.addWidget(self.label)
self.leditUser = QtGui.QLineEdit(Dialog)
self.leditUser.setAlignment(QtCore.Qt.AlignCenter)
self.leditUser.setObjectName(_fromUtf8("leditUser"))
self.verticalLayout.addWidget(self.leditUser)
self.leditPassword = QtGui.QLineEdit(Dialog)
self.leditPassword.setEchoMode(QtGui.QLineEdit.Password)
self.leditPassword.setAlignment(QtCore.Qt.AlignCenter)
self.leditPassword.setObjectName(_fromUtf8("leditPassword"))
self.verticalLayout.addWidget(self.leditPassword)
self.horizontalLayout = QtGui.QHBoxLayout()
self.horizontalLayout.setObjectName(_fromUtf8("horizontalLayout"))
spacerItem = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
self.horizontalLayout.addItem(spacerItem)
self.btnLogin = QtGui.QPushButton(Dialog)
self.btnLogin.setObjectName(_fromUtf8("btnLogin"))
self.horizontalLayout.addWidget(self.btnLogin)
spacerItem1 = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
self.horizontalLayout.addItem(spacerItem1)
self.verticalLayout.addLayout(self.horizontalLayout)
self.retranslateUi(Dialog)
QtCore.QMetaObject.connectSlotsByName(Dialog)
def retranslateUi(self, Dialog):
Dialog.setWindowTitle(_translate("Dialog", "Inventory", None))
self.label.setText(_translate("Dialog", "Member Login", None))
self.leditUser.setPlaceholderText(_translate("Dialog", "Username", None))
self.leditPassword.setPlaceholderText(_translate("Dialog", "Password", None))
self.btnLogin.setText(_translate("Dialog", "Login", None))
import res_rc
|
{"/master_admin.py": ["/gui_inventory.py", "/gui_purchase.py", "/gui_sale.py", "/gui_client.py", "/gui_move.py", "/mec_inventory.py"], "/main_login.py": ["/gui_login.py", "/master_admin.py", "/mec_login.py"]}
|
1,757
|
Redent0r/Libra
|
refs/heads/master
|
/gui_purchase.py
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'gui_compra.ui'
#
# Created by: PyQt4 UI code generator 4.12.1
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
def _fromUtf8(s):
return s
try:
_encoding = QtGui.QApplication.UnicodeUTF8
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig)
class Ui_Dialog(object):
def setupUi(self, Dialog):
Dialog.setObjectName(_fromUtf8("Dialog"))
Dialog.setWindowModality(QtCore.Qt.NonModal)
Dialog.resize(220, 366)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Preferred, QtGui.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(Dialog.sizePolicy().hasHeightForWidth())
Dialog.setSizePolicy(sizePolicy)
Dialog.setMinimumSize(QtCore.QSize(220, 366))
Dialog.setMaximumSize(QtCore.QSize(400, 366))
font = QtGui.QFont()
font.setPointSize(10)
Dialog.setFont(font)
Dialog.setContextMenuPolicy(QtCore.Qt.DefaultContextMenu)
icon = QtGui.QIcon()
icon.addPixmap(QtGui.QPixmap(_fromUtf8(":/icons/resources/plus-icon-0.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
Dialog.setWindowIcon(icon)
Dialog.setSizeGripEnabled(False)
Dialog.setModal(True)
self.verticalLayout = QtGui.QVBoxLayout(Dialog)
self.verticalLayout.setObjectName(_fromUtf8("verticalLayout"))
self.formLayout = QtGui.QFormLayout()
self.formLayout.setObjectName(_fromUtf8("formLayout"))
self.label = QtGui.QLabel(Dialog)
self.label.setObjectName(_fromUtf8("label"))
self.formLayout.setWidget(0, QtGui.QFormLayout.LabelRole, self.label)
self.label_2 = QtGui.QLabel(Dialog)
self.label_2.setObjectName(_fromUtf8("label_2"))
self.formLayout.setWidget(2, QtGui.QFormLayout.LabelRole, self.label_2)
self.leditName = QtGui.QLineEdit(Dialog)
self.leditName.setPlaceholderText(_fromUtf8(""))
self.leditName.setObjectName(_fromUtf8("leditName"))
self.formLayout.setWidget(2, QtGui.QFormLayout.FieldRole, self.leditName)
self.label_5 = QtGui.QLabel(Dialog)
self.label_5.setObjectName(_fromUtf8("label_5"))
self.formLayout.setWidget(4, QtGui.QFormLayout.LabelRole, self.label_5)
self.label_3 = QtGui.QLabel(Dialog)
self.label_3.setObjectName(_fromUtf8("label_3"))
self.formLayout.setWidget(5, QtGui.QFormLayout.LabelRole, self.label_3)
self.spnBoxQuantity = QtGui.QSpinBox(Dialog)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.spnBoxQuantity.sizePolicy().hasHeightForWidth())
self.spnBoxQuantity.setSizePolicy(sizePolicy)
self.spnBoxQuantity.setMinimumSize(QtCore.QSize(0, 0))
self.spnBoxQuantity.setWrapping(False)
self.spnBoxQuantity.setFrame(True)
self.spnBoxQuantity.setButtonSymbols(QtGui.QAbstractSpinBox.UpDownArrows)
self.spnBoxQuantity.setAccelerated(False)
self.spnBoxQuantity.setMaximum(999999)
self.spnBoxQuantity.setProperty("value", 1)
self.spnBoxQuantity.setObjectName(_fromUtf8("spnBoxQuantity"))
self.formLayout.setWidget(5, QtGui.QFormLayout.FieldRole, self.spnBoxQuantity)
self.label_4 = QtGui.QLabel(Dialog)
self.label_4.setObjectName(_fromUtf8("label_4"))
self.formLayout.setWidget(9, QtGui.QFormLayout.LabelRole, self.label_4)
self.label_6 = QtGui.QLabel(Dialog)
self.label_6.setObjectName(_fromUtf8("label_6"))
self.formLayout.setWidget(10, QtGui.QFormLayout.LabelRole, self.label_6)
self.leditVendor = QtGui.QLineEdit(Dialog)
self.leditVendor.setPlaceholderText(_fromUtf8(""))
self.leditVendor.setObjectName(_fromUtf8("leditVendor"))
self.formLayout.setWidget(10, QtGui.QFormLayout.FieldRole, self.leditVendor)
self.label_7 = QtGui.QLabel(Dialog)
self.label_7.setObjectName(_fromUtf8("label_7"))
self.formLayout.setWidget(11, QtGui.QFormLayout.LabelRole, self.label_7)
self.spnBoxMin = QtGui.QSpinBox(Dialog)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.spnBoxMin.sizePolicy().hasHeightForWidth())
self.spnBoxMin.setSizePolicy(sizePolicy)
self.spnBoxMin.setAccelerated(False)
self.spnBoxMin.setMaximum(999999)
self.spnBoxMin.setProperty("value", 1)
self.spnBoxMin.setObjectName(_fromUtf8("spnBoxMin"))
self.formLayout.setWidget(11, QtGui.QFormLayout.FieldRole, self.spnBoxMin)
self.spnBoxMax = QtGui.QSpinBox(Dialog)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.spnBoxMax.sizePolicy().hasHeightForWidth())
self.spnBoxMax.setSizePolicy(sizePolicy)
self.spnBoxMax.setAccelerated(True)
self.spnBoxMax.setMaximum(999999)
self.spnBoxMax.setProperty("value", 100)
self.spnBoxMax.setObjectName(_fromUtf8("spnBoxMax"))
self.formLayout.setWidget(12, QtGui.QFormLayout.FieldRole, self.spnBoxMax)
self.label_9 = QtGui.QLabel(Dialog)
self.label_9.setObjectName(_fromUtf8("label_9"))
self.formLayout.setWidget(7, QtGui.QFormLayout.LabelRole, self.label_9)
self.label_10 = QtGui.QLabel(Dialog)
self.label_10.setObjectName(_fromUtf8("label_10"))
self.formLayout.setWidget(6, QtGui.QFormLayout.LabelRole, self.label_10)
self.label_8 = QtGui.QLabel(Dialog)
self.label_8.setObjectName(_fromUtf8("label_8"))
self.formLayout.setWidget(12, QtGui.QFormLayout.LabelRole, self.label_8)
self.cmBoxCode = QtGui.QComboBox(Dialog)
self.cmBoxCode.setEditable(True)
self.cmBoxCode.setObjectName(_fromUtf8("cmBoxCode"))
self.formLayout.setWidget(0, QtGui.QFormLayout.FieldRole, self.cmBoxCode)
self.spnboxCost = QtGui.QDoubleSpinBox(Dialog)
self.spnboxCost.setButtonSymbols(QtGui.QAbstractSpinBox.NoButtons)
self.spnboxCost.setKeyboardTracking(False)
self.spnboxCost.setSuffix(_fromUtf8(""))
self.spnboxCost.setDecimals(2)
self.spnboxCost.setMaximum(9999.0)
self.spnboxCost.setObjectName(_fromUtf8("spnboxCost"))
self.formLayout.setWidget(4, QtGui.QFormLayout.FieldRole, self.spnboxCost)
self.spnboxMargin = QtGui.QDoubleSpinBox(Dialog)
self.spnboxMargin.setButtonSymbols(QtGui.QAbstractSpinBox.NoButtons)
self.spnboxMargin.setKeyboardTracking(False)
self.spnboxMargin.setMaximum(9999.0)
self.spnboxMargin.setObjectName(_fromUtf8("spnboxMargin"))
self.formLayout.setWidget(6, QtGui.QFormLayout.FieldRole, self.spnboxMargin)
self.spnboxPrice = QtGui.QDoubleSpinBox(Dialog)
self.spnboxPrice.setButtonSymbols(QtGui.QAbstractSpinBox.NoButtons)
self.spnboxPrice.setKeyboardTracking(False)
self.spnboxPrice.setMaximum(99999.0)
self.spnboxPrice.setObjectName(_fromUtf8("spnboxPrice"))
self.formLayout.setWidget(7, QtGui.QFormLayout.FieldRole, self.spnboxPrice)
self.cmboxCategory = QtGui.QComboBox(Dialog)
self.cmboxCategory.setEditable(True)
self.cmboxCategory.setObjectName(_fromUtf8("cmboxCategory"))
self.formLayout.setWidget(9, QtGui.QFormLayout.FieldRole, self.cmboxCategory)
self.label_11 = QtGui.QLabel(Dialog)
self.label_11.setObjectName(_fromUtf8("label_11"))
self.formLayout.setWidget(1, QtGui.QFormLayout.LabelRole, self.label_11)
self.cmboxGroup = QtGui.QComboBox(Dialog)
self.cmboxGroup.setEditable(True)
self.cmboxGroup.setObjectName(_fromUtf8("cmboxGroup"))
self.formLayout.setWidget(1, QtGui.QFormLayout.FieldRole, self.cmboxGroup)
self.verticalLayout.addLayout(self.formLayout)
self.horizontalLayout = QtGui.QHBoxLayout()
self.horizontalLayout.setObjectName(_fromUtf8("horizontalLayout"))
spacerItem = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
self.horizontalLayout.addItem(spacerItem)
self.btnAdd = QtGui.QPushButton(Dialog)
self.btnAdd.setAutoDefault(False)
self.btnAdd.setDefault(False)
self.btnAdd.setObjectName(_fromUtf8("btnAdd"))
self.horizontalLayout.addWidget(self.btnAdd)
self.btnUndo = QtGui.QPushButton(Dialog)
self.btnUndo.setAutoDefault(False)
self.btnUndo.setObjectName(_fromUtf8("btnUndo"))
self.horizontalLayout.addWidget(self.btnUndo)
spacerItem1 = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
self.horizontalLayout.addItem(spacerItem1)
self.verticalLayout.addLayout(self.horizontalLayout)
self.retranslateUi(Dialog)
QtCore.QMetaObject.connectSlotsByName(Dialog)
def retranslateUi(self, Dialog):
Dialog.setWindowTitle(_translate("Dialog", "Purchase", None))
Dialog.setWhatsThis(_translate("Dialog", "Write a code and press ENTER.\n"
"the fields will fill out automatically if this code was recorded previously", None))
self.label.setText(_translate("Dialog", "Code:", None))
self.label_2.setText(_translate("Dialog", "Name:", None))
self.label_5.setText(_translate("Dialog", "Unit Cost:", None))
self.label_3.setText(_translate("Dialog", "Quantity:", None))
self.label_4.setText(_translate("Dialog", "Category:", None))
self.label_6.setText(_translate("Dialog", "Vendor:", None))
self.label_7.setText(_translate("Dialog", "Minimum Quantity:", None))
self.label_9.setText(_translate("Dialog", "Suggested Price:", None))
self.label_10.setText(_translate("Dialog", "Profit Margin:", None))
self.label_8.setText(_translate("Dialog", "Maximum Quantity:", None))
self.spnboxCost.setPrefix(_translate("Dialog", "$ ", None))
self.spnboxMargin.setPrefix(_translate("Dialog", "% ", None))
self.spnboxPrice.setPrefix(_translate("Dialog", "$ ", None))
self.label_11.setText(_translate("Dialog", "Group:", None))
self.btnAdd.setText(_translate("Dialog", "Add", None))
self.btnUndo.setText(_translate("Dialog", "Undo", None))
import res_rc
|
{"/master_admin.py": ["/gui_inventory.py", "/gui_purchase.py", "/gui_sale.py", "/gui_client.py", "/gui_move.py", "/mec_inventory.py"], "/main_login.py": ["/gui_login.py", "/master_admin.py", "/mec_login.py"]}
|
1,758
|
Redent0r/Libra
|
refs/heads/master
|
/gui_client.py
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'gui_cliente.ui'
#
# Created by: PyQt4 UI code generator 4.12.1
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
def _fromUtf8(s):
return s
try:
_encoding = QtGui.QApplication.UnicodeUTF8
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig)
class Ui_Dialog(object):
def setupUi(self, Dialog):
Dialog.setObjectName(_fromUtf8("Dialog"))
Dialog.resize(188, 227)
Dialog.setMinimumSize(QtCore.QSize(188, 227))
Dialog.setMaximumSize(QtCore.QSize(350, 227))
icon = QtGui.QIcon()
icon.addPixmap(QtGui.QPixmap(_fromUtf8(":/icons/resources/manager-512.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
Dialog.setWindowIcon(icon)
Dialog.setModal(True)
self.gridLayout = QtGui.QGridLayout(Dialog)
self.gridLayout.setObjectName(_fromUtf8("gridLayout"))
self.formLayout = QtGui.QFormLayout()
self.formLayout.setObjectName(_fromUtf8("formLayout"))
self.label = QtGui.QLabel(Dialog)
self.label.setObjectName(_fromUtf8("label"))
self.formLayout.setWidget(0, QtGui.QFormLayout.LabelRole, self.label)
self.label_2 = QtGui.QLabel(Dialog)
self.label_2.setObjectName(_fromUtf8("label_2"))
self.formLayout.setWidget(2, QtGui.QFormLayout.LabelRole, self.label_2)
self.leditName = QtGui.QLineEdit(Dialog)
self.leditName.setObjectName(_fromUtf8("leditName"))
self.formLayout.setWidget(0, QtGui.QFormLayout.FieldRole, self.leditName)
self.leditPhone = QtGui.QLineEdit(Dialog)
self.leditPhone.setObjectName(_fromUtf8("leditPhone"))
self.formLayout.setWidget(2, QtGui.QFormLayout.FieldRole, self.leditPhone)
self.label_3 = QtGui.QLabel(Dialog)
self.label_3.setObjectName(_fromUtf8("label_3"))
self.formLayout.setWidget(4, QtGui.QFormLayout.LabelRole, self.label_3)
self.label_4 = QtGui.QLabel(Dialog)
self.label_4.setObjectName(_fromUtf8("label_4"))
self.formLayout.setWidget(6, QtGui.QFormLayout.LabelRole, self.label_4)
self.leditAddress = QtGui.QLineEdit(Dialog)
self.leditAddress.setObjectName(_fromUtf8("leditAddress"))
self.formLayout.setWidget(4, QtGui.QFormLayout.FieldRole, self.leditAddress)
self.leditEmail = QtGui.QLineEdit(Dialog)
self.leditEmail.setObjectName(_fromUtf8("leditEmail"))
self.formLayout.setWidget(6, QtGui.QFormLayout.FieldRole, self.leditEmail)
self.label_5 = QtGui.QLabel(Dialog)
self.label_5.setObjectName(_fromUtf8("label_5"))
self.formLayout.setWidget(3, QtGui.QFormLayout.LabelRole, self.label_5)
self.leditCellphone = QtGui.QLineEdit(Dialog)
self.leditCellphone.setObjectName(_fromUtf8("leditCellphone"))
self.formLayout.setWidget(3, QtGui.QFormLayout.FieldRole, self.leditCellphone)
self.label_6 = QtGui.QLabel(Dialog)
self.label_6.setObjectName(_fromUtf8("label_6"))
self.formLayout.setWidget(5, QtGui.QFormLayout.LabelRole, self.label_6)
self.leditFax = QtGui.QLineEdit(Dialog)
self.leditFax.setObjectName(_fromUtf8("leditFax"))
self.formLayout.setWidget(5, QtGui.QFormLayout.FieldRole, self.leditFax)
self.label_7 = QtGui.QLabel(Dialog)
self.label_7.setObjectName(_fromUtf8("label_7"))
self.formLayout.setWidget(1, QtGui.QFormLayout.LabelRole, self.label_7)
self.leditID = QtGui.QLineEdit(Dialog)
self.leditID.setMinimumSize(QtCore.QSize(0, 0))
self.leditID.setPlaceholderText(_fromUtf8(""))
self.leditID.setObjectName(_fromUtf8("leditID"))
self.formLayout.setWidget(1, QtGui.QFormLayout.FieldRole, self.leditID)
self.gridLayout.addLayout(self.formLayout, 0, 0, 1, 1)
self.horizontalLayout = QtGui.QHBoxLayout()
self.horizontalLayout.setObjectName(_fromUtf8("horizontalLayout"))
spacerItem = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
self.horizontalLayout.addItem(spacerItem)
self.btnAdd = QtGui.QPushButton(Dialog)
self.btnAdd.setObjectName(_fromUtf8("btnAdd"))
self.horizontalLayout.addWidget(self.btnAdd)
self.btnUndo = QtGui.QPushButton(Dialog)
self.btnUndo.setObjectName(_fromUtf8("btnUndo"))
self.horizontalLayout.addWidget(self.btnUndo)
spacerItem1 = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
self.horizontalLayout.addItem(spacerItem1)
self.gridLayout.addLayout(self.horizontalLayout, 1, 0, 1, 1)
self.retranslateUi(Dialog)
QtCore.QMetaObject.connectSlotsByName(Dialog)
def retranslateUi(self, Dialog):
Dialog.setWindowTitle(_translate("Dialog", "Add Client", None))
self.label.setText(_translate("Dialog", "Name: ", None))
self.label_2.setText(_translate("Dialog", "Phone: ", None))
self.label_3.setText(_translate("Dialog", "Address: ", None))
self.label_4.setText(_translate("Dialog", "E-mail: ", None))
self.label_5.setText(_translate("Dialog", "Cellphone: ", None))
self.label_6.setText(_translate("Dialog", "Fax:", None))
self.label_7.setText(_translate("Dialog", "ID:", None))
self.btnAdd.setText(_translate("Dialog", "Add", None))
self.btnUndo.setText(_translate("Dialog", "Undo", None))
import res_rc
|
{"/master_admin.py": ["/gui_inventory.py", "/gui_purchase.py", "/gui_sale.py", "/gui_client.py", "/gui_move.py", "/mec_inventory.py"], "/main_login.py": ["/gui_login.py", "/master_admin.py", "/mec_login.py"]}
|
1,759
|
Redent0r/Libra
|
refs/heads/master
|
/mec_login.py
|
"""
Author:Christopher Holder
Project : Version 1.0(Login)
"""
import sqlite3
import sys
def create_login_table(cursor, connection):
    """Create the ``login`` table if missing and seed the default admin account.

    Idempotent: the table is created with IF NOT EXISTS and the admin row is
    only inserted when absent. Commits via *connection* and returns True.

    Fix: ``data == None`` replaced with ``is None`` (PEP 8 singleton comparison).
    """
    cursor.execute("CREATE TABLE IF NOT EXISTS login(ID INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, User TEXT NOT NULL, Pass TEXT NOT NULL,class TEXT NOT NULL,dat TEXT);")
    cursor.execute("SELECT User FROM login WHERE User = 'Administrator'")
    if cursor.fetchone() is None:
        print("...............Adding admin account")
        # NOTE(review): credentials are stored in plain text; consider hashing
        # (e.g. hashlib.pbkdf2_hmac) before this ships.
        cursor.execute("INSERT INTO login (User, Pass,class,dat)""VALUES ('Administrator','nimda','admin',date('now'))")
        print("...............Account added")
    connection.commit()
    return True
def check_login(cursor, username, password):
    """Return True iff (*username*, *password*) matches a row in ``login``.

    Fix: ``data == None`` replaced with ``is None`` (PEP 8).
    NOTE(review): passwords are compared in plain text via SQL equality.
    """
    params = (username, password)
    cursor.execute("SELECT User,Pass FROM login WHERE User = ? AND Pass = ?", params)
    if cursor.fetchone() is None:  # no matching credentials
        print("Not registered")
        return False
    return True
def add_user(cursor, username, password):
    """Register a new 'regular' user.

    Returns True on success; False when the username is already taken or the
    password is shorter than 8 characters. Does not commit — the caller owns
    the transaction.

    Fixes: ``== None`` → ``is None``; the availability branch printed the
    misleading "Username not valid" even though it is reached when the name
    is *available*; typo "Succesful" corrected.
    """
    cursor.execute("SELECT User FROM login WHERE User =?", (username,))
    if cursor.fetchone() is not None:
        print("Already registered")
        return False
    print("Username not taken")
    if len(password) < 8:
        print("Must be at least 8 characters.")
        return False
    cursor.execute("INSERT INTO login (User, Pass,class,dat) VALUES (?,?,'regular',date('now'))",
                   (username, password))
    print("Successful registration.")
    return True
def print_login_table(cursor):
    """Dump every row of the ``login`` table to stdout (debug helper)."""
    cursor.execute("SELECT * FROM login")
    for row in cursor.fetchall():
        print(row)
def check_if_admin(cursor, username):
    """Return True iff *username* exists in ``login`` with class 'admin'.

    Fix: ``data == None`` → ``is None``; the three-branch return collapsed
    into one boolean expression with identical truth table.
    """
    cursor.execute("SELECT class FROM login WHERE User = ?", (username,))
    data = cursor.fetchone()
    return data is not None and data[0] == 'admin'
def remove_user():
    # TODO: not implemented — presumably intended to DELETE a row from the
    # login table; confirm intended signature (cursor, username?) with author.
    pass
def log_out(cursor, connection):
    """Commit pending changes, close the cursor and terminate the process.

    Note: calls sys.exit(), so this function never returns; the sqlite3
    connection itself is left to be closed by interpreter shutdown.
    """
    print('')
    print('.................Closing')
    connection.commit()
    cursor.close()
    sys.exit()
|
{"/master_admin.py": ["/gui_inventory.py", "/gui_purchase.py", "/gui_sale.py", "/gui_client.py", "/gui_move.py", "/mec_inventory.py"], "/main_login.py": ["/gui_login.py", "/master_admin.py", "/mec_login.py"]}
|
1,760
|
Redent0r/Libra
|
refs/heads/master
|
/gui_move.py
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'gui_mover.ui'
#
# Created by: PyQt4 UI code generator 4.12.1
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
def _fromUtf8(s):
return s
try:
_encoding = QtGui.QApplication.UnicodeUTF8
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
def _translate(context, text, disambig):
return QtGui.QApplication.translate(context, text, disambig)
class Ui_Dialog(object):
def setupUi(self, Dialog):
Dialog.setObjectName(_fromUtf8("Dialog"))
Dialog.resize(313, 99)
Dialog.setMinimumSize(QtCore.QSize(227, 99))
Dialog.setMaximumSize(QtCore.QSize(500, 99))
icon = QtGui.QIcon()
icon.addPixmap(QtGui.QPixmap(_fromUtf8(":/icons/resources/swap-512.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
Dialog.setWindowIcon(icon)
self.verticalLayout = QtGui.QVBoxLayout(Dialog)
self.verticalLayout.setObjectName(_fromUtf8("verticalLayout"))
self.horizontalLayout_2 = QtGui.QHBoxLayout()
self.horizontalLayout_2.setContentsMargins(-1, -1, 0, -1)
self.horizontalLayout_2.setObjectName(_fromUtf8("horizontalLayout_2"))
self.label = QtGui.QLabel(Dialog)
self.label.setObjectName(_fromUtf8("label"))
self.horizontalLayout_2.addWidget(self.label)
self.spnboxQuantity = QtGui.QSpinBox(Dialog)
self.spnboxQuantity.setMinimum(1)
self.spnboxQuantity.setMaximum(99999)
self.spnboxQuantity.setObjectName(_fromUtf8("spnboxQuantity"))
self.horizontalLayout_2.addWidget(self.spnboxQuantity)
self.leditCode = QtGui.QLineEdit(Dialog)
self.leditCode.setReadOnly(True)
self.leditCode.setObjectName(_fromUtf8("leditCode"))
self.horizontalLayout_2.addWidget(self.leditCode)
self.verticalLayout.addLayout(self.horizontalLayout_2)
self.horizontalLayout = QtGui.QHBoxLayout()
self.horizontalLayout.setObjectName(_fromUtf8("horizontalLayout"))
self.label_3 = QtGui.QLabel(Dialog)
self.label_3.setObjectName(_fromUtf8("label_3"))
self.horizontalLayout.addWidget(self.label_3)
self.leditFromGroup = QtGui.QLineEdit(Dialog)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.leditFromGroup.sizePolicy().hasHeightForWidth())
self.leditFromGroup.setSizePolicy(sizePolicy)
self.leditFromGroup.setReadOnly(True)
self.leditFromGroup.setObjectName(_fromUtf8("leditFromGroup"))
self.horizontalLayout.addWidget(self.leditFromGroup)
self.label_4 = QtGui.QLabel(Dialog)
self.label_4.setObjectName(_fromUtf8("label_4"))
self.horizontalLayout.addWidget(self.label_4)
self.cmboxToGroup = QtGui.QComboBox(Dialog)
self.cmboxToGroup.setEditable(True)
self.cmboxToGroup.setObjectName(_fromUtf8("cmboxToGroup"))
self.horizontalLayout.addWidget(self.cmboxToGroup)
self.verticalLayout.addLayout(self.horizontalLayout)
self.horizontalLayout_3 = QtGui.QHBoxLayout()
self.horizontalLayout_3.setContentsMargins(-1, 0, -1, -1)
self.horizontalLayout_3.setObjectName(_fromUtf8("horizontalLayout_3"))
spacerItem = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
self.horizontalLayout_3.addItem(spacerItem)
self.btnConfirm = QtGui.QPushButton(Dialog)
self.btnConfirm.setObjectName(_fromUtf8("btnConfirm"))
self.horizontalLayout_3.addWidget(self.btnConfirm)
spacerItem1 = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
self.horizontalLayout_3.addItem(spacerItem1)
self.verticalLayout.addLayout(self.horizontalLayout_3)
self.retranslateUi(Dialog)
QtCore.QMetaObject.connectSlotsByName(Dialog)
def retranslateUi(self, Dialog):
Dialog.setWindowTitle(_translate("Dialog", "Move", None))
self.label.setText(_translate("Dialog", "Move:", None))
self.label_3.setText(_translate("Dialog", "From group:", None))
self.label_4.setText(_translate("Dialog", "To group:", None))
self.btnConfirm.setText(_translate("Dialog", "Confirm", None))
import res_rc
|
{"/master_admin.py": ["/gui_inventory.py", "/gui_purchase.py", "/gui_sale.py", "/gui_client.py", "/gui_move.py", "/mec_inventory.py"], "/main_login.py": ["/gui_login.py", "/master_admin.py", "/mec_login.py"]}
|
1,761
|
Redent0r/Libra
|
refs/heads/master
|
/gui_inventory.py
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'gui_inventory.ui'
#
# Created by: PyQt4 UI code generator 4.12.1
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
# Qt4/Qt5-era compatibility shims emitted by pyuic4: on older PyQt builds
# QString.fromUtf8 exists and must be used; on newer ones plain str is fine.
try:
    _fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
    def _fromUtf8(s):
        # Modern PyQt: strings are already unicode; pass through unchanged.
        return s
try:
    _encoding = QtGui.QApplication.UnicodeUTF8
    def _translate(context, text, disambig):
        # Older PyQt: translate() requires an explicit encoding argument.
        return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
    def _translate(context, text, disambig):
        # Newer PyQt: the encoding parameter was dropped from translate().
        return QtGui.QApplication.translate(context, text, disambig)
class Ui_MainWindow(object):
    """pyuic4-generated UI definition for the Libra main window.

    Builds five tabs (Balance, Inventory, Purchases, Sales, Clients) plus a
    left-hand toolbar with the Refresh/Sale/Purchase/Client actions.  Do not
    edit by hand beyond comments: regenerate from 'gui_inventory.ui' instead.
    """
    def setupUi(self, MainWindow):
        """Create and lay out every widget; called once by the window class."""
        MainWindow.setObjectName(_fromUtf8("MainWindow"))
        MainWindow.resize(1269, 712)
        font = QtGui.QFont()
        font.setPointSize(10)
        MainWindow.setFont(font)
        icon = QtGui.QIcon()
        icon.addPixmap(QtGui.QPixmap(_fromUtf8(":/icons/resources/dbIcon.ico")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
        MainWindow.setWindowIcon(icon)
        MainWindow.setLocale(QtCore.QLocale(QtCore.QLocale.Spanish, QtCore.QLocale.Panama))
        MainWindow.setIconSize(QtCore.QSize(60, 60))
        MainWindow.setToolButtonStyle(QtCore.Qt.ToolButtonIconOnly)
        MainWindow.setDocumentMode(False)
        MainWindow.setTabShape(QtGui.QTabWidget.Rounded)
        MainWindow.setUnifiedTitleAndToolBarOnMac(False)
        self.centralwidget = QtGui.QWidget(MainWindow)
        self.centralwidget.setObjectName(_fromUtf8("centralwidget"))
        self.gridLayout = QtGui.QGridLayout(self.centralwidget)
        self.gridLayout.setObjectName(_fromUtf8("gridLayout"))
        self.tabWidget = QtGui.QTabWidget(self.centralwidget)
        font = QtGui.QFont()
        font.setPointSize(10)
        self.tabWidget.setFont(font)
        self.tabWidget.setLocale(QtCore.QLocale(QtCore.QLocale.English, QtCore.QLocale.UnitedStates))
        self.tabWidget.setTabPosition(QtGui.QTabWidget.North)
        self.tabWidget.setTabShape(QtGui.QTabWidget.Rounded)
        self.tabWidget.setIconSize(QtCore.QSize(25, 25))
        self.tabWidget.setElideMode(QtCore.Qt.ElideNone)
        self.tabWidget.setObjectName(_fromUtf8("tabWidget"))
        # --- Balance tab: period selectors, purchase/sale tables, summary grid ---
        self.tab_balance = QtGui.QWidget()
        self.tab_balance.setObjectName(_fromUtf8("tab_balance"))
        self.verticalLayout_3 = QtGui.QVBoxLayout(self.tab_balance)
        self.verticalLayout_3.setMargin(0)
        self.verticalLayout_3.setObjectName(_fromUtf8("verticalLayout_3"))
        self.verticalLayout_2 = QtGui.QVBoxLayout()
        self.verticalLayout_2.setSizeConstraint(QtGui.QLayout.SetDefaultConstraint)
        self.verticalLayout_2.setContentsMargins(0, 0, -1, -1)
        self.verticalLayout_2.setObjectName(_fromUtf8("verticalLayout_2"))
        self.horizontalLayout_5 = QtGui.QHBoxLayout()
        self.horizontalLayout_5.setObjectName(_fromUtf8("horizontalLayout_5"))
        self.horizontalLayout_7 = QtGui.QHBoxLayout()
        self.horizontalLayout_7.setObjectName(_fromUtf8("horizontalLayout_7"))
        self.verticalLayout_7 = QtGui.QVBoxLayout()
        self.verticalLayout_7.setContentsMargins(0, -1, -1, -1)
        self.verticalLayout_7.setObjectName(_fromUtf8("verticalLayout_7"))
        # "Period" group: historic / annual / monthly / daily radio buttons
        # with their matching date pickers and a calendar for daily selection.
        self.groupBox = QtGui.QGroupBox(self.tab_balance)
        self.groupBox.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
        self.groupBox.setObjectName(_fromUtf8("groupBox"))
        self.horizontalLayout = QtGui.QHBoxLayout(self.groupBox)
        self.horizontalLayout.setObjectName(_fromUtf8("horizontalLayout"))
        self.formLayout = QtGui.QFormLayout()
        self.formLayout.setSizeConstraint(QtGui.QLayout.SetDefaultConstraint)
        self.formLayout.setFormAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignTop)
        self.formLayout.setContentsMargins(0, -1, -1, -1)
        self.formLayout.setVerticalSpacing(6)
        self.formLayout.setObjectName(_fromUtf8("formLayout"))
        self.dateAnnual = QtGui.QDateEdit(self.groupBox)
        sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Fixed)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.dateAnnual.sizePolicy().hasHeightForWidth())
        self.dateAnnual.setSizePolicy(sizePolicy)
        self.dateAnnual.setDateTime(QtCore.QDateTime(QtCore.QDate(2017, 1, 2), QtCore.QTime(0, 0, 0)))
        self.dateAnnual.setDate(QtCore.QDate(2017, 1, 2))
        self.dateAnnual.setMinimumDateTime(QtCore.QDateTime(QtCore.QDate(2017, 1, 2), QtCore.QTime(0, 0, 0)))
        self.dateAnnual.setMinimumDate(QtCore.QDate(2017, 1, 2))
        self.dateAnnual.setObjectName(_fromUtf8("dateAnnual"))
        self.formLayout.setWidget(3, QtGui.QFormLayout.FieldRole, self.dateAnnual)
        self.radioMonthly = QtGui.QRadioButton(self.groupBox)
        self.radioMonthly.setObjectName(_fromUtf8("radioMonthly"))
        self.formLayout.setWidget(5, QtGui.QFormLayout.LabelRole, self.radioMonthly)
        self.dateMonthly = QtGui.QDateEdit(self.groupBox)
        sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Fixed)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.dateMonthly.sizePolicy().hasHeightForWidth())
        self.dateMonthly.setSizePolicy(sizePolicy)
        self.dateMonthly.setDateTime(QtCore.QDateTime(QtCore.QDate(2017, 5, 1), QtCore.QTime(0, 0, 0)))
        self.dateMonthly.setDate(QtCore.QDate(2017, 5, 1))
        self.dateMonthly.setMinimumDateTime(QtCore.QDateTime(QtCore.QDate(2017, 5, 1), QtCore.QTime(0, 0, 0)))
        self.dateMonthly.setMinimumDate(QtCore.QDate(2017, 5, 1))
        self.dateMonthly.setCurrentSection(QtGui.QDateTimeEdit.MonthSection)
        self.dateMonthly.setObjectName(_fromUtf8("dateMonthly"))
        self.formLayout.setWidget(5, QtGui.QFormLayout.FieldRole, self.dateMonthly)
        self.radioAnnual = QtGui.QRadioButton(self.groupBox)
        self.radioAnnual.setObjectName(_fromUtf8("radioAnnual"))
        self.formLayout.setWidget(3, QtGui.QFormLayout.LabelRole, self.radioAnnual)
        self.radioHistoric = QtGui.QRadioButton(self.groupBox)
        # "Historic" is the default period when the window opens.
        self.radioHistoric.setChecked(True)
        self.radioHistoric.setObjectName(_fromUtf8("radioHistoric"))
        self.formLayout.setWidget(2, QtGui.QFormLayout.LabelRole, self.radioHistoric)
        self.radioDaily = QtGui.QRadioButton(self.groupBox)
        self.radioDaily.setObjectName(_fromUtf8("radioDaily"))
        self.formLayout.setWidget(6, QtGui.QFormLayout.LabelRole, self.radioDaily)
        self.horizontalLayout.addLayout(self.formLayout)
        self.calBalance = QtGui.QCalendarWidget(self.groupBox)
        sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.MinimumExpanding, QtGui.QSizePolicy.Fixed)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.calBalance.sizePolicy().hasHeightForWidth())
        self.calBalance.setSizePolicy(sizePolicy)
        self.calBalance.setMinimumSize(QtCore.QSize(300, 0))
        self.calBalance.setMaximumSize(QtCore.QSize(16777215, 100))
        self.calBalance.setLocale(QtCore.QLocale(QtCore.QLocale.English, QtCore.QLocale.UnitedStates))
        self.calBalance.setSelectedDate(QtCore.QDate(2017, 3, 1))
        self.calBalance.setMinimumDate(QtCore.QDate(2017, 3, 1))
        self.calBalance.setMaximumDate(QtCore.QDate(2100, 12, 31))
        self.calBalance.setFirstDayOfWeek(QtCore.Qt.Monday)
        self.calBalance.setGridVisible(True)
        self.calBalance.setHorizontalHeaderFormat(QtGui.QCalendarWidget.NoHorizontalHeader)
        self.calBalance.setVerticalHeaderFormat(QtGui.QCalendarWidget.NoVerticalHeader)
        self.calBalance.setNavigationBarVisible(True)
        self.calBalance.setObjectName(_fromUtf8("calBalance"))
        self.horizontalLayout.addWidget(self.calBalance)
        self.verticalLayout_7.addWidget(self.groupBox)
        # "Purchases" group: read-only table of purchases in the chosen period.
        self.groupBox_2 = QtGui.QGroupBox(self.tab_balance)
        self.groupBox_2.setObjectName(_fromUtf8("groupBox_2"))
        self.gridLayout_2 = QtGui.QGridLayout(self.groupBox_2)
        self.gridLayout_2.setObjectName(_fromUtf8("gridLayout_2"))
        self.tblPurchasesBal = QtGui.QTableView(self.groupBox_2)
        self.tblPurchasesBal.setAlternatingRowColors(True)
        self.tblPurchasesBal.setSelectionMode(QtGui.QAbstractItemView.SingleSelection)
        self.tblPurchasesBal.setSelectionBehavior(QtGui.QAbstractItemView.SelectRows)
        self.tblPurchasesBal.setSortingEnabled(True)
        self.tblPurchasesBal.setObjectName(_fromUtf8("tblPurchasesBal"))
        self.tblPurchasesBal.horizontalHeader().setStretchLastSection(True)
        self.tblPurchasesBal.verticalHeader().setVisible(False)
        self.gridLayout_2.addWidget(self.tblPurchasesBal, 0, 0, 1, 1)
        self.verticalLayout_7.addWidget(self.groupBox_2)
        # "Sales" group: read-only table of sales in the chosen period.
        self.groupBox_3 = QtGui.QGroupBox(self.tab_balance)
        self.groupBox_3.setObjectName(_fromUtf8("groupBox_3"))
        self.gridLayout_3 = QtGui.QGridLayout(self.groupBox_3)
        self.gridLayout_3.setObjectName(_fromUtf8("gridLayout_3"))
        self.tblSalesBal = QtGui.QTableView(self.groupBox_3)
        self.tblSalesBal.setAlternatingRowColors(True)
        self.tblSalesBal.setSelectionMode(QtGui.QAbstractItemView.SingleSelection)
        self.tblSalesBal.setSelectionBehavior(QtGui.QAbstractItemView.SelectRows)
        self.tblSalesBal.setSortingEnabled(True)
        self.tblSalesBal.setObjectName(_fromUtf8("tblSalesBal"))
        self.tblSalesBal.horizontalHeader().setStretchLastSection(True)
        self.tblSalesBal.verticalHeader().setVisible(False)
        self.gridLayout_3.addWidget(self.tblSalesBal, 0, 0, 1, 1)
        self.verticalLayout_7.addWidget(self.groupBox_3)
        self.horizontalLayout_7.addLayout(self.verticalLayout_7)
        # Summary grid (7 rows x 3 cols): revenue/cost/profit figures,
        # pre-filled below in retranslateUi with "0.00" placeholders.
        self.tblBalance = QtGui.QTableWidget(self.tab_balance)
        sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Ignored)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.tblBalance.sizePolicy().hasHeightForWidth())
        self.tblBalance.setSizePolicy(sizePolicy)
        self.tblBalance.setMinimumSize(QtCore.QSize(350, 0))
        font = QtGui.QFont()
        font.setPointSize(14)
        self.tblBalance.setFont(font)
        self.tblBalance.setFrameShape(QtGui.QFrame.Box)
        self.tblBalance.setFrameShadow(QtGui.QFrame.Raised)
        self.tblBalance.setEditTriggers(QtGui.QAbstractItemView.NoEditTriggers)
        self.tblBalance.setTabKeyNavigation(False)
        self.tblBalance.setProperty("showDropIndicator", False)
        self.tblBalance.setDragDropOverwriteMode(False)
        self.tblBalance.setAlternatingRowColors(False)
        self.tblBalance.setSelectionMode(QtGui.QAbstractItemView.NoSelection)
        self.tblBalance.setTextElideMode(QtCore.Qt.ElideLeft)
        self.tblBalance.setShowGrid(True)
        self.tblBalance.setGridStyle(QtCore.Qt.SolidLine)
        self.tblBalance.setWordWrap(True)
        self.tblBalance.setCornerButtonEnabled(False)
        self.tblBalance.setRowCount(7)
        self.tblBalance.setColumnCount(3)
        self.tblBalance.setObjectName(_fromUtf8("tblBalance"))
        # Pre-create the cells that retranslateUi fills with labels/amounts.
        item = QtGui.QTableWidgetItem()
        self.tblBalance.setItem(0, 0, item)
        item = QtGui.QTableWidgetItem()
        self.tblBalance.setItem(0, 2, item)
        item = QtGui.QTableWidgetItem()
        self.tblBalance.setItem(1, 0, item)
        item = QtGui.QTableWidgetItem()
        self.tblBalance.setItem(1, 1, item)
        item = QtGui.QTableWidgetItem()
        self.tblBalance.setItem(1, 2, item)
        item = QtGui.QTableWidgetItem()
        self.tblBalance.setItem(2, 0, item)
        item = QtGui.QTableWidgetItem()
        self.tblBalance.setItem(2, 1, item)
        item = QtGui.QTableWidgetItem()
        self.tblBalance.setItem(2, 2, item)
        item = QtGui.QTableWidgetItem()
        self.tblBalance.setItem(3, 0, item)
        item = QtGui.QTableWidgetItem()
        self.tblBalance.setItem(3, 1, item)
        item = QtGui.QTableWidgetItem()
        self.tblBalance.setItem(3, 2, item)
        item = QtGui.QTableWidgetItem()
        self.tblBalance.setItem(4, 0, item)
        item = QtGui.QTableWidgetItem()
        self.tblBalance.setItem(4, 1, item)
        item = QtGui.QTableWidgetItem()
        self.tblBalance.setItem(4, 2, item)
        item = QtGui.QTableWidgetItem()
        self.tblBalance.setItem(5, 0, item)
        item = QtGui.QTableWidgetItem()
        self.tblBalance.setItem(5, 1, item)
        item = QtGui.QTableWidgetItem()
        self.tblBalance.setItem(5, 2, item)
        item = QtGui.QTableWidgetItem()
        self.tblBalance.setItem(6, 0, item)
        item = QtGui.QTableWidgetItem()
        self.tblBalance.setItem(6, 2, item)
        self.tblBalance.horizontalHeader().setVisible(False)
        self.tblBalance.verticalHeader().setVisible(False)
        self.horizontalLayout_7.addWidget(self.tblBalance)
        self.horizontalLayout_5.addLayout(self.horizontalLayout_7)
        self.verticalLayout_2.addLayout(self.horizontalLayout_5)
        self.verticalLayout_3.addLayout(self.verticalLayout_2)
        icon1 = QtGui.QIcon()
        icon1.addPixmap(QtGui.QPixmap(_fromUtf8(":/icons/resources/calculator.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
        self.tabWidget.addTab(self.tab_balance, icon1, _fromUtf8(""))
        # --- Inventory tab: modify/move buttons, search controls, item table ---
        self.tab_inventory = QtGui.QWidget()
        self.tab_inventory.setObjectName(_fromUtf8("tab_inventory"))
        self.verticalLayout_6 = QtGui.QVBoxLayout(self.tab_inventory)
        self.verticalLayout_6.setMargin(0)
        self.verticalLayout_6.setObjectName(_fromUtf8("verticalLayout_6"))
        self.horizontalLayout_6 = QtGui.QHBoxLayout()
        self.horizontalLayout_6.setContentsMargins(-1, 0, -1, -1)
        self.horizontalLayout_6.setObjectName(_fromUtf8("horizontalLayout_6"))
        self.btnModifyInventory = QtGui.QPushButton(self.tab_inventory)
        sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Maximum, QtGui.QSizePolicy.Fixed)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.btnModifyInventory.sizePolicy().hasHeightForWidth())
        self.btnModifyInventory.setSizePolicy(sizePolicy)
        self.btnModifyInventory.setText(_fromUtf8(""))
        icon2 = QtGui.QIcon()
        icon2.addPixmap(QtGui.QPixmap(_fromUtf8(":/icons/resources/edit_write_pencil_pen_page-512.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
        self.btnModifyInventory.setIcon(icon2)
        self.btnModifyInventory.setIconSize(QtCore.QSize(20, 20))
        self.btnModifyInventory.setObjectName(_fromUtf8("btnModifyInventory"))
        self.horizontalLayout_6.addWidget(self.btnModifyInventory)
        self.btnMove = QtGui.QPushButton(self.tab_inventory)
        self.btnMove.setText(_fromUtf8(""))
        icon3 = QtGui.QIcon()
        icon3.addPixmap(QtGui.QPixmap(_fromUtf8(":/icons/resources/swap-512.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
        self.btnMove.setIcon(icon3)
        self.btnMove.setObjectName(_fromUtf8("btnMove"))
        self.horizontalLayout_6.addWidget(self.btnMove)
        spacerItem = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
        self.horizontalLayout_6.addItem(spacerItem)
        # Combo selects which column the search line edit filters on.
        self.cmboxInventory = QtGui.QComboBox(self.tab_inventory)
        sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Fixed)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.cmboxInventory.sizePolicy().hasHeightForWidth())
        self.cmboxInventory.setSizePolicy(sizePolicy)
        self.cmboxInventory.setMinimumSize(QtCore.QSize(20, 0))
        self.cmboxInventory.setSizeIncrement(QtCore.QSize(0, 0))
        self.cmboxInventory.setEditable(False)
        self.cmboxInventory.setInsertPolicy(QtGui.QComboBox.InsertAtBottom)
        self.cmboxInventory.setModelColumn(0)
        self.cmboxInventory.setObjectName(_fromUtf8("cmboxInventory"))
        self.horizontalLayout_6.addWidget(self.cmboxInventory)
        self.leditInventory = QtGui.QLineEdit(self.tab_inventory)
        sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Fixed)
        sizePolicy.setHorizontalStretch(0)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(self.leditInventory.sizePolicy().hasHeightForWidth())
        self.leditInventory.setSizePolicy(sizePolicy)
        self.leditInventory.setMinimumSize(QtCore.QSize(40, 0))
        self.leditInventory.setObjectName(_fromUtf8("leditInventory"))
        self.horizontalLayout_6.addWidget(self.leditInventory)
        self.verticalLayout_6.addLayout(self.horizontalLayout_6)
        self.tblInventory = QtGui.QTableView(self.tab_inventory)
        self.tblInventory.setEditTriggers(QtGui.QAbstractItemView.NoEditTriggers)
        self.tblInventory.setSelectionMode(QtGui.QAbstractItemView.SingleSelection)
        self.tblInventory.setSelectionBehavior(QtGui.QAbstractItemView.SelectRows)
        self.tblInventory.setSortingEnabled(True)
        self.tblInventory.setCornerButtonEnabled(False)
        self.tblInventory.setObjectName(_fromUtf8("tblInventory"))
        self.tblInventory.horizontalHeader().setStretchLastSection(True)
        self.verticalLayout_6.addWidget(self.tblInventory)
        icon4 = QtGui.QIcon()
        icon4.addPixmap(QtGui.QPixmap(_fromUtf8(":/icons/resources/paper-box-icon-63457.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
        self.tabWidget.addTab(self.tab_inventory, icon4, _fromUtf8(""))
        # --- Purchases tab: remove button, search controls, purchases table ---
        self.tab_purchases = QtGui.QWidget()
        self.tab_purchases.setObjectName(_fromUtf8("tab_purchases"))
        self.verticalLayout = QtGui.QVBoxLayout(self.tab_purchases)
        self.verticalLayout.setMargin(0)
        self.verticalLayout.setObjectName(_fromUtf8("verticalLayout"))
        self.horizontalLayout_2 = QtGui.QHBoxLayout()
        self.horizontalLayout_2.setContentsMargins(-1, 0, -1, -1)
        self.horizontalLayout_2.setObjectName(_fromUtf8("horizontalLayout_2"))
        self.btnRemovePurchase = QtGui.QPushButton(self.tab_purchases)
        self.btnRemovePurchase.setText(_fromUtf8(""))
        icon5 = QtGui.QIcon()
        icon5.addPixmap(QtGui.QPixmap(_fromUtf8(":/icons/resources/Remove.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
        self.btnRemovePurchase.setIcon(icon5)
        self.btnRemovePurchase.setIconSize(QtCore.QSize(20, 20))
        self.btnRemovePurchase.setObjectName(_fromUtf8("btnRemovePurchase"))
        self.horizontalLayout_2.addWidget(self.btnRemovePurchase)
        spacerItem1 = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
        self.horizontalLayout_2.addItem(spacerItem1)
        self.cmboxPurchases = QtGui.QComboBox(self.tab_purchases)
        self.cmboxPurchases.setObjectName(_fromUtf8("cmboxPurchases"))
        self.horizontalLayout_2.addWidget(self.cmboxPurchases)
        self.leditPurchases = QtGui.QLineEdit(self.tab_purchases)
        self.leditPurchases.setObjectName(_fromUtf8("leditPurchases"))
        self.horizontalLayout_2.addWidget(self.leditPurchases)
        self.verticalLayout.addLayout(self.horizontalLayout_2)
        self.tblPurchases = QtGui.QTableView(self.tab_purchases)
        self.tblPurchases.setAlternatingRowColors(True)
        self.tblPurchases.setSelectionMode(QtGui.QAbstractItemView.SingleSelection)
        self.tblPurchases.setSelectionBehavior(QtGui.QAbstractItemView.SelectRows)
        self.tblPurchases.setVerticalScrollMode(QtGui.QAbstractItemView.ScrollPerPixel)
        self.tblPurchases.setSortingEnabled(True)
        self.tblPurchases.setWordWrap(True)
        self.tblPurchases.setCornerButtonEnabled(False)
        self.tblPurchases.setObjectName(_fromUtf8("tblPurchases"))
        self.tblPurchases.horizontalHeader().setStretchLastSection(True)
        self.tblPurchases.verticalHeader().setVisible(False)
        self.tblPurchases.verticalHeader().setSortIndicatorShown(False)
        self.verticalLayout.addWidget(self.tblPurchases)
        icon6 = QtGui.QIcon()
        icon6.addPixmap(QtGui.QPixmap(_fromUtf8(":/icons/resources/cart-arrow-down-512.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
        self.tabWidget.addTab(self.tab_purchases, icon6, _fromUtf8(""))
        # --- Sales tab: reverse/settle buttons, search controls, sales table ---
        self.tab_sales = QtGui.QWidget()
        self.tab_sales.setObjectName(_fromUtf8("tab_sales"))
        self.verticalLayout_4 = QtGui.QVBoxLayout(self.tab_sales)
        self.verticalLayout_4.setMargin(0)
        self.verticalLayout_4.setObjectName(_fromUtf8("verticalLayout_4"))
        self.horizontalLayout_3 = QtGui.QHBoxLayout()
        self.horizontalLayout_3.setContentsMargins(-1, 0, -1, -1)
        self.horizontalLayout_3.setObjectName(_fromUtf8("horizontalLayout_3"))
        self.btnRemoveSale = QtGui.QPushButton(self.tab_sales)
        self.btnRemoveSale.setText(_fromUtf8(""))
        icon7 = QtGui.QIcon()
        icon7.addPixmap(QtGui.QPixmap(_fromUtf8(":/icons/resources/undo-512.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
        self.btnRemoveSale.setIcon(icon7)
        self.btnRemoveSale.setIconSize(QtCore.QSize(20, 20))
        self.btnRemoveSale.setObjectName(_fromUtf8("btnRemoveSale"))
        self.horizontalLayout_3.addWidget(self.btnRemoveSale)
        self.btnSettle = QtGui.QPushButton(self.tab_sales)
        self.btnSettle.setText(_fromUtf8(""))
        icon8 = QtGui.QIcon()
        icon8.addPixmap(QtGui.QPixmap(_fromUtf8(":/icons/resources/payment-256.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
        self.btnSettle.setIcon(icon8)
        self.btnSettle.setIconSize(QtCore.QSize(20, 20))
        self.btnSettle.setObjectName(_fromUtf8("btnSettle"))
        self.horizontalLayout_3.addWidget(self.btnSettle)
        spacerItem2 = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
        self.horizontalLayout_3.addItem(spacerItem2)
        self.cmboxSales = QtGui.QComboBox(self.tab_sales)
        self.cmboxSales.setObjectName(_fromUtf8("cmboxSales"))
        self.horizontalLayout_3.addWidget(self.cmboxSales)
        self.leditSales = QtGui.QLineEdit(self.tab_sales)
        self.leditSales.setObjectName(_fromUtf8("leditSales"))
        self.horizontalLayout_3.addWidget(self.leditSales)
        self.verticalLayout_4.addLayout(self.horizontalLayout_3)
        self.tblSales = QtGui.QTableView(self.tab_sales)
        self.tblSales.setEditTriggers(QtGui.QAbstractItemView.NoEditTriggers)
        self.tblSales.setAlternatingRowColors(True)
        self.tblSales.setSelectionMode(QtGui.QAbstractItemView.SingleSelection)
        self.tblSales.setSelectionBehavior(QtGui.QAbstractItemView.SelectRows)
        self.tblSales.setSortingEnabled(True)
        self.tblSales.setCornerButtonEnabled(False)
        self.tblSales.setObjectName(_fromUtf8("tblSales"))
        self.tblSales.horizontalHeader().setStretchLastSection(True)
        self.tblSales.verticalHeader().setVisible(False)
        self.verticalLayout_4.addWidget(self.tblSales)
        icon9 = QtGui.QIcon()
        icon9.addPixmap(QtGui.QPixmap(_fromUtf8(":/icons/resources/cashier-icon-png-8.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
        self.tabWidget.addTab(self.tab_sales, icon9, _fromUtf8(""))
        # --- Clients tab: remove/modify buttons, search controls, clients table ---
        self.tab_clients = QtGui.QWidget()
        self.tab_clients.setObjectName(_fromUtf8("tab_clients"))
        self.verticalLayout_5 = QtGui.QVBoxLayout(self.tab_clients)
        self.verticalLayout_5.setMargin(0)
        self.verticalLayout_5.setObjectName(_fromUtf8("verticalLayout_5"))
        self.horizontalLayout_4 = QtGui.QHBoxLayout()
        self.horizontalLayout_4.setContentsMargins(-1, 0, -1, -1)
        self.horizontalLayout_4.setObjectName(_fromUtf8("horizontalLayout_4"))
        self.btnRemoveClient = QtGui.QPushButton(self.tab_clients)
        self.btnRemoveClient.setText(_fromUtf8(""))
        # Reuses the "Remove" icon created for the Purchases tab (icon5).
        self.btnRemoveClient.setIcon(icon5)
        self.btnRemoveClient.setIconSize(QtCore.QSize(20, 20))
        self.btnRemoveClient.setObjectName(_fromUtf8("btnRemoveClient"))
        self.horizontalLayout_4.addWidget(self.btnRemoveClient)
        self.btnModifyClient = QtGui.QPushButton(self.tab_clients)
        self.btnModifyClient.setText(_fromUtf8(""))
        icon10 = QtGui.QIcon()
        icon10.addPixmap(QtGui.QPixmap(_fromUtf8(":/icons/resources/edit_user_male_write_pencil_man-512.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
        self.btnModifyClient.setIcon(icon10)
        self.btnModifyClient.setIconSize(QtCore.QSize(20, 20))
        self.btnModifyClient.setObjectName(_fromUtf8("btnModifyClient"))
        self.horizontalLayout_4.addWidget(self.btnModifyClient)
        spacerItem3 = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
        self.horizontalLayout_4.addItem(spacerItem3)
        self.cmboxClients = QtGui.QComboBox(self.tab_clients)
        self.cmboxClients.setObjectName(_fromUtf8("cmboxClients"))
        self.horizontalLayout_4.addWidget(self.cmboxClients)
        self.leditClients = QtGui.QLineEdit(self.tab_clients)
        self.leditClients.setObjectName(_fromUtf8("leditClients"))
        self.horizontalLayout_4.addWidget(self.leditClients)
        self.verticalLayout_5.addLayout(self.horizontalLayout_4)
        self.tblClients = QtGui.QTableView(self.tab_clients)
        self.tblClients.setAlternatingRowColors(True)
        self.tblClients.setSelectionMode(QtGui.QAbstractItemView.SingleSelection)
        self.tblClients.setSelectionBehavior(QtGui.QAbstractItemView.SelectRows)
        self.tblClients.setSortingEnabled(True)
        self.tblClients.setCornerButtonEnabled(False)
        self.tblClients.setObjectName(_fromUtf8("tblClients"))
        self.tblClients.horizontalHeader().setStretchLastSection(True)
        self.tblClients.verticalHeader().setVisible(False)
        self.verticalLayout_5.addWidget(self.tblClients)
        icon11 = QtGui.QIcon()
        icon11.addPixmap(QtGui.QPixmap(_fromUtf8(":/icons/resources/15656.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
        self.tabWidget.addTab(self.tab_clients, icon11, _fromUtf8(""))
        self.gridLayout.addWidget(self.tabWidget, 0, 1, 1, 1)
        MainWindow.setCentralWidget(self.centralwidget)
        # --- Left toolbar with the four top-level actions ---
        self.toolBar = QtGui.QToolBar(MainWindow)
        self.toolBar.setLayoutDirection(QtCore.Qt.LeftToRight)
        self.toolBar.setLocale(QtCore.QLocale(QtCore.QLocale.English, QtCore.QLocale.UnitedStates))
        self.toolBar.setMovable(True)
        self.toolBar.setIconSize(QtCore.QSize(30, 30))
        self.toolBar.setToolButtonStyle(QtCore.Qt.ToolButtonTextUnderIcon)
        self.toolBar.setFloatable(False)
        self.toolBar.setObjectName(_fromUtf8("toolBar"))
        MainWindow.addToolBar(QtCore.Qt.LeftToolBarArea, self.toolBar)
        self.actionPurchase = QtGui.QAction(MainWindow)
        icon12 = QtGui.QIcon()
        icon12.addPixmap(QtGui.QPixmap(_fromUtf8(":/icons/resources/plus-icon-0.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
        self.actionPurchase.setIcon(icon12)
        self.actionPurchase.setObjectName(_fromUtf8("actionPurchase"))
        self.actionSale = QtGui.QAction(MainWindow)
        icon13 = QtGui.QIcon()
        icon13.addPixmap(QtGui.QPixmap(_fromUtf8(":/icons/resources/product_basket-512.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
        self.actionSale.setIcon(icon13)
        self.actionSale.setObjectName(_fromUtf8("actionSale"))
        self.actionClient = QtGui.QAction(MainWindow)
        icon14 = QtGui.QIcon()
        icon14.addPixmap(QtGui.QPixmap(_fromUtf8(":/icons/resources/manager-512.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
        self.actionClient.setIcon(icon14)
        self.actionClient.setObjectName(_fromUtf8("actionClient"))
        self.actionRefresh = QtGui.QAction(MainWindow)
        icon15 = QtGui.QIcon()
        icon15.addPixmap(QtGui.QPixmap(_fromUtf8(":/icons/resources/Oxygen-Icons.org-Oxygen-Actions-view-refresh.ico")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
        self.actionRefresh.setIcon(icon15)
        self.actionRefresh.setObjectName(_fromUtf8("actionRefresh"))
        self.toolBar.addAction(self.actionRefresh)
        self.toolBar.addSeparator()
        self.toolBar.addAction(self.actionSale)
        self.toolBar.addSeparator()
        self.toolBar.addAction(self.actionPurchase)
        self.toolBar.addSeparator()
        self.toolBar.addAction(self.actionClient)
        self.toolBar.addSeparator()
        self.retranslateUi(MainWindow)
        # Balance tab is shown first on startup.
        self.tabWidget.setCurrentIndex(0)
        QtCore.QMetaObject.connectSlotsByName(MainWindow)
    def retranslateUi(self, MainWindow):
        """Install all user-visible (translatable) strings of the main window."""
        MainWindow.setWindowTitle(_translate("MainWindow", "Libra v1.0.0", None))
        self.groupBox.setTitle(_translate("MainWindow", "Period", None))
        self.dateAnnual.setDisplayFormat(_translate("MainWindow", "yyyy", None))
        self.radioMonthly.setText(_translate("MainWindow", "Monthly", None))
        self.dateMonthly.setDisplayFormat(_translate("MainWindow", "MMM/yyyy", None))
        self.radioAnnual.setText(_translate("MainWindow", "Annual", None))
        self.radioHistoric.setText(_translate("MainWindow", "Historic", None))
        self.radioDaily.setText(_translate("MainWindow", "Daily", None))
        self.groupBox_2.setTitle(_translate("MainWindow", "Purchases", None))
        self.groupBox_3.setTitle(_translate("MainWindow", "Sales", None))
        # Sorting is suspended while seeding the balance grid so rows stay put.
        __sortingEnabled = self.tblBalance.isSortingEnabled()
        self.tblBalance.setSortingEnabled(False)
        item = self.tblBalance.item(0, 0)
        item.setText(_translate("MainWindow", "Sales (paid)", None))
        item = self.tblBalance.item(0, 2)
        item.setText(_translate("MainWindow", "0.00", None))
        item = self.tblBalance.item(1, 0)
        item.setText(_translate("MainWindow", "Sales (credit)", None))
        item = self.tblBalance.item(1, 2)
        item.setText(_translate("MainWindow", "0.00", None))
        item = self.tblBalance.item(2, 0)
        item.setText(_translate("MainWindow", "Total revenue", None))
        item = self.tblBalance.item(2, 2)
        item.setText(_translate("MainWindow", "0.00", None))
        item = self.tblBalance.item(3, 0)
        item.setText(_translate("MainWindow", "Costs", None))
        item = self.tblBalance.item(3, 1)
        item.setText(_translate("MainWindow", "0.00", None))
        item = self.tblBalance.item(4, 0)
        item.setText(_translate("MainWindow", "Taxes", None))
        item = self.tblBalance.item(4, 1)
        item.setText(_translate("MainWindow", "0.00", None))
        item = self.tblBalance.item(5, 0)
        item.setText(_translate("MainWindow", "Profit", None))
        item = self.tblBalance.item(5, 2)
        item.setText(_translate("MainWindow", "0.00", None))
        item = self.tblBalance.item(6, 0)
        item.setText(_translate("MainWindow", "Profit (margin)", None))
        item = self.tblBalance.item(6, 2)
        item.setText(_translate("MainWindow", "0.00", None))
        self.tblBalance.setSortingEnabled(__sortingEnabled)
        self.tabWidget.setTabText(self.tabWidget.indexOf(self.tab_balance), _translate("MainWindow", "Balance", None))
        self.btnModifyInventory.setToolTip(_translate("MainWindow", "Modify inventory", None))
        self.btnMove.setToolTip(_translate("MainWindow", "Move Item", None))
        self.leditInventory.setPlaceholderText(_translate("MainWindow", "Search...", None))
        self.tabWidget.setTabText(self.tabWidget.indexOf(self.tab_inventory), _translate("MainWindow", "Inventory", None))
        self.btnRemovePurchase.setToolTip(_translate("MainWindow", "Remove purchase", None))
        self.leditPurchases.setPlaceholderText(_translate("MainWindow", "Search...", None))
        self.tabWidget.setTabText(self.tabWidget.indexOf(self.tab_purchases), _translate("MainWindow", "Purchases", None))
        self.btnRemoveSale.setToolTip(_translate("MainWindow", "Reverse sale", None))
        self.btnSettle.setToolTip(_translate("MainWindow", "Settle debt", None))
        self.leditSales.setPlaceholderText(_translate("MainWindow", "Search...", None))
        self.tabWidget.setTabText(self.tabWidget.indexOf(self.tab_sales), _translate("MainWindow", "Sales", None))
        self.btnRemoveClient.setToolTip(_translate("MainWindow", "Remove client", None))
        self.btnModifyClient.setToolTip(_translate("MainWindow", "Modify Client", None))
        self.leditClients.setPlaceholderText(_translate("MainWindow", "Search...", None))
        self.tabWidget.setTabText(self.tabWidget.indexOf(self.tab_clients), _translate("MainWindow", "Clients", None))
        self.toolBar.setWindowTitle(_translate("MainWindow", "toolBar", None))
        self.actionPurchase.setText(_translate("MainWindow", "Purchase", None))
        self.actionSale.setText(_translate("MainWindow", "Sale", None))
        self.actionClient.setText(_translate("MainWindow", "Client", None))
        self.actionRefresh.setText(_translate("MainWindow", "Refresh", None))
import res_rc
|
{"/master_admin.py": ["/gui_inventory.py", "/gui_purchase.py", "/gui_sale.py", "/gui_client.py", "/gui_move.py", "/mec_inventory.py"], "/main_login.py": ["/gui_login.py", "/master_admin.py", "/mec_login.py"]}
|
1,762
|
Redent0r/Libra
|
refs/heads/master
|
/main_login.py
|
import sys
import sqlite3
from PyQt4 import QtCore, QtGui, QtSql
from gui_login import Ui_Dialog as LoginGui
import master_admin
import mec_login
from mec_login import check_login # imported directly so check_login is usable unqualified
class Login(QtGui.QDialog, LoginGui):
    """Login dialog backed by the local SQLite database (.libra.db).

    Accepts (QDialog.Accepted) when the entered credentials pass
    mec_login.check_login; otherwise clears the fields and warns the user.
    """
    def __init__(self, parent=None):
        QtGui.QDialog.__init__(self, parent)
        self.setupUi(self)
        ### functionality ###
        self.btnLogin.clicked.connect(self.start)
        ### database ###
        # Open (or create) the app database and make sure the login table
        # exists before the dialog is shown.
        self.conn = sqlite3.connect(".libra.db")
        self.c = self.conn.cursor()
        mec_login.create_login_table(self.c, self.conn)
        self.show()
    def start(self):
        """Validate the entered credentials; accept the dialog on success."""
        usuario = self.leditUser.text()
        password = self.leditPassword.text()
        if check_login(self.c, usuario, password):
            print("success")
            self.accept()
        else:
            # Wrong credentials: reset both fields and notify the user.
            self.leditUser.clear()
            self.leditPassword.clear()
            QtGui.QMessageBox.warning(self, 'Error', 'Incorrect username or password')
    def closeEvent(self, e):
        """Release the database cursor/connection when the dialog closes."""
        print("closing")
        self.c.close()
        self.conn.close()
        e.accept()
if __name__ == "__main__":
    # Entry point: show the login dialog first; the main inventory window is
    # only created and shown after a successful login.
    app = QtGui.QApplication(sys.argv)
    login = Login()
    if login.exec_() == QtGui.QDialog.Accepted:
        mainwindow = master_admin.Inventory()
        mainwindow.show()
        sys.exit(app.exec_())
|
{"/master_admin.py": ["/gui_inventory.py", "/gui_purchase.py", "/gui_sale.py", "/gui_client.py", "/gui_move.py", "/mec_inventory.py"], "/main_login.py": ["/gui_login.py", "/master_admin.py", "/mec_login.py"]}
|
1,763
|
Redent0r/Libra
|
refs/heads/master
|
/mec_inventory.py
|
"""
Author:Christopher Holder
"""
def create_tables(connection,cursor):
    """
    Create the necessary tables in the database (if they do not already exist)
    and seed a default 'Misc' client.  Returns True.
    """
    # Counters used to generate sequential transaction numbers for
    # purchases (OrdinalNumber) and sales (OrdinalNumberS).
    cursor.execute("CREATE TABLE IF NOT EXISTS OrdinalNumber(ID INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,num TEXT NOT NULL)")
    cursor.execute('CREATE TABLE IF NOT EXISTS OrdinalNumberS(ID INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, num TEXT NOT NULL)')
    # Current stock, keyed in practice by (code, groupx).
    cursor.execute("""CREATE TABLE IF NOT EXISTS Inventory(ID INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,code TEXT NOT NULL,name TEXT NOT NULL,avail INTEGER NOT NULL,costUni REAL NOT NULL,priceUniSug REAL NOT NULL,groupx TEXT NOT NULL,category TEXT,stockmin INTEGER,stockmax INTEGER)""")
    # Purchase history (Entries) and sale history (Outs).
    cursor.execute("""CREATE TABLE IF NOT EXISTS Entries(ID INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,dat TEXT,trans TEXT,code TEXT NOT NULL,name TEXT NOT NULL,quantity INTEGER NOT NULL,provider TEXT ,costUni REAL NOT NULL,costItems REAL NOT NULL,groupx TEXT NOT NULL, category TEXT)""")
    cursor.execute("""CREATE TABLE IF NOT EXISTS Outs(ID INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,dat TEXT,trans TEXT,code TEXT NOT NULL,name TEXT NOT NULL,quantity INTEGER NOT NULL,groupx TEXT NOT NULL,priceUni REAL,priceItems REAL,tax REAL,revenue REAL,winnings REAL,payment TEXT,client TEXT)""")
    cursor.execute('CREATE TABLE IF NOT EXISTS Clients(ID INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,identification TEXT,name TEXT,mail TEXT,num TEXT,cel TEXT,fax TEXT ,direction TEXT,bought INTEGER,money_invested REAL,paid REAL,debt REAL)')
    # Default catch-all client for walk-in / anonymous sales.
    add_client(connection,cursor,'Misc','','','','','','')
    connection.commit()
    return True
def add_item_entry(connection,cursor,code = '#',name = "",quantity = 0,provider = "",costUni = 0.00,priceUniSug = 100 ,groupx = '',category = "",stockmin = "",stockmax = ""):
    """
    Record a stock purchase: always appends a row to Entries, and either
    creates the (code, groupx) item in Inventory or tops up its stock.

    Returns True after committing.
    """
    # Does the item already exist in this group?
    cursor.execute('SELECT code,groupx FROM Inventory WHERE code=? AND groupx = ?',(code,groupx))
    data = cursor.fetchone()
    if data == None:
        # New item: insert into Inventory and log the purchase in Entries.
        transnum = ordinal_generator(connection,cursor)
        avail = quantity
        costItems = costUni * quantity
        costItems = round(costItems,2)
        priceUniSug = round(priceUniSug,2)
        costUni = round(costUni,2)
        b = (code,name,avail,costUni,priceUniSug,groupx,category,stockmin,stockmax)
        c = (transnum,code,name,quantity,provider,costUni,costItems,groupx,category)
        cursor.execute("INSERT INTO Inventory (code,name,avail,costUni,priceUniSug,groupx,category,stockmin,stockmax) VALUES(?,?,?,?,?,?,?,?,?)",b)
        cursor.execute("INSERT INTO Entries (dat,trans,code,name,quantity,provider,costUni,costItems,groupx,category) VALUES(date('now'),?,?,?,?,?,?,?,?,?)",c)
        connection.commit()
    else:
        # Existing item: bump stock, refresh descriptive fields, log purchase.
        transnum = ordinal_generator(connection,cursor)
        avail = quantity
        costItems = costUni * quantity
        costItems = round(costItems,2)
        # NOTE(review): here costUni is rounded only inside the Entries tuple,
        # while update_all() below receives the unrounded value — the new-item
        # branch rounds before both writes. Confirm which is intended.
        c = (transnum,code,name,quantity,provider,round(costUni,2),costItems,groupx,category)
        increase_stock(cursor,code,groupx,quantity)
        update_all(cursor,code,groupx,costUni,priceUniSug,name,category)
        cursor.execute("INSERT INTO Entries (dat,trans,code,name,quantity,provider,costUni,costItems,groupx,category) VALUES(date('now'),?,?,?,?,?,?,?,?,?)",c)
        connection.commit()
    return True
def add_item_exit_fixed(connection,cursor,code = "#",quantity = 1,tax = 0.07,pricef = 10.00,discount = 0,payment = 'CRE',client = '',trans='',groupx = ''):
    """
    Record one sale line at a fixed unit price `pricef`: decrements stock,
    computes totals/tax/winnings, inserts into Outs and refreshes the
    client's aggregate columns.

    `discount` is a fraction (e.g. 0.1) applied to the tax-inclusive total.
    Assumes (code, groupx) exists in Inventory — callers validate via
    sale_valid()/sale_valid2() first; otherwise data0[0] raises TypeError.

    Returns True after committing.
    """
    a =(code,groupx)
    cursor.execute('SELECT name FROM Inventory WHERE code = ? AND groupx = ?',a)
    data0 = cursor.fetchone()
    name = str(data0[0])
    decrease_stock(cursor,code,groupx,quantity)
    priceUni = pricef
    # Tax on the pre-discount amount.
    taxTot = tax * priceUni * quantity
    taxTot = round(taxTot,2)
    priceItems = priceUni * (tax + 1) * quantity
    if (discount == 0):
        priceItems = round(priceItems,2)
    else:
        # NOTE(review): discount is applied AFTER tax but taxTot is not
        # reduced accordingly — confirm this matches the business rule.
        discount = priceItems * discount
        priceItems = priceItems - discount
        priceItems = round(priceItems,2)
    cursor.execute('SELECT costUni FROM Inventory WHERE code = ? AND groupx = ?',a)
    data2 = cursor.fetchone()
    costItems = (float(data2[0]))* quantity
    costItems = round(costItems,2)
    revenue = priceItems - costItems
    revenue = round(revenue,2)
    winnings = revenue - taxTot
    winnings = round(winnings,2)
    # Remove any items whose stock just hit zero.
    auto_del_0(connection,cursor)
    b = (trans,code,name,quantity,groupx,priceUni,priceItems,taxTot,revenue,winnings,payment,client)
    cursor.execute("INSERT INTO Outs (dat,trans,code,name,quantity,groupx,priceUni,priceItems,tax,revenue,winnings,payment,client) VALUES(date('now'),?,?,?,?,?,?,?,?,?,?,?,?)",b)
    update_client_info(connection,cursor,client)
    connection.commit()
    return True
#-------------------------------------------------------------------------------------------------------
def modify(connection,cursor,code,groupx,avail,priceUni,category,smin,smax,costUni, name):
    """Edit an inventory item.

    With groupx == 'Global' every group's row for `code` is updated (stock is
    left untouched); otherwise only the one (code, groupx) row is updated,
    including its stock level.
    """
    if groupx == 'Global':
        values = (name,priceUni,category,smin,smax,costUni,code)
        cursor.execute('UPDATE Inventory SET name = ?,priceUniSug = ?,category = ?, stockmin = ?,stockmax = ? ,costUni = ? WHERE code = ?',values)
    else:
        values = (name,avail,priceUni,category,smin,smax,costUni,code,groupx)
        cursor.execute('UPDATE Inventory SET name = ?,avail = ?,priceUniSug = ?,category = ?, stockmin = ?,stockmax = ? ,costUni = ? WHERE code = ? AND groupx = ?',values)
    connection.commit()
def modify_client(connection,cursor,name,identification,mail,num,cel,fax,direction):
    """Overwrite a client's contact fields, keyed by exact name."""
    fields = (identification, mail, num, cel, fax, direction, name)
    cursor.execute('UPDATE Clients SET identification = ?,mail = ?,num = ?,cel = ?,fax = ?,direction = ? WHERE name = ?', fields)
    connection.commit()
def move(connection,cursor,code,groupx1,groupx2,quantity):
    """Transfer `quantity` units of `code` from group `groupx1` to `groupx2`.

    If the destination row does not exist yet it is created with the source
    row's prices/category/stock limits; otherwise its stock is increased.
    Rows emptied by the transfer are purged via auto_del_0().
    """
    cursor.execute('SELECT code,name,avail,costUni,priceUniSug,groupx,category,stockmin,stockmax FROM Inventory WHERE code = ? and groupx = ?',(code,groupx1))
    source = cursor.fetchone()
    decrease_stock(cursor,code,groupx1,quantity)
    auto_del_0(connection,cursor)
    cursor.execute('SELECT name FROM Inventory WHERE code = ? AND groupx = ?' , (code,groupx2))
    destination = cursor.fetchone()
    if destination is None:
        # Clone the source row into the target group with the moved quantity.
        clone = (source[0],source[1],quantity,source[3],source[4],groupx2,source[6],source[7],source[8])
        cursor.execute('INSERT INTO Inventory (code,name,avail,costUni,priceUniSug,groupx,category,stockmin,stockmax) VALUES(?,?,?,?,?,?,?,?,?)',clone)
    else:
        increase_stock(cursor,code,groupx2,quantity)
    connection.commit()
def shopping_cart(connection,cursor,lista):
    """
    Perform multiple sales in one transaction batch.

    `lista` is a list of lists, each shaped
    [code, quantity, tax, pricef, discount, payment, client, groupx].

    All lines are validated first; if any fail, a dict mapping 1-based line
    position to its error list is printed and returned. Otherwise every line
    is sold under a shared ordinal prefix and True is returned.
    """
    failed = {}
    for position, line in enumerate(lista, start=1):
        verdict = sale_valid2(cursor, line[0], line[1], line[7])
        if verdict != 0:
            failed[position] = verdict
    if failed:
        print(failed)
        return failed
    prefix = ordinal_generator2(connection, cursor)
    for position, line in enumerate(lista, start=1):
        # One transaction number per line: shared prefix + 3-digit counter.
        transa = prefix + str(position).zfill(3)
        add_item_exit_fixed(connection, cursor, line[0], line[1], line[2],
                            line[3], line[4], line[5], line[6], transa, line[7])
    return True
def sale_valid(cursor,code,client_name,quantity,groupx):
    """
    Validate a sale line against Inventory and Clients.

    Returns 0 on success, otherwise a list of error codes:
      1 = item does not exist, 2 = not enough stock, 3 = unknown client.
    """
    errors = []
    cursor.execute('SELECT code,avail FROM Inventory WHERE code = ? AND groupx = ?', (code, groupx))
    item = cursor.fetchone()
    if item is None:
        errors.append(1)
    elif item[1] < quantity:
        errors.append(2)
    cursor.execute('SELECT name FROM Clients WHERE name = ?', (client_name,))
    if cursor.fetchone() is None:
        errors.append(3)
    return errors if errors else 0
def sale_valid2(cursor,code,quantity,groupx):
    """
    Validate a sale line against Inventory only (no client check).

    Returns 0 on success, otherwise a list of error codes:
      1 = item does not exist, 2 = not enough stock.
    """
    errors = []
    cursor.execute('SELECT code,avail FROM Inventory WHERE code = ? AND groupx = ?', (code, groupx))
    item = cursor.fetchone()
    if item is None:
        errors.append(1)
    elif item[1] < quantity:
        errors.append(2)
    return errors if errors else 0
def query_add(cursor,code,groupx):
    """Return (name, costUni, priceUniSug, category, stockmin, stockmax) for
    the (code, groupx) item, or False when it does not exist."""
    cursor.execute('SELECT name,costUni,priceUniSug,category,stockmin,stockmax FROM Inventory WHERE code = ? AND groupx = ?', (code, groupx))
    row = cursor.fetchone()
    return False if row is None else row
def query_sale(cursor,code,groupx):
    """
    Return (name, priceUniSug, costUni) for the (code, groupx) item,
    or False (after printing a console note) when the code is unknown.
    """
    cursor.execute('SELECT name,priceUniSug,costUni FROM Inventory WHERE code = ? AND groupx = ?', (code, groupx))
    row = cursor.fetchone()
    if row is None:
        print('No name with that code')
        return False
    return row
def query_modify(cursor,code,groupx):
    """
    Return (avail, priceUniSug, costUni, category, stockmin, stockmax, name)
    for the (code, groupx) item, or None when it does not exist.
    """
    cursor.execute('SELECT avail,priceUniSug,costUni,category,stockmin,stockmax, name FROM Inventory WHERE code = ? AND groupx = ?', (code, groupx))
    return cursor.fetchone()
def query_client(cursor,name):
    """
    Return (identification, mail, num, cel, fax, direction) for the client
    named `name`, or None when no such client exists.

    Note: the Clients table also stores bought/money_invested/paid/debt,
    but this query deliberately does not fetch them.
    """
    cursor.execute('SELECT identification,mail,num,cel,fax,direction FROM Clients WHERE name = ?',(name,))
    data = cursor.fetchone()
    return data
#-------------------------------------------------------------------------------------------------------
def add_client(connection,cursor,identification,name,mail,num,cel,fax,direction):
    """
    Insert a new client with zeroed purchase statistics.

    Returns False (after printing a note) when the name is already taken,
    True after a successful commit.
    """
    cursor.execute('SELECT name FROM Clients WHERE name = ?', (name,))
    if cursor.fetchone() is not None:
        print('Name already used.')
        return False
    # bought / money_invested / paid / debt all start at zero.
    record = (identification, name, mail, num, cel, fax, direction, 0, 0.0, 0.0, 0.0)
    cursor.execute("INSERT INTO Clients (identification,name,mail,num,cel,fax,direction,bought,money_invested,paid,debt) VALUES (?,?,?,?,?,?,?,?,?,?,?)", record)
    connection.commit()
    return True
def update_client_info(connection,cursor,user):
    """
    Recompute a client's aggregate columns from the Outs table and store them:
      bought         = total quantity sold to the client
      money_invested = total priceItems across all their sales
      paid           = total priceItems with payment 'DEB'
      debt           = total priceItems with payment 'CRE'

    Fix vs. original: the sums are now done in SQL (COALESCE(SUM(...),0))
    instead of Python loops, and the dead `fetchall() == None` check is gone
    (sqlite3's fetchall() returns a list, never None). A client with no sales
    gets all-zero aggregates, matching the original's sum([]) behaviour.
    """
    params = (user,)
    cursor.execute('SELECT COALESCE(SUM(quantity),0), COALESCE(SUM(priceItems),0) FROM Outs WHERE client = ?', params)
    articles, money = cursor.fetchone()
    cursor.execute("SELECT COALESCE(SUM(priceItems),0) FROM Outs WHERE client = ? AND payment = 'DEB'", params)
    debit = cursor.fetchone()[0]
    cursor.execute("SELECT COALESCE(SUM(priceItems),0) FROM Outs WHERE client = ? AND payment = 'CRE'", params)
    credit = cursor.fetchone()[0]
    cursor.execute('UPDATE Clients SET bought = ?,money_invested = ?,paid = ?,debt = ? WHERE name = ?',
                   (articles, money, debit, credit, user))
    connection.commit()
def del_client_id(connection,cursor,identification):
    """Delete the client(s) matching `identification`; always returns True."""
    cursor.execute('DELETE FROM Clients WHERE identification = ?', (identification,))
    connection.commit()
    return True
def del_client_name(connection,cursor,name):
    """Delete the client(s) matching `name`; always returns True."""
    cursor.execute('DELETE FROM Clients WHERE name = ?', (name,))
    connection.commit()
    return True
#-------------------------------------------------------------------------------------------------------
def calc_bal_his(cursor):
    """
    All-time balance over Entries and Outs.

    Returns [costTot, priceTot, paid(DEB), owed(CRE),
             gross profit, taxTot, net profit].

    Fix vs. original: the revenue and winnings columns were summed into
    locals that the return value never used — that dead work is removed.
    """
    cursor.execute('SELECT costItems FROM Entries')
    costTot = sum(row[0] for row in cursor.fetchall())
    cursor.execute('SELECT priceItems,revenue,tax,winnings FROM Outs')
    outs = cursor.fetchall()
    priceTot = sum(row[0] for row in outs)
    taxTot = sum(row[2] for row in outs)
    cd = calc_deb(cursor)   # money actually received
    cc = calc_cre(cursor)   # money still owed on credit
    return [costTot,priceTot,cd,cc,round((priceTot - costTot),2),taxTot,round((priceTot - costTot - taxTot),2)]
def calc_bal_mes(cursor,year,month):
    """
    Balance for one month.

    year  -- four-digit string in 2016..3000
    month -- two-digit string '01'..'12'

    Returns [costTot, priceTot, paid(DEB), owed(CRE),
             gross profit, taxTot, net profit], or False on a bad date.

    Fixes vs. original: month '00' is now rejected (lower bound was < 0),
    non-numeric input returns False instead of raising ValueError, and the
    dead isinstance(...)-float checks plus the unused revenue/winnings sums
    are removed.
    """
    if (len(year) != 4 or not year.isdigit() or not (2016 <= int(year) <= 3000)
            or len(month) != 2 or not month.isdigit() or not (1 <= int(month) <= 12)):
        print('Bad date')
        return False
    date = year+'-'+ month
    # Date matching is by substring on the stored ISO date, as elsewhere.
    cursor.execute('SELECT dat,costItems FROM Entries')
    costTot = sum(row[1] for row in cursor.fetchall() if date in row[0])
    cursor.execute('SELECT dat,priceItems,revenue,tax,winnings FROM Outs ')
    outs = cursor.fetchall()
    priceTot = sum(row[1] for row in outs if date in row[0])
    taxTot = sum(row[3] for row in outs if date in row[0])
    cd = calc_deb(cursor,date)
    cc = calc_cre(cursor,date)
    return [costTot,priceTot,cd,cc,round((priceTot - costTot),2),taxTot,round((priceTot - costTot - taxTot),2)]
def calc_bal_year(cursor,year):
    """
    Balance for one year (four-digit string in 2016..3000).

    Returns [costTot, priceTot, paid(DEB), owed(CRE),
             gross profit, taxTot, net profit], or False on a bad date.

    Fixes vs. original: non-numeric input returns False instead of raising
    ValueError; the dead isinstance float check and the unused
    revenue/winnings sums are removed.
    """
    if len(year) != 4 or not year.isdigit() or not (2016 <= int(year) <= 3000):
        print('Not proper date.')
        return False
    date = year
    cursor.execute('SELECT dat,costItems FROM Entries')
    costTot = sum(row[1] for row in cursor.fetchall() if date in row[0])
    cursor.execute('SELECT dat,priceItems,revenue,tax,winnings FROM Outs ')
    outs = cursor.fetchall()
    priceTot = sum(row[1] for row in outs if date in row[0])
    taxTot = sum(row[3] for row in outs if date in row[0])
    cd = calc_deb(cursor,date)
    cc = calc_cre(cursor,date)
    return [costTot,priceTot,cd,cc,round((priceTot - costTot),2),taxTot,round((priceTot - costTot - taxTot),2)]
def calc_bal_day(cursor,year,month,day):
    """
    Balance for a single day.

    year 'YYYY' (2016..3000), month 'MM' ('01'..'12'), day 'DD' ('01'..'31').

    Returns [costTot, priceTot, paid(DEB), owed(CRE),
             gross profit, taxTot, net profit], or False on a bad date.

    Fixes vs. original: month '00' and day '00' are now rejected (no lower
    bounds before), non-numeric input returns False instead of raising, and
    the dead isinstance checks plus unused revenue/winnings sums are removed.
    """
    if (len(year) != 4 or not year.isdigit() or not (2016 <= int(year) <= 3000)
            or len(month) != 2 or not month.isdigit() or not (1 <= int(month) <= 12)
            or len(day) != 2 or not day.isdigit() or not (1 <= int(day) <= 31)):
        print('Bad date')
        return False
    date = year+'-'+ month + '-' + day
    cursor.execute('SELECT dat,costItems FROM Entries')
    costTot = sum(row[1] for row in cursor.fetchall() if date in row[0])
    cursor.execute('SELECT dat,priceItems,revenue,tax,winnings FROM Outs ')
    outs = cursor.fetchall()
    priceTot = sum(row[1] for row in outs if date in row[0])
    taxTot = sum(row[3] for row in outs if date in row[0])
    cd = calc_deb(cursor,date)
    cc = calc_cre(cursor,date)
    return [costTot,priceTot,cd,cc,round((priceTot - costTot),2),taxTot,round((priceTot - costTot - taxTot),2)]
#-------------------------------------------------------------------------------------------------------
def gen_query(cursor,table,column,stri,num):
    """
    Return up to `num` values from `table`.`column` that contain the
    substring `stri`, in table order. Returns an empty list when nothing
    matches.

    Fixes vs. original: docstring said the opposite of the empty-list case;
    the dead `fetchall() == None` check is gone; the while/pop truncation is
    replaced by a slice. The debug print is kept for behavioural parity.

    WARNING: `table` and `column` are interpolated into the SQL string —
    pass trusted identifiers only, never user input.
    """
    cursor.execute('SELECT '+ str(column) +' FROM '+ str(table))
    matches = [row[0] for row in cursor.fetchall() if stri in row[0]]
    matches = matches[:num]
    print(matches)
    return matches
def paid(connection,cursor,trans):
    """
    Mark the sale with transaction number `trans` as paid (payment 'DEB')
    and refresh that client's aggregate columns.

    Fix vs. original: the unused local `t` is removed.
    Assumes `trans` exists in Outs; otherwise fetchone() returns None and
    data[0] raises TypeError.
    """
    cursor.execute("UPDATE Outs SET payment = 'DEB' WHERE trans = ?", (trans,))
    cursor.execute("SELECT client FROM Outs WHERE trans = ?", (trans,))
    data = cursor.fetchone()
    update_client_info(connection, cursor, data[0])
    connection.commit()
def move_to_credit(connection,cursor,trans):
    """
    Mark the sale with transaction number `trans` as unpaid (payment 'CRE')
    and refresh that client's aggregate columns.
    """
    key = (trans,)
    cursor.execute("UPDATE Outs SET payment = 'CRE' WHERE trans = ?", key)
    cursor.execute("SELECT client FROM Outs WHERE trans = ?", key)
    owner = cursor.fetchone()
    update_client_info(connection, cursor, owner[0])
    connection.commit()
def calc_deb(cursor, date = None):
    """
    Total of sales already paid ('DEB'), rounded to 2 decimals.
    With `date` given, only rows whose date string contains it are counted.
    """
    if date is None:
        cursor.execute("SELECT priceItems FROM Outs WHERE payment = 'DEB'")
        amounts = [row[0] for row in cursor.fetchall()]
    else:
        cursor.execute("SELECT priceItems,dat FROM Outs WHERE payment = 'DEB'")
        amounts = [row[0] for row in cursor.fetchall() if date in row[1]]
    return round(sum(amounts), 2)
def calc_cre(cursor,date = None):
    """
    Total of sales still owed on credit ('CRE'), rounded to 2 decimals.
    With `date` given, only rows whose date string contains it are counted.
    """
    if date is None:
        cursor.execute("SELECT priceItems FROM Outs WHERE payment = 'CRE'")
        amounts = [row[0] for row in cursor.fetchall()]
    else:
        cursor.execute("SELECT priceItems,dat FROM Outs WHERE payment = 'CRE'")
        amounts = [row[0] for row in cursor.fetchall() if date in row[1]]
    return round(sum(amounts), 2)
#-------------------------------------------------------------------------------------------------------
def del_general(connection,cursor,trans):
    """
    Dispatch a delete by transaction-number prefix:
    '1' -> purchase (Entries), '2' -> sale (Outs).

    Returns the underlying delete's result, or False on an unknown prefix
    or a non-subscriptable `trans` (TypeError).
    Client-table deletes are handled elsewhere.
    """
    try:
        if trans[0] == '1':
            return del_item_entries(connection, cursor, trans)
        if trans[0] == '2':
            return del_item_salidas(connection, cursor, trans)
        print('Unknown transaction number')
        return False
    except TypeError:
        print('Error in cell')
        return False
def del_item_entries(connection,cursor,trans):
    """Delete the purchase row with transaction number `trans` from Entries;
    always returns True (missing rows are a silent no-op)."""
    cursor.execute('DELETE FROM Entries WHERE trans = ?', (trans,))
    connection.commit()
    return True
def del_item_inventory(connection,cursor,code,groupx):
    """Delete the (code, groupx) item from Inventory; always returns True."""
    cursor.execute('DELETE FROM Inventory WHERE code = ? AND groupx = ?', (code, groupx))
    connection.commit()
    return True
def del_item_salidas(connection,cursor,trans):
    """
    Undo a sale by transaction number: subtract its amount from the client's
    money_invested, put the sold quantity back into Inventory, then delete
    the Outs row. Returns False (with a console note) when `trans` is not a
    sale; True after committing.

    NOTE(review): assumes the client row and the (code, groupx) Inventory row
    still exist — if the item was purged by auto_del_0, data3[0] raises
    TypeError. Confirm callers guarantee this.
    """
    cursor.execute('SELECT quantity FROM Outs WHERE trans = ?',(trans,))
    data = cursor.fetchone()
    if (data == None):
        print('Transaction number not from an Out')
        return False
    # Roll the sale amount out of the client's money_invested.
    cursor.execute('SELECT priceItems,client FROM Outs WHERE trans = ?',(trans,))
    p = cursor.fetchone()
    cursor.execute('SELECT money_invested FROM Clients WHERE name = ? ',(p[1],))
    d = cursor.fetchone()
    f = d[0]- p[0]
    cursor.execute('UPDATE Clients SET money_invested = ? WHERE name = ?',(f,p[1]))
    cursor.execute('SELECT code,groupx FROM Outs WHERE trans = ?',(trans,))
    data2 = cursor.fetchone()
    # Return the sold units to stock.
    g = (data2[0],data2[1])
    cursor.execute('SELECT avail FROM Inventory WHERE code = ? AND groupx = ?',g)
    data3 = cursor.fetchone()
    avail = data3[0] + data[0]
    b =(avail,data2[0],data2[1])
    cursor.execute('UPDATE Inventory SET avail = ? WHERE code = ? AND groupx = ?',b)
    # Finally remove the sale record itself.
    cursor.execute('DELETE FROM Outs WHERE trans = ?',(trans,))
    connection.commit()
    return True
def auto_del_0(connection,cursor):
    """
    Purge Inventory rows whose stock reached exactly zero.

    Fix vs. original: the pre-check SELECT was redundant — DELETE with no
    matching rows is already a no-op, so a single statement suffices.
    Committing is left to the caller, as before.
    """
    cursor.execute('DELETE FROM Inventory WHERE avail = 0')
def unique(cursor,column,table,key_column = "",key = ""):
    """
    Return the distinct values of `column` in `table`, optionally restricted
    to rows where `key_column` equals `key`.

    WARNING: `column`, `table` and `key_column` are concatenated into the SQL
    string — pass trusted identifiers only.
    """
    if key_column == "":
        cursor.execute("SELECT DISTINCT " + column + " FROM " + table)
    else:
        cursor.execute("SELECT DISTINCT " + column + " FROM " + table + " WHERE " + key_column + " = ?", (key,))
    rows = cursor.fetchall()
    return [record[0] for record in rows] if rows is not None else []
#-------------------------------------------------------------------------------------------------------
def ordinal_generator(connection,cursor):
    """
    Generate a unique purchase transaction number: '1' followed by an
    8-digit, zero-padded serial derived from the OrdinalNumber table's
    highest ID. A marker row is inserted each call so numbers never repeat.
    """
    cursor.execute('SELECT MAX(ID) FROM OrdinalNumber')
    highest = cursor.fetchone()[0]
    serial = '00000000' if highest is None else str(highest).zfill(8)
    # Consume one slot so the next call yields a different serial.
    cursor.execute('INSERT INTO OrdinalNumber(num) VALUES (?)', ('a',))
    connection.commit()
    return '1' + serial
def ordinal_generator2(connection,cursor):
    """
    Generate a unique sale transaction prefix: '2' followed by a 6-digit,
    zero-padded serial from the OrdinalNumberS table's highest ID.
    A marker row is inserted each call so prefixes never repeat.
    """
    cursor.execute('SELECT MAX(ID) FROM OrdinalNumberS')
    highest = cursor.fetchone()[0]
    serial = '000000' if highest is None else str(highest).zfill(6)
    cursor.execute('INSERT INTO OrdinalNumberS(num) VALUES (?)', ('a',))
    connection.commit()
    return '2' + serial
def update_all(cursor,code,groupx,cost,price,name,category):
    """Refresh the descriptive fields (name, price, cost, category) of one
    (code, groupx) inventory row; the caller commits."""
    fields = (name, price, cost, category, code, groupx)
    cursor.execute('UPDATE Inventory SET name = ?,priceUniSug = ?,costUni = ?,category = ? WHERE code = ? AND groupx = ?', fields)
def increase_stock(cursor,code,groupx,quantity):
    """Add `quantity` units to the (code, groupx) item's stock; returns True.
    The caller commits. Assumes the row exists (fetchone()[0])."""
    key = (code, groupx)
    cursor.execute('SELECT avail FROM Inventory WHERE code = ? AND groupx = ?', key)
    current = int(cursor.fetchone()[0])
    cursor.execute('UPDATE Inventory SET avail = ? WHERE code = ? AND groupx = ?', (current + quantity, code, groupx))
    return True
def decrease_stock(cursor,code,groupx,quant):
    """Subtract `quant` units from the (code, groupx) item's stock; returns
    True. The caller commits. Assumes the row exists (fetchone()[0])."""
    key = (code, groupx)
    cursor.execute('SELECT avail FROM Inventory WHERE code = ? AND groupx = ?', key)
    current = int(cursor.fetchone()[0])
    cursor.execute('UPDATE Inventory SET avail = ? WHERE code = ? AND groupx = ?', (current - quant, code, groupx))
    return True
def print_(cursor,table):
    """Debug helper: dump every row of `table` to stdout; returns True.
    `table` is concatenated into the SQL — trusted identifiers only."""
    cursor.execute('SELECT * FROM ' + table)
    for record in cursor.fetchall():
        print(record)
    return True
|
{"/master_admin.py": ["/gui_inventory.py", "/gui_purchase.py", "/gui_sale.py", "/gui_client.py", "/gui_move.py", "/mec_inventory.py"], "/main_login.py": ["/gui_login.py", "/master_admin.py", "/mec_login.py"]}
|
1,764
|
Redent0r/Libra
|
refs/heads/master
|
/gui_sale.py
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'gui_venta.ui'
#
# Created by: PyQt4 UI code generator 4.12.1
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
# PyQt4 API-version compatibility shims (auto-generated by pyuic4):
# QString.fromUtf8 and UnicodeUTF8 only exist under API v1; under API v2
# (or when absent) fall back to identity / the two-argument translate().
try:
    _fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
    def _fromUtf8(s):
        return s
try:
    _encoding = QtGui.QApplication.UnicodeUTF8
    def _translate(context, text, disambig):
        return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
    def _translate(context, text, disambig):
        return QtGui.QApplication.translate(context, text, disambig)
class Ui_Dialog(object):
def setupUi(self, Dialog):
Dialog.setObjectName(_fromUtf8("Dialog"))
Dialog.setWindowModality(QtCore.Qt.WindowModal)
Dialog.resize(1311, 488)
Dialog.setMinimumSize(QtCore.QSize(750, 488))
Dialog.setMaximumSize(QtCore.QSize(16777215, 488))
icon = QtGui.QIcon()
icon.addPixmap(QtGui.QPixmap(_fromUtf8(":/icons/resources/product_basket-512.png")), QtGui.QIcon.Normal, QtGui.QIcon.Off)
Dialog.setWindowIcon(icon)
Dialog.setModal(True)
self.horizontalLayout = QtGui.QHBoxLayout(Dialog)
self.horizontalLayout.setContentsMargins(9, -1, -1, -1)
self.horizontalLayout.setSpacing(8)
self.horizontalLayout.setObjectName(_fromUtf8("horizontalLayout"))
self.groupBox_2 = QtGui.QGroupBox(Dialog)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Preferred, QtGui.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.groupBox_2.sizePolicy().hasHeightForWidth())
self.groupBox_2.setSizePolicy(sizePolicy)
self.groupBox_2.setObjectName(_fromUtf8("groupBox_2"))
self.verticalLayout_2 = QtGui.QVBoxLayout(self.groupBox_2)
self.verticalLayout_2.setObjectName(_fromUtf8("verticalLayout_2"))
self.horizontalLayout_4 = QtGui.QHBoxLayout()
self.horizontalLayout_4.setContentsMargins(-1, 0, -1, -1)
self.horizontalLayout_4.setObjectName(_fromUtf8("horizontalLayout_4"))
self.cmboxInventory = QtGui.QComboBox(self.groupBox_2)
self.cmboxInventory.setObjectName(_fromUtf8("cmboxInventory"))
self.horizontalLayout_4.addWidget(self.cmboxInventory)
self.leditInventory = QtGui.QLineEdit(self.groupBox_2)
self.leditInventory.setObjectName(_fromUtf8("leditInventory"))
self.horizontalLayout_4.addWidget(self.leditInventory)
spacerItem = QtGui.QSpacerItem(40, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
self.horizontalLayout_4.addItem(spacerItem)
self.verticalLayout_2.addLayout(self.horizontalLayout_4)
self.tblInventory = QtGui.QTableView(self.groupBox_2)
self.tblInventory.setMinimumSize(QtCore.QSize(280, 0))
self.tblInventory.setAlternatingRowColors(True)
self.tblInventory.setSelectionMode(QtGui.QAbstractItemView.SingleSelection)
self.tblInventory.setSelectionBehavior(QtGui.QAbstractItemView.SelectRows)
self.tblInventory.setSortingEnabled(True)
self.tblInventory.setObjectName(_fromUtf8("tblInventory"))
self.tblInventory.horizontalHeader().setStretchLastSection(True)
self.tblInventory.verticalHeader().setVisible(False)
self.verticalLayout_2.addWidget(self.tblInventory)
self.horizontalLayout.addWidget(self.groupBox_2)
self.verticalLayout = QtGui.QVBoxLayout()
self.verticalLayout.setObjectName(_fromUtf8("verticalLayout"))
self.formLayout_2 = QtGui.QFormLayout()
self.formLayout_2.setFieldGrowthPolicy(QtGui.QFormLayout.AllNonFixedFieldsGrow)
self.formLayout_2.setObjectName(_fromUtf8("formLayout_2"))
self.label_11 = QtGui.QLabel(Dialog)
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.label_11.setFont(font)
self.label_11.setObjectName(_fromUtf8("label_11"))
self.formLayout_2.setWidget(0, QtGui.QFormLayout.LabelRole, self.label_11)
self.label = QtGui.QLabel(Dialog)
self.label.setObjectName(_fromUtf8("label"))
self.formLayout_2.setWidget(1, QtGui.QFormLayout.LabelRole, self.label)
self.leditCode = QtGui.QLineEdit(Dialog)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.leditCode.sizePolicy().hasHeightForWidth())
self.leditCode.setSizePolicy(sizePolicy)
self.leditCode.setReadOnly(True)
self.leditCode.setPlaceholderText(_fromUtf8(""))
self.leditCode.setObjectName(_fromUtf8("leditCode"))
self.formLayout_2.setWidget(1, QtGui.QFormLayout.FieldRole, self.leditCode)
self.label_10 = QtGui.QLabel(Dialog)
self.label_10.setObjectName(_fromUtf8("label_10"))
self.formLayout_2.setWidget(2, QtGui.QFormLayout.LabelRole, self.label_10)
self.leditName = QtGui.QLineEdit(Dialog)
self.leditName.setReadOnly(True)
self.leditName.setObjectName(_fromUtf8("leditName"))
self.formLayout_2.setWidget(2, QtGui.QFormLayout.FieldRole, self.leditName)
self.label_6 = QtGui.QLabel(Dialog)
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.label_6.setFont(font)
self.label_6.setObjectName(_fromUtf8("label_6"))
self.formLayout_2.setWidget(5, QtGui.QFormLayout.LabelRole, self.label_6)
self.label_2 = QtGui.QLabel(Dialog)
self.label_2.setObjectName(_fromUtf8("label_2"))
self.formLayout_2.setWidget(6, QtGui.QFormLayout.LabelRole, self.label_2)
self.spnBoxQuantity = QtGui.QSpinBox(Dialog)
self.spnBoxQuantity.setAccelerated(True)
self.spnBoxQuantity.setKeyboardTracking(False)
self.spnBoxQuantity.setMinimum(1)
self.spnBoxQuantity.setMaximum(999999)
self.spnBoxQuantity.setProperty("value", 1)
self.spnBoxQuantity.setObjectName(_fromUtf8("spnBoxQuantity"))
self.formLayout_2.setWidget(6, QtGui.QFormLayout.FieldRole, self.spnBoxQuantity)
self.label_3 = QtGui.QLabel(Dialog)
self.label_3.setObjectName(_fromUtf8("label_3"))
self.formLayout_2.setWidget(7, QtGui.QFormLayout.LabelRole, self.label_3)
self.label_8 = QtGui.QLabel(Dialog)
self.label_8.setObjectName(_fromUtf8("label_8"))
self.formLayout_2.setWidget(8, QtGui.QFormLayout.LabelRole, self.label_8)
self.chkBoxItbms = QtGui.QCheckBox(Dialog)
self.chkBoxItbms.setChecked(True)
self.chkBoxItbms.setTristate(False)
self.chkBoxItbms.setObjectName(_fromUtf8("chkBoxItbms"))
self.formLayout_2.setWidget(9, QtGui.QFormLayout.LabelRole, self.chkBoxItbms)
self.label_9 = QtGui.QLabel(Dialog)
font = QtGui.QFont()
font.setBold(True)
font.setWeight(75)
self.label_9.setFont(font)
self.label_9.setObjectName(_fromUtf8("label_9"))
self.formLayout_2.setWidget(10, QtGui.QFormLayout.LabelRole, self.label_9)
self.spnBoxTotalItemPrice = QtGui.QDoubleSpinBox(Dialog)
self.spnBoxTotalItemPrice.setReadOnly(True)
self.spnBoxTotalItemPrice.setButtonSymbols(QtGui.QAbstractSpinBox.NoButtons)
self.spnBoxTotalItemPrice.setSuffix(_fromUtf8(""))
self.spnBoxTotalItemPrice.setMaximum(999999.0)
self.spnBoxTotalItemPrice.setObjectName(_fromUtf8("spnBoxTotalItemPrice"))
self.formLayout_2.setWidget(10, QtGui.QFormLayout.FieldRole, self.spnBoxTotalItemPrice)
self.label_13 = QtGui.QLabel(Dialog)
self.label_13.setObjectName(_fromUtf8("label_13"))
self.formLayout_2.setWidget(4, QtGui.QFormLayout.LabelRole, self.label_13)
self.cmboxClient = QtGui.QComboBox(Dialog)
self.cmboxClient.setEditable(True)
self.cmboxClient.setObjectName(_fromUtf8("cmboxClient"))
self.formLayout_2.setWidget(0, QtGui.QFormLayout.FieldRole, self.cmboxClient)
self.chkBoxCredit = QtGui.QCheckBox(Dialog)
self.chkBoxCredit.setObjectName(_fromUtf8("chkBoxCredit"))
self.formLayout_2.setWidget(9, QtGui.QFormLayout.FieldRole, self.chkBoxCredit)
self.spnboxCost = QtGui.QDoubleSpinBox(Dialog)
self.spnboxCost.setAlignment(QtCore.Qt.AlignLeading|QtCore.Qt.AlignLeft|QtCore.Qt.AlignVCenter)
self.spnboxCost.setButtonSymbols(QtGui.QAbstractSpinBox.NoButtons)
self.spnboxCost.setDecimals(2)
self.spnboxCost.setMaximum(99999.0)
self.spnboxCost.setObjectName(_fromUtf8("spnboxCost"))
self.formLayout_2.setWidget(4, QtGui.QFormLayout.FieldRole, self.spnboxCost)
self.spnboxPrice = QtGui.QDoubleSpinBox(Dialog)
self.spnboxPrice.setButtonSymbols(QtGui.QAbstractSpinBox.NoButtons)
self.spnboxPrice.setMaximum(9999999.0)
self.spnboxPrice.setObjectName(_fromUtf8("spnboxPrice"))
self.formLayout_2.setWidget(5, QtGui.QFormLayout.FieldRole, self.spnboxPrice)
self.spnBoxMargin = QtGui.QDoubleSpinBox(Dialog)
self.spnBoxMargin.setButtonSymbols(QtGui.QAbstractSpinBox.NoButtons)
self.spnBoxMargin.setMaximum(999999.0)
self.spnBoxMargin.setObjectName(_fromUtf8("spnBoxMargin"))
self.formLayout_2.setWidget(7, QtGui.QFormLayout.FieldRole, self.spnBoxMargin)
self.spnboxDiscount = QtGui.QDoubleSpinBox(Dialog)
self.spnboxDiscount.setButtonSymbols(QtGui.QAbstractSpinBox.NoButtons)
self.spnboxDiscount.setMaximum(99999.0)
self.spnboxDiscount.setObjectName(_fromUtf8("spnboxDiscount"))
self.formLayout_2.setWidget(8, QtGui.QFormLayout.FieldRole, self.spnboxDiscount)
self.label_14 = QtGui.QLabel(Dialog)
self.label_14.setObjectName(_fromUtf8("label_14"))
self.formLayout_2.setWidget(3, QtGui.QFormLayout.LabelRole, self.label_14)
self.leditGroup = QtGui.QLineEdit(Dialog)
self.leditGroup.setReadOnly(True)
self.leditGroup.setObjectName(_fromUtf8("leditGroup"))
self.formLayout_2.setWidget(3, QtGui.QFormLayout.FieldRole, self.leditGroup)
self.verticalLayout.addLayout(self.formLayout_2)
self.horizontalLayout_3 = QtGui.QHBoxLayout()
self.horizontalLayout_3.setObjectName(_fromUtf8("horizontalLayout_3"))
self.btnInsert = QtGui.QPushButton(Dialog)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.btnInsert.sizePolicy().hasHeightForWidth())
self.btnInsert.setSizePolicy(sizePolicy)
self.btnInsert.setAutoDefault(False)
self.btnInsert.setDefault(False)
self.btnInsert.setObjectName(_fromUtf8("btnInsert"))
self.horizontalLayout_3.addWidget(self.btnInsert)
self.btnUndo = QtGui.QPushButton(Dialog)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.btnUndo.sizePolicy().hasHeightForWidth())
self.btnUndo.setSizePolicy(sizePolicy)
self.btnUndo.setAutoDefault(False)
self.btnUndo.setObjectName(_fromUtf8("btnUndo"))
self.horizontalLayout_3.addWidget(self.btnUndo)
self.verticalLayout.addLayout(self.horizontalLayout_3)
self.formLayout = QtGui.QFormLayout()
self.formLayout.setObjectName(_fromUtf8("formLayout"))
self.label_4 = QtGui.QLabel(Dialog)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Fixed, QtGui.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.label_4.sizePolicy().hasHeightForWidth())
self.label_4.setSizePolicy(sizePolicy)
self.label_4.setObjectName(_fromUtf8("label_4"))
self.formLayout.setWidget(0, QtGui.QFormLayout.LabelRole, self.label_4)
self.spnBoxSubtotal = QtGui.QDoubleSpinBox(Dialog)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.spnBoxSubtotal.sizePolicy().hasHeightForWidth())
self.spnBoxSubtotal.setSizePolicy(sizePolicy)
self.spnBoxSubtotal.setStyleSheet(_fromUtf8(""))
self.spnBoxSubtotal.setWrapping(False)
self.spnBoxSubtotal.setReadOnly(True)
self.spnBoxSubtotal.setButtonSymbols(QtGui.QAbstractSpinBox.NoButtons)
self.spnBoxSubtotal.setMaximum(99999.99)
self.spnBoxSubtotal.setObjectName(_fromUtf8("spnBoxSubtotal"))
self.formLayout.setWidget(0, QtGui.QFormLayout.FieldRole, self.spnBoxSubtotal)
self.label_7 = QtGui.QLabel(Dialog)
self.label_7.setObjectName(_fromUtf8("label_7"))
self.formLayout.setWidget(1, QtGui.QFormLayout.LabelRole, self.label_7)
self.spnBoxTaxT = QtGui.QDoubleSpinBox(Dialog)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.spnBoxTaxT.sizePolicy().hasHeightForWidth())
self.spnBoxTaxT.setSizePolicy(sizePolicy)
self.spnBoxTaxT.setReadOnly(True)
self.spnBoxTaxT.setButtonSymbols(QtGui.QAbstractSpinBox.NoButtons)
self.spnBoxTaxT.setMaximum(999999.0)
self.spnBoxTaxT.setObjectName(_fromUtf8("spnBoxTaxT"))
self.formLayout.setWidget(1, QtGui.QFormLayout.FieldRole, self.spnBoxTaxT)
self.label_12 = QtGui.QLabel(Dialog)
self.label_12.setObjectName(_fromUtf8("label_12"))
self.formLayout.setWidget(3, QtGui.QFormLayout.LabelRole, self.label_12)
self.spnBoxDiscountT = QtGui.QDoubleSpinBox(Dialog)
self.spnBoxDiscountT.setReadOnly(True)
self.spnBoxDiscountT.setButtonSymbols(QtGui.QAbstractSpinBox.NoButtons)
self.spnBoxDiscountT.setMaximum(99999.0)
self.spnBoxDiscountT.setObjectName(_fromUtf8("spnBoxDiscountT"))
self.formLayout.setWidget(3, QtGui.QFormLayout.FieldRole, self.spnBoxDiscountT)
self.verticalLayout.addLayout(self.formLayout)
self.line = QtGui.QFrame(Dialog)
self.line.setFrameShape(QtGui.QFrame.HLine)
self.line.setFrameShadow(QtGui.QFrame.Sunken)
self.line.setObjectName(_fromUtf8("line"))
self.verticalLayout.addWidget(self.line)
self.formLayout_3 = QtGui.QFormLayout()
self.formLayout_3.setObjectName(_fromUtf8("formLayout_3"))
self.label_5 = QtGui.QLabel(Dialog)
font = QtGui.QFont()
font.setPointSize(11)
font.setBold(True)
font.setWeight(75)
self.label_5.setFont(font)
self.label_5.setFrameShape(QtGui.QFrame.NoFrame)
self.label_5.setFrameShadow(QtGui.QFrame.Plain)
self.label_5.setScaledContents(False)
self.label_5.setWordWrap(False)
self.label_5.setObjectName(_fromUtf8("label_5"))
self.formLayout_3.setWidget(0, QtGui.QFormLayout.LabelRole, self.label_5)
self.spnBoxGrandTotal = QtGui.QDoubleSpinBox(Dialog)
font = QtGui.QFont()
font.setPointSize(11)
font.setBold(True)
font.setWeight(75)
font.setStrikeOut(False)
self.spnBoxGrandTotal.setFont(font)
self.spnBoxGrandTotal.setAutoFillBackground(False)
self.spnBoxGrandTotal.setStyleSheet(_fromUtf8(""))
self.spnBoxGrandTotal.setFrame(True)
self.spnBoxGrandTotal.setReadOnly(True)
self.spnBoxGrandTotal.setButtonSymbols(QtGui.QAbstractSpinBox.NoButtons)
self.spnBoxGrandTotal.setMaximum(999999.0)
self.spnBoxGrandTotal.setObjectName(_fromUtf8("spnBoxGrandTotal"))
self.formLayout_3.setWidget(0, QtGui.QFormLayout.FieldRole, self.spnBoxGrandTotal)
self.verticalLayout.addLayout(self.formLayout_3)
self.horizontalLayout_2 = QtGui.QHBoxLayout()
self.horizontalLayout_2.setObjectName(_fromUtf8("horizontalLayout_2"))
self.btnConfirm = QtGui.QPushButton(Dialog)
self.btnConfirm.setAutoDefault(False)
self.btnConfirm.setObjectName(_fromUtf8("btnConfirm"))
self.horizontalLayout_2.addWidget(self.btnConfirm)
self.btnDelete = QtGui.QPushButton(Dialog)
self.btnDelete.setAutoDefault(False)
self.btnDelete.setObjectName(_fromUtf8("btnDelete"))
self.horizontalLayout_2.addWidget(self.btnDelete)
self.verticalLayout.addLayout(self.horizontalLayout_2)
self.horizontalLayout.addLayout(self.verticalLayout)
self.groupBox = QtGui.QGroupBox(Dialog)
self.groupBox.setObjectName(_fromUtf8("groupBox"))
self.gridLayout = QtGui.QGridLayout(self.groupBox)
self.gridLayout.setObjectName(_fromUtf8("gridLayout"))
self.tblItems = QtGui.QTableView(self.groupBox)
self.tblItems.setAlternatingRowColors(True)
self.tblItems.setSelectionMode(QtGui.QAbstractItemView.SingleSelection)
self.tblItems.setSelectionBehavior(QtGui.QAbstractItemView.SelectRows)
self.tblItems.setSortingEnabled(False)
self.tblItems.setCornerButtonEnabled(False)
self.tblItems.setObjectName(_fromUtf8("tblItems"))
self.tblItems.horizontalHeader().setStretchLastSection(True)
self.gridLayout.addWidget(self.tblItems, 0, 1, 1, 1)
self.horizontalLayout.addWidget(self.groupBox)
self.horizontalLayout.setStretch(0, 1)
self.horizontalLayout.setStretch(2, 1)
self.retranslateUi(Dialog)
QtCore.QMetaObject.connectSlotsByName(Dialog)
def retranslateUi(self, Dialog):
    """Apply every translatable UI string to its widget.

    Generated-UI convention: called from setupUi and again whenever the
    application language changes. Strings are routed through _translate
    so Qt's translation machinery can substitute localized text.
    """
    Dialog.setWindowTitle(_translate("Dialog", "Sale", None))
    # (setter, source text) pairs, preserved in the original call order.
    _strings = (
        (self.groupBox_2.setTitle, "Inventory"),
        (self.leditInventory.setPlaceholderText, "Search..."),
        (self.label_11.setText, "Client:"),
        (self.label.setText, "Code:"),
        (self.leditCode.setToolTip, "Press Enter to \nsearch item by code"),
        (self.leditCode.setWhatsThis, "Insert the code item here\n"),
        (self.label_10.setText, "Name:"),
        (self.label_6.setText, "Item Price:"),
        (self.label_2.setText, "Quantity:"),
        (self.label_3.setText, "Margin:"),
        (self.label_8.setText, "Discount:"),
        (self.chkBoxItbms.setText, "Include Tax"),
        (self.label_9.setText, "Total Item Price:"),
        (self.spnBoxTotalItemPrice.setPrefix, "$ "),
        (self.label_13.setText, "Cost:"),
        (self.chkBoxCredit.setText, "Credit"),
        (self.spnboxCost.setPrefix, "$ "),
        (self.spnboxPrice.setPrefix, "$ "),
        (self.spnBoxMargin.setPrefix, "% "),
        (self.spnboxDiscount.setPrefix, "% "),
        (self.label_14.setText, "Group:"),
        (self.btnInsert.setText, "Insert"),
        (self.btnUndo.setText, "Undo"),
        (self.label_4.setText, "Subtotal:"),
        (self.spnBoxSubtotal.setPrefix, "$ "),
        (self.label_7.setText, "Sales Tax:"),
        (self.spnBoxTaxT.setToolTip, "7.00%"),
        (self.spnBoxTaxT.setPrefix, "$ "),
        (self.label_12.setText, "Discount:"),
        (self.spnBoxDiscountT.setPrefix, "$ "),
        (self.label_5.setText, "Grand Total:"),
        (self.spnBoxGrandTotal.setToolTip, "SubTotal + \nITBMS (7.00%)"),
        (self.spnBoxGrandTotal.setPrefix, "$ "),
        (self.btnConfirm.setText, "Confirm"),
        (self.btnDelete.setText, "Delete Entry"),
        (self.groupBox.setTitle, "Items"),
    )
    for setter, text in _strings:
        setter(_translate("Dialog", text, None))
import res_rc
|
{"/master_admin.py": ["/gui_inventory.py", "/gui_purchase.py", "/gui_sale.py", "/gui_client.py", "/gui_move.py", "/mec_inventory.py"], "/main_login.py": ["/gui_login.py", "/master_admin.py", "/mec_login.py"]}
|
1,765
|
sudheermouni/NeckTie
|
refs/heads/main
|
/Necktie/necktieapp/models/patent_doctorTb.py
|
from django.db import models
from .doctors import Doctors
from .patients import Patient
class PatentDoctorTb(models.Model):
    """Explicit through table linking Patient and Doctors.

    Kept as a custom model (rather than Django's implicit M2M table) so
    extra relationship fields can be added here later, per the original
    author's note.
    """
    # Both sides are mandatory and cascade-delete with their parent row.
    doctor = models.ForeignKey(Doctors, blank=False, null=False, on_delete=models.CASCADE)
    patient = models.ForeignKey(Patient, blank=False, null=False, on_delete=models.CASCADE)
|
{"/Necktie/necktieapp/models/patent_doctorTb.py": ["/Necktie/necktieapp/models/doctors.py", "/Necktie/necktieapp/models/patients.py"], "/Necktie/necktieapp/views/__init__.py": ["/Necktie/necktieapp/views/doctor_view.py", "/Necktie/necktieapp/views/patient_view.py"], "/Necktie/necktieapp/serializers/__init__.py": ["/Necktie/necktieapp/serializers/doctor_serializer.py"], "/Necktie/necktieapp/models/patients.py": ["/Necktie/necktieapp/models/doctors.py"], "/Necktie/necktieapp/admin.py": ["/Necktie/necktieapp/models/__init__.py"], "/Necktie/necktieapp/models/__init__.py": ["/Necktie/necktieapp/models/doctors.py", "/Necktie/necktieapp/models/patients.py", "/Necktie/necktieapp/models/patent_doctorTb.py"]}
|
1,766
|
sudheermouni/NeckTie
|
refs/heads/main
|
/Necktie/necktieapp/migrations/0006_alter_patient_doctor.py
|
# Generated by Django 3.2.8 on 2021-10-28 06:10
from django.db import migrations, models
class Migration(migrations.Migration):
    """Alter Patient.doctor to allow blank/null on the M2M declaration.

    NOTE(review): null=True has no database effect on a ManyToManyField
    (Django flags it as fields.W340); the operation is preserved verbatim
    because applied migrations are historical records.
    """

    dependencies = [
        ('necktieapp', '0005_auto_20211028_1129'),
    ]

    operations = [
        migrations.AlterField(
            model_name='patient',
            name='doctor',
            field=models.ManyToManyField(blank=True, null=True, through='necktieapp.PatentDoctorTb', to='necktieapp.Doctors'),
        ),
    ]
|
{"/Necktie/necktieapp/models/patent_doctorTb.py": ["/Necktie/necktieapp/models/doctors.py", "/Necktie/necktieapp/models/patients.py"], "/Necktie/necktieapp/views/__init__.py": ["/Necktie/necktieapp/views/doctor_view.py", "/Necktie/necktieapp/views/patient_view.py"], "/Necktie/necktieapp/serializers/__init__.py": ["/Necktie/necktieapp/serializers/doctor_serializer.py"], "/Necktie/necktieapp/models/patients.py": ["/Necktie/necktieapp/models/doctors.py"], "/Necktie/necktieapp/admin.py": ["/Necktie/necktieapp/models/__init__.py"], "/Necktie/necktieapp/models/__init__.py": ["/Necktie/necktieapp/models/doctors.py", "/Necktie/necktieapp/models/patients.py", "/Necktie/necktieapp/models/patent_doctorTb.py"]}
|
1,767
|
sudheermouni/NeckTie
|
refs/heads/main
|
/Necktie/necktieapp/migrations/0003_patient.py
|
# Generated by Django 3.2.8 on 2021-10-27 16:40
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Create the Patient table with a direct FK to Doctors.

    This FK is replaced by a many-to-many through table in migration
    0004_auto_20211027_2226.
    """

    dependencies = [
        ('necktieapp', '0002_alter_doctors_d_phone'),
    ]

    operations = [
        migrations.CreateModel(
            name='Patient',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('p_surname', models.CharField(blank=True, max_length=20, null=True)),
                ('p_fullname', models.CharField(blank=True, max_length=20, null=True)),
                ('p_username', models.CharField(max_length=40)),
                ('p_phone', models.CharField(blank=True, max_length=10, null=True)),
                ('p_country', models.CharField(blank=True, max_length=50, null=True)),
                ('p_state', models.CharField(blank=True, max_length=50, null=True)),
                # One doctor per patient at this point in the schema history.
                ('doctor', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='necktieapp.doctors')),
            ],
        ),
    ]
|
{"/Necktie/necktieapp/models/patent_doctorTb.py": ["/Necktie/necktieapp/models/doctors.py", "/Necktie/necktieapp/models/patients.py"], "/Necktie/necktieapp/views/__init__.py": ["/Necktie/necktieapp/views/doctor_view.py", "/Necktie/necktieapp/views/patient_view.py"], "/Necktie/necktieapp/serializers/__init__.py": ["/Necktie/necktieapp/serializers/doctor_serializer.py"], "/Necktie/necktieapp/models/patients.py": ["/Necktie/necktieapp/models/doctors.py"], "/Necktie/necktieapp/admin.py": ["/Necktie/necktieapp/models/__init__.py"], "/Necktie/necktieapp/models/__init__.py": ["/Necktie/necktieapp/models/doctors.py", "/Necktie/necktieapp/models/patients.py", "/Necktie/necktieapp/models/patent_doctorTb.py"]}
|
1,768
|
sudheermouni/NeckTie
|
refs/heads/main
|
/Necktie/necktieapp/models/doctors.py
|
from django.db import models
from model_utils import Choices
# Closed set of doctor specializations as (stored code, display label) pairs.
# `Choices` comes from django-model-utils and is used directly as the
# `choices=` argument on Doctors.d_specialization.
SPECIALIZATIONS = Choices(
    ("CD", "Cardiology"),
    ("GS", "General Surgery"),
    ("EC", "Endocrinology"),
    ("NT", "Neonatology"),
)
class Doctors(models.Model):
    """A doctor profile.

    Usernames are unique; d_specialization stores one of the two-letter
    codes from SPECIALIZATIONS.
    """
    d_surname = models.CharField(max_length=20, blank=True, null=True)
    d_firstname = models.CharField(max_length=20, blank=True, null=True)
    d_username = models.CharField(max_length=40, blank=False, null=False, unique=True)
    # Phone kept as text (preserves leading zeros; originally an IntegerField,
    # changed in migration 0002).
    d_phone = models.CharField(max_length=10, blank=True, null=True)
    d_address = models.TextField(blank=True, null=True)
    d_country = models.CharField(max_length=30)
    d_specialization = models.CharField(
        choices=SPECIALIZATIONS,
        max_length=4,
        blank=False,
        null=False,
    )
    d_pincode = models.IntegerField()

    def __str__(self):
        # Admin/shell display: the unique username.
        return self.d_username
|
{"/Necktie/necktieapp/models/patent_doctorTb.py": ["/Necktie/necktieapp/models/doctors.py", "/Necktie/necktieapp/models/patients.py"], "/Necktie/necktieapp/views/__init__.py": ["/Necktie/necktieapp/views/doctor_view.py", "/Necktie/necktieapp/views/patient_view.py"], "/Necktie/necktieapp/serializers/__init__.py": ["/Necktie/necktieapp/serializers/doctor_serializer.py"], "/Necktie/necktieapp/models/patients.py": ["/Necktie/necktieapp/models/doctors.py"], "/Necktie/necktieapp/admin.py": ["/Necktie/necktieapp/models/__init__.py"], "/Necktie/necktieapp/models/__init__.py": ["/Necktie/necktieapp/models/doctors.py", "/Necktie/necktieapp/models/patients.py", "/Necktie/necktieapp/models/patent_doctorTb.py"]}
|
1,769
|
sudheermouni/NeckTie
|
refs/heads/main
|
/Necktie/necktieapp/views/__init__.py
|
from .doctor_view import DoctorViewset # noqa: F401
from .patient_view import PatientViewset # noqa: F401
|
{"/Necktie/necktieapp/models/patent_doctorTb.py": ["/Necktie/necktieapp/models/doctors.py", "/Necktie/necktieapp/models/patients.py"], "/Necktie/necktieapp/views/__init__.py": ["/Necktie/necktieapp/views/doctor_view.py", "/Necktie/necktieapp/views/patient_view.py"], "/Necktie/necktieapp/serializers/__init__.py": ["/Necktie/necktieapp/serializers/doctor_serializer.py"], "/Necktie/necktieapp/models/patients.py": ["/Necktie/necktieapp/models/doctors.py"], "/Necktie/necktieapp/admin.py": ["/Necktie/necktieapp/models/__init__.py"], "/Necktie/necktieapp/models/__init__.py": ["/Necktie/necktieapp/models/doctors.py", "/Necktie/necktieapp/models/patients.py", "/Necktie/necktieapp/models/patent_doctorTb.py"]}
|
1,770
|
sudheermouni/NeckTie
|
refs/heads/main
|
/Necktie/necktieapp/migrations/0004_auto_20211027_2226.py
|
# Generated by Django 3.2.8 on 2021-10-27 16:56
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Convert Patient.doctor from a single FK to a many-to-many.

    Drops the FK column, creates the explicit PatentDoctorTb through
    table, then re-adds `doctor` as an M2M routed through it.
    """

    dependencies = [
        ('necktieapp', '0003_patient'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='patient',
            name='doctor',
        ),
        migrations.CreateModel(
            name='PatentDoctorTb',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('doctor', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='necktieapp.doctors')),
                ('patient', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='necktieapp.patient')),
            ],
        ),
        migrations.AddField(
            model_name='patient',
            name='doctor',
            field=models.ManyToManyField(through='necktieapp.PatentDoctorTb', to='necktieapp.Doctors'),
        ),
    ]
|
{"/Necktie/necktieapp/models/patent_doctorTb.py": ["/Necktie/necktieapp/models/doctors.py", "/Necktie/necktieapp/models/patients.py"], "/Necktie/necktieapp/views/__init__.py": ["/Necktie/necktieapp/views/doctor_view.py", "/Necktie/necktieapp/views/patient_view.py"], "/Necktie/necktieapp/serializers/__init__.py": ["/Necktie/necktieapp/serializers/doctor_serializer.py"], "/Necktie/necktieapp/models/patients.py": ["/Necktie/necktieapp/models/doctors.py"], "/Necktie/necktieapp/admin.py": ["/Necktie/necktieapp/models/__init__.py"], "/Necktie/necktieapp/models/__init__.py": ["/Necktie/necktieapp/models/doctors.py", "/Necktie/necktieapp/models/patients.py", "/Necktie/necktieapp/models/patent_doctorTb.py"]}
|
1,771
|
sudheermouni/NeckTie
|
refs/heads/main
|
/Necktie/necktieapp/migrations/0005_auto_20211028_1129.py
|
# Generated by Django 3.2.8 on 2021-10-28 05:59
from django.db import migrations
class Migration(migrations.Migration):
    """Rename two mis-named Doctors columns from the initial migration:
    d_state -> d_specialization and d_surnam -> d_surname."""

    dependencies = [
        ('necktieapp', '0004_auto_20211027_2226'),
    ]

    operations = [
        migrations.RenameField(
            model_name='doctors',
            old_name='d_state',
            new_name='d_specialization',
        ),
        migrations.RenameField(
            model_name='doctors',
            old_name='d_surnam',
            new_name='d_surname',
        ),
    ]
|
{"/Necktie/necktieapp/models/patent_doctorTb.py": ["/Necktie/necktieapp/models/doctors.py", "/Necktie/necktieapp/models/patients.py"], "/Necktie/necktieapp/views/__init__.py": ["/Necktie/necktieapp/views/doctor_view.py", "/Necktie/necktieapp/views/patient_view.py"], "/Necktie/necktieapp/serializers/__init__.py": ["/Necktie/necktieapp/serializers/doctor_serializer.py"], "/Necktie/necktieapp/models/patients.py": ["/Necktie/necktieapp/models/doctors.py"], "/Necktie/necktieapp/admin.py": ["/Necktie/necktieapp/models/__init__.py"], "/Necktie/necktieapp/models/__init__.py": ["/Necktie/necktieapp/models/doctors.py", "/Necktie/necktieapp/models/patients.py", "/Necktie/necktieapp/models/patent_doctorTb.py"]}
|
1,772
|
sudheermouni/NeckTie
|
refs/heads/main
|
/Necktie/necktieapp/serializers/__init__.py
|
from .doctor_serializer import DoctorSerializer
from .patient_serializer import PatientSerializer
|
{"/Necktie/necktieapp/models/patent_doctorTb.py": ["/Necktie/necktieapp/models/doctors.py", "/Necktie/necktieapp/models/patients.py"], "/Necktie/necktieapp/views/__init__.py": ["/Necktie/necktieapp/views/doctor_view.py", "/Necktie/necktieapp/views/patient_view.py"], "/Necktie/necktieapp/serializers/__init__.py": ["/Necktie/necktieapp/serializers/doctor_serializer.py"], "/Necktie/necktieapp/models/patients.py": ["/Necktie/necktieapp/models/doctors.py"], "/Necktie/necktieapp/admin.py": ["/Necktie/necktieapp/models/__init__.py"], "/Necktie/necktieapp/models/__init__.py": ["/Necktie/necktieapp/models/doctors.py", "/Necktie/necktieapp/models/patients.py", "/Necktie/necktieapp/models/patent_doctorTb.py"]}
|
1,773
|
sudheermouni/NeckTie
|
refs/heads/main
|
/Necktie/necktieapp/migrations/0002_alter_doctors_d_phone.py
|
# Generated by Django 3.2.8 on 2021-10-27 16:39
from django.db import migrations, models
class Migration(migrations.Migration):
    """Change Doctors.d_phone from IntegerField to CharField(10)
    (phone numbers are identifiers, not arithmetic values)."""

    dependencies = [
        ('necktieapp', '0001_initial'),
    ]

    operations = [
        migrations.AlterField(
            model_name='doctors',
            name='d_phone',
            field=models.CharField(blank=True, max_length=10, null=True),
        ),
    ]
|
{"/Necktie/necktieapp/models/patent_doctorTb.py": ["/Necktie/necktieapp/models/doctors.py", "/Necktie/necktieapp/models/patients.py"], "/Necktie/necktieapp/views/__init__.py": ["/Necktie/necktieapp/views/doctor_view.py", "/Necktie/necktieapp/views/patient_view.py"], "/Necktie/necktieapp/serializers/__init__.py": ["/Necktie/necktieapp/serializers/doctor_serializer.py"], "/Necktie/necktieapp/models/patients.py": ["/Necktie/necktieapp/models/doctors.py"], "/Necktie/necktieapp/admin.py": ["/Necktie/necktieapp/models/__init__.py"], "/Necktie/necktieapp/models/__init__.py": ["/Necktie/necktieapp/models/doctors.py", "/Necktie/necktieapp/models/patients.py", "/Necktie/necktieapp/models/patent_doctorTb.py"]}
|
1,774
|
sudheermouni/NeckTie
|
refs/heads/main
|
/Necktie/necktieapp/models/patients.py
|
from django.db import models
from .doctors import Doctors
class Patient(models.Model):
    """A patient record, linked to doctors through the PatentDoctorTb
    join table."""

    p_surname = models.CharField(max_length=20, blank=True, null=True)
    # Dropped null=True: it has no effect on a ManyToManyField (the relation
    # lives in the through table, not in a nullable column) and triggers
    # Django system-check warning fields.W340. blank=True is kept so forms
    # may still save a patient with no doctors.
    doctor = models.ManyToManyField(Doctors, through="PatentDoctorTb", blank=True)
    p_fullname = models.CharField(max_length=20, blank=True, null=True)
    p_username = models.CharField(max_length=40, blank=False, null=False)
    p_phone = models.CharField(max_length=10, blank=True, null=True)
    p_country = models.CharField(max_length=50, blank=True, null=True)
    p_state = models.CharField(max_length=50, blank=True, null=True)

    def __str__(self):
        # Admin/shell display name.
        return self.p_username
|
{"/Necktie/necktieapp/models/patent_doctorTb.py": ["/Necktie/necktieapp/models/doctors.py", "/Necktie/necktieapp/models/patients.py"], "/Necktie/necktieapp/views/__init__.py": ["/Necktie/necktieapp/views/doctor_view.py", "/Necktie/necktieapp/views/patient_view.py"], "/Necktie/necktieapp/serializers/__init__.py": ["/Necktie/necktieapp/serializers/doctor_serializer.py"], "/Necktie/necktieapp/models/patients.py": ["/Necktie/necktieapp/models/doctors.py"], "/Necktie/necktieapp/admin.py": ["/Necktie/necktieapp/models/__init__.py"], "/Necktie/necktieapp/models/__init__.py": ["/Necktie/necktieapp/models/doctors.py", "/Necktie/necktieapp/models/patients.py", "/Necktie/necktieapp/models/patent_doctorTb.py"]}
|
1,775
|
sudheermouni/NeckTie
|
refs/heads/main
|
/Necktie/necktieapp/views/patient_view.py
|
from django_filters.rest_framework import DjangoFilterBackend
from rest_framework import viewsets, filters
from rest_framework.permissions import IsAuthenticated
from necktieapp.models import Patient
from necktieapp.serializers import PatientSerializer
class PatientViewset(viewsets.ModelViewSet):
    """Authenticated CRUD endpoint for Patient records.

    Supports exact filtering, free-text search and ordering on
    id / p_surname / p_username via the configured DRF backends.
    """
    permission_classes = (IsAuthenticated,)
    queryset = Patient.objects.all()
    serializer_class = PatientSerializer
    filter_backends = [DjangoFilterBackend, filters.SearchFilter, filters.OrderingFilter]
    filterset_fields = ['id', 'p_surname', 'p_username']
    search_fields = ['id', 'p_surname', 'p_username']
    ordering_fields = ['id', 'p_surname', 'p_username']
|
{"/Necktie/necktieapp/models/patent_doctorTb.py": ["/Necktie/necktieapp/models/doctors.py", "/Necktie/necktieapp/models/patients.py"], "/Necktie/necktieapp/views/__init__.py": ["/Necktie/necktieapp/views/doctor_view.py", "/Necktie/necktieapp/views/patient_view.py"], "/Necktie/necktieapp/serializers/__init__.py": ["/Necktie/necktieapp/serializers/doctor_serializer.py"], "/Necktie/necktieapp/models/patients.py": ["/Necktie/necktieapp/models/doctors.py"], "/Necktie/necktieapp/admin.py": ["/Necktie/necktieapp/models/__init__.py"], "/Necktie/necktieapp/models/__init__.py": ["/Necktie/necktieapp/models/doctors.py", "/Necktie/necktieapp/models/patients.py", "/Necktie/necktieapp/models/patent_doctorTb.py"]}
|
1,776
|
sudheermouni/NeckTie
|
refs/heads/main
|
/Necktie/necktieapp/admin.py
|
from django.contrib import admin
from .models import Doctors, Patient, PatentDoctorTb
# Expose every app model on the default admin site.
for _model in (Doctors, Patient, PatentDoctorTb):
    admin.site.register(_model)
|
{"/Necktie/necktieapp/models/patent_doctorTb.py": ["/Necktie/necktieapp/models/doctors.py", "/Necktie/necktieapp/models/patients.py"], "/Necktie/necktieapp/views/__init__.py": ["/Necktie/necktieapp/views/doctor_view.py", "/Necktie/necktieapp/views/patient_view.py"], "/Necktie/necktieapp/serializers/__init__.py": ["/Necktie/necktieapp/serializers/doctor_serializer.py"], "/Necktie/necktieapp/models/patients.py": ["/Necktie/necktieapp/models/doctors.py"], "/Necktie/necktieapp/admin.py": ["/Necktie/necktieapp/models/__init__.py"], "/Necktie/necktieapp/models/__init__.py": ["/Necktie/necktieapp/models/doctors.py", "/Necktie/necktieapp/models/patients.py", "/Necktie/necktieapp/models/patent_doctorTb.py"]}
|
1,777
|
sudheermouni/NeckTie
|
refs/heads/main
|
/Necktie/necktieapp/urls.py
|
from rest_framework.routers import DefaultRouter
from django.conf.urls import url, include
from necktieapp import views
# Wire the DRF viewsets to /v1/<resource> endpoints (no trailing slash).
router = DefaultRouter(trailing_slash=False)
for _prefix, _viewset in (
    (r'doctors', views.DoctorViewset),
    (r'patients', views.PatientViewset),
):
    router.register(_prefix, _viewset)

urlpatterns = [
    url(r'^v1/', include(router.urls)),
]
|
{"/Necktie/necktieapp/models/patent_doctorTb.py": ["/Necktie/necktieapp/models/doctors.py", "/Necktie/necktieapp/models/patients.py"], "/Necktie/necktieapp/views/__init__.py": ["/Necktie/necktieapp/views/doctor_view.py", "/Necktie/necktieapp/views/patient_view.py"], "/Necktie/necktieapp/serializers/__init__.py": ["/Necktie/necktieapp/serializers/doctor_serializer.py"], "/Necktie/necktieapp/models/patients.py": ["/Necktie/necktieapp/models/doctors.py"], "/Necktie/necktieapp/admin.py": ["/Necktie/necktieapp/models/__init__.py"], "/Necktie/necktieapp/models/__init__.py": ["/Necktie/necktieapp/models/doctors.py", "/Necktie/necktieapp/models/patients.py", "/Necktie/necktieapp/models/patent_doctorTb.py"]}
|
1,778
|
sudheermouni/NeckTie
|
refs/heads/main
|
/Necktie/necktieapp/views/doctor_view.py
|
from django_filters.rest_framework import DjangoFilterBackend
from rest_framework import viewsets, filters
from rest_framework.permissions import IsAuthenticated
from necktieapp.models import Doctors
from necktieapp.serializers import DoctorSerializer
class DoctorViewset(viewsets.ModelViewSet):
    """Authenticated CRUD endpoint for Doctors records.

    Supports exact filtering, free-text search and ordering on
    id / d_specialization / d_username via the configured DRF backends.
    """
    permission_classes = (IsAuthenticated,)
    queryset = Doctors.objects.all()
    serializer_class = DoctorSerializer
    filter_backends = [DjangoFilterBackend, filters.SearchFilter, filters.OrderingFilter]
    filterset_fields = ['id', 'd_specialization', 'd_username']
    search_fields = ['id', 'd_specialization', 'd_username']
    ordering_fields = ['id', 'd_specialization', 'd_username']
|
{"/Necktie/necktieapp/models/patent_doctorTb.py": ["/Necktie/necktieapp/models/doctors.py", "/Necktie/necktieapp/models/patients.py"], "/Necktie/necktieapp/views/__init__.py": ["/Necktie/necktieapp/views/doctor_view.py", "/Necktie/necktieapp/views/patient_view.py"], "/Necktie/necktieapp/serializers/__init__.py": ["/Necktie/necktieapp/serializers/doctor_serializer.py"], "/Necktie/necktieapp/models/patients.py": ["/Necktie/necktieapp/models/doctors.py"], "/Necktie/necktieapp/admin.py": ["/Necktie/necktieapp/models/__init__.py"], "/Necktie/necktieapp/models/__init__.py": ["/Necktie/necktieapp/models/doctors.py", "/Necktie/necktieapp/models/patients.py", "/Necktie/necktieapp/models/patent_doctorTb.py"]}
|
1,779
|
sudheermouni/NeckTie
|
refs/heads/main
|
/Necktie/necktieapp/management/commands/bulk_create.py
|
import random
import string
from django.core.management.base import BaseCommand
from django.utils.crypto import get_random_string
from necktieapp.models import Doctors
# Field template for generated Doctors rows.
# NOTE(review): this dict is evaluated once at import time — each
# get_random_string() call runs a single time, so every row built from it
# shares the same surname/firstname/phone/address/country; only d_username
# is re-randomized per row in Command.handle. Also get_random_string()
# defaults to 12 characters, which exceeds d_phone's max_length of 10
# (not enforced by SQLite, but stricter backends would reject it).
sample_data = {
    'd_surname': get_random_string(),
    'd_firstname': get_random_string(),
    'd_username': "",
    'd_phone': get_random_string(),
    'd_address': get_random_string(),
    'd_country': get_random_string(),
    'd_specialization': "CD",
    'd_pincode': 524101,
}
class Command(BaseCommand):
    """Management command: wipe the Doctors table and repopulate it with
    `total` randomly generated rows in a single bulk_create."""

    help = 'Create random doctors'

    def add_arguments(self, parser):
        parser.add_argument('total', type=int, help='Indicates the number of users to be created')

    def handle(self, *args, **kwargs):
        total = kwargs['total']
        # Clear the table first so repeated runs start from a clean slate.
        Doctors.objects.all().delete()
        instances = [Doctors(**self._random_doctor()) for _ in range(total)]
        Doctors.objects.bulk_create(instances)

    @staticmethod
    def _random_doctor():
        """Build one fresh random field dict per doctor.

        The previous module-level template was evaluated once at import
        time, so every generated doctor shared the same surname, name,
        phone, address and country — only the username varied. Building
        the dict per record restores genuinely random rows. Phone is
        capped at the field's max_length of 10 (get_random_string()
        defaults to 12 characters, which would overflow it).
        """
        return {
            'd_surname': get_random_string(12),
            'd_firstname': get_random_string(12),
            'd_username': ''.join(random.choices(string.ascii_uppercase + string.digits, k=8)),
            'd_phone': get_random_string(10, allowed_chars=string.digits),
            'd_address': get_random_string(12),
            'd_country': get_random_string(12),
            'd_specialization': "CD",
            'd_pincode': 524101,
        }
|
{"/Necktie/necktieapp/models/patent_doctorTb.py": ["/Necktie/necktieapp/models/doctors.py", "/Necktie/necktieapp/models/patients.py"], "/Necktie/necktieapp/views/__init__.py": ["/Necktie/necktieapp/views/doctor_view.py", "/Necktie/necktieapp/views/patient_view.py"], "/Necktie/necktieapp/serializers/__init__.py": ["/Necktie/necktieapp/serializers/doctor_serializer.py"], "/Necktie/necktieapp/models/patients.py": ["/Necktie/necktieapp/models/doctors.py"], "/Necktie/necktieapp/admin.py": ["/Necktie/necktieapp/models/__init__.py"], "/Necktie/necktieapp/models/__init__.py": ["/Necktie/necktieapp/models/doctors.py", "/Necktie/necktieapp/models/patients.py", "/Necktie/necktieapp/models/patent_doctorTb.py"]}
|
1,780
|
sudheermouni/NeckTie
|
refs/heads/main
|
/Necktie/necktieapp/tests/test_doctors.py
|
from django.test import TestCase, TransactionTestCase
from necktieapp.models import Doctors
# Baseline valid Doctors payload used by the tests below.
# NOTE(review): module-level and shared across tests — copy it
# (e.g. {**sample_data, ...}) rather than mutating it in place, or test
# outcomes become order-dependent.
sample_data = {
    'd_surname': "sudheer",
    'd_firstname': "mandi",
    'd_username': "smre",
    'd_phone': "7702231789",
    'd_address': "Ramalingapuram",
    'd_country': "India",
    'd_specialization': "CD",
    'd_pincode': 524101,
}
class TestDoctor(TransactionTestCase):
    """CRUD and uniqueness tests for the Doctors model.

    Uses TransactionTestCase so the duplicate-username test can trigger a
    real database integrity error without aborting an enclosing
    transaction, as a plain TestCase would.
    """

    fixtures = ["doctors.json"]

    def test_create_new_record(self):
        model_instance = Doctors.objects.create(**sample_data)
        self.assertIsInstance(model_instance, Doctors)
        self.assertEqual(model_instance.d_username, "smre")

    def test_update_record(self):
        instance = Doctors.objects.get(id=1)
        instance.d_phone = "9177935906"
        instance.save()
        self.assertEqual(instance.d_phone, "9177935906")

    def test_should_not_save_duplicate_username(self):
        before_count = Doctors.objects.count()
        # Copy the template instead of mutating the module-level dict: the
        # original assigned sample_data["d_username"] = "smreddy", which
        # leaked into every test that ran afterwards (order-dependent
        # failures).
        data = {**sample_data, "d_username": "smreddy"}
        try:
            Doctors.objects.create(**data)
        except Exception:
            # Duplicate rejected by the unique constraint: row count unchanged.
            after_count = Doctors.objects.count()
            self.assertEqual(before_count, after_count)
|
{"/Necktie/necktieapp/models/patent_doctorTb.py": ["/Necktie/necktieapp/models/doctors.py", "/Necktie/necktieapp/models/patients.py"], "/Necktie/necktieapp/views/__init__.py": ["/Necktie/necktieapp/views/doctor_view.py", "/Necktie/necktieapp/views/patient_view.py"], "/Necktie/necktieapp/serializers/__init__.py": ["/Necktie/necktieapp/serializers/doctor_serializer.py"], "/Necktie/necktieapp/models/patients.py": ["/Necktie/necktieapp/models/doctors.py"], "/Necktie/necktieapp/admin.py": ["/Necktie/necktieapp/models/__init__.py"], "/Necktie/necktieapp/models/__init__.py": ["/Necktie/necktieapp/models/doctors.py", "/Necktie/necktieapp/models/patients.py", "/Necktie/necktieapp/models/patent_doctorTb.py"]}
|
1,781
|
sudheermouni/NeckTie
|
refs/heads/main
|
/Necktie/necktieapp/serializers/doctor_serializer.py
|
from rest_framework import serializers
from necktieapp.models import Doctors
class DoctorSerializer(serializers.ModelSerializer):
    """Serializes every field of the Doctors model."""

    class Meta:
        model = Doctors
        fields = "__all__"
|
{"/Necktie/necktieapp/models/patent_doctorTb.py": ["/Necktie/necktieapp/models/doctors.py", "/Necktie/necktieapp/models/patients.py"], "/Necktie/necktieapp/views/__init__.py": ["/Necktie/necktieapp/views/doctor_view.py", "/Necktie/necktieapp/views/patient_view.py"], "/Necktie/necktieapp/serializers/__init__.py": ["/Necktie/necktieapp/serializers/doctor_serializer.py"], "/Necktie/necktieapp/models/patients.py": ["/Necktie/necktieapp/models/doctors.py"], "/Necktie/necktieapp/admin.py": ["/Necktie/necktieapp/models/__init__.py"], "/Necktie/necktieapp/models/__init__.py": ["/Necktie/necktieapp/models/doctors.py", "/Necktie/necktieapp/models/patients.py", "/Necktie/necktieapp/models/patent_doctorTb.py"]}
|
1,782
|
sudheermouni/NeckTie
|
refs/heads/main
|
/Necktie/necktieapp/migrations/0001_initial.py
|
# Generated by Django 3.2.8 on 2021-10-27 15:55
from django.db import migrations, models
class Migration(migrations.Migration):
    """Initial migration: creates the ``Doctors`` table."""
    initial = True
    dependencies = [
    ]
    operations = [
        migrations.CreateModel(
            name='Doctors',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('d_surnam', models.CharField(blank=True, max_length=20, null=True)),
                ('d_firstname', models.CharField(blank=True, max_length=20, null=True)),
                ('d_username', models.CharField(max_length=40, unique=True)),
                ('d_phone', models.IntegerField(blank=True, null=True)),
                ('d_address', models.TextField(blank=True, null=True)),
                ('d_country', models.CharField(max_length=30)),
                # NOTE(review): despite the name, 'd_state' stores a medical
                # specialty code (choices below), not a geographic state.
                ('d_state', models.CharField(choices=[('CD', 'Cardiology'), ('GS', 'General Surgery'), ('EC', 'Endocrinology'), ('NT', 'Neonatology')], max_length=4)),
                ('d_pincode', models.IntegerField()),
            ],
        ),
    ]
|
{"/Necktie/necktieapp/models/patent_doctorTb.py": ["/Necktie/necktieapp/models/doctors.py", "/Necktie/necktieapp/models/patients.py"], "/Necktie/necktieapp/views/__init__.py": ["/Necktie/necktieapp/views/doctor_view.py", "/Necktie/necktieapp/views/patient_view.py"], "/Necktie/necktieapp/serializers/__init__.py": ["/Necktie/necktieapp/serializers/doctor_serializer.py"], "/Necktie/necktieapp/models/patients.py": ["/Necktie/necktieapp/models/doctors.py"], "/Necktie/necktieapp/admin.py": ["/Necktie/necktieapp/models/__init__.py"], "/Necktie/necktieapp/models/__init__.py": ["/Necktie/necktieapp/models/doctors.py", "/Necktie/necktieapp/models/patients.py", "/Necktie/necktieapp/models/patent_doctorTb.py"]}
|
1,783
|
sudheermouni/NeckTie
|
refs/heads/main
|
/Necktie/necktieapp/apps.py
|
from django.apps import AppConfig
class NecktieappConfig(AppConfig):
    """Django application configuration for the necktieapp package."""
    default_auto_field = 'django.db.models.BigAutoField'
    name = 'necktieapp'
|
{"/Necktie/necktieapp/models/patent_doctorTb.py": ["/Necktie/necktieapp/models/doctors.py", "/Necktie/necktieapp/models/patients.py"], "/Necktie/necktieapp/views/__init__.py": ["/Necktie/necktieapp/views/doctor_view.py", "/Necktie/necktieapp/views/patient_view.py"], "/Necktie/necktieapp/serializers/__init__.py": ["/Necktie/necktieapp/serializers/doctor_serializer.py"], "/Necktie/necktieapp/models/patients.py": ["/Necktie/necktieapp/models/doctors.py"], "/Necktie/necktieapp/admin.py": ["/Necktie/necktieapp/models/__init__.py"], "/Necktie/necktieapp/models/__init__.py": ["/Necktie/necktieapp/models/doctors.py", "/Necktie/necktieapp/models/patients.py", "/Necktie/necktieapp/models/patent_doctorTb.py"]}
|
1,784
|
sudheermouni/NeckTie
|
refs/heads/main
|
/Necktie/necktieapp/models/__init__.py
|
from .doctors import Doctors
from .patients import Patient
from .patent_doctorTb import PatentDoctorTb
|
{"/Necktie/necktieapp/models/patent_doctorTb.py": ["/Necktie/necktieapp/models/doctors.py", "/Necktie/necktieapp/models/patients.py"], "/Necktie/necktieapp/views/__init__.py": ["/Necktie/necktieapp/views/doctor_view.py", "/Necktie/necktieapp/views/patient_view.py"], "/Necktie/necktieapp/serializers/__init__.py": ["/Necktie/necktieapp/serializers/doctor_serializer.py"], "/Necktie/necktieapp/models/patients.py": ["/Necktie/necktieapp/models/doctors.py"], "/Necktie/necktieapp/admin.py": ["/Necktie/necktieapp/models/__init__.py"], "/Necktie/necktieapp/models/__init__.py": ["/Necktie/necktieapp/models/doctors.py", "/Necktie/necktieapp/models/patients.py", "/Necktie/necktieapp/models/patent_doctorTb.py"]}
|
1,789
|
guanjz20/MM21_FME_solution
|
refs/heads/master
|
/dataset/params.py
|
# Dataset roots (absolute paths on the training machine).
SAMM_ROOT = '/data/gjz_mm21/SAMM'
CASME_2_LABEL_DIR = '/data/gjz_mm21/CASME_2_LongVideoFaceCropped/CASME_2_longVideoFaceCropped/labels'
# kernel path
# Precomputed Gaussian kernels: a 2-D smoothing kernel plus 1-D first/second
# derivative kernels, consumed by dataset/me_dataset.py at import time.
GAUSS_KERNEL_PATH = {
    'sm_kernel': '/home/gjz/lry_kernels/gauss2D-smooth.npy',
    'dr1_kernel': '/home/gjz/lry_kernels/gauss1D-derivative1.npy',
    'dr2_kernel': '/home/gjz/lry_kernels/gauss1D-derivative2.npy'
}
|
{"/utils.py": ["/paths.py", "/dataset/utils.py"], "/trainer_cls.py": ["/utils.py", "/dataset/utils.py", "/dataset/params.py"], "/main_cls.py": ["/config.py", "/model/network.py", "/dataset/me_dataset.py", "/utils.py", "/trainer_cls.py"]}
|
1,790
|
guanjz20/MM21_FME_solution
|
refs/heads/master
|
/utils.py
|
import os
import sys
import cv2
from timm.utils import reduce_tensor
import torch
import shutil
import numpy as np
import os.path as osp
import torch.nn.functional as F
import matplotlib.pyplot as plt
import torch.distributed as dist
from torch.nn.modules import loss
from datetime import datetime
import paths
import dataset.utils as dataset_utils
# Recursion-heavy helpers in this module (e.g. the region-extension logic)
# can exceed the default interpreter limit on very long videos.
sys.setrecursionlimit(10000)
class AverageMeter(object):
    """Track a running mean together with the most recently observed value."""

    def __init__(self):
        self.reset()

    def reset(self):
        """Clear all accumulated statistics."""
        self.val = 0
        self.avg = 0
        self.sum = 0
        self.count = 0

    def update(self, val, n=1):
        """Record *val* observed *n* times and refresh the running mean."""
        self.val = val
        self.sum = self.sum + val * n
        self.count = self.count + n
        self.avg = self.sum / self.count
class Focal_Loss(torch.nn.Module):
    """Multi-class focal loss (Lin et al., "Focal Loss for Dense Object
    Detection"): cross-entropy down-weighted by ``(1 - p_t)**gamma`` and
    scaled by a per-class ``alpha`` weight.

    Fix: the original signature used a mutable default argument
    (``alpha=[]``); replaced with ``alpha=None``. Passing ``[]`` explicitly
    still works, so callers are unaffected.

    Args:
        alpha: per-class weights (len ``num_class``); None/[] -> all ones.
        gamma: focusing exponent; larger values down-weight easy samples.
        num_class: number of classes (used only when alpha is defaulted).
        epsilon: numerical floor inside the log.
    """
    def __init__(self, alpha=None, gamma=2, num_class=2, epsilon=1e-7):
        super().__init__()
        if alpha is None or alpha == []:
            self.alpha = torch.ones(num_class)
        else:
            self.alpha = torch.tensor(alpha, dtype=torch.float32)
        self.gamma = gamma
        self.epsilon = epsilon
    def forward(self, pred, target):
        """Return the mean focal loss for raw logits *pred* (N, C) and
        integer class targets (N,)."""
        assert len(pred.shape) == 2, 'pred shape should be N, num_class'
        assert len(target.shape) == 1, 'target shape should be N'
        pred = torch.softmax(pred, dim=-1)
        # nll_loss with probabilities picks -p[i, target[i]]; negate to get
        # the probability assigned to the true class of each sample.
        target_pred = -F.nll_loss(pred, target, reduction='none')
        loss = -torch.log(target_pred + self.epsilon)
        class_alpha = torch.tensor([self.alpha[c.item()] for c in target],
                                   dtype=torch.float32).to(loss.device)
        weights = ((1 - target_pred)**self.gamma) * class_alpha
        loss = (weights * loss).mean()
        return loss
class My_loss(torch.nn.Module):
    """Regression loss on the annotation channel: L1 distance scaled by 1000.

    ``out[..., 0]`` is treated as the predicted annotation value and
    ``out[..., 1:]`` as class logits; the logits (and ``label_y``) are
    reshaped for interface compatibility but do not contribute to the
    returned loss.
    """

    def __init__(self):
        super().__init__()
        # Kept for checkpoint/state-dict compatibility; unused by forward().
        self.focal_loss = Focal_Loss(num_class=3)

    def forward(self, out, anno_y, label_y):
        anno_x = out[..., 0]
        label_x = out[..., 1:]
        # Flatten batched sequences to plain vectors before comparing.
        if len(anno_x.shape) == 2:
            anno_x = anno_x.reshape(-1)
            anno_y = anno_y.reshape(-1)
        loss_l1 = F.l1_loss(anno_x, anno_y)
        if len(label_x.shape) == 3:
            label_x = label_x.reshape(-1, label_x.shape[-1])
            label_y = label_y.reshape(-1)
        # Scale chosen empirically during experimentation.
        return loss_l1 * 1000
def ccc(y_pred, y_true, epsilon=1e-7):
    """Concordance correlation coefficient between two 1-D tensors.

    Returns a ``(ccc, rho)`` pair where *rho* is Pearson correlation and
    *ccc* additionally penalizes mean/scale disagreement. *epsilon* guards
    the denominators against division by zero.
    """
    assert len(y_pred.shape) == 1
    mean_t = y_true.mean()
    mean_p = y_pred.mean()
    dev_t = y_true - mean_t
    dev_p = y_pred - mean_p
    denom = torch.sqrt((dev_p**2).sum()) * torch.sqrt((dev_t**2).sum()) + epsilon
    rho = (dev_p * dev_t).sum() / denom
    std_p = torch.std(y_pred)
    std_t = torch.std(y_true)
    ccc_val = 2 * rho * std_t * std_p / (
        (std_p**2 + std_t**2 + (mean_p - mean_t)**2) + epsilon)
    return ccc_val, rho
def img_dirs_filter(img_dirs, dataset):
    '''
    Drop clip directories that have no annotation entry.

    Some clips are not labeled (or labeled incorrectly); only directories
    present as keys in the dataset's precomputed ``anno_dict.npy`` are kept.
    Raises NotImplementedError for unknown dataset names.
    '''
    _img_dirs = []
    if dataset == 'SAMM':
        anno_dict = np.load(osp.join(paths.SAMM_LABEL_DIR, 'anno_dict.npy'),
                            allow_pickle=True).item()
    elif dataset == 'CASME_2':
        anno_dict = np.load(osp.join(paths.CASME_2_LABEL_DIR, 'anno_dict.npy'),
                            allow_pickle=True).item()
    else:
        raise NotImplementedError
    for img_dir in img_dirs:
        if img_dir in anno_dict:
            _img_dirs.append(img_dir)
        else:
            print('clip: {} is not labeled or labeled incorrectly.'.format(
                img_dir))
    return _img_dirs
def get_img_dirs(dataset):
    """Collect labeled clip directories for *dataset* ('SAMM' or 'CASME_2').

    SAMM stores clips one level below its video dir; CASME_2 nests them one
    level deeper (subject/clip). Unlabeled clips are filtered out via
    img_dirs_filter().
    """
    if dataset == 'SAMM':
        img_dirs = [
            osp.join(paths.SAMM_VIDEO_DIR, name)
            for name in os.listdir(paths.SAMM_VIDEO_DIR)
        ]
    elif dataset == 'CASME_2':
        # Two-level layout: <video_dir>/<subject>/<clip>
        _img_dirs = [[
            osp.join(paths.CASME_2_VIDEO_DIR, name1, name2)
            for name2 in os.listdir(osp.join(paths.CASME_2_VIDEO_DIR, name1))
        ] for name1 in os.listdir(paths.CASME_2_VIDEO_DIR)]
        img_dirs = []
        for dirs in _img_dirs:
            img_dirs.extend(dirs)
    else:
        raise NotImplementedError
    img_dirs = img_dirs_filter(img_dirs, dataset)
    return img_dirs
def leave_one_out(img_dirs, dataset):
    """Build subject-wise leave-one-out splits.

    Returns ``{subject_id: [train_dirs, val_dirs]}`` where the val set holds
    every clip of that subject and the train set holds the rest. Subject ids
    are parsed from the directory layout: SAMM encodes them as the prefix of
    the clip basename ('006_...'), CASME_2 as the parent directory ('s15').
    """
    img_dirs = sorted(img_dirs)
    splits = {}
    if dataset == 'SAMM':
        subjects = sorted({osp.basename(d).split('_')[0] for d in img_dirs})
        for subj in subjects:
            # NOTE: substring match against the full path, as in the
            # original implementation.
            val_set = [d for d in img_dirs if subj in d]
            train_set = [d for d in img_dirs if subj not in d]
            splits[subj] = [train_set, val_set]
    elif dataset == 'CASME_2':
        subjects = sorted({d.split('/')[-2] for d in img_dirs})
        for subj in subjects:
            val_set = [d for d in img_dirs if d.split('/')[-2] == subj]
            train_set = [d for d in img_dirs if d.split('/')[-2] != subj]
            splits[subj] = [train_set, val_set]
    else:
        raise NotImplementedError
    return splits
def adjust_learning_rate(optimizer, epoch, lr_strat, wd, lr_steps, factor=0.1):
    """Step-decay schedule: lr = lr_strat * factor**k, where k counts how
    many milestones in *lr_steps* the current *epoch* has reached. Also
    (re)applies the weight decay *wd* to every parameter group."""
    drops = int(sum(epoch >= np.asarray(lr_steps)))
    new_lr = lr_strat * factor**drops
    for group in optimizer.param_groups:
        group['lr'] = new_lr
        group['weight_decay'] = wd
def save_checkpoint(state, is_best, save_root, root_model, filename='val'):
    """Persist *state* to <save_root>/<root_model>/<filename>_checkpoint.pth.tar;
    when *is_best*, also copy it to <filename>_best_loss.pth.tar."""
    ckpt_path = '%s/%s/%s_checkpoint.pth.tar' % (save_root, root_model, filename)
    torch.save(state, ckpt_path)
    if is_best:
        best_path = '%s/%s/%s_best_loss.pth.tar' % (save_root, root_model,
                                                    filename)
        shutil.copyfile(ckpt_path, best_path)
def check_rootfolders(args):
    """Create the log/model/output/runs folders under args.save_root if they
    do not already exist."""
    for sub in (args.root_log, args.root_model, args.root_output,
                args.root_runs):
        folder = "%s/" % (args.save_root) + sub
        if not os.path.exists(folder):
            print('creating folder ' + folder)
            os.makedirs(folder)
def evaluate(pred_anno_dict,
             pred_label_dict,
             dataset,
             threshold=0.9,
             val_id='all',
             epoch=-1,
             args=None):
    """Spot expression intervals from per-frame annotation predictions and
    score them against ground truth with IoU >= 0.5.

    Args:
        pred_anno_dict: {img_dir: per-frame predicted annotation values}.
        pred_label_dict: {img_dir: per-frame predicted labels} (currently
            unused beyond lookup; label assignment is a TODO below).
        dataset: 'SAMM' or 'CASME_2'; selects label files and fps.
        threshold: cumulative annotation mass required around a peak for it
            to count as a detected interval.
        val_id, epoch: only used to name the saved diagnostic figure.
        args: must provide save_root / root_output for the figure path.

    Returns:
        (ret_info, f_score, (M, N, A)) where M = #ground-truth intervals,
        N = #predicted intervals, A = #matched predictions.
    """
    if dataset == 'SAMM':
        pred_gt = np.load(osp.join(paths.SAMM_ROOT, 'pred_gt.npy'),
                          allow_pickle=True).item()
        anno_dict = np.load(osp.join(paths.SAMM_ROOT, 'anno_dict.npy'),
                            allow_pickle=True).item()
        fps = 200
    elif dataset == 'CASME_2':
        pred_gt = np.load(osp.join(paths.CASME_2_LABEL_DIR, 'pred_gt.npy'),
                          allow_pickle=True).item()
        anno_dict = np.load(osp.join(paths.CASME_2_LABEL_DIR, 'anno_dict.npy'),
                            allow_pickle=True).item()
        fps = 30
    else:
        raise NotImplementedError
    result_dict = {}
    for img_dir, pred_annos in pred_anno_dict.items():
        pred_labels = pred_label_dict[img_dir]
        gt_list = pred_gt[img_dir]
        pred_list = []
        # scan all possible peak point
        # Candidate peaks are sampled once per second (every `fps` frames);
        # each is grown symmetrically until enough annotation mass
        # accumulates or it stops being a local maximum.
        for peak_idx in range(0, len(pred_annos), fps):
            is_peak = True
            front = peak_idx
            tail = peak_idx
            # label_sum = pred_labels[peak_idx]
            cumsum = pred_annos[peak_idx]
            while is_peak and cumsum < threshold and (
                    front > 0 or tail < len(pred_annos) - 1):
                if front - 1 >= 0:
                    front -= 1
                    cumsum += pred_annos[front]
                    # label_sum += pred_labels[front]
                if tail + 1 < len(pred_annos):
                    tail += 1
                    cumsum += pred_annos[tail]
                    # label_sum += pred_labels[tail]
                is_peak = pred_annos[peak_idx] >= pred_annos[
                    front] and pred_annos[peak_idx] >= pred_annos[tail]
            if is_peak and cumsum >= threshold:
                # TODO: label func
                pred_list.append([front, tail, -1])
        M = len(gt_list)
        N = len(pred_list)
        A = 0
        # Greedy matching: a prediction counts once per GT interval when
        # their inclusive-frame IoU reaches 0.5.
        for [onset, offset, label_gt] in gt_list:
            for [
                    front, tail, _
            ] in pred_list:  # TODO: if one pred could match more than one gt?
                if front < onset:
                    b1 = [front, tail]
                    b2 = [onset, offset]
                else:
                    b2 = [front, tail]
                    b1 = [onset, offset]
                # 1
                if b1[1] >= b2[0] and b2[1] >= b1[1]:
                    overlap = b1[1] - b2[0] + 1
                    union = b2[1] - b1[0] + 1
                elif b1[1] >= b2[1]:
                    overlap = b2[1] - b2[0] + 1
                    union = b1[1] - b1[0] + 1
                else:
                    # no overlap
                    overlap = 0
                    union = 1
                if overlap / union >= 0.5:
                    A += 1
                    break
        result_dict[img_dir] = [M, N, A]
    ret_info = []
    # Aggregate counts over all clips before computing P/R/F1.
    M = 0
    N = 0
    A = 0
    for key, (m, n, a) in result_dict.items():
        # p = a / n
        # r = a / m
        # f = 2 * r * p / (p + r)
        # ret_info.append('[{}] P: {.4f}, R: {:.4f}, F1: {:.4f}'.format(
        #     key, p, r, f))
        M += m
        N += n
        A += a
    if M == 0 or N == 0 or A == 0:
        # Metric undefined (no GT, no predictions, or no hits).
        precision = -1.0
        recall = -1.0
        f_score = -1.0
    else:
        precision = A / N
        recall = A / M
        f_score = 2 * recall * precision / (recall + precision)
    ret_info.append('[over all] P: {:.4f}, R: {:.4f}, F1: {:.4f}'.format(
        precision, recall, f_score))
    # save fig
    column = 3
    fig = plt.figure(figsize=(10,
                              ((len(pred_anno_dict) - 1) // column + 1) * 2))
    for i, (img_dir, pred_annos) in enumerate(pred_anno_dict.items()):
        fig.add_subplot((len(pred_anno_dict) - 1) // column + 1, column, i + 1)
        plt.plot(pred_annos, 'b-', alpha=0.5)
        plt.plot(anno_dict[img_dir], 'r-', alpha=0.5)
    fig.tight_layout()
    plt.savefig(
        osp.join(args.save_root, args.root_output,
                 '{}_anno_{}.pdf'.format(val_id, epoch)))
    plt.close('all')
    return ret_info, f_score, (M, N, A)
def evaluate_bi_labels(pred_and_gt, val_id, epoch, args):
    """Score per-frame binary spotting predictions.

    Reassembles the {img_path: [pred, target]} mapping into per-clip arrays
    (frames missing from *pred_and_gt* default to [0, 0]), delegates the
    region matching to evaluate_pred_and_gt(), and — on rank 0 only — saves
    a per-clip pred-vs-gt plot under
    <save_root>/<root_output>/<val_id>/bi_label_<epoch>.pdf.

    Returns (precision, recall, f_score, (M, N, A), match_regions_record).
    """
    keys = sorted(list(pred_and_gt.keys()))
    imgs_dirs = sorted(list(set([osp.dirname(img_p) for img_p in keys])))
    result_dict = {}
    for imgs_dir in imgs_dirs:
        result_dict[imgs_dir] = []
        img_ps = dataset_utils.scan_jpg_from_img_dir(imgs_dir)
        for img_p in img_ps:
            result_dict[imgs_dir].append(pred_and_gt.get(
                img_p, [0, 0]))  # [pred, target]
        result_dict[imgs_dir] = np.asarray(result_dict[imgs_dir])
    precision, recall, f_score, MNA, result_dict, match_regions_record = evaluate_pred_and_gt(
        result_dict, args)
    # visualization
    if args.local_rank == 0:
        column = 3
        fig = plt.figure(figsize=(10,
                                  ((len(imgs_dirs) - 1) // column + 1) * 2))
        for i, imgs_dir in enumerate(imgs_dirs):
            fig.add_subplot((len(imgs_dirs) - 1) // column + 1, column, i + 1)
            data = result_dict[imgs_dir]
            pred = data[:, 0]
            target = data[:, 1]
            plt.plot(pred, 'b-', alpha=0.5)
            plt.plot(target, 'r-', alpha=0.5)  # gt
            plt.title(osp.basename(imgs_dir))
        fig.tight_layout()
        out_dir = osp.join(args.save_root, args.root_output, val_id)
        os.makedirs(out_dir, exist_ok=True)
        plt.savefig(osp.join(out_dir, 'bi_label_{}.pdf'.format(epoch)))
        plt.close('all')
    return precision, recall, f_score, MNA, match_regions_record
def extend_front(front, pred, patience):
    """Greedily extend a positive region starting at index *front*.

    From the current position, jump to the farthest positive frame within
    *patience* steps ahead and repeat until no positive frame is reachable;
    returns the final index. *front* itself must be positive.

    Fix: rewritten iteratively. The original was tail-recursive (one frame
    per call) and could blow the interpreter recursion limit on very long
    prediction sequences — the module even raises sys.setrecursionlimit to
    compensate. Behavior is unchanged.
    """
    assert pred[front] > 0
    while True:
        # Check the largest gap first, mirroring the recursive version's
        # descending-d scan.
        for d in range(patience, 0, -1):
            if front + d < len(pred) and pred[front + d] > 0:
                front += d
                break
        else:
            return front
def evaluate_pred_and_gt(result_dict, args):
    """Turn per-frame binary predictions into regions and score them.

    For each clip, positive frames are merged into regions using
    extend_front() with ``args.patience`` gap tolerance; the per-frame
    prediction column of *result_dict* is overwritten with the binarized
    region mask. Each region is matched against ground-truth intervals at
    inclusive-frame IoU >= 0.5, and TP/FP/FN spans are recorded.

    Returns:
        (precision, recall, f_score, (M, N, A), result_dict,
        match_regions_record) where M/N/A are the total GT/predicted/matched
        region counts and match_regions_record maps each clip to
        [gt_onset, gt_offset, pred_onset, pred_offset, 'TP'|'FP'|'FN'] rows.
    """
    if args.dataset == 'SAMM':
        # patience = 25
        pred_gt = np.load(osp.join(paths.SAMM_ROOT, 'pred_gt.npy'),
                          allow_pickle=True).item()
    elif args.dataset == 'CASME_2':
        pred_gt = np.load(osp.join(paths.CASME_2_LABEL_DIR, 'pred_gt.npy'),
                          allow_pickle=True).item()
        # patience = 10
    else:
        raise NotImplementedError
    M = 0
    N = 0
    A = 0
    match_regions_record = {}
    for imgs_dir, data in result_dict.items():
        pred = data[:, 0]
        target = data[:, 1]
        found_regions = []
        match_regions = [
        ]  # gt_onset, gt_offset, pred_onset, pred_offset, TP/FP
        # Grow regions from each positive frame, skipping `patience` frames
        # past a region's end before resuming the scan.
        front = 0
        while front < len(pred):
            tail = front
            if pred[front] > 0:
                tail = extend_front(front, pred, args.patience)
                if front < tail:  # find one region
                    found_regions.append([front, tail])
            front = tail + args.patience
        # modify result_dict
        pred = np.zeros_like(pred)
        for front, tail in found_regions:
            pred[front:tail] = 1
        data[:, 0] = pred
        result_dict[imgs_dir] = data
        # eval precision, recall, f_score
        gt_list = pred_gt[imgs_dir]
        m = len(gt_list)
        n = len(found_regions)
        a = 0
        # TODO: determine whether one predicted region is macro or micro-expression
        gt_regions_mark = np.zeros(m)
        found_regions_mark = np.zeros(n)
        for mg, [onset, offset, label_gt] in enumerate(gt_list):
            # label_gt: 1->macro, 2->micro
            for mf, [front, tail] in enumerate(
                    found_regions
            ):  # TODO: if one found region can match more than one gt region
                # b1 is the interval that starts first.
                if front < onset:
                    b1 = [front, tail]
                    b2 = [onset, offset]
                else:
                    b1 = [onset, offset]
                    b2 = [front, tail]
                # 1
                if b1[1] >= b2[0] and b2[1] >= b1[1]:
                    overlap = b1[1] - b2[0] + 1
                    union = b2[1] - b1[0] + 1
                elif b1[1] >= b2[1]:
                    overlap = b2[1] - b2[0] + 1
                    union = b1[1] - b1[0] + 1
                else:  # no overlap
                    overlap = 0
                    union = 1
                if overlap / union >= 0.5:
                    a += 1
                    found_regions_mark[mf] = 1
                    gt_regions_mark[mg] = 1
                    match_regions.append([onset, offset, front, tail, 'TP'])
                    break
        # Unmatched GT intervals are false negatives ...
        for mg in range(m):
            if gt_regions_mark[mg] == 0:
                onset, offset, _ = gt_list[mg]
                match_regions.append([onset, offset, '-', '-', 'FN'])
        # ... and unmatched predictions are false positives.
        for mf in range(n):
            if found_regions_mark[mf] == 0:
                front, tail = found_regions[mf]
                match_regions.append(['-', '-', front, tail, 'FP'])
        match_regions_record[imgs_dir] = match_regions
        M += m
        N += n
        A += a
    # NOTE: if one found region can match more than one gt region, TP+FP may be greater than n
    # result of the participant
    if A == 0 or N == 0:
        precision = -1.0
        recall = -1.0
        f_score = -1.0
    else:
        precision = A / N
        recall = A / M
        f_score = 2 * precision * recall / (precision + recall)
    return precision, recall, f_score, (M, N,
                                        A), result_dict, match_regions_record
def calculate_metric_from_dict_MNA(MNA_all):
    """Aggregate per-split (M, N, A) counts and compute overall P/R/F1.

    Args:
        MNA_all: mapping of split id -> (gt_count, pred_count, matched_count).

    Returns:
        (precision, recall, f_score); all three are -1.0 when the metric is
        undefined (no ground truth, no predictions, or no matches).

    Fixes: the original used a bare ``except:`` (which would also swallow
    KeyboardInterrupt and real bugs) — narrowed to ZeroDivisionError, the
    only failure the original computation can produce. Also iterates over
    .values() since the keys were unused.
    """
    M = 0
    N = 0
    A = 0
    for mna in MNA_all.values():
        m, n, a = mna
        M += m
        N += n
        A += a
    try:
        precision = A / N
        recall = A / M
        f_score = 2 * precision * recall / (precision + recall)
    except ZeroDivisionError:
        # N == 0, M == 0, or A == 0 (precision + recall == 0).
        precision = -1.0
        recall = -1.0
        f_score = -1.0
    return precision, recall, f_score
def synchronize():
    """Barrier across all distributed workers; a no-op when torch.distributed
    is unavailable, uninitialized, or running a single process."""
    if not dist.is_available():
        return
    if not dist.is_initialized():
        return
    if dist.get_world_size() == 1:
        return
    dist.barrier()
def reduce_loss(loss, args):
    """Average *loss* across distributed workers; identity otherwise."""
    if not args.distributed:
        return loss
    return reduce_tensor(loss.data, float(args.world_size))
def synchronize_pred_and_gt(pred_and_gt, epoch, args, remove=True):
    """Merge per-rank prediction dicts across distributed workers via the
    shared filesystem.

    Each rank dumps its dict to a temp dir; rank 0 merges all dumps and
    writes the union back; every rank then loads the merged file. The
    interleaved synchronize() barriers order the mkdir/save/merge/read
    phases — do not reorder them. Identity when not distributed.

    Returns the merged {img_path: [pred, target]} dict on every rank.
    """
    if args.distributed:
        out_dir = osp.join(args.save_root, args.root_runs,
                           'temp_{}'.format(epoch))
        if args.local_rank == 0:
            os.makedirs(out_dir, exist_ok=True)
        synchronize()  # make dir done
        np.save(
            osp.join(out_dir,
                     'temp_pred_and_gt_{}.npy'.format(args.local_rank)),
            pred_and_gt)
        synchronize()  # save done
        if args.local_rank == 0:
            pred_and_gt = {}
            for name in os.listdir(out_dir):
                data = np.load(osp.join(out_dir, name),
                               allow_pickle=True).item()
                pred_and_gt.update(data)
            np.save(osp.join(out_dir, 'temp_pred_and_gt_merge.npy'),
                    pred_and_gt)
            synchronize()  # merge done
        else:
            synchronize()  # start read
            pred_and_gt = np.load(osp.join(out_dir,
                                           'temp_pred_and_gt_merge.npy'),
                                  allow_pickle=True).item()
        synchronize()  # read done
        if remove and args.local_rank == 0:
            shutil.rmtree(out_dir)
    return pred_and_gt
def synchronize_f_score(f_score, args):
    """Broadcast rank-0's f_score to all distributed workers; identity when
    not distributed. *f_score* must be a Python float."""
    assert isinstance(f_score, float)
    if not args.distributed:
        return f_score
    score = torch.tensor(f_score).cuda()
    assert score.dtype == torch.float32
    synchronize()  # wait tensor allocation
    dist.broadcast(score, src=0)
    return score.item()
def synchronize_list(list_obj, args):
    """Broadcast rank-0's list of ints to all distributed workers; identity
    when not distributed."""
    assert isinstance(list_obj, (list, tuple))
    if not args.distributed:
        return list_obj
    payload = torch.tensor(list_obj, dtype=torch.int32).cuda()
    synchronize()  # wait tensor allocation
    dist.broadcast(payload, src=0)
    return payload.cpu().numpy().tolist()
def delete_records(total_MNA, match_regions_record_all, val_id):
    """Drop the held-out subject *val_id* from both result dicts (in place).

    A match-region key belongs to the subject when either its parent
    directory equals *val_id* (CASME_2 layout) or the prefix of its basename
    before '_' equals *val_id* (SAMM layout).
    """
    del total_MNA[val_id]
    for key in list(match_regions_record_all.keys()):
        subj_dir = key.split('/')[-2]
        subj_file = osp.basename(key).split('_')[0]
        if val_id in (subj_dir, subj_file):
            del match_regions_record_all[key]
    return total_MNA, match_regions_record_all
|
{"/utils.py": ["/paths.py", "/dataset/utils.py"], "/trainer_cls.py": ["/utils.py", "/dataset/utils.py", "/dataset/params.py"], "/main_cls.py": ["/config.py", "/model/network.py", "/dataset/me_dataset.py", "/utils.py", "/trainer_cls.py"]}
|
1,791
|
guanjz20/MM21_FME_solution
|
refs/heads/master
|
/paths.py
|
# SAMM
SAMM_ROOT = '/data/gjz_mm21/SAMM'
SAMM_LABEL_DIR = SAMM_ROOT
SAMM_VIDEO_DIR = '/data/gjz_mm21/SAMM/SAMM_longvideos'
# CASME_2
CASME_2_ROOT = '/data/gjz_mm21/CASME_2_LongVideoFaceCropped/CASME_2_longVideoFaceCropped'
CASME_2_LABEL_DIR = '/data/gjz_mm21/CASME_2_LongVideoFaceCropped/CASME_2_longVideoFaceCropped/labels'
CASME_2_VIDEO_DIR = '/data/gjz_mm21/CASME_2_LongVideoFaceCropped/CASME_2_longVideoFaceCropped/longVideoFaceCropped'
|
{"/utils.py": ["/paths.py", "/dataset/utils.py"], "/trainer_cls.py": ["/utils.py", "/dataset/utils.py", "/dataset/params.py"], "/main_cls.py": ["/config.py", "/model/network.py", "/dataset/me_dataset.py", "/utils.py", "/trainer_cls.py"]}
|
1,792
|
guanjz20/MM21_FME_solution
|
refs/heads/master
|
/dataset/me_dataset.py
|
from unicodedata import name
import cv2
import os
import pdb
import torch
import time
import pywt
import glob
import numpy as np
import os.path as osp
from tqdm import tqdm
from torch.utils.data import Dataset
from torch import nn as nn
from . import params
from . import utils
# Number of wavelet sub-bands produced by a single-level 2-D DWT
# (approximation + 3 detail bands; see dwt2() below).
WT_CHANNEL = 4
# Load the precomputed Gaussian kernels once at import time.
sm_kernel = np.load(params.GAUSS_KERNEL_PATH['sm_kernel'])
dr1_kernel = np.load(params.GAUSS_KERNEL_PATH['dr1_kernel'])
dr2_kernel = np.load(params.GAUSS_KERNEL_PATH['dr2_kernel'])
# Reshape the 1-D derivative kernels to (k, 1, 1) so they broadcast over
# (time, H, W) image stacks in get_smoothing_and_dr_coefs().
dr1_kernel = dr1_kernel[:, None, None]
dr2_kernel = dr2_kernel[:, None, None]
class SAMMDataset(Dataset):
    """Sliding-window sequence dataset over SAMM long-video frame folders.

    Each item is a window of ``seq_len`` consecutive frames from one clip,
    returned as (ret_coefs, img_features, annos, labels, seq_info) where
    ret_coefs are temporal-difference / wavelet features (per
    ``data_option``), img_features are precomputed 2048-d per-frame
    features (.npy next to each .jpg), and seq_info identifies the window.
    """
    def __init__(self,
                 mode,
                 img_dirs,
                 seq_len=64,
                 step=32,
                 time_len=12,
                 input_size=256,
                 data_aug=False,
                 data_option=None,
                 dataset_name='SAMM'):
        """Index the clips and load the annotation/label dicts.

        mode: 'train' or 'test'; at test time windows must not overlap.
        seq_len/step: window length and stride (frames).
        time_len: temporal context; time_len//2 frames are observed before
            and after each frame.
        data_option: 'diff', 'wt_diff' or 'wt_dr' — selects ret_coefs type.
        """
        super().__init__()
        self.dataset_name = dataset_name
        self.mode = mode
        self.seq_len = seq_len
        self.step = step
        assert mode == 'train' or (mode == 'test'
                                   and self.seq_len <= self.step)
        self.time_len = time_len  # observe time_len//2 frames before and after
        # wt options run the DWT which halves resolution, so start at 2x.
        self.size = input_size if data_option == 'diff' else input_size * 2
        self.img_dirs = img_dirs  # imgs files dirs
        if not isinstance(self.img_dirs, list):
            self.img_dirs = [self.img_dirs]
        self.img_ps_dict = self._get_img_ps_dict()
        self.seq_list = self._get_seq_list()
        self.label_dict = np.load(osp.join(params.SAMM_ROOT, 'label_dict.npy'),
                                  allow_pickle=True).item()
        self.anno_dict = np.load(osp.join(params.SAMM_ROOT, 'anno_dict.npy'),
                                 allow_pickle=True).item()
        # print('Load {} clips, {} frames from {}'.format(
        #     len(self.seq_list),
        #     len(self.seq_list) * self.seq_len, dataset_name))
        self.transform = utils.get_group_transform(
            mode) if data_aug else utils.Identity()
        self.data_option = data_option
    def _get_img_ps_dict(self):
        """Map each clip dir to an immutable tuple of its frame paths."""
        ret_dict = {}
        for img_dir in self.img_dirs:
            img_ps = utils.scan_jpg_from_img_dir(img_dir)
            ret_dict[img_dir] = tuple(img_ps)
        return ret_dict
    def _get_seq_list(self):
        """Enumerate all (clip, front, tail) windows of length seq_len with
        stride step; a trailing partial window is dropped."""
        ret_list = []
        for img_dir, img_ps in self.img_ps_dict.items():
            front = 0
            tail = front + self.seq_len  # [front, tail), tail not include
            while tail <= len(img_ps):
                ret_list.append([img_dir, front,
                                 tail])  # (img dir, front_idx, tail_idx)
                front += self.step
                tail = front + self.seq_len
        return ret_list
    def __len__(self):
        return len(self.seq_list)
    def __getitem__(self, index):
        """Load one window: features, annotations, labels and the derived
        temporal coefficients selected by self.data_option."""
        img_dir, front, tail = self.seq_list[
            index]  # [front, tail), tail not include
        seq_info = (img_dir, front, tail)
        # insert and append extra imgs for temporal conv
        # (edge frames are repeated so every frame has full context)
        _old_len = len(self.img_ps_dict[img_dir])
        img_ps = list(self.img_ps_dict[img_dir][front:tail])
        for i in range(1, self.time_len // 2 + 1):
            img_ps.insert(0, self.img_ps_dict[img_dir][max(0, front - i)])
            img_ps.append(self.img_ps_dict[img_dir][min(
                _old_len - 1, tail - 1 + i)])
        _cur_len = len(self.img_ps_dict[img_dir])
        assert _old_len == _cur_len  # make sure the dict has not been changed
        # read sequence features, annos and labels
        img_features = np.stack([
            np.load(p.replace('.jpg', '.npy'))
            for p in img_ps[self.time_len // 2:-self.time_len // 2]
        ], 0)
        annos = self.anno_dict[img_dir][front:tail]
        labels = self.label_dict[img_dir][front:tail]
        assert img_features.shape == (self.seq_len, 2048)  # resnet50 features
        # read sequence imgs
        flat_imgs = np.empty(
            (self.seq_len + (self.time_len // 2) * 2, self.size, self.size),
            dtype=np.float32)
        for i, p in enumerate(img_ps):
            img = cv2.imread(p, cv2.IMREAD_GRAYSCALE)
            if not img.shape[0] == img.shape[1]:
                # crop to square
                h, w = img.shape
                wide = abs(h - w) // 2
                if h > w:
                    img = img[wide:wide + w, :]
                else:
                    img = img[:, wide:wide + h]
                try:
                    assert img.shape[0] == img.shape[1]
                except:
                    print('Error in cropping image {}'.format(p))
            img = cv2.resize(img, (self.size, self.size))
            flat_imgs[i] = img
        # transform
        flat_imgs = self.transform(flat_imgs)
        if self.data_option is not None and 'wt' in self.data_option:
            flat_wts = np.stack([dwt2(img) for img in flat_imgs], 0)
        # expand flat imgs: one (time_len + 1)-frame context window per frame
        i = 0
        front = 0
        tail = front + self.time_len  # [front, tail], tail include
        if self.data_option is not None and 'wt' in self.data_option:
            seq_wts = np.empty((self.seq_len, self.time_len + 1, WT_CHANNEL,
                                self.size // 2, self.size // 2),
                               dtype=np.float32)
        elif self.data_option == 'diff':
            seq_imgs = np.empty(
                (self.seq_len, self.time_len + 1, self.size, self.size),
                dtype=np.float32)
        while tail < len(flat_imgs):
            if self.data_option is not None and 'wt' in self.data_option:
                seq_wts[i] = flat_wts[front:tail + 1].copy()
            elif self.data_option == 'diff':
                seq_imgs[i] = flat_imgs[front:tail + 1].copy()
            i += 1
            front += 1
            tail += 1
        assert i == self.seq_len
        # data options
        if self.data_option == 'diff':
            # frame-to-frame differences of raw grayscale images
            ret_coefs = np.stack([get_diff(imgs) for imgs in seq_imgs], 0)
        elif self.data_option == 'wt_diff':
            # frame-to-frame differences of wavelet coefficients
            ret_coefs = np.stack([get_diff(coefs) for coefs in seq_wts],
                                 0).reshape(self.seq_len,
                                            self.time_len * WT_CHANNEL,
                                            self.size // 2, self.size // 2)
        elif self.data_option == 'wt_dr':
            # Gaussian-smoothed 1st/2nd temporal derivatives per wt band
            ret_coefs = seq_wts.transpose(0, 2, 1, 3, 4)
            ret_coefs = np.asarray([[
                get_smoothing_and_dr_coefs(coefs_dim2)
                for coefs_dim2 in coefs_dim1
            ] for coefs_dim1 in ret_coefs])
            assert ret_coefs.shape[:3] == (self.seq_len, WT_CHANNEL, 3 * 2)
            ret_coefs = ret_coefs.transpose(0, 2, 1, 3, 4)
            ret_coefs = ret_coefs.reshape(self.seq_len, -1, self.size // 2,
                                          self.size // 2)
        elif self.data_option is None:
            print('Require data option...')
            exit()
        else:
            raise NotImplementedError
        ret_coefs = torch.FloatTensor(ret_coefs)
        img_features = torch.FloatTensor(img_features)
        annos = torch.FloatTensor(annos)
        labels = torch.LongTensor(labels)
        return ret_coefs, img_features, annos, labels, seq_info
class CASME_2Dataset(SAMMDataset):
    """SAMMDataset variant for CASME_2: identical windowing, but the
    label/annotation dicts are reloaded from the CASME_2 label directory
    (overwriting the SAMM dicts loaded by the parent __init__)."""
    def __init__(self,
                 mode,
                 img_dirs,
                 seq_len=64,
                 step=32,
                 time_len=12,
                 input_size=256,
                 data_aug=False,
                 data_option=None,
                 dataset_name='CASME_2'):
        super().__init__(mode,
                         img_dirs,
                         seq_len=seq_len,
                         step=step,
                         time_len=time_len,
                         input_size=input_size,
                         data_aug=data_aug,
                         data_option=data_option,
                         dataset_name=dataset_name)
        # NOTE: the parent has already loaded the SAMM dicts; these
        # assignments replace them with the CASME_2 ones.
        self.label_dict = np.load(osp.join(params.CASME_2_LABEL_DIR,
                                           'label_dict.npy'),
                                  allow_pickle=True).item()
        self.anno_dict = np.load(osp.join(params.CASME_2_LABEL_DIR,
                                          'anno_dict.npy'),
                                 allow_pickle=True).item()
class SAMMImageDataset(Dataset):
    """Per-frame dataset over precomputed feature files.

    Each item is (feature, label, img_path): the 2048-d feature stored in
    the .npy file next to the frame, and a binary label — 1 when the frame
    belongs to a spotting region per the dataset's bi_label.npy.
    """
    def __init__(self, img_ps):
        super().__init__()
        self.img_ps = img_ps
        self.bi_label = np.load(
            osp.join(params.SAMM_ROOT, 'bi_label.npy'),
            allow_pickle=True).item()  # imgs_dir -> [<target img_p> ... ]
    def __len__(self):
        return len(self.img_ps)
    def __getitem__(self, index):
        img_p = self.img_ps[index]
        npy_p = img_p.replace('.jpg', '.npy')
        feature = np.load(npy_p)
        feature = torch.tensor(feature, dtype=torch.float32)
        imgs_dir = osp.dirname(img_p)
        label = 1 if img_p in self.bi_label[
            imgs_dir] else 0  # 1 for spotting region
        label = torch.tensor(label, dtype=torch.long)
        return feature, label, img_p
class CASME_2ImageDataset(SAMMImageDataset):
    """SAMMImageDataset variant that swaps in the CASME_2 binary labels
    (replacing the SAMM ones loaded by the parent __init__)."""
    def __init__(self, img_ps):
        super().__init__(img_ps)
        self.bi_label = np.load(
            osp.join(params.CASME_2_LABEL_DIR, 'bi_label.npy'),
            allow_pickle=True).item()  # imgs_dir -> [<target img_p> ... ]
def get_diff(imgs):
    """First-order temporal difference along axis 0.

    Accepts a (T, H, W) image stack (H == W) or a (T, WT_CHANNEL, H, W)
    wavelet-coefficient stack and returns the (T-1, ...) frame-to-frame
    differences.
    """
    ndim = len(imgs.shape)
    if ndim == 3:
        assert imgs.shape[1] == imgs.shape[2]  # imgs
    elif ndim == 4:
        assert imgs.shape[2] == imgs.shape[
            3] and imgs.shape[1] == WT_CHANNEL  # wt_coefs
    return imgs[1:] - imgs[:-1]
def dwt2(img, wave_name='haar'):
    """Single-level 2-D DWT of a (w, h) array.

    Returns a (4, w//2, h//2) array stacking the approximation band followed
    by the three detail bands.
    """
    assert isinstance(img, np.ndarray)
    approx, details = pywt.dwt2(img, wave_name)
    return np.array([approx, *details])  # (4, w//2, h//2)
def get_smoothing_and_dr_coefs(imgs):
    '''
    Smooth each frame with the precomputed 2-D Gaussian (GAUSS_KERNEL_PATH)
    and compute Gaussian-weighted 1st and 2nd temporal derivatives via a
    sliding dot product with the module-level dr1/dr2 kernels.

    Returns the stacked dr1 results followed by the dr2 results; each valid
    window of dr_ks frames yields one output frame.
    '''
    global sm_kernel, dr1_kernel, dr2_kernel
    # Per-frame spatial smoothing before temporal differentiation.
    sm_imgs = np.array([cv2.filter2D(img, -1, sm_kernel) for img in imgs])
    dr_ks = dr1_kernel.shape[0]
    dr1_res = []
    dr2_res = []
    for i in range(len(imgs) - dr_ks + 1):
        _imgs = sm_imgs[i:i + dr_ks]
        # Weighted sum over the temporal window (kernels broadcast over H, W).
        dr1_res.append((_imgs * dr1_kernel).sum(axis=0))
        dr2_res.append((_imgs * dr2_kernel).sum(axis=0))
    res = np.stack((*dr1_res, *dr2_res), 0)
    return res
|
{"/utils.py": ["/paths.py", "/dataset/utils.py"], "/trainer_cls.py": ["/utils.py", "/dataset/utils.py", "/dataset/params.py"], "/main_cls.py": ["/config.py", "/model/network.py", "/dataset/me_dataset.py", "/utils.py", "/trainer_cls.py"]}
|
1,793
|
guanjz20/MM21_FME_solution
|
refs/heads/master
|
/model/network.py
|
import torch
import torch.nn as nn
import torch.nn.functional as F
import numpy as np
import glob
import os
import os.path as osp
from torch.serialization import load
class MLP(nn.Module):
    """Frame-wise MLP head: maps (bs, frames, in_dim) -> (bs, frames, 256).

    Each hidden layer is Dropout -> Linear -> BatchNorm1d -> ReLU; the last
    entry of *hidden_units* must be 256.
    """

    def __init__(self, hidden_units, dropout=0.3):
        super(MLP, self).__init__()
        assert len(hidden_units) - 1 > 0
        assert hidden_units[-1] == 256
        layers = []
        in_dim = hidden_units[0]
        for out_dim in hidden_units[1:]:
            layers.extend([
                nn.Dropout(dropout),
                nn.Linear(in_dim, out_dim),
                nn.BatchNorm1d(out_dim),
                nn.ReLU(inplace=True),
            ])
            in_dim = out_dim
        self.mlp = nn.Sequential(*layers)

    def forward(self, input_tensor):
        # Fold the frame axis into the batch so BatchNorm1d sees 2-D input.
        bs, num_frames, feature_dim = input_tensor.size()
        flat = input_tensor.reshape(bs * num_frames, feature_dim)
        return self.mlp(flat).reshape(bs, num_frames, -1)
class Temporal_Net(nn.Module):
    """4-stage CNN over per-frame temporal-coefficient maps.

    Input (bs, frames, num_channels, W, H) is folded to a 2-D conv batch;
    four stride-2 double-conv stages (64 -> 128 -> 256 -> 512 channels)
    are followed by global average pooling and an MLP. When ``feature`` is
    True, forward() returns the pooled MLP feature and skips the
    classifier head.
    """
    def __init__(self, input_size, num_channels, hidden_units, dropout,
                 feature):
        super().__init__()
        assert input_size in [112, 128, 224, 256]
        self.feature = feature  # return feature before classification
        # 4 layers conv net
        self.conv_net = []
        self.conv_net.append(
            self._make_conv_layer(num_channels, 2**6, stride=2))
        for i in range(7, 10):
            self.conv_net.append(
                self._make_conv_layer(2**(i - 1), 2**i, stride=2))
        self.conv_net = nn.Sequential(*self.conv_net)
        last_conv_width = input_size // (2**4)
        last_conv_dim = 2**9
        self.dropout = nn.Dropout2d(p=0.2)
        # self.avgpool = nn.AvgPool2d(
        #     kernel_size=[last_conv_width, last_conv_width])
        fc_list = []
        fc_list += [
            nn.Linear(last_conv_dim, hidden_units[0]),
            nn.ReLU(inplace=True),
            nn.BatchNorm1d(hidden_units[0]),
            nn.Dropout(dropout)
        ]
        for i in range(0, len(hidden_units) - 2):
            fc_list += [
                nn.Linear(hidden_units[i], hidden_units[i + 1]),
                nn.ReLU(inplace=True),
                nn.BatchNorm1d(hidden_units[i + 1]),
                nn.Dropout(dropout)
            ]
        self.fc = nn.Sequential(*fc_list)
        # not used
        final_norm = nn.BatchNorm1d(1, eps=1e-6, momentum=0.1)
        self.classifier = nn.Sequential(
            nn.Linear(hidden_units[-2], hidden_units[-1]), final_norm)
    def _make_conv_layer(self, in_c, out_c, kernel_size=3, stride=2):
        """One stage: same-padding conv + BN + ReLU, then a strided conv +
        BN + ReLU that halves the spatial resolution."""
        ks = kernel_size
        conv_layer = nn.Sequential(
            nn.Conv2d(in_c, out_c, kernel_size=(ks, ks), padding=ks // 2),
            nn.BatchNorm2d(out_c,
                           eps=1e-05,
                           momentum=0.1,
                           affine=True,
                           track_running_stats=True),
            nn.ReLU(inplace=True),
            nn.Conv2d(out_c,
                      out_c,
                      kernel_size=(ks, ks),
                      padding=ks // 2,
                      stride=stride),
            nn.BatchNorm2d(out_c,
                           eps=1e-05,
                           momentum=0.1,
                           affine=True,
                           track_running_stats=True),
            nn.ReLU(inplace=True),
        )
        return conv_layer
    def forward(self, wt_data):
        """wt_data: (bs, frames, channels, W, H) -> (bs*frames, feat) when
        self.feature, else classifier logits."""
        bs, num_frames, num_channel, W0, H0 = wt_data.size()
        wt_data = wt_data.reshape(bs * num_frames, num_channel, W0, H0)
        conv_out = self.conv_net(wt_data)
        avgpool = F.adaptive_avg_pool2d(conv_out, (1, 1))
        # avgpool = self.avgpool(conv_out)
        avgpool = avgpool.reshape(bs * num_frames, -1)
        out = self.fc(avgpool)
        if self.feature:
            return out
        else:
            out = self.classifier(out)
            return out
class Two_Stream_RNN(nn.Module):
    """Two-stream sequence model: per-frame appearance features (MLP over
    precomputed CNN features) and temporal-coefficient features
    (Temporal_Net) are concatenated, fused by a bidirectional GRU, and
    mapped to ``outchannel`` outputs per frame. Channel 0 is treated as the
    annotation regression target (log-transformed on output); the remaining
    channels are class logits.
    """
    def __init__(self,
                 mlp_hidden_units=[2048, 256, 256],
                 dropout=0.3,
                 inchannel=12,
                 size=256,
                 outchannel=4):
        super().__init__()
        self.mlp = MLP(mlp_hidden_units)
        self.temporal_net = Temporal_Net(size,
                                         inchannel,
                                         hidden_units=[256, 256, 1],
                                         dropout=0.3,
                                         feature=True)
        # Fuse the concatenated 256+256 stream features back to 256-d.
        self.transform = nn.Sequential(nn.Linear(512, 256),
                                       nn.ReLU(inplace=True),
                                       nn.BatchNorm1d(256),
                                       nn.Dropout(dropout))
        self.rnns = nn.GRU(256,
                           128,
                           bidirectional=True,
                           num_layers=2,
                           dropout=0.3,
                           batch_first=True)
        self.classifier = nn.Sequential(nn.Dropout(dropout),
                                        nn.Linear(256, outchannel),
                                        nn.BatchNorm1d(outchannel), nn.ReLU())
        _init_weights(self)
    def forward(self, temp_data, rgb_data, return_feature=False):
        """temp_data: (bs, frames, C, W, H); rgb_data: (bs, frames, 2048).
        Returns (bs, frames, outchannel); when return_feature, the raw head
        output is returned without the log transform on channel 0."""
        bs, num_frames = rgb_data.size(0), rgb_data.size(1)
        # spatial features
        features_cnn = self.mlp(rgb_data)
        features_spatial = features_cnn.reshape(bs, num_frames, -1)
        # temporal features
        features_temporal = self.temporal_net(temp_data)
        features_temporal = features_temporal.reshape(bs, num_frames, -1)
        features = torch.cat([features_spatial, features_temporal], dim=-1)
        features = self.transform(features.reshape(bs * num_frames, -1))
        features = features.reshape(bs, num_frames, -1)
        # rnn combination
        outputs_rnns, _ = self.rnns(features)
        outputs_rnns = outputs_rnns.reshape(bs * num_frames, -1)
        out = self.classifier(outputs_rnns)
        out = out.reshape(bs, num_frames, -1)
        if return_feature:
            return out
        # anno transforms
        out[..., 0] = torch.log(out[..., 0] + 1)
        return out
class Two_Stream_RNN_Cls(Two_Stream_RNN):
    """Classification variant of Two_Stream_RNN.

    Replaces the regression head with a plain Dropout+Linear classifier and
    returns raw per-frame logits (no annotation transform).
    """

    def __init__(self,
                 mlp_hidden_units=[2048, 256, 256],
                 dropout=0.3,
                 inchannel=12,
                 size=256,
                 outchannel=2):
        super().__init__(mlp_hidden_units=mlp_hidden_units,
                         dropout=dropout,
                         inchannel=inchannel,
                         size=size,
                         outchannel=outchannel)
        # Swap the parent's Linear+BN+ReLU head for a bare linear classifier.
        self.classifier = nn.Sequential(nn.Dropout(dropout),
                                        nn.Linear(256, outchannel))
        # Re-initialise after replacing modules.
        _init_weights(self)

    def forward(self, temp_data, rgb_data):
        # return_feature=True bypasses the parent's log transform.
        return super().forward(temp_data, rgb_data, return_feature=True)
class ResNet50_Cls(nn.Module):
    """Small MLP head that classifies 2048-d ResNet-50 features."""

    def __init__(self, num_class=2):
        super().__init__()
        # 2048 -> 512 -> num_class, with dropout between the two layers.
        self.fc = nn.Sequential(
            nn.Linear(2048, 512),
            nn.Dropout(0.5),
            nn.Linear(512, num_class),
        )

    def forward(self, x):
        # Inputs must be pooled backbone features (last dim 2048).
        assert x.shape[-1] == 2048
        return self.fc(x)
def _init_weights(model):
for k, m in model.named_modules():
if isinstance(m, (nn.Conv3d, nn.Conv2d, nn.Conv1d)):
nn.init.kaiming_normal_(m.weight,
mode='fan_out',
nonlinearity='relu')
# nn.init.xavier_normal_(m.weight)
if m.bias is not None:
nn.init.constant_(m.bias, 0)
elif isinstance(m, (nn.BatchNorm3d, nn.BatchNorm2d, nn.BatchNorm1d)):
nn.init.constant_(m.weight, 1)
nn.init.constant_(m.bias, 0)
elif isinstance(m, (nn.Linear)):
nn.init.xavier_normal_(m.weight)
if m.bias is not None:
nn.init.constant_(m.bias, 0)
def load_pretrained_model(model, path, load_bn):
    """Load a checkpoint into ``model`` with optional BatchNorm filtering.

    Args:
        model: target module; keys absent from the checkpoint keep their
            current values.
        path: checkpoint file containing a ``state_dict`` entry.
        load_bn: when False, all BatchNorm parameters/buffers are skipped.

    Returns:
        The same ``model`` instance with the filtered weights loaded.
    """
    checkpoint = torch.load(path, map_location='cpu')['state_dict']
    # Older checkpoints named the temporal stream 'wt_net'.
    checkpoint = {
        key.replace('wt_net', 'temporal_net', 1): val
        for key, val in checkpoint.items()
    }
    if not load_bn:
        # Every BN module owns a 'running_mean' buffer; use it to find the
        # module prefix, then drop all of that module's params/buffers.
        skip = set()
        for key in checkpoint:
            if 'running_mean' not in key:
                continue
            prefix = '.'.join(key.split('.')[:-1])
            for suffix in ('weight', 'bias', 'running_mean', 'running_var',
                           'num_batches_tracked'):
                skip.add(prefix + '.' + suffix)
        checkpoint = {k: v for k, v in checkpoint.items() if k not in skip}
    # Never load the task head.
    checkpoint = {k: v for k, v in checkpoint.items() if 'classifier' not in k}
    merged = model.state_dict()
    merged.update(checkpoint)
    model.load_state_dict(merged)
    return model
|
{"/utils.py": ["/paths.py", "/dataset/utils.py"], "/trainer_cls.py": ["/utils.py", "/dataset/utils.py", "/dataset/params.py"], "/main_cls.py": ["/config.py", "/model/network.py", "/dataset/me_dataset.py", "/utils.py", "/trainer_cls.py"]}
|
1,794
|
guanjz20/MM21_FME_solution
|
refs/heads/master
|
/config.py
|
import argparse
# Command-line configuration shared by the training / evaluation entry points.
parser = argparse.ArgumentParser(description="x")
# Experiment bookkeeping: names used when building log/checkpoint paths.
parser.add_argument('--store_name', type=str, default="")
parser.add_argument('--save_root', type=str, default="")
parser.add_argument('--tag', type=str, default="")
parser.add_argument('--snap', type=str, default="")
parser.add_argument('--dataset',
                    type=str,
                    default="",
                    choices=['SAMM', 'CASME_2'])
parser.add_argument('--data_aug', action='store_true')
parser.add_argument('--distributed', action='store_true')
parser.add_argument('--amp', action='store_true')
parser.add_argument("--local_rank", default=0, type=int)
parser.add_argument("--seed", default=111, type=int)
parser.add_argument('--finetune_list',
                    default=[],
                    type=str,
                    nargs="+",
                    help='finetune subjects')
parser.add_argument("--patience",
                    default=15,
                    type=int,
                    help='front extend patience')
# ========================= Model Configs ==========================
parser.add_argument('--hidden_units',
                    default=[2048, 256, 256],
                    type=int,
                    nargs="+",
                    help='hidden units set up')
# Sliding-window length and stride (in frames) over the long videos.
parser.add_argument('--length', type=int, default=64)
parser.add_argument('--step', type=int, default=64)
parser.add_argument('-L',
                    type=int,
                    default=12,
                    help='the number of input difference images')
parser.add_argument('--input_size', type=int, default=112)
parser.add_argument('--data_option',
                    type=str,
                    choices=['diff', 'wt_diff', 'wt_dr'])
# ========================= Learning Configs ==========================
parser.add_argument('--epochs',
                    default=25,
                    type=int,
                    metavar='N',
                    help='number of total epochs to run')
parser.add_argument(
    '--early_stop', type=int,
    default=3)  # if validation loss didn't improve over 3 epochs, stop
parser.add_argument('-b',
                    '--batch_size',
                    default=16,
                    type=int,
                    metavar='N',
                    help='mini-batch size (default: 16)')
parser.add_argument('--lr', default=1e-2, type=float)
parser.add_argument('--lr_decay_factor', default=0.1, type=float)
parser.add_argument('--lr_steps',
                    default=[2, 5],
                    type=float,
                    nargs="+",
                    metavar='LRSteps',
                    help='epochs to decay learning rate by factor')
parser.add_argument('--optim', default='SGD', type=str)
parser.add_argument('--momentum',
                    default=0.9,
                    type=float,
                    metavar='M',
                    help='momentum')
parser.add_argument('--weight-decay',
                    '--wd',
                    default=5e-4,
                    type=float,
                    metavar='W',
                    help='weight decay (default: 5e-4)')
parser.add_argument('--clip-gradient',
                    '--gd',
                    default=20,
                    type=float,
                    metavar='W',
                    help='gradient norm clipping (default: 20)')
# Per-class weights of the focal loss (order: negative, positive).
parser.add_argument('--focal_alpha', default=[1., 1.], type=float, nargs="+")
# ========================= Monitor Configs ==========================
parser.add_argument('--print-freq',
                    '-p',
                    default=50,
                    type=int,
                    metavar='N',
                    help='print frequency (default: 50) iteration')
parser.add_argument('--eval-freq',
                    '-ef',
                    default=1,
                    type=int,
                    metavar='N',
                    help='evaluation frequency (default: 1) epochs')
# ========================= Runtime Configs ==========================
parser.add_argument('-j',
                    '--workers',
                    default=0,
                    type=int,
                    metavar='N',
                    help='number of data loading workers (default: 4)')
parser.add_argument('--resume',
                    default='',
                    type=str,
                    metavar='PATH',
                    help='path to latest checkpoint (default: none)')
parser.add_argument('--delete_last',
                    action='store_true',
                    help='delete the last recorded subject')
parser.add_argument('-t',
                    '--test',
                    dest='test',
                    action='store_true',
                    help='evaluate model on test set')
parser.add_argument('--start-epoch',
                    default=0,
                    type=int,
                    metavar='N',
                    help='manual epoch number (useful on restarts)')
parser.add_argument('--gpus', type=str, default=None)
# Output directory roots (logs, checkpoints, predictions, tensorboard runs).
parser.add_argument('--root_log', type=str, default='log')
parser.add_argument('--root_model', type=str, default='model')
parser.add_argument('--root_output', type=str, default='output')
parser.add_argument('--root_runs', type=str, default='runs')
parser.add_argument('--load_pretrained', type=str, default='')
parser.add_argument('--load_bn', action='store_true')
|
{"/utils.py": ["/paths.py", "/dataset/utils.py"], "/trainer_cls.py": ["/utils.py", "/dataset/utils.py", "/dataset/params.py"], "/main_cls.py": ["/config.py", "/model/network.py", "/dataset/me_dataset.py", "/utils.py", "/trainer_cls.py"]}
|
1,795
|
guanjz20/MM21_FME_solution
|
refs/heads/master
|
/preprocess/params.py
|
# Machine-local dataset paths -- edit to match the deployment host.
# CASME_2
CASME_2_ROOT = '/data/gjz_mm21/CASME_2_LongVideoFaceCropped/CASME_2_longVideoFaceCropped'
CASME_2_LABEL_DIR = '/data/gjz_mm21/CASME_2_LongVideoFaceCropped/CASME_2_longVideoFaceCropped/labels'
CASME_2_VIDEO_DIR = '/data/gjz_mm21/CASME_2_LongVideoFaceCropped/CASME_2_longVideoFaceCropped/longVideoFaceCropped'
# SAMM
SAMM_ROOT = '/data/gjz_mm21/SAMM'
SAMM_VIDEO_DIR = '/data/gjz_mm21/SAMM/SAMM_longvideos'
# resnet50 features
# Directory holding resnet50_ferplus_dag.py / .pth used for extraction.
MODEL_DIR = '/home/gjz/fmr_backbone/pytorch-benchmarks/ferplus'
|
{"/utils.py": ["/paths.py", "/dataset/utils.py"], "/trainer_cls.py": ["/utils.py", "/dataset/utils.py", "/dataset/params.py"], "/main_cls.py": ["/config.py", "/model/network.py", "/dataset/me_dataset.py", "/utils.py", "/trainer_cls.py"]}
|
1,796
|
guanjz20/MM21_FME_solution
|
refs/heads/master
|
/submit.py
|
import pandas as pd
import numpy as np
import os.path as osp
# Which challenge dataset this submission targets.
dataset = 'CASME_2'
# dataset = 'SAMM'
# Output CSV name and the experiment directory the predictions are read from.
submit_name = 'submit_{}.csv'.format(dataset)
result_dir_name = 'results'
submit_npy_name = 'match_regions_record_all.npy'
submit_id = 'done_exp_cls_ca_20210708-215035'
def convert_key(k, dataset):
    """Normalise a video path into the id used in the submission CSV.

    CASME_2 ids are the first 7 characters of the basename; SAMM uses the
    whole basename.

    Raises:
        NotImplementedError: for unknown dataset names.
    """
    base = osp.basename(k)
    if dataset == 'CASME_2':
        return base[:7]
    if dataset == 'SAMM':
        return base
    raise NotImplementedError
# Load imgs_dir -> list of [onset, offset, outcome] predicted regions.
data = np.load(osp.join('.', result_dir_name, submit_id, 'output',
                        submit_npy_name),
               allow_pickle=True).item()
metric = {'TP': 0, 'FN': 0, 'FP': 0}
with open(submit_name, 'w') as f:
    # First CSV line is the challenge task id (2: CASME_2, 1: SAMM).
    if dataset == 'CASME_2':
        f.write('2\r\n')
    elif dataset == 'SAMM':
        f.write('1\r\n')
    else:
        raise NotImplementedError
    for k, v in data.items():
        k = convert_key(k, dataset)
        assert isinstance(v[0], list)
        for line in v:
            # Each line ends with its match outcome ('TP'/'FP'/'FN').
            f.write(','.join([k, *[str(x) for x in line]]) + '\r\n')
            metric[line[-1]] += 1
# NOTE(review): these divisions raise ZeroDivisionError when no positives
# exist at all -- acceptable for a one-off submission script.
precision = metric['TP'] / (metric['TP'] + metric['FP'])
recall = metric['TP'] / (metric['TP'] + metric['FN'])
f_score = 2 * precision * recall / (precision + recall)
print('TP: {}, FP: {}, FN: {}'.format(metric['TP'], metric['FP'],
                                      metric['FN']))
print('P: {:.4f}, R: {:.4f}, F: {:.4f}'.format(precision, recall, f_score))
|
{"/utils.py": ["/paths.py", "/dataset/utils.py"], "/trainer_cls.py": ["/utils.py", "/dataset/utils.py", "/dataset/params.py"], "/main_cls.py": ["/config.py", "/model/network.py", "/dataset/me_dataset.py", "/utils.py", "/trainer_cls.py"]}
|
1,797
|
guanjz20/MM21_FME_solution
|
refs/heads/master
|
/trainer_cls.py
|
import time
from matplotlib.pyplot import winter
import torch
import torch.nn.functional as F
import numpy as np
import utils
import dataset.utils as dataset_utils
import dataset.params as DATASET_PARAMS
def train(dataloader, model, criterion, optimizer, epoch, logger, args,
          amp_autocast, loss_scaler):
    """Run one training epoch.

    Args:
        dataloader: yields (temp_data, img_features, annos, labels, _).
        model: called as model(temp_data, img_features) -> per-frame logits.
        criterion: loss over flattened (N*length, C) logits vs labels.
        optimizer: stepped once per batch.
        epoch: current epoch index (for logging only).
        logger: progress logger.
        args: parsed CLI config (length, print_freq, local_rank, ...).
        amp_autocast: context manager for mixed precision (or a no-op).
        loss_scaler: AMP loss scaler, or None for plain fp32 backward.
    """
    batch_time = utils.AverageMeter()
    data_time = utils.AverageMeter()
    losses = utils.AverageMeter()
    end = time.time()
    model.train()
    for i, data_batch in enumerate(dataloader):
        data_time.update(time.time() - end)
        temp_data, img_features, annos, labels, _ = data_batch
        batch_size = temp_data.shape[0]
        # # TODO: skip all zero samples
        # if (labels == 0).all() and np.random.rand() <= 0.7:
        #     end = time.time()
        #     # print('skip all zeros batch...')
        #     continue
        # keep_ids = []
        # for bi in range(batch_size):
        #     if not ((labels[bi] == 0).all() and np.random.rand() <= 0.5):
        #         keep_ids.append(bi)
        # # print('skip {} samples...'.format(batch_size - len(keep_ids)))
        # batch_size = len(keep_ids)  # m batch_size
        # if batch_size == 0:
        #     end = time.time()
        #     # print('skip all zeros batch...')
        #     continue
        # keep_ids = np.asarray(keep_ids)
        # temp_data = temp_data[keep_ids]
        # img_features = img_features[keep_ids]
        # annos = annos[keep_ids]
        # labels = labels[keep_ids]
        # label preprocess: merge macro(1)/micro(2) into one positive class.
        labels[labels > 0] = 1  # 1, 2 -> 1
        temp_data = temp_data.cuda()
        img_features = img_features.cuda()
        # annos = annos.cuda()
        labels = labels.cuda()
        with amp_autocast():
            out = model(temp_data, img_features)
            # flat labels: (bs, length, C) -> (bs*length, C) for the loss.
            out = out.reshape(batch_size * args.length, -1)
            labels = labels.reshape(-1)
            loss = criterion(out, labels)
        # backward + step
        optimizer.zero_grad()
        if loss_scaler is None:
            loss.backward()
            optimizer.step()
        else:
            # the scaler performs backward + unscale + step under AMP
            loss_scaler(loss, optimizer)
        # distirbuted reduce
        utils.reduce_loss(loss, args)
        losses.update(loss.item(), temp_data.size(0))
        batch_time.update(time.time() - end)
        if args.local_rank == 0 and (i % args.print_freq == 0
                                     or i == len(dataloader) - 1):
            output = ('Epoch: [{0}][{1}/{2}], lr: {lr:.5f}\t'
                      'Time {batch_time.val:.3f} ({batch_time.avg:.3f})\t'
                      'Data {data_time.val:.3f} ({data_time.avg:.3f})\t'
                      'Loss {loss.val:.4f} ({loss.avg:.4f})\t'.format(
                          epoch,
                          i + 1,
                          len(dataloader),
                          batch_time=batch_time,
                          data_time=data_time,
                          loss=losses,
                          lr=optimizer.param_groups[-1]['lr']))
            logger.info(output)
        torch.cuda.synchronize()
        end = time.time()
def validate(dataloader, model, criterion, logger, args, amp_autocast):
    """Evaluate ``model`` and collect per-frame predictions.

    Returns:
        (avg_loss, pred_and_gt) where pred_and_gt maps each frame image
        path to ``[predicted_label, ground_truth_label]``.
    """
    batch_time = utils.AverageMeter()
    losses = utils.AverageMeter()
    model.eval()
    end = time.time()
    # outs = []
    # annos = []
    # labels = []
    # pred_anno_dict = {}  # imgs_dir -> anno values
    # pred_label_dict = {}  # imgs_dir -> labels
    # anno_dict = {}
    # label_dict = {}
    pred_and_gt = {}  # img_p -> [pred, target]
    for i, data_batch in enumerate(dataloader):
        temp_data, img_features, annos, labels, seq_info = data_batch
        # label preprocess: merge macro(1)/micro(2) into one positive class.
        labels[labels > 0] = 1  # 1, 2 -> 1
        batch_size = labels.shape[0]
        temp_data = temp_data.cuda()
        img_features = img_features.cuda()
        # annos = annos.cuda()
        labels = labels.cuda()
        with torch.no_grad():
            with amp_autocast():
                out = model(temp_data, img_features)
                loss = criterion(out.reshape(batch_size * args.length, -1),
                                 labels.reshape(-1)).float()
        # Skip loss bookkeeping when the criterion produced NaN.
        if not torch.isnan(loss).any():
            # distirbuted reduce
            utils.reduce_loss(loss, args)
            losses.update(loss.item(), temp_data.size(0))
        batch_time.update(time.time() - end)
        if args.local_rank == 0 and (i % args.print_freq == 0
                                     or i == len(dataloader) - 1):
            output = ('Val: [{0}/{1}]\t'
                      'Time {batch_time.val:.3f} ({batch_time.avg:.3f})\t'
                      'Loss {loss.val:.4f} ({loss.avg:.4f})\t'.format(
                          i + 1,
                          len(dataloader),
                          batch_time=batch_time,
                          loss=losses))
            logger.info(output)
        torch.cuda.synchronize()
        # record: map each window position back to its source frame path.
        img_dirs, fronts, tails = seq_info
        for batch_idx in range(batch_size):
            img_dir = img_dirs[batch_idx]
            front = fronts[batch_idx].item()
            tail = tails[batch_idx].item()
            # assert batch_size == 1, 'batch size should be 1'
            img_dir_ps = dataset_utils.scan_jpg_from_img_dir(img_dir)
            # if not img_dir in pred_label_dict:
            #     pred_anno_dict[img_dir] = np.zeros(len(img_dir_ps))
            #     pred_label_dict[img_dir] = np.zeros(len(img_dir_ps))
            #     anno_dict = [img_dir] = np.zeros(len(img_dir_ps))
            #     label_dict = [img_dir] = np.zeros(len(img_dir_ps))
            pred_label = torch.argmax(out[batch_idx], dim=-1).reshape(-1)
            label = labels[batch_idx].reshape(-1)
            for j in range(front, tail):
                img_p = img_dir_ps[j]
                pred_and_gt[img_p] = [
                    pred_label[j - front].item(), label[j - front].item()
                ]
            # pred_anno_dict[img_dir][front:tail] += pred_annos
            # assert (pred_label_dict[img_dir][front:tail] == 0
            #         ).all(), 'should be no overlap'
            # pred_label_dict[img_dir][front:tail] += pred_labels
            # anno_dict[img_dir][front:tail] += annos
            # label_dict[img_dir][front:tail] += labels
        end = time.time()
    return losses.avg, pred_and_gt
|
{"/utils.py": ["/paths.py", "/dataset/utils.py"], "/trainer_cls.py": ["/utils.py", "/dataset/utils.py", "/dataset/params.py"], "/main_cls.py": ["/config.py", "/model/network.py", "/dataset/me_dataset.py", "/utils.py", "/trainer_cls.py"]}
|
1,798
|
guanjz20/MM21_FME_solution
|
refs/heads/master
|
/preprocess/casme_2_label_generation.py
|
'''
generate the emotion intensity of each frame
'''
# %%
import pdb
import os
import os.path as osp
from numpy.core.numeric import ones, ones_like
from numpy.lib.function_base import percentile
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import params
# %% ID2NAME and NAME2ID
# Build bidirectional subject (pid <-> name) and video (vid <-> name)
# lookup tables from the two naming-rule CSVs, then persist them as .npy.
# CASME_2_PID2NAME/NAME2PID
df = pd.read_csv(osp.join(params.CASME_2_LABEL_DIR, 'naming_rule1.csv'))
data = df.values
CASME_2_PID2NAME = {str(line[-1]): str(line[1]) for line in data}
CASME_2_NAME2PID = {str(line[1]): str(line[-1]) for line in data}
del df
del data
# CASME_2_VID2NAME
df = pd.read_csv(osp.join(params.CASME_2_LABEL_DIR, 'naming_rule2.csv'))
data = df.values
CASME_2_VID2NAME = {'{:04d}'.format(line[0]): str(line[1]) for line in data}
CASME_2_NAME2VID = {str(line[1]): '{:04d}'.format(line[0]) for line in data}
del df
del data
save_dict_dir = osp.join(params.CASME_2_ROOT, 'ID2NAME2ID')
os.makedirs(save_dict_dir, exist_ok=True)
for p, d in zip(
        ['pid2name', 'name2pid', 'vid2name', 'name2vid'],
    [CASME_2_PID2NAME, CASME_2_NAME2PID, CASME_2_VID2NAME, CASME_2_NAME2VID]):
    np.save(osp.join(save_dict_dir, p + '.npy'), d)
# %% main
# Build per-frame intensity / label tracks from the CASME_2 annotation CSV.
anno_dict = {}  # imgs_dir -> per-frame Gaussian emotion intensity
label_dict = {}  # 0: none, 1: macro, 2: micro
pred_gt = {}  # imgs_dir -> [[onset, offset, label], ...] (end exclusive)
bi_label_dict = {}  # img paths that fall inside any spotting interval
df = pd.read_csv(osp.join(params.CASME_2_LABEL_DIR, 'CASFEcode_final.csv'))
data = df.values
for row in data:
    # construct imgs dir for current row data
    pid = str(row[0])
    vname = row[1].split('_')[0]
    pname = CASME_2_PID2NAME[pid]
    vid = CASME_2_NAME2VID[vname]
    name_code = pname[1:]
    imgs_file_head = name_code + '_' + vid
    for file_name in os.listdir(osp.join(params.CASME_2_VIDEO_DIR, pname)):
        if file_name.startswith(imgs_file_head):
            imgs_dir = osp.join(params.CASME_2_VIDEO_DIR, pname, file_name)
            break
    # Frame names sorted by their trailing frame number.
    imgs_name = [
        name
        for name in sorted(os.listdir(imgs_dir),
                           key=lambda x: int(x.split('.')[0].split('_')[-1]))
        if '.jpg' in name
    ]  # first img name: img_1.jpg
    # Fill in a missing offset/apex (0 means unannotated in CASME_2).
    onset, apex, offset = row[2:2 + 3]
    onset, apex, offset = int(onset), int(apex), int(offset)
    if onset > 0 and apex > 0 and offset > 0:
        pass
    elif onset > 0 and apex > 0 and offset == 0:
        # Missing offset: mirror the onset->apex span after the apex.
        offset = min(len(imgs_name), apex + (apex - onset))
    elif onset > 0 and apex == 0 and offset > 0:
        # Missing apex: midpoint of the interval.
        apex = (onset + offset) // 2
    else:
        raise Exception
    try:
        assert onset < apex and apex < offset
    except:
        print('[Error][{}] onset: {}, apex: {}, offset: {}, '.format(
            imgs_dir, onset, apex, offset))
        continue  # skip this row
    if not imgs_dir in anno_dict:
        anno_dict[imgs_dir] = np.zeros(len(imgs_name))
        label_dict[imgs_dir] = np.zeros(len(imgs_name))
        pred_gt[imgs_dir] = []
        bi_label_dict[imgs_dir] = []
    # convert start index from 1 to 0
    onset -= 1
    apex -= 1
    offset -= 1
    # Gaussian intensity profile centred at the apex.
    # BUGFIX: the integer half-width is 0 when the apex is adjacent to
    # onset/offset, which crashed with ZeroDivisionError below; guard with
    # +1e-7 exactly as the SAMM counterpart script does.
    sigma = min(offset - apex, apex - onset) // 2 + 1e-7
    mu = apex
    func = lambda x: np.exp(-(x - mu)**2 / 2 / sigma / sigma
                            ) / sigma / np.sqrt(2 * np.pi)
    cumsum = 0
    for i in range(onset, offset + 1):
        anno_dict[imgs_dir][i] += func(i)
        cumsum += anno_dict[imgs_dir][i]  # diagnostic only; values are > 0
    # print('onset2offset cumsum: {:.2f}'.format(cumsum))
    # label: macro expressions -> 1, micro -> 2
    label_dict[imgs_dir][onset:offset +
                         1] = 1 if 'macro' in str(row[-2]).lower() else 2
    # pred_gt ground-truth interval (end exclusive)
    pred_gt[imgs_dir].append(
        [onset, offset + 1, 1 if 'macro' in str(row[-2]).lower() else 2])
    # bi_label: every frame path covered by the interval
    bi_label_dict[imgs_dir].extend(
        [osp.join(imgs_dir, name) for name in imgs_name[onset:offset + 1]])
np.save(osp.join(params.CASME_2_LABEL_DIR, 'anno_dict.npy'), anno_dict)
np.save(osp.join(params.CASME_2_LABEL_DIR, 'label_dict.npy'), label_dict)
np.save(osp.join(params.CASME_2_LABEL_DIR, 'pred_gt.npy'), pred_gt)
np.save(osp.join(params.CASME_2_LABEL_DIR, 'bi_label.npy'), bi_label_dict)
# %% visulization
# (intensity-curve plotting kept for reference)
# fig = plt.figure(figsize=(30, 50))
# for i, (k, v) in enumerate(anno_dict.items()):
#     fig.add_subplot((len(anno_dict) - 1) // 5 + 1, 5, i + 1)
#     plt.plot(v)
# fig.tight_layout()
# plt.savefig('./CASME_2_annos.pdf')
# plt.show()
# Plot the binarised (expression / no-expression) label track per video.
column = 5
fig = plt.figure(figsize=(30, ((len(label_dict) - 1) // column + 1) * 2))
for i, (k, v) in enumerate(label_dict.items()):
    v[v > 0] = 1  # 1,2 -> 1
    fig.add_subplot((len(label_dict) - 1) // column + 1, column, i + 1)
    plt.plot(v, 'r-')
    plt.title(osp.basename(k))
fig.tight_layout()
out_dir = './preprocess'
plt.savefig(osp.join(out_dir, 'ca_bi_label.pdf'))
plt.close('all')
# %%
|
{"/utils.py": ["/paths.py", "/dataset/utils.py"], "/trainer_cls.py": ["/utils.py", "/dataset/utils.py", "/dataset/params.py"], "/main_cls.py": ["/config.py", "/model/network.py", "/dataset/me_dataset.py", "/utils.py", "/trainer_cls.py"]}
|
1,799
|
guanjz20/MM21_FME_solution
|
refs/heads/master
|
/preprocess/samm_2_label_generation.py
|
'''
generate the emotion intensity of each frame
'''
# %%
import os
import pdb
import os.path as osp
from numpy.core.numeric import ones
from numpy.lib.function_base import percentile
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import params
# %% main
# Build per-frame intensity / label tracks from the SAMM annotation CSV.
anno_dict = {}  # intensity
label_dict = {}  # 0: none, 1: macro, 2: micro
pred_gt = {}  # [[onset, offset, label],...]
bi_label_dict = {}  # store all the img_ps fall into the spotting interval
df = pd.read_csv(osp.join(params.SAMM_ROOT, 'SAMM_labels.csv'))
data = df.values
for row in data:
    # construct imgs dir for current row data
    file_name = row[1][:5]
    imgs_dir = osp.join(params.SAMM_VIDEO_DIR, file_name)
    assert osp.exists(imgs_dir)
    # update emotion intensity and label
    imgs_name = [
        name
        for name in sorted(os.listdir(imgs_dir),
                           key=lambda x: int(x.split('.')[0].split('_')[-1]))
        if '.jpg' in name
    ]  # first img name: xxx_x_0001.jpg
    # SAMM uses -1 for a missing apex/offset annotation.
    onset, apex, offset = row[3:3 + 3]
    onset, apex, offset = int(onset), int(apex), int(offset)
    if onset > 0 and apex > 0 and offset > 0:
        pass
    elif onset > 0 and apex > 0 and offset == -1:
        # Missing offset: mirror the onset->apex span after the apex.
        offset = min(len(imgs_name), apex + (apex - onset))
    elif onset > 0 and apex == -1 and offset > 0:
        # Missing apex: midpoint of the interval.
        apex = (onset + offset) // 2
    else:
        raise Exception
    try:
        assert onset < apex and apex < offset
    except:
        print('[Error][{}] onset: {}, apex: {}, offset: {}, '.format(
            imgs_dir, onset, apex, offset))
        continue  # skip this row
    if not imgs_dir in anno_dict:
        anno_dict[imgs_dir] = np.zeros(len(imgs_name))
        label_dict[imgs_dir] = np.zeros(len(imgs_name))
        pred_gt[imgs_dir] = []
        bi_label_dict[imgs_dir] = []
    # convert start index from 1 to 0
    onset -= 1
    apex -= 1
    offset -= 1
    # intensity: Gaussian centred at the apex.
    # +1e-7 keeps sigma positive when apex is adjacent to onset/offset.
    sigma = min(offset - apex, apex - onset) // 2 + 1e-7
    if sigma <= 0:
        # defensive trap; unreachable with the +1e-7 guard above
        pdb.set_trace()
    mu = apex
    func = lambda x: np.exp(-(x - mu)**2 / 2 / sigma / sigma
                            ) / sigma / np.sqrt(2 * np.pi)
    cumsum = 0
    for i in range(onset, offset + 1):
        anno_dict[imgs_dir][i] += func(i)
        cumsum += anno_dict[imgs_dir][i]
    # print('onset2offset cumsum: {:.2f}'.format(cumsum))
    # label: macro expressions -> 1, micro -> 2
    label_dict[imgs_dir][onset:offset +
                         1] = 1 if 'macro' in str(row[-2]).lower() else 2
    # pred_gt ground-truth interval (end exclusive)
    pred_gt[imgs_dir].append(
        [onset, offset + 1, 1 if 'macro' in str(row[-2]).lower() else 2])
    # bi_label: every frame path covered by the interval
    bi_label_dict[imgs_dir].extend(
        [osp.join(imgs_dir, name) for name in imgs_name[onset:offset + 1]])
np.save(osp.join(params.SAMM_ROOT, 'anno_dict.npy'), anno_dict)
np.save(osp.join(params.SAMM_ROOT, 'label_dict.npy'), label_dict)
np.save(osp.join(params.SAMM_ROOT, 'pred_gt.npy'), pred_gt)
np.save(osp.join(params.SAMM_ROOT, 'bi_label.npy'), bi_label_dict)
# %% visulization
# fig = plt.figure(figsize=(30, 50))
# for i, (k, v) in enumerate(anno_dict.items()):
#     fig.add_subplot((len(anno_dict) - 1) // 5 + 1, 5, i + 1)
#     plt.plot(v)
# fig.tight_layout()
# plt.savefig('./SAMM_annos.pdf')
# plt.show()
# Per-video binarised label tracks.
column = 5
fig = plt.figure(figsize=(30, ((len(label_dict) - 1) // column + 1) * 2))
for i, (k, v) in enumerate(label_dict.items()):
    v[v > 0] = 1  # 1,2 -> 1
    fig.add_subplot((len(label_dict) - 1) // column + 1, column, i + 1)
    plt.plot(v, 'r-')
    plt.title(osp.basename(k))
fig.tight_layout()
out_dir = './preprocess'
plt.savefig(osp.join(out_dir, 'sa_bi_label.pdf'))
plt.close('all')
# %%
|
{"/utils.py": ["/paths.py", "/dataset/utils.py"], "/trainer_cls.py": ["/utils.py", "/dataset/utils.py", "/dataset/params.py"], "/main_cls.py": ["/config.py", "/model/network.py", "/dataset/me_dataset.py", "/utils.py", "/trainer_cls.py"]}
|
1,800
|
guanjz20/MM21_FME_solution
|
refs/heads/master
|
/preprocess/CNN_feature_extraction.py
|
from __future__ import division
from typing import Iterable
import cv2
import os
import time
import six
import sys
from tqdm import tqdm
import argparse
import pickle
import torch
import torch.nn as nn
import numpy as np
import pandas as pd
import torch.utils.data
import os.path as osp
import torch.backends.cudnn as cudnn
import torchvision.transforms as transforms
from glob import glob
import numbers
from PIL import Image, ImageOps
import random
import params
# for torch lower version
import torch._utils
from torch.nn import functional as F
# Backwards-compat shim: older torch versions lack _rebuild_tensor_v2,
# which is needed to unpickle checkpoints saved by newer versions.
try:
    torch._utils._rebuild_tensor_v2
except AttributeError:
    def _rebuild_tensor_v2(storage, storage_offset, size, stride,
                           requires_grad, backward_hooks):
        # Rebuild with the v1 helper, then restore the v2-only fields.
        tensor = torch._utils._rebuild_tensor(storage, storage_offset, size,
                                              stride)
        tensor.requires_grad = requires_grad
        tensor._backward_hooks = backward_hooks
        return tensor
    torch._utils._rebuild_tensor_v2 = _rebuild_tensor_v2
global parsed
import torch.utils.data as data
# multi thread setting
# Pin numeric libs and OpenCV to one thread each (DataLoader workers fork).
os.environ["MKL_NUM_THREADS"] = "1"
os.environ["NUMEXPR_NUM_THREADS"] = "1"
os.environ["OMP_NUM_THREADS"] = "1"
cv2.ocl.setUseOpenCL(False)
cv2.setNumThreads(0)
class SAMMDataset(data.Dataset):
    """Flat per-frame dataset over ``<root>/<video>/<frame>.jpg`` files."""

    def __init__(self, data_root, transform=None):
        super().__init__()
        # One sample per frame; videos are the first-level directories.
        self.img_ps = glob(osp.join(data_root, '*/*.jpg'))
        self.transform = transform

    def __len__(self):
        return len(self.img_ps)

    def __getitem__(self, index):
        path = self.img_ps[index]
        img = Image.open(path).convert('RGB')
        if self.transform is not None:
            img = self.transform(img)
        return img, path
class CASME_2Dataset(SAMMDataset):
    """CASME_2 layout: one extra subject directory level above the videos."""

    def __init__(self, data_root, transform=None):
        super().__init__(data_root, transform)
        # Override the parent's glob: <root>/<subject>/<video>/<frame>.jpg.
        self.img_ps = glob(osp.join(data_root, '*/*/*.jpg'))
def load_module_2or3(model_name, model_def_path):
    """Load a model-definition module on either Python 2 or Python 3.

    Args:
        model_name: name to register the loaded module under.
        model_def_path: path of the ``.py`` file with the definition.

    Return:
        The loaded python module.
    """
    if not six.PY3:
        # Python 2: plain importlib with the module's directory on sys.path.
        import importlib
        sys.path.insert(0, os.path.dirname(model_def_path))
        mod_name = os.path.splitext(os.path.basename(model_def_path))[0]
        return importlib.import_module(mod_name)
    # Python 3: load straight from the file location.
    import importlib.util
    spec = importlib.util.spec_from_file_location(model_name, model_def_path)
    mod = importlib.util.module_from_spec(spec)
    spec.loader.exec_module(mod)
    return mod
def load_model(model_name, MODEL_DIR):
    """Instantiate an imported PyTorch model by name.

    Expects ``<MODEL_DIR>/<model_name>.py`` (definition) and
    ``<MODEL_DIR>/<model_name>.pth`` (weights); the definition module must
    expose a factory function also called ``model_name``.

    Return:
        nn.Module: the constructed network with its weights loaded.
    """
    definition_path = osp.join(MODEL_DIR, model_name + '.py')
    weights = osp.join(MODEL_DIR, model_name + '.pth')
    module = load_module_2or3(model_name, definition_path)
    factory = getattr(module, model_name)
    return factory(weights_path=weights)
def compose_transforms(meta,
                       resize=256,
                       center_crop=True,
                       override_meta_imsize=False):
    """Compose preprocessing transforms for model.

    The imported models use a range of different preprocessing options,
    depending on how they were originally trained. Models trained in MatConvNet
    typically require input images that have been scaled to [0,255], rather
    than the [0,1] range favoured by PyTorch.

    Args:
        meta (dict): model preprocessing requirements
        resize (int) [256]: resize the input image to this size
        center_crop (bool) [True]: whether to center crop the image
        override_meta_imsize (bool) [False]: if true, use the value of `resize`
            to select the image input size, rather than the properties
            contained in meta (this option only applies when center cropping
            is not used).

    Return:
        (transforms.Compose): Composition of preprocessing transforms
    """
    normalize = transforms.Normalize(mean=meta['mean'], std=meta['std'])
    im_size = meta['imageSize']
    assert im_size[0] == im_size[1], 'expected square image size'
    if center_crop:
        transform_list = [
            transforms.Resize(resize),
            transforms.CenterCrop(size=(im_size[0], im_size[1]))
        ]
    else:
        if override_meta_imsize:
            im_size = (resize, resize)
        transform_list = [transforms.Resize(size=(im_size[0], im_size[1]))]
    transform_list += [transforms.ToTensor()]
    if meta['std'] == [1, 1, 1]:  # common amongst mcn models
        # MatConvNet-trained models expect inputs scaled to [0, 255].
        transform_list += [lambda x: x * 255.0]
    transform_list.append(normalize)
    return transforms.Compose(transform_list)
def augment_transforms(meta,
                       resize=256,
                       random_crop=True,
                       override_meta_imsize=False):
    """Like compose_transforms, but with a shared random crop + flip.

    A single random fraction ``v`` is drawn once here, so every image
    processed by this transform instance receives the same crop offset and
    the same flip decision (keeps frames of one clip spatially aligned).
    """
    normalize = transforms.Normalize(mean=meta['mean'], std=meta['std'])
    im_size = meta['imageSize']
    assert im_size[0] == im_size[1], 'expected square image size'
    if random_crop:
        # One shared fraction for both crop position and flip decision.
        v = random.random()
        transform_list = [
            transforms.Resize(resize),
            RandomCrop(im_size[0], v),
            RandomHorizontalFlip(v)
        ]
    else:
        if override_meta_imsize:
            im_size = (resize, resize)
        transform_list = [transforms.Resize(size=(im_size[0], im_size[1]))]
    transform_list += [transforms.ToTensor()]
    if meta['std'] == [1, 1, 1]:  # common amongst mcn models
        # MatConvNet-trained models expect inputs scaled to [0, 255].
        transform_list += [lambda x: x * 255.0]
    transform_list.append(normalize)
    return transforms.Compose(transform_list)
class RandomCrop(object):
    """Crop at a position determined by a shared fraction ``v``.

    The same ``v`` (in [0, 1]) reproduces the same crop window for every
    image of a group, so grouped frames stay aligned.
    """

    def __init__(self, size, v):
        if isinstance(size, numbers.Number):
            side = int(size)
            self.size = (side, side)
        else:
            self.size = size
        self.v = v

    def __call__(self, img):
        w, h = img.size
        th, tw = self.size
        assert (img.size[0] == w and img.size[1] == h)
        if w == tw and h == th:
            # Already the target size: nothing to crop.
            return img
        # Deterministic offset derived from the shared fraction.
        x1 = int((w - tw) * self.v)
        y1 = int((h - th) * self.v)
        return img.crop((x1, y1, x1 + tw, y1 + th))
class RandomHorizontalFlip(object):
    """Flip horizontally when the shared fraction ``v`` is below 0.5.

    Using one ``v`` per group keeps the flip decision consistent across all
    images belonging to the same clip.
    """

    def __init__(self, v):
        self.v = v

    def __call__(self, img):
        if self.v >= 0.5:
            return img
        return img.transpose(Image.FLIP_LEFT_RIGHT)
def get_vec(model, layer_name, image):
    """Extract an intermediate-layer activation for a batch of images.

    Registers a forward hook on ``model._modules[layer_name]`` that copies
    the layer output into a pre-allocated buffer during a normal forward
    pass. Buffer sizing is driven by the module-level ``parsed.layer_name``
    (CLI args), not the ``layer_name`` argument.

    Returns:
        ReLU-ed, squeezed feature tensor (GAP-pooled for conv layers).
    """
    bs = image.size(0)
    # 'pool5_full' hooks the same pool5 layer but skips the GAP below.
    if parsed.layer_name == 'pool5_full':
        layer_name = 'pool5'
    layer = model._modules.get(layer_name)
    # Pre-allocate the destination buffer with the layer's known shape.
    # NOTE(review): my_embedding stays undefined for any other layer name,
    # which would raise NameError below -- confirm supported names upstream.
    if parsed.layer_name == 'fc7':
        layer_output_size = 4096
        my_embedding = torch.zeros(bs, layer_output_size)
    elif parsed.layer_name == 'fc8':
        my_embedding = torch.zeros(bs, 7)
    elif parsed.layer_name == 'pool5' or parsed.layer_name == 'pool5_full':
        my_embedding = torch.zeros([bs, 512, 7, 7])
    elif parsed.layer_name == 'pool4':
        my_embedding = torch.zeros([bs, 512, 14, 14])
    elif parsed.layer_name == 'pool3':
        my_embedding = torch.zeros([bs, 256, 28, 28])
    elif parsed.layer_name == 'pool5_7x7_s1':
        my_embedding = torch.zeros([bs, 2048, 1, 1])
    elif parsed.layer_name == 'conv5_3_3x3_relu':
        my_embedding = torch.zeros([bs, 512, 7, 7])
    def copy_data(m, i, o):
        # Forward hook: snapshot the layer output into the CPU buffer.
        my_embedding.copy_(o.data)
    h = layer.register_forward_hook(copy_data)
    h_x = model(image)
    h.remove()
    if parsed.layer_name == 'pool5' or parsed.layer_name == 'conv5_3_3x3_relu':
        # Conv feature maps: global-average-pool 7x7 -> 1x1.
        GAP_layer = nn.AvgPool2d(kernel_size=[7, 7], stride=(1, 1))
        my_embedding = GAP_layer(my_embedding)
    return F.relu(my_embedding.squeeze())
def get_frame_index(frame_path):
    """Return the trailing frame number of a frame file path.

    E.g. ``/a/b/img_123.jpg`` -> 123 (expects ``..._<num>.<ext>`` naming).
    """
    stem = frame_path.split('/')[-1].split('.')[0]
    return int(stem.split('_')[-1])
def predict(data_loader, layer_name, model, des_dir):
    """Run the model over the loader, saving one ``.npy`` per frame.

    Features are written next to the source image (same path, ``.npy``
    suffix); ``des_dir`` is unused.
    """
    with torch.no_grad():
        for ims, img_path in tqdm(data_loader):
            ims = ims.cuda()
            output = get_vec(model, layer_name, ims)
            if not len(output.shape) == 2:
                # NOTE(review): non-2D outputs (e.g. a squeezed batch of 1)
                # are wrapped in lists so the zip below still pairs one
                # feature with one path -- confirm this matches the actual
                # dataloader batch layout.
                output = [
                    output,
                ]
                img_path = [
                    img_path,
                ]
            for feature, path in zip(output, img_path):
                basename = osp.basename(path)
                des_basename = basename.split('.')[0] + '.npy'
                des_path = path.replace(basename, des_basename)
                np.save(des_path, feature)
def feature_extraction(model, loader, des_dir):
    """Switch ``model`` to eval mode and extract features over ``loader``."""
    model.eval()
    predict(loader, parsed.layer_name, model, des_dir)
def main():
    """Build the FER-plus ResNet50 backbone and extract features for the
    dataset selected by `parsed.dataset` (SAMM or CASME_2)."""
    os.environ['CUDA_DEVICE_ORDER'] = 'PCI_BUS_ID'
    os.environ['CUDA_VISIBLE_DEVICES'] = '1'  # NOTE(review): GPU 1 hard-coded
    MODEL_DIR = params.MODEL_DIR
    model_name = 'resnet50_ferplus_dag'
    model = load_model(model_name, MODEL_DIR)
    model = model.cuda()
    meta = model.meta
    # Random-crop augmentation only when --augment was passed; otherwise a
    # deterministic preprocessing pipeline without center crop.
    preproc_transforms = compose_transforms(
        meta, center_crop=False) if not parsed.augment else augment_transforms(
            meta, random_crop=True)
    if parsed.dataset == 'SAMM':
        dataset = SAMMDataset(params.SAMM_VIDEO_DIR, preproc_transforms)
        # parsed.save_root = params.SAMM_FEATURE_DIR
    elif parsed.dataset == 'CASME_2':
        dataset = CASME_2Dataset(params.CASME_2_VIDEO_DIR, preproc_transforms)
        # parsed.save_root = params.CASME_2_FEATURE_DIR
    else:
        raise NotImplementedError
    data_loader = torch.utils.data.DataLoader(dataset,
                                              batch_size=4,
                                              num_workers=0,
                                              pin_memory=False)
    # des_dir is unused: features are written next to the source frames
    # (see predict); the commented code below kept the old destination logic.
    des_dir = None
    # des_dir = osp.join(
    #     parsed.save_root, '_'.join([
    #         '{}_features'.format(model_name), 'fps=' + str(parsed.fps),
    #         parsed.layer_name
    #     ]))
    # os.makedirs(des_dir, exist_ok=True)
    feature_extraction(model, data_loader, des_dir)
if __name__ == "__main__":
    # CLI for standalone feature extraction.
    parser = argparse.ArgumentParser(description='Run.')
    parser.add_argument('--refresh',
                        dest='refresh',
                        action='store_true',
                        help='refresh feature cache')
    parser.add_argument('--fps',
                        type=int,
                        default=0,
                        help='frames per second to extract')
    parser.add_argument('--layer_name', type=str, default='pool5_7x7_s1')
    parser.add_argument(
        '--augment',
        action="store_true",
        help='whether to extract augmented features for train set only ')
    parser.add_argument('--dataset', type=str, default='')
    parsed = parser.parse_args()
    # NOTE(review): this unconditionally overrides any --dataset value given
    # on the command line — presumably a temporary hard-code; confirm intent.
    parsed.dataset = 'SAMM'
    main()
|
{"/utils.py": ["/paths.py", "/dataset/utils.py"], "/trainer_cls.py": ["/utils.py", "/dataset/utils.py", "/dataset/params.py"], "/main_cls.py": ["/config.py", "/model/network.py", "/dataset/me_dataset.py", "/utils.py", "/trainer_cls.py"]}
|
1,801
|
guanjz20/MM21_FME_solution
|
refs/heads/master
|
/preprocess/openface/face_crop_align.py
|
import os
import os.path as osp
from tqdm import tqdm
from glob import glob
from video_processor import Video_Processor
import params
# OpenFace parameters
save_size = 224
OpenFace_exe = params.OpenFace_exe
quiet = True
nomask = True
grey = False
tracked_vid = False
noface_save = False
# dataset
video_root = params.video_root
# main: run OpenFace crop/align on every mp4/avi found under video_root
video_processor = Video_Processor(save_size, nomask, grey, quiet, tracked_vid,
                                  noface_save, OpenFace_exe)
video_ps = [p for pattern in ('*/*mp4', '*/*avi')
            for p in glob(osp.join(video_root, pattern))]
for video_p in tqdm(video_ps):
    stem = os.path.basename(video_p).split('.')[0]
    # each video gets a sibling "<name>_opface" output directory
    opface_output_dir = os.path.join(os.path.dirname(video_p),
                                     stem + "_opface")
    video_processor.process(video_p, opface_output_dir)
|
{"/utils.py": ["/paths.py", "/dataset/utils.py"], "/trainer_cls.py": ["/utils.py", "/dataset/utils.py", "/dataset/params.py"], "/main_cls.py": ["/config.py", "/model/network.py", "/dataset/me_dataset.py", "/utils.py", "/trainer_cls.py"]}
|
1,802
|
guanjz20/MM21_FME_solution
|
refs/heads/master
|
/model/utils.py
|
import torch.nn as nn
def init_weights(model):
    """Initialize `model`'s weights in-place.

    Conv layers get Kaiming-normal (fan-out, ReLU) weights, linear layers
    get Xavier-normal weights, and batch-norm layers get weight=1; every
    present bias is zeroed.
    """
    # The original iterated named_modules() but never used the name;
    # modules() is the idiomatic equivalent.
    for m in model.modules():
        if isinstance(m, (nn.Conv3d, nn.Conv2d)):
            nn.init.kaiming_normal_(m.weight,
                                    mode='fan_out',
                                    nonlinearity='relu')
            # nn.init.xavier_normal_(m.weight)
            if m.bias is not None:
                nn.init.constant_(m.bias, 0)
        elif isinstance(m, (nn.BatchNorm3d, nn.BatchNorm2d)):
            nn.init.constant_(m.weight, 1)
            nn.init.constant_(m.bias, 0)
        elif isinstance(m, nn.Linear):
            nn.init.xavier_normal_(m.weight)
            if m.bias is not None:
                nn.init.constant_(m.bias, 0)
|
{"/utils.py": ["/paths.py", "/dataset/utils.py"], "/trainer_cls.py": ["/utils.py", "/dataset/utils.py", "/dataset/params.py"], "/main_cls.py": ["/config.py", "/model/network.py", "/dataset/me_dataset.py", "/utils.py", "/trainer_cls.py"]}
|
1,803
|
guanjz20/MM21_FME_solution
|
refs/heads/master
|
/main_cls.py
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
from genericpath import exists
import os
from typing import Final
import cv2
import sys
from matplotlib.pyplot import xcorr
from numpy.random import f, sample, shuffle
from torch.utils.data import dataset
from config import parser
if len(sys.argv) > 1:
    # use shell args
    args = parser.parse_args()
    print('Use shell args.')
else:
    # Debug
    # Hard-coded debug configuration used when the script is launched with
    # no CLI arguments (e.g. from an IDE / debugger).
    args_list = [
        '--dataset',
        'SAMM',
        '--print-freq',
        '1',
        '--snap',
        'debug',
        '--data_option',
        'wt_diff',
        '--gpus',
        '0',
        '--batch_size',
        '2',
        '--input_size',
        '128',
        '--length',
        '64',
        '-L',
        '12',
        '--workers',
        '0',
    ]
    args = parser.parse_args(args_list)
# os setting
# Pin math/OpenCV threading to one thread so DataLoader workers do not
# oversubscribe the CPUs.
os.environ["MKL_NUM_THREADS"] = "1"
os.environ["NUMEXPR_NUM_THREADS"] = "1"
os.environ["OMP_NUM_THREADS"] = "1"
cv2.ocl.setUseOpenCL(False)
cv2.setNumThreads(0)
if args.gpus is not None:
    os.environ['CUDA_DEVICE_ORDER'] = 'PCI_BUS_ID'
    os.environ["CUDA_VISIBLE_DEVICES"] = args.gpus
import re
import logging
import time
import torch
import os.path as osp
import torch.nn as nn
import numpy as np
import pandas as pd
import torch.distributed as dist
from torch.nn import DataParallel
from torch.nn.parallel import DistributedDataParallel
from datetime import datetime
from tqdm import tqdm
from pprint import pformat
from timm.utils import setup_default_logging, NativeScaler, reduce_tensor, distribute_bn
from timm.data.distributed_sampler import OrderedDistributedSampler
from contextlib import suppress
from model.network import Two_Stream_RNN_Cls, load_pretrained_model
from dataset.me_dataset import SAMMDataset, CASME_2Dataset
import utils
import trainer_cls as trainer
# torch.multiprocessing.set_start_method('spawn')
torch.backends.cudnn.benchmark = True
# check resume
RESUME = osp.exists(args.resume)
# check finetune
# Finetuning specific subjects requires an existing run to resume from.
if len(args.finetune_list) > 0:
    assert RESUME
    FINETUNE = True
else:
    FINETUNE = False
_logger = logging.getLogger('train')
# resume
# Reuse the resumed run's directory, or build a timestamped one.
if RESUME:
    setattr(args, 'save_root', 'results/{}'.format(osp.basename(args.resume)))
else:
    snapshot_name = '_'.join(
        [args.snap, datetime.now().strftime("%Y%m%d-%H%M%S")])
    if len(args.store_name) == 0:
        args.store_name = snapshot_name
    setattr(args, 'save_root', 'results/{}'.format(args.store_name))
# make dirs
# Only rank 0 creates directories; other ranks wait briefly for them.
if args.local_rank == 0:
    utils.check_rootfolders(args)
else:
    time.sleep(1)
# setup logging
setup_default_logging(
    log_path=os.path.join(args.save_root, args.root_log, 'run.log'))
_logger.info("save experiment to :{}".format(args.save_root))
# save args
if args.local_rank == 0:
    args_string = pformat(args.__dict__)
    _logger.info(args_string)
# reset random
# Seed every RNG source for reproducibility.
torch.manual_seed(args.seed)
torch.cuda.manual_seed(args.seed)
torch.cuda.manual_seed_all(args.seed)
np.random.seed(args.seed)
# if distributed
if args.distributed and 'WORLD_SIZE' in os.environ:
    args.distributed = int(os.environ['WORLD_SIZE']) > 1
args.device = 'cuda'
args.world_size = 1
args.rank = 0 # global rank
if args.distributed:
    args.device = 'cuda:%d' % args.local_rank
    torch.cuda.set_device(args.local_rank)
    dist.init_process_group(backend='nccl', init_method='env://')
    args.world_size = dist.get_world_size()
    args.rank = dist.get_rank()
    _logger.info(
        'Training in distributed mode with multiple processes, 1 GPU per process. Process %d, total %d.'
        % (args.rank, args.world_size))
# else:
#     _logger.info('Training with a single process on 1 GPUs.')
assert args.rank >= 0
utils.synchronize()
# loss_fn
criterion = utils.Focal_Loss(alpha=args.focal_alpha)
# leave one subject out cross validation
img_dirs = utils.get_img_dirs(args.dataset)
img_dirs_dict = utils.leave_one_out(
    img_dirs, args.dataset) # key -> [train_set, val_set]
# finetuen and resume
# Restore the cross-validation bookkeeping dicts from a previous run, or
# start them empty.  total_MNA maps subject id -> (M, N, A) counts;
# match_regions_record_all maps subject id -> per-subject match records.
if RESUME:
    total_MNA = np.load(osp.join(args.resume, args.root_output,
                                 'cross_validation_MNA_dict.npy'),
                        allow_pickle=True).item()
    match_regions_record_all = np.load(osp.join(
        args.resume, args.root_output, 'match_regions_record_all.npy'),
                                       allow_pickle=True).item()
    if not FINETUNE:
        keys1 = list(total_MNA.keys())
        # keys2 = list(match_regions_record_all.keys())
        rm_key = keys1[-1] # after python 3.6, order is guaranteed
        if args.delete_last:
            # delete the last subject results
            total_MNA, match_regions_record_all = utils.delete_records(
                total_MNA, match_regions_record_all, rm_key)
            if args.local_rank == 0:
                _logger.info('resume from subject {} (include)'.format(rm_key))
        elif args.local_rank == 0:
            _logger.info('resume from subject {} (not include)'.format(rm_key))
    else:
        if args.local_rank == 0:
            _logger.info('finetune subjects: [{}]'.format(','.join(
                args.finetune_list)))
else:
    total_MNA = {} # store all cross-validation results
    match_regions_record_all = {}
utils.synchronize()
# Leave-one-subject-out cross-validation: each iteration trains on all
# subjects but `val_id` and validates on `val_id`.
for vi, (val_id, [train_dirs, val_dirs]) in enumerate(img_dirs_dict.items()):
    # leave {val_id} out...
    # FINETUNE has higher priority than RESUME
    if FINETUNE and (val_id not in args.finetune_list):
        continue # skip subjects that do not need finetune
    if RESUME and (not FINETUNE) and (val_id in total_MNA):
        continue # skip from resume
    if val_id in args.finetune_list:
        # delete records
        total_MNA, match_regions_record_all = utils.delete_records(
            total_MNA, match_regions_record_all, val_id)
    # Input channel count depends on the temporal-feature option.
    if args.data_option == 'diff':
        inchannel = args.L
    elif args.data_option == 'wt_diff':
        inchannel = 4 * args.L
    elif args.data_option == 'wt_dr':
        inchannel = (
            args.L + 1 - 11 +
            1) * 2 * 4 # gauss kernel size = 11, *2 = dr1,dr2, *4 = 4 bands
    # amp
    amp_autocast = suppress # do nothing
    loss_scaler = None
    if args.amp:
        amp_autocast = torch.cuda.amp.autocast
        loss_scaler = NativeScaler()
        if args.local_rank == 0:
            _logger.info(
                'Using native Torch AMP. Training in mixed precision.')
    else:
        if args.local_rank == 0:
            _logger.info('AMP not enabled. Training in float32.')
    # model
    # A fresh model is built per held-out subject.
    model = Two_Stream_RNN_Cls(mlp_hidden_units=args.hidden_units,
                               inchannel=inchannel,
                               outchannel=2)
    # load pretrained
    if osp.exists(args.load_pretrained):
        model = load_pretrained_model(model, args.load_pretrained,
                                      args.load_bn)
        if args.local_rank == 0:
            _logger.info('Load pretrained model from {}[load_bn: {}]'.format(
                args.load_pretrained, args.load_bn))
    # pytorch_total_params = sum(p.numel() for p in model.parameters()
    # if p.requires_grad)
    # print("Total Params: {}".format(pytorch_total_params))
    model = model.cuda()
    # setup synchronized BatchNorm for distributed training
    if args.distributed:
        model = torch.nn.SyncBatchNorm.convert_sync_batchnorm(model)
        # if args.local_rank == 0:
        # _logger.info(
        # 'Converted model to use Synchronized BatchNorm. WARNING: You may have issues if using '
        # 'zero initialized BN layers (enabled by default for ResNets) while sync-bn enabled.'
        # )
    # optimizer
    if args.optim == 'SGD':
        optimizer = torch.optim.SGD(
            [p for p in model.parameters() if p.requires_grad],
            args.lr,
            momentum=args.momentum,
            weight_decay=args.weight_decay)
    elif args.optim == 'Adam':
        optimizer = torch.optim.Adam(
            [p for p in model.parameters() if p.requires_grad],
            args.lr,
            weight_decay=args.weight_decay)
    else:
        raise NotImplementedError
    # setup distributed training
    if args.distributed:
        model = DistributedDataParallel(model,
                                        device_ids=[args.local_rank],
                                        find_unused_parameters=True)
    else:
        model = DataParallel(model).cuda()
    # dataset
    Dataset = SAMMDataset if args.dataset == 'SAMM' else CASME_2Dataset
    def create_dataset():
        # Build train/val datasets for this fold; the validation set uses
        # step == seq_len so its windows do not overlap.
        train_dataset = Dataset(
            mode='train',
            img_dirs=train_dirs,
            seq_len=args.length,
            step=args.step,
            # step=1000, # !!
            time_len=args.L,
            input_size=args.input_size,
            data_aug=args.data_aug,
            data_option=args.data_option)
        val_dataset = Dataset(
            mode='test',
            img_dirs=val_dirs,
            seq_len=args.length,
            step=args.length, # assert no overlap
            # step=1000, # !!
            time_len=args.L,
            input_size=args.input_size,
            data_aug=False,
            data_option=args.data_option)
        return train_dataset, val_dataset
    train_dataset, val_dataset = create_dataset()
    if args.distributed:
        val_sampler = OrderedDistributedSampler(val_dataset)
        train_sampler = torch.utils.data.distributed.DistributedSampler(
            train_dataset)
    else:
        val_sampler = None
        train_sampler = None
    train_loader = torch.utils.data.DataLoader(train_dataset,
                                               shuffle=train_sampler is None,
                                               sampler=train_sampler,
                                               batch_size=args.batch_size,
                                               drop_last=False,
                                               num_workers=args.workers,
                                               pin_memory=False)
    val_loader = torch.utils.data.DataLoader(val_dataset,
                                             batch_size=args.batch_size,
                                             shuffle=False,
                                             sampler=val_sampler,
                                             num_workers=0,
                                             pin_memory=False,
                                             drop_last=False)
    if args.local_rank == 0:
        _logger.info('<' * 10 + ' {} '.format(val_id) + '<' * 10)
    # Per-fold bookkeeping: best validation score/loss and an early-stop
    # counter of evaluations without improvement.
    best_f_score = -1000.0
    best_loss = 1000.0
    val_accum_epochs = 0
    for epoch in range(args.epochs):
        if train_sampler is not None:
            train_sampler.set_epoch(epoch)
        utils.adjust_learning_rate(optimizer, epoch, args.lr,
                                   args.weight_decay, args.lr_steps,
                                   args.lr_decay_factor)
        trainer.train(train_loader, model, criterion, optimizer, epoch,
                      _logger, args, amp_autocast, loss_scaler)
        utils.synchronize()
        # bn syn
        if args.distributed:
            if args.local_rank == 0:
                _logger.info("Distributing BatchNorm running means and vars")
            distribute_bn(model, args.world_size,
                          True) # true for reduce, false for broadcast
        # logging
        if (epoch + 1) % args.eval_freq == 0 or epoch == args.epochs - 1:
            loss_val, pred_and_gt = trainer.validate(val_loader, model,
                                                     criterion, _logger, args,
                                                     amp_autocast)
            # distributed synchronize
            pred_and_gt = utils.synchronize_pred_and_gt(
                pred_and_gt, epoch, args)
            # eval
            # Only rank 0 computes the metrics; other ranks use sentinel
            # values and receive the real ones via the synchronize calls.
            if args.local_rank == 0:
                precision, recall, f_score, MNA, match_regions_record = utils.evaluate_bi_labels(
                    pred_and_gt, val_id, epoch, args)
            else:
                f_score = -10.0
                MNA = (0, 0, 0)
                # precision, recall, f_score, MNA, match_regions_record = utils.evaluate_bi_labels(
                #     pred_and_gt, val_id, epoch, args)
            utils.synchronize()
            # synchronize
            f_score = utils.synchronize_f_score(f_score, args)
            _logger.info('f_score of processor {}: {:.4f}'.format(
                args.local_rank, f_score))
            MNA = utils.synchronize_list(MNA, args)
            _logger.info('MNA of processor {}: {}'.format(
                args.local_rank, MNA))
            is_equal_score = f_score == best_f_score
            is_best_loss = loss_val < best_loss
            best_loss = min(loss_val, best_loss)
            is_best_score = f_score > best_f_score
            best_f_score = max(best_f_score, f_score)
            # save checkpoint
            if args.local_rank == 0:
                _logger.info(
                    'Test[{}]: loss_val: {:.4f} (best: {:.4f}), f-score: {:.4f} (best: {:.4f})'
                    .format(epoch, loss_val, best_loss, f_score, best_f_score))
                utils.save_checkpoint(
                    {
                        'epoch': epoch + 1,
                        'state_dict': model.state_dict(),
                    },
                    is_best_score,
                    args.save_root,
                    args.root_model,
                    filename=val_id)
            utils.synchronize()
            # Keep the record when the score improved, or ties with fewer
            # false positives (MNA[1]); otherwise count toward early stop.
            if is_best_score or (is_equal_score and
                                 MNA[1] < total_MNA.get(val_id, [0, 0, 0])[1]):
                val_accum_epochs = 0
                total_MNA.update(
                    {val_id:
                     MNA}) # processor 0 need this record for branch selection
                if args.local_rank == 0:
                    match_regions_record_all.update(
                        match_regions_record
                    ) # only processor 0 need this record
                    out_dir = osp.join(args.save_root, args.root_output,
                                       val_id)
                    os.makedirs(out_dir, exist_ok=True)
                    np.save(osp.join(out_dir, 'match_regions_record_best.npy'),
                            match_regions_record)
                    # all
                    np.save(
                        osp.join(args.save_root, args.root_output,
                                 'cross_validation_MNA_dict.npy'), total_MNA)
                    np.save(
                        osp.join(args.save_root, args.root_output,
                                 'match_regions_record_all.npy'),
                        match_regions_record_all)
                    precision, recall, f_score = utils.calculate_metric_from_dict_MNA(
                        total_MNA)
                    _logger.info(
                        'Test[all] Avg f-score now: {:.4f}'.format(f_score))
                utils.synchronize()
            else:
                val_accum_epochs += 1
                if val_accum_epochs >= args.early_stop:
                    _logger.info(
                        "validation ccc did not improve over {} epochs, stop processor {}"
                        .format(args.early_stop, args.local_rank))
                    break
    # Per-subject summary after the epoch loop finishes or early-stops.
    if args.local_rank == 0:
        precision_all, recall_all, f_score_all = utils.calculate_metric_from_dict_MNA(
            total_MNA)
        _logger.critical(
            '[{}][{}]/[{}] f_score: {:.4f}, precision_all: {:.4f}, recall_all: {:.4f}, f_score_all: {:.4f}'
            .format(val_id, vi + 1, len(img_dirs_dict), best_f_score,
                    precision_all, recall_all, f_score_all))
# store results
# Final dump of the full cross-validation bookkeeping (rank 0 only).
if args.local_rank == 0:
    np.save(
        osp.join(args.save_root, args.root_output,
                 'cross_validation_MNA_dict.npy'), total_MNA)
    np.save(
        osp.join(args.save_root, args.root_output,
                 'match_regions_record_all.npy'), match_regions_record_all)
_logger.info('ALL DONE')
exit()
|
{"/utils.py": ["/paths.py", "/dataset/utils.py"], "/trainer_cls.py": ["/utils.py", "/dataset/utils.py", "/dataset/params.py"], "/main_cls.py": ["/config.py", "/model/network.py", "/dataset/me_dataset.py", "/utils.py", "/trainer_cls.py"]}
|
1,804
|
guanjz20/MM21_FME_solution
|
refs/heads/master
|
/dataset/utils.py
|
from albumentations.augmentations.transforms import GaussNoise
import cv2
import os
import numpy as np
import os.path as osp
import albumentations as alb
# from torch._C import Ident
# from torch.nn.modules.linear import Identity
class IsotropicResize(alb.DualTransform):
    """Albumentations transform: resize so the longer side equals `max_side`,
    preserving aspect ratio.

    `interpolation_down` / `interpolation_up` select the cv2 interpolation
    used when shrinking vs. enlarging.  Masks are resized with nearest-
    neighbor interpolation to keep label values intact.
    """
    def __init__(self,
                 max_side,
                 interpolation_down=cv2.INTER_AREA,
                 interpolation_up=cv2.INTER_CUBIC,
                 always_apply=False,
                 p=1):
        super(IsotropicResize, self).__init__(always_apply, p)
        self.max_side = max_side
        self.interpolation_down = interpolation_down
        self.interpolation_up = interpolation_up
    def apply(self,
              img,
              interpolation_down=cv2.INTER_AREA,
              interpolation_up=cv2.INTER_CUBIC,
              **params):
        # Delegate to the module-level helper that does the actual resize.
        return isotropically_resize_image(
            img,
            size=self.max_side,
            interpolation_down=interpolation_down,
            interpolation_up=interpolation_up)
    def apply_to_mask(self, img, **params):
        # Nearest-neighbor in both directions so mask labels are not blended.
        return self.apply(img,
                          interpolation_down=cv2.INTER_NEAREST,
                          interpolation_up=cv2.INTER_NEAREST,
                          **params)
    def get_transform_init_args_names(self):
        return ("max_side", "interpolation_down", "interpolation_up")
class Identity():
    """No-op transform: returns whatever it is given, unchanged."""
    def __init__(self):
        pass
    def __call__(self, x):
        return x
class GroupTrainTransform():
    """Apply randomly-sampled augmentations consistently to a group of images.

    Each augmentation is gated once per call by its own probability, then
    applied to every image in the group, so all frames of a clip receive
    the same set of transforms.
    """
    def __init__(self):
        # BUGFIX: the original assignments ended with trailing commas, which
        # wrapped every transform in a 1-tuple and made them uncallable.
        self.ImageCompression = alb.ImageCompression(quality_lower=60,
                                                     quality_upper=100,
                                                     p=1)
        self.GaussNoise = alb.GaussNoise(p=1)
        self.GaussianBlur = alb.GaussianBlur(blur_limit=(3, 5), p=1)
        self.HorizontalFlip = alb.HorizontalFlip(p=1)
        self.LightChange = alb.OneOf([
            alb.RandomBrightnessContrast(),
            alb.FancyPCA(),
            alb.HueSaturationValue()
        ],
                                     p=1)
        self.ShiftRotate = alb.ShiftScaleRotate(
            shift_limit=0.1,
            scale_limit=0.2,
            rotate_limit=10,
            border_mode=cv2.BORDER_CONSTANT,
            p=1)
    def _apply_aug(self, imgs, aug_method):
        # BUGFIX: `self` was missing from the original signature, so every
        # bound call raised TypeError.  Applies `aug_method` to each image
        # in-place and returns the list.
        for i, img in enumerate(imgs):
            imgs[i] = aug_method(image=img)['image']
        return imgs
    def __call__(self, imgs):
        # img compress
        if np.random.random() < 0.3:
            imgs = self._apply_aug(imgs, self.ImageCompression)
        # gauss noise
        if np.random.random() < 0.1:
            imgs = self._apply_aug(imgs, self.GaussNoise)
        # gauss blur
        if np.random.random() < 0.05:
            imgs = self._apply_aug(imgs, self.GaussianBlur)
        # flip
        if np.random.random() < 0.5:
            imgs = self._apply_aug(imgs, self.HorizontalFlip)
        # light
        if np.random.random() < 0.5:
            imgs = self._apply_aug(imgs, self.LightChange)
        # shift rotate
        if np.random.random() < 0.5:
            imgs = self._apply_aug(imgs, self.ShiftRotate)
        return imgs
class GroupTestTransform(Identity):
    """Test-time group transform: identity (no augmentation at eval time)."""
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
def get_group_transform(mode):
    """Return the group-level transform for `mode` ('train' or 'test')."""
    if mode == 'train':
        return GroupTrainTransform()
    if mode == 'test':
        return GroupTestTransform()
    raise NotImplementedError
def isotropically_resize_image(img,
                               size,
                               interpolation_down=cv2.INTER_AREA,
                               interpolation_up=cv2.INTER_CUBIC):
    """Resize `img` so its longer side equals `size`, keeping aspect ratio.

    Uses `interpolation_up` when enlarging and `interpolation_down` when
    shrinking; returns the input untouched if it is already the right size.
    """
    h, w = img.shape[:2]
    longer = max(w, h)
    if longer == size:
        return img
    scale = size / longer
    if w > h:
        new_w, new_h = size, int(h * scale)
    else:
        new_w, new_h = int(w * scale), size
    interpolation = interpolation_up if scale > 1 else interpolation_down
    return cv2.resize(img, (new_w, new_h), interpolation=interpolation)
def get_transform(mode, size):
    """Return the per-image albumentations pipeline for `mode`."""
    if mode == 'train':
        return get_train_transform(size)
    if mode == 'test':
        return get_test_transform(size)
    raise NotImplementedError
def get_test_transform(size):
    """Deterministic eval pipeline: isotropic resize then pad to size x size."""
    return alb.Compose([
        IsotropicResize(max_side=size),
        alb.PadIfNeeded(min_height=size,
                        min_width=size,
                        border_mode=cv2.BORDER_CONSTANT),
    ])
def get_train_transform(size):
    """Training pipeline: random flip, one of three isotropic resizes
    (varying interpolation), then pad to size x size.  The commented-out
    transforms below are disabled augmentations kept for reference."""
    return alb.Compose([
        # alb.GaussNoise(p=0.1),
        # alb.GaussianBlur(blur_limit=(3, 5), p=0.1),
        alb.HorizontalFlip(),
        alb.OneOf([
            IsotropicResize(max_side=size,
                            interpolation_down=cv2.INTER_AREA,
                            interpolation_up=cv2.INTER_CUBIC),
            IsotropicResize(max_side=size,
                            interpolation_down=cv2.INTER_AREA,
                            interpolation_up=cv2.INTER_LINEAR),
            IsotropicResize(max_side=size,
                            interpolation_down=cv2.INTER_LINEAR,
                            interpolation_up=cv2.INTER_LINEAR),
        ],
                  p=1),
        alb.PadIfNeeded(min_height=size,
                        min_width=size,
                        border_mode=cv2.BORDER_CONSTANT),
        # alb.OneOf([
        #     alb.RandomBrightnessContrast(),
        #     alb.FancyPCA(),
        #     alb.HueSaturationValue()
        # ],
        #           p=0.5),
        # alb.ToGray(p=0.2),
        # alb.ShiftScaleRotate(shift_limit=0.1,
        #                      scale_limit=0.1,
        #                      rotate_limit=5,
        #                      border_mode=cv2.BORDER_CONSTANT,
        #                      p=0.5),
    ])
def scan_jpg_from_img_dir(img_dir):
    """Return the .jpg paths in `img_dir`, sorted numerically by frame index.

    File names are expected to look like ``prefix_<index>.jpg``; sorting is
    on the integer ``<index>``.  Non-jpg entries are filtered out *before*
    sorting — the original applied the integer sort key to every directory
    entry first, so any stray file (e.g. ``notes.txt``) raised ValueError.
    """
    jpg_names = [name for name in os.listdir(img_dir) if '.jpg' in name]
    jpg_names.sort(key=lambda x: int(x.split('.')[0].split('_')[-1]))
    return [osp.join(img_dir, name) for name in jpg_names]
|
{"/utils.py": ["/paths.py", "/dataset/utils.py"], "/trainer_cls.py": ["/utils.py", "/dataset/utils.py", "/dataset/params.py"], "/main_cls.py": ["/config.py", "/model/network.py", "/dataset/me_dataset.py", "/utils.py", "/trainer_cls.py"]}
|
1,816
|
CellProfiling/cpias
|
refs/heads/master
|
/cpias/cli/__init__.py
|
# type: ignore
"""Provide a CLI."""
import logging
import click
from cpias import __version__
from cpias.cli.client import run_client
from cpias.cli.server import start_server
SETTINGS = dict(help_option_names=["-h", "--help"])
@click.group(
    options_metavar="", subcommand_metavar="<command>", context_settings=SETTINGS
)
@click.option("--debug", is_flag=True, help="Start server in debug mode.")
@click.version_option(__version__)
@click.pass_context
def cli(ctx, debug):
    """Run CPIAS server."""
    # Stash the debug flag on the click context so subcommands can read it.
    ctx.obj = {}
    ctx.obj["debug"] = debug
    if debug:
        logging.basicConfig(level=logging.DEBUG)
    else:
        logging.basicConfig(level=logging.INFO)
# Register the server/client subcommands on the group.
cli.add_command(start_server)
cli.add_command(run_client)
|
{"/cpias/cli/__init__.py": ["/cpias/__init__.py", "/cpias/cli/client.py", "/cpias/cli/server.py"], "/cpias/__init__.py": ["/cpias/const.py", "/cpias/message.py", "/cpias/server.py"], "/cpias/cli/client.py": ["/cpias/cli/common.py", "/cpias/client.py"], "/cpias/commands/__init__.py": ["/cpias/const.py", "/cpias/message.py"], "/cpias/message.py": ["/cpias/const.py"], "/cpias/cli/server.py": ["/cpias/cli/common.py", "/cpias/server.py"], "/cpias/client.py": ["/cpias/const.py"], "/cpias/process.py": ["/cpias/const.py", "/cpias/exceptions.py", "/cpias/server.py"], "/cpias/server.py": ["/cpias/commands/__init__.py", "/cpias/const.py", "/cpias/message.py"], "/tests/test_message.py": ["/cpias/message.py"], "/cpias/commands/hello.py": ["/cpias/commands/__init__.py", "/cpias/const.py", "/cpias/message.py", "/cpias/process.py", "/cpias/server.py"]}
|
1,817
|
CellProfiling/cpias
|
refs/heads/master
|
/cpias/__init__.py
|
"""Provide a server for image analysis."""
from .const import VERSION
from .message import Message
from .server import CPIAServer
__all__ = ["Message", "CPIAServer"]
__version__ = VERSION
|
{"/cpias/cli/__init__.py": ["/cpias/__init__.py", "/cpias/cli/client.py", "/cpias/cli/server.py"], "/cpias/__init__.py": ["/cpias/const.py", "/cpias/message.py", "/cpias/server.py"], "/cpias/cli/client.py": ["/cpias/cli/common.py", "/cpias/client.py"], "/cpias/commands/__init__.py": ["/cpias/const.py", "/cpias/message.py"], "/cpias/message.py": ["/cpias/const.py"], "/cpias/cli/server.py": ["/cpias/cli/common.py", "/cpias/server.py"], "/cpias/client.py": ["/cpias/const.py"], "/cpias/process.py": ["/cpias/const.py", "/cpias/exceptions.py", "/cpias/server.py"], "/cpias/server.py": ["/cpias/commands/__init__.py", "/cpias/const.py", "/cpias/message.py"], "/tests/test_message.py": ["/cpias/message.py"], "/cpias/commands/hello.py": ["/cpias/commands/__init__.py", "/cpias/const.py", "/cpias/message.py", "/cpias/process.py", "/cpias/server.py"]}
|
1,818
|
CellProfiling/cpias
|
refs/heads/master
|
/cpias/cli/client.py
|
# type: ignore
"""Provide a CLI to start a client."""
import asyncio
import click
from cpias.cli.common import common_tcp_options
from cpias.client import tcp_client
DEFAULT_MESSAGE = '{"cli": "client-1", "cmd": "hello", "dta": {"planet": "world"}}\n'
@click.command(options_metavar="<options>")
@click.option("--message", default=DEFAULT_MESSAGE, help="Message to send to server.")
@common_tcp_options
@click.pass_context
def run_client(ctx, message, host, port):
    """Run an async tcp client to connect to the server."""
    # The debug flag is placed on the click context by the parent `cli` group.
    debug = ctx.obj["debug"]
    asyncio.run(
        tcp_client(message, host=host, port=port), debug=debug,
    )
|
{"/cpias/cli/__init__.py": ["/cpias/__init__.py", "/cpias/cli/client.py", "/cpias/cli/server.py"], "/cpias/__init__.py": ["/cpias/const.py", "/cpias/message.py", "/cpias/server.py"], "/cpias/cli/client.py": ["/cpias/cli/common.py", "/cpias/client.py"], "/cpias/commands/__init__.py": ["/cpias/const.py", "/cpias/message.py"], "/cpias/message.py": ["/cpias/const.py"], "/cpias/cli/server.py": ["/cpias/cli/common.py", "/cpias/server.py"], "/cpias/client.py": ["/cpias/const.py"], "/cpias/process.py": ["/cpias/const.py", "/cpias/exceptions.py", "/cpias/server.py"], "/cpias/server.py": ["/cpias/commands/__init__.py", "/cpias/const.py", "/cpias/message.py"], "/tests/test_message.py": ["/cpias/message.py"], "/cpias/commands/hello.py": ["/cpias/commands/__init__.py", "/cpias/const.py", "/cpias/message.py", "/cpias/process.py", "/cpias/server.py"]}
|
1,819
|
CellProfiling/cpias
|
refs/heads/master
|
/cpias/exceptions.py
|
"""Provide exceptions."""
class CPIASError(Exception):
    """Base exception type for all CPIAS errors."""
|
{"/cpias/cli/__init__.py": ["/cpias/__init__.py", "/cpias/cli/client.py", "/cpias/cli/server.py"], "/cpias/__init__.py": ["/cpias/const.py", "/cpias/message.py", "/cpias/server.py"], "/cpias/cli/client.py": ["/cpias/cli/common.py", "/cpias/client.py"], "/cpias/commands/__init__.py": ["/cpias/const.py", "/cpias/message.py"], "/cpias/message.py": ["/cpias/const.py"], "/cpias/cli/server.py": ["/cpias/cli/common.py", "/cpias/server.py"], "/cpias/client.py": ["/cpias/const.py"], "/cpias/process.py": ["/cpias/const.py", "/cpias/exceptions.py", "/cpias/server.py"], "/cpias/server.py": ["/cpias/commands/__init__.py", "/cpias/const.py", "/cpias/message.py"], "/tests/test_message.py": ["/cpias/message.py"], "/cpias/commands/hello.py": ["/cpias/commands/__init__.py", "/cpias/const.py", "/cpias/message.py", "/cpias/process.py", "/cpias/server.py"]}
|
1,820
|
CellProfiling/cpias
|
refs/heads/master
|
/cpias/commands/__init__.py
|
"""Provide commands to the server."""
from functools import wraps
from types import ModuleType
from typing import Callable, Mapping
import pkg_resources
import voluptuous as vol
from voluptuous.humanize import humanize_error
from cpias.const import LOGGER
from cpias.message import Message
def get_commands() -> Mapping[str, ModuleType]:
    """Return a dict of command modules.

    Commands are discovered as setuptools entry points registered under the
    'cpias.commands' group and keyed by entry-point name.
    """
    commands = {
        entry_point.name: entry_point.load()
        for entry_point in pkg_resources.iter_entry_points("cpias.commands")
    }
    return commands
def validate(schema: dict) -> Callable:
    """Return a decorator for argument validation.

    The decorated coroutine receives its keyword data validated against
    `schema`; on failure the error is logged and an 'invalid' Message is
    returned instead of calling the wrapped handler.
    """
    vol_schema = vol.Schema(schema)
    def decorator(func: Callable) -> Callable:
        """Decorate a function and validate its arguments."""
        @wraps(func)
        async def check_args(server, message, **data):  # type: ignore
            """Check arguments."""
            try:
                data = vol_schema(data)
            except vol.Invalid as exc:
                err = humanize_error(data, exc)
                LOGGER.error(
                    "Received invalid data for command %s: %s", message.command, err
                )
                # Reply with an 'invalid' message carrying the rejected data.
                return Message(client=message.client, command="invalid", data=data)
            return await func(server, message, **data)
        return check_args
    return decorator
|
{"/cpias/cli/__init__.py": ["/cpias/__init__.py", "/cpias/cli/client.py", "/cpias/cli/server.py"], "/cpias/__init__.py": ["/cpias/const.py", "/cpias/message.py", "/cpias/server.py"], "/cpias/cli/client.py": ["/cpias/cli/common.py", "/cpias/client.py"], "/cpias/commands/__init__.py": ["/cpias/const.py", "/cpias/message.py"], "/cpias/message.py": ["/cpias/const.py"], "/cpias/cli/server.py": ["/cpias/cli/common.py", "/cpias/server.py"], "/cpias/client.py": ["/cpias/const.py"], "/cpias/process.py": ["/cpias/const.py", "/cpias/exceptions.py", "/cpias/server.py"], "/cpias/server.py": ["/cpias/commands/__init__.py", "/cpias/const.py", "/cpias/message.py"], "/tests/test_message.py": ["/cpias/message.py"], "/cpias/commands/hello.py": ["/cpias/commands/__init__.py", "/cpias/const.py", "/cpias/message.py", "/cpias/process.py", "/cpias/server.py"]}
|
1,821
|
CellProfiling/cpias
|
refs/heads/master
|
/cpias/message.py
|
"""Provide a model for messages sent and received by the server."""
from __future__ import annotations
import json
from enum import Enum
from typing import Optional, cast
from .const import LOGGER
class Message:
    """Represent a client/server message (client id, command, payload)."""
    def __init__(self, *, client: str, command: str, data: dict) -> None:
        """Set up message instance."""
        self.client = client
        self.command = command
        self.data = data
        # Expose copy() as an alias for __copy__ on the instance.
        self.copy = self.__copy__
    def __copy__(self) -> "Message":
        """Copy the message via an encode/decode round trip."""
        return cast(Message, self.decode(self.encode()))
    def __repr__(self) -> str:
        """Return the representation."""
        cls_name = type(self).__name__
        return (
            f"{cls_name}(client={self.client}, command={self.command}, "
            f"data={self.data})"
        )
    @classmethod
    def decode(cls, data: str) -> Optional["Message"]:
        """Decode a JSON line into a Message, or None on bad input."""
        # '{"cli": "client-1", "cmd": "hello", "dta": {"param1": "world"}}'
        try:
            parsed_data = json.loads(data.strip())
        except ValueError:
            LOGGER.error("Failed to parse message data: %s", data)
            return None
        if not isinstance(parsed_data, dict):
            LOGGER.error("Incorrect message data: %s", parsed_data)
            return None
        # Map the short wire keys back to constructor keyword names.
        kwargs: dict = {
            block.name: parsed_data.get(block.value) for block in MessageBlock
        }
        return cls(**kwargs)
    def encode(self) -> str:
        """Encode the message into a newline-terminated JSON string."""
        payload = {attr.value: getattr(self, attr.name) for attr in MessageBlock}
        return f"{json.dumps(payload)}\n"
class MessageBlock(Enum):
    """Represent a message block."""
    # Member names are Message attribute names; member values are the short
    # JSON keys used on the wire (see Message.encode/decode).
    client = "cli"
    command = "cmd"
    data = "dta"
|
{"/cpias/cli/__init__.py": ["/cpias/__init__.py", "/cpias/cli/client.py", "/cpias/cli/server.py"], "/cpias/__init__.py": ["/cpias/const.py", "/cpias/message.py", "/cpias/server.py"], "/cpias/cli/client.py": ["/cpias/cli/common.py", "/cpias/client.py"], "/cpias/commands/__init__.py": ["/cpias/const.py", "/cpias/message.py"], "/cpias/message.py": ["/cpias/const.py"], "/cpias/cli/server.py": ["/cpias/cli/common.py", "/cpias/server.py"], "/cpias/client.py": ["/cpias/const.py"], "/cpias/process.py": ["/cpias/const.py", "/cpias/exceptions.py", "/cpias/server.py"], "/cpias/server.py": ["/cpias/commands/__init__.py", "/cpias/const.py", "/cpias/message.py"], "/tests/test_message.py": ["/cpias/message.py"], "/cpias/commands/hello.py": ["/cpias/commands/__init__.py", "/cpias/const.py", "/cpias/message.py", "/cpias/process.py", "/cpias/server.py"]}
|
1,822
|
CellProfiling/cpias
|
refs/heads/master
|
/cpias/cli/server.py
|
# type: ignore
"""Provide a CLI to start the server."""
import asyncio
import click
from cpias.cli.common import common_tcp_options
from cpias.server import CPIAServer
@click.command(options_metavar="<options>")
@common_tcp_options
@click.pass_context
def start_server(ctx, host, port):
    """Start an async tcp server."""
    # Debug flag is set by the parent `cli` group on the click context.
    debug = ctx.obj["debug"]
    server = CPIAServer(host=host, port=port)
    try:
        asyncio.run(server.start(), debug=debug)
    except KeyboardInterrupt:
        # Ctrl-C: run the server's async shutdown in a fresh event loop.
        asyncio.run(server.stop(), debug=debug)
|
{"/cpias/cli/__init__.py": ["/cpias/__init__.py", "/cpias/cli/client.py", "/cpias/cli/server.py"], "/cpias/__init__.py": ["/cpias/const.py", "/cpias/message.py", "/cpias/server.py"], "/cpias/cli/client.py": ["/cpias/cli/common.py", "/cpias/client.py"], "/cpias/commands/__init__.py": ["/cpias/const.py", "/cpias/message.py"], "/cpias/message.py": ["/cpias/const.py"], "/cpias/cli/server.py": ["/cpias/cli/common.py", "/cpias/server.py"], "/cpias/client.py": ["/cpias/const.py"], "/cpias/process.py": ["/cpias/const.py", "/cpias/exceptions.py", "/cpias/server.py"], "/cpias/server.py": ["/cpias/commands/__init__.py", "/cpias/const.py", "/cpias/message.py"], "/tests/test_message.py": ["/cpias/message.py"], "/cpias/commands/hello.py": ["/cpias/commands/__init__.py", "/cpias/const.py", "/cpias/message.py", "/cpias/process.py", "/cpias/server.py"]}
|
1,823
|
CellProfiling/cpias
|
refs/heads/master
|
/cpias/client.py
|
"""Provide a test client for the CPIAServer."""
import asyncio
from cpias.const import LOGGER
async def tcp_client(message: str, host: str = "127.0.0.1", port: int = 8555) -> None:
    """Connect to server and send message.

    Reads the server's one-line version banner first, then sends *message*
    and logs the single-line reply before closing the connection.
    """
    reader, writer = await asyncio.open_connection(host, port)
    # The server greets every connection with a version line.
    data = await reader.readline()
    version_msg = data.decode()
    LOGGER.debug("Version message: %s", version_msg.strip())
    LOGGER.info("Send: %r", message)
    writer.write(message.encode())
    await writer.drain()
    data = await reader.readline()
    LOGGER.info("Received: %r", data.decode())
    LOGGER.debug("Closing the connection")
    writer.close()
    await writer.wait_closed()
if __name__ == "__main__":
    # Exercise each hello command variant, one client connection per message.
    messages = [
        '{"cli": "client-1", "cmd": "hello", "dta": {"planet": "world"}}\n',
        '{"cli": "client-1", "cmd": "hello_slow", "dta": {"planet": "slow"}}\n',
        '{"cli": "client-1", "cmd": "hello_persistent", '
        '"dta": {"planet": "Mars"}}\n',
        '{"cli": "client-1", "cmd": "hello_process", '
        '"dta": {"planet": "Neptune"}}\n',
    ]
    for msg in messages:
        asyncio.run(tcp_client(msg), debug=True)
|
{"/cpias/cli/__init__.py": ["/cpias/__init__.py", "/cpias/cli/client.py", "/cpias/cli/server.py"], "/cpias/__init__.py": ["/cpias/const.py", "/cpias/message.py", "/cpias/server.py"], "/cpias/cli/client.py": ["/cpias/cli/common.py", "/cpias/client.py"], "/cpias/commands/__init__.py": ["/cpias/const.py", "/cpias/message.py"], "/cpias/message.py": ["/cpias/const.py"], "/cpias/cli/server.py": ["/cpias/cli/common.py", "/cpias/server.py"], "/cpias/client.py": ["/cpias/const.py"], "/cpias/process.py": ["/cpias/const.py", "/cpias/exceptions.py", "/cpias/server.py"], "/cpias/server.py": ["/cpias/commands/__init__.py", "/cpias/const.py", "/cpias/message.py"], "/tests/test_message.py": ["/cpias/message.py"], "/cpias/commands/hello.py": ["/cpias/commands/__init__.py", "/cpias/const.py", "/cpias/message.py", "/cpias/process.py", "/cpias/server.py"]}
|
1,824
|
CellProfiling/cpias
|
refs/heads/master
|
/setup.py
|
"""Set up file for cpias package."""
from pathlib import Path
from setuptools import find_packages, setup
PROJECT_DIR = Path(__file__).parent.resolve()
README_FILE = PROJECT_DIR / "README.md"
LONG_DESCR = README_FILE.read_text(encoding="utf-8")
VERSION = (PROJECT_DIR / "cpias" / "VERSION").read_text().strip()
GITHUB_URL = "https://github.com/CellProfiling/cpias"
DOWNLOAD_URL = f"{GITHUB_URL}/archive/master.zip"
setup(
name="cpias",
version=VERSION,
description="Provide a server for image analysis",
long_description=LONG_DESCR,
long_description_content_type="text/markdown",
author="Martin Hjelmare",
author_email="marhje52@gmail.com",
url=GITHUB_URL,
download_url=DOWNLOAD_URL,
packages=find_packages(exclude=["contrib", "docs", "tests*"]),
python_requires=">=3.7",
install_requires=["click", "voluptuous"],
include_package_data=True,
entry_points={
"console_scripts": ["cpias = cpias.cli:cli"],
"cpias.commands": ["hello = cpias.commands.hello"],
},
license="Apache-2.0",
zip_safe=False,
classifiers=[
"Development Status :: 3 - Alpha",
"Framework :: AsyncIO",
"Intended Audience :: Developers",
"License :: OSI Approved :: Apache Software License",
"Natural Language :: English",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Topic :: Scientific/Engineering",
],
)
|
{"/cpias/cli/__init__.py": ["/cpias/__init__.py", "/cpias/cli/client.py", "/cpias/cli/server.py"], "/cpias/__init__.py": ["/cpias/const.py", "/cpias/message.py", "/cpias/server.py"], "/cpias/cli/client.py": ["/cpias/cli/common.py", "/cpias/client.py"], "/cpias/commands/__init__.py": ["/cpias/const.py", "/cpias/message.py"], "/cpias/message.py": ["/cpias/const.py"], "/cpias/cli/server.py": ["/cpias/cli/common.py", "/cpias/server.py"], "/cpias/client.py": ["/cpias/const.py"], "/cpias/process.py": ["/cpias/const.py", "/cpias/exceptions.py", "/cpias/server.py"], "/cpias/server.py": ["/cpias/commands/__init__.py", "/cpias/const.py", "/cpias/message.py"], "/tests/test_message.py": ["/cpias/message.py"], "/cpias/commands/hello.py": ["/cpias/commands/__init__.py", "/cpias/const.py", "/cpias/message.py", "/cpias/process.py", "/cpias/server.py"]}
|
1,825
|
CellProfiling/cpias
|
refs/heads/master
|
/cpias/const.py
|
"""Provide constants for cpias."""
import logging
from pathlib import Path
VERSION = (Path(__file__).parent / "VERSION").read_text().strip()
API_VERSION = "1.0.0"
LOGGER = logging.getLogger(__package__)
|
{"/cpias/cli/__init__.py": ["/cpias/__init__.py", "/cpias/cli/client.py", "/cpias/cli/server.py"], "/cpias/__init__.py": ["/cpias/const.py", "/cpias/message.py", "/cpias/server.py"], "/cpias/cli/client.py": ["/cpias/cli/common.py", "/cpias/client.py"], "/cpias/commands/__init__.py": ["/cpias/const.py", "/cpias/message.py"], "/cpias/message.py": ["/cpias/const.py"], "/cpias/cli/server.py": ["/cpias/cli/common.py", "/cpias/server.py"], "/cpias/client.py": ["/cpias/const.py"], "/cpias/process.py": ["/cpias/const.py", "/cpias/exceptions.py", "/cpias/server.py"], "/cpias/server.py": ["/cpias/commands/__init__.py", "/cpias/const.py", "/cpias/message.py"], "/tests/test_message.py": ["/cpias/message.py"], "/cpias/commands/hello.py": ["/cpias/commands/__init__.py", "/cpias/const.py", "/cpias/message.py", "/cpias/process.py", "/cpias/server.py"]}
|
1,826
|
CellProfiling/cpias
|
refs/heads/master
|
/cpias/cli/common.py
|
# type: ignore
"""Provide common CLI options."""
import click
def common_tcp_options(func):
    """Supply common tcp connection options."""
    # Build both option decorators, then apply port first and host second
    # (same application order as before).
    port_option = click.option(
        "-p",
        "--port",
        default=8555,
        show_default=True,
        type=int,
        help="TCP port of the connection.",
    )
    host_option = click.option(
        "-H",
        "--host",
        default="127.0.0.1",
        show_default=True,
        help="TCP address of the server.",
    )
    return host_option(port_option(func))
|
{"/cpias/cli/__init__.py": ["/cpias/__init__.py", "/cpias/cli/client.py", "/cpias/cli/server.py"], "/cpias/__init__.py": ["/cpias/const.py", "/cpias/message.py", "/cpias/server.py"], "/cpias/cli/client.py": ["/cpias/cli/common.py", "/cpias/client.py"], "/cpias/commands/__init__.py": ["/cpias/const.py", "/cpias/message.py"], "/cpias/message.py": ["/cpias/const.py"], "/cpias/cli/server.py": ["/cpias/cli/common.py", "/cpias/server.py"], "/cpias/client.py": ["/cpias/const.py"], "/cpias/process.py": ["/cpias/const.py", "/cpias/exceptions.py", "/cpias/server.py"], "/cpias/server.py": ["/cpias/commands/__init__.py", "/cpias/const.py", "/cpias/message.py"], "/tests/test_message.py": ["/cpias/message.py"], "/cpias/commands/hello.py": ["/cpias/commands/__init__.py", "/cpias/const.py", "/cpias/message.py", "/cpias/process.py", "/cpias/server.py"]}
|
1,827
|
CellProfiling/cpias
|
refs/heads/master
|
/cpias/process.py
|
"""Provide process tools."""
import asyncio
import signal
from multiprocessing import Pipe, Process
from multiprocessing.connection import Connection
from time import sleep
from typing import TYPE_CHECKING, Any, Callable, Dict, Tuple
from cpias.const import LOGGER
from cpias.exceptions import CPIASError
if TYPE_CHECKING:
from cpias.server import CPIAServer
class ReceiveError(CPIASError):
    """Error raised when receiving from a process failed."""
def create_process(
    server: "CPIAServer", create_callback: Callable, *args: Any
) -> Tuple[Callable, Callable]:
    """Create a persistent process.

    Spawn a worker process running ``func_wrapper(create_callback, conn,
    *args)`` and return an ``(async_recv, async_send)`` pair for talking to
    it over a pipe. The process is terminated on server stop.
    """
    parent_conn, child_conn = Pipe()
    prc = Process(target=func_wrapper, args=(create_callback, child_conn, *args))
    prc.start()

    def stop_process() -> None:
        """Stop process."""
        prc.terminate()

    server.on_stop(stop_process)

    async def async_recv() -> Any:
        """Receive data from the process connection asynchronously."""
        # Poll instead of blocking so the event loop stays responsive.
        while True:
            if not prc.is_alive() or parent_conn.poll():
                break
            await asyncio.sleep(0.5)
        if not prc.is_alive():
            raise ReceiveError
        try:
            # Connection.recv blocks, so run it in the thread pool.
            return await server.add_executor_job(parent_conn.recv)
        except EOFError as exc:
            LOGGER.debug("Nothing more to receive")
            raise ReceiveError from exc

    async def async_send(data: Dict[Any, Any]) -> None:
        """Send data to the process."""
        # NOTE(review): Connection.send can block on a full pipe buffer;
        # presumably payloads here are small enough — confirm.
        parent_conn.send(data)

    return async_recv, async_send
def func_wrapper(create_callback: Callable, conn: Connection, *args: Any) -> None:
    """Wrap a function with connection to receive and send data.

    Runs in the child process: builds the worker callback once, then loops
    receiving input, running the callback and sending back the result until
    the connection closes or a termination signal arrives.
    """
    running = True

    # pylint: disable=unused-argument
    def handle_signal(signum: int, frame: Any) -> None:
        """Handle signal."""
        # Flip the loop flag and close the pipe so pending waits bail out.
        nonlocal running
        running = False
        conn.close()

    signal.signal(signal.SIGTERM, handle_signal)
    signal.signal(signal.SIGINT, handle_signal)
    try:
        callback = create_callback(*args)
    except Exception as exc:  # pylint: disable=broad-except
        LOGGER.error("Failed to create callback: %s", exc)
        return
    while running:
        # Inner loop: poll with a short sleep so a signal can interrupt
        # the wait instead of blocking forever in recv().
        while running:
            if conn.poll():
                break
            sleep(0.5)
        try:
            data = conn.recv()
        except EOFError:
            LOGGER.debug("Nothing more to receive")
            break
        except OSError:
            LOGGER.debug("Connection is closed")
            break
        try:
            result = callback(data)
        except Exception as exc:  # pylint: disable=broad-except
            LOGGER.error("Failed to run callback: %s", exc)
            break
        if not running:
            break
        try:
            conn.send(result)
        except ValueError:
            LOGGER.error("Failed to send result %s", result)
        except OSError:
            LOGGER.debug("Connection is closed")
            break
    LOGGER.debug("Exiting process")
|
{"/cpias/cli/__init__.py": ["/cpias/__init__.py", "/cpias/cli/client.py", "/cpias/cli/server.py"], "/cpias/__init__.py": ["/cpias/const.py", "/cpias/message.py", "/cpias/server.py"], "/cpias/cli/client.py": ["/cpias/cli/common.py", "/cpias/client.py"], "/cpias/commands/__init__.py": ["/cpias/const.py", "/cpias/message.py"], "/cpias/message.py": ["/cpias/const.py"], "/cpias/cli/server.py": ["/cpias/cli/common.py", "/cpias/server.py"], "/cpias/client.py": ["/cpias/const.py"], "/cpias/process.py": ["/cpias/const.py", "/cpias/exceptions.py", "/cpias/server.py"], "/cpias/server.py": ["/cpias/commands/__init__.py", "/cpias/const.py", "/cpias/message.py"], "/tests/test_message.py": ["/cpias/message.py"], "/cpias/commands/hello.py": ["/cpias/commands/__init__.py", "/cpias/const.py", "/cpias/message.py", "/cpias/process.py", "/cpias/server.py"]}
|
1,828
|
CellProfiling/cpias
|
refs/heads/master
|
/cpias/server.py
|
"""Provide an image analysis server."""
import asyncio
import concurrent.futures
import logging
from typing import Any, Callable, Coroutine, Dict, Optional
from .commands import get_commands
from .const import API_VERSION, LOGGER, VERSION
from .message import Message
class CPIAServer:
    """Represent an image analysis server."""

    # pylint: disable=too-many-instance-attributes

    def __init__(self, host: str = "localhost", port: int = 8555) -> None:
        """Set up server instance."""
        self.host = host
        self.port = port
        self.server: Optional[asyncio.AbstractServer] = None
        self.serv_task: Optional[asyncio.Task] = None
        # Maps command name -> async handler, filled by plug-in modules.
        self.commands: Dict[str, Callable] = {}
        self._on_stop_callbacks: list = []
        self._pending_tasks: list = []
        # Tasks are only tracked during shutdown so wait_for_tasks can drain them.
        self._track_tasks = False
        # Free-form storage for command modules (e.g. persistent state).
        self.store: dict = {}

    async def start(self) -> None:
        """Start server."""
        LOGGER.debug("Starting server")
        # Let every discovered command module register its handlers.
        commands = get_commands()
        for module in commands.values():
            module.register_command(self)  # type: ignore
        server = await asyncio.start_server(
            self.handle_conn, host=self.host, port=self.port
        )
        self.server = server
        async with server:
            self.serv_task = asyncio.create_task(server.serve_forever())
            LOGGER.info("Serving at %s:%s", self.host, self.port)
            await self.serv_task

    async def stop(self) -> None:
        """Stop the server."""
        LOGGER.info("Server shutting down")
        self._track_tasks = True
        for stop_callback in self._on_stop_callbacks:
            stop_callback()
        self._on_stop_callbacks.clear()
        await self.wait_for_tasks()
        if self.serv_task is not None:
            self.serv_task.cancel()
            await asyncio.sleep(0)  # Let the event loop cancel the task.

    def on_stop(self, callback: Callable) -> None:
        """Register a callback that should be called on server stop."""
        self._on_stop_callbacks.append(callback)

    def register_command(self, command_name: str, command_func: Callable) -> None:
        """Register a command function."""
        self.commands[command_name] = command_func

    async def handle_conn(
        self, reader: asyncio.StreamReader, writer: asyncio.StreamWriter
    ) -> None:
        """Handle a connection."""
        # Send server version and server api version as welcome message.
        version_msg = f"CPIAServer version: {VERSION}, api version: {API_VERSION}\n"
        writer.write(version_msg.encode())
        await writer.drain()
        await self.handle_comm(reader, writer)
        LOGGER.debug("Closing the connection")
        writer.close()
        await writer.wait_closed()

    async def handle_comm(
        self, reader: asyncio.StreamReader, writer: asyncio.StreamWriter
    ) -> None:
        """Handle communication between client and server."""
        addr = writer.get_extra_info("peername")
        while True:
            data = await reader.readline()
            if not data:
                break
            msg = Message.decode(data.decode())
            if not msg:
                # TODO: Send invalid message message. # pylint: disable=fixme
                continue
            cmd_func = self.commands.get(msg.command)
            if cmd_func is None:
                LOGGER.warning("Received unknown command %s from %s", msg.command, addr)
                # TODO: Send unknown command message. # pylint: disable=fixme
                continue
            LOGGER.debug("Received %s from %s", msg, addr)
            LOGGER.debug("Executing command %s", msg.command)
            # NOTE(review): assumes msg.data is a dict; a message without a
            # "dta" block would make **msg.data raise — confirm upstream checks.
            reply = await cmd_func(self, msg, **msg.data)
            LOGGER.debug("Sending: %s", reply)
            # Message.encode() -> str, then str.encode() -> bytes for the wire.
            data = reply.encode().encode()
            writer.write(data)
            await writer.drain()

    def add_executor_job(self, func: Callable, *args: Any) -> Coroutine:
        """Schedule a function to be run in the thread pool.

        Return a task.
        """
        loop = asyncio.get_running_loop()
        task = loop.run_in_executor(None, func, *args)
        if self._track_tasks:
            self._pending_tasks.append(task)
        return task

    async def run_process_job(self, func: Callable, *args: Any) -> Any:
        """Run a job in the process pool."""
        loop = asyncio.get_running_loop()
        # A fresh pool per call; the context manager waits for completion.
        with concurrent.futures.ProcessPoolExecutor() as pool:
            task = loop.run_in_executor(pool, func, *args)
            if self._track_tasks:
                self._pending_tasks.append(task)
            result = await task
        return result

    def create_task(self, coro: Coroutine) -> asyncio.Task:
        """Schedule a coroutine on the event loop.

        Use this helper to make sure the task is cancelled on server stop.

        Return a task.
        """
        task = asyncio.create_task(coro)
        if self._track_tasks:
            self._pending_tasks.append(task)
        return task

    async def wait_for_tasks(self) -> None:
        """Wait for all pending tasks."""
        # Yield once so tasks created just before shutdown get registered.
        await asyncio.sleep(0)
        while self._pending_tasks:
            LOGGER.debug("Waiting for pending tasks")
            pending = [task for task in self._pending_tasks if not task.done()]
            self._pending_tasks.clear()
            if pending:
                await asyncio.wait(pending)
            else:
                await asyncio.sleep(0)
def main() -> None:
    """Run server."""
    logging.basicConfig(level=logging.DEBUG, format="%(name)s: %(message)s")
    server = CPIAServer()
    try:
        asyncio.run(server.start(), debug=True)
    except KeyboardInterrupt:
        # Ctrl-C: run the shutdown coroutine on a fresh event loop.
        asyncio.run(server.stop(), debug=True)


if __name__ == "__main__":
    main()
|
{"/cpias/cli/__init__.py": ["/cpias/__init__.py", "/cpias/cli/client.py", "/cpias/cli/server.py"], "/cpias/__init__.py": ["/cpias/const.py", "/cpias/message.py", "/cpias/server.py"], "/cpias/cli/client.py": ["/cpias/cli/common.py", "/cpias/client.py"], "/cpias/commands/__init__.py": ["/cpias/const.py", "/cpias/message.py"], "/cpias/message.py": ["/cpias/const.py"], "/cpias/cli/server.py": ["/cpias/cli/common.py", "/cpias/server.py"], "/cpias/client.py": ["/cpias/const.py"], "/cpias/process.py": ["/cpias/const.py", "/cpias/exceptions.py", "/cpias/server.py"], "/cpias/server.py": ["/cpias/commands/__init__.py", "/cpias/const.py", "/cpias/message.py"], "/tests/test_message.py": ["/cpias/message.py"], "/cpias/commands/hello.py": ["/cpias/commands/__init__.py", "/cpias/const.py", "/cpias/message.py", "/cpias/process.py", "/cpias/server.py"]}
|
1,829
|
CellProfiling/cpias
|
refs/heads/master
|
/tests/test_message.py
|
"""Provide tests for message."""
from cpias.message import Message
def test_message_decode():
    """Test message decode."""
    raw = '{"cli": "client-1", "cmd": "hello", "dta": {"param1": "world"}}'
    msg = Message.decode(raw)
    assert msg.client == "client-1"
    assert msg.command == "hello"
    assert msg.data == {"param1": "world"}
def test_decode_bad_message():
    """Test decode bad message."""
    # Neither invalid JSON nor a non-object JSON document yields a message.
    for bad_input in ("bad", '["val1", "val2"]'):
        assert not Message.decode(bad_input)
def test_message_encode():
    """Test message encode."""
    # encode() must round-trip what decode() parsed.
    original = '{"cli": "client-1", "cmd": "hello", "dta": {"param1": "world"}}\n'
    assert Message.decode(original).encode() == original
|
{"/cpias/cli/__init__.py": ["/cpias/__init__.py", "/cpias/cli/client.py", "/cpias/cli/server.py"], "/cpias/__init__.py": ["/cpias/const.py", "/cpias/message.py", "/cpias/server.py"], "/cpias/cli/client.py": ["/cpias/cli/common.py", "/cpias/client.py"], "/cpias/commands/__init__.py": ["/cpias/const.py", "/cpias/message.py"], "/cpias/message.py": ["/cpias/const.py"], "/cpias/cli/server.py": ["/cpias/cli/common.py", "/cpias/server.py"], "/cpias/client.py": ["/cpias/const.py"], "/cpias/process.py": ["/cpias/const.py", "/cpias/exceptions.py", "/cpias/server.py"], "/cpias/server.py": ["/cpias/commands/__init__.py", "/cpias/const.py", "/cpias/message.py"], "/tests/test_message.py": ["/cpias/message.py"], "/cpias/commands/hello.py": ["/cpias/commands/__init__.py", "/cpias/const.py", "/cpias/message.py", "/cpias/process.py", "/cpias/server.py"]}
|
1,830
|
CellProfiling/cpias
|
refs/heads/master
|
/cpias/commands/hello.py
|
"""Provide the hello command."""
from typing import TYPE_CHECKING, Callable, Optional, Tuple
from cpias.commands import validate
from cpias.const import LOGGER
from cpias.message import Message
from cpias.process import ReceiveError, create_process
if TYPE_CHECKING:
from cpias.server import CPIAServer
# pylint: disable=unused-argument
def register_command(server: "CPIAServer") -> None:
"""Register the hello command."""
server.register_command("hello", hello)
server.register_command("hello_slow", hello_slow)
server.register_command("hello_persistent", hello_persistent)
server.register_command("hello_process", hello_process)
@validate({"planet": str})
async def hello(
server: "CPIAServer", message: Message, planet: Optional[str] = None
) -> Message:
"""Run the hello command."""
if planet is None:
planet = "Jupiter"
LOGGER.info("Hello %s!", planet)
return message
@validate({"planet": str})
async def hello_slow(
server: "CPIAServer", message: Message, planet: Optional[str] = None
) -> Message:
"""Run the slow hello command."""
if planet is None:
planet = "Jupiter"
result = await server.run_process_job(do_cpu_work)
LOGGER.info("Hello %s! The result is %s", planet, result)
reply = message.copy()
reply.data["result"] = result
return reply
@validate({"planet": str})
async def hello_persistent(
server: "CPIAServer", message: Message, planet: Optional[str] = None
) -> Message:
"""Run the persistent hello command.
This command creates a state the first time it's run.
"""
if planet is None:
planet = "Jupiter"
if "hello_persistent_state" not in server.store:
server.store["hello_persistent_state"] = create_state()
command_task = server.store["hello_persistent_state"]
old_planet, new_planet = command_task(planet)
LOGGER.info(
"Hello! The old planet was %s. The new planet is %s", old_planet, new_planet
)
reply = message.copy()
reply.data["old_planet"] = old_planet
reply.data["new_planet"] = new_planet
return reply
@validate({"planet": str})
async def hello_process(
server: "CPIAServer", message: Message, planet: Optional[str] = None
) -> Message:
"""Run the process hello command.
This command creates a process the first time it's run.
"""
if planet is None:
planet = "Jupiter"
if "hello_process" not in server.store:
server.store["hello_process"] = create_process(server, create_state)
recv, send = server.store["hello_process"]
await send(planet)
try:
old_planet, new_planet = await recv()
except ReceiveError:
return message
LOGGER.info(
"Hello! The old planet was %s. The new planet is %s", old_planet, new_planet
)
reply = message.copy()
reply.data["old_planet"] = old_planet
reply.data["new_planet"] = new_planet
return reply
def do_cpu_work(limit: int = 10 ** 7) -> int:
    """Do work that should run in the process pool.

    Return the sum of squares of all integers below *limit*. The bound was
    previously hard-coded; it is now a parameter whose default preserves the
    original behavior.
    """
    return sum(i * i for i in range(limit))
def create_state() -> Callable:
    """Initialize state.

    Return a closure that remembers the last value it was called with,
    starting from "init".
    """
    state: str = "init"

    def change_state(new_state: str) -> Tuple[str, str]:
        """Do work that should change state."""
        nonlocal state
        # Swap in the new state and report the transition.
        previous, state = state, new_state
        return previous, new_state

    return change_state
|
{"/cpias/cli/__init__.py": ["/cpias/__init__.py", "/cpias/cli/client.py", "/cpias/cli/server.py"], "/cpias/__init__.py": ["/cpias/const.py", "/cpias/message.py", "/cpias/server.py"], "/cpias/cli/client.py": ["/cpias/cli/common.py", "/cpias/client.py"], "/cpias/commands/__init__.py": ["/cpias/const.py", "/cpias/message.py"], "/cpias/message.py": ["/cpias/const.py"], "/cpias/cli/server.py": ["/cpias/cli/common.py", "/cpias/server.py"], "/cpias/client.py": ["/cpias/const.py"], "/cpias/process.py": ["/cpias/const.py", "/cpias/exceptions.py", "/cpias/server.py"], "/cpias/server.py": ["/cpias/commands/__init__.py", "/cpias/const.py", "/cpias/message.py"], "/tests/test_message.py": ["/cpias/message.py"], "/cpias/commands/hello.py": ["/cpias/commands/__init__.py", "/cpias/const.py", "/cpias/message.py", "/cpias/process.py", "/cpias/server.py"]}
|
1,835
|
b2aff6009/crawler
|
refs/heads/master
|
/tests/testutils.py
|
import os
import sys
import inspect
current_dir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
parent_dir = os.path.dirname(current_dir)
sys.path.insert(0, parent_dir)
import crawler as crawler
def find_gen(baseSettings, tests):
    """Drive generator() for each test case and check count and names.

    Each test is (settings-overrides, expected-count, expected-names).
    NOTE(review): "settings = baseSettings" aliases (does not copy) the
    shared dict, so overrides persist across cases — confirm intended.
    """
    for i, test in enumerate(tests):
        settings = baseSettings
        for key,val in test[0].items():
            settings[key] = val
        myCrawler = crawler.createCrawler(settings)
        # Reset the memo so earlier cases don't suppress results.
        myCrawler.memo = []
        gen = myCrawler.generator()
        cnt = 0
        results = []
        try:
            while True:
                name = next(gen)
                results.append(name)
                assert name in test[2], "Unexpected file ({}) appeared in found files. During Test: {}".format(name, i)
                cnt += 1
        except StopIteration:
            # Exhausted generator: verify the total count.
            assert cnt == test[1], "Found {} instead of {} {} files".format(cnt, test[1], test[0])
def find_list(baseSettings, tests):
    """Drive getList() for each test case and check count and names.

    Each test is (settings-overrides, expected-count, expected-names).
    """
    for i,test in enumerate(tests):
        settings = baseSettings
        for key,val in test[0].items():
            settings[key] = val
        myCrawler = crawler.createCrawler(settings)
        myCrawler.memo = []
        try:
            results = myCrawler.getList()
            assert len(results) == test[1], "Found {} instead of {} files".format(len(results), test[1])
            if len(test[2]) > 0:
                for name in results:
                    assert name in test[2], "Unexpected file ({}) in Test {} appeared in found files. Expected {}".format(name, i, test[2])
        except ValueError as VE:
            # getList raises ValueError only when onlyOnce is disabled.
            assert settings["onlyOnce"] == False, "Unexpected exeption raises"
singleReturnCnt = 0  # module-level counter shared with the nested callback


def callback_singleReturn(baseSettings, tests):
    """Drive process() in single-return mode, counting callback firings."""
    global singleReturnCnt
    settings = baseSettings
    settings["onlyOnce"] = False
    for test in tests:
        for key,val in test[0].items():
            settings[key] = val
        # Reset the counter before each case.
        singleReturnCnt = 0
        def callback (file):
            global singleReturnCnt
            if len(test[2]) > 0:
                assert file in test[2], "Couldn't find file ({}) in {}".format(file, test[2])
            singleReturnCnt +=1
        myCrawler = crawler.createCrawler(settings, callback)
        myCrawler.process()
        assert singleReturnCnt == test[1], "Found {} instead of {} files".format(singleReturnCnt, test[1])
def callback_listReturn(baseSettings, tests):
    """Drive process() in list-return mode: callback gets the whole list."""
    settings = baseSettings
    settings["singleReturn"] = False
    for test in tests:
        for key,val in test[0].items():
            settings[key] = val
        # list mode requires onlyOnce, otherwise getList raises.
        settings["onlyOnce"] = True
        def callback (files):
            if len(test[2]) > 0:
                for file in files:
                    assert file in test[2], "Couldn't find file ({}) in {}".format(file, test[2])
            assert len(files) == test[1], "Found {} instead of {} files".format(len(files), test[1])
        myCrawler = crawler.createCrawler(settings, callback)
        myCrawler.memo = []
        myCrawler.process()
|
{"/tests/testutils.py": ["/crawler.py"], "/tests/test_crawler.py": ["/crawler.py"], "/tests/test_googlecrawler.py": ["/crawler.py"], "/tests/test_localcrawler.py": ["/crawler.py"]}
|
1,836
|
b2aff6009/crawler
|
refs/heads/master
|
/tests/test_crawler.py
|
import pytest
import os
import sys
import inspect
import json
import datetime
current_dir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
parent_dir = os.path.dirname(current_dir)
sys.path.insert(0, parent_dir)
import crawler as crawler
# Shared settings template; individual tests mutate this dict in place.
baseSettings = {
    "type": "local",
    "memo" : "./tests/testdata/memo.json",
    "onlyOnce" : True,
    #LocalCrawler Settings
    "path" : "./tests/testdata/",
    "extension" : "",
    #GoogleCrawler Settings
    "credentialPath" : "./dummy-credentials.json",
}
def test_createCrawlerFactory():
    """The factory must create the crawler class matching each type name."""
    settings = baseSettings
    expected = {
        "local": crawler.localCrawler,
        "google": crawler.googleCrawler,
        "git": crawler.gitCrawler,
    }
    for type_name, crawler_cls in expected.items():
        settings["type"] = type_name
        myCrawler = crawler.createCrawler(settings)
        testCrawler = crawler_cls(settings)
        assert type(myCrawler) == type(testCrawler),"Wrong crawler type was created. Created crawler was: {}".format(type(myCrawler))
def test_save():
    """save() must persist appended memo entries to the memo file."""
    dummyName = "dummyMemo"
    mCrawler = crawler.createCrawler(baseSettings)
    mCrawler.memo.append(dummyName)
    mCrawler.save()
    # Read the memo file back and verify the entry landed on disk.
    with open(baseSettings["memo"], 'rb') as f:
        data = json.load(f)
    assert dummyName in data, "Didn't found {} in {}".format(dummyName, baseSettings["memo"])
def test_load():
    """A freshly created crawler must load the existing memo file."""
    dummyName = "dummyLoad"
    data = [dummyName]
    # Seed the memo file, then let the crawler constructor load it.
    with open(baseSettings['memo'], 'w') as f:
        json.dump(data, f, indent=4)
    mCrawler = crawler.createCrawler(baseSettings)
    assert len(mCrawler.memo) == 1, "Crawler memo contains not exactly one item"
    assert mCrawler.memo[0] == dummyName, "Crawlers memo contains {} instead of {}".format(mCrawler.memo[0], dummyName)
cnt = 0  # module-level counter shared with the service callback


def test_service():
    """Service mode must keep cycling until the callback disables it."""
    global cnt
    settings = baseSettings
    settings["service"] = True
    settings["sleep"] = 1
    settings["onlyOnce"] = True
    cnt = 0
    mId = 3
    cycles = 10
    def callback(file, id, processingCrawler):
        global cnt
        cnt = cnt + 1
        assert id == mId, "Argurments doesn't match the expected. Got {} instead of {}".format(id, mId)
        if cnt >= cycles:
            # Turning the service flag off ends the process() loop.
            processingCrawler.settings["service"] = False
    mCrawler = crawler.createCrawler(settings, callback)
    startTime = datetime.datetime.now()
    mCrawler.process(mId, mCrawler)
    endTime = datetime.datetime.now()
    diffTime = endTime - startTime
    def checkTime(seconds):
        # Allow one sleep period of slack in either direction.
        if seconds > (cycles-1)*settings["sleep"] and seconds < (cycles+1)*settings["sleep"]:
            return True
        return False
    assert checkTime(diffTime.seconds), "Test took {}s, expceted time would be {}s".format(diffTime.seconds, cycles*settings["sleep"])
    assert cnt == cycles, "Wrong number of cycles. Got {} instead of {}".format(cnt, cycles)
if __name__ == "__main__":
    # Run the service test directly when executed as a script.
    #test_createCrawlerFactory()
    test_service()
|
{"/tests/testutils.py": ["/crawler.py"], "/tests/test_crawler.py": ["/crawler.py"], "/tests/test_googlecrawler.py": ["/crawler.py"], "/tests/test_localcrawler.py": ["/crawler.py"]}
|
1,837
|
b2aff6009/crawler
|
refs/heads/master
|
/crawler.py
|
import os
import json
import time
import sys
def createCrawler(settings, callback = None):
    """Create the crawler class named by settings["type"] (default "local")."""
    selector = {
        "local" : localCrawler,
        "google" : googleCrawler,
        "git" : gitCrawler
    }
    crawler_cls = selector[settings.get("type", "local")]
    return crawler_cls(settings, callback)
class Crawler:
    """Base crawler: memo handling, persistence and the processing loop.

    Subclasses implement generator() to yield found items.
    """

    def __init__(self, settings, callback = None):
        """Store settings, load the memo and remember the callback."""
        self.settings = settings
        self.debug = settings.get("debug",0)
        self.loadMemo()
        self.callback = callback
        if self.debug > 0:
            print("Crawler: initilised.")

    def generator(self):
        """Yield found items; meant to be overridden by subclasses."""
        if self.debug > 2:
            print("Crawler: Generator")
        pass

    def getList(self):
        """Return all found items as a list.

        Raises ValueError when onlyOnce is disabled, since the generator
        would then yield already-seen items forever.
        """
        if(self.settings["onlyOnce"] == True):
            return list(self.generator())
        raise ValueError("onlyOnce option is disabled, this would lead to an infinity list")

    def loadMemo(self):
        """Load the memo file into self.memo, creating it when missing."""
        if self.settings["onlyOnce"] == True:
            if self.debug > 2:
                print("Crawler: read Memo.")
            if os.path.isfile(self.settings["memo"]) == False:
                self.memo = []
                with open(self.settings["memo"], 'w+') as f:
                    json.dump(self.memo, f, indent = 4)
            with open(self.settings["memo"], 'rb') as f:
                self.memo = json.load(f)
        else:
            self.memo = []

    def save(self):
        """Persist self.memo to the memo file."""
        with open(self.settings["memo"], 'w') as f:
            json.dump(self.memo, f, indent = 4)

    def process(self, *args):
        """Run the crawl loop, firing the callback for found items.

        Raises ValueError when no callback was supplied. Loops while the
        "service" setting is truthy; errors are reported and retried after
        a back-off sleep.
        """
        if self.callback == None:
            raise ValueError("Callback function is not defined, which is needed to the process call. You might want to use generator() instead.")
        firstRun = True
        if self.debug > 0:
            print("Crawler: process")
        while self.settings.get("service", False) or firstRun:
            firstRun = False
            try:
                if self.settings.get("singleReturn",False) == True:
                    for myfile in self.generator():
                        if self.debug > 3:
                            print("Crawler: fire callback with file: {}".format(myfile))
                        self.callback(myfile, *args)
                else:
                    files = self.getList()
                    if self.debug > 3:
                        print("Crawler: fire callback with files: {}".format(", ".join(files)))
                    self.callback(files, *args)
                time.sleep(self.settings.get("sleep", 1))
            except Exception:
                # Bug fix: this was a bare "except:", which also swallowed
                # KeyboardInterrupt/SystemExit and made a service loop
                # impossible to stop with Ctrl-C.
                print("Oops!", sys.exc_info()[0], "occured.")
                time.sleep(self.settings.get("sleep", 1)*10)
class localCrawler(Crawler):
    """Crawler that walks a local directory tree for matching files."""

    def __init__(self, settings, callback = None):
        super().__init__(settings, callback)

    def generator(self):
        """Yield paths under settings["path"] matching settings["extension"]."""
        super().generator()
        if self.debug > 3:
            print("Crawler: local crawls thru {}".format(self.settings["path"]))
        for subdir, dirs, files in os.walk(self.settings["path"]):
            for filename in files:
                if self.debug > 5:
                    print("Crawler: Test file {}".format(filename))
                # Guard clauses: skip non-matching extensions and, in
                # onlyOnce mode, anything already memoized.
                if not filename.lower().endswith(self.settings["extension"].lower()):
                    continue
                filepath = os.path.join(subdir, filename)
                if self.debug > 4:
                    print("Crawler: found file {}".format(filepath))
                if self.settings["onlyOnce"] and filepath in self.memo:
                    continue
                self.memo.append(filepath)
                self.save()
                if self.debug > 4:
                    print("Crawler: yield file {}".format(filepath))
                yield filepath
import gspread
from oauth2client.service_account import ServiceAccountCredentials
class googleCrawler(Crawler):
    """Crawler for Google Sheets via gspread service-account credentials."""

    def __init__(self, settings, callback = None):
        super().__init__(settings, callback)
        self.scope = ['https://spreadsheets.google.com/feeds', 'https://www.googleapis.com/auth/drive']
        self.creds = ServiceAccountCredentials.from_json_keyfile_name(settings["credentialPath"], self.scope)
        self.client = gspread.authorize(self.creds)

    def generator(self):
        # Yields spreadsheet titles (or "sheet/worksheet" paths when
        # enableWorksheets is set); with returnType != "path" the gspread
        # objects themselves are yielded. Filtering is substring match on
        # the titles. Unlike localCrawler, nothing is memoized here.
        sheets = self.client.openall()
        for sheet in sheets:
            if (self.settings["spreadsheets"] not in sheet.title):
                continue
            if (self.settings["enableWorksheets"] == False):
                if (self.settings["returnType"] == "path"):
                    yield sheet.title
                else:
                    yield sheet
            else:
                for worksheet in sheet.worksheets():
                    if (self.settings["worksheets"] not in worksheet.title):
                        continue
                    if (self.settings["returnType"] == "path"):
                        yield sheet.title + "/" + worksheet.title
                    else:
                        yield worksheet

    def search(self):
        # NOTE(review): this method references self.reader, self.dumpMemo and
        # a dict-style self.memo, none of which are defined in this class or
        # its base, and the first openall() result is unused — this looks
        # dead/broken; confirm before relying on it.
        sheets = self.client.openall()
        self.reader.setFile(self.settings.get("path"))
        self.sheets = self.reader.getSheets()
        result = []
        for sheet in self.sheets:
            if sheet not in self.settings["skip"]:
                if self.settings["onlyOnce"] == False or sheet not in self.memo.get("files"):
                    self.memo.get("files").append(sheet)
                    result.append(sheet)
        self.dumpMemo()
        return result
class gitCrawler(Crawler):
    """Crawler stub for git repositories (no generator implemented here)."""
    def __init__(self, settings, callback = None):
        # Bug fix: the callback argument was silently dropped; forward it to
        # the base class like the sibling crawler classes do.
        super().__init__(settings, callback)
|
{"/tests/testutils.py": ["/crawler.py"], "/tests/test_crawler.py": ["/crawler.py"], "/tests/test_googlecrawler.py": ["/crawler.py"], "/tests/test_localcrawler.py": ["/crawler.py"]}
|
1,838
|
b2aff6009/crawler
|
refs/heads/master
|
/tests/test_googlecrawler.py
|
import pytest
import os
import sys
import inspect
# Make the repository root importable so `crawler` and `testutils` resolve
# regardless of the directory pytest is launched from.
current_dir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
parent_dir = os.path.dirname(current_dir)
sys.path.insert(0, parent_dir)
import crawler as crawler
import testutils as tu
# Baseline settings for a google crawler under test; each case in `tests`
# overrides a few keys.
baseSettings = {
    "type": "google",
    "memo" : "./tests/testdata/memo.json",
    "onlyOnce" : True,
    "service" : False,
    "sleep" : 1,
    "singleReturn" : True,
    #google specific settings
    "credentialPath" : "./dummy-credentials.json",
    "spreadsheets" : "",
    "worksheets": "",
    "enableWorksheets": False,
    "returnType" : "path"
}
# Each case: [settings overrides, expected hit count, expected paths].
tests = [
    [{"enableWorksheets": False, "spreadsheets": "", "worksheets": ""}, 2, ["Dummy1", "Dummy2"]],
    [{"enableWorksheets": False, "spreadsheets": "1", "worksheets": ""}, 1, ["Dummy1"]],
    [{"enableWorksheets": True, "spreadsheets": "", "worksheets": ""}, 5, ["Dummy1/Test1", "Dummy1/Test2","Dummy1/Test3", "Dummy2/Sheet1","Dummy2/Sheet2" ]],
    [{"enableWorksheets": True, "spreadsheets": "1", "worksheets": ""}, 3, ["Dummy1/Test1", "Dummy1/Test2","Dummy1/Test3"]],
    [{"enableWorksheets": True, "spreadsheets": "", "worksheets": "1"}, 2, ["Dummy1/Test1", "Dummy2/Sheet1"]],
    [{"enableWorksheets": True, "spreadsheets": "1", "worksheets": "1"}, 1, ["Dummy1/Test1"]],
]
def test_create_google_crawler():
    # Smoke test: the factory accepts the base settings without raising.
    settings = baseSettings
    crawler.createCrawler(settings)
def test_find_gen():
    tu.find_gen(baseSettings, tests)
def test_find_list():
    tu.find_list(baseSettings, tests)
def test_callback_singleReturn():
    tu.callback_singleReturn(baseSettings, tests)
def test_callback_listReturn():
    tu.callback_listReturn(baseSettings, tests)
if __name__ == '__main__':
    # Allow running this one scenario directly without pytest.
    test_callback_listReturn()
|
{"/tests/testutils.py": ["/crawler.py"], "/tests/test_crawler.py": ["/crawler.py"], "/tests/test_googlecrawler.py": ["/crawler.py"], "/tests/test_localcrawler.py": ["/crawler.py"]}
|
1,839
|
b2aff6009/crawler
|
refs/heads/master
|
/tests/test_localcrawler.py
|
import pytest
import os
import sys
import inspect
# Make the repository root importable so `crawler` and `testutils` resolve
# regardless of the directory pytest is launched from.
current_dir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
parent_dir = os.path.dirname(current_dir)
sys.path.insert(0, parent_dir)
import crawler as crawler
import testutils as tu
# Baseline settings for a filesystem crawler over the bundled test data.
baseSettings = {
    "type": "local",
    "memo" : "./tests/testdata/memo.json",
    "onlyOnce" : True,
    "path" : "./tests/testdata/",
    "extension" : "",
    "service" : False,
    "sleep" : 1,
    "singleReturn" : True
}
# Each case: [settings overrides, expected hit count, expected file paths].
tests = [
    [{"extension": ".csv"}, 5, ["./tests/testdata/test1.csv",
        "./tests/testdata/test2.csv",
        "./tests/testdata/test3.csv",
        "./tests/testdata/test4.csv",
        "./tests/testdata/test5.csv"]],
    [{"extension": ".xml"}, 2, ["./tests/testdata/test1.xml",
        "./tests/testdata/test2.xml"]],
    [{"extension": ".json"}, 1, ["./tests/testdata/memo.json"]],
    [{"extension": ""}, 8, ["./tests/testdata/test1.csv",
        "./tests/testdata/test2.csv",
        "./tests/testdata/test3.csv",
        "./tests/testdata/test4.csv",
        "./tests/testdata/test5.csv",
        "./tests/testdata/test1.xml",
        "./tests/testdata/test2.xml",
        "./tests/testdata/memo.json"]],
    [{"onlyOnce" : False,"extension": ""}, 8, ["./tests/testdata/test1.csv",
        "./tests/testdata/test2.csv",
        "./tests/testdata/test3.csv",
        "./tests/testdata/test4.csv",
        "./tests/testdata/test5.csv",
        "./tests/testdata/test1.xml",
        "./tests/testdata/test2.xml",
        "./tests/testdata/memo.json"]]
]
def test_find_list():
    tu.find_list(baseSettings, tests)
def test_find_gen():
    tu.find_gen(baseSettings, tests)
def test_callback_singleReturn():
    tu.callback_singleReturn(baseSettings, tests)
def test_callback_listReturn():
    tu.callback_listReturn(baseSettings, tests)
if __name__ == '__main__':
    # Allow running this one scenario directly without pytest.
    test_callback_singleReturn()
|
{"/tests/testutils.py": ["/crawler.py"], "/tests/test_crawler.py": ["/crawler.py"], "/tests/test_googlecrawler.py": ["/crawler.py"], "/tests/test_localcrawler.py": ["/crawler.py"]}
|
1,852
|
ckjoon/teambuilding
|
refs/heads/master
|
/runserver.py
|
from teambuildingapp import app
# NOTE(review): runs at import time — `import runserver` starts the server;
# consider guarding with `if __name__ == "__main__":`. debug=True enables the
# Werkzeug debugger and must not be used in production.
app.run(debug=True)
|
{"/runserver.py": ["/teambuildingapp/__init__.py"], "/teambuildingapp/views.py": ["/teambuildingapp/__init__.py", "/teambuildingapp/db_util.py"], "/teambuildingapp/__init__.py": ["/teambuildingapp/views.py"], "/teambuildingapp/models.py": ["/teambuildingapp/__init__.py"]}
|
1,853
|
ckjoon/teambuilding
|
refs/heads/master
|
/teambuildingapp/views.py
|
from teambuildingapp import app
from flask import render_template, request, url_for, redirect, session, make_response
from flask_cas import login_required
import os
#from roster_processor import process_roster
from teambuildingapp.db_util import *
@app.route("/")
def main():
    # Landing page: the sign-in form.
    return render_template('signin.html')
@app.route("/logout")
def logout():
    # Drop the whole session (username, class/team ids) and return home.
    session.clear()
    return redirect("/")
# Route that will process the file upload
@app.route('/upload', methods=['POST'])
def upload():
    """Create a class from the submitted course form (professor action)."""
    if request.method == 'POST':
        class_name = request.form.get('coursename')
        semester = request.form.get('semester')
        teamsize = request.form.get('teamsize')
        print(class_name)
        print(semester)
        print(teamsize)
        # NOTE(review): no validation of the form values and no auth check
        # beyond the session username — confirm a CAS guard is intended here.
        create_class(class_name, semester, session['username'], teamsize)
    return redirect(url_for('prof_home'))
@app.route("/prof_home")
# Uncomment this to require CAS to access this page
# @login_required
def prof_home():
    """Professor dashboard: classes taught and the teams of the first class."""
    username = session['username']
    #profile, classes = db_util.get_user_info()
    # return render_template('prof_home.html', classes)
    #if 'last_class' not in session:
    classes = get_professor_classes(username)
    if len(classes) > 0:
        # Cache the first class as (class_id, "name (semester)") plus its
        # team-size limit and the rendered class-name list.
        session['last_class'] = (classes[0][0], '{0} ({1})'.format(classes[0][1], classes[0][2]))
        session['max_team_size'] = classes[0][3]
        session['class_names'] = ['{0} ({1})'.format(x[1], x[2]) for x in classes]
        session['teams'] = get_all_teams_in_class(session['last_class'][0])
    else:
        # Professor with no classes yet: empty dashboard.
        session['last_class'] = None
        session['max_team_size'] = None
        session['class_names'] = []
        session['teams'] = []
    return make_response(render_template('prof_home.html', last_class=session['last_class'], max_team_size=session['max_team_size'], classes=classes, teams=session['teams']))
@app.route("/student_home")
# Uncomment this to require CAS to access this page
# @login_required
def student_home():
    """Student dashboard: own intro comment, enrolled classes, and the teams
    of the currently selected class."""
    username = session['username']
    # Robustness fix: teamsize/all_teams were left unbound when the student
    # had no enrolled class (or session['class_id'] was None), which raised
    # NameError in the loop below.
    teamsize = None
    all_teams = []
    if 'class_id' not in session:
        student_class_ids = get_student_enrolled_class_id(username)
        if len(student_class_ids) > 0:
            session['class_id'] = student_class_ids[0]
            teamsize = get_class_max_team_size(session['class_id'])
            all_teams = get_all_teams_in_class(session['class_id'])
        else:
            session['class_id'] = None
    elif session['class_id'] is not None:
        teamsize = get_class_max_team_size(session['class_id'])
        all_teams = get_all_teams_in_class(session['class_id'])
    student_comment = get_user_comment(username)
    student_enrolled_classes = get_student_enrolled_classes(username)
    # Resolve the display name of the currently selected class.
    cur_classname = None
    if student_enrolled_classes is not None:
        for cla in student_enrolled_classes:
            if str(cla[1]) == str(session['class_id']):
                cur_classname = cla[0]
    if cur_classname is None:
        cur_classname = "No Class Selected!"
    # Whether this student already belongs to one of the listed teams
    # (team row layout: captain username at index 1).
    in_team = False
    for team in all_teams:
        if team[1] == username:
            in_team = True
    resp = make_response(render_template('student_home.html',
        comment = student_comment, max_team_size = teamsize,
        classes = student_enrolled_classes, teams = all_teams, in_team=in_team, cur_classname = cur_classname))
    return resp
@app.route("/signin_error")
def signin_error():
    # Shown when a submitted username matches neither students nor professors.
    return render_template('signin_error.html')
@app.route("/uploadFile")
def uploadFile():
    """Accept a roster file upload, save it, and enroll its students."""
    # NOTE(review): the route does not declare methods=['POST'], so the POST
    # branch below is unreachable through this route as written — confirm the
    # intended HTTP verb.
    if request.method == 'POST':
        file = request.files['file']
        if file and allowed_file(file.filename):
            # Bug fix: `filename` was never defined (NameError), and
            # process_roster() received a bare name instead of the saved path.
            # Consider werkzeug's secure_filename() to sanitize the name.
            filename = file.filename
            filepath = os.path.join(app.config['UPLOAD_FOLDER'], filename)
            file.save(filepath)
            process_roster(filepath)
    return redirect(url_for('prof_home'))
@app.route("/team_manager_panel")
def team_manager_panel():
    """Team page: members, captain, and pending join requests; flags whether
    the current user is the captain (enables accept/decline controls)."""
    team_id = session['team_id']
    class_id = session['class_id']
    team_name = get_team_name(class_id, team_id)
    team_captain = get_team_captain(class_id, team_id)
    team_captain_name = get_student_name(team_captain)
    user_captain = False
    students = get_all_students_in_team(class_id, team_id)
    requests = get_all_students_request(class_id, team_id)
    if session['username'] == team_captain:
        user_captain = True
    resp = make_response(
        render_template('team_manager_panel.html',
            team_name = team_name, team_captain_name = team_captain_name,
            user_captain = user_captain, students_in_team = students,
            current_user = session['username'], requests = requests ))
    return resp
@app.route("/api/login", methods=['POST'])
def login():
    """Resolve the submitted gtusername to a student or professor session and
    redirect to the matching home page (or to the sign-in error page)."""
    # NOTE(review): no password check here — authentication appears to rely
    # on the (commented-out) CAS integration.
    if request.method == 'POST':
        gtusername = request.form.get('gtusername')
        all_students = get_all_student_usernames()
        all_professors = get_all_professor_usernames()
        if gtusername in all_students:
            student_class_ids = get_student_enrolled_class_id(gtusername)
            session['username'] = gtusername
            if len(student_class_ids) > 0:
                session['class_id'] = student_class_ids[0]
                team_id = get_student_enrolled_team_id(session['username'], session['class_id'])
                session['team_id'] = team_id
                # Students already on a team land on the team panel.
                if session['team_id'] != None:
                    resp = make_response(redirect(url_for('team_manager_panel')))
                else:
                    resp = make_response(redirect(url_for('student_home')))
            else:
                session['class_id'] = None
                session['team_id'] = None
                # Bug fix: `resp` was never assigned on this path, so the
                # `return resp` below raised UnboundLocalError for students
                # with no enrolled classes.
                resp = make_response(redirect(url_for('student_home')))
        elif gtusername in all_professors:
            session['username'] = gtusername
            resp = make_response(redirect(url_for('prof_home')))
        else:
            return redirect(url_for('signin_error'))
        return resp
@app.route("/updateIntroduction", methods=['POST'])
def updateIntroduction():
    # Save the student's self-introduction text.
    if request.method == 'POST':
        text = request.form.get('introtext')
        update_user_comment(session['username'], text)
    return redirect(url_for('student_home'))
@app.route("/createTeam", methods=['POST'])
def createTeam():
    # Create a new team in the current class with the requester as captain.
    if request.method == 'POST':
        text = request.form.get('team_name')
        print(text)
        create_team(session['class_id'],session['username'], text)
    return redirect(url_for('student_home'))
@app.route("/acceptdecline", methods=['POST'])
def accept_decline():
    """Captain accepts or declines a pending join request."""
    if request.method == 'POST':
        text = request.form.get('gt_username')
        print(text)
        if (request.form['submit']=='Accept'):
            # add_to_team() also clears the pending request itself.
            add_to_team(session['class_id'], session['team_id'], text)
        if (request.form['submit']=='Decline'):
            remove_from_requests(session['class_id'], session['team_id'], text)
        # Bug fix: a second, unconditional remove_from_requests() call used to
        # run here, deleting the request twice on Decline (and redundantly on
        # Accept, where add_to_team already removes it).
    return redirect(url_for('team_manager_panel'))
def allowed_file(filename):
    """Return True when *filename* carries an extension listed in the app's
    ALLOWED_EXTENSIONS config."""
    if '.' not in filename:
        return False
    extension = filename.rsplit('.', 1)[1]
    return extension in app.config['ALLOWED_EXTENSIONS']
@app.route("/requestTeam", methods=['POST'])
def requestTeam():
    # Student asks to join team_id in the currently selected class.
    if request.method == 'POST':
        team_id = request.form.get('team_id')
        add_team_request(session['class_id'], team_id, session['username'])
    return redirect(url_for('student_home'))
@app.route("/leaveTeam", methods=['POST'])
def leaveTeam():
    # Student leaves the team they currently belong to.
    if request.method == 'POST':
        remove_from_team(session['team_id'], session['username'])
    return redirect(url_for('student_home'))
@app.route("/ar", methods=['POST'])
def ar():
    # Captain appoints a new captain or removes a member ("appoint/remove").
    if request.method == 'POST':
        print("here")
        stu = request.form.get('student')
        print("here1")
        if request.form['submit'] == 'Appoint':
            print("app")
            assign_team_captain(session['team_id'], stu)
        elif request.form['submit'] == 'Remove':
            print("rem")
            remove_from_team(session['team_id'], stu)
    return redirect(url_for('team_manager_panel'))
@app.route("/chooseClass", methods=['POST'])
def choose_classs():
    # Student switches the currently selected class.
    if request.method == 'POST':
        class_id = request.form.get('class')
        print(class_id)
        session['class_id'] = class_id
    return redirect(url_for('student_home'))
@app.route("/chooseClassProf", methods=['POST'])
def choose_prof_class():
    # Professor switches the currently selected class.
    if request.method == 'POST':
        class_id = request.form.get('class')
        print(class_id)
        # NOTE(review): prof_home stores session['last_class'] as a
        # (class_id, label) tuple, but here it is set to the bare form value —
        # confirm the template handles both shapes.
        session['last_class'] = class_id
    return redirect(url_for('prof_home'))
|
{"/runserver.py": ["/teambuildingapp/__init__.py"], "/teambuildingapp/views.py": ["/teambuildingapp/__init__.py", "/teambuildingapp/db_util.py"], "/teambuildingapp/__init__.py": ["/teambuildingapp/views.py"], "/teambuildingapp/models.py": ["/teambuildingapp/__init__.py"]}
|
1,854
|
ckjoon/teambuilding
|
refs/heads/master
|
/teambuildingapp/db_util.py
|
import psycopg2
from teambuildingapp.config import *
def get_user_info(username):
    """Return (profile, classes) for *username*.

    profile is [is_instructor, email, first_name, last_name, comment];
    classes lists the classes taught (instructors) or enrolled in (students).
    """
    conn = psycopg2.connect(**db)
    cur = conn.cursor()
    # Bug fix: the placeholder was quoted ('%s'), which psycopg2 treats as a
    # literal rather than a parameter, and params were passed as a bare
    # string instead of a 1-tuple ((username) is just username).
    cmd = 'SELECT is_instructor, email, first_name, last_name, comment FROM users WHERE gt_username = %s;'
    data = (username,)
    cur.execute(cmd, data)
    profile = list(cur.fetchone())
    if profile[0]:
        cmd = 'SELECT * FROM classes WHERE instructor_gt_username = %s;'
        cur.execute(cmd, data)
        classes = cur.fetchall()
    else:
        # NOTE(review): db_setup.py creates the table as ROSTERS; 'roster'
        # here looks stale — confirm against the live schema.
        cmd = 'SELECT * FROM classes WHERE class_id in (SELECT class_id FROM roster WHERE gt_username = %s);'
        cur.execute(cmd, data)
        classes = cur.fetchall()
    cur.close()
    conn.close()
    return profile, [list(x) for x in classes]
def create_class(class_name, semester, instructor_username, max_team_size=5):
    """Insert a new class taught by *instructor_username*."""
    conn = psycopg2.connect(**db)
    cur = conn.cursor()
    cmd = 'INSERT INTO classes (class_name, class_semester, instructor_gt_username, max_team_size) VALUES (%s, %s, %s, %s);'
    data = (class_name, semester, instructor_username, max_team_size)
    cur.execute(cmd, data)
    conn.commit()
    cur.close()
    conn.close()
def get_all_teams_in_class(class_id):
    """Return (team_name, captain_username, captain_email, member_count,
    team_id) rows for every team in *class_id*."""
    conn = psycopg2.connect(**db)
    cur = conn.cursor()
    # Join per-team member counts (t1) with each team's captain row (t2),
    # then attach the captain's email from users.
    cmd = ('SELECT team_name, username, emails, countUsers, teamid '
        'FROM ((SELECT team_name, team_id as teamid, COUNT(gt_username) '
        'as countUsers FROM teams where class_id = %s GROUP BY team_id, team_name ) t1 '
        ' INNER JOIN '
        '(SELECT team_id, gt_username as username FROM teams WHERE is_captain = True GROUP BY team_id, gt_username) t2 '
        'on teamid = t2.team_id) query1 inner join (select gt_username, email as emails from users) query2 on username = query2.gt_username;')
    data = (class_id,)
    print(cmd)
    print(cur.mogrify(cmd, data))
    cur.execute(cmd,data)
    all_teams = cur.fetchall()
    cur.close()
    conn.close()
    return all_teams
def create_team(class_id, gt_username, team_name):
    """Create a new team with *gt_username* as its captain."""
    conn = psycopg2.connect(**db)
    cur = conn.cursor()
    cmd = 'INSERT INTO teams (class_id, gt_username, team_name, is_captain) VALUES (%s, %s, %s, %s);'
    data = (class_id, gt_username, team_name, True)
    cur.execute(cmd, data)
    conn.commit()
    cur.close()
    conn.close()
def add_to_team(class_id, team_id, gt_username):
    """Add *gt_username* to team *team_id* in class *class_id*.

    Raises Exception when the team is already at the class's max_team_size.
    Also clears the user's pending join request for this team.
    """
    conn = psycopg2.connect(**db)
    # Bug fix: the size-limit branch raised without closing the cursor and
    # connection, leaking a DB connection per rejected add.
    try:
        cur = conn.cursor()
        cmd = 'SELECT max_team_size FROM classes WHERE class_id = %s;'
        cur.execute(cmd, (class_id,))
        max_size = int(cur.fetchone()[0])
        cmd = 'SELECT gt_username FROM teams WHERE class_id = %s AND team_id = %s;'
        cur.execute(cmd, (class_id, team_id))
        cur_size = len(cur.fetchall())
        team_name = get_team_name(class_id, team_id)
        # Robustness: >= instead of == so an over-full team is still rejected.
        # NOTE(review): check-then-insert is racy under concurrent requests.
        if cur_size >= max_size:
            raise Exception('Cannot add more team members because the limit is reached')
        cmd = 'INSERT INTO teams (team_id, class_id, gt_username, team_name, is_captain) VALUES (%s, %s, %s, %s, %s);'
        data = (team_id, class_id, gt_username, team_name, False)
        print(cur.mogrify(cmd, data))
        remove_from_requests(class_id, team_id, gt_username)
        cur.execute(cmd, data)
        conn.commit()
        cur.close()
    finally:
        conn.close()
def add_team_request(class_id, team_id, gt_username):
    """Record a pending join request for the team."""
    conn = psycopg2.connect(**db)
    cur = conn.cursor()
    cmd = 'INSERT INTO requests (class_id, team_id, gt_username) VALUES (%s, %s, %s)'
    data = (class_id, team_id, gt_username)
    cur.execute(cmd, data)
    conn.commit()
    cur.close()
    conn.close()
def remove_from_requests(class_id, team_id, gt_username):
    """Delete a pending join request (no-op when absent)."""
    conn = psycopg2.connect(**db)
    cur = conn.cursor()
    cmd = 'DELETE FROM requests WHERE class_id = %s AND team_id = %s AND gt_username = %s;'
    data = (class_id, team_id, gt_username)
    cur.execute(cmd, data)
    conn.commit()
    cur.close()
    conn.close()
def remove_from_team(team_id, gt_username):
    """Remove the user's membership row for *team_id*."""
    conn = psycopg2.connect(**db)
    cur = conn.cursor()
    cmd = 'DELETE FROM teams WHERE team_id = %s AND gt_username = %s;'
    data = (team_id, gt_username)
    cur.execute(cmd, data)
    conn.commit()
    cur.close()
    conn.close()
def assign_team_captain(team_id, gt_username):
    """Make *gt_username* the sole captain of team *team_id*."""
    conn = psycopg2.connect(**db)
    cur = conn.cursor()
    # Demote every current member of this team.
    cmd = 'UPDATE teams SET is_captain = %s WHERE team_id = %s;'
    data = (False, team_id)
    cur.execute(cmd, data)
    conn.commit()
    # Bug fix: the promotion was scoped only by gt_username, which marked the
    # user captain in every team they belong to (any class). Scope by team_id
    # as well.
    cmd = 'UPDATE teams SET is_captain = %s WHERE team_id = %s AND gt_username = %s;'
    data = (True, team_id, gt_username)
    cur.execute(cmd, data)
    conn.commit()
    cur.close()
    conn.close()
def update_user_comment(username, comment):
    """Store the student's self-introduction text."""
    conn = psycopg2.connect(**db)
    cur = conn.cursor()
    cmd = 'UPDATE users SET comment = %s WHERE gt_username = %s;'
    data = (comment, username)
    cur.execute(cmd, data)
    conn.commit()
    cur.close()
    conn.close()
def get_user_comment(username):
    """Return the user's stored self-introduction text."""
    conn = psycopg2.connect(**db)
    cur = conn.cursor()
    cmd = 'SELECT comment from users WHERE gt_username = %s;'
    data = (username,)
    #print(cur.mogrify(cmd, data))
    cur.execute(cmd, data)
    # NOTE(review): commit after a plain SELECT is unnecessary.
    conn.commit()
    comment = cur.fetchone()
    cur.close()
    conn.close()
    return comment[0]
def get_team_captain(class_id, team_id):
    """Return the gt_username of the team's captain."""
    conn = psycopg2.connect(**db)
    cur = conn.cursor()
    cmd = 'SELECT gt_username from teams WHERE class_id = %s AND team_id = %s AND is_captain = TRUE;'
    data = (class_id, team_id)
    cur.execute(cmd, data)
    conn.commit()
    team_captain = str(cur.fetchone()[0])
    cur.close()
    conn.close()
    return team_captain
def get_student_name(gt_username):
    """Return the (first_name, last_name) row for the user."""
    conn = psycopg2.connect(**db)
    cur = conn.cursor()
    cmd = 'SELECT first_name, last_name from users WHERE gt_username = %s;'
    data = (gt_username,)
    print(cur.mogrify(cmd, data))
    cur.execute(cmd, data)
    conn.commit()
    name = cur.fetchone()
    print(name)
    cur.close()
    conn.close()
    return name
def get_student_info(username):
    # NOTE(review): looks broken/dead — the SQL selects a column "first" that
    # the schema does not define, and it has two placeholders but receives a
    # single parameter, so execute() would fail. Confirm before use.
    conn = psycopg2.connect(**db)
    cur = conn.cursor()
    cmd = 'SELECT first from teams WHERE class_id = %s AND team_id = %s AND is_captain = TRUE;'
    data = (username,)
    #print(cur.mogrify(cmd, data))
    cur.execute(cmd, data)
    conn.commit()
    team_captain = cur.fetchone()
    cur.close()
    conn.close()
    return team_captain[0]
def enroll_student(username, class_id):
    """Add *username* to the roster of *class_id*."""
    conn = psycopg2.connect(**db)
    cur = conn.cursor()
    cmd = 'INSERT INTO rosters (class_id, gt_username) VALUES (%s, %s);'
    data = (class_id, username)
    cur.execute(cmd, data)
    conn.commit()
    cur.close()
    conn.close()
def unenroll_student(username, class_id):
    """Remove *username* from the roster of *class_id*."""
    conn = psycopg2.connect(**db)
    cur = conn.cursor()
    # Bug fix: the statement had a stray closing parenthesis after the last
    # placeholder ("%s);"), which made it invalid SQL.
    cmd = 'DELETE FROM rosters WHERE class_id = %s AND gt_username = %s;'
    data = (class_id, username)
    cur.execute(cmd, data)
    conn.commit()
    cur.close()
    conn.close()
def get_professor_class_ids(username):
    """Return the ids of all classes taught by *username*."""
    conn = psycopg2.connect(**db)
    cur = conn.cursor()
    cmd = 'SELECT class_id FROM classes WHERE instructor_gt_username = %s;'
    data = (username,)
    cur.execute(cmd, data)
    classes = [x[0] for x in cur.fetchall()]
    cur.close()
    conn.close()
    return classes
def get_professor_classes(username):
    """Return (class_id, class_name, class_semester, max_team_size) rows for
    all classes taught by *username*."""
    conn = psycopg2.connect(**db)
    cur = conn.cursor()
    cmd = 'SELECT class_id, class_name, class_semester, max_team_size FROM classes WHERE instructor_gt_username = %s;'
    data = (username,)
    cur.execute(cmd, data)
    classes = cur.fetchall()
    cur.close()
    conn.close()
    return classes
def get_all_students_in_team(class_id, team_id):
    """Return (gt_username, first_name, last_name, email) for team members."""
    conn = psycopg2.connect(**db)
    cur = conn.cursor()
    cmd = 'SELECT gt_username, first_name, last_name, email FROM users WHERE gt_username in (SELECT gt_username from teams where class_id = %s AND team_id = %s);'
    data = (class_id, team_id)
    print(cur.mogrify(cmd, data))
    cur.execute(cmd, data)
    students_in_team = cur.fetchall()
    cur.close()
    conn.close()
    return students_in_team
def get_all_students_request(class_id, team_id):
    """Return user rows for everyone with a pending request to this team."""
    conn = psycopg2.connect(**db)
    cur = conn.cursor()
    cmd = 'SELECT gt_username, first_name, last_name, email FROM users WHERE gt_username in (SELECT gt_username from requests where class_id = %s AND team_id = %s);'
    data = (class_id, team_id)
    print(cur.mogrify(cmd, data))
    cur.execute(cmd, data)
    requests = cur.fetchall()
    cur.close()
    conn.close()
    return requests
def get_all_student_usernames():
    """Return every non-instructor username."""
    conn = psycopg2.connect(**db)
    cur = conn.cursor()
    cmd = 'SELECT gt_username FROM users WHERE is_instructor = FALSE;'
    cur.execute(cmd)
    student_usernames = [x[0] for x in cur.fetchall()]
    cur.close()
    conn.close()
    return student_usernames
def get_team_name(class_id, team_id):
    """Return the team's display name."""
    conn = psycopg2.connect(**db)
    cur = conn.cursor()
    cmd = 'SELECT team_name FROM teams WHERE class_id = %s AND team_id = %s;'
    data = (class_id, team_id)
    cur.execute(cmd, data)
    team_name = cur.fetchone()[0]
    cur.close()
    conn.close()
    return team_name
def get_all_professor_usernames():
    """Return every instructor username."""
    conn = psycopg2.connect(**db)
    cur = conn.cursor()
    cmd = 'SELECT gt_username FROM users WHERE is_instructor = TRUE;'
    cur.execute(cmd)
    professor_usernames = [x[0] for x in cur.fetchall()]
    cur.close()
    conn.close()
    return professor_usernames
def register_user(username, is_instructor, email, first_name, last_name):
    """Insert a single user row with an empty comment."""
    conn = psycopg2.connect(**db)
    cur = conn.cursor()
    cmd = 'INSERT INTO users (gt_username, is_instructor, email, first_name, last_name, comment) VALUES (%s, %s, %s, %s, %s, %s);'
    data = (username, is_instructor, email, first_name, last_name, '')
    cur.execute(cmd, data)
    conn.commit()
    cur.close()
    conn.close()
def mass_register_users(userlist):
    """Bulk-insert users; *userlist* is a flat tuple of 6 fields per user
    (gt_username, is_instructor, email, first_name, last_name, comment)."""
    # Robustness fix: an empty userlist used to build an invalid VALUES
    # clause ('(%s, ...), ' * -1 collapses to a single row with no params).
    if not userlist:
        return
    conn = psycopg2.connect(**db)
    cur = conn.cursor()
    print(userlist)
    # One "(%s, ...)" group per 6-field user record.
    cmd = 'INSERT INTO users (gt_username, is_instructor, email, first_name, last_name, comment) VALUES ' + '(%s, %s, %s, %s, %s, %s), '*(len(userlist)//6-1) + '(%s, %s, %s, %s, %s, %s);'
    cur.execute(cmd, userlist)
    conn.commit()
    cur.close()
    conn.close()
def get_student_enrolled_classnames(username):
    """Return the names of all classes *username* is enrolled in."""
    conn = psycopg2.connect(**db)
    cur = conn.cursor()
    cmd = 'SELECT class_name from classes where class_id in (SELECT class_id from rosters WHERE gt_username = %s);'
    data = (username,)
    cur.execute(cmd, data)
    class_names = [x[0] for x in cur.fetchall()]
    cur.close()
    conn.close()
    return class_names
def get_student_enrolled_classes(username):
    """Return (class_name, class_id) rows for the student's classes."""
    conn = psycopg2.connect(**db)
    cur = conn.cursor()
    cmd = 'SELECT class_name, class_id from classes where class_id in (SELECT class_id from rosters WHERE gt_username = %s);'
    data = (username,)
    cur.execute(cmd, data)
    class_names = cur.fetchall()
    cur.close()
    conn.close()
    return class_names
def get_student_enrolled_class_id(username):
    """Return the ids of all classes the student is enrolled in."""
    conn = psycopg2.connect(**db)
    cur = conn.cursor()
    cmd = 'SELECT class_id from classes where class_id in (SELECT class_id from rosters WHERE gt_username = %s);'
    data = (username,)
    cur.execute(cmd, data)
    class_names = [x[0] for x in cur.fetchall()]
    cur.close()
    conn.close()
    return class_names
def get_student_enrolled_team_id(gt_username, class_id):
    """Return the student's team-id row in *class_id*, or None if teamless."""
    conn = psycopg2.connect(**db)
    cur = conn.cursor()
    cmd = 'SELECT team_id from teams where class_id = %s AND gt_username = %s;'
    data = (class_id, gt_username)
    cur.execute(cmd, data)
    team_id = cur.fetchone()
    cur.close()
    conn.close()
    return team_id
def get_class_max_team_size(class_id):
    """Return the class's configured maximum team size."""
    conn = psycopg2.connect(**db)
    cur = conn.cursor()
    cmd = 'SELECT max_team_size from classes where class_id = %s;'
    data = (class_id,)
    cur.execute(cmd, data)
    class_max = cur.fetchone()[0]
    print('debug'+str(class_max))
    cur.close()
    conn.close()
    return class_max
def enroll_from_roster(students, class_id):
    """Register any unknown students and enroll all of them in *class_id*.

    students: sequence of (gt_username, email, first_name, last_name) tuples.
    """
    # Robustness fix: an empty roster used to build an invalid INSERT, and
    # mass_register_users was called even with no new users.
    if not students:
        return
    conn = psycopg2.connect(**db)
    cur = conn.cursor()
    registered_students = get_all_student_usernames()
    print (registered_students)
    roster_vals = ()
    registration_vals = ()
    for s in students:
        roster_vals += (class_id, s[0])
        # Only users missing from the users table need registration.
        if s[0] not in registered_students:
            registration_vals += (s[0], False, s[1], s[2], s[3], '')
    if registration_vals:
        mass_register_users(registration_vals)
    cmd = 'INSERT INTO rosters (class_id, gt_username) VALUES ' + '(%s, %s), '*(len(students)-1) + '(%s, %s);'
    cur.execute(cmd, roster_vals)
    conn.commit()
    cur.close()
    conn.close()
|
{"/runserver.py": ["/teambuildingapp/__init__.py"], "/teambuildingapp/views.py": ["/teambuildingapp/__init__.py", "/teambuildingapp/db_util.py"], "/teambuildingapp/__init__.py": ["/teambuildingapp/views.py"], "/teambuildingapp/models.py": ["/teambuildingapp/__init__.py"]}
|
1,855
|
ckjoon/teambuilding
|
refs/heads/master
|
/teambuildingapp/roster_processor.py
|
import xlrd
import db_util
class RosterProcessor:
    """Parse an .xls roster export and enroll its students via db_util."""
    def __init__(self, file, class_id=None):
        # file: raw bytes of the workbook; class_id: target class in the DB.
        self.file = file
        self.students = []
        self.class_id = class_id
    def process(self):
        """Collect (gt_username, email, first, last) for every 'Student' row
        of the first worksheet."""
        wb = xlrd.open_workbook(file_contents=self.file)
        ws = wb.sheet_by_index(0)
        for i in range(0, ws.nrows):
            r = ws.row_values(i, start_colx=0, end_colx=ws.ncols)
            if r[-1] == 'Student':
                # The name column is formatted "Last, First".
                name = r[0].split(', ')
                self.students.append( (r[1], r[2], name[1], name[0]) )
        #print(self.students)
    def export_to_db(self):
        # Hand the collected students to the enrollment routine.
        db_util.enroll_from_roster(self.students, self.class_id)
# NOTE(review): this runs at import time against a hardcoded path, so merely
# importing this module reads a local file and writes to the DB — likely why
# views.py keeps `from roster_processor import process_roster` commented out.
# Consider moving it under `if __name__ == '__main__':`. The handle `f` is
# also never closed.
f = open('./../rosters/csxxxx_roster.xls', 'rb+')
instance = RosterProcessor(f.read(), 1) #replace 1 with class ID used in the DB
instance.process()
instance.export_to_db()
def process_roster(fname):
    """Parse the roster file at *fname* and export it to the database."""
    # Bug fix: the file handle was never bound (`with open(...):` lacked
    # `as f`), so `f.read()` resolved to the unrelated module-level handle
    # (or raised NameError).
    with open(fname, 'rb+') as f:
        instance = RosterProcessor(f.read())
        instance.process()
        instance.export_to_db()
|
{"/runserver.py": ["/teambuildingapp/__init__.py"], "/teambuildingapp/views.py": ["/teambuildingapp/__init__.py", "/teambuildingapp/db_util.py"], "/teambuildingapp/__init__.py": ["/teambuildingapp/views.py"], "/teambuildingapp/models.py": ["/teambuildingapp/__init__.py"]}
|
1,856
|
ckjoon/teambuilding
|
refs/heads/master
|
/teambuildingapp/db_setup.py
|
import psycopg2
from config import *
def setup_tables():
    """Create any missing tables. Uses the module-global `conn` set in main()."""
    tables = get_tables()
    print('Found tables:{}'.format(tables))
    if 'users' not in tables:
        print('Users table not found, creating one...')
        setup_users_table()
    if 'classes' not in tables:
        print('Classes table not found, creating one...')
        setup_classes_table()
    if 'rosters' not in tables:
        print('Rosters table not found, creating one...')
        setup_rosters_table()
    if 'teams' not in tables:
        print('Teams table not found, creating one...')
        setup_teams_table()
    if 'requests' not in tables:
        print('Requests table not found, creating one...')
        setup_requests_table()
def get_tables():
    """Return the names of all tables in the public schema."""
    cur = conn.cursor()
    cur.execute("SELECT table_name FROM information_schema.tables WHERE table_schema = 'public'")
    tables = cur.fetchall()
    cur.close()
    return [x[0] for x in tables]
def setup_users_table():
    """Create the USERS table (uses the module-global `conn`)."""
    cur = conn.cursor()
    cmd = """CREATE TABLE USERS(
    GT_USERNAME TEXT PRIMARY KEY NOT NULL,
    IS_INSTRUCTOR BOOL NOT NULL,
    EMAIL TEXT NOT NULL,
    FIRST_NAME TEXT NOT NULL,
    LAST_NAME TEXT NOT NULL,
    COMMENT TEXT
    );"""
    cur.execute(cmd)
    conn.commit()
    cur.close()
def setup_teams_table():
    """Create the TEAMS table: one row per (class, team, member)."""
    cur = conn.cursor()
    cmd = """CREATE TABLE TEAMS(
    TEAM_ID SERIAL NOT NULL,
    CLASS_ID INTEGER NOT NULL REFERENCES CLASSES (CLASS_ID),
    GT_USERNAME TEXT NOT NULL REFERENCES USERS(GT_USERNAME),
    TEAM_NAME TEXT NOT NULL,
    IS_CAPTAIN BOOL NOT NULL,
    COMMENT TEXT,
    PRIMARY KEY(CLASS_ID, TEAM_ID, GT_USERNAME)
    );"""
    cur.execute(cmd)
    conn.commit()
    cur.close()
def setup_classes_table():
    """Create the CLASSES table (uses the module-global `conn`)."""
    cur = conn.cursor()
    # Bug fix: the original declared two primary keys (inline on CLASS_ID plus
    # a composite PRIMARY KEY(...)), which PostgreSQL rejects. CLASS_ID stays
    # the primary key because TEAMS/ROSTERS reference CLASSES (CLASS_ID).
    cmd = """CREATE TABLE CLASSES(
    CLASS_ID SERIAL NOT NULL PRIMARY KEY,
    INSTRUCTOR_GT_USERNAME TEXT REFERENCES USERS (GT_USERNAME),
    CLASS_NAME TEXT NOT NULL,
    CLASS_SEMESTER TEXT NOT NULL,
    MAX_TEAM_SIZE INTEGER NOT NULL
    );"""
    cur.execute(cmd)
    conn.commit()
    cur.close()
def setup_rosters_table():
    """Create the ROSTERS table: class enrollment per student."""
    cur = conn.cursor()
    cmd = """CREATE TABLE ROSTERS(
    CLASS_ID INTEGER NOT NULL REFERENCES CLASSES (CLASS_ID),
    GT_USERNAME TEXT NOT NULL REFERENCES USERS (GT_USERNAME),
    PRIMARY KEY(CLASS_ID, GT_USERNAME)
    );"""
    cur.execute(cmd)
    conn.commit()
    cur.close()
def setup_requests_table():
    """Create the REQUESTS table: pending team-join requests."""
    cur = conn.cursor()
    cmd = """CREATE TABLE REQUESTS(
    CLASS_ID INTEGER NOT NULL REFERENCES CLASSES (CLASS_ID),
    TEAM_ID INTEGER NOT NULL,
    GT_USERNAME TEXT NOT NULL REFERENCES USERS (GT_USERNAME),
    PRIMARY KEY(CLASS_ID, TEAM_ID, GT_USERNAME)
    );"""
    cur.execute(cmd)
    conn.commit()
    cur.close()
def main():
    """Connect, create any missing tables, and disconnect. The connection is
    shared with the setup_* helpers through the module-global `conn`."""
    global conn
    conn = psycopg2.connect(**db)
    setup_tables()
    conn.close()
# Runs at import time: executing/importing this module performs the setup.
main()
|
{"/runserver.py": ["/teambuildingapp/__init__.py"], "/teambuildingapp/views.py": ["/teambuildingapp/__init__.py", "/teambuildingapp/db_util.py"], "/teambuildingapp/__init__.py": ["/teambuildingapp/views.py"], "/teambuildingapp/models.py": ["/teambuildingapp/__init__.py"]}
|
1,857
|
ckjoon/teambuilding
|
refs/heads/master
|
/teambuildingapp/__init__.py
|
from flask import Flask
from flask_cas import CAS
from flask_sqlalchemy import SQLAlchemy
from teambuildingapp import config
import os
# Application factory-less setup: a single module-level Flask app.
app = Flask(__name__)
CAS(app, '/cas') # this adds the prefix '/api/cas/' to the /login and /logout
# routes that CAS provides
#db = SQLAlchemy(app)
app.config.from_pyfile('config.py')
# NOTE(review): a fresh random secret on every start means sessions do not
# survive restarts and will not work across multiple workers — confirm.
app.secret_key = os.urandom(24)
# Imported last so routes in views.py can import `app` (the usual small-app
# Flask circular-import pattern).
import teambuildingapp.views
|
{"/runserver.py": ["/teambuildingapp/__init__.py"], "/teambuildingapp/views.py": ["/teambuildingapp/__init__.py", "/teambuildingapp/db_util.py"], "/teambuildingapp/__init__.py": ["/teambuildingapp/views.py"], "/teambuildingapp/models.py": ["/teambuildingapp/__init__.py"]}
|
1,858
|
ckjoon/teambuilding
|
refs/heads/master
|
/teambuildingapp/models.py
|
from teambuildingapp import app
from flask_sqlalchemy import Model, Column, Integer, String
# NOTE(review): this module appears unused and broken — `db` is never defined
# here (the SQLAlchemy instance in __init__.py is commented out), and
# flask_sqlalchemy does not export Model/Column/Integer/String at top level,
# so importing this module would fail. Confirm before relying on these models;
# the app currently talks to PostgreSQL directly via db_util.py instead.
# Represents a user. User is related to a team by it's team ID'
class User(db.Model):
    id = db.Column(db.Integer, primary_key=True)
    username = db.Column(db.String(80), unique=True)
    email = db.Column(db.String(120), unique=True)
    team_id = db.Column(db.Integer)
    def __init__(self, username, email):
        self.username = username
        self.email = email
    def __repr__(self):
        return '<User %r>' % self.username
# Represents a Team
class Team(db.Model):
    id = db.Column(db.Integer, primary_key=True)
    team_name = db.Column(db.String(120), unique=True)
    team_leader = db.Column(db.Integer, unique=True)
    def __init__(self, team_name, team_leader):
        self.team_name = team_name
        self.team_leader = team_leader
    def __repr__(self):
        return '<User %r>' % self.team_name
# class Roster(db.Model):
#     id = db.Column(db.Integer, primary_key=True)
#     put some stuff about rosters here
|
{"/runserver.py": ["/teambuildingapp/__init__.py"], "/teambuildingapp/views.py": ["/teambuildingapp/__init__.py", "/teambuildingapp/db_util.py"], "/teambuildingapp/__init__.py": ["/teambuildingapp/views.py"], "/teambuildingapp/models.py": ["/teambuildingapp/__init__.py"]}
|
1,922
|
Ashafix/argparse2HTML
|
refs/heads/master
|
/blueprints/celery_task.py
|
import os
import logging
import contextlib
import dill
import binascii
from flask import request, jsonify, Response
from flask import Blueprint, redirect, url_for
from celery import Celery
from args import get_cli
from config import JOB_FOLDER, CELERY_BROKER_URL, RESULT_BACKEND
celery = Celery('argparser_server', broker=CELERY_BROKER_URL, backend=RESULT_BACKEND)
# Per-task log file path, formatted with the celery task id.
FILENAME_LOG = os.path.join(JOB_FOLDER, '{}.log')
app_with_celery = Blueprint('app_with_celery', __name__,
    template_folder='templates')
@celery.task(name='server.background_task', bind=True)
def background_task(self, function, args):
    """Deserialize and run a dill-pickled function, teeing its stdout into a
    per-task log file that /status/<task_id> streams back.

    function/args arrive as base64-encoded dill blobs from run_post().
    NOTE(review): dill.loads executes arbitrary payloads — only safe while
    both ends of the broker are fully trusted.
    """
    logger = logging.getLogger(self.request.id)
    filehandler = logging.FileHandler(FILENAME_LOG.format(self.request.id))
    logger.addHandler(filehandler)
    function = dill.loads(binascii.a2b_base64(function))
    args = dill.loads(binascii.a2b_base64(args))
    # 20 == logging.INFO: forwarded stdout lines are logged at INFO.
    with contextlib.redirect_stdout(LoggerWriter(logger, 20)):
        return function(args)
@app_with_celery.route('/run/<command>', methods=['POST'])
def run_post(command):
    """Queue *command* with the POSTed JSON args; return the celery task id."""
    params = request.get_json()
    found, cli = get_cli(command)
    if not found:
        return redirect (url_for('list_available_commands'))
    # Serialize the target function and its parsed args so the worker process
    # can reconstruct and run them.
    func = binascii.b2a_base64(dill.dumps(cli.function)).decode()
    args = cli.parser.parse_args(params)
    base64_args = binascii.b2a_base64(dill.dumps(args)).decode()
    task = background_task.apply_async(args=(func, base64_args))
    return task.id
@app_with_celery.route('/status/<task_id>')
def task_status(task_id):
    """Report a Celery task: an SSE log stream, or a JSON status/result snapshot."""
    task = background_task.AsyncResult(task_id)
    if request.headers.get('accept') == 'text/event-stream':
        # Client asked for Server-Sent Events: stream the log file until the
        # task reaches a terminal state.
        def status():
            # NOTE(review): this polls in a tight loop with no sleep, so it
            # re-reads the log file and re-queries the result backend as fast
            # as it can while the task runs — consider a short delay.
            while task.status not in ('SUCCESS', 'FAILURE', 'REVOKED'):
                fname = FILENAME_LOG.format(task_id)
                resp = ['data: \n']  # keep-alive payload while no log exists yet
                if os.path.isfile(fname):
                    with open(fname, 'r') as f:
                        resp = ["data: {}".format(line.strip()) for line in f.readlines()]
                    resp.append('\n')
                yield '\n'.join(resp)
            # Final empty event so the client sees the stream terminate cleanly.
            yield "data: \n\n"
        return Response(status(), content_type='text/event-stream')
    if task.status == 'SUCCESS':
        return jsonify(task.result)
    return jsonify(task.status)
class LoggerWriter:
    """File-like adapter so contextlib.redirect_stdout can feed a logger.

    Each non-empty chunk written is emitted as one log record at the level
    supplied to the constructor.
    """

    def __init__(self, logger, level):
        self.logger = logger
        self.level = level  # numeric logging level, e.g. logging.INFO (20)

    def write(self, message):
        """Forward *message* to the logger, skipping print()'s bare newlines."""
        if message != '\n':
            # Fix: log at the configured level. The original hard-coded
            # logging.CRITICAL and never read self.level, so every captured
            # print() line was emitted as CRITICAL regardless of the caller's
            # choice (background_task passes 20 == INFO).
            self.logger.log(self.level, message)

    def flush(self, *args, **kwargs):
        # Nothing is buffered; present only to satisfy the file protocol.
        pass
|
{"/blueprints/celery_task.py": ["/args.py", "/config.py"], "/blueprints/subprocess_task.py": ["/args.py", "/config.py"], "/args.py": ["/example_argparser.py"], "/server.py": ["/args.py", "/argparse2dict.py", "/config.py", "/blueprints/celery_task.py", "/blueprints/subprocess_task.py"]}
|
1,923
|
Ashafix/argparse2HTML
|
refs/heads/master
|
/example_argparser.py
|
import argparse
import itertools
import time
import builtins
# Example CLIs exposed through the web UI. Each parser here is paired with
# a handler function below and registered in args.py.
parser1 = argparse.ArgumentParser(prog='simple math', description='a simple math command line interface')
parser1.add_argument('x', help='first value',
                     default='2',
                     type=int)
parser1.add_argument('y', help='second value',
                     default='3',
                     type=int)
parser1.add_argument('--action', help='which method to apply',
                     default='min', choices=['min', 'max', 'sum'],
                     type=str)
# Spinner demo: prints one character per tick (see cycler below).
parser_cycler = argparse.ArgumentParser(prog='cycler')
parser_cycler.add_argument('--delay', help='delay in seconds',
                           default=0.1,
                           type=float)
parser_cycler.add_argument('--max', help='Number of iterations',
                           default=20,
                           type=int)
parser_cycler.add_argument('--characters', help='Cycle through those characters',
                           default='\\|/-',
                           type=str)
# Demo of wrapping an existing function (see wrapper/complex_function below).
parser_wrapper = argparse.ArgumentParser(prog='wrapper')
parser_wrapper.add_argument('columns', help='List of comma separated column names',
                            default='a,b,c',
                            type=str)
parser_wrapper.add_argument('values', help='List of comma separated values',
                            default='1,2,3',
                            type=str)
def simple_math(args):
    """Apply the selected built-in (min/max/sum) to the two operands.

    :param args: Namespace with int fields x, y and str field action
    :return: the numeric result of the chosen operation
    """
    operation = getattr(builtins, args.action)
    return operation([args.x, args.y])
def cycler(args):
    """Print a spinner character per tick until args.max iterations elapse.

    :param args: Namespace with fields delay (float), max (int), characters (str)
    :return: summary string naming the final iteration count
    """
    count = 0
    for glyph in itertools.cycle(args.characters):
        print(glyph)
        time.sleep(args.delay)
        if count >= args.max:
            break
        count += 1
    return 'Finished after {} iterations'.format(count)
def wrapper(args):
    """Split the comma-separated column/value strings and render them.

    Thin adapter so complex_function can be driven from an argparse CLI.
    """
    return complex_function(args.columns.split(','), args.values.split(','))
def complex_function(columns, values):
    """Render each column with its positional value, one 'col: val' per line.

    :param columns: sequence of column names
    :param values: sequence of values, indexed in step with columns
    :return: newline-joined 'name: value' lines
    """
    rendered = ['{}: {}'.format(name, values[position])
                for position, name in enumerate(columns)]
    return '\n'.join(rendered)
|
{"/blueprints/celery_task.py": ["/args.py", "/config.py"], "/blueprints/subprocess_task.py": ["/args.py", "/config.py"], "/args.py": ["/example_argparser.py"], "/server.py": ["/args.py", "/argparse2dict.py", "/config.py", "/blueprints/celery_task.py", "/blueprints/subprocess_task.py"]}
|
1,924
|
Ashafix/argparse2HTML
|
refs/heads/master
|
/blueprints/subprocess_task.py
|
import sys
import os
import uuid
import psutil
import subprocess
from flask import request, jsonify, Response
from flask import Blueprint, redirect, url_for
from args import get_cli
from config import JOB_FOLDER
# Per-task log and pid file path templates, keyed by a uuid4 task id.
FILENAME_LOG = os.path.join(JOB_FOLDER, '{}.log')
FILENAME_PID = os.path.join(JOB_FOLDER, '{}.pid')
# Marker line the child prints between captured output and the final result.
SEP = '=-' * 30
# NOTE(review): the blueprint name contains a trailing space
# ('app_with_subprocess ') — harmless but probably unintended.
app_with_subprocess = Blueprint('app_with_subprocess ', __name__,
                                template_folder='templates')
@app_with_subprocess.route('/run/<command>', methods=['POST'])
def run_post(command):
    """Launch the CLI function for <command> in a detached subprocess.

    The child imports the function, runs it with the parsed args, and prints
    SEP followed by the result into the task's log file. Returns the new
    task id as plain text, or redirects when the command is unknown.
    """
    params = request.get_json()
    found, cli = get_cli(command)
    if not found:
        return redirect(url_for('list_available_commands'))
    args = cli.parser.parse_args(params)
    # NOTE(review): args.__dict__ is interpolated into source code via its
    # repr; values whose repr is not a valid literal would break (or inject
    # code into) the child process.
    code = 'from {module} import {function}; import argparse; args = argparse.Namespace(**{args}); r = ({function}(args)); print(\'{SEP}\'); print(r)'.format(
        module=cli.function.__module__,
        function=cli.function.__name__,
        SEP=SEP,
        args=args.__dict__
    )
    task_id = str(uuid.uuid4())
    # Fix: close the parent's copy of the log handle once Popen has started
    # (the child keeps its own inherited descriptor). The original left the
    # file object open forever, leaking one fd per launched task.
    with open(FILENAME_LOG.format(task_id), 'w+') as f:
        p = subprocess.Popen([sys.executable, '-u', '-c', code], stderr=f, stdout=f, bufsize=0)
    with open(FILENAME_PID.format(task_id), 'w') as ff:
        ff.write(str(p.pid))
    return str(task_id)
@app_with_subprocess.route('/status/<task_id>')
def task_status(task_id):
    """Report a subprocess task: an SSE log stream, or its result as JSON."""
    # Resolve the worker process from the pid file; process stays None when
    # the task is finished, unknown, or inaccessible.
    try:
        with open(FILENAME_PID.format(task_id), 'r') as f:
            pid = int(f.read())
        process = psutil.Process(pid)
    except (FileNotFoundError, psutil.NoSuchProcess, psutil.AccessDenied):
        process = None
    fname = FILENAME_LOG.format(task_id)
    if request.headers.get('accept') == 'text/event-stream':
        # Streaming path: re-send the whole log while the child is running.
        def status():
            # NOTE(review): tight polling loop with no sleep between reads.
            while process is not None:
                try:
                    process_running = process.status() == psutil.STATUS_RUNNING
                except psutil.NoSuchProcess:
                    process_running = False
                if not process_running:
                    break
                try:
                    with open(fname, 'r') as f:
                        resp = ["data: {}".format(line.strip()) for line in f.readlines()]
                except (FileNotFoundError, IOError):
                    resp = ['data: \n']  # keep-alive while no log exists yet
                resp.append('\n')
                yield '\n'.join(resp)
            # Final empty event so the client sees the stream end cleanly.
            yield 'data: \n\n'
        return Response(status(), content_type='text/event-stream')
    # Snapshot path: return everything after the last SEP marker, i.e. the
    # function's return value as echoed by the child process.
    try:
        with open(fname, 'r') as f:
            lines = f.readlines()
        i = len(lines) - 1
        while i >= 0:
            if lines[i].strip() == SEP:
                break
            i -= 1
        if len(lines) > 0 and i >= 0:
            resp = '\n'.join([line.strip() for line in lines[i + 1:]])
        else:
            resp = ''  # no SEP yet: task still running or produced nothing
    except (FileNotFoundError, IOError):
        resp = ''
    return jsonify(resp)
|
{"/blueprints/celery_task.py": ["/args.py", "/config.py"], "/blueprints/subprocess_task.py": ["/args.py", "/config.py"], "/args.py": ["/example_argparser.py"], "/server.py": ["/args.py", "/argparse2dict.py", "/config.py", "/blueprints/celery_task.py", "/blueprints/subprocess_task.py"]}
|
1,925
|
Ashafix/argparse2HTML
|
refs/heads/master
|
/args.py
|
from example_argparser import parser1, simple_math
from example_argparser import parser_cycler, cycler
from example_argparser import parser_wrapper, wrapper
from collections import namedtuple
# Registry entry pairing a parser's prog name with the parser itself and the
# function that consumes its parsed Namespace.
Parser = namedtuple('CLI', ('name', 'parser', 'function'))
# All CLIs exposed by the server; looked up by prog name via get_cli().
parsers = [Parser(name=parser1.prog,
                  parser=parser1,
                  function=simple_math),
           Parser(name=parser_cycler.prog,
                  parser=parser_cycler,
                  function=cycler),
           Parser(name=parser_wrapper.prog,
                  parser=parser_wrapper,
                  function=wrapper)
           ]
def get_cli(cmd):
    """Look up the registered Parser whose prog name equals *cmd*.

    :return: (True, Parser) on a unique match; (False, None) when absent;
             (False, (500, message)) when the name is ambiguous.
    """
    matches = [entry for entry in parsers if entry.name == cmd]
    if not matches:
        return False, None
    if len(matches) > 1:
        return False, (500, "more than one parser with prog name '{}' found ".format(cmd))
    return True, matches[0]
|
{"/blueprints/celery_task.py": ["/args.py", "/config.py"], "/blueprints/subprocess_task.py": ["/args.py", "/config.py"], "/args.py": ["/example_argparser.py"], "/server.py": ["/args.py", "/argparse2dict.py", "/config.py", "/blueprints/celery_task.py", "/blueprints/subprocess_task.py"]}
|
1,926
|
Ashafix/argparse2HTML
|
refs/heads/master
|
/argparse2dict.py
|
import argparse
from collections import OrderedDict
def argparser_to_dict(parser):
    """
    Converts an ArgumentParser from the argparse module to a dictionary

    :param parser: ArgumentParser, argparser which should be converted to a dictionary
    :return: dict, key: argparser.dest, value: dict with key: argparse.attribute and value: argparse.attribute_value
    """
    # Skip the implicit --help/--version actions; they are not user inputs.
    args = [a for a in parser._actions if type(a) not in (argparse._HelpAction, argparse._VersionAction)]
    arg_dict = OrderedDict()
    for arg in args:
        # Public attributes only; 'container' points back at the parser and
        # would make the dict self-referential. (Idiom fix: getattr() instead
        # of the original arg.__getattribute__().)
        arg_dict[arg.dest] = {k: getattr(arg, k) for k in dir(arg)
                              if not k.startswith('_') and k != 'container'}
        # Normalize "<class 'int'>" down to the bare type name "int" so
        # templates can render it.
        type_ = arg_dict[arg.dest].get('type')
        if type_ is not None:
            type_ = str(type_)
            if type_.startswith('<class') and "'" in type_:
                arg_dict[arg.dest]['type'] = type_.split("'")[1]
        # A default of None carries no information for the HTML form.
        if arg_dict[arg.dest].get('default', False) is None:
            del arg_dict[arg.dest]['default']
    return arg_dict
|
{"/blueprints/celery_task.py": ["/args.py", "/config.py"], "/blueprints/subprocess_task.py": ["/args.py", "/config.py"], "/args.py": ["/example_argparser.py"], "/server.py": ["/args.py", "/argparse2dict.py", "/config.py", "/blueprints/celery_task.py", "/blueprints/subprocess_task.py"]}
|
1,927
|
Ashafix/argparse2HTML
|
refs/heads/master
|
/config.py
|
import os
# Celery connection settings (used only when USE_CELERY is True).
CELERY_BROKER_URL = 'redis://localhost:6379/0'
RESULT_BACKEND = 'redis://localhost:6379/0'
# Toggle between the Celery backend and the plain-subprocess backend.
USE_CELERY = False
SERVER_PORT = 5000
# Working directory for per-task log/pid files; created at import time.
JOB_FOLDER = os.path.join(os.getcwd(), 'jobs')
os.makedirs(JOB_FOLDER, exist_ok=True)
|
{"/blueprints/celery_task.py": ["/args.py", "/config.py"], "/blueprints/subprocess_task.py": ["/args.py", "/config.py"], "/args.py": ["/example_argparser.py"], "/server.py": ["/args.py", "/argparse2dict.py", "/config.py", "/blueprints/celery_task.py", "/blueprints/subprocess_task.py"]}
|
1,928
|
Ashafix/argparse2HTML
|
refs/heads/master
|
/server.py
|
import os
from collections import OrderedDict
import importlib
import socket
from flask import Flask, request, url_for, redirect
import jinja2
import args
from argparse2dict import argparser_to_dict
from config import CELERY_BROKER_URL, RESULT_BACKEND, USE_CELERY, SERVER_PORT
app = Flask(__name__)
# Select the task backend at import time: Celery (needs a running broker)
# or plain subprocesses (the default — see config.USE_CELERY).
if USE_CELERY:
    from blueprints.celery_task import app_with_celery
    app.config['CELERY_BROKER_URL'] = CELERY_BROKER_URL
    app.config['result_backend'] = RESULT_BACKEND
    app.config['task_track_started'] = True
    app.config['worker_redirect_stdouts'] = False
    app.config['worker_hijack_root_logger'] = False
    app.register_blueprint(app_with_celery)
else:
    from blueprints.subprocess_task import app_with_subprocess
    app.register_blueprint(app_with_subprocess)
TEMPLATE_FOLDER = './templates'
TEMPLATE_FILE = "default_template.html"
# Host advertised in absolute URLs built for the templates.
SERVER_NAME = socket.gethostbyname(socket.gethostname())
template_loader = jinja2.FileSystemLoader(searchpath=TEMPLATE_FOLDER)
template_env = jinja2.Environment(loader=template_loader)
@app.route('/')
def show_command_line_options():
    """Render the HTML form for the argparser selected via ?command=...

    Falls back to the generic template unless a command-specific template
    file exists; redirects to the command list when the command is unknown.
    """
    cmd = request.args.get('command')
    found, cli = args.get_cli(cmd)
    if not found:
        return redirect(url_for('list_available_commands'))
    custom_template = '{}.html'.format(cmd)
    if os.path.isfile(os.path.join(TEMPLATE_FOLDER, custom_template)):
        template_file = custom_template
    else:
        template_file = TEMPLATE_FILE
    template = template_env.get_template(template_file)
    server = 'http://{}:{}/run/{}'.format(SERVER_NAME, SERVER_PORT, cmd)
    return template.render(title=cli.name,
                           description=cli.parser.description,
                           args=argparser_to_dict(cli.parser),
                           server=server,
                           css_url=url_for('static', filename='css/main.css'))
@app.route('/list')
def list_available_commands():
    """Render a page linking every registered command, sorted by name."""
    template = template_env.get_template('list_commands.html')
    cmds = {parser.name: 'http://{}:{}/?command={}'.format(SERVER_NAME, SERVER_PORT,
                                                           parser.name) for parser in args.parsers}
    # Idiom fix: sort the items once instead of the original manual
    # loop that copied keys into an OrderedDict one by one.
    cmds_sorted = OrderedDict(sorted(cmds.items()))
    return template.render(args=cmds_sorted, css_url=url_for('static', filename='css/main.css'))
@app.route('/refresh')
def refresh():
    """Re-import the args module to pick up newly registered parsers."""
    # NOTE(review): only args.py itself is reloaded; the modules it imports
    # (e.g. example_argparser) are not, so edits there won't be picked up.
    importlib.reload(args)
    return 'refreshed argparsers'
if __name__ == '__main__':
    # threaded=True so a long-lived SSE /status stream doesn't block other requests.
    app.run(threaded=True, host='0.0.0.0', port=SERVER_PORT)
|
{"/blueprints/celery_task.py": ["/args.py", "/config.py"], "/blueprints/subprocess_task.py": ["/args.py", "/config.py"], "/args.py": ["/example_argparser.py"], "/server.py": ["/args.py", "/argparse2dict.py", "/config.py", "/blueprints/celery_task.py", "/blueprints/subprocess_task.py"]}
|
1,937
|
hillarry/sentiment
|
refs/heads/master
|
/DlogSysInfo.py
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'C:\Users\DELL\Desktop\finalGUI\DlogSysInfo.ui'
#
# Created by: PyQt5 UI code generator 5.11.3
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_SystemInfo(object):
    """Qt Designer-generated UI for the read-only "System Info" dialog.

    Generated code — regenerate from DlogSysInfo.ui rather than editing.
    """

    def setupUi(self, SystemInfo):
        """Populate *SystemInfo* (a QDialog) with the dialog's widgets."""
        SystemInfo.setObjectName("SystemInfo")
        SystemInfo.resize(343, 315)
        icon = QtGui.QIcon()
        icon.addPixmap(QtGui.QPixmap(":/imcon/SysInfo1.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
        SystemInfo.setWindowIcon(icon)
        # Single text browser fills the whole dialog.
        self.sysInfotxtBrowser = QtWidgets.QTextBrowser(SystemInfo)
        self.sysInfotxtBrowser.setGeometry(QtCore.QRect(0, 0, 351, 321))
        self.sysInfotxtBrowser.setStyleSheet("font: 8pt \"Courier New\";")
        self.sysInfotxtBrowser.setObjectName("sysInfotxtBrowser")
        self.retranslateUi(SystemInfo)
        QtCore.QMetaObject.connectSlotsByName(SystemInfo)

    def retranslateUi(self, SystemInfo):
        """Install the translated window title and static HTML body."""
        _translate = QtCore.QCoreApplication.translate
        SystemInfo.setWindowTitle(_translate("SystemInfo", "SystemInfo"))
        self.sysInfotxtBrowser.setHtml(_translate("SystemInfo", "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n"
"<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\n"
"p, li { white-space: pre-wrap; }\n"
"</style></head><body style=\" font-family:\'Courier New\'; font-size:8pt; font-weight:400; font-style:normal;\">\n"
"<p align=\"center\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-family:\'MS Shell Dlg 2\'; font-size:11pt; font-weight:600;\">System Info</span></p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px; font-family:\'MS Shell Dlg 2\'; font-size:10pt;\"><br /></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-family:\'MS Shell Dlg 2\'; font-size:10pt;\">* Powered by Ipython,</span></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-family:\'MS Shell Dlg 2\'; font-size:10pt;\">* Designed by PyQt5 Designer</span></p></body></html>"))
import imicons
if __name__ == "__main__":
    # Manual test harness: show the dialog standalone.
    import sys
    app = QtWidgets.QApplication(sys.argv)
    SystemInfo = QtWidgets.QDialog()
    ui = Ui_SystemInfo()
    ui.setupUi(SystemInfo)
    SystemInfo.show()
    sys.exit(app.exec_())
|
{"/DesignedWelWindow.py": ["/DesignedHomeWin.py"], "/DesignedHomeWin.py": ["/DlogSysInfo.py", "/DlogExtLink.py", "/prouitest.py"]}
|
1,938
|
hillarry/sentiment
|
refs/heads/master
|
/prouitest.py
|
# -*- coding: utf-8 -*-
"""
Created on Sun Jul 21 21:41:50 2019
@author: User
"""
# -*- coding: utf-8 -*-
"""
Created on Sun Jul 14 22:02:39 2019
@author: User
"""
import nltk
import pandas as pd
import numpy as np
from nltk.stem import PorterStemmer
from nltk.corpus import stopwords
from nltk.stem import WordNetLemmatizer
# Text-preprocessing resources shared by clean_text().
lemmatizer=WordNetLemmatizer()
stemming = PorterStemmer()
stops = set(stopwords.words("english"))
try:
    file=open("D:/VITproject/train4.csv","r")
except:
    # NOTE(review): this bare except only prints — `file` then stays
    # undefined and the read_csv below raises NameError anyway. Consider
    # exiting or re-raising here instead of continuing.
    print("File not found or path is incorrect")
df=pd.read_csv(file,error_bad_lines=False,engine="python")
# Ensure every comment is a string before tokenization.
df.comment=df.comment.astype(str)
def apply_cleaning_function_to_list(X):
    """Clean every raw text in *X* with clean_text().

    :param X: iterable of raw text strings
    :return: new list of cleaned strings (input left unchanged)
    """
    # Idiom fix: comprehension instead of the original append loop.
    return [clean_text(element) for element in X]
def clean_text(raw_text):
    """Lower-case, tokenize, lemmatize and strip stop words from one text.

    :param raw_text: a single raw comment string
    :return: surviving words re-joined into one space-separated string
    """
    # Lower-case first so tokenization and stop-word matching are uniform.
    tokens = nltk.word_tokenize(raw_text.lower())
    # Keep purely alphabetic tokens (drops punctuation and numbers;
    # switch to .isalnum() to keep numbers too).
    alpha_tokens = [tok for tok in tokens if tok.isalpha()]
    # Lemmatize rather than stem (the stemming variant was retired upstream).
    lemmas = [lemmatizer.lemmatize(word) for word in alpha_tokens]
    # Drop English stop words, then rebuild a single string.
    kept = [word for word in lemmas if word not in stops]
    return " ".join(kept)
#test.head(10)
# Clean every comment in the training set.
text_to_clean =list(df['comment'])
cleaned_text = apply_cleaning_function_to_list(text_to_clean)
#for i in range(504):
#    #clean=cleaned_text[i]
#    print('Original text:',text_to_clean[i])
#    print ('\nCleaned text:', cleaned_text[i])
#    print('')
#combi=df.append(cleaned_text[i],ignore_index=True)
from sklearn.feature_extraction.text import CountVectorizer
from sklearn.feature_extraction.text import TfidfVectorizer
# Count vectorizer fitted on the full corpus; reused by testing() below.
vectorizer=CountVectorizer()
vectorizer.fit(cleaned_text)
#print("Vocabulary_content:\n{}".format(vectorizer.vocabulary_))
#print("Features names:"+str(vectorizer.get_feature_names()))
#print(df.shape)
#print(df.info())
from sklearn.model_selection import train_test_split
# 75/25 split with a fixed seed for reproducibility.
X_train, X_test, y_train, y_test = train_test_split(cleaned_text,df['sentiment'],train_size=0.75,test_size=0.25,random_state=42,shuffle=True)
#print(X_train.shape,y_train.shape)
# TF-IDF features used only for training/evaluating the classifier.
Tfidf_vect=TfidfVectorizer(max_features=5000)
Tfidf_vect.fit(cleaned_text)
Train_X_Tfidf=Tfidf_vect.transform(X_train)
Test_X_Tfidf=Tfidf_vect.transform(X_test)
#from sklearn import svm
from sklearn import metrics
#clf=svm.SVC(kernel='linear')
#clf.fit(Train_X_Tfidf,y_train)
#y_pred=clf.predict(Test_X_Tfidf)
#accuracy=metrics.accuracy_score(y_test,y_pred)*100
#print("Accuracy of the classifier=",round(accuracy,2),'%')
from sklearn.naive_bayes import MultinomialNB
# Naive Bayes classifier trained at import time; used by testing().
nb=MultinomialNB()
nb.fit(Train_X_Tfidf,y_train)
#print("Training set score:{:.3f}".format(nb.score(Train_X_Tfidf,y_train)))
#print("Test set score:{:.4f}".format(nb.score(Test_X_Tfidf,y_test)))
pred_nb=nb.predict(Test_X_Tfidf)
confusion=metrics.confusion_matrix(y_test,pred_nb)
#print("Confusion matrix:\n{}".format(confusion))
# NOTE(review): testing() transforms input with the CountVectorizer while
# nb was trained on TF-IDF features — the feature spaces likely disagree;
# confirm which vectorizer the model should use.
#text_input=input("Enter the text what you want to type:")
def testing(text_input):
    """Classify one input text with the trained model.

    :param text_input: raw text string to classify
    :return: the model's label when it is 1 or 0; None for anything else
             (mirrors the original if/elif branches exactly)
    """
    result = nb.predict(vectorizer.transform([text_input]))[0]
    if result in (1, 0):
        return result
#print(testing("Why was she so upset?"))
|
{"/DesignedWelWindow.py": ["/DesignedHomeWin.py"], "/DesignedHomeWin.py": ["/DlogSysInfo.py", "/DlogExtLink.py", "/prouitest.py"]}
|
1,939
|
hillarry/sentiment
|
refs/heads/master
|
/DesignedWelWindow.py
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'C:\Users\DELL\Desktop\finalGUI\DesignedWelWindow.ui'
#
# Created by: PyQt5 UI code generator 5.11.3
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
from DesignedHomeWin import Ui_HomeMainWindow
class Ui_DesignedWelWindow(object):
    """Qt Designer-generated UI for the welcome window, plus the hand-added
    handler that opens the home window.

    Generated code — regenerate from DesignedWelWindow.ui rather than editing
    (openMainWin was added by hand and would need re-adding).
    """

    def openMainWin(self):
        """Open the home window and hide the welcome window."""
        self.window = QtWidgets.QMainWindow()
        self.ui = Ui_HomeMainWindow()
        self.ui.setupUi(self.window)
        # NOTE(review): this references the module-level global
        # DesignedWelWindow created in the __main__ block, not an attribute
        # of self — it fails if the class is used outside that script.
        DesignedWelWindow.hide()
        self.window.show()

    def setupUi(self, DesignedWelWindow):
        """Populate *DesignedWelWindow* (a QMainWindow) with the welcome widgets."""
        DesignedWelWindow.setObjectName("DesignedWelWindow")
        DesignedWelWindow.resize(762, 326)
        icon = QtGui.QIcon()
        icon.addPixmap(QtGui.QPixmap(":/imcon/senti2.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
        DesignedWelWindow.setWindowIcon(icon)
        self.centralwidget = QtWidgets.QWidget(DesignedWelWindow)
        self.centralwidget.setObjectName("centralwidget")
        # Heading label and its icon.
        self.lbl_Heading = QtWidgets.QLabel(self.centralwidget)
        self.lbl_Heading.setGeometry(QtCore.QRect(160, 60, 541, 51))
        font = QtGui.QFont()
        font.setFamily("MV Boli")
        font.setPointSize(26)
        self.lbl_Heading.setFont(font)
        self.lbl_Heading.setObjectName("lbl_Heading")
        self.lbl_HeadIcon = QtWidgets.QLabel(self.centralwidget)
        self.lbl_HeadIcon.setGeometry(QtCore.QRect(40, 20, 121, 91))
        self.lbl_HeadIcon.setStyleSheet("image:url(:/imcon/classify1.png)")
        self.lbl_HeadIcon.setText("")
        self.lbl_HeadIcon.setObjectName("lbl_HeadIcon")
        # Welcome message and its icon.
        self.lbl_welIcon = QtWidgets.QLabel(self.centralwidget)
        self.lbl_welIcon.setGeometry(QtCore.QRect(620, 150, 51, 41))
        self.lbl_welIcon.setStyleSheet("image:url(:/imcon/senti1.png)")
        self.lbl_welIcon.setText("")
        self.lbl_welIcon.setObjectName("lbl_welIcon")
        self.lbl_wel = QtWidgets.QLabel(self.centralwidget)
        self.lbl_wel.setGeometry(QtCore.QRect(120, 160, 501, 41))
        font = QtGui.QFont()
        font.setFamily("Lucida Calligraphy")
        font.setPointSize(11)
        self.lbl_wel.setFont(font)
        self.lbl_wel.setObjectName("lbl_wel")
        # "ACCESS PROJECT" button opens the home window.
        self.btnOpenMainWindow = QtWidgets.QPushButton(self.centralwidget)
        self.btnOpenMainWindow.setGeometry(QtCore.QRect(230, 250, 271, 41))
        font = QtGui.QFont()
        font.setFamily("Lucida Calligraphy")
        font.setPointSize(11)
        self.btnOpenMainWindow.setFont(font)
        self.btnOpenMainWindow.setStyleSheet("\n"
"color: rgb(133, 66, 199);\n"
"background-color: rgb(0, 209, 0);")
        icon1 = QtGui.QIcon()
        icon1.addPixmap(QtGui.QPixmap(":/imcon/key1.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
        self.btnOpenMainWindow.setIcon(icon1)
        self.btnOpenMainWindow.setObjectName("btnOpenMainWindow")
        #click to open home window
        self.btnOpenMainWindow.clicked.connect(self.openMainWin)
        DesignedWelWindow.setCentralWidget(self.centralwidget)
        self.statusbar = QtWidgets.QStatusBar(DesignedWelWindow)
        self.statusbar.setObjectName("statusbar")
        DesignedWelWindow.setStatusBar(self.statusbar)
        self.retranslateUi(DesignedWelWindow)
        QtCore.QMetaObject.connectSlotsByName(DesignedWelWindow)

    def retranslateUi(self, DesignedWelWindow):
        """Install all translated user-visible strings."""
        _translate = QtCore.QCoreApplication.translate
        DesignedWelWindow.setWindowTitle(_translate("DesignedWelWindow", "This is a Welcome Page to You!"))
        self.lbl_Heading.setText(_translate("DesignedWelWindow", "<html><head/><body><p><span style=\" color:#00007f;\">Abusive Word Detection System</span></p></body></html>"))
        self.lbl_wel.setText(_translate("DesignedWelWindow", "<html><head/><body><p><span style=\" font-size:10pt; font-weight:600; color:#740057;\">Welcome to Our ProjectWork ,Abusive Word Detection System!.</span></p></body></html>"))
        self.btnOpenMainWindow.setText(_translate("DesignedWelWindow", "ACCESS PROJECT"))
import newresourses
if __name__ == "__main__":
    # Application entry point: show the welcome window.
    import sys
    app = QtWidgets.QApplication(sys.argv)
    DesignedWelWindow = QtWidgets.QMainWindow()
    ui = Ui_DesignedWelWindow()
    ui.setupUi(DesignedWelWindow)
    DesignedWelWindow.show()
    sys.exit(app.exec_())
|
{"/DesignedWelWindow.py": ["/DesignedHomeWin.py"], "/DesignedHomeWin.py": ["/DlogSysInfo.py", "/DlogExtLink.py", "/prouitest.py"]}
|
1,940
|
hillarry/sentiment
|
refs/heads/master
|
/DlogExtLink.py
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'C:\Users\DELL\Desktop\finalGUI\DlogExtLink.ui'
#
# Created by: PyQt5 UI code generator 5.11.3
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_ExternalLinks(object):
    """Qt Designer-generated UI for the "External Links" dialog.

    Generated code — regenerate from DlogExtLink.ui rather than editing.
    """

    def setupUi(self, ExternalLinks):
        """Populate *ExternalLinks* (a QDialog) with the dialog's widgets."""
        ExternalLinks.setObjectName("ExternalLinks")
        ExternalLinks.resize(343, 313)
        icon = QtGui.QIcon()
        icon.addPixmap(QtGui.QPixmap(":/imcon/link1.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
        ExternalLinks.setWindowIcon(icon)
        # Single text browser fills the whole dialog.
        self.ExternalLinktextBrowser = QtWidgets.QTextBrowser(ExternalLinks)
        self.ExternalLinktextBrowser.setGeometry(QtCore.QRect(0, 0, 351, 321))
        self.ExternalLinktextBrowser.setObjectName("ExternalLinktextBrowser")
        self.retranslateUi(ExternalLinks)
        QtCore.QMetaObject.connectSlotsByName(ExternalLinks)

    def retranslateUi(self, ExternalLinks):
        """Install the translated window title and static HTML body."""
        _translate = QtCore.QCoreApplication.translate
        ExternalLinks.setWindowTitle(_translate("ExternalLinks", "External Links"))
        self.ExternalLinktextBrowser.setHtml(_translate("ExternalLinks", "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n"
"<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\n"
"p, li { white-space: pre-wrap; }\n"
"</style></head><body style=\" font-family:\'MS Shell Dlg 2\'; font-size:8.25pt; font-weight:400; font-style:normal;\">\n"
"<p align=\"center\" style=\" margin-top:12px; margin-bottom:12px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:10pt; font-weight:600;\">External Links</span></p>\n"
"<p style=\" margin-top:12px; margin-bottom:12px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:10pt;\">- </span><a href=\"www.pyorg.com\"><span style=\" font-size:10pt; text-decoration: underline; color:#0000ff;\">Python</span></a></p>\n"
"<p style=\" margin-top:12px; margin-bottom:12px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" font-size:10pt;\">- </span><a href=\"www.pyqt.com\"><span style=\" font-size:10pt; text-decoration: underline; color:#0000ff;\">PyQt</span></a></p></body></html>"))
import imicons
if __name__ == "__main__":
    # Manual test harness: show the dialog standalone.
    import sys
    app = QtWidgets.QApplication(sys.argv)
    ExternalLinks = QtWidgets.QDialog()
    ui = Ui_ExternalLinks()
    ui.setupUi(ExternalLinks)
    ExternalLinks.show()
    sys.exit(app.exec_())
|
{"/DesignedWelWindow.py": ["/DesignedHomeWin.py"], "/DesignedHomeWin.py": ["/DlogSysInfo.py", "/DlogExtLink.py", "/prouitest.py"]}
|
1,941
|
hillarry/sentiment
|
refs/heads/master
|
/DesignedHomeWin.py
|
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'C:\Users\DELL\Desktop\finalGUI\DesignedHomeWin.ui'
#
# Created by: PyQt5 UI code generator 5.11.3
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
from DlogSysInfo import Ui_SystemInfo
from DlogExtLink import Ui_ExternalLinks
from DlogAbtUs import Ui_AboutUs
from PyQt5.QtWidgets import QMessageBox
import prouitest
class Ui_HomeMainWindow(object):
def openSysInfo(self):
    """Show the System Info dialog (kept on self so it isn't garbage-collected)."""
    self.window = QtWidgets.QDialog()
    self.ui = Ui_SystemInfo()
    self.ui.setupUi(self.window)
    self.window.show()
def openExternalLink(self):
    """Show the External Links dialog (kept on self so it isn't garbage-collected)."""
    self.window = QtWidgets.QDialog()
    self.ui =Ui_ExternalLinks()
    self.ui.setupUi(self.window)
    self.window.show()
def openAboutUs(self):
    """Show the About Us dialog (kept on self so it isn't garbage-collected)."""
    self.window = QtWidgets.QDialog()
    self.ui = Ui_AboutUs()
    self.ui.setupUi(self.window)
    self.window.show()
def setupUi(self, HomeMainWindow):
    """Populate *HomeMainWindow* (a QMainWindow) with the classifier UI.

    Qt Designer-generated layout plus hand-wired signal connections
    (Analyze button -> on_click, menu actions -> dialog openers).
    """
    HomeMainWindow.setObjectName("HomeMainWindow")
    HomeMainWindow.resize(1041, 622)
    icon = QtGui.QIcon()
    icon.addPixmap(QtGui.QPixmap(":/imcon/senti2.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
    HomeMainWindow.setWindowIcon(icon)
    #HomeMainWindow.setStyleSheet("border-color: rgb(85, 0, 0);\n"
    #"background-color: rgb(255, 246, 247);\n"
    #"")
    HomeMainWindow.setStyleSheet("background-color: qlineargradient(spread:pad, x1:0.068, y1:1, x2:0.113636, y2:0.807, stop:0.409091 rgba(221, 115, 106, 255), stop:1 rgba(255, 255, 255, 255));")
    self.centralwidget = QtWidgets.QWidget(HomeMainWindow)
    self.centralwidget.setObjectName("centralwidget")
    # Heading label and its icon.
    self.lbl_Heading = QtWidgets.QLabel(self.centralwidget)
    self.lbl_Heading.setGeometry(QtCore.QRect(370, 40, 541, 51))
    font = QtGui.QFont()
    font.setFamily("MV Boli")
    font.setPointSize(26)
    self.lbl_Heading.setFont(font)
    self.lbl_Heading.setObjectName("lbl_Heading")
    self.lbl_MainIcon = QtWidgets.QLabel(self.centralwidget)
    self.lbl_MainIcon.setGeometry(QtCore.QRect(270, 0, 91, 81))
    self.lbl_MainIcon.setStyleSheet("image:url(:/imcon/classify1.png)")
    self.lbl_MainIcon.setText("")
    self.lbl_MainIcon.setObjectName("lbl_MainIcon")
    # Text-entry row: prompt label, line edit, Analyze button.
    self.label_entertext = QtWidgets.QLabel(self.centralwidget)
    self.label_entertext.setGeometry(QtCore.QRect(70, 190, 111, 21))
    font = QtGui.QFont()
    font.setFamily("Courier New")
    font.setPointSize(14)
    self.label_entertext.setFont(font)
    self.label_entertext.setObjectName("label_entertext")
    self.lineEdit_InputText = QtWidgets.QLineEdit(self.centralwidget)
    self.lineEdit_InputText.setGeometry(QtCore.QRect(190, 180, 621, 41))
    font = QtGui.QFont()
    font.setFamily("Courier New")
    font.setPointSize(12)
    self.lineEdit_InputText.setFont(font)
    self.lineEdit_InputText.setObjectName("lineEdit_InputText")
    self.lineEdit_InputText.setPlaceholderText(" Enter the text what you want to type ")
    self.btnAnalyze = QtWidgets.QPushButton(self.centralwidget)
    self.btnAnalyze.setGeometry(QtCore.QRect(840, 180, 111, 41))
    font = QtGui.QFont()
    font.setFamily("Courier New")
    font.setPointSize(12)
    self.btnAnalyze.setFont(font)
    self.btnAnalyze.setStyleSheet("background-color: rgb(0, 255, 0);\n"
"border-color: rgb(239, 0, 0);")
    # Run the classifier on click (on_click is defined later in this class).
    self.btnAnalyze.clicked.connect(self.on_click)
    icon1 = QtGui.QIcon()
    icon1.addPixmap(QtGui.QPixmap(":/imcon/system2.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
    self.btnAnalyze.setIcon(icon1)
    self.btnAnalyze.setObjectName("btnAnalyze")
    # Static information labels.
    self.lbl_Language = QtWidgets.QLabel(self.centralwidget)
    self.lbl_Language.setGeometry(QtCore.QRect(120, 310, 211, 31))
    font = QtGui.QFont()
    font.setFamily("Courier New")
    font.setPointSize(14)
    self.lbl_Language.setFont(font)
    self.lbl_Language.setObjectName("lbl_Language")
    self.label_sentiment = QtWidgets.QLabel(self.centralwidget)
    self.label_sentiment.setGeometry(QtCore.QRect(120, 400, 201, 41))
    font = QtGui.QFont()
    font.setFamily("Courier New")
    font.setPointSize(14)
    self.label_sentiment.setFont(font)
    self.label_sentiment.setObjectName("label_sentiment")
    self.label_classification = QtWidgets.QLabel(self.centralwidget)
    self.label_classification.setGeometry(QtCore.QRect(710, 310, 171, 31))
    font = QtGui.QFont()
    font.setFamily("Courier New")
    font.setPointSize(14)
    self.label_classification.setFont(font)
    self.label_classification.setObjectName("label_classification")
    # Result buttons: disabled until on_click enables one of them.
    self.btnAbusive = QtWidgets.QPushButton(self.centralwidget)
    self.btnAbusive.setStyleSheet("background-color: qlineargradient(spread:pad, x1:0, y1:0, x2:1, y2:0, stop:0.0340909 rgba(255, 97, 90, 255), stop:0.0511364 rgba(252, 63, 131, 14), stop:1 rgba(255, 36, 36, 228));")
    self.btnAbusive.setGeometry(QtCore.QRect(610, 380, 161, 71))
    self.btnAbusive.setEnabled(False)
    font = QtGui.QFont()
    font.setFamily("Courier New")
    font.setPointSize(14)
    self.btnAbusive.setFont(font)
    ## self.btnAbusive.setStyleSheet("color: rgb(255, 0, 0);\n"
    ##"gridline-color: rgb(193, 0, 0);")
    icon2 = QtGui.QIcon()
    icon2.addPixmap(QtGui.QPixmap(":/imcon/Icon4.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
    self.btnAbusive.setIcon(icon2)
    self.btnAbusive.setObjectName("btnAbusive")
    self.btnNonAbusive = QtWidgets.QPushButton(self.centralwidget)
    self.btnNonAbusive.setStyleSheet("background-color: qlineargradient(spread:pad, x1:0, y1:0, x2:1, y2:0, stop:0.0340909 rgba(0, 207, 0, 255), stop:0.0511364 rgba(252, 63, 131, 14), stop:0.426136 rgba(16, 203, 4, 221));")
    self.btnNonAbusive.setGeometry(QtCore.QRect(790, 380, 161, 71))
    self.btnNonAbusive.setEnabled(False)
    font = QtGui.QFont()
    font.setFamily("Courier New")
    font.setPointSize(14)
    self.btnNonAbusive.setFont(font)
    ## self.btnNonAbusive.setStyleSheet("color: rgb(0, 150, 0);\n"
    ##"gridline-color: rgb(0, 166, 0);")
    icon3 = QtGui.QIcon()
    icon3.addPixmap(QtGui.QPixmap(":/imcon/Icon3.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
    self.btnNonAbusive.setIcon(icon3)
    self.btnNonAbusive.setObjectName("btnNonAbusive")
    HomeMainWindow.setCentralWidget(self.centralwidget)
    # Menu bar and status bar.
    self.menubar = QtWidgets.QMenuBar(HomeMainWindow)
    self.menubar.setGeometry(QtCore.QRect(0, 0, 1041, 21))
    self.menubar.setObjectName("menubar")
    self.menuAbout = QtWidgets.QMenu(self.menubar)
    self.menuAbout.setObjectName("menuAbout")
    HomeMainWindow.setMenuBar(self.menubar)
    self.statusbar = QtWidgets.QStatusBar(HomeMainWindow)
    self.statusbar.setObjectName("statusbar")
    HomeMainWindow.setStatusBar(self.statusbar)
    # "About" menu actions, each wired to its dialog opener.
    self.actionSystem_Info = QtWidgets.QAction(HomeMainWindow)
    icon4 = QtGui.QIcon()
    icon4.addPixmap(QtGui.QPixmap(":/imcon/SysInfo1.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
    self.actionSystem_Info.setIcon(icon4)
    font = QtGui.QFont()
    font.setFamily("Courier New")
    self.actionSystem_Info.setFont(font)
    self.actionSystem_Info.setObjectName("actionSystem_Info")
    #open System Info Dialog
    self.actionSystem_Info.triggered.connect(self.openSysInfo)
    self.actionExternal_Link = QtWidgets.QAction(HomeMainWindow)
    icon5 = QtGui.QIcon()
    icon5.addPixmap(QtGui.QPixmap(":/imcon/link3.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
    self.actionExternal_Link.setIcon(icon5)
    font = QtGui.QFont()
    font.setFamily("Courier New")
    self.actionExternal_Link.setFont(font)
    self.actionExternal_Link.setObjectName("actionExternal_Link")
    #open External Links Dialog
    self.actionExternal_Link.triggered.connect(self.openExternalLink)
    self.actionAbout_Us = QtWidgets.QAction(HomeMainWindow)
    icon6 = QtGui.QIcon()
    icon6.addPixmap(QtGui.QPixmap(":/imcon/about2.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
    self.actionAbout_Us.setIcon(icon6)
    font = QtGui.QFont()
    font.setFamily("Courier New")
    self.actionAbout_Us.setFont(font)
    self.actionAbout_Us.setObjectName("actionAbout_Us")
    #open About Us Dialog
    self.actionAbout_Us.triggered.connect(self.openAboutUs)
    self.menuAbout.addAction(self.actionSystem_Info)
    self.menuAbout.addAction(self.actionExternal_Link)
    self.menuAbout.addAction(self.actionAbout_Us)
    self.menubar.addAction(self.menuAbout.menuAction())
    self.retranslateUi(HomeMainWindow)
    # NOTE(review): Analyze is connected twice — to on_click above AND to
    # clear() here, so the input is wiped on every click; confirm intended.
    self.btnAnalyze.clicked.connect(self.lineEdit_InputText.clear)
    QtCore.QMetaObject.connectSlotsByName(HomeMainWindow)
def retranslateUi(self, HomeMainWindow):
    """Apply the localized display strings to every user-visible widget.

    pyuic-style boilerplate: each string is routed through
    QCoreApplication.translate so Qt Linguist translations can be swapped in.
    """
    _translate = QtCore.QCoreApplication.translate
    HomeMainWindow.setWindowTitle(_translate("HomeMainWindow", "Abusive Word Detection System"))
    self.lbl_Heading.setText(_translate("HomeMainWindow", "<html><head/><body><p><span style=\" color:#55007f;\">Abusive Word Detection System</span></p></body></html>"))
    self.label_entertext.setText(_translate("HomeMainWindow", "Enter Text"))
    self.btnAnalyze.setText(_translate("HomeMainWindow", "Analyze"))
    self.lbl_Language.setText(_translate("HomeMainWindow", "<html><head/><body><p><span style=\" color:#47006b;\">Language:</span><span style=\" color:#00b900;\">English</span></p></body></html>"))
    self.label_sentiment.setText(_translate("HomeMainWindow", "<html><head/><body><p><span style=\" color:#3e005d;\">Accuracy:</span></p></body></html>"))
    self.label_classification.setText(_translate("HomeMainWindow", "<html><head/><body><p><span style=\" color:#55007f;\">Classification</span></p></body></html>"))
    self.btnAbusive.setText(_translate("HomeMainWindow", "Abusive"))
    self.btnNonAbusive.setText(_translate("HomeMainWindow", "NonAbusive"))
    self.menuAbout.setTitle(_translate("HomeMainWindow", "About"))
    self.actionSystem_Info.setText(_translate("HomeMainWindow", "System Info"))
    self.actionExternal_Link.setText(_translate("HomeMainWindow", "External Link"))
    self.actionAbout_Us.setText(_translate("HomeMainWindow", "About Us"))
    #self.actionAbout_Us.setShortcut(_translate("HomeMainWindow", "Ctrl+U"))
def on_click(self):
    """Classify the entered text and surface the verdict via message boxes.

    Reads the line-edit text, runs it through prouitest.testing() and flags
    the sentence abusive (command == 1) or non-abusive (command == 0).
    NOTE(review): prouitest.testing presumably returns 1 for abusive and
    0 for clean text — confirm against prouitest.py.
    """
    text_input=self.lineEdit_InputText.text()
    x=str(text_input)
    command=prouitest.testing(x)
    # Accuracy figure is a hard-coded label, not computed from this input.
    self.label_sentiment.setText("<html><head/><body><p><span style=\" color:#3e005d;\">Accuracy:85.88%</span></p></body></html>")
    if command==1:
        # Indicator button is enabled only while the warning dialog shows.
        self.btnAbusive.setEnabled(True)
        QMessageBox.warning(QMessageBox(),"Warning","We can't allow your abusive message")
        self.btnAbusive.setEnabled(False)
    elif command==0:
        self.btnNonAbusive.setEnabled(True)
        QMessageBox.information(QMessageBox(),"Allow","We allow your message")
        self.btnNonAbusive.setEnabled(False)
# NOTE(review): legacy popup helpers kept below as commented-out code;
# QMessageBox.warning/.information calls in on_click replaced them.
'''def show_popup(self):
msg=QMessageBox()
msg.setWindowTitle("Warning!")
msg.setIcon(QtWidgets.QMessageBox.Warning)
msg.setText("Your sentence has abusive words.We can't allow your message.")
x=msg.exec_()
self.btnAbusive.setEnabled(False)'''
'''def show(self):
msg=QMessageBox()
msg.setWindowTitle("Allow")
msg.setIcon(QtWidgets.QMessageBox.Information)
msg.setText("Your message has no abusive words.We can allow your message.")
x=msg.exec_()
self.btnNonAbusive.setEnabled(False)'''
import newresourses
# Standalone launch: build the main window and enter the Qt event loop.
if __name__ == "__main__":
    import sys
    app = QtWidgets.QApplication(sys.argv)
    HomeMainWindow = QtWidgets.QMainWindow()
    ui = Ui_HomeMainWindow()
    ui.setupUi(HomeMainWindow)
    HomeMainWindow.show()
    sys.exit(app.exec_())
|
{"/DesignedWelWindow.py": ["/DesignedHomeWin.py"], "/DesignedHomeWin.py": ["/DlogSysInfo.py", "/DlogExtLink.py", "/prouitest.py"]}
|
1,951
|
Dorothylyly/proxyPool
|
refs/heads/master
|
/loggingCustom.py
|
import logging
# Log line layout and timestamp format shared by every caller of log().
LOG_FORMAT = "%(asctime)s - %(levelname)s - %(message)s"
DATE_FORMAT = "%m/%d/%Y %H:%M:%S %p"


def log():
    """Configure root logging (once) and hand back the ``logging`` module.

    ``basicConfig`` is a no-op after the first effective call, so invoking
    ``log()`` repeatedly is cheap; callers use the return value directly as
    ``log().info(...)`` / ``log().warning(...)``.
    """
    logging.basicConfig(
        filename="RedisClient.log",
        level=logging.DEBUG,
        format=LOG_FORMAT,
        datefmt=DATE_FORMAT,
    )
    return logging
|
{"/RedisClient.py": ["/loggingCustom.py"], "/together.py": ["/loggingCustom.py", "/tester.py"], "/tester.py": ["/RedisClient.py"]}
|
1,952
|
Dorothylyly/proxyPool
|
refs/heads/master
|
/RedisClient.py
|
import redis
# 生成随机数
from random import choice
"""
操作缓存数据库的有序集合,实现分数的设置,代理的获取
检测代理
"""
# Score ceiling: 100 marks a verified, highly-usable proxy.
MAX_SCORE = 100
# Score floor: a proxy at or below 0 is evicted from the pool.
MIN_SCORE = 0
# Score assigned to a freshly scraped, not-yet-verified proxy.
INITIAL_SCORE = 10
# Redis connection settings and the sorted-set key holding the pool.
REDIS_HOST = "localhost"
REDIS_PORT = 6379
REDIS_PASSWORD = "123456"
REDIS_KEY = "proxies"
from loggingCustom import log as logging
class RedisClient(object):
    """Thin wrapper around a Redis sorted set used as a proxy pool.

    Each proxy is a member of the sorted set ``REDIS_KEY``; its score encodes
    health (MAX_SCORE = verified usable, <= MIN_SCORE = evicted).
    """

    def __init__(self, host=REDIS_HOST, port=REDIS_PORT, password=REDIS_PASSWORD):
        """Open the Redis connection.

        :param host: Redis host
        :param port: Redis port
        :param password: Redis password
        decode_responses=True makes zrange/zrangebyscore return str proxies.
        """
        self.db = redis.StrictRedis(host=host, port=port, password=password, decode_responses=True)

    def add(self, proxy, score=INITIAL_SCORE):
        """Insert a new proxy with the initial score; leave known proxies alone.

        Bug fix: the original ``if not self.db.zscore(...)`` treated a proxy
        whose score happened to be 0 as missing and re-added it; an explicit
        ``is None`` check distinguishes "absent" from "score 0".
        """
        if self.db.zscore(REDIS_KEY, proxy) is None:
            # redis-py >= 3.0 takes a {member: score} mapping for zadd.
            return self.db.zadd(REDIS_KEY, {proxy: score})

    def random(self):
        """Return a random usable proxy.

        Prefers proxies at MAX_SCORE; otherwise falls back to the top 100 by
        score. Raises TimeoutError when the pool is empty (Tester catches it).
        """
        result = self.db.zrangebyscore(REDIS_KEY, MAX_SCORE, MAX_SCORE)
        if len(result):
            # Random choice spreads load across the verified proxies.
            return choice(result)
        else:
            result = self.db.zrevrange(REDIS_KEY, 0, 100)
            if len(result):
                return choice(result)
            else:
                logging().warning("raise PoolEmptyError")
                raise TimeoutError

    def decrease(self, proxy):
        """Penalize a proxy by one point; drop it once it is at/below MIN_SCORE.

        :param proxy: proxy address
        :return: the new score, or the number of removed members on eviction
        """
        score = self.db.zscore(REDIS_KEY, proxy)
        if score and score > MIN_SCORE:
            logging().info("proxy{}score{} - 1".format(proxy, score))
            print("proxy", proxy, "score", score, "-1")
            # redis-py >= 3.0 signature is zincrby(name, amount, value);
            # the original passed (name, value, amount) in 2.x order.
            return self.db.zincrby(REDIS_KEY, -1, proxy)
        else:
            print("proxy", proxy, "score too low,out")
            return self.db.zrem(REDIS_KEY, proxy)

    def exits(self, proxy):
        """Return True when the proxy is already pooled (name kept for callers; sic 'exists')."""
        return not self.db.zscore(REDIS_KEY, proxy) is None

    def max(self, proxy):
        """Promote a proxy to MAX_SCORE after it passes validation."""
        logging().info("proxy{}ok,set score {}".format(proxy, MAX_SCORE))
        print("proxy", proxy, "ok,set score", MAX_SCORE)
        # Mapping form required by redis-py >= 3.0.
        return self.db.zadd(REDIS_KEY, {proxy: MAX_SCORE})

    def count(self):
        """Return the number of pooled proxies."""
        return self.db.zcard(REDIS_KEY)

    def all(self):
        """Return every proxy whose score lies in [MIN_SCORE, MAX_SCORE]."""
        return self.db.zrangebyscore(REDIS_KEY, MIN_SCORE, MAX_SCORE)
|
{"/RedisClient.py": ["/loggingCustom.py"], "/together.py": ["/loggingCustom.py", "/tester.py"], "/tester.py": ["/RedisClient.py"]}
|
1,953
|
Dorothylyly/proxyPool
|
refs/heads/master
|
/together.py
|
from loggingCustom import log as logging
from tester import Tester
from Getter import Getter
import time
# 多线程模块
from multiprocessing import Process
from flaskWeb import app
# Component switches and loop periods (seconds) for the Scheduler below.
API_ENABLE = True
TESTER_CYCLE = 20
GETTER_CYCLE = 20
TESTER_ENABLED = True
GETTER_ENABLE = True
class Scheduler():
    """Launches the proxy-pool workers (tester, getter, Flask API) as processes."""

    def schedule_tester(self, cycle=TESTER_CYCLE):
        """Re-run the proxy tester every `cycle` seconds, forever.

        :param cycle: sleep interval between tester runs (seconds)
        """
        tester = Tester()
        while True:
            logging().info("测试器开始运行")
            print("测试器开始运行")
            tester.run()
            time.sleep(cycle)

    def schedule_getter(self, cycle=GETTER_CYCLE):
        """Re-run the proxy fetcher every `cycle` seconds, forever."""
        getter = Getter()
        while True:
            logging().info("开始抓取代理")
            print("开始抓取代理")
            # kick off one fetch pass
            getter.run()
            time.sleep(cycle)

    def schedule_api(self):
        """Serve the Flask API (blocking call).

        :return: never returns under normal operation
        """
        app.run()

    def run(self):
        """Fork one child process per enabled component and start them all."""
        logging().info("代理池开始运行")
        print("代理池开始运行")
        # children are spawned with this (main) process as their parent
        if TESTER_ENABLED:
            tester_process = Process(target=self.schedule_tester)
            tester_process.start()
        if GETTER_ENABLE:
            getter_process = Process(target=self.schedule_getter)
            getter_process.start()
        if API_ENABLE:
            api_process = Process(target=self.schedule_api)
            api_process.start()
# Entry point: start every enabled proxy-pool component.
if __name__=="__main__":
    Scheduler().run()
|
{"/RedisClient.py": ["/loggingCustom.py"], "/together.py": ["/loggingCustom.py", "/tester.py"], "/tester.py": ["/RedisClient.py"]}
|
1,954
|
Dorothylyly/proxyPool
|
refs/heads/master
|
/tester.py
|
from RedisClient import RedisClient
import aiohttp
import asyncio
import time
# HTTP status codes accepted as proof that a proxy works.
VALID_STATUS_CODE = [200]
TEST_URL = "http://desk.zol.com.cn/fengjing/"
# Upper bound on how many proxies are validated in one batch.
BATCH_TEST_SIZE = 100
# aiohttp powers the coroutine-based proxy checks below.
class Tester(object):
    """Asynchronously validates pooled proxies by fetching TEST_URL through them."""

    def __init__(self):
        # Connection to the Redis-backed proxy pool.
        self.redis = RedisClient()

    async def test_single_proxy(self, proxy):
        """Check one proxy; promote it on success, penalize it on failure.

        :param proxy: proxy address ("host:port"), str or bytes.
        """
        # Disable certificate verification when tunnelling through proxies.
        # NOTE(review): verify_ssl is deprecated in newer aiohttp releases
        # (ssl=False is the modern spelling) — confirm the pinned version.
        conn = aiohttp.TCPConnector(verify_ssl=False)
        # One short-lived session per check.
        async with aiohttp.ClientSession(connector=conn) as session:
            try:
                # Redis may hand back bytes; normalize to str first.
                if isinstance(proxy,bytes):
                    proxy = proxy.decode('utf-8')
                real_proxy="http://"+proxy
                print("testing...", proxy)
                async with session.get(TEST_URL, proxy=real_proxy, timeout=15) as response:
                    if response.status in VALID_STATUS_CODE:
                        # Working proxy: promote to the maximum score.
                        self.redis.max(proxy)
                        print("proxy ok", proxy)
                    else:
                        # Reachable but wrong status: penalize by one point.
                        self.redis.decrease(proxy)
                        print("return code is illegal", proxy)
            except (aiohttp.ClientError, aiohttp.ClientConnectorError, TimeoutError,AttributeError):
                # Any transport failure also counts against the proxy.
                self.redis.decrease(proxy)
                print("proxy request fail", proxy)

    def run(self):
        """Pull every proxy from the pool and validate them in batches.

        Batches of BATCH_TEST_SIZE are awaited together, with a 5 s pause
        between batches to avoid hammering the target site.
        """
        print("测试器开始运行")
        try:
            proxies = self.redis.all()
            # NOTE(review): asyncio.get_event_loop() is deprecated outside a
            # running loop on Python 3.10+ — confirm the target runtime.
            loop = asyncio.get_event_loop()
            for i in range(0, len(proxies), BATCH_TEST_SIZE):
                # Validate at most BATCH_TEST_SIZE proxies per loop run.
                test_proxies = proxies[i:i+BATCH_TEST_SIZE]
                tasks = [self.test_single_proxy(proxy) for proxy in test_proxies]
                loop.run_until_complete(asyncio.wait(tasks))
                time.sleep(5)
        except Exception as e:
            print("error", e.args)
|
{"/RedisClient.py": ["/loggingCustom.py"], "/together.py": ["/loggingCustom.py", "/tester.py"], "/tester.py": ["/RedisClient.py"]}
|
1,959
|
SarthakJShetty/pyResearchInsights
|
refs/heads/master
|
/pyResearchInsights/__init__.py
|
from pyResearchInsights.Scraper import scraper_main
from pyResearchInsights.Cleaner import cleaner_main
from pyResearchInsights.Analyzer import analyzer_main
from pyResearchInsights.NLP_Engine import nlp_engine_main
from pyResearchInsights.common_functions import pre_processing, arguments_parser, end_process
from pyResearchInsights.system_functions import tarballer, rm_original_folder
|
{"/pyResearchInsights/__init__.py": ["/pyResearchInsights/Scraper.py", "/pyResearchInsights/Cleaner.py", "/pyResearchInsights/Analyzer.py", "/pyResearchInsights/NLP_Engine.py", "/pyResearchInsights/common_functions.py", "/pyResearchInsights/system_functions.py"], "/pyResearchInsights/Visualizer.py": ["/pyResearchInsights/common_functions.py"], "/pyResearchInsights/Scraper.py": ["/pyResearchInsights/common_functions.py"], "/pyResearchInsights/system_functions.py": ["/pyResearchInsights/common_functions.py"], "/pyResearchInsights/NLP_Engine.py": ["/pyResearchInsights/common_functions.py", "/pyResearchInsights/Visualizer.py"], "/pyResearchInsights/Cleaner.py": ["/pyResearchInsights/common_functions.py"], "/pyResearchInsights/example.py": ["/pyResearchInsights/Scraper.py", "/pyResearchInsights/Analyzer.py", "/pyResearchInsights/Cleaner.py", "/pyResearchInsights/NLP_Engine.py", "/pyResearchInsights/common_functions.py", "/pyResearchInsights/system_functions.py"], "/pyResearchInsights/Analyzer.py": ["/pyResearchInsights/common_functions.py"]}
|
1,960
|
SarthakJShetty/pyResearchInsights
|
refs/heads/master
|
/pyResearchInsights/common_functions.py
|
'''Hello! This script contains functions that are resued by other pieces of code and scripts belonging to this
project as well.
Checkout the README.md for more details regarding the project itself.
Sarthak J Shetty
12/09/2018'''
'''datetime is used while building the database logs'''
from datetime import datetime
'''Importing OS functions to build the folders for the LOG run here as well'''
import os
'''Importing argparse to parse the keywords, then supplied to the Scraper.py code'''
import argparse
def status_logger(status_logger_name, status_key):
    """Print a status message and append it to ``<status_logger_name>.txt``.

    :param status_logger_name: log file path without the '.txt' suffix.
    :param status_key: human-readable status message to record.
    """
    # Snapshot the clock once so hour/minute/second come from the same instant
    # (the original read datetime.now() three times).
    now = datetime.now().time()
    current_hour = str(now.hour)
    current_minute = str(now.minute)
    current_second = str(now.second)
    '''Logging the complete_status_key and printing the complete_status_key'''
    complete_status_key = "[INFO]"+current_hour+":"+current_minute+":"+current_second+" "+status_key
    print(complete_status_key)
    # 'with' guarantees the handle is closed even if the write raises;
    # the original open/write/close leaked the handle on failure.
    with open(status_logger_name+'.txt', 'a') as status_log:
        status_log.write(complete_status_key+"\n")
def status_logger_creator(abstracts_log_name):
    """Standalone session setup for users of the Bias components.

    Creates a session folder named after the last path component of
    ``abstracts_log_name`` and returns the status-logger base name plus
    that folder name.
    """
    session_folder_name = abstracts_log_name.rsplit('/', 1)[-1]
    os.makedirs(session_folder_name)
    status_logger_name = session_folder_name + "/" + "Status_Logger"
    return status_logger_name, session_folder_name
def pre_processing(keywords):
    """Set up the per-session LOG folder and logger files.

    :param keywords: keyword list, or a whitespace-separated string.
    :return: (abstracts_log_name, status_logger_name) — base names without
             file extensions.
    """
    if type(keywords) == str:
        # Standalone callers may pass a plain string instead of a list.
        keywords = argument_formatter(keywords)
    # Timestamp components used in folder and file names.
    now = datetime.now()
    run_start_year = str(now.date().year)
    run_start_month = str(now.date().month)
    run_start_day = str(now.date().day)
    run_start_date = str(now.date())
    run_start_hour = str(now.time().hour)
    run_start_minute = str(now.time().minute)
    run_start_second = str(now.time().second)
    # Keywords become part of the session folder name, joined by underscores.
    folder_attachement = "_".join(keywords)
    logs_folder_name = f"LOGS/LOG_{run_start_date}_{run_start_hour}_{run_start_minute}_{folder_attachement}"
    abstracts_log_name = f"{logs_folder_name}/Abstract_Database_{run_start_date}_{run_start_hour}_{run_start_minute}"
    status_logger_name = f"{logs_folder_name}/Status_Logger_{run_start_date}_{run_start_hour}_{run_start_minute}"
    # Create the session folder if this is the first file for it.
    os.makedirs(logs_folder_name, exist_ok=True)
    # Record the session date and start time at the top of the status log.
    with open(status_logger_name + '.txt', 'a') as status_log:
        status_log.write(f"Session: {run_start_day}/{run_start_month}/{run_start_year}\n")
        status_log.write(f"Time: {run_start_hour}:{run_start_minute}:{run_start_second}\n")
    status_logger(status_logger_name, "Built LOG folder for session")
    return abstracts_log_name, status_logger_name
def keyword_url_generator(keywords_to_search):
    """Build the Springer search URLs and the '+'-joined query string.

    :param keywords_to_search: keyword list, or a whitespace-separated string.
    :return: (start_url, abstract_url, query_string)
    """
    if type(keywords_to_search) == str:
        '''If the user uses the function independently of the argument_parser() we need this to convert the keywords to a list of words'''
        keywords = argument_formatter(keywords_to_search)
    else:
        # Bug fix: the original never assigned `keywords` when a list was
        # passed, raising NameError; lists now pass through unchanged.
        keywords = keywords_to_search
    # '+'-separated query handles both the single- and multi-keyword cases.
    query_string = "+".join(keywords)
    start_url = "https://link.springer.com/search/page/"
    abstract_url = 'https://link.springer.com'
    '''We take the keywords here and generate the URLs here'''
    return start_url, abstract_url, query_string
def abstract_id_log_name_generator(abstracts_log_name):
    """Derive the Abstract_ID log name from the abstracts log name.

    Replaces the 'Abstract' segment with 'Abstract_ID' and appends a
    trailing underscore.
    """
    parts = abstracts_log_name.split('Abstract')
    return f"{parts[0]}Abstract_ID{parts[1]}_"
def argument_formatter(argument_string):
    """Split a whitespace-separated keyword string into a token list.

    Shared helper used across the pyResearchInsights stack.
    """
    tokens = argument_string.split()
    return tokens
def arguments_parser():
    """Parse the CLI arguments controlling the Springer scrape.

    Arguments:
      --keywords: term(s) searched on Springer (default "Tiger").
      --trends:   term whose research trend histogram is generated
                  (default "Conservation").
      --paper:    whether PDFs should be downloaded too (currently unused).
    :return: (keywords, trends) as token lists; trends is lowercased to
             match the lowercased abstract text downstream.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("--keywords", help="Keyword to search on Springer", default="Tiger")
    parser.add_argument("--trends", help="Keywords to generate the trends histogram for", default="Conservation")
    parser.add_argument("--paper", help="If papers have to be downloaded as well", default="No")
    arguments = parser.parse_args()
    if arguments.keywords:
        # Split the raw string into the token list the scraper expects.
        keywords = argument_formatter(arguments.keywords)
    if arguments.trends:
        trends = arguments.trends
    # Lowercase so the term matches the lowercased abstract vocabulary.
    trends = argument_formatter(trends.lower())
    return keywords, trends
def end_process(status_logger_name):
    """Record that the pipeline finished successfully."""
    status_logger(status_logger_name, "Process has successfully ended")
|
{"/pyResearchInsights/__init__.py": ["/pyResearchInsights/Scraper.py", "/pyResearchInsights/Cleaner.py", "/pyResearchInsights/Analyzer.py", "/pyResearchInsights/NLP_Engine.py", "/pyResearchInsights/common_functions.py", "/pyResearchInsights/system_functions.py"], "/pyResearchInsights/Visualizer.py": ["/pyResearchInsights/common_functions.py"], "/pyResearchInsights/Scraper.py": ["/pyResearchInsights/common_functions.py"], "/pyResearchInsights/system_functions.py": ["/pyResearchInsights/common_functions.py"], "/pyResearchInsights/NLP_Engine.py": ["/pyResearchInsights/common_functions.py", "/pyResearchInsights/Visualizer.py"], "/pyResearchInsights/Cleaner.py": ["/pyResearchInsights/common_functions.py"], "/pyResearchInsights/example.py": ["/pyResearchInsights/Scraper.py", "/pyResearchInsights/Analyzer.py", "/pyResearchInsights/Cleaner.py", "/pyResearchInsights/NLP_Engine.py", "/pyResearchInsights/common_functions.py", "/pyResearchInsights/system_functions.py"], "/pyResearchInsights/Analyzer.py": ["/pyResearchInsights/common_functions.py"]}
|
1,961
|
SarthakJShetty/pyResearchInsights
|
refs/heads/master
|
/pyResearchInsights/Visualizer.py
|
'''Hello! This code code is part of the pyResearchInsights project.
We will be displaying the results from the NLP_Engine.py code here, using primarily using pyLDAvis library.
Check out the repository README.md for a high-level overview of the project and the objective.
Sarthak J. Shetty
24/11/2018'''
from pyResearchInsights.common_functions import argument_formatter, status_logger
'''import matplotlib as plt'''
import matplotlib.pyplot as plt
'''Library necessary to develop the html visualizations'''
import pyLDAvis
'''Generating dictionary from the textual_data that is lemmatized'''
from collections import Counter
'''Importing pandas to create the dataframes to plot the histograms'''
import pandas as pd
'''Importing the colormap functions using matplotlib'''
from matplotlib import cm
def visualizer_generator(lda_model, corpus, id2word, logs_folder_name, status_logger_name):
    """Save the pyLDAvis topic-model visualization as HTML.

    :param lda_model: trained gensim LDA model.
    :param corpus: bag-of-words corpus fed to the model.
    :param id2word: gensim dictionary mapping ids to tokens.
    :param logs_folder_name: session folder receiving the HTML file.
    :param status_logger_name: status log base name.
    :return: topic ordering used by the pyLDAvis layout.
    """
    visualizer_generator_start_status_key = "Preparing the topic modeling visualization"
    status_logger(status_logger_name, visualizer_generator_start_status_key)
    # NOTE(review): pyLDAvis.gensim_models is a submodule; it needs an explicit
    # "import pyLDAvis.gensim_models" for this attribute access — confirm the
    # module's import block provides it.
    textual_data_visualization = pyLDAvis.gensim_models.prepare(lda_model, corpus, id2word)
    pyLDAvis.save_html(textual_data_visualization, logs_folder_name+"/"+"Data_Visualization_Topic_Modelling.html")
    '''Here, we generate the order of topics according to the LDA visualization'''
    topic_order = [textual_data_visualization[0].iloc[topic].name for topic in range(lda_model.num_topics)]
    # Bug fix: this completion log previously sat after `return` and never ran.
    visualizer_generator_end_status_key = "Prepared the topic modeling visualization"+" "+logs_folder_name+"/"+"Data_Visualization_Topic_Modelling.html"
    status_logger(status_logger_name, visualizer_generator_end_status_key)
    return topic_order
def topic_builder(lda_model, topic_order, num_topics, num_keywords, textual_data_lemmatized, logs_folder_name, status_logger_name):
    """Save one frequency/weight bar chart per topic to the session folder.

    :param lda_model: trained gensim LDA model.
    :param topic_order: topic ordering from the pyLDAvis visualization.
    :param num_topics: number of topics (also sizes the colormap).
    :param num_keywords: keywords charted per topic.
    :param textual_data_lemmatized: lemmatized documents (list of token lists).
    :param logs_folder_name: session folder receiving the PNGs.
    :param status_logger_name: status log base name.
    """
    topic_builder_start_status_key = "Preparing the frequency and weights vs keywords charts"
    status_logger(status_logger_name, topic_builder_start_status_key)
    # One colormap slot per topic so each chart gets a distinct color.
    # NOTE(review): cm.get_cmap is deprecated in newer matplotlib — confirm version.
    colorchart = cm.get_cmap('plasma', num_topics)
    topics = lda_model.show_topics(num_topics = -1, num_words = num_keywords, formatted=False)
    # Corpus-wide word frequencies for the "word count" bars.
    data_flat = [w for w_list in textual_data_lemmatized for w in w_list]
    counter = Counter(data_flat)
    '''Generating a pandas dataframe that contains the word, topic_id, importance and word_count'''
    out = []
    for i, topic in topics:
        for word, weight in topic:
            out.append([word, i , weight, counter[word]])
    '''We will use bits of this dataframe across this function'''
    df = pd.DataFrame(out, columns=['word', 'topic_id', 'importance', 'word_count'])
    for topic in topic_order:
        '''Progressively generating the figures comprising the weights and frequencies for each keyword in each topic'''
        _, ax = plt.subplots(1, 1, figsize=[20, 15])
        x_axis = [x_axis_element for x_axis_element in range(0, num_keywords)]
        '''Creating the x_axis labels here, which is the topic keywords'''
        x_axis_labels = [element for element in df.loc[df.topic_id==topic, 'word']]
        y_axis = [round(element, 2) for element in df.loc[df.topic_id==topic, 'word_count']]
        # Ten evenly spaced word-count ticks for the left axis.
        word_count_list = [word_count for word_count in df.loc[df.topic_id==topic, 'word_count']]
        word_count_increment = (max(word_count_list)/10)
        y_axis_labels = [round(0 + increment*(word_count_increment)) for increment in range(0, 10)]
        # Ten evenly spaced weight ticks for the twin (right) axis.
        word_importance_list = [word_count for word_count in df.loc[df.topic_id==topic, 'importance']]
        word_importance_increment = (max(word_importance_list)/10)
        y_axis_twin_labels = [0 + increment*(word_importance_increment) for increment in range(0, 10)]
        plt.xticks(x_axis, x_axis_labels, rotation=40, horizontalalignment='right', fontsize = 25)
        ax.bar(x_axis, y_axis, width=0.5, alpha=0.3, color=colorchart.colors[topic], label="Word Count")
        ax.set_yticks(y_axis_labels)
        ax.tick_params(axis = 'y', labelsize = 25)
        ax.set_ylabel('Word Count', color=colorchart.colors[topic], fontsize = 25)
        ax.legend(loc='upper left', fontsize = 20)
        '''Generating the second set of barplots here'''
        ax_twin = ax.twinx()
        ax_twin.bar(x_axis, df.loc[df.topic_id==topic, 'importance'], width=0.2, color=colorchart.colors[topic], label = "Weight")
        ax_twin.set_ylabel('Weight', color=colorchart.colors[topic], fontsize = 25)
        ax_twin.set_yticks(y_axis_twin_labels)
        ax_twin.tick_params(axis='y', labelsize = 25)
        ax_twin.legend(loc='upper right', fontsize = 20)
        # Title uses the 1-based position within the pyLDAvis ordering.
        plt.title('Topic Number: '+str(topic_order.index(topic) + 1), color=colorchart.colors[topic], fontsize=25)
        '''Saving each of the charts generated to the disc'''
        plt.savefig(logs_folder_name + '/FrequencyWeightChart_TopicNumber_' + str(topic_order.index(topic) + 1) + '.png')
    topic_builder_end_status_key = "Prepared the frequency and weights vs keywords charts"
    status_logger(status_logger_name, topic_builder_end_status_key)
def trends_histogram(abstracts_log_name, logs_folder_name, trend_keywords, status_logger_name):
    """Plot the year-vs-frequency trend chart for trend_keywords[0].

    Reads the year->frequency dictionary CSV dumped by the Scraper, fills
    missing years with zero, and saves an annotated line chart as a PNG.

    :param abstracts_log_name: base name of the scraper's log files.
    :param logs_folder_name: session folder receiving the PNG.
    :param trend_keywords: list whose first element is the charted term.
    :param status_logger_name: status log base name.
    """
    trends_histogram_start_status_key = "Generating the trends histogram"
    status_logger(status_logger_name, trends_histogram_start_status_key)
    list_of_years = []
    list_of_frequencies = []
    # 'with' closes the CSV even if a line is malformed; the original opened
    # the file and never closed it (handle leak).
    with open(abstracts_log_name + '_DICTIONARY.csv', 'r') as abstract_word_dictionary_file:
        for line in abstract_word_dictionary_file:
            list_of_years.append(int(line.split(',')[0]))
            # [:-1] strips the trailing newline from the frequency column.
            list_of_frequencies.append(int(line.split(',')[1][:-1]))
    # First and last year in which the trend keyword appears.
    starting_year = min(list_of_years)
    ending_year = max(list_of_years)
    # Rebuild the year -> frequency mapping dumped by the Scraper.
    abstract_word_dictionary = dict(zip(list_of_years, list_of_frequencies))
    # Plot a continuous run of years; absent years count as zero occurrences.
    list_of_years_to_be_plotted = list(range(starting_year, ending_year + 1))
    frequencies_to_be_plotted = [abstract_word_dictionary.get(year, 0) for year in list_of_years_to_be_plotted]
    # One Y tick per integer frequency up to the observed maximum.
    y_ticks_frequency = list(range(0, max(frequencies_to_be_plotted) + 1))
    # Large canvas so the full year range fits legibly.
    plt.figure(figsize=[15, 10])
    plt.plot(list_of_years_to_be_plotted, frequencies_to_be_plotted)
    # Re-plot as red dots so individual data points stay visible.
    plt.plot(list_of_years_to_be_plotted, frequencies_to_be_plotted, 'ro')
    for element in range(0, len(list_of_years_to_be_plotted)):
        # Skip zero frequencies to avoid cluttering the chart with text boxes.
        if frequencies_to_be_plotted[element] != 0:
            plt.text(list_of_years_to_be_plotted[element], frequencies_to_be_plotted[element], "Frequency: "+str(frequencies_to_be_plotted[element]), bbox=dict(facecolor='orange', alpha=0.3), horizontalalignment='right', verticalalignment='top',size=8)
    plt.ylabel("Frequency of occurrence:"+" "+trend_keywords[0])
    plt.xlabel("Year of occurrence:"+" "+trend_keywords[0])
    plt.title("Trends Chart:"+" "+trend_keywords[0])
    # Label every year; rotate for readability.
    plt.xticks(list_of_years_to_be_plotted, rotation=45)
    plt.yticks(y_ticks_frequency)
    # Persist the chart for later analysis.
    plt.savefig(logs_folder_name+"/"+"Data_Visualization_Trends_Graph"+"_"+trend_keywords[0]+".png")
    trends_histogram_end_status_key = "Generated the trends graph"+" "+logs_folder_name+"/"+"Data_Visualization_Trends_Graph"+"_"+trend_keywords[0]+".png"
    status_logger(status_logger_name, trends_histogram_end_status_key)
def visualizer_main(lda_model, corpus, id2word, textual_data_lemmatized, num_topics, num_keywords, logs_folder_name, status_logger_name):
    """Visualizer entry point: build the pyLDAvis HTML layout, then the
    per-topic frequency/weight charts, logging progress at each stage."""
    status_logger(status_logger_name, "Entering the visualizer_main() code")
    # Topic order from the HTML layout drives the chart numbering below.
    topic_order = visualizer_generator(lda_model, corpus, id2word, logs_folder_name, status_logger_name)
    topic_builder(lda_model, topic_order, num_topics, num_keywords, textual_data_lemmatized, logs_folder_name, status_logger_name)
    status_logger(status_logger_name, "Exiting the visualizer_main() code")
|
{"/pyResearchInsights/__init__.py": ["/pyResearchInsights/Scraper.py", "/pyResearchInsights/Cleaner.py", "/pyResearchInsights/Analyzer.py", "/pyResearchInsights/NLP_Engine.py", "/pyResearchInsights/common_functions.py", "/pyResearchInsights/system_functions.py"], "/pyResearchInsights/Visualizer.py": ["/pyResearchInsights/common_functions.py"], "/pyResearchInsights/Scraper.py": ["/pyResearchInsights/common_functions.py"], "/pyResearchInsights/system_functions.py": ["/pyResearchInsights/common_functions.py"], "/pyResearchInsights/NLP_Engine.py": ["/pyResearchInsights/common_functions.py", "/pyResearchInsights/Visualizer.py"], "/pyResearchInsights/Cleaner.py": ["/pyResearchInsights/common_functions.py"], "/pyResearchInsights/example.py": ["/pyResearchInsights/Scraper.py", "/pyResearchInsights/Analyzer.py", "/pyResearchInsights/Cleaner.py", "/pyResearchInsights/NLP_Engine.py", "/pyResearchInsights/common_functions.py", "/pyResearchInsights/system_functions.py"], "/pyResearchInsights/Analyzer.py": ["/pyResearchInsights/common_functions.py"]}
|
1,962
|
SarthakJShetty/pyResearchInsights
|
refs/heads/master
|
/pyResearchInsights/Scraper.py
|
'''The aim of this script is to scrape abstracts, author names and date of publication from Springer
Sarthak J. Shetty
04/08/2018'''
'''Adding the libraries to be used here.'''
'''Importing urllib.request to use urlopen'''
from urllib.request import build_opener, HTTPCookieProcessor
''''Importing urllib.error to handle errors in HTTP pinging.'''
import urllib.error
'''BeautifulSoup is used for souping.'''
from bs4 import BeautifulSoup as bs
'''Counter generates a dictionary from the abstract data, providing frequencies of occurences'''
from collections import Counter
'''Importing the CSV library here to dump the dictionary for further analysis and error checking if required. Will edit it out later.'''
import csv
'''Importing numpy to generate a random integer for the delay_function (see below)'''
import numpy as np
'''This library is imported to check if we can feasibly introduce delays into the processor loop to reduce instances of the remote server, shutting the connection while scrapping extraordinarily large datasets.'''
import time
'''Fragmenting code into different scripts. Some functions are to be used across the different sub-parts as well. Hence, shifted some of the functions to the new script.'''
from pyResearchInsights.common_functions import pre_processing, argument_formatter, keyword_url_generator, abstract_id_log_name_generator, status_logger
def url_reader(url, status_logger_name):
    '''Open the given URL and return the raw HTTP response for souping.

    A cookie-aware opener is used because some Springer result pages refuse
    plain requests. If the page cannot be fetched (HTTPError, or the
    occasional UnboundLocalError raised by the opener machinery) the function
    falls through and implicitly returns None; the caller is expected to
    skip that PII number and move on.'''
    try:
        # Opener with cookie support; plain urlopen fails on some pages.
        cookie_opener = build_opener(HTTPCookieProcessor())
        response = cookie_opener.open(url)
        return response
    except (UnboundLocalError, urllib.error.HTTPError):
        # Best-effort: unreachable pages are simply skipped upstream.
        pass
def results_determiner(url, status_logger_name):
    '''Log how many results link.springer.com reports for the search URL.

    Fetches the first results page, soups it, and reads the bold result
    count out of the page-header element, which is then written to the
    status log.'''
    first_results_page = url_reader(url, status_logger_name)
    souped_first_page = page_souper(first_results_page, status_logger_name)
    header_element = souped_first_page.find('h1', {'id':'number-of-search-results-and-search-terms'})
    number_of_results = header_element.find('strong').text
    status_logger(status_logger_name, "Total number of results obtained: "+number_of_results)
def url_generator(start_url, query_string, status_logger_name):
    '''Builds the list of result-page URLs to scrape for the given query.

    Fires one probe request at page 0, reads the "number-of-pages" element
    from the souped response, and expands that count into one URL per results
    page (facet-restricted to English-language Articles).

    Arguments:
    start_url -- base search URL ending just before the page number
    query_string -- URL-encoded keyword query
    status_logger_name -- log file name used by status_logger()

    Returns the list of page URLs. Raises IndexError/AttributeError if the
    results page markup changes and the pagination element is missing.'''
    url_generator_start_status_key = start_url+" "+"start_url has been received"
    status_logger(status_logger_name, url_generator_start_status_key)
    '''Initiallizing a list here in order to contain the URLs. Even if a URL does not return valid results,
    it is popped later on from the list.'''
    # NOTE(review): despite the comment above, nothing is ever popped — the
    # list built here only serves the probe request and is overwritten
    # wholesale by the comprehension below.
    urls_to_scrape=[]
    counter = 0
    total_url = start_url+str(counter)+"?facet-content-type=\"Article\"&query="+query_string+"&facet-language=\"En\""
    initial_url_status_key = total_url+" "+"has been obtained"
    status_logger(status_logger_name, initial_url_status_key)
    urls_to_scrape.append(total_url)
    # Probe request: soup page 0 just to read the pagination element.
    test_soup = bs(url_reader(total_url, status_logger_name), 'html.parser')
    '''Here, we grab the page element that contains the number of pages to be scrapped'''
    determiner = test_soup.findAll('span', {'class':'number-of-pages'})[0].text
    '''We generate the urls_to_scrape from the stripped down determiner element'''
    # The count may carry thousands separators (e.g. "1,024"); strip commas
    # before converting to int.
    urls_to_scrape = [(start_url+str(counter)+"?facet-content-type=\"Article\"&query="+query_string+"&facet-language=\"En\"") for counter in range(1, (int(determiner.replace(',', '')) + 1))]
    url_generator_stop_status_key = determiner.replace(',', '') + " page URLs have been obtained"
    status_logger(status_logger_name, url_generator_stop_status_key)
    return urls_to_scrape
def page_souper(page, status_logger_name):
    '''Parse a fetched web page into a BeautifulSoup tree and return it.

    Both the start and the completion of the parse are written to the
    status log.'''
    status_logger(status_logger_name, "Souping page")
    souped_page = bs(page, 'html.parser')
    status_logger(status_logger_name, "Souped page")
    return souped_page
def abstract_word_extractor(abstract, abstract_title, abstract_year, permanent_word_sorter_list, trend_keywords, status_logger_name):
    '''Record the publication year of each occurrence of the first trend keyword.

    For every word in the abstract that equals trend_keywords[0], the
    four-character year prefix (abstract_year[:4]) is appended to
    permanent_word_sorter_list, which the visualizer later turns into a
    trends histogram. The list is mutated in place; nothing is returned.'''
    status_logger(status_logger_name, "Adding:"+" "+abstract_title+" "+"to the archival list")
    # Normalize case, then tokenize on whitespace.
    words_in_abstract = abstract.lower().split()
    # Alphabetical order, kept from the original implementation (the equality
    # test below does not actually depend on it).
    words_in_abstract.sort()
    tracked_keyword = trend_keywords[0]
    for word in words_in_abstract:
        if word == tracked_keyword:
            permanent_word_sorter_list.append(abstract_year[:4])
    status_logger(status_logger_name, "Added:"+" "+abstract_title+" "+"to the archival list")
def abstract_year_list_post_processor(permanent_word_sorter_list, status_logger_name):
    '''Condense the year-per-occurrence list into a {year: frequency} Counter.'''
    status_logger(status_logger_name, "Post processing of permanent word sorter list has commenced")
    year_frequency_dictionary = Counter(permanent_word_sorter_list)
    status_logger(status_logger_name, "Post processing of permanent word sorter list has completed")
    return year_frequency_dictionary
def abstract_year_dictionary_dumper(abstract_word_dictionary, abstracts_log_name, status_logger_name):
    '''Dump the {year: frequency} dictionary to <abstracts_log_name>_DICTIONARY.csv.

    Writes one row per year: [year, frequency].

    Fix: the file is now opened with newline='' as the csv module requires;
    without it csv.writer emits an extra blank line between rows on Windows.'''
    status_logger(status_logger_name, "Dumping the entire dictionary to the disc")
    with open(abstracts_log_name+"_"+"DICTIONARY.csv", 'w', newline='') as dictionary_to_csv:
        writer = csv.writer(dictionary_to_csv)
        for year, frequency in abstract_word_dictionary.items():
            writer.writerow([year, frequency])
    status_logger(status_logger_name, "Dumped the entire dictionary to the disc")
def abstract_page_scraper(abstract_url, abstract_input_tag_id, abstracts_log_name, permanent_word_sorter_list, site_url_index, status_logger_name):
    '''Fetch one abstract page and append its title/author/date/abstract to the logs.

    abstract_url + abstract_input_tag_id forms the page URL. Author and
    abstract extraction each get an AttributeError fallback because Springer
    page markup is not uniform across asset types.

    NOTE(review): permanent_word_sorter_list and site_url_index are currently
    unused here — the abstract_word_extractor call that consumed them is
    commented out below.'''
    abstract_page_scraper_status_key="Abstract ID:"+" "+abstract_input_tag_id
    status_logger(status_logger_name, abstract_page_scraper_status_key)
    abstract_page_url = abstract_url+abstract_input_tag_id
    abstract_page = url_reader(abstract_page_url, status_logger_name)
    abstract_soup = page_souper(abstract_page, status_logger_name)
    title = title_scraper(abstract_soup, status_logger_name)
    abstract_date = abstract_date_scraper(title, abstract_soup, status_logger_name)
    '''Due to repeated attribute errors with respect to scraping the authors name, these failsafes had to be put in place.'''
    try:
        author = author_scraper(abstract_soup, status_logger_name)
    except AttributeError:
        author = "Author not available"
    '''Due to repeated attribute errors with respect to scraping the abstract, these failsafes had to be put in place.'''
    try:
        abstract = abstract_scraper(abstract_soup)
        # abstract_word_extractor(abstract, title, abstract_date, permanent_word_sorter_list, trend_keywords, status_logger_name)
    except AttributeError:
        abstract = "Abstract not available"
    # Two writers: the full record goes to the .txt/.csv logs, the bare
    # abstract goes to the _ANALYTICAL file consumed by the NLP stage.
    abstract_database_writer(abstract_page_url, title, author, abstract, abstracts_log_name, abstract_date, status_logger_name)
    analytical_abstract_database_writer(title, author, abstract, abstracts_log_name, status_logger_name)
def abstract_crawler(abstract_url, abstract_id_log_name, abstracts_log_name, permanent_word_sorter_list, site_url_index, status_logger_name):
    '''Crawl every abstract ID recorded for one results page and scrape each abstract.

    Reads the IDs logged for page site_url_index and hands each one to
    abstract_page_scraper(). A TypeError from a malformed page is logged and
    skipped so one bad abstract does not abort the whole page.

    Fix: positions now come from enumerate() instead of list.index().
    index() is an O(n) scan per item (quadratic overall) and returns the
    FIRST match, so duplicate IDs were previously reported with the wrong
    abstract number.'''
    status_logger(status_logger_name, "Entered the Abstract Crawler")
    abstract_crawler_temp_index = site_url_index
    abstract_input_tag_ids = abstract_id_database_reader(abstract_id_log_name, abstract_crawler_temp_index, status_logger_name)
    for position, abstract_input_tag_id in enumerate(abstract_input_tag_ids):
        try:
            # 20 abstracts per results page, hence the page-offset term.
            abstract_crawler_accept_status_key="Abstract Number:"+" "+str((position+1)+abstract_crawler_temp_index*20)
            status_logger(status_logger_name, abstract_crawler_accept_status_key)
            abstract_page_scraper(abstract_url, abstract_input_tag_id, abstracts_log_name, permanent_word_sorter_list, site_url_index, status_logger_name)
        except TypeError:
            abstract_crawler_reject_status_key="Abstract Number:"+" "+str(position+1)+" "+"could not be processed"
            status_logger(status_logger_name, abstract_crawler_reject_status_key)
    status_logger(status_logger_name, "Exiting the Abstract Crawler")
def analytical_abstract_database_writer(title, author, abstract, abstracts_log_name, status_logger_name):
    '''Append just the abstract body to <abstracts_log_name>_ANALYTICAL.txt.

    This abstracts-only file is what the Analyzer/Visualizer stages consume,
    because the full log also carries titles, authors and URLs that would
    pollute the topic model.'''
    status_logger(status_logger_name, "Writing"+" "+title+" "+"by"+" "+author+" "+"to analytical abstracts file")
    # with-block guarantees the handle is closed even if a write fails.
    with open(abstracts_log_name+'_'+'ANALYTICAL'+'.txt', 'a') as analytical_abstracts_txt_log:
        analytical_abstracts_txt_log.write(abstract)
        analytical_abstracts_txt_log.write('\n'+'\n')
    status_logger(status_logger_name, "Written"+" "+title+" "+"to disc")
def abstract_database_writer(abstract_page_url, title, author, abstract, abstracts_log_name, abstract_date, status_logger_name):
    '''Append one full abstract record to the logs.

    The .txt log receives Title, Author, Date, URL and Abstract on separate
    lines; the .csv log receives only the bare abstract text. Both files are
    opened in append mode so records accumulate across the crawl.'''
    status_logger(status_logger_name, "Writing"+" "+title+" "+"by"+" "+author+" "+"to disc")
    with open(abstracts_log_name+'.txt', 'a') as abstracts_txt_log:
        abstracts_txt_log.write("Title:"+" "+title)
        abstracts_txt_log.write('\n')
        abstracts_txt_log.write("Author:"+" "+author)
        abstracts_txt_log.write('\n')
        abstracts_txt_log.write("Date:"+" "+abstract_date)
        abstracts_txt_log.write('\n')
        abstracts_txt_log.write("URL:"+" "+abstract_page_url)
        abstracts_txt_log.write('\n')
        abstracts_txt_log.write("Abstract:"+" "+abstract)
        abstracts_txt_log.write('\n'+'\n')
    with open(abstracts_log_name+'.csv', 'a') as abstracts_csv_log:
        abstracts_csv_log.write(abstract)
        abstracts_csv_log.write('\n')
    status_logger(status_logger_name, "Written"+" "+title+" "+"to disc")
def abstract_id_database_reader(abstract_id_log_name, site_url_index, status_logger_name):
    '''Read back the abstract IDs logged for results page site_url_index.

    The log for page N lives in <abstract_id_log_name><N+1>.txt, one ID per
    line. Returns the IDs as a list with trailing newlines stripped.

    Fix: the file is now opened in a with-block — the original open() inside
    the list comprehension leaked the file handle.'''
    abstract_id_reader_temp_index = site_url_index
    status_logger(status_logger_name, "Extracting Abstract IDs from disc")
    with open(abstract_id_log_name+str(abstract_id_reader_temp_index+1)+'.txt') as abstract_id_file:
        lines_in_abstract_id_database = [line.rstrip('\n') for line in abstract_id_file]
    status_logger(status_logger_name, "Extracted Abstract IDs from disc")
    return lines_in_abstract_id_database
def abstract_id_database_writer(abstract_id_log_name, abstract_input_tag_id, site_url_index):
    '''Append one abstract ID to the per-page ID log file.

    The log for results page N is named <abstract_id_log_name><N+1>.txt, one
    ID per line.

    Fix: the file is opened via a with-block so the handle is closed even if
    the write raises; the redundant index copy was dropped.'''
    with open((abstract_id_log_name+str(site_url_index+1)+'.txt'), 'a') as abstract_id_log:
        abstract_id_log.write(abstract_input_tag_id)
        abstract_id_log.write('\n')
def abstract_date_scraper(title, abstract_soup, status_logger_name):
    '''Return the publication date of the abstract, or a fallback message.

    The date is taken from the datetime attribute of the page's <time>
    element. When that element is missing, find() yields None and the .get()
    call raises AttributeError, in which case an explanatory string is
    returned in place of a date.'''
    status_logger(status_logger_name, "Scraping date of the abstract titled:"+" "+title)
    try:
        abstract_date = abstract_soup.find('time').get('datetime')
        exit_log_message = title+" "+"was published on"+" "+abstract_date
    except AttributeError:
        # No <time> element on this page: report unavailability instead.
        abstract_date = "Date for abstract titled:"+" "+title+" "+"was not available"
        exit_log_message = abstract_date
    status_logger(status_logger_name, exit_log_message)
    return abstract_date
def abstract_scraper(abstract_soup):
    '''Return the abstract text from the souped page.

    The text of the Abs1-content div is utf-8 encoded and passed through
    str(); the [1:] slice drops the leading "b" of the bytes repr, matching
    the convention used by the project's other scraper helpers.'''
    abstract_element = abstract_soup.find('div', {'id':'Abs1-content'})
    return str(abstract_element.text.encode('utf-8'))[1:]
def author_scraper(abstract_soup, status_logger_name):
    '''Return every author name on the abstract page concatenated into one string.'''
    status_logger(status_logger_name, "Scraping the author name")
    # Each <li class="c-author-list__item"> holds one author; join their text.
    author_elements = abstract_soup.findAll('li', {'class':'c-author-list__item'})
    author = ''.join(element.text for element in author_elements)
    status_logger(status_logger_name, "Scraped the author's name:" + " "+str(author))
    return author
def title_scraper(abstract_soup, status_logger_name):
    '''Return the title of the scraped asset, trying progressively older markup.

    Three selectors are attempted in order: the current article title class,
    the legacy chapter title class, then the journal title span. Each
    AttributeError (selector matched nothing) falls through to the next; if
    all fail, a placeholder string is returned.'''
    title_scraper_start_status_key = "Scraping the title of the abstract"
    status_logger(status_logger_name, title_scraper_start_status_key)
    '''Purpose of this block is to retrieve the title of the text even if an AttributeError arises'''
    try:
        # [1:] strips the leading "b" of the bytes repr after utf-8 encoding.
        title = str(abstract_soup.find('h1', {'class':'c-article-title'}).text.encode('utf-8'))[1:]
    '''In case an incorrectly classified asset is to be scrapped (Journal/Chapter as opposed to Article), go through this block in an attempt to retrieve the title.'''
    except AttributeError:
        try:
            title = str(abstract_soup.find('h1',{'class':'ChapterTitle'}).text.encode('utf-8'))[1:]
        except AttributeError:
            try:
                # Journal pages expose the plain text title; no encoding dance.
                title = (abstract_soup.find('span', {'class':'JournalTitle'}).text)
            except AttributeError:
                title = "Title not available"
    title_scraper_end_status_key = "Scraped the title of the abstract"
    status_logger(status_logger_name, title_scraper_end_status_key)
    return title
def abstract_id_scraper(abstract_id_log_name, page_soup, site_url_index, status_logger_name):
    '''Log the href of every result link on a souped results page.

    The hrefs are the per-abstract IDs that, appended to the abstract URL,
    address the individual abstract pages. Each ID is appended to the
    per-page ID log via abstract_id_database_writer().'''
    status_logger(status_logger_name, "Scraping IDs")
    # Every <a class="title"> on a results page links one abstract.
    for result_link in page_soup.findAll('a', {'class':'title'}):
        abstract_id_database_writer(abstract_id_log_name, result_link.get('href'), site_url_index)
    status_logger(status_logger_name, "Scraped IDs")
def word_sorter_list_generator(status_logger_name):
    '''Create and return the (initially empty) archival list.

    The list later accumulates the publication year of every trend-keyword
    occurrence across all abstracts; it is created up front, before the
    scraping recursion begins.'''
    status_logger(status_logger_name, "Generating the permanent archival list")
    archival_list = []
    status_logger(status_logger_name, "Generated the permanent archival list")
    return archival_list
def delay_function(status_logger_name):
    '''Sleep for a random 0-19 second interval between page requests.

    Springer's servers drop the remote connection when pinged too
    aggressively on large scrapes; a random pause spaces the requests out.
    (np.random.randint(0, 20) draws from [0, 20) — the upper bound is
    exclusive.)'''
    delay_variable = np.random.randint(0, 20)
    status_logger(status_logger_name, "Delaying remote server ping:"+" "+str(delay_variable)+" "+"seconds")
    time.sleep(delay_variable)
    status_logger(status_logger_name, "Delayed remote server ping:"+" "+str(delay_variable)+" "+"seconds")
def processor(abstract_url, urls_to_scrape, abstract_id_log_name, abstracts_log_name, status_logger_name, keywords_to_search):
    ''''Multiple page-cycling function to scrape multiple result pages returned from Springer.
    print(len(urls_to_scrape))'''
    # Pipeline per results page: fetch -> soup -> log abstract IDs -> crawl
    # each abstract -> delay before the next page. Returns the
    # {year: frequency} dictionary built from the word-sorter list.
    # NOTE(review): keywords_to_search is currently unused in this body —
    # the abstract_word_extractor call that consumed it is commented out
    # downstream; confirm before removing the parameter.
    '''This list will hold all the words mentioned in all the abstracts. It will be later passed on to the
    visualizer code to generate the trends histogram.'''
    permanent_word_sorter_list = word_sorter_list_generator(status_logger_name)
    for site_url_index in range(0, len(urls_to_scrape)):
        # Only report the total result count once, on the first page.
        if(site_url_index==0):
            results_determiner(urls_to_scrape[site_url_index], status_logger_name)
        '''Collects the web-page from the url for souping'''
        page_to_soup = url_reader(urls_to_scrape[site_url_index], status_logger_name)
        '''Souping the page for collection of data and tags'''
        page_soup = page_souper(page_to_soup, status_logger_name)
        '''Scrapping the page to extract all the abstract IDs'''
        abstract_id_scraper(abstract_id_log_name, page_soup, site_url_index, status_logger_name)
        '''Actually obtaining the abstracts after combining ID with the abstract_url'''
        abstract_crawler(abstract_url, abstract_id_log_name, abstracts_log_name, permanent_word_sorter_list, site_url_index, status_logger_name)
        '''Delaying after each page being scrapped, rather than after each abstract'''
        delay_function(status_logger_name)
    '''This line of code processes and generates a dictionary from the abstract data'''
    abstract_year_dictionary = abstract_year_list_post_processor(permanent_word_sorter_list, status_logger_name)
    return abstract_year_dictionary
def scraper_main(keywords_to_search, abstracts_log_name, status_logger_name):
    '''Entry point for the Scraper stage.

    Builds the search/abstract URLs from the user's keywords, derives the
    abstract-ID log name, expands the search into per-page URLs, runs the
    processor() crawl, and dumps the resulting {year: frequency} dictionary
    to disc. Returns 0 on completion.

    Fixes: the no-op `else: keywords_to_search = keywords_to_search` branch
    was removed, and the string check now uses isinstance().'''
    '''Here, we utilize the keywords provided by the user to generate the URLs for scrapping'''
    start_url, abstract_url, query_string = keyword_url_generator(keywords_to_search)
    '''Since we receive only the abstracts_log_name, we have to extract the abstract_id_log_name'''
    abstract_id_log_name = abstract_id_log_name_generator(abstracts_log_name)
    if isinstance(keywords_to_search, str):
        '''If the user ran the code using just the function from the library, then the keywords and trends words need to be in this format'''
        keywords_to_search = argument_formatter(keywords_to_search)
    '''Provides the links for the URLs to be scraped by the scraper'''
    urls_to_scrape = url_generator(start_url, query_string, status_logger_name)
    '''Calling the processor() function here'''
    abstract_year_dictionary = processor(abstract_url, urls_to_scrape, abstract_id_log_name, abstracts_log_name, status_logger_name, keywords_to_search)
    '''This function dumps the entire dictionary onto the disc for further analysis and inference.'''
    abstract_year_dictionary_dumper(abstract_year_dictionary, abstracts_log_name, status_logger_name)
    return 0
|
{"/pyResearchInsights/__init__.py": ["/pyResearchInsights/Scraper.py", "/pyResearchInsights/Cleaner.py", "/pyResearchInsights/Analyzer.py", "/pyResearchInsights/NLP_Engine.py", "/pyResearchInsights/common_functions.py", "/pyResearchInsights/system_functions.py"], "/pyResearchInsights/Visualizer.py": ["/pyResearchInsights/common_functions.py"], "/pyResearchInsights/Scraper.py": ["/pyResearchInsights/common_functions.py"], "/pyResearchInsights/system_functions.py": ["/pyResearchInsights/common_functions.py"], "/pyResearchInsights/NLP_Engine.py": ["/pyResearchInsights/common_functions.py", "/pyResearchInsights/Visualizer.py"], "/pyResearchInsights/Cleaner.py": ["/pyResearchInsights/common_functions.py"], "/pyResearchInsights/example.py": ["/pyResearchInsights/Scraper.py", "/pyResearchInsights/Analyzer.py", "/pyResearchInsights/Cleaner.py", "/pyResearchInsights/NLP_Engine.py", "/pyResearchInsights/common_functions.py", "/pyResearchInsights/system_functions.py"], "/pyResearchInsights/Analyzer.py": ["/pyResearchInsights/common_functions.py"]}
|
1,963
|
SarthakJShetty/pyResearchInsights
|
refs/heads/master
|
/pyResearchInsights/system_functions.py
|
'''Hello! This portion of the code that acts as the processing code corroborating with the main scripts [re: Scraper, Analyzer+NLP_Engine, Visualizer]
- Sarthak J. Shetty
06/02/2019
This script has been renamed as the system_functions.py to carry out OS level interactions, such as:
1. tarballing the LOGS generated to reduce space.
2. Deleting the LOGs once the tarball has been created.
3. (Eventually) enable shell script to send the tarballed file over mail to the user.
4. (Eventually) enable shell script to upload the LOGS generated to GitHub.
- Sarthak J. Shetty
15/04/2019'''
'''Importing OS to call the tar function to generate the .tar file.'''
import os
'''From common_functions.py calling the status_logger() function to LOG the tarballing process and others as they are added here.'''
from pyResearchInsights.common_functions import status_logger
def rm_original_folder(logs_folder_name, status_logger_name):
    '''Delete the uncompressed LOG folder once its .tar.gz has been created.

    Fix: the command is run through subprocess with an argument list instead
    of os.system("rm -r " + name), so folder names containing spaces or shell
    metacharacters can neither break the command nor be interpreted by a
    shell (command-injection hardening). The command itself is unchanged.'''
    import subprocess
    rm_original_folder_start_status_key = "Deleting files belonging to:"+" "+logs_folder_name
    status_logger(status_logger_name, rm_original_folder_start_status_key)
    # check=False mirrors os.system(): a failed rm is logged by rm itself
    # but does not raise here.
    subprocess.run(["rm", "-r", logs_folder_name], check=False)
def tarballer(logs_folder_name, status_logger_name):
    '''Compress the LOG folder into <logs_folder_name>.tar.gz.

    Fix: uses the stdlib tarfile module instead of shelling out to
    `tar czf` via os.system(), which is portable (works where no tar binary
    exists) and immune to shell-metacharacter folder names.'''
    import tarfile
    tarballer_start_status_key = "Tarballing"+" "+logs_folder_name+" "+"into"+" "+logs_folder_name+".tar.gz"
    status_logger(status_logger_name, tarballer_start_status_key)
    with tarfile.open(logs_folder_name+".tar.gz", "w:gz") as tarball:
        # Default arcname keeps the folder's own path inside the archive,
        # matching the layout `tar czf out.tar.gz folder` produced.
        tarball.add(logs_folder_name)
    tarballer_start_end_key = "Tarballed"+" "+logs_folder_name+" "+"into"+" "+logs_folder_name+".tar.gz"
    status_logger(status_logger_name, tarballer_start_end_key)
|
{"/pyResearchInsights/__init__.py": ["/pyResearchInsights/Scraper.py", "/pyResearchInsights/Cleaner.py", "/pyResearchInsights/Analyzer.py", "/pyResearchInsights/NLP_Engine.py", "/pyResearchInsights/common_functions.py", "/pyResearchInsights/system_functions.py"], "/pyResearchInsights/Visualizer.py": ["/pyResearchInsights/common_functions.py"], "/pyResearchInsights/Scraper.py": ["/pyResearchInsights/common_functions.py"], "/pyResearchInsights/system_functions.py": ["/pyResearchInsights/common_functions.py"], "/pyResearchInsights/NLP_Engine.py": ["/pyResearchInsights/common_functions.py", "/pyResearchInsights/Visualizer.py"], "/pyResearchInsights/Cleaner.py": ["/pyResearchInsights/common_functions.py"], "/pyResearchInsights/example.py": ["/pyResearchInsights/Scraper.py", "/pyResearchInsights/Analyzer.py", "/pyResearchInsights/Cleaner.py", "/pyResearchInsights/NLP_Engine.py", "/pyResearchInsights/common_functions.py", "/pyResearchInsights/system_functions.py"], "/pyResearchInsights/Analyzer.py": ["/pyResearchInsights/common_functions.py"]}
|
1,964
|
SarthakJShetty/pyResearchInsights
|
refs/heads/master
|
/pyResearchInsights/NLP_Engine.py
|
'''Hello! This module of code is a part of the larger pyResearchInsights project.
This file was earlier named as Temp_Gensim_Code; code is now bifurcated into Gensim code (this) and a seperate
visualization code that will be added to the repository as well.
Checkout the Bias README.md for an overview of the project.
Sarthak J. Shetty
24/11/2018'''
'''Natural Language toolkit. Here we download the commonly used English stopwords'''
import nltk; nltk.download('stopwords')
'''Standard set of functions for reading and appending files'''
import re
'''Pandas and numpy is a dependency used by other portions of the code.'''
import numpy as np
import pandas as pd
'''Think this stands for pretty print. Prints out stuff to the terminal in a prettier way'''
from pprint import pprint
'''Importing OS to get current working directory (cwd) to tackle abstracts_log_name edge cases'''
import os
'''Contains the language model that has to be developed.'''
import gensim
import gensim.corpora as corpora
from gensim.utils import simple_preprocess
from gensim.models import CoherenceModel
from pyResearchInsights.common_functions import status_logger
from pyResearchInsights.Visualizer import visualizer_main
'''Industrial level toolkit for NLP'''
import spacy
import pyLDAvis
import pyLDAvis.gensim_models
'''Make pretty visualizations'''
import matplotlib as plt
'''Library to log any errors. Came across this in the tutorial.'''
import logging
logging.basicConfig(format='%(asctime)s : %(levelname)s : %(message)s', level=logging.ERROR)
import warnings
warnings.filterwarnings("ignore", category=DeprecationWarning)
from nltk.corpus import stopwords
# Baseline English stopword list shipped with NLTK.
stop_words = stopwords.words('english')
# Scraper artifacts ("url:", "abstract:", "author:", "title:") and other
# high-frequency filler words are appended so they do not dominate topics.
# NOTE(review): the list contains duplicates ("author", "title", "this",
# "a", "as", "from", ...); harmless for membership tests, but it could be
# deduplicated.
stop_words.extend(['from', 'subject', 're', 'edu', 'use', 'com', 'https', 'url', 'link', 'abstract', 'author', 'chapter', 'springer', 'title', "the", "of", "and", "in", "to", "a", "is", "for", "from", "with", "that", "by", "are", "on", "was", "as",
"were", "url:", "abstract:", "abstract", "author:", "title:", "at", "be", "an", "have", "this", "which", "study", "been", "not", "has", "its", "also", "these", "this", "can", "a", 'it', 'their', "e.g.", "those", "had", "but", "while", "will", "when", "only", "author", "title", "there", "our", "did", "as", "if", "they", "such", "than", "no", "-", "could"])
def data_reader(abstracts_log_name, status_logger_name):
    '''Load the abstracts text into a pandas dataframe for the NLP engine.

    Prefers the cleaned file (<name>_CLEANED.txt) produced by the Cleaner
    stage; when the engine is run standalone and no cleaned file exists,
    the raw abstracts file the caller pointed at is read instead.'''
    status_logger(status_logger_name, abstracts_log_name+".txt is being ported to dataframe")
    cleaned_file_name = (abstracts_log_name.split(".txt")[0]) + "_" + 'CLEANED.txt'
    try:
        textual_dataframe = pd.read_csv(cleaned_file_name, delimiter="\t")
    except FileNotFoundError:
        # Standalone run without the Cleaner stage: fall back to the raw file.
        textual_dataframe = pd.read_csv(abstracts_log_name, delimiter="\t")
    status_logger(status_logger_name, abstracts_log_name + ".txt has been ported to dataframe")
    return textual_dataframe
def textual_data_trimmer(textual_dataframe, status_logger_name):
    '''Convert the abstracts dataframe into a plain list, one element per abstract.'''
    status_logger(status_logger_name, "Trimming data and preparing list of words")
    abstracts_as_list = textual_dataframe.values.tolist()
    status_logger(status_logger_name, "Trimmed data and prepared list of words")
    return abstracts_as_list
def sent_to_words(textual_data, status_logger_name):
    '''Tokenize each document: strip punctuation/accents and yield word lists.

    Generator — yields one list of tokens per input document.

    Fix: the original body also contained, after the yield loop,
    `textual_data = list(sent_to_words(textual_data, ...))` plus a log line
    and a `return`. Inside a generator those lines only run when iteration
    finishes, and the recursive call made the generator re-invoke itself
    without end. That dead tail has been removed; the generator now simply
    finishes after yielding every document.'''
    status_logger(status_logger_name, "Tokenizing words")
    for sentence in textual_data:
        # deacc=True removes accent marks and punctuation during tokenization.
        yield gensim.utils.simple_preprocess(str(sentence), deacc=True)
def bigram_generator(textual_data, status_logger_name):
    '''Train a bigram Phraser over the corpus and return it.

    Bigrams are word pairs that co-occur with high frequency (min_count=5,
    threshold=100); the returned Phraser merges such pairs into single
    tokens when applied to a document.'''
    status_logger(status_logger_name, "Generating word bigrams")
    phrase_model = gensim.models.Phrases(textual_data, min_count=5, threshold=100)
    frozen_bigram_model = gensim.models.phrases.Phraser(phrase_model)
    status_logger(status_logger_name, "Generated word bigrams")
    return frozen_bigram_model
def remove_stopwords(textual_data, status_logger_name):
    '''Strip the module-level stop_words from every document in the corpus.

    Each document is re-tokenized with simple_preprocess and filtered;
    returns a list of filtered token lists.

    Fix: the "Removed stopwords" log lines originally sat AFTER the return
    statement and were unreachable; logging now happens before returning.'''
    status_logger(status_logger_name, "Removing stopwords")
    filtered_corpus = [[word for word in simple_preprocess(str(doc)) if word not in stop_words] for doc in textual_data]
    status_logger(status_logger_name, "Removed stopwords")
    return filtered_corpus
def format_topics_sentences(ldamodel, corpus, texts):
    '''Build a dataframe giving each document's dominant topic.

    Columns: Dominant_Topic, Perc_Contribution, Topic_Keywords, plus the
    original text appended as a final column.

    Fix: rows are accumulated in a plain list and turned into a DataFrame
    once. The per-row DataFrame.append() used before was deprecated in
    pandas 1.4 and removed in 2.0, and was quadratic in the number of
    documents anyway.'''
    dominant_topic_rows = []
    for document_index, row in enumerate(ldamodel[corpus]):
        # Sort this document's topics so the highest-probability one is first.
        row = sorted(row, key=lambda x: (x[1]), reverse=True)
        for rank, (topic_num, prop_topic) in enumerate(row):
            if rank == 0:
                # Dominant topic: record its id, weight and keyword string.
                wp = ldamodel.show_topic(topic_num)
                topic_keywords = ", ".join([word for word, prop in wp])
                dominant_topic_rows.append(pd.Series([int(topic_num), round(prop_topic, 4), topic_keywords]))
            else:
                break
    sent_topics_df = pd.DataFrame(dominant_topic_rows)
    sent_topics_df.columns = ['Dominant_Topic', 'Perc_Contribution', 'Topic_Keywords']
    contents = pd.Series(texts)
    sent_topics_df = pd.concat([sent_topics_df, contents], axis=1)
    return (sent_topics_df)
def make_bigrams(textual_data, status_logger_name):
    '''Apply the trained bigram model to every document in the corpus.

    Trains a Phraser via bigram_generator() and maps it over the documents,
    merging frequently co-occurring word pairs into single tokens.

    Fix: the "Generated bigrams" log lines originally came AFTER the return
    statement and were unreachable; they now run before returning.'''
    status_logger(status_logger_name, "Generating bigrams")
    bigram_mod = bigram_generator(textual_data, status_logger_name)
    bigrammed_corpus = [bigram_mod[doc] for doc in textual_data]
    status_logger(status_logger_name, "Generated bigrams")
    return bigrammed_corpus
def lemmatization(status_logger_name, textual_data, allowed_postags=['NOUN', 'ADJ', 'VERB', 'ADV']):
	'''Reduces every token to its root form via spaCy (e.g. "Running" -> "Run"),
	keeping only tokens whose part of speech appears in allowed_postags.'''
	lemmatization_start_status_key = "Beginning lemmatization"
	status_logger(status_logger_name, lemmatization_start_status_key)
	try:
		nlp = spacy.load('en_core_web_sm', disable=['parser', 'ner'])
	except OSError:
		# The spaCy model is absent on a first run; download it and retry once.
		from spacy.cli import download
		download('en_core_web_sm')
		nlp = spacy.load('en_core_web_sm', disable=['parser', 'ner'])
	lemmatized_documents = []
	for document_tokens in textual_data:
		parsed_document = nlp(" ".join(document_tokens))
		lemmatized_documents.append([token.lemma_ for token in parsed_document if token.pos_ in allowed_postags])
	lemmatization_end_status_key = "Ending lemmatization"
	status_logger(status_logger_name, lemmatization_end_status_key)
	return lemmatized_documents
def nlp_engine_main(abstracts_log_name, status_logger_name, num_topics = None, num_keywords = None, mallet_path = None):
	'''Builds an LDA topic model from the scraped abstracts and hands it to the Visualizer.
	Parameters:
		abstracts_log_name -- path of the abstracts .txt file; the LOG folder name is derived from it
		status_logger_name -- status log used by status_logger for progress messages
		num_topics -- number of LDA topics (None -> defaults to 10)
		num_keywords -- keywords per topic passed to the Visualizer (None -> defaults to 20)
		mallet_path -- optional path to a Mallet installation; when given, LdaMallet is used
			and per-topic summary CSVs are written to the LOG folder
	Returns 0 on completion.'''
	nlp_engine_main_start_status_key = "Initiating the NLP Engine"
	status_logger(status_logger_name, nlp_engine_main_start_status_key)
	'''We can arrive at logs_folder_name from abstracts_log_name, instead of passing it to the NLP_Engine function each time'''
	if('Abstract' in abstracts_log_name):
		# [:-1] drops the path separator immediately preceding 'Abstract'
		logs_folder_name = abstracts_log_name.split('Abstract')[0][:-1]
	else:
		'''If the user points to an abstracts_log_name that does not contain 'Abstract' and lies at the current working directory then set the logs_folder_name as cwd'''
		logs_folder_name = ''
	if(logs_folder_name == ''):
		'''This condition is required, if the file is located at the directory of the pyResearchInsights code.'''
		logs_folder_name = logs_folder_name + os.getcwd()
	'''Declaring the number of topics to be generated by the LDA model'''
	if num_topics == None:
		'''If the user has not provided this argument then set to 10'''
		num_topics = 10
	'''Declaring the number of keywords to be presented by the Visualizer'''
	if num_keywords == None:
		'''If the user has not provided this argument then set to 20'''
		num_keywords = 20
	'''Extracts the data from the .txt file and puts them into a Pandas dataframe buckets'''
	textual_dataframe = data_reader(abstracts_log_name, status_logger_name)
	'''Rids the symbols and special characters from the textual_data'''
	textual_data = textual_data_trimmer(textual_dataframe, status_logger_name)
	'''Removes stopwords that were earlier downloaded from the textual_data'''
	textual_data_no_stops = remove_stopwords(textual_data, status_logger_name)
	'''Prepares bigrams'''
	textual_data_words_bigrams = make_bigrams(textual_data_no_stops, status_logger_name)
	'''Lemmatization: Running -> Run'''
	textual_data_lemmatized = lemmatization(status_logger_name, textual_data_words_bigrams, allowed_postags=['NOUN', 'ADJ', 'VERB', 'ADV'])
	'''Creating a dictionary for each term as the key, and the value as their frequency in that sentence.'''
	id2word = corpora.Dictionary(textual_data_lemmatized)
	texts = textual_data_lemmatized
	'''Creating a dictionary for the entire corpus and not just individual abstracts and documents.'''
	corpus = [id2word.doc2bow(text) for text in texts]
	'''Builds the actual LDA model that will be used for the visualization and inference'''
	lda_model_generation_start_status_key = "Generating the LDA model using default parameter set"
	status_logger(status_logger_name, lda_model_generation_start_status_key)
	if(mallet_path):
		lda_model = gensim.models.wrappers.LdaMallet(mallet_path, corpus=corpus, num_topics = num_topics, id2word=id2word)
		'''Generating a dataset to show which '''
		df_topic_sents_keywords = format_topics_sentences(ldamodel = lda_model, corpus = corpus, texts = textual_data)
		df_dominant_topic = df_topic_sents_keywords.reset_index()
		df_dominant_topic.columns = ['Document_No', 'Dominant_Topic', 'Topic_Perc_Contrib', 'Keywords', 'Text']
		df_dominant_topic.to_csv(logs_folder_name + '/Master_Topic_Per_Sentence.csv')
		'''Generating a dataset to present the percentage of papers under each topic, their keywords and number of papers'''
		sent_topics_sorteddf_mallet = pd.DataFrame()
		sent_topics_outdf_grpd = df_topic_sents_keywords.groupby('Dominant_Topic')
		# Keeps, for each topic, the single document with the highest contribution.
		for i, grp in sent_topics_outdf_grpd:
			sent_topics_sorteddf_mallet = pd.concat([sent_topics_sorteddf_mallet, grp.sort_values(['Perc_Contribution'], ascending=[0]).head(1)], axis=0)
		sent_topics_sorteddf_mallet.reset_index(drop=True, inplace=True)
		topic_counts = df_topic_sents_keywords['Dominant_Topic'].value_counts()
		topic_contribution = round(topic_counts/topic_counts.sum(), 4)
		sent_topics_sorteddf_mallet.columns = ['Topic_Num', "Topic_Perc_Contrib", "Keywords", "Text"]
		sent_topics_sorteddf_mallet.head()
		# NOTE(review): indexing topic_counts by 0..num_topics-1 assumes every topic
		# index dominates at least one document; a topic that never dominates would
		# raise a KeyError here -- confirm upstream guarantees this.
		sent_topics_sorteddf_mallet['Number_Papers'] = [topic_counts[count] for count in range(num_topics)]
		sent_topics_sorteddf_mallet['Percentage_Papers'] = [topic_contribution[count] for count in range(0, num_topics)]
		sent_topics_sorteddf_mallet.to_csv(logs_folder_name+'/Master_Topics_Contribution.csv')
		'''Converting the mallet model to LDA for use by the Visualizer code'''
		lda_model = gensim.models.wrappers.ldamallet.malletmodel2ldamodel(lda_model)
	else:
		lda_model = gensim.models.ldamodel.LdaModel(corpus = corpus, id2word = id2word, num_topics = num_topics, random_state = 100, update_every = 1, chunksize = 100, passes = 10, alpha = 'auto', per_word_topics = True)
	lda_model_generation_end_status_key = "Generated the LDA model using default parameter set"
	status_logger(status_logger_name, lda_model_generation_end_status_key)
	# Either branch leaves lda_model as a gensim LdaModel, so log_perplexity is available.
	perplexity_score = lda_model.log_perplexity(corpus)
	perplexity_status_key = "Issued perplexity:"+" "+str(perplexity_score)
	status_logger(status_logger_name, perplexity_status_key)
	nlp_engine_main_end_status_key = "Idling the NLP Engine"
	status_logger(status_logger_name, nlp_engine_main_end_status_key)
	'''Importing the visualizer_main function to view the LDA Model built by the NLP_engine_main() function'''
	visualizer_main(lda_model, corpus, id2word, textual_data_lemmatized, num_topics, num_keywords, logs_folder_name, status_logger_name)
	return 0
|
{"/pyResearchInsights/__init__.py": ["/pyResearchInsights/Scraper.py", "/pyResearchInsights/Cleaner.py", "/pyResearchInsights/Analyzer.py", "/pyResearchInsights/NLP_Engine.py", "/pyResearchInsights/common_functions.py", "/pyResearchInsights/system_functions.py"], "/pyResearchInsights/Visualizer.py": ["/pyResearchInsights/common_functions.py"], "/pyResearchInsights/Scraper.py": ["/pyResearchInsights/common_functions.py"], "/pyResearchInsights/system_functions.py": ["/pyResearchInsights/common_functions.py"], "/pyResearchInsights/NLP_Engine.py": ["/pyResearchInsights/common_functions.py", "/pyResearchInsights/Visualizer.py"], "/pyResearchInsights/Cleaner.py": ["/pyResearchInsights/common_functions.py"], "/pyResearchInsights/example.py": ["/pyResearchInsights/Scraper.py", "/pyResearchInsights/Analyzer.py", "/pyResearchInsights/Cleaner.py", "/pyResearchInsights/NLP_Engine.py", "/pyResearchInsights/common_functions.py", "/pyResearchInsights/system_functions.py"], "/pyResearchInsights/Analyzer.py": ["/pyResearchInsights/common_functions.py"]}
|
1,965
|
SarthakJShetty/pyResearchInsights
|
refs/heads/master
|
/pyResearchInsights/Cleaner.py
|
'''
Hello! This script is part of the larger pyResearchInsights project that you can check out here: https://github.com/SarthakJShetty/pyResearchInsights
We are trying to build an end-to-end ACA tool here
-Sarthak
(03/10/2019)
Purpose of this script:
Clean the corpus of special characters'''
'''Importing the status logger function here to LOG the cleaner module working for debugging'''
from pyResearchInsights.common_functions import status_logger
'''This holds the elements of the abstract after it has been split at the spaces'''
# NOTE(review): these are module-level mutable lists used as shared scratch state
# by the functions below; state persists across repeated cleaner_main() calls in
# one process -- confirm that accumulation across runs is intended.
elements = []
'''Holds the dirty elements that contain the \\ and // in them'''
dirty_elements = []
'''Holds the clean members of the abstract elements'''
cleaned_str_list = []
'''Holds the screened abstracts, null of any special character occurances'''
cleaned_texts = []
'''What needs to be implemented here?
1. A way for each element containing \\ to be put into a list.
2. Subtract said list from elements'''
def txt_to_list(abstract_directory, status_logger_name):
	'''Reads the abstracts text file into a list of lines for easier processing.
	Prefers the filename_ANALYTICAL.txt produced by the full pipeline and falls
	back to the plain .txt when the Cleaner is run standalone.
	Returns: list of raw abstract lines (trailing newlines included).'''
	txt_to_list_start_status_key = "Converting text to list"
	status_logger(status_logger_name, txt_to_list_start_status_key)
	try:
		'''If the Cleaner script is run independently, not as part of the pipeline as a whole, there would be no filename_ANALYTICAL.txt.
		This ensures that that file can be processed independently.'''
		cleaner_abstract_directory = (abstract_directory.split(".txt")[0])+"_"+'ANALYTICAL.txt'
		abstracts_file = open(cleaner_abstract_directory, 'r')
	except FileNotFoundError:
		cleaner_abstract_directory = (abstract_directory.split(".txt")[0])+'.txt'
		abstracts_file = open(cleaner_abstract_directory, 'r')
	# Bug fix: the file handle was previously never closed; the with-block
	# guarantees it is released even if reading raises.
	with abstracts_file:
		abstracts = [line for line in abstracts_file]
	txt_to_list_end_status_key = "Converted text to list"
	status_logger(status_logger_name, txt_to_list_end_status_key)
	return abstracts
def dirty_element_generator(texts, status_logger_name):
	'''Scans every whitespace-separated token in the abstracts and collects the
	"dirty" ones that contain a backslash, so they can be weeded out later.
	Returns: list of offending tokens (duplicates preserved).'''
	dirty_element_generator_start_status_key = "Generating list with special elements for weeding out later"
	status_logger(status_logger_name, dirty_element_generator_start_status_key)
	# Bug fix: previously appended into the module-level dirty_elements list, so
	# running the Cleaner twice in one process carried stale entries over between
	# runs; a local list keeps each invocation independent.
	found_dirty_elements = []
	for text in texts:
		for element in text.split(" "):
			if('\\' in element):
				found_dirty_elements.append(element)
	dirty_element_generator_end_status_key = "Generated list with special elements for weeding out later"
	status_logger(status_logger_name, dirty_element_generator_end_status_key)
	return found_dirty_elements
def dirty_element_weeder(texts, dirty_elements, status_logger_name):
	'''Rebuilds each abstract with the previously-flagged dirty tokens removed.
	texts: raw abstract strings; dirty_elements: tokens to drop.
	Returns: list of cleaned abstract strings, one per input text.'''
	dirty_element_weeder_start_status_key = "Removing elements with special characters from the text list"
	status_logger(status_logger_name, dirty_element_weeder_start_status_key)
	# Hoisted set gives O(1) membership tests instead of an O(n) list scan per token.
	dirty_lookup = set(dirty_elements)
	# Bug fix: accumulate into a local list rather than the module-level
	# cleaned_texts, which grew across repeated invocations in one process.
	screened_texts = []
	for text in texts:
		kept_elements = [element for element in text.split(" ") if element not in dirty_lookup]
		screened_texts.append(" ".join(kept_elements))
	dirty_element_weeder_end_status_key = "Removed elements with special characters from the text list"
	status_logger(status_logger_name, dirty_element_weeder_end_status_key)
	return screened_texts
def cleaned_abstract_dumper(abstract_directory, cleaned_texts, status_logger_name):
	'''Writes the cleaned abstracts to <name>_CLEANED.txt on disc, one per line.
	Returns the file object (now closed); callers appear to keep it only as a
	handle/record -- TODO confirm no caller writes to it afterwards.'''
	cleaned_abstract_dumper_start_status_key = "Dumping the cleaned abstract .txt to the disc"
	status_logger(status_logger_name, cleaned_abstract_dumper_start_status_key)
	pre_new_cleaned_texts_folder = abstract_directory.split(".txt")[0]
	new_cleaned_texts_folder = open(pre_new_cleaned_texts_folder + "_"+"CLEANED.txt", 'w')
	# Bug fix: the handle was never closed, so buffered lines could be lost on
	# interpreter exit; the with-block flushes and closes once writing completes.
	with new_cleaned_texts_folder:
		for cleaned_text in cleaned_texts:
			new_cleaned_texts_folder.write(cleaned_text)
			new_cleaned_texts_folder.write('\n')
	cleaned_abstract_dumper_end_status_key = "Dumped the cleaned abstract .txt to the disc"
	status_logger(status_logger_name, cleaned_abstract_dumper_end_status_key)
	return new_cleaned_texts_folder
def cleaner_main(abstract_directory, status_logger_name):
	'''Entry point of the Cleaner stage: flags tokens containing special
	characters in the scraped abstracts, strips them out, and dumps the cleaned
	corpus to disc. Returns (cleaned_texts, handle_of_CLEANED_txt_file).'''
	cleaner_main_start_status_key = "Entering the Cleaner module"
	status_logger(status_logger_name, cleaner_main_start_status_key)
	# Stage 1: load the raw abstracts from disc.
	raw_abstracts = txt_to_list(abstract_directory, status_logger_name)
	# Stage 2: collect the tokens carrying special characters.
	flagged_tokens = dirty_element_generator(raw_abstracts, status_logger_name)
	# Stage 3: rebuild each abstract without the flagged tokens.
	scrubbed_abstracts = dirty_element_weeder(raw_abstracts, flagged_tokens, status_logger_name)
	# Stage 4: persist the cleaned corpus for the downstream modules.
	cleaned_file_handle = cleaned_abstract_dumper(abstract_directory, scrubbed_abstracts, status_logger_name)
	'''Main contribution from this block of the code is the new cleaned .txt folder and cleaned abstracts. Just in case.'''
	cleaner_main_end_status_key = "Exiting the Cleaner module"
	status_logger(status_logger_name, cleaner_main_end_status_key)
	return scrubbed_abstracts, cleaned_file_handle
|
{"/pyResearchInsights/__init__.py": ["/pyResearchInsights/Scraper.py", "/pyResearchInsights/Cleaner.py", "/pyResearchInsights/Analyzer.py", "/pyResearchInsights/NLP_Engine.py", "/pyResearchInsights/common_functions.py", "/pyResearchInsights/system_functions.py"], "/pyResearchInsights/Visualizer.py": ["/pyResearchInsights/common_functions.py"], "/pyResearchInsights/Scraper.py": ["/pyResearchInsights/common_functions.py"], "/pyResearchInsights/system_functions.py": ["/pyResearchInsights/common_functions.py"], "/pyResearchInsights/NLP_Engine.py": ["/pyResearchInsights/common_functions.py", "/pyResearchInsights/Visualizer.py"], "/pyResearchInsights/Cleaner.py": ["/pyResearchInsights/common_functions.py"], "/pyResearchInsights/example.py": ["/pyResearchInsights/Scraper.py", "/pyResearchInsights/Analyzer.py", "/pyResearchInsights/Cleaner.py", "/pyResearchInsights/NLP_Engine.py", "/pyResearchInsights/common_functions.py", "/pyResearchInsights/system_functions.py"], "/pyResearchInsights/Analyzer.py": ["/pyResearchInsights/common_functions.py"]}
|
1,966
|
SarthakJShetty/pyResearchInsights
|
refs/heads/master
|
/pyResearchInsights/example.py
|
'''Hello! We have decided to modularize the entire code, and run it off of one common script.
In the future, the Analyzer.py and the Visualizer.py scripts will be called here as well.
Check out the build-log.md for a detailed changes implemented.
Check out the README.md for more details about the project.
Sarthak J. Shetty
12/09/2018'''
'''Imports scraper_main() from Scraper.py'''
from pyResearchInsights.Scraper import scraper_main
'''Importing the analyzer code here as well'''
from pyResearchInsights.Analyzer import analyzer_main
'''Importing the Cleaner functions here that removes special characters from the corpus'''
from pyResearchInsights.Cleaner import cleaner_main
'''Importing the visualizer and gensim code here'''
from pyResearchInsights.NLP_Engine import nlp_engine_main
'''Imports some of the functions required by different scripts here.'''
from pyResearchInsights.common_functions import pre_processing
'''Declaring tarballer here from system_functions() to tarball the LOG directory, & rm_original_folder to delete the directory and save space.'''
from pyResearchInsights.system_functions import tarballer, rm_original_folder
# End-to-end example run of the pyResearchInsights pipeline. Stage order matters:
# each module reads the files written under abstracts_log_name by the previous one.
keywords_to_search = "Western Ghats Conservation"
'''Calling the pre_processing functions here so that abstracts_log_name and status_logger_name is available across the code.'''
abstracts_log_name, status_logger_name = pre_processing(keywords_to_search)
'''Runs the scraper here to scrape the details from the scientific repository'''
scraper_main(keywords_to_search, abstracts_log_name, status_logger_name)
'''Cleaning the corpus here before any of the other modules use it for analysis'''
cleaner_main(abstracts_log_name, status_logger_name)
'''Calling the Analyzer Function here'''
analyzer_main(abstracts_log_name, status_logger_name)
'''Calling the visualizer code below this portion'''
# NOTE(review): nlp_engine_main also accepts num_topics/num_keywords/mallet_path;
# defaults (10 topics, 20 keywords, no Mallet) are used here.
nlp_engine_main(abstracts_log_name, status_logger_name)
|
{"/pyResearchInsights/__init__.py": ["/pyResearchInsights/Scraper.py", "/pyResearchInsights/Cleaner.py", "/pyResearchInsights/Analyzer.py", "/pyResearchInsights/NLP_Engine.py", "/pyResearchInsights/common_functions.py", "/pyResearchInsights/system_functions.py"], "/pyResearchInsights/Visualizer.py": ["/pyResearchInsights/common_functions.py"], "/pyResearchInsights/Scraper.py": ["/pyResearchInsights/common_functions.py"], "/pyResearchInsights/system_functions.py": ["/pyResearchInsights/common_functions.py"], "/pyResearchInsights/NLP_Engine.py": ["/pyResearchInsights/common_functions.py", "/pyResearchInsights/Visualizer.py"], "/pyResearchInsights/Cleaner.py": ["/pyResearchInsights/common_functions.py"], "/pyResearchInsights/example.py": ["/pyResearchInsights/Scraper.py", "/pyResearchInsights/Analyzer.py", "/pyResearchInsights/Cleaner.py", "/pyResearchInsights/NLP_Engine.py", "/pyResearchInsights/common_functions.py", "/pyResearchInsights/system_functions.py"], "/pyResearchInsights/Analyzer.py": ["/pyResearchInsights/common_functions.py"]}
|
1,967
|
SarthakJShetty/pyResearchInsights
|
refs/heads/master
|
/pyResearchInsights/Analyzer.py
|
'''This code is part of the larger pyResearchInsights project, where we aim
to study the research themes being discussed in scientific publications.
This portion of the code analyzes the contents of the .txt file developed
by the Scraper.py and saves it to a .csv for later visualization by the
soon to be built Visualizer.py script
Sarthak J. Shetty
01/09/2018'''
'''Importing OS here to split the filename at the extension'''
import os
'''Importing status_logger here to log the details of the process run.'''
from pyResearchInsights.common_functions import status_logger
'''Importing the collections which contains the Counter function'''
from collections import Counter
'''Importing pandas here to build the dataframe'''
import pandas as pd
'''Importing numpy here to build the index of the pandas frameword'''
import numpy as np
def analyzer_pre_processing(abstracts_log_name, status_logger_name):
	'''Carries out the pre-processing tasks for the Analyzer: resolves the .txt
	input file (preferring the _CLEANED variant) and the .csv output file name.
	Returns: (abstracts_txt_file_name, abstracts_csv_file_name).'''
	analyzer_pre_processing_status_key="Carrying out pre-processing functions for analyzer"
	status_logger(status_logger_name, analyzer_pre_processing_status_key)
	try:
		'''If the Analyzer script is run independently, not as part of the pipeline as a whole, there would be no filename_CLEANED.txt.
		This ensures that that file can be processed independently.'''
		abstracts_txt_file_name = (abstracts_log_name.split(".txt")[0])+"_"+'CLEANED.txt'
		# Bug fix: the probe handle returned by open() was never closed; the
		# with-block releases it right after the existence check.
		with open(abstracts_txt_file_name, 'r'):
			pass
	except FileNotFoundError:
		abstracts_txt_file_name = (abstracts_log_name.split(".txt")[0])+'.txt'
	'''This code strips the abstracts_log_name of its extension and adds a .csv to it'''
	abstracts_csv_file_name = (abstracts_log_name.split(".txt")[0]) + "_" + "FREQUENCY_CSV_DATA" + ".csv"
	analyzer_pre_processing_status_key = "Carried out pre-processing functions for analyzer"
	status_logger(status_logger_name, analyzer_pre_processing_status_key)
	return abstracts_txt_file_name, abstracts_csv_file_name
def list_cleaner(list_to_be_cleaned, status_logger_name):
	'''Filters common stopwords and scraper artefacts out of the abstract word list.
	Returns: a new list preserving the original order of the surviving words.'''
	list_cleaner_start_status_key = "Cleaning the list of words generated"
	status_logger(status_logger_name, list_cleaner_start_status_key)
	# frozenset gives O(1) membership tests (the original list scan was O(n) per
	# word); duplicate entries in the literal collapse harmlessly.
	words_to_be_eliminated = frozenset(['from', 'subject', 're', 'edu', 'use', 'com', 'https', 'url', 'link', 'abstract', 'author', 'chapter', 'springer', 'title', "the", "of", "and", "in", "to", "a", "is", "for", "from", "with", "that", "by", "are", "on", "was", "as",
	"were", "url:", "abstract:", "abstract", "author:", "title:", "at", "be", "an", "have", "this", "which", "study", "been", "not", "has", "its", "also", "these", "this", "can", "a", 'it', 'their', "e.g.", "those", "had", "but", "while", "will", "when", "only", "author", "title", "there", "our", "did", "as", "if", "they", "such", "than", "no", "-", "could"])
	cleaned_list_of_words_in_abstract = [item for item in list_to_be_cleaned if item not in words_to_be_eliminated]
	list_cleaner_end_status_key = "Cleaned the list of words generated"
	status_logger(status_logger_name, list_cleaner_end_status_key)
	return cleaned_list_of_words_in_abstract
def transfer_function(abstracts_txt_file_name, abstracts_csv_file_name, status_logger_name):
	'''Counts word frequencies in the abstracts .txt file and writes them to a
	two-column (Words, Frequency) .csv for later visualization.
	abstracts_txt_file_name: input text file; abstracts_csv_file_name: output csv.'''
	transfer_function_status_key = "Copying data from"+" "+str(abstracts_txt_file_name)+" "+"to"+" "+"pandas dataframe"
	status_logger(status_logger_name, transfer_function_status_key)
	'''This list will contain all the words extracted from the .txt abstract file'''
	list_of_words_in_abstract = []
	'''Each word is appended to the list, from the .txt file'''
	with open(abstracts_txt_file_name, 'r') as abstracts_txt_data:
		for line in abstracts_txt_data:
			list_of_words_in_abstract.extend(line.split())
	'''This function cleans up the data of unnecessary words'''
	cleaned_list_of_words_in_abstract = list_cleaner(list_of_words_in_abstract, status_logger_name)
	'''A Counter is a dictionary, where the value is the frequency of term, which is the key'''
	dictionary_of_abstract_list = Counter(cleaned_list_of_words_in_abstract)
	# Build the dataframe in one shot from the Counter items. The previous version
	# wrote one word at a time via .loc into a pre-sized frame (an O(n) pandas
	# operation per word) and carried a counter/length guard that could never
	# trigger; construction from (word, count) pairs yields the identical CSV.
	dataframe_of_abstract_words = pd.DataFrame(list(dictionary_of_abstract_list.items()), columns=['Words', 'Frequency'])
	transfer_function_status_key = "Copied data from"+" "+str(abstracts_txt_file_name)+" "+"to"+" "+"pandas dataframe"
	status_logger(status_logger_name, transfer_function_status_key)
	transfer_function_status_key = "Copying data from pandas dataframe to"+" "+str(abstracts_csv_file_name)
	status_logger(status_logger_name, transfer_function_status_key)
	'''Saving dataframe to csv file, without the index column'''
	dataframe_of_abstract_words.to_csv(abstracts_csv_file_name, index=False)
	transfer_function_status_key = "Copied data from pandas dataframe to"+" "+str(abstracts_csv_file_name)
	status_logger(status_logger_name, transfer_function_status_key)
def analyzer_main(abstracts_log_name, status_logger_name):
	'''Entry point for the Analyzer stage: resolves the input/output file names,
	then copies word-frequency data from the .txt abstracts into a .csv.'''
	analyzer_main_status_key="Entered the Analyzer.py code."
	status_logger(status_logger_name, analyzer_main_status_key)
	# Resolve file names first, then perform the txt -> csv frequency transfer.
	resolved_txt_name, resolved_csv_name = analyzer_pre_processing(abstracts_log_name, status_logger_name)
	transfer_function(resolved_txt_name, resolved_csv_name, status_logger_name)
	# Record completion of the Analyzer stage in the status log.
	analyzer_main_status_key="Exiting the Analyzer.py code."
	status_logger(status_logger_name, analyzer_main_status_key)
|
{"/pyResearchInsights/__init__.py": ["/pyResearchInsights/Scraper.py", "/pyResearchInsights/Cleaner.py", "/pyResearchInsights/Analyzer.py", "/pyResearchInsights/NLP_Engine.py", "/pyResearchInsights/common_functions.py", "/pyResearchInsights/system_functions.py"], "/pyResearchInsights/Visualizer.py": ["/pyResearchInsights/common_functions.py"], "/pyResearchInsights/Scraper.py": ["/pyResearchInsights/common_functions.py"], "/pyResearchInsights/system_functions.py": ["/pyResearchInsights/common_functions.py"], "/pyResearchInsights/NLP_Engine.py": ["/pyResearchInsights/common_functions.py", "/pyResearchInsights/Visualizer.py"], "/pyResearchInsights/Cleaner.py": ["/pyResearchInsights/common_functions.py"], "/pyResearchInsights/example.py": ["/pyResearchInsights/Scraper.py", "/pyResearchInsights/Analyzer.py", "/pyResearchInsights/Cleaner.py", "/pyResearchInsights/NLP_Engine.py", "/pyResearchInsights/common_functions.py", "/pyResearchInsights/system_functions.py"], "/pyResearchInsights/Analyzer.py": ["/pyResearchInsights/common_functions.py"]}
|
1,974
|
Tarun-yadav777/Djanjo-Project-Schemer-
|
refs/heads/master
|
/schemegen/migrations/0003_auto_20201116_1718.py
|
# Generated by Django 3.0.8 on 2020-11-16 11:48
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated Django migration: updates the choices and default on
    # Schemegen.type. Longest choice value "Women's Development" is 19 chars,
    # so it fits within the field's max_length=20.
    dependencies = [
        ('schemegen', '0002_delete_genre'),
    ]
    operations = [
        migrations.AlterField(
            model_name='schemegen',
            name='type',
            field=models.CharField(choices=[("Women's Development", "Women's Development"), ('Social Justice', 'Social Justice'), ('Sports', 'Sports'), ('Ruler Development', 'Ruler Development'), ('Child Development', 'Child Development')], default="Women's Development", max_length=20),
        ),
    ]
|
{"/schemegen/views.py": ["/schemegen/models.py"], "/schemegen/admin.py": ["/schemegen/models.py"]}
|
1,975
|
Tarun-yadav777/Djanjo-Project-Schemer-
|
refs/heads/master
|
/schemegen/models.py
|
from django.db import models
class Schemegen(models.Model):
    # A government scheme entry; `choices` constrains the category stored in `type`.
    choices = [
        ("Women's Development", "Women's Development"),
        ("Social Justice", "Social Justice"),
        ("Sports", "Sports"),
        ("Ruler Development", "Ruler Development"),
        ("Child Development", "Child Development")
    ]
    # Human-readable scheme name; also the admin/shell display string via __str__.
    name = models.CharField(max_length=200)
    # Scheme category; longest choice "Women's Development" (19 chars) fits max_length=20.
    type = models.CharField(max_length=20, choices=choices, default="Women's Development")
    # External URL with further information about the scheme.
    info_link = models.URLField(max_length=200)
    def __str__(self):
        '''Display schemes by their name in the admin and shell.'''
        return self.name
class User_info(models.Model):
    # A citizen's registration record, capturing interest in a scheme.
    name = models.CharField(max_length=200)
    # Free-text gender; max_length=6 fits e.g. "Male"/"Female" -- TODO confirm allowed values.
    gender = models.CharField(max_length=6)
    # Date of birth; auto_now=False means the value is user-supplied, not auto-set.
    dob = models.DateField(auto_now=False)
    address = models.CharField(max_length=100)
    # NOTE(review): IntegerField drops leading zeros and can overflow on long
    # numbers; phone numbers are usually stored as CharField -- confirm before changing.
    phone_no = models.IntegerField()
    # Name of the scheme of interest, stored as plain text (not a foreign key).
    interested_scheme = models.CharField(max_length=200)
    def __str__(self):
        '''Display users by their name in the admin and shell.'''
        return self.name
|
{"/schemegen/views.py": ["/schemegen/models.py"], "/schemegen/admin.py": ["/schemegen/models.py"]}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.