index int64 | repo_name string | branch_name string | path string | content string | import_graph string |
|---|---|---|---|---|---|
54,774 | nausicaa59/refonte-scrapping | refs/heads/master | /parseurs/parseurTopic.py | import re
import datetime
from bs4 import BeautifulSoup
from parseurs.parseur import Parseur
class ParseurTopic(Parseur):
    """Parser for a single JVC forum topic page."""

    def parse(self, html):
        """Parse topic HTML; return (pagination_urls, reply_dicts)."""
        document = BeautifulSoup(html, 'html.parser')
        return (self.getPagination(document), self.getReponses(document))
def getReponses(self, soup):
    """Extract every forum message block into a list of reply dicts."""
    results = []
    for bloc in soup.select(".bloc-message-forum"):
        try:
            results.append({
                "auteur": self.getPseudoReponse(bloc),
                "date": self.getDateReponse(bloc),
                "reference": self.getReferenceReponse(bloc),
                "contenu": self.getReponseContenu(bloc),
            })
        except Exception as e:
            # A reply that fails extraction is skipped; the rest are kept.
            print("erreur lors l'extration")
    return results
def getReponseContenu(self, reponse):
    """Extract a reply's body: plain text, quoted authors and image URLs.

    Returns a dict {"content", "citation", "images": {"smileys", "other"}},
    or the empty string when the reply has no ".bloc-contenu" node.
    NOTE(review): the "" fallback has a different type from the normal dict
    return — downstream schema validation appears to filter such entries,
    but confirm callers tolerate it.
    """
    data = {
        "content": "",
        "citation": [],
        "images": {"smileys": [], "other": []}
    }
    corps = reponse.select(".bloc-contenu")
    if corps == []:
        return ""
    # delete signature
    self.deleteElementBySelect(corps[0], ".signature-msg")
    # search auteur citation + delete (quotes must go before text extraction)
    data["citation"] = self.getReponseCitationAuteur(corps[0])
    self.deleteElementBySelect(corps[0], ".blockquote-jv")
    # get images; smiley URLs are recognised by a "smileys" path segment
    imagesAll = self.getReponseImg(corps[0])
    data["images"]["smileys"] = [x for x in imagesAll if x.find("smileys") != -1]
    data["images"]["other"] = [x for x in imagesAll if x.find("smileys") == -1]
    # delete links and markup so only plain text paragraphs remain
    self.deleteElementBySelect(corps[0], "a")
    self.deleteElementBySelect(corps[0], ".JvCare")
    self.deleteElementBySelect(corps[0], "img")
    self.deleteElementBySelect(corps[0], "b")
    self.deleteElementBySelect(corps[0], "i")
    for p in corps[0].select("p"):
        data["content"] += " " + p.get_text()
    return data
def getReponseImg(self, html):
    """Return the src attribute of every <img> in the node ([] if none)."""
    return [img["src"] for img in html.select("img") if img.has_attr("src")]
def getReponseCitationAuteur(self, reponse):
    """Return the cleaned pseudo of each quoted author in the reply.

    Reads the first paragraph of every top-level blockquote (formatted as
    "… HH:MM:SS <pseudo> a écrit …") and extracts the pseudo with a regex
    anchored on the timestamp.
    """
    # Raw string: '\d'/'\s' inside a plain literal are invalid escape
    # sequences (SyntaxWarning on recent Pythons).
    regexGetPseudo = r"\d{2}:\d{2}:\d{2}\s+(.+)\s+a écrit"
    citations = reponse.select(".text-enrichi-forum > .blockquote-jv")
    if citations == []:
        return []
    head = [x.select("> p:nth-of-type(1)")[0] for x in citations]
    if head == []:
        return []
    auteur = [x.get_text() for x in head]
    if auteur == []:
        return []
    reg = re.compile(regexGetPseudo)
    match = [reg.findall(x) for x in auteur]
    return [self.cleanEspace(x[0]) for x in match if len(x) > 0]
def getReferenceReponse(self, reponse):
    """Return the reply's data-id attribute, or False when absent."""
    return reponse["data-id"] if reponse.has_attr("data-id") else False
def getPseudoReponse(self, reponse):
    """Return the cleaned author pseudo, or "inconnu" when extraction fails."""
    try:
        node = reponse.select(".bloc-pseudo-msg")[0]
        return self.cleanEspace(node.get_text())
    except Exception as e:
        print("Erreur extraction pseudo", str(e))
        return "inconnu"
def getDateReponse(self, reponse):
    """Return the reply's date converted to 'YYYY-MM-DD HH:MM:SS'.

    Falls back to the "0000-00-00 00:00:00" sentinel when extraction
    fails. (The original `raise "…"` raised TypeError — strings are not
    exceptions in Python 3; the intent was clearly a sentinel return.)
    """
    try:
        dateNode = reponse.select(".bloc-date-msg span")
        if dateNode == []:
            # Some layouts carry the date directly on the container node.
            dateNode = reponse.select(".bloc-date-msg")
        date = dateNode[0].get_text().strip()
        return self.converteDate(date)
    except Exception as e:
        print("Erreur extraction date reponse", str(e))
        return "0000-00-00 00:00:00"
def converteDate(self, c):
    """Convert a French 'DD <mois> YYYY à HH:MM:SS' date to ISO-like form.

    Returns the date formatted 'YYYY-MM-DD HH:MM:SS'. Raises ValueError
    if the input does not match the expected layout.
    (Fixes a duplicated 'janvier' replacement in the original chain by
    using a single month-name table.)
    """
    months = {
        'janvier': '01', 'février': '02', 'mars': '03', 'avril': '04',
        'mai': '05', 'juin': '06', 'juillet': '07', 'août': '08',
        'septembre': '09', 'octobre': '10', 'novembre': '11', 'décembre': '12',
    }
    d = c.replace(' à ', ' ')
    for name, num in months.items():
        d = d.replace(name, num)
    return datetime.datetime.strptime(d, "%d %m %Y %H:%M:%S").strftime("%Y-%m-%d %H:%M:%S")
def getPagination(self, soup):
    """Return absolute URLs of the nav pages listed after the active one."""
    navs = soup.select(".bloc-liste-num-page")
    if not navs:
        return []
    items = navs[0].select("span")
    if len(items) == 1:
        # A single entry means there is only the current page.
        return []
    urls = []
    seen_current = False
    for item in items:
        if item.has_attr("class"):
            if "page-active" in item["class"]:
                seen_current = True
        elif seen_current:
            urls.append('http://www.jeuxvideo.com' + item.select("a")[0]["href"])
    return urls
54,775 | nausicaa59/refonte-scrapping | refs/heads/master | /workers/workerTopic.py | import sys
import time
from worker import Worker
from worker import Exception410
from worker import ExceptionParse
from worker import ExceptionPersiste
from worker import ExceptionAnyUrl
from pymongo.errors import ServerSelectionTimeoutError
from parseurs.factoryParseur import FactoryParseur
from verifications.verificationReponses import VerificationReponses
from models import modelReponses
from models import modelProfil
from models import modelSujets
from tools import generateurUrl
from env import env
class WorkerTopic(Worker):
    """Worker that scrapes forum topic pages and persists their replies."""

    def __init__(self):
        # Wire this worker to the 'topic' parser from the factory.
        super(WorkerTopic, self).__init__(FactoryParseur.make('topic'))
def run(self):
    """Main worker loop: pick one unscrapped topic, scrape it, mark it done.

    Runs forever. A Mongo server-selection timeout triggers a reconnect;
    any other error is printed and the loop continues after the configured
    pause. (Removed a stray trailing semicolon and unused exception
    bindings from the original.)
    """
    while True:
        try:
            self.initDBConnection()
            cible = modelSujets.getNotScrapped(self.db)
            # Presumably computes the page URL to resume from, given how
            # many replies are already stored — confirm in generateurUrl.
            cible["url"] = generateurUrl.startTopicPage(cible["url"], cible["nbReponses"])
            # Flag as taken before scraping so other workers skip it.
            modelSujets.setScrapped(self.db, cible["reference"], True)
            self.extractReponseTopic(cible)
            modelSujets.setScrappedFinish(self.db, cible["reference"], True)
            modelSujets.setChanged(self.db, cible["reference"], False)
        except ServerSelectionTimeoutError:
            self.retryConnect()
        except Exception as e:
            print(str(e))
        finally:
            time.sleep(env.WORKER_FREQ)
def extractReponseTopic(self, cible):
    """Walk the topic page by page, persisting replies, until no next page."""
    nextUrl = cible["url"]
    while nextUrl is not None:
        try:
            html = self.runGetQuery(nextUrl)
            (pagination, reponses) = self.parse(html)
            self.notify(cible["reference"], nextUrl, reponses)
            nextUrl = pagination[0] if pagination else None
            self.persiste(reponses, cible["reference"])
        except Exception410 as e:
            # The topic was deleted on the site: flag it and stop paging.
            nextUrl = None
            modelSujets.setDeleted(self.db, cible["reference"], True)
            print("Topic mort !", cible["reference"])
def persiste(self, reponses, refTopic):
    """Validate replies, store them, and queue their authors for scraping."""
    for reponse in reponses:
        reponse["sujet"] = refTopic
    validator = VerificationReponses(reponses)
    validator.controle()
    validator.prepare()
    modelReponses.save(self.db, validator.data)
    for entry in validator.data:
        modelProfil.saveNotScrapped(self.db, entry["auteur"])
def notify(self, ref, url, data):
    """Print a per-page progress summary when verbose mode is on."""
    if not env.WORKER_VERBOSE:
        return
    print("-----------")
    print("Type : sujet")
    print("Num sujet : " + str(ref))
    print("Url courante : " + str(url))
    print("Nb reponses trouvées : " + str(len(data)))
| {"/workers/workerAbonne.py": ["/parseurs/factoryParseur.py", "/verifications/verificationAbonne.py"], "/workers/workerHomeRandom.py": ["/parseurs/factoryParseur.py", "/verifications/verificationSujets.py", "/verifications/verificationConnect.py"], "/verifications/verificationConnect.py": ["/verifications/errors.py"], "/parseurs/parseurProfil.py": ["/parseurs/parseur.py"], "/scrap.py": ["/workers/factoryWorker.py"], "/verifications/verificationProfil.py": ["/verifications/errors.py"], "/verifications/verificationAbonne.py": ["/verifications/errors.py"], "/workers/factoryWorker.py": ["/workers/workerHomeRandom.py", "/workers/workerHomeNav.py", "/workers/workerTopic.py", "/workers/workerProfil.py", "/workers/workerAbonne.py"], "/parseurs/parseurProfilAbonne.py": ["/parseurs/parseur.py"], "/workers/workerHomeNav.py": ["/workers/workerHomeRandom.py", "/parseurs/factoryParseur.py"], "/parseurs/parseurPageListe.py": ["/parseurs/parseur.py"], "/parseurs/parseurTopic.py": ["/parseurs/parseur.py"], "/workers/workerTopic.py": ["/parseurs/factoryParseur.py", "/verifications/verificationReponses.py"], "/workers/workerProfil.py": ["/parseurs/factoryParseur.py", "/verifications/verificationProfil.py"], "/parseurs/factoryParseur.py": ["/parseurs/parseurPageListe.py", "/parseurs/parseurTopic.py", "/parseurs/parseurProfil.py", "/parseurs/parseurProfilAbonne.py"]} |
54,776 | nausicaa59/refonte-scrapping | refs/heads/master | /workers/workerProfil.py | import sys
import time
from tools import generateurUrl
from worker import Worker
from worker import Exception410
from parseurs.factoryParseur import FactoryParseur
from pymongo.errors import ServerSelectionTimeoutError
from verifications.verificationProfil import VerificationProfil
from models import modelProfil
from env import env
class WorkerProfil(Worker):
    """Worker that scrapes user profile pages."""

    def __init__(self):
        # Wire this worker to the 'profil' parser from the factory.
        super(WorkerProfil, self).__init__(FactoryParseur.make('profil'))
def run(self):
    """Main worker loop: scrape one profile per iteration, forever.

    A 410-style exception flags the author as banned; a Mongo timeout
    triggers a reconnect; any other error is printed and the loop sleeps
    for the configured pause. (Removed a leftover debug print of
    env.WORKER_FREQ in the finally block, for consistency with
    WorkerTopic.run.)
    """
    while True:
        try:
            self.initDBConnection()
            auteur = modelProfil.getNotScrapped(self.db)
            pseudo = auteur["pseudo"]
            url = generateurUrl.userProfil(pseudo)
            html = self.runGetQuery(url)
            profil = self.parse(html)
            profil['pseudo'] = pseudo
            self.persiste(profil)
            self.notify(pseudo, url, profil)
        except Exception410:
            # The profile is gone (name suggests HTTP 410 — confirm):
            # record the author as banned.
            auteur["banni"] = 1
            modelProfil.save(self.db, auteur)
        except ServerSelectionTimeoutError:
            self.retryConnect()
        except Exception as e:
            print(str(e))
        finally:
            time.sleep(env.WORKER_FREQ)
def persiste(self, profil):
    """Validate a scraped profile, then store the normalised document."""
    validator = VerificationProfil(profil)
    validator.controle()
    validator.prepare()
    modelProfil.save(self.db, validator.data)
def notify(self, pseudo, url, profil):
    """Print a progress summary when verbose mode is on."""
    if not env.WORKER_VERBOSE:
        return
    print("-----------")
    print("Type : profil")
    print("Pseudo : " + pseudo)
    print("Resultat", profil)
| {"/workers/workerAbonne.py": ["/parseurs/factoryParseur.py", "/verifications/verificationAbonne.py"], "/workers/workerHomeRandom.py": ["/parseurs/factoryParseur.py", "/verifications/verificationSujets.py", "/verifications/verificationConnect.py"], "/verifications/verificationConnect.py": ["/verifications/errors.py"], "/parseurs/parseurProfil.py": ["/parseurs/parseur.py"], "/scrap.py": ["/workers/factoryWorker.py"], "/verifications/verificationProfil.py": ["/verifications/errors.py"], "/verifications/verificationAbonne.py": ["/verifications/errors.py"], "/workers/factoryWorker.py": ["/workers/workerHomeRandom.py", "/workers/workerHomeNav.py", "/workers/workerTopic.py", "/workers/workerProfil.py", "/workers/workerAbonne.py"], "/parseurs/parseurProfilAbonne.py": ["/parseurs/parseur.py"], "/workers/workerHomeNav.py": ["/workers/workerHomeRandom.py", "/parseurs/factoryParseur.py"], "/parseurs/parseurPageListe.py": ["/parseurs/parseur.py"], "/parseurs/parseurTopic.py": ["/parseurs/parseur.py"], "/workers/workerTopic.py": ["/parseurs/factoryParseur.py", "/verifications/verificationReponses.py"], "/workers/workerProfil.py": ["/parseurs/factoryParseur.py", "/verifications/verificationProfil.py"], "/parseurs/factoryParseur.py": ["/parseurs/parseurPageListe.py", "/parseurs/parseurTopic.py", "/parseurs/parseurProfil.py", "/parseurs/parseurProfilAbonne.py"]} |
54,777 | nausicaa59/refonte-scrapping | refs/heads/master | /verifications/verificationReponses.py | from cerberus import Validator
from verifications import helpers
class VerificationReponses:
    """Cerberus-based validation and normalisation of scraped reply dicts."""

    def __init__(self, data):
        """Store the raw replies and declare the three validation schemas."""
        self.data = data
        # Top-level reply fields.
        self.schema = {
            'auteur': {'type': 'string', 'empty': False, 'required': True},
            'date': {'type': 'string', 'empty': False, 'required': True},
            'reference': {'type': 'string', 'empty': False, 'required': True},
            'contenu': {'required': True},
            'sujet': {'type': 'string', 'empty': False, 'required': True},
        }
        # Required keys of the nested 'contenu' dict.
        self.schemaContenu = {
            'content': {'required': True},
            'citation': {'required': True},
            'images': {'required': True}
        }
        # Required keys of contenu['images'].
        self.schemaImages = {
            'smileys': {'required': True},
            'other': {'required': True}
        }
def controle(self):
    """Filter self.data in place, dropping entries that fail any schema."""
    checks = [
        (self.schema, lambda x: x),
        (self.schemaContenu, lambda x: x['contenu']),
        (self.schemaImages, lambda x: x['contenu']['images']),
    ]
    for schema, extract in checks:
        v = Validator(schema, allow_unknown=True)
        self.data = [x for x in self.data if v.validate(extract(x))]
def prepare(self):
    """Normalise validated entries in place: lowercase names, parse dates."""
    for entry in self.data:
        entry['auteur'] = entry['auteur'].lower()
        entry['date'] = helpers.parseDate(entry['date'])
        entry['contenu']['citation'] = [c.lower() for c in entry['contenu']['citation']]
| {"/workers/workerAbonne.py": ["/parseurs/factoryParseur.py", "/verifications/verificationAbonne.py"], "/workers/workerHomeRandom.py": ["/parseurs/factoryParseur.py", "/verifications/verificationSujets.py", "/verifications/verificationConnect.py"], "/verifications/verificationConnect.py": ["/verifications/errors.py"], "/parseurs/parseurProfil.py": ["/parseurs/parseur.py"], "/scrap.py": ["/workers/factoryWorker.py"], "/verifications/verificationProfil.py": ["/verifications/errors.py"], "/verifications/verificationAbonne.py": ["/verifications/errors.py"], "/workers/factoryWorker.py": ["/workers/workerHomeRandom.py", "/workers/workerHomeNav.py", "/workers/workerTopic.py", "/workers/workerProfil.py", "/workers/workerAbonne.py"], "/parseurs/parseurProfilAbonne.py": ["/parseurs/parseur.py"], "/workers/workerHomeNav.py": ["/workers/workerHomeRandom.py", "/parseurs/factoryParseur.py"], "/parseurs/parseurPageListe.py": ["/parseurs/parseur.py"], "/parseurs/parseurTopic.py": ["/parseurs/parseur.py"], "/workers/workerTopic.py": ["/parseurs/factoryParseur.py", "/verifications/verificationReponses.py"], "/workers/workerProfil.py": ["/parseurs/factoryParseur.py", "/verifications/verificationProfil.py"], "/parseurs/factoryParseur.py": ["/parseurs/parseurPageListe.py", "/parseurs/parseurTopic.py", "/parseurs/parseurProfil.py", "/parseurs/parseurProfilAbonne.py"]} |
54,778 | nausicaa59/refonte-scrapping | refs/heads/master | /parseurs/factoryParseur.py | from parseurs.parseurPageListe import ParseurPageListe
from parseurs.parseurTopic import ParseurTopic
from parseurs.parseurProfil import ParseurProfil
from parseurs.parseurProfilAbonne import ParseurProfilAbonne
class FactoryParseur:
    """Maps a worker type name to the matching parser instance."""

    @staticmethod
    def make(type):  # NOTE: param name shadows builtin `type`; kept for API compat
        """Return a new parser for *type*; raise AssertionError if unknown."""
        builders = {
            "sujet": ParseurPageListe,
            "topic": ParseurTopic,
            "profil": ParseurProfil,
            "abonne": ParseurProfilAbonne,
        }
        if type in builders:
            return builders[type]()
        raise AssertionError("Aucun parseur trouver: " + type)
54,779 | nausicaa59/refonte-scrapping | refs/heads/master | /env/env.py | # worker
# Database connection settings.
DB_USER = 'localhost'  # NOTE(review): a user named 'localhost' looks like a host/user mix-up — confirm
DB_PASS = ''
DB_PORT = 27017  # default MongoDB port
DB_BASE = 'forum'
WORKER_SERVERMQ = "localhost"  # message-queue host used by the workers
WORKER_VERBOSE = True  # print per-item progress summaries
WORKER_FREQ = 1  # seconds to sleep between worker iterations
# worker Home nav
WORKER_HOMENAV_START = 100
WORKER_HOMENAV_LEFT = True
# worker Home random
WORKER_HOMERANDOM_MIN = 0
WORKER_HOMERANDOM_MAX = 100 | {"/workers/workerAbonne.py": ["/parseurs/factoryParseur.py", "/verifications/verificationAbonne.py"], "/workers/workerHomeRandom.py": ["/parseurs/factoryParseur.py", "/verifications/verificationSujets.py", "/verifications/verificationConnect.py"], "/verifications/verificationConnect.py": ["/verifications/errors.py"], "/parseurs/parseurProfil.py": ["/parseurs/parseur.py"], "/scrap.py": ["/workers/factoryWorker.py"], "/verifications/verificationProfil.py": ["/verifications/errors.py"], "/verifications/verificationAbonne.py": ["/verifications/errors.py"], "/workers/factoryWorker.py": ["/workers/workerHomeRandom.py", "/workers/workerHomeNav.py", "/workers/workerTopic.py", "/workers/workerProfil.py", "/workers/workerAbonne.py"], "/parseurs/parseurProfilAbonne.py": ["/parseurs/parseur.py"], "/workers/workerHomeNav.py": ["/workers/workerHomeRandom.py", "/parseurs/factoryParseur.py"], "/parseurs/parseurPageListe.py": ["/parseurs/parseur.py"], "/parseurs/parseurTopic.py": ["/parseurs/parseur.py"], "/workers/workerTopic.py": ["/parseurs/factoryParseur.py", "/verifications/verificationReponses.py"], "/workers/workerProfil.py": ["/parseurs/factoryParseur.py", "/verifications/verificationProfil.py"], "/parseurs/factoryParseur.py": ["/parseurs/parseurPageListe.py", "/parseurs/parseurTopic.py", "/parseurs/parseurProfil.py", "/parseurs/parseurProfilAbonne.py"]} |
54,780 | nausicaa59/refonte-scrapping | refs/heads/master | /verifications/errorsE.py | import validus
import datetime
class ExceptionParse(Exception):
    """Raised when parsing scraped data fails."""
class ExceptionValidation(Exception):
    """Raised when scraped data fails validation."""
54,781 | nausicaa59/refonte-scrapping | refs/heads/master | /models/modelAbonne.py | import datetime
def save(db, data):
    """Insert or merge a subscriber document in the 'abonne' collection.

    When a document with the same pseudo exists, its 'abonnes' list is
    merged into *data* (without duplicates) and the old document replaced.
    Returns (True, None) on success, (False, error_message) on failure.
    """
    try:
        abonnes = db.abonne
        existing = abonnes.find_one({"pseudo": data["pseudo"]})
        if existing is not None:
            # Keep previously stored subscribers not already in the new list.
            data['abonnes'] += [x for x in existing['abonnes'] if x not in data['abonnes']]
            abonnes.delete_one({'_id': existing["_id"]})
        abonnes.insert_one(data)
        return (True, None)
    except Exception as e:
        return (False, str(e))
| {"/workers/workerAbonne.py": ["/parseurs/factoryParseur.py", "/verifications/verificationAbonne.py"], "/workers/workerHomeRandom.py": ["/parseurs/factoryParseur.py", "/verifications/verificationSujets.py", "/verifications/verificationConnect.py"], "/verifications/verificationConnect.py": ["/verifications/errors.py"], "/parseurs/parseurProfil.py": ["/parseurs/parseur.py"], "/scrap.py": ["/workers/factoryWorker.py"], "/verifications/verificationProfil.py": ["/verifications/errors.py"], "/verifications/verificationAbonne.py": ["/verifications/errors.py"], "/workers/factoryWorker.py": ["/workers/workerHomeRandom.py", "/workers/workerHomeNav.py", "/workers/workerTopic.py", "/workers/workerProfil.py", "/workers/workerAbonne.py"], "/parseurs/parseurProfilAbonne.py": ["/parseurs/parseur.py"], "/workers/workerHomeNav.py": ["/workers/workerHomeRandom.py", "/parseurs/factoryParseur.py"], "/parseurs/parseurPageListe.py": ["/parseurs/parseur.py"], "/parseurs/parseurTopic.py": ["/parseurs/parseur.py"], "/workers/workerTopic.py": ["/parseurs/factoryParseur.py", "/verifications/verificationReponses.py"], "/workers/workerProfil.py": ["/parseurs/factoryParseur.py", "/verifications/verificationProfil.py"], "/parseurs/factoryParseur.py": ["/parseurs/parseurPageListe.py", "/parseurs/parseurTopic.py", "/parseurs/parseurProfil.py", "/parseurs/parseurProfilAbonne.py"]} |
def checkIfExist(db, reference):
    """Return True when a reply with this reference is already stored."""
    return db.reponses.find_one({"reference": reference}) is not None
def save(db, data):
    """Insert replies that are not already stored; return (ok, error)."""
    try:
        fresh = [x for x in data if not checkIfExist(db, x['reference'])]
        reponses = db.reponses
        for reponse in fresh:
            reponses.insert_one(reponse)
        return (True, None)
    except Exception as e:
        return (False, str(e))
def countNbReponsesScrapped(db, refSujet):
    """Count stored replies belonging to the given topic reference.

    (Removed a pointless `try: … except Exception as e: raise e` wrapper —
    exceptions propagate identically without it.)
    NOTE(review): cursor.count() was removed in pymongo 4; use
    collection.count_documents({"sujet": refSujet}) when upgrading.
    """
    return db.reponses.find({"sujet": refSujet}).count()
54,787 | RituYadav92/Image-Classification | refs/heads/master | /transform.py | import torchvision.transforms as transforms
from autoaugment import ImageNetPolicy
import config as cf
def transform_training():
    """Compose the augmentation pipeline used for training batches."""
    steps = [
        transforms.Resize(cf.resize),
        transforms.CenterCrop(cf.crop_size),
        transforms.RandomResizedCrop(cf.resize),
        transforms.RandomHorizontalFlip(),
        ImageNetPolicy(),
        transforms.ToTensor(),
        # ImageNet channel statistics.
        transforms.Normalize(mean=[0.485, 0.456, 0.406],
                             std=[0.229, 0.224, 0.225]),
    ]
    return transforms.Compose(steps)
def transform_testing():
    """Compose the evaluation-time image pipeline.

    NOTE(review): RandomResizedCrop and RandomHorizontalFlip are random
    augmentations — unusual at test time unless test-time augmentation is
    intended; confirm this is deliberate.
    """
    transform_test = transforms.Compose([
        transforms.Resize(cf.resize),
        transforms.CenterCrop(cf.crop_size),
        transforms.RandomResizedCrop(cf.resize),
        transforms.RandomHorizontalFlip(),
        transforms.ToTensor(),
        # ImageNet channel statistics.
        transforms.Normalize(mean=[0.485, 0.456, 0.406],
                             std=[0.229, 0.224, 0.225])
    ])
    return transform_test
def transform_testing_vgg():
    """Minimal eval pipeline for VGG: tensor conversion + ImageNet normalisation."""
    return transforms.Compose([
        transforms.ToTensor(),
        transforms.Normalize(mean=[0.485, 0.456, 0.406],
                             std=[0.229, 0.224, 0.225]),
    ])
54,788 | RituYadav92/Image-Classification | refs/heads/master | /train_test.py | import torch
import torchvision
import os
import torch.optim as optim
from torch.autograd import Variable
import config as cf
import time
import math
import numpy as np
from transform import transform_training, transform_testing
use_cuda = torch.cuda.is_available()  # run tensors on GPU when available
decay_coefficient = 2.5  # NOTE(review): appears unused in this module — confirm
best_acc = 0  # best validation accuracy seen so far (module-global, updated in test())
lr = cf.lr  # NOTE(review): shadowed by the local `lr` inside train() — confirm intent
"""def adjust_learning_rate(epoch):
learning_rate=cf.lr
if (epoch)<= 10:
learning_rate=cf.lr
if (epoch) >= 10:
learning_rate = cf.lr/ 10
return learning_rate"""
def adjust_learning_rate(epoch):
learning_rate = cf.min_learning_rate + (cf.max_learning_rate - cf.min_learning_rate) * math.exp(-epoch/cf.decay_speed)
return learning_rate
def train(epoch, net, trainloader, criterion):
    """Run one training epoch; return the stacked per-batch loss array.

    NOTE(review): a fresh Adam optimizer is built every epoch with the
    decayed learning rate, so optimizer moment state does not carry across
    epochs — confirm that is intended.
    """
    net.train()
    train_loss = 0
    correct = 0
    total = 0
    lr = adjust_learning_rate(epoch)
    optimizer = optim.Adam(net.parameters(), lr, weight_decay=5e-4)
    # Losses are stacked starting from a leading 0 sentinel element.
    train_loss_stacked = np.array([0])
    print('\n=> Training Epoch #%d, LR=%.4f' %(epoch, lr))
    for batch_idx, (inputs_value, targets) in enumerate(trainloader):
        if use_cuda:
            inputs_value, targets = inputs_value.cuda(), targets.cuda() # GPU settings
        optimizer.zero_grad()
        # NOTE(review): Variable is a no-op wrapper on modern PyTorch.
        inputs_value, targets = Variable(inputs_value), Variable(targets)
        outputs = net(inputs_value) # Forward Propagation
        loss = criterion(outputs, targets) # Loss
        loss.backward() # Backward Propagation
        optimizer.step() # Optimizer update
        train_loss += loss.item()
        _, predicted = torch.max(outputs.data, 1)
        total += targets.size(0)
        correct += predicted.eq(targets.data).cpu().sum()
        train_loss_stacked = np.append(train_loss_stacked, loss.data.cpu().numpy())
    # Epoch summary: last-batch loss plus cumulative accuracy.
    print ('| Epoch [%3d/%3d] \t\tLoss: %.4f Acc@1: %.3f%%'
        %(epoch, cf.num_epochs, loss.item(), 100.*correct/total))
    return train_loss_stacked
def save_net(epoch, net):
    """Checkpoint the whole model object (not just weights) plus epoch."""
    state = {
        # NOTE(review): `net.module` normally exists only when the model is
        # wrapped in DataParallel; gating on use_cuda alone may raise
        # AttributeError for a plain model on GPU — confirm.
        'net': net.module if use_cuda else net,
        'epoch': epoch,
    }
    if not os.path.isdir('checkpoint'):
        os.mkdir('checkpoint')
        print('checkpoint created')
    torch.save(state, './checkpoint/savetorch.t7')
def test_only():
    """Standalone evaluation of a saved checkpoint over ./Breed/Test1.

    NOTE(review): this function looks unfinished/broken as written:
    `visualize_model`, `model` and `inputs` are undefined in this scope,
    and the state-dict path '/Users/monster/te.pt' is a hard-coded
    developer-machine path.
    """
    global best_acc
    checkpoint = torch.load('./checkpoint/savetorch.t7')
    net = checkpoint['net']
    print(net)
    # NOTE(review): hard-coded local path.
    net.load_state_dict(torch.load('/Users/monster/te.pt'))
    visualize_model(net)  # NOTE(review): undefined name
    outputs = model(inputs)  # NOTE(review): undefined names
    _, preds = torch.max(outputs.data, 1)
    """
    if 'pooling_mode' in checkpoint.keys():
        cfg.POOLING_MODE = checkpoint['pooling_mode']
    """
    net.eval()
    test_loss = 0
    correct = 0
    total = 0
    sub_outputs = []
    testset = torchvision.datasets.ImageFolder(root='./Breed/Test1', transform=transform_testing())
    testloader = torch.utils.data.DataLoader(testset, batch_size=cf.batch_size, shuffle=True, num_workers=4)
    for batch_idx, (inputs, targets) in enumerate(testloader):
        if use_cuda:
            inputs, targets = inputs.cuda(), targets.cuda()
        # NOTE(review): `volatile` is ignored on modern PyTorch; use torch.no_grad().
        inputs, targets = Variable(inputs, volatile=True), Variable(targets)
        outputs = net(inputs)
        _, predicted = torch.max(outputs.data, 1)
        total += targets.size(0)
        correct += predicted.eq(targets.data).cpu().sum()
    acc = 100.*correct/total
    print("| Test Result\tAcc@1: %.2f%%" %(acc))
    if acc > best_acc:
        best_acc = acc
        print('* Test results : Acc@1 = %.2f%%' % (best_acc))
def test_sumission(model):
    """Produce softmax predictions for a submission data loader.

    NOTE(review): broken as written — `net`, `sub_loader`, `Tensor`,
    `LongTensor` and `output_df` are undefined in this scope and the
    `model` parameter is never used (probably `net` should be `model`).
    Function name 'sumission' is a typo, kept for caller compatibility.
    """
    since = time.time()
    sub_outputs = []
    net.eval()  # NOTE(review): undefined name; likely should be model.eval()
    # Iterate over data.
    for data in sub_loader:
        # get the inputs
        inputs, labels = data
        inputs = Variable(inputs.type(Tensor))
        labels = Variable(labels.type(LongTensor))
        # forward
        outputs = net(inputs)
        _, preds = torch.max(outputs.data, 1)
        sub_outputs.append(outputs.data.cpu().numpy())
    sub_outputs = np.concatenate(sub_outputs)
    # Row-wise softmax over the raw logits.
    for idx, row in enumerate(sub_outputs.astype(float)):
        sub_outputs[idx] = np.exp(row)/np.sum(np.exp(row))
    output_df.loc[:, 1:] = sub_outputs
    time_elapsed = time.time() - since
    print('Run complete in {:.0f}m {:.0f}s'.format(
        time_elapsed // 60, time_elapsed % 60))
    return output_df
def test(epoch, net, testloader, criterion):
    """Evaluate for one epoch; update global best_acc; return stacked losses."""
    global best_acc
    net.eval()
    test_loss = 0
    correct = 0
    total = 0
    # Losses are stacked starting from a leading 0 sentinel element.
    test_loss_stacked = np.array([0])
    for batch_idx, (inputs_value, targets) in enumerate(testloader):
        if use_cuda:
            inputs_value, targets = inputs_value.cuda(), targets.cuda()
        # Disable gradient tracking for the forward pass.
        with torch.no_grad():
            inputs_value, targets = Variable(inputs_value), Variable(targets)
            outputs = net(inputs_value)
            loss = criterion(outputs, targets)
        test_loss += loss.item()
        _, predicted = torch.max(outputs.data, 1)
        total += targets.size(0)
        correct += predicted.eq(targets.data).cpu().sum()
        test_loss_stacked = np.append(test_loss_stacked, loss.data.cpu().numpy())
    # Save checkpoint when best model
    acc = 100. * correct / total
    print("\n| Validation Epoch #%d\t\t\tLoss: %.4f Acc@1: %.2f%%" % (epoch, loss.item(), acc))
    if acc > best_acc:
        best_acc = acc
        print('* Test results : Acc@1 = %.2f%%' % (best_acc))
    return test_loss_stacked
def start_train_test(net, trainloader, testloader, criterion):
    """Train and evaluate for the configured number of epochs.

    NOTE(review): returns only the LAST epoch's (train_loss, test_loss) —
    the arrays are overwritten each iteration; confirm that is intended.
    Weights (state_dict only) are re-saved after every epoch.
    """
    elapsed_time = 0
    for epoch in range(cf.start_epoch, cf.start_epoch + cf.num_epochs):
        start_time = time.time()
        train_loss = train(epoch, net, trainloader, criterion)
        test_loss = test(epoch, net, testloader, criterion)
        torch.save(net.state_dict(), './checkpoint/savetorch_vgg.t7')
        epoch_time = time.time() - start_time
        elapsed_time += epoch_time
        print('| Elapsed time : %d:%02d:%02d' % (get_hms(elapsed_time)))
    return train_loss.tolist(), test_loss.tolist()
def get_hms(seconds):
    """Split a duration in seconds into an (hours, minutes, seconds) tuple."""
    hours, remainder = divmod(seconds, 3600)
    minutes, secs = divmod(remainder, 60)
    return hours, minutes, secs
| {"/train_test.py": ["/transform.py"]} |
54,795 | asdrubalivan/decorators-test-example | refs/heads/master | /test_functions.py | from unittest import TestCase
from unittest.mock import patch, Mock
import app
import importlib
class MyTests(TestCase):
    """Tests for app.ordinary with its decorator patched out.

    (Removed a duplicated `self.calls = Mock()` assignment and fixed a
    stale comment that referred to a lambda pass-through.)
    """

    def setUp(self):
        # Register cleanup first so it runs even if setUp fails later.
        def kill_patches():  # Cleanup callback that undoes our patches
            patch.stopall()  # Stops all patches started with start()
            importlib.reload(app)  # Reload the UUT module to restore the original decorator

        self.calls = Mock()
        self.addCleanup(kill_patches)  # addCleanup instead of tearDown so it always runs

        def func(x):
            # Pass-through decorator substitute: records that it was applied,
            # then returns the decorated function unchanged.
            self.calls()
            return x

        # Patch the decorator where it is defined, then reload the module
        # under test so the patched decorator is the one applied.
        # HINT: for a decorator taking params use: lambda *x, **y: lambda f: f
        patch('app_decorators.make_pretty', func).start()
        importlib.reload(app)

    def test_called(self):
        app.ordinary()
        self.calls.assert_called_once()
54,796 | asdrubalivan/decorators-test-example | refs/heads/master | /app.py | from app_decorators import make_pretty
@make_pretty
def ordinary():
    """Print a fixed message; wrapped by ``make_pretty`` at import time."""
    print("I am ordinary")
| {"/test_functions.py": ["/app.py"]} |
54,801 | sysu-cloudrobot/Fatigue-Detection | refs/heads/master | /train_model.py | from distracted_driver_detection.train import train
def getopts():
    """Parse ``--flag value`` pairs from sys.argv into a dict.

    Returns
    -------
    dict
        Maps each token that starts with '-' to the token that follows it.

    BUG FIXES vs. the original:
    - the program name (argv[0]) is no longer scanned as an option;
    - a trailing flag with no following value no longer raises IndexError
      (it is silently skipped).
    """
    from sys import argv
    opts = {}
    args = argv[1:]  # skip the program name
    for i, token in enumerate(args):
        # startswith also avoids IndexError on an empty-string argument.
        if token.startswith('-') and i + 1 < len(args):
            opts[token] = args[i + 1]
    return opts
if __name__ == "__main__":
    # Command-line entry point: every option is passed as "--flag value".
    opts = getopts()
    num_of_epochs = int(opts.get('--epochs', '20'))
    print_summary = opts.get('--summary', 'False') == 'True'
    skip_first_stage = opts.get('--skip_first_stage', 'False') == 'True'
    batch_size = int(opts.get('--batch', '32'))
    lr = float(opts.get('--lr', '5e-5'))
    weight_path = opts.get('--weight_path', None)  # optional checkpoint to resume from
    dyn_lr = opts.get('--dyn_lr', 'False') == 'True'
    initial_epoch = int(opts.get('--initial_epoch', 0))
    # img_width=299 matches the InceptionV3 input resolution.
    train(num_of_epochs, img_width=299, print_summary=print_summary,
          batch_size=batch_size, learning_rate=lr, weight_path=weight_path,
          dyn_lr=dyn_lr, initial_epoch=initial_epoch,
          skip_first_stage=skip_first_stage)
54,802 | sysu-cloudrobot/Fatigue-Detection | refs/heads/master | /distracted_driver_detection/config.py | import os.path
# Dataset locations, resolved relative to the current working directory.
train_dir = os.path.abspath("data/train")
validation_dir = os.path.abspath("data/validation")
test_dir = os.path.abspath("data/test")
data_path = os.path.abspath("data")
# Directory where trained model checkpoints are written.
models_dir = os.path.abspath("models")
# Model names that presumably need zero-centered input normalization —
# TODO(review): confirm how this list is consumed by callers.
normalize_zero = ['inception_v3']
| {"/train_model.py": ["/distracted_driver_detection/train.py"], "/distracted_driver_detection/models/callbacks.py": ["/distracted_driver_detection/config.py"], "/distracted_driver_detection/detect.py": ["/distracted_driver_detection/config.py", "/distracted_driver_detection/models/__init__.py"], "/safe_behavior_server.py": ["/distracted_driver_detection/detect.py"], "/fat_detect.py": ["/fatigue_detection/ImgWindow.py"], "/web_cam.py": ["/robo_talker.py"], "/distracted_driver_detection/train.py": ["/distracted_driver_detection/models/__init__.py", "/distracted_driver_detection/models/callbacks.py", "/distracted_driver_detection/config.py"], "/fatigue_server.py": ["/fatigue_detection/ImgWindow.py"]} |
54,803 | sysu-cloudrobot/Fatigue-Detection | refs/heads/master | /distracted_driver_detection/models/inception_v3.py | from keras.applications.inception_v3 import InceptionV3
from keras.preprocessing import image
from keras.layers import Input, Flatten, Dense, Dropout, regularizers
from keras.models import Model
from keras.layers import Dense, GlobalAveragePooling2D
def get_model(summary=False, img_width=150):
    """Build an InceptionV3-based classifier for 10 driver-behavior classes.

    Parameters
    ----------
    summary : bool
        If True, print the layer index/name table and the Keras summary.
    img_width : int
        Inputs are square RGB images of shape (img_width, img_width, 3).

    Returns
    -------
    tuple
        (model, 180, 3): the model plus the layer counts used by the
        two-stage fine-tuning schedule (180 frozen base layers, 3 head layers).
    """
    # ImageNet-pretrained convolutional base without the original classifier.
    inception_v3_model = InceptionV3(weights='imagenet',
                                     include_top=False,
                                     input_shape=(img_width, img_width, 3))
    output_inception_conv = inception_v3_model.output
    # Classification head: GAP -> dropout -> L2-regularized 10-way softmax.
    x = GlobalAveragePooling2D(name='avg_pool')(output_inception_conv)
    x = Dropout(0.5)(x)
    x = Dense(10, activation='softmax', kernel_regularizer=regularizers.l2(0.01))(x)
    # BUG FIX: Keras 2 (which this file targets — see kernel_regularizer)
    # requires the 'inputs'/'outputs' keywords; 'input'/'output' are removed.
    my_model = Model(inputs=inception_v3_model.input, outputs=x)
    # Freeze the first 180 base layers for the initial fine-tuning stage.
    for i in range(180):
        my_model.layers[i].trainable = False
    if summary:
        print("---------------------------------------------------------")
        for i, layer in enumerate(my_model.layers):
            print(i, layer.name)
        print("---------------------------------------------------------")
        print("---------------------------------------------------------")
        print("---------------------------------------------------------")
        my_model.summary()
    return my_model, 180, 3
# if __name__ == "__main__":
# model, first_stage, second_stage = get_model(True, 299)
# print('Length', len(model.layers), first_stage, second_stage)
| {"/train_model.py": ["/distracted_driver_detection/train.py"], "/distracted_driver_detection/models/callbacks.py": ["/distracted_driver_detection/config.py"], "/distracted_driver_detection/detect.py": ["/distracted_driver_detection/config.py", "/distracted_driver_detection/models/__init__.py"], "/safe_behavior_server.py": ["/distracted_driver_detection/detect.py"], "/fat_detect.py": ["/fatigue_detection/ImgWindow.py"], "/web_cam.py": ["/robo_talker.py"], "/distracted_driver_detection/train.py": ["/distracted_driver_detection/models/__init__.py", "/distracted_driver_detection/models/callbacks.py", "/distracted_driver_detection/config.py"], "/fatigue_server.py": ["/fatigue_detection/ImgWindow.py"]} |
54,804 | sysu-cloudrobot/Fatigue-Detection | refs/heads/master | /distracted_driver_detection/models/callbacks.py | import os
from keras.callbacks import ModelCheckpoint, LearningRateScheduler, EarlyStopping
from ..config import models_dir
def learning_rate(init_lr):
    """Return a schedule callable for LearningRateScheduler.

    The returned function maps an epoch index to init_lr * 10**(-epoch),
    i.e. a tenfold decay per epoch.
    """
    def schedule(epoch):
        return init_lr * pow(10, -epoch)
    return schedule
def get_callbacks(base_name, init_lr, dyn_lr=False):
    """Build the Keras callback list: checkpointing plus optional LR decay.

    Parameters
    ----------
    base_name : str
        Prefix for checkpoint filenames (epoch/val_acc/val_loss are templated).
    init_lr : float
        Initial learning rate for the scheduler (used only when dyn_lr=True).
    dyn_lr : bool
        If True, add a LearningRateScheduler that decays the LR 10x per epoch.
    """
    callbacks = []
    msave = ModelCheckpoint(
        os.path.join(models_dir, "%s-{epoch:02d}-acc-{val_acc:.4f}-loss-{val_loss:.4f}.hdf5" % base_name),
        # FIX: val_acc should be maximized; the original mode='min' tracked the
        # *worst* accuracy. (With save_best_only left at its default False,
        # every epoch is saved regardless — the mode still should be correct.)
        monitor='val_acc', verbose=1, mode='max')
    callbacks.append(msave)
    if dyn_lr:
        lrs = LearningRateScheduler(learning_rate(init_lr=init_lr))
        callbacks.append(lrs)
    return callbacks
| {"/train_model.py": ["/distracted_driver_detection/train.py"], "/distracted_driver_detection/models/callbacks.py": ["/distracted_driver_detection/config.py"], "/distracted_driver_detection/detect.py": ["/distracted_driver_detection/config.py", "/distracted_driver_detection/models/__init__.py"], "/safe_behavior_server.py": ["/distracted_driver_detection/detect.py"], "/fat_detect.py": ["/fatigue_detection/ImgWindow.py"], "/web_cam.py": ["/robo_talker.py"], "/distracted_driver_detection/train.py": ["/distracted_driver_detection/models/__init__.py", "/distracted_driver_detection/models/callbacks.py", "/distracted_driver_detection/config.py"], "/fatigue_server.py": ["/fatigue_detection/ImgWindow.py"]} |
54,805 | sysu-cloudrobot/Fatigue-Detection | refs/heads/master | /Stage1/fatigue_detection.py | #!/usr/bin/env python
import rospy
from std_msgs.msg import String
from scipy.spatial import distance as dist
from imutils.video import VideoStream
from imutils import face_utils
from threading import Thread
import numpy as np
import pyglet
import argparse
import imutils
import time
import dlib
import cv2
from ImgWindow import ImgWindow
class Fatigue_detection:
    """Drowsiness detector based on eye and mouth aspect ratios of dlib landmarks.

    The alarm latches once the eye ratio stays below threshold, or the mouth
    ratio stays above threshold, for the configured number of consecutive frames.
    """

    # Mouth-open (yawn) threshold and required consecutive frames.
    MOUTH_AR_THRESH = 0.2
    MOUTH_AR_CONSEC_FRAMES = 48
    # Eye-closed threshold and required consecutive frames.
    EYE_AR_THRESH = 0.2
    EYE_AR_CONSEC_FRAMES = 48
    # Landmark index ranges for the eyes (dlib 68-point layout, via imutils).
    (lStart, lEnd) = face_utils.FACIAL_LANDMARKS_IDXS["left_eye"]
    (rStart, rEnd) = face_utils.FACIAL_LANDMARKS_IDXS["right_eye"]
    # Inner-mouth landmark range (points 60-67).
    (mStart, mEnd) = (60, 68)

    def __init__(self):
        # Consecutive-frame counters and the latched alarm state.
        self.eye_counter = 0
        self.mouth_counter = 0
        self.alarm_on = False
        # Most recent eye/mouth aspect ratios (exposed for display).
        self.ear = 0.0
        self.mar = 0.0

    def sound_alarm(self, path):
        """Play the alarm sound.

        NOTE(review): *path* is ignored; 'alarm.wav' is hard-coded below.
        """
        # play an alarm sound
        music = pyglet.resource.media('alarm.wav')
        music.play()
        pyglet.app.run()

    def mouth_aspect_ratio(self, mouth):
        """Return mean vertical inner-mouth distance over horizontal width."""
        # vertical distances between paired inner-lip landmarks (x, y)
        A = dist.euclidean(mouth[1], mouth[7])
        B = dist.euclidean(mouth[2], mouth[6])
        C = dist.euclidean(mouth[3], mouth[5])
        # horizontal mouth width
        D = dist.euclidean(mouth[0], mouth[4])
        # mouth aspect ratio: larger means a wider-open mouth
        mar = (A + B + C) / (3 * D)
        return mar

    def eye_aspect_ratio(self, eye):
        """Return the eye aspect ratio (EAR) for one eye's six landmarks."""
        # vertical eyelid distances
        A = dist.euclidean(eye[1], eye[5])
        B = dist.euclidean(eye[2], eye[4])
        # horizontal eye width
        C = dist.euclidean(eye[0], eye[3])
        # EAR drops toward 0 as the eye closes
        ear = (A + B) / (2.0 * C)
        return ear

    def mouth_fatigue_detection(self, shape):
        """Return True when the mouth is open wider than MOUTH_AR_THRESH."""
        mouth = shape[Fatigue_detection.mStart:Fatigue_detection.mEnd]
        self.mar = self.mouth_aspect_ratio(mouth)
        if self.mar > Fatigue_detection.MOUTH_AR_THRESH:
            return True
        else:
            return False

    def eye_fatigue_detection(self, shape):
        """Return True when the averaged EAR indicates closed eyes."""
        leftEye = shape[Fatigue_detection.lStart:Fatigue_detection.lEnd]
        rightEye = shape[Fatigue_detection.rStart:Fatigue_detection.rEnd]
        leftEAR = self.eye_aspect_ratio(leftEye)
        rightEAR = self.eye_aspect_ratio(rightEye)
        # average both eyes for robustness
        self.ear = (leftEAR + rightEAR) / 2.0
        # below threshold counts as a closed-eye frame
        if self.ear < Fatigue_detection.EYE_AR_THRESH:
            return True
        else:
            return False

    def fatigue_detection(self, shape):
        """Update per-frame counters from *shape* and latch the alarm flag."""
        if self.eye_fatigue_detection(shape):
            self.eye_counter += 1
        else:
            # reset on any open-eye frame; also clears the alarm
            self.eye_counter = 0
            self.alarm_on = False
        if self.mouth_fatigue_detection(shape):
            self.mouth_counter += 1
        else:
            self.mouth_counter = 0
            self.alarm_on = False
        # latch the alarm once either counter exceeds its frame threshold
        if self.eye_counter > Fatigue_detection.EYE_AR_CONSEC_FRAMES or self.mouth_counter > Fatigue_detection.MOUTH_AR_CONSEC_FRAMES:
            self.alarm_on = True

    def is_fatigue(self):
        """Return the alarm state latched by the last fatigue_detection() call."""
        return self.alarm_on
if __name__ == '__main__':
    try:
        # ROS setup (the publish rate is created but unused in this loop).
        rospy.init_node('talker', anonymous=True)
        rate = rospy.Rate(10)  # 10hz
        ap = argparse.ArgumentParser()
        ap.add_argument("-w", "--webcam", type=int, default=0,
                        help="index of webcam on system")
        args = vars(ap.parse_args())
        fd = Fatigue_detection()
        # NOTE(review): ImgWindow is created without in_local=True here, so
        # read() falls through to cv2.imread('') — verify intended.
        iwin = ImgWindow(args)
        # dlib face detector (HOG-based) + 68-point landmark predictor
        print("[INFO] loading facial landmark predictor...")
        detector = dlib.get_frontal_face_detector()
        predictor = dlib.shape_predictor("68 face landmarks.dat")
        print("[INFO] starting video stream thread...")
        time.sleep(1.0)
        # Main loop: grab a frame, detect faces, score fatigue, draw, show.
        while True:
            frame = iwin.read()
            #frame = imutils.resize(frame, width=450)
            gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
            # detect faces in the grayscale frame
            rects = detector(gray, 0)
            for rect in rects:
                # facial landmarks converted to a NumPy array of (x, y) points
                shape = predictor(gray, rect)
                shape = face_utils.shape_to_np(shape)
                fd.fatigue_detection(shape)
                iwin.draw(shape, fd.is_fatigue(), fd.mar, fd.ear)
            iwin.show()
            if iwin.is_close():  # 'q' pressed
                break
        iwin.close()
    except rospy.ROSInterruptException:
        pass
54,806 | sysu-cloudrobot/Fatigue-Detection | refs/heads/master | /distracted_driver_detection/detect.py | from .config import test_dir
from keras.preprocessing import image
from keras.applications.inception_v3 import preprocess_input
import numpy as np
import os
from .models import get_model
# Human-readable (Chinese) labels for the 10 driver-behavior classes,
# indexed by the model's output class id.
class_info = [
    '安全驾驶',
    '右手打字',
    '右手打电话',
    '左手打字',
    '左手打电话',
    '调收音机',
    '喝饮料',
    '拿后面的东西',
    '整理头发和化妆',
    '和其他乘客说话'
]
def load_model(path_to_model, model_type='inception_v3', img_width=299):
    """Instantiate the requested architecture and load trained weights into it."""
    built = get_model(modelType=model_type, print_summary=False, img_width=img_width)
    if isinstance(built, tuple):
        # get_model may return (model, first_stage, second_stage); keep the model.
        built = built[0]
    built.load_weights(path_to_model)
    return built
def detect(path_to_image, img_width=299):
    """Classify one driver image and return the matching class_info label."""
    # model = load_model(path_to_model, model_type, img_width=img_width)
    # Load the image at the network input size.
    img = image.load_img(path_to_image, target_size=(img_width, img_width))
    # Preprocess: to array, add a batch dimension, InceptionV3 input scaling.
    x = image.img_to_array(img)
    x = np.expand_dims(x, axis=0)
    print(x.shape)
    x = preprocess_input(x)
    print(x.shape)
    # Classify with the module-level model.
    result = model.predict(x)
    # Print the predicted probabilities.
    print('Predicted:', result)
    label = np.where(result[0] == max(result[0]))  # indices where probability is maximal
    print(label)
    return class_info[int(label[0][0])]
# NOTE(review): weights are loaded and a sample image is classified at import
# time — a heavy side effect for a library module; consider a __main__ guard.
path_to_model = './models/inception_v3-07-acc-0.9934-loss-0.2515.hdf5'
model = load_model(path_to_model=path_to_model, model_type='inception_v3', img_width=299)
print(detect('./img_1.jpg'))
54,807 | sysu-cloudrobot/Fatigue-Detection | refs/heads/master | /safe_behavior_server.py | #!/usr/bin/env python
import rospy
from std_msgs.msg import String
import base64
from distracted_driver_detection.detect import detect
from distracted_driver_detection.detect import load_model
def callback(data):
    """Handle one base64-encoded frame: save it, classify it, publish the label."""
    imgdata = data.data
    print(type(imgdata))
    imgdata = base64.b64decode(imgdata)
    upload_image_path = 'upload_img.jpg'
    with open(upload_image_path, 'wb') as f:
        f.write(imgdata)
    state = detect(upload_image_path)
    print(state)
    # res = facereco(upload_image_path)
    ret_message = state
    # NOTE(review): a new Publisher is created per message; in ROS the first
    # publish after creation can be dropped — consider a module-level publisher.
    pub = rospy.Publisher('safe_behavior', String, queue_size=10)
    pub.publish(ret_message)
def listener():
    """Subscribe to incoming frames and block until the node is shut down."""
    # In ROS, node names are unique; anonymous=True appends a random suffix so
    # multiple instances of this node can run simultaneously.
    rospy.init_node('safe_behavior_server', anonymous=True)
    rospy.Subscriber('to_processed_image', String, callback)
    # path_to_model = './models/inception_v3-07-acc-0.9934-loss-0.2515.hdf5'
    # global model
    # model = load_model(path_to_model=path_to_model, model_type='inception_v3', img_width=299)
    # spin() simply keeps python from exiting until this node is stopped
    rospy.spin()

if __name__ == '__main__':
    listener()
| {"/train_model.py": ["/distracted_driver_detection/train.py"], "/distracted_driver_detection/models/callbacks.py": ["/distracted_driver_detection/config.py"], "/distracted_driver_detection/detect.py": ["/distracted_driver_detection/config.py", "/distracted_driver_detection/models/__init__.py"], "/safe_behavior_server.py": ["/distracted_driver_detection/detect.py"], "/fat_detect.py": ["/fatigue_detection/ImgWindow.py"], "/web_cam.py": ["/robo_talker.py"], "/distracted_driver_detection/train.py": ["/distracted_driver_detection/models/__init__.py", "/distracted_driver_detection/models/callbacks.py", "/distracted_driver_detection/config.py"], "/fatigue_server.py": ["/fatigue_detection/ImgWindow.py"]} |
54,808 | sysu-cloudrobot/Fatigue-Detection | refs/heads/master | /fat_detect.py | #!/usr/bin/env python
import rospy
from std_msgs.msg import String
from scipy.spatial import distance as dist
from imutils.video import VideoStream
from imutils import face_utils
from threading import Thread
import numpy as np
import pyglet
import argparse
import imutils
import time
import dlib
import cv2
from fatigue_detection.ImgWindow import ImgWindow
from fatigue_detection.detection import Fatigue_detection
if __name__ == '__main__':
    try:
        # ROS setup (the publish rate is created but unused in this loop).
        rospy.init_node('talker', anonymous=True)
        rate = rospy.Rate(10)  # 10hz
        ap = argparse.ArgumentParser()
        ap.add_argument("-w", "--webcam", type=int, default=0,
                        help="index of webcam on system")
        args = vars(ap.parse_args())
        fd = Fatigue_detection()
        iwin = ImgWindow(args, True)  # in_local=True: read frames from the webcam
        # dlib face detector (HOG-based) + 68-point landmark predictor
        print("[INFO] loading facial landmark predictor...")
        detector = dlib.get_frontal_face_detector()
        predictor = dlib.shape_predictor("68 face landmarks.dat")
        print("[INFO] starting video stream thread...")
        time.sleep(1.0)
        # Main loop: grab a frame, detect faces, score fatigue, draw, show.
        while True:
            frame = iwin.read()
            #frame = imutils.resize(frame, width=450)
            gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
            # detect faces in the grayscale frame
            rects = detector(gray, 0)
            for rect in rects:
                # facial landmarks converted to a NumPy array of (x, y) points
                shape = predictor(gray, rect)
                shape = face_utils.shape_to_np(shape)
                fd.fatigue_detection(shape)
                iwin.draw(shape, fd.is_fatigue(), fd.mar, fd.ear)
            iwin.show()
            if iwin.is_close():  # 'q' pressed
                break
        iwin.close()
    except rospy.ROSInterruptException:
        pass
54,809 | sysu-cloudrobot/Fatigue-Detection | refs/heads/master | /fatigue_detection/ImgWindow.py | import numpy as np
import cv2
from imutils.video import VideoStream
from imutils import face_utils
import imutils
class ImgWindow:
    """Frame source plus OpenCV display helper for the fatigue-detection UI.

    In local mode frames come from a webcam VideoStream; otherwise read()
    loads a frame from a file path.
    """

    # Landmark index ranges (dlib 68-point layout) used when drawing overlays.
    (lStart, lEnd) = face_utils.FACIAL_LANDMARKS_IDXS["left_eye"]
    (rStart, rEnd) = face_utils.FACIAL_LANDMARKS_IDXS["right_eye"]
    (mStart, mEnd) = (60, 68)

    def __init__(self, args=None, in_local=False):
        # Start the webcam stream only in local mode; args must then hold "webcam".
        if in_local:
            self.vs = VideoStream(src=args["webcam"]).start()
        self.in_local = in_local
        self.frame = None

    def read(self, file_path=''):
        """Grab the next frame: webcam in local mode, else cv2.imread(file_path)."""
        if self.in_local:
            self.frame = self.vs.read()
            self.frame = imutils.resize(self.frame, width=450)
        else:
            # NOTE(review): cv2.imread('') returns None when no path is given.
            self.frame = cv2.imread(file_path)
        return self.frame

    def draw(self, shape, flag, mar, ear):
        """Overlay mouth/eye hulls, the MAR/EAR readouts and the alert banner."""
        mouth = shape[ImgWindow.mStart:ImgWindow.mEnd]
        leftEye = shape[ImgWindow.lStart:ImgWindow.lEnd]
        rightEye = shape[ImgWindow.rStart:ImgWindow.rEnd]
        mouthHull = cv2.convexHull(mouth)
        cv2.drawContours(self.frame, [mouthHull], -1, (0, 255, 0), 1)
        leftEyeHull = cv2.convexHull(leftEye)
        rightEyeHull = cv2.convexHull(rightEye)
        cv2.drawContours(self.frame, [leftEyeHull], -1, (0, 255, 0), 1)
        cv2.drawContours(self.frame, [rightEyeHull], -1, (0, 255, 0), 1)
        # Banner shown only while the drowsiness flag is set.
        if flag:
            cv2.putText(self.frame, "DROWSINESS ALERT!", (10, 30),
                        cv2.FONT_HERSHEY_SIMPLEX, 0.7, (0, 0, 255), 2)
        # MAR/EAR readouts are always drawn.
        cv2.putText(self.frame, "MAR: {:.2f}".format(mar), (300, 30),
                    cv2.FONT_HERSHEY_SIMPLEX, 0.7, (0, 0, 255), 2)
        cv2.putText(self.frame, "EAR: {:.2f}".format(ear), (300, 70),
                    cv2.FONT_HERSHEY_SIMPLEX, 0.7, (0, 0, 255), 2)

    def show(self):
        """Display the current frame and record the last key press."""
        cv2.imshow("Frame", self.frame)
        self.key = cv2.waitKey(1) & 0xFF

    def is_close(self):
        """Return True when the user pressed 'q' during the last show()."""
        return self.key == ord("q")

    def close(self):
        """Destroy all OpenCV windows (the video stream is left running)."""
        cv2.destroyAllWindows()
        # self.vs.stop()
| {"/train_model.py": ["/distracted_driver_detection/train.py"], "/distracted_driver_detection/models/callbacks.py": ["/distracted_driver_detection/config.py"], "/distracted_driver_detection/detect.py": ["/distracted_driver_detection/config.py", "/distracted_driver_detection/models/__init__.py"], "/safe_behavior_server.py": ["/distracted_driver_detection/detect.py"], "/fat_detect.py": ["/fatigue_detection/ImgWindow.py"], "/web_cam.py": ["/robo_talker.py"], "/distracted_driver_detection/train.py": ["/distracted_driver_detection/models/__init__.py", "/distracted_driver_detection/models/callbacks.py", "/distracted_driver_detection/config.py"], "/fatigue_server.py": ["/fatigue_detection/ImgWindow.py"]} |
54,810 | sysu-cloudrobot/Fatigue-Detection | refs/heads/master | /distracted_driver_detection/models/__init__.py | from . import inception_v3
def get_model(modelType, img_width, print_summary=False):
    """Model factory: dispatch on modelType; returns None for unknown names."""
    if modelType != 'inception_v3':
        return None
    return inception_v3.get_model(print_summary, img_width)
| {"/train_model.py": ["/distracted_driver_detection/train.py"], "/distracted_driver_detection/models/callbacks.py": ["/distracted_driver_detection/config.py"], "/distracted_driver_detection/detect.py": ["/distracted_driver_detection/config.py", "/distracted_driver_detection/models/__init__.py"], "/safe_behavior_server.py": ["/distracted_driver_detection/detect.py"], "/fat_detect.py": ["/fatigue_detection/ImgWindow.py"], "/web_cam.py": ["/robo_talker.py"], "/distracted_driver_detection/train.py": ["/distracted_driver_detection/models/__init__.py", "/distracted_driver_detection/models/callbacks.py", "/distracted_driver_detection/config.py"], "/fatigue_server.py": ["/fatigue_detection/ImgWindow.py"]} |
54,811 | sysu-cloudrobot/Fatigue-Detection | refs/heads/master | /web_cam.py | #!/usr/bin/env python3
from flask import Flask
import time
from flask import render_template
from flask import request
import base64
from PIL import Image
from io import StringIO, BytesIO
import robo_talker
import cv2
import numpy as np
from sensor_msgs.msg import Image
app = Flask(__name__)
@app.route('/', methods=['POST','GET'])
def webcam():
    """GET: serve the upload page. POST: forward the frame and report results."""
    if request.method == 'GET':
        return render_template('upload.html')
    elif request.method == 'POST':
        image_b64 = request.form['img']
        print(type(image_b64))
        robo_talker.talker(image_b64)
        # Give the ROS nodes time to reply (results arrive via module globals).
        time.sleep(1)
        result = ''
        is_fatigue = ''
        mar = ''
        ear = ''
        # FIX: compare with None by identity (PEP 8), not '!='.
        if robo_talker.f_img is not None:
            fat_result_path = robo_talker.f_img
            is_fatigue, mar, ear = fat_result_path.split(',')
            robo_talker.f_img = None
        if robo_talker.drive_state is not None:
            result = robo_talker.drive_state
            robo_talker.drive_state = None
        return 'state: {0}, is_fatigue: {1}, mar: {2}, ear: {3}'.format(result, is_fatigue, mar, ear)
        # return render_template('tmp.html', state="result", img_path='static/ret_image.jpg')
# @app.route('/upload', methods=['POST','GET'])
# def upload():
# print('getting data from web.')
# if request.method == 'POST':
# image_b64 = request.form['img']
# print(type(image_b64))
# robo_talker.talker(image_b64)
# # 等待返回结果
# time.sleep(3)
# if robo_talker.f_img != None:
# fat_result_path = robo_talker.f_img
# robo_talker.f_img = None
# if robo_talker.drive_state != None:
# result = robo_talker.drive_state
# robo_talker.drive_state = None
# return render_template('tmp.html', state=result, img_path=fat_result_path)
if __name__ == '__main__':
    # Listen on all interfaces, port 7779 (ssl_context='adhoc' would enable HTTPS).
    app.run(host='0.0.0.0', port=7779) #, ssl_context='adhoc'
| {"/train_model.py": ["/distracted_driver_detection/train.py"], "/distracted_driver_detection/models/callbacks.py": ["/distracted_driver_detection/config.py"], "/distracted_driver_detection/detect.py": ["/distracted_driver_detection/config.py", "/distracted_driver_detection/models/__init__.py"], "/safe_behavior_server.py": ["/distracted_driver_detection/detect.py"], "/fat_detect.py": ["/fatigue_detection/ImgWindow.py"], "/web_cam.py": ["/robo_talker.py"], "/distracted_driver_detection/train.py": ["/distracted_driver_detection/models/__init__.py", "/distracted_driver_detection/models/callbacks.py", "/distracted_driver_detection/config.py"], "/fatigue_server.py": ["/fatigue_detection/ImgWindow.py"]} |
54,812 | sysu-cloudrobot/Fatigue-Detection | refs/heads/master | /robo_talker.py | #!/usr/bin/env python
import rospy
from std_msgs.msg import String
from sensor_msgs.msg import Image
global subscr
def callback(data):
    """Store the driving-state reply and unregister (one-shot subscriber)."""
    msg = data.data
    print(msg)
    global drive_state
    drive_state = msg
    # One reply per request: stop listening until talker() re-subscribes.
    subscr.unregister()
global fatigue_subscr
def fatigue_callback(data):
    """Store the fatigue reply string and unregister (one-shot subscriber)."""
    print("fatigue_callback")
    msg = data.data
    print(msg)
    global f_img
    f_img = msg
    # One reply per request: stop listening until talker() re-subscribes.
    fatigue_subscr.unregister()
subscr = rospy.Subscriber('safe_behavior', String, callback)
drive_state = None
fatigue_subscr = rospy.Subscriber('fatigue', String, fatigue_callback)
f_img = None
#subscr = rospy.Subscriber('results', String, callback)
def talker(message=''):
    """Publish *message* to 'to_processed_image' and re-arm the reply subscribers.

    FIX: *message* now defaults to '' so the ``__main__`` smoke-test call
    ``talker()`` no longer raises TypeError; existing callers that pass a
    message are unaffected (backward compatible).
    """
    pub = rospy.Publisher('to_processed_image', String, queue_size=10)
    rospy.init_node('talker', anonymous=True)
    if not rospy.is_shutdown():
        pub.publish(message)
        # Re-create the one-shot subscribers (their callbacks unregister themselves).
        global subscr
        subscr = rospy.Subscriber('safe_behavior', String, callback)
        global fatigue_subscr
        fatigue_subscr = rospy.Subscriber('fatigue', String, fatigue_callback)
    #rospy.spin()
if __name__ == '__main__':
    try:
        # NOTE(review): talker() is invoked without the required 'message'
        # argument — as written this raises TypeError; confirm intended usage.
        talker()
    except rospy.ROSInterruptException:
        pass
| {"/train_model.py": ["/distracted_driver_detection/train.py"], "/distracted_driver_detection/models/callbacks.py": ["/distracted_driver_detection/config.py"], "/distracted_driver_detection/detect.py": ["/distracted_driver_detection/config.py", "/distracted_driver_detection/models/__init__.py"], "/safe_behavior_server.py": ["/distracted_driver_detection/detect.py"], "/fat_detect.py": ["/fatigue_detection/ImgWindow.py"], "/web_cam.py": ["/robo_talker.py"], "/distracted_driver_detection/train.py": ["/distracted_driver_detection/models/__init__.py", "/distracted_driver_detection/models/callbacks.py", "/distracted_driver_detection/config.py"], "/fatigue_server.py": ["/fatigue_detection/ImgWindow.py"]} |
54,813 | sysu-cloudrobot/Fatigue-Detection | refs/heads/master | /distracted_driver_detection/train.py | from keras.optimizers import RMSprop
from .models import get_model
from .models.callbacks import get_callbacks
from keras.preprocessing.image import ImageDataGenerator
from .config import train_dir, validation_dir, test_dir, data_path, normalize_zero
num_of_imgs = 21794
def get_train_datagen(img_width, batch_size=32):
    """Yield augmented, shuffled, one-hot-labelled training batches from train_dir."""
    augmenter = ImageDataGenerator(rescale=1.0 / 255, shear_range=0.2, zoom_range=0.2, horizontal_flip=True)
    return augmenter.flow_from_directory(
        train_dir,
        target_size=(img_width, img_width),
        batch_size=batch_size,
        class_mode='categorical',
        shuffle=True,
    )
def get_validation_datagen(img_width, batch_size=32):
    """Yield rescaled, shuffled, one-hot-labelled batches from validation_dir."""
    rescaler = ImageDataGenerator(rescale=1.0 / 255)
    return rescaler.flow_from_directory(
        validation_dir,
        target_size=(img_width, img_width),
        batch_size=batch_size,
        class_mode='categorical',
        shuffle=True,
    )
def train(num_of_epochs, img_width=299, print_summary=False,
    batch_size=32, learning_rate=5e-5, weight_path=None,
    dyn_lr=False, initial_epoch=0, skip_first_stage=False):
    """Two-stage fine-tuning of the InceptionV3 classifier.

    Stage 1 trains only the last `first_stage` (head) layers for 2 epochs;
    stage 2 unfreezes layers from index `second_stage` onward and trains for
    num_of_epochs more with RMSprop(learning_rate).
    """
    model_type = 'inception_v3'
    # NOTE(review): get_model returns (model, 180, 3); this unpacking makes
    # second_stage=180 (frozen-base boundary) and first_stage=3 (head layers)
    # — confirm the naming is intentional.
    model, second_stage, first_stage = get_model(model_type, img_width, print_summary)
    # Stage 1: freeze everything except the last `first_stage` layers.
    if first_stage is not None and second_stage is not None:
        for layer in model.layers[:-first_stage]:
            layer.trainable = False
        for layer in model.layers[-first_stage:]:
            layer.trainable = True
    if weight_path is not None and len(weight_path) > 0:
        print('[INFO] loading weights')
        model.load_weights(weight_path)
    model.compile(loss='categorical_crossentropy', optimizer='rmsprop', metrics=['accuracy'])
    train_generator = get_train_datagen(img_width, batch_size)
    validation_generator = get_validation_datagen(img_width, batch_size)
    # train the convolutional neural network
    if not skip_first_stage:
        print('[INFO] Start first stage')
        # NOTE(review): 33 / batch_size is < 2 steps per epoch for the default
        # batch size — presumably a smoke-test value; verify against data size.
        model.fit_generator(generator=train_generator, epochs=2,
            steps_per_epoch= 33 / batch_size,
            validation_steps= 33 / batch_size,
            validation_data=validation_generator,
            callbacks=get_callbacks(model_type, 0.001, False),
            initial_epoch=0)
    # Stage 2: freeze the first `second_stage` layers, train the rest.
    if first_stage is not None and second_stage is not None:
        for layer in model.layers[:second_stage]:
            layer.trainable = False
        for layer in model.layers[second_stage:]:
            layer.trainable = True
    model_opt = RMSprop(lr=learning_rate)
    model.compile(loss='categorical_crossentropy', optimizer=model_opt, metrics=['accuracy'])
    print('[INFO] Run train process')
    # Epoch numbering continues after the 2 first-stage epochs.
    model.fit_generator(generator=train_generator, epochs=num_of_epochs + 2,
        steps_per_epoch=18304 / batch_size,
        validation_steps=3328 / batch_size,
        validation_data=validation_generator,
        callbacks=get_callbacks(model_type, learning_rate, dyn_lr),
        initial_epoch=2 + initial_epoch)
    print('[INFO] End train process')
| {"/train_model.py": ["/distracted_driver_detection/train.py"], "/distracted_driver_detection/models/callbacks.py": ["/distracted_driver_detection/config.py"], "/distracted_driver_detection/detect.py": ["/distracted_driver_detection/config.py", "/distracted_driver_detection/models/__init__.py"], "/safe_behavior_server.py": ["/distracted_driver_detection/detect.py"], "/fat_detect.py": ["/fatigue_detection/ImgWindow.py"], "/web_cam.py": ["/robo_talker.py"], "/distracted_driver_detection/train.py": ["/distracted_driver_detection/models/__init__.py", "/distracted_driver_detection/models/callbacks.py", "/distracted_driver_detection/config.py"], "/fatigue_server.py": ["/fatigue_detection/ImgWindow.py"]} |
54,814 | sysu-cloudrobot/Fatigue-Detection | refs/heads/master | /fatigue_server.py | #!/usr/bin/env python
import rospy
from std_msgs.msg import String
import base64
from fatigue_detection.detection import Fatigue_detection
from imutils.video import VideoStream
from imutils import face_utils
import pyglet
import imutils
import time
import dlib
import cv2
from fatigue_detection.ImgWindow import ImgWindow
def callback(data):
    """Process one base64 frame: detect fatigue, annotate, publish 'flag,mar,ear'."""
    imgdata = data.data
    imgdata = base64.b64decode(imgdata)
    print(type(imgdata))
    upload_image_path = 'upload_img_fatigue.jpg'
    with open(upload_image_path, 'wb') as f:
        f.write(imgdata)
    frame = iwin.read(upload_image_path)
    #frame = imutils.resize(frame, width=450)
    gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
    # detect faces in the grayscale frame
    rects = detector(gray, 0)
    for rect in rects:
        # facial landmarks converted to a NumPy array of (x, y) points
        shape = predictor(gray, rect)
        shape = face_utils.shape_to_np(shape)
        fd.fatigue_detection(shape)
        iwin.draw(shape, fd.is_fatigue(), fd.mar, fd.ear)
    # iwin.close()
    # with open()
    # ret_message = base64.b64encode(iwin.frame)
    # Save the annotated frame for the web UI, then publish the numeric result.
    ret_image_path = './templates/images/ret_image.jpg'
    cv2.imwrite(ret_image_path, iwin.frame)
    # with open(ret_image_path, 'rb') as f:
    #     ret_message = base64.b64encode(f.read())
    global pub
    # pub.publish('static/ret_image.jpg')
    ret_result = str(fd.is_fatigue()) + ',' + str(fd.mar) + ',' + str(fd.ear)
    pub.publish(ret_result)
def listener():
    """Initialize the fatigue-detection node, models and pub/sub, then spin."""
    # anonymous=True lets multiple instances of this node coexist in ROS.
    rospy.init_node('fatigue_server', anonymous=True)
    global fd
    fd = Fatigue_detection()
    global iwin
    iwin = ImgWindow()  # non-local mode: frames are read from files
    # dlib face detector (HOG-based) + 68-point landmark predictor
    print("[INFO] loading facial landmark predictor...")
    global detector
    detector = dlib.get_frontal_face_detector()
    global predictor
    predictor = dlib.shape_predictor("68 face landmarks.dat")
    rospy.Subscriber('to_processed_image', String, callback)
    global pub
    pub = rospy.Publisher('fatigue', String, queue_size=10)
    # spin() simply keeps python from exiting until this node is stopped
    rospy.spin()

if __name__ == '__main__':
    listener()
| {"/train_model.py": ["/distracted_driver_detection/train.py"], "/distracted_driver_detection/models/callbacks.py": ["/distracted_driver_detection/config.py"], "/distracted_driver_detection/detect.py": ["/distracted_driver_detection/config.py", "/distracted_driver_detection/models/__init__.py"], "/safe_behavior_server.py": ["/distracted_driver_detection/detect.py"], "/fat_detect.py": ["/fatigue_detection/ImgWindow.py"], "/web_cam.py": ["/robo_talker.py"], "/distracted_driver_detection/train.py": ["/distracted_driver_detection/models/__init__.py", "/distracted_driver_detection/models/callbacks.py", "/distracted_driver_detection/config.py"], "/fatigue_server.py": ["/fatigue_detection/ImgWindow.py"]} |
54,836 | LinguList/NexusMaker | refs/heads/master | /nexusmaker/tests/test_tools.py | #!/usr/bin/env python3
#coding=utf-8
"""..."""
__author__ = 'Simon J. Greenhill <simon@simon.net.nz>'
__copyright__ = 'Copyright (c) 2016 Simon J. Greenhill'
__license__ = 'New-style BSD'
import unittest
from nexusmaker.tools import slugify, parse_word, natsort
class Test_NatSort(unittest.TestCase):
def test(self):
self.assertEqual(natsort(['b', 'a']), ['a', 'b'])
self.assertEqual(natsort(['c', '1']), ['1', 'c'])
self.assertEqual(natsort(['52', '1']), ['1', '52'])
self.assertEqual(natsort(['54', '53']), ['53', '54'])
self.assertEqual(natsort(['53', '54']), ['53', '54'])
class Test_Slugify(unittest.TestCase):
def test_brackets(self):
self.assertEqual(slugify('Banggai (W.dialect)'), 'Banggai_Wdialect')
def test_dash(self):
self.assertEqual(slugify('Aklanon - Bisayan'), 'Aklanon_Bisayan')
def test_accents(self):
self.assertEqual(slugify('Gimán'), 'Giman')
self.assertEqual(slugify('Hanunóo'), 'Hanunoo')
def test_colon(self):
self.assertEqual(slugify('Kakiduge:n Ilongot'), 'Kakidugen_Ilongot')
def test_slash(self):
self.assertEqual(slugify('Angkola / Mandailin'), 'Angkola_Mandailin')
def test_apostrophe(self):
self.assertEqual(slugify('V’ënen Taut'), 'Venen_Taut')
class TestParseWord(unittest.TestCase):
def test_One_1(self):
self.assertEqual(parse_word("One_1"), ("One", "1"))
def test_One_13(self):
self.assertEqual(parse_word("One_13"), ("One", "13"))
def test_One_u_21(self):
self.assertEqual(parse_word("One_u_21"), ("One", "u_21"))
def test_One_u21(self):
self.assertEqual(parse_word("One_u21"), ("One", "u21"))
def test_One_Hundred_16(self):
self.assertEqual(parse_word("One_Hundred_16"), ("One_Hundred", "16"))
def test_One_Hundred_u_16(self):
self.assertEqual(parse_word("One_Hundred_u_16"), ("One_Hundred", "u_16"))
def test_One_Hundred_u16(self):
self.assertEqual(parse_word("One_Hundred_u16"), ("One_Hundred", "u16"))
def test_Eight_u_3569(self):
self.assertEqual(parse_word("Eight_u_3569"), ("Eight", "u_3569"))
def test_Eight_u3569(self):
self.assertEqual(parse_word("Eight_u3569"), ("Eight", "u3569"))
def test_correct_true_u_5631(self):
self.assertEqual(parse_word("correct_true_u_5631"), ("correct_true", "u_5631"))
def test_correct_true_u5631(self):
self.assertEqual(parse_word("correct_true_u5631"), ("correct_true", "u5631"))
def test_to_tie_up_fasten_u_5685(self):
self.assertEqual(parse_word("to_tie_up_fasten_u_5685"), ("to_tie_up_fasten", "u_5685"))
def test_to_tie_up_fasten_u5685(self):
self.assertEqual(parse_word("to_tie_up_fasten_u5685"), ("to_tie_up_fasten", "u5685"))
def test_error(self):
with self.assertRaises(ValueError):
parse_word("hand")
| {"/nexusmaker/tests/test_tools.py": ["/nexusmaker/tools.py"], "/nexusmaker/tests/test_NexusMakerAscertainedWords.py": ["/nexusmaker/__init__.py", "/nexusmaker/tests/test_NexusMaker.py"], "/nexusmaker/tests/test_regressions.py": ["/nexusmaker/__init__.py"], "/nexusmaker/__init__.py": ["/nexusmaker/CognateParser.py"], "/nexusmaker/tests/test_Complex.py": ["/nexusmaker/__init__.py"], "/nexusmaker/tests/test_Record.py": ["/nexusmaker/__init__.py"], "/nexusmaker/tests/test_CognateParser.py": ["/nexusmaker/__init__.py"], "/nexusmaker/CognateParser.py": ["/nexusmaker/tools.py"], "/nexusmaker/tests/test_Mayan.py": ["/nexusmaker/__init__.py"], "/nexusmaker/tests/test_NexusMaker.py": ["/nexusmaker/__init__.py"]} |
54,837 | LinguList/NexusMaker | refs/heads/master | /nexusmaker/tests/test_NexusMakerAscertainedWords.py | import sys
import unittest
from nexusmaker import NexusMakerAscertainedWords
from .test_NexusMaker import TestNexusMaker, TESTDATA
class Test_Is_Sequential(unittest.TestCase):
def setUp(self):
self.maker = NexusMakerAscertainedWords([])
def test_true(self):
assert self.maker._is_sequential([1,2,3,4,5])
def test_true_2(self):
assert self.maker._is_sequential([3,4,5,6,7])
def test_false(self):
assert not self.maker._is_sequential([1, 3])
class TestNexusMakerAscertainedWords(TestNexusMaker):
def setUp(self): # override parent method
self.maker = NexusMakerAscertainedWords(data=TESTDATA)
self.nex = self.maker.make()
def test_get_characters_simple(self):
chars = self.maker._get_characters(self.nex)
# NOTE characters are zero indexed
assert chars['arm'] == [0, 1, 2, 3]
assert chars['eye'] == [4, 5]
assert chars['leg'] == [6, 7, 8]
def test_get_characters_error(self):
with self.assertRaises(ValueError):
chars = self.maker._get_characters(self.nex, delimiter="X")
def test_create_assumptions_simple(self):
assumpt = self.maker.create_assumptions(self.nex)
assert 'begin assumptions' in assumpt[0]
assert 'arm = 1-4' in assumpt[1]
assert 'eye = 5-6' in assumpt[2]
assert 'leg = 7-9' in assumpt[3]
assert 'end;' in assumpt[4]
# 1 more site per word than in ascertainment = none, = 6 cognates + 3 words = 9
def test_nsites(self):
assert len(self.nex.data.keys()) == 9
def test_eye_0(self):
cog = 'eye_%s' % self.maker.ASCERTAINMENT_LABEL
assert self.nex.data[cog]['A'] == '0'
assert self.nex.data[cog]['B'] == '0'
assert self.nex.data[cog]['C'] == '0'
assert self.nex.data[cog]['D'] == '?'
def test_leg_0(self):
cog = 'leg_%s' % self.maker.ASCERTAINMENT_LABEL
assert self.nex.data[cog]['A'] == '0'
assert self.nex.data[cog]['B'] == '0'
assert self.nex.data[cog]['C'] == '?'
assert self.nex.data[cog]['D'] == '0'
def test_arm_0(self):
cog = 'arm_%s' % self.maker.ASCERTAINMENT_LABEL
assert self.nex.data[cog]['A'] == '0'
assert self.nex.data[cog]['B'] == '0'
assert self.nex.data[cog]['C'] == '0'
assert self.nex.data[cog]['D'] == '0'
def test_write_extra(self):
out = self.maker.write()
assert 'begin assumptions;' in out
assert 'charset arm' in out
assert 'charset eye' in out
assert 'charset leg' in out
| {"/nexusmaker/tests/test_tools.py": ["/nexusmaker/tools.py"], "/nexusmaker/tests/test_NexusMakerAscertainedWords.py": ["/nexusmaker/__init__.py", "/nexusmaker/tests/test_NexusMaker.py"], "/nexusmaker/tests/test_regressions.py": ["/nexusmaker/__init__.py"], "/nexusmaker/__init__.py": ["/nexusmaker/CognateParser.py"], "/nexusmaker/tests/test_Complex.py": ["/nexusmaker/__init__.py"], "/nexusmaker/tests/test_Record.py": ["/nexusmaker/__init__.py"], "/nexusmaker/tests/test_CognateParser.py": ["/nexusmaker/__init__.py"], "/nexusmaker/CognateParser.py": ["/nexusmaker/tools.py"], "/nexusmaker/tests/test_Mayan.py": ["/nexusmaker/__init__.py"], "/nexusmaker/tests/test_NexusMaker.py": ["/nexusmaker/__init__.py"]} |
54,838 | LinguList/NexusMaker | refs/heads/master | /nexusmaker/tools.py | import re
import unicodedata
is_unique = re.compile(r"""^(.*)_(u_?\d+)$""")
def parse_word(label, delimiter="_"):
"""
Returns a tuple of word, cognate_id.
"""
if is_unique.match(label):
return is_unique.findall(label)[0]
elif delimiter in label:
return tuple(label.rsplit(delimiter, 1))
else:
raise ValueError("No delimiter %s in %s" % (delimiter, label))
def slugify(var):
var = var.replace("(", "").replace(")", "")
var = var.replace(" / ", "_").replace("/", "_")
var = unicodedata.normalize('NFKD', var)
var = "".join([c for c in var if not unicodedata.combining(c)])
var = var.replace(" - ", "_")
var = var.replace(":", "").replace('?', "")
var = var.replace('’', '').replace("'", "")
var = var.replace(',', "").replace(".", "")
var = var.replace(" ", "_")
return var
def natsort(alist):
"""
Sort the given iterable in the way that humans expect.
From: https://stackoverflow.com/questions/2669059/how-to-sort-alpha-numeric-set-in-python
"""
convert = lambda text: int(text) if text.isdigit() else text
alphanum_key = lambda key: [convert(c) for c in re.split('([0-9]+)', key)]
return sorted(alist, key = alphanum_key) | {"/nexusmaker/tests/test_tools.py": ["/nexusmaker/tools.py"], "/nexusmaker/tests/test_NexusMakerAscertainedWords.py": ["/nexusmaker/__init__.py", "/nexusmaker/tests/test_NexusMaker.py"], "/nexusmaker/tests/test_regressions.py": ["/nexusmaker/__init__.py"], "/nexusmaker/__init__.py": ["/nexusmaker/CognateParser.py"], "/nexusmaker/tests/test_Complex.py": ["/nexusmaker/__init__.py"], "/nexusmaker/tests/test_Record.py": ["/nexusmaker/__init__.py"], "/nexusmaker/tests/test_CognateParser.py": ["/nexusmaker/__init__.py"], "/nexusmaker/CognateParser.py": ["/nexusmaker/tools.py"], "/nexusmaker/tests/test_Mayan.py": ["/nexusmaker/__init__.py"], "/nexusmaker/tests/test_NexusMaker.py": ["/nexusmaker/__init__.py"]} |
54,839 | LinguList/NexusMaker | refs/heads/master | /nexusmaker/tests/test_regressions.py | import unittest
from nexusmaker import NexusMaker, Record
TESTDATA = [
Record(Language="A", Word="eye", Item="", Cognacy="1"),
Record(Language="B", Word="eye", Item="", Cognacy="1"),
Record(Language="C", Word="eye", Item="", Cognacy="2"),
Record(Language="D", Word="eye", Item="", Cognacy="2"),
Record(Language="E", Word="eye", Item="", Cognacy=""),
]
class TestBugUniquesAndAmbig(unittest.TestCase):
"""
Fix Bug caused by lru_cache on ._is_missing_for_word that made unique
sites occur as single ones in blocks of missing states.
"""
def setUp(self):
self.maker = NexusMaker(data=TESTDATA)
self.nex = self.maker.make()
def test_cognate_sets(self):
assert ('eye', '1') in self.maker.cognates
assert ('eye', '2') in self.maker.cognates
assert ('eye', 'u_1') in self.maker.cognates
assert sorted(self.maker.cognates[('eye', '1')]) == ['A', 'B']
assert sorted(self.maker.cognates[('eye', '2')]) == ['C', 'D']
assert sorted(self.maker.cognates[('eye', 'u_1')]) == ['E']
def test_is_missing_for_word(self):
assert self.maker._is_missing_for_word('E', 'eye') == False
def test_nexus(self):
assert self.nex.data['eye_1'] == {
'A': '1', 'B': '1',
'C': '0', 'D': '0',
'E': '0' # NOT '?'
}
assert self.nex.data['eye_2'] == {
'A': '0', 'B': '0',
'C': '1', 'D': '1',
'E': '0' # NOT '?'
}
assert self.nex.data['eye_u_1'] == {
'A': '0', 'B': '0',
'C': '0', 'D': '0',
'E': '1'
}
| {"/nexusmaker/tests/test_tools.py": ["/nexusmaker/tools.py"], "/nexusmaker/tests/test_NexusMakerAscertainedWords.py": ["/nexusmaker/__init__.py", "/nexusmaker/tests/test_NexusMaker.py"], "/nexusmaker/tests/test_regressions.py": ["/nexusmaker/__init__.py"], "/nexusmaker/__init__.py": ["/nexusmaker/CognateParser.py"], "/nexusmaker/tests/test_Complex.py": ["/nexusmaker/__init__.py"], "/nexusmaker/tests/test_Record.py": ["/nexusmaker/__init__.py"], "/nexusmaker/tests/test_CognateParser.py": ["/nexusmaker/__init__.py"], "/nexusmaker/CognateParser.py": ["/nexusmaker/tools.py"], "/nexusmaker/tests/test_Mayan.py": ["/nexusmaker/__init__.py"], "/nexusmaker/tests/test_NexusMaker.py": ["/nexusmaker/__init__.py"]} |
54,840 | LinguList/NexusMaker | refs/heads/master | /setup.py | #!/usr/bin/env python
from setuptools import setup, find_packages
__version__ = 1.5
setup(
name='nexusmaker',
version=__version__,
description="nexusmaker - Nexus file maker for language phylogenies",
url='',
author='Simon J. Greenhill',
author_email='simon@simon.net.nz',
license='BSD',
zip_safe=True,
# See https://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
"Intended Audience :: Science/Research",
"License :: OSI Approved :: BSD License",
"Topic :: Scientific/Engineering",
"Topic :: Scientific/Engineering :: Bio-Informatics",
"Topic :: Software Development :: Libraries :: Python Modules",
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.2',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
],
keywords='language-phylogenies',
packages=find_packages(),
install_requires=['python-nexus', ],
setup_requires=['pytest-runner', ],
tests_require=['pytest', ],
)
| {"/nexusmaker/tests/test_tools.py": ["/nexusmaker/tools.py"], "/nexusmaker/tests/test_NexusMakerAscertainedWords.py": ["/nexusmaker/__init__.py", "/nexusmaker/tests/test_NexusMaker.py"], "/nexusmaker/tests/test_regressions.py": ["/nexusmaker/__init__.py"], "/nexusmaker/__init__.py": ["/nexusmaker/CognateParser.py"], "/nexusmaker/tests/test_Complex.py": ["/nexusmaker/__init__.py"], "/nexusmaker/tests/test_Record.py": ["/nexusmaker/__init__.py"], "/nexusmaker/tests/test_CognateParser.py": ["/nexusmaker/__init__.py"], "/nexusmaker/CognateParser.py": ["/nexusmaker/tools.py"], "/nexusmaker/tests/test_Mayan.py": ["/nexusmaker/__init__.py"], "/nexusmaker/tests/test_NexusMaker.py": ["/nexusmaker/__init__.py"]} |
54,841 | LinguList/NexusMaker | refs/heads/master | /nexusmaker/__init__.py | from .maker import Record, NexusMaker, NexusMakerAscertained, NexusMakerAscertainedWords
from .CognateParser import CognateParser
| {"/nexusmaker/tests/test_tools.py": ["/nexusmaker/tools.py"], "/nexusmaker/tests/test_NexusMakerAscertainedWords.py": ["/nexusmaker/__init__.py", "/nexusmaker/tests/test_NexusMaker.py"], "/nexusmaker/tests/test_regressions.py": ["/nexusmaker/__init__.py"], "/nexusmaker/__init__.py": ["/nexusmaker/CognateParser.py"], "/nexusmaker/tests/test_Complex.py": ["/nexusmaker/__init__.py"], "/nexusmaker/tests/test_Record.py": ["/nexusmaker/__init__.py"], "/nexusmaker/tests/test_CognateParser.py": ["/nexusmaker/__init__.py"], "/nexusmaker/CognateParser.py": ["/nexusmaker/tools.py"], "/nexusmaker/tests/test_Mayan.py": ["/nexusmaker/__init__.py"], "/nexusmaker/tests/test_NexusMaker.py": ["/nexusmaker/__init__.py"]} |
54,842 | LinguList/NexusMaker | refs/heads/master | /nexusmaker/tests/test_Complex.py | import unittest
from nexusmaker import Record, NexusMaker, NexusMakerAscertained
from nexusmaker import NexusMakerAscertainedWords
from nexusmaker import CognateParser
RECORDS = """
Aiwoo-501 132312 five vili 1
Aiwoo-501 133751 leg nyike 86
Aiwoo-501 133752 leg nuku 86
Aiwoo-501 208804 hand nyime 1,66
Aiwoo-501 208805 hand nyimä 1,66
Banoni-4 1075 leg rapinna
Banoni-4 250221 five ghinima 1
Banoni-4 4 hand numa- 1,64
Dehu-196 129281 five tripi 1
Dehu-196 196 hand wanakoim
Eton-1088 265408 five e-lim 1
Eton-1088 278627 leg tua-ŋ 95
Hiw-639 164951 hand mja- 1,78
Hiw-639 164952 leg ᶢʟoŋo- 17
Hiw-639 165135 five təβɔjimə 1
Iaai-471 125656 hand beñi- 14
Iaai-471 125657 hand HAND
Iaai-471 125659 leg ca
Iaai-471 125853 five baa|xaca
Iaai-471 125865 five thabyŋ
Lamogai-67 83796 five elmé 1
Lamogai-67 83881 hand mulǵu 45
Lamogai-67 83882 hand melsé 45
Lamogai-67 83883 hand milpí 45
Lamogai-67 83884 hand melép 45
Lamogai-67 83885 hand milpú 45
Lamogai-67 83886 hand meylá 45
Lamogai-67 83887 hand melsék 45
Lamogai-67 83942 leg kaip 1
Lamogai-67 83943 leg kaŋgú 1
"""
RECORDS = [r.split("\t") for r in RECORDS.split("\n") if len(r)]
COMPLEX_TESTDATA = [
Record(Language=r[0], Word=r[2], Item=r[3], Cognacy=r[4])
for r in RECORDS
]
EXPECTED_COGNATES = {
('five', '1'): {
'Aiwoo-501', 'Banoni-4', 'Dehu-196', 'Eton-1088', 'Hiw-639',
'Lamogai-67'
},
('leg', '86'): {'Aiwoo-501'},
('hand', '1'): {'Aiwoo-501', 'Banoni-4', 'Hiw-639'},
('hand', '64'): {'Banoni-4'},
('hand', '66'): {'Aiwoo-501'},
('leg', '95'): {'Eton-1088'},
('hand', '78'): {'Hiw-639'},
('leg', '17'): {'Hiw-639'},
('hand', '14'): {'Iaai-471'},
('hand', '45'): {'Lamogai-67'},
('leg', '1'): {'Lamogai-67'},
}
EXPECTED_UNIQUES = [
('leg', 'Banoni-4'),
('hand', 'Dehu-196'),
('leg', 'Iaai-471'),
('five', 'Iaai-471'),
]
class TestNexusMaker(unittest.TestCase):
model = NexusMaker
# number of cognate sets expected
expected_ncog = len(EXPECTED_COGNATES) + len(EXPECTED_UNIQUES)
# number of characters expected in the nexus file
expected_nchar = len(EXPECTED_COGNATES) + len(EXPECTED_UNIQUES)
def setUp(self):
self.maker = self.model(data=COMPLEX_TESTDATA)
self.nex = self.maker.make()
def test_languages(self):
self.assertEqual(self.maker.languages, {
'Aiwoo-501', 'Banoni-4', 'Dehu-196', 'Eton-1088', 'Hiw-639',
'Iaai-471', 'Lamogai-67'
})
def test_words(self):
self.assertEqual(self.maker.words, {'hand', 'leg', 'five'})
def test_ncognates(self):
self.assertEqual(len(self.maker.cognates), self.expected_ncog)
def test_cognate_sets(self): # pragma: no cover
errors = []
for ecog in EXPECTED_COGNATES:
if ecog not in self.maker.cognates:
errors.append("Missing %s" % (ecog, ))
elif self.maker.cognates.get(ecog, set()) != EXPECTED_COGNATES[ecog]:
errors.append("Cognate set %s incorrect %r != %r" % (
ecog,
EXPECTED_COGNATES[ecog],
self.maker.cognates.get(ecog, set())
))
if errors:
raise AssertionError("Errors: %s" % "\n".join(errors))
def test_uniques(self): # pragma: no cover
errors = []
obtained = [c for c in self.maker.cognates if 'u' in c[1]]
expected = {e: 0 for e in EXPECTED_UNIQUES}
# check what has been identified as unique
for cog in obtained:
if len(self.maker.cognates[cog]) != 1:
errors.append("Unique cognate %s should only have one member" % (cog, ))
# make key to look up EXPECTED_UNIQUES as (word, language)
key = (cog[0], list(self.maker.cognates[cog])[0])
# error on anything that is not expected
if key not in expected:
errors.append("%s unexpectedly seen as unique" % (key, ))
else:
expected[key] += 1
# the counts for each expected cognate should be max 1.
for e in expected:
if expected[e] != 1:
errors.append("Expected 1 cognate for %s, but got %d" % (e, expected[e]))
if errors:
raise AssertionError("Errors: %s" % "\n".join(errors))
def test_dehu_is_all_missing_for_leg(self):
for cog in [cog for cog in self.nex.data if cog.startswith('leg_')]:
assert self.nex.data[cog]['Dehu-196'] == '?'
def test_eton_is_all_missing_for_hand(self):
for cog in [cog for cog in self.nex.data if cog.startswith('hand_')]:
assert self.nex.data[cog]['Eton-1088'] == '?'
def test_only_one_unique_for_Iaai471(self):
iaai = 0
for cog in [cog for cog in self.nex.data if cog.startswith('five_u_')]:
present = [t for t in self.nex.data[cog] if self.nex.data[cog][t] == '1']
if present == ['Iaai-471']:
iaai += 1
if iaai != 1: # pragma: no cover
raise AssertionError("Should only have one unique site for Iaai-471-five")
def test_nexus_symbols(self):
assert sorted(self.nex.symbols) == sorted(['0', '1'])
def test_nexus_taxa(self):
self.assertEqual(self.maker.languages, self.nex.taxa)
def test_nexus_characters_expected_cognates(self):
for e in EXPECTED_COGNATES:
assert "_".join(e) in self.nex.characters
def test_nexus_characters_expected_uniques(self):
uniques = [
c for c in self.nex.characters if
CognateParser().is_unique_cognateset(c, labelled=True)
]
assert len(uniques) == len(EXPECTED_UNIQUES)
def test_nexus_nchar(self):
assert len(self.nex.characters) == self.expected_nchar
def test_entries_with_a_cognate_word_arenot_added_as_unique(self):
hand = [c for c in self.nex.characters if c.startswith('hand_')]
hand = [c for c in hand if CognateParser().is_unique_cognateset(c, labelled=True)]
assert len(hand) == 1, 'Only expecting one unique character for hand'
assert self.nex.data['hand_u_2']['Iaai-471'] in ('0', '?'), \
'Iaai-471 should not be unique for `hand`'
class TestNexusMakerAscertained(TestNexusMaker):
model = NexusMakerAscertained
expected_nchar = len(EXPECTED_COGNATES) + len(EXPECTED_UNIQUES) + 1
class TestNexusMakerAscertainedWords(TestNexusMaker):
model = NexusMakerAscertainedWords
expected_nchar = len(EXPECTED_COGNATES) + len(EXPECTED_UNIQUES) + 3
| {"/nexusmaker/tests/test_tools.py": ["/nexusmaker/tools.py"], "/nexusmaker/tests/test_NexusMakerAscertainedWords.py": ["/nexusmaker/__init__.py", "/nexusmaker/tests/test_NexusMaker.py"], "/nexusmaker/tests/test_regressions.py": ["/nexusmaker/__init__.py"], "/nexusmaker/__init__.py": ["/nexusmaker/CognateParser.py"], "/nexusmaker/tests/test_Complex.py": ["/nexusmaker/__init__.py"], "/nexusmaker/tests/test_Record.py": ["/nexusmaker/__init__.py"], "/nexusmaker/tests/test_CognateParser.py": ["/nexusmaker/__init__.py"], "/nexusmaker/CognateParser.py": ["/nexusmaker/tools.py"], "/nexusmaker/tests/test_Mayan.py": ["/nexusmaker/__init__.py"], "/nexusmaker/tests/test_NexusMaker.py": ["/nexusmaker/__init__.py"]} |
54,843 | LinguList/NexusMaker | refs/heads/master | /nexusmaker/tests/test_Record.py | import unittest
from nexusmaker import Record
class TestRecord(unittest.TestCase):
def test_simple(self):
r = Record(ID=1, WID=2, LID=3, Language='English', Word='Hand', Item='hand', Annotation='?', Cognacy=None, Loan="L")
assert r.ID == 1
assert r.WID == 2
assert r.LID == 3
assert r.Language == 'English'
assert r.Word == 'Hand'
assert r.Item == 'hand'
assert r.Annotation == '?'
assert r.Loan == "L"
assert r.Cognacy == None
def test_is_loan(self):
assert not Record(Loan="").is_loan
assert not Record(Loan=None).is_loan
assert Record(Loan="L").is_loan
assert Record(Loan="English").is_loan
assert Record(Loan=True).is_loan
assert Record(Loan="B").is_loan
assert Record(Loan="S").is_loan
assert Record(Loan="X").is_loan
assert Record(Loan="x").is_loan
def test_get_token(self):
r = Record(ID=1, WID=2, LID=3, Language='English', Word='Hand', Item='hand', Annotation='?', Cognacy=None, Loan="L")
assert r.get_taxon() == "English_3"
r = Record(ID=1, WID=2, LID=None, Language='English', Word='Hand', Item='hand', Annotation='?', Cognacy=None, Loan="L")
assert r.get_taxon() == "English"
| {"/nexusmaker/tests/test_tools.py": ["/nexusmaker/tools.py"], "/nexusmaker/tests/test_NexusMakerAscertainedWords.py": ["/nexusmaker/__init__.py", "/nexusmaker/tests/test_NexusMaker.py"], "/nexusmaker/tests/test_regressions.py": ["/nexusmaker/__init__.py"], "/nexusmaker/__init__.py": ["/nexusmaker/CognateParser.py"], "/nexusmaker/tests/test_Complex.py": ["/nexusmaker/__init__.py"], "/nexusmaker/tests/test_Record.py": ["/nexusmaker/__init__.py"], "/nexusmaker/tests/test_CognateParser.py": ["/nexusmaker/__init__.py"], "/nexusmaker/CognateParser.py": ["/nexusmaker/tools.py"], "/nexusmaker/tests/test_Mayan.py": ["/nexusmaker/__init__.py"], "/nexusmaker/tests/test_NexusMaker.py": ["/nexusmaker/__init__.py"]} |
54,844 | LinguList/NexusMaker | refs/heads/master | /nexusmaker/tests/test_CognateParser.py | import unittest
from nexusmaker import CognateParser
class TestCognateParser(unittest.TestCase):
def test_simple(self):
self.assertEqual(CognateParser().parse_cognate('1'), ['1'])
self.assertEqual(CognateParser().parse_cognate('10'), ['10'])
self.assertEqual(CognateParser().parse_cognate('100'), ['100'])
self.assertEqual(CognateParser().parse_cognate('111'), ['111'])
def test_subset(self):
self.assertEqual(CognateParser().parse_cognate('1,2'), ['1', '2'])
self.assertEqual(CognateParser().parse_cognate('1 , 2'), ['1', '2'])
def test_dubious(self):
self.assertEqual(CognateParser().parse_cognate('1?'), ['u_1'])
self.assertEqual(CognateParser().parse_cognate('?'), ['u_1'])
def test_dubious_subset(self):
self.assertEqual(CognateParser().parse_cognate('1, 2?'), ['1'])
self.assertEqual(CognateParser().parse_cognate('1?, 2'), ['2'])
self.assertEqual(CognateParser().parse_cognate('91?, 42'), ['42'])
self.assertEqual(CognateParser().parse_cognate('?, 31'), ['31'])
# note that both of these are dubious, should become a unique
# state instead
self.assertEqual(CognateParser().parse_cognate('1?, 2?'), ['u_1'])
def test_bad_entries_uniques(self):
# coded as x
self.assertEqual(CognateParser(uniques=True).parse_cognate('X'), ['u_1'])
self.assertEqual(CognateParser(uniques=True).parse_cognate('x'), ['u_1'])
def test_bad_entries_in_forms_uniques(self):
# coded as x
self.assertEqual(CognateParser(uniques=True).parse_cognate('X20'), ['u_1'])
self.assertEqual(CognateParser(uniques=True).parse_cognate('x20'), ['u_1'])
def test_bad_entries_nouniques(self):
self.assertEqual(CognateParser(uniques=False).parse_cognate('X'), [])
self.assertEqual(CognateParser(uniques=False).parse_cognate('x'), [])
def test_s_entries_uniques(self):
# entries that are in the wrong word (e.g. you sg. not you pl.)
self.assertEqual(CognateParser(uniques=True).parse_cognate('s'), ['u_1'])
def test_s_entries_nouniques(self):
# entries that are in the wrong word (e.g. you sg. not you pl.)
self.assertEqual(CognateParser(uniques=False).parse_cognate('s'), [])
def test_empty_entries(self):
with self.assertRaises(ValueError):
CognateParser(uniques=False).parse_cognate(',,')
def test_is_unique_cognateset(self):
self.assertEqual(CognateParser().is_unique_cognateset('u_1'), True)
self.assertEqual(
CognateParser().is_unique_cognateset('u_1', labelled=False), True
)
self.assertEqual(
CognateParser().is_unique_cognateset('hand_u_1', labelled=False), False
)
self.assertEqual(
CognateParser().is_unique_cognateset('hand_u_1', labelled=True), True
)
def test_add_unique(self):
CP = CognateParser()
self.assertEqual(CP.parse_cognate(''), ['u_1'])
self.assertEqual(CP.parse_cognate(''), ['u_2'])
self.assertEqual(CP.parse_cognate(''), ['u_3'])
self.assertEqual(CP.parse_cognate(''), ['u_4'])
def test_no_uniques(self):
CP = CognateParser(uniques=False)
self.assertEqual(CP.parse_cognate(''), [])
self.assertEqual(CP.parse_cognate(''), [])
self.assertEqual(CP.parse_cognate(''), [])
def test_dubious_with_no_strict(self):
self.assertEqual(CognateParser(strict=False).parse_cognate('1?'), ['1'])
self.assertEqual(CognateParser(strict=False).parse_cognate('1, 2?'), ['1', '2'])
def test_null(self):
self.assertEqual(CognateParser().parse_cognate(None), ['u_1'])
def test_bad_cog_int(self):
with self.assertRaises(ValueError):
CognateParser().parse_cognate(1)
def test_complicated_strict_unique(self):
CP = CognateParser(strict=True, uniques=True)
# # 3. right
# Maori katau 5, 40
# Maori matau 5
# South Island Maori tika
self.assertEqual(CP.parse_cognate('5, 40'), ['5', '40'])
self.assertEqual(CP.parse_cognate('5'), ['5'])
self.assertEqual(CP.parse_cognate(''), ['u_1'])
# # 8. turn
# Maori huri 15
# South Island Maori tahuli 15
# South Island Maori tahuri to turn, to turn around 15
self.assertEqual(CP.parse_cognate('15'), ['15'])
self.assertEqual(CP.parse_cognate('15'), ['15'])
self.assertEqual(CP.parse_cognate('15'), ['15'])
# # 20. to know
# Maori moohio 52
# South Island Maori matau 1
# South Island Maori mohio to know 52
# South Island Maori ara to know, to awake
self.assertEqual(CP.parse_cognate('52'), ['52'])
self.assertEqual(CP.parse_cognate('1'), ['1'])
self.assertEqual(CP.parse_cognate('52'), ['52'])
self.assertEqual(CP.parse_cognate(''), ["u_2"])
# # 36: to spit
# Maori tuha 19, 34?
# South Island Maori huare 18
# South Island Maori tuha to expectorate, to spit 19, 34?
self.assertEqual(CP.parse_cognate('19, 34?'), ['19'])
self.assertEqual(CP.parse_cognate('18'), ['18'])
self.assertEqual(CP.parse_cognate('19, 34?'), ['19'])
def test_complicated_nostrict_unique(self):
CP = CognateParser(strict=False, uniques=True)
# # 3. right
# Maori katau 5, 40
# Maori matau 5
# South Island Maori tika
self.assertEqual(CP.parse_cognate('5, 40'), ['5', '40'])
self.assertEqual(CP.parse_cognate('5'), ['5'])
self.assertEqual(CP.parse_cognate(''), ['u_1'])
# # 8. turn
# Maori huri 15
# South Island Maori tahuli 15
# South Island Maori tahuri to turn, to turn around 15
self.assertEqual(CP.parse_cognate('15'), ['15'])
self.assertEqual(CP.parse_cognate('15'), ['15'])
self.assertEqual(CP.parse_cognate('15'), ['15'])
# # 20. to know
# Maori moohio 52
# South Island Maori matau 1
# South Island Maori mohio to know 52
# South Island Maori ara to know, to awake
self.assertEqual(CP.parse_cognate('52'), ['52'])
self.assertEqual(CP.parse_cognate('1'), ['1'])
self.assertEqual(CP.parse_cognate('52'), ['52'])
self.assertEqual(CP.parse_cognate(''), ["u_2"])
# # 36: to spit
# Maori tuha 19, 34?
# South Island Maori huare 18
# South Island Maori tuha to expectorate, to spit 19, 34?
self.assertEqual(CP.parse_cognate('19, 34?'), ['19', '34'])
self.assertEqual(CP.parse_cognate('18'), ['18'])
self.assertEqual(CP.parse_cognate('19, 34?'), ['19', '34'])
def test_complicated_nostrict_nounique(self):
CP = CognateParser(strict=False, uniques=False)
# # 3. right
# Maori katau 5, 40
# Maori matau 5
# South Island Maori tika
self.assertEqual(CP.parse_cognate('5, 40'), ['5', '40'])
self.assertEqual(CP.parse_cognate('5'), ['5'])
self.assertEqual(CP.parse_cognate(''), [])
# # 8. turn
# Maori huri 15
# South Island Maori tahuli 15
# South Island Maori tahuri to turn, to turn around 15
self.assertEqual(CP.parse_cognate('15'), ['15'])
self.assertEqual(CP.parse_cognate('15'), ['15'])
self.assertEqual(CP.parse_cognate('15'), ['15'])
# # 20. to know
# Maori moohio 52
# South Island Maori matau 1
# South Island Maori mohio to know 52
# South Island Maori ara to know, to awake
self.assertEqual(CP.parse_cognate('52'), ['52'])
self.assertEqual(CP.parse_cognate('1'), ['1'])
self.assertEqual(CP.parse_cognate('52'), ['52'])
self.assertEqual(CP.parse_cognate(''), [])
# # 36: to spit
# Maori tuha 19, 34?
# South Island Maori huare 18
# South Island Maori tuha to expectorate, to spit 19, 34?
self.assertEqual(CP.parse_cognate('19, 34?'), ['19', '34'])
self.assertEqual(CP.parse_cognate('18'), ['18'])
self.assertEqual(CP.parse_cognate('19, 34?'), ['19', '34'])
def test_complicated_with_slash(self):
CP = CognateParser(strict=True, uniques=True)
self.assertEqual(CP.parse_cognate('53/54'), ['53', '54'])
def test_combined_cognate(self):
self.assertEqual(CognateParser().parse_cognate('1a'), ['1', '1a'])
self.assertEqual(CognateParser().parse_cognate('2b'), ['2', '2b'])
self.assertEqual(CognateParser().parse_cognate('3az'), ['3', '3az'])
self.assertEqual(CognateParser().parse_cognate('45c'), ['45', '45c'])
self.assertEqual(CognateParser().parse_cognate('1a,2b'), ['1', '1a', '2', '2b'])
self.assertEqual(CognateParser().parse_cognate('1,2a,3b'), ['1', '2', '2a', '3', '3b'])
def test_normalisation(self):
self.assertEqual(CognateParser().parse_cognate('1, 52'), ['1', '52'])
self.assertEqual(CognateParser().parse_cognate('52, 1'), ['1', '52'])
def test_trailing_dash(self):
with self.assertRaises(ValueError):
CognateParser().parse_cognate('1-')
def test_semicolon(self):
with self.assertRaises(ValueError):
CognateParser().parse_cognate('2, 63; 87')
| {"/nexusmaker/tests/test_tools.py": ["/nexusmaker/tools.py"], "/nexusmaker/tests/test_NexusMakerAscertainedWords.py": ["/nexusmaker/__init__.py", "/nexusmaker/tests/test_NexusMaker.py"], "/nexusmaker/tests/test_regressions.py": ["/nexusmaker/__init__.py"], "/nexusmaker/__init__.py": ["/nexusmaker/CognateParser.py"], "/nexusmaker/tests/test_Complex.py": ["/nexusmaker/__init__.py"], "/nexusmaker/tests/test_Record.py": ["/nexusmaker/__init__.py"], "/nexusmaker/tests/test_CognateParser.py": ["/nexusmaker/__init__.py"], "/nexusmaker/CognateParser.py": ["/nexusmaker/tools.py"], "/nexusmaker/tests/test_Mayan.py": ["/nexusmaker/__init__.py"], "/nexusmaker/tests/test_NexusMaker.py": ["/nexusmaker/__init__.py"]} |
54,845 | LinguList/NexusMaker | refs/heads/master | /nexusmaker/CognateParser.py | import re
from nexusmaker.tools import natsort
is_combined_cognate = re.compile(r"""(\d+)([a-z]+)""")
class CognateParser(object):
UNIQUE_IDENTIFIER = "u_"
def __init__(self, strict=True, uniques=True):
"""
Parses cognates.
- strict (default=True): remove dubious cognates (?)
- uniques (default=True): non-cognate items get unique states
"""
self.uniques = uniques
self.strict = strict
self.unique_id = 0
def is_unique_cognateset(self, cog, labelled=False):
if not labelled:
return str(cog).startswith(self.UNIQUE_IDENTIFIER)
else:
return "_%s" % self.UNIQUE_IDENTIFIER in str(cog)
def _split_combined_cognate(self, cognate):
if is_combined_cognate.match(cognate):
return [
is_combined_cognate.findall(cognate)[0][0],
cognate
]
return [cognate]
def get_next_unique(self):
if not self.uniques:
return []
self.unique_id = self.unique_id + 1
return ["%s%d" % (self.UNIQUE_IDENTIFIER, self.unique_id)]
def parse_cognate(self, value):
raw = value
if value is None:
return self.get_next_unique()
elif value == '':
return self.get_next_unique()
elif str(value).lower() == 's': # error
return self.get_next_unique()
elif 'x' in str(value).lower(): # error
return self.get_next_unique()
elif isinstance(value, str):
if value.startswith(","):
raise ValueError("Possible broken combined cognate %r" % raw)
if value.endswith("-"):
raise ValueError("Possible broken combined cognate %r" % raw)
elif ';' in value:
raise ValueError("Possible broken combined cognate %r" % raw)
value = value.replace('.', ',').replace("/", ",")
# parse out subcognates
value = [v.strip() for v in value.split(",")]
value = [self._split_combined_cognate(v) for v in value]
value = [item for sublist in value for item in sublist]
if self.strict:
# remove dubious cognates
value = [v for v in value if '?' not in v]
# exit if all are dubious, setting to unique state
if len(value) == 0:
return self.get_next_unique()
else:
value = [v.replace("?", "") for v in value]
# remove any empty things in the list
value = [v for v in value if len(v) > 0]
return natsort(value)
else:
raise ValueError("%s" % type(value))
| {"/nexusmaker/tests/test_tools.py": ["/nexusmaker/tools.py"], "/nexusmaker/tests/test_NexusMakerAscertainedWords.py": ["/nexusmaker/__init__.py", "/nexusmaker/tests/test_NexusMaker.py"], "/nexusmaker/tests/test_regressions.py": ["/nexusmaker/__init__.py"], "/nexusmaker/__init__.py": ["/nexusmaker/CognateParser.py"], "/nexusmaker/tests/test_Complex.py": ["/nexusmaker/__init__.py"], "/nexusmaker/tests/test_Record.py": ["/nexusmaker/__init__.py"], "/nexusmaker/tests/test_CognateParser.py": ["/nexusmaker/__init__.py"], "/nexusmaker/CognateParser.py": ["/nexusmaker/tools.py"], "/nexusmaker/tests/test_Mayan.py": ["/nexusmaker/__init__.py"], "/nexusmaker/tests/test_NexusMaker.py": ["/nexusmaker/__init__.py"]} |
54,846 | LinguList/NexusMaker | refs/heads/master | /nexusmaker/tests/test_Mayan.py | import unittest
from nexusmaker import Record, NexusMaker, NexusMakerAscertained
from nexusmaker import NexusMakerAscertainedWords
from nexusmaker import CognateParser
RECORDS = """
41551 030 Poqomchi 00004 Kaufman_1976 cloud suutZ' 2
41552 030 Poqomchi 00009 Campbell_1971b cloud suutZ' 2
41553 030 Poqomchi 00022 Mayers_ed_1966 cloud suutZ' 2
41554 030 Poqomchi 00027 Mayers_1960 cloud suutZ' 2
41555 030 Poqomchi 00061 Stoll_1884 cloud su'tZ 2
20569 003 Lacandon 00011 Fisher cloud muuyar 6c
20570 003 Lacandon 00085 Canger_1970 cloud muuyaar 6c
20571 003 Lacandon 00085 Canger_1970 cloud 'u-muuy^r-ir 6c
20572 003 Lacandon 00085 Canger_1970 cloud 'u-muuyaar-ir 6c
20573 003 Lacandon 00037 Andrade_1946 cloud muuyáh 6c
45362 011 Tzotzil 00024 Delgaty_1964 cloud but'ul 11
45363 011 Tzotzil 00024 Delgaty_1964 cloud toc 5
45364 011 Tzotzil 00024 Delgaty_1964 cloud stacel winajel 5
45365 011 Tzotzil 00036 Materials_on_the_Mayan_Languages_of_Mexico_1949b cloud toc 5
45366 011 Tzotzil 00034 Weathers_and_Weathers_1949 cloud toc 5
45367 011 Tzotzil 00061 Stoll_1884 cloud toc 5
25058 012 Tojolabal 00033 Jackson_and_Supple_1952 cloud ason 1
25059 012 Tojolabal 00053 Sapper_1897 cloud ason 1
25060 012 Tojolabal 00061 Stoll_1884 cloud asón 1
25061 012 Tojolabal 00063 Berendt_1870a cloud ason 1
47927 023 Tzutujil 00004 Kaufman_1976 cloud suutZ' 2
47928 023 Tzutujil 00009 Campbell_1971b cloud suutZ' 2
47929 023 Tzutujil 00051 Stoll_1901a cloud su'tZ 2
50387 023 Tzutujil 00086 Kaufman_2003 cloud maayuul 6
49008 028 Uspanteko 00004 Kaufman_1976 cloud su>utZ' 2
49009 028 Uspanteko 00009 Campbell_1971b cloud su>utZ' 2
49010 028 Uspanteko 00053 Sapper_1897 cloud su'tZ 2
49011 028 Uspanteko 00055 Stoll_1896 cloud su'tZ 2
49012 028 Uspanteko 00061 Stoll_1884 cloud su'tZ 2
13219 005 Itza 00007 Schumann_1971 cloud muyal 6c
13220 005 Itza 00054 Armas_1897a cloud muyal 6c
13221 005 Itza 00061 Stoll_1884 cloud muyál 6c
13222 005 Itza 00070 Baezo cloud muyal 6c
13223 005 Itza 00069 Baezo_1832 cloud muyal 6c
32925 022 Kaqchikel 00004 Kaufman_1976 cloud sutZ' 2
32926 022 Kaqchikel 00009 Campbell_1971b cloud suutZ' 2
32927 022 Kaqchikel 00022 Mayers_ed_1966 cloud sutZ' 2
32928 022 Kaqchikel 00061 Stoll_1884 cloud su'tZ 2
50391 022 Kaqchikel 00086 Kaufman_2003 cloud moyan 6e
08999 013 Chuj 00004 Kaufman_1976 cloud 'asun 1
09000 013 Chuj 00022 Mayers_ed_1966 cloud asun 1
09001 013 Chuj 00037 Andrade_1946 cloud asún 1
09002 013 Chuj 00060 Stoll_1887 cloud asun 1
09003 013 Chuj 00060 Stoll_1887 cloud taa u_8341 X
22444 017 Mocho 00016 Schumann_1969 cloud 'asonG 1
22445 017 Mocho 00045 Sapper_1912 cloud músan 9
22446 017 Mocho 00053 Sapper_1897 cloud musan 9
50385 017 Mocho 00086 Kaufman_2003 cloud ma:yu:l 6
42773 029 Poqomam 00004 Kaufman_1976 cloud suutZ' 2
42774 029 Poqomam 00009 Campbell_1971b cloud suutZ' 2
42775 029 Poqomam 00022 Mayers_ed_1966 cloud co' sutZ' 2
42776 029 Poqomam 00027 Mayers_1960 cloud suutZ' 2
42777 029 Poqomam 00047 Sapper_1907 cloud su'tZ 2
42778 029 Poqomam 00053 Sapper_1897 cloud su'tZ 2
42779 029 Poqomam 00061 Stoll_1884 cloud su'tZ 2
34124 002 Chicomuceltec 00041 Termer_1930 cloud siál 4
34125 002 Chicomuceltec 00045 Sapper_1912 cloud sial 4
34126 002 Chicomuceltec 00053 Sapper_1897 cloud sial 4
19167 031 Qeqchi 00004 Kaufman_1976 cloud čoq 5
19168 031 Qeqchi 00004 Kaufman_1976 cloud čoql 5
19169 031 Qeqchi 00009 Campbell_1971b cloud čooql 5
19170 031 Qeqchi 00022 Mayers_ed_1966 cloud čoq 5
19171 031 Qeqchi 00053 Sapper_1897 cloud čoq 5
19172 031 Qeqchi 00061 Stoll_1884 cloud čoq 5
04043 008 Chol 00002 Aulie_and_Aulie_1978 cloud tyocal 5
04044 008 Chol 00084 Attinasi_1973 cloud m^c-^l u_11394 M
04045 008 Chol 00084 Attinasi_1973 cloud toc-al-i-lal 5
04046 008 Chol 00084 Attinasi_1973 cloud toc-al 5
04047 008 Chol 00083 Schumann cloud tocal 5
04048 008 Chol 00036 Materials_on_the_Mayan_Languages_of_Mexico_1949b cloud tocal 5
04049 008 Chol 00047 Sapper_1907 cloud tyocal 5
04050 008 Chol 00053 Sapper_1897 cloud tyocal 5
04051 008 Chol 00061 Stoll_1884 cloud tiocál 5
04052 008 Chol 00075 Moran cloud muyal u_11395 X
44265 010 Tzeltal 00023 Slocum_and_Gerdel_1965 cloud tocal 5
44266 010 Tzeltal 00053 Sapper_1897 cloud tojcal 5
44267 010 Tzeltal 00061 Stoll_1884 cloud tojcál 5
43969 026 Sakapulteko 00004 Kaufman_1976 cloud suutZ' 2
35003 009 Chontal 00061 Stoll_1884 cloud buclá 8
35861 007 Chorti 00004 Kaufman_1976 cloud tocar 5
35862 007 Chorti 00022 Mayers_ed_1966 cloud tocar 5
35863 007 Chorti 00047 Sapper_1907 cloud tocar 5
35864 007 Chorti 00053 Sapper_1897 cloud tocar 5
35865 007 Chorti 00068 Galindo_1834 cloud toc'ar 5
50388 007 Chorti 00086 Kaufman_2003 cloud mayuy u_16438 2
01564 020 Awakateko 00004 Kaufman_1976 cloud sbaaq' 3
01565 020 Awakateko 00022 Mayers_ed_1966 cloud sbaq' 3
01566 020 Awakateko 00037 Andrade_1946 cloud sp'aq' 3
01567 020 Awakateko 00053 Sapper_1897 cloud ciá u_16875 X
01568 020 Awakateko 00060 Stoll_1887 cloud sba'q 3
01569 020 Awakateko 00061 Stoll_1884 cloud sba'q 3
29690 004 Yucatec 00001 Bolles_1981 cloud muyál 6c
29691 004 Yucatec 00012 Fisher_and_Vermont-Salas cloud mu>uyal u_17820 L
29692 004 Yucatec 00052 Zavala_and_Medina_1898 cloud muyal 6c
29693 004 Yucatec 00061 Stoll_1884 cloud muyál 6c
29694 004 Yucatec 00073 Beltran cloud muyal 6c
29695 004 Yucatec 00076 Ticul cloud muyal 6c
37033 019 Mam 00004 Kaufman_1976 cloud muuj 10
37034 019 Mam 00016 Schumann_1969 cloud muj 10
37035 019 Mam 00022 Mayers_ed_1966 cloud muuj 10
37036 019 Mam 00044 Jaramilo_1918 cloud muj 10
37037 019 Mam 00045 Sapper_1912 cloud múaj 10
37038 019 Mam 00053 Sapper_1897 cloud muaj 10
37039 019 Mam 00060 Stoll_1887 cloud muj 10
37040 019 Mam 00061 Stoll_1884 cloud muj 10
14792 021 Ixil 00004 Kaufman_1976 cloud sutZ' 2
14793 021 Ixil 00022 Mayers_ed_1966 cloud sutZ' 2
14794 021 Ixil 00060 Stoll_1887 cloud su'tZ 2
14795 021 Ixil 00061 Stoll_1884 cloud su'tZ 2
53370 032 Cholti 00094 Gates_1935 cloud muyal 6
51398 033 Classical_Maya 00087 Boot_2002 cloud muyal 6c
51399 033 Classical_Maya 00087 Boot_2002 cloud tok 5
16282 014 Jakalteko 00004 Kaufman_1976 cloud 'asun 1
16283 014 Jakalteko 00004 Kaufman_1976 cloud moyan 6
16284 014 Jakalteko 00022 Mayers_ed_1966 cloud moyan 6
16285 014 Jakalteko 00039 La_Farge_and_Byers_1931 cloud asun 1
16286 014 Jakalteko 00045 Sapper_1912 cloud múyan 6
16287 014 Jakalteko 00053 Sapper_1897 cloud muyan 6
16288 014 Jakalteko 00060 Stoll_1887 cloud asun 1
00330 018 Teco 00004 Kaufman_1976 cloud muuj 10
00331 018 Teco 00017 Kaufman_1969a cloud muuj 10
18267 016 Qanjobal 00004 Kaufman_1976 cloud 'asun 1
18268 016 Qanjobal 00037 Andrade_1946 cloud sutZ'án 2b
18269 016 Qanjobal 00037 Andrade_1946 cloud asún 1
51765 035 Colonial_Yucatec 00077 Vienna_Dictionary_1600 cloud buyul 12
51766 035 Colonial_Yucatec 00077 Vienna_Dictionary_1600 cloud muyal 6c
00943 015 Akateco 00004 Kaufman_1976 cloud 'asun 1
50390 015 Akateco 00086 Kaufman_2003 cloud moyan 6d
51611 034 Colonial_Cakchiquel 00088 Coto_Thomas_de_1647 cloud suq u_24152 B
51612 034 Colonial_Cakchiquel 00088 Coto_Thomas_de_1647 cloud moy 6e
44071 027 Sipakapeno 00004 Kaufman_1976 cloud muuj 10
10419 001 Huastec 00029 Larsen_1955 cloud tocou 5b
10420 001 Huastec 00056 Lorenzana_1896a cloud tocóu 5b
10421 001 Huastec 00058 Alejandre_1890 cloud tocob 5b
10422 001 Huastec 00061 Stoll_1884 cloud tocób 5b
10423 001 Huastec 00072 Tapia_Zanteno_1767 cloud tocob 5b
10424 001 Huastec 00082 Tapia_Zenteno_1747 cloud tocob 5b
00030 025 Achi 00022 Mayers_ed_1966 cloud sutZ' 2
53559 037 Tuzanteco 00086 Kaufman_2003 cloud 7aso:n 1
40164 006 Mopan 00003 Andrade_1977 cloud muyál 6c
40165 006 Mopan 00004 Kaufman_1976 cloud muyal 6c
40166 006 Mopan 00022 Mayers_ed_1966 cloud muyal 6c
40167 006 Mopan 00026 Ulrich_and_Ulrich cloud muyal 6c
23136 024 Kiche 00004 Kaufman_1976 cloud suutZ' 2
23137 024 Kiche 00009 Campbell_1971b cloud suutZ' 2
23138 024 Kiche 00022 Mayers_ed_1966 cloud sutZ' 2
23139 024 Kiche 00061 Stoll_1884 cloud su'tZ 2
50386 024 Kiche 00086 Kaufman_2003 cloud mayuul 6
50389 024 Kiche 00086 Kaufman_2003 cloud mayuy 6b
53557 036 Popti 00086 Kaufman_2003 cloud xhmoyxi 6d
53558 036 Popti 00086 Kaufman_2003 cloud asun 1
"""
HEADER = ['ID', "LID", "Language", "SID", "Source", "Word", "Item", "Cognacy", "Loan"]
COMPLEX_TESTDATA = [
Record(**dict(zip(HEADER, r.split('\t')))) for r in RECORDS.split("\n") if len(r.strip())
]
EXPECTED_COGNATES = {
('cloud', '1'): {
'Tojolabal_012', 'Chuj_013', 'Mocho_017', 'Jakalteko_014', 'Qanjobal_016',
'Akateco_015', 'Tuzanteco_037', 'Popti_036'
},
('cloud', '2'): {
'Achi_025', 'Kaqchikel_022', 'Poqomam_029', 'Sakapulteko_026', 'Ixil_021',
'Kiche_024', 'Poqomchi_030', 'Tzutujil_023', 'Uspanteko_028'
},
('cloud', '2b'): {'Qanjobal_016'},
('cloud', '3'): {'Awakateko_020'},
('cloud', '4'): {'Chicomuceltec_002'},
('cloud', '5'): {
'Tzeltal_010', 'Chorti_007', 'Tzotzil_011', 'Qeqchi_031', 'Chol_008',
'Classical_Maya_033',
},
('cloud', '5b'): {'Huastec_001'},
('cloud', '6'): {
'Jakalteko_014', 'Tzutujil_023', 'Mocho_017', 'Cholti_032', 'Kiche_024',
},
('cloud', '6b'): {'Kiche_024'},
('cloud', '6c'): {
'Lacandon_003', 'Itza_005', 'Yucatec_004', 'Classical_Maya_033',
'Colonial_Yucatec_035', 'Mopan_006'
},
('cloud', '6d'): {'Akateco_015', 'Popti_036'},
('cloud', '6e'): {'Kaqchikel_022', 'Colonial_Cakchiquel_034'},
('cloud', '8'): {'Chontal_009'},
('cloud', '9'): {'Mocho_017'},
('cloud', '10'): {'Mam_019', 'Teco_018', 'Sipakapeno_027'},
('cloud', '11'): {'Tzotzil_011'},
('cloud', '12'): {'Colonial_Yucatec_035'},
}
# COMBINED COGNATE SETS
EXPECTED_COGNATES[('cloud', '2')] = EXPECTED_COGNATES[('cloud', '2')] | EXPECTED_COGNATES[('cloud', '2b')]
EXPECTED_COGNATES[('cloud', '5')] = EXPECTED_COGNATES[('cloud', '5')] | EXPECTED_COGNATES[('cloud', '5b')]
EXPECTED_COGNATES[('cloud', '6')] = EXPECTED_COGNATES[('cloud', '6')] | EXPECTED_COGNATES[('cloud', '6b')]
EXPECTED_COGNATES[('cloud', '6')] = EXPECTED_COGNATES[('cloud', '6')] | EXPECTED_COGNATES[('cloud', '6c')]
EXPECTED_COGNATES[('cloud', '6')] = EXPECTED_COGNATES[('cloud', '6')] | EXPECTED_COGNATES[('cloud', '6d')]
EXPECTED_COGNATES[('cloud', '6')] = EXPECTED_COGNATES[('cloud', '6')] | EXPECTED_COGNATES[('cloud', '6e')]
EXPECTED_UNIQUES = []
class TestNexusMaker(unittest.TestCase):
    """Validate NexusMaker output against hand-checked Mayan 'cloud' cognates."""
    model = NexusMaker
    # number of cognate sets expected
    expected_ncog = len(EXPECTED_COGNATES) + len(EXPECTED_UNIQUES)
    # number of characters expected in the nexus file
    expected_nchar = len(EXPECTED_COGNATES) + len(EXPECTED_UNIQUES)

    def setUp(self):
        self.maker = self.model(data=COMPLEX_TESTDATA)
        self.nex = self.maker.make()

    def test_languages(self):
        expected_languages = {
            'Achi_025', 'Akateco_015', 'Awakateko_020', 'Chicomuceltec_002',
            'Chol_008', 'Cholti_032', 'Chontal_009', 'Chorti_007', 'Chuj_013',
            'Classical_Maya_033', 'Colonial_Cakchiquel_034',
            'Colonial_Yucatec_035', 'Huastec_001', 'Itza_005', 'Ixil_021',
            'Jakalteko_014', 'Kaqchikel_022', 'Kiche_024', 'Lacandon_003',
            'Mam_019', 'Mocho_017', 'Mopan_006', 'Poqomam_029',
            'Poqomchi_030', 'Popti_036', 'Qanjobal_016', 'Qeqchi_031',
            'Sakapulteko_026', 'Sipakapeno_027', 'Teco_018', 'Tojolabal_012',
            'Tuzanteco_037', 'Tzeltal_010', 'Tzotzil_011', 'Tzutujil_023',
            'Uspanteko_028', 'Yucatec_004',
        }
        self.assertEqual(self.maker.languages, expected_languages)

    def test_words(self):
        self.assertEqual(self.maker.words, {'cloud'})

    def test_ncognates(self):
        self.assertEqual(len(self.maker.cognates), self.expected_ncog)

    def test_cognate_sets(self):  # pragma: no cover
        problems = []
        for key, expected in EXPECTED_COGNATES.items():
            if key not in self.maker.cognates:
                problems.append("Missing %s" % (key, ))
            elif self.maker.cognates.get(key, set()) != expected:
                problems.append("Cognate set %s incorrect %r != %r" % (
                    key,
                    expected,
                    self.maker.cognates.get(key, set())
                ))
        if problems:
            raise AssertionError("Errors: %s" % "\n".join(problems))

    def test_uniques(self):  # pragma: no cover
        problems = []
        # the counts for each expected unique cognate, keyed (word, language)
        seen_counts = {expected: 0 for expected in EXPECTED_UNIQUES}
        # every cognate label containing 'u' marks a unique (singleton) set
        for cog in [c for c in self.maker.cognates if 'u' in c[1]]:
            members = self.maker.cognates[cog]
            if len(members) != 1:
                problems.append("Unique cognate %s should only have one member" % (cog, ))
            # make key to look up EXPECTED_UNIQUES as (word, language)
            key = (cog[0], list(members)[0])
            if key not in seen_counts:
                problems.append("%s unexpectedly seen as unique" % (key, ))
            else:
                seen_counts[key] += 1
        # each expected unique must have been seen exactly once
        for key, count in seen_counts.items():
            if count != 1:
                problems.append("Expected 1 cognate for %s, but got %d" % (key, count))
        if problems:
            raise AssertionError("Errors: %s" % "\n".join(problems))

    def test_nexus_symbols(self):
        assert sorted(self.nex.symbols) == ['0', '1']

    def test_nexus_taxa(self):
        self.assertEqual(self.nex.taxa, self.maker.languages)

    def test_nexus_characters_expected_cognates(self):
        characters = self.nex.characters
        for word, cog in EXPECTED_COGNATES:
            assert "_".join((word, cog)) in characters

    def test_nexus_characters_expected_uniques(self):
        n_uniques = sum(
            1 for c in self.nex.characters
            if CognateParser().is_unique_cognateset(c, labelled=True)
        )
        assert n_uniques == len(EXPECTED_UNIQUES)

    def test_nexus_nchar(self):
        assert len(self.nex.characters) == self.expected_nchar
class TestNexusMakerAscertained(TestNexusMaker):
    """Re-runs the Mayan checks with a single overall ascertainment character."""
    model = NexusMakerAscertained
    # one extra character for the ascertainment site
    expected_nchar = len(EXPECTED_COGNATES) + len(EXPECTED_UNIQUES) + 1
class TestNexusMakerAscertainedWords(TestNexusMaker):
    """Re-runs the Mayan checks with per-word ascertainment characters."""
    model = NexusMakerAscertainedWords
    # this dataset has exactly one word ('cloud'), so per-word
    # ascertainment adds exactly one extra character
    expected_nchar = len(EXPECTED_COGNATES) + len(EXPECTED_UNIQUES) + 1
| {"/nexusmaker/tests/test_tools.py": ["/nexusmaker/tools.py"], "/nexusmaker/tests/test_NexusMakerAscertainedWords.py": ["/nexusmaker/__init__.py", "/nexusmaker/tests/test_NexusMaker.py"], "/nexusmaker/tests/test_regressions.py": ["/nexusmaker/__init__.py"], "/nexusmaker/__init__.py": ["/nexusmaker/CognateParser.py"], "/nexusmaker/tests/test_Complex.py": ["/nexusmaker/__init__.py"], "/nexusmaker/tests/test_Record.py": ["/nexusmaker/__init__.py"], "/nexusmaker/tests/test_CognateParser.py": ["/nexusmaker/__init__.py"], "/nexusmaker/CognateParser.py": ["/nexusmaker/tools.py"], "/nexusmaker/tests/test_Mayan.py": ["/nexusmaker/__init__.py"], "/nexusmaker/tests/test_NexusMaker.py": ["/nexusmaker/__init__.py"]} |
54,847 | LinguList/NexusMaker | refs/heads/master | /nexusmaker/tests/test_NexusMaker.py | import sys
import unittest
from nexusmaker import Record
from nexusmaker import NexusMaker, NexusMakerAscertained, NexusMakerAscertainedWords
TESTDATA = [
Record(Language="A", Word="eye", Item="", Cognacy="1"),
Record(Language="A", Word="leg", Item="", Cognacy="1"),
Record(Language="A", Word="arm", Item="", Cognacy="1"),
Record(Language="B", Word="eye", Item="", Cognacy="1"),
Record(Language="B", Word="leg", Item="", Cognacy="2"),
Record(Language="B", Word="arm", Item="", Cognacy="2"),
Record(Language="C", Word="eye", Item="", Cognacy="1"),
# No Record for C 'leg'
Record(Language="C", Word="arm", Item="", Cognacy="3"),
Record(Language="D", Word="eye", Item="", Cognacy="1", Loan=True),
Record(Language="D", Word="leg", Item="", Cognacy="1"),
Record(Language="D", Word="leg", Item="", Cognacy="2"),
Record(Language="D", Word="arm", Item="", Cognacy="2,3"),
]
class TestNexusMakerInternals(unittest.TestCase):
    """Input-validation and internal-invariant checks for NexusMaker."""

    def test_error_on_non_record(self):
        # anything that is not a Record instance must be rejected
        with self.assertRaises(ValueError):
            NexusMaker(['1'])

    def test_error_on_bad_record(self):
        # a Record must carry both a Language and a Word
        no_language = Record(Word="leg", Item="", Cognacy="2")
        with self.assertRaises(ValueError):
            NexusMaker([no_language])
        no_word = Record(Language="French", Item="", Cognacy="2")
        with self.assertRaises(ValueError):
            NexusMaker([no_word])

    def test_error_on_cognates_with_loans(self):
        """Test that we generate an error if a loan word enters .cognates()"""
        maker = NexusMaker(TESTDATA)
        maker.data.append(
            Record(Language="D", Word="eye", Item="", Cognacy="1", Loan=True))
        with self.assertRaises(ValueError):
            maker.cognates

    def test_error_on_make_with_uniques_bigger_than_one(self):
        """
        Test that we generate an error in .make if a unique cognate set contains
        more than one language.
        """
        maker = NexusMaker(TESTDATA)
        maker.cognates  # prime the cached cognate sets
        maker._cognates[('test', 'u_1')] = ["A", "B"]
        with self.assertRaises(AssertionError):
            maker.make()
class TestNexusMaker(unittest.TestCase):
    """End-to-end checks of the nexus built from the small TESTDATA fixture."""
    model = NexusMaker

    def setUp(self):
        self.maker = self.model(data=TESTDATA)
        self.nex = self.maker.make()

    def _check_states(self, character, states):
        # Assert every language carries the given binary state for `character`.
        for language, state in states.items():
            assert self.nex.data[character][language] == state

    def test_languages(self):
        self.assertEqual(self.maker.languages, {'D', 'C', 'B', 'A'})

    def test_words(self):
        self.assertEqual(self.maker.words, {'arm', 'leg', 'eye'})

    def test_nsites(self):
        assert len(self.nex.data.keys()) == 6

    def test_cognate_sets(self):
        for cognate_set in [
                ('eye', '1'),
                ('leg', '1'), ('leg', '2'),
                ('arm', '1'), ('arm', '2'), ('arm', '3')]:
            assert cognate_set in self.maker.cognates

    def test_is_missing_for_word(self):
        # every (language, word) pair with at least one usable record
        for language, word in [
                ('A', 'eye'), ('A', 'leg'), ('A', 'arm'),
                ('B', 'eye'), ('B', 'leg'), ('B', 'arm'),
                ('C', 'eye'), ('C', 'arm'),
                ('D', 'leg'), ('D', 'arm')]:
            assert self.maker._is_missing_for_word(language, word) == False
        assert self.maker._is_missing_for_word('C', 'leg') == True, "Should be missing 'leg' for Language 'C'"
        assert self.maker._is_missing_for_word('D', 'eye') == True, "Should be missing 'eye' for Language 'D' (loan)"

    def test_eye_1(self):
        self._check_states('eye_1', {'A': '1', 'B': '1', 'C': '1', 'D': '?'})

    def test_leg_1(self):
        self._check_states('leg_1', {'A': '1', 'B': '0', 'C': '?', 'D': '1'})

    def test_leg_2(self):
        self._check_states('leg_2', {'A': '0', 'B': '1', 'C': '?', 'D': '1'})

    def test_arm_1(self):
        self._check_states('arm_1', {'A': '1', 'B': '0', 'C': '0', 'D': '0'})

    def test_arm_2(self):
        self._check_states('arm_2', {'A': '0', 'B': '1', 'C': '0', 'D': '1'})

    def test_arm_3(self):
        self._check_states('arm_3', {'A': '0', 'B': '0', 'C': '1', 'D': '1'})

    def test_write(self):
        written = self.maker.write()
        assert written.startswith("#NEXUS")
        for token in ('NTAX=4', 'CHARSTATELABELS', 'MATRIX'):
            assert token in written
class TestNexusMakerAscertained(TestNexusMaker):
    """Repeat the TestNexusMaker checks with an overall ascertainment site."""

    def setUp(self):  # override parent method
        self.maker = NexusMakerAscertained(data=TESTDATA)
        self.nex = self.maker.make()

    def test_nsites(self):
        # one extra site for the ascertainment column
        assert len(self.nex.data.keys()) == 7

    def test_ascertainment_column(self):
        label = self.maker.ASCERTAINMENT_LABEL
        assert label in self.nex.data
        column = self.nex.data[label]
        for taxon in column:
            assert column[taxon] == '0'

    def test_error_on_multiple_ascertainment_sites(self):
        # adding the ascertainment column a second time must fail
        with self.assertRaises(ValueError):
            self.maker._add_ascertainment(self.nex)
| {"/nexusmaker/tests/test_tools.py": ["/nexusmaker/tools.py"], "/nexusmaker/tests/test_NexusMakerAscertainedWords.py": ["/nexusmaker/__init__.py", "/nexusmaker/tests/test_NexusMaker.py"], "/nexusmaker/tests/test_regressions.py": ["/nexusmaker/__init__.py"], "/nexusmaker/__init__.py": ["/nexusmaker/CognateParser.py"], "/nexusmaker/tests/test_Complex.py": ["/nexusmaker/__init__.py"], "/nexusmaker/tests/test_Record.py": ["/nexusmaker/__init__.py"], "/nexusmaker/tests/test_CognateParser.py": ["/nexusmaker/__init__.py"], "/nexusmaker/CognateParser.py": ["/nexusmaker/tools.py"], "/nexusmaker/tests/test_Mayan.py": ["/nexusmaker/__init__.py"], "/nexusmaker/tests/test_NexusMaker.py": ["/nexusmaker/__init__.py"]} |
54,853 | GulshanArora7/hello-world-k8s-python | refs/heads/master | /test_application.py | #!/usr/bin/env python3
import unittest
import hello_world
class TestHello(unittest.TestCase):
    """Exercise the two HTTP endpoints of the hello_world Flask app."""

    def setUp(self):
        hello_world.app.testing = True
        self.app = hello_world.app.test_client()

    def test_hello(self):
        response = self.app.get('/')
        self.assertEqual(response.status, '200 OK')
        self.assertEqual(response.data, b'Hello World!\n')

    def test_healthcheck(self):
        response = self.app.get('/health')
        self.assertEqual(response.status, '200 OK')
        self.assertEqual(response.data, b'OK')
if __name__ == '__main__':
    # Allow running this test module directly: python test_application.py
    unittest.main()
| {"/test_application.py": ["/hello_world.py"]} |
54,854 | GulshanArora7/hello-world-k8s-python | refs/heads/master | /hello_world.py | #!/usr/bin/python3
import os
from flask import Flask
app = Flask(__name__)
@app.route('/')
def hello_world():
    """Return a greeting for the TARGET environment variable ('World' default)."""
    name = os.environ.get('TARGET', 'World')
    return 'Hello {}!\n'.format(name)
@app.route('/health')
def health_check():
    """Health-check endpoint; returns the literal body 'OK'."""
    return 'OK'
if __name__ == "__main__":
    # Serve on all interfaces; PORT env var overrides the default port 9090.
    # NOTE(review): use_reloader=True with debug=False is unusual -- the
    # reloader spawns a watcher process; confirm this is intended outside dev.
    app.run(debug=False,use_reloader=True,host='0.0.0.0',port=int(os.environ.get('PORT', 9090)))
| {"/test_application.py": ["/hello_world.py"]} |
54,855 | PhirePhly/aprs | refs/heads/master | /tests/test_util.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Python APRS Module Utility Function Tests."""
import logging
import logging.handlers
import unittest
from .context import aprs
from . import constants
__author__ = 'Greg Albrecht W2GMD <oss@undef.net>'
__license__ = 'Apache License, Version 2.0'
__copyright__ = 'Copyright 2016 Orion Labs, Inc. and Contributors'
class APRSUtilTestCase(unittest.TestCase):  # pylint: disable=R0904
    """Tests for Python APRS Utils."""

    _logger = logging.getLogger(__name__)
    if not _logger.handlers:
        _logger.setLevel(aprs.LOG_LEVEL)
        _console_handler = logging.StreamHandler()
        _console_handler.setLevel(aprs.LOG_LEVEL)
        _console_handler.setFormatter(aprs.LOG_FORMAT)
        _logger.addHandler(_console_handler)
        _logger.propagate = False

    def setUp(self):  # pylint: disable=C0103
        """Open the sample-frame fixture and keep its first frame handy."""
        self.test_frames = open(constants.TEST_FRAMES, 'r')
        self.test_frame = self.test_frames.readlines()[0].strip()

    def tearDown(self):  # pylint: disable=C0103
        """Close the fixture file."""
        self.test_frames.close()

    def test_valid_callsign_valid(self):
        """
        Every entry in VALID_CALLSIGNS must pass `aprs.valid_callsign()`.
        """
        for callsign in constants.VALID_CALLSIGNS:
            self.assertTrue(
                aprs.valid_callsign(callsign),
                "%s is a valid call" % callsign)

    def test_valid_callsign_invalid(self):
        """
        Every entry in INVALID_CALLSIGNS must fail `aprs.valid_callsign()`.
        """
        for callsign in constants.INVALID_CALLSIGNS:
            self.assertFalse(
                aprs.valid_callsign(callsign),
                "%s is an invalid call" % callsign)
if __name__ == '__main__':
    # Allow running this test module directly.
    unittest.main()
| {"/tests/test_util.py": ["/tests/context.py", "/tests/__init__.py"], "/tests/test_aprsframe.py": ["/tests/context.py", "/tests/__init__.py"]} |
54,856 | PhirePhly/aprs | refs/heads/master | /tests/test_aprsframe.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Python APRS Module APRS Frame Tests."""
import logging
import logging.handlers
import random
import unittest
from .context import aprs
from . import constants
__author__ = 'Greg Albrecht W2GMD <oss@undef.net>'
__license__ = 'Apache License, Version 2.0'
__copyright__ = 'Copyright 2016 Orion Labs, Inc. and Contributors'
class FrameTestCase(unittest.TestCase):  # pylint: disable=R0904
    """Tests for `aprs.Frame`."""

    _logger = logging.getLogger(__name__)
    if not _logger.handlers:
        # Attach a console handler only once per process.
        _logger.setLevel(aprs.LOG_LEVEL)
        _console_handler = logging.StreamHandler()
        _console_handler.setLevel(aprs.LOG_LEVEL)
        _console_handler.setFormatter(aprs.LOG_FORMAT)
        _logger.addHandler(_console_handler)
        _logger.propagate = False

    @classmethod
    def random(cls, length=8, alphabet=constants.ALPHANUM):
        """
        Generates a random string for test cases.

        :param length: Length of string to generate.
        :param alphabet: Alphabet to use to create string.
        :type length: int
        :type alphabet: str
        """
        # NOTE(review): `xrange` is Python 2 only -- this module (like the
        # `.decode('hex')` usage below) will not run under Python 3.
        return ''.join(random.choice(alphabet) for _ in xrange(length))

    def setUp(self):  # pylint: disable=C0103
        """Setup."""
        self.test_frames = open(constants.TEST_FRAMES, 'r')
        self.test_frame = self.test_frames.readlines()[0].strip()
        # Fake callsign: [KWN] + digit + 3 letters + '-' + SSID 1-9.
        self.fake_callsign = ''.join([
            self.random(1, 'KWN'),
            self.random(1, constants.NUMBERS),
            self.random(3, constants.ALPHABET),
            '-',
            self.random(1, constants.POSITIVE_NUMBERS)
        ])
        # Real callsign with a random non-zero SSID.
        self.real_callsign = '-'.join(
            ['W2GMD', self.random(1, constants.POSITIVE_NUMBERS)])
        self._logger.debug(
            "fake_callsign=%s real_callsign=%s",
            self.fake_callsign,
            self.real_callsign
        )

    def tearDown(self):  # pylint: disable=C0103
        """Teardown."""
        self.test_frames.close()

    def test_format_aprs_frame(self):
        """
        Tests formatting an APRS frame-as-string from an APRS frame-as-dict
        using `aprs.util.format_aprs_frame()`.
        """
        frame = "%s>%s,WIDE1-1:>test_format_aprs_frame" % \
            (self.real_callsign, self.fake_callsign)
        formatted_frame = aprs.Frame(frame)
        # Round-trip: parsing then str() must reproduce the input exactly.
        self.assertEqual(str(formatted_frame), frame)

    def test_decode_aprs_ascii_frame(self):
        """
        Tests creating an Frame Object from an APRS ASCII Frame
        using `aprs.Frame`.
        """
        ascii_frame = (
            "%s>APOTC1,WIDE1-1,WIDE2-1:!3745.94N/12228.05W>118/010/"
            "A=000269 http://w2gmd.org/ Twitter: @ampledata" %
            self.real_callsign)
        aprs_frame = aprs.Frame(ascii_frame)
        # Check round-trip plus each decomposed field.
        self.assertEqual(str(aprs_frame), ascii_frame)
        self.assertEqual(str(aprs_frame.source), self.real_callsign)
        self.assertEqual(str(aprs_frame.destination), 'APOTC1')
        self.assertEqual(str(aprs_frame.path[0]), 'WIDE1-1')
        self.assertEqual(str(aprs_frame.path[1]), 'WIDE2-1')

    def test_encode_ascii_frame_as_kiss(self):
        """
        Tests KISS-encoding an ASCII APRS frame using `aprs.Frame()`.
        """
        frame = 'W2GMD-1>OMG,WIDE1-1:test_encode_frame'
        # Expected encoded frame, expressed as a hex string.
        kiss_frame = (
            '9e9a8e40404060ae648e9a884062ae92888a62406303f074657'
            '3745f656e636f64655f6672616d65')
        aprs_frame = aprs.Frame(frame)
        # `str.decode('hex')` is Python 2 only.
        self.assertEqual(kiss_frame.decode('hex'), aprs_frame.encode_kiss())
if __name__ == '__main__':
    # Allow running this test module directly.
    unittest.main()
| {"/tests/test_util.py": ["/tests/context.py", "/tests/__init__.py"], "/tests/test_aprsframe.py": ["/tests/context.py", "/tests/__init__.py"]} |
54,857 | PhirePhly/aprs | refs/heads/master | /tests/context.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Python APRS Module Test Context."""
import os
import sys

# Put the parent directory ahead of installed packages so tests import the
# in-tree `aprs` package. NOTE(review): '..' is relative to the current
# working directory -- assumes tests are run from the tests/ directory.
sys.path.insert(0, os.path.abspath('..'))

import aprs  # NOQA pylint: disable=W0611
__author__ = 'Greg Albrecht W2GMD <oss@undef.net>'
__license__ = 'Apache License, Version 2.0'
__copyright__ = 'Copyright 2016 Orion Labs, Inc. and Contributors'
| {"/tests/test_util.py": ["/tests/context.py", "/tests/__init__.py"], "/tests/test_aprsframe.py": ["/tests/context.py", "/tests/__init__.py"]} |
54,858 | PhirePhly/aprs | refs/heads/master | /aprs/exceptions.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Python APRS Module Exception Definitions."""
__author__ = 'Greg Albrecht W2GMD <oss@undef.net>'
__copyright__ = 'Copyright 2016 Orion Labs, Inc. and Contributors'
__license__ = 'Apache License, Version 2.0'
class BadCallsignError(Exception):
    """Raised when a callsign cannot be parsed or validated."""
    # The `pass` statement is redundant when a docstring forms the class body.
| {"/tests/test_util.py": ["/tests/context.py", "/tests/__init__.py"], "/tests/test_aprsframe.py": ["/tests/context.py", "/tests/__init__.py"]} |
54,859 | PhirePhly/aprs | refs/heads/master | /aprs/classes.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Python APRS Module Class Definitions."""
import logging
import logging.handlers
import socket
import kiss
import requests
import aprs
__author__ = 'Greg Albrecht W2GMD <oss@undef.net>'
__copyright__ = 'Copyright 2016 Orion Labs, Inc. and Contributors'
__license__ = 'Apache License, Version 2.0'
class APRS(object):
    """
    APRS Object.

    Base class for APRS-IS connections: holds the login line and exposes
    `start`/`send`/`receive` stubs for transport-specific subclasses.
    """

    _logger = logging.getLogger(__name__)
    if not _logger.handlers:
        # Wire up a console handler only once per process.
        _logger.setLevel(aprs.LOG_LEVEL)
        _console_handler = logging.StreamHandler()
        _console_handler.setLevel(aprs.LOG_LEVEL)
        _console_handler.setFormatter(aprs.LOG_FORMAT)
        _logger.addHandler(_console_handler)
        _logger.propagate = False

    def __init__(self, user, password='-1'):
        """
        :param user: APRS-IS login user/callsign.
        :param password: APRS-IS passcode; defaults to '-1'
            (conventionally a receive-only login -- confirm with APRS-IS docs).
        """
        self.user = user
        # Pre-built APRS-IS login line: "user U pass P vers VERSION".
        self._auth = ' '.join(
            ['user', user, 'pass', password, 'vers', aprs.APRSIS_SW_VERSION])
        self._full_auth = None
        # Transport handle, set by concrete subclasses.
        self.interface = None
        # NOTE(review): presumably toggles the APRS-IS 'I' construct -- verify.
        self.use_i_construct = False

    def start(self):
        """
        Abstract method for starting connection to APRS-IS.
        """
        pass

    def send(self, message):
        """
        Abstract method for sending messages to APRS-IS.
        """
        pass

    def receive(self, callback=None):
        """
        Abstract method for receiving messages from APRS-IS.
        """
        pass
class Frame(object):
"""
Frame Class.
Defines the components of an APRS Frame and can decode a frame
from either ASCII or KISS.
"""
__slots__ = ['frame', 'source', 'destination', 'path', 'text']
_logger = logging.getLogger(__name__)
if not _logger.handlers:
_logger.setLevel(aprs.LOG_LEVEL)
_console_handler = logging.StreamHandler()
_console_handler.setLevel(aprs.LOG_LEVEL)
_console_handler.setFormatter(aprs.LOG_FORMAT)
_logger.addHandler(_console_handler)
_logger.propagate = False
def __init__(self, frame=None):
self.source = ''
self.destination = 'APRS'
self.path = []
self.text = ''
if frame is not None:
self.frame = kiss.strip_df_start(str(frame))
self.parse()
def __repr__(self):
full_path = [str(self.destination)]
full_path.extend([str(p) for p in self.path])
frame = "%s>%s:%s" % (
self.source,
','.join(full_path),
self.text
)
return frame.encode('UTF-8')
def to_h(self):
"""
Returns an Frame as a Hex String.
"""
return str(self).encode('hex')
def parse(self, frame=None):
"""
Parses an Frame from either ASCII or KISS Encoded frame.
"""
# Allows to be called as class method:
if frame is not None:
self.frame = frame
try:
self.parse_kiss()
except IndexError as exc:
self._logger.info('Not a KISS Frame? %s', self.frame.encode('hex'))
if not self.source or not self.destination:
try:
self.parse_text()
except UnicodeDecodeError as exc:
self._logger.info(
'Cannot decode frame=%s', self.frame.encode('hex'))
self._logger.exception(exc)
def parse_text(self):
"""
Parses and Extracts the components of an ASCII-Encoded Frame.
"""
frame_so_far = ''
for char in self.frame.decode('UTF-8'):
if '>' in char and not self.source:
self.source = Callsign(frame_so_far)
frame_so_far = ''
elif ':' in char:
if not self.path:
if ',' in frame_so_far:
self.destination = Callsign(frame_so_far.split(',')[0])
self.path = []
for path in frame_so_far.split(',')[1:]:
self.path.append(Callsign(path))
frame_so_far = ''
elif not self.destination:
self.destination = Callsign(frame_so_far)
frame_so_far = ''
else:
frame_so_far = ''.join([frame_so_far, char])
else:
frame_so_far = ''.join([frame_so_far, char])
else:
frame_so_far = ''.join([frame_so_far, char])
self.text = frame_so_far.encode('UTF-8')
def parse_kiss(self):
"""
Parses and Extracts the components of an KISS-Encoded Frame.
"""
frame_len = len(self.frame)
if frame_len < 16:
self._logger.debug('Frame len(%s) < 16, Exiting.', frame_len)
return
for raw_slice in range(0, frame_len):
# Is address field length correct?
# Find the first ODD Byte followed by the next boundary:
if (ord(self.frame[raw_slice]) & 0x01
and ((raw_slice + 1) % 7) == 0):
i = (raw_slice + 1) / 7
# Less than 2 callsigns?
if 1 < i < 11:
# For frames <= 70 bytes
if frame_len >= raw_slice + 2:
if (ord(self.frame[raw_slice + 1]) & 0x03 == 0x03 and
ord(self.frame[raw_slice + 2]) in
[0xf0, 0xcf]):
self._extract_kiss_text(raw_slice)
self._extract_kiss_destination()
self._extract_kiss_source()
self._extract_kiss_path(i)
def encode_kiss(self):
"""
Encodes an Frame as KISS.
"""
enc_frame = ''.join([
self.destination.encode_kiss(),
self.source.encode_kiss(),
''.join([path_call.encode_kiss() for path_call in self.path])
])
return ''.join([
enc_frame[:-1],
chr(ord(enc_frame[-1]) | 0x01),
kiss.SLOT_TIME,
chr(0xF0),
self.text.encode('UTF-8')
])
def _extract_kiss_text(self, raw_slice):
"""
Extracts a Text portion of a KISS-Encoded Frame.
"""
self.text = self.frame[raw_slice + 3:]
def _extract_kiss_source(self):
"""
Extracts a Source Callsign of a KISS-Encoded Frame.
"""
self.source = Callsign(self.frame[7:])
def _extract_kiss_destination(self):
"""
Extracts a Destination Callsign of a KISS-Encoded Frame.
"""
self.destination = Callsign(self.frame)
    def _extract_kiss_path(self, start):
        """
        Extracts path from raw APRS KISS frame.

        :param start: Number of 7-byte address blocks in the frame; path
            callsigns occupy blocks 2..start-1 (after destination/source).
        """
        for i in range(2, start):
            path_call = Callsign(self.frame[i * 7:])
            if path_call:
                # High bit of the SSID byte marks the station as having
                # already digipeated this frame.
                if ord(self.frame[i * 7 + 6]) & 0x80:
                    path_call.digi = True
                self.path.append(path_call)
class Callsign(object):
    """
    Callsign Class.
    Defines parts of a Callsign decoded from either ASCII or KISS.
    """
    # Class-level logger, configured once per process.
    _logger = logging.getLogger(__name__)
    if not _logger.handlers:
        _logger.setLevel(aprs.LOG_LEVEL)
        _console_handler = logging.StreamHandler()
        _console_handler.setLevel(aprs.LOG_LEVEL)
        _console_handler.setFormatter(aprs.LOG_FORMAT)
        _logger.addHandler(_console_handler)
        _logger.propagate = False
    # __slots__ keeps per-instance memory low for the many Callsign
    # objects created while parsing frames.
    __slots__ = ['callsign', 'ssid', 'digi']
    def __init__(self, callsign):
        self.callsign = ''
        self.ssid = str(0)  # SSID is stored as a string, e.g. '0'.
        self.digi = False
        self.parse(callsign)
    def __repr__(self):
        # Render as CALLSIGN or CALLSIGN-SSID, with a trailing '*'
        # when the digipeated flag is set.
        if int(self.ssid) > 0:
            call_repr = '-'.join([self.callsign, str(self.ssid)])
        else:
            call_repr = self.callsign
        if self.digi:
            return ''.join([call_repr, '*'])
        else:
            return call_repr
    def to_h(self):
        """
        Returns a Callsign as a Hex String.
        """
        # NOTE: str.encode('hex') is Python 2 only.
        return str(self).encode('hex')
    def parse(self, callsign):
        """
        Parse and extract the components of a Callsign from ASCII or KISS.

        Tries KISS decoding first; falls back to ASCII parsing when the
        result fails validation.

        :raises aprs.BadCallsignError: if neither decoding yields a
            valid callsign.
        """
        try:
            self._extract_callsign_from_kiss_frame(callsign)
        except IndexError:
            self._logger.debug(
                'Not a KISS Callsign? "%s"', callsign.encode('hex'))
        if not aprs.valid_callsign(self.callsign):
            self.parse_text(callsign)
        if not aprs.valid_callsign(self.callsign):
            raise aprs.BadCallsignError(
                'Could not extract callsign from %s',
                self.callsign.encode('hex'))
    def parse_text(self, callsign):
        """
        Parses and extracts a Callsign and SSID from an ASCII-Encoded APRS
        Callsign or Callsign-SSID.
        :param callsign: ASCII-Encoded APRS Callsign
        :type callsign: str
        """
        self._logger.debug('callsign=%s', callsign.encode('hex'))
        _callsign = callsign
        ssid = str(0)
        # NOTE(review): a callsign containing more than one '-' makes
        # this unpack raise ValueError -- confirm inputs are sanitized.
        if '-' in callsign:
            _callsign, ssid = callsign.split('-')
        # A trailing '*' marks a digipeated station.
        if _callsign[-1] == '*':
            _callsign = _callsign[:-1]
            self.digi = True
        self.callsign = _callsign.lstrip().rstrip()
        self.ssid = ssid.lstrip().rstrip()
    def encode_kiss(self):
        """
        Encodes Callsign (or Callsign-SSID) as KISS.

        :returns: 7-byte KISS address field (6 callsign bytes + SSID byte).
        :rtype: str
        """
        # SSID byte: SSID value in bits 1-4, 0x60 reserved bits set.
        encoded_ssid = (int(self.ssid) << 1) | 0x60
        _callsign = self.callsign
        if self.digi:
            # _callsign = ''.join([_callsign, '*'])
            encoded_ssid |= 0x80
        # Pad the callsign to at least 6 characters.
        while len(_callsign) < 6:
            _callsign = ''.join([_callsign, ' '])
        # Each address character is shifted left one bit per AX.25.
        encoded_callsign = ''.join([chr(ord(p) << 1) for p in _callsign])
        return ''.join([encoded_callsign, chr(encoded_ssid)])
    def _extract_callsign_from_kiss_frame(self, frame):
        """
        Extracts a Callsign and SSID from a KISS-Encoded APRS Frame.
        :param frame: KISS-Encoded APRS Frame as str of octs.
        :type frame: str
        """
        self._logger.debug('frame=%s', frame.encode('hex'))
        # Undo the one-bit left shift applied to address characters.
        callsign = ''.join([chr(ord(x) >> 1) for x in frame[:6]])
        self.callsign = callsign.lstrip().rstrip()
        # SSID lives in bits 1-4 of the seventh byte.
        self.ssid = str((ord(frame[6]) >> 1) & 0x0F)
class TCP(APRS):
    """APRS-IS TCP Class."""
    def __init__(self, user, password='-1', server=None, port=None,
                 aprs_filter=None):
        """
        :param user: APRS-IS login callsign.
        :param password: APRS-IS passcode; '-1' allows receive-only.
        :param aprs_filter: Server-side filter; defaults to prefix
            filtering on the user's callsign ('p/<user>').
        """
        super(TCP, self).__init__(user, password)
        server = server or aprs.APRSIS_SERVER
        port = port or aprs.APRSIS_FILTER_PORT
        self.address = (server, int(port))
        aprs_filter = aprs_filter or '/'.join(['p', user])
        self._full_auth = ' '.join([self._auth, 'filter', aprs_filter])
        self.use_i_construct = True
    def start(self):
        """
        Connects & logs in to APRS-IS.
        """
        self.interface = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        # NOTE(review): "Connecting to to" -- typo in log message.
        self._logger.info('Connecting to to "%s"', self.address)
        self.interface.connect(self.address)
        self._logger.debug('Sending full_auth=%s', self._full_auth)
        self.interface.sendall(self._full_auth + '\n\r')
    def send(self, frame):
        """
        Sends frame to APRS-IS.
        :param frame: Frame to send to APRS-IS.
        :type frame: str
        """
        self._logger.debug('Sending frame="%s"', frame)
        return self.interface.send("%s\n\r" % frame)  # Ensure cast->str.
    def receive(self, callback=None):
        """
        Receives from APRS-IS.
        :param callback: Optional callback to deliver frame to.
        :type callback: func
        :returns: Nothing, but calls a callback with an Frame object.
        :rtype: None
        """
        recvd_data = ''
        try:
            while 1:
                recv_data = self.interface.recv(aprs.RECV_BUFFER)
                if not recv_data:
                    # Peer closed the connection.
                    break
                recvd_data += recv_data
                self._logger.debug('recv_data=%s', recv_data.strip())
                if recvd_data.endswith('\r\n'):
                    # Buffer ends on a line boundary: consume everything.
                    lines = recvd_data.strip().split('\r\n')
                    recvd_data = ''
                else:
                    # Last line is incomplete: keep it for the next recv.
                    lines = recvd_data.split('\r\n')
                    recvd_data = str(lines.pop(-1))
                for line in lines:
                    # Lines starting with '#' are server comments.
                    if line.startswith('#'):
                        if 'logresp' in line:
                            self._logger.debug('logresp=%s', line)
                    else:
                        self._logger.debug('line=%s', line)
                        if callback:
                            callback(Frame(line))
        except socket.error as sock_err:
            self._logger.error(sock_err)
            raise
class UDP(APRS):
    """APRS-IS UDP Class."""
    def __init__(self, user, password='-1', server=None, port=None):
        super(UDP, self).__init__(user, password)
        server = server or aprs.APRSIS_SERVER
        port = port or aprs.APRSIS_RX_PORT
        self._addr = (server, int(port))
        self.use_i_construct = True
    def start(self):
        """
        Connects & logs in to APRS-IS.
        """
        # UDP is connectionless: just create the socket; the auth line
        # is sent with every datagram in send().
        self.interface = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    def send(self, frame):
        """
        Sends frame to APRS-IS.
        :param frame: Frame to send to APRS-IS.
        :type frame: str
        """
        self._logger.debug('frame="%s"', frame)
        # Each datagram carries the login line followed by the frame.
        content = "\n".join([self._auth, str(frame)])
        return self.interface.sendto(content, self._addr)
class HTTP(APRS):
    """APRS-IS HTTP Class."""
    def __init__(self, user, password='-1', url=None, headers=None):
        super(HTTP, self).__init__(user, password)
        self.url = url or aprs.APRSIS_URL
        self.headers = headers or aprs.APRSIS_HTTP_HEADERS
        self.use_i_construct = True
    def start(self):
        """
        Connects & logs in to APRS-IS.
        """
        # No persistent connection: "interface" is simply requests.post.
        self.interface = requests.post
    def send(self, frame):
        """
        Sends frame to APRS-IS.
        :param frame: Frame to send to APRS-IS.
        :type frame: str
        :returns: True when the gateway answers 204 (accepted).
        :rtype: bool
        """
        # Body is the login line followed by the frame.
        content = "\n".join([self._auth, str(frame)])
        result = self.interface(self.url, data=content, headers=self.headers)
        return result.status_code == 204
class SerialKISS(kiss.SerialKISS):
    """APRS interface for KISS serial devices."""
    def __init__(self, port, speed, strip_df_start=False):
        super(SerialKISS, self).__init__(port, speed, strip_df_start)
        # Alias the generic kiss read/write API to the send/receive
        # naming used by the other APRS interface classes.
        self.send = self.write
        self.receive = self.read
        self.use_i_construct = False
    def write(self, frame):
        """Writes APRS-encoded frame to KISS device.
        :param frame: APRS frame to write to KISS device; must provide
            an ``encode_kiss()`` method (an ``aprs.Frame``).
        :type frame: aprs.Frame
        """
        # Serialize the Frame to KISS bytes before writing.
        super(SerialKISS, self).write(frame.encode_kiss())
class TCPKISS(kiss.TCPKISS):
    """APRS interface for KISS-over-TCP devices (e.g. network TNCs)."""
    def __init__(self, host, port, strip_df_start=False):
        super(TCPKISS, self).__init__(host, port, strip_df_start)
        # Alias the generic kiss read/write API to the send/receive
        # naming used by the other APRS interface classes.
        self.send = self.write
        self.receive = self.read
        self.use_i_construct = False
    def write(self, frame):
        """
        Writes APRS-encoded frame to KISS device.
        :param frame: APRS frame to write to KISS device; must provide
            an ``encode_kiss()`` method (an ``aprs.Frame``).
        :type frame: aprs.Frame
        """
        # Serialize the Frame to KISS bytes before writing.
        super(TCPKISS, self).write(frame.encode_kiss())
| {"/tests/test_util.py": ["/tests/context.py", "/tests/__init__.py"], "/tests/test_aprsframe.py": ["/tests/context.py", "/tests/__init__.py"]} |
54,860 | PhirePhly/aprs | refs/heads/master | /tests/__init__.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Python APRS Module Tests."""
__author__ = 'Greg Albrecht W2GMD <oss@undef.net>'
__license__ = 'Apache License, Version 2.0'
__copyright__ = 'Copyright 2016 Orion Labs, Inc. and Contributors'
| {"/tests/test_util.py": ["/tests/context.py", "/tests/__init__.py"], "/tests/test_aprsframe.py": ["/tests/context.py", "/tests/__init__.py"]} |
54,861 | PhirePhly/aprs | refs/heads/master | /tests/constants.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Python APRS Module Test Constants."""
__author__ = 'Greg Albrecht W2GMD <oss@undef.net>'
__license__ = 'Apache License, Version 2.0'
__copyright__ = 'Copyright 2016 Orion Labs, Inc. and Contributors'
# Path of the log file holding raw test frames.
TEST_FRAMES = 'tests/test_frames.log'
# A pangram: contains every letter of the alphabet at least once.
PANGRAM = 'the quick brown fox jumps over the lazy dog'
ALPHABET = ''.join(PANGRAM.split(' '))
NUMBERS = '0123456789'
POSITIVE_NUMBERS = NUMBERS[1:]
ALPHANUM = ALPHABET + NUMBERS
# Callsign fixtures exercising the validator's accept/reject rules.
VALID_CALLSIGNS = ['W2GMD', 'W2GMD-1', 'KF4MKT', 'KF4MKT-1', 'KF4LZA-15']
INVALID_CALLSIGNS = [
    'xW2GMDx',
    'W2GMD-16',
    'W2GMD-A',
    'W',
    'W2GMD-1-0',
    'W*GMD',
    'W2GMD-123',
]
# Hex dump of a KISS-encoded APRS frame used as the canonical fixture.
TEST_FRAME = (
    '82a0a4b0646860ae648e9a88406cae92888a62406303f021333734352e3735'
    '4e4931323232382e303557235732474d442d3620496e6e65722053756e73657'
    '42c2053462069476174652f4469676970656174657220687474703a2f2f7732'
    '676d642e6f7267')
| {"/tests/test_util.py": ["/tests/context.py", "/tests/__init__.py"], "/tests/test_aprsframe.py": ["/tests/context.py", "/tests/__init__.py"]} |
54,862 | PhirePhly/aprs | refs/heads/master | /tests/test_callsign.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Python APRS Module APRS Callsign Tests."""
import logging
import logging.handlers
import unittest
from .context import aprs
from . import constants
__author__ = 'Greg Albrecht W2GMD <oss@undef.net>'
__license__ = 'Apache License, Version 2.0'
__copyright__ = 'Copyright 2016 Orion Labs, Inc. and Contributors'
class CallsignTestCase(unittest.TestCase):  # pylint: disable=R0904
    """Tests for Python APRS Callsign."""
    # Class-level logger, configured once per process.
    _logger = logging.getLogger(__name__)
    if not _logger.handlers:
        _logger.setLevel(aprs.LOG_LEVEL)
        _console_handler = logging.StreamHandler()
        _console_handler.setLevel(aprs.LOG_LEVEL)
        _console_handler.setFormatter(aprs.LOG_FORMAT)
        _logger.addHandler(_console_handler)
        _logger.propagate = False
    def setUp(self):  # pylint: disable=C0103
        """Setup."""
        # First line of the frame log is the fixture used by these tests.
        self.test_frames = open(constants.TEST_FRAMES, 'r')
        self.test_frame = self.test_frames.readlines()[0].strip()
    def tearDown(self):  # pylint: disable=C0103
        """Teardown."""
        self.test_frames.close()
    def test_extract_callsign_source(self):
        """
        Tests extracting the source callsign from a KISS-encoded APRS frame
        using `aprs.Callsign`.
        """
        callsign = 'W2GMD'
        ssid = str(6)
        full = '-'.join([callsign, ssid])
        # Source address begins at byte 7 of the decoded frame.
        # NOTE: str.decode('hex') is Python 2 only.
        extracted_callsign = aprs.Callsign(
            constants.TEST_FRAME.decode('hex')[7:])
        self.assertEqual(full, str(extracted_callsign))
        self.assertEqual(callsign, extracted_callsign.callsign)
        self.assertEqual(ssid, extracted_callsign.ssid)
    def test_extract_callsign_dest(self):
        """
        Tests extracting the destination callsign from a KISS-encoded APRS
        frame using `aprs.Callsign`.
        """
        extracted_callsign = aprs.Callsign(constants.TEST_FRAME.decode('hex'))
        self.assertEqual(extracted_callsign.callsign, 'APRX24')
    def test_full_callsign_with_ssid(self):
        """
        Tests creating a full callsign string from a callsign+ssid using
        `aprs.Callsign`.
        """
        callsign = 'W2GMD-1'
        full_callsign = aprs.Callsign(callsign)
        self.assertEqual(str(full_callsign), callsign)
    def test_full_callsign_with_ssid_0(self):
        """
        Tests creating a full callsign string from a callsign using
        `aprs.Callsign`.
        """
        # SSID 0 is omitted from the string representation.
        callsign = 'W2GMD-0'
        full_callsign = aprs.Callsign(callsign)
        self.assertEqual(str(full_callsign), callsign.split('-')[0])
    def test_full_callsign_sans_ssid(self):
        """
        Tests creating a full Callsign string from a Callsign sans SSID.
        """
        callsign = 'W2GMD'
        full_callsign = aprs.Callsign(callsign)
        self.assertEqual(str(full_callsign), callsign)
    def test_encode_kiss(self):
        """
        Tests encoding a non-digipeated Callsign.
        """
        encoded_callsign = aprs.Callsign('W2GMD-1').encode_kiss()
        self.assertEqual('\xaed\x8e\x9a\x88@b', encoded_callsign)
    # FIXME: Currently not working...
    def test_encode_kiss_digipeated(self):
        """
        Tests encoding a digipeated callsign.
        """
        callsign = 'W2GMD*'
        callsign_obj = aprs.Callsign(callsign)
        # Python 2 print statements: dump actual vs. expected for
        # debugging while the assertion below stays disabled.
        print callsign_obj.encode_kiss().encode('hex')
        print '\xaed\x8e\x9a\x88@\xe2'.encode('hex')
        # self.assertEqual('\xaed\x8e\x9a\x88@\xe2', callsign_obj.encode_kiss())
# Allow running this test module directly (python test_callsign.py).
if __name__ == '__main__':
    unittest.main()
| {"/tests/test_util.py": ["/tests/context.py", "/tests/__init__.py"], "/tests/test_aprsframe.py": ["/tests/context.py", "/tests/__init__.py"]} |
54,863 | ksalehi/bookmarklet | refs/heads/master | /discoveryengine/discovery/api/views.py | from __future__ import absolute_import, division
from rest_framework import permissions, status, viewsets
from rest_framework.exceptions import ParseError
from rest_framework.response import Response
from widget.models import Version
from ..models import Question, Manuscript, Rating
from .serializers import RatingSerializer
class RatingViewSet(viewsets.ModelViewSet):
    """
    API endpoints for manuscript Ratings, scoped to the requesting user.

    Only list/retrieve/create are functional; update and delete are
    deliberate no-ops so ratings cannot be altered once submitted.
    """
    model = Rating
    serializer_class = RatingSerializer
    permission_classes = [permissions.IsAuthenticated]
    # POST key, Question category code, and human-readable label for each
    # rating slider.  Order matters: validation errors are reported, and
    # Rating rows created, in this order (matching the original behavior).
    _RATING_FIELDS = (
        ('dv', 'DV', 'Discovery Value'),
        ('ac', 'AC', 'Accountability'),
        ('cr', 'CR', 'Concreteness'),
        ('ex', 'EX', 'Expertise'),
    )
    def get_queryset(self):
        """Return only the Ratings belonging to the requesting user."""
        return Rating.objects.filter(user=self.request.user)
    def create(self, request):
        """
        Create one Rating per category for the manuscript identified by
        the POSTed DOI.  Slider values arrive as integers in [0, 100] and
        are stored normalized to [0.0, 1.0] (true division is active via
        the module's ``__future__`` import).

        :raises ParseError: when the DOI or any slider value is missing.
        """
        data = request.POST
        # NOTE(review): assumes exactly one Version row exists -- confirm.
        version = Version.objects.get()
        if not data.get('doi', None):
            raise ParseError("Missing DOI")
        for key, _category, label in self._RATING_FIELDS:
            if not data.get(key, None):
                raise ParseError("Missing %s" % label)
        manuscript, _created = Manuscript.objects.get_or_create(doi=data["doi"])
        for key, category, _label in self._RATING_FIELDS:
            question = Question.objects.get(category=category)
            Rating.objects.create(
                user=request.user,
                question=question,
                manuscript=manuscript,
                version=version,
                value=int(data.get(key)) / 100,
            )
        return Response(status=status.HTTP_201_CREATED)
    ###################################
    ##### NO DESTRUCTIVE METHODS ######
    ###################################
    def update(self, request, pk=None):
        """Intentionally disabled: ratings are immutable once created."""
        pass
    def partial_update(self, request, pk=None):
        """Intentionally disabled: ratings are immutable once created."""
        pass
    def destroy(self, request, pk=None):
        """Intentionally disabled: ratings cannot be deleted."""
        pass
| {"/discoveryengine/discovery/api/views.py": ["/discoveryengine/discovery/models.py"], "/discoveryengine/discovery/api/urls.py": ["/discoveryengine/discovery/api/views.py"], "/discoveryengine/auth_disco/api/urls.py": ["/discoveryengine/auth_disco/api/views.py"], "/discoveryengine/internal/api/urls.py": ["/discoveryengine/internal/api/views.py"], "/discoveryengine/discovery/admin.py": ["/discoveryengine/discovery/models.py"], "/discoveryengine/capsule/api_manager.py": ["/discoveryengine/capsule/models.py"], "/discoveryengine/widget/admin.py": ["/discoveryengine/widget/models.py"], "/discoveryengine/internal/urls.py": ["/discoveryengine/internal/views.py"]} |
54,864 | ksalehi/bookmarklet | refs/heads/master | /discoveryengine/capsule/models.py | from __future__ import unicode_literals
# Create your models here.
class CapsuleObject(object):
    """Common base for objects mirrored from the Capsule CRM API."""

    @property
    def API_PATH(self):
        """Relative API path for this object type; subclasses must set it."""
        message = "API_PATH must be set in subclass"
        raise NotImplementedError(message)
class Person(CapsuleObject):
    """
    A person record mirrored from the Capsule CRM API.

    All attributes are exposed read-only except ``title``, which is
    validated against ``TITLE_OPTIONS`` on every assignment.
    """
    # Honorific titles accepted by the API.
    TITLE_OPTIONS = [
        "Mr",
        "Master",
        "Mrs",
        "Miss",
        "Ms",
        "Dr",
    ]

    def __init__(self, first_name, last_name, title=None, job_title=None,
                 about=None, organization_name=None, organization_id=None):
        """
        :param title: Optional honorific; must be one of ``TITLE_OPTIONS``
            when provided.
        :raises AttributeError: if ``title`` is given but not allowed.
        """
        self._first_name = first_name
        self._last_name = last_name
        # BUG FIX: the original used ``... else raise AttributeError(...)``
        # inside a conditional expression, which is a SyntaxError, and
        # referenced TITLE_OPTIONS unqualified (a NameError at runtime).
        # Validation now goes through the ``title`` property setter;
        # ``None`` (the default) means "no title" and skips validation.
        self._title = None
        if title is not None:
            self.title = title
        self._job_title = job_title
        self._about = about
        self._organization_name = organization_name
        self._organization_id = organization_id

    @property
    def title(self):
        """The honorific title of a Person."""
        return self._title

    @title.setter
    def title(self, value):
        if value not in self.TITLE_OPTIONS:
            raise AttributeError("Title must be one of the following: %s" % ", ".join(self.TITLE_OPTIONS))
        self._title = value

    @title.deleter
    def title(self):
        del self._title

    @property
    def first_name(self):
        """Get the first name of the Person"""
        return self._first_name

    @property
    def last_name(self):
        """Get the last name of the Person"""
        return self._last_name

    @property
    def job_title(self):
        """Get the job title of the Person"""
        return self._job_title

    @property
    def about(self):
        """Get the free-form description of the Person"""
        return self._about

    @property
    def organization_name(self):
        """Get the organization name of the Person"""
        return self._organization_name

    @property
    def organization_id(self):
        """Get the organization id of the Person"""
        return self._organization_id
54,865 | ksalehi/bookmarklet | refs/heads/master | /discoveryengine/internal/views.py | from django.http import Http404
from django.shortcuts import render
# Create your views here.
def analytics(request):
    """
    Render the internal analytics page.

    Visible only to staff or superusers; everyone else gets a 404 so the
    page's existence is not revealed.
    """
    user = request.user
    if not (user.is_staff or user.is_superuser):
        raise Http404
    return render(request, 'internal/analytics.html')
54,866 | ksalehi/bookmarklet | refs/heads/master | /discoveryengine/discovery/models.py | from __future__ import unicode_literals, absolute_import
from django.contrib.auth.models import User
from django.db import models
from widget.models import Version
# Create your models here.
class Question(models.Model):
    """A survey question answered when rating a manuscript."""
    question_text = models.CharField(max_length=200)
    help_text = models.TextField()
    # Two-character category codes stored in the DB, with display labels.
    DISCOVERY_VALUE = "DV"
    ACTIONABILITY = "AC"
    CERTITUDE = "CR"
    EXPERTISE = "EX"
    CATEGORY_CHOICES = (
        (DISCOVERY_VALUE, "Discovery Value"),
        (ACTIONABILITY, "Actionability"),
        (CERTITUDE, "Certitude"),
        (EXPERTISE, "Expertise"),
    )
    category = models.CharField(
        max_length=2,
        choices=CATEGORY_CHOICES,
        default=DISCOVERY_VALUE
    )
    # Only slider-style questions exist at present.
    SLIDER = "SL"
    QUESTION_TYPE_CHOICES = (
        (SLIDER, "Slider"),
    )
    question_type = models.CharField(
        max_length=2,
        choices=QUESTION_TYPE_CHOICES,
        default=SLIDER
    )
    def __unicode__(self):
        # Python 2 string representation, e.g. "DV: <question text>".
        return "%s: %s" % (self.category, self.question_text)
class Manuscript(models.Model):
    """A manuscript identified solely by its DOI."""
    doi = models.CharField(max_length=200)
    def __unicode__(self):
        return self.doi
class Rating(models.Model):
    """One user's answer to one Question about one Manuscript."""
    user = models.ForeignKey(User)
    question = models.ForeignKey(Question)
    manuscript = models.ForeignKey(Manuscript)
    # Widget version that captured the rating, for auditability.
    version = models.ForeignKey(Version)
    # Normalized rating value; the API stores slider values divided by 100.
    value = models.FloatField()
    create_date = models.DateTimeField(auto_now_add=True)
    modify_date = models.DateTimeField(auto_now=True)
    def __unicode__(self):
        return "%s: %.2f" % (self.manuscript.doi, self.value)
54,867 | ksalehi/bookmarklet | refs/heads/master | /discoveryengine/discovery/api/urls.py | """
API URLs for Discovery
"""
from __future__ import absolute_import
from django.conf.urls import include, url
from rest_framework import routers
from .views import RatingViewSet
# Routers
# DRF router generating the standard list/detail routes for ratings.
router = routers.SimpleRouter()
# base_name is given explicitly because RatingViewSet defines no queryset.
router.register(r'ratings', RatingViewSet, base_name='rating')
# URLs
urlpatterns = [
    url(r'^', include(router.urls)),
]
| {"/discoveryengine/discovery/api/views.py": ["/discoveryengine/discovery/models.py"], "/discoveryengine/discovery/api/urls.py": ["/discoveryengine/discovery/api/views.py"], "/discoveryengine/auth_disco/api/urls.py": ["/discoveryengine/auth_disco/api/views.py"], "/discoveryengine/internal/api/urls.py": ["/discoveryengine/internal/api/views.py"], "/discoveryengine/discovery/admin.py": ["/discoveryengine/discovery/models.py"], "/discoveryengine/capsule/api_manager.py": ["/discoveryengine/capsule/models.py"], "/discoveryengine/widget/admin.py": ["/discoveryengine/widget/models.py"], "/discoveryengine/internal/urls.py": ["/discoveryengine/internal/views.py"]} |
54,868 | ksalehi/bookmarklet | refs/heads/master | /discoveryengine/internal/api/views.py | from __future__ import absolute_import, division
import time
from datetime import date, timedelta

from django.contrib.auth.models import User
from rest_framework import permissions
from rest_framework import status
from rest_framework.response import Response
from rest_framework.views import APIView
class CountUsersView(APIView):
    """
    Returns the counts of users for the day and the total up to today.
    If a date is specified, it will return for that date.
    If a range is specified, it will return a value for each date in the range.
    * Only admin users are able to access this view.
    """
    permission_classes = (permissions.IsAdminUser,)
    def _get_user_counts(self, start_date, end_date):
        """
        Build one {timestamp, daily_users, total_users} dict per day from
        ``start_date`` through ``end_date`` inclusive.
        """
        data_to_return = []
        while start_date <= end_date:
            inner_data = {}
            inner_data['timestamp'] = time.mktime(start_date.timetuple())
            # A date object represents midnight, so to include users who
            # joined *during* start_date we advance one day and query for
            # date_joined strictly before that boundary (i.e. up to
            # start_date 11:59pm).
            start_date = start_date + timedelta(days=1)
            inner_data['daily_users'] = User.objects.filter(
                date_joined__gte=start_date - timedelta(days=1),
                date_joined__lt=start_date).count()
            inner_data['total_users'] = User.objects.filter(
                date_joined__lt=start_date).count()
            data_to_return.append(inner_data)
        return data_to_return
    def get(self, request, format=None):
        """
        Returns counts of users based on date ranges.

        Query params ``start_date`` / ``end_date`` are UNIX timestamps;
        both default to today.
        """
        queries = request.GET
        start_time = queries.get('start_date', time.mktime(date.today().timetuple()))
        end_time = queries.get('end_date', time.mktime(date.today().timetuple()))
        start_date = date.fromtimestamp(float(start_time))
        end_date = date.fromtimestamp(float(end_time))
        if end_date < start_date:
            # BUG FIX: ``status`` was referenced here without being
            # imported, so this branch raised NameError instead of
            # returning HTTP 400; the import is now at module top.
            return Response(
                data={'error': 'End date cannot be before start date.'},
                status=status.HTTP_400_BAD_REQUEST)
        user_counts = self._get_user_counts(start_date, end_date)
        return Response(user_counts)
54,869 | ksalehi/bookmarklet | refs/heads/master | /discoveryengine/auth_disco/api/serializers.py | from __future__ import absolute_import
from django.contrib.auth.models import User
from rest_framework import serializers
from ..models import Rating
class UserSerializer(serializers.ModelSerializer):
    """Serializes a Django User, exposing User.username as 'orcid'."""
    # The ORCID identifier is stored in User.username; rename it for the API.
    orcid = serializers.CharField(source='username')
    class Meta:
        model = User
        fields = (
            'first_name',
            'last_name',
            'email',
            'orcid',
        )
class RatingSerializer(serializers.ModelSerializer):
    """Default ModelSerializer over all Rating fields."""
    # NOTE(review): Rating is imported from ..models at the top of this
    # file, but the Rating model appears to live in the discovery app --
    # confirm the import path resolves.
    class Meta:
        model = Rating
54,870 | ksalehi/bookmarklet | refs/heads/master | /discoveryengine/auth_disco/api/urls.py | """
API URLs for Discovery
"""
from __future__ import absolute_import
from django.conf.urls import include, url
from rest_framework import routers
from .views import ProvideEmailViewSet
# Routers
router = routers.SimpleRouter()
router.register(r'emails', ProvideEmailViewSet, base_name='emails')
# URLs
urlpatterns = [
url(r'^', include(router.urls)),
]
| {"/discoveryengine/discovery/api/views.py": ["/discoveryengine/discovery/models.py"], "/discoveryengine/discovery/api/urls.py": ["/discoveryengine/discovery/api/views.py"], "/discoveryengine/auth_disco/api/urls.py": ["/discoveryengine/auth_disco/api/views.py"], "/discoveryengine/internal/api/urls.py": ["/discoveryengine/internal/api/views.py"], "/discoveryengine/discovery/admin.py": ["/discoveryengine/discovery/models.py"], "/discoveryengine/capsule/api_manager.py": ["/discoveryengine/capsule/models.py"], "/discoveryengine/widget/admin.py": ["/discoveryengine/widget/models.py"], "/discoveryengine/internal/urls.py": ["/discoveryengine/internal/views.py"]} |
54,871 | ksalehi/bookmarklet | refs/heads/master | /discoveryengine/widget/models.py | from __future__ import unicode_literals
from django.db import models
# Create your models here.
class Version(models.Model):
    """Semantic version (major.minor.patch) of the rating widget."""
    major = models.PositiveSmallIntegerField(default=0)
    minor = models.PositiveSmallIntegerField(default=0)
    patch = models.PositiveSmallIntegerField(default=0)
    class Meta:
        # Each exact version triple may exist only once.
        unique_together = ('major', 'minor', 'patch')
    def __unicode__(self):
        return "%d.%d.%d" % (self.major, self.minor, self.patch)
54,872 | ksalehi/bookmarklet | refs/heads/master | /discoveryengine/internal/api/urls.py | """
API URLs for Internal
"""
from __future__ import absolute_import
from django.conf.urls import include, url
from rest_framework import routers
from .views import CountUsersView
# Routers
# Analytics endpoints, mounted below /analytics/.
analytics_urls = [
    url(r'^count-users/', CountUsersView.as_view()),
]
# URLs
urlpatterns = [
    url(r'^analytics/', include(analytics_urls)),
]
| {"/discoveryengine/discovery/api/views.py": ["/discoveryengine/discovery/models.py"], "/discoveryengine/discovery/api/urls.py": ["/discoveryengine/discovery/api/views.py"], "/discoveryengine/auth_disco/api/urls.py": ["/discoveryengine/auth_disco/api/views.py"], "/discoveryengine/internal/api/urls.py": ["/discoveryengine/internal/api/views.py"], "/discoveryengine/discovery/admin.py": ["/discoveryengine/discovery/models.py"], "/discoveryengine/capsule/api_manager.py": ["/discoveryengine/capsule/models.py"], "/discoveryengine/widget/admin.py": ["/discoveryengine/widget/models.py"], "/discoveryengine/internal/urls.py": ["/discoveryengine/internal/views.py"]} |
54,873 | ksalehi/bookmarklet | refs/heads/master | /discoveryengine/discoveryengine/settings.py | """
Django settings for discoveryengine project.
Generated by 'django-admin startproject' using Django 1.10.
For more information on this file, see
https://docs.djangoproject.com/en/1.10/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.10/ref/settings/
"""
import os
import six
from django.core.exceptions import ImproperlyConfigured
def get_env_var(var_name, default=None):
    """
    Get the environment variable VAR_NAME from the system,
    using the DEFAULT value if VAR_NAME is not found,
    and if no DEFAULT is provided, raising an error

    String values 'true'/'1' and 'false'/'0' (case-insensitive) are
    converted to Python booleans.

    :raises ImproperlyConfigured: when the variable is unset and no
        default was supplied.
    """
    def process_var_value(value):
        """
        Process the value of the environment variable,
        converting any boolean values into python booleans if needed,
        and returning other values as strings.
        """
        if isinstance(value, six.string_types):
            if value.lower() == 'true' or value == '1':
                return True
            if value.lower() == 'false' or value == '0':
                return False
        return value
    value = default
    try:
        value = process_var_value(os.environ[var_name])
    except KeyError:
        # BUG FIX: the original bound the exception to an unused name
        # with the Python-2-only ``except KeyError, e`` syntax; the
        # fall-through to ``default`` is intentional.
        pass
    # BUG FIX: identity comparison (`is None`) instead of `== None`.
    if value is None:
        raise ImproperlyConfigured("Environment variable %s is missing." % var_name)
    return value
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))

# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.10/howto/deployment/checklist/

# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = get_env_var("DJANGO_SECRET_KEY")

# IS THE ENVIRONMENT PRODUCTION
# get_env_var converts "true"/"1" style strings to booleans; defaults to False.
IS_PRODUCTION = get_env_var("IS_PRODUCTION", False)

# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = not IS_PRODUCTION

ALLOWED_HOSTS = []
if IS_PRODUCTION:
    ALLOWED_HOSTS.append('rate.thediscoveryengine.org')
else:
    ALLOWED_HOSTS.append('localhost')
    ALLOWED_HOSTS.append('staging.thediscoveryengine.org')

# Application definition
INSTALLED_APPS = [
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'djsupervisor',
    'social.apps.django_app.default',
    'rest_framework',
    'discoveryengine',
    'widget',
    'bookmarklet',
    'auth_disco',
    'discovery',
    # Always last
    'internal',
]

MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]

ROOT_URLCONF = 'discoveryengine.urls'

TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]

WSGI_APPLICATION = 'discoveryengine.wsgi.application'

# Database
# https://docs.djangoproject.com/en/1.10/ref/settings/#databases
# All connection parameters come from the environment; deployment fails
# fast via get_env_var if any of them is missing.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.mysql',
        'NAME': get_env_var('DATABASE_NAME'),
        'USER': get_env_var('DATABASE_USER'),
        'PASSWORD': get_env_var('DATABASE_PASSWORD'),
        'HOST': get_env_var('DATABASE_HOST'),
        'PORT': get_env_var('DATABASE_PORT'),
    }
}

# Password validation
# https://docs.djangoproject.com/en/1.10/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]

# Social authentication
# http://psa.matiasaguirre.net/docs/configuration/django.html
# Production talks to the real ORCID member API; other environments use the
# ORCID sandbox backend.
AUTHENTICATION_BACKENDS = []
if IS_PRODUCTION:
    AUTHENTICATION_BACKENDS.append('auth_disco.backends.orcid.ORCIDMemberOAuth2')
else:
    AUTHENTICATION_BACKENDS.append('auth_disco.backends.orcid.ORCIDSandboxMemberOAuth2')
AUTHENTICATION_BACKENDS.append('django.contrib.auth.backends.ModelBackend')

SOCIAL_AUTH_PIPELINE = (
    # Get the information we can about the user and return it in a simple
    # format to create the user instance later. On some cases the details are
    # already part of the auth response from the provider, but sometimes this
    # could hit a provider API.
    'social.pipeline.social_auth.social_details',
    # Get the social uid from whichever service we're authing thru. The uid is
    # the unique identifier of the given user in the provider.
    'social.pipeline.social_auth.social_uid',
    # Verifies that the current auth process is valid within the current
    # project, this is were emails and domains whitelists are applied (if
    # defined).
    'social.pipeline.social_auth.auth_allowed',
    # Checks if the current social-account is already associated in the site.
    'social.pipeline.social_auth.social_user',
    # Make up a username for this person, appends a random string at the end if
    # there's any collision.
    # 'social.pipeline.user.get_username',
    # Send a validation email to the user to verify its email address.
    # Disabled by default.
    # 'social.pipeline.mail.mail_validation',
    # Associates the current social details with another user account with
    # a similar email address. Disabled by default.
    # 'social.pipeline.social_auth.associate_by_email',
    # Create a user account if we haven't found one yet.
    'social.pipeline.user.create_user',
    # Create the record that associated the social account with this user.
    'social.pipeline.social_auth.associate_user',
    # Populate the extra_data field in the social record with the values
    # specified by settings (and the default ones like access_token, etc).
    'social.pipeline.social_auth.load_extra_data',
    # Update the user record with any changed info from the auth service.
    'social.pipeline.user.user_details',
)

# Internationalization
# https://docs.djangoproject.com/en/1.10/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True

# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.10/howto/static-files/
STATIC_URL = '/static/'
STATIC_ROOT = os.path.join(BASE_DIR, 'static')

# Capsule Integration
CAPSULE_ID = get_env_var("CAPSULE_ID")
CAPSULE_API_KEY = get_env_var("CAPSULE_API_KEY")

# ORCID Integration
# NOTE(review): member and sandbox backends read the same ORCID_API_KEY /
# ORCID_API_SECRET pair — presumably the per-environment value is swapped via
# the environment; confirm with deployment config.
SOCIAL_AUTH_ORCID_MEMBER_KEY = get_env_var("ORCID_API_KEY")
SOCIAL_AUTH_ORCID_MEMBER_SECRET = get_env_var("ORCID_API_SECRET")
SOCIAL_AUTH_ORCID_SANDBOX_MEMBER_KEY = get_env_var("ORCID_API_KEY")
SOCIAL_AUTH_ORCID_SANDBOX_MEMBER_SECRET = get_env_var("ORCID_API_SECRET")
SOCIAL_AUTH_ORCID_MEMBER_SCOPE = ['/orcid-bio', '/update']
ORCID_API_KEY = get_env_var("ORCID_API_KEY")
ORCID_API_SECRET = get_env_var("ORCID_API_SECRET")
| {"/discoveryengine/discovery/api/views.py": ["/discoveryengine/discovery/models.py"], "/discoveryengine/discovery/api/urls.py": ["/discoveryengine/discovery/api/views.py"], "/discoveryengine/auth_disco/api/urls.py": ["/discoveryengine/auth_disco/api/views.py"], "/discoveryengine/internal/api/urls.py": ["/discoveryengine/internal/api/views.py"], "/discoveryengine/discovery/admin.py": ["/discoveryengine/discovery/models.py"], "/discoveryengine/capsule/api_manager.py": ["/discoveryengine/capsule/models.py"], "/discoveryengine/widget/admin.py": ["/discoveryengine/widget/models.py"], "/discoveryengine/internal/urls.py": ["/discoveryengine/internal/views.py"]} |
54,874 | ksalehi/bookmarklet | refs/heads/master | /discoveryengine/discovery/admin.py | from __future__ import absolute_import
from django.contrib import admin
from .models import Question, Manuscript, Rating
# Register your models here.
# Expose the discovery models in the Django admin with default ModelAdmins.
admin.site.register(Question)
admin.site.register(Manuscript)
admin.site.register(Rating)
54,875 | ksalehi/bookmarklet | refs/heads/master | /discoveryengine/auth_disco/api/views.py | from __future__ import absolute_import
from rest_framework import permissions, status, viewsets, mixins
from rest_framework.response import Response
class ProvideEmailViewSet(mixins.CreateModelMixin,
                          viewsets.GenericViewSet):
    """Store a POSTed email address on the requesting user.

    POST body: ``email`` (required). Returns 200 on success, 400 when the
    field is missing or empty.
    """

    # Must be declared as a class attribute for DRF to enforce it. In the
    # original code this was assigned as an unused local variable inside
    # create(), which had no effect and left the endpoint unauthenticated.
    permission_classes = [permissions.IsAuthenticated]

    def create(self, request):
        user = request.user
        data = request.POST
        email = data.get('email', None)
        if email:
            user.email = email
            user.save()
            return Response()
        return Response(status=status.HTTP_400_BAD_REQUEST)
| {"/discoveryengine/discovery/api/views.py": ["/discoveryengine/discovery/models.py"], "/discoveryengine/discovery/api/urls.py": ["/discoveryengine/discovery/api/views.py"], "/discoveryengine/auth_disco/api/urls.py": ["/discoveryengine/auth_disco/api/views.py"], "/discoveryengine/internal/api/urls.py": ["/discoveryengine/internal/api/views.py"], "/discoveryengine/discovery/admin.py": ["/discoveryengine/discovery/models.py"], "/discoveryengine/capsule/api_manager.py": ["/discoveryengine/capsule/models.py"], "/discoveryengine/widget/admin.py": ["/discoveryengine/widget/models.py"], "/discoveryengine/internal/urls.py": ["/discoveryengine/internal/views.py"]} |
54,876 | ksalehi/bookmarklet | refs/heads/master | /discoveryengine/discoveryengine/api.py | from __future__ import absolute_import
from django.conf.urls import url, include
# Top-level API routing: each app contributes its own api/urls module.
urlpatterns = [
    url(r'^discovery/', include('discovery.api.urls')),
    url(r'^auth/', include('auth_disco.api.urls')),
    url(r'^internal/', include('internal.api.urls')),
]
| {"/discoveryengine/discovery/api/views.py": ["/discoveryengine/discovery/models.py"], "/discoveryengine/discovery/api/urls.py": ["/discoveryengine/discovery/api/views.py"], "/discoveryengine/auth_disco/api/urls.py": ["/discoveryengine/auth_disco/api/views.py"], "/discoveryengine/internal/api/urls.py": ["/discoveryengine/internal/api/views.py"], "/discoveryengine/discovery/admin.py": ["/discoveryengine/discovery/models.py"], "/discoveryengine/capsule/api_manager.py": ["/discoveryengine/capsule/models.py"], "/discoveryengine/widget/admin.py": ["/discoveryengine/widget/models.py"], "/discoveryengine/internal/urls.py": ["/discoveryengine/internal/views.py"]} |
54,877 | ksalehi/bookmarklet | refs/heads/master | /discoveryengine/capsule/api_manager.py | from __future__ import unicode_literals
import requests
from discoveryengine.settings import CAPSULE_ID, CAPSULE_API_KEY
from .models import *
class APIManager(object):
    """Thin client for the Capsule CRM REST API (stub)."""

    # The %s placeholder is the Capsule account id (settings.CAPSULE_ID).
    BASE_URL = "https://%s.capsulecrm.com/api/"

    def add_person(self, person):
        """Create *person* in Capsule CRM. Not implemented yet.

        The original signature (``def add_person(Person)``) was missing
        ``self``, so any instance call raised TypeError.
        """
        pass
54,878 | ksalehi/bookmarklet | refs/heads/master | /discoveryengine/discoveryengine/views.py | from django.conf import settings
from django.contrib.auth import logout as lo
from django.shortcuts import render, redirect
# Create your views here.
def home(request):
    """Route the visitor to the correct landing page.

    Anonymous visitors see the anonymous-rate page (when a ``doi`` query
    parameter is present) or the login/index page, with the original query
    string preserved in the ``next`` redirect target. Authenticated users
    with an email go straight to the rate page; those without one are asked
    to provide it first.
    """
    user = request.user
    if not user.is_anonymous():
        # Authenticated path: rate if we already have an email on file,
        # otherwise collect one.
        if user.email:
            return render(request, 'discovery/rate.html')
        return render(request, 'auth_disco/provide_email.html')

    ctx = {'IS_PRODUCTION': settings.IS_PRODUCTION, 'next': '/'}
    query = request.GET
    # Carry the query string through the login redirect.
    if query:
        ctx['next'] = ctx['next'] + '?' + query.urlencode()
    # A DOI parameter means the visitor can rate without logging in.
    page = 'discovery/rate_anonymous.html' if query.get('doi', None) else 'discovery/index.html'
    return render(request, page, context=ctx)
def logout(request):
    """Terminate the session (via django.contrib.auth.logout, aliased as
    ``lo``) and send the user back to the home page."""
    lo(request)
    return redirect('/')
54,879 | ksalehi/bookmarklet | refs/heads/master | /discoveryengine/widget/migrations/0002_auto_20161101_2033.py | # -*- coding: utf-8 -*-
# Generated by Django 1.10 on 2016-11-01 20:33
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
    # Auto-generated migration: enforce that each (major, minor, patch)
    # semver triple identifies at most one widget Version row.

    dependencies = [
        ('widget', '0001_initial'),
    ]

    operations = [
        migrations.AlterUniqueTogether(
            name='version',
            unique_together=set([('major', 'minor', 'patch')]),
        ),
    ]
| {"/discoveryengine/discovery/api/views.py": ["/discoveryengine/discovery/models.py"], "/discoveryengine/discovery/api/urls.py": ["/discoveryengine/discovery/api/views.py"], "/discoveryengine/auth_disco/api/urls.py": ["/discoveryengine/auth_disco/api/views.py"], "/discoveryengine/internal/api/urls.py": ["/discoveryengine/internal/api/views.py"], "/discoveryengine/discovery/admin.py": ["/discoveryengine/discovery/models.py"], "/discoveryengine/capsule/api_manager.py": ["/discoveryengine/capsule/models.py"], "/discoveryengine/widget/admin.py": ["/discoveryengine/widget/models.py"], "/discoveryengine/internal/urls.py": ["/discoveryengine/internal/views.py"]} |
54,880 | ksalehi/bookmarklet | refs/heads/master | /discoveryengine/auth_disco/apps.py | from __future__ import unicode_literals
from django.apps import AppConfig
class AuthDiscoConfig(AppConfig):
    """Django app configuration for the auth_disco app."""
    name = 'auth_disco'
| {"/discoveryengine/discovery/api/views.py": ["/discoveryengine/discovery/models.py"], "/discoveryengine/discovery/api/urls.py": ["/discoveryengine/discovery/api/views.py"], "/discoveryengine/auth_disco/api/urls.py": ["/discoveryengine/auth_disco/api/views.py"], "/discoveryengine/internal/api/urls.py": ["/discoveryengine/internal/api/views.py"], "/discoveryengine/discovery/admin.py": ["/discoveryengine/discovery/models.py"], "/discoveryengine/capsule/api_manager.py": ["/discoveryengine/capsule/models.py"], "/discoveryengine/widget/admin.py": ["/discoveryengine/widget/models.py"], "/discoveryengine/internal/urls.py": ["/discoveryengine/internal/views.py"]} |
54,881 | ksalehi/bookmarklet | refs/heads/master | /discoveryengine/widget/admin.py | from __future__ import absolute_import
from django.contrib import admin
from .models import Version
# Register your models here.
# Expose widget Version rows in the Django admin with the default ModelAdmin.
admin.site.register(Version)
54,882 | ksalehi/bookmarklet | refs/heads/master | /discoveryengine/discovery/views.py | from django.conf import settings
from django.shortcuts import render
# Create your views here.
def home(request):
    """Pick the landing template for the visitor.

    Anonymous users get the index page; authenticated users get the rate
    page if an email is on file, otherwise the provide-email form.
    """
    user = request.user
    if user.is_anonymous():
        return render(request, 'discovery/index.html',
                      context={'IS_PRODUCTION': settings.IS_PRODUCTION})
    template = 'discovery/rate.html' if user.email else 'auth_disco/provide_email.html'
    return render(request, template)
54,883 | ksalehi/bookmarklet | refs/heads/master | /discoveryengine/internal/urls.py | """
View URLs for Internal
"""
from __future__ import absolute_import
from django.conf.urls import include, url
from rest_framework import routers
from .views import analytics
# URLs
# Server-rendered analytics dashboard (function-based view).
urlpatterns = [
    url(r'^analytics/', analytics, name='analytics'),
]
| {"/discoveryengine/discovery/api/views.py": ["/discoveryengine/discovery/models.py"], "/discoveryengine/discovery/api/urls.py": ["/discoveryengine/discovery/api/views.py"], "/discoveryengine/auth_disco/api/urls.py": ["/discoveryengine/auth_disco/api/views.py"], "/discoveryengine/internal/api/urls.py": ["/discoveryengine/internal/api/views.py"], "/discoveryengine/discovery/admin.py": ["/discoveryengine/discovery/models.py"], "/discoveryengine/capsule/api_manager.py": ["/discoveryengine/capsule/models.py"], "/discoveryengine/widget/admin.py": ["/discoveryengine/widget/models.py"], "/discoveryengine/internal/urls.py": ["/discoveryengine/internal/views.py"]} |
54,884 | ksalehi/bookmarklet | refs/heads/master | /discoveryengine/auth_disco/backends/orcid.py | from social.backends.oauth import BaseOAuth2
import requests
from xml.etree import ElementTree
class ORCIDOAuth2(BaseOAuth2):
    """ORCID OAuth2 authentication backend.

    Base class holding the production authorize URL and the shared logic for
    fetching and parsing ORCID profile XML; concrete subclasses supply the
    token/API endpoints.
    """
    AUTHORIZATION_URL = 'https://orcid.org/oauth/authorize'
    SCOPE_SEPARATOR = ''
    ACCESS_TOKEN_METHOD = 'POST'
    # Key under which the unique user id appears in the parsed user data.
    ID_KEY = 'orcid'

    def indent(self, elem, level=0):
        """Pretty-print helper: rewrite the .text/.tail whitespace of *elem*
        (in place, recursively) so the XML tree serializes one element per
        line. Returns *elem* for convenience.
        """
        # i = indentation for this level, j = indentation one level up.
        i = "\n" + level*" "
        j = "\n" + (level-1)*" "
        if len(elem):
            if not elem.text or not elem.text.strip():
                elem.text = i + " "
            if not elem.tail or not elem.tail.strip():
                elem.tail = i
            for subelem in elem:
                self.indent(subelem, level+1)
            # NOTE(review): this re-checks elem.tail (just set above) rather
            # than the last child's tail, as the classic recipe does —
            # presumably intentional, but worth confirming.
            if not elem.tail or not elem.tail.strip():
                elem.tail = j
        else:
            if level and (not elem.tail or not elem.tail.strip()):
                elem.tail = j
        return elem

    def get_user_details(self, response):
        """Return user details from ORCID account"""
        # The ORCID id doubles as the username since ORCID has no usernames.
        return {
            'username': response.get('orcid'),
            'first_name': response.get('first_name'),
            'last_name': response.get('last_name'),
            'orcid': response.get('orcid'),
        }

    def user_data(self, access_token, *args, **kwargs):
        """Loads user data from service.

        Fetches the profile XML from the subclass's API_URL and parses it;
        returns None if parsing raises ValueError.
        """
        orcid = kwargs.get('response').get('orcid')
        url = self.API_URL + orcid
        headers = {'Authorization': 'Bearer ' + access_token}
        try:
            r = requests.get(url, headers=headers)
            data = self._user_data_from_XML(r.content)
            data["orcid"] = orcid
            return data
        except ValueError:
            return None

    def _user_data_from_XML(self, xml_string):
        """Parses user data from ORCID XML.

        Returns a dict with first_name, last_name and email; each value may
        be None when the corresponding element is absent from the profile.
        """
        namespaces = {
            'ns0': 'http://www.orcid.org/ns/orcid',
        }
        tree = ElementTree.fromstring(xml_string)
        profile = tree.find('ns0:orcid-profile', namespaces)
        bio = profile.find('ns0:orcid-bio', namespaces)
        # Personal details
        details = bio.find('ns0:personal-details', namespaces)
        first_name = details.find('ns0:given-names', namespaces)
        if first_name is not None:
            first_name = first_name.text
        last_name = details.find('ns0:family-name', namespaces)
        if last_name is not None:
            last_name = last_name.text
        # Contact information
        contact = bio.find('ns0:contact-details', namespaces)
        email = None
        if contact is not None:
            emailXML = contact.find('ns0:email', namespaces)
            if emailXML is not None:
                email = contact.find('ns0:email', namespaces).text
        return {
            'first_name': first_name,
            'last_name': last_name,
            'email': email,
        }
class ORCIDPublicOAuth2(ORCIDOAuth2):
    """ORCID OAuth2 authentication backend for Public API"""
    name = 'orcid-public'
    ACCESS_TOKEN_URL = 'https://orcid.org/oauth/token'
    API_URL = 'https://pub.orcid.org/v1.2/'
class ORCIDMemberOAuth2(ORCIDOAuth2):
    """ORCID OAuth2 authentication backend for Member API"""
    name = 'orcid-member'
    ACCESS_TOKEN_URL = 'https://orcid.org/oauth/token'
    API_URL = 'https://api.orcid.org/v1.2/'
################################################################
################ ORCID SANDBOX OAUTH2 BACKENDS #################
################################################################
class ORCIDSandboxOAuth2(ORCIDOAuth2):
    """ORCID Sandbox OAuth2 authentication backend.

    Same behavior as the production backend but pointed at sandbox.orcid.org.
    """
    AUTHORIZATION_URL = 'https://sandbox.orcid.org/oauth/authorize'


class ORCIDSandboxPublicOAuth2(ORCIDSandboxOAuth2):
    """ORCID Sandbox OAuth2 authentication backend for Public API"""
    name = 'orcid-sandbox-public'
    ACCESS_TOKEN_URL = 'https://sandbox.orcid.org/oauth/token'
    API_URL = 'https://pub.sandbox.orcid.org/v1.2/'


class ORCIDSandboxMemberOAuth2(ORCIDSandboxOAuth2):
    """ORCID Sandbox OAuth2 authentication backend for Member API"""
    name = 'orcid-sandbox-member'
    ACCESS_TOKEN_URL = 'https://sandbox.orcid.org/oauth/token'
    API_URL = 'https://api.sandbox.orcid.org/v1.2/'
54,887 | Seeeeeyo/django_project_forum | refs/heads/master | /app/topics/admin.py | from django.contrib import admin
from .models import Topic, Response
# Expose forum models in the Django admin with default ModelAdmins.
admin.site.register(Topic)
admin.site.register(Response)
54,888 | Seeeeeyo/django_project_forum | refs/heads/master | /app/main/views.py | from django.views.generic import TemplateView
# Static page views: each class only binds a template; all dynamic behavior
# lives client-side or in the included auth/topic apps.

class HomeTemplateView(TemplateView):
    template_name = 'main/home.html'


class LoginTemplateView(TemplateView):
    template_name = 'main/login.html'


class RegisterTemplateView(TemplateView):
    template_name = 'main/register.html'


class ResetPwdFormTemplateView(TemplateView):
    template_name = 'main/reset_pwd_form.html'


class ResetPwdDoneTemplateView(TemplateView):
    template_name = 'main/reset_pwd_done.html'


class ResetPwdConfirmTemplateView(TemplateView):
    template_name = 'main/reset_pwd_confirm.html'


class ResetPwdCompleteTemplateView(TemplateView):
    template_name = 'main/reset_pwd_complete.html'


class ProfilTemplateView(TemplateView):
    template_name = 'main/profil_detail.html'


class TopicListTemplateView(TemplateView):
    template_name = 'main/topic_list.html'


class TopicDetailTemplateView(TemplateView):
    template_name = 'main/topic_detail.html'


# NOTE(review): "Tempalte" is a typo kept for compatibility with existing
# references to these class names.
class TopicCreateTempalteView(TemplateView):
    template_name = 'main/topic_create.html'


class ReactTempalteView(TemplateView):
    template_name = 'main/react.html'
54,889 | Seeeeeyo/django_project_forum | refs/heads/master | /app/user/serializers.py | from rest_framework import serializers
from .models import User
class AuthorSerializer(serializers.ModelSerializer):
    """Read-only-style serializer exposing the public author fields of User."""
    class Meta:
        model = User
        fields = ['first_name', 'last_name', 'email', 'avatar']
# def create(self, validated_data):
# """
# Create and return a new `User` instance, given the validated data.
# """
# return User.objects.create(**validated_data)
# def update(self, instance, validated_data):
# """
# Update and return an existing `User` instance, given the validated data.
# """
# instance.first_name = validated_data.get('first_name', instance.first_name)
# instance.last_name = validated_data.get('last_name', instance.last_name)
# instance.email = validated_data.get('email', instance.email)
# instance.author_avatar = validated_data.get('author_avatar', instance.author_avatar)
# instance.save()
# return instance | {"/app/topics/admin.py": ["/app/topics/models.py"], "/app/user/serializers.py": ["/app/user/models.py"], "/app/topics/views.py": ["/app/topics/models.py"], "/app/user/views.py": ["/app/user/models.py"], "/app/topics/api_views.py": ["/app/topics/models.py", "/app/topics/serializers.py"], "/app/topics/serializers.py": ["/app/topics/models.py"]} |
54,890 | Seeeeeyo/django_project_forum | refs/heads/master | /app/topics/urls.py | from django.urls import path, include
from rest_framework.routers import DefaultRouter
from rest_framework.urlpatterns import format_suffix_patterns
import topics.api_views
import topics.views
# router = DefaultRouter()
# router.register(r'topics', topics.api_views.TopicDetail)
urlpatterns = [
    # Server-rendered pages.
    path('', topics.views.TopicListTemplateView.as_view(), name='topic_list'),
    path('<int:pk>/', topics.views.TopicDetailTemplateView.as_view(), name='topic_detail'),
    path('new/', topics.views.TopicCreateTemplateView.as_view(), name='topic_create'),
    path('react/', topics.views.ReactTemplateView.as_view(), name='topic_react'),
    # Form POST target for replying to a topic.
    path('response/<int:topic_id>', topics.views.response, name='response'),
    # REST API endpoints.
    path('topics/', topics.api_views.TopicList.as_view(), name='topic_api_list'),
    path('topics/<int:pk>/', topics.api_views.TopicDetail.as_view(), name='topic_api_detail'),
    path('topics/<int:pk>/respond', topics.api_views.ResponseCreate.as_view(), name='topic_api_response'),
    # path('', include(router.urls))
]
54,891 | Seeeeeyo/django_project_forum | refs/heads/master | /app/user/urls.py | from django.urls import path, include
# import main.views
import user.views
urlpatterns = [
    # Built-in auth views (login, logout, password reset, ...).
    path('', include("django.contrib.auth.urls")),
    path('register/', user.views.RegisterView.as_view(), name='register'),
    path('<int:pk>/profile/update/', user.views.ProfileUpdateView.as_view(), name='profile_update'),
    path('<int:pk>/profile/detail/', user.views.ProfileDetailView.as_view(), name='profile_detail'),
]
54,892 | Seeeeeyo/django_project_forum | refs/heads/master | /app/topics/models.py | from django.db import models
from user.models import User
from django.utils import timezone
class Topic(models.Model):
    """A forum thread started by a user."""

    title = models.CharField(max_length=200)
    # Pass the callable itself, NOT timezone.now(): calling it evaluates once
    # at import time, so every new Topic would share that frozen timestamp.
    date = models.DateTimeField("date published", default=timezone.now)  # verbose used to debug
    text = models.TextField()
    solved = models.BooleanField(default=False)
    # Keep the thread if its author account is deleted.
    author = models.ForeignKey(User, on_delete=models.SET_NULL, null=True)

    def get_last_message_author(self):
        """Display name of the most recent reply's author, or None if there
        are no replies yet."""
        last = self.response_set.all().last()
        if last is not None:
            return last.get_author()

    def get_last_message_date(self):
        """Timestamp of the most recent reply, or None if there are none."""
        last = self.response_set.all().last()
        if last is not None:
            return last.get_date()

    def get_replies_count(self):
        """Number of replies attached to this topic."""
        return self.response_set.count()

    def __str__(self):
        return self.title
class Response(models.Model):
    """A single reply posted to a Topic."""

    # Pass the callable itself, NOT timezone.now(): calling it evaluates once
    # at import time, so every new Response would share that frozen timestamp.
    date = models.DateTimeField("date published", default=timezone.now)  # verbose used to debug
    text = models.TextField()
    # Keep the reply if its author account is deleted.
    author = models.ForeignKey(User, on_delete=models.SET_NULL, null=True)
    topic = models.ForeignKey(Topic, on_delete=models.CASCADE)

    def get_author(self):
        """Author's display name as a string (author may be None)."""
        return str(self.author)

    def get_date(self):
        """Publication timestamp of this reply."""
        return self.date

    def get_avatar(self):
        """URL of the author's avatar image."""
        return self.author.get_avatar_url()

    def __str__(self):
        return self.text
54,893 | Seeeeeyo/django_project_forum | refs/heads/master | /app/topics/views.py | from django import template
from django.urls import reverse
from django.views.generic import TemplateView, ListView, DetailView, CreateView
from .models import Topic, Response
from django.db.models import Max
from django.utils import timezone
from django.shortcuts import HttpResponseRedirect, get_object_or_404
class TopicListTemplateView(ListView):
    """Paginated topic list with optional text search and status filters."""
    template_name = 'topics/topic_list.html'
    model = Topic
    paginate_by = 3

    def get_queryset(self):
        """Filter topics by ?search= (title substring) and ?filter=
        (1 = solved, 2 = unsolved, 3 = no replies yet)."""
        search = self.request.GET.get('search')
        cat = self.request.GET.get('filter')
        # object_list = Topic.objects.filter(title__icontains=search)
        # Max('response') is None for topics without any reply; the
        # annotation backs the "no replies" filter below.
        object_list = Topic.objects.annotate(number_replies=Max('response'))
        if search is not None:
            object_list = object_list.filter(title__icontains=search)
        if cat == '1':
            object_list = object_list.filter(solved=True)
        if cat == '2':
            object_list = object_list.filter(solved=False)
        if cat == '3':
            object_list = object_list.filter(number_replies=None)
        return object_list
class TopicDetailTemplateView(DetailView):
    """Single topic page with its replies."""
    template_name = 'topics/topic_detail.html'
    model = Topic
def response(request, topic_id):
    """Attach a reply (POST field 'text') to the topic, then return to it.

    Anonymous visitors are redirected to the login page; previously the
    authentication guard could let the view fall through without returning
    an HttpResponse, which Django rejects with a server error.
    """
    if not request.user.is_authenticated:
        return HttpResponseRedirect(reverse('login'))
    topic = get_object_or_404(Topic, pk=topic_id)
    topic.response_set.create(text=request.POST.get('text'),
                              date=timezone.now(), author=request.user)
    return HttpResponseRedirect(reverse('topic_detail', args=(topic_id,)))
class TopicCreateTemplateView(CreateView):
    """Topic creation form; requires an authenticated user."""
    template_name = 'topics/topic_create.html'
    model = Topic
    fields = ['title', 'text']
    # NOTE(review): unused — form_valid() below always returns its own
    # redirect, so this empty dict is never consulted.
    success_url = {}

    def render_to_response(self, context, **response_kwargs):
        """Redirect anonymous users to login instead of showing the form."""
        if not self.request.user.is_authenticated:
            return HttpResponseRedirect(reverse('login'))
        else:
            # Mirrors TemplateResponseMixin.render_to_response for the
            # authenticated case.
            response_kwargs.setdefault('content_type', self.content_type)
            return self.response_class(
                request=self.request,
                template=self.get_template_names(),
                context=context,
                using=self.template_engine,
                **response_kwargs
            )

    def form_valid(self, form):
        """Stamp date/author on the new topic, save, and go to its page."""
        self.object = form.save(commit=False)
        self.object.date = timezone.now()
        self.object.author = self.request.user
        self.object.save()
        # print(request.user.is_authenticated)
        return HttpResponseRedirect(reverse('topic_detail', args=(self.object.id,)))
class ReactTemplateView(TemplateView):
    """Shell page that bootstraps the React client."""
    template_name = 'topics/react.html'
54,894 | Seeeeeyo/django_project_forum | refs/heads/master | /app/user/views.py | from django.shortcuts import render
from django.views.generic import CreateView, UpdateView, DetailView
from django.urls import reverse
from .forms import SignUpForm
from .models import User
class RegisterView(CreateView):
    """Public sign-up page backed by the custom SignupForm."""
    template_name = 'registration/register.html'
    model = User
    form_class = SignupForm
    success_url = '/'
class ProfileUpdateView(UpdateView):
    """Edit form for a user's profile (name and avatar)."""
    model = User
    fields = ['first_name', 'last_name', 'avatar']
    template_name = 'registration/profil_update.html'

    def get_success_url(self):
        """After a successful save, show the freshly updated profile page."""
        return reverse('profile_detail', args=(self.object.pk,))
class ProfileDetailView(DetailView):
    """Read-only profile page for a single user."""
    template_name = 'registration/profil_detail.html'
    model = User
| {"/app/topics/admin.py": ["/app/topics/models.py"], "/app/user/serializers.py": ["/app/user/models.py"], "/app/topics/views.py": ["/app/topics/models.py"], "/app/user/views.py": ["/app/user/models.py"], "/app/topics/api_views.py": ["/app/topics/models.py", "/app/topics/serializers.py"], "/app/topics/serializers.py": ["/app/topics/models.py"]} |
54,895 | Seeeeeyo/django_project_forum | refs/heads/master | /app/main/urls.py | from django.urls import path, include
import main.views
# Sub-routes for the `main` app. Every entry is currently a disabled
# placeholder; the live auth/topic routes are served by their own apps.
main_urlpatterns = [
    # path('login/', main.views.LoginTemplateView.as_view(), name='login'),
    # path('register/', main.views.RegisterTemplateView.as_view(), name='register'),
    # path('reset_pwd_form/', main.views.ResetPwdFormTemplateView.as_view(), name='reset_pwd_form'),
    # path('reset_pwd_done/', main.views.ResetPwdDoneTemplateView.as_view(), name='reset_pwd_done'),
    # path('reset_pwd_confirm/', main.views.ResetPwdConfirmTemplateView.as_view(), name='reset_pwd_confirm'),
    # path('reset_pwd_complete/', main.views.ResetPwdCompleteTemplateView.as_view(), name='reset_pwd_complete'),
    # path('profil/', main.views.ProfilTemplateView.as_view(), name='profil'),
    # path('topics/', main.views.TopicListTemplateView.as_view(), name='topic_list'),
    # path('topics/topic_pk/', main.views.TopicDetailTemplateView.as_view(), name='topic_detail'),
    # path('topics/new/', main.views.TopicCreateTempalteView.as_view(), name='topic_create'),
    # path('react/', main.views.ReactTempalteView.as_view(), name='react'),
]
urlpatterns = [
path('', main.views.HomeTemplateView.as_view(), name='home'),
path('main/', include(main_urlpatterns)),
] | {"/app/topics/admin.py": ["/app/topics/models.py"], "/app/user/serializers.py": ["/app/user/models.py"], "/app/topics/views.py": ["/app/topics/models.py"], "/app/user/views.py": ["/app/user/models.py"], "/app/topics/api_views.py": ["/app/topics/models.py", "/app/topics/serializers.py"], "/app/topics/serializers.py": ["/app/topics/models.py"]} |
54,896 | Seeeeeyo/django_project_forum | refs/heads/master | /app/user/admin.py | from django.contrib import admin
from django.contrib.auth.models import Group
from django.contrib.auth.admin import UserAdmin as BaseUserAdmin
import user.models
@admin.register(user.models.User)
class UserAdmin(BaseUserAdmin):
    """Admin configuration for the email-based custom User model."""
    # The fields to be used in displaying the User model.
    # These override the definitions on the base UserAdmin
    # that reference specific fields on auth.User.
    list_display = ('email', 'first_name', 'last_name', 'is_superuser', 'is_active')
    list_filter = ('is_active', )
    fieldsets = (
        (None, {'fields': ('email', 'password')}),
        ('Personal info', {'fields': (
            'avatar', 'first_name', 'last_name',
        )}),
        ('Permissions', {'fields': ('is_active', 'is_staff', 'is_superuser',
                                    'groups', 'user_permissions')}),
        ('Important dates', {'fields': ('last_login', 'date_joined')}),
    )
    # add_fieldsets is not a standard ModelAdmin attribute. UserAdmin
    # overrides get_fieldsets to use this attribute when creating a user.
    add_fieldsets = (
        (None, {
            'classes': ('wide',),
            'fields': (
                'email', 'password1', 'password2', 'avatar',
                'first_name', 'last_name',
            ),
        }),
    )
    search_fields = ('email', )
    ordering = ('email', )


# Hide the auth Group model from the admin index.
admin.site.unregister(Group)
| {"/app/topics/admin.py": ["/app/topics/models.py"], "/app/user/serializers.py": ["/app/user/models.py"], "/app/topics/views.py": ["/app/topics/models.py"], "/app/user/views.py": ["/app/user/models.py"], "/app/topics/api_views.py": ["/app/topics/models.py", "/app/topics/serializers.py"], "/app/topics/serializers.py": ["/app/topics/models.py"]} |
54,897 | Seeeeeyo/django_project_forum | refs/heads/master | /app/topics/api_views.py | from rest_framework import generics, viewsets, status
from rest_framework.response import Response as resp
from .models import Topic, Response
from .serializers import TopicListSerializer, TopicDetailSerializer, ResponseSerializer
from django.db.models import Max
class TopicList(generics.ListCreateAPIView):
    """List topics (with search/filter query params) and create new ones."""
    serializer_class = TopicListSerializer

    def create(self, request, *args, **kwargs):
        """Create a topic, forcing the author to the requesting user."""
        # Bug fix: request.data is a QueryDict only for form posts; for JSON
        # bodies it is a plain dict with no .dict() method. items() works for
        # both (keeping the last value per key, exactly as .dict() did).
        data = dict(request.data.items())
        data['author'] = request.user.pk
        serializer = self.get_serializer(data=data)
        serializer.is_valid(raise_exception=True)
        self.perform_create(serializer)
        headers = self.get_success_headers(serializer.data)
        return resp(serializer.data, status=status.HTTP_201_CREATED, headers=headers)

    def get_queryset(self):
        """Apply ?search= and ?filter= (solved/unsolved/noreply) to the list.

        NOTE(review): the HTML view (topics/views.py) uses '1'/'2'/'3' for the
        same filters — consider unifying the two vocabularies.
        """
        search = self.request.GET.get('search')
        cat = self.request.GET.get('filter')
        # Max('response') is None for topics with no responses, which the
        # 'noreply' filter relies on.
        object_list = Topic.objects.annotate(number_replies=Max('response'))
        if search is not None:
            object_list = object_list.filter(title__icontains=search)
        if cat == 'solved':
            object_list = object_list.filter(solved=True)
        if cat == 'unsolved':
            object_list = object_list.filter(solved=False)
        if cat == 'noreply':
            object_list = object_list.filter(number_replies=None)
        return object_list
class TopicDetail(generics.RetrieveAPIView):
    """Read-only API endpoint returning one topic with its responses."""
    serializer_class = TopicDetailSerializer
    queryset = Topic.objects.all()
class ResponseCreate(generics.CreateAPIView):
    """API endpoint creating a reply on the topic given in the URL."""
    serializer_class = ResponseSerializer

    def create(self, request, *args, **kwargs):
        """Create a response, forcing author (current user) and topic (URL pk)."""
        # Bug fix: request.data is a QueryDict only for form posts; for JSON
        # bodies it is a plain dict with no .dict() method. items() works for
        # both (keeping the last value per key, exactly as .dict() did).
        data = dict(request.data.items())
        data['author'] = request.user.pk
        data['topic'] = kwargs['pk']
        serializer = self.get_serializer(data=data)
        serializer.is_valid(raise_exception=True)
        self.perform_create(serializer)
        headers = self.get_success_headers(serializer.data)
        return resp(serializer.data, status=status.HTTP_201_CREATED, headers=headers)
| {"/app/topics/admin.py": ["/app/topics/models.py"], "/app/user/serializers.py": ["/app/user/models.py"], "/app/topics/views.py": ["/app/topics/models.py"], "/app/user/views.py": ["/app/user/models.py"], "/app/topics/api_views.py": ["/app/topics/models.py", "/app/topics/serializers.py"], "/app/topics/serializers.py": ["/app/topics/models.py"]} |
54,898 | Seeeeeyo/django_project_forum | refs/heads/master | /app/djangoforum/mail.py | from django.conf import settings
from django.core.mail.backends.smtp import EmailBackend
class CustomEmailBackend(EmailBackend):
    """SMTP backend that redirects all outgoing mail to a debug inbox when
    settings.DEBUG is enabled, so real users never receive dev/test e-mails."""

    def send_messages(self, messages):
        """Rewrite recipients in DEBUG mode, then delegate to the SMTP backend."""
        for msg in messages:
            if settings.DEBUG:
                # Keep the original recipients visible in the subject so the
                # debug inbox still shows who *would* have received the mail.
                original_to = ', '.join(msg.to)
                msg.subject = f"{msg.subject} [{original_to}]"
                msg.to = [settings.DEBUG_EMAIL]
        return super().send_messages(messages)
# Documentation
# https://docs.djangoproject.com/fr/3.0/topics/email/
# Quick example
#
# from django.core.mail import send_mail
# send_mail('Subject here', 'Here is the message.', 'from@example.com', ['to@example.com'], fail_silently=False)
# Html Template example
#
# from django.core.mail import send_mail
# from django.template.loader import render_to_string
# msg_plain = render_to_string('templates/email.txt', {'some_params': some_params})
# msg_html = render_to_string('templates/email.html', {'some_params': some_params})
# send_mail('Subject here', msg_plain, 'from@example.com', ['to@example.com'], html_message=msg_html, fail_silently=False)
| {"/app/topics/admin.py": ["/app/topics/models.py"], "/app/user/serializers.py": ["/app/user/models.py"], "/app/topics/views.py": ["/app/topics/models.py"], "/app/user/views.py": ["/app/user/models.py"], "/app/topics/api_views.py": ["/app/topics/models.py", "/app/topics/serializers.py"], "/app/topics/serializers.py": ["/app/topics/models.py"]} |
54,899 | Seeeeeyo/django_project_forum | refs/heads/master | /app/topics/serializers.py | from rest_framework import serializers
from .models import Topic, Response
# from user.serializers import AuthorSerializer
class TopicLastMessageSerializer(serializers.ModelSerializer):
    """Expose the author/date of a topic's most recent message.

    Used with source='*' from TopicListSerializer, so both fields read
    computed attributes on the Topic instance itself.
    """
    author = serializers.CharField(source='get_last_message_author', read_only=True)
    date = serializers.CharField(source='get_last_message_date', read_only=True)

    class Meta:
        model = Topic
        fields = ['author', 'date']
class TopicListSerializer(serializers.ModelSerializer):
    """Serialize topics for the list endpoint, including the reply count and
    last-message summary (author/date of the newest response)."""
    date = serializers.DateTimeField(read_only=True)
    # source='*' hands the whole Topic instance to the nested serializer.
    last_message = TopicLastMessageSerializer(source='*', read_only=True)
    count_replies = serializers.CharField(source='get_replies_count', read_only=True)

    def is_valid(self, raise_exception=False):
        """Validate the input data.

        Bug fix: the original override printed the raw input (a debug
        leftover that leaked submitted data to stdout) and dropped the
        boolean result of validation; delegate and propagate it instead.
        """
        return super().is_valid(raise_exception=raise_exception)

    class Meta:
        model = Topic
        fields = ['id', 'title', 'date', 'text', 'solved', 'count_replies', 'author', 'last_message']
        depth = 1
class ResponseSerializer(serializers.ModelSerializer):
    """Serialize a reply for creation through the API (text, author, topic)."""
    # avatar_url = serializers.CharField(source='get_avatar', required=False)
    # author_name = serializers.CharField(source='get_author', required=False)

    class Meta:
        model = Response
        fields = ['text', 'author', 'topic']
class TopicDetailSerializer(serializers.ModelSerializer):
    """Serialize a single topic with its full response set (depth=1 expands
    related objects one level, so responses come through inline)."""
    # response = ResponseSerializer(read_only=True)

    class Meta:
        model = Topic
        fields = ['title', 'text', 'date', 'solved', 'author', 'response_set']
        depth = 1
| {"/app/topics/admin.py": ["/app/topics/models.py"], "/app/user/serializers.py": ["/app/user/models.py"], "/app/topics/views.py": ["/app/topics/models.py"], "/app/user/views.py": ["/app/user/models.py"], "/app/topics/api_views.py": ["/app/topics/models.py", "/app/topics/serializers.py"], "/app/topics/serializers.py": ["/app/topics/models.py"]} |
54,900 | Seeeeeyo/django_project_forum | refs/heads/master | /app/user/migrations/0002_user_avatar2.py | # Generated by Django 3.2.4 on 2021-06-30 08:01
from django.db import migrations, models
import user.models
class Migration(migrations.Migration):
    """Auto-generated migration: add the second avatar field (User.avatar2),
    whose upload path is computed by user.models.upload_avatar2."""

    dependencies = [
        ('user', '0001_initial'),
    ]

    operations = [
        migrations.AddField(
            model_name='user',
            name='avatar2',
            field=models.ImageField(blank=True, null=True, upload_to=user.models.upload_avatar2, verbose_name='Avatar'),
        ),
    ]
| {"/app/topics/admin.py": ["/app/topics/models.py"], "/app/user/serializers.py": ["/app/user/models.py"], "/app/topics/views.py": ["/app/topics/models.py"], "/app/user/views.py": ["/app/user/models.py"], "/app/topics/api_views.py": ["/app/topics/models.py", "/app/topics/serializers.py"], "/app/topics/serializers.py": ["/app/topics/models.py"]} |
54,901 | Seeeeeyo/django_project_forum | refs/heads/master | /app/user/models.py | from django.db import models
from django.contrib.auth.models import BaseUserManager, AbstractUser
from django.contrib.staticfiles.storage import staticfiles_storage
from django.utils.translation import ugettext_lazy as _
# Custom Auth
# https://docs.djangoproject.com/fr/3.0/topics/auth/customizing/
class UserManager(BaseUserManager):
    """Model manager for the email-based User model (no username field)."""

    use_in_migrations = True

    def _create_user(self, email, password, **extra_fields):
        """Persist a new user with a normalized email and a hashed password."""
        if not email:
            raise ValueError('The given email must be set')
        account = self.model(email=self.normalize_email(email), **extra_fields)
        account.set_password(password)
        account.save(using=self._db)
        return account

    def create_user(self, email, password=None, **extra_fields):
        """Create a regular (non-staff, non-superuser) account."""
        for flag in ('is_staff', 'is_superuser'):
            extra_fields.setdefault(flag, False)
        return self._create_user(email, password, **extra_fields)

    def create_superuser(self, email, password, **extra_fields):
        """Create a superuser; is_staff/is_superuser must not be forced off."""
        extra_fields.setdefault('is_staff', True)
        extra_fields.setdefault('is_superuser', True)
        if extra_fields.get('is_staff') is not True:
            raise ValueError('Superuser must have is_staff=True.')
        if extra_fields.get('is_superuser') is not True:
            raise ValueError('Superuser must have is_superuser=True.')
        return self._create_user(email, password, **extra_fields)
def upload_avatar2(instance, filename):
    """Return the storage path for User.avatar2: '<user pk>/<original filename>'.

    Django's FileField calls upload_to callbacks positionally with
    (instance, filename), so renaming the first parameter (previously
    `object`, which shadowed the builtin) is safe for all callers.
    """
    return '{pk}/{file}'.format(pk=instance.pk, file=filename)
class User(AbstractUser):
    """Custom user model identified by email instead of username."""

    email = models.EmailField(
        verbose_name=_("Adresse Email"),
        max_length=255,
        unique=True,
    )
    # Primary avatar, stored under MEDIA_ROOT/avatar/.
    avatar = models.ImageField(
        verbose_name=_("Avatar"),
        upload_to='avatar',
        blank=True, null=True,
    )
    # Secondary avatar, stored under MEDIA_ROOT/<pk>/ via upload_avatar2.
    avatar2 = models.ImageField(
        verbose_name=_("Avatar"),
        upload_to=upload_avatar2,
        blank=True, null=True,
    )
    first_name = models.CharField(
        verbose_name=_("Prénom"),
        max_length=255,
    )
    last_name = models.CharField(
        verbose_name=_("Nom"),
        max_length=255,
    )
    # Drop AbstractUser's username column entirely; email is the login field.
    username = None
    objects = UserManager()
    USERNAME_FIELD = 'email'
    REQUIRED_FIELDS = ['first_name', 'last_name', ]

    class Meta:
        verbose_name = _("Utilisateur")
        verbose_name_plural = _("Utilisateurs")

    def __str__(self):
        return self.get_fullname()

    def get_fullname(self):
        """Return '<first_name> <last_name>'."""
        return "%s %s" % (self.first_name, self.last_name)

    def get_avatar_url(self):
        """Return the URL of the user's avatar, or the bundled placeholder.

        NOTE(review): accessing `self.avatar.file` touches the storage
        backend and can raise if the underlying file is missing — confirm
        this is intended.
        """
        if self.avatar and self.avatar.file:
            return self.avatar.url
        return staticfiles_storage.url('img/avatar.png')
| {"/app/topics/admin.py": ["/app/topics/models.py"], "/app/user/serializers.py": ["/app/user/models.py"], "/app/topics/views.py": ["/app/topics/models.py"], "/app/user/views.py": ["/app/user/models.py"], "/app/topics/api_views.py": ["/app/topics/models.py", "/app/topics/serializers.py"], "/app/topics/serializers.py": ["/app/topics/models.py"]} |
54,904 | nighttime/DL-Projects | refs/heads/master | /keras_extensions/maxout.py | from keras.engine.topology import Layer
from keras import backend as K
class Maxout(Layer):
    """Maxout dense layer: computes `num_competitors` affine transforms of the
    input and emits their element-wise maximum, yielding a learned
    piecewise-linear activation."""

    def __init__(self, units, num_competitors, **kwargs):
        # c competing affine units per output; the max over them is emitted.
        self.c = num_competitors
        self.output_size = units
        super(Maxout, self).__init__(**kwargs)

    def build(self, input_shape):
        """Create the competing weight and bias tensors."""
        # Defined only for inputs with shape (batch_size, vec).
        assert len(input_shape) == 2
        self.competing_weights = self.add_weight(name='maxweight',
                                                 shape=(self.c, input_shape[1], self.output_size),
                                                 initializer='uniform',
                                                 trainable=True)
        self.competing_biases = self.add_weight(name='maxbias',
                                                shape=(self.c, self.output_size),
                                                initializer='uniform',
                                                trainable=True)
        super(Maxout, self).build(input_shape)

    def call(self, X):
        # X : [batch_size, input_size]
        # W : [c, input_size, output_size]
        # S, Sb : [batch_size, c, output_size]
        # --> [batch_size, output_size]
        S = K.dot(X, self.competing_weights)
        Sb = K.bias_add(S, self.competing_biases)
        return K.max(Sb, axis=1)

    def compute_output_shape(self, input_shape):
        # Bug fix: propagate the incoming batch dimension instead of
        # hard-coding None, matching the shape actually produced by call().
        return (input_shape[0], self.output_size)
| {"/MNIST-conv.py": ["/support/output.py", "/support/data.py", "/keras_extensions/train.py"], "/IMDB-summarizer.py": ["/support/output.py", "/support/data.py", "/keras_extensions/maxout.py", "/keras_extensions/train.py"], "/MNIST-maxout-ff.py": ["/support/output.py", "/support/data.py", "/keras_extensions/maxout.py", "/keras_extensions/train.py"]} |
54,905 | nighttime/DL-Projects | refs/heads/master | /MNIST-conv.py | from support.output import *
from support.data import *
from keras_extensions.train import *
import argparse
from keras.models import Sequential
from keras.layers import *
from keras import optimizers
from keras import backend as TF
import numpy as np
#################################################################
# Model Hyperparameters
#################################################################
# Training hyperparameters for the convolutional MNIST model.
epochs = 13
batch_size = 40
lr = 1e-4
hidden_layer_1_size = 600
hidden_layer_1_dropout = 0.3  # also reused as the dropout rate in the conv blocks
num_classes = 10

# Adam optimizer BEST RUN (98.68%)
# epochs = 2
# batch_size = 40
# lr = 0.0005
# dropout = 0.3
# hidden_layer_1_size = 600
#################################################################
# Program Architecture
#################################################################
def prep_data():
    """Load MNIST as 2-D images and append a trailing channel axis for Conv2D.

    Returns ((xs_train, ys_train), (xs_test, ys_test), sample_shape) where
    sample_shape is the per-image shape including the channel dimension.
    """
    (xs_train, ys_train), (xs_test, ys_test), _ = mnist_data(flatten=False)
    xs_train = xs_train[..., np.newaxis]
    xs_test = xs_test[..., np.newaxis]
    return (xs_train, ys_train), (xs_test, ys_test), xs_train.shape[1:]
def build_model(data_shape):
    """Build a small two-block CNN: (conv -> relu -> dropout -> pool) x2,
    flatten, dropout, softmax over the 10 digit classes."""
    model = Sequential()
    model.add(Conv2D(32, (6,6), padding='same', input_shape=(data_shape[0], data_shape[1], 1)))
    model.add(Activation('relu'))
    model.add(Dropout(hidden_layer_1_dropout))
    model.add(MaxPooling2D(padding='same'))
    model.add(Conv2D(64, (4,4), padding='same'))
    model.add(Activation('relu'))
    model.add(Dropout(hidden_layer_1_dropout))
    model.add(MaxPooling2D(padding='same'))
    # Two 2x2 poolings reduce 28x28 inputs to 7x7 with 64 channels.
    model.add(Reshape((7 * 7 * 64,)))
    # model.add(Dense(hidden_layer_1_size))
    # model.add(Activation('relu'))
    model.add(Dropout(hidden_layer_1_dropout))
    model.add(Dense(num_classes, activation='softmax'))
    # Sparse categorical crossentropy: integer labels in, softmax probs out.
    model.compile(loss='sparse_categorical_crossentropy', optimizer=optimizers.Adam(lr=lr), metrics=['accuracy'])
    return model
def train(model, train_data, test_data):
    """Fit the model, reporting test-set accuracy after every epoch."""
    print('\n=== Training ===')
    testChecker = TestSetCallback(test_data)
    # Bug fix: the callback was created but never handed to fit(), so the
    # per-epoch test evaluation silently never ran (the sibling
    # MNIST-maxout-ff.py script passes it correctly).
    model.fit(train_data[0], train_data[1], epochs=epochs, batch_size=batch_size,
              callbacks=[testChecker])
def test(model, test_data):
    """Evaluate on the held-out test set and print a coloured summary line."""
    print('\n=== Testing ===')
    # scores = [loss, accuracy]
    scores = model.evaluate(test_data[0], test_data[1])
    print(Colors.OKGREEN + Colors.BOLD + 'Test Accuracy: {:.2%}'.format(scores[1]) + Colors.ENDC)
    print(Colors.OKBLUE + 'epochs={} batch_size={} hidden={} opt=adam lr={}'.format(epochs, batch_size, hidden_layer_1_size, lr) + Colors.ENDC +'\n')
def get_args():
    """Parse command-line flags; -p prints a model visualization and exits."""
    arg_parser = argparse.ArgumentParser()
    arg_parser.add_argument('-p', action='store_true',
                            help='print model visualization')
    return arg_parser.parse_args()
def main():
    """Entry point: prep data, build the CNN, optionally visualize, train, test."""
    args = get_args()
    train_data, test_data, data_size = prep_data()
    model = build_model(data_size)
    if args.p:
        # Visualization-only mode: print the summary/diagram and quit.
        model.summary()
        viz_model(model)
        exit(0)
    train(model, train_data, test_data)
    test(model, test_data)


if __name__ == '__main__':
    main()
| {"/MNIST-conv.py": ["/support/output.py", "/support/data.py", "/keras_extensions/train.py"], "/IMDB-summarizer.py": ["/support/output.py", "/support/data.py", "/keras_extensions/maxout.py", "/keras_extensions/train.py"], "/MNIST-maxout-ff.py": ["/support/output.py", "/support/data.py", "/keras_extensions/maxout.py", "/keras_extensions/train.py"]} |
54,906 | nighttime/DL-Projects | refs/heads/master | /support/data.py | from keras.datasets import mnist
from keras.datasets import imdb
from keras.preprocessing.sequence import *
import numpy as np
import re
import random
# from collections import defaultdict
# from itertools import chain
import pdb
def mnist_data(flatten=False):
    """Retrieve MNIST and scale pixels to [0, 1].

    Returns ((xs_train, ys_train), (xs_test, ys_test), data_shape) where
    data_shape is (28, 28), or (784,) when `flatten` is True.
    """
    # xs_train, xs_test : [uint8] @shape(sample_size, 28, 28)
    # ys_train, ys_test : [uint8] @shape(sample_size,) @range[0,9]
    (xs_train, ys_train), (xs_test, ys_test) = mnist.load_data()
    # xs_train, xs_test : [float] @shape(sample_size, 28, 28) @range[0,1.0]
    xs_train = xs_train.astype(float) / 255.0
    xs_test = xs_test.astype(float) / 255.0
    data_shape = np.shape(xs_train)[1:]
    if flatten:
        # Collapse each 28x28 image into a 784-vector for dense-only models.
        flat_data_shape = data_shape[0] * data_shape[1]
        xs_train = np.reshape(xs_train, [-1, flat_data_shape])
        xs_test = np.reshape(xs_test, [-1, flat_data_shape])
        data_shape = (flat_data_shape,)
    return (xs_train, ys_train), (xs_test, ys_test), data_shape
def _cleaned_lines(review):
"""Tokenizes a given review. Separates sentences on .?! and tokenizes on word chars only"""
sents = re.split(r'\.|\?|\!+', review)
word_sents = [[w for w in re.findall(r'\w+', s)] for s in sents]
cleaned = [[w.lower() for w in s] for s in word_sents if s]
return word_sents
def _encode_reviews(sets):
    """Encode words as integer ids, in place, across all given data sets.

    Ids 0 and 1 are reserved for padding ('') and out-of-vocabulary ('UNK').
    Returns the word -> id index that was built.
    """
    d = {'':0, 'UNK':1}
    i = 2
    def enc(w):
        # Assign the next free id to unseen words; once the vocabulary grows
        # past 12000 entries, map further new words to UNK (id 1).
        nonlocal i, d
        if w in d:
            return d[w]
        else:
            if len(d) > 12000:
                return 1
            else:
                d[w] = i
                i += 1
                return i-1
    for data in sets:
        # Slice-assign so each caller-owned list is mutated in place.
        data[:] = [[[enc(w) for w in s] for s in r] for r in data]
    return d
def _gen_xs_and_ys(pos, neg):
"""Combines positive and negative sets and produces training labels while shuffling data"""
# pos, neg = list(pos), list(neg)
xs = pos + neg
ys = ([1] * len(pos)) + ([0] * len(neg))
xs_and_ys = list(zip(xs, ys))
random.shuffle(xs_and_ys)
xs, ys = zip(*xs_and_ys)
return list(xs), list(ys)
def imdb_data(data_folder, standardize_num_sen=0, standardize_sen_len=0):
    """Load and preprocess the IMDB review files in *data_folder*.

    Expects files train_pos/train_neg/test_pos/test_neg with one review per
    line. Reviews are tokenized, integer-encoded, and optionally padded or
    truncated to a fixed number of sentences / words per sentence (a value
    of 0 disables that standardization step).

    Returns (train, test, (num_sen, sen_len), word_index).
    """
    def _get_contents(fname):
        """Opens the given file and cleans+tokenizes each review"""
        with open(data_folder + '/' + fname) as file:
            return [_cleaned_lines(review) for review in file]

    train = _gen_xs_and_ys(_get_contents('train_pos'), _get_contents('train_neg'))
    test = _gen_xs_and_ys(_get_contents('test_pos'), _get_contents('test_neg'))
    print('-- loaded data')
    index_cache = _encode_reviews([train[0], test[0]])
    print('-- encoded data')

    def std_seq(seq, std, ext=[0]):
        """Pads or truncates a sequence according to the given threshold"""
        # The mutable default `ext=[0]` is safe here only because ext itself
        # is never mutated (extend copies its elements into seq).
        if len(seq) < std:
            seq.extend(ext * (std - len(seq)))
        elif len(seq) > std:
            del seq[std:]

    if standardize_num_sen:
        for data in [train, test]:
            for r in data[0]:
                # Pad with empty sentences up to the target sentence count.
                # NOTE(review): `ext * n` repeats references to the SAME inner
                # [0] list, so padding sentences within a review are aliased —
                # confirm downstream code never mutates them independently.
                std_seq(r, standardize_num_sen, ext=[[0]])
    if standardize_sen_len:
        for data in [train, test]:
            for r in data[0]:
                for s in r:
                    std_seq(s, standardize_sen_len)
    print('-- standardized data')
    return train, test, (standardize_num_sen, standardize_sen_len), index_cache
| {"/MNIST-conv.py": ["/support/output.py", "/support/data.py", "/keras_extensions/train.py"], "/IMDB-summarizer.py": ["/support/output.py", "/support/data.py", "/keras_extensions/maxout.py", "/keras_extensions/train.py"], "/MNIST-maxout-ff.py": ["/support/output.py", "/support/data.py", "/keras_extensions/maxout.py", "/keras_extensions/train.py"]} |
54,907 | nighttime/DL-Projects | refs/heads/master | /IMDB-summarizer.py | from support.output import *
from support.data import *
from keras_extensions.maxout import *
from keras_extensions.train import *
from keras.models import Sequential
from keras.layers import *
from keras import optimizers
from keras import backend as K
import numpy as np
import argparse
#################################################################
# Model Hyperparameters
#################################################################
# Training hyperparameters for the IMDB sentiment summarizer.
epochs = 1
batch_size = 20
lr = 0.05
num_classes = 2  # binary sentiment: positive / negative
word_embedding_size = 32
sent_embedding_size = 64
uttr_embedding_size = 512

# Adam optimizer BEST RUN (97.28%)
# epochs = 5
# batch_size = 40
# lr = 0.0005
# hidden_layer_1_size = 50
# number of competing weights = 2
#################################################################
# Program Architecture
#################################################################
def build_model(data_shape, word_index_cache):
    """Build a dense sentiment 'summarizer'.

    Words are embedded, each sentence is compressed to a fixed-size vector,
    the sentence vectors are compressed to one utterance vector, and a
    softmax head classifies positive/negative. (LSTM variants of the two
    compression stages are left commented for experimentation.)
    """
    model = Sequential()
    # data shape : (num_sentences, num_words_per_sentence)
    model.add(Embedding(len(word_index_cache), word_embedding_size, input_shape=data_shape))
    # data shape : (num_sentences, num_words_per_sentence, word_embedding_size)
    # model.add(TimeDistributed(LSTM(sent_embedding_size, activation='relu')))
    model.add(Reshape((data_shape[0], data_shape[1] * word_embedding_size)))
    model.add(Dense(sent_embedding_size, activation='relu'))
    # data shape : (num_sentences, sent_embedding_size)
    # model.add(LSTM(uttr_embedding_size, activation='relu'))
    model.add(Reshape((data_shape[0] * sent_embedding_size,)))
    model.add(Dense(uttr_embedding_size, activation='relu'))
    # data shape : (uttr_embedding_size)
    model.add(Dense(num_classes, activation='softmax'))
    # Use sparse categorical crossentropy as our loss function, since we have
    # output : [probability] @shape(num_classes,)
    # target : integer label (note: NOT one-hot vector)
    model.compile(loss='sparse_categorical_crossentropy',
                  # optimizer=optimizers.Adam(lr=lr),
                  optimizer=optimizers.SGD(lr=lr),
                  metrics=['accuracy'])
    return model
def train(model, train_data, test_data):
    """Fit the model on the training reviews (per-epoch test callback is
    currently disabled)."""
    print('\n=== Training ===')
    # testChecker = TestSetCallback(test_data)
    model.fit(train_data[0], train_data[1], epochs=epochs, batch_size=batch_size)#, callbacks=[testChecker])
def test(model, test_data):
    """Evaluate on the held-out test set and print a coloured summary line."""
    print('\n=== Testing ===')
    # scores = [loss, accuracy]
    scores = model.evaluate(test_data[0], test_data[1])
    print(Colors.OKGREEN + Colors.BOLD + 'Test Accuracy: {:.2%}'.format(scores[1]) + Colors.ENDC)
    print(Colors.OKBLUE + 'epochs={} batch_size={} opt=sgd lr={}'.format(epochs, batch_size, lr) + Colors.ENDC +'\n')
def get_args():
    """Parse command-line flags; -p prints a model visualization and exits."""
    cli = argparse.ArgumentParser()
    cli.add_argument('-p', action='store_true', help='print model visualization')
    return cli.parse_args()
def main():
    """Entry point: load data (skipped in visualization mode), build, train, test."""
    args = get_args()
    if args.p:
        # Dummy shape/vocab so the model can be summarized without the
        # (slow) data load.
        data_shape = (20, 25)
        index_cache = [0] * 12000
    else:
        train_data, test_data, data_shape, index_cache = imdb_data('support/imdb_data', standardize_num_sen=20, standardize_sen_len=25)
    model = build_model(data_shape, index_cache)
    if args.p:
        model.summary()
        # viz_model(model, name='sentiment_summarizer_net.png')
        exit(0)
    print('\a')  # terminal bell: signals that data prep has finished
    train(model, train_data, test_data)
    test(model, test_data)


if __name__ == '__main__':
    main()
| {"/MNIST-conv.py": ["/support/output.py", "/support/data.py", "/keras_extensions/train.py"], "/IMDB-summarizer.py": ["/support/output.py", "/support/data.py", "/keras_extensions/maxout.py", "/keras_extensions/train.py"], "/MNIST-maxout-ff.py": ["/support/output.py", "/support/data.py", "/keras_extensions/maxout.py", "/keras_extensions/train.py"]} |
54,908 | nighttime/DL-Projects | refs/heads/master | /MNIST-maxout-ff.py | from support.output import *
from support.data import *
from keras_extensions.maxout import *
from keras_extensions.train import *
import argparse
from keras.models import Sequential
from keras.layers import *
from keras import optimizers
from keras import backend as K
import numpy as np
#################################################################
# Model Hyperparameters
#################################################################
# Training hyperparameters for the feed-forward maxout MNIST model.
epochs = 5
batch_size = 40
lr = 0.0005
hidden_layer_1_size = 50
num_competitors = 2  # competing affine units per maxout output
num_classes = 10

# Adam optimizer BEST RUN (97.28%)
# epochs = 5
# batch_size = 40
# lr = 0.0005
# hidden_layer_1_size = 50
# number of competing weights = 2
#################################################################
# Program Architecture
#################################################################
def build_model(data_shape):
    """Build a single maxout hidden layer followed by a softmax classifier."""
    model = Sequential()
    model.add(Maxout(hidden_layer_1_size, num_competitors, input_shape=data_shape))
    # model.add(Maxout(hidden_layer_2_size, 2))
    # Add a final dense layer
    model.add(Dense(num_classes, activation='softmax'))
    # Use sparse categorical crossentropy as our loss function, since we have
    # output : [probability] @shape(10,)
    # target : integer label (note: NOT one-hot vector)
    model.compile(loss='sparse_categorical_crossentropy',
                  optimizer=optimizers.Adam(lr=lr),
                  metrics=['accuracy'])
    return model
def train(model, train_data, test_data):
    """Fit the model, evaluating on the test set after every epoch via
    TestSetCallback."""
    print('\n=== Training ===')
    testChecker = TestSetCallback(test_data)
    model.fit(train_data[0], train_data[1], epochs=epochs, batch_size=batch_size, callbacks=[testChecker])
def test(model, test_data):
    """Evaluate on the held-out test set and print a coloured summary line."""
    print('\n=== Testing ===')
    # scores = [loss, accuracy]
    scores = model.evaluate(test_data[0], test_data[1])
    print(Colors.OKGREEN + Colors.BOLD + 'Test Accuracy: {:.2%}'.format(scores[1]) + Colors.ENDC)
    print(Colors.OKBLUE + 'epochs={} batch_size={} hidden={} opt=adam lr={}'.format(epochs, batch_size, hidden_layer_1_size, lr) + Colors.ENDC +'\n')
def get_args():
    """Parse command-line flags; -p prints a model visualization and exits."""
    flag_parser = argparse.ArgumentParser()
    flag_parser.add_argument('-p',
                             action='store_true',
                             help='print model visualization')
    return flag_parser.parse_args()
def main():
    """Entry point: load flattened MNIST, build, optionally visualize, train, test."""
    args = get_args()
    train_data, test_data, data_shape = mnist_data(flatten=True)
    model = build_model(data_shape)
    if args.p:
        # Visualization-only mode: print the summary/diagram and quit.
        model.summary()
        viz_model(model, name='maxout_net.png')
        exit(0)
    train(model, train_data, test_data)
    test(model, test_data)


if __name__ == '__main__':
    main()
| {"/MNIST-conv.py": ["/support/output.py", "/support/data.py", "/keras_extensions/train.py"], "/IMDB-summarizer.py": ["/support/output.py", "/support/data.py", "/keras_extensions/maxout.py", "/keras_extensions/train.py"], "/MNIST-maxout-ff.py": ["/support/output.py", "/support/data.py", "/keras_extensions/maxout.py", "/keras_extensions/train.py"]} |
54,909 | nighttime/DL-Projects | refs/heads/master | /support/output.py | class Colors:
HEADER = '\033[95m'
OKBLUE = '\033[94m'
OKGREEN = '\033[92m'
WARNING = '\033[93m'
FAIL = '\033[91m'
ENDC = '\033[0m'
BOLD = '\033[1m'
UNDERLINE = '\033[4m'
def viz_model(model, name='viz.png'):
from keras.utils import plot_model
plot_model(model, show_shapes=True, to_file=name)
print('generated chart of the model') | {"/MNIST-conv.py": ["/support/output.py", "/support/data.py", "/keras_extensions/train.py"], "/IMDB-summarizer.py": ["/support/output.py", "/support/data.py", "/keras_extensions/maxout.py", "/keras_extensions/train.py"], "/MNIST-maxout-ff.py": ["/support/output.py", "/support/data.py", "/keras_extensions/maxout.py", "/keras_extensions/train.py"]} |
54,910 | nighttime/DL-Projects | refs/heads/master | /keras_extensions/train.py | from keras import backend as K
from keras.callbacks import *
# from ..support.output import *
class TestSetCallback(Callback):
    """Keras callback that evaluates the model on a held-out test set at the
    end of every epoch and prints the loss/accuracy."""

    def __init__(self, test_data):
        # test_data is an (inputs, labels) pair compatible with model.evaluate.
        self.test_data = test_data
        super().__init__()

    def on_epoch_end(self, epoch, logs=None):
        """Print test loss/accuracy after each epoch.

        Bug fix: the original signature used a mutable default (`logs={}`);
        Keras passes `logs` itself and its callback convention is
        `logs=None`.
        """
        x, y = self.test_data
        loss, acc = self.model.evaluate(x, y, verbose=0)
        print(('\nTesting loss: {}, acc: {}\n').format(loss, acc))
| {"/MNIST-conv.py": ["/support/output.py", "/support/data.py", "/keras_extensions/train.py"], "/IMDB-summarizer.py": ["/support/output.py", "/support/data.py", "/keras_extensions/maxout.py", "/keras_extensions/train.py"], "/MNIST-maxout-ff.py": ["/support/output.py", "/support/data.py", "/keras_extensions/maxout.py", "/keras_extensions/train.py"]} |
54,912 | germmand/ProgLog-App | refs/heads/master | /app/mod_auth/controllers.py | from flask import (
Blueprint,
request,
render_template,
flash,
g,
session,
redirect,
url_for
)
from werkzeug import (
generate_password_hash,
check_password_hash
)
from app.mod_auth.forms import (
LoginForm,
SignupForm
)
from app.handlers import db
from app.mod_auth.models import User
from sqlalchemy.exc import IntegrityError
# Blueprint grouping all authentication routes under the /auth URL prefix;
# endpoints are addressed as 'auth.<view>' (e.g. url_for('auth.signin')).
mod_auth = Blueprint('auth',
                     __name__,
                     url_prefix='/auth')
@mod_auth.route('/signin/', methods=['GET', 'POST'])
def signin():
    """Render the sign-in page and authenticate submitted credentials.

    On a valid POST, the user is looked up by email and the password is
    checked against its stored hash.  Success stores the user id in the
    session and redirects to the dashboard; failure flashes an error and
    re-renders the form.
    """
    login_form = LoginForm(request.form)
    if not login_form.validate_on_submit():
        # GET request or a form that failed validation.
        return render_template('auth/signin.html', form=login_form)

    account = User.query.filter_by(email=login_form.email.data).first()
    credentials_ok = (
        account is not None
        and check_password_hash(account.password, login_form.password.data)
    )
    if credentials_ok:
        session['user_id'] = account.id
        flash('Bienvenido, %s' % account.name)
        return redirect(url_for('tests.dashboard'))

    flash('Email o contraseña incorrectos.', 'error')
    return render_template('auth/signin.html', form=login_form)
@mod_auth.route('/signup/', methods=['GET', 'POST'])
def signup():
    """Render the registration page and create a new account on POST.

    The password is stored as a werkzeug hash.  On success the browser
    is redirected to the sign-in page; on a database error the session
    is rolled back and the error text is flashed.
    """
    signup_form = SignupForm(request.form)
    if not signup_form.validate_on_submit():
        return render_template('auth/signup.html', form=signup_form)

    new_user = User(email=signup_form.email.data,
                    name=signup_form.name.data,
                    password=generate_password_hash(signup_form.password.data))
    try:
        db.session.add(new_user)
        db.session.commit()
    except IntegrityError as ie:
        # Most likely a constraint violation on the unique email column.
        db.session.rollback()
        flash(str(ie))
    except Exception as e:
        db.session.rollback()
        flash(str(e))
    else:
        # Since we are redirecting to the signin page
        # we don't need to save the session.
        # session['user_id'] = new_user.id
        flash('Usuario creado con éxito. :)')
        return redirect(url_for('auth.signin'))
    return render_template('auth/signup.html', form=signup_form)
| {"/app/mod_auth/controllers.py": ["/app/mod_auth/forms.py", "/app/handlers/__init__.py", "/app/mod_auth/models.py"], "/app/__init__.py": ["/app/handlers/__init__.py", "/app/mod_auth/controllers.py", "/app/mod_tests/controllers.py"], "/app/mod_tests/controllers.py": ["/app/mod_tests/models.py", "/app/mod_auth/models.py", "/app/handlers/__init__.py", "/app/mod_tests/forms.py", "/app/mod_tests/helpers.py"], "/app/mod_auth/models.py": ["/app/handlers/__init__.py"], "/app/mod_tests/models.py": ["/app/handlers/__init__.py"]} |
54,913 | germmand/ProgLog-App | refs/heads/master | /app/mod_tests/forms.py | from flask_wtf import FlaskForm
from wtforms import StringField, RadioField
from wtforms.validators import InputRequired
class ResponseForm(FlaskForm):
    """Form posted when answering a question in a subject test."""

    # Id of the child node chosen as the answer.  `choices` are filled in
    # per-request by the view (form.answered_node.choices = ...), which is
    # why coerce=int is needed to compare against node ids.
    answered_node = RadioField('Respuesta', [
        InputRequired(message='Debe seleccionar una respuesta.')],
        coerce=int)
    # Name of the subject being tested; round-tripped through the POST.
    subject_name = StringField('Tema', [
        InputRequired()])
    # Id of the question node the user is currently answering.
    current_node = StringField('Pregunta', [
        InputRequired()])
| {"/app/mod_auth/controllers.py": ["/app/mod_auth/forms.py", "/app/handlers/__init__.py", "/app/mod_auth/models.py"], "/app/__init__.py": ["/app/handlers/__init__.py", "/app/mod_auth/controllers.py", "/app/mod_tests/controllers.py"], "/app/mod_tests/controllers.py": ["/app/mod_tests/models.py", "/app/mod_auth/models.py", "/app/handlers/__init__.py", "/app/mod_tests/forms.py", "/app/mod_tests/helpers.py"], "/app/mod_auth/models.py": ["/app/handlers/__init__.py"], "/app/mod_tests/models.py": ["/app/handlers/__init__.py"]} |
54,914 | germmand/ProgLog-App | refs/heads/master | /app/__init__.py | import os
from flask import Flask, render_template, send_from_directory, redirect, url_for
from flask_sqlalchemy import SQLAlchemy
from flask_migrate import Migrate
from app.handlers import db
# Maps FLASK_CONFIGURATION environment values to the dotted path of the
# settings class loaded by create_app() via app.config.from_object().
config = {
    "development": "app.settings.DevelopmentConfig",
    # Fixed typo: was "app.setting.TestingConfig" (missing the trailing
    # "s"), which made the testing configuration unimportable.
    "testing": "app.settings.TestingConfig",
    "production": "app.settings.ProductionConfig",
    "default": "app.settings.DevelopmentConfig"
}
def create_app():
    """Application factory: build and configure the Flask app.

    Picks a settings class via the FLASK_CONFIGURATION environment
    variable, wires up SQLAlchemy and Flask-Migrate, registers the 404
    handler, favicon and root routes, and mounts the auth/tests
    blueprints.
    """
    app = Flask(__name__)

    config_name = os.getenv('FLASK_CONFIGURATION', 'default')
    app.config.from_object(config[config_name])

    db.init_app(app)
    _migrate = Migrate(app, db)  # kept for the flask-migrate CLI integration

    @app.errorhandler(404)
    def not_found(error):
        return render_template('404.html'), 404

    @app.route('/favicon.ico')
    def favicon():
        return send_from_directory(os.path.join(app.root_path, 'static'),
                                   'favicon.ico', mimetype='image/vnd.microsoft.icon')

    @app.route('/', methods=['GET'])
    def root():
        # The landing page is the sign-in screen.
        return redirect(url_for('auth.signin'))

    # NOTE(review): blueprints are imported inside the factory (as in the
    # original layout), presumably to avoid import cycles — confirm.
    from app.mod_auth.controllers import mod_auth as auth_module
    from app.mod_tests.controllers import mod_tests as tests_module
    app.register_blueprint(auth_module)
    app.register_blueprint(tests_module)

    return app
| {"/app/mod_auth/controllers.py": ["/app/mod_auth/forms.py", "/app/handlers/__init__.py", "/app/mod_auth/models.py"], "/app/__init__.py": ["/app/handlers/__init__.py", "/app/mod_auth/controllers.py", "/app/mod_tests/controllers.py"], "/app/mod_tests/controllers.py": ["/app/mod_tests/models.py", "/app/mod_auth/models.py", "/app/handlers/__init__.py", "/app/mod_tests/forms.py", "/app/mod_tests/helpers.py"], "/app/mod_auth/models.py": ["/app/handlers/__init__.py"], "/app/mod_tests/models.py": ["/app/handlers/__init__.py"]} |
54,915 | germmand/ProgLog-App | refs/heads/master | /app/mod_tests/helpers.py | import os
def get_node_file_path(subject_name, node_id):
    """Return the absolute path of the C++ snippet for one test node.

    Snippets live next to this module under cpp/<subject_name>/<node_id>.cpp.
    """
    base_dir = os.path.abspath(os.path.dirname(__file__))
    return "{}/cpp/{}/{}.cpp".format(base_dir, subject_name, node_id)
| {"/app/mod_auth/controllers.py": ["/app/mod_auth/forms.py", "/app/handlers/__init__.py", "/app/mod_auth/models.py"], "/app/__init__.py": ["/app/handlers/__init__.py", "/app/mod_auth/controllers.py", "/app/mod_tests/controllers.py"], "/app/mod_tests/controllers.py": ["/app/mod_tests/models.py", "/app/mod_auth/models.py", "/app/handlers/__init__.py", "/app/mod_tests/forms.py", "/app/mod_tests/helpers.py"], "/app/mod_auth/models.py": ["/app/handlers/__init__.py"], "/app/mod_tests/models.py": ["/app/handlers/__init__.py"]} |
54,916 | germmand/ProgLog-App | refs/heads/master | /app/handlers/__init__.py | from app.handlers.db_handler import db
| {"/app/mod_auth/controllers.py": ["/app/mod_auth/forms.py", "/app/handlers/__init__.py", "/app/mod_auth/models.py"], "/app/__init__.py": ["/app/handlers/__init__.py", "/app/mod_auth/controllers.py", "/app/mod_tests/controllers.py"], "/app/mod_tests/controllers.py": ["/app/mod_tests/models.py", "/app/mod_auth/models.py", "/app/handlers/__init__.py", "/app/mod_tests/forms.py", "/app/mod_tests/helpers.py"], "/app/mod_auth/models.py": ["/app/handlers/__init__.py"], "/app/mod_tests/models.py": ["/app/handlers/__init__.py"]} |
54,917 | germmand/ProgLog-App | refs/heads/master | /migrations/versions/582315d9543d_.py | """empty message
Revision ID: 582315d9543d
Revises:
Create Date: 2018-11-13 21:32:26.287985
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '582315d9543d'
down_revision = None
branch_labels = None
depends_on = None
def upgrade():
    """Create the initial schema (subjects, types, users, nodes and the
    user/subject join table) and seed it with a small sample data set.
    """
    # ### commands auto generated by Alembic - please adjust! ###
    # Table objects are kept only where bulk_insert needs them below.
    subjects = op.create_table('subjects',
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('date_created', sa.DateTime(), nullable=True),
        sa.Column('date_modified', sa.DateTime(), nullable=True),
        sa.Column('name', sa.String(length=120), nullable=False),
        sa.Column('prerequisite', sa.Integer(), nullable=False),
        sa.Column('minimum_approved', sa.Integer(), nullable=False),
        sa.PrimaryKeyConstraint('id')
    )
    types = op.create_table('types',
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('date_created', sa.DateTime(), nullable=True),
        sa.Column('date_modified', sa.DateTime(), nullable=True),
        sa.Column('name', sa.String(length=100), nullable=False),
        sa.PrimaryKeyConstraint('id')
    )
    # The next two create_table return values were bound to unused locals
    # in the original; the dead assignments have been dropped.
    op.create_table('users',
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('date_created', sa.DateTime(), nullable=True),
        sa.Column('date_modified', sa.DateTime(), nullable=True),
        sa.Column('name', sa.String(length=150), nullable=False),
        sa.Column('email', sa.String(length=250), nullable=False),
        sa.Column('password', sa.String(length=250), nullable=False),
        sa.PrimaryKeyConstraint('id'),
        sa.UniqueConstraint('email')
    )
    nodes = op.create_table('nodes',
        sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
        sa.Column('date_created', sa.DateTime(), nullable=True),
        sa.Column('date_modified', sa.DateTime(), nullable=True),
        sa.Column('answer_parent', sa.String(length=250), nullable=False),
        sa.Column('parent_node', sa.Integer(), nullable=False),
        sa.Column('score', sa.Integer(), nullable=False),
        sa.Column('type_id', sa.Integer(), nullable=False),
        sa.Column('subject_id', sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(['subject_id'], ['subjects.id'], ),
        sa.ForeignKeyConstraint(['type_id'], ['types.id'], ),
        sa.PrimaryKeyConstraint('id')
    )
    op.create_table('user_subjects_approved',
        sa.Column('user_id', sa.Integer(), nullable=False),
        sa.Column('subject_id', sa.Integer(), nullable=False),
        sa.ForeignKeyConstraint(['subject_id'], ['subjects.id'], ),
        sa.ForeignKeyConstraint(['user_id'], ['users.id'], ),
        sa.PrimaryKeyConstraint('user_id', 'subject_id')
    )
    # Seeding database
    op.bulk_insert(subjects, [
        {'name': 'Variables', 'minimum_approved': 10, 'prerequisite': 0},
        {'name': 'Estructuras de Control', 'minimum_approved': 15, 'prerequisite': 1}
    ])
    op.bulk_insert(types, [
        {'name': 'Correcta'},
        {'name': 'Incorrecta'},
    ])
    op.bulk_insert(nodes, [
        {'subject_id': 1, 'type_id': 1, 'score': 10, 'answer_parent': 'None', 'parent_node': 0},
        {'subject_id': 1, 'type_id': 1, 'score': -5, 'answer_parent': 'c = 15', 'parent_node': 1},
        {'subject_id': 1, 'type_id': 2, 'score': 10, 'answer_parent': 'c = \'15\'', 'parent_node': 1},
        {'subject_id': 1, 'type_id': 2, 'score': -5, 'answer_parent': 'Error', 'parent_node': 1},
    ])
    # ### end Alembic commands ###
def downgrade():
    """Drop every table created by upgrade(), dependents first."""
    # ### commands auto generated by Alembic - please adjust! ###
    # Order matters: child tables with foreign keys go before their parents.
    for table_name in ('user_subjects_approved', 'nodes', 'users', 'types', 'subjects'):
        op.drop_table(table_name)
    # ### end Alembic commands ###
| {"/app/mod_auth/controllers.py": ["/app/mod_auth/forms.py", "/app/handlers/__init__.py", "/app/mod_auth/models.py"], "/app/__init__.py": ["/app/handlers/__init__.py", "/app/mod_auth/controllers.py", "/app/mod_tests/controllers.py"], "/app/mod_tests/controllers.py": ["/app/mod_tests/models.py", "/app/mod_auth/models.py", "/app/handlers/__init__.py", "/app/mod_tests/forms.py", "/app/mod_tests/helpers.py"], "/app/mod_auth/models.py": ["/app/handlers/__init__.py"], "/app/mod_tests/models.py": ["/app/handlers/__init__.py"]} |
54,918 | germmand/ProgLog-App | refs/heads/master | /app/mod_tests/controllers.py | from flask import (
Blueprint,
request,
render_template,
flash,
g,
session,
redirect,
url_for
)
from app.mod_tests.models import (
Type,
Subject,
Node
)
from app.mod_auth.models import User
from app.handlers import db
from app.mod_tests.forms import ResponseForm
from app.mod_tests.helpers import get_node_file_path
# Blueprint grouping the quiz/test routes under the /tests URL prefix;
# endpoints are addressed as 'tests.<view>' (e.g. url_for('tests.dashboard')).
mod_tests = Blueprint('tests',
                      __name__,
                      url_prefix='/tests')
@mod_tests.route('/dashboard', methods=['GET'])
def dashboard():
    """Render the test dashboard listing every subject, ordered by id."""
    all_subjects = Subject.query.order_by(Subject.id).all()
    return render_template('tests/test_dashboard.html', subjects=all_subjects)
@mod_tests.route('/<string:subject_name>', methods=['GET'], defaults={'node_id': None})
@mod_tests.route('/<string:subject_name>/<int:node_id>', methods=['GET'])
def subject_test(subject_name, node_id):
    """Show the question at *node_id* within the given subject's test.

    With no node_id, starts at the subject's root question (the node
    whose parent_node is 0).  Renders a 404 page when the subject, node
    or code snippet is missing; redirects to the dashboard when the node
    is a leaf (no child answers left).
    """
    # '+' in the URL stands for a space in the subject name.
    subject_name = subject_name.replace('+', ' ')
    subject = Subject.query.filter_by(name=subject_name).first()
    if subject is None:
        return render_template('404.html')
    try:
        if node_id is None:
            node = next(x for x in subject.nodes if x.parent_node == 0)
        else:
            node = next(x for x in subject.nodes if x.id == node_id)
    except StopIteration:
        return render_template('404.html')

    child_nodes = Node.query.filter_by(parent_node=node.id).all()
    if not child_nodes:
        # Leaf node: no further answers, back to the dashboard.
        return redirect(url_for('tests.dashboard'))

    try:
        # FIX: the original did open(...).read() and leaked the file
        # handle; the context manager guarantees it is closed.
        with open(get_node_file_path(subject_name, node.id), 'r') as code_file:
            node_code = code_file.read()
    except FileNotFoundError:
        return render_template('404.html')

    tree_data = {
        "code": node_code,
        "subject": subject.name,
        "children": child_nodes,
        "current_node": node.id
    }
    form = ResponseForm()
    # Radio choices must be rebuilt per request for WTForms validation.
    form.answered_node.choices = [(child.id, child.answer_parent) for child in child_nodes]
    return render_template('tests/test.html',
                           branch=tree_data,
                           form=form)
@mod_tests.route('/validate-response', methods=['POST'])
def validate_route():
    """Validate a submitted answer and advance to the chosen node.

    Rebuilds the radio-field choices (they are assigned per-request) so
    WTForms can validate the selection, then redirects either to the
    answered child node or back to the subject's root question.
    """
    form = ResponseForm(request.form)
    subject = Subject.query.filter_by(name=form.subject_name.data).first()
    current = [x for x in subject.nodes if x.id == int(form.current_node.data)][0]
    children = Node.query.filter_by(parent_node=current.id).all()
    form.answered_node.choices = [(child.id, child.answer_parent) for child in children]

    # A failed validation restarts the subject from its root question.
    target = form.answered_node.data if form.validate_on_submit() else None
    return redirect(url_for('tests.subject_test',
                            subject_name=form.subject_name.data,
                            node_id=target))
| {"/app/mod_auth/controllers.py": ["/app/mod_auth/forms.py", "/app/handlers/__init__.py", "/app/mod_auth/models.py"], "/app/__init__.py": ["/app/handlers/__init__.py", "/app/mod_auth/controllers.py", "/app/mod_tests/controllers.py"], "/app/mod_tests/controllers.py": ["/app/mod_tests/models.py", "/app/mod_auth/models.py", "/app/handlers/__init__.py", "/app/mod_tests/forms.py", "/app/mod_tests/helpers.py"], "/app/mod_auth/models.py": ["/app/handlers/__init__.py"], "/app/mod_tests/models.py": ["/app/handlers/__init__.py"]} |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.