hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
ae0e1342adc959978ce2df9edec93bd093cab6fe
| 4,704
|
py
|
Python
|
booktracker.py
|
stonewell/booktracker
|
8fc324f10b4bc9d8a0a22a40871282bbef00e5ad
|
[
"MIT"
] | null | null | null |
booktracker.py
|
stonewell/booktracker
|
8fc324f10b4bc9d8a0a22a40871282bbef00e5ad
|
[
"MIT"
] | null | null | null |
booktracker.py
|
stonewell/booktracker
|
8fc324f10b4bc9d8a0a22a40871282bbef00e5ad
|
[
"MIT"
] | null | null | null |
import argparse
import sys
import logging
import json
def args_parser():
    """Build the booktracker command line interface.

    Note: neither -f nor -l is individually required; the __main__ block
    checks that at least one of them was supplied.
    """
    p = argparse.ArgumentParser(
        prog='booktracker',
        description='book update tracker in python')
    # Input sources: a url list file and/or a single url.
    p.add_argument('-f', '--urls_file', required=False, type=argparse.FileType('r'),
                   help='a file contains book urls, could be a text file list urls or complex json file for url and attributes')
    p.add_argument('-l', '--url', required=False, type=str, help='a book url to track')
    # Output destination and generation options.
    p.add_argument('-o', '--output', required=True, type=str, help='directory to store book content')
    p.add_argument('--epub', required=False, action='store_true', help='generate epub of book')
    # Network tuning and metadata overrides.
    p.add_argument('--timeout', required=False, default=13, type=int,
                   help='network request timeout value, default=13s')
    p.add_argument('--author', required=False, default='', type=str, help='author of the book')
    p.add_argument('--title', required=False, default='', type=str, help='title of the book')
    p.add_argument('--header', required=False, action='append', dest='headers', type=str,
                   help='http request header')
    p.add_argument('-v', '--verbose', required=False, default=0, action='count',
                   help='print debug information')
    return p
def parse_urls_file_txt(urls_file):
    """Parse a plain-text url list into a set of book tuples.

    Each line is ``url|author|title|header1,header2`` with everything after
    the url optional.  Returns a set of (url, author, title, headers-tuple).
    """
    result = set()
    for raw_line in urls_file:
        cleaned = raw_line.strip().replace('\n', '').replace('\r', '')
        fields = cleaned.split('|')
        # Everything past the third '|' is a comma-separated header list.
        extra_headers = '|'.join(fields[3:]).split(',') if len(fields) > 3 else []
        author = fields[1] if len(fields) > 1 else ''
        title = fields[2] if len(fields) > 2 else ''
        result.add((fields[0], author, title, tuple(extra_headers)))
    return result
def parse_urls_file_json(urls_file):
    """Parse a json url file into a set of book tuples.

    Expects a list of objects with a mandatory 'url' and optional
    'author', 'title' and 'headers' keys.  Returns a set of
    (url, author, title, headers-tuple).
    """
    def clean(text):
        # Strip surrounding whitespace and embedded newlines.
        return text.strip().replace('\n', '').replace('\r', '')

    result = set()
    for book in json.load(urls_file):
        url = clean(book['url'])
        author = clean(book['author']) if 'author' in book else ''
        title = clean(book['title']) if 'title' in book else ''
        headers = book.get('headers', [])
        logging.debug('url:%s, author:%s, title:%s, headers:%s',
                      url, author, title, headers)
        result.add((url, author, title, tuple(headers)))
    return result
if __name__ == '__main__':
    parser = args_parser().parse_args()
    if parser.verbose >= 1:
        logging.getLogger('').setLevel(logging.DEBUG)
    # At least one input source (-f or -l) is required.
    if parser.urls_file is None and parser.url is None:
        args_parser().print_usage()
        sys.exit()
    urls = set()
    if parser.urls_file:
        # Prefer the richer json format; fall back to the plain text list.
        try:
            urls = parse_urls_file_json(parser.urls_file)
        except Exception:
            # Narrowed from a bare except so Ctrl-C / SystemExit still work.
            logging.exception('urls file:%s is not json try text file', parser.urls_file)
            parser.urls_file.seek(0)
            urls = parse_urls_file_txt(parser.urls_file)
    if parser.url:
        urls.add((parser.url,
                  parser.author,
                  parser.title,
                  tuple(parser.headers) if parser.headers else tuple([])))
    for url, author, title, headers in sorted(urls):
        try:
            # Bug fix: tracker starts as None so an unrecognised site hits
            # the explicit ValueError below instead of a NameError.
            tracker = None
            # Bug fix: substring tests use `in` instead of find(...) > 0,
            # which silently missed a match at position 0.
            # Imports stay lazy so only the needed site module is loaded.
            if 'piaotian' in url or 'ptwxz' in url:
                from piaotian.book_tracker import Tracker as PiaoTianTracker
                tracker = PiaoTianTracker(url, author, title, parser.output, parser.timeout)
            elif '23us' in url:
                from dingdian.book_tracker import Tracker as DingDianTracker
                tracker = DingDianTracker(url, author, title, parser.output, parser.timeout)
            elif 'youdubook' in url:
                from youdu.book_tracker import Tracker as YouduTracker
                tracker = YouduTracker(url, author, title, parser.output, parser.timeout)
            elif 'shuku' in url:
                from shuku.book_tracker import Tracker as ShuKuTracker
                tracker = ShuKuTracker(url, author, title, parser.output, parser.timeout)
            elif 'uukanshu' in url:
                from uukanshu.book_tracker import Tracker as UUKanShuTracker
                tracker = UUKanShuTracker(url, author, title, parser.output, parser.timeout)
            if not tracker:
                raise ValueError("tracker not found")
            tracker.headers = list(headers)
            update_count = tracker.refresh()
            print(tracker.title, 'update count:', update_count)
            if parser.epub:
                tracker.gen_epub()
        except Exception:
            # Keep going: one failed book must not abort the whole run.
            logging.exception("update failed:{}".format(url))
| 40.904348
| 199
| 0.60119
| 572
| 4,704
| 4.84965
| 0.236014
| 0.046143
| 0.055155
| 0.043259
| 0.232516
| 0.153208
| 0.119322
| 0.105263
| 0.105263
| 0.072098
| 0
| 0.006347
| 0.26318
| 4,704
| 114
| 200
| 41.263158
| 0.793999
| 0
| 0
| 0.098901
| 0
| 0.010989
| 0.141794
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.032967
| false
| 0
| 0.098901
| 0
| 0.164835
| 0.032967
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
ae0e3edf6f720a4fb2dd231e188dd1e1fa7fe663
| 667
|
py
|
Python
|
06-python-functions-1.py
|
reysmerwvr/python-playgrounds
|
1e039639d96044986ba5cc894a210180cc2b08e0
|
[
"MIT"
] | null | null | null |
06-python-functions-1.py
|
reysmerwvr/python-playgrounds
|
1e039639d96044986ba5cc894a210180cc2b08e0
|
[
"MIT"
] | null | null | null |
06-python-functions-1.py
|
reysmerwvr/python-playgrounds
|
1e039639d96044986ba5cc894a210180cc2b08e0
|
[
"MIT"
] | null | null | null |
import math
def rectangle_area(b=None, h=None):
    """Return the area of a rectangle with base *b* and height *h*.

    Prints an error message and returns None when either dimension is
    missing.
    """
    # Bug fix: the original tested ``b is None`` twice, so a missing *h*
    # slipped through and crashed on ``b * h``.
    if b is None or h is None:
        print("Error wrong parameters")
        return
    return b * h
def circle_area(radium):
    """Return the area of a circle whose radius is *radium*."""
    squared = radium ** 2
    return squared * math.pi


print(circle_area(5))
def intermediate_number(a, b):
    """Return the midpoint (arithmetic mean) of two numbers."""
    total = a + b
    return total / 2


print(intermediate_number(-24, 24))
def separate(list_to_separate):
    """Split a list of integers into (evens, odds), each sorted ascending.

    Note: sorts *list_to_separate* in place, like the original.
    """
    list_to_separate.sort()
    evens_list = [n for n in list_to_separate if n % 2 == 0]
    odds_list = [n for n in list_to_separate if n % 2 != 0]
    return evens_list, odds_list


evens, odds = separate([6, 5, 2, 1, 7])
print(evens)
print(odds)
| 16.675
| 39
| 0.610195
| 101
| 667
| 3.861386
| 0.39604
| 0.046154
| 0.107692
| 0.112821
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.028866
| 0.272864
| 667
| 39
| 40
| 17.102564
| 0.775258
| 0
| 0
| 0
| 0
| 0
| 0.032984
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.16
| false
| 0
| 0.04
| 0.08
| 0.36
| 0.2
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
ae0ef85218f1bd293decfce58f18a3dbb6559d3c
| 3,647
|
py
|
Python
|
cloudfront/resource.py
|
iPlantCollaborativeOpenSource/iPlant-Atmosphere
|
d67b953561e813dd30ffa52c8440af7cc2d990cf
|
[
"Unlicense"
] | 1
|
2017-10-05T08:03:37.000Z
|
2017-10-05T08:03:37.000Z
|
cloudfront/resource.py
|
iPlantCollaborativeOpenSource/iPlant-Atmosphere
|
d67b953561e813dd30ffa52c8440af7cc2d990cf
|
[
"Unlicense"
] | null | null | null |
cloudfront/resource.py
|
iPlantCollaborativeOpenSource/iPlant-Atmosphere
|
d67b953561e813dd30ffa52c8440af7cc2d990cf
|
[
"Unlicense"
] | null | null | null |
#!/usr/bin/env python
#
# Copyright (c) 2010, iPlant Collaborative, University of Arizona, Cold Spring Harbor Laboratories, University of Texas at Austin
# This software is licensed under the CC-GNU GPL version 2.0 or later.
# License: http://creativecommons.org/licenses/GPL/2.0/
#
# Author: Seung-jin Kim
# Contact: seungjin@email.arizona.edu
# Twitter: @seungjin
#
import logging
import httplib
import urllib
from urlparse import urlparse
import string
import datetime
from django.http import HttpResponse
from django.template import Context
from django.template.loader import get_template
from django.http import HttpResponse, Http404
from django.contrib.auth.models import User
from django.http import HttpResponseRedirect
from django.contrib.auth import logout
from django.http import HttpResponseNotFound
from django.http import HttpResponseForbidden
from django.utils import simplejson
from atmosphere.cloudfront.models import *
def getToken(request, username, password):
    """Authenticate *username*/*password* against the configured auth server.

    On success, records the issued token in the Tokens model and stashes
    username/token/api_server in the Django session.  Always returns True;
    failures surface as exceptions or None token/url values.
    """
    # The auth endpoint comes from the Configs table; order_by('value')
    # just makes the pick deterministic when several rows exist.
    auth_server_url_obj = Configs.objects.filter(key="auth_server_url").order_by('value')[0]
    auth_server_url = auth_server_url_obj.value
    o = urlparse(auth_server_url)
    # NOTE(review): assumes the netloc is "host:port" — a URL without an
    # explicit port raises IndexError here; confirm the config format.
    auth_server_url = string.split(o.netloc,":")[0]
    auth_server_port = int(string.split(o.netloc,":")[1])
    auth_server_path = o.path
    method = "GET"
    params = None
    # Credentials travel in X-Auth-* headers, not the body.
    headers = {
        "Content-type" : "application/x-www-form-urlencoded",
        "Accept" : "text/plain",
        "X-Auth-User" : username,
        "X-Auth-Key" : password,
        "User-Agent" : "Atmo/CloudFront"
    }
    conn = httplib.HTTPSConnection(auth_server_url,auth_server_port)
    conn.request(method,auth_server_path,params,headers)
    r1 = conn.getresponse()
    headers = r1.getheaders()  # rebinds 'headers' to the *response* headers
    conn.close()
    api_service_url = None
    api_service_token = None
    # The auth server answers with the management URL and token as headers.
    for header in headers:
        if header[0] == "x-server-management-url" :
            api_service_url = header[1]
        if header[0] == "x-auth-token" :
            api_service_token = header[1]
    # Persist the issued token (values may be None if headers were absent).
    issued_token = Tokens(username = username, x_auth_token = api_service_token, x_server_management_url = api_service_url, issued_at = datetime.datetime.now())
    issued_token.save()
    request.session['username'] = username
    request.session['token'] = api_service_token
    request.session['api_server'] = api_service_url
    return True
def request(request,method):
    """Proxy an authenticated API call to the backing resource server.

    *method* is the resource path suffix (e.g. "resources/v1/getImageList");
    the caller's GET/POST parameters are forwarded and the raw response body
    is returned.  Unauthenticated sessions get an HTTP-forbidden response.
    """
    # emulating
    # ./resource_request seungjin e1463572-517a-41c7-a43c-5a3eb884562e GET http://bond.iplantcollaborative.org:8000/resources/v1/getImageList
    if request.session.has_key('username') == False:
        return HttpResponseForbidden('HTTP/1.0 401 UNAUTHORIZED')
    username = request.session['username']
    token = request.session['token']
    method_type = str(request.META['REQUEST_METHOD'])
    resource_url = request.session['api_server'] + "/" + method
    o = urlparse(resource_url)
    protocol = o.scheme  # NOTE(review): parsed but never used below
    # NOTE(review): assumes api_server includes an explicit ":port".
    url = string.split(o.netloc,":")[0]
    port = int(string.split(o.netloc,":")[1])
    path = o.path + "/"
    params = None
    # Flatten the incoming query/form data into a urlencoded-ish string.
    # NOTE(review): values are not percent-encoded — confirm that's safe
    # for the upstream API.
    if str(method_type).upper() == "GET" :
        params = '&'.join( [ u"%s=%s"%(f,v) for f,v in request.GET.iteritems() if f])
    elif str(method_type).upper() == "POST":
        params = '&'.join( [ u"%s=%s"%(f,v) for f,v in request.POST.iteritems() if f])
    headers = {
        "Content-type" : "application/x-www-form-urlencoded",
        "Accept" : "text/plain",
        "X-Auth-User" : username,
        "X-Auth-Token" : token,
        "X-Api-Server" : request.session['api_server'] + "/",
        "X-Api-Version" : "v1",
        "User-Agent" : "Atmo/CloudFront"
    }
    logging.debug(params)
    conn = httplib.HTTPSConnection(url,port)
    # NOTE(review): the upstream request is always sent as POST, even when
    # the client used GET (params go in the body either way) — looks
    # deliberate, but confirm the API accepts it.
    conn.request("POST",path,params,headers)
    r1 = conn.getresponse()
    return r1.read()
| 32.855856
| 158
| 0.716753
| 497
| 3,647
| 5.136821
| 0.331992
| 0.043087
| 0.035644
| 0.03917
| 0.251469
| 0.217391
| 0.134743
| 0.088523
| 0.088523
| 0.088523
| 0
| 0.018662
| 0.147793
| 3,647
| 110
| 159
| 33.154545
| 0.802767
| 0.135454
| 0
| 0.170732
| 0
| 0
| 0.138173
| 0.028335
| 0
| 0
| 0
| 0
| 0
| 1
| 0.02439
| false
| 0.02439
| 0.207317
| 0
| 0.268293
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
ae0f418d25ef8016cb9f505cbfcc08043b51e1d4
| 4,964
|
py
|
Python
|
calculator.py
|
xizhongzhao/challenge5
|
fd4535479a0466eb0dec3c5f0078efea5fa40401
|
[
"BSD-3-Clause"
] | null | null | null |
calculator.py
|
xizhongzhao/challenge5
|
fd4535479a0466eb0dec3c5f0078efea5fa40401
|
[
"BSD-3-Clause"
] | null | null | null |
calculator.py
|
xizhongzhao/challenge5
|
fd4535479a0466eb0dec3c5f0078efea5fa40401
|
[
"BSD-3-Clause"
] | null | null | null |
#!/usr/bin/env python3
import sys
from multiprocessing import Queue,Process,Lock
from datetime import datetime
import getopt
import configparser
class Config(object):
    """Social-insurance configuration read from an INI file section.

    filename -- path to the config file
    arg      -- section name (default: the DEFAULT section)
    """

    # INI keys whose rates are summed into the overall insurance percentage.
    _RATE_KEYS = ('YangLao', 'GongJiJin', 'ShengYu', 'GongShang', 'ShiYe', 'YiLiao')

    def __init__(self, filename, arg='DEFAULT'):
        self._filename = filename
        self._arg = arg
        self._obj = configparser.ConfigParser(strict=False)
        self._obj.read(self._filename)

    def _float(self, key):
        # All values of interest are stored as floats in the config file.
        return self._obj.getfloat(self._arg, key)

    @property
    def basel(self):
        """Lowest social-insurance contribution base (JiShuL)."""
        return self._float('JiShuL')

    @property
    def baseh(self):
        """Highest social-insurance contribution base (JiShuH)."""
        return self._float('JiShuH')

    @property
    def soinsurp(self):
        """Total social-insurance percentage: sum of all individual rates."""
        return sum(self._float(key) for key in self._RATE_KEYS)
class UserData(object):
    """Reads a ``name,salary`` text file of user records."""

    def __init__(self, userdatafile):
        self._userdatafile = userdatafile

    @property
    def userdata(self):
        """Return {name: gross_salary} parsed fresh from the file."""
        result = {}
        with open(self._userdatafile) as handle:
            for record in handle:
                fields = record.split(',')
                # First field is the name, second the salary figure.
                result[fields[0].strip()] = float(fields[1].strip())
        return result
class Salary(object):
    """Computes social insurance, personal income tax and take-home pay.

    bftax    -- gross (pre-tax) salary
    soinsurp -- social insurance percentage
    basel    -- lowest contribution base
    baseh    -- highest contribution base
    """

    # Income-tax brackets: (taxable upper bound, rate, quick deduction).
    _BRACKETS = (
        (1500, 0.03, 0),
        (4500, 0.1, 105),
        (9000, 0.2, 555),
        (35000, 0.25, 1005),
        (55000, 0.3, 2755),
        (80000, 0.35, 5505),
    )

    def __init__(self, bftax, soinsurp, basel, baseh):
        self._bftax = bftax
        self._soinsurp = soinsurp
        self._basel = basel
        self._baseh = baseh

    @property
    def soinsur(self):
        """Insurance amount; the contribution base is clamped to [basel, baseh]."""
        if self._bftax <= self._basel:
            base = self._basel
        elif self._bftax >= self._baseh:
            base = self._baseh
        else:
            base = self._bftax
        return base * self._soinsurp

    @property
    def pitax(self):
        """Personal income tax on (gross - insurance - 3500 allowance)."""
        taxbase = self._bftax - self.soinsur - 3500
        if taxbase <= 0:
            return 0
        for upper, rate, deduction in self._BRACKETS:
            if taxbase <= upper:
                return taxbase * rate - deduction
        # Top bracket: everything above 80000.
        return taxbase * 0.45 - 13505

    @property
    def aftax(self):
        """Take-home pay: gross minus insurance minus income tax."""
        return self._bftax - self.soinsur - self.pitax
que1 = Queue()
que2 = Queue()
def putda_func(arg, lock):
    """Producer: read the user data file *arg* and push each
    (name, gross_salary) pair onto que1 under *lock*."""
    records = list(UserData(arg).userdata.items())
    for record in records:
        with lock:
            que1.put(record)
def comp_func(soinsurp, basel, baseh, lock):
    """Worker: pop (name, gross) pairs off que1, compute the breakdown and
    push [name, gross, insurance, tax, net] rows onto que2.

    Stops once que1 drains (checked after each item; empty() is advisory
    with concurrent producers).
    """
    while True:
        item = que1.get()
        salary = Salary(item[1], soinsurp, basel, baseh)
        row = [item[0], item[1], salary.soinsur, salary.pitax, salary.aftax]
        with lock:
            que2.put(row)
        if que1.empty():
            break
def outfi_func(arg):
    """Consumer: append each result row from que2 to file *arg* as a CSV
    line, terminated with the wall-clock write timestamp.

    Stops once que2 drains (empty() is advisory with concurrent writers).
    """
    while True:
        row = que2.get()
        # Re-open in append mode per row so partial output survives a crash.
        with open(arg, 'a') as out:
            out.write(row[0])
            for number in row[1:]:
                out.write(',' + '{:.2f}'.format(number))
            stamp = datetime.strftime(datetime.now(), '%Y-%m-%d %H:%M:%S')
            out.write(',' + stamp)
            out.write('\n')
        if que2.empty():
            break
def usage():
    """Print a one-line usage summary to stdout."""
    print('Usage: ' + sys.argv[0] + ' -C cityname -c configfile -d userdata -o resultdata')
def main():
    """Parse command line options and launch the producer/worker/writer
    processes that compute after-tax salaries.

    Options: -C city section, -c config file, -d user data file,
    -o output file, -h/--help.
    """
    try:
        opts,args = getopt.getopt(sys.argv[1:],'ho:d:C:c:',['help',])
    except getopt.GetoptError as err:
        print(err)
        usage()
        sys.exit(2)
    cityname = 'DEFAULT'
    userfile = None
    configfile = None
    outfile = None
    try:
        for o,a in opts:
            if o in ('-h','--help'):
                usage()
                sys.exit()
            # NOTE(review): this chain starts with `if`, not `elif`, so a
            # bare -h never reaches it; the trailing `raise NameError` is
            # unreachable because getopt already rejects unknown options.
            if o == '-o':
                outfile = a
            elif o == '-C':
                cityname = a
            elif o == '-d':
                userfile = a
            elif o == '-c':
                configfile = a
            else:
                raise NameError
        # NOTE(review): a missing -c leaves configfile None; ConfigParser
        # then reads nothing and the rate lookups fail later — confirm
        # whether that should be validated here.
        config = Config(configfile,cityname.upper())
        lo1 = Lock()
        lo2 = Lock()
        # Pipeline: putda_func -> que1 -> comp_func -> que2 -> outfi_func.
        Process(target=putda_func,args=(userfile,lo1)).start()
        Process(target=comp_func, args=(config.soinsurp,\
                config.basel,config.baseh,lo2)).start()
        Process(target=outfi_func, args=(outfile,)).start()
    except NameError as err:
        usage()
        print(err)
        sys.exit(2)
# Script entry point: parse CLI arguments and start the worker processes.
if __name__ == '__main__':
    main()
| 27.88764
| 91
| 0.52357
| 573
| 4,964
| 4.429319
| 0.280977
| 0.028369
| 0.038613
| 0.022459
| 0.033885
| 0.025217
| 0.025217
| 0
| 0
| 0
| 0
| 0.038619
| 0.35838
| 4,964
| 177
| 92
| 28.045198
| 0.758242
| 0.027599
| 0
| 0.170068
| 0
| 0
| 0.040241
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.102041
| false
| 0
| 0.034014
| 0.020408
| 0.265306
| 0.020408
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
ae0f8d2404360860d62fb249f2d3aa6934c5170c
| 1,730
|
py
|
Python
|
scripts/financials.py
|
pwaring/125-accounts
|
a8d577110184e5f833368977c36b1e407c7357f6
|
[
"MIT"
] | null | null | null |
scripts/financials.py
|
pwaring/125-accounts
|
a8d577110184e5f833368977c36b1e407c7357f6
|
[
"MIT"
] | 7
|
2017-04-30T11:11:26.000Z
|
2020-09-24T15:23:24.000Z
|
scripts/financials.py
|
pwaring/125-accounts
|
a8d577110184e5f833368977c36b1e407c7357f6
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
import argparse
import yaml
import pathlib
import decimal
import datetime
import os
# Ten significant digits is ample precision for these money totals.
decimal.getcontext().prec = 10
parser = argparse.ArgumentParser()
parser.add_argument('--data', help='path to data directory', required=True)
args = parser.parse_args()
# Config lives in ../config relative to this script.
script_path = os.path.dirname(os.path.realpath(__file__))
config_path = script_path + '/../config'
# Configuration
config = {}
with open(config_path + '/tax.yaml') as f:
    config['tax'] = yaml.safe_load(f.read())
# Find current tax year
today = datetime.date.today()
# NOTE(review): raises StopIteration when today falls outside every
# configured tax year — presumably tax.yaml always covers the present.
config['current_tax'] = next(x for x in config['tax'] if x['start_date'] <= today and x['end_date'] >= today)
# Data
total_sales = decimal.Decimal(0.00)
total_payments = decimal.Decimal(0.00)
data_directory = str(args.data)
data_path = pathlib.Path(data_directory)
invoice_files = list(data_path.glob('data/invoices/*.yaml'))
for invoice_file in invoice_files:
    fp = invoice_file.open()
    invoice_data = yaml.safe_load(fp.read())
    fp.close()
    # Only count invoices issued within the current tax year and not
    # post-dated beyond today.
    if invoice_data['issue_date'] >= config['current_tax']['start_date'] and invoice_data['issue_date'] <= config['current_tax']['end_date'] and invoice_data['issue_date'] <= today:
        print(invoice_data['number'])
        total_sales += decimal.Decimal(invoice_data['total'])
        print(invoice_data['total'])
        # Subtract any payments from accounts receivable
        if 'payments' in invoice_data:
            for payment in invoice_data['payments']:
                print(payment['amount'])
                total_payments += decimal.Decimal(payment['amount'])
print()
print("Total sales: %.2f" % total_sales)
print("Total payments: %.2f" % total_payments)
# Calculate tax and national insurance
| 28.833333
| 181
| 0.695954
| 234
| 1,730
| 4.952991
| 0.346154
| 0.085418
| 0.041415
| 0.051769
| 0.091458
| 0.091458
| 0.062123
| 0
| 0
| 0
| 0
| 0.007591
| 0.162428
| 1,730
| 59
| 182
| 29.322034
| 0.792271
| 0.084393
| 0
| 0
| 0
| 0
| 0.16033
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.162162
| 0
| 0.162162
| 0.162162
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
ae10738b2828081524171edff4d9e154279c3a52
| 4,131
|
py
|
Python
|
index.py
|
welshonion/GB_Tweet_Eraser
|
5ba77864e12bbdfc0f44fd417e1584a672120dd6
|
[
"MIT"
] | null | null | null |
index.py
|
welshonion/GB_Tweet_Eraser
|
5ba77864e12bbdfc0f44fd417e1584a672120dd6
|
[
"MIT"
] | null | null | null |
index.py
|
welshonion/GB_Tweet_Eraser
|
5ba77864e12bbdfc0f44fd417e1584a672120dd6
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
#for local
#import config
#config.write_environ()
import os,json
from flask import Flask, render_template, request, redirect, url_for, session
from requests_oauthlib import OAuth1Session
from datetime import timedelta
import twitter_auth
import twitter_delete
import postTweet
import databaseIO
app = Flask(__name__)
app.secret_key = os.environ['APP_SECRET_KEY']
# Sessions expire after five minutes of inactivity.
app.permanent_session_lifetime = timedelta(minutes=5)
#session.permanent = True
#scheduler = BackgroundScheduler(daemon = True)
##################################################################
## Token-related configuration
# Twitter consumer credentials; '0' is a placeholder when unset.
CK = os.environ.get('CONSUMER_KEY', '0')
CS = os.environ.get('CONSUMER_SECRET', '0')
##################################################################
# Module-level user state (also mirrored in the Flask session).
is_verified = False
name = ""
screen_name = ""
# Display labels indexed by the stored work flag (0=stop, 1=running).
w = ('stop','running')
@app.route('/')
def index():
    """Landing page: reset the session's auth state for a fresh visit."""
    session['auth_process'] = False
    session['is_verified'] = False
    return render_template('index.html')
@app.route('/authorize')
def authorize():
    """Start the OAuth *authorize* flow and redirect the user to Twitter."""
    session['auth_process'] = True
    return redirect(twitter_auth.user_authorize())
@app.route('/authenticate')
def authenticate():
    """Start the OAuth *authenticate* flow and redirect the user to Twitter."""
    session['auth_process'] = True
    return redirect(twitter_auth.user_authenticate())
"""@app.route('/verified')
def verified():
is_verified,name,screen_name = twitter_auth.user_verified()
#return redirect('http://127.0.0.1:5000/')
return render_template('verified.html',is_verified = is_verified,name=name,screen_name=screen_name)
@app.route('/setting_authenticate')
def authenticate():
authenticate_url = twitter_auth.user_authenticate_setting()
return redirect(authenticate_url)
#return #render_template('tweet.html',message=message,title=title)
"""
@app.route('/setting', methods=['GET','POST'])
def setting():
    """Settings page: finishes OAuth verification when returning from
    Twitter, saves the posted eraser settings for a verified session,
    and renders the current per-user state.
    """
    global is_verified, name, screen_name
    user_id = ""
    # Normalise missing/odd session flags to plain False.
    if session.get('is_verified') != True:
        session['is_verified'] = False
    if session.get('auth_process') != True:
        print("no auth_process")
        session['auth_process'] = False
    if session['auth_process'] == True :
        # Just came back from Twitter: complete the verification handshake.
        try:
            twitter_auth.user_verified()
            print("verify success")
            session['auth_process'] = False
        except:
            # NOTE(review): bare except also swallows unrelated errors;
            # consider narrowing to what user_verified can raise.
            print("verify failed")
            session['auth_process'] = False
            return render_template('setting.html',is_verified = False)
    else:
        if session['is_verified'] == True:
            # when saving settings (form submit)
            if(request.form["work"]=='running'):
                work_value = 1
            else:
                work_value = 0
            databaseIO.set_value(session['user_id'], work_value, request.form["deletetime"])
            print(request.form["work"])
            print(request.form["deletetime"])
            #param = json.loads(request.data.decode('utf-8'))
            #print(param["work"])
            #print(param.get('deletetime'))
            print("verified")
        else:
            print("invalid transition")
            session['auth_process'] = False
            return render_template('setting.html',is_verified = False)
    # Re-read the user's stored settings for display.
    user_id = session['user_id']
    userinfo = databaseIO.get_value(user_id)
    is_verified = session['is_verified']
    name = session['name']
    screen_name = session['screen_name']
    # Columns 3/4 appear to be the work flag and delete interval —
    # TODO confirm against the databaseIO schema.
    work = userinfo[3]
    delete_time = userinfo[4]
    print(name)
    return render_template('setting.html',is_verified = is_verified,name=name,screen_name=screen_name,work=w[work],delete_time=delete_time)
@app.route('/delete', methods=['GET','POST'])
def delete():
    """Delete the authenticated user's record (POST) or show the
    confirmation page (GET)."""
    if request.method == 'POST':
        databaseIO.auth_deleteuser(session['user_id'])
        print("delete")
        return render_template('delete.html',deleted=True)
    else:
        return render_template('delete.html',deleted=False)
    # (removed an unreachable duplicate return after the exhaustive if/else)
# Local development entry point (production would run under a WSGI server).
if __name__ == '__main__':
    #app.debug = True
    app.run(threaded=True)
| 27
| 139
| 0.641007
| 470
| 4,131
| 5.417021
| 0.229787
| 0.058916
| 0.078555
| 0.045169
| 0.236842
| 0.217989
| 0.203456
| 0.142969
| 0.142969
| 0.142969
| 0
| 0.00602
| 0.195836
| 4,131
| 153
| 140
| 27
| 0.760385
| 0.078673
| 0
| 0.202381
| 0
| 0
| 0.159975
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.059524
| false
| 0
| 0.095238
| 0
| 0.261905
| 0.107143
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
ae11598e927b79f190c3f53d990ca4e8744816b6
| 21,209
|
py
|
Python
|
shades/shades.py
|
benrrutter/Shades
|
06c1d2e9b7ba6044892a6bf7529e706574fb923c
|
[
"MIT"
] | 1
|
2020-11-28T19:41:39.000Z
|
2020-11-28T19:41:39.000Z
|
shades/shades.py
|
benrrutter/Shades
|
06c1d2e9b7ba6044892a6bf7529e706574fb923c
|
[
"MIT"
] | null | null | null |
shades/shades.py
|
benrrutter/Shades
|
06c1d2e9b7ba6044892a6bf7529e706574fb923c
|
[
"MIT"
] | null | null | null |
"""
shades
contains classes and functions relating to Shades' shade object
"""
from abc import ABC, abstractmethod
from typing import Tuple, List
import numpy as np
from PIL import Image
from .noise_fields import NoiseField, noise_fields
from .utils import color_clamp
class Shade(ABC):
    """
    Abstract base class for shades.

    Methods are used to mark shapes onto images according to various color rules:
    subclasses implement determine_shade() and inherit the drawing primitives
    (point, line, fill, ...).

    Initialisation parameter warp_noise takes two noise fields affecting how
    much a point is moved across the x and y axis; warp_size determines the
    distance that a warp_noise result of 1 (maximum perlin value)
    translates to.
    """
def __init__(
self,
color: Tuple[int, int, int] = (0, 0, 0),
warp_noise: Tuple[NoiseField] = noise_fields(channels=2),
warp_size: float = 0,
):
self.color = color
self.warp_noise = warp_noise
self.warp_size = warp_size
    @abstractmethod
    def determine_shade(self, xy_coords: Tuple[int, int]) -> Tuple[int, int, int]:
        """
        Determines the shade/color for given xy coordinate.

        Subclasses return an RGB tuple for the point, or None to skip
        drawing it (point() checks for None before drawing).
        """
def adjust_point(self, xy_coords: Tuple[int, int]) -> Tuple[int, int]:
"""
If warp is applied in shade, appropriately adjusts location of point.
"""
if self.warp_size == 0:
return xy_coords
x_coord = xy_coords[0] + (self.warp_noise[0].noise(xy_coords) * self.warp_size)
y_coord = xy_coords[1] + (self.warp_noise[1].noise(xy_coords) * self.warp_size)
return (x_coord, y_coord)
def point(self, canvas: Image, xy_coords: Tuple[int, int]) -> None:
"""
Determines colour and draws a point on an image.
"""
color = self.determine_shade(xy_coords)
if color is None:
return
xy_coords = self.adjust_point(xy_coords)
if self.in_bounds(canvas, xy_coords):
canvas.putpixel((int(xy_coords[0]), int(xy_coords[1])), color)
def in_bounds(self, canvas: Image, xy_coords: Tuple[int, int]) -> bool:
"""
determined whether xy_coords are within the size of canvas image
"""
if (xy_coords[0] < 0) or (xy_coords[0] >= canvas.width):
return False
if (xy_coords[1] < 0) or (xy_coords[1] >= canvas.height):
return False
return True
def weighted_point(self, canvas: Image, xy_coords: Tuple[int, int], weight: int):
"""
Determines colour and draws a weighted point on an image.
"""
color = self.determine_shade(xy_coords)
if self.warp_size != 0:
xy_coords = self.adjust_point(xy_coords)
for x_coord in range(0, weight):
for y_coord in range(0, weight):
new_point = (int(xy_coords[0]+x_coord), int(xy_coords[1]+y_coord))
if self.in_bounds(canvas, new_point):
canvas.putpixel(new_point, color)
    def pixels_inside_edge(self, edge_pixels: List) -> List:
        """
        Returns a list of pixels from inside a edge of points using ray casting algorithm
        https://en.wikipedia.org/wiki/Point_in_polygon
        vertex correction requires improvements, unusual or particularly angular shapes may
        cause difficulties
        """
        inner_pixels = []
        # All x columns the edge touches.
        x_coords = {i[0] for i in edge_pixels}
        for x_coord in range(min(x_coords), max(x_coords)+1):
            # Edge y values in this column...
            y_coords = {i[1] for i in edge_pixels if i[0] == x_coord}
            # ...collapsed so a vertical run counts as one crossing.
            y_coords = [i for i in y_coords if i-1 not in y_coords]
            ray_count = 0
            # Cast a ray down the column; after an odd number of crossings
            # we are inside the shape.
            for y_coord in range(min(y_coords), max(y_coords)+1):
                # NOTE(review): the second operand `(x_coord, y_coord)` is a
                # non-empty tuple and therefore always truthy — probably a
                # leftover from `(x_coord, y_coord) in edge_pixels`; confirm
                # intent before changing behavior.
                if y_coord in y_coords and (x_coord, y_coord):
                    ray_count += 1
                if ray_count % 2 == 1:
                    inner_pixels.append((x_coord, y_coord))
        return list(set(inner_pixels + edge_pixels))
def pixels_between_two_points(self, xy_coord_1: Tuple, xy_coord_2: Tuple) -> List:
    """
    Returns a list of pixels that form a straight line between two points.

    Parameters:
        xy_coord_1 (int iterable): Coordinates for first point.
        xy_coord_2 (int iterable): Coordinates for second point.

    Returns:
        pixels (int iterable): List of pixels between the two points.
    """
    delta_x = abs(xy_coord_1[0] - xy_coord_2[0])
    delta_y = abs(xy_coord_1[1] - xy_coord_2[1])
    # fixed: identical points previously raised ZeroDivisionError
    if delta_x == 0 and delta_y == 0:
        return [(int(xy_coord_1[0]), int(xy_coord_1[1]))]
    # step one pixel along the dominant axis, fractionally along the other
    if delta_x > delta_y:
        x_step = -1 if xy_coord_1[0] > xy_coord_2[0] else 1
        y_step = delta_y / delta_x
        if xy_coord_1[1] > xy_coord_2[1]:
            y_step *= -1
        i_stop = delta_x
    else:
        y_step = -1 if xy_coord_1[1] > xy_coord_2[1] else 1
        x_step = delta_x / delta_y
        if xy_coord_1[0] > xy_coord_2[0]:
            x_step *= -1
        i_stop = delta_y
    pixels = []
    x_coord, y_coord = xy_coord_1
    for _ in range(0, int(i_stop) + 1):
        pixels.append((int(x_coord), int(y_coord)))
        x_coord += x_step
        y_coord += y_step
    return pixels
def line(
    self,
    canvas: Image,
    xy_coords_1: Tuple[int, int],
    xy_coords_2: Tuple[int, int],
    weight: int = 2,
) -> None:
    """
    Draws a weighted line on the image.

    Every pixel on the straight path between the two coordinates is
    rendered as a weighted point.
    """
    path = self.pixels_between_two_points(xy_coords_1, xy_coords_2)
    for pixel in path:
        self.weighted_point(canvas, pixel, weight)
def fill(self, canvas: Image) -> None:
    """
    Fills the entire image with color.
    """
    # Warping is pointless when every pixel is covered anyway, so disable
    # it temporarily; restore it even if a shade raises part-way through.
    warp_size_keeper = self.warp_size
    self.warp_size = 0
    try:
        for x_coord in range(0, canvas.width):
            for y_coord in range(0, canvas.height):
                self.point(canvas, (x_coord, y_coord))
    finally:
        self.warp_size = warp_size_keeper
def get_shape_edge(self, list_of_points: List[Tuple[int, int]]) -> List[Tuple]:
    """
    Returns list of coordinates making up the edge of a shape.

    Connects the final point back to the first, then each point to its
    successor, so the returned edge is always closed.
    """
    edge_pixels: List[Tuple] = []
    previous = list_of_points[-1]
    for current in list_of_points:
        edge_pixels += self.pixels_between_two_points(previous, current)
        previous = current
    return edge_pixels
def shape(self, canvas: Image, points: List[Tuple[int, int]]) -> None:
    """
    Draws a filled shape on an image based on a list of points.
    """
    filled = self.pixels_inside_edge(self.get_shape_edge(points))
    for pixel in filled:
        self.point(canvas, pixel)
def shape_outline(
    self,
    canvas: Image,
    points: List[Tuple[int, int]],
    weight: int = 2,
) -> None:
    """
    Draws a shape outline on an image based on a list of points.
    """
    edge_pixels = self.get_shape_edge(points)
    for pixel in edge_pixels:
        self.weighted_point(canvas, pixel, weight)
def rectangle(
    self,
    canvas: Image,
    top_corner: Tuple[int, int],
    width: int,
    height: int,
) -> None:
    """
    Draws a rectangle on the image.
    """
    left, top = top_corner[0], top_corner[1]
    for x_coord in range(left, left + width):
        for y_coord in range(top, top + height):
            self.point(canvas, (x_coord, y_coord))
def square(
    self,
    canvas: Image,
    top_corner: Tuple[int, int],
    size: int,
) -> None:
    """
    Draws a square on the canvas.

    Equivalent to Shade.rectangle with equal width and height.
    """
    self.rectangle(canvas, top_corner, size, size)
def triangle(
    self,
    canvas,
    xy1: Tuple[int, int],
    xy2: Tuple[int, int],
    xy3: Tuple[int, int],
) -> None:
    """
    Draws a filled triangle on the image.

    This is the same as calling Shade.shape with a list of three points.
    """
    self.shape(canvas, [xy1, xy2, xy3])
def triangle_outline(
    self,
    canvas,
    xy1: Tuple[int, int],
    xy2: Tuple[int, int],
    xy3: Tuple[int, int],
    weight: int = 2,
) -> None:
    """
    Draws a triangle outline on the image.

    Note that this is the same as calling Shade.shape_outline with a list
    of three points.
    """
    self.shape_outline(canvas, [xy1, xy2, xy3], weight)
def get_circle_edge(
    self,
    center: Tuple[int, int],
    radius: int,
) -> List[Tuple[int, int]]:
    """
    Returns the edge coordinates of a circle.

    Walks roughly one pixel at a time around the circumference so the edge
    stays contiguous at any radius.
    """
    # fixed: a zero radius previously raised ZeroDivisionError; a circle of
    # radius zero is simply its center pixel
    if radius == 0:
        return [(int(center[0]), int(center[1]))]
    edge_pixels = []
    circumference = radius * 2 * np.pi
    for i in range(0, int(circumference) + 1):
        angle = (i / circumference) * 360
        opposite = np.sin(np.radians(angle)) * radius
        adjacent = np.cos(np.radians(angle)) * radius
        point = (int(center[0] + adjacent), int(center[1] + opposite))
        edge_pixels.append(point)
    return edge_pixels
def circle(
    self,
    canvas: Image,
    center: Tuple[int, int],
    radius: int,
) -> None:
    """
    Draws a filled circle on the image.
    """
    for pixel in self.pixels_inside_edge(self.get_circle_edge(center, radius)):
        self.point(canvas, pixel)
def circle_outline(
    self,
    canvas: Image,
    center: Tuple[int, int],
    radius: int,
    weight: int = 2,
) -> None:
    """
    Draws a circle outline on the image.
    """
    for pixel in self.get_circle_edge(center, radius):
        self.weighted_point(canvas, pixel, weight)
def circle_slice(
    self,
    canvas: Image,
    center: Tuple[int, int],
    radius: int,
    start_angle: int,
    degrees_of_slice: int,
) -> None:
    """
    Draws a partial circle based on degrees.
    (will have the appearance of a 'pizza slice' or 'pacman' depending on degrees).

    Parameters:
        canvas: image to draw on
        center: xy coordinates of the circle's center
        radius: radius of the circle in pixels
        start_angle: angle (in degrees) at which the slice begins
        degrees_of_slice: angular width (in degrees) of the slice
    """
    # due to Shade.pixels_between_two_points vertex correction issues,
    # breaks down shape into smaller parts
    def _internal(canvas, center, radius, start_angle, degrees_of_slice):
        # roughly one pixel per unit of circumference keeps the arc contiguous
        circumference = radius * 2 * np.pi
        # -90 offsets the start so an angle of 0 begins at the top of the circle
        # NOTE(review): "% 361" looks like it should be "% 360" — confirm
        start_point = int(
            (((start_angle - 90) % 361) / 360) * circumference)
        slice_length = int((degrees_of_slice / 360) * circumference)
        end_point = start_point + slice_length
        edge_pixels = []
        for i in range(start_point, end_point + 1):
            angle = (i/circumference) * 360
            opposite = np.sin(np.radians(angle)) * radius
            adjacent = np.cos(np.radians(angle)) * radius
            point = (int(center[0] + adjacent), int(center[1] + opposite))
            edge_pixels.append(point)
            # close the slice with straight edges joining both arc ends to center
            if i in [start_point, end_point]:
                edge_pixels += self.pixels_between_two_points(point, center)
        for pixel in self.pixels_inside_edge(edge_pixels):
            self.point(canvas, pixel)

    # slices wider than 180 degrees are drawn as two half-slices, since the
    # ray-casting fill struggles with reflex angles
    if degrees_of_slice > 180:
        _internal(canvas, center, radius, start_angle, 180)
        _internal(canvas, center, radius, start_angle +
                  180, degrees_of_slice - 180)
    else:
        _internal(canvas, center, radius, start_angle, degrees_of_slice)
class BlockColor(Shade):
    """
    Type of shade that will always fill with defined color without variation.
    """

    def determine_shade(self, xy_coords: Tuple[int, int]) -> Tuple[int, int, int]:
        """
        Returns the configured color; the xy coordinates play no part.
        """
        return self.color
class NoiseGradient(Shade):
    """
    Type of shade that will produce varying gradient based on noise fields.

    Unique Parameters:
        color_variance: How much noise is allowed to affect the color from the central shade
        color_fields: A noise field for each channel (r,g,b)
    """

    def __init__(
        self,
        color: Tuple[int, int, int] = (0, 0, 0),
        warp_noise: Tuple[NoiseField, NoiseField, NoiseField] = None,
        warp_size: int = 0,
        color_variance: int = 70,
        color_fields: Tuple[NoiseField, NoiseField, NoiseField] = None,
    ):
        # fixed: noise_fields(...) used directly as a default argument is
        # evaluated once at definition time, so all instances silently shared
        # the same NoiseField objects; build fresh fields per instance instead
        if warp_noise is None:
            warp_noise = noise_fields(channels=3)
        if color_fields is None:
            color_fields = noise_fields(channels=3)
        super().__init__(color, warp_noise, warp_size)
        self.color_variance = color_variance
        self.color_fields = tuple(color_fields)

    def determine_shade(self, xy_coords: Tuple[int, int]) -> Tuple[int, int, int]:
        """
        Measures noise from coordinates and affects color based upon return.
        """
        def apply_noise(channel: int):
            # recenter noise around 0 before scaling by the variance
            noise = self.color_fields[channel].noise(xy_coords) - 0.5
            return self.color[channel] + noise * (2 * self.color_variance)

        return color_clamp([apply_noise(i) for i in range(len(self.color))])
class DomainWarpGradient(Shade):
    """
    Type of shade that will produce varying gradient based on recursive noise fields.

    Unique Parameters:
        color_variance: How much noise is allowed to affect the color from the central shade
        color_fields: A noise field for each channel (r,g,b)
        depth: Number of recursions within noise to make
        feedback: Affect of recursive calls, recomended around 0-2
    """

    def __init__(
        self,
        color: Tuple[int, int, int] = (0, 0, 0),
        warp_noise: Tuple[NoiseField, NoiseField] = None,
        warp_size: int = 0,
        color_variance: int = 70,
        color_fields: Tuple[NoiseField, NoiseField, NoiseField] = None,
        depth: int = 2,
        feedback: float = 0.7,
    ):
        # fixed: noise_fields(...) used directly as a default argument is
        # evaluated once at definition time, so all instances silently shared
        # the same NoiseField objects; build fresh fields per instance instead
        if warp_noise is None:
            warp_noise = noise_fields(channels=2)
        if color_fields is None:
            color_fields = noise_fields(channels=3)
        super().__init__(color, warp_noise, warp_size)
        self.color_variance = color_variance
        self.color_fields = tuple(color_fields)
        self.depth = depth
        self.feedback = feedback

    def determine_shade(self, xy_coords: Tuple[int, int]) -> Tuple[int, int, int]:
        """
        Determines shade based on xy coordinates.
        """
        def apply_noise(channel: int):
            # recursive noise recentered around 0 before scaling
            noise = self.color_fields[channel].recursive_noise(
                xy_coords, self.depth, self.feedback) - 0.5
            return self.color[channel] + noise * (2 * self.color_variance)

        return color_clamp([apply_noise(i) for i in range(len(self.color))])
class SwirlOfShades(Shade):
    """
    Type of shade that will select from list of other shades based on recursive noise field.

    Unique Parameters:
        swirl_field: a NoiseField from which the selection of the shade is made
        depth: Number of recursive calls to make from swirl_field.noise (defaults to 0)
        feedback: Affect of recursive calls from swirl_field.noise
        shades: this one is very specific, and determines when shades are used.
            must be list of tuples of this form:
            (lower_bound, upper_bound, Shade)

    because the 'shades' argument is potentially confusing, here's an example.
    The below will color white when noise of 0 - 0.5 is returned, and black if noise of 0.5 - 1
    [(0, 0.5, shades.BlockColor((255, 255, 255)), (0.5, 1, shades.BlockColor((0, 0, 0)))]
    """

    def __init__(
        self,
        shades: List[Tuple[float, float, Shade]],
        warp_noise: Tuple[NoiseField, NoiseField] = None,
        warp_size: int = 0,
        color_variance: int = 70,
        swirl_field: NoiseField = None,
        depth: int = 1,
        feedback: float = 0.7,
    ):
        # fixed: stateful defaults (noise_fields(...), NoiseField()) are
        # evaluated once at definition time and shared by every instance;
        # create them per instance instead
        if warp_noise is None:
            warp_noise = noise_fields(channels=2)
        if swirl_field is None:
            swirl_field = NoiseField()
        super().__init__(warp_noise=warp_noise, warp_size=warp_size)
        self.color_variance = color_variance
        self.swirl_field = swirl_field
        self.depth = depth
        self.feedback = feedback
        self.shades = shades

    def determine_shade(self, xy_coords: Tuple[int, int]):
        """
        Determines shade based on xy coordinates.

        Returns the color of the first shade whose (lower, upper) noise band
        contains the measured noise, or None when no band matches (the pixel
        is left untouched).
        """
        noise = self.swirl_field.recursive_noise(xy_coords, self.depth, self.feedback)
        matches = [entry for entry in self.shades if entry[0] <= noise < entry[1]]
        if matches:
            return matches[0][2].determine_shade(xy_coords)
        return None
class LinearGradient(Shade):
    """
    Type of shade that will determine color based on transition between various 'color_points'

    Unique Parameters:
        color_points: Groups of colours and coordinate at which they should appear
        axis: 0 for horizontal gradient, 1 for vertical

    Here's an example of color_points
    in this, anything before 50 (on whichever axis specified) will be black,
    anything after 100 will be white
    between 50 and 100 will be grey, with tone based on proximity to 50 or 100
    [((0, 0, 0), 50), ((250, 250, 250), 100)]
    """

    def __init__(
        self,
        # fixed annotation: each entry is (color, coordinate), matching the
        # example above and the (i[0], i[1]) usage in determine_shade
        color_points: List[Tuple[Tuple[int, int, int], int]],
        axis: int = 0,
        warp_noise: Tuple[NoiseField, NoiseField] = None,
        warp_size: int = 0,
    ):
        # fixed: noise_fields(...) as a default argument was evaluated once
        # and shared between instances; create per instance instead
        if warp_noise is None:
            warp_noise = noise_fields(channels=2)
        super().__init__(warp_noise=warp_noise, warp_size=warp_size)
        self.color_points = color_points
        self.axis = axis

    def determine_shade(self, xy_coords):
        """
        Determines shade based on xy coordinates.

        Parameters:
            xy_coords (iterable): xy coordinates

        Returns:
            color in form of tuple
        """
        position = xy_coords[self.axis]
        larger = [point[1] for point in self.color_points if point[1] >= position]
        smaller = [point[1] for point in self.color_points if point[1] < position]

        def color_at(coordinate):
            # first color registered at the given coordinate
            return [p[0] for p in self.color_points if p[1] == coordinate][0]

        # before the first / after the last color point: flat color
        if len(smaller) == 0:
            return color_at(min(larger))
        if len(larger) == 0:
            return color_at(max(smaller))

        next_item = min(larger)
        last_item = max(smaller)
        next_color = color_at(next_item)
        last_color = color_at(last_item)
        distance_from_next = abs(next_item - position)
        distance_from_last = abs(last_item - position)
        from_last_to_next = distance_from_last / (distance_from_next + distance_from_last)
        # fixed: "[0 for i in len(next_color)]" raised TypeError ('int' is
        # not iterable); interpolate each channel directly instead
        color = []
        for i, _ in enumerate(next_color):
            color_difference = (last_color[i] - next_color[i]) * from_last_to_next
            color.append(last_color[i] - color_difference)
        return color_clamp(color)
class VerticalGradient(LinearGradient):
    """
    Type of shade that will determine color based on transition between various 'color_points'

    Unique Parameters:
        color_points: Groups of colours and coordinate at which they should appear

    Here's an example of color_points
    in this, anything before 50 (on y axis) will be black,
    anything after 100 will be white
    between 50 and 100 will be grey, with tone based on proximity to 50 or 100
    """

    def __init__(
        self,
        # annotation fixed to (color, coordinate) order, matching usage
        color_points: List[Tuple[Tuple[int, int, int], int]],
        warp_noise: Tuple[NoiseField, NoiseField] = None,
        warp_size: int = 0,
    ):
        # fixed: avoid evaluating noise_fields(...) at definition time,
        # which shared the same NoiseField objects between instances
        if warp_noise is None:
            warp_noise = noise_fields(channels=2)
        super().__init__(
            color_points=color_points,
            axis=1,
            warp_noise=warp_noise,
            warp_size=warp_size,
        )
class HorizontalGradient(LinearGradient):
    """
    Type of shade that will determine color based on transition between various 'color_points'

    Unique Parameters:
        color_points: Groups of colours and coordinate at which they should appear

    Here's an example of color_points
    in this, anything before 50 (on x axis) will be black,
    anything after 100 will be white
    between 50 and 100 will be grey, with tone based on proximity to 50 or 100
    """

    def __init__(
        self,
        # annotation fixed to (color, coordinate) order, matching usage
        color_points: List[Tuple[Tuple[int, int, int], int]],
        warp_noise: Tuple[NoiseField, NoiseField] = None,
        warp_size: int = 0,
    ):
        # fixed: avoid evaluating noise_fields(...) at definition time,
        # which shared the same NoiseField objects between instances
        if warp_noise is None:
            warp_noise = noise_fields(channels=2)
        super().__init__(
            color_points=color_points,
            axis=0,
            warp_noise=warp_noise,
            warp_size=warp_size,
        )
| 34.768852
| 95
| 0.588571
| 2,811
| 21,209
| 4.253646
| 0.117396
| 0.024086
| 0.034959
| 0.011709
| 0.590031
| 0.53475
| 0.495777
| 0.473447
| 0.410638
| 0.347662
| 0
| 0.022253
| 0.315621
| 21,209
| 609
| 96
| 34.825944
| 0.801516
| 0.244
| 0
| 0.49711
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.101156
| false
| 0
| 0.017341
| 0
| 0.199422
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
ae131115e85d42f0478a7f770cbcfcd854b30f6f
| 4,104
|
py
|
Python
|
BCAWT/CA.py
|
AliYoussef96/BCAW-Tool
|
a296a52f8795325f08e0c6f00838b9e851f9459e
|
[
"MIT"
] | 3
|
2019-10-22T07:08:40.000Z
|
2021-07-27T14:12:25.000Z
|
BCAWT/CA.py
|
AliYoussef96/BCAW-Tool
|
a296a52f8795325f08e0c6f00838b9e851f9459e
|
[
"MIT"
] | 13
|
2019-06-26T07:21:25.000Z
|
2021-07-23T15:01:31.000Z
|
BCAWT/CA.py
|
AliYoussef96/BCAW-Tool
|
a296a52f8795325f08e0c6f00838b9e851f9459e
|
[
"MIT"
] | 3
|
2019-07-25T00:13:36.000Z
|
2020-09-25T01:58:34.000Z
|
def CA(file):
    """correspondence analysis.

    Args:
        file (directory): csv file contains genes' RSCU values

    Returns:
        - csv file contains genes' values for the first 4 axes of the correspondence analysis result
        - csv file contains codons' values for the first 4 axes of the correspondence analysis result
        - plot the genes first 2 axes values of the correspondence analysis result
        - plot the codons first 2 axes values of the correspondence analysis result
    """
    import pandas as pd
    import prince
    import matplotlib.pyplot as plt
    file = str(file)
    df = pd.read_csv(file)
    df.set_index(df.iloc[:,0] , inplace=True)# to make the first column is the index
    df.drop(df.columns[0], axis=1,inplace= True)
    # zeros would break the analysis, so replace them with a tiny positive value
    df.replace(0,0.0000001,inplace=True)
    # with prince: run CA keeping the first 4 axes
    ca = prince.CA(
        n_components=4,
        n_iter=3,
        copy=True,
        check_input=True,
        engine='auto',
        random_state=42
    )
    df.columns.rename('Gene Name', inplace=True)
    df.index.rename('Codons', inplace=True)
    ca = ca.fit(df)
    codons = ca.row_coordinates(df) # for Codons
    genes = ca.column_coordinates(df) #for genes
    #ca.eigenvalues_
    ca.total_inertia_ #total inertia
    ca.explained_inertia_ #inertia for each axis
    inertia = ca.explained_inertia_
    # save information: write the genes' axis coordinates next to the input file
    file_genes = file.replace(".csv",'')
    file_genes = file_genes + "genes"
    file_genes = file_genes + ".csv"
    genes.rename(columns={genes.columns[0]: 'axis 1', genes.columns[1]: 'axis 2', genes.columns[2]: 'axis 3', genes.columns[3]: 'axis 4'}, inplace=True)
    genes.to_csv(file_genes,sep=',', index=True, header=True) # return csv file for genes ca result
    file_codons = file.replace(".csv",'')
    file_codons = file_codons+ "codons"
    file_codons = file_codons + ".csv"
    codons.rename(columns={codons.columns[0]: 'axis 1', codons.columns[1]: 'axis 2', codons.columns[2]: 'axis 3', codons.columns[3]: 'axis 4'},inplace=True)
    codons.to_csv(file_codons, sep=',', index=True, header=True) # return csv file for codon ca result
    # append the inertia summary to a .txt next to the input file
    file_inertia = file.replace('.csv','.txt')
    with open(file_inertia, 'a') as f:
        f.write("explained inertia" + "\n")
    for i in range(len(inertia)):
        i_count = i + 1
        with open(file_inertia,'a') as f:
            f.write ("axis " + str(i_count) + " = " + str(inertia[i]) + "\n" )
    with open(file_inertia,'a') as f:
        f.write("Total Inertia = " + str(ca.total_inertia_))
    # plot for genes: first two CA axes as a scatter plot
    plt.style.use('seaborn-dark-palette')
    fig = plt.figure()
    plt.xlabel("Axis 1")
    plt.ylabel("Axis 2")
    plt.title("CA-plot")
    plt.scatter(genes['axis 1'],genes['axis 2'],s=10,marker ='o')
    plt.axhline(0, color='black', linestyle='-')
    plt.axvline(0, color='black', linestyle='-')
    save_file_name__ca_plot = file + "_CA_gens_plot.png"
    plt.savefig(save_file_name__ca_plot) # return plot file for gene ca result
    # plot for codons
    plt.style.use('seaborn-dark-palette')
    fig3 = plt.figure()
    plt.xlabel("Axis 1")
    plt.ylabel("Axis 2")
    plt.title("CA-plot")
    plt.scatter(codons['axis 1'],codons['axis 2'], s=10,marker ='o')
    plt.axhline(0, color='black', linestyle='-')
    plt.axvline(0, color='black', linestyle='-')
    # only label individual points when the plot will not be too crowded
    if len(codons) < 200:
        for x , y , t in zip(codons['axis 1'],codons['axis 2'] , codons.index.values):
            # nudge labels slightly away from the marker
            x = x * (1 + 0.01)
            y = y * (1 + 0.01)
            plt.text(x,y,t)
    file = file.replace('.csv','')
    save_file_name__ca_codons_plot = file + "_CA_codos_plot.png"
    plt.savefig(save_file_name__ca_codons_plot) # return plot file for codon ca result
    read_genes_file = pd.read_csv(file_genes)
    # NOTE(review): this rename keys on `genes.columns`, which are already
    # 'axis 1'..'axis 4' after the earlier rename, so it maps
    # 'axis 1'->'gene id', 'axis 2'->'axis 1', 'axis 3'->'axis 2' on the
    # re-read frame — looks shifted by one column; confirm whether the intent
    # was to rename read_genes_file.columns[0] to 'gene id'.
    read_genes_file.rename(columns={genes.columns[0]: 'gene id', genes.columns[1]: 'axis 1', genes.columns[2]: 'axis 2'}, inplace=True)
    return read_genes_file
| 32.832
| 157
| 0.615497
| 589
| 4,104
| 4.164686
| 0.212224
| 0.03139
| 0.030982
| 0.044028
| 0.420709
| 0.366082
| 0.29311
| 0.287403
| 0.262128
| 0.155728
| 0
| 0.023609
| 0.246589
| 4,104
| 124
| 158
| 33.096774
| 0.769728
| 0.192008
| 0
| 0.205479
| 0
| 0
| 0.111714
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.013699
| false
| 0
| 0.041096
| 0
| 0.068493
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
ae14d95fbddd637652559526a0abec1bcbb1d2a1
| 4,343
|
py
|
Python
|
src/jibo_animation_ui.py
|
marketneutral/jibo-teleop
|
dce5e131a364b2dc8108dd766a74cb7547077eed
|
[
"MIT"
] | 3
|
2019-06-03T15:12:15.000Z
|
2019-06-24T03:44:40.000Z
|
src/jibo_animation_ui.py
|
marketneutral/jibo-teleop
|
dce5e131a364b2dc8108dd766a74cb7547077eed
|
[
"MIT"
] | null | null | null |
src/jibo_animation_ui.py
|
marketneutral/jibo-teleop
|
dce5e131a364b2dc8108dd766a74cb7547077eed
|
[
"MIT"
] | 1
|
2019-04-24T13:15:57.000Z
|
2019-04-24T13:15:57.000Z
|
# Jacqueline Kory Westlund
# May 2016
#
# The MIT License (MIT)
#
# Copyright (c) 2016 Personal Robots Group
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from PySide import QtGui # basic GUI stuff
from jibo_msgs.msg import JiboAction # ROS msgs
from jibo_teleop_ros import jibo_teleop_ros
from functools import partial
class jibo_animation_ui(QtGui.QWidget):
    """Widget with one button per Jibo animation plus a hold-last-frame toggle."""

    # List of animations for Jibo. Not all the SILENT animations are here.
    animations = [
        JiboAction.EMOJI_SHARK,
        JiboAction.EMOJI_BEER,
        JiboAction.EMOJI_PARTY_PINK,
        JiboAction.EMOJI_PARTY_BLUE,
        JiboAction.EMOJI_RAINCLOUD,
        JiboAction.HAPPY_GO_LUCKY_DANCE
    ]

    def __init__(self, ros_node):
        """ Make a button for each animation """
        super(jibo_animation_ui, self).__init__()
        # get reference to ros node so we can do callbacks to publish messages
        self.ros_node = ros_node
        # tracks whether jibo will hold the last frame of an animation or not;
        # False by default
        self.hold_last_frame = False
        # put buttons in a box
        anim_box = QtGui.QGroupBox(self)
        anim_layout = QtGui.QGridLayout(anim_box)
        anim_box.setTitle("Animations")
        # create animation buttons and add to layout
        col = 0
        row = 1
        for anim in self.animations:
            button = QtGui.QPushButton(anim.lower().replace("\"", ""), anim_box)
            button.clicked.connect(partial(self.ros_node.send_motion_message, anim))
            # if in the top left, make button green
            if (col < 3 and row < 7):
                button.setStyleSheet('QPushButton {color: green;}')
            # if in top right, make button red
            if (col > 2 and row < 3):
                button.setStyleSheet('QPushButton {color: red;}')
            anim_layout.addWidget(button, row, col)
            col += 1
            if(col >= 4):  # four animation buttons per row
                col = 0
                row += 1
        # set button to toggle Hold Last Frame
        row += 1
        self.anim_trans_button = QtGui.QPushButton("Turn Hold-Last-Frame ON", anim_box)
        self.anim_trans_button.setStyleSheet('QPushButton {color: green;}')
        self.anim_trans_button.clicked.connect(self.on_hold_last_frame_pressed)
        anim_layout.addWidget(self.anim_trans_button, row, 0)

    def on_hold_last_frame_pressed(self):
        """Toggles hold-last-frame mode and updates the button to match."""
        # fixed: both setText labels carried a stray leading '"' character,
        # inconsistent with the initial "Turn Hold-Last-Frame ON" button text
        if self.hold_last_frame:
            # switching to False state, so the next press takes us back to True
            self.anim_trans_button.setText('Turn Hold-Last-Frame ON')
            self.anim_trans_button.setStyleSheet('QPushButton {color: green;}')
            self.ros_node.send_anim_transition_message(JiboAction.ANIMTRANS_RESET)
        else:
            self.anim_trans_button.setText('Turn Hold-Last-Frame OFF')
            self.anim_trans_button.setStyleSheet('QPushButton {color: red;}')
            self.ros_node.send_anim_transition_message(JiboAction.ANIMTRANS_KEEP_LASTFRAME)
        # flip state to reflect button press
        self.hold_last_frame = not self.hold_last_frame

    def on_stop_record(self):
        """Switches the record button back to starting a recording.

        NOTE(review): self.record_button and self.on_start_record are not
        defined anywhere in this class — confirm this handler is still wired
        up, or whether it is dead code.
        """
        print("Stop Recording")
        self.record_button.clicked.disconnect()
        self.record_button.clicked.connect(self.on_start_record)
| 39.844037
| 125
| 0.686162
| 589
| 4,343
| 4.906621
| 0.385399
| 0.03045
| 0.049481
| 0.052595
| 0.190311
| 0.122491
| 0.122491
| 0.105882
| 0.105882
| 0
| 0
| 0.00607
| 0.241308
| 4,343
| 108
| 126
| 40.212963
| 0.871017
| 0.3965
| 0
| 0.113208
| 0
| 0
| 0.089216
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.056604
| false
| 0
| 0.075472
| 0
| 0.169811
| 0.018868
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
ae160d8656b4e6e4a094903dfd38d5d1ed77aedf
| 1,447
|
py
|
Python
|
es_common/command/check_reservations_command.py
|
ES-TUDelft/interaction-design-tool-ir
|
d6fffa8d76c9e3df4ed1f505ee9427e5af5b8082
|
[
"MIT"
] | 1
|
2021-03-07T12:36:13.000Z
|
2021-03-07T12:36:13.000Z
|
es_common/command/check_reservations_command.py
|
ES-TUDelft/interaction-design-tool-ir
|
d6fffa8d76c9e3df4ed1f505ee9427e5af5b8082
|
[
"MIT"
] | null | null | null |
es_common/command/check_reservations_command.py
|
ES-TUDelft/interaction-design-tool-ir
|
d6fffa8d76c9e3df4ed1f505ee9427e5af5b8082
|
[
"MIT"
] | 1
|
2021-02-20T15:10:37.000Z
|
2021-02-20T15:10:37.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# **
#
# ======================== #
# CHECK_RESERVATIONS_COMMAND #
# ======================== #
# Command for checking reservations.
#
# @author ES
# **
import logging
from collections import OrderedDict
from es_common.command.es_command import ESCommand
from es_common.enums.command_enums import ActionCommand
class CheckReservationsCommand(ESCommand):
    """Command for checking reservations."""

    def __init__(self, is_speech_related=False):
        super(CheckReservationsCommand, self).__init__(is_speech_related=is_speech_related)
        # NOTE(review): the logger name says "GetReservations" while the class
        # is CheckReservations — looks like a copy-paste; confirm before renaming.
        self.logger = logging.getLogger("GetReservations Command")
        self.command_type = ActionCommand.CHECK_RESERVATIONS

    # =======================
    # Override Parent methods
    # =======================
    def execute(self):
        """Executes the command; returns True on success, False otherwise."""
        success = False
        try:
            self.logger.info("Not implemented!")
        except Exception as e:
            self.logger.error("Error while checking the reservation! {}".format(e))
        # fixed: returning from a `finally` block silently swallows any
        # exception still in flight (including KeyboardInterrupt); a plain
        # return after the try/except preserves the intended behavior
        return success

    def reset(self):
        """No state to reset for this command."""
        pass

    def clone(self):
        """Returns a fresh instance of this command.

        NOTE(review): the clone does not carry over is_speech_related from
        the original — confirm whether that is intentional.
        """
        return CheckReservationsCommand()

    ###
    # SERIALIZATION
    ###
    def serialize(self):
        """Serializes the command id and type into an ordered mapping."""
        return OrderedDict([
            ("id", self.id),
            ("command_type", self.command_type.name)
        ])

    def deserialize(self, data, hashmap=None):
        """Restores the command id from `data`, registering it in `hashmap`."""
        # fixed: a mutable default ({}) is evaluated once and shared across
        # every call; use a None sentinel and build a fresh dict instead
        hashmap = {} if hashmap is None else hashmap
        self.id = data["id"]
        hashmap[data["id"]] = self
        return True
| 24.525424
| 91
| 0.591569
| 140
| 1,447
| 5.942857
| 0.485714
| 0.028846
| 0.054087
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.000912
| 0.24188
| 1,447
| 58
| 92
| 24.948276
| 0.757521
| 0.179682
| 0
| 0
| 0
| 0
| 0.083405
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| false
| 0.033333
| 0.133333
| 0.066667
| 0.5
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
ae16f26a49eb3ff276ad91bfaa98b238072f3c5f
| 2,471
|
py
|
Python
|
mr/hermes/tests.py
|
dokai/mr.hermes
|
a7809af6ebeebc7e2df4aea7d69c571e78abce03
|
[
"MIT"
] | null | null | null |
mr/hermes/tests.py
|
dokai/mr.hermes
|
a7809af6ebeebc7e2df4aea7d69c571e78abce03
|
[
"MIT"
] | null | null | null |
mr/hermes/tests.py
|
dokai/mr.hermes
|
a7809af6ebeebc7e2df4aea7d69c571e78abce03
|
[
"MIT"
] | null | null | null |
# coding: utf-8
from email.mime.text import MIMEText
from email.parser import Parser
import os
import pytest
@pytest.fixture
def debugsmtp(request, tmpdir):
    """Yields a DebuggingServer bound to ephemeral localhost ports.

    Mail storage is pointed at pytest's per-test tmpdir; the server is
    closed again on teardown.
    """
    # NOTE(review): `request` is unused here — possibly a leftover.
    from mr.hermes import DebuggingServer
    debugsmtp = DebuggingServer(('localhost', 0), ('localhost', 0))
    debugsmtp.path = str(tmpdir)
    yield debugsmtp
    debugsmtp.close()
@pytest.fixture
def debugsmtp_thread(debugsmtp):
    """Runs the asyncore loop serving `debugsmtp` in a background thread.

    Closing the server on teardown lets the asyncore loop terminate so the
    thread can be joined.
    """
    import asyncore
    import threading
    thread = threading.Thread(
        target=asyncore.loop,
        kwargs=dict(
            timeout=1))
    thread.start()
    yield thread
    debugsmtp.close()
    thread.join()
@pytest.fixture
def sendmail(debugsmtp, debugsmtp_thread):
    """Returns a helper that delivers a message to the debug SMTP server.

    The helper connects to whatever port the server bound, sends the
    message, and disconnects.
    """
    def sendmail(msg):
        import smtplib
        (host, port) = debugsmtp.socket.getsockname()
        s = smtplib.SMTP(host, port)
        s.sendmail(msg['From'], [msg['To']], msg.as_string())
        s.quit()
    return sendmail
@pytest.fixture
def email_msg():
    """Returns a minimal UTF-8 text email with fixed sender and receiver."""
    msg = MIMEText(u'Söme text', 'plain', 'utf-8')
    msg['Subject'] = 'Testmail'
    msg['From'] = 'sender@example.com'
    msg['To'] = 'receiver@example.com'
    return msg
def test_mails_filename_order(debugsmtp):
    """Stored mail files must sort back into the order they were received."""
    me = 'bar@example.com'
    you = 'foo@example.com'
    for i in range(10):
        msg = MIMEText('Mail%02i.' % i)
        msg['Subject'] = 'Test'
        msg['From'] = me
        msg['To'] = you
        debugsmtp.process_message(('localhost', 0), me, [you], msg.as_string())
    mail_content = []
    # mails are stored under a folder named after the recipient address
    path = os.path.join(debugsmtp.path, 'foo@example.com')
    # fixed: os.listdir returns entries in arbitrary order, so the ordered
    # assertion below could fail spuriously; sort the filenames explicitly —
    # which is exactly the property ("filename order") under test
    for filename in sorted(os.listdir(path)):
        with open(os.path.join(path, filename)) as f:
            msg = Parser().parsestr(f.read())
            mail_content.append(msg.get_payload())
    assert mail_content == [
        'Mail00.', 'Mail01.', 'Mail02.', 'Mail03.', 'Mail04.',
        'Mail05.', 'Mail06.', 'Mail07.', 'Mail08.', 'Mail09.']
def test_functional(sendmail, email_msg, tmpdir):
    """End-to-end: a sent mail lands on disk as a parseable .eml file."""
    sendmail(email_msg)
    # one folder per recipient address
    (receiver,) = tmpdir.listdir()
    assert receiver.basename == 'receiver@example.com'
    (email_path,) = receiver.listdir()
    assert email_path.basename.endswith('.eml')
    with email_path.open() as f:
        email = Parser().parsestr(f.read())
    # decode the payload using the charset declared in the message itself
    body = email.get_payload(decode=True)
    body = body.decode(email.get_content_charset())
    assert email['Subject'] == 'Testmail'
    assert email['From'] == 'sender@example.com'
    assert email['To'] == 'receiver@example.com'
    assert u'Söme text' in body
| 29.070588
| 79
| 0.631728
| 302
| 2,471
| 5.096026
| 0.350993
| 0.051982
| 0.041585
| 0.032489
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.015496
| 0.216512
| 2,471
| 84
| 80
| 29.416667
| 0.779442
| 0.005261
| 0
| 0.084507
| 0
| 0
| 0.140065
| 0
| 0
| 0
| 0
| 0
| 0.098592
| 1
| 0.098592
| false
| 0
| 0.112676
| 0
| 0.239437
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
ae18dc2b432f7078f03eeb502869d0c99af4f1dd
| 21,967
|
py
|
Python
|
src/lib/pipeline.py
|
nelhage/data
|
50a1ab91b786c9f89a8ff6ff10ea57ea5335490d
|
[
"Apache-2.0"
] | null | null | null |
src/lib/pipeline.py
|
nelhage/data
|
50a1ab91b786c9f89a8ff6ff10ea57ea5335490d
|
[
"Apache-2.0"
] | 1
|
2022-03-02T14:54:27.000Z
|
2022-03-02T14:54:27.000Z
|
src/lib/pipeline.py
|
nelhage/data
|
50a1ab91b786c9f89a8ff6ff10ea57ea5335490d
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import re
import uuid
import warnings
import importlib
import traceback
import subprocess
from io import StringIO
from pathlib import Path
from functools import partial
from multiprocessing import cpu_count
from typing import Any, Callable, Dict, List, Optional, Tuple, Union
import yaml
import numpy
import requests
from pandas import DataFrame, Int64Dtype, isnull, isna, read_csv, NA
from tqdm import tqdm
from .anomaly import detect_anomaly_all, detect_stale_columns
from .cast import column_convert
from .concurrent import process_map
from .net import download_snapshot
from .io import read_file, fuzzy_text, export_csv
from .utils import (
ROOT,
CACHE_URL,
combine_tables,
drop_na_records,
filter_output_columns,
infer_new_and_total,
stratify_age_and_sex,
)
class DataSource:
"""
Interface for data sources. A data source consists of a series of steps performed in the
following order:
1. Fetch: download resources into raw data
1. Parse: convert raw data to structured format
1. Merge: associate each record with a known `key`
The default implementation of a data source includes the following functionality:
* Fetch: downloads raw data from a list of URLs into ../snapshots folder. See [lib.net].
* Merge: outputs a key from the auxiliary dataset after performing best-effort matching.
The merge function provided here is crucial for many sources that use it. The easiest/fastest
way to merge records is by providing the exact `key` that will match an existing record in the
[data/metadata.csv] file.
"""
config: Dict[str, Any]
def __init__(self, config: Dict[str, Any] = None):
    """Stores the data source's configuration (empty dict when omitted)."""
    super().__init__()
    # `or {}` also replaces an explicitly passed empty dict — harmless here
    self.config = config or {}
def fetch(
    self, output_folder: Path, cache: Dict[str, str], fetch_opts: List[Dict[str, Any]]
) -> List[str]:
    """
    Downloads the required resources and returns a list of local paths.

    Args:
        output_folder: Root folder where snapshot, intermediate and tables will be placed.
        cache: Map of data sources that are stored in the cache layer (used for daily-only).
        fetch_opts: Additional options defined in the DataPipeline config.yaml.

    Returns:
        List[str]: List of absolute paths where the fetched resources were stored, in the same
            order as they are defined in `config`.
    """
    # NOTE(review): `cache` is unused by this default implementation —
    # presumably consumed by subclasses; confirm.
    return [
        download_snapshot(source_config["url"], output_folder, **source_config.get("opts", {}))
        for source_config in fetch_opts
    ]
def _read(self, file_paths: List[str], **read_opts) -> List[DataFrame]:
""" Reads a raw file input path into a DataFrame """
return [read_file(file_path, **read_opts) for file_path in file_paths]
def parse(self, sources: List[str], aux: Dict[str, DataFrame], **parse_opts) -> DataFrame:
""" Parses a list of raw data records into a DataFrame. """
# Some read options are passed as parse_opts
read_opts = {k: v for k, v in parse_opts.items() if k in ("sep",)}
return self.parse_dataframes(self._read(sources, **read_opts), aux, **parse_opts)
def parse_dataframes(
self, dataframes: List[DataFrame], aux: Dict[str, DataFrame], **parse_opts
) -> DataFrame:
""" Parse the inputs into a single output dataframe """
raise NotImplementedError()
def merge(self, record: Dict[str, Any], aux: Dict[str, DataFrame]) -> Optional[str]:
"""
Outputs a key used to merge this record with the datasets.
The key must be present in the `aux` DataFrame index.
"""
# Merge only needs the metadata auxiliary data table
metadata = aux["metadata"]
# Exact key match might be possible and it's the fastest option
if "key" in record and not isnull(record["key"]):
if record["key"] in metadata["key"].values:
return record["key"]
else:
warnings.warn("Key provided but not found in metadata: {}".format(record))
return None
# Start by filtering the auxiliary dataset as much as possible
for column_prefix in ("country", "subregion1", "subregion2"):
for column_suffix in ("code", "name"):
column = "{}_{}".format(column_prefix, column_suffix)
if column not in record:
continue
elif isnull(record[column]):
metadata = metadata[metadata[column].isna()]
elif record[column]:
metadata = metadata[metadata[column] == record[column]]
# Auxiliary dataset might have a single record left, then we are done
if len(metadata) == 1:
return metadata.iloc[0]["key"]
# Compute a fuzzy version of the record's match string for comparison
match_string = fuzzy_text(record["match_string"]) if "match_string" in record else None
# Provided match string could be a subregion code / name
if match_string is not None:
for column_prefix in ("subregion1", "subregion2"):
for column_suffix in ("code", "name"):
column = "{}_{}".format(column_prefix, column_suffix)
aux_match = metadata[column + "_fuzzy"] == match_string
if sum(aux_match) == 1:
return metadata[aux_match].iloc[0]["key"]
# Provided match string could be identical to `match_string` (or with simple fuzzy match)
if match_string is not None:
aux_match_1 = metadata["match_string_fuzzy"] == match_string
if sum(aux_match_1) == 1:
return metadata[aux_match_1].iloc[0]["key"]
aux_match_2 = metadata["match_string"] == record["match_string"]
if sum(aux_match_2) == 1:
return metadata[aux_match_2].iloc[0]["key"]
# Last resort is to match the `match_string` column with a regex from aux
if match_string is not None:
aux_mask = ~metadata["match_string"].isna()
aux_regex = metadata["match_string"][aux_mask].apply(
lambda x: re.compile(x, re.IGNORECASE)
)
for search_string in (match_string, record["match_string"]):
aux_match = aux_regex.apply(lambda x: True if x.match(search_string) else False)
if sum(aux_match) == 1:
metadata = metadata[aux_mask]
return metadata[aux_match].iloc[0]["key"]
# Uncomment when debugging mismatches
# print(aux_regex)
# print(match_string)
# print(record)
# print(metadata)
# raise ValueError()
warnings.warn("No key match found for:\n{}".format(record))
return None
def run(
self,
output_folder: Path,
cache: Dict[str, str],
aux: Dict[str, DataFrame],
skip_existing: bool = False,
) -> DataFrame:
"""
Executes the fetch, parse and merge steps for this data source.
Args:
output_folder: Root folder where snapshot, intermediate and tables will be placed.
cache: Map of data sources that are stored in the cache layer (used for daily-only).
aux: Map of auxiliary DataFrames used as part of the processing of this DataSource.
skip_existing: Flag indicating whether to use the locally stored snapshots if possible.
Returns:
DataFrame: Processed data, with columns defined in config.yaml corresponding to the
DataPipeline that this DataSource is part of.
"""
data: DataFrame = None
# Insert skip_existing flag to fetch options if requested
fetch_opts = self.config.get("fetch", [])
if skip_existing:
for opt in fetch_opts:
opt["opts"] = {**opt.get("opts", {}), "skip_existing": True}
# Fetch the data, feeding the cached resources to the fetch step
data = self.fetch(output_folder, cache, fetch_opts)
# Make yet another copy of the auxiliary table to avoid affecting future steps in `parse`
parse_opts = self.config.get("parse", {})
data = self.parse(data, {name: df.copy() for name, df in aux.items()}, **parse_opts)
# Merge expects for null values to be NaN (otherwise grouping does not work as expected)
data.replace([None], numpy.nan, inplace=True)
# Merging is done record by record, but can be sped up if we build a map first aggregating
# by the non-temporal fields and only matching the aggregated records with keys
merge_opts = self.config.get("merge", {})
key_merge_columns = [
col for col in data if col in aux["metadata"].columns and len(data[col].unique()) > 1
]
if not key_merge_columns or (merge_opts and merge_opts.get("serial")):
data["key"] = data.apply(lambda x: self.merge(x, aux), axis=1)
else:
# "_nan_magic_number" replacement necessary to work around
# https://github.com/pandas-dev/pandas/issues/3729
# This issue will be fixed in Pandas 1.1
_nan_magic_number = -123456789
grouped_data = (
data.fillna(_nan_magic_number)
.groupby(key_merge_columns)
.first()
.reset_index()
.replace([_nan_magic_number], numpy.nan)
)
# Build a _vec column used to merge the key back from the groups into data
make_key_vec = lambda x: "|".join([str(x[col]) for col in key_merge_columns])
grouped_data["_vec"] = grouped_data.apply(make_key_vec, axis=1)
data["_vec"] = data.apply(make_key_vec, axis=1)
# Iterate only over the grouped data to merge with the metadata key
grouped_data["key"] = grouped_data.apply(lambda x: self.merge(x, aux), axis=1)
# Merge the grouped data which has key back with the original data
if "key" in data.columns:
data = data.drop(columns=["key"])
data = data.merge(grouped_data[["key", "_vec"]], on="_vec").drop(columns=["_vec"])
# Drop records which have no key merged
# TODO: log records with missing key somewhere on disk
data = data.dropna(subset=["key"])
# Filter out data according to the user-provided filter function
if "query" in self.config:
data = data.query(self.config["query"])
# Get the schema of our index table, necessary for processing to infer which columns in the
# data belong to the index and should not be aggregated
index_schema = DataPipeline.load("index").schema
# Provide a stratified view of certain key variables
if any(stratify_column in data.columns for stratify_column in ("age", "sex")):
data = stratify_age_and_sex(data, index_schema)
# Process each record to add missing cumsum or daily diffs
data = infer_new_and_total(data, index_schema)
# Return the final dataframe
return data
class DataPipeline:
    """
    A pipeline chain is a collection of individual [DataSource]s which produce a full table
    ready for output. This is a very thin wrapper that runs the data pipelines and combines their
    outputs.
    One of the reasons for a dedicated class is to allow for discovery of [DataPipeline] objects
    via reflection, users of this class are encouraged to override its methods if custom processing
    is required.
    A pipeline chain is responsible for loading the auxiliary datasets that are passed to the
    individual pipelines. Pipelines can load data themselves, but if the same auxiliary dataset
    is used by many of them it is more efficient to load it here.
    """

    schema: Dict[str, Any]
    """ Names and corresponding dtypes of output columns """

    data_sources: List[Tuple[DataSource, Dict[str, Any]]]
    """ List of <data source, option> tuples executed in order """

    auxiliary_tables: Dict[str, Union[Path, str]] = {
        "metadata": ROOT / "src" / "data" / "metadata.csv"
    }
    """ Auxiliary datasets passed to the pipelines during processing """

    def __init__(
        self,
        schema: Dict[str, type],
        auxiliary: Dict[str, Union[Path, str]],
        data_sources: List[Tuple[DataSource, Dict[str, Any]]],
    ):
        super().__init__()
        self.schema = schema
        # Keep the class-level defaults and extend them with instance-specific tables
        self.auxiliary_tables = {**self.auxiliary_tables, **auxiliary}
        self.data_sources = data_sources

    @staticmethod
    def load(name: str) -> "DataPipeline":
        """
        Loads a pipeline by name, reading its config.yaml and dynamically importing each of
        the data source classes declared there.
        Args:
            name: Name of the folder under src/pipelines which contains a config.yaml.
        Returns:
            DataPipeline: Fully constructed pipeline instance.
        """
        config_path = ROOT / "src" / "pipelines" / name / "config.yaml"
        with open(config_path, "r") as fd:
            config_yaml = yaml.safe_load(fd)
        schema = {
            name: DataPipeline._parse_dtype(dtype) for name, dtype in config_yaml["schema"].items()
        }
        auxiliary = {name: ROOT / path for name, path in config_yaml.get("auxiliary", {}).items()}
        pipelines = []
        for pipeline_config in config_yaml["sources"]:
            # "name" is a fully qualified class path, e.g. "pipelines.xx.src_file.SourceClass"
            module_tokens = pipeline_config["name"].split(".")
            class_name = module_tokens[-1]
            module_name = ".".join(module_tokens[:-1])
            module = importlib.import_module(module_name)
            pipelines.append(getattr(module, class_name)(pipeline_config))
        return DataPipeline(schema, auxiliary, pipelines)

    @staticmethod
    def _parse_dtype(dtype_name: str) -> type:
        """
        Maps a dtype name from config.yaml to the type used for column conversion.
        Args:
            dtype_name: One of "str", "int" or "float".
        Raises:
            TypeError: for any unsupported dtype name.
        """
        if dtype_name == "str":
            return str
        if dtype_name == "int":
            # Nullable integer dtype, so missing values do not force a cast to float
            return Int64Dtype()
        if dtype_name == "float":
            return float
        raise TypeError(f"Unsupported dtype: {dtype_name}")

    def output_table(self, data: DataFrame) -> DataFrame:
        """
        This function performs the following operations:
        1. Filters out columns not in the output schema
        2. Converts each column to the appropriate type
        3. Sorts the values based on the column order
        4. Outputs the resulting data
        """
        output_columns = list(self.schema.keys())
        # Make sure all columns are present and have the appropriate type
        for column, dtype in self.schema.items():
            if column not in data:
                data[column] = None
            data[column] = column_convert(data[column], dtype)
        # Filter only output columns and output the sorted data
        return drop_na_records(data[output_columns], ["date", "key"]).sort_values(output_columns)

    @staticmethod
    def _run_wrapper(
        output_folder: Path,
        cache: Dict[str, str],
        aux: Dict[str, DataFrame],
        data_source: DataSource,
    ) -> Optional[DataFrame]:
        """ Workaround necessary for multiprocess pool, which does not accept lambda functions """
        try:
            return data_source.run(output_folder, cache, aux)
        except Exception:
            # Sandbox failures: a single broken source must not kill the whole pipeline
            data_source_name = data_source.__class__.__name__
            warnings.warn(
                f"Error running data source {data_source_name} with config {data_source.config}"
            )
            traceback.print_exc()
        return None

    def run(
        self,
        pipeline_name: str,
        output_folder: Path,
        process_count: int = cpu_count(),
        verify: str = "simple",
        progress: bool = True,
    ) -> DataFrame:
        """
        Main method which executes all the associated [DataSource] objects and combines their
        outputs.
        Args:
            pipeline_name: Name of this pipeline, used for progress and warning messages.
            output_folder: Root folder where snapshot, intermediate and tables will be placed.
            process_count: Number of worker processes; <= 1 runs all sources serially.
            verify: Anomaly detection level, "simple" or "full".
            progress: Whether to display progress bars while running.
        Returns:
            DataFrame: Combined output of all data sources in this pipeline.
        """
        # Read the cache directory from our cloud storage
        try:
            cache = requests.get("{}/sitemap.json".format(CACHE_URL)).json()
        except Exception:
            # FIX: this was a bare `except:`, which also swallowed SystemExit and
            # KeyboardInterrupt; an unavailable cache should only absorb runtime errors.
            cache = {}
            warnings.warn("Cache unavailable")
        # Read the auxiliary input files into memory
        aux = {name: read_file(file_name) for name, file_name in self.auxiliary_tables.items()}
        # Precompute some useful transformations in the auxiliary input files
        aux["metadata"]["match_string_fuzzy"] = aux["metadata"].match_string.apply(fuzzy_text)
        for category in ("country", "subregion1", "subregion2"):
            for suffix in ("code", "name"):
                column = "{}_{}".format(category, suffix)
                aux["metadata"]["{}_fuzzy".format(column)] = aux["metadata"][column].apply(
                    fuzzy_text
                )
        # Get all the pipeline outputs
        # This operation is parallelized but output order is preserved
        # Make a copy of the auxiliary table to prevent modifying it for everyone, but this way
        # we allow for local modification (which might be wanted for optimization purposes)
        aux_copy = {name: df.copy() for name, df in aux.items()}
        # Create a function to be used during mapping. The nestedness is an unfortunate outcome of
        # the multiprocessing module's limitations when dealing with lambda functions, coupled with
        # the "sandboxing" we implement to ensure resiliency.
        run_func = partial(DataPipeline._run_wrapper, output_folder, cache, aux_copy)
        # If the process count is less than one, run in series (useful to evaluate performance)
        data_sources_count = len(self.data_sources)
        progress_label = f"Run {pipeline_name} pipeline"
        if process_count <= 1 or data_sources_count <= 1:
            map_func = tqdm(
                map(run_func, self.data_sources),
                total=data_sources_count,
                desc=progress_label,
                disable=not progress,
            )
        else:
            map_func = process_map(
                run_func, self.data_sources, desc=progress_label, disable=not progress
            )
        # Save all intermediate results (to allow for reprocessing)
        intermediate_outputs = output_folder / "intermediate"
        intermediate_outputs_files = []
        for data_source, result in zip(self.data_sources, map_func):
            data_source_class = data_source.__class__
            data_source_config = str(data_source.config)
            source_full_name = f"{data_source_class.__module__}.{data_source_class.__name__}"
            # Stable name derived from class + config, so re-runs overwrite the same file
            intermediate_name = uuid.uuid5(
                uuid.NAMESPACE_DNS, f"{source_full_name}.{data_source_config}"
            )
            intermediate_file = intermediate_outputs / f"{intermediate_name}.csv"
            intermediate_outputs_files += [intermediate_file]
            if result is not None:
                export_csv(result, intermediate_file)
        # Reload all intermediate results from disk
        # In-memory results are discarded, this ensures reproducibility and allows for data sources
        # to fail since the last successful intermediate result will be used in the combined output
        pipeline_outputs = []
        for source_output in intermediate_outputs_files:
            try:
                pipeline_outputs += [read_file(source_output)]
            except Exception as exc:
                warnings.warn(f"Failed to read intermediate file {source_output}. Error: {exc}")
        # Get rid of all columns which are not part of the output to speed up data combination
        pipeline_outputs = [
            source_output[filter_output_columns(source_output.columns, self.schema)]
            for source_output in pipeline_outputs
        ]
        # Combine all pipeline outputs into a single DataFrame
        if not pipeline_outputs:
            warnings.warn("Empty result for pipeline chain {}".format(pipeline_name))
            data = DataFrame(columns=self.schema.keys())
        else:
            progress_label = pipeline_name if progress else None
            data = combine_tables(pipeline_outputs, ["date", "key"], progress_label=progress_label)
        # Return data using the pipeline's output parameters
        data = self.output_table(data)
        # Skip anomaly detection unless requested
        if verify == "simple":
            # Validate that the table looks good
            detect_anomaly_all(self.schema, data, [pipeline_name])
        if verify == "full":
            # Perform stale column detection for each known key
            map_iter = data.key.unique()
            map_func = lambda key: detect_stale_columns(
                self.schema, data[data.key == key], (pipeline_name, key)
            )
            progress_label = f"Verify {pipeline_name} pipeline"
            if process_count <= 1 or len(map_iter) <= 1:
                map_func = tqdm(
                    map(map_func, map_iter),
                    total=len(map_iter),
                    desc=progress_label,
                    disable=not progress,
                )
            else:
                map_func = process_map(
                    map_func, map_iter, desc=progress_label, disable=not progress
                )
                # Show progress as the results arrive if requested
                if progress:
                    map_func = tqdm(
                        map_func, total=len(map_iter), desc=f"Verify {pipeline_name} pipeline"
                    )
            # Consume the results
            _ = list(map_func)
        return data
| 42.489362
| 99
| 0.628488
| 2,756
| 21,967
| 4.867199
| 0.20537
| 0.018861
| 0.005218
| 0.007082
| 0.150738
| 0.119129
| 0.09751
| 0.080215
| 0.057701
| 0.053377
| 0
| 0.004344
| 0.287386
| 21,967
| 516
| 100
| 42.571705
| 0.852562
| 0.32212
| 0
| 0.177258
| 0
| 0
| 0.076285
| 0.008507
| 0
| 0
| 0
| 0.001938
| 0
| 1
| 0.043478
| false
| 0
| 0.076923
| 0
| 0.207358
| 0.003344
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
ae22121e986bc6059cb536b9769429d2efd4c361
| 1,665
|
py
|
Python
|
python/advent_of_code/y2015/day01.py
|
stonecharioteer/advent-of-code
|
c18e47e378e82f82b77558a114e7d7c3a43c8429
|
[
"MIT"
] | null | null | null |
python/advent_of_code/y2015/day01.py
|
stonecharioteer/advent-of-code
|
c18e47e378e82f82b77558a114e7d7c3a43c8429
|
[
"MIT"
] | null | null | null |
python/advent_of_code/y2015/day01.py
|
stonecharioteer/advent-of-code
|
c18e47e378e82f82b77558a114e7d7c3a43c8429
|
[
"MIT"
] | null | null | null |
"""--- Day 1: Not Quite Lisp ---
Santa was hoping for a white Christmas, but his weather machine's "snow" function is powered by stars, and he's fresh out! To save Christmas, he needs you to collect fifty stars by December 25th.
Collect stars by helping Santa solve puzzles. Two puzzles will be made available on each day in the Advent calendar; the second puzzle is unlocked when you complete the first. Each puzzle grants one star. Good luck!
Here's an easy puzzle to warm you up.
Santa is trying to deliver presents in a large apartment building, but he can't find the right floor - the directions he got are a little confusing. He starts on the ground floor (floor 0) and then follows the instructions one character at a time.
An opening parenthesis, (, means he should go up one floor, and a closing parenthesis, ), means he should go down one floor.
The apartment building is very tall, and the basement is very deep; he will never find the top or bottom floors.
For example:
(()) and ()() both result in floor 0.
((( and (()(()( both result in floor 3.
))((((( also results in floor 3.
()) and ))( both result in floor -1 (the first basement level).
))) and )())()) both result in floor -3.
To what floor do the instructions take Santa?"""
from typing import TextIO, Tuple
def run(inp: TextIO) -> Tuple[int, int]:
    """Follow the parenthesis instructions read from *inp*.

    Each "(" goes up one floor and each ")" goes down one; other characters
    are ignored.  Returns a tuple of the final floor and the 1-based position
    of the first instruction that enters the basement (floor -1), or None for
    that position if the basement is never reached.
    """
    floor = 0
    basement = None
    for position, ch in enumerate(inp.read(), start=1):
        if ch == "(":
            floor += 1
        elif ch == ")":
            floor -= 1
        if basement is None and floor == -1:
            basement = position
    return floor, basement
| 42.692308
| 247
| 0.684685
| 266
| 1,665
| 4.285714
| 0.507519
| 0.030702
| 0.045614
| 0.052632
| 0.117544
| 0.036842
| 0
| 0
| 0
| 0
| 0
| 0.01087
| 0.226426
| 1,665
| 38
| 248
| 43.815789
| 0.874224
| 0.761562
| 0
| 0
| 0
| 0
| 0.005181
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.076923
| false
| 0
| 0.076923
| 0
| 0.230769
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
ae2625e0bfcb85513b735f8abfbccb014e1bc0b8
| 875
|
py
|
Python
|
setup.py
|
nbgallery/ipylogging
|
fa54a7ace0262398b5d7a9dd3ec6938248a70752
|
[
"MIT"
] | 1
|
2021-10-18T22:12:37.000Z
|
2021-10-18T22:12:37.000Z
|
setup.py
|
nbgallery/ipylogging
|
fa54a7ace0262398b5d7a9dd3ec6938248a70752
|
[
"MIT"
] | null | null | null |
setup.py
|
nbgallery/ipylogging
|
fa54a7ace0262398b5d7a9dd3ec6938248a70752
|
[
"MIT"
] | null | null | null |
# vim: expandtab tabstop=4 shiftwidth=4
from setuptools import setup
# read the contents of your README file
from os import path

# The markdown README next to this file doubles as the PyPI long description.
this_directory = path.abspath(path.dirname(__file__))
with open(path.join(this_directory, 'README.md'), 'r') as f:
    long_description = f.read()

setup(
    name='ipylogging',
    # NOTE(review): looks like calendar-style versioning (year.day.patch) -- confirm
    version='2020.342.1',
    author='Bill Allen',
    author_email='photo.allen@gmail.com',
    description='Easy log messages in Jupyter notebooks.',
    long_description=long_description,
    long_description_content_type='text/markdown',
    license='MIT',
    keywords='logging logger logs ipython jupyter notebook messages'.split(),
    url='https://github.com/nbgallery/ipylogging',
    packages=['ipylogging'],
    classifiers=[
        'Development Status :: 4 - Beta',
        'Topic :: Utilities',
        'License :: OSI Approved :: MIT License'
    ]
)
| 30.172414
| 77
| 0.691429
| 107
| 875
| 5.53271
| 0.71028
| 0.101351
| 0.064189
| 0.101351
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.015363
| 0.181714
| 875
| 28
| 78
| 31.25
| 0.811453
| 0.085714
| 0
| 0
| 0
| 0
| 0.38143
| 0.026349
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.086957
| 0
| 0.086957
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
ae288231dc020ec00eec037bd175a4539730e6b8
| 2,594
|
py
|
Python
|
utils/i18n.py
|
minsukkahng/pokr.kr
|
169475778c998b4198ac7d6a1cebbc3c389e41b8
|
[
"Apache-2.0"
] | 76
|
2015-01-19T12:39:43.000Z
|
2021-10-14T06:10:25.000Z
|
utils/i18n.py
|
minsukkahng/pokr.kr
|
169475778c998b4198ac7d6a1cebbc3c389e41b8
|
[
"Apache-2.0"
] | 22
|
2015-01-03T01:00:53.000Z
|
2019-09-14T11:55:06.000Z
|
utils/i18n.py
|
minsukkahng/pokr.kr
|
169475778c998b4198ac7d6a1cebbc3c389e41b8
|
[
"Apache-2.0"
] | 28
|
2015-01-14T15:45:00.000Z
|
2020-06-03T13:29:41.000Z
|
from babel import Locale
from flask import current_app as cur_app, request
from flask.ext.babel import Babel, get_locale
from functools import wraps
from popong_nlp.utils.translit import translit
__all__ = ['PopongBabel']
class PopongBabel(Babel):
    """Babel subclass that wires locale selection, jinja translit filters and
    the locale context processor into a Flask app on initialization."""

    def init_app(self, app):
        """Initialize Babel for `app` and install the module's helpers on it."""
        super(PopongBabel, self).init_app(app)
        self.localeselector(localeselector)
        # shortcuts
        app.babel = self
        # English has no translation catalog, so it is appended explicitly
        app.LOCALES = self.list_translations() + [Locale('en')]
        # cmd-line locale option
        if hasattr(app, 'locale') and getattr(app, 'locale') in app.LOCALES:
            app.babel.force_locale(app.locale)
        # jinja filters
        app.jinja_env.filters['translit'] = filter_translit
        app.jinja_env.globals.update(translit=filter_translit)
        # context processor
        app.context_processor(inject_locales)

    def force_locale(self, locale):
        # Pin the locale selector so it always returns the given locale.
        self.locale_selector_func = lambda: locale
class InvalidLocaleError(Exception):
    """Raised when a locale is not among the app's supported locales."""
class NotInAppContextError(Exception):
    """Raised when a babel helper is used outside an initialized Flask app."""
def babel_context(f):
    """Decorator: only allow `f` to run once `PopongBabel.init_app` has
    populated `app.babel` and `app.LOCALES` on the current app.

    FIX: the original applied `@wraps` directly to `babel_context` itself
    instead of `@wraps(f)` on the inner wrapper, which corrupted the
    decorator; the wrapper also discarded `f`'s return value.

    Raises:
        NotInAppContextError: when invoked outside an initialized app context.
    """
    @wraps(f)
    def decorated(*args, **kwargs):
        if not hasattr(cur_app, 'babel') or not hasattr(cur_app, 'LOCALES'):
            raise NotInAppContextError()
        return f(*args, **kwargs)
    return decorated
@babel_context
def is_valid_locale(locale):
    """Return True iff `locale` is among the current app's configured locales."""
    return locale in cur_app.LOCALES
def assert_valid_locale(locale):
    """Raise InvalidLocaleError unless `locale` is supported by the app."""
    if is_valid_locale(locale):
        return
    raise InvalidLocaleError()
def host(locale=None):
    """Return the request host with its locale subdomain swapped for `locale`.

    Raises InvalidLocaleError when `locale` is not supported.
    """
    assert_valid_locale(locale)
    parts = request.host.split('.', 1)
    # Strip an existing locale subdomain before prepending the new one.
    if len(parts) >= 2 and is_valid_locale(parts[0]):
        base = parts[1]
    else:
        base = request.host
    return '{locale}.{host}'.format(locale=locale, host=base)
@babel_context
def localeselector():
    """Pick the locale from the request's subdomain, falling back to the
    app's default locale when the subdomain is not a supported locale."""
    candidate = request.host.split('.', 1)[0]
    if is_valid_locale(candidate):
        return candidate
    return cur_app.babel.default_locale
@babel_context
def inject_locales():
    """Template context: per-locale URLs for the current page and the active locale."""
    # TODO: caching
    links = {}
    for locale in cur_app.LOCALES:
        links[locale] = request.url.replace(request.host, host(locale))
    return dict(locale_links=links,
                locale=str(get_locale()))
def filter_translit(*args, **kwargs):
    """Jinja filter: transliterate a Korean string into the active locale.

    With one positional argument, returns the transliteration (or the string
    unchanged when the active locale is Korean).  With no positional argument,
    returns a partially-applied filter that remembers the `type` keyword.
    """
    locale = str(get_locale())
    _type = kwargs.get('type')
    if len(args) > 1:
        raise Exception('filter_translit() only accepts one or zero argument')
    if not args:
        # Curried form for use as a configured jinja filter.
        return lambda x: filter_translit(x, type=_type)
    string = args[0]
    if locale == 'ko':
        return string
    return translit(string, 'ko', locale, _type)
| 24.018519
| 82
| 0.662298
| 328
| 2,594
| 5.070122
| 0.292683
| 0.050511
| 0.051112
| 0.034275
| 0.054119
| 0.028864
| 0
| 0
| 0
| 0
| 0
| 0.004012
| 0.231303
| 2,594
| 107
| 83
| 24.242991
| 0.82999
| 0.030069
| 0
| 0.132353
| 0
| 0
| 0.048207
| 0
| 0
| 0
| 0
| 0.009346
| 0.029412
| 1
| 0.147059
| false
| 0.029412
| 0.073529
| 0.014706
| 0.367647
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
ae28fbfcfc5475fc99a477407eec02fb25989dcb
| 5,240
|
py
|
Python
|
Model/lookalike-model/lookalike_model/application/pipeline/top_n_similarity_table_generator.py
|
sanjaynirmal/blue-marlin
|
725d614e941e5de76562d354edf11ac18897f242
|
[
"Apache-2.0"
] | 1
|
2020-03-06T09:41:49.000Z
|
2020-03-06T09:41:49.000Z
|
Model/lookalike-model/lookalike_model/application/pipeline/top_n_similarity_table_generator.py
|
sanjaynirmal/blue-marlin
|
725d614e941e5de76562d354edf11ac18897f242
|
[
"Apache-2.0"
] | null | null | null |
Model/lookalike-model/lookalike_model/application/pipeline/top_n_similarity_table_generator.py
|
sanjaynirmal/blue-marlin
|
725d614e941e5de76562d354edf11ac18897f242
|
[
"Apache-2.0"
] | null | null | null |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0.html
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import yaml
import argparse
import pyspark.sql.functions as fn
from pyspark import SparkContext
from pyspark.sql import HiveContext
from pyspark.sql.types import FloatType, StringType, StructType, StructField, ArrayType, MapType, StructType
# from rest_client import predict, str_to_intlist
import requests
import json
import argparse
from pyspark.sql.functions import udf
from math import sqrt
import time
import numpy as np
import itertools
import heapq
'''
This process generates the top-n-similarity table.
spark-submit --master yarn --num-executors 20 --executor-cores 5 --executor-memory 16G --driver-memory 16G --conf spark.driver.maxResultSize=5g --conf spark.hadoop.hive.exec.dynamic.partition=true --conf spark.hadoop.hive.exec.dynamic.partition.mode=nonstrict top_n_similarity_table_generator.py config.yml
The top-n-similarity table is
|user| top-N-similarity|top-n-users
|:-------------| :------------: |
|user-1-did| [similarity-score-11, similarity-score-12, similarity-score-13] |[user-did-1, user-did-2, user-did-3]|
|user-2-did| [similarity-score-21, similarity-score-22, similarity-score-23] |[user-did-10, user-did-20, user-did-30]|
|user-3-did| [similarity-score-31, similarity-score-32, similarity-score-33] |[user-did-23, user-did-87, user-did-45]|
'''
def __save_as_table(df, table_name, hive_context, create_table):
    """Persist `df` as the Hive table `table_name`.

    When `create_table` is true, any existing table is dropped and a new one is
    created from a temporary view of `df`.
    NOTE(review): when `create_table` is false nothing is written at all --
    confirm this is what callers expect.
    """
    if create_table:
        command = """
        DROP TABLE IF EXISTS {}
        """.format(table_name)
        hive_context.sql(command)
        df.createOrReplaceTempView("r907_temp_table")
        command = """
        CREATE TABLE IF NOT EXISTS {} as select * from r907_temp_table
        """.format(table_name)
        hive_context.sql(command)
def run(sc, hive_context, cfg):
    """
    Builds the top-N similarity table.

    For every user, computes the dot product between their score vector and the
    score vectors of the other users (processed one bucket window at a time to
    bound driver/broadcast memory), keeping only the N highest-scoring
    (did, score) pairs per user.

    Args:
        sc: SparkContext, used to broadcast each bucket of users.
        hive_context: HiveContext used to read and write the Hive tables.
        cfg: Parsed YAML configuration dictionary.

    Note: the unused `first_round` flag from the original was removed.
    """
    score_vector_alpha_table = cfg['score_vector_rebucketing']['score_vector_alpha_table']
    similarity_table = cfg['top_n_similarity']['similarity_table']
    N = cfg['top_n_similarity']['top_n']
    command = "SELECT did, score_vector FROM {}".format(score_vector_alpha_table)
    # |0004f3b4731abafa9ac54d04cb88782ed61d30531262decd799d91beb6d6246a|0 |
    # [0.24231663, 0.20828941, 0.0]|
    df = hive_context.sql(command)
    # Running top-N accumulator; starts out empty for every user.
    df = df.withColumn('top_n_user_score', fn.array())
    alpha_bucket_size = cfg['score_vector_rebucketing']['alpha_did_bucket_size']
    alpha_bucket_step = cfg['top_n_similarity']['alpha_did_bucket_step']
    for start_bucket in range(0, alpha_bucket_size, alpha_bucket_step):
        # FIX: each window must cover exactly one step of buckets; the original
        # used `alpha_bucket_size - 1` as the upper bound, which made successive
        # windows overlap and compared the same users multiple times.
        command = "SELECT did, did_bucket, score_vector, alpha_did_bucket FROM {} WHERE alpha_did_bucket BETWEEN {} AND {}".format(
            score_vector_alpha_table, start_bucket, start_bucket + alpha_bucket_step - 1)
        df_user = hive_context.sql(command)
        # Materialize this bucket of users on the driver and broadcast it so the
        # UDF below can score every user against the whole bucket at once.
        block_user = df_user.select('did', 'score_vector').collect()
        block_user = ([_['did'] for _ in block_user], [_['score_vector'] for _ in block_user])
        block_user_broadcast = sc.broadcast(block_user)

        def calculate_similarity(user_score_vector, top_n_user_score):
            # Dot products of this user's vector against every vector in the
            # broadcast block, merged with the running top-N and trimmed to N.
            user_score_vector = np.array(user_score_vector)
            dids, other_score_vectors = block_user_broadcast.value
            other_score_vectors = np.array(other_score_vectors)
            product = np.matmul(user_score_vector, other_score_vectors.transpose()).tolist()
            # FIX: `itertools.izip` does not exist in Python 3; `zip` is the equivalent.
            user_score_s = list(zip(dids, product))
            user_score_s.extend(top_n_user_score)
            user_score_s = heapq.nlargest(N, user_score_s, key=lambda x: x[1])
            return user_score_s

        elements_type = StructType([StructField('did', StringType(), False), StructField('score', FloatType(), False)])
        df = df.withColumn('top_n_user_score', udf(calculate_similarity, ArrayType(elements_type))(df.score_vector, df.top_n_user_score))
    __save_as_table(df.select('did', 'top_n_user_score'), similarity_table, hive_context, True)
if __name__ == "__main__":
    start = time.time()
    # The only command-line argument is the path to the YAML configuration file.
    parser = argparse.ArgumentParser(description=" ")
    parser.add_argument('config_file')
    args = parser.parse_args()
    with open(args.config_file, 'r') as yml_file:
        cfg = yaml.safe_load(yml_file)
    sc = SparkContext.getOrCreate()
    sc.setLogLevel('INFO')
    hive_context = HiveContext(sc)
    run(sc=sc, hive_context=hive_context, cfg=cfg)
    sc.stop()
    end = time.time()
    print('Runtime of the program is:', (end - start))
| 40.620155
| 306
| 0.717748
| 726
| 5,240
| 4.949036
| 0.326446
| 0.040078
| 0.027275
| 0.021709
| 0.126635
| 0.069023
| 0.056777
| 0
| 0
| 0
| 0
| 0.02694
| 0.171183
| 5,240
| 128
| 307
| 40.9375
| 0.800368
| 0.176145
| 0
| 0.115942
| 0
| 0
| 0.17554
| 0.032806
| 0
| 0
| 0
| 0
| 0
| 1
| 0.043478
| false
| 0
| 0.217391
| 0
| 0.275362
| 0.014493
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
ae294288f339abaa44909776daf88e26d1673f50
| 1,056
|
py
|
Python
|
lib/auth.py
|
p4lsec/autoshoppr
|
a0dba3060e26008c2d441358ff7f4a909ba4fcab
|
[
"MIT"
] | null | null | null |
lib/auth.py
|
p4lsec/autoshoppr
|
a0dba3060e26008c2d441358ff7f4a909ba4fcab
|
[
"MIT"
] | null | null | null |
lib/auth.py
|
p4lsec/autoshoppr
|
a0dba3060e26008c2d441358ff7f4a909ba4fcab
|
[
"MIT"
] | null | null | null |
from selenium import webdriver
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.common.by import By
import pickle
import configparser
class AmazonLogin:
    """Automates logging in to Amazon with Selenium using previously saved cookies.

    NOTE(review): assumes a pickled cookie jar exists at "amazon.pkl" and that
    "shoppr.conf" is readable from the working directory -- confirm with callers.
    """

    def __init__(self, driver=None):
        """Use the provided WebDriver, or start a fresh Chrome instance."""
        self.url = "https://www.amazon.com/your-account"
        if driver is not None:
            self.driver = driver
        else:
            self.driver = webdriver.Chrome()
        # 10-second explicit-wait helper; not used by login() itself but kept
        # available for callers.
        self.wait = WebDriverWait(self.driver, 10)

    def login(self):
        """Open the account page, load saved cookies and click 'Login & security'.

        Raises:
            Exception: if any step of the login flow fails; the original cause
            is chained onto the raised exception.
        """
        try:
            self.driver.get(self.url)
            self.load_cookies()
            self.driver.find_element_by_xpath("//*[contains(text(), 'Login & security')]").click()
            # NOTE(review): the parsed config is currently unused -- confirm intent.
            config = configparser.ConfigParser()
            config.read('shoppr.conf')
        except Exception as exc:
            # FIX: the original bare `except:` also swallowed KeyboardInterrupt and
            # raised a copy-pasted "Could not add to cart" message from an unrelated
            # flow; raise an accurate message and chain the real cause instead.
            raise Exception("Could not log in") from exc

    def load_cookies(self):
        """Load pickled cookies from amazon.pkl into the current browser session."""
        # FIX: use a context manager so the cookie file handle is closed promptly.
        with open("amazon.pkl", "rb") as cookie_file:
            cookies = pickle.load(cookie_file)
        for cookie in cookies:
            self.driver.add_cookie(cookie)
| 35.2
| 98
| 0.629735
| 123
| 1,056
| 5.317073
| 0.528455
| 0.107034
| 0.09633
| 0.085627
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.002584
| 0.267045
| 1,056
| 30
| 99
| 35.2
| 0.842377
| 0
| 0
| 0
| 0
| 0
| 0.113529
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.111111
| false
| 0
| 0.222222
| 0
| 0.37037
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
ae29895c6324b4119860a3e674198d1b40dd9964
| 1,317
|
py
|
Python
|
Verulean/days/aoc15.py
|
BasedJellyfish11/Advent-of-Code-2021
|
9ed84902958c99c341ec2444d5db561c84348911
|
[
"MIT"
] | 3
|
2021-12-03T22:40:17.000Z
|
2021-12-23T21:17:16.000Z
|
Verulean/days/aoc15.py
|
BasedJellyfish11/Advent-of-Code-2021
|
9ed84902958c99c341ec2444d5db561c84348911
|
[
"MIT"
] | null | null | null |
Verulean/days/aoc15.py
|
BasedJellyfish11/Advent-of-Code-2021
|
9ed84902958c99c341ec2444d5db561c84348911
|
[
"MIT"
] | null | null | null |
import numpy as np
import heapq
class PriorityQueue(list):
    """A plain list used as a binary min-heap through the heapq helpers."""

    def pop(self):
        """Remove and return the smallest element."""
        return heapq.heappop(self)

    def push(self, value):
        """Add *value* while keeping the heap invariant."""
        return heapq.heappush(self, value)


def neighbors(i, j):
    """Return the four orthogonally adjacent cells of (i, j)."""
    return tuple((i + di, j + dj) for di, dj in ((-1, 0), (1, 0), (0, -1), (0, 1)))


def numpy_dijkstra(costs):
    """Cheapest path cost through a 2-D grid from (0, 0) to the bottom-right
    corner, where entering a cell costs `costs[cell]` and the start cell
    itself is free.  Returns None if the corner is unreachable."""
    rows, cols = costs.shape
    target = (rows - 1, cols - 1)
    best = np.full_like(costs, np.inf)
    best[0, 0] = 0
    frontier = PriorityQueue()
    frontier.push((0, (0, 0)))
    while frontier:
        dist, node = frontier.pop()
        if node == target:
            return int(best[target])
        for cell in neighbors(*node):
            r, c = cell
            if not (0 <= r < rows and 0 <= c < cols):
                continue
            candidate = dist + costs[cell]
            if candidate < best[cell]:
                best[cell] = candidate
                frontier.push((candidate, cell))
def expand_block(block, M, N):
    """Tile `block` into an M x N super-grid of copies, where the copy at
    tile (r, c) has every value increased by r + c, wrapping values above
    9 back around to 1 (never to 0)."""
    m, n = block.shape
    # Per-tile increment: tile (r, c) is shifted by r + c, expanded to
    # cover each full m x n copy.
    bump = np.add.outer(np.arange(M), np.arange(N))
    bump = bump.repeat(m, axis=0).repeat(n, axis=1)
    tiled = np.tile(block, (M, N))
    # Map to 0..8, wrap, map back to 1..9.
    return (tiled + bump - 1) % 9 + 1
def solve(data):
    """Solve both puzzle parts from the raw row strings.

    Part A runs Dijkstra on the grid parsed from `data`; part B runs it
    on the same grid expanded 5x5 with wrapped increments.
    """
    grid = np.array([[float(ch) for ch in row] for row in data])
    part_a = numpy_dijkstra(grid)
    part_b = numpy_dijkstra(expand_block(grid, 5, 5))
    return part_a, part_b
| 24.388889
| 64
| 0.525437
| 205
| 1,317
| 3.282927
| 0.312195
| 0.011887
| 0.080238
| 0.011887
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.023649
| 0.32574
| 1,317
| 54
| 65
| 24.388889
| 0.734234
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.153846
| false
| 0
| 0.051282
| 0.076923
| 0.384615
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
ae2d0af0f1b9daeb6ad913a0cc22fcfa911b9c6b
| 5,291
|
py
|
Python
|
pypy/module/_minimal_curses/fficurses.py
|
microvm/pypy-mu
|
6b03fbe93052d0eb3a4c67152c987c16837b3484
|
[
"Apache-2.0",
"OpenSSL"
] | 34
|
2015-07-09T04:53:27.000Z
|
2021-07-19T05:22:27.000Z
|
pypy/module/_minimal_curses/fficurses.py
|
microvm/pypy-mu
|
6b03fbe93052d0eb3a4c67152c987c16837b3484
|
[
"Apache-2.0",
"OpenSSL"
] | 6
|
2015-05-30T17:20:45.000Z
|
2017-06-12T14:29:23.000Z
|
pypy/module/_minimal_curses/fficurses.py
|
microvm/pypy-mu
|
6b03fbe93052d0eb3a4c67152c987c16837b3484
|
[
"Apache-2.0",
"OpenSSL"
] | 11
|
2015-09-07T14:26:08.000Z
|
2020-04-10T07:20:41.000Z
|
""" The ffi for rpython, need to be imported for side effects
"""
from rpython.rtyper.lltypesystem import rffi
from rpython.rtyper.lltypesystem import lltype
from rpython.rtyper.tool import rffi_platform
from rpython.rtyper.extfunc import register_external
from pypy.module._minimal_curses import interp_curses
from rpython.translator.tool.cbuild import ExternalCompilationInfo
# We cannot trust ncurses5-config, it's broken in various ways in
# various versions. For example it might not list -ltinfo even though
# it's needed, or --cflags might be completely empty. On Ubuntu 10.04
# it gives -I/usr/include/ncurses, which doesn't exist at all. Crap.
def try_cflags():
    """Yield candidate compile configurations for locating curses.h/term.h,
    from most standard to most distribution-specific."""
    yield ExternalCompilationInfo(includes=['curses.h', 'term.h'])
    yield ExternalCompilationInfo(includes=['curses.h', 'term.h'],
                                  include_dirs=['/usr/include/ncurses'])
    yield ExternalCompilationInfo(includes=['ncurses/curses.h',
                                            'ncurses/term.h'])
def try_ldflags():
    """Yield candidate link configurations; some systems need -ltinfo in
    addition to curses, some ship libs in /usr/lib64."""
    yield ExternalCompilationInfo(libraries=['curses'])
    yield ExternalCompilationInfo(libraries=['curses', 'tinfo'])
    yield ExternalCompilationInfo(libraries=['ncurses'])
    yield ExternalCompilationInfo(libraries=['ncurses'],
                                  library_dirs=['/usr/lib64'])
def try_tools():
    """Yield ECIs derived from pkg-config and ncurses5-config, silently
    skipping any tool that is missing or fails (see module note on how
    unreliable ncurses5-config is)."""
    try:
        yield ExternalCompilationInfo.from_pkg_config("ncurses")
    except Exception:
        # Tool absent or broken: fall through to the next candidate.
        pass
    try:
        yield ExternalCompilationInfo.from_config_tool("ncurses5-config")
    except Exception:
        pass
def try_eci():
    """Yield every candidate ECI: tool-derived ones first (augmented with
    the headers we need), then every cflags x ldflags combination."""
    for eci in try_tools():
        yield eci.merge(ExternalCompilationInfo(includes=['curses.h',
                                                          'term.h']))
    for eci1 in try_cflags():
        for eci2 in try_ldflags():
            yield eci1.merge(eci2)
def guess_eci():
    """Return the first candidate ECI that can actually compile and link a
    call to setupterm(); raise ImportError if none works."""
    for eci in try_eci():
        # rffi_platform.configure test-compiles against this candidate.
        class CConfig:
            _compilation_info_ = eci
            HAS = rffi_platform.Has("setupterm")
        if rffi_platform.configure(CConfig)['HAS']:
            return eci
    raise ImportError("failed to guess where ncurses is installed. "
                      "You might need to install libncurses5-dev or similar.")
# Probe the system once at import time for a working ncurses setup.
eci = guess_eci()

INT = rffi.INT
# Pointer to an int array with no length tracking (setupterm's errret out-param).
INTP = lltype.Ptr(lltype.Array(INT, hints={'nolength':True}))

# Low-level bindings for the three terminfo entry points exposed below.
c_setupterm = rffi.llexternal('setupterm', [rffi.CCHARP, INT, INTP], INT,
                              compilation_info=eci)
c_tigetstr = rffi.llexternal('tigetstr', [rffi.CCHARP], rffi.CCHARP,
                             compilation_info=eci)
c_tparm = rffi.llexternal('tparm', [rffi.CCHARP, INT, INT, INT, INT, INT,
                                    INT, INT, INT, INT], rffi.CCHARP,
                          compilation_info=eci)

# curses' standard status constants, taken from the C headers.
ERR = rffi.CConstant('ERR', lltype.Signed)
OK = rffi.CConstant('OK', lltype.Signed)
def curses_setupterm(term, fd):
    """Call C setupterm() and translate its error codes into curses_error.

    `term` is a raw charp (may be null, meaning "use $TERM"); `fd` is the
    output file descriptor. On success marks the module as initialised.
    The errret out-buffer is always freed, even when an error is raised.
    """
    intp = lltype.malloc(INTP.TO, 1, flavor='raw')
    err = rffi.cast(lltype.Signed, c_setupterm(term, fd, intp))
    try:
        if err == ERR:
            # errret encodes why setupterm failed (see terminfo docs).
            errret = rffi.cast(lltype.Signed, intp[0])
            if errret == 0:
                msg = "setupterm: could not find terminal"
            elif errret == -1:
                msg = "setupterm: could not find terminfo database"
            else:
                msg = "setupterm: unknown error"
            raise interp_curses.curses_error(msg)
        interp_curses.module_info.setupterm_called = True
    finally:
        lltype.free(intp, flavor='raw')
def curses_setupterm_null_llimpl(fd):
    """setupterm with a NULL terminal name, letting ncurses consult $TERM."""
    curses_setupterm(lltype.nullptr(rffi.CCHARP.TO), fd)
def curses_setupterm_llimpl(term, fd):
    """setupterm with an explicit terminal name; the temporary raw string
    is always freed."""
    ll_s = rffi.str2charp(term)
    try:
        curses_setupterm(ll_s, fd)
    finally:
        rffi.free_charp(ll_s)
# Register the low-level implementations so RPython-level calls to the
# interp_curses stubs translate to the functions above.
register_external(interp_curses._curses_setupterm_null,
                  [int], llimpl=curses_setupterm_null_llimpl,
                  export_name='_curses.setupterm_null')
register_external(interp_curses._curses_setupterm,
                  [str, int], llimpl=curses_setupterm_llimpl,
                  export_name='_curses.setupterm')
def check_setup_invoked():
    """Raise curses_error unless setupterm() has been called in this process."""
    if not interp_curses.module_info.setupterm_called:
        raise interp_curses.curses_error("must call (at least) setupterm() first")
def tigetstr_llimpl(cap):
    """Look up string capability `cap` via tigetstr().

    C tigetstr returns (char*)0 for a missing capability and (char*)-1 when
    the capability exists but is not a string; both map to TermError here.
    The temporary charp for `cap` is always freed.
    """
    check_setup_invoked()
    ll_cap = rffi.str2charp(cap)
    try:
        ll_res = c_tigetstr(ll_cap)
        # Compare the pointer value itself against the 0 / -1 sentinels.
        num = lltype.cast_ptr_to_int(ll_res)
        if num == 0 or num == -1:
            raise interp_curses.TermError()
        res = rffi.charp2str(ll_res)
        return res
    finally:
        rffi.free_charp(ll_cap)
# Expose tigetstr to RPython under the _curses namespace.
register_external(interp_curses._curses_tigetstr, [str], str,
                  export_name='_curses.tigetstr', llimpl=tigetstr_llimpl)
def tparm_llimpl(s, args):
    """Instantiate capability string `s` with up to nine integer parameters
    via tparm(); extra args are ignored, missing ones default to 0.

    Returns the expanded string. The temporary charp for `s` is now freed in
    a `finally` block (the original leaked it if c_tparm or charp2str
    raised), matching tigetstr_llimpl's cleanup style.
    """
    check_setup_invoked()
    # tparm always takes exactly nine ints; pad/truncate `args` to fit.
    l = [0, 0, 0, 0, 0, 0, 0, 0, 0]
    for i in range(min(len(args), 9)):
        l[i] = args[i]
    ll_s = rffi.str2charp(s)
    try:
        # XXX nasty trick stolen from CPython
        ll_res = c_tparm(ll_s, l[0], l[1], l[2], l[3], l[4], l[5], l[6],
                         l[7], l[8])
        res = rffi.charp2str(ll_res)
    finally:
        rffi.free_charp(ll_s)
    return res
# Expose tparm to RPython under the _curses namespace.
register_external(interp_curses._curses_tparm, [str, [int]], str,
                  export_name='_curses.tparm', llimpl=tparm_llimpl)
| 36.743056
| 82
| 0.634096
| 657
| 5,291
| 4.931507
| 0.284627
| 0.050926
| 0.019444
| 0.022222
| 0.260494
| 0.121914
| 0.059259
| 0.011111
| 0
| 0
| 0
| 0.010936
| 0.256851
| 5,291
| 143
| 83
| 37
| 0.813072
| 0.068796
| 0
| 0.2
| 0
| 0
| 0.104192
| 0.004477
| 0
| 0
| 0
| 0
| 0
| 1
| 0.095652
| false
| 0.017391
| 0.06087
| 0
| 0.208696
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
ae313f7b22dd8a45cb53e8bfba694df52241d4b5
| 1,310
|
py
|
Python
|
exercises/development/intermediate/exercise_5.py
|
littlekign/comp-think.github.io
|
21bce306c7672b6355a6fdaf260824542dbca595
|
[
"CC0-1.0",
"CC-BY-4.0"
] | 40
|
2019-01-25T11:14:30.000Z
|
2021-12-05T15:04:11.000Z
|
exercises/development/intermediate/exercise_5.py
|
littlekign/comp-think.github.io
|
21bce306c7672b6355a6fdaf260824542dbca595
|
[
"CC0-1.0",
"CC-BY-4.0"
] | 1
|
2020-11-08T15:18:58.000Z
|
2020-11-19T22:44:28.000Z
|
exercises/development/intermediate/exercise_5.py
|
littlekign/comp-think.github.io
|
21bce306c7672b6355a6fdaf260824542dbca595
|
[
"CC0-1.0",
"CC-BY-4.0"
] | 19
|
2019-12-28T16:06:01.000Z
|
2021-12-14T15:52:44.000Z
|
# -*- coding: utf-8 -*-
# Copyright (c) 2019, Silvio Peroni <essepuntato@gmail.com>
#
# Permission to use, copy, modify, and/or distribute this software for any purpose
# with or without fee is hereby granted, provided that the above copyright notice
# and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
# FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT,
# OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE,
# DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS
# ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS
# SOFTWARE.
from collections import deque
# Test case for the function
# Test case for the function
def test_do_it(queue, number, expected):
    """Return True when do_it(queue, number) produces `expected`.

    The original `if x == y: return True else: return False` is the
    classic anti-idiom; return the comparison directly.
    """
    return do_it(queue, number) == expected
# Code of the function
# Code of the function
def do_it(queue, number):
    """Remove `number` items from the front of `queue` and return the queue.

    Returns None (as the original did implicitly) when the queue holds
    fewer than `number` items.
    """
    if number > len(queue):
        return None
    for _ in range(number):
        queue.popleft()
    return queue
# Tests
# Expect True: asking for 3 items from a 2-item queue yields None.
print(test_do_it(deque(["a", "b"]), 3, None))
# Expect True: removing 3 items from a 5-item queue leaves the last two.
print(test_do_it(deque(["a", "b", "c", "d", "e"]), 3, deque(["d", "e"])))
| 33.589744
| 84
| 0.70458
| 198
| 1,310
| 4.621212
| 0.540404
| 0.021858
| 0.02623
| 0.04918
| 0.080874
| 0.043716
| 0.043716
| 0
| 0
| 0
| 0
| 0.006679
| 0.2
| 1,310
| 38
| 85
| 34.473684
| 0.866412
| 0.632824
| 0
| 0
| 0
| 0
| 0.019438
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.142857
| false
| 0
| 0.071429
| 0
| 0.428571
| 0.142857
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
ae3425a0e350725139bf2c51d7938fab7269b9d6
| 516
|
py
|
Python
|
src/lib/spaces/orientedplane.py
|
Wombatlord/PygamePong
|
d56b1529fe095e6a30b27b6039d9d52105ad900d
|
[
"MIT"
] | null | null | null |
src/lib/spaces/orientedplane.py
|
Wombatlord/PygamePong
|
d56b1529fe095e6a30b27b6039d9d52105ad900d
|
[
"MIT"
] | 2
|
2021-02-19T05:05:43.000Z
|
2021-02-20T02:16:53.000Z
|
src/lib/spaces/orientedplane.py
|
Wombatlord/PygamePong
|
d56b1529fe095e6a30b27b6039d9d52105ad900d
|
[
"MIT"
] | 1
|
2020-08-13T10:14:46.000Z
|
2020-08-13T10:14:46.000Z
|
from src.lib.spaces.vector import Vector
class OrientedPlane:
    """A plane oriented by a normal vector, able to reflect incoming vectors."""

    def __init__(self, normal: Vector) -> None:
        # Store a unit-length normal so dot products give the signed
        # component along the normal directly.
        self.normal = normal.normalise()

    def reflect(self, initialVector: Vector):
        """Mirror `initialVector` off the plane when it points against the
        normal (negative normal component); otherwise return it unchanged
        (plus a zero vector, as the original did)."""
        normalComponent: float = initialVector.dot(self.normal)
        if normalComponent >= 0:
            # Not moving into the plane: nothing to reflect.
            return initialVector + Vector(0, 0)
        # Cancel the into-plane component twice over to mirror the vector.
        mirrored = self.normal.scale(normalComponent)
        return initialVector + mirrored.scale(-2)
| 30.352941
| 70
| 0.660853
| 50
| 516
| 6.74
| 0.54
| 0.118694
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.010363
| 0.251938
| 516
| 16
| 71
| 32.25
| 0.862694
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166667
| false
| 0
| 0.083333
| 0
| 0.416667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
ae371c01a5249a7ea65891e859df84f39ceed04c
| 1,357
|
py
|
Python
|
UI for prediction/prediction_file.py
|
berfin-t/HeartAttackPrediction
|
a9acbd0356f3c3e4100b1964862242f6afe7da3b
|
[
"Apache-2.0"
] | null | null | null |
UI for prediction/prediction_file.py
|
berfin-t/HeartAttackPrediction
|
a9acbd0356f3c3e4100b1964862242f6afe7da3b
|
[
"Apache-2.0"
] | null | null | null |
UI for prediction/prediction_file.py
|
berfin-t/HeartAttackPrediction
|
a9acbd0356f3c3e4100b1964862242f6afe7da3b
|
[
"Apache-2.0"
] | null | null | null |
import pickle
import os
import sys
import pandas as pd
from sklearn.metrics import accuracy_score
from sklearn.model_selection import train_test_split
import warnings
warnings.filterwarnings("ignore", message="Reloaded modules: <module_name>")
def train():
    """Fit a logistic-regression classifier on heart.csv, pickle the fitted
    model to svc.pkl, then print its accuracy on the held-out split."""
    data = pd.read_csv('heart.csv')
    Y = data["target"]
    X = data.drop('target',axis=1)
    # Hold out 20% for evaluation; the fixed seed keeps the split reproducible.
    X_train, X_test, Y_train, Y_test = train_test_split(X, Y, test_size=0.20, random_state = 0)
    from sklearn.linear_model import LogisticRegression
    model = LogisticRegression(solver='liblinear')
    loj_reg=model.fit(X_train,Y_train.values.ravel())
    # Persist the fitted model for later prediction via check_input().
    with open('svc.pkl','wb') as m:
        pickle.dump(loj_reg,m)
    test(X_test,Y_test)
def test(X_test,Y_test):
    """Load the pickled model from svc.pkl and print its accuracy on the
    given held-out features/labels."""
    with open('svc.pkl','rb') as mod:
        p=pickle.load(mod)
    pre=p.predict(X_test)
    print (accuracy_score(Y_test,pre))
def find_data_file(filename):
    """Resolve `filename` relative to the application directory.

    When running as a frozen executable (e.g. PyInstaller sets sys.frozen),
    that is the executable's folder; otherwise it is this module's folder.
    """
    frozen = getattr(sys, "frozen", False)
    base = os.path.dirname(sys.executable if frozen else __file__)
    return os.path.join(base, filename)
def check_input(data) -> int:
    """Predict the class label for a single sample.

    `data` is a mapping of feature name -> value for one row; the fitted
    model is loaded from the bundled svc.pkl.
    """
    frame = pd.DataFrame(data=data, index=[0])
    with open(find_data_file('svc.pkl'), 'rb') as model_file:
        # NOTE(review): pickle.load on the app's own model file -- trusted
        # source assumed; never unpickle untrusted data.
        classifier = pickle.load(model_file)
    prediction = classifier.predict(frame)
    return prediction[0]
# Train (and pickle) the model only when run as a script, not on import.
if __name__=='__main__':
    train()
| 26.096154
| 95
| 0.664702
| 201
| 1,357
| 4.273632
| 0.427861
| 0.029104
| 0.020955
| 0.032596
| 0.032596
| 0
| 0
| 0
| 0
| 0
| 0
| 0.006494
| 0.205601
| 1,357
| 52
| 96
| 26.096154
| 0.790353
| 0
| 0
| 0
| 0
| 0
| 0.079529
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.105263
| false
| 0
| 0.210526
| 0
| 0.368421
| 0.026316
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
ae39d5bb797b8ed6a1c3f37606a273b2c5c79dbb
| 8,326
|
py
|
Python
|
tests/test_optimization.py
|
davidusb-geek/emhass
|
5d6a5ad45c26b819c6bc1cb0e8943940d7fc8f17
|
[
"MIT"
] | 17
|
2021-09-12T22:32:09.000Z
|
2022-03-17T17:45:29.000Z
|
tests/test_optimization.py
|
davidusb-geek/emhass
|
5d6a5ad45c26b819c6bc1cb0e8943940d7fc8f17
|
[
"MIT"
] | 1
|
2021-12-22T21:10:04.000Z
|
2021-12-22T21:10:04.000Z
|
tests/test_optimization.py
|
davidusb-geek/emhass
|
5d6a5ad45c26b819c6bc1cb0e8943940d7fc8f17
|
[
"MIT"
] | 2
|
2021-11-03T10:29:05.000Z
|
2021-11-19T12:08:24.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import unittest
import pandas as pd
import numpy as np
import pathlib
import pickle
from datetime import datetime, timezone
from emhass.retrieve_hass import retrieve_hass
from emhass.optimization import optimization
from emhass.forecast import forecast
from emhass.utils import get_root, get_yaml_parse, get_days_list, get_logger
# Project root, resolved two directory levels above this test file.
root = str(get_root(__file__, num_parent=2))
# Module-wide logger and its console handler (no log file for tests).
logger, ch = get_logger(__name__, root, save_to_file=False)
class TestOptimization(unittest.TestCase):
    """Integration tests for emhass.optimization.optimization.

    setUp builds the full retrieve -> forecast -> optimize pipeline from the
    repository config and canned data files, so the tests exercise real
    dataframes rather than mocks.
    """

    def setUp(self):
        """Build hass retriever, forecaster and optimizer from config_emhass.yaml."""
        # Load the pickled dataframe shipped with the repo instead of
        # querying a live Home Assistant instance.
        get_data_from_file = True
        params = None
        retrieve_hass_conf, optim_conf, plant_conf = get_yaml_parse(pathlib.Path(root+'/config_emhass.yaml'), use_secrets=False)
        self.retrieve_hass_conf, self.optim_conf, self.plant_conf = \
            retrieve_hass_conf, optim_conf, plant_conf
        self.rh = retrieve_hass(self.retrieve_hass_conf['hass_url'], self.retrieve_hass_conf['long_lived_token'],
                                self.retrieve_hass_conf['freq'], self.retrieve_hass_conf['time_zone'],
                                params, root, logger)
        if get_data_from_file:
            with open(pathlib.Path(root+'/data/test_df_final.pkl'), 'rb') as inp:
                self.rh.df_final, self.days_list, self.var_list = pickle.load(inp)
        else:
            # Live path (unused in CI): pull the configured history window.
            self.days_list = get_days_list(self.retrieve_hass_conf['days_to_retrieve'])
            self.var_list = [self.retrieve_hass_conf['var_load'], self.retrieve_hass_conf['var_PV']]
            self.rh.get_data(self.days_list, self.var_list,
                             minimal_response=False, significant_changes_only=False)
        self.rh.prepare_data(self.retrieve_hass_conf['var_load'], load_negative = self.retrieve_hass_conf['load_negative'],
                             set_zero_min = self.retrieve_hass_conf['set_zero_min'],
                             var_replace_zero = self.retrieve_hass_conf['var_replace_zero'],
                             var_interp = self.retrieve_hass_conf['var_interp'])
        self.df_input_data = self.rh.df_final.copy()
        # Forecasts feeding the day-ahead optimization problem.
        self.fcst = forecast(self.retrieve_hass_conf, self.optim_conf, self.plant_conf,
                             params, root, logger, get_data_from_file=get_data_from_file)
        self.df_weather = self.fcst.get_weather_forecast(method=optim_conf['weather_forecast_method'])
        self.P_PV_forecast = self.fcst.get_power_from_weather(self.df_weather)
        self.P_load_forecast = self.fcst.get_load_forecast(method=optim_conf['load_forecast_method'])
        self.df_input_data_dayahead = pd.concat([self.P_PV_forecast, self.P_load_forecast], axis=1)
        self.df_input_data_dayahead.columns = ['P_PV_forecast', 'P_load_forecast']
        self.costfun = 'profit'
        self.opt = optimization(self.retrieve_hass_conf, self.optim_conf, self.plant_conf,
                                self.fcst.var_load_cost, self.fcst.var_prod_price,
                                self.costfun, root, logger)
        self.df_input_data = self.fcst.get_load_cost_forecast(self.df_input_data)
        self.df_input_data = self.fcst.get_prod_price_forecast(self.df_input_data)
        self.input_data_dict = {
            'retrieve_hass_conf': retrieve_hass_conf,
        }

    def test_perform_perfect_forecast_optim(self):
        """Perfect-forecast optimization returns a tz-aware frame with the cost column."""
        self.opt_res = self.opt.perform_perfect_forecast_optim(self.df_input_data, self.days_list)
        self.assertIsInstance(self.opt_res, type(pd.DataFrame()))
        self.assertIsInstance(self.opt_res.index, pd.core.indexes.datetimes.DatetimeIndex)
        self.assertIsInstance(self.opt_res.index.dtype, pd.core.dtypes.dtypes.DatetimeTZDtype)
        self.assertTrue('cost_fun_'+self.costfun in self.opt_res.columns)

    def test_perform_dayahead_forecast_optim(self):
        """Day-ahead optimization: deferrable-load energy, battery columns, table export."""
        self.df_input_data_dayahead = self.fcst.get_load_cost_forecast(self.df_input_data_dayahead)
        self.df_input_data_dayahead = self.fcst.get_prod_price_forecast(self.df_input_data_dayahead)
        self.opt_res_dayahead = self.opt.perform_dayahead_forecast_optim(
            self.df_input_data_dayahead, self.P_PV_forecast, self.P_load_forecast)
        self.assertIsInstance(self.opt_res_dayahead, type(pd.DataFrame()))
        self.assertIsInstance(self.opt_res_dayahead.index, pd.core.indexes.datetimes.DatetimeIndex)
        self.assertIsInstance(self.opt_res_dayahead.index.dtype, pd.core.dtypes.dtypes.DatetimeTZDtype)
        self.assertTrue('cost_fun_'+self.costfun in self.opt_res_dayahead.columns)
        # Deferrable energy (power * hours per step) must match nominal power * requested hours.
        self.assertTrue(self.opt_res_dayahead['P_deferrable0'].sum()*(
            self.retrieve_hass_conf['freq'].seconds/3600) == self.optim_conf['P_deferrable_nom'][0]*self.optim_conf['def_total_hours'][0])
        # Testing estimation of the current index
        now_precise = datetime.now(self.input_data_dict['retrieve_hass_conf']['time_zone']).replace(second=0, microsecond=0)
        idx_closest = self.opt_res_dayahead.index.get_indexer([now_precise], method='ffill')[0]
        idx_closest = self.opt_res_dayahead.index.get_indexer([now_precise], method='nearest')[0]
        # Test the battery
        self.optim_conf.update({'set_use_battery': True})
        self.opt = optimization(self.retrieve_hass_conf, self.optim_conf, self.plant_conf,
                                self.fcst.var_load_cost, self.fcst.var_prod_price,
                                self.costfun, root, logger)
        self.opt_res_dayahead = self.opt.perform_dayahead_forecast_optim(
            self.df_input_data_dayahead, self.P_PV_forecast, self.P_load_forecast)
        self.assertIsInstance(self.opt_res_dayahead, type(pd.DataFrame()))
        self.assertTrue('P_batt' in self.opt_res_dayahead.columns)
        self.assertTrue('SOC_opt' in self.opt_res_dayahead.columns)
        # Final state of charge must hit the configured target.
        self.assertAlmostEqual(self.opt_res_dayahead.loc[self.opt_res_dayahead.index[-1],'SOC_opt'], self.plant_conf['SOCtarget'])
        # Test table conversion
        opt_res = pd.read_csv(root+'/data/opt_res_latest.csv', index_col='timestamp')
        cost_cols = [i for i in opt_res.columns if 'cost_' in i]
        table = opt_res[cost_cols].reset_index().sum(numeric_only=True).to_frame(name='Cost Totals').reset_index()

    def test_perform_naive_mpc_optim(self):
        """Naive MPC: battery SOC constraints and per-load total-hours constraints."""
        self.df_input_data_dayahead = self.fcst.get_load_cost_forecast(self.df_input_data_dayahead)
        self.df_input_data_dayahead = self.fcst.get_prod_price_forecast(self.df_input_data_dayahead)
        # Test the battery
        self.optim_conf.update({'set_use_battery': True})
        self.opt = optimization(self.retrieve_hass_conf, self.optim_conf, self.plant_conf,
                                self.fcst.var_load_cost, self.fcst.var_prod_price,
                                self.costfun, root, logger)
        prediction_horizon = 10
        soc_init = 0.4
        soc_final = 0.6
        def_total_hours = [2, 3]
        self.opt_res_dayahead = self.opt.perform_naive_mpc_optim(
            self.df_input_data_dayahead, self.P_PV_forecast, self.P_load_forecast, prediction_horizon,
            soc_init=soc_init, soc_final=soc_final, def_total_hours=def_total_hours)
        self.assertIsInstance(self.opt_res_dayahead, type(pd.DataFrame()))
        self.assertTrue('P_batt' in self.opt_res_dayahead.columns)
        self.assertTrue('SOC_opt' in self.opt_res_dayahead.columns)
        # Final SOC within solver tolerance of the requested target.
        self.assertTrue(np.abs(self.opt_res_dayahead.loc[self.opt_res_dayahead.index[-1],'SOC_opt']-soc_final)<1e-3)
        term1 = self.optim_conf['P_deferrable_nom'][0]*def_total_hours[0]
        term2 = self.opt_res_dayahead['P_deferrable0'].sum()*(self.retrieve_hass_conf['freq'].seconds/3600)
        self.assertTrue(np.abs(term1-term2)<1e-3)
        # Re-run with a discharging SOC trajectory (start high, end lower).
        soc_init = 0.8
        soc_final = 0.5
        self.opt_res_dayahead = self.opt.perform_naive_mpc_optim(
            self.df_input_data_dayahead, self.P_PV_forecast, self.P_load_forecast, prediction_horizon,
            soc_init=soc_init, soc_final=soc_final, def_total_hours=def_total_hours)
        self.assertAlmostEqual(self.opt_res_dayahead.loc[self.opt_res_dayahead.index[-1],'SOC_opt'], soc_final)
if __name__ == '__main__':
    try:
        unittest.main()
    finally:
        # unittest.main() raises SystemExit after running the suite, so the
        # original cleanup statements placed after it were unreachable;
        # running them in `finally` guarantees the handler is released.
        ch.close()
        logger.removeHandler(ch)
| 60.773723
| 138
| 0.700336
| 1,159
| 8,326
| 4.653149
| 0.164797
| 0.048025
| 0.053773
| 0.080104
| 0.659559
| 0.587799
| 0.564065
| 0.521788
| 0.495457
| 0.495457
| 0
| 0.006579
| 0.196733
| 8,326
| 136
| 139
| 61.220588
| 0.799791
| 0.020178
| 0
| 0.278261
| 0
| 0
| 0.066994
| 0.008589
| 0
| 0
| 0
| 0
| 0.165217
| 1
| 0.034783
| false
| 0
| 0.086957
| 0
| 0.130435
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
ae3a5afb8c080bcd642ec9b461aca11065494bcb
| 4,555
|
py
|
Python
|
experiments/counters.py
|
TenantBase/django-experiments
|
b75cf11159da4f4c75d9798dff3ddfd1ca454261
|
[
"MIT"
] | null | null | null |
experiments/counters.py
|
TenantBase/django-experiments
|
b75cf11159da4f4c75d9798dff3ddfd1ca454261
|
[
"MIT"
] | 1
|
2019-05-29T00:00:15.000Z
|
2019-05-29T00:00:15.000Z
|
experiments/counters.py
|
TenantBase/django-experiments
|
b75cf11159da4f4c75d9798dff3ddfd1ca454261
|
[
"MIT"
] | null | null | null |
from django.conf import settings
from django.utils.functional import cached_property
import redis
from redis.sentinel import Sentinel
from redis.exceptions import ConnectionError, ResponseError
# Redis hash holding per-participant counts for one experiment key.
COUNTER_CACHE_KEY = 'experiments:participants:%s'
# Redis hash holding the histogram {per-user count: number of users}.
COUNTER_FREQ_CACHE_KEY = 'experiments:freq:%s'
class Counters(object):
    """Experiment participant counters backed by Redis.

    Every method swallows Redis connectivity/response errors and falls back
    to an inert default, so experiment bookkeeping never takes the site down.
    """

    @cached_property
    def _redis(self):
        """Build the Redis client lazily, via Sentinel when configured."""
        if getattr(settings, 'EXPERIMENTS_REDIS_SENTINELS', None):
            sentinel = Sentinel(settings.EXPERIMENTS_REDIS_SENTINELS, socket_timeout=settings.EXPERIMENTS_REDIS_SENTINELS_TIMEOUT)
            host, port = sentinel.discover_master(settings.EXPERIMENTS_REDIS_MASTER_NAME)
        else:
            host = getattr(settings, 'EXPERIMENTS_REDIS_HOST', 'localhost')
            port = getattr(settings, 'EXPERIMENTS_REDIS_PORT', 6379)
        password = getattr(settings, 'EXPERIMENTS_REDIS_PASSWORD', None)
        db = getattr(settings, 'EXPERIMENTS_REDIS_DB', 0)
        return redis.Redis(host=host, port=port, password=password, db=db)

    def increment(self, key, participant_identifier, count=1):
        """Add `count` to one participant's tally and keep the histogram in sync."""
        if count == 0:
            return
        try:
            cache_key = COUNTER_CACHE_KEY % key
            freq_cache_key = COUNTER_FREQ_CACHE_KEY % key
            new_value = self._redis.hincrby(cache_key, participant_identifier, count)
            # Maintain histogram of per-user counts
            if new_value > count:
                # The participant existed before: move them out of their
                # old histogram bucket.
                self._redis.hincrby(freq_cache_key, new_value - count, -1)
            self._redis.hincrby(freq_cache_key, new_value, 1)
        except (ConnectionError, ResponseError):
            # Handle Redis failures gracefully
            pass

    def clear(self, key, participant_identifier):
        """Drop one participant's tally and its histogram bucket entry."""
        try:
            # Remove the direct entry
            cache_key = COUNTER_CACHE_KEY % key
            pipe = self._redis.pipeline()
            freq, _ = pipe.hget(cache_key, participant_identifier).hdel(cache_key, participant_identifier).execute()
            # Handle cases where the cache_key isn't found gracefully.
            if freq is None:
                return
            # Remove from the histogram
            freq_cache_key = COUNTER_FREQ_CACHE_KEY % key
            self._redis.hincrby(freq_cache_key, freq, -1)
        except (ConnectionError, ResponseError):
            # Handle Redis failures gracefully
            pass

    def get(self, key):
        """Return the number of distinct participants counted under `key`."""
        try:
            cache_key = COUNTER_CACHE_KEY % key
            return self._redis.hlen(cache_key)
        except (ConnectionError, ResponseError):
            # Handle Redis failures gracefully
            return 0

    def get_frequency(self, key, participant_identifier):
        """Return one participant's count under `key` (0 if absent or Redis is down)."""
        try:
            cache_key = COUNTER_CACHE_KEY % key
            freq = self._redis.hget(cache_key, participant_identifier)
            return int(freq) if freq else 0
        except (ConnectionError, ResponseError):
            # Handle Redis failures gracefully
            return 0

    def get_frequencies(self, key):
        """Return the histogram {per-user count: number of users} for `key`."""
        try:
            freq_cache_key = COUNTER_FREQ_CACHE_KEY % key
            # In some cases when there are concurrent updates going on, there can
            # briefly be a negative result for some frequency count. We discard these
            # as they shouldn't really affect the result, and they are about to become
            # zero anyway.
            return dict((int(k), int(v)) for (k, v) in self._redis.hgetall(freq_cache_key).items() if int(v) > 0)
        except (ConnectionError, ResponseError):
            # Handle Redis failures gracefully. Return an empty dict -- the
            # original returned tuple(), giving the failure path a different
            # type from the success path.
            return {}

    def reset(self, key):
        """Delete both hashes for `key`; True on success, False on Redis failure."""
        try:
            cache_key = COUNTER_CACHE_KEY % key
            self._redis.delete(cache_key)
            freq_cache_key = COUNTER_FREQ_CACHE_KEY % key
            self._redis.delete(freq_cache_key)
            return True
        except (ConnectionError, ResponseError):
            # Handle Redis failures gracefully
            return False

    def reset_pattern(self, pattern_key):
        #similar to above, but can pass pattern as arg instead
        try:
            cache_key = COUNTER_CACHE_KEY % pattern_key
            for key in self._redis.keys(cache_key):
                self._redis.delete(key)
            freq_cache_key = COUNTER_FREQ_CACHE_KEY % pattern_key
            for key in self._redis.keys(freq_cache_key):
                self._redis.delete(key)
            return True
        except (ConnectionError, ResponseError):
            # Handle Redis failures gracefully
            return False
| 38.931624
| 130
| 0.639517
| 525
| 4,555
| 5.314286
| 0.251429
| 0.108961
| 0.073118
| 0.100358
| 0.451971
| 0.407168
| 0.360573
| 0.360573
| 0.292115
| 0.226523
| 0
| 0.004337
| 0.291328
| 4,555
| 116
| 131
| 39.267241
| 0.859975
| 0.143578
| 0
| 0.432099
| 0
| 0
| 0.044284
| 0.031926
| 0
| 0
| 0
| 0
| 0
| 1
| 0.098765
| false
| 0.049383
| 0.061728
| 0
| 0.333333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
ae3bab1dfe4bf59579d4fb381bd53583200e99c5
| 447
|
py
|
Python
|
irl_gym/envs/env_utils.py
|
uidilr/irl_gym
|
3352cb9189f3d5076a116db6678207e186ff4fc6
|
[
"MIT"
] | 1
|
2020-12-29T11:04:56.000Z
|
2020-12-29T11:04:56.000Z
|
irl_gym/envs/env_utils.py
|
uidilr/irl_gym
|
3352cb9189f3d5076a116db6678207e186ff4fc6
|
[
"MIT"
] | null | null | null |
irl_gym/envs/env_utils.py
|
uidilr/irl_gym
|
3352cb9189f3d5076a116db6678207e186ff4fc6
|
[
"MIT"
] | null | null | null |
import os
# Directory holding the environments' bundled XML assets.
ENV_ASSET_DIR = os.path.join(os.path.dirname(__file__), 'assets')


def get_asset_xml(xml_name):
    """Return the full path of an XML asset shipped in the assets/ folder."""
    return os.path.join(ENV_ASSET_DIR, xml_name)
def test_env(env, T=100):
    """Smoke-test `env` by rolling out up to T random-action steps.

    Prints the reward and observation of every step and renders each
    frame; stops early when the episode reports done.
    """
    action_space = env.action_space
    env.reset()
    for step in range(T):
        obs, reward, done, infos = env.step(action_space.sample())
        print('---T=%d---' % step)
        print('rew:', reward)
        print('obs:', obs)
        env.render()
        if done:
            break
| 20.318182
| 65
| 0.568233
| 68
| 447
| 3.529412
| 0.558824
| 0.075
| 0.091667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.009202
| 0.270694
| 447
| 21
| 66
| 21.285714
| 0.726994
| 0
| 0
| 0
| 0
| 0
| 0.053812
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.133333
| false
| 0
| 0.066667
| 0.066667
| 0.266667
| 0.2
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
ae3d3e28bf5a8518622d4a9ff1865444e5e3583f
| 1,889
|
py
|
Python
|
Project_1-Alien_Invasion/settings.py
|
Vandeilsonln/Python-Crash-Course
|
39b4f421504618f947672304a8e97edf7bc7f13d
|
[
"MIT"
] | null | null | null |
Project_1-Alien_Invasion/settings.py
|
Vandeilsonln/Python-Crash-Course
|
39b4f421504618f947672304a8e97edf7bc7f13d
|
[
"MIT"
] | null | null | null |
Project_1-Alien_Invasion/settings.py
|
Vandeilsonln/Python-Crash-Course
|
39b4f421504618f947672304a8e97edf7bc7f13d
|
[
"MIT"
] | null | null | null |
import pygame
class Settings():
    """A class to store all settings for Alien Invasion."""

    def __init__(self):
        """Initialize the game's static settings, then the dynamic ones."""
        # Screen Settings.
        self.screen_width = 1080
        self.screen_height = 630
        self.bg_image = pygame.image.load('Project_1-Alien_Invasion/_images/background_stars_moving.jpg')
        self.bg_moving_speed = 0.3
        # Starting y-offset of the scrolling background, in pixels (off-screen).
        self.bg_initial_position = -1705
        # Ship Settings
        self.ship_speed_factor = 1.2
        # Spare ships (lives) available before game over.
        self.ship_limit = 2
        # Bullet settings
        self.bullet_speed_factor = 3
        self.bullet_width = 4 # default width: 4
        self.bullet_height = 15
        self.bullet_color = 130, 60, 60
        self.bullets_allowed = 5 # max bullets on screen at once (default 5)
        # Alien Settings
        self.alien_speed_factor = 1
        self.fleet_drop_speed = 30 # pixels dropped when the fleet hits an edge (default 15)
        # fleet direction of 1 represents right; -1 represents left
        self.fleet_direction = 1
        # How quickly the game speeds up
        self.speedup_scale = 1.15
        # How quickly the alien values increase
        self.score_scale = 1.4
        self.initialize_dynamic_settings()

    def initialize_dynamic_settings(self):
        """Initialize settings that change throughout the game."""
        self.ship_speed_factor = 1.5
        self.bullet_speed_factor = 3
        self.alien_speed_factor = 1.1
        self.bg_moving_speed = 0.3
        # Fleet_direction of 1 represents right | -1 represents left.
        self.fleet_direction = 1
        # Scoring
        self.alien_points = 50

    def increase_speed(self):
        """Increase speed settings and alien point values on level-up."""
        self.ship_speed_factor *= self.speedup_scale
        self.bullet_speed_factor *= self.speedup_scale
        self.alien_speed_factor *= self.speedup_scale
        # Background scrolls up slightly faster than the general speedup.
        self.bg_moving_speed *= (self.speedup_scale * 1.4)
        self.alien_points = int(self.alien_points * self.score_scale)
| 32.568966
| 105
| 0.643727
| 247
| 1,889
| 4.668016
| 0.315789
| 0.085863
| 0.069384
| 0.044232
| 0.34085
| 0.2732
| 0.114484
| 0.114484
| 0.114484
| 0.114484
| 0
| 0.042899
| 0.284277
| 1,889
| 58
| 106
| 32.568966
| 0.809911
| 0.218105
| 0
| 0.176471
| 0
| 0
| 0.041124
| 0.041124
| 0
| 0
| 0
| 0
| 0
| 1
| 0.088235
| false
| 0
| 0.029412
| 0
| 0.147059
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
ae3e733e97f3939f4c5a55b9fab69488409a8357
| 1,153
|
py
|
Python
|
app/main/views/letter_jobs.py
|
karlchillmaid/notifications-admin
|
9ef6da4ef9e2fa97b7debb4b573cb035a5cb8880
|
[
"MIT"
] | null | null | null |
app/main/views/letter_jobs.py
|
karlchillmaid/notifications-admin
|
9ef6da4ef9e2fa97b7debb4b573cb035a5cb8880
|
[
"MIT"
] | null | null | null |
app/main/views/letter_jobs.py
|
karlchillmaid/notifications-admin
|
9ef6da4ef9e2fa97b7debb4b573cb035a5cb8880
|
[
"MIT"
] | null | null | null |
from flask import redirect, render_template, request, session, url_for
from flask_login import login_required
from app import letter_jobs_client
from app.main import main
from app.utils import user_is_platform_admin
@main.route("/letter-jobs", methods=['GET', 'POST'])
@login_required
@user_is_platform_admin
def letter_jobs():
    """Platform-admin page listing letter jobs, with re-send of selected jobs.

    POST: send the checked job ids, stash a status message (and the ids) in
    the session, then redirect (post/redirect/get). GET: render the list,
    flagging jobs that were just re-sent as 'sending'.
    """
    letter_jobs_list = letter_jobs_client.get_letter_jobs()
    if request.method == 'POST':
        if len(request.form.getlist('job_id')) > 0:
            job_ids = request.form.getlist('job_id')
            # Remember which jobs were sent so the GET after the redirect
            # can mark them in the table.
            session['job_ids'] = job_ids
            response = letter_jobs_client.send_letter_jobs(job_ids)
            msg = response['response']
        else:
            msg = 'No jobs selected'
        session['msg'] = msg
        return redirect(url_for('main.letter_jobs'))
    # GET (typically right after the redirect above): pop the one-shot state.
    msg = session.pop('msg', None)
    job_ids = session.pop('job_ids', None)
    if job_ids:
        for job_id in job_ids:
            # Assumes every id stashed in the session still appears in the
            # freshly fetched list -- TODO confirm against the API behaviour.
            job = [j for j in letter_jobs_list if job_id == j['id']][0]
            job['sending'] = 'sending'
    return render_template('views/letter-jobs.html', letter_jobs_list=letter_jobs_list, message=msg)
| 31.162162
| 100
| 0.666956
| 164
| 1,153
| 4.414634
| 0.317073
| 0.179558
| 0.077348
| 0.052486
| 0.129834
| 0
| 0
| 0
| 0
| 0
| 0
| 0.002225
| 0.220295
| 1,153
| 36
| 101
| 32.027778
| 0.803115
| 0
| 0
| 0
| 0
| 0
| 0.115351
| 0.019081
| 0
| 0
| 0
| 0
| 0
| 1
| 0.037037
| false
| 0
| 0.185185
| 0
| 0.296296
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
ae3ea51dd07df4bb77e861ac50689fed8f983f65
| 909
|
py
|
Python
|
dev-test/1_euler/srayan/euler-2.py
|
sgango/Y1-Project
|
89205600552ede6f8da29231cfa52a3538ae8df4
|
[
"BSD-2-Clause"
] | 2
|
2020-09-23T13:27:26.000Z
|
2021-09-14T14:15:30.000Z
|
dev-test/1_euler/srayan/euler-2.py
|
sgango/Y1-Project
|
89205600552ede6f8da29231cfa52a3538ae8df4
|
[
"BSD-2-Clause"
] | 1
|
2020-06-18T14:02:59.000Z
|
2020-06-18T14:02:59.000Z
|
dev-test/1_euler/srayan/euler-2.py
|
sgango/Y1-Project
|
89205600552ede6f8da29231cfa52a3538ae8df4
|
[
"BSD-2-Clause"
] | null | null | null |
"""
Adapting Euler method to handle 2nd order ODEs
Srayan Gangopadhyay
2020-05-16
"""
import numpy as np
import matplotlib.pyplot as plt
"""
y' = dy/dx
For a function of form y'' = f(x, y, y')
Define y' = v so y'' = v'
"""
def func(y, v, x):
    """Right-hand side of the first-order system: v' = x + v - 3y."""
    return -3 * y + v + x
# PARAMETERS ------------------------------------------------------------
y0, v0 = 1, -2              # initial conditions: y(0) and y'(0)
delta = 0.01                # integration step size
end = 4                     # x-value at which to stop
steps = int(end / delta) + 1  # number of grid points

# Discrete grid and empty solution arrays.
x = np.linspace(0, end, steps)
y = np.zeros(steps)
v = np.zeros(steps)
y[0], v[0] = y0, v0         # insert the initial values

# INTEGRATING: explicit Euler on the coupled first-order system.
for n in range(1, steps):
    v[n] = v[n - 1] + (delta * func(y[n - 1], v[n - 1], x[n - 1]))
    y[n] = y[n - 1] + (delta * v[n - 1])

# Plot the approximate solution (line plus point markers).
plt.plot(x, y, label='Approx. soln (Euler)')
plt.plot(x, y, 'o')
plt.xlabel('x')
plt.ylabel('y')
plt.legend()
plt.show()
| 21.139535
| 67
| 0.59516
| 179
| 909
| 3.022346
| 0.446927
| 0.022181
| 0.016636
| 0.033272
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.046218
| 0.214521
| 909
| 42
| 68
| 21.642857
| 0.711485
| 0.320132
| 0
| 0
| 0
| 0
| 0.04466
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.043478
| false
| 0
| 0.086957
| 0.043478
| 0.173913
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
ae3f83f14ff4a0be7289a02711f0b034c72507db
| 3,022
|
py
|
Python
|
dss_sm_so/tests/test_backends.py
|
MobileCloudNetworking/dssaas
|
87b6f7d60ecc397a88326a955b2ddfd3d73205d1
|
[
"Apache-2.0"
] | null | null | null |
dss_sm_so/tests/test_backends.py
|
MobileCloudNetworking/dssaas
|
87b6f7d60ecc397a88326a955b2ddfd3d73205d1
|
[
"Apache-2.0"
] | null | null | null |
dss_sm_so/tests/test_backends.py
|
MobileCloudNetworking/dssaas
|
87b6f7d60ecc397a88326a955b2ddfd3d73205d1
|
[
"Apache-2.0"
] | 1
|
2018-10-09T06:28:36.000Z
|
2018-10-09T06:28:36.000Z
|
__author__ = 'florian'
import unittest
from occi.backend import ActionBackend, KindBackend
from sm.sm.backends import ServiceBackend
from mock import patch
from sm.sm.so_manager import SOManager
from occi.core_model import Kind
from occi.core_model import Resource
@patch('mcn.sm.so_manager.CONFIG')
@patch('mcn.sm.so_manager.LOG')
class TestBackendsConstruction(unittest.TestCase):
    """Construction-time behaviour of ServiceBackend.

    The class-level patches replace the SO manager's CONFIG and LOG module
    globals for every test; their mocks arrive as the trailing arguments
    of each test method (innermost decorator first).
    """

    def setUp(self):
        pass

    @patch('os.system')
    @patch('mcn.sm.so_manager.SOManager', spec='mcn.sm.so_manager.SOManager')
    def test_init_for_sanity(self, mock_som, mock_os, mock_log, mock_config):
        # Stub os.system to report success so any shell call made during
        # ServiceBackend construction appears to succeed.
        mock_os.return_value = 0
        self.service_backend = ServiceBackend()
        # Test that service_backend contains a SOManager instance
        self.assertEqual(self.service_backend.som.__class__, SOManager)
        # assertInstance should work there
        # self.assertIsInstance(self.service_backend.som, SOManager)
        # print type(self.service_backend.som)
class TestBackendsMethods(unittest.TestCase):
    """CRUD methods of ServiceBackend delegate to SOManager.

    setUp builds a minimal OCCI Resource to operate on and starts patches
    (os.system, CONFIG, LOG) by hand; tearDown stops them again.
    """

    def setUp(self):
        # Minimal OCCI kind/entity the backend methods are exercised with.
        kind = Kind('http://schemas.mobile-cloud-networking.eu/occi/sm#',
                    'myservice',
                    title='Test Service',
                    attributes={'mcn.test.attribute1': 'immutable'},
                    related=[Resource.kind],
                    actions=[])
        self.test_entity = Resource('my-id', kind, None)
        # Manual patcher start/stop (instead of decorators) so the patches
        # also cover setUp itself; stopped in tearDown.
        self.patcher_system = patch('os.system', return_value=0)
        self.patcher_system.start()
        self.patcher_config = patch('mcn.sm.so_manager.CONFIG')
        self.patcher_config.start()
        self.patcher_log = patch('mcn.sm.so_manager.LOG')
        self.patcher_log.start()

    # Check why service backend cannot be created there with a mock (mock not taken into account)
    @patch('mcn.sm.so_manager.SOManager.deploy')
    def test_create_for_sanity(self, mock_deploy):
        # create() must forward the entity straight to SOManager.deploy.
        self.service_backend = ServiceBackend()
        self.service_backend.create(self.test_entity, None)
        mock_deploy.assert_called_once_with(self.test_entity, None)

    @patch('mcn.sm.so_manager.SOManager.so_details')
    def test_retrieve_for_sanity(self, mock_so_details):
        # retrieve() must forward to SOManager.so_details.
        service_backend = ServiceBackend()
        service_backend.retrieve(self.test_entity, None)
        mock_so_details.assert_called_once_with(self.test_entity, None)

    @patch('mcn.sm.so_manager.SOManager.dispose')
    def test_delete_for_sanity(self, mock_dispose):
        # delete() must forward to SOManager.dispose.
        service_backend = ServiceBackend()
        service_backend.delete(self.test_entity, None)
        mock_dispose.assert_called_once_with(self.test_entity, None)

    # def testNotImplemented(self):
    # service_backend = ServiceBackend()
    # # self.assertRaises(NotImplementedError, service_backend.update(None, None, None))
    # self.assertRaises(NotImplementedError, service_backend.replace(None, None, None))
    def tearDown(self):
        self.patcher_config.stop()
        self.patcher_log.stop()
        self.patcher_system.stop()
| 38.74359
| 101
| 0.697551
| 372
| 3,022
| 5.432796
| 0.271505
| 0.103909
| 0.054429
| 0.062345
| 0.405245
| 0.161306
| 0.084117
| 0.084117
| 0.065314
| 0.065314
| 0
| 0.001239
| 0.198875
| 3,022
| 77
| 102
| 39.246753
| 0.83354
| 0.172402
| 0
| 0.113208
| 0
| 0
| 0.152672
| 0.100844
| 0
| 0
| 0
| 0
| 0.075472
| 1
| 0.132075
| false
| 0.018868
| 0.132075
| 0
| 0.301887
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
ae41b46656d025e136cbbd3d68dd912515307e97
| 1,370
|
py
|
Python
|
setup.py
|
eduk8s/prototype-cli
|
74443dafb08e5b65f48ea3b9a7a03a803f79437a
|
[
"Apache-2.0"
] | 1
|
2019-12-30T02:52:56.000Z
|
2019-12-30T02:52:56.000Z
|
setup.py
|
eduk8s/prototype-cli
|
74443dafb08e5b65f48ea3b9a7a03a803f79437a
|
[
"Apache-2.0"
] | null | null | null |
setup.py
|
eduk8s/prototype-cli
|
74443dafb08e5b65f48ea3b9a7a03a803f79437a
|
[
"Apache-2.0"
] | null | null | null |
import sys
import os

from setuptools import setup

# Read the long description once, closing the file handle promptly.
with open("README.rst") as f:
    long_description = f.read()

classifiers = [
    "Development Status :: 3 - Alpha",
    "License :: OSI Approved :: Apache Software License",
    "Programming Language :: Python :: 3",
    "Programming Language :: Python :: 3.7",
    "Programming Language :: Python :: 3.8",
]

setup_kwargs = dict(
    name="eduk8s-cli",
    version="0.1.0",
    description="Command line client for eduk8s.",
    long_description=long_description,
    url="https://github.com/eduk8s/eduk8s-cli",
    author="Graham Dumpleton",
    author_email="Graham.Dumpleton@gmail.com",
    license="Apache License, Version 2.0",
    python_requires=">=3.7.0",
    classifiers=classifiers,
    keywords="eduk8s kubernetes",
    packages=["eduk8s", "eduk8s.cli", "eduk8s.kube"],
    package_dir={"eduk8s": "src/eduk8s"},
    # Fixed typo: the key was "eduks.crds", which matches no package name.
    # NOTE(review): "eduk8s.crds" is still absent from `packages` above;
    # confirm the crds data files are actually meant to be installed.
    package_data={"eduk8s.crds": ["session.yaml", "workshop.yaml"]},
    entry_points={
        "console_scripts": ["eduk8s = eduk8s.cli:main"],
        "eduk8s_cli_plugins": [
            "workshop = eduk8s.cli.workshop",
            "session = eduk8s.cli.session",
            "install = eduk8s.cli.install",
        ],
    },
    install_requires=[
        "click",
        "requests",
        "rstr",
        "PyYaml",
        "kopf==0.23.2",
        "openshift==0.10.1",
    ],
)

setup(**setup_kwargs)
| 27.4
| 68
| 0.607299
| 150
| 1,370
| 5.453333
| 0.52
| 0.08802
| 0.091687
| 0.095355
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.035883
| 0.227007
| 1,370
| 49
| 69
| 27.959184
| 0.736544
| 0
| 0
| 0.045455
| 0
| 0
| 0.472993
| 0.018978
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.068182
| 0
| 0.068182
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
ae490aaf317fe81f8776bee9c9b05dfe568d8efd
| 3,538
|
py
|
Python
|
tests/system/workspace_factory.py
|
davetcoleman/catkin_tools
|
3dd28ffab0e48775b14c6bab5a7b8b974cdd126c
|
[
"Apache-2.0"
] | null | null | null |
tests/system/workspace_factory.py
|
davetcoleman/catkin_tools
|
3dd28ffab0e48775b14c6bab5a7b8b974cdd126c
|
[
"Apache-2.0"
] | null | null | null |
tests/system/workspace_factory.py
|
davetcoleman/catkin_tools
|
3dd28ffab0e48775b14c6bab5a7b8b974cdd126c
|
[
"Apache-2.0"
] | null | null | null |
import os
import shutil
from ..utils import temporary_directory
class workspace_factory(temporary_directory):
    """Context manager yielding a WorkspaceFactory rooted in a temp directory.

    Subclasses the project's `temporary_directory` context manager: entering
    creates the temp dir, then wraps it in a WorkspaceFactory; exiting defers
    cleanup to the base class.
    """

    def __init__(self, source_space='src', prefix=''):
        # prefix is forwarded to the temporary-directory base class.
        super(workspace_factory, self).__init__(prefix=prefix)
        self.source_space = source_space

    def __enter__(self):
        # Base __enter__ creates the temp dir and returns its path.
        self.temporary_directory = super(workspace_factory, self).__enter__()
        self.workspace_factory = WorkspaceFactory(self.temporary_directory, self.source_space)
        return self.workspace_factory

    def __exit__(self, exc_type, exc_value, traceback):
        super(workspace_factory, self).__exit__(exc_type, exc_value, traceback)
class WorkspaceFactory(object):
    """Generates a throwaway catkin workspace populated with stub packages.

    Packages are declared with add_package(); build() materializes them on
    disk as minimal package.xml + CMakeLists.txt pairs under the source space.
    """

    def __init__(self, workspace, source_space):
        self.workspace = workspace
        self.source_space = os.path.join(self.workspace, source_space)
        # name -> Package record; written to disk by build().
        self.packages = {}

    class Package(object):
        # Lightweight record of a stub package's dependency lists.
        def __init__(self, name, depends, build_depends, run_depends, test_depends):
            self.name = name
            # `depends` is shorthand that feeds both build and run deps.
            self.build_depends = (build_depends or []) + (depends or [])
            self.run_depends = (run_depends or []) + (depends or [])
            self.test_depends = (test_depends or [])

    def add_package(self, pkg_name, depends=None, build_depends=None, run_depends=None, test_depends=None):
        """Register a stub package to be created by the next build()."""
        self.packages[pkg_name] = self.Package(pkg_name, depends, build_depends, run_depends, test_depends)

    def build(self):
        """Write every registered package to disk under the source space."""
        cwd = os.getcwd()
        if not os.path.isdir(self.workspace):
            if os.path.exists(self.workspace):
                raise RuntimeError("Cannot build workspace in '{0}' because it is a file".format(self.workspace))
            os.makedirs(self.workspace)
        if os.path.exists(self.source_space):
            print("WARNING: source space given to WorkspaceFactory exists, clearing before build()'ing")
            self.clear()
        os.makedirs(self.source_space)
        try:
            os.chdir(self.source_space)
            for name, pkg in self.packages.items():
                pkg_dir = os.path.join(self.source_space, name)
                os.makedirs(pkg_dir)
                # Assemble a minimal package.xml: fixed header, one line per
                # dependency, fixed cmake-export footer.
                pkg_xml_path = os.path.join(pkg_dir, 'package.xml')
                pkg_xml = """\
<?xml version="1.0"?>
<package>
<name>{name}</name>
<version>0.0.0</version>
<description>
Description for {name}
</description>
<maintainer email="person@email.com">Firstname Lastname</maintainer>
<license>MIT</license>
"""
                pkg_xml += '\n'.join(
                    [' <build_depend>{0}</build_depend>'.format(x) for x in pkg.build_depends] +
                    [' <run_depend>{0}</run_depend>'.format(x) for x in pkg.run_depends] +
                    [' <test_depend>{0}</test_depend>'.format(x) for x in pkg.test_depends]
                )
                pkg_xml += """
<export>
<build_type>cmake</build_type>
</export>
</package>
"""
                with open(pkg_xml_path, 'w') as f:
                    f.write(pkg_xml.format(name=name))
                cmakelists_txt_path = os.path.join(pkg_dir, 'CMakeLists.txt')
                cmakelists_txt = """\
cmake_minimum_required(VERSION 2.8.3)
project({name})
add_custom_target(install)
"""
                with open(cmakelists_txt_path, 'w') as f:
                    # NOTE(review): the template has no {find_package} field,
                    # so that keyword argument is silently unused — confirm
                    # whether find_package() lines were meant to be emitted.
                    f.write(cmakelists_txt.format(name=name, find_package=' '.join(pkg.build_depends)))
        finally:
            # Always restore the caller's working directory.
            os.chdir(cwd)

    def clear(self):
        """Delete the whole workspace tree if it exists."""
        if os.path.exists(self.workspace):
            shutil.rmtree(self.workspace)
| 37.242105
| 113
| 0.623233
| 433
| 3,538
| 4.836028
| 0.249423
| 0.063037
| 0.057307
| 0.035817
| 0.223496
| 0.152818
| 0.103152
| 0.042025
| 0
| 0
| 0
| 0.004539
| 0.252685
| 3,538
| 94
| 114
| 37.638298
| 0.787443
| 0
| 0
| 0.063291
| 0
| 0
| 0.186546
| 0.078293
| 0
| 0
| 0
| 0
| 0
| 1
| 0.101266
| false
| 0
| 0.037975
| 0
| 0.189873
| 0.012658
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
ae4a834438c1be65dcec72110a53d1ee4b52eb26
| 7,722
|
py
|
Python
|
zero/recommendation_algorithm.py
|
Akulen/mangaki-zero
|
5eb2de06b8684ed948b8b903e9f567f06c35e3ef
|
[
"MIT"
] | null | null | null |
zero/recommendation_algorithm.py
|
Akulen/mangaki-zero
|
5eb2de06b8684ed948b8b903e9f567f06c35e3ef
|
[
"MIT"
] | null | null | null |
zero/recommendation_algorithm.py
|
Akulen/mangaki-zero
|
5eb2de06b8684ed948b8b903e9f567f06c35e3ef
|
[
"MIT"
] | null | null | null |
from zero.side import SideInformation
from zero.chrono import Chrono
from collections import defaultdict
from itertools import product
import numpy as np
import pickle
import os.path
import logging
class RecommendationAlgorithmFactory:
    """Registry mapping algorithm names to classes and default kwargs."""

    def __init__(self):
        self.algorithm_registry = {}
        self.algorithm_factory = {}
        self.logger = logging.getLogger(
            __name__ + '.' + self.__class__.__name__)
        self.initialized = False
        self.size = 0

    def initialize(self):
        """Mark the factory as ready and log how many algorithms registered.

        Registration happens as a side effect of module imports before this
        is called, so by now the registry is fully populated.
        """
        # FIXME: make it less complicated and go for a commonly used design
        # pattern. Registration currently cascades through package
        # __init__ imports before this method runs.
        count = len(self.algorithm_registry)
        self.logger.debug(
            'Recommendation algorithm factory initialized.'
            '{} algorithms available in the factory.'.format(count))
        self.initialized = True

    def register(self, name, klass, default_kwargs):
        """Record *klass* (and its default kwargs) under *name*."""
        self.algorithm_registry[name] = klass
        self.algorithm_factory[name] = default_kwargs
        self.logger.debug(
            'Registered {} as a recommendation algorithm'.format(name))
class RecommendationAlgorithm:
    """Base class for recommendation algorithms.

    Concrete algorithms register themselves on the shared ``factory`` class
    attribute (see the module-level ``register_algorithm`` decorator) and
    inherit pickle-based snapshotting plus common evaluation metrics
    (RMSE, MAE, DCG, NDCG). Subclasses are expected to provide predict().
    """

    factory = RecommendationAlgorithmFactory()

    def __init__(self, verbose_level=1):
        self.verbose_level = verbose_level
        self.chrono = Chrono(self.verbose_level)
        self.nb_users = None
        self.nb_works = None
        self.size = 0  # Size of the last written backup file, in bytes
        # Per-phase metric history, e.g. metrics['train']['rmse'] -> [values]
        self.metrics = {category: defaultdict(list)
                        for category in {'train', 'test'}}
        self.dataset = None
        self.X_train = None
        self.y_train = None
        self.X_test = None
        self.y_test = None

    def get_backup_path(self, folder, filename):
        """Return the full path of the pickle snapshot inside *folder*.

        :raises NotImplementedError: if the algorithm is not serializable.
        """
        if not self.is_serializable:
            raise NotImplementedError
        if filename is None:
            filename = '%s.pickle' % self.get_shortname()
        return os.path.join(folder, filename)

    @property
    def is_serializable(self):
        """Whether save()/load() snapshots are supported (off by default)."""
        return False

    def save(self, folder, filename=None):
        """Pickle the whole instance state into *folder*."""
        self.backup_path = self.get_backup_path(folder, filename)
        with open(self.backup_path, 'wb') as f:
            pickle.dump(self.__dict__, f, pickle.HIGHEST_PROTOCOL)
        self.size = os.path.getsize(self.backup_path)  # In bytes

    def load(self, folder, filename=None):
        """Restore instance state from a pickle snapshot.

        This function raises FileNotFoundException if no backup exists.
        """
        self.backup_path = self.get_backup_path(folder, filename)
        with open(self.backup_path, 'rb') as f:
            backup = pickle.load(f)
        self.__dict__.update(backup)

    def delete_snapshot(self):
        """Remove the snapshot written by the last save()/load()."""
        os.remove(self.backup_path)

    def recommend(self, user_ids, item_ids=None, k=None, method='mean'):
        r"""
        Recommend :math:`k` items to a group of users.
        :param user_ids: the users
        :param item_ids: a subset of items. If is it None, then it is all items.
        :param k: the number of items to recommend, if None then it is all items.
        :param method: a way to combine the predictions. By default it is mean.
        :returns: a numpy array with two columns, `item_id` and recommendation score
        :complexity: :math:`O(N + K \log K)`
        """
        if item_ids is None:
            item_ids = np.arange(self.nb_works)
        item_ids = np.asarray(item_ids)
        n = len(item_ids)
        if k is None:
            k = n
        X = np.array(list(product(user_ids, item_ids)))
        pred = self.predict(X).reshape(len(user_ids), -1)
        if method == 'mean':
            combined_pred = pred.mean(axis=0)
            # Positions (within item_ids) of the k highest combined scores.
            indices = np.argpartition(combined_pred, n - k)[-k:]
            results = np.empty(k, dtype=[('item_id', int),
                                         ('score', combined_pred.dtype)])
            # Bug fixes: report the actual item ids rather than positions
            # (they only coincide when item_ids is the full arange), and
            # keep only the k selected scores — assigning all n scores
            # raised a broadcast error whenever k < n.
            results['item_id'] = item_ids[indices]
            results['score'] = combined_pred[indices]
            results.sort(order='score')
            return results[::-1]
        else:
            raise NotImplementedError

    def load_tags(self, T=None, perform_scaling=True, with_mean=False):
        """Load item/tag side information into self.T and self.nb_tags."""
        side = SideInformation(T, perform_scaling, with_mean)
        self.nb_tags = side.nb_tags
        self.T = side.T

    def set_parameters(self, nb_users, nb_works):
        """Set the dimensions of the user/item matrix."""
        self.nb_users = nb_users
        self.nb_works = nb_works

    def get_shortname(self):
        """Short identifier used for snapshot filenames and __str__."""
        return 'algo'

    @staticmethod
    def compute_rmse(y_pred, y_true):
        """Root-mean-squared error (symmetric in its two arguments)."""
        return np.power(y_true - y_pred, 2).mean() ** 0.5

    @staticmethod
    def compute_mae(y_pred, y_true):
        """Mean absolute error (symmetric in its two arguments)."""
        return np.abs(y_true - y_pred).mean()

    def get_ranked_gains(self, y_pred, y_true):
        """Return the true gains ordered by decreasing predicted score."""
        return y_true[np.argsort(y_pred)[::-1]]

    def compute_dcg(self, y_pred, y_true):
        '''
        Computes the discounted cumulative gain as stated in:
        https://gist.github.com/bwhite/3726239
        '''
        ranked_gains = self.get_ranked_gains(y_pred, y_true)
        return self.dcg_at_k(ranked_gains, 100)

    def compute_ndcg(self, y_pred, y_true):
        """Normalized DCG of the predictions, truncated at rank 100."""
        ranked_gains = self.get_ranked_gains(y_pred, y_true)
        return self.ndcg_at_k(ranked_gains, 100)

    def dcg_at_k(self, r, k):
        """DCG of relevance list *r* truncated at rank *k*."""
        # np.asfarray was removed in NumPy 2.0; this spelling is equivalent.
        r = np.asarray(r, dtype=float)[:k]
        if r.size:
            return np.sum(np.subtract(np.power(2, r), 1) /
                          np.log2(np.arange(2, r.size + 2)))
        return 0.

    def ndcg_at_k(self, r, k):
        """DCG@k normalized by the ideal (sorted-descending) DCG@k."""
        idcg = self.dcg_at_k(sorted(r, reverse=True), k)
        if not idcg:
            return 0.
        return self.dcg_at_k(r, k) / idcg

    def compute_metrics(self):
        """Compute and store train/test RMSE for the fitted model."""
        if self.X_train is not None:
            y_train_pred = self.predict(self.X_train)
            train_rmse = self.compute_rmse(self.y_train, y_train_pred)
            self.metrics['train']['rmse'].append(train_rmse)
            logging.warning('Train RMSE=%f', train_rmse)
        if self.X_test is not None:
            y_test_pred = self.predict(self.X_test)
            test_rmse = self.compute_rmse(self.y_test, y_test_pred)
            self.metrics['test']['rmse'].append(test_rmse)
            logging.warning('Test RMSE=%f', test_rmse)

    @staticmethod
    def available_evaluation_metrics():
        return ['rmse', 'mae', 'dcg', 'ndcg']

    @classmethod
    def register_algorithm(cls, name, klass, default_kwargs=None):
        """Register *klass* under *name* on the shared factory."""
        cls.factory.register(name, klass, default_kwargs)

    @classmethod
    def list_available_algorithms(cls):
        return list(cls.factory.algorithm_registry.keys())

    @classmethod
    def instantiate_algorithm(cls, name):
        """Build a registered algorithm by name with its default kwargs.

        :raises KeyError: if *name* was never registered.
        """
        klass = cls.factory.algorithm_registry.get(name)
        default_kwargs = cls.factory.algorithm_factory.get(name) or {}
        if not klass:
            raise KeyError('No algorithm named "{}" in the registry! Did you '
                           'forget a @register_algorithm? A typo?'
                           .format(name))
        return klass(**default_kwargs)

    def __str__(self):
        return '[%s]' % self.get_shortname().upper()
def register_algorithm(algorithm_name, default_kwargs=None):
    """Class decorator registering the decorated class under *algorithm_name*."""
    kwargs = {} if default_kwargs is None else default_kwargs

    def decorator(cls):
        # Delegates to the shared factory held on RecommendationAlgorithm.
        RecommendationAlgorithm.register_algorithm(algorithm_name, cls, kwargs)
        return cls

    return decorator
| 35.916279
| 91
| 0.615644
| 990
| 7,722
| 4.594949
| 0.239394
| 0.028578
| 0.009233
| 0.015388
| 0.127281
| 0.08925
| 0.061992
| 0.051
| 0.051
| 0.051
| 0
| 0.005422
| 0.283476
| 7,722
| 214
| 92
| 36.084112
| 0.816736
| 0.135587
| 0
| 0.104575
| 0
| 0
| 0.051545
| 0
| 0
| 0
| 0
| 0.004673
| 0
| 1
| 0.183007
| false
| 0
| 0.052288
| 0.052288
| 0.379085
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
ae4aea0f2b66c03f8fc9b59889443427e5fe285c
| 150,103
|
py
|
Python
|
venv/Lib/site-packages/pyo/lib/_wxwidgets.py
|
mintzer/pupillometry-rf-back
|
cfa86fa984a49dce0123798f8de5b838c02e10d5
|
[
"CC-BY-4.0"
] | null | null | null |
venv/Lib/site-packages/pyo/lib/_wxwidgets.py
|
mintzer/pupillometry-rf-back
|
cfa86fa984a49dce0123798f8de5b838c02e10d5
|
[
"CC-BY-4.0"
] | null | null | null |
venv/Lib/site-packages/pyo/lib/_wxwidgets.py
|
mintzer/pupillometry-rf-back
|
cfa86fa984a49dce0123798f8de5b838c02e10d5
|
[
"CC-BY-4.0"
] | null | null | null |
from __future__ import division
from __future__ import print_function
from __future__ import absolute_import
"""
Copyright 2009-2015 Olivier Belanger
This file is part of pyo, a python module to help digital signal
processing script creation.
pyo is free software: you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License as
published by the Free Software Foundation, either version 3 of the
License, or (at your option) any later version.
pyo is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with pyo. If not, see <http://www.gnu.org/licenses/>.
"""
import wx, os, sys, math, time, unicodedata
import wx.stc as stc
from ._core import rescale
# wxPython "Phoenix" renamed/moved several classic-wx names; alias the old
# names onto the new locations so the rest of this module can keep using
# the classic spellings on both generations of wxPython.
if "phoenix" in wx.version():
    wx.GraphicsContext_Create = wx.GraphicsContext.Create
    wx.EmptyBitmap = wx.Bitmap
    wx.EmptyImage = wx.Image
    wx.BitmapFromImage = wx.Bitmap
    wx.Image_HSVValue = wx.Image.HSVValue
    wx.Image_HSVtoRGB = wx.Image.HSVtoRGB
# `unicode` only exists on Python 2; on Python 3 every str is unicode.
if sys.version_info[0] < 3:
    unicode_t = unicode
else:
    unicode_t = str
# Default background colour shared by the widgets in this module.
BACKGROUND_COLOUR = "#EBEBEB"
def interpFloat(t, v1, v2):
    """Linear interpolation: return the point at fraction t (0-1) between v1 and v2."""
    return v1 + t * (v2 - v1)
def tFromValue(value, v1, v2):
    """Return the 0-1 position of *value* within the range [v1, v2].

    A degenerate range (v1 == v2) maps everything to 1.0.
    """
    span = v2 - v1
    if span == 0:
        return 1.0
    return float(value - v1) / span
def clamp(v, minv, maxv):
    """Constrain v to [minv, maxv], clipping the low bound before the high
    one (so maxv wins if the bounds are inverted, matching the original)."""
    return min(max(v, minv), maxv)
def toLog(t, v1, v2):
    """Map t to its logarithmic position (0-1) between v1 and v2."""
    numerator = math.log10(t / v1)
    denominator = math.log10(v2 / v1)
    return numerator / denominator
def toExp(t, v1, v2):
    """Inverse of toLog: map a linear 0-1 position back onto the log scale."""
    lo = math.log10(v1)
    hi = math.log10(v2)
    return math.pow(10, t * (hi - lo) + lo)
# Lookup table mapping powers of two (2..65536) to their exponents;
# consumed by powOfTwoToInt() below.
POWOFTWO = {
    2: 1,
    4: 2,
    8: 3,
    16: 4,
    32: 5,
    64: 6,
    128: 7,
    256: 8,
    512: 9,
    1024: 10,
    2048: 11,
    4096: 12,
    8192: 13,
    16384: 14,
    32768: 15,
    65536: 16,
}
def powOfTwo(x):
    """Return 2 raised to the power of x."""
    return 2 ** x
def powOfTwoToInt(x):
    "Return the exponent of 2 corresponding to the value x."
    # Table lookup; raises KeyError if x is not one of the powers of two
    # listed in POWOFTWO (2..65536).
    return POWOFTWO[x]
def GetRoundBitmap(w, h, r):
    """Return a w x h bitmap whose mask shows a rounded rectangle of
    corner radius r (everything drawn in maskColor becomes transparent)."""
    # NOTE(review): Phoenix spells this wx.Colour; wx.Color relies on a
    # compatibility alias — confirm it exists in the targeted wx build.
    maskColor = wx.Color(0, 0, 0)
    shownColor = wx.Color(5, 5, 5)
    b = wx.EmptyBitmap(w, h)
    dc = wx.MemoryDC(b)
    # Fill everything with the mask colour, then paint the visible
    # rounded-rectangle area in a different colour.
    dc.SetBrush(wx.Brush(maskColor))
    dc.DrawRectangle(0, 0, w, h)
    dc.SetBrush(wx.Brush(shownColor))
    dc.SetPen(wx.Pen(shownColor))
    dc.DrawRoundedRectangle(0, 0, w, h, r)
    dc.SelectObject(wx.NullBitmap)
    b.SetMaskColour(maskColor)
    return b
class ControlSlider(wx.Panel):
    """Custom-drawn slider widget.

    Supports linear, logarithmic, integer and power-of-two value ranges,
    horizontal or vertical orientation, direct keyboard entry (double-click
    the knob to select it, type a number, press Enter), and displays an
    optional MIDI controller number. The current value is pushed to
    ``outFunction`` from OnPaint whenever ``propagate`` is set.
    """

    def __init__(
        self,
        parent,
        minvalue,
        maxvalue,
        init=None,
        pos=(0, 0),
        size=(200, 16),
        log=False,
        outFunction=None,
        integer=False,
        powoftwo=False,
        backColour=None,
        orient=wx.HORIZONTAL,
        ctrllabel="",
    ):
        # Swap the default size when the caller asked for a vertical slider
        # but kept the horizontal default size.
        if size == (200, 16) and orient == wx.VERTICAL:
            size = (40, 200)
        wx.Panel.__init__(
            self, parent=parent, id=wx.ID_ANY, pos=pos, size=size, style=wx.NO_BORDER | wx.WANTS_CHARS | wx.EXPAND
        )
        self.parent = parent
        if backColour:
            self.backgroundColour = backColour
        else:
            self.backgroundColour = BACKGROUND_COLOUR
        self.SetBackgroundStyle(wx.BG_STYLE_CUSTOM)
        self.SetBackgroundColour(self.backgroundColour)
        self.orient = orient
        # self.SetMinSize(self.GetSize())
        # Knob geometry depends on orientation.
        if self.orient == wx.VERTICAL:
            self.knobSize = 17
            self.knobHalfSize = 8
            self.sliderWidth = size[0] - 29
        else:
            self.knobSize = 40
            self.knobHalfSize = 20
            self.sliderHeight = size[1] - 5
        self.outFunction = outFunction
        self.integer = integer
        self.log = log
        self.powoftwo = powoftwo
        # Power-of-two mode implies integer exponents on a linear scale.
        if self.powoftwo:
            self.integer = True
            self.log = False
        self.ctrllabel = ctrllabel
        self.SetRange(minvalue, maxvalue)
        self.borderWidth = 1
        self.selected = False       # True while the knob is in typing mode
        self._enable = True
        self.propagate = True       # push value to outFunction on next paint
        self.midictl = None         # MIDI controller number to display
        self.new = ""               # digits typed so far in typing mode
        if init is not None:
            self.SetValue(init)
            self.init = init
        else:
            self.SetValue(minvalue)
            self.init = minvalue
        self.clampPos()
        self.Bind(wx.EVT_LEFT_DOWN, self.MouseDown)
        self.Bind(wx.EVT_LEFT_UP, self.MouseUp)
        self.Bind(wx.EVT_LEFT_DCLICK, self.DoubleClick)
        self.Bind(wx.EVT_MOTION, self.MouseMotion)
        self.Bind(wx.EVT_PAINT, self.OnPaint)
        self.Bind(wx.EVT_SIZE, self.OnResize)
        self.Bind(wx.EVT_CHAR, self.onChar)
        self.Bind(wx.EVT_KILL_FOCUS, self.LooseFocus)
        # Platform-dependent paint DC and label font.
        if sys.platform == "win32" or sys.platform.startswith("linux"):
            self.dcref = wx.BufferedPaintDC
            self.font = wx.Font(7, wx.FONTFAMILY_TELETYPE, wx.FONTSTYLE_NORMAL, wx.FONTWEIGHT_NORMAL)
        else:
            self.dcref = wx.PaintDC
            self.font = wx.Font(10, wx.FONTFAMILY_TELETYPE, wx.FONTSTYLE_NORMAL, wx.FONTWEIGHT_NORMAL)

    def getCtrlLabel(self):
        return self.ctrllabel

    def setMidiCtl(self, x, propagate=True):
        """Set the MIDI controller number shown on the slider."""
        self.propagate = propagate
        self.midictl = x
        self.Refresh()

    def getMidiCtl(self):
        return self.midictl

    def getMinValue(self):
        return self.minvalue

    def getMaxValue(self):
        return self.maxvalue

    def Enable(self):
        self._enable = True
        wx.CallAfter(self.Refresh)

    def Disable(self):
        self._enable = False
        wx.CallAfter(self.Refresh)

    def setSliderHeight(self, height):
        self.sliderHeight = height
        self.Refresh()

    def setSliderWidth(self, width):
        self.sliderWidth = width

    def getInit(self):
        return self.init

    def SetRange(self, minvalue, maxvalue):
        self.minvalue = minvalue
        self.maxvalue = maxvalue

    def getRange(self):
        return [self.minvalue, self.maxvalue]

    def scale(self):
        """Convert the knob's pixel position into a value in the range."""
        if self.orient == wx.VERTICAL:
            h = self.GetSize()[1]
            # Vertical sliders grow upward, hence the h - pos inversion.
            inter = tFromValue(h - self.pos, self.knobHalfSize, self.GetSize()[1] - self.knobHalfSize)
        else:
            inter = tFromValue(self.pos, self.knobHalfSize, self.GetSize()[0] - self.knobHalfSize)
        if not self.integer:
            return interpFloat(inter, self.minvalue, self.maxvalue)
        elif self.powoftwo:
            return powOfTwo(int(interpFloat(inter, self.minvalue, self.maxvalue)))
        else:
            return int(interpFloat(inter, self.minvalue, self.maxvalue))

    def SetValue(self, value, propagate=True):
        """Set the slider value programmatically and refresh the display."""
        self.propagate = propagate
        if self.HasCapture():
            self.ReleaseMouse()
        # In power-of-two mode the stored value is the exponent.
        if self.powoftwo:
            value = powOfTwoToInt(value)
        value = clamp(value, self.minvalue, self.maxvalue)
        if self.log:
            t = toLog(value, self.minvalue, self.maxvalue)
            self.value = interpFloat(t, self.minvalue, self.maxvalue)
        else:
            t = tFromValue(value, self.minvalue, self.maxvalue)
            self.value = interpFloat(t, self.minvalue, self.maxvalue)
        if self.integer:
            self.value = int(self.value)
        if self.powoftwo:
            self.value = powOfTwo(self.value)
        self.clampPos()
        self.selected = False
        wx.CallAfter(self.Refresh)

    def GetValue(self):
        """Return the current value, mapped back from the log scale if needed."""
        if self.log:
            t = tFromValue(self.value, self.minvalue, self.maxvalue)
            val = toExp(t, self.minvalue, self.maxvalue)
        else:
            val = self.value
        if self.integer:
            val = int(val)
        return val

    def LooseFocus(self, event):
        # Leaving focus cancels typing mode.
        self.selected = False
        self.Refresh()

    def onChar(self, event):
        """Keyboard entry while the knob is selected (typing mode)."""
        if self.selected:
            char = ""
            if event.GetKeyCode() in range(wx.WXK_NUMPAD0, wx.WXK_NUMPAD9 + 1):
                char = str(event.GetKeyCode() - wx.WXK_NUMPAD0)
            elif event.GetKeyCode() in [wx.WXK_SUBTRACT, wx.WXK_NUMPAD_SUBTRACT]:
                char = "-"
            elif event.GetKeyCode() in [wx.WXK_DECIMAL, wx.WXK_NUMPAD_DECIMAL]:
                char = "."
            elif event.GetKeyCode() == wx.WXK_BACK:
                if self.new != "":
                    self.new = self.new[0:-1]
            elif event.GetKeyCode() < 256:
                char = chr(event.GetKeyCode())
            # Accept only numeric characters into the typed buffer.
            if char in ["0", "1", "2", "3", "4", "5", "6", "7", "8", "9", ".", "-"]:
                self.new += char
            elif event.GetKeyCode() in [wx.WXK_RETURN, wx.WXK_NUMPAD_ENTER]:
                # NOTE(review): eval() on the typed buffer — the character
                # filter above restricts it to digits/./-, but confirm this
                # is the intended parsing mechanism.
                self.SetValue(eval(self.new))
                self.new = ""
                self.selected = False
            self.Refresh()
        event.Skip()

    def MouseDown(self, evt):
        # Shift-click behaves like a double click (enter typing mode).
        if evt.ShiftDown():
            self.DoubleClick(evt)
            return
        if self._enable:
            size = self.GetSize()
            if self.orient == wx.VERTICAL:
                self.pos = clamp(evt.GetPosition()[1], self.knobHalfSize, size[1] - self.knobHalfSize)
            else:
                self.pos = clamp(evt.GetPosition()[0], self.knobHalfSize, size[0] - self.knobHalfSize)
            self.value = self.scale()
            self.CaptureMouse()
            self.selected = False
            self.Refresh()
        evt.Skip()

    def MouseUp(self, evt):
        if self.HasCapture():
            self.ReleaseMouse()

    def DoubleClick(self, event):
        """Select the knob (typing mode) when double-clicked on it."""
        if self._enable:
            w, h = self.GetSize()
            pos = event.GetPosition()
            if self.orient == wx.VERTICAL:
                if wx.Rect(0, self.pos - self.knobHalfSize, w, self.knobSize).Contains(pos):
                    self.selected = True
            else:
                if wx.Rect(self.pos - self.knobHalfSize, 0, self.knobSize, h).Contains(pos):
                    self.selected = True
            self.Refresh()
        event.Skip()

    def MouseMotion(self, evt):
        # Drag the knob while the mouse is captured.
        if self._enable:
            size = self.GetSize()
            if self.HasCapture():
                if self.orient == wx.VERTICAL:
                    self.pos = clamp(evt.GetPosition()[1], self.knobHalfSize, size[1] - self.knobHalfSize)
                else:
                    self.pos = clamp(evt.GetPosition()[0], self.knobHalfSize, size[0] - self.knobHalfSize)
                self.value = self.scale()
                self.selected = False
                self.Refresh()

    def OnResize(self, evt):
        self.clampPos()
        self.Refresh()

    def clampPos(self):
        """Recompute the knob's pixel position from the current value."""
        size = self.GetSize()
        if self.powoftwo:
            val = powOfTwoToInt(self.value)
        else:
            val = self.value
        if self.orient == wx.VERTICAL:
            self.pos = tFromValue(val, self.minvalue, self.maxvalue) * (size[1] - self.knobSize) + self.knobHalfSize
            self.pos = clamp(size[1] - self.pos, self.knobHalfSize, size[1] - self.knobHalfSize)
        else:
            self.pos = tFromValue(val, self.minvalue, self.maxvalue) * (size[0] - self.knobSize) + self.knobHalfSize
            self.pos = clamp(self.pos, self.knobHalfSize, size[0] - self.knobHalfSize)

    def setBackgroundColour(self, colour):
        self.backgroundColour = colour
        self.SetBackgroundColour(self.backgroundColour)
        self.Refresh()

    def OnPaint(self, evt):
        """Draw the slider and, if propagate is set, push the value out."""
        w, h = self.GetSize()
        if w <= 0 or h <= 0:
            evt.Skip()
            return
        dc = self.dcref(self)
        gc = wx.GraphicsContext_Create(dc)
        dc.SetBrush(wx.Brush(self.backgroundColour, wx.SOLID))
        dc.Clear()
        # Draw background
        dc.SetPen(wx.Pen(self.backgroundColour, width=self.borderWidth, style=wx.SOLID))
        dc.DrawRectangle(0, 0, w, h)
        # Draw inner part
        if self._enable:
            sliderColour = "#99A7CC"
        else:
            sliderColour = "#BBBBBB"
        if self.orient == wx.VERTICAL:
            w2 = (w - self.sliderWidth) // 2
            rec = wx.Rect(w2, 0, self.sliderWidth, h)
            brush = gc.CreateLinearGradientBrush(w2, 0, w2 + self.sliderWidth, 0, "#646986", sliderColour)
        else:
            h2 = self.sliderHeight // 4
            rec = wx.Rect(0, h2, w, self.sliderHeight)
            brush = gc.CreateLinearGradientBrush(0, h2, 0, h2 + self.sliderHeight, "#646986", sliderColour)
        gc.SetBrush(brush)
        gc.DrawRoundedRectangle(rec[0], rec[1], rec[2], rec[3], 2)
        # Draw the MIDI controller number at both ends of the track.
        if self.midictl is not None:
            if sys.platform == "win32" or sys.platform.startswith("linux"):
                dc.SetFont(wx.Font(6, wx.FONTFAMILY_ROMAN, wx.FONTSTYLE_NORMAL, wx.FONTWEIGHT_NORMAL))
            else:
                dc.SetFont(wx.Font(9, wx.FONTFAMILY_ROMAN, wx.FONTSTYLE_NORMAL, wx.FONTWEIGHT_NORMAL))
            dc.SetTextForeground("#FFFFFF")
            if self.orient == wx.VERTICAL:
                dc.DrawLabel(str(self.midictl), wx.Rect(w2, 2, self.sliderWidth, 12), wx.ALIGN_CENTER)
                dc.DrawLabel(str(self.midictl), wx.Rect(w2, h - 12, self.sliderWidth, 12), wx.ALIGN_CENTER)
            else:
                dc.DrawLabel(str(self.midictl), wx.Rect(2, 0, h, h), wx.ALIGN_CENTER)
                dc.DrawLabel(str(self.midictl), wx.Rect(w - h, 0, h, h), wx.ALIGN_CENTER)
        # Draw knob
        if self._enable:
            knobColour = "#888888"
        else:
            knobColour = "#DDDDDD"
        if self.orient == wx.VERTICAL:
            rec = wx.Rect(0, self.pos - self.knobHalfSize, w, self.knobSize - 1)
            if self.selected:
                brush = wx.Brush("#333333", wx.SOLID)
            else:
                brush = gc.CreateLinearGradientBrush(0, 0, w, 0, "#323854", knobColour)
            gc.SetBrush(brush)
            gc.DrawRoundedRectangle(rec[0], rec[1], rec[2], rec[3], 3)
        else:
            rec = wx.Rect(int(self.pos) - self.knobHalfSize, 0, self.knobSize - 1, h)
            if self.selected:
                brush = wx.Brush("#333333", wx.SOLID)
            else:
                brush = gc.CreateLinearGradientBrush(
                    self.pos - self.knobHalfSize, 0, self.pos + self.knobHalfSize, 0, "#323854", knobColour
                )
            gc.SetBrush(brush)
            gc.DrawRoundedRectangle(rec[0], rec[1], rec[2], rec[3], 3)
        dc.SetFont(self.font)
        # Draw text
        if self.selected and self.new:
            # Typing mode: show the partially typed number.
            val = self.new
        else:
            # Precision shrinks as the magnitude of the value grows.
            if self.integer:
                val = "%d" % self.GetValue()
            elif abs(self.GetValue()) >= 1000:
                val = "%.0f" % self.GetValue()
            elif abs(self.GetValue()) >= 100:
                val = "%.1f" % self.GetValue()
            elif abs(self.GetValue()) >= 10:
                val = "%.2f" % self.GetValue()
            elif abs(self.GetValue()) < 10:
                val = "%.3f" % self.GetValue()
        if sys.platform.startswith("linux"):
            width = len(val) * (dc.GetCharWidth() - 3)
        else:
            width = len(val) * dc.GetCharWidth()
        dc.SetTextForeground("#FFFFFF")
        dc.DrawLabel(val, rec, wx.ALIGN_CENTER)
        # Send value
        if self.outFunction and self.propagate:
            self.outFunction(self.GetValue())
        self.propagate = True
        evt.Skip()
# TODO: key, command and slmap should be removed from the multislider widget.
# It should work in the same way as the ControlSlider widget.
class MultiSlider(wx.Panel):
    """Stack of horizontal mini-sliders, one per channel.

    Each of the ``len(init)`` rows holds a normalized 0-1 value
    (``self._values``); ``slmap`` converts between normalized and display
    values, and every change is reported through ``command(key, labels)``.
    """

    def __init__(self, parent, init, key, command, slmap, ctrllabel=""):
        wx.Panel.__init__(self, parent, size=(250, 250))
        self.backgroundColour = BACKGROUND_COLOUR
        self.SetBackgroundStyle(wx.BG_STYLE_CUSTOM)
        self.SetBackgroundColour(self.backgroundColour)
        self.Bind(wx.EVT_SIZE, self.OnResize)
        self.Bind(wx.EVT_PAINT, self.OnPaint)
        self.Bind(wx.EVT_LEFT_DOWN, self.MouseDown)
        self.Bind(wx.EVT_LEFT_UP, self.MouseUp)
        self.Bind(wx.EVT_MOTION, self.MouseMotion)
        self._slmap = slmap
        self.ctrllabel = ctrllabel
        # Normalized 0-1 value per channel; slmap.set() maps display -> 0-1.
        self._values = [slmap.set(x) for x in init]
        self._nchnls = len(init)
        self._labels = init
        self._key = key
        self._command = command
        self._height = 16  # pixel height of each slider row
        if sys.platform == "win32" or sys.platform.startswith("linux"):
            self._font = wx.Font(7, wx.FONTFAMILY_ROMAN, wx.FONTSTYLE_NORMAL, wx.FONTWEIGHT_NORMAL)
        else:
            self._font = wx.Font(10, wx.FONTFAMILY_ROMAN, wx.FONTSTYLE_NORMAL, wx.FONTWEIGHT_NORMAL)
        self.SetSize((250, self._nchnls * 16))
        self.SetMinSize((250, self._nchnls * 16))

    def getCtrlLabel(self):
        return self.ctrllabel

    def OnResize(self, event):
        self.Layout()
        self.Refresh()

    def OnPaint(self, event):
        """Draw one filled bar and centered label per channel."""
        w, h = self.GetSize()
        dc = wx.AutoBufferedPaintDC(self)
        dc.SetBrush(wx.Brush(self.backgroundColour))
        dc.Clear()
        dc.DrawRectangle(0, 0, w, h)
        dc.SetBrush(wx.Brush("#000000"))
        dc.SetFont(self._font)
        dc.SetTextForeground("#999999")
        for i in range(self._nchnls):
            # Bar width is the normalized value scaled to the panel width.
            x = int(self._values[i] * w)
            y = self._height * i
            dc.DrawRectangle(0, y + 1, x, self._height - 2)
            rec = wx.Rect(w // 2 - 15, y, 30, self._height)
            dc.DrawLabel("%s" % self._labels[i], rec, wx.ALIGN_CENTER)

    def MouseDown(self, evt):
        """Set the clicked row's value from the click's x position."""
        w, h = self.GetSize()
        pos = evt.GetPosition()
        slide = pos[1] // self._height
        if slide >= 0 and slide < self._nchnls:
            self._values[slide] = pos[0] / float(w)
            # slmap.get() maps the normalized 0-1 value back to display units.
            if self._slmap._res == "int":
                self._labels = [int(self._slmap.get(x)) for x in self._values]
            else:
                self._labels = [self._slmap.get(x) for x in self._values]
            self._command(self._key, self._labels)
            self.CaptureMouse()
        self.Refresh()
        evt.Skip()

    def MouseUp(self, evt):
        if self.HasCapture():
            self.ReleaseMouse()

    def MouseMotion(self, evt):
        # Same update as MouseDown, but only while dragging with the left button.
        w, h = self.GetSize()
        pos = evt.GetPosition()
        if evt.Dragging() and evt.LeftIsDown():
            slide = pos[1] // self._height
            if slide >= 0 and slide < self._nchnls:
                self._values[slide] = pos[0] / float(w)
                if self._slmap._res == "int":
                    self._labels = [int(self._slmap.get(x)) for x in self._values]
                else:
                    self._labels = [self._slmap.get(x) for x in self._values]
                self._command(self._key, self._labels)
            self.Refresh()

    def GetValue(self):
        return self._labels
class VuMeter(wx.Panel):
    """Multi-channel LED-style level meter.

    Two bitmaps are pre-rendered (a dimmed background strip and a lit
    foreground strip); on paint, the lit bitmap is clipped to each
    channel's current amplitude converted to a dB-like fraction.
    """

    def __init__(self, parent, size=(200, 11), numSliders=2, orient=wx.HORIZONTAL, pos=wx.DefaultPosition, style=0):
        # Force the cross-axis size to 5 pixels per channel plus a border.
        if orient == wx.HORIZONTAL:
            size = (size[0], numSliders * 5 + 1)
        else:
            size = (numSliders * 5 + 1, size[1])
        wx.Panel.__init__(self, parent, -1, pos=pos, size=size, style=style)
        self.parent = parent
        self.orient = orient
        self.SetBackgroundColour("#000000")
        self.SetBackgroundStyle(wx.BG_STYLE_CUSTOM)
        self.old_nchnls = numSliders
        self.numSliders = numSliders
        self.amplitude = [0] * self.numSliders
        self.createBitmaps()
        self.Bind(wx.EVT_PAINT, self.OnPaint)
        self.Bind(wx.EVT_SIZE, self.OnSize)
        self.Bind(wx.EVT_CLOSE, self.OnClose)

    def OnSize(self, evt):
        self.createBitmaps()
        wx.CallAfter(self.Refresh)

    def createBitmaps(self):
        """Pre-render the dimmed (back) and lit (front) LED strips."""
        w, h = self.GetSize()
        b = wx.EmptyBitmap(w, h)
        f = wx.EmptyBitmap(w, h)
        dcb = wx.MemoryDC(b)
        dcf = wx.MemoryDC(f)
        dcb.SetPen(wx.Pen("#000000", width=1))
        dcf.SetPen(wx.Pen("#000000", width=1))
        if self.orient == wx.HORIZONTAL:
            height = 6
            steps = int(w / 10.0 + 0.5)
        else:
            width = 6
            steps = int(h / 10.0 + 0.5)
        # Top sixth of the scale is yellow, the sixth above that red-ish,
        # the last LED full red, the rest green.
        bounds = int(steps / 6.0)
        for i in range(steps):
            if i == (steps - 1):
                dcb.SetBrush(wx.Brush("#770000"))
                dcf.SetBrush(wx.Brush("#FF0000"))
            elif i >= (steps - bounds):
                dcb.SetBrush(wx.Brush("#440000"))
                dcf.SetBrush(wx.Brush("#CC0000"))
            elif i >= (steps - (bounds * 2)):
                dcb.SetBrush(wx.Brush("#444400"))
                dcf.SetBrush(wx.Brush("#CCCC00"))
            else:
                dcb.SetBrush(wx.Brush("#004400"))
                dcf.SetBrush(wx.Brush("#00CC00"))
            if self.orient == wx.HORIZONTAL:
                dcb.DrawRectangle(i * 10, 0, 11, height)
                dcf.DrawRectangle(i * 10, 0, 11, height)
            else:
                ii = steps - 1 - i
                dcb.DrawRectangle(0, ii * 10, width, 11)
                dcf.DrawRectangle(0, ii * 10, width, 11)
        if self.orient == wx.HORIZONTAL:
            dcb.DrawLine(w - 1, 0, w - 1, height)
            dcf.DrawLine(w - 1, 0, w - 1, height)
        else:
            dcb.DrawLine(0, 0, width, 0)
            dcf.DrawLine(0, 0, width, 0)
        dcb.SelectObject(wx.NullBitmap)
        dcf.SelectObject(wx.NullBitmap)
        self.backBitmap = b
        self.bitmap = f

    def setNumSliders(self, numSliders):
        """Change the channel count, resizing self and the parent window."""
        w, h = self.GetSize()
        oldChnls = self.old_nchnls
        self.numSliders = numSliders
        self.amplitude = [0] * self.numSliders
        gap = (self.numSliders - oldChnls) * 5
        parentSize = self.parent.GetSize()
        if self.orient == wx.HORIZONTAL:
            self.SetSize((w, self.numSliders * 5 + 1))
            self.SetMinSize((w, 5 * self.numSliders + 1))
            self.parent.SetSize((parentSize[0], parentSize[1] + gap))
            self.parent.SetMinSize((parentSize[0], parentSize[1] + gap))
        else:
            self.SetSize((self.numSliders * 5 + 1, h))
            self.SetMinSize((5 * self.numSliders + 1, h))
            self.parent.SetSize((parentSize[0] + gap, parentSize[1]))
            self.parent.SetMinSize((parentSize[0] + gap, parentSize[1]))
        wx.CallAfter(self.Refresh)
        wx.CallAfter(self.parent.Layout)
        wx.CallAfter(self.parent.Refresh)

    def setRms(self, *args):
        """Store new RMS amplitudes and schedule a repaint.

        No argument resets the meter; a negative first value is ignored.
        """
        # Bug fix: the original tested args[0] before verifying that args
        # was non-empty, raising IndexError when called with no argument.
        if not args:
            self.amplitude = [0 for i in range(self.numSliders)]
        elif args[0] < 0:
            return
        else:
            self.amplitude = args
        wx.CallAfter(self.Refresh)

    def OnPaint(self, event):
        w, h = self.GetSize()
        dc = wx.AutoBufferedPaintDC(self)
        dc.SetBrush(wx.Brush("#000000"))
        dc.Clear()
        dc.DrawRectangle(0, 0, w, h)
        if self.orient == wx.HORIZONTAL:
            height = 6
            for i in range(self.numSliders):
                y = i * (height - 1)
                if i < len(self.amplitude):
                    # Map amplitude to a 0-1 fraction on a log (dB) scale.
                    db = math.log10(self.amplitude[i] + 0.00001) * 0.2 + 1.0
                    width = int(db * w)
                else:
                    width = 0
                dc.DrawBitmap(self.backBitmap, 0, y)
                if width > 0:
                    # Reveal only the lit portion of the strip.
                    dc.SetClippingRegion(0, y, width, height)
                    dc.DrawBitmap(self.bitmap, 0, y)
                    dc.DestroyClippingRegion()
        else:
            width = 6
            for i in range(self.numSliders):
                y = i * (width - 1)
                if i < len(self.amplitude):
                    db = math.log10(self.amplitude[i] + 0.00001) * 0.2 + 1.0
                    height = int(db * h)
                else:
                    height = 0
                dc.DrawBitmap(self.backBitmap, y, 0)
                if height > 0:
                    dc.SetClippingRegion(y, h - height, width, height)
                    dc.DrawBitmap(self.bitmap, y, 0)
                    dc.DestroyClippingRegion()
        event.Skip()

    def OnClose(self, evt):
        self.Destroy()
# TODO: BACKGROUND_COLOUR hard-coded all over the place in this class.
class RangeSlider(wx.Panel):
    """Two-handle slider base class used to select a value range.

    Holds the range state and mouse interaction: dragging the middle
    moves both handles together, clicking left/right of the middle
    moves the closest handle. Subclasses (e.g. HRangeSlider) implement
    the bitmaps and the painting.
    """

    def __init__(
        self,
        parent,
        minvalue,
        maxvalue,
        init=None,
        pos=(0, 0),
        size=(200, 15),
        valtype="int",
        log=False,
        function=None,
        backColour=None,
    ):
        wx.Panel.__init__(self, parent=parent, id=wx.ID_ANY, pos=pos, size=size, style=wx.NO_BORDER)
        if backColour:
            self.backgroundColour = backColour
        else:
            self.backgroundColour = BACKGROUND_COLOUR
        self.SetBackgroundStyle(wx.BG_STYLE_CUSTOM)
        self.SetBackgroundColour(self.backgroundColour)
        self.SetMinSize(self.GetSize())
        self.sliderHeight = 15
        self.borderWidth = 1
        self.action = None
        self.fillcolor = "#AAAAAA"  # SLIDER_BACK_COLOUR
        self.knobcolor = "#333333"  # SLIDER_KNOB_COLOUR
        # Derive a slightly darker handle colour from the knob colour.
        # Bug fix: the hex components were parsed as decimal (int("33")),
        # which happened to work for "#333333" but raises ValueError for
        # any colour containing hex letters (e.g. "#AAAAAA"); parse base 16.
        self.handlecolor = wx.Colour(
            int(self.knobcolor[1:3], 16) - 10, int(self.knobcolor[3:5], 16) - 10, int(self.knobcolor[5:7], 16) - 10
        )
        self.outFunction = function
        if valtype.startswith("i"):
            self.myType = int
        else:
            self.myType = float
        self.log = log
        self.SetRange(minvalue, maxvalue)
        self.handles = [minvalue, maxvalue]
        if init is not None:
            if type(init) in [list, tuple]:
                if len(init) == 1:
                    self.SetValue([init[0], init[0]])
                else:
                    self.SetValue([init[0], init[1]])
            else:
                self.SetValue([minvalue, maxvalue])
        else:
            self.SetValue([minvalue, maxvalue])
        self.Bind(wx.EVT_LEFT_DOWN, self.MouseDown)
        self.Bind(wx.EVT_RIGHT_DOWN, self.MouseRightDown)
        self.Bind(wx.EVT_LEFT_UP, self.MouseUp)
        self.Bind(wx.EVT_RIGHT_UP, self.MouseUp)
        self.Bind(wx.EVT_MOTION, self.MouseMotion)
        self.Bind(wx.EVT_PAINT, self.OnPaint)
        self.Bind(wx.EVT_SIZE, self.OnResize)

    def createSliderBitmap(self):
        """Build the masked bitmap that rounds the slider's corners."""
        w, h = self.GetSize()
        b = wx.EmptyBitmap(w, h)
        dc = wx.MemoryDC(b)
        dc.SetPen(wx.Pen(self.backgroundColour, width=1))
        dc.SetBrush(wx.Brush(self.backgroundColour))
        dc.DrawRectangle(0, 0, w, h)
        dc.SetBrush(wx.Brush("#777777"))
        dc.SetPen(wx.Pen("#FFFFFF", width=1))
        h2 = self.sliderHeight // 4
        dc.DrawRoundedRectangle(0, h2, w, self.sliderHeight, 4)
        dc.SelectObject(wx.NullBitmap)
        b.SetMaskColour("#777777")
        self.sliderMask = b

    def setFillColour(self, col1, col2):
        self.fillcolor = col1
        self.knobcolor = col2
        # NOTE(review): this assumes col2 is an indexable colour triple
        # (e.g. wx.Colour), unlike the "#RRGGBB" string set in __init__;
        # a string here would repeat characters instead of scaling.
        # Confirm what callers actually pass.
        self.handlecolor = wx.Colour(self.knobcolor[0] * 0.35, self.knobcolor[1] * 0.35, self.knobcolor[2] * 0.35)
        self.createSliderBitmap()

    def SetRange(self, minvalue, maxvalue):
        self.minvalue = minvalue
        self.maxvalue = maxvalue

    def scale(self, pos):
        """Convert pixel positions to values in [minvalue, maxvalue]."""
        tmp = []
        for p in pos:
            inter = tFromValue(p, 1, self.GetSize()[0] - 1)
            inter2 = interpFloat(inter, self.minvalue, self.maxvalue)
            tmp.append(inter2)
        return tmp

    def MouseRightDown(self, evt):
        size = self.GetSize()
        xpos = evt.GetPosition()[0]
        # Right-click anywhere between the handles starts a drag of the
        # whole selection.
        if xpos > (self.handlePos[0] - 5) and xpos < (self.handlePos[1] + 5):
            self.lastpos = xpos
            self.length = self.handlePos[1] - self.handlePos[0]
            self.action = "drag"
            self.handles = self.scale(self.handlePos)
            self.CaptureMouse()
            self.Refresh()

    def MouseDown(self, evt):
        size = self.GetSize()
        xpos = evt.GetPosition()[0]
        self.middle = (self.handlePos[1] - self.handlePos[0]) // 2 + self.handlePos[0]
        midrec = wx.Rect(self.middle - 7, 4, 15, size[1] - 9)
        if midrec.Contains(evt.GetPosition()):
            # Click on the middle grip: drag both handles.
            self.lastpos = xpos
            self.length = self.handlePos[1] - self.handlePos[0]
            self.action = "drag"
        elif xpos < self.middle:
            self.handlePos[0] = clamp(xpos, 1, self.handlePos[1])
            self.action = "left"
        elif xpos > self.middle:
            self.handlePos[1] = clamp(xpos, self.handlePos[0], size[0] - 1)
            self.action = "right"
        self.handles = self.scale(self.handlePos)
        self.CaptureMouse()
        self.Refresh()

    def MouseMotion(self, evt):
        size = self.GetSize()
        # Bug fix: the original condition was
        # "evt.Dragging() and self.HasCapture() and evt.LeftIsDown() or evt.RightIsDown()",
        # where the trailing "or" made the branch fire on any right-button
        # motion even without capture; parenthesize the button test.
        if evt.Dragging() and self.HasCapture() and (evt.LeftIsDown() or evt.RightIsDown()):
            xpos = evt.GetPosition()[0]
            if self.action == "drag":
                off = xpos - self.lastpos
                self.lastpos = xpos
                self.handlePos[0] = clamp(self.handlePos[0] + off, 1, size[0] - self.length)
                self.handlePos[1] = clamp(self.handlePos[1] + off, self.length, size[0] - 1)
            if self.action == "left":
                self.handlePos[0] = clamp(xpos, 1, self.handlePos[1] - 20)
            elif self.action == "right":
                self.handlePos[1] = clamp(xpos, self.handlePos[0] + 20, size[0] - 1)
            self.handles = self.scale(self.handlePos)
            self.Refresh()

    def MouseUp(self, evt):
        while self.HasCapture():
            self.ReleaseMouse()

    def OnResize(self, evt):
        self.createSliderBitmap()
        self.createBackgroundBitmap()
        self.clampHandlePos()
        self.Refresh()

    def clampHandlePos(self):
        """Recompute pixel positions of both handles from their values."""
        size = self.GetSize()
        tmp = []
        for handle in [min(self.handles), max(self.handles)]:
            pos = tFromValue(handle, self.minvalue, self.maxvalue) * size[0]
            pos = clamp(pos, 1, size[0] - 1)
            tmp.append(pos)
        self.handlePos = tmp
class HRangeSlider(RangeSlider):
    """Horizontal two-handle range slider.

    Values are stored internally on a linear scale; with ``log=True``
    they are converted with toLog/toExp on the way in and out.
    ``GetValue()`` always returns a ``[min, max]`` pair.
    """

    def __init__(
        self,
        parent,
        minvalue,
        maxvalue,
        init=None,
        pos=(0, 0),
        size=(200, 15),
        valtype="int",
        log=False,
        function=None,
        backColour=None,
    ):
        RangeSlider.__init__(self, parent, minvalue, maxvalue, init, pos, size, valtype, log, function, backColour)
        self.SetMinSize((50, 15))
        self.createSliderBitmap()
        # self.createBackgroundBitmap()
        self.clampHandlePos()

    def setSliderHeight(self, height):
        # Change the visual height of the groove and rebuild the mask.
        self.sliderHeight = height
        self.createSliderBitmap()
        # self.createBackgroundBitmap()
        self.Refresh()

    def createBackgroundBitmap(self):
        """Pre-render the gradient-filled groove background."""
        w, h = self.GetSize()
        self.backgroundBitmap = wx.EmptyBitmap(w, h)
        dc = wx.MemoryDC(self.backgroundBitmap)
        dc.SetBrush(wx.Brush(self.backgroundColour, wx.SOLID))
        dc.Clear()
        # Draw background
        dc.SetPen(wx.Pen(self.backgroundColour, width=self.borderWidth, style=wx.SOLID))
        dc.DrawRectangle(0, 0, w, h)
        # Draw inner part
        h2 = self.sliderHeight // 4
        rec = wx.Rect(0, h2, w, self.sliderHeight)
        dc.GradientFillLinear(rec, "#666666", self.fillcolor, wx.BOTTOM)
        # The mask rounds the groove's corners.
        dc.DrawBitmap(self.sliderMask, 0, 0, True)
        dc.SelectObject(wx.NullBitmap)

    def SetOneValue(self, value, which):
        """Set handle `which` (0 or 1) to `value`, clamped to the range."""
        self.lasthandles = self.handles
        value = clamp(value, self.minvalue, self.maxvalue)
        if self.log:
            t = toLog(value, self.minvalue, self.maxvalue)
            value = interpFloat(t, self.minvalue, self.maxvalue)
        else:
            t = tFromValue(value, self.minvalue, self.maxvalue)
            value = interpFloat(t, self.minvalue, self.maxvalue)
        if self.myType == int:
            value = int(value)
        self.handles[which] = value
        # OnResize recomputes the pixel positions and repaints.
        self.OnResize(None)

    def SetValue(self, values):
        """Set both handles from a [low, high] pair, clamped to the range."""
        self.lasthandles = self.handles
        tmp = []
        for val in values:
            value = clamp(val, self.minvalue, self.maxvalue)
            if self.log:
                t = toLog(value, self.minvalue, self.maxvalue)
                value = interpFloat(t, self.minvalue, self.maxvalue)
            else:
                t = tFromValue(value, self.minvalue, self.maxvalue)
                value = interpFloat(t, self.minvalue, self.maxvalue)
            if self.myType == int:
                value = int(value)
            tmp.append(value)
        self.handles = tmp
        self.OnResize(None)

    def GetValue(self):
        """Return the current [min, max] values (log-scaled if log=True)."""
        tmp = []
        for value in self.handles:
            if self.log:
                t = tFromValue(value, self.minvalue, self.maxvalue)
                val = toExp(t, self.minvalue, self.maxvalue)
            else:
                val = value
            if self.myType == int:
                val = int(val)
            tmp.append(val)
        tmp = [min(tmp), max(tmp)]
        return tmp

    def OnPaint(self, evt):
        w, h = self.GetSize()
        dc = wx.AutoBufferedPaintDC(self)
        # Draw background
        dc.SetBrush(wx.Brush(self.backgroundColour))
        dc.Clear()
        dc.SetPen(wx.Pen(self.backgroundColour))
        dc.DrawRectangle(0, 0, w, h)
        # dc.DrawBitmap(self.backgroundBitmap, 0, 0)
        # Draw handles
        dc.SetPen(wx.Pen(self.handlecolor, width=1, style=wx.SOLID))
        dc.SetBrush(wx.Brush(self.handlecolor))
        rec = (self.handlePos[0], 3, self.handlePos[1] - self.handlePos[0], h - 7)
        dc.DrawRoundedRectangle(rec[0], rec[1], rec[2], rec[3], 4)
        dc.SetPen(wx.Pen(self.fillcolor, width=1, style=wx.SOLID))
        dc.SetBrush(wx.Brush(self.fillcolor))
        # Middle grip used to drag the whole range.
        mid = (self.handlePos[1] - self.handlePos[0]) // 2 + self.handlePos[0]
        rec = (mid - 4, 4, 8, h - 9)
        dc.DrawRoundedRectangle(rec[0], rec[1], rec[2], rec[3], 3)
        # Send value
        # NOTE(review): the callback fires on every repaint, not only on
        # user-initiated changes — confirm this is intended.
        if self.outFunction:
            self.outFunction(self.GetValue())
######################################################################
### Control window for PyoObject
######################################################################
class Command:
    """Bind a fixed key to a two-argument callback.

    Calling the instance with a value invokes ``func(key, value)``;
    used to give each slider its own parameter name.
    """

    def __init__(self, func, key):
        self.func = func
        self.key = key

    def __call__(self, value):
        self.func(self.key, value)
class PyoObjectControl(wx.Frame):
    """Auto-generated control window for a PyoObject.

    Builds one ControlSlider (scalar parameters) or MultiSlider (list
    parameters) per entry of `map_list`. Audio-rate parameters are
    routed through a SigTo for smooth changes; `dataOnly` parameters
    (marked with " *") set the attribute directly.
    """

    def __init__(self, parent=None, obj=None, map_list=None):
        wx.Frame.__init__(self, parent)
        # Local import to avoid a circular dependency at module load time.
        from .controls import SigTo
        self.menubar = wx.MenuBar()
        self.fileMenu = wx.Menu()
        self.fileMenu.Append(9999, "Close\tCtrl+W", kind=wx.ITEM_NORMAL)
        self.fileMenu.Bind(wx.EVT_MENU, self._destroy, id=9999)
        self.fileMenu.AppendSeparator()
        # Ids 10000/10001 select 4-digit vs full precision in self.copy.
        self.fileMenu.Append(
            10000, "Copy all parameters to the clipboard (4 digits of precision)\tCtrl+C", kind=wx.ITEM_NORMAL
        )
        self.Bind(wx.EVT_MENU, self.copy, id=10000)
        self.fileMenu.Append(
            10001, "Copy all parameters to the clipboard (full precision)\tShift+Ctrl+C", kind=wx.ITEM_NORMAL
        )
        self.Bind(wx.EVT_MENU, self.copy, id=10001)
        self.menubar.Append(self.fileMenu, "&File")
        self.SetMenuBar(self.menubar)
        self.Bind(wx.EVT_CLOSE, self._destroy)
        self._obj = obj
        self._map_list = map_list
        self._sliders = []
        self._excluded = []
        self._values = {}
        self._displays = {}
        self._maps = {}
        self._sigs = {}
        panel = wx.Panel(self)
        panel.SetBackgroundColour(BACKGROUND_COLOUR)
        mainBox = wx.BoxSizer(wx.VERTICAL)
        self.box = wx.FlexGridSizer(10, 2, 5, 5)
        for i, m in enumerate(self._map_list):
            key, init, mini, maxi, scl, res, dataOnly = m.name, m.init, m.min, m.max, m.scale, m.res, m.dataOnly
            # filters PyoObjects
            if type(init) not in [list, float, int]:
                self._excluded.append(key)
            else:
                self._maps[key] = m
                # label (param name)
                if dataOnly:
                    label = wx.StaticText(panel, -1, key + " *")
                else:
                    label = wx.StaticText(panel, -1, key)
                # create and pack slider
                if type(init) != list:
                    # Scalar parameter: single ControlSlider.
                    if scl == "log":
                        scl = True
                    else:
                        scl = False
                    if res == "int":
                        res = True
                    else:
                        res = False
                    self._sliders.append(
                        ControlSlider(
                            panel,
                            mini,
                            maxi,
                            init,
                            log=scl,
                            size=(300, 16),
                            outFunction=Command(self.setval, key),
                            integer=res,
                            ctrllabel=key,
                        )
                    )
                    self.box.AddMany([(label, 0, wx.LEFT, 5), (self._sliders[-1], 1, wx.EXPAND | wx.LEFT, 5)])
                else:
                    # List parameter: one MultiSlider row per channel.
                    self._sliders.append(MultiSlider(panel, init, key, self.setval, m, ctrllabel=key))
                    self.box.AddMany([(label, 0, wx.LEFT, 5), (self._sliders[-1], 1, wx.EXPAND | wx.LEFT, 5)])
                # set obj attribute to PyoObject SigTo
                if not dataOnly:
                    self._values[key] = init
                    self._sigs[key] = SigTo(init, 0.025, init)
                    # Keep the SigTo streams ordered right after the
                    # controlled object's streams in the server.
                    refStream = self._obj.getBaseObjects()[0]._getStream()
                    server = self._obj.getBaseObjects()[0].getServer()
                    for k in range(len(self._sigs[key].getBaseObjects())):
                        curStream = self._sigs[key].getBaseObjects()[k]._getStream()
                        server.changeStreamPosition(refStream, curStream)
                    setattr(self._obj, key, self._sigs[key])
        self.box.AddGrowableCol(1, 1)
        mainBox.Add(self.box, 1, wx.EXPAND | wx.TOP | wx.BOTTOM | wx.RIGHT, 10)
        panel.SetSizerAndFit(mainBox)
        self.SetClientSize(panel.GetSize())
        self.SetMinSize(self.GetSize())
        self.SetMaxSize((-1, self.GetSize()[1]))

    def _destroy(self, event):
        # Restore plain values on the controlled object and drop the SigTos.
        for m in self._map_list:
            key = m.name
            if key not in self._excluded and key in self._values:
                setattr(self._obj, key, self._values[key])
                del self._sigs[key]
        self.Destroy()

    def setval(self, key, x):
        # Audio-rate parameters go through their SigTo; dataOnly
        # parameters are set directly on the object.
        if key in self._values:
            self._values[key] = x
            setattr(self._sigs[key], "value", x)
        else:
            setattr(self._obj, key, x)

    def copy(self, evt):
        """Copy "name=value" pairs for all parameters to the clipboard.

        Menu id 10000 formats with 4-digit precision, 10001 with full
        precision.
        """
        labels = [slider.getCtrlLabel() for slider in self._sliders]
        values = [slider.GetValue() for slider in self._sliders]
        if evt.GetId() == 10000:
            pstr = ""
            for i in range(len(labels)):
                pstr += "%s=" % labels[i]
                if type(values[i]) == list:
                    pstr += "["
                    pstr += ", ".join(["%.4f" % val for val in values[i]])
                    pstr += "]"
                else:
                    pstr += "%.4f" % values[i]
                if i < (len(labels) - 1):
                    pstr += ", "
        else:
            pstr = ""
            for i in range(len(labels)):
                pstr += "%s=" % labels[i]
                if type(values[i]) == list:
                    pstr += "["
                    pstr += ", ".join([str(val) for val in values[i]])
                    pstr += "]"
                else:
                    pstr += str(values[i])
                if i < (len(labels) - 1):
                    pstr += ", "
        data = wx.TextDataObject(pstr)
        if wx.TheClipboard.Open():
            wx.TheClipboard.Clear()
            wx.TheClipboard.SetData(data)
            wx.TheClipboard.Close()
######################################################################
### View window for PyoTableObject
######################################################################
class ViewTable(wx.Frame):
    """Top-level window displaying a PyoTableObject waveform."""

    def __init__(self, parent, samples=None, tableclass=None, object=None):
        wx.Frame.__init__(self, parent, size=(500, 200))
        self.SetMinSize((300, 150))
        self.tableclass = tableclass
        self.object = object
        # File menu with a single Close entry.
        bar = wx.MenuBar()
        file_menu = wx.Menu()
        close_item = file_menu.Append(-1, "Close\tCtrl+W", kind=wx.ITEM_NORMAL)
        self.Bind(wx.EVT_MENU, self._destroy, close_item)
        bar.Append(file_menu, "&File")
        self.SetMenuBar(bar)
        self.Bind(wx.EVT_CLOSE, self._destroy)
        # Waveform panel fills the whole client area.
        self.panel = wx.Panel(self)
        self.panel.SetBackgroundColour(BACKGROUND_COLOUR)
        self.wavePanel = ViewTablePanel(self.panel, object)
        self.box = wx.BoxSizer(wx.VERTICAL)
        self.box.Add(self.wavePanel, 1, wx.EXPAND | wx.ALL, 5)
        self.panel.SetSizerAndFit(self.box)
        self.update(samples)

    def update(self, samples):
        """Forward new sample points to the wave panel."""
        self.wavePanel.draw(samples)

    def _destroy(self, evt):
        # Unregister from the table object before closing the window.
        self.object._setViewFrame(None)
        self.Destroy()
class ViewTablePanel(wx.Panel):
    """Panel drawing a table's waveform as a poly-line."""

    def __init__(self, parent, obj):
        wx.Panel.__init__(self, parent)
        self.SetBackgroundStyle(wx.BG_STYLE_CUSTOM)
        self.obj = obj
        self.samples = []
        self.Bind(wx.EVT_PAINT, self.OnPaint)
        self.Bind(wx.EVT_SIZE, self.OnSize)
        # Windows and Linux need an explicitly buffered DC to avoid flicker.
        needs_buffering = sys.platform == "win32" or sys.platform.startswith("linux")
        if needs_buffering:
            self.dcref = wx.BufferedPaintDC
        else:
            self.dcref = wx.PaintDC

    def draw(self, samples):
        """Store new sample points and schedule a repaint."""
        self.samples = samples
        wx.CallAfter(self.Refresh)

    def OnPaint(self, evt):
        width, height = self.GetSize()
        dc = self.dcref(self)
        gc = wx.GraphicsContext_Create(dc)
        # White background with a light grey border.
        dc.SetBrush(wx.Brush("#FFFFFF"))
        dc.SetPen(wx.Pen("#BBBBBB", width=1, style=wx.SOLID))
        dc.Clear()
        dc.DrawRectangle(0, 0, width, height)
        # Waveform in black on the antialiased graphics context.
        gc.SetPen(wx.Pen("#000000", width=1, style=wx.SOLID))
        gc.SetBrush(wx.Brush("#FFFFFF"))
        if len(self.samples) > 1:
            gc.DrawLines(self.samples)
        # Zero axis through the vertical middle.
        dc.DrawLine(0, height // 2 + 1, width, height // 2 + 1)

    def OnSize(self, evt):
        # Ask the table to regenerate points for the new panel size.
        wx.CallAfter(self.obj.refreshView)
class SndViewTable(wx.Frame):
    """Top-level window displaying a sound table with a horizontal zoom
    range slider below the waveform."""

    def __init__(self, parent, obj=None, tableclass=None, mouse_callback=None):
        wx.Frame.__init__(self, parent, size=(500, 250))
        self.SetMinSize((300, 150))
        self.menubar = wx.MenuBar()
        self.fileMenu = wx.Menu()
        closeItem = self.fileMenu.Append(-1, "Close\tCtrl+W", kind=wx.ITEM_NORMAL)
        self.Bind(wx.EVT_MENU, self._destroy, closeItem)
        self.menubar.Append(self.fileMenu, "&File")
        self.SetMenuBar(self.menubar)
        self.Bind(wx.EVT_CLOSE, self._destroy)
        self.obj = obj
        self.chnls = len(self.obj)
        # Full table duration in seconds; used to map zoom fractions.
        self.dur = self.obj.getDur(False)
        self.panel = wx.Panel(self)
        self.panel.SetBackgroundColour(BACKGROUND_COLOUR)
        self.box = wx.BoxSizer(wx.VERTICAL)
        self.wavePanel = SndViewTablePanel(self.panel, obj, mouse_callback)
        self.box.Add(self.wavePanel, 1, wx.EXPAND | wx.ALL, 5)
        # Zoom slider selecting the visible [begin, end] fraction.
        self.zoomH = HRangeSlider(
            self.panel,
            minvalue=0,
            maxvalue=1,
            init=None,
            pos=(0, 0),
            size=(200, 15),
            valtype="float",
            log=False,
            function=self.setZoomH,
        )
        self.box.Add(self.zoomH, 0, wx.EXPAND | wx.LEFT | wx.RIGHT, 5)
        self.panel.SetSizer(self.box)

    def setZoomH(self, values):
        # Convert normalized zoom fractions to seconds and redraw.
        self.wavePanel.setBegin(self.dur * values[0])
        self.wavePanel.setEnd(self.dur * values[1])
        self.update()

    def update(self):
        """Regenerate the waveform image for the current zoom window."""
        self.wavePanel.setImage()

    def _destroy(self, evt):
        # Unregister from the table object before closing the window.
        self.obj._setViewFrame(None)
        self.Destroy()
class SndViewTablePanel(wx.Panel):
    """Waveform display panel for a sound table, with zoom and selection.

    Left mouse reports a normalized (x, y) position through
    `mouse_callback`. Right mouse creates (drag), moves (shift-drag) or
    clears (cmd/ctrl-click) a selection reported through
    `select_callback` as a normalized (start, stop) pair.
    """

    def __init__(self, parent, obj=None, mouse_callback=None, select_callback=None):
        wx.Panel.__init__(self, parent)
        self.SetBackgroundStyle(wx.BG_STYLE_CUSTOM)
        self.Bind(wx.EVT_PAINT, self.OnPaint)
        self.Bind(wx.EVT_LEFT_DOWN, self.OnMouseDown)
        self.Bind(wx.EVT_LEFT_UP, self.OnMouseUp)
        self.Bind(wx.EVT_RIGHT_DOWN, self.OnRightDown)
        self.Bind(wx.EVT_RIGHT_UP, self.OnMouseUp)
        self.Bind(wx.EVT_MOTION, self.OnMotion)
        self.Bind(wx.EVT_SIZE, self.OnSize)
        # When True, OnPaint reuses the cached background bitmap instead
        # of rebuilding it (selection-only repaint).
        self.refresh_from_selection = False
        self.background_bitmap = None
        self.obj = obj
        self.selstart = self.selend = self.movepos = None
        self.moveSelection = False
        self.createSelection = False
        # Visible window in seconds, set by the zoom slider.
        self.begin = 0
        if self.obj is not None:
            self.chnls = len(self.obj)
            self.end = self.obj.getDur(False)
        else:
            self.chnls = 1
            self.end = 1.0
        self.img = [[]]
        self.mouse_callback = mouse_callback
        self.select_callback = select_callback
        if sys.platform == "win32" or sys.platform.startswith("linux"):
            self.dcref = wx.BufferedPaintDC
        else:
            self.dcref = wx.PaintDC
        self.setImage()

    def getDur(self):
        """Return the table duration in seconds (1.0 when no object)."""
        if self.obj is not None:
            return self.obj.getDur(False)
        else:
            return 1.0

    def resetSelection(self):
        """Clear the selection and notify the callback with (0, 1)."""
        self.selstart = self.selend = None
        if self.background_bitmap is not None:
            self.refresh_from_selection = True
            self.Refresh()
        if self.select_callback is not None:
            self.select_callback((0.0, 1.0))

    def setSelection(self, start, stop):
        """Set the normalized selection and notify the callback."""
        self.selstart = start
        self.selend = stop
        if self.background_bitmap is not None:
            self.refresh_from_selection = True
            self.Refresh()
        if self.select_callback is not None:
            self.select_callback((self.selstart, self.selend))

    def setBegin(self, x):
        self.begin = x

    def setEnd(self, x):
        self.end = x

    def setImage(self):
        # Regenerate waveform points for the current size and zoom window.
        if self.obj is not None:
            self.img = self.obj.getViewTable(self.GetSize(), self.begin, self.end)
            wx.CallAfter(self.Refresh)

    def clipPos(self, pos):
        """Clip a normalized (x, y) to [0, 1] and rescale x into the
        currently zoomed window of the full table duration."""
        if pos[0] < 0.0:
            x = 0.0
        elif pos[0] > 1.0:
            x = 1.0
        else:
            x = pos[0]
        if pos[1] < 0.0:
            y = 0.0
        elif pos[1] > 1.0:
            y = 1.0
        else:
            y = pos[1]
        if self.obj is not None:
            x = x * ((self.end - self.begin) / self.obj.getDur(False)) + (self.begin / self.obj.getDur(False))
        return (x, y)

    def _normPos(self, evt):
        # Shared by the three mouse handlers (the original duplicated this
        # block in each one): convert an event position to normalized
        # (x, y), y inverted so the bottom edge is 0, then clip/zoom-adjust.
        size = self.GetSize()
        pos = evt.GetPosition()
        if pos[1] <= 0:
            pos = (float(pos[0]) / size[0], 1.0)
        else:
            pos = (float(pos[0]) / size[0], 1.0 - (float(pos[1]) / size[1]))
        return self.clipPos(pos)

    def OnMouseDown(self, evt):
        pos = self._normPos(evt)
        if self.mouse_callback is not None:
            self.mouse_callback(pos)
        self.CaptureMouse()

    def OnRightDown(self, evt):
        pos = self._normPos(evt)
        if evt.ShiftDown():
            # Shift-right-drag moves an existing selection.
            if self.selstart is not None and self.selend is not None:
                self.moveSelection = True
                self.movepos = pos[0]
        elif evt.CmdDown():
            # Cmd/Ctrl-right-click clears the selection.
            self.selstart = self.selend = None
            self.refresh_from_selection = True
            self.Refresh()
            if self.select_callback is not None:
                self.select_callback((0.0, 1.0))
        else:
            # Plain right-drag starts a new selection.
            self.createSelection = True
            self.selstart = pos[0]
        self.CaptureMouse()

    def OnMotion(self, evt):
        if self.HasCapture():
            pos = self._normPos(evt)
            if evt.LeftIsDown():
                if self.mouse_callback is not None:
                    self.mouse_callback(pos)
            elif evt.RightIsDown():
                refresh = False
                if self.createSelection:
                    self.selend = pos[0]
                    refresh = True
                elif self.moveSelection:
                    diff = pos[0] - self.movepos
                    self.movepos = pos[0]
                    self.selstart += diff
                    self.selend += diff
                    refresh = True
                if refresh:
                    self.refresh_from_selection = True
                    self.Refresh()
                    if self.select_callback is not None:
                        self.select_callback((self.selstart, self.selend))

    def OnMouseUp(self, evt):
        if self.HasCapture():
            self.ReleaseMouse()
        self.createSelection = self.moveSelection = False

    def create_background(self):
        """Render waveform, zero axes and time ticks into a cached bitmap."""
        w, h = self.GetSize()
        self.background_bitmap = wx.EmptyBitmap(w, h)
        dc = wx.MemoryDC(self.background_bitmap)
        gc = wx.GraphicsContext_Create(dc)
        dc.SetBrush(wx.Brush("#FFFFFF"))
        dc.Clear()
        dc.DrawRectangle(0, 0, w, h)
        off = h // self.chnls // 2
        gc.SetPen(wx.Pen("#000000", width=1, style=wx.SOLID))
        gc.SetBrush(wx.Brush("#FFFFFF", style=wx.TRANSPARENT))
        dc.SetTextForeground("#444444")
        # Bug fix: the original used 'sys.platform in "darwin"', a substring
        # test, instead of the equality test used everywhere else in this file.
        if sys.platform == "darwin":
            font, ptsize = dc.GetFont(), dc.GetFont().GetPointSize()
            font.SetPointSize(ptsize - 3)
            dc.SetFont(font)
        else:
            font = dc.GetFont()
            font.SetPointSize(8)
            dc.SetFont(font)
        # Pick a time-label precision that fits the tick spacing.
        tickstep = w // 10
        if tickstep < 40:
            timelabel = "%.1f"
        elif tickstep < 80:
            timelabel = "%.2f"
        elif tickstep < 120:
            timelabel = "%.3f"
        else:
            timelabel = "%.4f"
        timestep = (self.end - self.begin) * 0.1
        for i, samples in enumerate(self.img):
            y = h // self.chnls * i
            if len(samples):
                gc.DrawLines(samples)
            dc.SetPen(wx.Pen("#888888", width=1, style=wx.DOT))
            dc.DrawLine(0, y + off, w, y + off)
            for j in range(10):
                dc.SetPen(wx.Pen("#888888", width=1, style=wx.DOT))
                dc.DrawLine(j * tickstep, 0, j * tickstep, h)
                dc.DrawText(timelabel % (self.begin + j * timestep), j * tickstep + 2, h - y - 12)
            dc.SetPen(wx.Pen("#000000", width=1))
            dc.DrawLine(0, h - y, w, h - y)
        dc.SelectObject(wx.NullBitmap)

    def OnPaint(self, evt):
        w, h = self.GetSize()
        dc = self.dcref(self)
        gc = wx.GraphicsContext_Create(dc)
        dc.SetBrush(wx.Brush("#FFFFFF"))
        dc.Clear()
        dc.DrawRectangle(0, 0, w, h)
        if not self.refresh_from_selection:
            self.create_background()
        dc.DrawBitmap(self.background_bitmap, 0, 0)
        if self.selstart is not None and self.selend is not None:
            # Overlay the selection as a translucent black rectangle.
            gc.SetPen(wx.Pen(wx.Colour(0, 0, 0, 64)))
            gc.SetBrush(wx.Brush(wx.Colour(0, 0, 0, 64)))
            if self.obj is not None:
                dur = self.obj.getDur(False)
            else:
                dur = 1.0
            selstartabs = min(self.selstart, self.selend) * dur
            selendabs = max(self.selstart, self.selend) * dur
            # Clip the selection rectangle to the visible zoom window.
            if selstartabs < self.begin:
                startpix = 0
            else:
                startpix = ((selstartabs - self.begin) / (self.end - self.begin)) * w
            if selendabs > self.end:
                endpix = w
            else:
                endpix = ((selendabs - self.begin) / (self.end - self.begin)) * w
            gc.DrawRectangle(startpix, 0, endpix - startpix, h)
        self.refresh_from_selection = False

    def OnSize(self, evt):
        wx.CallAfter(self.setImage)
######################################################################
### View window for PyoMatrixObject
######################################################################
class ViewMatrixBase(wx.Frame):
    """Fixed-size frame base class for PyoMatrixObject viewers.

    Subclasses implement setImage() and OnPaint().
    """

    def __init__(self, parent, size=None, object=None):
        wx.Frame.__init__(self, parent)
        self.object = object
        # File menu with a single Close entry.
        self.menubar = wx.MenuBar()
        self.fileMenu = wx.Menu()
        close_item = self.fileMenu.Append(-1, "Close\tCtrl+W", kind=wx.ITEM_NORMAL)
        self.Bind(wx.EVT_MENU, self._destroy, close_item)
        self.menubar.Append(self.fileMenu, "&File")
        self.SetMenuBar(self.menubar)
        self.Bind(wx.EVT_CLOSE, self._destroy)
        self.Bind(wx.EVT_PAINT, self.OnPaint)
        # Lock the client area to the matrix dimensions.
        self.SetClientSize(size)
        self.SetMinSize(self.GetSize())
        self.SetMaxSize(self.GetSize())

    def update(self, samples):
        """Rebuild the displayed image from new sample data."""
        self.setImage(samples)

    def _destroy(self, evt):
        # Unregister from the matrix object before closing the window.
        self.object._setViewFrame(None)
        self.Destroy()
class ViewMatrix(ViewMatrixBase):
    """Bitmap view of a PyoMatrixObject's raw samples."""

    def __init__(self, parent, samples=None, size=None, object=None):
        ViewMatrixBase.__init__(self, parent, size, object)
        self.size = size
        self.setImage(samples)

    def setImage(self, samples):
        # Wrap the raw byte buffer in a wx image, convert to a bitmap
        # and schedule a repaint.
        raw = wx.EmptyImage(self.size[0], self.size[1])
        raw.SetData(samples)
        self.img = wx.BitmapFromImage(raw)
        wx.CallAfter(self.Refresh)

    def OnPaint(self, evt):
        paint_dc = wx.PaintDC(self)
        paint_dc.DrawBitmap(self.img, 0, 0)
######################################################################
### Spectrum Display
######################################################################
class SpectrumDisplay(wx.Frame):
def __init__(self, parent, obj=None):
wx.Frame.__init__(self, parent, size=(600, 350))
self.SetMinSize((400, 240))
self.menubar = wx.MenuBar()
self.fileMenu = wx.Menu()
closeItem = self.fileMenu.Append(-1, "Close\tCtrl+W", kind=wx.ITEM_NORMAL)
self.Bind(wx.EVT_MENU, self._destroy, closeItem)
self.menubar.Append(self.fileMenu, "&File")
pollMenu = wx.Menu()
pollID = 20000
self.availableSpeeds = [0.01, 0.025, 0.05, 0.1, 0.25, 0.5, 1]
for speed in self.availableSpeeds:
pollMenu.Append(pollID, "%.3f" % speed, kind=wx.ITEM_RADIO)
if speed == 0.05:
pollMenu.Check(pollID, True)
self.Bind(wx.EVT_MENU, self.setPollTime, id=pollID)
pollID += 1
self.menubar.Append(pollMenu, "&Polling Speed")
self.SetMenuBar(self.menubar)
self.Bind(wx.EVT_CLOSE, self._destroy)
self.obj = obj
self.panel = wx.Panel(self)
self.panel.SetBackgroundColour(BACKGROUND_COLOUR)
self.mainBox = wx.BoxSizer(wx.VERTICAL)
self.toolBox = wx.BoxSizer(wx.HORIZONTAL)
if sys.platform == "darwin":
X_OFF = 24
else:
X_OFF = 16
if self.obj is None:
initgain = 0.0
self.channelNamesVisible = True
self.channelNames = []
else:
initgain = self.obj.gain
self.channelNamesVisible = self.obj.channelNamesVisible
self.channelNames = self.obj.channelNames
tw, th = self.GetTextExtent("Start")
self.activeTog = wx.ToggleButton(self.panel, -1, label="Start", size=(tw + X_OFF, th + 10))
self.activeTog.SetValue(1)
self.activeTog.Bind(wx.EVT_TOGGLEBUTTON, self.activate)
self.toolBox.Add(self.activeTog, 0, wx.TOP | wx.LEFT, 5)
tw, th = self.GetTextExtent("Freq Log")
self.freqTog = wx.ToggleButton(self.panel, -1, label="Freq Log", size=(tw + X_OFF, th + 10))
self.freqTog.SetValue(0)
self.freqTog.Bind(wx.EVT_TOGGLEBUTTON, self.setFreqScale)
self.toolBox.Add(self.freqTog, 0, wx.TOP | wx.LEFT, 5)
tw, th = self.GetTextExtent("Mag Log")
self.magTog = wx.ToggleButton(self.panel, -1, label="Mag Log", size=(tw + X_OFF, th + 10))
self.magTog.SetValue(1)
self.magTog.Bind(wx.EVT_TOGGLEBUTTON, self.setMagScale)
self.toolBox.Add(self.magTog, 0, wx.TOP | wx.LEFT, 5)
tw, th = self.GetTextExtent("Blackman 3-term")
self.winPopup = wx.Choice(
self.panel,
-1,
choices=[
"Rectangular",
"Hamming",
"Hanning",
"Bartlett",
"Blackman 3",
"Blackman-H 4",
"Blackman-H 7",
"Tuckey",
"Half-sine",
],
size=(tw + X_OFF, th + 10),
)
self.winPopup.SetSelection(2)
self.winPopup.Bind(wx.EVT_CHOICE, self.setWinType)
self.toolBox.Add(self.winPopup, 0, wx.TOP | wx.LEFT, 5)
tw, th = self.GetTextExtent("16384")
self.sizePopup = wx.Choice(
self.panel,
-1,
choices=["64", "128", "256", "512", "1024", "2048", "4096", "8192", "16384"],
size=(-1, th + 10),
)
self.sizePopup.SetSelection(4)
self.sizePopup.Bind(wx.EVT_CHOICE, self.setSize)
self.toolBox.Add(self.sizePopup, 0, wx.TOP | wx.LEFT, 5)
self.mainBox.Add(self.toolBox, 0, wx.EXPAND)
self.dispBox = wx.BoxSizer(wx.HORIZONTAL)
self.box = wx.BoxSizer(wx.VERTICAL)
self.spectrumPanel = SpectrumPanel(
self.panel,
len(self.obj),
self.obj.getLowfreq(),
self.obj.getHighfreq(),
self.obj.getFscaling(),
self.obj.getMscaling(),
)
self.box.Add(self.spectrumPanel, 1, wx.EXPAND | wx.LEFT | wx.RIGHT | wx.TOP, 5)
self.zoomH = HRangeSlider(
self.panel,
minvalue=0,
maxvalue=0.5,
init=None,
pos=(0, 0),
size=(200, 15),
valtype="float",
log=False,
function=self.setZoomH,
)
self.box.Add(self.zoomH, 0, wx.EXPAND | wx.LEFT | wx.RIGHT, 5)
self.dispBox.Add(self.box, 1, wx.EXPAND, 0)
self.gainSlider = ControlSlider(self.panel, -24, 24, initgain, outFunction=self.setGain, orient=wx.VERTICAL)
self.dispBox.Add(self.gainSlider, 0, wx.EXPAND | wx.TOP, 5)
self.dispBox.AddSpacer(5)
self.mainBox.Add(self.dispBox, 1, wx.EXPAND)
self.panel.SetSizer(self.mainBox)
def activate(self, evt):
if evt.GetInt() == 1:
self.obj.poll(1)
else:
self.obj.poll(0)
def setPollTime(self, evt):
value = self.availableSpeeds[evt.GetId() - 20000]
self.obj.polltime(value)
def setFreqScale(self, evt):
if evt.GetInt() == 1:
self.obj.setFscaling(1)
else:
self.obj.setFscaling(0)
def setMagScale(self, evt):
    """Toggle the magnitude axis between linear (0) and logarithmic (1)."""
    self.obj.setMscaling(1 if evt.GetInt() == 1 else 0)
def setWinType(self, evt):
    """Select the analysis window from the choice widget's selection index."""
    wintype = evt.GetInt()
    self.obj.wintype = wintype
def setSize(self, evt):
    """Set the FFT size: choice index 0 maps to 64, each step doubles it."""
    self.obj.size = 64 << evt.GetInt()
def setGain(self, gain):
    """Convert the slider's dB value to linear amplitude and apply it."""
    amp = pow(10.0, gain * 0.05)
    self.obj.setGain(amp)
def setZoomH(self, values):
    """Apply the horizontal zoom range to the analyzer and the display panel."""
    lo, hi = values[0], values[1]
    self.spectrumPanel.setLowFreq(self.obj.setLowbound(lo))
    self.spectrumPanel.setHighFreq(self.obj.setHighbound(hi))
    wx.CallAfter(self.spectrumPanel.Refresh)
def setDisplaySize(self, size):
    """Propagate the panel size (width, height) to the analyzer object."""
    width, height = size[0], size[1]
    self.obj.setWidth(width)
    self.obj.setHeight(height)
def update(self, points):
    """Hand freshly analyzed points over to the drawing panel."""
    self.spectrumPanel.setImage(points)
def setFscaling(self, x):
    """Mirror the analyzer's frequency-scaling mode on the drawing panel."""
    self.spectrumPanel.setFscaling(x)
    wx.CallAfter(self.spectrumPanel.Refresh)
def setMscaling(self, x):
    """Mirror the analyzer's magnitude-scaling mode on the drawing panel."""
    self.spectrumPanel.setMscaling(x)
    wx.CallAfter(self.spectrumPanel.Refresh)
def showChannelNames(self, visible):
    """Show or hide the per-channel legend and remember the setting."""
    self.spectrumPanel.showChannelNames(visible)
    self.channelNamesVisible = visible
def setChannelNames(self, names):
    """Store custom channel names and forward them to the drawing panel."""
    self.channelNames = names
    self.spectrumPanel.setChannelNames(names)
def _destroy(self, evt):
self.obj._setViewFrame(None)
self.Destroy()
# TODO: Adjust the font size according to the size of the panel.
class SpectrumPanel(wx.Panel):
    """Drawing surface for the spectrum display.

    Draws one magnitude curve per channel over a grid whose frequency axis
    (`fscaling`) and magnitude axis (`mscaling`) can each be linear (0) or
    logarithmic (1).
    """

    def __init__(
        self, parent, chnls, lowfreq, highfreq, fscaling, mscaling, pos=wx.DefaultPosition, size=wx.DefaultSize, style=0
    ):
        wx.Panel.__init__(self, parent, pos=pos, size=size, style=style)
        self.SetBackgroundStyle(wx.BG_STYLE_CUSTOM)
        self.SetMinSize((300, 100))
        self.Bind(wx.EVT_PAINT, self.OnPaint)
        self.Bind(wx.EVT_SIZE, self.OnSize)
        # A mono source still gets a 64-entry pen table.
        if chnls == 1:
            self.chnls = 64
        else:
            self.chnls = chnls
        # Inherit legend settings from the owning frame when reachable.
        try:
            self.channelNamesVisible = self.GetParent().GetParent().channelNamesVisible
        except:
            self.channelNamesVisible = True
        try:
            self.channelNames = self.GetParent().GetParent().channelNames
        except:
            self.channelNames = []
        self.img = None  # latest per-channel point lists; None until first setImage
        self.obj = None  # analyzer object, assigned externally (may stay None)
        self.lowfreq = lowfreq
        self.highfreq = highfreq
        self.fscaling = fscaling
        self.mscaling = mscaling
        self.setPens()
        # Buffered DC avoids flicker on win/linux; elsewhere a plain PaintDC is used.
        if sys.platform == "win32" or sys.platform.startswith("linux"):
            self.dcref = wx.BufferedPaintDC
        else:
            self.dcref = wx.PaintDC

    def OnSize(self, evt):
        """Propagate the new size to the owner frame and analyzer (best effort)."""
        try:
            self.GetParent().GetParent().setDisplaySize(self.GetSize())
        except:
            pass
        try:
            size = self.GetSize()
            self.obj.setWidth(size[0])
            self.obj.setHeight(size[1])
        except:
            pass
        self.Refresh()

    def setImage(self, points):
        """Store a shallow copy of the point lists and schedule a repaint."""
        self.img = [points[i] for i in range(len(points))]
        wx.CallAfter(self.Refresh)

    def setPens(self):
        """Create one pen and one semi-transparent brush per channel."""
        self.pens = []
        self.brushes = []
        for x in range(self.chnls):
            # Spread hues over two thirds of the color wheel.
            hue = rescale(x, xmin=0, xmax=self.chnls - 1, ymin=0, ymax=2.0 / 3)
            # NOTE(review): classic-wx spelling; ScopePanel.setPens below uses
            # the Phoenix form wx.Image.HSVValue — confirm which wx API level
            # this module targets, one of the two spellings cannot resolve.
            hsv = wx.Image_HSVValue(hue, 1.0, 0.6)
            rgb = wx.Image_HSVtoRGB(hsv)
            self.pens.append(wx.Pen(wx.Colour(rgb.red, rgb.green, rgb.blue), 1))
            self.brushes.append(wx.Brush(wx.Colour(rgb.red, rgb.green, rgb.blue, 128)))

    def setChnls(self, x):
        """Set the channel count (mono keeps a 64-entry table) and rebuild pens."""
        if x == 1:
            self.chnls = 64
        else:
            self.chnls = x
        self.setPens()

    def setFscaling(self, x):
        """Frequency-axis mode: 0 = linear, 1 = logarithmic."""
        self.fscaling = x

    def setMscaling(self, x):
        """Magnitude-axis mode: 0 = linear, 1 = logarithmic."""
        self.mscaling = x

    def setLowFreq(self, x):
        """Lower bound (Hz) of the displayed frequency range."""
        self.lowfreq = x

    def setHighFreq(self, x):
        """Upper bound (Hz) of the displayed frequency range."""
        self.highfreq = x

    def showChannelNames(self, visible):
        """Show or hide the per-channel legend."""
        self.channelNamesVisible = visible

    def setChannelNames(self, names):
        """Set custom channel names for the legend."""
        self.channelNames = names

    def OnPaint(self, evt):
        """Draw background, frequency grid, magnitude grid, legend and curves."""
        w, h = self.GetSize()
        dc = self.dcref(self)
        gc = wx.GraphicsContext_Create(dc)
        tw, th = dc.GetTextExtent("0")
        # background
        background = gc.CreatePath()
        background.AddRectangle(0, 0, w - 1, h - 1)
        gc.SetPen(wx.BLACK_PEN)
        gc.SetBrush(wx.WHITE_BRUSH)
        gc.DrawPath(background)
        dc.SetTextForeground("#555555")
        dc.SetPen(wx.Pen("#555555", style=wx.DOT))
        # frequency linear grid
        if not self.fscaling:
            text = str(int(self.lowfreq))
            tw, th = dc.GetTextExtent(text)
            step = (self.highfreq - self.lowfreq) / 8
            dc.DrawText(text, 2, 2)
            w8 = w // 8
            # 8 equal divisions labeled in Hz.
            for i in range(1, 8):
                pos = w8 * i
                dc.DrawLine(pos, th + 4, pos, h - 2)
                text = str(int(self.lowfreq + step * i))
                tw, th = dc.GetTextExtent(text)
                dc.DrawText(text, pos - tw // 2, 2)
        # frequency logarithmic grid
        else:
            # Clamp the low bound to 20 Hz so log10 stays well-behaved.
            if self.lowfreq < 20:
                lf = math.log10(20)
            else:
                lf = math.log10(self.lowfreq)
            hf = math.log10(self.highfreq)
            lrange = hf - lf
            mag = pow(10.0, math.floor(lf))
            # Pick the tick generator: multiplicative steps for very wide
            # ranges, otherwise one tick per step of the current decade.
            if lrange > 6:
                t = pow(10.0, math.ceil(lf))
                base = pow(10.0, math.floor(lrange / 6))

                def inc(t, floor_t):
                    return t * base - t

            else:
                t = math.ceil(pow(10.0, lf) / mag) * mag

                def inc(t, floor_t):
                    return pow(10.0, floor_t)

            majortick = int(math.log10(mag))
            while t <= pow(10, hf):
                # 1e-16 guards against log10 landing just below an integer.
                floor_t = int(math.floor(math.log10(t) + 1e-16))
                if majortick != floor_t:
                    # Entering a new decade: major, always-labeled tick.
                    majortick = floor_t
                    ticklabel = "1e%d" % majortick
                    ticklabel = str(int(float(ticklabel)))
                    tw, th = dc.GetTextExtent(ticklabel)
                else:
                    if hf - lf < 2:
                        # Narrow range: label only even-numbered minor ticks.
                        minortick = int(t / pow(10.0, majortick) + 0.5)
                        ticklabel = "%de%d" % (minortick, majortick)
                        ticklabel = str(int(float(ticklabel)))
                        tw, th = dc.GetTextExtent(ticklabel)
                        if not minortick % 2 == 0:
                            ticklabel = ""
                    else:
                        ticklabel = ""
                pos = int((math.log10(t) - lf) / lrange * w)
                if pos < (w - 25):
                    dc.DrawLine(pos, th + 4, pos, h - 2)
                    dc.DrawText(ticklabel, pos - tw // 2, 2)
                t += inc(t, floor_t)
        # magnitude linear grid
        if not self.mscaling:
            # 1.0 amplitude sits at 3/4 of the panel height; 0.1 per division.
            h4 = h * 0.75
            step = h4 * 0.1
            for i in range(1, 11):
                pos = int(h - i * step)
                text = "%.1f" % (i * 0.1)
                tw, th = dc.GetTextExtent(text)
                dc.DrawText(text, w - tw - 2, pos - th // 2)
                dc.DrawLine(0, pos, w - tw - 4, pos)
                # Overdraw a slightly shorter solid line on top of the dotted one.
                dc.SetPen(wx.Pen("#555555", style=wx.SOLID))
                dc.DrawLine(0, pos, w - tw - 6, pos)
                dc.SetPen(wx.Pen("#555555", style=wx.DOT))
            i += 1
            # Continue the grid above 1.0 up to the top of the panel.
            while i * step < (h - th - 5):
                pos = int(h - i * step)
                text = "%.1f" % (i * 0.1)
                tw, th = dc.GetTextExtent(text)
                dc.DrawText(text, w - tw - 2, pos - th // 2)
                dc.DrawLine(0, pos, w - tw - 6, pos)
                i += 1
        # magnitude logarithmic grid
        else:
            mw, mh = dc.GetTextExtent("-54")
            h4 = h * 0.75
            step = h4 * 0.1
            # 6 dB per division, 0 dB at the reference line.
            for i in range(1, 11):
                pos = int(h - i * step)
                mval = int((10 - i) * -6.0)
                if mval == -0:
                    mval = 0
                text = "%d" % mval
                tw, th = dc.GetTextExtent(text)
                dc.DrawText(text, w - tw - 2, pos - th // 2)
                dc.DrawLine(0, pos, w - mw - 6, pos)
                dc.SetPen(wx.Pen("#555555", style=wx.SOLID))
                dc.DrawLine(0, pos, w - mw - 4, pos)
                dc.SetPen(wx.Pen("#555555", style=wx.DOT))
            i += 1
            # Continue the grid above 0 dB up to the top of the panel.
            while i * step < (h - th - 5):
                pos = int(h - i * step)
                text = "%d" % int((10 - i) * -6.0)
                tw, th = dc.GetTextExtent(text)
                dc.DrawText(text, w - tw - 2, pos - th // 2)
                dc.DrawLine(0, pos, w - mw - 6, pos)
                i += 1
        # spectrum
        if self.img is not None:
            last_tw = tw
            # legend
            if len(self.img) > 1 and self.channelNamesVisible:
                if not self.channelNames:
                    # No custom names: default "chan N" labels.
                    tw, th = dc.GetTextExtent("chan 8")
                    for i in range(len(self.img)):
                        dc.SetTextForeground(self.pens[i % self.chnls].GetColour())
                        dc.DrawText("chan %d" % (i + 1), w - tw - 20 - last_tw, i * th + th + 7)
                else:
                    # Width the legend to the longest custom name.
                    numChars = max([len(x) for x in self.channelNames])
                    tw, th = dc.GetTextExtent("0" * numChars)
                    for i in range(len(self.img)):
                        dc.SetTextForeground(self.pens[i % self.chnls].GetColour())
                        if i < len(self.channelNames):
                            dc.DrawText(self.channelNames[i], w - tw - 20 - last_tw, i * th + th + 7)
                        else:
                            dc.DrawText("chan %d" % (i + 1), w - tw - 20 - last_tw, i * th + th + 7)
            # channel spectrums
            for i, samples in enumerate(self.img):
                gc.SetPen(self.pens[i % self.chnls])
                gc.SetBrush(self.brushes[i % self.chnls])
                gc.DrawLines(samples)
######################################################################
## Scope Display
######################################################################
class ScopeDisplay(wx.Frame):
    """Window hosting an oscilloscope view of an audio object: a start/stop
    toggle and window-length slider on top, a ScopePanel for the waveforms
    and a vertical gain slider on the right."""

    def __init__(self, parent, obj=None):
        wx.Frame.__init__(self, parent, size=(600, 350))
        self.SetMinSize((400, 240))
        self.menubar = wx.MenuBar()
        self.fileMenu = wx.Menu()
        closeItem = self.fileMenu.Append(-1, "Close\tCtrl+W", kind=wx.ITEM_NORMAL)
        self.Bind(wx.EVT_MENU, self._destroy, closeItem)
        self.menubar.Append(self.fileMenu, "&File")
        self.SetMenuBar(self.menubar)
        self.Bind(wx.EVT_CLOSE, self._destroy)
        self.obj = obj  # audio-side object driving this display
        gain = self.obj.gain
        length = self.obj.length
        self.panel = wx.Panel(self)
        self.panel.SetBackgroundColour(BACKGROUND_COLOUR)
        self.mainBox = wx.BoxSizer(wx.VERTICAL)
        self.toolBox = wx.BoxSizer(wx.HORIZONTAL)
        # Toggle buttons need extra horizontal padding on macOS.
        if sys.platform == "darwin":
            X_OFF = 24
        else:
            X_OFF = 16
        tw, th = self.GetTextExtent("Start")
        self.activeTog = wx.ToggleButton(self.panel, -1, label="Start", size=(tw + X_OFF, th + 10))
        self.activeTog.SetValue(1)
        self.activeTog.Bind(wx.EVT_TOGGLEBUTTON, self.activate)
        self.toolBox.Add(self.activeTog, 0, wx.TOP | wx.LEFT | wx.RIGHT, 5)
        self.toolBox.AddSpacer(10)
        self.toolBox.Add(wx.StaticText(self.panel, -1, label="Window length (ms):"), 0, wx.TOP, 11)
        # Slider works in milliseconds; setLength converts to seconds.
        self.lenSlider = ControlSlider(self.panel, 10, 1000, length * 1000, log=True, outFunction=self.setLength)
        self.toolBox.Add(self.lenSlider, 1, wx.TOP | wx.LEFT | wx.RIGHT, 11)
        self.toolBox.AddSpacer(40)
        self.mainBox.Add(self.toolBox, 0, wx.EXPAND)
        self.dispBox = wx.BoxSizer(wx.HORIZONTAL)
        self.box = wx.BoxSizer(wx.VERTICAL)
        self.scopePanel = ScopePanel(self.panel, self.obj)
        self.box.Add(self.scopePanel, 1, wx.EXPAND | wx.LEFT | wx.RIGHT, 5)
        self.dispBox.Add(self.box, 1, wx.EXPAND | wx.BOTTOM, 5)
        # Gain slider is in dB; the object stores linear gain.
        self.gainSlider = ControlSlider(
            self.panel, -24, 24, 20.0 * math.log10(gain), outFunction=self.setGain, orient=wx.VERTICAL
        )
        self.dispBox.Add(self.gainSlider, 0, wx.EXPAND | wx.BOTTOM, 5)
        self.dispBox.AddSpacer(5)
        self.mainBox.Add(self.dispBox, 1, wx.EXPAND)
        self.panel.SetSizer(self.mainBox)

    def activate(self, evt):
        """Start (1) or stop (0) polling of the scope."""
        self.obj.poll(evt.GetInt())

    def setLength(self, length):
        """Slider callback: convert ms to seconds and apply to object and panel."""
        length *= 0.001
        self.obj.setLength(length)
        self.scopePanel.setLength(length)

    def setGain(self, gain):
        """Slider callback: convert dB to linear amplitude and apply."""
        gain = pow(10.0, gain * 0.05)
        self.scopePanel.setGain(gain)
        self.obj.setGain(gain)

    def update(self, points):
        """Receive new waveform points and push them to the scope panel."""
        self.scopePanel.setImage(points)

    def showChannelNames(self, visible):
        """Show or hide the channel legend in the scope panel."""
        self.scopePanel.showChannelNames(visible)

    def setChannelNames(self, names):
        """Forward custom channel names to the scope panel."""
        self.scopePanel.setChannelNames(names)

    def _destroy(self, evt):
        """Detach from the audio object and destroy the window."""
        self.obj._setViewFrame(None)
        self.Destroy()
class ScopePanel(wx.Panel):
    """Oscilloscope drawing surface.

    Draws one waveform per channel over an amplitude/time grid, with an
    optional per-channel legend.  Amplitude labels account for the current
    gain; time labels span the current window length.
    """

    def __init__(self, parent, obj=None, pos=wx.DefaultPosition, size=wx.DefaultSize, style=0):
        wx.Panel.__init__(self, parent, pos=pos, size=size, style=style)
        self.SetBackgroundStyle(wx.BG_STYLE_CUSTOM)
        self.SetMinSize((300, 100))
        self.Bind(wx.EVT_PAINT, self.OnPaint)
        self.Bind(wx.EVT_SIZE, self.OnSize)
        self.img = [[]]  # per-channel lists of points to draw
        self.obj = obj
        if self.obj is not None:
            self.gain = self.obj.gain
            self.length = self.obj.length
            self.chnls = len(self.obj)
            self.channelNamesVisible = self.obj.channelNamesVisible
            self.channelNames = self.obj.channelNames
        else:
            self.gain = 1
            self.length = 0.05
            self.chnls = 64
            self.channelNamesVisible = True
            # Bug fix: this assignment previously rebound channelNamesVisible
            # to an empty list, leaving self.channelNames undefined and
            # breaking the legend code in OnPaint when no object is attached.
            self.channelNames = []
        self.setPens()
        # Buffered DC avoids flicker on win/linux.
        if sys.platform == "win32" or sys.platform.startswith("linux"):
            self.dcref = wx.BufferedPaintDC
        else:
            self.dcref = wx.PaintDC

    def OnSize(self, evt):
        """Tell the audio object the new panel size (best effort)."""
        try:
            size = self.GetSize()
            self.obj.setWidth(size[0])
            self.obj.setHeight(size[1])
        except:
            pass
        wx.CallAfter(self.Refresh)

    def setChnls(self, x):
        """Set the channel count (mono keeps a 64-entry table) and rebuild pens."""
        if x == 1:
            self.chnls = 64
        else:
            self.chnls = x
        self.setPens()

    def setPens(self):
        """Create one colored pen per channel, hues spread over 2/3 of the wheel."""
        self.pens = []
        if self.chnls < 2:
            hsv = wx.Image.HSVValue(0.0, 1.0, 0.6)
            rgb = wx.Image.HSVtoRGB(hsv)
            self.pens.append(wx.Pen(wx.Colour(rgb.red, rgb.green, rgb.blue), 1))
        else:
            for x in range(self.chnls):
                hue = rescale(x, xmin=0, xmax=self.chnls - 1, ymin=0, ymax=2.0 / 3)
                hsv = wx.Image.HSVValue(hue, 0.99, 0.6)
                rgb = wx.Image.HSVtoRGB(hsv)
                self.pens.append(wx.Pen(wx.Colour(rgb.red, rgb.green, rgb.blue), 1))

    def setGain(self, gain):
        """Store the linear gain used to label the amplitude grid."""
        self.gain = gain

    def setLength(self, length):
        """Store the window length (seconds) used to label the time grid."""
        self.length = length

    def setImage(self, points):
        """Store the waveform points and schedule a repaint."""
        self.img = points
        wx.CallAfter(self.Refresh)

    def showChannelNames(self, visible=True):
        """Show or hide the channel-name legend."""
        self.channelNamesVisible = visible

    def setChannelNames(self, names):
        """Set custom channel names for the legend."""
        self.channelNames = names

    def OnPaint(self, evt):
        """Draw background, grids, the waveforms and the legend."""
        w, h = self.GetSize()
        dc = self.dcref(self)
        gc = wx.GraphicsContext_Create(dc)
        tw, th = dc.GetTextExtent("0")
        dc.SetBrush(wx.Brush("#FFFFFF"))
        dc.Clear()
        dc.DrawRectangle(0, 0, w, h)
        gc.SetPen(wx.Pen("#000000", width=1, style=wx.SOLID))
        gc.SetBrush(wx.Brush("#FFFFFF", style=wx.TRANSPARENT))
        dc.SetTextForeground("#444444")
        # Platform-dependent label font size.
        if sys.platform == "darwin":
            font, ptsize = dc.GetFont(), dc.GetFont().GetPointSize()
            font.SetPointSize(ptsize - 3)
            dc.SetFont(font)
        elif sys.platform.startswith("linux"):
            font, ptsize = dc.GetFont(), dc.GetFont().GetPointSize()
            font.SetPointSize(ptsize - 1)
            dc.SetFont(font)
        elif sys.platform == "win32":
            font = dc.GetFont()
            font.SetPointSize(8)
            dc.SetFont(font)
        dc.SetPen(wx.Pen("#888888", width=1, style=wx.DOT))
        # horizontal grid: 5 lines labeled in amplitude, zero at the center.
        step = h // 6
        ampstep = 1.0 / 3.0 / self.gain
        for i in range(1, 6):
            pos = int(h - i * step)
            npos = i - 3
            text = "%.2f" % (ampstep * npos)
            tw, th = dc.GetTextExtent(text)
            dc.DrawText(text, w - tw - 2, pos - th // 2)
            dc.DrawLine(0, pos, w - tw - 10, pos)
        # vertical grid: quarters of the window length, labeled in seconds.
        tickstep = w // 4
        timestep = self.length * 0.25
        for j in range(4):
            dc.SetPen(wx.Pen("#888888", width=1, style=wx.DOT))
            dc.DrawLine(j * tickstep, 0, j * tickstep, h)
            dc.DrawText("%.3f" % (j * timestep), j * tickstep + 2, h - 15)
        # draw waveforms
        for i, samples in enumerate(self.img):
            # NOTE(review): pen index wraps at 8 here but at self.chnls in
            # the legend below — looks inconsistent, though harmless while
            # len(self.img) <= len(self.pens); confirm intent.
            gc.SetPen(self.pens[i % 8])
            if len(samples) > 1:
                gc.DrawLines(samples)
        # legend
        last_tw = tw
        if len(self.img) > 1 and self.channelNamesVisible:
            if not self.channelNames:
                # No custom names: default "chan N" labels.
                tw, th = dc.GetTextExtent("chan 8")
                for i in range(len(self.img)):
                    dc.SetTextForeground(self.pens[i % self.chnls].GetColour())
                    dc.DrawText("chan %d" % (i + 1), w - tw - 20 - last_tw, i * th + th + 7)
            else:
                # Width the legend to the longest custom name.
                numChars = max([len(x) for x in self.channelNames])
                tw, th = dc.GetTextExtent("0" * numChars)
                for i in range(len(self.img)):
                    dc.SetTextForeground(self.pens[i % self.chnls].GetColour())
                    if i < len(self.channelNames):
                        dc.DrawText(self.channelNames[i], w - tw - 20 - last_tw, i * th + th + 7)
                    else:
                        dc.DrawText("chan %d" % (i + 1), w - tw - 20 - last_tw, i * th + th + 7)
######################################################################
## Grapher window for PyoTableObject control
######################################################################
# Geometry constants for the Grapher breakpoint editor below.
OFF = 10  # margin, in pixels, between panel border and editing area
OFF2 = OFF * 2
RAD = 3  # radius of a drawn control point
RAD2 = RAD * 2
AREA = RAD + 2  # half-size of the square hit-box around a control point
AREA2 = AREA * 2
class Grapher(wx.Panel):
    """Breakpoint-function editor.

    Holds a list of normalized points (x and y in [0, 1]) that the user can
    add (click on empty space), drag (click on a point), or delete
    (backspace/delete).  Arrow up/down nudges every point by +/-0.002.
    Segments between points are drawn according to `mode`: 0 linear,
    1 cosine, 2 exponential (shaped by `exp` and `inverse`), 3 curve
    (shaped by `tension` and `bias`), 4 logarithmic, 5 cosine-logarithmic.
    After each change, `outFunction` receives the point list rescaled to
    `xlen` (x) and `yrange` (y).
    """

    def __init__(
        self,
        parent,
        xlen=8192,
        yrange=(0.0, 1.0),
        init=[(0.0, 0.0), (1.0, 1.0)],  # NOTE: mutable default; copied below, so not shared
        mode=0,
        exp=10.0,
        inverse=True,
        tension=0.0,
        bias=0.0,
        outFunction=None,
        pos=(0, 0),
        size=(300, 200),
        style=0,
    ):
        wx.Panel.__init__(self, parent, pos=pos, size=size, style=style)
        self.backgroundColour = BACKGROUND_COLOUR
        self.SetBackgroundStyle(wx.BG_STYLE_CUSTOM)
        self.SetBackgroundColour(self.backgroundColour)
        self.Bind(wx.EVT_LEAVE_WINDOW, self.OnLeave)
        self.Bind(wx.EVT_PAINT, self.OnPaint)
        self.Bind(wx.EVT_LEFT_DOWN, self.MouseDown)
        self.Bind(wx.EVT_LEFT_UP, self.MouseUp)
        self.Bind(wx.EVT_MOTION, self.MouseMotion)
        self.Bind(wx.EVT_KEY_DOWN, self.OnKeyDown)
        self.Bind(wx.EVT_SIZE, self.OnResize)
        self.mode = mode
        self.exp = exp
        self.inverse = inverse
        self.tension = tension
        self.bias = bias
        self.pos = (OFF + RAD, OFF + RAD)  # last mouse position (y flipped, see MouseMotion)
        self.selected = None  # index of the grabbed point, or None
        self.xlen = xlen
        self.yrange = yrange
        self.init = [tup for tup in init]  # pristine copy kept for reset()
        self.points = [tup for tup in init]
        self.outFunction = outFunction
        if sys.platform == "win32" or sys.platform.startswith("linux"):
            self.dcref = wx.BufferedPaintDC
        else:
            self.dcref = wx.PaintDC
        self.SetFocus()
        wx.CallAfter(self.sendValues)

    def setInitPoints(self, pts):
        """Replace both the working and the reset point lists, then notify."""
        self.init = [(p[0], p[1]) for p in pts]
        self.points = [(p[0], p[1]) for p in pts]
        self.selected = None
        self.sendValues()
        self.Refresh()

    def pointToPixels(self, pt):
        """Map a normalized (0..1) point to pixel coordinates in the edit area."""
        w, h = self.GetSize()
        w, h = w - OFF2 - RAD2, h - OFF2 - RAD2
        x = int(round(pt[0] * w)) + OFF + RAD
        y = int(round(pt[1] * h)) + OFF + RAD
        return x, y

    def pixelsToPoint(self, pos):
        """Inverse of pointToPixels: pixel coordinates to normalized (0..1)."""
        w, h = self.GetSize()
        w, h = w - OFF2 - RAD2, h - OFF2 - RAD2
        x = (pos[0] - OFF - RAD) / float(w)
        y = (pos[1] - OFF - RAD) / float(h)
        return x, y

    def pointToValues(self, pt):
        """Scale a normalized point to user values (`xlen`, `yrange`)."""
        x = pt[0] * self.xlen
        # Integer xlen means integer x positions (table indices).
        if type(self.xlen) == int:
            x = int(x)
        y = pt[1] * (self.yrange[1] - self.yrange[0]) + self.yrange[0]
        return x, y

    def valuesToPoint(self, val):
        """Inverse of pointToValues: user values to a normalized point."""
        x = val[0] / float(self.xlen)
        y = (val[1] - self.yrange[0]) / float(self.yrange[1] - self.yrange[0])
        return x, y

    def borderClip(self, pos):
        """Clamp a pixel position (in place) to the editable area."""
        w, h = self.GetSize()
        if pos[0] < (OFF + RAD):
            pos[0] = OFF + RAD
        elif pos[0] > (w - OFF - RAD):
            pos[0] = w - OFF - RAD
        if pos[1] < (OFF + RAD):
            pos[1] = OFF + RAD
        elif pos[1] > (h - OFF - RAD):
            pos[1] = h - OFF - RAD
        return pos

    def pointClip(self, pos):
        """Clamp a dragged point between its horizontal neighbors (in place)."""
        w, h = self.GetSize()
        if self.selected == 0:
            leftclip = OFF + RAD
        else:
            x, y = self.pointToPixels(self.points[self.selected - 1])
            leftclip = x
        if self.selected == (len(self.points) - 1):
            rightclip = w - OFF - RAD
        else:
            x, y = self.pointToPixels(self.points[self.selected + 1])
            rightclip = x
        if pos[0] < leftclip:
            pos[0] = leftclip
        elif pos[0] > rightclip:
            pos[0] = rightclip
        if pos[1] < (OFF + RAD):
            pos[1] = OFF + RAD
        elif pos[1] > (h - OFF - RAD):
            pos[1] = h - OFF - RAD
        return pos

    def reset(self):
        """Restore the initial point list."""
        self.points = [tup for tup in self.init]
        self.Refresh()

    def getPoints(self):
        """Return a copy of the normalized points."""
        return [tup for tup in self.points]

    def getValues(self):
        """Return the points scaled to user values (`xlen`, `yrange`)."""
        values = []
        for pt in self.points:
            x, y = self.pointToValues(pt)
            values.append((x, y))
        return values

    def sendValues(self):
        """Push the current scaled values to `outFunction`, if any."""
        if self.outFunction is not None:
            values = self.getValues()
            self.outFunction(values)

    def OnResize(self, evt):
        self.Refresh()
        evt.Skip()

    def OnLeave(self, evt):
        """Reset the position readout when the mouse leaves the panel."""
        self.pos = (OFF + RAD, OFF + RAD)
        self.Refresh()

    def OnKeyDown(self, evt):
        """Delete the selected point, or nudge all points with up/down arrows."""
        if self.selected is not None and evt.GetKeyCode() in [wx.WXK_BACK, wx.WXK_DELETE, wx.WXK_NUMPAD_DELETE]:
            del self.points[self.selected]
            self.sendValues()
            self.selected = None
            self.Refresh()
        elif evt.GetKeyCode() in [wx.WXK_UP, wx.WXK_NUMPAD_UP]:
            self.points = [(pt[0], pt[1] + 0.002) for pt in self.points]
            self.sendValues()
            self.Refresh()
        elif evt.GetKeyCode() in [wx.WXK_DOWN, wx.WXK_NUMPAD_DOWN]:
            self.points = [(pt[0], pt[1] - 0.002) for pt in self.points]
            self.sendValues()
            self.Refresh()
        evt.Skip()

    def MouseDown(self, evt):
        """Grab the point under the cursor, or insert a new one at the click."""
        self.CaptureMouse()
        w, h = self.GetSize()
        self.pos = self.borderClip(evt.GetPosition())
        self.pos[1] = h - self.pos[1]  # flip to math orientation (y up)
        for i, p in enumerate(self.points):
            x, y = self.pointToPixels(p)
            if wx.Rect(x - AREA, y - AREA, AREA2, AREA2).Contains(self.pos):
                # Grab a point
                self.selected = i
                self.Refresh()
                return
        # Add a point
        pt = self.pixelsToPoint(self.pos)
        # NOTE(review): if pt compares greater than every existing point the
        # loop inserts nothing and index() below raises ValueError —
        # presumably prevented by borderClip keeping pt inside the
        # endpoints; confirm.
        for i, p in enumerate(self.points):
            if p >= pt:
                self.points.insert(i, pt)
                break
        self.selected = self.points.index(pt)
        self.Refresh()

    def MouseUp(self, evt):
        """Release the mouse and commit the edit to `outFunction`."""
        if self.HasCapture():
            self.ReleaseMouse()
            self.sendValues()

    def MouseMotion(self, evt):
        """Track the cursor; drag the selected point when the mouse is captured."""
        w, h = self.GetSize()
        self.pos = self.borderClip(evt.GetPosition())
        self.pos[1] = h - self.pos[1]  # flip to math orientation (y up)
        if self.HasCapture():
            if self.selected is not None:
                self.pos = self.pointClip(self.pos)
                x, y = self.pixelsToPoint(self.pos)
                # Log mode cannot represent y <= 0; clamp to a tiny positive.
                if self.mode == 4 and y <= 0:
                    y = 0.000001
                self.points[self.selected] = (x, y)
            self.Refresh()

    def getLogPoints(self, pt1, pt2):
        """Return the segment pt1->pt2 resampled with logarithmic y spacing."""
        tmp = []
        # log10 needs strictly positive y values.
        if pt1[1] <= 0.0:
            pt1 = (pt1[0], 0.000001)
        if pt2[1] <= 0.0:
            pt2 = (pt2[0], 0.000001)
        if pt1[1] > pt2[1]:
            low = pt2[1]
            high = pt1[1]
        else:
            low = pt1[1]
            high = pt2[1]
        steps = pt2[0] - pt1[0]
        if steps > 0:
            lrange = high - low
            logrange = math.log10(high) - math.log10(low)
            logmin = math.log10(low)
            diff = (float(pt2[1]) - pt1[1]) / steps
            if lrange == 0:
                # Flat segment: no remapping needed.
                for i in range(steps):
                    tmp.append((pt1[0] + i, pt1[1]))
            else:
                for i in range(steps):
                    ratio = ((pt1[1] + diff * i) - low) / lrange
                    tmp.append((pt1[0] + i, pow(10, ratio * logrange + logmin)))
        return tmp

    def getCosLogPoints(self, pt1, pt2):
        """Like getLogPoints, but with cosine easing along the segment."""
        tmp = []
        # log10 needs strictly positive y values.
        if pt1[1] <= 0.0:
            pt1 = (pt1[0], 0.000001)
        if pt2[1] <= 0.0:
            pt2 = (pt2[0], 0.000001)
        if pt1[1] > pt2[1]:
            low = pt2[1]
            high = pt1[1]
        else:
            low = pt1[1]
            high = pt2[1]
        steps = pt2[0] - pt1[0]
        if steps > 0:
            lrange = high - low
            logrange = math.log10(high) - math.log10(low)
            logmin = math.log10(low)
            diff = (float(pt2[1]) - pt1[1]) / steps
            if lrange == 0:
                # Flat segment: no remapping needed.
                for i in range(steps):
                    tmp.append((pt1[0] + i, pt1[1]))
            else:
                for i in range(steps):
                    mu = float(i) / steps
                    mu = (1.0 - math.cos(mu * math.pi)) * 0.5
                    mu = pt1[1] * (1.0 - mu) + pt2[1] * mu
                    ratio = (mu - low) / lrange
                    tmp.append((pt1[0] + i, pow(10, ratio * logrange + logmin)))
        return tmp

    def getCosPoints(self, pt1, pt2):
        """Return the segment pt1->pt2 interpolated with a cosine ease."""
        tmp = []
        steps = pt2[0] - pt1[0]
        for i in range(steps):
            mu = float(i) / steps
            mu2 = (1.0 - math.cos(mu * math.pi)) * 0.5
            tmp.append((pt1[0] + i, pt1[1] * (1.0 - mu2) + pt2[1] * mu2))
        return tmp

    def getExpPoints(self, pt1, pt2):
        """Return the segment pt1->pt2 shaped by self.exp / self.inverse."""
        tmp = []
        ambitus = pt2[1] - pt1[1]
        steps = pt2[0] - pt1[0]
        if steps == 0:
            # NOTE(review): this value is never used — range(0) below is empty.
            inc = 1.0 / 0.0001
        else:
            inc = 1.0 / steps
        pointer = 0.0
        if self.inverse:
            if ambitus >= 0:
                # Rising segment: mirrored exponent for a symmetric shape.
                for i in range(steps):
                    scl = 1.0 - pow(1.0 - pointer, self.exp)
                    tmp.append((pt1[0] + i, scl * ambitus + pt1[1]))
                    pointer += inc
            else:
                for i in range(steps):
                    scl = pow(pointer, self.exp)
                    tmp.append((pt1[0] + i, scl * ambitus + pt1[1]))
                    pointer += inc
        else:
            for i in range(steps):
                scl = pow(pointer, self.exp)
                tmp.append((pt1[0] + i, scl * ambitus + pt1[1]))
                pointer += inc
        return tmp

    def addImaginaryPoints(self, tmp):
        """Extend the list with mirrored endpoints so curve mode has neighbors."""
        lst = []
        x = tmp[1][0] - tmp[0][0]
        if tmp[0][1] < tmp[1][1]:
            y = tmp[0][1] - tmp[1][1]
        else:
            y = tmp[0][1] + tmp[1][1]
        lst.append((x, y))
        lst.extend(tmp)
        x = tmp[-2][0] - tmp[-1][0]
        if tmp[-2][1] < tmp[-1][1]:
            y = tmp[-1][1] + tmp[-2][1]
        else:
            y = tmp[-1][1] - tmp[-2][1]
        lst.append((x, y))
        return lst

    def getCurvePoints(self, pt0, pt1, pt2, pt3):
        """Cubic interpolation of pt1->pt2 using neighbors pt0/pt3, shaped by
        self.tension and self.bias."""
        tmp = []
        y0, y1, y2, y3 = pt0[1], pt1[1], pt2[1], pt3[1]
        steps = pt2[0] - pt1[0]
        for i in range(steps):
            mu = float(i) / steps
            mu2 = mu * mu
            mu3 = mu2 * mu
            # Tangents at pt1 (m0) and pt2 (m1), weighted by bias/tension.
            m0 = (y1 - y0) * (1.0 + self.bias) * (1.0 - self.tension) * 0.5
            m0 += (y2 - y1) * (1.0 - self.bias) * (1.0 - self.tension) * 0.5
            m1 = (y2 - y1) * (1.0 + self.bias) * (1.0 - self.tension) * 0.5
            m1 += (y3 - y2) * (1.0 - self.bias) * (1.0 - self.tension) * 0.5
            # Hermite basis functions.
            a0 = 2.0 * mu3 - 3.0 * mu2 + 1.0
            a1 = mu3 - 2.0 * mu2 + mu
            a2 = mu3 - mu2
            a3 = -2.0 * mu3 + 3.0 * mu2
            tmp.append((pt1[0] + i, a0 * y1 + a1 * m0 + a2 * m1 + a3 * y2))
        return tmp

    def OnPaint(self, evt):
        """Draw the grid, bounding box, interpolated segments, points and the
        current mouse-position readout."""
        w, h = self.GetSize()
        corners = [(OFF, OFF), (w - OFF, OFF), (w - OFF, h - OFF), (OFF, h - OFF)]
        dc = self.dcref(self)
        gc = wx.GraphicsContext_Create(dc)
        gc.SetBrush(wx.Brush("#000000"))
        gc.SetPen(wx.Pen("#000000"))
        if sys.platform == "darwin":
            font, ptsize = dc.GetFont(), dc.GetFont().GetPointSize()
        else:
            font, ptsize = dc.GetFont(), 10
        font.SetPointSize(ptsize - 4)
        dc.SetFont(font)
        dc.SetTextForeground("#888888")
        dc.Clear()
        # Draw grid
        dc.SetPen(wx.Pen("#CCCCCC", 1))
        xstep = int(round((w - OFF2) / 10.0))
        ystep = int(round((h - OFF2) / 10.0))
        for i in range(10):
            xpos = i * xstep + OFF
            dc.DrawLine(xpos, OFF, xpos, h - OFF)
            ypos = i * ystep + OFF
            dc.DrawLine(OFF, ypos, w - OFF, ypos)
            if i > 0:
                # x labels in table units (int or float depending on xlen).
                if type(self.xlen) == int:
                    t = "%d" % int(self.xlen * i * 0.1)
                else:
                    t = "%.2f" % (self.xlen * i * 0.1)
                dc.DrawText(t, xpos + 2, h - OFF - 10)
            # y labels, scaled to yrange, top to bottom.
            if i < 9:
                t = "%.2f" % ((9 - i) * 0.1 * (self.yrange[1] - self.yrange[0]) + self.yrange[0])
                dc.DrawText(t, OFF + 2, ypos + ystep - 10)
            else:
                t = "%.2f" % ((9 - i) * 0.1 * (self.yrange[1] - self.yrange[0]) + self.yrange[0])
                dc.DrawText(t, OFF + 2, h - OFF - 10)
        dc.SetPen(wx.Pen("#000000", 1))
        dc.SetBrush(wx.Brush("#000000"))
        # Draw bounding box
        for i in range(4):
            dc.DrawLine(corners[i][0], corners[i][1], corners[(i + 1) % 4][0], corners[(i + 1) % 4][1])
        # Convert points in pixels
        w, h = w - OFF2 - RAD2, h - OFF2 - RAD2  # from here on w/h are the edit area
        tmp = []
        back_y_for_log = []  # normalized y values, needed by log modes 4 and 5
        for p in self.points:
            x = int(round(p[0] * w)) + OFF + RAD
            y = int(round((1.0 - p[1]) * h)) + OFF + RAD  # screen y axis is flipped
            tmp.append((x, y))
            back_y_for_log.append(p[1])
        # Draw lines
        dc.SetPen(wx.Pen("#000000", 1))
        last_p = None
        if len(tmp) > 1:
            if self.mode == 0:
                # Straight segments.
                for i in range(len(tmp) - 1):
                    gc.DrawLines([tmp[i], tmp[i + 1]])
            elif self.mode == 1:
                # Cosine interpolation.
                for i in range(len(tmp) - 1):
                    tmp2 = self.getCosPoints(tmp[i], tmp[i + 1])
                    if i == 0 and len(tmp2) < 2:
                        gc.DrawLines([tmp[i], tmp[i + 1]])
                    if last_p is not None:
                        gc.DrawLines([last_p, tmp[i]])
                    for j in range(len(tmp2) - 1):
                        gc.DrawLines([tmp2[j], tmp2[j + 1]])
                        last_p = tmp2[j + 1]
                if last_p is not None:
                    gc.DrawLines([last_p, tmp[-1]])
            elif self.mode == 2:
                # Exponential interpolation.
                for i in range(len(tmp) - 1):
                    tmp2 = self.getExpPoints(tmp[i], tmp[i + 1])
                    if i == 0 and len(tmp2) < 2:
                        gc.DrawLines([tmp[i], tmp[i + 1]])
                    if last_p is not None:
                        gc.DrawLines([last_p, tmp[i]])
                    for j in range(len(tmp2) - 1):
                        gc.DrawLines([tmp2[j], tmp2[j + 1]])
                        last_p = tmp2[j + 1]
                if last_p is not None:
                    gc.DrawLines([last_p, tmp[-1]])
            elif self.mode == 3:
                # Curve (tension/bias) interpolation, needs mirrored endpoints.
                curvetmp = self.addImaginaryPoints(tmp)
                for i in range(1, len(curvetmp) - 2):
                    tmp2 = self.getCurvePoints(curvetmp[i - 1], curvetmp[i], curvetmp[i + 1], curvetmp[i + 2])
                    if i == 1 and len(tmp2) < 2:
                        gc.DrawLines([curvetmp[i], curvetmp[i + 1]])
                    if last_p is not None:
                        gc.DrawLines([last_p, curvetmp[i]])
                    for j in range(len(tmp2) - 1):
                        gc.DrawLines([tmp2[j], tmp2[j + 1]])
                        last_p = tmp2[j + 1]
                if last_p is not None:
                    gc.DrawLines([last_p, tmp[-1]])
            elif self.mode == 4:
                # Log interpolation: work with normalized y, convert the
                # resampled values back to pixels afterwards.
                back_tmp = [p for p in tmp]
                for i in range(len(tmp)):
                    tmp[i] = (tmp[i][0], back_y_for_log[i])
                for i in range(len(tmp) - 1):
                    tmp2 = self.getLogPoints(tmp[i], tmp[i + 1])
                    for j in range(len(tmp2)):
                        tmp2[j] = (tmp2[j][0], int(round((1.0 - tmp2[j][1]) * h)) + OFF + RAD)
                    if i == 0 and len(tmp2) < 2:
                        gc.DrawLines([back_tmp[i], back_tmp[i + 1]])
                    if last_p is not None:
                        gc.DrawLines([last_p, back_tmp[i]])
                    for j in range(len(tmp2) - 1):
                        gc.DrawLines([tmp2[j], tmp2[j + 1]])
                        last_p = tmp2[j + 1]
                if last_p is not None:
                    gc.DrawLines([last_p, back_tmp[-1]])
                tmp = [p for p in back_tmp]
            elif self.mode == 5:
                # Cosine-log interpolation, same scheme as mode 4.
                back_tmp = [p for p in tmp]
                for i in range(len(tmp)):
                    tmp[i] = (tmp[i][0], back_y_for_log[i])
                for i in range(len(tmp) - 1):
                    tmp2 = self.getCosLogPoints(tmp[i], tmp[i + 1])
                    for j in range(len(tmp2)):
                        tmp2[j] = (tmp2[j][0], int(round((1.0 - tmp2[j][1]) * h)) + OFF + RAD)
                    if i == 0 and len(tmp2) < 2:
                        gc.DrawLines([back_tmp[i], back_tmp[i + 1]])
                    if last_p is not None:
                        gc.DrawLines([last_p, back_tmp[i]])
                    for j in range(len(tmp2) - 1):
                        gc.DrawLines([tmp2[j], tmp2[j + 1]])
                        last_p = tmp2[j + 1]
                if last_p is not None:
                    gc.DrawLines([last_p, back_tmp[-1]])
                tmp = [p for p in back_tmp]
        # Draw points
        for i, p in enumerate(tmp):
            # The grabbed point is drawn white (hollow-looking).
            if i == self.selected:
                gc.SetBrush(wx.Brush("#FFFFFF"))
                dc.SetBrush(wx.Brush("#FFFFFF"))
            else:
                gc.SetBrush(wx.Brush("#000000"))
                dc.SetBrush(wx.Brush("#000000"))
            gc.DrawEllipse(p[0] - RAD, p[1] - RAD, RAD2, RAD2)
        # Draw position values
        font.SetPointSize(ptsize - 3)
        dc.SetFont(font)
        dc.SetTextForeground("#222222")
        posptx, pospty = self.pixelsToPoint(self.pos)
        xval, yval = self.pointToValues((posptx, pospty))
        if type(self.xlen) == int:
            dc.DrawText("%d, %.3f" % (xval, yval), w - 75, OFF)
        else:
            dc.DrawText("%.3f, %.3f" % (xval, yval), w - 75, OFF)
class TableGrapher(wx.Frame):
    """Window exposing a table object's breakpoints through a Grapher panel,
    with clipboard-copy and reset menu commands."""

    def __init__(self, parent=None, obj=None, mode=0, xlen=8192, yrange=(0.0, 1.0)):
        wx.Frame.__init__(self, parent, size=(500, 250))
        pts = obj.getPoints()
        self.yrange = yrange
        # Normalize the table points to the Grapher's 0..1 coordinate space.
        for i in range(len(pts)):
            x = pts[i][0] / float(xlen)
            y = (pts[i][1] - float(yrange[0])) / (yrange[1] - yrange[0])
            pts[i] = (x, y)
        if mode == 2:
            # Exponential segments need the table's exp/inverse settings.
            self.graph = Grapher(
                self,
                xlen=xlen,
                yrange=yrange,
                init=pts,
                mode=mode,
                exp=obj.exp,
                inverse=obj.inverse,
                outFunction=obj.replace,
            )
        elif mode == 3:
            # Curve segments need the table's tension/bias settings.
            self.graph = Grapher(
                self,
                xlen=xlen,
                yrange=yrange,
                init=pts,
                mode=mode,
                tension=obj.tension,
                bias=obj.bias,
                outFunction=obj.replace,
            )
        else:
            self.graph = Grapher(self, xlen=xlen, yrange=yrange, init=pts, mode=mode, outFunction=obj.replace)
        self.menubar = wx.MenuBar()
        self.fileMenu = wx.Menu()
        self.fileMenu.Append(9999, "Close\tCtrl+W", kind=wx.ITEM_NORMAL)
        self.Bind(wx.EVT_MENU, self.close, id=9999)
        self.fileMenu.AppendSeparator()
        self.fileMenu.Append(
            10000, "Copy all points to the clipboard (4 digits of precision)\tCtrl+C", kind=wx.ITEM_NORMAL
        )
        self.Bind(wx.EVT_MENU, self.copy, id=10000)
        self.fileMenu.Append(
            10001, "Copy all points to the clipboard (full precision)\tShift+Ctrl+C", kind=wx.ITEM_NORMAL
        )
        self.Bind(wx.EVT_MENU, self.copy, id=10001)
        self.fileMenu.AppendSeparator()
        self.fileMenu.Append(10002, "Reset\tCtrl+R", kind=wx.ITEM_NORMAL)
        self.Bind(wx.EVT_MENU, self.reset, id=10002)
        self.menubar.Append(self.fileMenu, "&File")
        self.SetMenuBar(self.menubar)

    def close(self, evt):
        self.Destroy()

    def copy(self, evt):
        """Copy the points to the clipboard; menu id 10000 formats y to 4
        digits, any other id copies the raw repr."""
        pts = self.graph.getValues()
        if evt.GetId() == 10000:
            pstr = "["
            for i, pt in enumerate(pts):
                pstr += "("
                # x keeps integer formatting when the table length is an int.
                if type(pt[0]) == int:
                    pstr += "%d," % pt[0]
                else:
                    pstr += "%.4f," % pt[0]
                pstr += "%.4f)" % pt[1]
                if i < (len(pts) - 1):
                    pstr += ","
            pstr += "]"
        else:
            pstr = str(pts)
        data = wx.TextDataObject(pstr)
        if wx.TheClipboard.Open():
            wx.TheClipboard.Clear()
            wx.TheClipboard.SetData(data)
            wx.TheClipboard.Close()

    def reset(self, evt):
        """Restore the Grapher's initial points."""
        self.graph.reset()
class DataMultiSlider(wx.Panel):
    """Bar-graph style editor for a list of values, one slider per value.

    Click or drag to set values; when there are more values than pixels each
    screen column shows the maximum of the values it covers.  Pending
    changes are flushed to `outFunction` from OnPaint when `changed` is set.
    """

    def __init__(self, parent, init, yrange=(0, 1), outFunction=None, pos=(0, 0), size=(300, 200), style=0):
        wx.Panel.__init__(self, parent, pos=pos, size=size, style=style)
        self.backgroundColour = BACKGROUND_COLOUR
        self.SetBackgroundStyle(wx.BG_STYLE_CUSTOM)
        self.SetBackgroundColour(self.backgroundColour)
        self.Bind(wx.EVT_SIZE, self.OnResize)
        self.Bind(wx.EVT_PAINT, self.OnPaint)
        self.Bind(wx.EVT_LEFT_DOWN, self.MouseDown)
        self.Bind(wx.EVT_LEFT_UP, self.MouseUp)
        self.Bind(wx.EVT_MOTION, self.MouseMotion)
        self.changed = True  # triggers one outFunction call on the next paint
        self.values = [v for v in init]
        self.len = len(self.values)
        self.yrange = (float(yrange[0]), float(yrange[1]))
        self.outFunction = outFunction
        if sys.platform == "win32" or sys.platform.startswith("linux"):
            self.dcref = wx.BufferedPaintDC
        else:
            self.dcref = wx.PaintDC

    def OnResize(self, event):
        self.Layout()
        wx.CallAfter(self.Refresh)

    def update(self, points):
        """Replace the whole value list and schedule a repaint."""
        self.values = points
        self.changed = True
        wx.CallAfter(self.Refresh)

    def getValues(self):
        return self.values

    def OnPaint(self, event):
        """Draw the value outline and flush pending changes to outFunction."""
        w, h = self.GetSize()
        dc = self.dcref(self)
        gc = wx.GraphicsContext_Create(dc)
        dc.SetBrush(wx.Brush("#FFFFFF"))
        dc.SetPen(wx.Pen("#FFFFFF"))
        dc.Clear()
        dc.DrawRectangle(0, 0, w, h)
        gc.SetBrush(wx.Brush("#000000"))
        gc.SetPen(wx.Pen("#000000"))
        scl = self.yrange[1] - self.yrange[0]
        mini = self.yrange[0]
        bw = float(w) / self.len  # bar width in pixels (may be < 1)
        points = [(0, h)]
        x = 0
        if bw >= 1:
            # One flat step per value.
            for i in range(self.len):
                y = h - ((self.values[i] - mini) / scl * h)
                points.append((x, y))
                x = (i + 1) * bw
                points.append((x, y))
        else:
            # More values than pixels: draw the per-column maximum.
            slice = 1 / bw  # NOTE(review): shadows the builtin `slice`
            p1 = 0
            for i in range(w):
                p2 = int((i + 1) * slice)
                y = h - ((max(self.values[p1:p2]) - mini) / scl * h)
                points.append((i, y))
                p1 = p2
            points.append((w, y))
        points.append((w, h))
        gc.DrawLines(points)
        if self.outFunction is not None and self.changed:
            self.changed = False
            self.outFunction(self.values)

    def MouseDown(self, evt):
        """Set the value under the click and start a drag."""
        w, h = self.GetSize()
        self.lastpos = pos = evt.GetPosition()
        self.CaptureMouse()
        scl = self.yrange[1] - self.yrange[0]
        mini = self.yrange[0]
        bw = float(w) / self.len
        x = int(pos[0] / bw)
        y = (h - pos[1]) / float(h) * scl + mini
        self.values[x] = y
        self.changed = True
        wx.CallAfter(self.Refresh)
        evt.Skip()

    def MouseUp(self, evt):
        if self.HasCapture():
            self.ReleaseMouse()

    def MouseMotion(self, evt):
        """While dragging, linearly interpolate values between mouse samples."""
        w, h = self.GetSize()
        pos = evt.GetPosition()
        # Clamp the pointer to the panel.
        if pos[0] < 0:
            pos[0] = 0
        elif pos[0] > w:
            pos[0] = w
        if pos[1] < 0:
            pos[1] = 0
        elif pos[1] > h:
            pos[1] = h
        if self.HasCapture() and evt.Dragging() and evt.LeftIsDown():
            scl = self.yrange[1] - self.yrange[0]
            mini = self.yrange[0]
            bw = float(w) / self.len
            x1 = int(self.lastpos[0] / bw)
            y1 = (h - self.lastpos[1]) / float(h) * scl + mini
            x2 = int(pos[0] / bw)
            y2 = (h - pos[1]) / float(h) * scl + mini
            step = abs(x2 - x1)
            if step > 1:
                # Fill the values skipped between the two mouse samples.
                inc = (y2 - y1) / step
                if x2 > x1:
                    for i in range(0, step):
                        self.values[x1 + i] = y1 + inc * i
                else:
                    for i in range(1, step):
                        self.values[x1 - i] = y1 + inc * i
            if x2 >= 0 and x2 < self.len:
                self.values[x2] = y2
            self.lastpos = pos
            self.changed = True
            wx.CallAfter(self.Refresh)
class DataTableGrapher(wx.Frame):
    """Window exposing a data table's samples through a DataMultiSlider,
    with clipboard-copy menu commands."""

    def __init__(self, parent=None, obj=None, yrange=(0.0, 1.0)):
        wx.Frame.__init__(self, parent, size=(500, 250))
        self.obj = obj  # table object; edits are written back via obj.replace
        self.length = len(self.obj._get_current_data())
        self.multi = DataMultiSlider(self, self.obj._get_current_data(), yrange, outFunction=self.obj.replace)
        self.menubar = wx.MenuBar()
        self.fileMenu = wx.Menu()
        self.fileMenu.Append(9999, "Close\tCtrl+W", kind=wx.ITEM_NORMAL)
        self.Bind(wx.EVT_MENU, self.close, id=9999)
        self.fileMenu.AppendSeparator()
        self.fileMenu.Append(
            10000, "Copy all points to the clipboard (4 digits of precision)\tCtrl+C", kind=wx.ITEM_NORMAL
        )
        self.Bind(wx.EVT_MENU, self.copy, id=10000)
        self.fileMenu.Append(
            10001, "Copy all points to the clipboard (full precision)\tShift+Ctrl+C", kind=wx.ITEM_NORMAL
        )
        self.Bind(wx.EVT_MENU, self.copy, id=10001)
        self.menubar.Append(self.fileMenu, "&File")
        self.SetMenuBar(self.menubar)

    def getLength(self):
        """Return the number of samples in the edited table."""
        return self.length

    def close(self, evt):
        self.Destroy()

    def update(self, samples):
        """Push new samples into the multislider display."""
        self.multi.update(samples)

    def copy(self, evt):
        """Copy the values to the clipboard; menu id 10000 formats them to 4
        digits, any other id copies the raw repr."""
        values = self.multi.getValues()
        if evt.GetId() == 10000:
            pstr = "["
            for i, val in enumerate(values):
                pstr += "%.4f" % val
                if i < (len(values) - 1):
                    pstr += ", "
            pstr += "]"
        else:
            pstr = str(values)
        data = wx.TextDataObject(pstr)
        if wx.TheClipboard.Open():
            wx.TheClipboard.Clear()
            wx.TheClipboard.SetData(data)
            wx.TheClipboard.Close()
class ExprLexer(object):
    """Defines simple interface for custom lexer objects.

    Container lexer for pyo's Expr mini-language: styles keywords,
    "$name" variables, "#name" let-variables and "//" comments.
    """

    # Style ids; must match the StyleSetSpec calls made by the editor
    # that owns this lexer (see ExprEditor.setStyle).
    (
        STC_EXPR_DEFAULT,
        STC_EXPR_KEYWORD,
        STC_EXPR_KEYWORD2,
        STC_EXPR_COMMENT,
        STC_EXPR_VARIABLE,
        STC_EXPR_LETVARIABLE,
    ) = list(range(6))

    def __init__(self):
        super(ExprLexer, self).__init__()
        self.alpha = "abcdefghijklmnopqrstuvwxyz"  # word characters
        self.digits = "0123456789"
        # Function-like names of the Expr language, first keyword style.
        self.keywords = [
            "sin",
            "cos",
            "tan",
            "tanh",
            "atan",
            "atan2",
            "sqrt",
            "log",
            "sr",
            "log2",
            "log10",
            "pow",
            "abs",
            "floor",
            "ceil",
            "exp",
            "round",
            "min",
            "max",
            "randf",
            "randi",
            "sah",
            "const",
            "pi",
            "twopi",
            "e",
            "if",
            "rpole",
            "rzero",
            "neg",
            "and",
            "or",
            "wrap",
            "delay",
            "complex",
            "real",
            "imag",
            "cpole",
            "czero",
            "out",
        ]
        # Statement-level names, second keyword style.
        self.keywords2 = ["define", "load", "var", "let"]

    def StyleText(self, evt):
        """Handle the EVT_STC_STYLENEEDED event."""
        # NOTE: this local deliberately shadows the module-level `stc` import.
        stc = evt.GetEventObject()
        # Resume styling at the start of the first line still unstyled.
        last_styled_pos = stc.GetEndStyled()
        line = stc.LineFromPosition(last_styled_pos)
        start_pos = stc.PositionFromLine(line)
        end_pos = evt.GetPosition()
        # State flags: inside a "$name" variable / a "#name" let-variable.
        var = letvar = False
        while start_pos < end_pos:
            stc.StartStyling(start_pos)
            curchar = chr(stc.GetCharAt(start_pos))
            # "$" opens a variable token; whitespace or parens close it.
            if curchar == "$":
                var = True
            elif var and curchar in " \t\n()":
                var = False
            # "#" opens a let-variable token; whitespace or parens close it.
            if curchar == "#":
                letvar = True
            elif letvar and curchar in " \t\n()":
                letvar = False
            if var:
                style = self.STC_EXPR_VARIABLE
                stc.SetStyling(1, style)
                start_pos += 1
            elif letvar:
                style = self.STC_EXPR_LETVARIABLE
                stc.SetStyling(1, style)
                start_pos += 1
            elif curchar in self.alpha:
                # Style the whole word at once, by keyword category.
                start = stc.WordStartPosition(start_pos, True)
                end = stc.WordEndPosition(start, True)
                word = stc.GetTextRange(start, end)
                if word in self.keywords:
                    style = self.STC_EXPR_KEYWORD
                    stc.SetStyling(len(word), style)
                elif word in self.keywords2:
                    style = self.STC_EXPR_KEYWORD2
                    stc.SetStyling(len(word), style)
                else:
                    style = self.STC_EXPR_DEFAULT
                    stc.SetStyling(len(word), style)
                # NOTE(review): assumes the word starts exactly at start_pos;
                # WordStartPosition may return an earlier position — confirm.
                start_pos += len(word)
            elif curchar == "/" and chr(stc.GetCharAt(start_pos + 1)) == "/":
                # "//" comment runs to the end of the current line.
                eol = stc.GetLineEndPosition(stc.LineFromPosition(start_pos))
                style = self.STC_EXPR_COMMENT
                stc.SetStyling(eol - start_pos, style)
                start_pos = eol
            else:
                style = self.STC_EXPR_DEFAULT
                stc.SetStyling(1, style)
                start_pos += 1
class ExprEditor(stc.StyledTextCtrl):
    """Styled text editor for pyo Expr code, lexed by ExprLexer.

    Ctrl+Return (Cmd+Return on macOS) pushes the buffer to `obj.expr`;
    Ctrl+Z / Shift+Ctrl+Z are bound to undo/redo.
    """

    def __init__(self, parent, id=-1, obj=None):
        stc.StyledTextCtrl.__init__(self, parent, id)
        self.obj = obj
        # Platform-specific accelerator modifier and monospaced font.
        if sys.platform == "darwin":
            accel_ctrl = wx.ACCEL_CMD
            self.faces = {"mono": "Monaco", "size": 12}
        else:
            accel_ctrl = wx.ACCEL_CTRL
            self.faces = {"mono": "Monospace", "size": 10}
        # Id 10000 is the "execute" accelerator (Ctrl/Cmd+Return).
        atable = wx.AcceleratorTable(
            [
                (accel_ctrl, wx.WXK_RETURN, 10000),
                (accel_ctrl, ord("z"), wx.ID_UNDO),
                (accel_ctrl | wx.ACCEL_SHIFT, ord("z"), wx.ID_REDO),
            ]
        )
        self.SetAcceleratorTable(atable)
        self.Bind(wx.EVT_MENU, self.onExecute, id=10000)
        self.Bind(wx.EVT_MENU, self.undo, id=wx.ID_UNDO)
        self.Bind(wx.EVT_MENU, self.redo, id=wx.ID_REDO)
        self.Bind(stc.EVT_STC_UPDATEUI, self.OnUpdateUI)
        self.lexer = ExprLexer()
        self.currentfile = ""  # path of the last loaded/saved file
        self.modified = False
        self.setup()
        self.setCmdKeys()
        self.setStyle()
        # Seed the buffer with the object's current expression.
        self.SetText(self.obj.expr)

    def undo(self, evt):
        self.Undo()

    def redo(self, evt):
        self.Redo()

    def setup(self):
        """Configure indentation, tabs and margins."""
        self.SetIndent(2)
        self.SetBackSpaceUnIndents(True)
        self.SetTabIndents(True)
        self.SetTabWidth(2)
        self.SetUseTabs(False)
        self.SetMargins(2, 2)
        self.SetMarginWidth(1, 1)

    def setCmdKeys(self):
        """Bind Ctrl+= / Ctrl+- to zoom in / zoom out."""
        self.CmdKeyAssign(ord("="), stc.STC_SCMOD_CTRL, stc.STC_CMD_ZOOMIN)
        self.CmdKeyAssign(ord("-"), stc.STC_SCMOD_CTRL, stc.STC_CMD_ZOOMOUT)

    def setStyle(self):
        """Install the container lexer and all style specifications."""
        self.SetLexer(wx.stc.STC_LEX_CONTAINER)
        self.SetStyleBits(5)
        self.Bind(wx.stc.EVT_STC_STYLENEEDED, self.OnStyling)
        self.SetCaretForeground("#000000")
        self.SetCaretWidth(2)
        # Global default styles for all languages
        self.StyleSetSpec(stc.STC_STYLE_DEFAULT, "face:%(mono)s,size:%(size)d" % self.faces)
        self.StyleClearAll()
        self.StyleSetSpec(stc.STC_STYLE_DEFAULT, "face:%(mono)s,size:%(size)d" % self.faces)
        self.StyleSetSpec(stc.STC_STYLE_CONTROLCHAR, "face:%(mono)s" % self.faces)
        self.StyleSetSpec(stc.STC_STYLE_BRACELIGHT, "fore:#FFFFFF,back:#0000FF,bold")
        self.StyleSetSpec(stc.STC_STYLE_BRACEBAD, "fore:#000000,back:#FF0000,bold")
        # Expr specific styles
        self.StyleSetSpec(self.lexer.STC_EXPR_DEFAULT, "fore:#000000,face:%(mono)s,size:%(size)d" % self.faces)
        self.StyleSetSpec(self.lexer.STC_EXPR_KEYWORD, "fore:#3300DD,face:%(mono)s,size:%(size)d,bold" % self.faces)
        self.StyleSetSpec(self.lexer.STC_EXPR_KEYWORD2, "fore:#0033FF,face:%(mono)s,size:%(size)d,bold" % self.faces)
        self.StyleSetSpec(self.lexer.STC_EXPR_VARIABLE, "fore:#006600,face:%(mono)s,size:%(size)d,bold" % self.faces)
        self.StyleSetSpec(self.lexer.STC_EXPR_LETVARIABLE, "fore:#555500,face:%(mono)s,size:%(size)d,bold" % self.faces)
        self.StyleSetSpec(self.lexer.STC_EXPR_COMMENT, "fore:#444444,face:%(mono)s,size:%(size)d,italic" % self.faces)
        self.SetSelBackground(1, "#CCCCDD")

    def OnStyling(self, evt):
        self.lexer.StyleText(evt)

    def loadfile(self, filename):
        """Load `filename` into the buffer and show it in the frame title."""
        self.LoadFile(filename)
        self.currentfile = filename
        self.GetParent().SetTitle(self.currentfile)

    def savefile(self, filename):
        """Save the buffer to `filename` and refresh the UI state."""
        self.currentfile = filename
        self.GetParent().SetTitle(self.currentfile)
        self.SaveFile(filename)
        self.OnUpdateUI(None)

    def OnUpdateUI(self, evt):
        # check for matching braces
        braceAtCaret = -1
        braceOpposite = -1
        charBefore = None
        caretPos = self.GetCurrentPos()
        if caretPos > 0:
            charBefore = self.GetCharAt(caretPos - 1)
            styleBefore = self.GetStyleAt(caretPos - 1)  # NOTE(review): unused
        # check before
        if charBefore and chr(charBefore) in "[]{}()":
            braceAtCaret = caretPos - 1
        # check after
        if braceAtCaret < 0:
            charAfter = self.GetCharAt(caretPos)
            styleAfter = self.GetStyleAt(caretPos)  # NOTE(review): unused
            if charAfter and chr(charAfter) in "[]{}()":
                braceAtCaret = caretPos
        if braceAtCaret >= 0:
            braceOpposite = self.BraceMatch(braceAtCaret)
        if braceAtCaret != -1 and braceOpposite == -1:
            # Unmatched brace at the caret: highlight it as bad.
            self.BraceBadLight(braceAtCaret)
        else:
            self.BraceHighlight(braceAtCaret, braceOpposite)
        # Check if horizontal scrollbar is needed
        self.checkScrollbar()

    def checkScrollbar(self):
        """Enable the horizontal scrollbar only when a line overflows."""
        lineslength = [self.LineLength(i) + 1 for i in range(self.GetLineCount())]
        maxlength = max(lineslength)
        # Approximate rendered width of one character at the current zoom.
        width = self.GetCharWidth() + (self.GetZoom() * 0.5)
        if (self.GetSize()[0]) < (maxlength * width):
            self.SetUseHorizontalScrollBar(True)
        else:
            self.SetUseHorizontalScrollBar(False)

    def onExecute(self, evt):
        """Ctrl/Cmd+Return: push the buffer to obj.expr, preserving caret."""
        pos = self.GetCurrentPos()
        self.obj.expr = self.GetText()
        self.SetCurrentPos(pos)
        self.SetSelection(pos, pos)
class ExprEditorFrame(wx.Frame):
    """Top-level window hosting an ExprEditor, with a File menu for
    open/save/close operations."""

    def __init__(self, parent=None, obj=None):
        wx.Frame.__init__(self, parent, size=(650, 450))
        self.obj = obj
        self.obj._editor = self
        self.editor = ExprEditor(self, -1, self.obj)
        self.menubar = wx.MenuBar()
        self.fileMenu = wx.Menu()
        self.fileMenu.Append(wx.ID_OPEN, "Open\tCtrl+O")
        self.fileMenu.Append(wx.ID_CLOSE, "Close\tCtrl+W", kind=wx.ITEM_NORMAL)
        self.fileMenu.AppendSeparator()
        self.fileMenu.Append(wx.ID_SAVE, "Save\tCtrl+S")
        self.fileMenu.Append(wx.ID_SAVEAS, "Save As...\tShift+Ctrl+S")
        # Bind all menu handlers from one dispatch table.
        for menu_id, handler in (
            (wx.ID_OPEN, self.open),
            (wx.ID_CLOSE, self.close),
            (wx.ID_SAVE, self.save),
            (wx.ID_SAVEAS, self.saveas),
        ):
            self.Bind(wx.EVT_MENU, handler, id=menu_id)
        self.menubar.Append(self.fileMenu, "&File")
        self.SetMenuBar(self.menubar)

    def open(self, evt):
        """Prompt for a file and load it into the editor."""
        dialog = wx.FileDialog(
            self, message="Choose a file", defaultDir=os.path.expanduser("~"), defaultFile="", style=wx.FD_OPEN
        )
        if dialog.ShowModal() == wx.ID_OK:
            self.editor.loadfile(ensureNFD(dialog.GetPath()))
        dialog.Destroy()

    def close(self, evt):
        """Detach from the object and destroy the window."""
        self.obj._editor = None
        self.Destroy()

    def save(self, evt):
        """Save to the current file, falling back to Save As when unnamed."""
        current = self.editor.currentfile
        if current:
            self.editor.savefile(current)
        else:
            self.saveas(None)

    def saveas(self, evt):
        """Prompt for a destination and save the buffer there."""
        default_name = os.path.split(self.editor.currentfile)[1]
        dialog = wx.FileDialog(
            self,
            message="Save file as ...",
            defaultDir=os.path.expanduser("~"),
            defaultFile=default_name,
            style=wx.FD_SAVE,
        )
        dialog.SetFilterIndex(0)
        if dialog.ShowModal() == wx.ID_OK:
            self.editor.savefile(ensureNFD(dialog.GetPath()))
        dialog.Destroy()

    def update(self, text):
        """Replace the editor content with `text`."""
        self.editor.SetText(text)
class MMLLexer(object):
    """Defines simple interface for custom lexer objects.

    Container lexer for pyo's MML (Music Macro Language) notation:
    styles note names, tempo/octave/volume statements, "xyz" user
    variables, "#name" voice tokens and ";" comments.
    """

    # Style ids; must match the StyleSetSpec calls made by MMLEditor.
    STC_MML_DEFAULT, STC_MML_KEYWORD, STC_MML_KEYWORD2, STC_MML_COMMENT, STC_MML_VARIABLE, STC_MML_VOICE_TOKEN = list(
        range(6)
    )

    def __init__(self):
        super(MMLLexer, self).__init__()
        self.alpha = "abcdefghijklmnopqrstuvwxyz"  # word characters
        self.digits = "0123456789"
        # Note names (a-g, r = rest), bare or with a duration digit (c4...).
        notes = ["a", "b", "c", "d", "e", "f", "g", "r"]
        self.keywords = notes + ["%s%d" % (n, i) for n in notes for i in range(10)]
        # Statements: t = tempo (0-255), o = octave (0-15), v = volume (0-100).
        stmts = ["t", "o", "v"]
        self.keywords2 = (
            stmts + ["t%d" % i for i in range(256)] + ["o%d" % i for i in range(16)] + ["v%d" % i for i in range(101)]
        )

    def StyleText(self, evt):
        """Handle the EVT_STC_STYLENEEDED event."""
        # NOTE: this local deliberately shadows the module-level `stc` import.
        stc = evt.GetEventObject()
        # Resume styling at the start of the first line still unstyled.
        last_styled_pos = stc.GetEndStyled()
        line = stc.LineFromPosition(last_styled_pos)
        start_pos = stc.PositionFromLine(line)
        end_pos = evt.GetPosition()
        # State flags: inside an x/y/z user variable / a "#name" voice token.
        userXYZ = voiceToken = False
        while start_pos < end_pos:
            stc.StartStyling(start_pos)
            curchar = chr(stc.GetCharAt(start_pos))
            # x/y/z opens a user-variable token; whitespace closes it.
            if curchar in "xyz":
                userXYZ = True
            elif userXYZ and curchar in " \t\n":
                userXYZ = False
            # "#" opens a voice token; whitespace closes it.
            if curchar == "#":
                voiceToken = True
            elif voiceToken and curchar in " \t\n":
                voiceToken = False
            if userXYZ:
                style = self.STC_MML_VARIABLE
                stc.SetStyling(1, style)
                start_pos += 1
            elif voiceToken:
                style = self.STC_MML_VOICE_TOKEN
                stc.SetStyling(1, style)
                start_pos += 1
            elif curchar in self.alpha:
                # Style the whole word at once, by keyword category.
                start = stc.WordStartPosition(start_pos, True)
                end = stc.WordEndPosition(start, True)
                word = stc.GetTextRange(start, end)
                if word in self.keywords:
                    style = self.STC_MML_KEYWORD
                    stc.SetStyling(len(word), style)
                elif word in self.keywords2:
                    style = self.STC_MML_KEYWORD2
                    stc.SetStyling(len(word), style)
                else:
                    style = self.STC_MML_DEFAULT
                    stc.SetStyling(len(word), style)
                # NOTE(review): assumes the word starts exactly at start_pos;
                # WordStartPosition may return an earlier position — confirm.
                start_pos += len(word)
            elif curchar == ";":
                # ";" comment runs to the end of the current line.
                eol = stc.GetLineEndPosition(stc.LineFromPosition(start_pos))
                style = self.STC_MML_COMMENT
                stc.SetStyling(eol - start_pos, style)
                start_pos = eol
            else:
                style = self.STC_MML_DEFAULT
                stc.SetStyling(1, style)
                start_pos += 1
class MMLEditor(stc.StyledTextCtrl):
    """Styled text editor for MML scores, lexed by MMLLexer.

    Ctrl+Return (Cmd+Return on macOS) pushes the buffer to `obj.music`;
    Ctrl+Z / Shift+Ctrl+Z are bound to undo/redo.
    """

    def __init__(self, parent, id=-1, obj=None):
        stc.StyledTextCtrl.__init__(self, parent, id)
        self.obj = obj
        # Platform-specific accelerator modifier and monospaced font.
        if sys.platform == "darwin":
            accel_ctrl = wx.ACCEL_CMD
            self.faces = {"mono": "Monaco", "size": 12}
        else:
            accel_ctrl = wx.ACCEL_CTRL
            self.faces = {"mono": "Monospace", "size": 10}
        # Id 10000 is the "execute" accelerator (Ctrl/Cmd+Return).
        atable = wx.AcceleratorTable(
            [
                (accel_ctrl, wx.WXK_RETURN, 10000),
                (accel_ctrl, ord("z"), wx.ID_UNDO),
                (accel_ctrl | wx.ACCEL_SHIFT, ord("z"), wx.ID_REDO),
            ]
        )
        self.SetAcceleratorTable(atable)
        self.Bind(wx.EVT_MENU, self.onExecute, id=10000)
        self.Bind(wx.EVT_MENU, self.undo, id=wx.ID_UNDO)
        self.Bind(wx.EVT_MENU, self.redo, id=wx.ID_REDO)
        self.Bind(stc.EVT_STC_UPDATEUI, self.OnUpdateUI)
        self.lexer = MMLLexer()
        self.currentfile = ""  # path of the last loaded/saved file
        self.modified = False
        self.setup()
        self.setCmdKeys()
        self.setStyle()
        # obj.music may be either a path to an MML file or the MML source
        # itself; load the file content in the former case.
        if os.path.isfile(self.obj.music):
            with open(self.obj.music, "r") as f:
                music = f.read()
        else:
            music = self.obj.music
        self.SetText(music)

    def undo(self, evt):
        self.Undo()

    def redo(self, evt):
        self.Redo()

    def setup(self):
        """Configure indentation, tabs and margins."""
        self.SetIndent(2)
        self.SetBackSpaceUnIndents(True)
        self.SetTabIndents(True)
        self.SetTabWidth(2)
        self.SetUseTabs(False)
        self.SetMargins(2, 2)
        self.SetMarginWidth(1, 1)

    def setCmdKeys(self):
        """Bind Ctrl+= / Ctrl+- to zoom in / zoom out."""
        self.CmdKeyAssign(ord("="), stc.STC_SCMOD_CTRL, stc.STC_CMD_ZOOMIN)
        self.CmdKeyAssign(ord("-"), stc.STC_SCMOD_CTRL, stc.STC_CMD_ZOOMOUT)

    def setStyle(self):
        """Install the container lexer and all style specifications."""
        self.SetLexer(wx.stc.STC_LEX_CONTAINER)
        self.SetStyleBits(5)
        self.Bind(wx.stc.EVT_STC_STYLENEEDED, self.OnStyling)
        self.SetCaretForeground("#000000")
        self.SetCaretWidth(2)
        # Global default styles for all languages
        self.StyleSetSpec(stc.STC_STYLE_DEFAULT, "face:%(mono)s,size:%(size)d" % self.faces)
        self.StyleClearAll()
        self.StyleSetSpec(stc.STC_STYLE_DEFAULT, "face:%(mono)s,size:%(size)d" % self.faces)
        self.StyleSetSpec(stc.STC_STYLE_CONTROLCHAR, "face:%(mono)s" % self.faces)
        self.StyleSetSpec(stc.STC_STYLE_BRACELIGHT, "fore:#FFFFFF,back:#0000FF,bold")
        self.StyleSetSpec(stc.STC_STYLE_BRACEBAD, "fore:#000000,back:#FF0000,bold")
        # MML specific styles
        self.StyleSetSpec(self.lexer.STC_MML_DEFAULT, "fore:#000000,face:%(mono)s,size:%(size)d" % self.faces)
        self.StyleSetSpec(self.lexer.STC_MML_KEYWORD, "fore:#3300DD,face:%(mono)s,size:%(size)d,bold" % self.faces)
        self.StyleSetSpec(self.lexer.STC_MML_KEYWORD2, "fore:#0033FF,face:%(mono)s,size:%(size)d,bold" % self.faces)
        self.StyleSetSpec(self.lexer.STC_MML_VARIABLE, "fore:#006600,face:%(mono)s,size:%(size)d,bold" % self.faces)
        self.StyleSetSpec(self.lexer.STC_MML_VOICE_TOKEN, "fore:#555500,face:%(mono)s,size:%(size)d,bold" % self.faces)
        self.StyleSetSpec(self.lexer.STC_MML_COMMENT, "fore:#444444,face:%(mono)s,size:%(size)d,italic" % self.faces)
        self.SetSelBackground(1, "#CCCCDD")

    def OnStyling(self, evt):
        self.lexer.StyleText(evt)

    def loadfile(self, filename):
        """Load `filename` into the buffer and show it in the frame title."""
        self.LoadFile(filename)
        self.currentfile = filename
        self.GetParent().SetTitle(self.currentfile)

    def savefile(self, filename):
        """Save the buffer to `filename` and refresh the UI state."""
        self.currentfile = filename
        self.GetParent().SetTitle(self.currentfile)
        self.SaveFile(filename)
        self.OnUpdateUI(None)

    def OnUpdateUI(self, evt):
        # check for matching braces
        braceAtCaret = -1
        braceOpposite = -1
        charBefore = None
        caretPos = self.GetCurrentPos()
        if caretPos > 0:
            charBefore = self.GetCharAt(caretPos - 1)
            styleBefore = self.GetStyleAt(caretPos - 1)  # NOTE(review): unused
        # check before
        if charBefore and chr(charBefore) in "[]{}()":
            braceAtCaret = caretPos - 1
        # check after
        if braceAtCaret < 0:
            charAfter = self.GetCharAt(caretPos)
            styleAfter = self.GetStyleAt(caretPos)  # NOTE(review): unused
            if charAfter and chr(charAfter) in "[]{}()":
                braceAtCaret = caretPos
        if braceAtCaret >= 0:
            braceOpposite = self.BraceMatch(braceAtCaret)
        if braceAtCaret != -1 and braceOpposite == -1:
            # Unmatched brace at the caret: highlight it as bad.
            self.BraceBadLight(braceAtCaret)
        else:
            self.BraceHighlight(braceAtCaret, braceOpposite)
        # Check if horizontal scrollbar is needed
        self.checkScrollbar()

    def checkScrollbar(self):
        """Enable the horizontal scrollbar only when a line overflows."""
        lineslength = [self.LineLength(i) + 1 for i in range(self.GetLineCount())]
        maxlength = max(lineslength)
        # Approximate rendered width of one character at the current zoom.
        width = self.GetCharWidth() + (self.GetZoom() * 0.5)
        if (self.GetSize()[0]) < (maxlength * width):
            self.SetUseHorizontalScrollBar(True)
        else:
            self.SetUseHorizontalScrollBar(False)

    def onExecute(self, evt):
        """Ctrl/Cmd+Return: push the buffer to obj.music, preserving caret."""
        pos = self.GetCurrentPos()
        self.obj.music = self.GetText()
        self.SetCurrentPos(pos)
        self.SetSelection(pos, pos)
class MMLEditorFrame(wx.Frame):
    """Top-level window hosting an MMLEditor, with a File menu for
    open/save/close operations."""

    def __init__(self, parent=None, obj=None):
        wx.Frame.__init__(self, parent, size=(650, 450))
        self.obj = obj
        self.obj._editor = self
        self.editor = MMLEditor(self, -1, self.obj)
        self.menubar = wx.MenuBar()
        self.fileMenu = wx.Menu()
        self.fileMenu.Append(wx.ID_OPEN, "Open\tCtrl+O")
        self.fileMenu.Append(wx.ID_CLOSE, "Close\tCtrl+W", kind=wx.ITEM_NORMAL)
        self.fileMenu.AppendSeparator()
        self.fileMenu.Append(wx.ID_SAVE, "Save\tCtrl+S")
        self.fileMenu.Append(wx.ID_SAVEAS, "Save As...\tShift+Ctrl+S")
        # Bind all menu handlers from one dispatch table.
        for menu_id, handler in (
            (wx.ID_OPEN, self.open),
            (wx.ID_CLOSE, self.close),
            (wx.ID_SAVE, self.save),
            (wx.ID_SAVEAS, self.saveas),
        ):
            self.Bind(wx.EVT_MENU, handler, id=menu_id)
        self.menubar.Append(self.fileMenu, "&File")
        self.SetMenuBar(self.menubar)

    def open(self, evt):
        """Prompt for a file and load it into the editor."""
        dialog = wx.FileDialog(
            self, message="Choose a file", defaultDir=os.path.expanduser("~"), defaultFile="", style=wx.FD_OPEN
        )
        if dialog.ShowModal() == wx.ID_OK:
            self.editor.loadfile(ensureNFD(dialog.GetPath()))
        dialog.Destroy()

    def close(self, evt):
        """Detach from the object and destroy the window."""
        self.obj._editor = None
        self.Destroy()

    def save(self, evt):
        """Save to the current file, falling back to Save As when unnamed."""
        current = self.editor.currentfile
        if current:
            self.editor.savefile(current)
        else:
            self.saveas(None)

    def saveas(self, evt):
        """Prompt for a destination and save the buffer there."""
        default_name = os.path.split(self.editor.currentfile)[1]
        dialog = wx.FileDialog(
            self,
            message="Save file as ...",
            defaultDir=os.path.expanduser("~"),
            defaultFile=default_name,
            style=wx.FD_SAVE,
        )
        dialog.SetFilterIndex(0)
        if dialog.ShowModal() == wx.ID_OK:
            self.editor.savefile(ensureNFD(dialog.GetPath()))
        dialog.Destroy()

    def update(self, text):
        """Replace the editor content with `text`."""
        self.editor.SetText(text)
class Keyboard(wx.Panel):
    """Clickable piano keyboard widget.

    Notes are reported to `outFunction` as (midi_pitch, velocity) tuples;
    velocity 0 means note-off.  Supports a hold (toggle) mode and a
    momentary mode, octave transposition buttons, and a computer-keyboard
    mapping over two manuals.
    """

    def __init__(
        self,
        parent,
        id=wx.ID_ANY,
        pos=wx.DefaultPosition,
        size=wx.DefaultSize,
        poly=64,
        outFunction=None,
        style=wx.TAB_TRAVERSAL,
    ):
        wx.Panel.__init__(self, parent, id, pos, size, style)
        self.SetBackgroundStyle(wx.BG_STYLE_CUSTOM)
        self.SetBackgroundColour(BACKGROUND_COLOUR)
        self.parent = parent
        self.outFunction = outFunction
        self.poly = poly  # maximum number of simultaneous notes
        self.gap = 0
        self.offset = 12  # transposition in semitones (12 per octave)
        self.w1 = 15  # white key width, pixels
        self.w2 = int(self.w1 / 2) + 1  # black key width, pixels
        self.hold = 1  # 1 = toggle mode, 0 = momentary mode
        self.keyPressed = None  # (key index, pitch) held in momentary mode
        self.Bind(wx.EVT_LEFT_DOWN, self.MouseDown)
        self.Bind(wx.EVT_LEFT_UP, self.MouseUp)
        self.Bind(wx.EVT_PAINT, self.OnPaint)
        self.Bind(wx.EVT_SIZE, self.OnSize)
        self.Bind(wx.EVT_KEY_DOWN, self.OnKeyDown)
        self.Bind(wx.EVT_KEY_UP, self.OnKeyUp)
        self.white = (0, 2, 4, 5, 7, 9, 11)  # semitone of each white key
        self.black = (1, 3, 6, 8, 10)  # semitone of each black key
        self.whiteSelected = []
        self.blackSelected = []
        self.whiteVelocities = {}
        self.blackVelocities = {}
        self.whiteKeys = []
        self.blackKeys = []
        self.offRec = wx.Rect(900 - 55, 0, 28, 150)
        self.holdRec = wx.Rect(900 - 27, 0, 27, 150)
        # BUGFIX: the octave up/down hit zones were only created in OnPaint,
        # so a click received before the first paint raised AttributeError
        # in MouseDown.  Initialize them empty here; OnPaint sets the real
        # geometry on the first paint.
        self.offUpRec = wx.Rect(0, 0, 0, 0)
        self.offDownRec = wx.Rect(0, 0, 0, 0)
        self.keydown = []  # computer-keyboard keycodes currently down
        # Computer-keyboard keycode -> midi pitch (two rows of keys).
        self.keymap = {
            90: 36, 83: 37, 88: 38, 68: 39, 67: 40, 86: 41, 71: 42, 66: 43,
            72: 44, 78: 45, 74: 46, 77: 47, 44: 48, 76: 49, 46: 50, 59: 51,
            47: 52, 81: 60, 50: 61, 87: 62, 51: 63, 69: 64, 82: 65, 53: 66,
            84: 67, 54: 68, 89: 69, 55: 70, 85: 71, 73: 72, 57: 73, 79: 74,
            48: 75, 80: 76,
        }
        wx.CallAfter(self._setRects)

    def getCurrentNotes(self):
        "Returns a list of the current notes."
        notes = []
        for key in self.whiteSelected:
            notes.append((self.white[key % 7] + int(key / 7) * 12 + self.offset, 127 - self.whiteVelocities[key]))
        for key in self.blackSelected:
            notes.append((self.black[key % 5] + int(key / 5) * 12 + self.offset, 127 - self.blackVelocities[key]))
        notes.sort()
        return notes

    def reset(self):
        "Resets the keyboard state, sending a note-off for every held note."
        for key in self.blackSelected:
            pit = self.black[key % 5] + int(key / 5) * 12 + self.offset
            note = (pit, 0)
            if self.outFunction:
                self.outFunction(note)
        for key in self.whiteSelected:
            pit = self.white[key % 7] + int(key / 7) * 12 + self.offset
            note = (pit, 0)
            if self.outFunction:
                self.outFunction(note)
        self.whiteSelected = []
        self.blackSelected = []
        self.whiteVelocities = {}
        self.blackVelocities = {}
        wx.CallAfter(self.Refresh)

    def setPoly(self, poly):
        "Sets the maximum number of notes that can be held at the same time."
        self.poly = poly

    def _setRects(self):
        """Recompute key and button rectangles from the current panel size."""
        w, h = self.GetSize()
        self.offRec = wx.Rect(w - 55, 0, 28, h)
        self.holdRec = wx.Rect(w - 27, 0, 27, h)
        num = int(w / self.w1)
        self.gap = w - num * self.w1
        self.whiteKeys = [wx.Rect(i * self.w1, 0, self.w1 - 1, h - 1) for i in range(num)]
        self.blackKeys = []
        height2 = int(h * 4 / 7)  # black keys span 4/7 of the panel height
        # Five black keys per octave, in the 2 + 3 piano grouping.
        for i in range(int(num / 7) + 1):
            space2 = self.w1 * 7 * i
            off = int(self.w1 / 2) + space2 + 3
            self.blackKeys.append(wx.Rect(off, 0, self.w2, height2))
            off += self.w1
            self.blackKeys.append(wx.Rect(off, 0, self.w2, height2))
            off += self.w1 * 2
            self.blackKeys.append(wx.Rect(off, 0, self.w2, height2))
            off += self.w1
            self.blackKeys.append(wx.Rect(off, 0, self.w2, height2))
            off += self.w1
            self.blackKeys.append(wx.Rect(off, 0, self.w2, height2))
        wx.CallAfter(self.Refresh)

    def OnSize(self, evt):
        self._setRects()
        wx.CallAfter(self.Refresh)
        evt.Skip()

    def OnKeyDown(self, evt):
        """Computer-keyboard note-on (or toggle, in hold mode)."""
        if evt.HasAnyModifiers():
            evt.Skip()
            return
        if evt.GetKeyCode() in self.keymap and evt.GetKeyCode() not in self.keydown:
            self.keydown.append(evt.GetKeyCode())
            pit = self.keymap[evt.GetKeyCode()]
            deg = pit % 12
            total = len(self.blackSelected) + len(self.whiteSelected)
            note = None
            if self.hold:
                # Toggle mode: a second press removes the note.
                if deg in self.black:
                    which = self.black.index(deg) + int((pit - self.offset) / 12) * 5
                    if which in self.blackSelected:
                        self.blackSelected.remove(which)
                        del self.blackVelocities[which]
                        note = (pit, 0)
                    else:
                        if total < self.poly:
                            self.blackSelected.append(which)
                            self.blackVelocities[which] = 100
                            note = (pit, 100)
                elif deg in self.white:
                    which = self.white.index(deg) + int((pit - self.offset) / 12) * 7
                    if which in self.whiteSelected:
                        self.whiteSelected.remove(which)
                        del self.whiteVelocities[which]
                        note = (pit, 0)
                    else:
                        if total < self.poly:
                            self.whiteSelected.append(which)
                            self.whiteVelocities[which] = 100
                            note = (pit, 100)
            else:
                # Momentary mode: note-off is sent from OnKeyUp.
                if deg in self.black:
                    which = self.black.index(deg) + int((pit - self.offset) / 12) * 5
                    if which not in self.blackSelected and total < self.poly:
                        self.blackSelected.append(which)
                        self.blackVelocities[which] = 100
                        note = (pit, 100)
                elif deg in self.white:
                    which = self.white.index(deg) + int((pit - self.offset) / 12) * 7
                    if which not in self.whiteSelected and total < self.poly:
                        self.whiteSelected.append(which)
                        self.whiteVelocities[which] = 100
                        note = (pit, 100)
            # NOTE(review): with `total < self.poly` here, toggling a note
            # OFF while exactly `poly` notes are held never sends its
            # (pit, 0) event — possible stuck note.  Kept as-is; confirm.
            if note and self.outFunction and total < self.poly:
                self.outFunction(note)
            wx.CallAfter(self.Refresh)
        evt.Skip()

    def OnKeyUp(self, evt):
        """Computer-keyboard note-off (momentary mode only)."""
        if evt.HasAnyModifiers():
            evt.Skip()
            return
        if evt.GetKeyCode() in self.keydown:
            self.keydown.remove(evt.GetKeyCode())
        if not self.hold and evt.GetKeyCode() in self.keymap:
            pit = self.keymap[evt.GetKeyCode()]
            deg = pit % 12
            note = None
            if deg in self.black:
                which = self.black.index(deg) + int((pit - self.offset) / 12) * 5
                if which in self.blackSelected:
                    self.blackSelected.remove(which)
                    del self.blackVelocities[which]
                    note = (pit, 0)
            elif deg in self.white:
                which = self.white.index(deg) + int((pit - self.offset) / 12) * 7
                if which in self.whiteSelected:
                    self.whiteSelected.remove(which)
                    del self.whiteVelocities[which]
                    note = (pit, 0)
            if note and self.outFunction:
                self.outFunction(note)
            wx.CallAfter(self.Refresh)
        evt.Skip()

    def MouseUp(self, evt):
        """Momentary mode: send the note-off for the key grabbed on click."""
        if not self.hold and self.keyPressed is not None:
            key = self.keyPressed[0]
            pit = self.keyPressed[1]
            if key in self.blackSelected:
                self.blackSelected.remove(key)
                del self.blackVelocities[key]
            if key in self.whiteSelected:
                self.whiteSelected.remove(key)
                del self.whiteVelocities[key]
            note = (pit, 0)
            if self.outFunction:
                self.outFunction(note)
            self.keyPressed = None
            wx.CallAfter(self.Refresh)
        evt.Skip()

    def MouseDown(self, evt):
        """Handle clicks on the hold/octave buttons and on the keys.

        The vertical click position sets the velocity (higher on the key =
        softer, following the (h - y) * 127 / h mapping).
        """
        w, h = self.GetSize()
        pos = evt.GetPosition()
        # Hold-mode toggle button.
        if self.holdRec.Contains(pos):
            if self.hold:
                self.hold = 0
                self.reset()
            else:
                self.hold = 1
            wx.CallAfter(self.Refresh)
            return
        # Octave up/down buttons; offset stays within [0, 60] semitones.
        if self.offUpRec.Contains(pos):
            self.offset += 12
            if self.offset > 60:
                self.offset = 60
            wx.CallAfter(self.Refresh)
            return
        if self.offDownRec.Contains(pos):
            self.offset -= 12
            if self.offset < 0:
                self.offset = 0
            wx.CallAfter(self.Refresh)
            return
        total = len(self.blackSelected) + len(self.whiteSelected)
        scanWhite = True  # black keys sit on top; check them first
        note = None
        if self.hold:
            # Toggle mode: clicking a held key releases it.
            for i, rec in enumerate(self.blackKeys):
                if rec.Contains(pos):
                    pit = self.black[i % 5] + int(i / 5) * 12 + self.offset
                    if i in self.blackSelected:
                        self.blackSelected.remove(i)
                        del self.blackVelocities[i]
                        vel = 0
                    else:
                        hb = int(h * 4 / 7)
                        vel = int((hb - pos[1]) * 127 / hb)
                        if total < self.poly:
                            self.blackSelected.append(i)
                            self.blackVelocities[i] = int(127 - vel)
                    note = (pit, vel)
                    scanWhite = False
                    break
            if scanWhite:
                for i, rec in enumerate(self.whiteKeys):
                    if rec.Contains(pos):
                        pit = self.white[i % 7] + int(i / 7) * 12 + self.offset
                        if i in self.whiteSelected:
                            self.whiteSelected.remove(i)
                            del self.whiteVelocities[i]
                            vel = 0
                        else:
                            vel = int((h - pos[1]) * 127 / h)
                            if total < self.poly:
                                self.whiteSelected.append(i)
                                self.whiteVelocities[i] = int(127 - vel)
                        note = (pit, vel)
                        break
            # NOTE(review): same `total < self.poly` caveat as OnKeyDown —
            # a release event can be dropped at the polyphony limit.
            if note and self.outFunction and total < self.poly:
                self.outFunction(note)
        else:
            # Momentary mode: the note-off is sent from MouseUp.
            self.keyPressed = None
            for i, rec in enumerate(self.blackKeys):
                if rec.Contains(pos):
                    pit = self.black[i % 5] + int(i / 5) * 12 + self.offset
                    if i not in self.blackSelected:
                        hb = int(h * 4 / 7)
                        vel = int((hb - pos[1]) * 127 / hb)
                        if total < self.poly:
                            self.blackSelected.append(i)
                            self.blackVelocities[i] = int(127 - vel)
                            note = (pit, vel)
                            self.keyPressed = (i, pit)
                    scanWhite = False
                    break
            if scanWhite:
                for i, rec in enumerate(self.whiteKeys):
                    if rec.Contains(pos):
                        pit = self.white[i % 7] + int(i / 7) * 12 + self.offset
                        if i not in self.whiteSelected:
                            vel = int((h - pos[1]) * 127 / h)
                            if total < self.poly:
                                self.whiteSelected.append(i)
                                self.whiteVelocities[i] = int(127 - vel)
                                note = (pit, vel)
                                self.keyPressed = (i, pit)
                        break
            if note and self.outFunction and total < self.poly:
                self.outFunction(note)
        wx.CallAfter(self.Refresh)
        evt.Skip()

    def OnPaint(self, evt):
        """Draw keys, the octave selector and the HOLD button."""
        w, h = self.GetSize()
        dc = wx.AutoBufferedPaintDC(self)
        dc.SetBrush(wx.Brush("#000000", wx.SOLID))
        dc.Clear()
        dc.SetPen(wx.Pen("#000000", width=1, style=wx.SOLID))
        dc.DrawRectangle(0, 0, w, h)
        if sys.platform == "darwin":
            dc.SetFont(wx.Font(12, wx.FONTFAMILY_SWISS, wx.FONTSTYLE_NORMAL, wx.FONTWEIGHT_BOLD))
        else:
            dc.SetFont(wx.Font(8, wx.FONTFAMILY_SWISS, wx.FONTSTYLE_NORMAL, wx.FONTWEIGHT_BOLD))
        # White keys: velocity gradient when held, flat white otherwise.
        for i, rec in enumerate(self.whiteKeys):
            if i in self.whiteSelected:
                amp = int(self.whiteVelocities[i] * 1.5)
                dc.GradientFillLinear(rec, (250, 250, 250), (amp, amp, amp), wx.SOUTH)
                dc.SetBrush(wx.Brush("#CCCCCC", wx.SOLID))
                dc.SetPen(wx.Pen("#CCCCCC", width=1, style=wx.SOLID))
            else:
                dc.SetBrush(wx.Brush("#FFFFFF", wx.SOLID))
                dc.SetPen(wx.Pen("#CCCCCC", width=1, style=wx.SOLID))
                dc.DrawRectangle(rec)
            # Label middle C for the current octave offset.
            if i == (35 - (7 * int(self.offset / 12))):
                if i in self.whiteSelected:
                    dc.SetTextForeground("#FFFFFF")
                else:
                    dc.SetTextForeground("#000000")
                dc.DrawText("C", rec[0] + 3, rec[3] - 15)
        dc.SetPen(wx.Pen("#000000", width=1, style=wx.SOLID))
        # Black keys: gradient + outline when held, solid black otherwise.
        for i, rec in enumerate(self.blackKeys):
            if i in self.blackSelected:
                amp = int(self.blackVelocities[i] * 1.5)
                dc.GradientFillLinear(rec, (250, 250, 250), (amp, amp, amp), wx.SOUTH)
                dc.DrawLine(rec[0], 0, rec[0], rec[3])
                dc.DrawLine(rec[0] + rec[2], 0, rec[0] + rec[2], rec[3])
                dc.DrawLine(rec[0], rec[3], rec[0] + rec[2], rec[3])
                dc.SetBrush(wx.Brush("#DDDDDD", wx.SOLID))
            else:
                dc.SetBrush(wx.Brush("#000000", wx.SOLID))
                dc.SetPen(wx.Pen("#000000", width=1, style=wx.SOLID))
                dc.DrawRectangle(rec)
        # Octave and hold button backgrounds.
        dc.SetBrush(wx.Brush(BACKGROUND_COLOUR, wx.SOLID))
        dc.SetPen(wx.Pen("#AAAAAA", width=1, style=wx.SOLID))
        dc.DrawRectangle(self.offRec)
        dc.DrawRectangle(self.holdRec)
        dc.SetTextForeground("#000000")
        dc.DrawText("oct", self.offRec[0] + 3, 15)
        x1, y1 = self.offRec[0], self.offRec[1]
        dc.SetBrush(wx.Brush("#000000", wx.SOLID))
        # Up/down arrows; their hit zones are refreshed here as well.
        if sys.platform == "darwin":
            dc.DrawPolygon([wx.Point(x1 + 3, 36), wx.Point(x1 + 10, 29), wx.Point(x1 + 17, 36)])
            self.offUpRec = wx.Rect(x1, 28, x1 + 20, 10)
            dc.DrawPolygon([wx.Point(x1 + 3, 55), wx.Point(x1 + 10, 62), wx.Point(x1 + 17, 55)])
            self.offDownRec = wx.Rect(x1, 54, x1 + 20, 10)
        else:
            dc.DrawPolygon([wx.Point(x1 + 5, 38), wx.Point(x1 + 12, 31), wx.Point(x1 + 19, 38)])
            self.offUpRec = wx.Rect(x1, 30, x1 + 20, 10)
            dc.DrawPolygon([wx.Point(x1 + 5, 57), wx.Point(x1 + 12, 64), wx.Point(x1 + 19, 57)])
            self.offDownRec = wx.Rect(x1, 56, x1 + 20, 10)
        dc.DrawText("%d" % int(self.offset / 12), x1 + 9, 41)
        # HOLD label, drawn vertically; blue when hold mode is active.
        if self.hold:
            dc.SetTextForeground("#0000CC")
        else:
            dc.SetTextForeground("#000000")
        for i, c in enumerate("HOLD"):
            dc.DrawText(c, self.holdRec[0] + 8, int(self.holdRec[3] / 6) * i + 15)
        evt.Skip()
class NoteinKeyboardFrame(wx.Frame):
    """Frame wrapping a virtual Keyboard that feeds notes to `obj._newNote`."""

    def __init__(self, parent=None, obj=None):
        wx.Frame.__init__(self, parent, size=(900, 150))
        self.obj = obj
        self.keyboard = Keyboard(self, -1, outFunction=self.obj._newNote)
        file_menu = wx.Menu()
        file_menu.Append(wx.ID_CLOSE, "Close\tCtrl+W", kind=wx.ITEM_NORMAL)
        menubar = wx.MenuBar()
        menubar.Append(file_menu, "&File")
        self.fileMenu = file_menu
        self.menubar = menubar
        self.Bind(wx.EVT_MENU, self.close, id=wx.ID_CLOSE)
        self.SetMenuBar(self.menubar)

    def close(self, evt):
        self.Destroy()
class ServerGUI(wx.Frame):
def __init__(
self,
parent=None,
nchnls=2,
startf=None,
stopf=None,
recstartf=None,
recstopf=None,
ampf=None,
started=0,
locals=None,
shutdown=None,
meter=True,
timer=True,
amp=1.0,
exit=True,
getIsBooted=None,
getIsStarted=None,
):
wx.Frame.__init__(self, parent, style=wx.DEFAULT_FRAME_STYLE ^ wx.RESIZE_BORDER)
self.menubar = wx.MenuBar()
self.menu = wx.Menu()
self.menu.Append(22999, "Start/Stop\tCtrl+R", kind=wx.ITEM_NORMAL)
self.Bind(wx.EVT_MENU, self.start, id=22999)
quit_item = self.menu.Append(wx.ID_EXIT, "Quit\tCtrl+Q")
self.Bind(wx.EVT_MENU, self.on_quit, id=wx.ID_EXIT)
self.menubar.Append(self.menu, "&File")
self.SetMenuBar(self.menubar)
self.shutdown = shutdown
self.locals = locals
self.nchnls = nchnls
self.startf = startf
self.stopf = stopf
self.recstartf = recstartf
self.recstopf = recstopf
self.ampf = ampf
self.exit = exit
self.getIsBooted = getIsBooted
self.getIsStarted = getIsStarted
self._started = False
self._recstarted = False
self._history = []
self._histo_count = 0
panel = wx.Panel(self)
panel.SetBackgroundColour(BACKGROUND_COLOUR)
box = wx.BoxSizer(wx.VERTICAL)
buttonBox = wx.BoxSizer(wx.HORIZONTAL)
self.startButton = wx.Button(panel, -1, "Start")
self.startButton.Bind(wx.EVT_BUTTON, self.start)
buttonBox.Add(self.startButton, 0, wx.LEFT | wx.RIGHT, 5)
self.recButton = wx.Button(panel, -1, "Rec Start")
self.recButton.Bind(wx.EVT_BUTTON, self.record)
buttonBox.Add(self.recButton, 0, wx.RIGHT, 5)
self.quitButton = wx.Button(panel, -1, "Quit")
self.quitButton.Bind(wx.EVT_BUTTON, self.on_quit)
buttonBox.Add(self.quitButton, 0, wx.RIGHT, 5)
box.Add(buttonBox, 0, wx.TOP, 10)
box.AddSpacer(10)
box.Add(wx.StaticText(panel, -1, "Amplitude (dB)"), 0, wx.LEFT, 5)
self.ampScale = ControlSlider(panel, -60, 18, 20.0 * math.log10(amp), size=(202, 16), outFunction=self.setAmp)
box.Add(self.ampScale, 0, wx.LEFT | wx.RIGHT | wx.EXPAND, 5)
if meter:
box.AddSpacer(10)
self.meter = VuMeter(panel, size=(200, 5 * self.nchnls + 1), numSliders=self.nchnls)
box.Add(self.meter, 0, wx.LEFT | wx.RIGHT | wx.EXPAND, 5)
box.AddSpacer(5)
if timer:
box.AddSpacer(10)
tt = wx.StaticText(panel, -1, "Elapsed time (hh:mm:ss:ms)")
box.Add(tt, 0, wx.LEFT, 5)
box.AddSpacer(3)
self.timetext = wx.StaticText(panel, -1, "00 : 00 : 00 : 000")
box.Add(self.timetext, 0, wx.LEFT, 5)
if self.locals is not None:
box.AddSpacer(10)
t = wx.StaticText(panel, -1, "Interpreter")
box.Add(t, 0, wx.LEFT, 5)
tw, th = self.GetTextExtent("|")
self.text = wx.TextCtrl(panel, -1, "", size=(202, th + 8), style=wx.TE_PROCESS_ENTER)
self.text.Bind(wx.EVT_TEXT_ENTER, self.getText)
self.text.Bind(wx.EVT_KEY_DOWN, self.onChar)
box.Add(self.text, 0, wx.LEFT | wx.RIGHT | wx.EXPAND, 5)
box.AddSpacer(10)
panel.SetSizerAndFit(box)
self.SetClientSize(panel.GetSize())
self.Bind(wx.EVT_CLOSE, self.on_quit)
if started == 1:
self.start(None, True)
def setTime(self, *args):
wx.CallAfter(self.timetext.SetLabel, "%02d : %02d : %02d : %03d" % (args[0], args[1], args[2], args[3]))
def start(self, evt=None, justSet=False):
if self._started == False:
self._started = True
wx.CallAfter(self.startButton.SetLabel, "Stop")
if self.exit:
wx.CallAfter(self.quitButton.Disable)
if not justSet:
self.startf()
else:
self._started = False
wx.CallAfter(self.startButton.SetLabel, "Start")
if self.exit:
wx.CallAfter(self.quitButton.Enable)
# TODO: Need a common method for every OSes.
# wx.CallLater(100, self.stopf)
# wx.CallAfter(self.stopf)
if self.getIsStarted():
self.stopf()
def record(self, evt):
if self._recstarted == False:
self.recstartf()
self._recstarted = True
wx.CallAfter(self.recButton.SetLabel, "Rec Stop")
else:
self.recstopf()
self._recstarted = False
wx.CallAfter(self.recButton.SetLabel, "Rec Start")
def quit_from_code(self):
wx.CallAfter(self.on_quit, None)
def on_quit(self, evt):
if self.exit and self.getIsBooted():
self.shutdown()
time.sleep(0.25)
self.Destroy()
if self.exit:
sys.exit()
def getPrev(self):
self.text.Clear()
self._histo_count -= 1
if self._histo_count < 0:
self._histo_count = 0
self.text.SetValue(self._history[self._histo_count])
wx.CallAfter(self.text.SetInsertionPointEnd)
def getNext(self):
self.text.Clear()
self._histo_count += 1
if self._histo_count >= len(self._history):
self._histo_count = len(self._history)
else:
self.text.SetValue(self._history[self._histo_count])
wx.CallAfter(self.text.SetInsertionPointEnd)
def getText(self, evt):
    """Run the entered interpreter line: append it to history, then exec() it.

    NOTE(review): exec() on user-typed input is intentional here (this is an
    interactive interpreter field), but it must never receive untrusted input.
    """
    source = self.text.GetValue()
    self.text.Clear()
    self._history.append(source)
    # Reset the history cursor to "one past newest".
    self._histo_count = len(self._history)
    exec(source, self.locals)
def onChar(self, evt):
    """Key handler for the interpreter field: Up/Down arrows browse history.

    All other keys are skipped so normal text entry still works.
    """
    key = evt.GetKeyCode()
    # Use named wx keycode constants instead of magic numbers (315/317).
    if key == wx.WXK_UP:
        self.getPrev()
        evt.StopPropagation()
    elif key == wx.WXK_DOWN:
        self.getNext()
        evt.StopPropagation()
    else:
        evt.Skip()
def setAmp(self, value):
    """Set output amplitude from a value in decibels (dB -> linear gain, 10**(dB/20))."""
    self.ampf(math.pow(10.0, float(value) * 0.05))
def setRms(self, *args):
    """Forward RMS level values to the meter widget (one value per channel)."""
    self.meter.setRms(*args)
def setStartButtonState(self, state):
    """Synchronize the GUI (Start/Stop button, quit availability) with *state*."""
    self._started = bool(state)
    wx.CallAfter(self.startButton.SetLabel, "Stop" if state else "Start")
    if self.exit:
        # While running, quitting is disabled; re-enabled once stopped.
        wx.CallAfter(self.quitButton.Disable if state else self.quitButton.Enable)
def ensureNFD(unistr):
    """Return *unistr* as a unicode-normalized string.

    Byte strings are decoded by trying a platform-specific list of encodings
    in order; the first that succeeds wins. If no encoding works, the
    original input is returned unchanged.

    NOTE(review): despite the function name, both platform branches of the
    original assigned "NFC" as the normalization form -- confirm whether the
    non-win/linux (macOS) branch was meant to use "NFD".
    """
    if sys.platform == "win32" or sys.platform.startswith("linux"):
        encodings = [sys.getdefaultencoding(), sys.getfilesystemencoding(), "cp1252", "iso-8859-1", "utf-16"]
    else:
        encodings = [sys.getdefaultencoding(), sys.getfilesystemencoding(), "macroman", "iso-8859-1", "utf-16"]
    # Renamed from `format` to avoid shadowing the builtin; both branches of
    # the original set the same value, so it is hoisted out of the if/else.
    norm_form = "NFC"
    decstr = unistr
    if not isinstance(decstr, unicode_t):
        for encoding in encodings:
            try:
                decstr = decstr.decode(encoding)
                break
            except UnicodeDecodeError:
                continue
            except Exception:
                # Narrowed from a bare `except:` so KeyboardInterrupt et al.
                # are not swallowed; behavior is otherwise the same fallback.
                decstr = "UnableToDecodeString"
                print("Unicode encoding not in a recognized format...")
                break
    if decstr == "UnableToDecodeString":
        return unistr
    return unicodedata.normalize(norm_form, decstr)
| 35.987293
| 120
| 0.525166
| 17,828
| 150,103
| 4.362688
| 0.072246
| 0.00864
| 0.012266
| 0.015711
| 0.669058
| 0.605313
| 0.559451
| 0.523799
| 0.493469
| 0.469709
| 0
| 0.035753
| 0.343537
| 150,103
| 4,170
| 121
| 35.995923
| 0.753575
| 0.014177
| 0
| 0.571468
| 0
| 0
| 0.02712
| 0.00604
| 0
| 0
| 0
| 0.00024
| 0
| 1
| 0.072436
| false
| 0.000829
| 0.001935
| 0.004147
| 0.098148
| 0.000553
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
ae4bd329b0a39f201a2f41d92b1c573029070350
| 5,382
|
py
|
Python
|
napalm_yang/utils.py
|
ckishimo/napalm-yang
|
8f2bd907bd3afcde3c2f8e985192de74748baf6c
|
[
"Apache-2.0"
] | 64
|
2016-10-20T15:47:18.000Z
|
2021-11-11T11:57:32.000Z
|
napalm_yang/utils.py
|
ckishimo/napalm-yang
|
8f2bd907bd3afcde3c2f8e985192de74748baf6c
|
[
"Apache-2.0"
] | 126
|
2016-10-05T10:36:14.000Z
|
2019-05-15T08:43:23.000Z
|
napalm_yang/utils.py
|
ckishimo/napalm-yang
|
8f2bd907bd3afcde3c2f8e985192de74748baf6c
|
[
"Apache-2.0"
] | 63
|
2016-11-07T15:23:08.000Z
|
2021-09-22T14:41:16.000Z
|
from napalm_yang import base
def model_to_dict(model, mode="", show_defaults=False):
    """
    Given a model, return a representation of the model in a dict.
    This is mostly useful to have a quick visual representation of the model.
    Args:
        model (PybindBase): Model to transform.
        mode (string): Whether to print config, state or all elements ("" for all)
        show_defaults (bool): When True, leaves are rendered as their YANG
            default value instead of their type name.
    Returns:
        dict: A dictionary representing the model.
    Examples:
        >>> config = napalm_yang.base.Root()
        >>>
        >>> # Adding models to the object
        >>> config.add_model(napalm_yang.models.openconfig_interfaces())
        >>> config.add_model(napalm_yang.models.openconfig_vlan())
        >>> # Printing the model in a human readable format
        >>> pretty_print(napalm_yang.utils.model_to_dict(config))
        >>> {
        >>>     "openconfig-interfaces:interfaces [rw]": {
        >>>         "interface [rw]": {
        >>>             "config [rw]": {
        >>>                 "description [rw]": "string",
        >>>                 "enabled [rw]": "boolean",
        >>>                 "mtu [rw]": "uint16",
        >>>                 "name [rw]": "string",
        >>>                 "type [rw]": "identityref"
        >>>             },
        >>>             "hold_time [rw]": {
        >>>                 "config [rw]": {
        >>>                     "down [rw]": "uint32",
        >>>                     "up [rw]": "uint32"
        (trimmed for clarity)
    """
    def is_mode(obj, mode):
        # Decide whether *obj* belongs in the requested view ("" accepts all).
        if mode == "":
            return True
        elif mode == "config":
            return obj._yang_name == "config" or obj._is_config
        elif mode == "state":
            return obj._yang_name == "state" or not obj._is_config
        else:
            raise ValueError(
                "mode can only be config, state or ''. Passed: {}".format(mode)
            )

    def get_key(key, model, parent_defining_module, show_defaults):
        # Decorate the key with [rw]/[ro] and, on module boundaries, prefix
        # the defining module name.
        if not show_defaults:
            # No need to display rw/ro when showing the defaults.
            key = "{} {}".format(key, "[rw]" if model._is_config else "[ro]")
        if parent_defining_module != model._defining_module:
            key = "{}:{}".format(model._defining_module, key)
        return key

    if model._yang_type in ("container", "list"):
        # Containers recurse directly; lists recurse into the contained class.
        cls = model if model._yang_type in ("container",) else model._contained_class()
        result = {}
        for k, v in cls:
            r = model_to_dict(v, mode=mode, show_defaults=show_defaults)
            if r:
                result[get_key(k, v, model._defining_module, show_defaults)] = r
        return result
    else:
        if show_defaults:
            if model._default is False:
                if model._yang_type != "boolean":
                    # Unless the datatype is bool, when the _default attribute
                    # is False, it means there is not default value defined in
                    # the YANG model.
                    return None
            return model._default
        return model._yang_type if is_mode(model, mode) else None
def _diff_root(f, s):
    # Compare two Root objects attribute by attribute; keep only the
    # attributes where a recursive diff found something.
    changed = {}
    for name in f.elements():
        delta = diff(getattr(f, name), getattr(s, name))
        if delta:
            changed[name] = delta
    return changed
def _diff_list(f, s):
    # Diff two keyed YANG lists: entries present in both sides are diffed
    # recursively; keys unique to either side are reported separately.
    result = {}
    first_keys, second_keys = set(f.keys()), set(s.keys())

    shared = {}
    for key in first_keys & second_keys:
        delta = diff(f[key], s[key])
        if delta:
            shared[key] = delta
    if shared:
        result["both"] = shared

    first_only = first_keys - second_keys
    second_only = second_keys - first_keys
    if first_only or second_only:
        result["first_only"] = list(first_only)
        result["second_only"] = list(second_only)
    return result
def diff(f, s):
    """
    Given two models, return the difference between them.
    Args:
        f (Pybindbase): First element.
        s (Pybindbase): Second element.
    Returns:
        dict: A dictionary highlighting the differences.
    Examples:
        >>> diff = napalm_yang.utils.diff(candidate, running)
        >>> pretty_print(diff)
        >>> {
        >>>     "interfaces": {
        >>>         "interface": {
        >>>             "both": {
        >>>                 "Port-Channel1": {
        >>>                     "config": {
        >>>                         "mtu": {
        >>>                             "first": "0",
        >>>                             "second": "9000"
        >>>                         }
        >>>                     }
        >>>                 }
        >>>             },
        >>>             "first_only": [
        >>>                 "Loopback0"
        >>>             ],
        >>>             "second_only": [
        >>>                 "Loopback1"
        >>>             ]
        >>>         }
        >>>     }
        >>> }
    """
    # Dispatch on YANG type: containers/roots recurse per attribute, lists
    # recurse per key, everything else is compared by string value.
    if isinstance(f, base.Root) or f._yang_type in ("container", None):
        result = _diff_root(f, s)
    elif f._yang_type in ("list",):
        result = _diff_list(f, s)
    else:
        result = {}
        first = "{}".format(f)
        second = "{}".format(s)
        if first != second:
            result = {"first": first, "second": second}
    return result
| 30.40678
| 87
| 0.473987
| 555
| 5,382
| 4.421622
| 0.259459
| 0.03423
| 0.02119
| 0.018337
| 0.05379
| 0.05379
| 0.0326
| 0
| 0
| 0
| 0
| 0.004328
| 0.398922
| 5,382
| 176
| 88
| 30.579545
| 0.75425
| 0.45392
| 0
| 0.191781
| 0
| 0
| 0.063694
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.082192
| false
| 0.013699
| 0.013699
| 0
| 0.246575
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
ae4dfb5b9ba2ae94cfbe34ece6b1afd93884dd8b
| 2,430
|
py
|
Python
|
config.py
|
kenykau/reinforcement-forex
|
cac8c59ae7f5593bb7d9bb47e85f4ba2435a7a33
|
[
"MIT"
] | null | null | null |
config.py
|
kenykau/reinforcement-forex
|
cac8c59ae7f5593bb7d9bb47e85f4ba2435a7a33
|
[
"MIT"
] | null | null | null |
config.py
|
kenykau/reinforcement-forex
|
cac8c59ae7f5593bb7d9bb47e85f4ba2435a7a33
|
[
"MIT"
] | null | null | null |
from enum import IntEnum
from typing import List, Dict
class AssetType(IntEnum):
    """Kind of tradable instrument."""
    FOREX = 0
    CFD = 1
class SpreadMode(IntEnum):
    """How the bid/ask spread is simulated for a symbol."""
    BIDASK = 0      # use the actual bid/ask columns from the data
    RANDOM = 1      # random spread between min_spread and max_spread
    IGNORE = 2      # no spread at all
    FIXED = 3       # constant spread of fixed_spread points
    SESSIONAL = 4   # spread varies with the trading session
class Op(IntEnum):
    """Trading actions available to the agent."""
    LONG = 0
    SHORT = 1
    HOLD = 2
    CLOSEALL = 3
class Config:
    """Static configuration for the forex RL environment: data source,
    csv field mapping, per-symbol trading parameters, account settings,
    and environment/observation options."""

    # Path to the OHLCV csv file used for training/backtesting.
    datafile: str = './2021617-60.csv'
    # Maps logical field names (keys) to the column names in the csv (values).
    fields: Dict = {
        "symbol" : "symbol",
        "dt" : "dt",
        "tf" : "tf",
        "open" : "open",
        "high" : "high",
        "low" : "low",
        "close" : "close",
        "vol" : "volume",
        "bid" : "bid",
        "ask" : "ask"}
    # One dict per tradable symbol; prices in points, swaps in quote currency.
    symbols: List[Dict] = [{
        "name" : "USDJPY",
        "asset_type": AssetType.FOREX,
        "leverage": 100,
        "quote" : "JPY",
        "base" : "USD",
        "digits" : 3,
        "commission" : 7,
        "min_lot" : 0.01,
        "max_lot" : 1,
        "lot_step" : 0.01,
        "lot_size" : 100000,
        "swap_long" : 2.30,
        "swap_short" : 2.75,
        "swap_day" : 2,
        "min_spread" : 1,
        "max_spread" : 10,
        "fixed_spread": 3,
        "spread_mode" : SpreadMode.RANDOM,
        "fixed_pt_value" : 1
    },
    {
        "name" : "EURUSD",
        "asset_type": AssetType.FOREX,
        "leverage": 100,
        "quote" : "USD",
        "base" : "EUR",
        "digits" : 5,
        "commission" : 0,
        "min_lot" : 0.01,
        "max_lot" : 1,
        "lot_step" : 0.01,
        "lot_size" : 100000,
        "swap_long" : 0,
        "swap_short" : 0,
        "swap_day" : 2,
        "min_spread" : 1,
        "max_spread" : 10,
        "fixed_spread": 3,
        "spread_mode" : SpreadMode.IGNORE,
        "fixed_pt_value" : 1
    }]
    # Simulated trading account: starting balance, stop-out level (fraction
    # of margin) and which account fields are tracked.
    account: Dict = {
        "balance": 10000.00,
        "stop_out": 0.5,
        "currency": "USD",
        "fields": ["balance", "equity", "last_pnl", "total_orders", "margin_hold", "margin_free", "max_fl", "max_fp", "max_dd", "win_counts", "loss_count", "break_even"]
    }
    # Environment/observation options (window size, which features feed the agent).
    env: Dict = {
        "window_size": 12,
        "allow_multi_orders": False,
        "obs_price_features": [],
        "obs_price_exclude": ["tf", "symbol", "bid", "ask"],
        #"obs_account_features": ["balance", "equity", "total_orders", "margin_hold", "margin_free", "max_fl", "max_fp", "win_counts", "loss_count", "break_even"]
        "obs_account_features": ["balance", "equity", "win_counts", "loss_count", "break_even"]
    }
| 25.851064
| 169
| 0.488477
| 271
| 2,430
| 4.154982
| 0.409594
| 0.010657
| 0.034636
| 0.047957
| 0.460924
| 0.405861
| 0.333925
| 0.264654
| 0.264654
| 0.264654
| 0
| 0.05397
| 0.336626
| 2,430
| 93
| 170
| 26.129032
| 0.644541
| 0.062963
| 0
| 0.261905
| 0
| 0
| 0.31239
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.02381
| 0
| 0.261905
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
ae4e5f7fe6b5f5c3253e178b1b6eeb60c312745d
| 3,020
|
py
|
Python
|
metaci/release/models.py
|
giveclarity/MetaCI
|
f51bd50acf2e7d5e111f993f4816e5f0a5c5a441
|
[
"BSD-3-Clause"
] | null | null | null |
metaci/release/models.py
|
giveclarity/MetaCI
|
f51bd50acf2e7d5e111f993f4816e5f0a5c5a441
|
[
"BSD-3-Clause"
] | null | null | null |
metaci/release/models.py
|
giveclarity/MetaCI
|
f51bd50acf2e7d5e111f993f4816e5f0a5c5a441
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import datetime
from django.db import models
from django.utils.translation import ugettext_lazy as _
from model_utils import Choices
from model_utils.fields import AutoCreatedField, AutoLastModifiedField
from model_utils.models import StatusModel
from metaci.release.utils import update_release_from_github
class ChangeCaseTemplate(models.Model):
    """A named change-case template identified by an 18-character id."""

    name = models.CharField(_("name"), max_length=255)
    case_template_id = models.CharField(_("case template id"), max_length=18)

    def __str__(self):
        return self.name
class Release(StatusModel):
    """A package release of a repository, unique per (repo, git_tag).

    Inherits a `status` field (draft/published/hidden) from StatusModel.
    """

    def get_sandbox_date():
        # Callable default for date fields: today.
        return datetime.date.today()

    def get_production_date():
        # Callable default: six days from today.
        return datetime.date.today() + datetime.timedelta(days=6)

    STATUS = Choices("draft", "published", "hidden")
    created = AutoCreatedField(_("created"))
    modified = AutoLastModifiedField(_("modified"))
    repo = models.ForeignKey(
        "repository.Repository", on_delete=models.CASCADE, related_name="releases"
    )
    version_name = models.CharField(
        _("version name"), max_length=255, null=True, blank=True
    )
    version_number = models.CharField(
        _("version number"), max_length=255, null=True, blank=True
    )
    # Salesforce package version id (18-character id).
    package_version_id = models.CharField(
        _("package version id"), max_length=18, null=True, blank=True
    )
    git_tag = models.CharField(_("git tag"), max_length=1024, null=True)
    github_release = models.URLField(
        _("github release"), max_length=1024, null=True, blank=True
    )
    trialforce_id = models.CharField(
        _("trialforce template id"), max_length=18, null=True, blank=True
    )
    release_creation_date = models.DateField(
        _("release creation date"),
        null=True,
        blank=True,
        default=get_sandbox_date,
    )
    sandbox_push_date = models.DateField(
        _("sandbox push date"),
        null=True,
        blank=True,
        default=get_sandbox_date,
    )
    production_push_date = models.DateField(
        _("production push date"),
        null=True,
        blank=True,
        default=get_production_date,
    )
    created_from_commit = models.CharField(
        _("created from commit"), max_length=1024, null=True, blank=True
    )
    work_item_link = models.URLField(
        _("work item link"), max_length=1024, null=True, blank=True
    )
    change_case_template = models.ForeignKey(
        "release.ChangeCaseTemplate", on_delete=models.SET_NULL, null=True
    )
    change_case_link = models.URLField(
        _("change case link"), max_length=1024, null=True, blank=True
    )

    class Meta:
        get_latest_by = "created"
        ordering = ["-created"]
        verbose_name = _("release")
        verbose_name_plural = _("releases")
        unique_together = ("repo", "git_tag")

    def __str__(self):
        return f"{self.repo}: {self.version_name}"

    def update_from_github(self):
        """Refresh this release's fields from its GitHub release."""
        update_release_from_github(self)
| 30.816327
| 82
| 0.67351
| 350
| 3,020
| 5.525714
| 0.268571
| 0.053775
| 0.07394
| 0.096691
| 0.244571
| 0.190796
| 0.190796
| 0.129783
| 0.043433
| 0
| 0
| 0.015638
| 0.216556
| 3,020
| 97
| 83
| 31.134021
| 0.801775
| 0.006954
| 0
| 0.125
| 0
| 0
| 0.125792
| 0.015682
| 0
| 0
| 0
| 0
| 0
| 1
| 0.0625
| false
| 0
| 0.1
| 0.05
| 0.4875
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
ae52b0c373a33d43af43b8a92c2a1b20dd0c87e2
| 3,841
|
py
|
Python
|
dgraphpandas/strategies/horizontal.py
|
rohith-bs/dgraphpandas
|
29e91e2e7bb1d5d991ab94709a2d7e27f7dd7316
|
[
"MIT"
] | 1
|
2022-02-28T17:34:11.000Z
|
2022-02-28T17:34:11.000Z
|
dgraphpandas/strategies/horizontal.py
|
rohith-bs/dgraphpandas
|
29e91e2e7bb1d5d991ab94709a2d7e27f7dd7316
|
[
"MIT"
] | null | null | null |
dgraphpandas/strategies/horizontal.py
|
rohith-bs/dgraphpandas
|
29e91e2e7bb1d5d991ab94709a2d7e27f7dd7316
|
[
"MIT"
] | 1
|
2021-04-10T19:57:05.000Z
|
2021-04-10T19:57:05.000Z
|
import logging
from typing import Any, Dict, List, Callable, Union
import pandas as pd
from dgraphpandas.config import get_from_config
from dgraphpandas.strategies.vertical import vertical_transform
logger = logging.getLogger(__name__)
def horizontal_transform(
        frame: Union[str, pd.DataFrame],
        config: Dict[str, Any],
        config_file_key: str,
        **kwargs):
    '''
    Horizontally Transform a Pandas DataFrame into Intrinsic and Edge DataFrames.

    Args:
        frame: A path to a csv file or an already-loaded DataFrame.
        config: Global configuration; must contain a 'files' mapping.
        config_file_key: Key within config['files'] for this file.
        kwargs: Forwarded to config lookups and to vertical_transform.

    Returns:
        Whatever vertical_transform returns for the melted frame.

    Raises:
        ValueError: on missing arguments/config entries, or when the frame
            has no data columns beyond the subject fields.
    '''
    if frame is None:
        raise ValueError('frame')
    if not config:
        raise ValueError('config')
    if not config_file_key:
        raise ValueError('config_file_key')

    file_config: Dict[str, Any] = config['files'][config_file_key]
    type_overrides: Dict[str, str] = get_from_config('type_overrides', file_config, {}, **(kwargs))
    subject_fields: Union[List[str], Callable[..., List[str]]] = get_from_config('subject_fields', file_config, **(kwargs))
    date_fields: Dict[str, str] = get_from_config('date_fields', file_config, {}, **(kwargs))

    if not subject_fields:
        raise ValueError('subject_fields')

    if isinstance(frame, str):
        logger.debug(f'Reading file {frame}')
        read_csv_options: Dict[str, Any] = get_from_config('read_csv_options', file_config, {}, **(kwargs))
        frame = pd.read_csv(frame, **(read_csv_options))

    if frame.shape[1] <= len(subject_fields):
        raise ValueError(f'''
        It looks like there are no data fields.
        The subject_fields are {subject_fields}
        The frame columns are {frame.columns}
        ''')

    # Date fields get special treatment as they can be represented in many
    # different ways from different sources; apply the per-column to_datetime
    # options and make sure the type override reflects the conversion.
    for col, date_format in date_fields.items():
        # (fix: removed redundant `date_format = date_fields[col]` -- the
        # value already comes from .items().)
        logger.debug(f'Converting {col} to datetime: {date_format}')
        frame[col] = pd.to_datetime(frame[col], **(date_format))
        if col not in type_overrides:
            logger.debug(f'Ensuring {col} has datetime64 type')
            type_overrides[col] = 'datetime64'

    # Ensure object values have the correct type according to type_overrides:
    # e.g. pandas may read an integer column as float (10.0) which would melt
    # into the wrong string form for its rdf type.
    logger.debug('Applying Type Overrides %s', type_overrides)
    for col, current_type in type_overrides.items():
        try:
            logger.debug(f'Converting {col} to {current_type}')
            frame[col] = frame[col].astype(current_type)
        except ValueError:
            logger.exception(
                f'''
                Could not convert {col} to {current_type}.
                Please confirm that the values in the {col} series are convertable to {current_type}.
                A common scenario here is when we have NA values but the target type does not support them.
                ''')
            # NOTE(review): exit() in library code kills the caller's process;
            # consider re-raising instead. Kept to preserve behavior.
            exit()

    # Pivot on the subject key: melt the horizontal frame into 3 columns
    # (subject, predicate, object) so it resembles rdf triples.
    logger.debug(f'Melting frame with subject: {subject_fields}')
    frame = frame.melt(
        id_vars=subject_fields,
        var_name='predicate',
        value_name='object')

    return vertical_transform(frame, config, config_file_key, **(kwargs))
| 40.431579
| 123
| 0.667534
| 515
| 3,841
| 4.836893
| 0.341748
| 0.046969
| 0.026094
| 0.019269
| 0.057808
| 0.040145
| 0
| 0
| 0
| 0
| 0
| 0.003775
| 0.241343
| 3,841
| 94
| 124
| 40.861702
| 0.851064
| 0.020047
| 0
| 0.033898
| 0
| 0
| 0.276588
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.016949
| false
| 0
| 0.084746
| 0
| 0.118644
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
ae533f8aecb8c3af4f9e6c1898e9747d30e5e6e5
| 2,675
|
py
|
Python
|
classifier-start/lib/utils.py
|
sharifkaiser/codelabs-edgetpu-image-classifier-detector
|
da01229abec824994776507949adad1939fa45f0
|
[
"Apache-2.0"
] | 4
|
2019-05-13T15:18:36.000Z
|
2021-10-08T22:16:49.000Z
|
classifier-start/lib/utils.py
|
sharifkaiser/codelabs-edgetpu-image-classifier-detector
|
da01229abec824994776507949adad1939fa45f0
|
[
"Apache-2.0"
] | 1
|
2019-06-30T14:43:31.000Z
|
2019-10-25T17:49:52.000Z
|
classifier-start/lib/utils.py
|
sharifkaiser/codelabs-edgetpu-image-classifier-detector
|
da01229abec824994776507949adad1939fa45f0
|
[
"Apache-2.0"
] | 3
|
2019-07-22T15:16:02.000Z
|
2022-03-04T11:51:11.000Z
|
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import re
from .svg import *
# Shared stylesheet: the '.back' class draws a thick black rectangle used as
# a backing strip behind overlay text for readability.
CSS_STYLES = str(CssStyle({'.back': Style(fill='black',
                                          stroke='black',
                                          stroke_width='0.5em')}))
def size_em(length):
    """Convert a character count into an em-based CSS length string (0.6em/char)."""
    return '{}em'.format(0.6 * length)
def overlay(title, results, inference_time, layout):
    """Render an SVG overlay with a title, classification results and timing.

    Args:
        title: heading text drawn top-left (skipped when falsy).
        results: iterable of (label, score) pairs, drawn bottom-right.
        inference_time: inference duration in milliseconds.
        layout: object whose .window is an (x0, y0, width, height) tuple.

    Returns:
        The SVG document serialized as a string.
    """
    x0, y0, width, height = layout.window
    font_size = 0.03 * height
    defs = Defs()
    defs += CSS_STYLES
    doc = Svg(width=width, height=height,
              viewBox='%s %s %s %s' % layout.window,
              font_size=font_size, font_family='monospace', font_weight=500)
    doc += defs
    # Inner margins: 20 units inside each edge (plus one line at the top).
    ox1, ox2 = x0 + 20, x0 + width - 20
    oy1, oy2 = y0 + 20 + font_size, y0 + height - 20
    # Classes: one line per (label, score) pair, stacked upward from the
    # bottom-right corner, each with a black backing rectangle.
    lines = ['%s (%.2f)' % pair for pair in results]
    for i, line in enumerate(lines):
        y = oy2 - i * 1.7 * font_size
        doc += Rect(x=0, y=0, width=size_em(len(line)), height='1em',
                    transform='translate(%s, %s) scale(-1,-1)' % (ox2, y),
                    _class='back')
        doc += Text(line, text_anchor='end', x=ox2, y=y, fill='white')
    # Title (top-left)
    if title:
        doc += Rect(x=0, y=0, width=size_em(len(title)), height='1em',
                    transform='translate(%s, %s) scale(1,-1)' % (ox1, oy1), _class='back')
        doc += Text(title, x=ox1, y=oy1, fill='white')
    # Info (bottom-left): inference time in ms and the derived fps.
    lines = [
        'Inference time: %.2f ms (%.2f fps)' % (inference_time, 1000.0 / inference_time)
    ]
    for i, line in enumerate(reversed(lines)):
        y = oy2 - i * 1.7 * font_size
        doc += Rect(x=0, y=0, width=size_em(len(line)), height='1em',
                    transform='translate(%s, %s) scale(1,-1)' % (ox1, y), _class='back')
        doc += Text(line, x=ox1, y=y, fill='white')
    return str(doc)
# Matches "<index> <label text>" lines (leading whitespace tolerated).
LABEL_PATTERN = re.compile(r'\s*(\d+)(.+)')


def load_labels(path):
    """Parse a label file of '<index> <text>' lines into {index: text}.

    Robustness fix: lines that do not match LABEL_PATTERN (e.g. blank lines)
    are skipped instead of raising AttributeError on the failed match.
    Also iterates the file lazily instead of calling readlines().
    """
    with open(path, 'r', encoding='utf-8') as f:
        matches = (LABEL_PATTERN.match(line) for line in f)
        return {int(m.group(1)): m.group(2).strip() for m in matches if m}
| 34.74026
| 94
| 0.575327
| 382
| 2,675
| 3.963351
| 0.410995
| 0.03963
| 0.015852
| 0.017834
| 0.209379
| 0.18428
| 0.156539
| 0.156539
| 0.156539
| 0.156539
| 0
| 0.038282
| 0.277383
| 2,675
| 76
| 95
| 35.197368
| 0.744956
| 0.211963
| 0
| 0.093023
| 0
| 0
| 0.110952
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.069767
| false
| 0
| 0.046512
| 0.023256
| 0.186047
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
ae5506b0817f6bd6ba613b44f0005304d3cd1c5d
| 932
|
py
|
Python
|
example.py
|
rivasd/activiewBCI
|
b2278ebacc733e328f28d308146108a52d3deb78
|
[
"MIT"
] | 1
|
2020-09-10T08:04:06.000Z
|
2020-09-10T08:04:06.000Z
|
example.py
|
rivasd/activiewBCI
|
b2278ebacc733e328f28d308146108a52d3deb78
|
[
"MIT"
] | null | null | null |
example.py
|
rivasd/activiewBCI
|
b2278ebacc733e328f28d308146108a52d3deb78
|
[
"MIT"
] | null | null | null |
from ActiView import ActiveTwo
import pyqtgraph as pg
from pyqtgraph.Qt import QtCore, QtGui
import numpy as np

# GUI setup: one plot window mimicking ActiView's EEG monitoring screen.
app = QtGui.QApplication([])
win = pg.GraphicsWindow()
win.setWindowTitle("Mimicking ActiView's EEG monitoring screen")
monitor = win.addPlot()

# we have so many curves that we will store them in an array
# (one curve per EEG channel; 64 channels total)
curves = [monitor.plot() for x in range(64)]

# this is the data that will be continuously updated and plotted
rawdata = np.empty((64,0))

# initialize connection with ActiView
actiview = ActiveTwo()


def update():
    # Pull the newest samples from ActiView, append them column-wise to the
    # buffer, then refresh every channel's curve.
    # NOTE(review): rawdata grows without bound while the window is open --
    # memory use increases indefinitely.
    global rawdata
    data = actiview.read()
    rawdata = np.concatenate((rawdata, data), axis=1)
    for i in range(64):
        curves[i].setData(rawdata[i])


# Drive update() as fast as the event loop allows (interval 0 ms).
timer = pg.QtCore.QTimer()
timer.timeout.connect(update)
timer.start(0)

if __name__ == '__main__':
    import sys
    # Only start the Qt event loop when not running inside an interactive shell.
    if sys.flags.interactive != 1 or not hasattr(pg.QtCore, 'PYQT_VERSION'):
        pg.QtGui.QApplication.exec_()
| 23.897436
| 76
| 0.714592
| 134
| 932
| 4.895522
| 0.61194
| 0.051829
| 0.027439
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.013021
| 0.175966
| 932
| 39
| 77
| 23.897436
| 0.841146
| 0.166309
| 0
| 0
| 0
| 0
| 0.08
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.041667
| false
| 0
| 0.208333
| 0
| 0.25
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
ae5b1b9181972edef32d0c181d78511358cde1b1
| 2,671
|
py
|
Python
|
8_random_walker_segmentation_scikit-image.py
|
Data-Laboratory/WorkExamples
|
27e58207e664da7813673e6792c0c30c0a5bf74c
|
[
"MIT"
] | 1
|
2021-12-15T22:27:27.000Z
|
2021-12-15T22:27:27.000Z
|
8_random_walker_segmentation_scikit-image.py
|
Data-Laboratory/WorkExamples
|
27e58207e664da7813673e6792c0c30c0a5bf74c
|
[
"MIT"
] | null | null | null |
8_random_walker_segmentation_scikit-image.py
|
Data-Laboratory/WorkExamples
|
27e58207e664da7813673e6792c0c30c0a5bf74c
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
__author__ = "Sreenivas Bhattiprolu"
__license__ = "Feel free to copy, I appreciate if you acknowledge Python for Microscopists"
# https://www.youtube.com/watch?v=6P8YhJa2V6o
"""
Using Random walker to generate lables and then segment and finally cleanup using closing operation.
"""
import matplotlib.pyplot as plt
from skimage import io, img_as_float
import numpy as np
img = img_as_float(io.imread("images/Alloy_noisy.jpg"))
#plt.hist(img.flat, bins=100, range=(0, 1))
# Very noisy image so histogram looks horrible. Let us denoise and see if it helps.
from skimage.restoration import denoise_nl_means, estimate_sigma
sigma_est = np.mean(estimate_sigma(img, multichannel=True))
denoise_img = denoise_nl_means(img, h=1.15 * sigma_est, fast_mode=True,
patch_size=5, patch_distance=3, multichannel=True)
#plt.hist(denoise_img.flat, bins=100, range=(0, 1))
# Much better histogram and now we can see two separate peaks.
#Still close enough so cannot use histogram based segmentation.
#Let us see if we can get any better by some preprocessing.
#Let's try histogram equalization
from skimage import exposure #Contains functions for hist. equalization
#eq_img = exposure.equalize_hist(denoise_img)
eq_img = exposure.equalize_adapthist(denoise_img)
#plt.imshow(eq_img, cmap='gray')
#plt.hist(denoise_img.flat, bins=100, range=(0., 1))
#Not any better. Let us stretch the hoistogram between 0.7 and 0.95
# The range of the binary image spans over (0, 1).
# For markers, let us include all between each peak.
markers = np.zeros(img.shape, dtype=np.uint)
markers[(eq_img < 0.8) & (eq_img > 0.7)] = 1
markers[(eq_img > 0.85) & (eq_img < 0.99)] = 2
from skimage.segmentation import random_walker
# Run random walker algorithm
# https://scikit-image.org/docs/dev/api/skimage.segmentation.html#skimage.segmentation.random_walker
labels = random_walker(eq_img, markers, beta=10, mode='bf')
plt.imsave("images/markers.jpg", markers)
segm1 = (labels == 1)
segm2 = (labels == 2)
all_segments = np.zeros((eq_img.shape[0], eq_img.shape[1], 3)) #nothing but denoise img size but blank
all_segments[segm1] = (1,0,0)
all_segments[segm2] = (0,1,0)
#plt.imshow(all_segments)
from scipy import ndimage as nd
segm1_closed = nd.binary_closing(segm1, np.ones((3,3)))
segm2_closed = nd.binary_closing(segm2, np.ones((3,3)))
all_segments_cleaned = np.zeros((eq_img.shape[0], eq_img.shape[1], 3))
all_segments_cleaned[segm1_closed] = (1,0,0)
all_segments_cleaned[segm2_closed] = (0,1,0)
plt.imshow(all_segments_cleaned)
plt.imsave("images/random_walker.jpg", all_segments_cleaned)
| 31.797619
| 102
| 0.7383
| 431
| 2,671
| 4.419954
| 0.401392
| 0.031496
| 0.047244
| 0.022047
| 0.11811
| 0.103412
| 0.103412
| 0.068241
| 0.068241
| 0.068241
| 0
| 0.034543
| 0.143766
| 2,671
| 83
| 103
| 32.180723
| 0.798426
| 0.365032
| 0
| 0
| 0
| 0
| 0.103846
| 0.029487
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.225806
| 0
| 0.225806
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
ae5dba7efd27593d74b0d517709967bd1f8e2e4a
| 3,090
|
py
|
Python
|
dockend/dockend.py
|
ChrisVidal10/dockend
|
8904e1d017fcc1767d8593190df537a750a50b4c
|
[
"MIT"
] | null | null | null |
dockend/dockend.py
|
ChrisVidal10/dockend
|
8904e1d017fcc1767d8593190df537a750a50b4c
|
[
"MIT"
] | 1
|
2018-06-25T23:38:09.000Z
|
2018-06-25T23:38:09.000Z
|
dockend/dockend.py
|
ChrisVidal10/dockend
|
8904e1d017fcc1767d8593190df537a750a50b4c
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
from termcolor import cprint
import argparse
import docker
DOCKER_CLIENT = docker.from_env()
def main():
    """Entry point: stop the inactive backend's containers and start the
    requested one ('byr' or 'dla'), matching containers by name substring."""
    try:
        not_found_for_stop = False
        not_found_for_start = False
        ARGS = parser_arguments()
        # Selecting 'dla' means stopping the 'byr'-named containers and
        # starting the 'dev'-named ones; any other choice does the reverse.
        k_name_stop = 'byr' if ARGS.service == 'dla' else 'dev'
        k_name_start = 'dev' if ARGS.service == 'dla' else 'byr'
        containers_for_stop = docker_containers_list(k_name_stop)
        containers_for_start = docker_containers_list(k_name_start)
        if containers_for_stop:
            stop_containers(containers_for_stop, k_name_stop)
        else:
            cprint("WARNING! Active containers for stop not found", 'yellow')
            not_found_for_stop = True
        if containers_for_start:
            start_containers(containers_for_start, k_name_start)
        else:
            cprint("WARNING! Active containers for start not found", 'yellow')
            not_found_for_start = True
        if not not_found_for_start:
            cprint("DONE! Happy Coding", "white", "on_green")
        # Neither side found anything: the containers probably were never built.
        if not_found_for_start and not_found_for_stop:
            cprint(
                "STOP! Maybe you have problems with the containers. e.g. Containers not build", "white", "on_red")
    except Exception:
        # Most likely cause: the docker daemon is unreachable.
        cprint("ERROR! Docker is off or not installed", "white", "on_red")
        exit(1)
def start_containers(container_lists, k_name):
    """Start every container in *container_lists*, reporting progress; exits
    the process with status 1 on any failure."""
    try:
        cprint(f"Start containers {k_name}...", 'yellow')
        for container in container_lists:
            container.start()
        cprint(f"OK containers {k_name} up!", 'green')
    except Exception as exc:
        cprint(f"Error when starting the process (container starting process): {exc}",
               'white', 'on_red')
        exit(1)
def stop_containers(container_lists, k_name):
    """Stop every container in *container_lists*, reporting progress; returns
    True on success, exits the process with status 1 on any failure."""
    try:
        cprint(f"Stop containers {k_name}...", 'yellow')
        for container in container_lists:
            container.stop()
        cprint(f"OK containers {k_name} down!", 'green')
        return True
    except Exception as exc:
        cprint(f"Error when starting the process (container stopping process): {exc}",
               'white', 'on_red')
        exit(1)
def docker_containers_list(key_name):
    """Return all containers (running or not) whose name contains *key_name*.

    Logs and re-raises on failure so the caller's handler decides what to do.
    """
    try:
        return DOCKER_CLIENT.containers.list(filters={'name': key_name}, all=True)
    except Exception as exc:
        cprint("Error getting the list: {}".format(exc), 'red')
        raise exc
def parser_arguments():
    """Build the command-line parser and return the parsed arguments.

    Accepts one positional argument `service` ('byr' or 'dla') and a
    -V/--version flag.
    """
    arg_parser = argparse.ArgumentParser(
        description='Tool for change backend services and process in docker environment (BYR-Microservicios/API-Integrada)')
    arg_parser.add_argument(
        '-V', '--version',
        action='version',
        version='%(prog)s {version}'.format(version='0.2.1'))
    arg_parser.add_argument(
        'service',
        choices=['byr', 'dla'],
        type=str,
        help='backend type')
    return arg_parser.parse_args()
# Script entry point.
if __name__ == '__main__':
    main()
| 35.517241
| 124
| 0.612621
| 373
| 3,090
| 4.839142
| 0.286863
| 0.033241
| 0.042659
| 0.035457
| 0.357341
| 0.307479
| 0.229917
| 0.166205
| 0.166205
| 0.128532
| 0
| 0.002691
| 0.278317
| 3,090
| 86
| 125
| 35.930233
| 0.806726
| 0.006472
| 0
| 0.219178
| 0
| 0
| 0.245031
| 0.011079
| 0
| 0
| 0
| 0
| 0
| 1
| 0.068493
| false
| 0
| 0.041096
| 0
| 0.150685
| 0.178082
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
ae616a523c7cfa0788d9038fa4b59abb5b2c597c
| 625
|
py
|
Python
|
exp_figure/figure_3(grey).py
|
qqxx6661/LDSM
|
b2be6fdfdac00fc4a469a72b3a10686fa0f4bd80
|
[
"MIT"
] | 4
|
2019-06-04T06:19:01.000Z
|
2021-04-16T15:50:30.000Z
|
exp_figure/figure_3(grey).py
|
qqxx6661/LDSM
|
b2be6fdfdac00fc4a469a72b3a10686fa0f4bd80
|
[
"MIT"
] | 1
|
2019-09-10T10:33:18.000Z
|
2021-02-08T14:51:39.000Z
|
exp_figure/figure_3(grey).py
|
qqxx6661/LDSM
|
b2be6fdfdac00fc4a469a72b3a10686fa0f4bd80
|
[
"MIT"
] | 2
|
2019-06-04T06:19:08.000Z
|
2021-09-06T07:30:44.000Z
|
import random
import matplotlib.pyplot as plt
import numpy as np

# Create two lines in one figure
fig = plt.figure(figsize=(10, 6))
ax1 = fig.add_subplot(1, 1, 1)
ax1.set_xlabel('Frame', fontsize=18)
ax1.set_ylabel('Overall Time Cost (s)', fontsize=18)
x = range(180)

# Synthesize per-frame timing data: the 1-cam scenario around 0.30-0.32 s,
# the 8-cam scenario around 0.36-0.38 s.
y1 = []
y2 = []
for i in range(180):
    y1.append(random.uniform(0.30, 0.32))
    y2.append(random.uniform(0.36, 0.38))
print(y1)
print(y2)

ax1.plot(x, y1,linestyle=':',marker='o', label="1-cam scenario")
ax1.plot(x, y2,marker='>', label="8-cam scenario")
plt.xticks((0, 30, 60, 90, 120, 150, 180), fontsize=16)
plt.yticks(fontsize=18)
plt.legend(fontsize=12)
plt.show()
| 23.148148
| 64
| 0.68
| 111
| 625
| 3.801802
| 0.558559
| 0.07109
| 0.047393
| 0.094787
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.11883
| 0.1248
| 625
| 26
| 65
| 24.038462
| 0.652651
| 0.0176
| 0
| 0
| 0
| 0
| 0.093137
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.142857
| 0
| 0.142857
| 0.095238
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
ae6410131f36b3418762c4c860f8c13f5bed9bd8
| 1,067
|
py
|
Python
|
build/lib/flaskr/__init__.py
|
LayneWei/NLP-medical-information-extraction
|
1657d956afd3a2c476da28e3e8a4f1c4ce4bdc4b
|
[
"MIT"
] | null | null | null |
build/lib/flaskr/__init__.py
|
LayneWei/NLP-medical-information-extraction
|
1657d956afd3a2c476da28e3e8a4f1c4ce4bdc4b
|
[
"MIT"
] | null | null | null |
build/lib/flaskr/__init__.py
|
LayneWei/NLP-medical-information-extraction
|
1657d956afd3a2c476da28e3e8a4f1c4ce4bdc4b
|
[
"MIT"
] | null | null | null |
import os
from flask import Flask
#import SQLAlchemy
from flaskr import db
def clear_data(session):
    """Delete every row from all tables known to the db metadata.

    Tables are emptied in reverse creation order so that rows that
    reference other tables are removed before their targets.
    """
    tables_reverse_order = reversed(db.metadata.sorted_tables)
    for table in tables_reverse_order:
        print('Clear table %s' % table)
        session.execute(table.delete())
        session.commit()
def create_app(test_config=None):
    """Application factory: build and configure the Flask app.

    Parameters
    ----------
    test_config : dict, optional
        When given, overrides the instance configuration (used by tests).

    Returns
    -------
    Flask
        The fully configured application instance.
    """
    # create and configure the app
    app = Flask(__name__, instance_relative_config=True)
    app.config.from_mapping(
        # NOTE(review): 'dev' is a placeholder secret key; it should be
        # overridden by instance config in production.
        SECRET_KEY='dev',
        DATABASE=os.path.join(app.instance_path, 'flaskr.sqlite'),
    )
    if test_config is None:
        # load the instance config, if it exists, when not testing
        app.config.from_pyfile('config.py', silent=True)
    else:
        # load the test config if passed in
        app.config.from_mapping(test_config)
    # ensure the instance folder exists
    try:
        os.makedirs(app.instance_path)
    except OSError:
        # Folder already exists (or cannot be created) -- continue anyway.
        pass
    # Register the database lifecycle hooks on the app.
    from . import db
    db.init_app(app)
    # Register the note blueprint; '/' is aliased to its index endpoint.
    from . import note
    app.register_blueprint(note.bp)
    app.add_url_rule('/', endpoint='index')
    return app
| 23.711111
| 66
| 0.66448
| 145
| 1,067
| 4.737931
| 0.517241
| 0.058224
| 0.056769
| 0.058224
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.242737
| 1,067
| 45
| 67
| 23.711111
| 0.850248
| 0.160262
| 0
| 0
| 0
| 0
| 0.050448
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.068966
| false
| 0.034483
| 0.172414
| 0
| 0.275862
| 0.068966
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
ae689f1c1175daa6fc473f2cb48f19de2559deff
| 830
|
py
|
Python
|
EvaMap/Metrics/sameAs.py
|
benjimor/EvaMap
|
42e616abe9f15925b885797d30496e30615989a0
|
[
"MIT"
] | 1
|
2021-01-29T18:53:26.000Z
|
2021-01-29T18:53:26.000Z
|
EvaMap/Metrics/sameAs.py
|
benjimor/EvaMap
|
42e616abe9f15925b885797d30496e30615989a0
|
[
"MIT"
] | 1
|
2021-06-06T17:56:00.000Z
|
2021-06-06T17:56:00.000Z
|
EvaMap/Metrics/sameAs.py
|
benjimor/EvaMap
|
42e616abe9f15925b885797d30496e30615989a0
|
[
"MIT"
] | null | null | null |
import rdflib
import requests
from EvaMap.Metrics.metric import metric
def sameAs(g_onto, liste_map, g_map, raw_data, g_link):
    """Score the mapping graph on its use of owl:sameAs properties.

    The score is the number of owl:sameAs statements found on URI
    subjects of ``g_map``, divided by the number of distinct URI
    subjects.  Only ``g_map`` is inspected; the remaining arguments are
    part of the common metric signature.
    """
    result = metric()
    result['name'] = "Use of sameAs properties"
    same_as_predicate = rdflib.term.URIRef('http://www.w3.org/2002/07/owl#sameAs')

    # Distinct URI subjects occurring in the mapping graph.
    uri_subjects = {s for s, _, _ in g_map.triples((None, None, None))
                    if isinstance(s, rdflib.term.URIRef)}

    nb_candidates = 0
    nb_links = 0
    for subject in uri_subjects:
        nb_candidates += 1
        for _ in g_map.triples((subject, same_as_predicate, None)):
            nb_links += 1

    if nb_links < 1:
        result['score'] = 0
        result['feedbacks'].append("No sameAs defined")
    else:
        result['score'] = 0
        if nb_candidates != 0:
            result['score'] = nb_links / nb_candidates
    return result
| 31.923077
| 112
| 0.595181
| 110
| 830
| 4.354545
| 0.472727
| 0.025052
| 0.025052
| 0.05428
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.025
| 0.277108
| 830
| 26
| 113
| 31.923077
| 0.773333
| 0
| 0
| 0.083333
| 0
| 0
| 0.126354
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.041667
| false
| 0
| 0.125
| 0
| 0.208333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
ae69c8b77055ca55392fe8a19a30b6175954dde3
| 5,716
|
py
|
Python
|
networkapi/api_route_map/v4/serializers.py
|
vinicius-marinho/GloboNetworkAPI
|
94651d3b4dd180769bc40ec966814f3427ccfb5b
|
[
"Apache-2.0"
] | 73
|
2015-04-13T17:56:11.000Z
|
2022-03-24T06:13:07.000Z
|
networkapi/api_route_map/v4/serializers.py
|
leopoldomauricio/GloboNetworkAPI
|
3b5b2e336d9eb53b2c113977bfe466b23a50aa29
|
[
"Apache-2.0"
] | 99
|
2015-04-03T01:04:46.000Z
|
2021-10-03T23:24:48.000Z
|
networkapi/api_route_map/v4/serializers.py
|
shildenbrand/GloboNetworkAPI
|
515d5e961456cee657c08c275faa1b69b7452719
|
[
"Apache-2.0"
] | 64
|
2015-08-05T21:26:29.000Z
|
2022-03-22T01:06:28.000Z
|
# -*- coding: utf-8 -*-
import logging
from django.db.models import get_model
from rest_framework import serializers
from networkapi.util.geral import get_app
from networkapi.util.serializers import DynamicFieldsModelSerializer
log = logging.getLogger(__name__)
class RouteMapV4Serializer(DynamicFieldsModelSerializer):
    """Serializer for RouteMap objects (v4 API).

    ``route_map_entries`` and ``peer_groups`` are resolved through
    ``extends_serializer`` so callers may request id-only, ``basic`` or
    ``details`` representations of the related objects.
    """

    route_map_entries = serializers. \
        SerializerMethodField('get_route_map_entries')
    peer_groups = serializers. \
        SerializerMethodField('get_peer_groups')

    class Meta:
        # Model is looked up dynamically (avoids circular imports).
        RouteMap = get_model('api_route_map', 'RouteMap')
        model = RouteMap
        fields = (
            'id',
            'name',
            'route_map_entries',
            'peer_groups'
        )
        basic_fields = (
            'id',
            'name',
        )
        default_fields = fields
        details_fields = fields

    def get_route_map_entries(self, obj):
        # Delegates to the mapping configured in get_serializers().
        return self.extends_serializer(obj, 'route_map_entries')

    def get_peer_groups(self, obj):
        # Delegates to the mapping configured in get_serializers().
        return self.extends_serializer(obj, 'peer_groups')

    def get_serializers(self):
        """Populate ``self.mapping`` with the serializer used for each
        kind (id-only / basic / details) of the related fields."""
        routemap_slzs = get_app('api_route_map',
                                module_label='v4.serializers')
        peergroup_slzs = get_app('api_peer_group',
                                 module_label='v4.serializers')
        if not self.mapping:
            self.mapping = {
                # Bare key: serialize only the related object ids.
                'route_map_entries': {
                    'obj': 'route_map_entries_id',
                },
                'route_map_entries__basic': {
                    'serializer': routemap_slzs.RouteMapEntryV4Serializer,
                    'kwargs': {
                        'kind': 'basic',
                        'many': True
                    },
                    'obj': 'route_map_entries'
                },
                'route_map_entries__details': {
                    'serializer': routemap_slzs.RouteMapEntryV4Serializer,
                    'kwargs': {
                        'kind': 'details',
                        'many': True
                    },
                    'obj': 'route_map_entries'
                },
                'peer_groups': {
                    'obj': 'peer_groups_id',
                },
                'peer_groups__basic': {
                    'serializer': peergroup_slzs.PeerGroupV4Serializer,
                    'kwargs': {
                        'kind': 'basic',
                        'many': True
                    },
                    'obj': 'peer_groups'
                },
                'peer_groups__details': {
                    'serializer': peergroup_slzs.PeerGroupV4Serializer,
                    'kwargs': {
                        'kind': 'details',
                        'many': True
                    },
                    'obj': 'peer_groups'
                }
            }
class RouteMapEntryV4Serializer(DynamicFieldsModelSerializer):
    """Serializer for RouteMapEntry objects (v4 API).

    ``list_config_bgp`` and ``route_map`` are resolved through
    ``extends_serializer``; the ``prohibited`` kwargs below break the
    mutual recursion with the related serializers.
    """

    list_config_bgp = serializers.SerializerMethodField('get_list_config_bgp')
    route_map = serializers.SerializerMethodField('get_route_map')

    class Meta:
        # Model is looked up dynamically (avoids circular imports).
        RouteMapEntry = get_model('api_route_map', 'RouteMapEntry')
        model = RouteMapEntry
        fields = (
            'id',
            'action',
            'action_reconfig',
            'order',
            'list_config_bgp',
            'route_map'
        )
        basic_fields = (
            'id',
            'action',
            'action_reconfig',
            'order'
        )
        default_fields = fields
        details_fields = fields

    def get_list_config_bgp(self, obj):
        # Delegates to the mapping configured in get_serializers().
        return self.extends_serializer(obj, 'list_config_bgp')

    def get_route_map(self, obj):
        # Delegates to the mapping configured in get_serializers().
        return self.extends_serializer(obj, 'route_map')

    def get_serializers(self):
        """Populate ``self.mapping`` with the serializer used for each
        kind (id-only / basic / details) of the related fields."""
        lcb_slzs = get_app('api_list_config_bgp',
                           module_label='v4.serializers')
        if not self.mapping:
            self.mapping = {
                # Bare key: serialize only the related object id.
                'list_config_bgp': {
                    'obj': 'list_config_bgp_id',
                },
                'list_config_bgp__basic': {
                    'serializer': lcb_slzs.ListConfigBGPV4Serializer,
                    'kwargs': {
                        'kind': 'basic',
                        # Prevent infinite recursion back into entries.
                        'prohibited': (
                            'route_map_entries__basic',
                        )
                    },
                    'obj': 'list_config_bgp'
                },
                'list_config_bgp__details': {
                    'serializer': lcb_slzs.ListConfigBGPV4Serializer,
                    'kwargs': {
                        'kind': 'details',
                        'prohibited': (
                            'route_map_entries__details',
                        )
                    },
                    'obj': 'list_config_bgp'
                },
                'route_map': {
                    'obj': 'route_map_id',
                },
                'route_map__basic': {
                    'serializer': RouteMapV4Serializer,
                    'kwargs': {
                        'kind': 'basic',
                        'prohibited': (
                            'route_map_entries__basic',
                        )
                    },
                    'obj': 'route_map'
                },
                'route_map__details': {
                    'serializer': RouteMapV4Serializer,
                    'kwargs': {
                        'kind': 'details',
                        'prohibited': (
                            'route_map_entries__details',
                        )
                    },
                    'obj': 'route_map'
                }
            }
| 31.065217
| 78
| 0.448216
| 410
| 5,716
| 5.860976
| 0.168293
| 0.096546
| 0.093633
| 0.028298
| 0.533916
| 0.426134
| 0.230545
| 0.19975
| 0.163129
| 0.042447
| 0
| 0.004525
| 0.458712
| 5,716
| 183
| 79
| 31.234973
| 0.77214
| 0.003674
| 0
| 0.474026
| 0
| 0
| 0.204286
| 0.038117
| 0
| 0
| 0
| 0
| 0
| 1
| 0.038961
| false
| 0
| 0.032468
| 0.025974
| 0.149351
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
ae6dc1d38a589fb6dfb638a55ee82b80c824df9d
| 10,329
|
py
|
Python
|
actions/line_mmc.py
|
fmariv/udt-qgis-plugin
|
20cbf8889f2a2448d982c7057a4cfbe37d90d78b
|
[
"MIT"
] | null | null | null |
actions/line_mmc.py
|
fmariv/udt-qgis-plugin
|
20cbf8889f2a2448d982c7057a4cfbe37d90d78b
|
[
"MIT"
] | 2
|
2021-09-02T07:22:24.000Z
|
2021-09-22T05:31:45.000Z
|
actions/line_mmc.py
|
fmariv/udt-qgis-plugin
|
20cbf8889f2a2448d982c7057a4cfbe37d90d78b
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
/***************************************************************************
UDTPlugin
In this file is where the LineMMC class is defined. The main function
of this class is to run the automation process that exports the geometries
and generates the metadata of a municipal line.
***************************************************************************/
"""
import os
import numpy as np
from PyQt5.QtCore import QVariant
from qgis.core import (QgsVectorLayer,
QgsCoordinateReferenceSystem,
QgsVectorFileWriter,
QgsMessageLog,
QgsField,
QgsProject)
from ..config import *
from .adt_postgis_connection import PgADTConnection
from ..utils import *
# TODO in progress...
class LineMMC(object):
    """ Line MMC Generation class

    Exports the geometries (points and line segments) of a municipal
    line from the ADT PostGIS database into the work directory and
    drives the generation of the MMC output layers.
    """

    def __init__(self, line_id):
        # Municipal line identifier used to filter the ADT layers.
        self.line_id = line_id
        # Output CRS: ETRS89 / UTM zone 31N.
        self.crs = QgsCoordinateReferenceSystem("EPSG:25831")
        # ADT PostGIS connection
        self.pg_adt = PgADTConnection(HOST, DBNAME, USER, PWD, SCHEMA)
        self.pg_adt.connect()
        # Layers
        self.work_points_layer, self.work_lines_layer = None, None

    def check_line_exists(self):
        """ Return (exists_in_points_layer, exists_in_lines_layer) for this line. """
        line_exists_points_layer = self.check_line_exists_points_layer()
        line_exists_lines_layer = self.check_line_exists_lines_layer()
        return line_exists_points_layer, line_exists_lines_layer

    def check_line_exists_points_layer(self):
        """ True when v_fita_mem has at least one point for this line. """
        fita_mem_layer = self.pg_adt.get_layer('v_fita_mem', 'id_fita')
        fita_mem_layer.selectByExpression(f'"id_linia"=\'{int(self.line_id)}\'', QgsVectorLayer.SetSelection)
        selected_count = fita_mem_layer.selectedFeatureCount()
        if selected_count > 0:
            return True
        else:
            return False

    def check_line_exists_lines_layer(self):
        """ True when v_tram_linia_mem has at least one segment for this line. """
        line_mem_layer = self.pg_adt.get_layer('v_tram_linia_mem', 'id_tram_linia')
        line_mem_layer.selectByExpression(f'"id_linia"=\'{int(self.line_id)}\'', QgsVectorLayer.SetSelection)
        selected_count = line_mem_layer.selectedFeatureCount()
        if selected_count > 0:
            return True
        else:
            return False

    def generate_line_data(self):
        """ Run the full generation pipeline: copy data, then build layers. """
        # ########################
        # SET DATA
        # Copy data to work directory
        self.copy_data_to_work()
        # Set the layers paths
        self.work_points_layer, self.work_lines_layer = self.set_layers_paths()
        # ########################
        # GENERATION PROCESS
        line_mmc_points = LineMMCPoints(self.line_id, self.work_points_layer)
        line_mmc_points.generate_points_layer()
        line_mmc_lines = LineMMCLines(self.line_id, self.work_lines_layer)
        line_mmc_lines.generate_lines_layer()
        # TODO metadata
        ##########################
        # DATA EXPORTING
        # Make the output directories if they don't exist
        # TODO export: determine the names of the output files
        
    def copy_data_to_work(self):
        """ Export this line's point and segment features as shapefiles
        into the work directory. """
        # Points layer
        fita_mem_layer = self.pg_adt.get_layer('v_fita_mem', 'id_fita')
        fita_mem_layer.selectByExpression(f'"id_linia"=\'{self.line_id}\'', QgsVectorLayer.SetSelection)
        # Lines layer
        line_mem_layer = self.pg_adt.get_layer('v_tram_linia_mem', 'id_tram_linia')
        line_mem_layer.selectByExpression(f'"id_linia"=\'{self.line_id}\'', QgsVectorLayer.SetSelection)
        # Export layers to the work space (selected features only: last arg True).
        QgsVectorFileWriter.writeAsVectorFormat(fita_mem_layer, os.path.join(LINIA_WORK_DIR, f'fites_{self.line_id}.shp'),
                                                'utf-8', self.crs, 'ESRI Shapefile', True)
        QgsVectorFileWriter.writeAsVectorFormat(line_mem_layer, os.path.join(LINIA_WORK_DIR, f'tram_linia_{self.line_id}.shp'),
                                                'utf-8', self.crs, 'ESRI Shapefile', True)
        # TODO: exported without projection -- .prj handling pending

    def set_layers_paths(self):
        """ Return the (points_layer, lines_layer) opened from the work directory. """
        work_points_layer = QgsVectorLayer(os.path.join(LINIA_WORK_DIR, f'fites_{self.line_id}.shp'))
        work_lines_layer = QgsVectorLayer(os.path.join(LINIA_WORK_DIR, f'tram_linia_{self.line_id}.shp'))
        return work_points_layer, work_lines_layer
class LineMMCPoints(LineMMC):
    """Builds the MMC points (fites) layer for one municipal line."""

    def __init__(self, line_id, points_layer):
        LineMMC.__init__(self, line_id)
        self.work_points_layer = points_layer

    def generate_points_layer(self):
        """ Add the MMC fields, fill them, then drop the source fields. """
        self.add_fields()
        self.fill_fields()
        self.delete_fields()

    def add_fields(self):
        """ Append the MMC attribute schema to the working points layer. """
        # Set new fields
        id_u_fita_field = QgsField(name='IdUfita', type=QVariant.String, typeName='text', len=10)
        id_fita_field = QgsField(name='IdFita', type=QVariant.String, typeName='text', len=18)
        id_sector_field = QgsField(name='IdSector', type=QVariant.String, typeName='text', len=1)
        id_fita_r_field = QgsField(name='IdFitaR', type=QVariant.String, typeName='text', len=3)
        num_termes_field = QgsField(name='NumTermes', type=QVariant.String, typeName='text', len=3)
        monument_field = QgsField(name='Monument', type=QVariant.String, typeName='text', len=1)
        id_linia_field, valid_de_field, valid_a_field, data_alta_field, data_baixa_field = get_common_fields()
        new_fields_list = [id_u_fita_field, id_fita_field, id_sector_field, id_fita_r_field, num_termes_field,
                           monument_field, id_linia_field]
        self.work_points_layer.dataProvider().addAttributes(new_fields_list)
        self.work_points_layer.updateFields()

    def fill_fields(self):
        """ Populate the MMC fields from the source attributes. """
        self.work_points_layer.startEditing()
        for point in self.work_points_layer.getFeatures():
            point_id_fita = coordinates_to_id_fita(point['point_x'], point['point_y'])
            point_r_fita = point_num_to_text(point['num_fita'])
            # NOTE(review): field was created as 'IdUfita' but is written
            # as 'IdUFita' -- presumably relies on QGIS case-insensitive
            # field lookup; confirm and unify the casing.
            point['IdUFita'] = point['id_u_fita'][:-2]
            point['IdFita'] = point_id_fita
            point['IdFitaR'] = point_r_fita
            point['IdSector'] = point['num_sector']
            point['NumTermes'] = point['num_termes']
            point['IdLinia'] = int(point['id_linia'])
            # TODO does it use "Valid de" or "Data alta"? Ask Cesc
            if point['trobada'] == 1:
                point['Monument'] = 'S'
            else:
                point['Monument'] = 'N'
            self.work_points_layer.updateFeature(point)
        self.work_points_layer.commitChanges()

    def delete_fields(self):
        """ Remove the original source attributes, keeping only the MMC ones.

        Assumes the exported source layer carries exactly 31 leading
        fields -- TODO confirm against v_fita_mem's schema.
        """
        delete_fields_list = list([*range(0, 31)])
        self.work_points_layer.dataProvider().deleteAttributes(delete_fields_list)
        self.work_points_layer.updateFields()
class LineMMCLines(LineMMC):
    """Builds the MMC line-segments layer for one municipal line."""

    def __init__(self, line_id, lines_layer):
        LineMMC.__init__(self, line_id)
        self.work_lines_layer = lines_layer
        # Structured array with per-line metadata read from the DIC_LINES CSV.
        self.arr_lines_data = np.genfromtxt(DIC_LINES, dtype=None, encoding=None, delimiter=';', names=True)

    def generate_lines_layer(self):
        """ Add the MMC fields, fill them, then drop the source fields. """
        self.add_fields()
        self.fill_fields()
        self.delete_fields()

    def add_fields(self):
        """ Append the MMC attribute schema to the working lines layer. """
        name_municipality_1_field = QgsField(name='NomTerme1', type=QVariant.String, typeName='text', len=100)
        name_municipality_2_field = QgsField(name='NomTerme2', type=QVariant.String, typeName='text', len=100)
        tipus_ua_field = QgsField(name='TipusUA', type=QVariant.String, typeName='text', len=17)
        limit_prov_field = QgsField(name='LimitProvi', type=QVariant.String, typeName='text', len=1)
        limit_vegue_field = QgsField(name='LimitVegue', type=QVariant.String, typeName='text', len=1)
        tipus_linia_field = QgsField(name='TipusLinia', type=QVariant.String, typeName='text', len=8)
        # TODO does it use "Valid de" or "Data alta"? Ask Cesc
        id_linia_field, valid_de_field, valid_a_field, data_alta_field, data_baixa_field = get_common_fields()
        new_fields_list = [id_linia_field, name_municipality_1_field, name_municipality_2_field, tipus_ua_field,
                           limit_prov_field, limit_vegue_field, tipus_linia_field,]
        self.work_lines_layer.dataProvider().addAttributes(new_fields_list)
        self.work_lines_layer.updateFields()

    def fill_fields(self):
        """ Populate the MMC fields from the DIC_LINES metadata array. """
        # TODO almost identical to the Generador MMC version...
        self.work_lines_layer.startEditing()
        for line in self.work_lines_layer.getFeatures():
            line_id = line['id_linia']
            line_data = self.arr_lines_data[np.where(self.arr_lines_data['IDLINIA'] == line_id)]
            # Get the Tipus UA type
            tipus_ua = line_data['TIPUSUA'][0]
            if tipus_ua == 'M':
                line['TipusUA'] = 'Municipi'
            elif tipus_ua == 'C':
                line['TipusUA'] = 'Comarca'
            elif tipus_ua == 'A':
                line['TipusUA'] = 'Comunitat Autònoma'
            elif tipus_ua == 'E':
                line['TipusUA'] = 'Estat'
            elif tipus_ua == 'I':
                line['TipusUA'] = 'Inframunicipal'
            # Get the Limit Vegue type
            limit_vegue = line_data['LIMVEGUE'][0]
            if limit_vegue == 'verdadero':
                line['LimitVegue'] = 'S'
            else:
                line['LimitVegue'] = 'N'
            # Get the tipus Linia type
            # NOTE(review): unlike the lookups above this one has no [0];
            # the comparison may see a 1-element array -- confirm.
            tipus_linia = line_data['TIPUSREG']
            if tipus_linia == 'internes':
                line['TipusLinia'] = 'MMC'
            else:
                line['TipusLinia'] = 'Exterior'
            # Non dependant fields
            line['IdLinia'] = line_id
            line['NomTerme1'] = str(line_data['NOMMUNI1'][0])
            line['NomTerme2'] = str(line_data['NOMMUNI2'][0])
            line['LimitProvi'] = str(line_data['LIMPROV'][0])
            self.work_lines_layer.updateFeature(line)
        self.work_lines_layer.commitChanges()

    def delete_fields(self):
        """ Remove the original source attributes, keeping only the MMC ones.

        Assumes the exported source layer carries exactly 12 leading
        fields -- TODO confirm against v_tram_linia_mem's schema.
        """
        delete_fields_list = list([*range(0, 12)])
        self.work_lines_layer.dataProvider().deleteAttributes(delete_fields_list)
        self.work_lines_layer.updateFields()
| 41.817814
| 127
| 0.618162
| 1,218
| 10,329
| 4.922824
| 0.19376
| 0.033356
| 0.026684
| 0.041194
| 0.472482
| 0.43429
| 0.392595
| 0.329887
| 0.24483
| 0.223149
| 0
| 0.006854
| 0.251331
| 10,329
| 246
| 128
| 41.987805
| 0.768525
| 0.09604
| 0
| 0.235669
| 0
| 0
| 0.090979
| 0.011633
| 0
| 0
| 0
| 0.004065
| 0
| 1
| 0.10828
| false
| 0
| 0.044586
| 0
| 0.210191
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
ae6e3ce852e6d0276690375427d2e2f3c5953dfb
| 5,369
|
py
|
Python
|
SentimentAnalysis.py
|
hoossainalik/instalyzer
|
9ad7c59fba3f617801d3ec0c3ae216029ee0aece
|
[
"MIT"
] | null | null | null |
SentimentAnalysis.py
|
hoossainalik/instalyzer
|
9ad7c59fba3f617801d3ec0c3ae216029ee0aece
|
[
"MIT"
] | null | null | null |
SentimentAnalysis.py
|
hoossainalik/instalyzer
|
9ad7c59fba3f617801d3ec0c3ae216029ee0aece
|
[
"MIT"
] | null | null | null |
"""
Module: Sentiment Analysis
Author: Hussain Ali Khan
Version: 1.0.0
Last Modified: 29/11/2018 (Thursday)
"""
from vaderSentiment.vaderSentiment import SentimentIntensityAnalyzer
import pandas as pd
import re
import os
from emoji import UNICODE_EMOJI
import matplotlib.pyplot as plt
import seaborn as sns
class ResultData:
    """Container pairing cleaned text items with their sentiment scores."""

    def __init__(self, data=None, scores=None):
        # Bug fix: the original used mutable default arguments (data=[],
        # scores=[]), so every default-constructed instance shared the
        # same two lists.  None sentinels give each instance fresh lists.
        self.data = [] if data is None else data
        self.scores = [] if scores is None else scores

    def get_data(self):
        """Return the list of cleaned text items."""
        return self.data

    def get_scores(self):
        """Return the list of polarity-score dicts, parallel to ``data``."""
        return self.scores
class SentimentAnalyzer:
    """VADER-based sentiment analysis over a scraped-posts CSV dataset."""

    def __init__(self):
        self.analyzer = SentimentIntensityAnalyzer()
        self.dataset = None          # pandas DataFrame once load_dataset() ran
        self.opened_dataset = None   # file name of the currently opened CSV

    def load_dataset(self, dir_name):
        """Interactively pick a CSV file inside *dir_name* and load it.

        Side effects: prompts on stdin and sets ``self.dataset`` /
        ``self.opened_dataset``.
        """
        files_list = os.listdir(dir_name)
        print("Please Select The DataSet That You Want To Open: ")
        for index, file_name in enumerate(files_list, start=1):
            print(index, ". ", file_name)
        choice = int(input("Choice: "))
        self.opened_dataset = files_list[choice - 1]
        self.dataset = pd.read_csv(dir_name + "/" + self.opened_dataset)

    def sentiment_analyzer_scores(self, data):
        """Print *data* left-padded with dashes, followed by its scores."""
        score = self.analyzer.polarity_scores(data)
        print("{:-<40} {}".format(data, str(score)))

    def process_descriptions(self):
        """Clean and score every post description.

        Returns
        -------
        ResultData
            Cleaned descriptions and their polarity-score dicts.
        """
        descriptions = self.dataset["description"]
        # Strip the surrounding quote characters kept by the scraper.
        c_descriptions = [str(desc)[1:-1] for desc in descriptions]
        cleaned_descriptions = clean_list(c_descriptions)
        scores = [self.analyzer.polarity_scores(c_d) for c_d in cleaned_descriptions]
        return ResultData(cleaned_descriptions, scores)

    def print_sentiment_scores(self, text):
        """Print *text* left-padded with dashes, followed by its scores."""
        txt = self.analyzer.polarity_scores(text)
        print("{:-<40} {}".format(text, str(txt)))

    def process_comments(self):
        """Clean and score all comments across all posts.

        Bug fix: the original read ``sa.dataset`` -- a module-global that
        only exists inside ``main()``'s local scope -- which raised
        NameError at runtime; it now correctly uses ``self.dataset``.

        Returns
        -------
        ResultData
            Cleaned comments and their polarity-score dicts.
        """
        comments_lists = self.dataset["comments"]
        all_comments = []
        for raw in comments_lists:
            cleaned = str(raw).replace('[', '').replace(']', '')
            parts = [comment.replace("'", "") for comment in cleaned.split(', ')]
            # Keep odd indices -- presumably entries alternate
            # commenter/comment; confirm against the scraper output.
            all_comments.extend(parts[1::2])
        cleaned_comments = clean_list(all_comments)
        scores = [self.analyzer.polarity_scores(c_c) for c_c in cleaned_comments]
        return ResultData(cleaned_comments, scores)
def save_results_as_csv(results, fn, c_name):
    """Persist sentiment results to CSV and show summary plots.

    Parameters
    ----------
    results : ResultData
        Cleaned texts plus their VADER polarity-score dicts.
    fn : str
        Base file name of the CSV written under SentimentAnalysisResults/.
    c_name : str
        Column name for the text items (e.g. "descriptions", "comments").

    Side effects: prints the joined DataFrame and its describe() summary,
    opens a pie plot and a seaborn pair plot (plt.show() blocks), and
    writes the CSV file.
    """
    results_df = pd.DataFrame(results.get_scores())
    # Dominant sentiment = the largest of the pos/neg/neu components.
    results_df['class'] = results_df[['pos', 'neg', 'neu']].idxmax(axis=1)
    results_df['class'] = results_df['class'].map({'pos': 'Positive', 'neg': 'Negative', 'neu': 'Neutral'})
    text_df = pd.DataFrame(results.get_data(), columns=[c_name])
    final_df = text_df.join(results_df)
    print(final_df)
    print(final_df.describe())
    pie_plot_title = "Pie Plot For Sentiments Of " + c_name + " In dataset <" + fn + ">"
    final_df["class"].value_counts().plot(kind="pie", autopct='%.1f%%', figsize=(8, 8), title=pie_plot_title)
    pp = sns.pairplot(final_df, hue="class", height=3)
    pp.fig.suptitle("Pair Plot For Sentiments Of "+c_name+" In dataset <"+fn+">")
    plt.show()
    final_df.to_csv("SentimentAnalysisResults/" + fn + ".csv")
# search your emoji
def is_emoji(s):
    """Return True when the single character *s* appears in the emoji table."""
    return s in UNICODE_EMOJI
# add space near your emoji
def add_space(text):
    """Insert a space before every emoji so tokenizers treat them as words."""
    spaced = (' ' + ch if is_emoji(ch) else ch for ch in text)
    return ''.join(spaced).strip()
def clean_text(text):
    """Normalize a caption/comment string for sentiment scoring."""
    text = filter_mentions(text)
    # Character-level substitutions, applied in this fixed order.
    substitutions = (
        ('#', ''),
        ('/', ' '),
        ('_', ' '),
        ('❤', ' Love '),
        ('-', ' '),
    )
    for old, new in substitutions:
        text = text.replace(old, new)
    text = re.sub(' +', ' ', text).strip()               # collapse space runs
    text = re.sub(r'https?:/\/\S+', ' ', text).strip()  # remove links
    text = re.sub('[^A-Za-z0-9]+', ' ', text).strip()    # keep alphanumerics
    text = add_space(text)
    return text
def filter_mentions(text):
    """Drop whitespace-separated tokens that start with '@' (user mentions)."""
    kept_tokens = [token for token in text.split() if token[0] != '@']
    return " ".join(kept_tokens)
def clean_list(_list):
    """Clean every string in *_list*, dropping entries that clean to empty."""
    return [cleaned for cleaned in map(clean_text, _list) if len(cleaned) > 0]
def main():
    """Run sentiment analysis on descriptions and comments of a chosen dataset.

    Prompts the user to pick a CSV under the "Posts" directory, then
    scores and saves results for descriptions and comments in turn.
    """
    sa = SentimentAnalyzer()
    sa.load_dataset("Posts")
    print("<---Sentiment Analysis Results On Post Descriptions--->")
    description_results = sa.process_descriptions()
    save_results_as_csv(description_results, sa.opened_dataset + "_descriptions_sa_results", "descriptions")
    print("<----------------------------------------------------->")
    print("<---Sentiment Analysis Results On All Post Comments--->")
    comments_results = sa.process_comments()
    save_results_as_csv(comments_results, sa.opened_dataset + "_comments_sa_results", "comments")
    print("<----------------------------------------------------->")


if __name__ == "__main__":
    main()
| 28.558511
| 109
| 0.596573
| 648
| 5,369
| 4.736111
| 0.260802
| 0.020854
| 0.024438
| 0.033887
| 0.142392
| 0.067775
| 0.067775
| 0.042359
| 0.042359
| 0
| 0
| 0.007236
| 0.227789
| 5,369
| 188
| 110
| 28.558511
| 0.732754
| 0.081393
| 0
| 0.052174
| 0
| 0
| 0.129347
| 0.032337
| 0
| 0
| 0
| 0
| 0
| 1
| 0.13913
| false
| 0
| 0.06087
| 0.043478
| 0.295652
| 0.095652
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
ae6e8bcab2c7710339f988ae2adebe63a8a6d860
| 11,100
|
py
|
Python
|
deep_sort_/track.py
|
brjathu/PHALP
|
0502c0aa515292bc70e358fe3b3ec65e63215327
|
[
"MIT"
] | 45
|
2022-02-23T04:32:22.000Z
|
2022-03-31T15:02:39.000Z
|
deep_sort_/track.py
|
brjathu/PHALP
|
0502c0aa515292bc70e358fe3b3ec65e63215327
|
[
"MIT"
] | 5
|
2022-02-23T15:08:29.000Z
|
2022-03-24T19:54:55.000Z
|
deep_sort_/track.py
|
brjathu/PHALP
|
0502c0aa515292bc70e358fe3b3ec65e63215327
|
[
"MIT"
] | 2
|
2022-02-26T13:01:19.000Z
|
2022-03-24T04:53:29.000Z
|
"""
Modified code from https://github.com/nwojke/deep_sort
"""
import numpy as np
import copy
import torch
import torch.nn as nn
import torch.nn.functional as F
import scipy.signal as signal
from scipy.ndimage.filters import gaussian_filter1d
class TrackState:
    """
    Enumeration of the single-target track lifecycle.

    A newly created track is `Tentative` until enough evidence has been
    collected; it is then promoted to `Confirmed`.  Tracks that are no
    longer alive become `Deleted`, marking them for removal from the set
    of active tracks.
    """

    # Lifecycle order: Tentative -> Confirmed -> Deleted.
    Tentative = 1
    Confirmed = 2
    Deleted = 3
class Track:
"""
A single target track with state space `(x, y, a, h)` and associated
velocities, where `(x, y)` is the center of the bounding box, `a` is the
aspect ratio and `h` is the height.
Parameters
----------
mean : ndarray
Mean vector of the initial state distribution.
covariance : ndarray
Covariance matrix of the initial state distribution.
track_id : int
A unique track identifier.
n_init : int
Number of consecutive detections before the track is confirmed. The
track state is set to `Deleted` if a miss occurs within the first
`n_init` frames.
max_age : int
The maximum number of consecutive misses before the track state is
set to `Deleted`.
feature : Optional[ndarray]
Feature vector of the detection this track originates from. If not None,
this feature is added to the `features` cache.
Attributes
----------
mean : ndarray
Mean vector of the initial state distribution.
covariance : ndarray
Covariance matrix of the initial state distribution.
track_id : int
A unique track identifier.
hits : int
Total number of measurement updates.
age : int
Total number of frames since first occurance.
time_since_update : int
Total number of frames since last measurement update.
state : TrackState
The current track state.
features : List[ndarray]
A cache of features. On each measurement update, the associated feature
vector is added to this list.
"""
def __init__(self, opt, track_id, n_init, max_age, feature=None, uv_map=None, bbox=None, detection_data=None, confidence=None, detection_id=None, dims=None, time=None):
self.opt = opt
self.track_id = track_id
self.hits = 1
self.age = 1
self.time_since_update = 0
self.state = TrackState.Tentative
if(dims is not None):
self.A_dim = dims[0]
self.P_dim = dims[1]
self.L_dim = dims[2]
self.phalp_uv_map = uv_map
self.phalp_uv_map_ = [uv_map]
self.phalp_uv_predicted = copy.deepcopy(self.phalp_uv_map)
self.phalp_uv_predicted_ = [copy.deepcopy(self.phalp_uv_map)]
self.phalp_appe_features = []
self.phalp_pose_features = []
self.phalp_loca_features = []
self.phalp_time_features = []
self.phalp_bbox = []
self.phalp_detection_id = []
self.detection_data = []
self.confidence_c = []
if feature is not None:
for i_ in range(self.opt.track_history):
self.phalp_appe_features.append(feature[:self.A_dim])
self.phalp_pose_features.append(feature[self.A_dim:self.A_dim+self.P_dim])
self.phalp_loca_features.append(feature[self.A_dim+self.P_dim:])
self.phalp_time_features.append(time)
self.phalp_bbox.append(bbox)
self.phalp_detection_id.append(detection_id)
self.detection_data.append(detection_data)
self.confidence_c.append(confidence[0])
self._n_init = n_init
self._max_age = max_age
self.track_data = {
"xy" : self.detection_data[-1]['xy'],
"bbox" : np.asarray(self.detection_data[-1]['bbox'], dtype=np.float),
}
self.phalp_pose_predicted_ = []
self.phalp_loca_predicted_ = []
self.phalp_features_ = []
def predict(self, phalp_tracker, increase_age=True):
"""Propagate the state distribution to the current time step using a
Kalman filter prediction step.
Parameters
----------
kf : kalman_filter.KalmanFilter
The Kalman filter.
"""
if(increase_age):
self.age += 1
self.time_since_update += 1
def add_predicted(self, appe=None, pose=None, loca=None, uv=None):
self.phalp_appe_predicted = copy.deepcopy(appe.numpy()) if(appe is not None) else copy.deepcopy(self.phalp_appe_features[-1])
self.phalp_pose_predicted = copy.deepcopy(pose.numpy()) if(pose is not None) else copy.deepcopy(self.phalp_pose_features[-1])
self.phalp_loca_predicted = copy.deepcopy(loca.numpy()) if(loca is not None) else copy.deepcopy(self.phalp_loca_features[-1])
self.phalp_features = np.concatenate((self.phalp_appe_predicted, self.phalp_pose_predicted, self.phalp_loca_predicted), axis=0)
self.phalp_pose_predicted_.append(self.phalp_pose_predicted)
if(len(self.phalp_pose_predicted_)>self.opt.n_init+1): self.phalp_pose_predicted_ = self.phalp_pose_predicted_[1:]
self.phalp_loca_predicted_.append(self.phalp_loca_predicted)
if(len(self.phalp_loca_predicted_)>self.opt.n_init+1): self.phalp_loca_predicted_ = self.phalp_loca_predicted_[1:]
self.phalp_features_.append(self.phalp_features)
if(len(self.phalp_features_)>self.opt.n_init+1): self.phalp_features_ = self.phalp_features_[1:]
def update(self, detection, detection_id, shot):
"""Perform Kalman filter measurement update step and update the feature
cache.
Parameters
----------
kf : kalman_filter.KalmanFilter
The Kalman filter.
detection : Detection
The associated detection.
"""
h = detection.tlwh[3]
w = detection.tlwh[2]
self.phalp_appe_features.append(detection.feature[:self.A_dim])
self.phalp_appe_features = copy.deepcopy(self.phalp_appe_features[1:])
self.phalp_pose_features.append(detection.feature[self.A_dim:self.A_dim+self.P_dim])
self.phalp_pose_features = copy.deepcopy(self.phalp_pose_features[1:])
self.phalp_loca_features.append(detection.feature[self.A_dim+self.P_dim:])
self.phalp_loca_features = copy.deepcopy(self.phalp_loca_features[1:])
if(shot==1): self.phalp_loca_features = [detection.feature[self.A_dim+self.P_dim:] for i in range(self.opt.track_history)]
self.phalp_time_features.append(detection.time)
self.phalp_time_features = copy.deepcopy(self.phalp_time_features[1:])
self.phalp_bbox.append(detection.tlwh)
self.phalp_bbox = self.phalp_bbox[1:]
self.confidence_c.append(detection.confidence_c)
self.confidence_c = self.confidence_c[1:]
self.detection_data.append(detection.detection_data)
self.detection_data = self.detection_data[1:]
self.phalp_detection_id.append(detection_id)
self.phalp_uv_map = copy.deepcopy(detection.uv_map)
self.phalp_uv_map_.append(copy.deepcopy(detection.uv_map))
if(self.opt.render or "T" in self.opt.predict):
mixing_alpha_ = self.opt.alpha*(detection.confidence_c**2)
ones_old = self.phalp_uv_predicted[3:, :, :]==1
ones_new = self.phalp_uv_map[3:, :, :]==1
ones_old = np.repeat(ones_old, 3, 0)
ones_new = np.repeat(ones_new, 3, 0)
ones_intersect = np.logical_and(ones_old, ones_new)
ones_union = np.logical_or(ones_old, ones_new)
good_old_ones = np.logical_and(np.logical_not(ones_intersect), ones_old)
good_new_ones = np.logical_and(np.logical_not(ones_intersect), ones_new)
new_rgb_map = np.zeros((3, 256, 256))
new_mask_map = np.zeros((1, 256, 256))-1
new_mask_map[ones_union[:1, :, :]] = 1.0
new_rgb_map[ones_intersect] = (1-mixing_alpha_)*self.phalp_uv_predicted[:3, :, :][ones_intersect] + mixing_alpha_*self.phalp_uv_map[:3, :, :][ones_intersect]
new_rgb_map[good_old_ones] = self.phalp_uv_predicted[:3, :, :][good_old_ones]
new_rgb_map[good_new_ones] = self.phalp_uv_map[:3, :, :][good_new_ones]
self.phalp_uv_predicted = np.concatenate((new_rgb_map, new_mask_map), 0)
self.phalp_uv_predicted_.append(self.phalp_uv_predicted)
if(len(self.phalp_uv_predicted_)>self.opt.n_init+1): self.phalp_uv_predicted_ = self.phalp_uv_predicted_[1:]
else:
self.phalp_uv_predicted = self.phalp_uv_map
self.track_data = {
"xy" : detection.detection_data['xy'],
"bbox" : np.asarray(detection.detection_data['bbox'], dtype=np.float64)
}
self.hits += 1
self.time_since_update = 0
if self.state == TrackState.Tentative and self.hits >= self._n_init:
self.state = TrackState.Confirmed
def mark_missed(self):
"""Mark this track as missed (no association at the current time step).
"""
if self.state == TrackState.Tentative:
self.state = TrackState.Deleted
elif self.time_since_update > self._max_age:
self.state = TrackState.Deleted
def is_tentative(self):
"""Returns True if this track is tentative (unconfirmed).
"""
return self.state == TrackState.Tentative
def is_confirmed(self):
"""Returns True if this track is confirmed."""
return self.state == TrackState.Confirmed
def is_deleted(self):
"""Returns True if this track is dead and should be deleted."""
return self.state == TrackState.Deleted
def smooth_bbox(self, bbox):
    """Smooth a temporal sequence of bounding boxes.

    Each bbox parameter (column of the (T, 4) array) is median-filtered to
    knock out outliers, then Gaussian-smoothed along time.

    :param bbox: sequence of per-frame bbox parameter vectors
    :return: list of smoothed per-frame arrays
    """
    MEDIAN_WIDTH = 5
    GAUSS_SIGMA = 3
    trajectory = np.asarray(bbox)
    # Median-filter every parameter trajectory independently.
    despiked = np.stack(
        [signal.medfilt(col, MEDIAN_WIDTH) for col in trajectory.T], axis=1
    )
    # Follow with a Gaussian blur for smooth sub-pixel motion.
    blurred = np.stack(
        [gaussian_filter1d(col, GAUSS_SIGMA) for col in despiked.T], axis=1
    )
    return list(blurred)
| 42.692308
| 176
| 0.607748
| 1,397
| 11,100
| 4.591267
| 0.158912
| 0.112254
| 0.03773
| 0.037418
| 0.463361
| 0.321952
| 0.290458
| 0.23324
| 0.148269
| 0.137044
| 0
| 0.009891
| 0.298649
| 11,100
| 260
| 177
| 42.692308
| 0.814001
| 0.216757
| 0
| 0.058394
| 0
| 0
| 0.002999
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.065693
| false
| 0
| 0.051095
| 0
| 0.182482
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
ae74ea38559f52ac217bf0d17616d5da35736211
| 14,773
|
py
|
Python
|
functions_baseline_opencv.py
|
Shiro-LK/Super-Resolution-ProbaV
|
e6b9d9d62caa50b84cd5bdca906af53aa1a5de8b
|
[
"MIT"
] | null | null | null |
functions_baseline_opencv.py
|
Shiro-LK/Super-Resolution-ProbaV
|
e6b9d9d62caa50b84cd5bdca906af53aa1a5de8b
|
[
"MIT"
] | null | null | null |
functions_baseline_opencv.py
|
Shiro-LK/Super-Resolution-ProbaV
|
e6b9d9d62caa50b84cd5bdca906af53aa1a5de8b
|
[
"MIT"
] | 1
|
2020-04-15T10:36:31.000Z
|
2020-04-15T10:36:31.000Z
|
# -*- coding: utf-8 -*-
import cv2
import numpy as np
import os
import pandas as pd
import math
from skimage import io
from skimage.transform import rescale
import skimage
import numba
from numba import prange
import time
from pathlib import Path
# MAX 35 IMG
## Create TXT FILE for loading
def import_norm_data(filename="data/norm.csv"):
    """Read the per-scene normalisation file into a {name: value} dict.

    The file has one space-separated "name value" pair per line and no
    header row.

    :param filename: path of the normalisation CSV
    :return: dict mapping scene name -> normalisation constant
    """
    table = pd.read_csv(filename, sep=" ", header=None, names=["name", "value"])
    # Pair each scene name with its normalisation constant.
    return dict(zip(table["name"], table["value"]))
def seperate_NIR_RED(filename):
    """Split a combined scene-list file into separate NIR and RED lists.

    Reads ``filename`` (lines of "<path> <norm>"), normalises Windows path
    separators to "/", and writes two sibling files with suffixes
    ``_NIR.txt`` and ``_RED.txt`` holding the NIR and RED entries.

    :param filename: path of the combined list file (must end in ".txt")
    """
    with open(filename, "r") as f:
        entries = [line.replace("\\", "/").split() for line in f]
    # Context managers guarantee both output files are closed even if a
    # malformed line raises below (the original leaked the handles then).
    with open(filename.replace(".txt", "_NIR.txt"), "w") as f_NIR, \
         open(filename.replace(".txt", "_RED.txt"), "w") as f_RED:
        for entry in entries:
            # Paths mentioning "NIR" go to the NIR list; everything else
            # is assumed to be a RED scene.
            target = f_NIR if entry[0].find("NIR") != -1 else f_RED
            target.write(entry[0] + " " + entry[1] + "\n")
def create_data(path, normalize_data):
    """Index the dataset folder tree into train.txt / test.txt list files.

    Expects the layout path/{train,test}/{NIR,RED}/<imgset>/... and writes
    one "<imgset_path> <norm>" line per image set, where the normalisation
    value is looked up in ``normalize_data`` by the image-set folder name.
    Also prints the largest file count found in any image-set folder.

    :param path: dataset root; concatenated directly with "train.txt", so
        it should end with a path separator (original call convention)
    :param normalize_data: dict mapping image-set name -> norm value
    """
    max_files = 0
    # `with` ensures both list files are flushed and closed on any error
    # (the original left them open if a lookup below raised).
    with open(path + "train.txt", "w") as f_train, \
         open(path + "test.txt", "w") as f_test:
        for split_name in os.listdir(path):  # train / test level
            split_dir = os.path.join(path, split_name)
            if not os.path.isdir(split_dir):
                continue  # skips the list files we just created
            for band_name in os.listdir(split_dir):  # NIR / RED level
                band_dir = os.path.join(split_dir, band_name)
                if not os.path.isdir(band_dir):
                    continue
                for set_name in os.listdir(band_dir):  # image-set level
                    set_dir = os.path.join(band_dir, set_name)
                    if not os.path.isdir(set_dir):
                        continue
                    # Norm lookup stays inside the split branches so
                    # unknown top-level folders never trigger a KeyError,
                    # matching the original behaviour.
                    if split_name == "train":
                        f_train.write(set_dir + " " + str(normalize_data[set_name]) + "\n")
                    elif split_name == "test":
                        f_test.write(set_dir + " " + str(normalize_data[set_name]) + "\n")
                    max_files = max(max_files, len(os.listdir(set_dir)))
    print(max_files)
## Load all data
def load_data(filename, istrain=True):
    """Load every scene listed in a train/test list file.

    Each line of the list file is "<scene_path> <norm>". For training data
    the result rows are [LR, QM, norm, SM, HR]; for test data they are
    [LR, QM, norm].

    :param filename: path of the list file produced by create_data
    :param istrain: whether the scenes carry HR ground truth
    :return: list of per-scene rows as described above
    """
    with open(filename, "r") as list_file:
        entries = [line.replace("\\", "/").split() for line in list_file]
    scenes = []
    for scene_path, norm_str in entries:
        norm = float(norm_str)
        if istrain:
            lr, qm, sm, hr = get_scene(scene_path, istrain)
            scenes.append([lr, qm, norm, sm, hr])
        else:
            lr, qm, sm = get_scene(scene_path, istrain)
            scenes.append([lr, qm, norm])
    return scenes
## load one scene data
def get_scene(path, istrain=True):
    """Collect the file paths making up one ProbaV scene folder.

    A scene folder holds up to 35 consecutively numbered low-resolution
    frames LR000.png..LR034.png with matching quality maps QM000.png..,
    plus the status map SM.png and (for training scenes) the
    high-resolution target HR.png.

    :param path: scene directory, or None (returns None, as before)
    :param istrain: whether the scene has an HR ground-truth image
    :return: [LR, QM, SM, HR] when istrain, else [LR, QM, SM]; LR/QM are
        lists of existing file paths, SM/HR are path strings
    """
    if path is None:
        # Preserve the original behaviour: no path, no result.
        return None
    # Generate LR000..LR034 / QM000..QM034 programmatically instead of the
    # original hand-written 70-entry list.
    lr_names = ["LR{:03d}.png".format(i) for i in range(35)]
    qm_names = ["QM{:03d}.png".format(i) for i in range(35)]
    # Bug fix: SM was previously assigned only in the istrain branch, so
    # get_scene(path, istrain=False) raised NameError at its return even
    # though callers (load_data with istrain=False) unpack LR, QM, SM.
    SM = os.path.join(path, "SM.png")
    HR = os.path.join(path, "HR.png")
    LR = []
    QM = []
    # Frames are numbered consecutively; stop at the first missing file.
    for name in lr_names:
        full = os.path.join(path, name)
        if not os.path.isfile(full):
            break
        LR.append(full)
    for name in qm_names:
        full = os.path.join(path, name)
        if not os.path.isfile(full):
            break
        QM.append(full)
    if istrain:
        return [LR, QM, SM, HR]
    return [LR, QM, SM]
## METRIC FUNCTION FOR ONE SCENE
@numba.autojit  # NOTE(review): numba.autojit was removed in modern numba (use numba.jit) — confirm the pinned numba version.
def score_scene(sr, hr, clearhr, norm, num_crop=6):
    """
    Compute the ProbaV-style cPSNR score for one scene.

    Crops num_crop//2 pixels off each border of the super-resolved image,
    then tries every (x, y) offset of the HR image within the crop margin,
    computing a bias-corrected clear-pixel MSE for each and keeping the
    best (minimum norm/cPSNR) value.

    :param sr: super-resolved prediction (same resolution as hr)
    :param hr: high-resolution ground truth
    :param clearhr: HR status map; zeros mark obscured pixels.
        WARNING: mutated in place (zeros replaced by NaN) — callers must
        not reuse the array afterwards.
    :param norm: per-scene normalisation constant
    :param num_crop: total border margin searched over (default 6)
    :return: minimum normalised score over all offsets
    """
    zSR = []  # NOTE(review): dead assignment — immediately overwritten below.
    max_x, max_y = np.array(hr.shape) - num_crop
    sr_ = sr[num_crop//2:-num_crop//2, num_crop//2:-num_crop//2]
    # Obscured pixels become NaN so nanmean ignores them in the MSE.
    np.place(clearhr, clearhr==0, np.nan)
    zSR = np.zeros((num_crop + 1, num_crop + 1), np.float64)
    for x_off in prange(0, num_crop+1):
        for y_off in prange(0, num_crop+1):
            # Shifted HR window aligned against the fixed cropped SR.
            clearHR_ = clearhr[x_off : x_off + max_x, y_off : y_off + max_y]
            hr_ = hr[x_off:x_off + max_x, y_off:y_off + max_y]
            diff = (hr_- sr_)* clearHR_
            # b is the mean brightness bias over clear pixels.
            b = np.nanmean(diff)
            ## compute cMSE
            cMSE = np.nanmean( (diff-b)**2)
            cPSNR = -10.0*np.log10(cMSE)
            zSR[x_off, y_off] = norm/cPSNR
    return zSR.min()
@numba.autojit  # NOTE(review): numba.autojit was removed in modern numba — confirm the pinned numba version.
def baseline_predict_scene(LR, QM, before=True, interpolation=cv2.INTER_CUBIC):
    """
    baseline version 1 :
    average images with the maximum number of clearance pixel
    if before is true, average the image then apply the resize and return the resize image
    else resize the images and return the average

    :param LR: list of low-resolution frame file paths for one scene
    :param QM: list of matching quality/clearance-map file paths
    :param before: True -> average at 128x128 then upscale once;
        False -> upscale each frame to 384x384 then average
    :param interpolation: OpenCV interpolation flag for cv2.resize
    :return: 384x384 float64 prediction
    """
    # load clearance map
    n = len(QM)
    clearance = np.zeros( (n,) )
    #for cl in QM:
    for i in prange(n):
        cl = QM[i]
        img_cl = skimage.img_as_float64( cv2.imread(cl , -1) ).astype(np.bool)
        # NOTE(review): this None check can never fire — a failed imread
        # returns None and img_as_float64 would already have raised.
        if img_cl is None:
            print("error")
        if len(np.unique(img_cl)) > 2:
            print(np.unique(img_cl))
            # NOTE(review): raising a plain str is itself a TypeError at
            # runtime; should be e.g. raise ValueError(...).
            raise("Error during loading clearance map !!!! ")
        #img_cl = img_cl/255 # normalize value 0-1
        # Clearance score = number of clear (non-zero) pixels.
        clearance[i] = np.sum(img_cl)
    maxcl = clearance.max()
    maxclears = [i for i in prange(len(clearance)) if clearance[i] == maxcl] # save index of image with max clearance
    if before:
        # Average the clearest frames at LR resolution, then upscale once.
        img_predict = np.zeros( (128, 128), dtype=np.float64)
        #for ids in maxclears:
        for i in prange(len(maxclears)):
            ids = maxclears[i]
            im = skimage.img_as_float64( cv2.imread(LR[ids], -1) )
            img_predict += im
        img_predict = img_predict/len(maxclears)
        im_rescale = cv2.resize(img_predict, (384, 384), interpolation = interpolation)# rescale(im, scale=3, order=3, mode='edge', anti_aliasing=False, multichannel=False)#
        return im_rescale
    else:
        # upscale
        # Upscale each clearest frame first, then average at 384x384.
        img_predict = np.zeros( (384, 384), dtype=np.float64)
        #for ids in maxclears:
        for i in prange(len(maxclears)):
            ids = maxclears[i]
            im = skimage.img_as_float64( cv2.imread(LR[ids], -1) )
            im_rescale = cv2.resize(im, (384, 384), interpolation = interpolation)# rescale(im, scale=3, order=3, mode='edge', anti_aliasing=False, multichannel=False)#
            img_predict += im_rescale
        img_predict = img_predict/len(maxclears)
        return img_predict
@numba.autojit  # NOTE(review): numba.autojit was removed in modern numba — confirm the pinned numba version.
def baseline_predict_scenev2(LR, QM, interpolation=cv2.INTER_CUBIC):
    """
    baseline version 2 :
    clearance-weighted per-pixel average of the frames with the maximum
    number of clear pixels; pixels clear in no frame fall back to the
    plain mean of those frames. Result is upscaled to 384x384.

    :param LR: list of low-resolution frame file paths for one scene
    :param QM: list of matching quality/clearance-map file paths
    :param interpolation: OpenCV interpolation flag for cv2.resize
    :return: 384x384 float64 prediction
    """
    # load clearance map
    n = len(QM)
    clearance = np.zeros( (n,) )
    #for cl in QM:
    for i in prange(n):
        cl = QM[i]
        img_cl = skimage.img_as_float64( cv2.imread(cl , -1) ).astype(np.bool)
        # NOTE(review): this None check can never fire — a failed imread
        # would already have crashed img_as_float64 above.
        if img_cl is None:
            print("error")
        if len(np.unique(img_cl)) > 2:
            print(np.unique(img_cl))
            # NOTE(review): raising a plain str is itself a TypeError.
            raise("Error during loading clearance map !!!! ")
        #img_cl = img_cl/255 # normalize value 0-1
        clearance[i] = np.sum(img_cl)
    maxcl = clearance.max()
    maxclears = [i for i in prange(len(clearance)) if clearance[i] == maxcl] # save index of image with max clearance
    dim = len(maxclears)
    clearance_map = np.zeros( (dim, 128, 128), dtype=np.float64 )
    im = np.zeros( (dim, 128, 128), dtype=np.float64)
    # Reload the clearest frames and their clearance maps as stacks.
    for i in prange(dim):
        ids = maxclears[i]
        cl = QM[ids]
        clearance_map[i] = skimage.img_as_float64( cv2.imread(cl , -1) )
        im[i] = skimage.img_as_float64( cv2.imread(LR[ids], -1) )
    img = im * clearance_map # pixel with no clearance equal 0
    clear = clearance_map.sum(axis=0)
    # NaN where no frame is clear, so the division below marks those pixels.
    np.place(clear, clear==0, np.nan)
    img_predict = np.sum(img, axis=0)/clear
    # average value of maxclearance and replace nan value by them
    img_average = img.mean(axis=0)
    img_predict[ np.isnan(img_predict) ] = img_average[np.isnan(img_predict)]
    # upscale img
    img_resize= cv2.resize(img_predict, (384, 384), interpolation = interpolation)
    return img_resize
@numba.autojit  # NOTE(review): numba.autojit was removed in modern numba — confirm the pinned numba version.
def baseline_predict_scenev3(LR, QM, interpolation=cv2.INTER_CUBIC):
    """
    baseline version 3 :
    like version 2, but pixels clear in none of the clearest frames are
    filled from the remaining frames in decreasing clearance order before
    falling back to the plain mean. Result is upscaled to 384x384.

    :param LR: list of low-resolution frame file paths for one scene
    :param QM: list of matching quality/clearance-map file paths
    :param interpolation: OpenCV interpolation flag for cv2.resize
    :return: 384x384 float64 prediction
    """
    # load clearance map
    n = len(QM)
    clearance = np.zeros( (n,) )
    #for cl in QM:
    for i in prange(n):
        cl = QM[i]
        img_cl = skimage.img_as_float64( cv2.imread(cl , -1) ).astype(np.bool)
        # NOTE(review): this None check can never fire (see v1/v2).
        if img_cl is None:
            print("error")
        if len(np.unique(img_cl)) > 2:
            print(np.unique(img_cl))
            # NOTE(review): raising a plain str is itself a TypeError.
            raise("Error during loading clearance map !!!! ")
        #img_cl = img_cl/255 # normalize value 0-1
        clearance[i] = np.sum(img_cl)
    maxcl = clearance.max()
    # Frame indices sorted by decreasing clearance, for the fill pass below.
    max_clearance_value = clearance.argsort()[::-1]
    maxclears = [i for i in prange(len(clearance)) if clearance[i] == maxcl] # save index of image with max clearance
    dim = len(maxclears)
    clearance_map = np.zeros( (dim, 128, 128), dtype=np.float64 )
    im = np.zeros( (dim, 128, 128), dtype=np.float64)
    for i in prange(dim):
        ids = maxclears[i]
        cl = QM[ids]
        clearance_map[i] = skimage.img_as_float64( cv2.imread(cl , -1) )
        im[i] = skimage.img_as_float64( cv2.imread(LR[ids], -1) )
    img = im * clearance_map # pixel with no clearance equal 0
    clear = clearance_map.sum(axis=0)
    np.place(clear, clear==0, np.nan)
    img_predict = np.sum(img, axis=0)/clear
    # replace nan value by value in image where the clearance is available
    # nan_map is 1 where no clearest frame was clear, 0 elsewhere.
    nan_map = clear.copy()
    nan_map[~np.isnan(nan_map)] = 0.0
    nan_map[np.isnan(nan_map)] = 1.0
    for ids in max_clearance_value:
        if clearance[ids] == maxcl:
            pass  # clearest frames already contributed above
        else:
            cl = QM[ids]
            img_temp = skimage.img_as_float64( cv2.imread(LR[ids], -1) )
            clear_temp = skimage.img_as_float64( cv2.imread(cl , -1) )
            # Only pixels still unfilled AND clear in this frame survive.
            temp = clear_temp*nan_map
            np.place(temp, temp==0, np.nan)
            temp = temp*img_temp
            img_predict[np.isnan(img_predict)] = temp[np.isnan(img_predict)]
            # Mark the pixels this frame just filled as done.
            # NOTE(review): assumes clearance values are 0/1 so the
            # subtraction stays in {0, 1} — confirm the QM encoding.
            nan_map[:, :] = nan_map[:,:] - (nan_map*clear_temp)
    # average value of maxclearance and replace nan value by them
    img_average = img.mean(axis=0)
    img_predict[ np.isnan(img_predict) ] = img_average[np.isnan(img_predict)]
    # upscale img
    img_resize= cv2.resize(img_predict, (384, 384), interpolation =interpolation)
    return img_resize
@numba.autojit
def baseline_predict(data, istrain=True, evaluate=True, version=1, interpolation=cv2.INTER_CUBIC):
    """Run the chosen baseline over a whole dataset and optionally score it.

    :param data: rows of [LR, QM, norm, SM, HR] as produced by load_data
    :param istrain: only training rows are processed (they carry SM/HR)
    :param evaluate: when True, score every scene and print the mean Z
    :param version: 1, 2 or 3 — which baseline_predict_scene* to use
    :param interpolation: OpenCV interpolation flag, forwarded to the scenes
    :return: (num, 384, 384) array of per-scene predictions
    :raises NotImplementedError: for an unknown version number
    """
    num = len(data)
    predicted = np.zeros( (num, 384, 384) ) # number of images in the dataset to check
    zsub = np.zeros((num,))
    if istrain:
        for i in prange( num ):
            LR, QM, norm, SM, HR = data[i]
            if version == 1:
                img_predict = baseline_predict_scene(LR, QM, interpolation=interpolation)
            elif version == 2:
                img_predict = baseline_predict_scenev2(LR, QM, interpolation=interpolation)
            elif version == 3:
                img_predict = baseline_predict_scenev3(LR, QM, interpolation=interpolation)
            else:
                # Bug fix: the original did `raise("...")`, which raises a
                # TypeError (str is not an exception) instead of the
                # intended error.
                raise NotImplementedError("methode not implemented ! ")
            # save img
            predicted[i] = img_predict
            # evaluate against the HR ground truth and its status map
            if evaluate:
                num_crop = 6
                clearHR = skimage.img_as_float64( cv2.imread(SM, -1 ) )
                hr = skimage.img_as_float64( cv2.imread(HR, -1) )
                zSR = score_scene(img_predict, hr, clearHR, norm, num_crop=num_crop)
                zsub[i] = zSR
    if evaluate:
        print("evaluation \n number of elements : {0} \n Z = {1}".format(len(zsub), zsub.mean()))
    return predicted
def baseline_predict_test(data, dirs = "results_baseline", interpolation=cv2.INTER_CUBIC):
    """Run the v1 baseline on test scenes and write one PNG per scene.

    Each output file is named after the scene folder (second-to-last path
    component of the first LR frame) and saved under ``dirs``.

    :param data: rows of [LR, QM, norm] as produced by load_data(istrain=False)
    :param dirs: output directory for the predicted PNGs
    :param interpolation: OpenCV interpolation flag, forwarded to the baseline
    """
    for LR, QM, norm in data:
        scene_name = Path(LR[0]).parts[-2]
        prediction = baseline_predict_scene(LR, QM, interpolation=interpolation)
        save_prediction(prediction, scene_name, directory=dirs)
def load_image2D(path, expand=False):
    """Load a single image as float64 values.

    :param path: image file path (read unchanged via cv2's -1 flag)
    :param expand: when True, append a trailing channel axis -> (H, W, 1)
    :return: the loaded image array
    """
    image = skimage.img_as_float64(cv2.imread(path, -1))
    return np.expand_dims(image, axis=2) if expand else image
def save_prediction(pred, names, directory):
    """Write a prediction image to <directory>/<names>.png.

    :param pred: float image (converted to 16-bit via skimage.img_as_uint)
    :param names: base file name (scene identifier) without extension
    :param directory: output folder, created if missing
    """
    # makedirs with exist_ok replaces the original os.stat probe wrapped
    # in a bare except: no exception swallowing, no mkdir race, and
    # intermediate folders are created too.
    os.makedirs(directory, exist_ok=True)
    out_path = os.path.join(directory, names + '.png')
    image_u16 = skimage.img_as_uint(pred)
    # Compression level 0 matches the original output exactly.
    cv2.imwrite(out_path, image_u16, [cv2.IMWRITE_PNG_COMPRESSION, 0])
#norm = import_norm_data()
#print(norm)
#
#create_data(path="data\\", normalize_data=norm)
#data_test = load_data(os.path.join("data","test.txt"), istrain=False)
#datas = load_data(os.path.join("data","train.txt"), istrain=True)
#begin = time.time()
#predict = baseline_predict(datas, istrain=True, evaluate=True, version=1)
#print(time.time()-begin)
#begin = time.time()
#baseline_predict_test(data_test)
#print(time.time()-begin)
| 34.678404
| 175
| 0.56434
| 1,999
| 14,773
| 4.050025
| 0.158079
| 0.039526
| 0.022233
| 0.032856
| 0.527297
| 0.50457
| 0.431201
| 0.404768
| 0.398592
| 0.366477
| 0
| 0.047098
| 0.301496
| 14,773
| 425
| 176
| 34.76
| 0.737475
| 0.149191
| 0
| 0.393502
| 0
| 0
| 0.078482
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.046931
| false
| 0.00361
| 0.046931
| 0
| 0.133574
| 0.028881
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
ae75ff7994410f7e88a0e941f01acf2c32ca349b
| 4,676
|
py
|
Python
|
csvtoqbo.py
|
Airbitz/airbitz-ofx
|
8dc9a851fc585e373611d6d8e27ae0e8540ea35b
|
[
"MIT"
] | 2
|
2016-01-08T20:14:21.000Z
|
2018-06-15T17:58:09.000Z
|
csvtoqbo.py
|
EdgeApp/airbitz-ofx
|
8dc9a851fc585e373611d6d8e27ae0e8540ea35b
|
[
"MIT"
] | null | null | null |
csvtoqbo.py
|
EdgeApp/airbitz-ofx
|
8dc9a851fc585e373611d6d8e27ae0e8540ea35b
|
[
"MIT"
] | 2
|
2016-01-08T20:14:22.000Z
|
2016-03-30T19:59:48.000Z
|
#####################################################################
# #
# File: csvtoqbo.py #
# Developer: Paul Puey #
# Original Code by: Justin Leto #
# Forked from https://github.com/jleto/csvtoqbo #
# #
# main utility script file Python script to convert CSV files #
# of transactions exported from various platforms to QBO for #
# import into Quickbooks Online. #
# #
# Usage: python csvtoqbo.py <options> <csvfiles> #
# #
#####################################################################
import sys, traceback
import os
import logging
import csv
import qbo
import airbitzwallets

# ---------------------------------------------------------------------
# Argument handling.
# ---------------------------------------------------------------------
# If only the utility script itself is called, print usage and exit.
if len(sys.argv) <= 1:
    sys.exit("Usage: python %s <options> <csvfiles>\n"
             "Where possible options include:\n"
             " -btc Output bitcoin in full BTC denomination\n"
             " -mbtc Output bitcoin in mBTC denomination\n"
             " -bits Output bitcoin in bits (uBTC) denomination" % sys.argv[0]
             )
# If help is requested
elif (sys.argv[1] == '--help'):
    sys.exit("Help for %s not yet implemented." % sys.argv[0])

# Map the denomination option to the multiplier applied to BTC amounts.
if sys.argv[1] == '-mbtc':
    denom = 1000
elif sys.argv[1] == '-btc':
    denom = 1
elif sys.argv[1] == '-bits':
    denom = 1000000
else:
    # Bug fix: an unrecognised first argument used to leave `denom`
    # undefined, which made every transaction later fail with a NameError
    # that was silently swallowed ("Transaction excluded"). Fail fast.
    sys.exit("Unknown option '%s'. Valid options are -btc, -mbtc, -bits." % sys.argv[1])

myProvider = airbitzwallets.airbitzwallets()

# Convert each CSV file listed after the denomination option.
for arg in sys.argv:
    if sys.argv.index(arg) > 1:
        # Start each conversion with a fresh log file next to the CSV.
        try:
            with open(arg[:len(arg)-3] + 'log'):
                os.remove(arg[:len(arg)-3] + 'log')
        except IOError:
            pass
        logging.basicConfig(filename=arg[:len(arg)-3] + 'log', level=logging.INFO)
        logging.info("Opening '%s' CSV File" % myProvider.getName())
        try:
            with open(arg, 'r') as csvfile:
                # Open CSV for reading
                reader = csv.DictReader(csvfile, delimiter=',', quotechar='"')
                # Instantiate the QBO document that accumulates transactions.
                myQbo = qbo.qbo()
                txnCount = 0
                for row in reader:
                    txnCount = txnCount + 1
                    sdata = str(row)
                    # Read in values from this row of the csv file.
                    date_posted = myProvider.getDatePosted(myProvider, row)
                    txn_memo = myProvider.getTxnMemo(myProvider, row)
                    txn_amount = myProvider.getTxnAmount(myProvider, row)
                    txn_curamt = myProvider.getTxnCurAmt(myProvider, row)
                    txn_category = myProvider.getTxnCategory(myProvider, row)
                    txn_id = myProvider.getTxnId(myProvider, row)
                    name = myProvider.getTxnName(myProvider, row)
                    try:
                        # Add transaction to the qbo document.
                        if myQbo.addTransaction(denom, date_posted, txn_memo, txn_id, txn_amount, txn_curamt, txn_category, name):
                            print('Transaction [' + str(txnCount) + '] added successfully!')
                            logging.info('Transaction [' + str(txnCount) + '] added successfully!')
                    except Exception:
                        # A bad row is logged and skipped; conversion continues.
                        # (Narrowed from a bare except so Ctrl-C still works.)
                        exc_type, exc_value, exc_traceback = sys.exc_info()
                        lines = traceback.format_exception(exc_type, exc_value, exc_traceback)
                        print(''.join('!! ' + line for line in lines))
                        logging.info("Transaction [" + str(txnCount) + "] excluded!")
                        logging.info('>> Data: ' + str(sdata))
        except Exception:
            exc_type, exc_value, exc_traceback = sys.exc_info()
            lines = traceback.format_exception(exc_type, exc_value, exc_traceback)
            print(''.join('!! ' + line for line in lines))
            logging.info("Trouble reading CSV file!")
        # After transactions have been read, write the full QBO document.
        try:
            filename = arg[:len(arg)-3] + 'qbo'
            if myQbo.Write('./' + filename):
                print("QBO file written successfully!")
                # log successful write
                logging.info("QBO file %s written successfully!" % filename)
        except Exception:
            # I/O (or earlier open) error while producing the QBO output.
            exc_type, exc_value, exc_traceback = sys.exc_info()
            lines = traceback.format_exception(exc_type, exc_value, exc_traceback)
            print(''.join('!! ' + line for line in lines))
            logging.info(''.join('!! ' + line for line in lines))
| 39.965812
| 155
| 0.534217
| 490
| 4,676
| 5.02449
| 0.332653
| 0.025589
| 0.02437
| 0.036556
| 0.240049
| 0.168562
| 0.159626
| 0.159626
| 0.159626
| 0.159626
| 0
| 0.008363
| 0.335115
| 4,676
| 116
| 156
| 40.310345
| 0.783532
| 0.174294
| 0
| 0.246575
| 0
| 0
| 0.146197
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.027397
| 0.082192
| 0
| 0.082192
| 0.068493
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
ae81bb11bf5eb162ed9c0bef3b103ae5f25903e5
| 772
|
py
|
Python
|
icekit/response_pages/migrations/0001_initial.py
|
ic-labs/django-icekit
|
c507ea5b1864303732c53ad7c5800571fca5fa94
|
[
"MIT"
] | 52
|
2016-09-13T03:50:58.000Z
|
2022-02-23T16:25:08.000Z
|
icekit/response_pages/migrations/0001_initial.py
|
ic-labs/django-icekit
|
c507ea5b1864303732c53ad7c5800571fca5fa94
|
[
"MIT"
] | 304
|
2016-08-11T14:17:30.000Z
|
2020-07-22T13:35:18.000Z
|
icekit/response_pages/migrations/0001_initial.py
|
ic-labs/django-icekit
|
c507ea5b1864303732c53ad7c5800571fca5fa94
|
[
"MIT"
] | 12
|
2016-09-21T18:46:35.000Z
|
2021-02-15T19:37:50.000Z
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Initial migration for the response_pages app: creates ResponsePage."""

    # First migration of the app — no dependencies on other migrations.
    dependencies = [
    ]
    operations = [
        # ResponsePage: an editable page served in place of a raw HTTP
        # error response (404 / 500).
        migrations.CreateModel(
            name='ResponsePage',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('title', models.CharField(max_length=255)),
                # unique=True: at most one page per error type.
                ('type', models.CharField(unique=True, max_length=5, choices=[(b'404', 'Page Not Found'), (b'500', 'Internal Server Error')])),
                ('is_active', models.BooleanField(default=False)),
            ],
            options={
            },
            bases=(models.Model,),
        ),
    ]
| 29.692308
| 143
| 0.563472
| 74
| 772
| 5.72973
| 0.743243
| 0.070755
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.02011
| 0.291451
| 772
| 25
| 144
| 30.88
| 0.755027
| 0.027202
| 0
| 0
| 0
| 0
| 0.100134
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.105263
| 0
| 0.263158
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
ae81fe56f7386088702aa7258803c69277db7d71
| 5,495
|
py
|
Python
|
tests/cancer.py
|
old-rob/cptac
|
9b33893dd11c9320628a751c8840783a6ce81957
|
[
"Apache-2.0"
] | null | null | null |
tests/cancer.py
|
old-rob/cptac
|
9b33893dd11c9320628a751c8840783a6ce81957
|
[
"Apache-2.0"
] | null | null | null |
tests/cancer.py
|
old-rob/cptac
|
9b33893dd11c9320628a751c8840783a6ce81957
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2018 Samuel Payne sam_payne@byu.edu
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# The purpose of this class is to organize a cancer object's datasets by
# type. dataset.py in the cptac package defines a lot of methods and members
# but there is no built-in way to call them in batches by type for testing.
import pytest
class Cancer:
    """Test wrapper around a cptac.[Cancer] object.

    Organizes the wrapped object's datasets by type (metadata vs. omics)
    and collects its getter methods, so tests can exercise them in
    batches. dataset.py in the cptac package defines many methods and
    members but offers no built-in way to call them by type.
    """

    # Dataset names treated as metadata tables.
    metadata_types = [
        'clinical',
        'derived_molecular',
        'experimental_design',
        # See dataset.py for why these aren't included:
        #'medical_history',
        #'treatment',
        #'followup'
    ]

    # Dataset names treated as omics tables.
    valid_omics_dfs = [
        'acetylproteomics',
        'circular_RNA',
        'CNV',
        'lincRNA',
        'lipidomics',
        'metabolomics',
        'miRNA',
        'phosphoproteomics',
        'phosphoproteomics_gene',
        'proteomics',
        'somatic_mutation_binary',
        'transcriptomics',
        'CNV_log2ratio',
        'CNV_gistic'
    ]

    # Frequently mutated genes worth checking in every cancer type.
    important_mutation_genes = ["TP53", "KRAS", "ARID1A", "PTEN", "EGFR"]

    # Dataset types that participate in multi-table join tests.
    multi_join_types = [
        "acetylproteomics",
        "CNV",
        "CNV_gistic",
        "CNV_log2ratio",
        "phosphoproteomics",
        "phosphoproteomics_gene",
        "proteomics",
        "somatic_mutation_binary",
        "somatic_mutation",
        "transcriptomics",
        "clinical",
        "derived_molecular",
        "experimental_design"
    ]

    def __init__(self, cancer_type, cancer_object):
        """Initialize a Cancer object.

        Cancer class is used as a wrapper for cptac.[Cancer] objects that
        will be tested.

        Parameters:
            cancer_type (string): name of the cancer
            cancer_object (cptac.[Cancer]): instance of the cptac.[Cancer] class
        """
        self.cancer_type = cancer_type
        self.cancer_object = cancer_object
        self.metadata = list()
        self.omics = list()
        self.valid_getters = dict()
        self.invalid_getters = dict()
        self.multi_joinables = dict()
        self._sort_datasets()
        self._sort_getters()
        self._gather_mutation_genes()

    def _sort_datasets(self):
        # Categorize datasets (metadata vs. omics) for the join tests.
        datasets = self.cancer_object.get_data_list().items()
        for (dataset, dimensions) in datasets:
            if dataset in Cancer.metadata_types:
                self.metadata.append(dataset)
            elif dataset in Cancer.valid_omics_dfs:
                self.omics.append(dataset)
            if dataset in ["clinical", "transcriptomics", "proteomics"]:
                self.multi_joinables[dataset] = list()

    def _sort_getters(self):
        # Collect every get_* attribute exposed by the wrapped object.
        all_getters = set()
        for attribute in dir(self.cancer_object):
            if attribute.startswith("get_"):
                all_getters.add(attribute)
        datasets = self.cancer_object.get_data_list().keys()
        # Valid getters: one per available dataset.
        for d in datasets:
            try:
                # Ucecconf exposes its CNV variants through a single
                # get_CNV(CNV_type) getter.
                if d.startswith("CNV") and self.cancer_type == "Ucecconf":
                    getter_name = "get_CNV"
                else:
                    getter_name = "get_" + d
                self.valid_getters[getter_name] = getattr(self.cancer_object, getter_name)
            except Exception:
                pytest.fail(f"unable to add get {d} attribute")
        # Invalid getters: every remaining get_* attribute.
        # Bug fix: the original loop iterated `all_getters` but tested and
        # stored the stale variable `getter_name` left over from the loop
        # above, so invalid_getters was never populated correctly.
        for getter in all_getters:
            if getter not in self.valid_getters.keys():
                self.invalid_getters[getter] = getattr(self.cancer_object, getter)

    def _gather_mutation_genes(self):
        # Keep only the important genes actually present in this cancer's
        # somatic mutation data (if any).
        self.mutation_genes = list()
        if "somatic_mutation" in self.cancer_object.get_data_list():
            recorded_genes = self.cancer_object.get_somatic_mutation()["Gene"].tolist()
            for g in self.important_mutation_genes:
                if g in recorded_genes:
                    self.mutation_genes.append(g)

    def get_dataset(self, dataset, CNV_type="log2ratio"):
        '''Return the dataframe for the requested dataset.

        Args:
            dataset: the desired dataset
            CNV_type: if the desired dataset is CNV and the cancer type is
                Ucecconf, selects which version of the dataset is returned.
        Returns:
            a dataframe for the desired dataset
        '''
        if dataset == "CNV" and self.cancer_type == "Ucecconf":
            return self.valid_getters["get_CNV"](CNV_type)
        return self.valid_getters["get_" + dataset]()

    def get_omics(self):
        """Return the list of omics dataset names."""
        return self.omics

    def get_metadata(self):
        """Return the list of metadata dataset names."""
        return self.metadata

    def get_mutation_genes(self):
        """Return the important mutation genes present in this cancer."""
        return self.mutation_genes
| 33.10241
| 89
| 0.593267
| 610
| 5,495
| 5.159016
| 0.329508
| 0.049571
| 0.040674
| 0.02415
| 0.158564
| 0.115983
| 0.089609
| 0
| 0
| 0
| 0
| 0.003758
| 0.322111
| 5,495
| 165
| 90
| 33.30303
| 0.841074
| 0.278981
| 0
| 0
| 0
| 0
| 0.149594
| 0.023579
| 0
| 0
| 0
| 0
| 0
| 1
| 0.083333
| false
| 0
| 0.03125
| 0.03125
| 0.21875
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
ae83889dc0e1e2a10d944afb86e01b0c15293029
| 5,098
|
py
|
Python
|
code/mlflow.py
|
michaelhball/ml_tidbits
|
55b77fded5f31cd280f043c8aa792a07ca572170
|
[
"MIT"
] | 1
|
2021-04-15T19:42:51.000Z
|
2021-04-15T19:42:51.000Z
|
code/mlflow.py
|
michaelhball/ml_toolshed
|
55b77fded5f31cd280f043c8aa792a07ca572170
|
[
"MIT"
] | null | null | null |
code/mlflow.py
|
michaelhball/ml_toolshed
|
55b77fded5f31cd280f043c8aa792a07ca572170
|
[
"MIT"
] | null | null | null |
import git
from mlflow.tracking import MlflowClient
from .utils import scp_files
class MyMLFlowClient:
    """Thin wrapper around MLFlow's tracking API.

    A single instance is enough for many training runs.
    """

    def __init__(self, tracking_uri):
        """Create the underlying MlflowClient.

        :param tracking_uri: (str) MLFlow tracking URI for the tracking API
        """
        self.client = MlflowClient(tracking_uri=tracking_uri)
        self.run = None

    def create_new_run(self, experiment_name, user_name, set_tags=True, run_name=None):
        """Create a new run, creating the experiment first if needed.

        :param experiment_name: (str) experiment to create the run within
        :param user_name: (str) user name of the person creating the run
        :param set_tags: (bool) whether to apply the default tag set
        :param run_name: (str) optional run name (run ID used otherwise)
        :return: the new run ID, or False on any failure
        """
        try:
            existing = self.client.get_experiment_by_name(experiment_name)
            if existing is None:
                exp_id = self.client.create_experiment(experiment_name)
                self.client.set_experiment_tag(exp_id, "created_by", user_name)
            else:
                exp_id = existing.experiment_id
            new_run_id = self.client.create_run(exp_id).info.run_id
            if set_tags and not self._set_run_tags(user_name, new_run_id, run_name):
                return False
            return new_run_id
        except Exception as e:
            print('Exception initialising MLFlow run', e)
            return False

    def _set_run_tags(self, user_name, run_id, run_name):
        """Apply the standard system tags (user, display name, git provenance)."""
        try:
            repo = git.Repo(search_parent_directories=True)
            display_name = run_id if run_name is None else run_name
            standard_tags = {
                "run_id": run_id,
                "mlflow.runName": display_name,
                "mlflow.user": user_name,
                "mlflow.source.git.repoURL": repo.remotes.origin.url,
                "mlflow.source.git.branch": repo.active_branch.name,
                "mlflow.source.git.commit": repo.head.object.hexsha,
            }
            for key, value in standard_tags.items():
                self.client.set_tag(run_id, key, value)
            return True
        except Exception as e:
            print('Exception setting MLFlow run system tags: \n', e)
            return False

    def log_param(self, run_id, param_dict):
        """Log a dictionary of params to the MLFlow tracking server.

        :param run_id: (str) run ID
        :param param_dict: (dict) param_name -> param_value
        :return: True on success, False otherwise
        """
        try:
            for name, value in param_dict.items():
                self.client.log_param(run_id, name, value)
            return True
        except Exception as e:
            print(f'Exception logging params run {run_id}', e)
            return False

    def log_metrics(self, run_id, metric_dict, step=None, timestamp=None):
        """Log a dictionary of metrics (at a particular step or timestamp).

        :param run_id: (str) run ID
        :param metric_dict: (dict) metric_name -> metric_value
        :param step: (int) step to associate metrics with (e.g. epoch)
        :param timestamp: (time) timestamp to associate metrics with
        :return: True on success, False otherwise
        """
        try:
            for name, value in metric_dict.items():
                self.client.log_metric(run_id, name, value, step=step, timestamp=timestamp)
            return True
        except Exception as e:
            print(f'Exception logging metrics to run {run_id}', e)
            return False

    def download_artifact(self, run_id, remote_dir, local_dir, ssh_params=None):
        """Download an artifact from the MLFlow server (locally or over SSH).

        :param run_id: (str) run ID
        :param remote_dir: (path) artifact path inside run artifact storage
        :param local_dir: (path) local directory in which to save artifact
        :param ssh_params: (dict) host, username and password, if included
        :return: True if successful, False otherwise.
        """
        try:
            if ssh_params is None:
                self.client.download_artifacts(run_id, remote_dir, local_dir)
            else:
                run = self.client.get_run(run_id)
                artifact_uri = f"{run.info.artifact_uri}/{remote_dir}"
                outcome = scp_files(**ssh_params, remote_dir=artifact_uri, local_dir=local_dir, direction='from')
                if isinstance(outcome, bool) and not outcome:
                    return False
            return True
        except Exception as e:
            print(f'Exception downloading artifact from run {run_id}', e)
            return False
| 41.112903
| 113
| 0.632797
| 674
| 5,098
| 4.596439
| 0.24184
| 0.050032
| 0.029374
| 0.030988
| 0.25694
| 0.205617
| 0.152034
| 0.104261
| 0.104261
| 0.058102
| 0
| 0
| 0.287564
| 5,098
| 123
| 114
| 41.447154
| 0.852974
| 0.291291
| 0
| 0.343284
| 0
| 0
| 0.10625
| 0.03244
| 0
| 0
| 0
| 0
| 0
| 1
| 0.089552
| false
| 0
| 0.044776
| 0
| 0.328358
| 0.074627
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
ae84bc9755e4432da8e4dc0549c028ec150a10c7
| 4,215
|
py
|
Python
|
infinite_nature/autocruise.py
|
DionysisChristopoulos/google-research
|
7f59ef421beef32ca16c2a7215be74f7eba01a0f
|
[
"Apache-2.0"
] | 23,901
|
2018-10-04T19:48:53.000Z
|
2022-03-31T21:27:42.000Z
|
infinite_nature/autocruise.py
|
davidfitzek/google-research
|
eb2b142f26e39aac1dcbb768417465ae9d4e5af6
|
[
"Apache-2.0"
] | 891
|
2018-11-10T06:16:13.000Z
|
2022-03-31T10:42:34.000Z
|
infinite_nature/autocruise.py
|
davidfitzek/google-research
|
eb2b142f26e39aac1dcbb768417465ae9d4e5af6
|
[
"Apache-2.0"
] | 6,047
|
2018-10-12T06:31:02.000Z
|
2022-03-31T13:59:28.000Z
|
# coding=utf-8
# Copyright 2021 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Uses a heuristic to automatically navigate generated scenes.
fly_camera.fly_dynamic will generate poses using disparity maps that avoid
crashing into nearby terrain.
"""
import pickle
import time
import config
import fly_camera
import imageio
import infinite_nature_lib
import numpy as np
import tensorflow as tf
# Command-line flags for the autocruise driver (TF v1-style flag API).
tf.compat.v1.flags.DEFINE_string(
    "output_folder", "autocruise_output",
    "Folder to save autocruise results")
tf.compat.v1.flags.DEFINE_integer(
    "num_steps", 500,
    "Number of steps to fly.")
# Parsed flag values, read by the functions below.
FLAGS = tf.compat.v1.flags.FLAGS
def generate_autocruise(np_input_rgbd, checkpoint,
                        save_directory, num_steps, np_input_intrinsics=None):
  """Saves num_steps frames of infinite nature using an autocruise algorithm.

  Frames are written to save_directory as zero-padded PNGs
  ("0000.png", "0001.png", ...).

  Args:
    np_input_rgbd: [H, W, 4] numpy image and disparity to start
      Infinite Nature with values ranging in [0, 1]
    checkpoint: (str) path to the pre-trained checkpoint
    save_directory: (str) the directory to save RGB images to
    num_steps: (int) the number of steps to generate
    np_input_intrinsics: [4] estimated intrinsics. If not provided,
      makes assumptions on the FOV.
  """
  render_refine, style_encoding = infinite_nature_lib.load_model(checkpoint)
  if np_input_intrinsics is None:
    # 0.8 focal_x corresponds to a FOV of ~64 degrees. This can be
    # manually changed if more assumptions about the input image is given.
    h, w, unused_channel = np_input_rgbd.shape
    ratio = w / float(h)
    np_input_intrinsics = np.array([0.8, 0.8 * ratio, .5, .5], dtype=np.float32)
  # The model operates at a fixed 160x256 resolution.
  np_input_rgbd = tf.image.resize(np_input_rgbd, [160, 256])
  style_noise = style_encoding(np_input_rgbd)
  # Flight parameters: zero meander magnitudes give a straight-ahead path.
  meander_x_period = 100
  meander_y_period = 100
  meander_x_magnitude = 0.0
  meander_y_magnitude = 0.0
  fly_speed = 0.2
  horizon = 0.3
  near_fraction = 0.2
  # Identity 3x4 pose matrix as the starting camera pose.
  starting_pose = np.array(
      [[1.0, 0.0, 0.0, 0.0], [0.0, 1.0, 0.0, 0.0], [0.0, 0.0, 1.0, 0.0]],
      dtype=np.float32)
  # autocruise heuristic function: picks the next pose from disparity so
  # the camera avoids flying into nearby terrain.
  fly_next_pose_function = fly_camera.fly_dynamic(
      np_input_intrinsics, starting_pose,
      speed=fly_speed,
      meander_x_period=meander_x_period,
      meander_x_magnitude=meander_x_magnitude,
      meander_y_period=meander_y_period,
      meander_y_magnitude=meander_y_magnitude,
      horizon=horizon,
      near_fraction=near_fraction)
  if not tf.io.gfile.exists(save_directory):
    tf.io.gfile.makedirs(save_directory)
  curr_pose = starting_pose
  curr_rgbd = np_input_rgbd
  t0 = time.time()
  for i in range(num_steps - 1):
    # Pick a pose from the current disparity, then render/refine the next
    # RGBD frame from it.
    next_pose = fly_next_pose_function(curr_rgbd)
    curr_rgbd = render_refine(
        curr_rgbd, style_noise, curr_pose, np_input_intrinsics,
        next_pose, np_input_intrinsics)
    # Update pose information for view.
    curr_pose = next_pose
    imageio.imsave("%s/%04d.png" % (save_directory, i),
                   (255 * curr_rgbd[:, :, :3]).astype(np.uint8))
    if i % 100 == 0:
      print("%d / %d frames generated" % (i, num_steps))
      print("time / step: %04f" % ((time.time() - t0) / (i + 1)))
      print()
def main(unused_arg):
  """Entry point: runs autocruise on the pickled example input.

  Args:
    unused_arg: leftover positional command-line arguments after flag
      parsing; anything beyond the program name is an error.
  """
  if len(unused_arg) > 1:
    # Was `tf.app.UsageError`, which does not exist under the TF2 API
    # surface; use the compat.v1 alias used everywhere else in this file.
    raise tf.compat.v1.app.UsageError(
        "Too many command-line arguments.")
  config.set_training(False)
  model_path = "ckpt/model.ckpt-6935893"
  # Close the file handle deterministically instead of leaking it to GC.
  with open("autocruise_input1.pkl", "rb") as f:
    input_pkl = pickle.load(f)
  generate_autocruise(input_pkl["input_rgbd"],
                      model_path,
                      FLAGS.output_folder,
                      FLAGS.num_steps)
if __name__ == "__main__":
tf.compat.v1.enable_eager_execution()
tf.compat.v1.app.run(main)
| 33.452381
| 80
| 0.704152
| 631
| 4,215
| 4.492868
| 0.380349
| 0.014109
| 0.015873
| 0.016931
| 0.048677
| 0.008466
| 0.008466
| 0.008466
| 0.008466
| 0.008466
| 0
| 0.031732
| 0.2
| 4,215
| 125
| 81
| 33.72
| 0.809015
| 0.331673
| 0
| 0
| 0
| 0
| 0.087757
| 0.01589
| 0
| 0
| 0
| 0
| 0
| 1
| 0.027027
| false
| 0
| 0.108108
| 0
| 0.135135
| 0.040541
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
ae858354ab4f1914f4dfc11dd1d64a5507769f1b
| 563
|
py
|
Python
|
app/gui/repeater.py
|
TomVollerthun1337/logsmith
|
f2ecab4dea295d5493a9a3e77a2837b13fa139e5
|
[
"Apache-2.0"
] | 19
|
2020-01-18T00:25:43.000Z
|
2022-03-14T07:39:08.000Z
|
app/gui/repeater.py
|
TomVollerthun1337/logsmith
|
f2ecab4dea295d5493a9a3e77a2837b13fa139e5
|
[
"Apache-2.0"
] | 85
|
2020-01-21T12:13:56.000Z
|
2022-03-31T04:01:03.000Z
|
app/gui/repeater.py
|
TomVollerthun1337/logsmith
|
f2ecab4dea295d5493a9a3e77a2837b13fa139e5
|
[
"Apache-2.0"
] | 2
|
2020-06-25T06:15:19.000Z
|
2021-02-15T18:17:38.000Z
|
import logging
from PyQt5.QtCore import QTimer
logger = logging.getLogger('logsmith')
class Repeater:
    """Schedules a task to run once after a delay via a Qt timer.

    NOTE(review): despite the name, the timer is created with
    setSingleShot(True), so the task fires exactly once per start() call
    rather than repeating -- confirm this is the intended contract.
    """

    def __init__(self):
        # Currently scheduled QTimer, or None when nothing is pending.
        self._current_task = None

    def start(self, task, delay_seconds):
        """Cancel any pending task and schedule `task` after delay_seconds."""
        delay_millies = delay_seconds * 1000  # QTimer.start expects milliseconds
        self.stop()
        logger.info('start timer')
        timer = QTimer()
        timer.setSingleShot(True)
        timer.timeout.connect(task)
        timer.start(delay_millies)
        # Keep a reference so the timer is not garbage collected and can
        # later be cancelled by stop().
        self._current_task = timer

    def stop(self):
        """Cancel the pending task, if any."""
        if self._current_task:
            self._current_task.stop()
| 21.653846
| 44
| 0.635879
| 65
| 563
| 5.261538
| 0.461538
| 0.128655
| 0.175439
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.012195
| 0.271758
| 563
| 25
| 45
| 22.52
| 0.821951
| 0
| 0
| 0
| 0
| 0
| 0.033748
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.166667
| false
| 0
| 0.111111
| 0
| 0.333333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
ae8a6e4bdddcbc9fac409eabb59750fe2825a857
| 3,785
|
py
|
Python
|
exp/ground/infonce_acc_plot/plot.py
|
ChopinSharp/info-ground
|
12fba3c478b806f2fe068faac81237fd0f458b80
|
[
"Apache-2.0"
] | 56
|
2020-09-21T07:41:08.000Z
|
2022-01-10T13:28:36.000Z
|
exp/ground/infonce_acc_plot/plot.py
|
ChopinSharp/info-ground
|
12fba3c478b806f2fe068faac81237fd0f458b80
|
[
"Apache-2.0"
] | 5
|
2020-08-26T15:50:29.000Z
|
2022-01-04T07:53:07.000Z
|
exp/ground/infonce_acc_plot/plot.py
|
ChopinSharp/info-ground
|
12fba3c478b806f2fe068faac81237fd0f458b80
|
[
"Apache-2.0"
] | 15
|
2020-08-24T16:36:20.000Z
|
2022-01-17T12:51:45.000Z
|
import os
import numpy as np
import matplotlib.pyplot as plt
import utils.io as io
from global_constants import misc_paths
def get_infonce_data(infonce_dir, layers):
    """Load the (iteration, loss) curve for the `layers`-layer InfoNCE head.

    Reads infonce_{layers}_layer.json from infonce_dir, drops the
    iteration-0 entry, and rounds losses to two decimal places.
    """
    json_path = os.path.join(infonce_dir, f'infonce_{layers}_layer.json')
    records = io.load_json_object(json_path)
    # Each record is (timestamp, iteration, loss); the timestamp is unused.
    kept = [(it, loss) for _, it, loss in records if it != 0]
    iters = [it for it, _ in kept]
    losses = [round(loss, 2) for _, loss in kept]
    return iters, losses
def get_acc_data(acc_dir, iters):
    """Return the 'pt_recall' value for each iteration (None when missing)."""
    accs = [None] * len(iters)
    for idx, it in enumerate(iters):
        result_path = os.path.join(acc_dir, f'results_val_{it}.json')
        # Leave the slot as None when that iteration was never evaluated.
        if os.path.exists(result_path):
            accs[idx] = io.load_json_object(result_path)['pt_recall']
    return accs
def create_point_label(x, y, label, color, markersize, marker):
    """Draw a single marker at (x, y) with a serif text label to its right."""
    plt.plot(x, y, c=color, markersize=markersize, marker=marker)
    label_position = (x + 0.025, y)
    plt.annotate(label, label_position, c=color, va='center', fontsize=9,
                 family='serif')
def main():
    """Plot pointing accuracy vs. InfoNCE lower bound for 1/2/3-layer heads."""
    infonce_dir = os.path.join(
        os.getcwd(),
        'exp/pretrain_coco_noun_negs/infonce_acc_plot')
    # Hard-coded experiment root on the authors' cluster.
    exp_dir = '/shared/rsaas/tgupta6/Data/context-regions/coco_exp'
    colors = ['r','g','b']
    num_layers = [1,2,3]
    infonce_losses = {}  # NOTE(review): never populated; appears unused
    handles = [None]*3
    labels = ['Linear', 'MLP w/ 1 hidden layer', 'MLP w/ 2 hidden layers']
    arrowcolor='k' #(0.3,0.3,0.3)
    ha = ['right','left','right']  # NOTE(review): unused below
    for i,l in enumerate(num_layers):
        iters,losses = get_infonce_data(infonce_dir,l)
        acc_dir = os.path.join(
            exp_dir,
            f'loss_wts_neg_noun_1_self_sup_1_lang_sup_1_no_context_vgdet_nonlinear_infonce_{l}_layer_adj_batch_50')
        accs = get_acc_data(acc_dir,iters)
        # InfoNCE lower bound: log(50) minus the loss.
        bounds = [np.log(50)-infonce for infonce in losses]
        handles[i], = plt.plot(bounds,accs,c=colors[i],markersize=0,marker='o',linewidth=1.5,label=labels[i])
        # Annotate the best-accuracy point with its iteration count.
        # NOTE(review): np.argmax assumes accs has no None entries here;
        # get_acc_data can return None slots -- confirm all results exist.
        k = np.argmax(accs)
        labels.append(iters[k])  # NOTE(review): appended values never read
        plt.annotate(
            str(iters[k]//1000) + 'K Iters',
            c=arrowcolor,
            xy=(bounds[k],accs[k]),
            xytext=(3.35,accs[k]),
            fontsize=9,
            family='serif',
            arrowprops=dict(arrowstyle="->",linestyle='-',ec=arrowcolor,fc=arrowcolor),
            va='center')
        # First point (circle), best point (star), last point (square).
        plt.plot(bounds[0],accs[0],c=colors[i],markersize=4,marker='o')
        plt.plot(bounds[k],accs[k],c=colors[i],markersize=6,marker='*')
        plt.plot(bounds[-1],accs[-1],c=colors[i],markersize=4,marker='s')
    # Manual legend for iterations
    lx = 3.04 #49.45
    ly = 73 #66
    d = 0.8
    #plt.annotate('Iterations:',(lx-0.005,ly),c=arrowcolor,va='center',fontsize=9,family='serif',weight='bold')
    create_point_label(lx,ly,'4K Iters',arrowcolor,markersize=4,marker='o')
    create_point_label(lx,ly-d,'80K Iters',arrowcolor,markersize=4,marker='s')
    create_point_label(lx,ly-2*d,'Best Accuracy',arrowcolor,markersize=6,marker='*')
    # Legend for layers
    plt.plot()
    plt.legend(
        handles=handles,
        loc='upper left',
        frameon=False,
        prop={'size':9,'family':'serif'})
    plt.xlabel("InfoNCE lower bound on COCO (Val)",fontsize=9,family='serif')
    plt.ylabel('Pointing accuracy on Flickr30k Entities (Val)',fontsize=9,family='serif')
    plt.yticks(size=9,family='serif')
    plt.xticks(size=9,family='serif')
    # a = plt.gca()
    # import pdb; pdb.set_trace()
    # a.set_xticklabels(a.get_xticks(), {'family':'serif'})
    # a.set_yticklabels(a.get_yticks(), {'family':'serif'})
    figname = os.path.join(misc_paths['scratch_dir'],'infonce_acc_plot.png')
    plt.savefig(figname,dpi=600,bbox_inches='tight')
if __name__=='__main__':
main()
| 33.495575
| 115
| 0.622985
| 550
| 3,785
| 4.125455
| 0.338182
| 0.04848
| 0.042309
| 0.044072
| 0.180696
| 0.088145
| 0
| 0
| 0
| 0
| 0
| 0.027009
| 0.217437
| 3,785
| 113
| 116
| 33.495575
| 0.739028
| 0.085337
| 0
| 0.024096
| 0
| 0
| 0.16044
| 0.070084
| 0
| 0
| 0
| 0
| 0
| 1
| 0.048193
| false
| 0
| 0.060241
| 0
| 0.13253
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
ae8b36f89eab35825f3909abeb288a05a078f59a
| 5,416
|
py
|
Python
|
fairness/app.py
|
Tomcli/ffdl-knative
|
b68edaaa1717ac34c946e25d24198590012b0e20
|
[
"Apache-2.0"
] | 2
|
2019-01-18T16:10:50.000Z
|
2019-10-24T11:42:31.000Z
|
fairness/app.py
|
Tomcli/ffdl-knative
|
b68edaaa1717ac34c946e25d24198590012b0e20
|
[
"Apache-2.0"
] | null | null | null |
fairness/app.py
|
Tomcli/ffdl-knative
|
b68edaaa1717ac34c946e25d24198590012b0e20
|
[
"Apache-2.0"
] | null | null | null |
import os
from aif360.datasets import BinaryLabelDataset
from aif360.metrics import ClassificationMetric
import numpy as np
import argparse
import pandas as pd
import boto3
import botocore
import json
from flask import Flask, request, abort
from flask_cors import CORS
app = Flask(__name__)
CORS(app)
def dataset_wrapper(outcome, protected, unprivileged_groups, privileged_groups, favorable_label, unfavorable_label):
    """ A wrapper function to create aif360 dataset from outcome and protected in numpy array format.
    """
    # Two-column frame: the outcome labels plus the protected attribute.
    frame = pd.DataFrame(data=outcome,
                         columns=['outcome'])
    frame['race'] = protected
    return BinaryLabelDataset(
        favorable_label=favorable_label,
        unfavorable_label=unfavorable_label,
        df=frame,
        label_names=['outcome'],
        protected_attribute_names=['race'],
        unprivileged_protected_attributes=unprivileged_groups)
def get_s3_item(client, bucket, s3_path, name):
    """Download s3_path from `bucket` to the local file `name`.

    A missing object (404) is reported on stdout and tolerated; any other
    client error is re-raised.
    """
    try:
        client.Bucket(bucket).download_file(s3_path, name)
    except botocore.exceptions.ClientError as err:
        if err.response['Error']['Code'] != "404":
            raise
        print("The object does not exist.")
def fairness_check(s3_url, bucket_name, s3_username, s3_password, training_id):
    """Compute aif360 fairness metrics for a finished training run.

    Downloads y_test.out / p_test.out / y_pred.out from
    <bucket_name>/<training_id>/ on the given S3-compatible endpoint,
    builds BinaryLabelDatasets for ground truth and predictions, and
    returns a dict of classification fairness metrics.
    """
    cos = boto3.resource("s3",
                         endpoint_url=s3_url,
                         aws_access_key_id=s3_username,
                         aws_secret_access_key=s3_password)
    y_test_out = 'y_test.out'  # ground-truth labels
    p_test_out = 'p_test.out'  # protected attribute values
    y_pred_out = 'y_pred.out'  # model predictions
    get_s3_item(cos, bucket_name, training_id + '/' + y_test_out, y_test_out)
    get_s3_item(cos, bucket_name, training_id + '/' + p_test_out, p_test_out)
    get_s3_item(cos, bucket_name, training_id + '/' + y_pred_out, y_pred_out)
    """Need to generalize the protected features"""
    # NOTE(review): the 'race' encodings (4.0 unprivileged / 0.0 privileged)
    # and the label polarity are hard-coded for one dataset -- confirm
    # before reusing for other training jobs.
    unprivileged_groups = [{'race': 4.0}]
    privileged_groups = [{'race': 0.0}]
    favorable_label = 0.0
    unfavorable_label = 1.0
    """Load the necessary labels and protected features for fairness check"""
    y_test = np.loadtxt(y_test_out)
    p_test = np.loadtxt(p_test_out)
    y_pred = np.loadtxt(y_pred_out)
    """Calculate the fairness metrics"""
    original_test_dataset = dataset_wrapper(outcome=y_test, protected=p_test,
                                            unprivileged_groups=unprivileged_groups,
                                            privileged_groups=privileged_groups,
                                            favorable_label=favorable_label,
                                            unfavorable_label=unfavorable_label)
    plain_predictions_test_dataset = dataset_wrapper(outcome=y_pred, protected=p_test,
                                                     unprivileged_groups=unprivileged_groups,
                                                     privileged_groups=privileged_groups,
                                                     favorable_label=favorable_label,
                                                     unfavorable_label=unfavorable_label)
    classified_metric_nodebiasing_test = ClassificationMetric(original_test_dataset,
                                                              plain_predictions_test_dataset,
                                                              unprivileged_groups=unprivileged_groups,
                                                              privileged_groups=privileged_groups)
    TPR = classified_metric_nodebiasing_test.true_positive_rate()
    TNR = classified_metric_nodebiasing_test.true_negative_rate()
    bal_acc_nodebiasing_test = 0.5*(TPR+TNR)
    print("#### Plain model - without debiasing - classification metrics on test set")
    metrics = {
        "Classification accuracy": classified_metric_nodebiasing_test.accuracy(),
        "Balanced classification accuracy": bal_acc_nodebiasing_test,
        "Statistical parity difference": classified_metric_nodebiasing_test.statistical_parity_difference(),
        "Disparate impact": classified_metric_nodebiasing_test.disparate_impact(),
        "Equal opportunity difference": classified_metric_nodebiasing_test.equal_opportunity_difference(),
        "Average odds difference": classified_metric_nodebiasing_test.average_odds_difference(),
        "Theil index": classified_metric_nodebiasing_test.theil_index(),
        "False negative rate difference": classified_metric_nodebiasing_test.false_negative_rate_difference()
    }
    print("metrics: ", metrics)
    return metrics
# with open(metric_path, "w") as report:
# report.write(json.dumps(metrics))
@app.route('/', methods=['POST'])
def fairness_api():
    """POST endpoint: run a fairness check described by the JSON body.

    Expected keys: aws_endpoint_url, training_results_bucket,
    aws_access_key_id, aws_secret_access_key, model_id.
    Responds 400 when any key is missing or the body is not a JSON object.
    """
    try:
        s3_url = request.json['aws_endpoint_url']
        bucket_name = request.json['training_results_bucket']
        s3_username = request.json['aws_access_key_id']
        s3_password = request.json['aws_secret_access_key']
        training_id = request.json['model_id']
    except (KeyError, TypeError):
        # Was a bare `except:`, which also swallowed programming errors
        # (even SystemExit). Only malformed request bodies should 400:
        # KeyError for missing keys, TypeError when request.json is None
        # or not a mapping.
        abort(400)
    return json.dumps(fairness_check(s3_url, bucket_name, s3_username, s3_password, training_id))
@app.route('/', methods=['OPTIONS'])
def fairness_api_options():
    """CORS preflight handler; replies with a literal "200" body (HTTP 200)."""
    return "200"
if __name__ == "__main__":
app.run(debug=True,host='0.0.0.0',port=int(os.environ.get('PORT', 8080)))
| 41.984496
| 116
| 0.636263
| 588
| 5,416
| 5.506803
| 0.280612
| 0.05559
| 0.083385
| 0.095738
| 0.362261
| 0.242742
| 0.198579
| 0.198579
| 0.159049
| 0.159049
| 0
| 0.014627
| 0.280465
| 5,416
| 128
| 117
| 42.3125
| 0.816269
| 0.032496
| 0
| 0.114583
| 0
| 0
| 0.097912
| 0.008668
| 0
| 0
| 0
| 0
| 0
| 1
| 0.052083
| false
| 0.041667
| 0.114583
| 0.010417
| 0.208333
| 0.03125
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
ae8f4daece742a4c95381dd42af1f242bb79321d
| 1,739
|
py
|
Python
|
trseeker/models/chromosome_model.py
|
ad3002/Lyrebird
|
8c0a186e32d61189f073401152c52a89bfed46ed
|
[
"MIT"
] | null | null | null |
trseeker/models/chromosome_model.py
|
ad3002/Lyrebird
|
8c0a186e32d61189f073401152c52a89bfed46ed
|
[
"MIT"
] | null | null | null |
trseeker/models/chromosome_model.py
|
ad3002/Lyrebird
|
8c0a186e32d61189f073401152c52a89bfed46ed
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
#@created: 08.09.2011
#@author: Aleksey Komissarov
#@contact: ad3002@gmail.com
from PyExp import AbstractModel
class ChomosomeModel(AbstractModel):
    ''' Chromosome model.

    NOTE(review): the class name is missing an "r" ("Chomosome" vs
    "Chromosome"); kept as-is because renaming would break importers.

    Dumpable attributes:

    - "chr_genome",
    - "chr_number",
    - "chr_taxon",
    - "chr_prefix",
    - "chr_gpid",
    - "chr_acronym",
    - "chr_contigs",
    - "chr_length",
    - "chr_mean_gc",
    - "chr_trs_all",
    - "chr_trs_3000",
    - "chr_trs_all_proc",
    - "chr_trs_3000_proc",
    - "chr_trs_all_length",
    - "chr_trs_3000_length",
    - "genome_gaps",
    - "chr_sum_gc",
    '''

    # Field order presumably matters to AbstractModel's dump/load logic --
    # TODO confirm against PyExp.AbstractModel before reordering.
    dumpable_attributes = [
        "chr_genome",
        "chr_number",
        "chr_taxon",
        "chr_prefix",
        "chr_gpid",
        "chr_acronym",
        "chr_contigs",
        "chr_length",
        "chr_mean_gc",
        "chr_trs_all",
        "chr_trs_3000",
        "chr_trs_all_proc",
        "chr_trs_3000_proc",
        "chr_trs_all_length",
        "chr_trs_3000_length",
        "genome_gaps",
        "chr_sum_gc",
    ]

    def preprocess_data(self):
        """Fill derived fields from the raw attributes."""
        # Derived proportions: *_proc = *_length / chr_length.
        if self.chr_trs_all_length:
            self.chr_trs_all_proc = self.chr_trs_all_length / float(self.chr_length)
        if self.chr_trs_3000_length:
            self.chr_trs_3000_proc = self.chr_trs_3000_length / float(self.chr_length)
        # Mean GC from the running sum, only when not already provided.
        if not self.chr_mean_gc:
            self.chr_mean_gc = self.chr_sum_gc / self.chr_contigs
| 28.048387
| 87
| 0.496262
| 184
| 1,739
| 4.217391
| 0.282609
| 0.139175
| 0.104381
| 0.07732
| 0.69201
| 0.606959
| 0.497423
| 0.497423
| 0.497423
| 0.497423
| 0
| 0.046578
| 0.395055
| 1,739
| 61
| 88
| 28.508197
| 0.691065
| 0.26567
| 0
| 0
| 0
| 0
| 0.18198
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.035714
| false
| 0
| 0.035714
| 0
| 0.142857
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
ae93028834095132f0d185515f7eb82644b3d574
| 478
|
py
|
Python
|
heap/1.py
|
miiiingi/algorithmstudy
|
75eaf97e2c41d7edf32eb4a57d4d7685c9218aba
|
[
"MIT"
] | null | null | null |
heap/1.py
|
miiiingi/algorithmstudy
|
75eaf97e2c41d7edf32eb4a57d4d7685c9218aba
|
[
"MIT"
] | null | null | null |
heap/1.py
|
miiiingi/algorithmstudy
|
75eaf97e2c41d7edf32eb4a57d4d7685c9218aba
|
[
"MIT"
] | null | null | null |
import heapq
def solution(scoville, K):
    """Return the minimum number of mixes needed until every scoville value
    is at least K, or -1 if that is impossible.

    One mix pops the two mildest foods and pushes ``mildest + 2 * second``.
    ``scoville`` is heapified (and consumed) in place.

    Fixes two defects in the original:
    - it mixed unconditionally before checking the threshold, so an input
      that already satisfied K reported one mix instead of zero;
    - it used a bare ``except:`` around the whole loop for control flow,
      which also swallowed unrelated errors.

    Args:
        scoville: list of non-negative scoville values (mutated in place).
        K: target minimum scoville value.
    """
    if not scoville:
        return -1
    heapq.heapify(scoville)
    count = 0
    # Stop as soon as the mildest item already meets the threshold.
    while scoville[0] < K:
        if len(scoville) < 2:
            return -1  # one item left and it is still too mild
        first = heapq.heappop(scoville)
        second = heapq.heappop(scoville)
        heapq.heappush(scoville, first + second * 2)
        count += 1
    return count
answer = solution([1,2,3,9,10,12], 1000)
print(answer)
| 28.117647
| 45
| 0.523013
| 52
| 478
| 4.807692
| 0.538462
| 0.096
| 0.16
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.057239
| 0.378661
| 478
| 17
| 46
| 28.117647
| 0.784512
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.058824
| false
| 0
| 0.058824
| 0
| 0.235294
| 0.058824
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
ae999dae84b2cf7e73c7d8ac63967bb8d105893f
| 652
|
py
|
Python
|
migrations/versions/e424d03ba260_.py
|
danielSbastos/gistified
|
96a8b61df4dbe54cc2e808734976c969e024976b
|
[
"MIT"
] | null | null | null |
migrations/versions/e424d03ba260_.py
|
danielSbastos/gistified
|
96a8b61df4dbe54cc2e808734976c969e024976b
|
[
"MIT"
] | null | null | null |
migrations/versions/e424d03ba260_.py
|
danielSbastos/gistified
|
96a8b61df4dbe54cc2e808734976c969e024976b
|
[
"MIT"
] | null | null | null |
"""empty message
Revision ID: e424d03ba260
Revises: ace8d095a26b
Create Date: 2017-10-12 11:25:11.775853
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'e424d03ba260'
down_revision = 'ace8d095a26b'
branch_labels = None
depends_on = None
def upgrade():
    """Apply this revision: add the nullable gist.lang column (VARCHAR(30))."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.add_column('gist', sa.Column('lang', sa.String(length=30), nullable=True))
    # ### end Alembic commands ###
def downgrade():
    """Revert this revision: drop the gist.lang column."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_column('gist', 'lang')
    # ### end Alembic commands ###
| 22.482759
| 81
| 0.68865
| 82
| 652
| 5.414634
| 0.609756
| 0.060811
| 0.094595
| 0.103604
| 0.198198
| 0.198198
| 0.198198
| 0.198198
| 0
| 0
| 0
| 0.092937
| 0.174847
| 652
| 28
| 82
| 23.285714
| 0.732342
| 0.452454
| 0
| 0
| 0
| 0
| 0.125
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| false
| 0
| 0.2
| 0
| 0.4
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
ae9ca2472d73373711675aa4fb19922a4e4088ab
| 1,558
|
py
|
Python
|
buycoins/ngnt.py
|
Youngestdev/buycoins-python
|
fa17600cfa92278d1c7f80f0a860e3ba7b5bc3b0
|
[
"MIT"
] | 46
|
2021-02-06T07:29:22.000Z
|
2022-01-28T06:52:18.000Z
|
buycoins/ngnt.py
|
Youngestdev/buycoins-python
|
fa17600cfa92278d1c7f80f0a860e3ba7b5bc3b0
|
[
"MIT"
] | 1
|
2021-04-05T12:40:38.000Z
|
2021-04-09T18:46:20.000Z
|
buycoins/ngnt.py
|
Youngestdev/buycoins-python
|
fa17600cfa92278d1c7f80f0a860e3ba7b5bc3b0
|
[
"MIT"
] | 5
|
2021-02-06T08:02:19.000Z
|
2022-02-18T12:46:26.000Z
|
from buycoins.client import BuyCoinsClient
from buycoins.exceptions import AccountError, ClientError, ServerError
from buycoins.exceptions.utils import check_response
class NGNT(BuyCoinsClient):
    """The NGNT class handles the generations of virtual bank deposit account."""

    def create_deposit_account(self, account_name: str):
        """Creates a virtual deposit account under the supplied name.

        Args:
            account_name (str): Name of the new virtual deposit account to be generated*.

        Returns:
            response: A JSON object containing the response from the request.
                On failure, the error's response payload is returned instead
                of being raised.
        """
        try:
            if not account_name:
                raise AccountError("Invalid account name passed", 400)
            self.account_name = account_name
            _variables = {"accountName": self.account_name}
            # GraphQL mutation sent to the BuyCoins API.
            self._query = """
                mutation createDepositAccount($accountName: String!) {
                    createDepositAccount(accountName: $accountName) {
                        accountNumber
                        accountName
                        accountType
                        bankName
                        accountReference
                    }
                }
            """
            response = self._execute_request(query=self._query, variables=_variables)
            # Raises AccountError when the API reports a failure.
            check_response(response, AccountError)
        except (AccountError, ClientError, ServerError) as e:
            # Errors are returned to the caller, not re-raised.
            return e.response
        else:
            return response["data"]["createDepositAccount"]
| 35.409091
| 89
| 0.596919
| 138
| 1,558
| 6.623188
| 0.471014
| 0.084245
| 0.049234
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.002913
| 0.338896
| 1,558
| 43
| 90
| 36.232558
| 0.884466
| 0.191913
| 0
| 0
| 0
| 0
| 0.363184
| 0.055556
| 0
| 0
| 0
| 0
| 0
| 1
| 0.037037
| false
| 0.037037
| 0.111111
| 0
| 0.259259
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
ae9d77bf011601ea6bcbab318779c48b7e9a439f
| 1,510
|
py
|
Python
|
factory-ai-vision/EdgeSolution/modules/WebModule/backend/vision_on_edge/azure_training_status/signals.py
|
piyushka17/azure-intelligent-edge-patterns
|
0d088899afb0022daa2ac434226824dba2c997c1
|
[
"MIT"
] | null | null | null |
factory-ai-vision/EdgeSolution/modules/WebModule/backend/vision_on_edge/azure_training_status/signals.py
|
piyushka17/azure-intelligent-edge-patterns
|
0d088899afb0022daa2ac434226824dba2c997c1
|
[
"MIT"
] | null | null | null |
factory-ai-vision/EdgeSolution/modules/WebModule/backend/vision_on_edge/azure_training_status/signals.py
|
piyushka17/azure-intelligent-edge-patterns
|
0d088899afb0022daa2ac434226824dba2c997c1
|
[
"MIT"
] | null | null | null |
"""App Signals
"""
import logging
from django.db.models.signals import post_save
from django.dispatch import receiver
from vision_on_edge.azure_training_status.models import TrainingStatus
from vision_on_edge.notifications.models import Notification
logger = logging.getLogger(__name__)
@receiver(signal=post_save,
          sender=TrainingStatus,
          dispatch_uid="training_status_send_notification")
def training_status_send_notification_handler(**kwargs):
    """post_save handler: mirror TrainingStatus changes as Notifications.

    Creates a Notification row when the saved TrainingStatus instance has
    a truthy ``need_to_send_notification`` attribute; otherwise does
    nothing.

    Args:
        kwargs: Django signal keyword arguments; only 'sender' and
            'instance' are consumed here.
    """
    # Defensive: the receiver registration should already guarantee the
    # sender, but bail out quietly if it does not.
    if 'sender' not in kwargs or kwargs['sender'] != TrainingStatus:
        logger.info(
            "'sender' not in kwargs or kwargs['sender'] != TrainingStatus")
        logger.info("nothing to do")
        return
    if 'instance' not in kwargs:
        logger.info("'instance' not in kwargs:'")
        logger.info("Nothing to do")
        return
    instance = kwargs['instance']
    # Only instances that opt in (attribute present and truthy) notify.
    if 'need_to_send_notification' in dir(
            instance) and instance.need_to_send_notification:
        logger.info("Azure TrainingStatus changed.")
        logger.info("instance.need_to_send_notification %s",
                    instance.need_to_send_notification)
        Notification.objects.create(notification_type="project",
                                    sender="system",
                                    title=instance.status.capitalize(),
                                    details=instance.log.capitalize())
    logger.info("Signal end")
| 33.555556
| 75
| 0.654305
| 162
| 1,510
| 5.87037
| 0.351852
| 0.117771
| 0.046267
| 0.092534
| 0.384858
| 0.212408
| 0.115668
| 0.115668
| 0.115668
| 0.115668
| 0
| 0
| 0.25298
| 1,510
| 44
| 76
| 34.318182
| 0.843085
| 0.048344
| 0
| 0.066667
| 0
| 0
| 0.203258
| 0.065156
| 0
| 0
| 0
| 0
| 0
| 1
| 0.033333
| false
| 0
| 0.166667
| 0
| 0.266667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
ae9e92c6d74c509eb9f3ed8c37b24f34f450e293
| 2,526
|
py
|
Python
|
brilleaux_flask/brilleaux.py
|
digirati-co-uk/brilleaux
|
5061d96e60239380c052f70dd12c4bec830e80db
|
[
"MIT"
] | null | null | null |
brilleaux_flask/brilleaux.py
|
digirati-co-uk/brilleaux
|
5061d96e60239380c052f70dd12c4bec830e80db
|
[
"MIT"
] | null | null | null |
brilleaux_flask/brilleaux.py
|
digirati-co-uk/brilleaux
|
5061d96e60239380c052f70dd12c4bec830e80db
|
[
"MIT"
] | null | null | null |
import json
import brilleaux_settings
import flask
from flask_caching import Cache
from flask_cors import CORS
import logging
import sys
from pyelucidate.pyelucidate import async_items_by_container, format_results, mirador_oa
app = flask.Flask(__name__)
CORS(app)
cache = Cache(
app, config={"CACHE_TYPE": "filesystem", "CACHE_DIR": "./", "CACHE_THRESHOLD": 500}
)
@app.route("/annotationlist/<path:anno_container>", methods=["GET"])
@cache.cached(timeout=120)  # Cache Flask request to save repeated hits to Elucidate.
def brilleaux(anno_container: str):
    """
    Flask app.

    Expects an md5 hashed annotation container as part of the path.

    Montague stores annotations in a container based on the md5 hash of
    the canvas uri.

    Requests the annotation list from Elucidate, using the IIIF context.

    Unpacks the annotation list, and reformats the JSON to be in the
    IIIF Presentation API annotation list format.

    Returns JSON-LD for an annotation list.

    The @id of the annotation list is set to the request_url.
    """
    if brilleaux_settings.ELUCIDATE_URI:
        # Elucidate base URL is the configured URI minus the W3C path.
        anno_server = brilleaux_settings.ELUCIDATE_URI.replace("annotation/w3c/", "")
    else:
        anno_server = "https://elucidate.dlcs-ida.org/"  # Do we need this anymore?
    if flask.request.method == "GET":
        request_uri = flask.request.url
        # make sure URL ends in a /
        if request_uri[-1] != "/":
            request_uri += "/"
        annotations = async_items_by_container(
            elucidate=anno_server,
            container=anno_container,
            header_dict={
                "Accept": "Application/ld+json; profile="
                + '"http://www.w3.org/ns/anno.jsonld"'
            },
            flatten_ids=True,
            trans_function=mirador_oa,
        )
        content = format_results(list(annotations), request_uri=request_uri)
        if content:
            resp = flask.Response(
                json.dumps(content, sort_keys=True, indent=4),
                headers={"Content-Type": "application/ld+json;charset=UTF-8"},
            )
            return resp
        else:
            # No annotations found for this container.
            flask.abort(404)
    else:
        # The route only registers GET, so this branch is effectively
        # unreachable; kept as a guard.
        logging.error("Brilleaux does not support this method.")
        flask.abort(405)
if __name__ == "__main__":
logging.basicConfig(
stream=sys.stdout,
level=logging.DEBUG,
format="%(asctime)s,%(msecs)d %(name)s %(levelname)s %(message)s",
)
app.run(threaded=True, debug=True, port=5000, host="0.0.0.0")
| 32.805195
| 88
| 0.644497
| 317
| 2,526
| 4.984227
| 0.485804
| 0.044304
| 0.032278
| 0.026582
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.014256
| 0.250198
| 2,526
| 76
| 89
| 33.236842
| 0.819958
| 0.218131
| 0
| 0.056604
| 0
| 0
| 0.187532
| 0.047273
| 0
| 0
| 0
| 0
| 0
| 1
| 0.018868
| false
| 0
| 0.150943
| 0
| 0.188679
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
881a898ae26445fd0e94d07ff062d0f6af611593
| 520
|
py
|
Python
|
src/cli_report.py
|
dmitryvodop/vk_likechecker
|
3673ecf7548b3374aa5082bc69b7db1669f2f9c2
|
[
"MIT"
] | null | null | null |
src/cli_report.py
|
dmitryvodop/vk_likechecker
|
3673ecf7548b3374aa5082bc69b7db1669f2f9c2
|
[
"MIT"
] | null | null | null |
src/cli_report.py
|
dmitryvodop/vk_likechecker
|
3673ecf7548b3374aa5082bc69b7db1669f2f9c2
|
[
"MIT"
] | null | null | null |
MAX_CONSOLE_LINE_LENGTH = 79


class CliReport:
    """Console reporter that pads output lines to a fixed width.

    Characters that survive neither a cp866 nor a cp1251 round-trip are
    silently dropped, mirroring Windows-console encoding limits.
    """

    def __init__(self):
        # Printing is a no-op until the report is marked initialized.
        self.is_initialized = False

    def print(self, string='', length=MAX_CONSOLE_LINE_LENGTH, end='\n'):
        """Print `string` right-padded with spaces to `length` characters.

        Does nothing while the report is uninitialized.
        """
        if not self.is_initialized:
            return
        padded = string.ljust(length)
        sanitized = (padded.encode('cp866', errors='ignore').decode('cp866')
                     .encode('cp1251', errors='ignore').decode('cp1251'))
        print(sanitized, end=end)
| 34.666667
| 108
| 0.607692
| 62
| 520
| 4.806452
| 0.467742
| 0.080537
| 0.14094
| 0.134228
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.044041
| 0.257692
| 520
| 14
| 109
| 37.142857
| 0.727979
| 0
| 0
| 0
| 0
| 0
| 0.071154
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.181818
| false
| 0
| 0
| 0
| 0.272727
| 0.181818
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
881aadb872501d08df8bad8897f3a02a5ed64924
| 5,138
|
py
|
Python
|
s3splitmerge/merge.py
|
MacHu-GWU/s3splitmerge-project
|
873892158f4a2d0ee20f291e5d3b2a80f0bae1ba
|
[
"MIT"
] | null | null | null |
s3splitmerge/merge.py
|
MacHu-GWU/s3splitmerge-project
|
873892158f4a2d0ee20f291e5d3b2a80f0bae1ba
|
[
"MIT"
] | null | null | null |
s3splitmerge/merge.py
|
MacHu-GWU/s3splitmerge-project
|
873892158f4a2d0ee20f291e5d3b2a80f0bae1ba
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
import typing
import pandas as pd
import smart_open
import awswrangler as wr
from .helpers import (
check_enumeration_s3_key_string,
get_key_size_all_objects,
group_s3_objects_no_larger_than,
)
from .options import ZFILL
def merge_csv(
    s3_client,
    source_bucket: str,
    source_key_prefix: str,
    target_bucket: str,
    target_key: str,
    target_size: int,
    zfill: int = ZFILL,
):
    """Merge all S3 objects under source_key_prefix into numbered target
    objects of roughly target_size bytes each.

    target_key must contain an enumeration placeholder ("{i}") filled with
    the 1-based, zero-padded group number.

    NOTE(review): despite the name, each group is delegated to merge_json
    (plain line concatenation). That is fine for headerless CSV parts but
    would repeat header rows if the parts carry headers -- confirm the
    expected input format.
    """
    check_enumeration_s3_key_string(target_key)
    # analyze input data
    key_and_size_list = get_key_size_all_objects(
        s3_client=s3_client,
        bucket=source_bucket,
        prefix=source_key_prefix,
    )
    # Partition objects into groups, each totalling at most target_size.
    group_list = group_s3_objects_no_larger_than(
        key_and_size_list=key_and_size_list,
        max_size=target_size,
    )
    for nth_group, s3_object_group in enumerate(group_list):
        nth_group += 1  # output numbering is 1-based
        source_uri_list = [
            f"s3://{source_bucket}/{s3_key}"
            for s3_key in s3_object_group
        ]
        merge_json(
            s3_client=s3_client,
            source_uri_list=source_uri_list,
            target_bucket=target_bucket,
            target_key=target_key.format(i=str(nth_group).zfill(zfill)),
        )
def merge_parquet(boto3_session,
                  source_uri_list: typing.List[str],
                  target_bucket: str,
                  target_key: str) -> typing.Tuple[str, str]:
    """
    Merge multiple parquet file on S3 into one parquet file.

    .. note::

        For parquet, it has to use the awswrangler API and it only support
        boto3_session other than s3_client.
    """
    # Load every source object into memory and stack the frames row-wise.
    frames = [
        wr.s3.read_parquet(uri, boto3_session=boto3_session)
        for uri in source_uri_list
    ]
    merged = pd.concat(frames, axis=0)
    target_uri = f"s3://{target_bucket}/{target_key}"
    wr.s3.to_parquet(
        df=merged,
        path=target_uri,
        boto3_session=boto3_session
    )
    return target_bucket, target_key
def merge_parquet_by_prefix(boto3_session,
                            source_bucket,
                            source_key_prefix,
                            target_bucket,
                            target_key,
                            target_size,
                            zfill: int = ZFILL) -> typing.List[typing.Tuple[str, str]]:
    """
    Smartly merge all parquet s3 object under the same prefix into one or many
    fixed size (approximately) parquet file.

    target_key must contain an enumeration placeholder ("{i}") filled with
    the 1-based, zero-padded group number.

    Returns:
        List of (bucket, key) tuples for the objects written.
    """
    check_enumeration_s3_key_string(target_key)
    s3_client = boto3_session.client("s3")
    target_s3_bucket_key_list = list()
    # analyze input data
    key_and_size_list = get_key_size_all_objects(
        s3_client=s3_client,
        bucket=source_bucket,
        prefix=source_key_prefix,
    )
    # Partition objects into groups, each totalling at most target_size.
    group_list = group_s3_objects_no_larger_than(
        key_and_size_list=key_and_size_list,
        max_size=target_size,
    )
    for nth_group, s3_object_group in enumerate(group_list):
        nth_group += 1  # output numbering is 1-based
        source_uri_list = [
            f"s3://{source_bucket}/{s3_key}"
            for s3_key in s3_object_group
        ]
        bucket_and_key = merge_parquet(
            boto3_session=boto3_session,
            source_uri_list=source_uri_list,
            target_bucket=target_bucket,
            target_key=target_key.format(i=str(nth_group).zfill(zfill)),
        )
        target_s3_bucket_key_list.append(bucket_and_key)
    return target_s3_bucket_key_list
def merge_json(s3_client,
               source_uri_list: typing.List[str],
               target_bucket: str,
               target_key: str):
    """Concatenate the line-oriented objects in source_uri_list, in order,
    into a single s3://target_bucket/target_key object."""
    transport_params = dict(client=s3_client)
    target_uri = f"s3://{target_bucket}/{target_key}"
    with smart_open.open(
        target_uri, "w",
        transport_params=transport_params,
    ) as sink:
        for source_uri in source_uri_list:
            with smart_open.open(
                source_uri, "r",
                transport_params=transport_params,
            ) as source:
                # Stream line by line to keep memory flat for large files.
                sink.writelines(source)
def merge_json_by_prefix(s3_client,
                         source_bucket: str,
                         source_key_prefix: str,
                         target_bucket: str,
                         target_key: str,
                         target_size: int,
                         zfill: int = ZFILL):
    """
    Merge every JSON-lines object under ``source_key_prefix`` into one or
    more objects of approximately ``target_size`` bytes each.

    ``target_key`` must be an enumeration template (contains ``{i}``).
    """
    check_enumeration_s3_key_string(target_key)

    # Inspect the source objects, then partition them into size-bounded groups.
    key_and_size_list = get_key_size_all_objects(
        s3_client=s3_client,
        bucket=source_bucket,
        prefix=source_key_prefix,
    )
    group_list = group_s3_objects_no_larger_than(
        key_and_size_list=key_and_size_list,
        max_size=target_size,
    )

    for nth_group, s3_object_group in enumerate(group_list, start=1):
        uri_list = [f"s3://{source_bucket}/{s3_key}" for s3_key in s3_object_group]
        merge_json(
            s3_client=s3_client,
            source_uri_list=uri_list,
            target_bucket=target_bucket,
            # group number is 1-based and zero-padded for stable sort order
            target_key=target_key.format(i=str(nth_group).zfill(zfill)),
        )
| 29.528736
| 87
| 0.610743
| 659
| 5,138
| 4.345979
| 0.151745
| 0.053422
| 0.059008
| 0.043994
| 0.686802
| 0.615223
| 0.566341
| 0.553771
| 0.553771
| 0.553771
| 0
| 0.018739
| 0.314519
| 5,138
| 173
| 88
| 29.699422
| 0.794435
| 0.072986
| 0
| 0.522388
| 0
| 0
| 0.033312
| 0.032463
| 0
| 0
| 0
| 0
| 0
| 1
| 0.037313
| false
| 0
| 0.044776
| 0
| 0.097015
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
881b4859fbf99cdf056286c05c45307fee24239c
| 5,316
|
py
|
Python
|
maraboupy/test/test_query.py
|
yuvaljacoby/Marabou-1
|
553b780ef2e2cfe349b3954adc433a27af37a50f
|
[
"BSD-3-Clause"
] | null | null | null |
maraboupy/test/test_query.py
|
yuvaljacoby/Marabou-1
|
553b780ef2e2cfe349b3954adc433a27af37a50f
|
[
"BSD-3-Clause"
] | null | null | null |
maraboupy/test/test_query.py
|
yuvaljacoby/Marabou-1
|
553b780ef2e2cfe349b3954adc433a27af37a50f
|
[
"BSD-3-Clause"
] | 1
|
2021-06-29T06:54:29.000Z
|
2021-06-29T06:54:29.000Z
|
# Supress warnings caused by tensorflow
import warnings
warnings.filterwarnings('ignore', category = DeprecationWarning)
warnings.filterwarnings('ignore', category = PendingDeprecationWarning)
import pytest
from .. import Marabou
import numpy as np
import os
# Global settings
TOL = 1e-4 # Tolerance for Marabou evaluations
ONNX_FILE = "../../resources/onnx/fc1.onnx" # File for test onnx network
ACAS_FILE = "../../resources/nnet/acasxu/ACASXU_experimental_v2a_1_1.nnet" # File for test nnet network
def test_sat_query(tmpdir):
    """
    Test that a query generated from Maraboupy can be saved and loaded correctly and return sat
    """
    network = load_onnx_network()

    # Set output constraint: force output 1 above a reachable bound so the query is sat
    outputVars = network.outputVars.flatten()
    outputVar = outputVars[1]
    minOutputValue = 70.0
    network.setLowerBound(outputVar, minOutputValue)

    # Save this query to a temporary file, and reload the query
    queryFile = tmpdir.mkdir("query").join("query.txt").strpath
    network.saveQuery(queryFile)
    ipq = Marabou.load_query(queryFile)

    # Solve the query loaded from the file and compare to the solution of the original query.
    # The result should be the same regardless of verbosity options used, or if a file redirect is used.
    # (A Marabou.createOptions(verbosity=0) object was previously created here but never
    # passed to either solve call — it was a dead local and has been removed.)
    tempFile = tmpdir.mkdir("redirect").join("marabouRedirect.log").strpath
    vals_net, _ = network.solve(filename=tempFile)
    vals_ipq, _ = Marabou.solve_query(ipq, filename=tempFile)

    # The two value dictionaries should have the same number of variables,
    # the same keys, and the values assigned should be within some tolerance of each other
    assert len(vals_net) == len(vals_ipq)
    for k in vals_net:
        assert k in vals_ipq
        assert np.abs(vals_ipq[k] - vals_net[k]) < TOL
def test_unsat_query(tmpdir):
    """
    Test that a query generated from Maraboupy can be saved and loaded correctly and return unsat
    """
    network = load_onnx_network()

    # Constrain output 0 to an unreachably large lower bound so the query is unsat
    outputVars = network.outputVars.flatten()
    outputVar = outputVars[0]
    minOutputValue = 2000.0
    network.setLowerBound(outputVar, minOutputValue)

    # Round-trip the query through a temporary file
    queryFile = tmpdir.mkdir("query").join("query.txt").strpath
    network.saveQuery(queryFile)
    ipq = Marabou.load_query(queryFile)

    # Solve both the original network query and the reloaded query
    opt = Marabou.createOptions(verbosity = 0)
    vals_net, stats_net = network.solve(options = opt)
    vals_ipq, stats_ipq = Marabou.solve_query(ipq, options = opt)

    # Both must be unsat (empty assignment) and neither may have timed out
    assert len(vals_net) == 0
    assert len(vals_ipq) == 0
    assert not stats_net.hasTimedOut()
    assert not stats_ipq.hasTimedOut()
def test_to_query(tmpdir):
    """
    Test that a query generated from Maraboupy can be saved and loaded correctly and return timeout.
    This query is expected to be UNSAT but is currently unsolveable within one second.
    If future improvements allow the query to be solved within a second, then this test will need to be updated.
    """
    network = load_acas_network()

    # Constrain output 0 to a very large lower bound (hard to disprove quickly)
    outputVars = network.outputVars.flatten()
    outputVar = outputVars[0]
    minOutputValue = 1500.0
    network.setLowerBound(outputVar, minOutputValue)

    # Round-trip the query through a temporary file
    queryFile = tmpdir.mkdir("query").join("query.txt").strpath
    network.saveQuery(queryFile)
    ipq = Marabou.load_query(queryFile)

    # Solve both queries with a one-second timeout
    opt = Marabou.createOptions(verbosity = 0, timeoutInSeconds = 1)
    vals_net, stats_net = network.solve(options = opt)
    vals_ipq, stats_ipq = Marabou.solve_query(ipq, options = opt)

    # Both the original and reloaded query must hit the timeout
    assert stats_net.hasTimedOut()
    assert stats_ipq.hasTimedOut()
def load_onnx_network():
    """
    Load the fc1.onnx test network, which has two input variables and two
    output variables. The network was trained so that the first output
    approximates the sum of the absolute values of the inputs, and the
    second output approximates the sum of the squares of the inputs,
    for inputs in the range [-10.0, 10.0].
    """
    filename = os.path.join(os.path.dirname(__file__), ONNX_FILE)
    network = Marabou.read_onnx(filename)

    # First dimension is the batch size, hence the extra [0]
    input_vars = network.inputVars[0][0]

    # Constrain both inputs to the training range [-10, 10]
    for var in (input_vars[0], input_vars[1]):
        network.setLowerBound(var, -10.0)
        network.setUpperBound(var, 10.0)
    return network
def load_acas_network():
    """
    Load one of the ACAS Xu networks. It is larger than fc1.onnx, which
    makes it a better test case for exercising timeouts.
    """
    nnet_path = os.path.join(os.path.dirname(__file__), ACAS_FILE)
    return Marabou.read_nnet(nnet_path, normalize=True)
| 40.892308
| 112
| 0.702784
| 711
| 5,316
| 5.164557
| 0.260197
| 0.010893
| 0.022876
| 0.015523
| 0.501362
| 0.47195
| 0.456155
| 0.445261
| 0.426198
| 0.426198
| 0
| 0.013218
| 0.217269
| 5,316
| 129
| 113
| 41.209302
| 0.869262
| 0.378668
| 0
| 0.362319
| 0
| 0
| 0.053409
| 0.027961
| 0
| 0
| 0
| 0
| 0.130435
| 1
| 0.072464
| false
| 0
| 0.072464
| 0
| 0.173913
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
8820ecc0654f8927cee2ae38d218e22ba45c5793
| 3,050
|
py
|
Python
|
scripts/computeDice.py
|
STORM-IRIT/pcednet-supp
|
68d2a2a62bfb7b450bf241c2251ee3bb99d18c7e
|
[
"CC-BY-3.0"
] | 7
|
2022-01-28T14:59:11.000Z
|
2022-03-17T05:09:28.000Z
|
scripts/computeDice.py
|
STORM-IRIT/pcednet-supp
|
68d2a2a62bfb7b450bf241c2251ee3bb99d18c7e
|
[
"CC-BY-3.0"
] | 4
|
2021-11-18T13:50:21.000Z
|
2022-02-25T15:10:06.000Z
|
scripts/computeDice.py
|
STORM-IRIT/pcednet-supp
|
68d2a2a62bfb7b450bf241c2251ee3bb99d18c7e
|
[
"CC-BY-3.0"
] | null | null | null |
import sys, glob
from os import listdir, remove
from os.path import dirname, join, isfile, abspath
from io import StringIO
import numpy as np
import utilsmodule as um

script_path = dirname(abspath(__file__))
datasetPath = join(script_path, "data/")

e = 'shrec'

### Compute the dice coefficient used in Table 1,
# E Moscoso Thompson, G Arvanitis, K Moustakas, N Hoang-Xuan, E R Nguyen, et al..
# SHREC’19track: Feature Curve Extraction on Triangle Meshes.
# 12th EG Workshop 3D Object Retrieval 2019,May 2019, Gênes, Italy.

print (" Processing experiment " + e)

# Fields loaded from the file
input_file_fields = ['Precision', 'Recall', 'MCC', 'TP', 'FP', 'TN', 'FN']
# Expected range for the fields (used to compute the histogram bins)
input_fields_range = [(0,1), (0,1), (-1,1), (0,1), (0,1), (0,1), (0,1)]
input_fields_bins = []
# Functions used to summarize a field for the whole dataset
# NOTE(review): name contains a typo ("fied") but is kept unchanged in case
# other scripts import it.
input_fied_summary = {
    "median": lambda buf: np.nanmedian(buf),
    "mean": lambda buf: np.nanmean(buf)
}

experimentPath = join(datasetPath, e)
experimentFile = join(script_path, "../assets/js/data_" + e + ".js")
approaches = [f for f in listdir(experimentPath) if isfile(join(experimentPath, f))]

# Data loaded from the files: method name -> list of per-field numpy arrays
rawdata = dict()
# Number of samples (3D models) used in this experiment
nbsamples = 0

# Load data
for a in approaches:
    if a.endswith(".txt"):
        aname = a[:-4]
        apath = join(experimentPath, a)
        # Load the file, skipping comment lines AND blank lines.
        # Fix: the previous filter (`not item[0].startswith('#') or item == ''`)
        # kept blank lines, which produced ragged rows and broke np.swapaxes.
        # The file is now also closed deterministically via a context manager.
        with open(apath, 'r') as fh:
            lines = [item.split() for item in fh
                     if item.strip() and not item.startswith('#')]
        nbsamples = len(lines)
        # Current layout: lines[lineid][columnid]
        # Reshape so we have columns[columnid][lineid]
        rawdata[aname] = np.swapaxes(lines, 0, 1)
        # Convert each column from an array of str to a numpy array of floats.
        # Fix: np.fromstring(..., dtype=np.float) — the np.float alias was
        # removed in NumPy 1.24 and np.fromstring is deprecated for text.
        converter = lambda col: np.array([float(v) for v in col], dtype=float)
        rawdata[aname] = list(map(converter, rawdata[aname]))

print (" Loaded methods " + str(rawdata.keys()))

# Append the dice coefficient as an extra (8th) column for each method.
for method, data in rawdata.items():
    tp = data[3]
    fp = data[4]
    fn = data[6]
    # Dice = 2*TP / (2*TP + FN + FP)
    dice = (2.*tp) / (2.*tp + fn + fp)
    data.append(dice)

# Now print the latex table header
for method, data in rawdata.items():
    print (method + " & ", end = '')
# Fix: '\h' is an invalid escape sequence; '\\hline' emits the same text.
print("\\\\ \n \\hline")

# Find, for each model, the index of the method with the best dice score
maxid = []
for i in range(0, nbsamples):
    vmax = 0.
    mmax = 0
    m = 0
    for method, data in rawdata.items():
        if data[7][i] > vmax:
            vmax = data[7][i]
            mmax = m
        m = m + 1
    maxid.append(mmax)

# Now print the latex table content, bolding the best method per row
for i in range(0, nbsamples):
    m = 0
    for method, data in rawdata.items():
        valstr = "{:.2f}".format(data[7][i])
        if maxid[i] == m:
            valstr = "\\textbf{" + valstr + "}"
        print (valstr + " & ", end = '')
        m = m + 1
    print("\\\\ \n \\hline")
| 26.99115
| 107
| 0.615738
| 440
| 3,050
| 4.231818
| 0.415909
| 0.007519
| 0.008056
| 0.008593
| 0.115467
| 0.092911
| 0.055854
| 0.031149
| 0
| 0
| 0
| 0.024014
| 0.23541
| 3,050
| 112
| 108
| 27.232143
| 0.774443
| 0.270164
| 0
| 0.1875
| 0
| 0
| 0.077343
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.09375
| 0
| 0.09375
| 0.09375
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
8823f9acf9979c7b6037b4ffb5d08ae416a7a660
| 1,094
|
py
|
Python
|
plugins/dbnd-test-scenarios/src/dbnd_test_scenarios/integrations/mlflow_example.py
|
ipattarapong/dbnd
|
7bd65621c46c73e078eb628f994127ad4c7dbd1a
|
[
"Apache-2.0"
] | 224
|
2020-01-02T10:46:37.000Z
|
2022-03-02T13:54:08.000Z
|
plugins/dbnd-test-scenarios/src/dbnd_test_scenarios/integrations/mlflow_example.py
|
ipattarapong/dbnd
|
7bd65621c46c73e078eb628f994127ad4c7dbd1a
|
[
"Apache-2.0"
] | 16
|
2020-03-11T09:37:58.000Z
|
2022-01-26T10:22:08.000Z
|
plugins/dbnd-test-scenarios/src/dbnd_test_scenarios/integrations/mlflow_example.py
|
ipattarapong/dbnd
|
7bd65621c46c73e078eb628f994127ad4c7dbd1a
|
[
"Apache-2.0"
] | 24
|
2020-03-24T13:53:50.000Z
|
2022-03-22T11:55:18.000Z
|
import logging
from random import randint, random
from mlflow import (
active_run,
end_run,
get_tracking_uri,
log_metric,
log_param,
start_run,
)
from mlflow.tracking import MlflowClient
from dbnd import task
logger = logging.getLogger(__name__)
@task
def mlflow_example():
    """Log random params/metrics to MLFlow, then read the run back and log it."""
    logger.info("Running MLFlow example!")
    logger.info("MLFlow tracking URI: {}".format(get_tracking_uri()))

    start_run()

    # params: one random integer per parameter name
    for param_name in ("param1", "param2"):
        log_param(param_name, randint(0, 100))

    # metrics: two values per metric name (second strictly larger)
    for metric_name in ("foo1", "foo2"):
        log_metric(metric_name, random())
        log_metric(metric_name, random() + 1)

    # Show metadata & data from the mlflow tracking store:
    service = MlflowClient()
    run_id = active_run().info.run_id
    run = service.get_run(run_id)
    logger.info("Metadata & data for run with UUID %s: %s" % (run_id, run))

    end_run()
    logger.info("MLFlow example completed!")
#
# from dbnd_task
# @task
# def mlflow_example():
#     pass

# Script entry point: run the example once when executed directly.
if __name__ == "__main__":
    mlflow_example()
| 20.259259
| 75
| 0.662706
| 144
| 1,094
| 4.763889
| 0.354167
| 0.065598
| 0.026239
| 0.058309
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.018519
| 0.210238
| 1,094
| 53
| 76
| 20.641509
| 0.775463
| 0.108775
| 0
| 0
| 0
| 0
| 0.152174
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.03125
| false
| 0
| 0.15625
| 0
| 0.1875
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
8826f802253e79fbdf200b9f603f2a1bd96164e1
| 2,673
|
py
|
Python
|
notes/conditionals/if_blocks.py
|
mcorley-gba/IntroCS21-22
|
a823e17f2cb618be0e67468cb15f48873ae85152
|
[
"MIT"
] | null | null | null |
notes/conditionals/if_blocks.py
|
mcorley-gba/IntroCS21-22
|
a823e17f2cb618be0e67468cb15f48873ae85152
|
[
"MIT"
] | null | null | null |
notes/conditionals/if_blocks.py
|
mcorley-gba/IntroCS21-22
|
a823e17f2cb618be0e67468cb15f48873ae85152
|
[
"MIT"
] | null | null | null |
#Conditional Tests HW - Due Monday
# 13 Tests --> 1 True and 1 False for each

#If Statements

#Simplest structure of an if statement:
# if conditional_test:
#     do something <-- Instructions/commands

#my_age = 13
#if my_age >= 18:
#    print("You are old enough to vote.")
#    print("Are you registered to vote?")
#Unindent!

#Indentation plays the same role for if-statements
#as it did for 'for' loops. Anything indented will be
#executed whenever the conditional test is true. Anything
#indented will be skipped whenever the conditional test is
#false.

#USE CAUTION - Don't forget to un-indent when you are finished
#with your if-block.

#Often we want one action if the conditional test is True,
#but a different action whenever it is False: use if-else.
my_age = 33
if my_age >= 18:
    print("You are old enough to vote.")
    print("Are you registered to vote?")
else: #Catches any instances when the above test fails
    print("You are not old enough to vote.")
    print("Please register to vote when you turn 18.")

#The if-else structure works very well in situations in which python
#needs to always execute one of two possible actions.
#In a simple if-else block, exactly one of the two will always be executed.

#if-elif-else Chain
#Python will only execute one block in an if-elif-else chain.
#As soon as one test passes, python executes that block
#and skips the rest (even if they might also be true).

#Example: Admission to a theme park:
#Three price-levels:
#Under 4 --> Free
#between 4 and 18 --> $25
#18 to 65 --> $40
#65 and older--> $20
age = 66
if age < 4:
    price = 0
elif age < 18: #elif = else+if --> if the above test(s) is(are) false,
               #try this test next (here: ages 4-17)
    price = 25
elif age < 65: #ages 18-64
    price = 40
#We can have more than one elif statement
elif age >= 65:
    price = 20
#The catch-all 'else' statement is no longer needed.
#If you have a definite condition for the last block of an if-elif-else,
#use an elif statement with a definite conditional test. If you don't have a
#definite condition in mind for the last layer of an if-elif-else block,
#else works fine (unless you don't really need it).

print(f"Your admission cost is ${price}")
#Think about the structure of your if-elif-else blocks,
#especially when the tests overlap.
#The purpose of the above code was to determine the cost for the user.

#Multiple conditions: independent if statements are all evaluated,
#so any number of them can run (unlike an if-elif-else chain).
requested_toppings = ['mushrooms','extra cheese']
if 'mushrooms' in requested_toppings:
    print("Adding mushrooms.")
if 'pepperoni' in requested_toppings:
    print("Adding pepperoni")
if 'extra cheese' in requested_toppings:
    print("Adding extra cheese")
print("Finished making pizza!")
| 29.373626
| 76
| 0.716423
| 444
| 2,673
| 4.292793
| 0.391892
| 0.018888
| 0.026233
| 0.02361
| 0.181532
| 0.065058
| 0.065058
| 0.065058
| 0.065058
| 0.065058
| 0
| 0.021475
| 0.198653
| 2,673
| 90
| 77
| 29.7
| 0.868347
| 0.696596
| 0
| 0
| 0
| 0
| 0.369594
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.36
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
8828b21c1d7aa3ef1f1b5b77da67057776db662c
| 3,798
|
py
|
Python
|
make_histogram.py
|
hijinks/python-bcet
|
3e2fac66c82fb3f1c02e8e19153f5e3e97f57aca
|
[
"MIT"
] | null | null | null |
make_histogram.py
|
hijinks/python-bcet
|
3e2fac66c82fb3f1c02e8e19153f5e3e97f57aca
|
[
"MIT"
] | null | null | null |
make_histogram.py
|
hijinks/python-bcet
|
3e2fac66c82fb3f1c02e8e19153f5e3e97f57aca
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# BCET Workflow
#
# Plot smoothed, normalised histograms of LANDSAT band rasters, before
# (args[1]) and after (args[0]) BCET correction, into histograms.pdf.

__author__ = 'Sam Brooke'
__date__ = 'September 2017'
__copyright__ = '(C) 2017, Sam Brooke'
__email__ = "sbrooke@tuta.io"

import os
import georasters as gr
import matplotlib.pyplot as plt
import numpy as np
from optparse import OptionParser
import fnmatch
import re
# Fix: scipy.interpolate.spline was removed in SciPy 1.3;
# make_interp_spline(x, y, k=3) is the documented replacement.
from scipy.interpolate import make_interp_spline

parser = OptionParser()
(options, args) = parser.parse_args()

# args[0] for bcet_directory
# args[1] for no_bcet_directory
# args[2] for output identifier
bcet_directory = False
no_bcet_directory = False
file_prefix = ''

if os.path.isdir(args[0]):
    bcet_directory = args[0]

if os.path.isdir(args[1]):
    no_bcet_directory = args[1]


def _find_tifs(directory):
    """Recursively collect all *.tif file paths below *directory*."""
    matches = []
    for root, dirnames, filenames in os.walk(directory):
        for filename in fnmatch.filter(filenames, '*.tif'):
            matches.append(os.path.join(root, filename))
    return matches


bcet_matches = _find_tifs(bcet_directory)
print(bcet_matches)

no_bcet_matches = _find_tifs(no_bcet_directory)
print(no_bcet_matches)

output = args[2]

# Plot colour for each LANDSAT band
colours = {
    'B1':'lightblue',
    'B2':'blue',
    'B3':'green',
    'B4':'red',
    'B5':'firebrick',
    'B6':'grey',
    'B7':'k'
}

# Legend label for each LANDSAT band
band_labels = {
    'B1':'Band 1 - Ultra Blue',
    'B2':'Band 2 - Blue',
    'B3':'Band 3 - Green',
    'B4':'Band 4 - Red',
    'B5':'Band 5 - NIR',
    'B6':'Band 6 - SWIR 1',
    'B7':'Band 7 - SWIR 2'
}

# Two stacked panels: raw histograms on top, BCET histograms below
fig, axarr = plt.subplots(2, sharex=False)
width = 25 #cm
height = 20 #cm
fig.set_size_inches(float(width)/2.54, float(height)/2.54)


def _plot_band_histograms(matches, axis, nbins):
    """Plot a smoothed, normalised histogram of each raster in *matches* onto *axis*.

    The band name (e.g. 'B3') is parsed from the file name and selects
    colour and legend label. *nbins* is the histogram bin count.
    """
    for ma in matches:
        raster = os.path.join(ma)
        base = os.path.basename(raster)
        band_name = re.search(r"B[0-9]+", base).group()
        # Raster information; ndv = no data value
        ndv, xsize, ysize, geot, projection, datatype = gr.get_geo_info(raster)
        data = gr.from_file(raster)  # Create GeoRaster object
        crs = projection.ExportToProj4()  # proj4 projection string (kept for parity; unused)
        sp = data.raster.ravel()
        # Normalise counts by the total number of pixels
        hist, bins = np.histogram(sp, bins=nbins)
        hist_norm = hist.astype(float) / len(sp)
        center = (bins[:-1] + bins[1:]) / 2
        # 300 evaluation points between center.min() and center.max() for a smooth curve
        centernew = np.linspace(center.min(), center.max(), 300)
        hist_smooth = make_interp_spline(center, hist_norm, k=3)(centernew)
        axis.plot(centernew, hist_smooth, color=colours[band_name],
                  label=band_labels[band_name])


_plot_band_histograms(no_bcet_matches, axarr[0], 50)
_plot_band_histograms(bcet_matches, axarr[1], 25)

axarr[0].set_xlim([0, 25000])
axarr[1].set_xlim([0,255])
axarr[0].set_ylim([0, 0.5])
axarr[1].set_ylim([0, 0.5])
axarr[0].set_xlabel('R')
axarr[1].set_xlabel('R*')
axarr[0].set_ylabel('f')
axarr[1].set_ylabel('f')
axarr[0].set_title('LANDSAT (White Mountains ROI) 2014-02-25 Unmodified Histogram')
axarr[1].set_title('LANDSAT (White Mountains ROI) 2014-02-25 BCET Histogram')
axarr[0].legend()
axarr[1].legend()
plt.savefig('histograms.pdf')
| 27.926471
| 120
| 0.700105
| 608
| 3,798
| 4.246711
| 0.3125
| 0.040279
| 0.017428
| 0.013943
| 0.623548
| 0.61038
| 0.598761
| 0.598761
| 0.598761
| 0.460883
| 0
| 0.041284
| 0.139021
| 3,798
| 135
| 121
| 28.133333
| 0.748318
| 0.130332
| 0
| 0.29703
| 0
| 0
| 0.116052
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.079208
| 0
| 0.079208
| 0.019802
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
88398ae2c4128d5752a93cafe6efa48eb9858180
| 3,718
|
py
|
Python
|
tests/integration/questionnaire/test_questionnaire_save_sign_out.py
|
uk-gov-mirror/ONSdigital.eq-survey-runner
|
b3a67a82347d024177f7fa6bf05499f47ece7ea5
|
[
"MIT"
] | 27
|
2015-10-02T17:27:54.000Z
|
2021-04-05T12:39:16.000Z
|
tests/integration/questionnaire/test_questionnaire_save_sign_out.py
|
uk-gov-mirror/ONSdigital.eq-survey-runner
|
b3a67a82347d024177f7fa6bf05499f47ece7ea5
|
[
"MIT"
] | 1,836
|
2015-09-16T09:59:03.000Z
|
2022-03-30T14:27:06.000Z
|
tests/integration/questionnaire/test_questionnaire_save_sign_out.py
|
uk-gov-mirror/ONSdigital.eq-survey-runner
|
b3a67a82347d024177f7fa6bf05499f47ece7ea5
|
[
"MIT"
] | 20
|
2016-09-09T16:56:12.000Z
|
2021-11-12T06:09:27.000Z
|
from app.validation.error_messages import error_messages
from tests.integration.integration_test_case import IntegrationTestCase
class TestSaveSignOut(IntegrationTestCase):
    """Integration tests for the questionnaire 'save and sign out' flow.

    Each test drives the survey runner through a sequence of HTTP requests;
    the call order is significant.
    """

    def test_save_sign_out_with_mandatory_question_not_answered(self):
        """Saving and signing out must succeed even with a mandatory answer missing."""
        # We can save and go to the sign-out page without having to fill in mandatory answer
        # Given
        self.launchSurvey('test', '0205', account_service_url='https://localhost/my-account', account_service_log_out_url='https://localhost/logout')

        # When
        self.post(action='start_questionnaire')
        self.post(post_data={'total-retail-turnover': '1000'}, action='save_sign_out')

        # Then we are presented with the sign out page
        self.assertInUrl('/logout')

    def test_save_sign_out_with_non_mandatory_validation_error(self):
        """Save-and-sign-out is blocked by validation errors (not by missing answers)."""
        # We can't save if a validation error is caused, this doesn't include missing a mandatory question
        # Given
        self.launchSurvey('test', '0205')

        # When
        self.post(action='start_questionnaire')
        self.post(post_data={'total-retail-turnover': 'error'}, action='save_sign_out')

        # Then we are presented with an error message
        self.assertRegexPage(error_messages['INVALID_NUMBER'])

    def test_save_sign_out_complete_a_block_then_revisit_it(self):
        """Re-visiting a completed block with save-and-sign-out marks it incomplete again."""
        # If a user completes a block, but then goes back and uses save and come back on that block, that block
        # should no longer be considered complete and on re-authenticate it should return to it
        self.launchSurvey('test', '0102')
        self.post(action='start_questionnaire')

        # Remember the URL of the first block so we can return to it later
        block_one_url = self.last_url

        post_data = {
            'period-from-day': '01',
            'period-from-month': '4',
            'period-from-year': '2016',
            'period-to-day': '30',
            'period-to-month': '4',
            'period-to-year': '2016'
        }
        self.post(post_data)

        # We go back to the first page and save and complete later
        self.get(block_one_url)
        self.post(action='save_sign_out')

        # We re-authenticate and check we are on the first page
        self.launchSurvey('test', '0102')
        self.assertEqual(block_one_url, self.last_url)

    def test_sign_out_on_introduction_page(self):
        """Signing out from the introduction page redirects to the account service log-out URL."""
        # Given
        self.launchSurvey('test', '0205', account_service_url='https://localhost/my-account', account_service_log_out_url='https://localhost/logout')

        # When
        self.post(action='sign_out')

        # Then we are presented with the sign out page
        self.assertInUrl('/logout')

    def test_thank_you_without_logout_url(self):
        """
        If the signed-out url is hit but there is no account_service_log_out_url, then a sign out page is rendered.
        """
        self.launchSurvey('test', 'textarea')
        self.post({'answer': 'This is an answer'})
        # Reuse the CSRF token from before submission so the post-completion request is accepted
        token = self.last_csrf_token
        self.post(action=None)
        self.assertInUrl('thank-you')
        self.last_csrf_token = token
        self.post(action='sign_out')
        self.assertInUrl('/signed-out')
        self.assertInBody('Your survey answers have been saved. You are now signed out')

    def test_thank_you_page_post_without_action(self):
        """
        If the thank you page is posted to without an action,
        it takes you back to the thank you page.
        """
        self.launchSurvey('test', 'textarea')
        self.post({'answer': 'This is an answer'})
        # Reuse the CSRF token from before submission so the post-completion request is accepted
        token = self.last_csrf_token
        self.post(action=None)
        self.assertInUrl('thank-you')
        self.last_csrf_token = token
        self.post(action=None)
        self.assertInUrl('/thank-you')
| 35.75
| 149
| 0.655998
| 496
| 3,718
| 4.727823
| 0.268145
| 0.047761
| 0.053731
| 0.028998
| 0.492111
| 0.423454
| 0.388913
| 0.388913
| 0.388913
| 0.356503
| 0
| 0.013485
| 0.242066
| 3,718
| 103
| 150
| 36.097087
| 0.818666
| 0.228349
| 0
| 0.45283
| 0
| 0
| 0.216786
| 0.015
| 0
| 0
| 0
| 0
| 0.169811
| 1
| 0.113208
| false
| 0
| 0.037736
| 0
| 0.169811
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
883ab54b46f93c6809f1bedd1cd71a0ee4774d4e
| 16,479
|
py
|
Python
|
model/UniGNN.py
|
czc567/UniGNN
|
bbb061f393b847ff6c7c20cab9e1ecb8f1c3eb96
|
[
"MIT"
] | null | null | null |
model/UniGNN.py
|
czc567/UniGNN
|
bbb061f393b847ff6c7c20cab9e1ecb8f1c3eb96
|
[
"MIT"
] | null | null | null |
model/UniGNN.py
|
czc567/UniGNN
|
bbb061f393b847ff6c7c20cab9e1ecb8f1c3eb96
|
[
"MIT"
] | null | null | null |
import torch
import torch.nn as nn, torch.nn.functional as F
from torch.nn.parameter import Parameter
import math
from torch_scatter import scatter
from torch_geometric.utils import softmax
# NOTE: can not tell which implementation is better statistically
def glorot(tensor):
    """Glorot/Xavier uniform initialisation, in place; no-op for ``None``."""
    if tensor is None:
        return
    fan_sum = tensor.size(-2) + tensor.size(-1)
    bound = math.sqrt(6.0 / fan_sum)
    tensor.data.uniform_(-bound, bound)
def normalize_l2(X):
    """Row-normalize matrix (rows with zero norm are left as zeros)."""
    row_norms = X.detach().norm(dim=1, keepdim=True)
    inv_norms = row_norms.pow(-1)
    # zero rows produce inf after the reciprocal; map them back to 0
    inv_norms[torch.isinf(inv_norms)] = 0.
    return X * inv_norms
# v1: X -> XW -> AXW -> norm
class UniSAGEConv(nn.Module):
    """UniSAGE hypergraph convolution: X -> XW -> XW + aggregate(XW) -> norm."""

    def __init__(self, args, in_channels, out_channels, heads=8, dropout=0., negative_slope=0.2):
        super().__init__()
        # TODO: bias?
        self.W = nn.Linear(in_channels, heads * out_channels, bias=False)
        self.heads = heads
        self.in_channels = in_channels
        self.out_channels = out_channels
        # NOTE(review): negative_slope and dropout are stored but not used in
        # forward(); kept for interface parity with the other conv classes.
        self.negative_slope = negative_slope
        self.dropout = dropout
        self.args = args

    def __repr__(self):
        return '{}({}, {}, heads={})'.format(self.__class__.__name__,
                                             self.in_channels,
                                             self.out_channels, self.heads)

    def forward(self, X, vertex, edges):
        num_nodes = X.shape[0]
        X = self.W(X)
        # vertex -> hyperedge aggregation, then hyperedge -> vertex aggregation
        Xve = X[vertex]                                                        # [nnz, C]
        Xe = scatter(Xve, edges, dim=0, reduce=self.args.first_aggregate)      # [E, C]
        Xv = scatter(Xe[edges], vertex, dim=0,
                     reduce=self.args.second_aggregate, dim_size=num_nodes)    # [N, C]
        # residual-style sum with the transformed features
        X = X + Xv
        if self.args.use_norm:
            X = normalize_l2(X)
        # NOTE: concat heads or mean heads? normalize here? skip concat here?
        return X
# v1: X -> XW -> AXW -> norm
class UniGINConv(nn.Module):
    """UniGIN hypergraph convolution: X -> XW -> (1 + eps) * XW + aggregate(XW) -> norm."""

    def __init__(self, args, in_channels, out_channels, heads=8, dropout=0., negative_slope=0.2):
        super().__init__()
        self.W = nn.Linear(in_channels, heads * out_channels, bias=False)
        self.heads = heads
        self.in_channels = in_channels
        self.out_channels = out_channels
        # NOTE(review): negative_slope and dropout are stored but not used in
        # forward(); kept for interface parity with the other conv classes.
        self.negative_slope = negative_slope
        self.dropout = dropout
        # learnable GIN epsilon, initialised to zero
        self.eps = nn.Parameter(torch.Tensor([0.]))
        self.args = args

    def __repr__(self):
        return '{}({}, {}, heads={})'.format(self.__class__.__name__,
                                             self.in_channels,
                                             self.out_channels, self.heads)

    def forward(self, X, vertex, edges):
        num_nodes = X.shape[0]
        X = self.W(X)
        # vertex -> hyperedge aggregation, then hyperedge -> vertex sum
        Xve = X[vertex]                                                        # [nnz, C]
        Xe = scatter(Xve, edges, dim=0, reduce=self.args.first_aggregate)      # [E, C]
        Xv = scatter(Xe[edges], vertex, dim=0, reduce='sum',
                     dim_size=num_nodes)                                       # [N, C]
        # GIN-style update with learnable self weight
        X = (1 + self.eps) * X + Xv
        if self.args.use_norm:
            X = normalize_l2(X)
        # NOTE: concat heads or mean heads? normalize here? skip concat here?
        return X
# v1: X -> XW -> AXW -> norm
class UniGCNConv(nn.Module):
    """UniGCN hypergraph convolution with degree normalisation: X -> XW -> AXW -> norm."""

    def __init__(self, args, in_channels, out_channels, heads=8, dropout=0., negative_slope=0.2):
        super().__init__()
        self.W = nn.Linear(in_channels, heads * out_channels, bias=False)
        self.heads = heads
        self.in_channels = in_channels
        self.out_channels = out_channels
        # NOTE(review): negative_slope and dropout are stored but not used in
        # forward(); kept for interface parity with the other conv classes.
        self.negative_slope = negative_slope
        self.dropout = dropout
        self.args = args

    def __repr__(self):
        return '{}({}, {}, heads={})'.format(self.__class__.__name__,
                                             self.in_channels,
                                             self.out_channels, self.heads)

    def forward(self, X, vertex, edges):
        num_nodes = X.shape[0]
        # precomputed degree-normalisation factors taken from args
        degE = self.args.degE
        degV = self.args.degV
        X = self.W(X)
        # vertex -> hyperedge aggregation, scaled by edge degree factor
        Xve = X[vertex]                                                        # [nnz, C]
        Xe = scatter(Xve, edges, dim=0, reduce=self.args.first_aggregate)      # [E, C]
        Xe = Xe * degE
        # hyperedge -> vertex sum, scaled by vertex degree factor
        Xv = scatter(Xe[edges], vertex, dim=0, reduce='sum',
                     dim_size=num_nodes)                                       # [N, C]
        X = Xv * degV
        if self.args.use_norm:
            X = normalize_l2(X)
        # NOTE: skip concat here?
        return X
# v2: X -> AX -> norm -> AXW
class UniGCNConv2(nn.Module):
    """UniGCN variant that aggregates first, then transforms: X -> AX -> norm -> AXW."""

    def __init__(self, args, in_channels, out_channels, heads=8, dropout=0., negative_slope=0.2):
        super().__init__()
        # unlike UniGCNConv, the linear layer here carries a bias
        self.W = nn.Linear(in_channels, heads * out_channels, bias=True)
        self.heads = heads
        self.in_channels = in_channels
        self.out_channels = out_channels
        # NOTE(review): negative_slope and dropout are stored but not used in
        # forward(); kept for interface parity with the other conv classes.
        self.negative_slope = negative_slope
        self.dropout = dropout
        self.args = args

    def __repr__(self):
        return '{}({}, {}, heads={})'.format(self.__class__.__name__,
                                             self.in_channels,
                                             self.out_channels, self.heads)

    def forward(self, X, vertex, edges):
        num_nodes = X.shape[0]
        # precomputed degree-normalisation factors taken from args
        degE = self.args.degE
        degV = self.args.degV
        # aggregate the raw features first: vertex -> hyperedge -> vertex
        Xve = X[vertex]                                                        # [nnz, C]
        Xe = scatter(Xve, edges, dim=0, reduce=self.args.first_aggregate)      # [E, C]
        Xe = Xe * degE
        Xv = scatter(Xe[edges], vertex, dim=0, reduce='sum',
                     dim_size=num_nodes)                                       # [N, C]
        X = Xv * degV
        if self.args.use_norm:
            X = normalize_l2(X)
        # linear transform applied after aggregation
        X = self.W(X)
        # NOTE: result might be slighly unstable
        # NOTE: skip concat here?
        return X
class UniGATConv(nn.Module):
    """UniGAT hypergraph convolution: attention over the hyperedges incident
    to each vertex, computed from edge features only (see note on att_v)."""

    def __init__(self, args, in_channels, out_channels, heads=8, dropout=0., negative_slope=0.2, skip_sum=False):
        super().__init__()
        self.W = nn.Linear(in_channels, heads * out_channels, bias=False)
        # per-head attention vectors for vertices and edges
        # NOTE(review): att_v is initialised but appears unused in forward();
        # only att_e contributes to the attention scores — confirm intent.
        self.att_v = nn.Parameter(torch.Tensor(1, heads, out_channels))
        self.att_e = nn.Parameter(torch.Tensor(1, heads, out_channels))
        self.heads = heads
        self.in_channels = in_channels
        self.out_channels = out_channels
        self.attn_drop = nn.Dropout(dropout)
        self.leaky_relu = nn.LeakyReLU(negative_slope)
        # when True, add the transformed input back onto the output
        self.skip_sum = skip_sum
        self.args = args
        self.reset_parameters()

    def __repr__(self):
        return '{}({}, {}, heads={})'.format(self.__class__.__name__,
                                             self.in_channels,
                                             self.out_channels, self.heads)

    def reset_parameters(self):
        # Glorot/Xavier-uniform init of the attention parameters
        glorot(self.att_v)
        glorot(self.att_e)

    def forward(self, X, vertex, edges):
        H, C, N = self.heads, self.out_channels, X.shape[0]

        # X0 = X # NOTE: reserved for skip connection

        # linear transform, then split into heads: [N, H, C]
        X0 = self.W(X)
        X = X0.view(N, H, C)

        # vertex -> hyperedge aggregation
        Xve = X[vertex] # [nnz, H, C]
        Xe = scatter(Xve, edges, dim=0, reduce=self.args.first_aggregate) # [E, H, C]

        # attention logits from edge features only
        alpha_e = (Xe * self.att_e).sum(-1) # [E, H, 1]
        a_ev = alpha_e[edges]
        alpha = a_ev # Recommended to use this
        alpha = self.leaky_relu(alpha)
        # normalise attention over the hyperedges incident to each vertex
        alpha = softmax(alpha, vertex, num_nodes=N)
        alpha = self.attn_drop( alpha )
        alpha = alpha.unsqueeze(-1)

        # hyperedge -> vertex weighted sum
        Xev = Xe[edges] # [nnz, H, C]
        Xev = Xev * alpha
        Xv = scatter(Xev, vertex, dim=0, reduce='sum', dim_size=N) # [N, H, C]
        X = Xv

        # concatenate heads back into a flat feature vector
        X = X.view(N, H * C)

        if self.args.use_norm:
            X = normalize_l2(X)

        if self.skip_sum:
            X = X + X0

        # NOTE: concat heads or mean heads?
        # NOTE: skip concat here?
        return X
# Registry mapping args.model_name strings to conv layer classes (see UniGNN.__init__).
__all_convs__ = {
    'UniGAT': UniGATConv,
    'UniGCN': UniGCNConv,
    'UniGCN2': UniGCNConv2,
    'UniGIN': UniGINConv,
    'UniSAGE': UniSAGEConv,
}
class UniGNN(nn.Module):
    def __init__(self, args, nfeat, nhid, nclass, nlayer, nhead, V, E):
        """UniGNN
        Args:
            args (NamedTuple): global args
            nfeat (int): dimension of features
            nhid (int): dimension of hidden features, note that actually it's #nhid x #nhead
            nclass (int): number of classes
            nlayer (int): number of hidden layers
            nhead (int): number of conv heads
            V (torch.long): V is the row index for the sparse incident matrix H, |V| x |E|
            E (torch.long): E is the col index for the sparse incident matrix H, |V| x |E|
        """
        super().__init__()
        Conv = __all_convs__[args.model_name]
        # output layer collapses to a single head producing class logits
        self.conv_out = Conv(args, nhid * nhead, nclass, heads=1, dropout=args.attn_drop)
        # first layer maps nfeat -> nhid; the remaining nlayer-2 keep width nhid*nhead
        self.convs = nn.ModuleList(
            [ Conv(args, nfeat, nhid, heads=nhead, dropout=args.attn_drop)] +
            [Conv(args, nhid * nhead, nhid, heads=nhead, dropout=args.attn_drop) for _ in range(nlayer-2)]
        )
        # fixed incidence indices, reused on every forward pass
        self.V = V
        self.E = E
        act = {'relu': nn.ReLU(), 'prelu':nn.PReLU() }
        self.act = act[args.activation]
        self.input_drop = nn.Dropout(args.input_drop)
        self.dropout = nn.Dropout(args.dropout)
        self.type_norm = args.type_norm
        self.num_groups =args.num_groups
        self.skip_weight=args.skip_weight
        # only norm types outside {'None','batch','pair'} (i.e. 'group') use the skip connection
        if self.type_norm in ['None', 'batch', 'pair']:
            skip_connect = False
        else:
            skip_connect = True
        # one normalization module per hidden conv layer
        self.layers_bn = torch.nn.ModuleList([])
        for _ in range(nlayer-1):
            self.layers_bn.append(batch_norm(nhid * nhead, self.type_norm, skip_connect, self.num_groups, self.skip_weight,
                                             args.skipweight_learnable))

    def forward(self, X):
        V, E = self.V, self.E
        X = self.input_drop(X)
        # conv -> norm -> activation -> dropout, per hidden layer
        for i, conv in enumerate(self.convs):
            X = conv(X, V, E)
            X=self.layers_bn[i](X)
            X = self.act(X)
            X = self.dropout(X)
        X = self.conv_out(X, V, E)
        return F.log_softmax(X, dim=1)
class UniGCNIIConv(nn.Module):
    """One UniGCNII propagation step: hypergraph aggregation plus
    initial residual (alpha) and identity mapping (beta)."""

    def __init__(self, args, in_features, out_features):
        super().__init__()
        self.W = nn.Linear(in_features, out_features, bias=False)
        self.args = args

    def forward(self, X, vertex, edges, alpha, beta, X0):
        num_nodes = X.shape[0]
        # two-stage hypergraph aggregation: vertices -> hyperedges -> vertices
        per_edge = scatter(X[vertex], edges, dim=0, reduce=self.args.first_aggregate)  # [E, C]
        per_edge = per_edge * self.args.degE
        back = per_edge[edges]                                                          # [nnz, C]
        aggregated = scatter(back, vertex, dim=0, reduce='sum', dim_size=num_nodes)     # [N, C]
        aggregated = aggregated * self.args.degV
        if self.args.use_norm:
            aggregated = normalize_l2(aggregated)
        # blend with the initial features X0, then mix identity and projection
        mixed = (1 - alpha) * aggregated + alpha * X0
        return (1 - beta) * mixed + beta * self.W(mixed)
class UniGCNII(nn.Module):
    def __init__(self, args, nfeat, nhid, nclass, nlayer, nhead, V, E):
        """UniGNNII
        Args:
            args (NamedTuple): global args
            nfeat (int): dimension of features
            nhid (int): dimension of hidden features, note that actually it's #nhid x #nhead
            nclass (int): number of classes
            nlayer (int): number of hidden layers
            nhead (int): number of conv heads
            V (torch.long): V is the row index for the sparse incident matrix H, |V| x |E|
            E (torch.long): E is the col index for the sparse incident matrix H, |V| x |E|
        """
        super().__init__()
        self.V = V
        self.E = E
        nhid = nhid * nhead
        act = {'relu': nn.ReLU(), 'prelu': nn.PReLU()}
        self.act = act[args.activation]
        self.input_drop = nn.Dropout(args.input_drop)
        # stack: input Linear -> nlayer UniGCNII convs -> output Linear
        self.convs = torch.nn.ModuleList()
        self.convs.append(torch.nn.Linear(nfeat, nhid))
        for _ in range(nlayer):
            self.convs.append(UniGCNIIConv(args, nhid, nhid))
        self.convs.append(torch.nn.Linear(nhid, nclass))
        # regularized params: only the conv stack; the two Linears are exempt
        self.reg_params = list(self.convs[1:-1].parameters())
        self.non_reg_params = list(self.convs[0:1].parameters()) + list(self.convs[-1:].parameters())
        # bug fix: self.dropout was assigned twice with identical modules; keep one
        self.dropout = nn.Dropout(args.dropout)
        self.alpha_learnable = args.alpha_learnable
        # per-layer residual weight, used instead of the constant 0.1 when enabled
        self.learnable_alpha = Parameter(torch.FloatTensor(nlayer, 1))
        self.reset_parameters()

    def reset_parameters(self):
        # uniform_(0.1, 0.1) is a constant fill: every alpha starts at 0.1
        self.learnable_alpha.data.uniform_(0.1, 0.1)

    def forward(self, x):
        V, E = self.V, self.E
        lamda, alpha = 0.2, 0.1
        x = self.dropout(x)
        x = F.relu(self.convs[0](x))
        x0 = x  # initial residual shared by every conv layer
        for i, con in enumerate(self.convs[1:-1]):
            if self.alpha_learnable:
                alpha = self.learnable_alpha[i]
            x = self.dropout(x)
            # identity-mapping strength decays with depth
            beta = math.log(lamda / (i + 1) + 1)
            x = F.relu(con(x, V, E, alpha, beta, x0))
        x = self.dropout(x)
        x = self.convs[-1](x)
        return F.log_softmax(x, dim=1)
class batch_norm(torch.nn.Module):
    """Configurable normalization layer: 'None', 'batch', 'pair' or 'group'.

    Args:
        dim_hidden (int): feature dimension of the input.
        type_norm (str): one of 'None', 'batch', 'pair', 'group'.
        skip_connect (bool): kept for interface compatibility; the residual add
            in the 'group' branch is applied unconditionally, this flag is unused.
        num_groups (int): number of soft clusters for 'group' norm.
        skip_weight (float): residual weight used when sw_learnable is False.
        sw_learnable (bool): use the learnable `lam` instead of skip_weight.
        multiple (float): output scale for 'pair' norm when mul_learnable is False.
        mul_learnable (bool): use the learnable `mul` instead of multiple.
    """

    def __init__(self, dim_hidden, type_norm, skip_connect=False, num_groups=1,
                 skip_weight=0.005, sw_learnable=False, multiple=1, mul_learnable=False):
        super(batch_norm, self).__init__()
        self.type_norm = type_norm
        self.skip_connect = skip_connect
        self.num_groups = num_groups
        self.skip_weight = skip_weight
        self.dim_hidden = dim_hidden
        self.sw_learnable = sw_learnable
        self.multiple = multiple
        self.mul_learnable = mul_learnable
        if self.type_norm == 'batch':
            self.bn = torch.nn.BatchNorm1d(dim_hidden, momentum=0.3)
        elif self.type_norm == 'group':
            # one BN over the concatenation of all group-weighted copies
            self.bn = torch.nn.BatchNorm1d(dim_hidden * self.num_groups, momentum=0.3)
            self.group_func = torch.nn.Linear(dim_hidden, self.num_groups, bias=True)
        # 'None'/'pair' need no submodules; lam/mul are created unconditionally
        # so state_dicts stay uniform across type_norm settings
        self.lam = Parameter(torch.FloatTensor(1, 1))
        self.mul = Parameter(torch.FloatTensor(1, 1))
        self.reset_parameters()

    def reset_parameters(self):
        # uniform_(a, a) is a constant fill: lam <- skip_weight, mul <- multiple
        self.lam.data.uniform_(self.skip_weight, self.skip_weight)
        self.mul.data.uniform_(self.multiple, self.multiple)

    def forward(self, x):
        if self.type_norm == 'None':
            return x
        elif self.type_norm == 'batch':
            return self.bn(x)
        elif self.type_norm == 'pair':
            # PairNorm-style: center columns, rescale rows to unit mean norm
            col_mean = x.mean(dim=0)
            x = x - col_mean
            rownorm_mean = (1e-6 + x.pow(2).sum(dim=1).mean()).sqrt()
            x = x / rownorm_mean
            if self.mul_learnable:
                x = x * self.mul
            else:
                x = x * self.multiple
            return x
        elif self.type_norm == 'group':
            if self.num_groups == 1:
                x_temp = self.bn(x)
            else:
                # soft cluster assignment, BN over each weighted copy, fold back
                score_cluster = F.softmax(self.group_func(x), dim=1)
                x_temp = torch.cat([score_cluster[:, group].unsqueeze(dim=1) * x
                                    for group in range(self.num_groups)], dim=1)
                x_temp = self.bn(x_temp).view(-1, self.num_groups, self.dim_hidden).sum(dim=1)
            if self.sw_learnable:
                x = x + x_temp * self.lam
            else:
                x = x + x_temp * self.skip_weight
            return x
        else:
            # bug fix: original raised Exception(f'...') with no placeholder,
            # hiding which normalization mode was requested
            raise ValueError('normalization type {!r} has not been implemented'.format(self.type_norm))
| 32.311765
| 141
| 0.550155
| 2,181
| 16,479
| 3.982118
| 0.107749
| 0.030397
| 0.020725
| 0.019574
| 0.643638
| 0.608866
| 0.557398
| 0.529073
| 0.520092
| 0.496603
| 0
| 0.012423
| 0.32593
| 16,479
| 509
| 142
| 32.375246
| 0.769445
| 0.134596
| 0
| 0.541538
| 0
| 0
| 0.017822
| 0
| 0
| 0
| 0
| 0.001965
| 0
| 1
| 0.086154
| false
| 0.003077
| 0.018462
| 0.015385
| 0.187692
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
883dc5b6a2c7b8f6d8eeeaa196713dc1735f14e3
| 23,515
|
py
|
Python
|
emolog_pc/emolog/emotool/main.py
|
alon/emolog
|
ed6e9e30a46ffc04282527ee73aa3bb8605e2dc9
|
[
"MIT"
] | null | null | null |
emolog_pc/emolog/emotool/main.py
|
alon/emolog
|
ed6e9e30a46ffc04282527ee73aa3bb8605e2dc9
|
[
"MIT"
] | 2
|
2019-01-29T15:27:34.000Z
|
2021-03-06T20:00:16.000Z
|
emolog_pc/emolog/emotool/main.py
|
alon/emolog
|
ed6e9e30a46ffc04282527ee73aa3bb8605e2dc9
|
[
"MIT"
] | 1
|
2019-01-03T18:44:54.000Z
|
2019-01-03T18:44:54.000Z
|
#!/bin/env python3
# import os
# os.environ['PYTHONASYNCIODEBUG'] = '1'
# import logging
# logging.getLogger('asyncio').setLevel(logging.DEBUG)
from datetime import datetime
import traceback
import atexit
import argparse
import os
from os import path
import sys
import logging
from struct import pack
import random
from time import time, sleep, perf_counter
from socket import socket
from configparser import ConfigParser
from shutil import which
from asyncio import sleep, Protocol, get_event_loop, Task
from pickle import dumps
import csv
from ..consts import BUILD_TIMESTAMP_VARNAME
from ..util import version, resolve, create_process, kill_all_processes, gcd
from ..util import verbose as util_verbose
from ..lib import AckTimeout, ClientProtocolMixin, SamplerSample
from ..varsfile import merge_vars_from_file_and_list
from ..dwarfutil import read_elf_variables
logger = logging.getLogger()  # root logger; handlers are attached in setup_logging()
module_dir = os.path.dirname(os.path.realpath(__file__))
# Location of the example PC-platform executable used by `--fake pc`.
pc_dir = os.path.join(module_dir, '..', '..', '..', 'examples', 'pc_platform')
pc_executable = os.path.join(pc_dir, 'pc')
def start_fake_bench(port):
    """Start a fake 'bench' target: a fake sine generator that never ticks.

    Bug fix: start_fake_sine() requires build_timestamp_value, so the old
    two-argument call raised TypeError at runtime. Bench mode has no real
    ELF build timestamp, so pass 0 (a sentinel; see reasonable_timestamp_ms).
    """
    return start_fake_sine(ticks_per_second=0, port=port, build_timestamp_value=0)
def start_fake_sine(ticks_per_second, port, build_timestamp_value):
    """Re-launch this tool as a fake embedded target (--embedded) in a subprocess.

    Run in a separate process so it doesn't hog the CPython lock.
    Use our executable to work with a development environment (python
    executable) or pyinstaller (emotool.exe).
    """
    if sys.argv[0].endswith(path.basename(get_python_executable())):
        # invoked as "python emotool.py ..." - keep interpreter + script
        cmdline = sys.argv[:2]
    elif path.isfile(sys.argv[0]) or path.isfile(sys.argv[0] + '.exe'):
        cmdline = [sys.argv[0]]
    elif which(sys.argv[0]):
        cmdline = [sys.argv[0]]
    else:
        # bug fix: previously fell through with cmdline unbound -> NameError;
        # fail with an explicit error instead
        raise RuntimeError('cannot determine how to re-launch {!r} as a fake target'.format(sys.argv[0]))
    # force usage of python if the first parameter is a python script; use extension as predicate
    if cmdline[0].endswith('.py'):
        cmdline = [get_python_executable()] + cmdline
    return create_process(cmdline + ['--embedded', '--ticks-per-second', str(ticks_per_second), '--port', str(port),
                                     '--build-timestamp-value', str(build_timestamp_value)])
def start_pc(port, exe, debug):
    """Start the PC controller executable, or in debug mode let the user run it under cgdb."""
    exe = os.path.realpath(exe)
    cmdline = [exe, str(port)]
    cmdline_str = ' '.join(cmdline)
    debug_cmdline = 'EMOLOG_PC_PORT={port} cgdb --args {cmdline_str}'.format(port=port, cmdline_str=cmdline_str)
    os.environ['EMOLOG_PC_PORT'] = str(port)
    if not debug:
        return create_process(cmdline)
    # debug: wait for the user to launch the process under a debugger themselves
    input("press enter once you ran pc with: {debug_cmdline}".format(debug_cmdline=debug_cmdline))
    return
def iterate(prefix, initial):
    """Yield an endless stream of numbered csv names: prefix_NNN.csv (zero-padded to 3)."""
    n = initial
    while True:
        yield '{}_{:03}.csv'.format(prefix, n)
        n += 1
def next_available(folder, prefix):
    """Return the first folder/prefix_NNN.csv path (NNN = 001, 002, ...) that does not exist yet."""
    for filename in iterate(prefix, 1):
        candidate = os.path.join(folder, filename)
        if not os.path.exists(candidate):
            return candidate
def setup_logging(filename, silent):
    """Configure the root logger: optional DEBUG file log plus an INFO console log.

    filename: log file path, or falsy to skip file logging.
    silent: when True, only ERROR and above pass the logger-level filter.
    """
    if silent:
        logger.setLevel(logging.ERROR)
    else:
        logger.setLevel(logging.DEBUG)
    if filename:
        # file handler records everything (DEBUG and up) with full context
        file_handler = logging.FileHandler(filename=filename)
        file_handler.setLevel(level=logging.DEBUG)
        file_formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
        file_handler.setFormatter(file_formatter)
        logger.addHandler(file_handler)
    # console handler shows bare messages at INFO and up
    stream_formatter = logging.Formatter('%(message)s')
    stream_handler = logging.StreamHandler()
    stream_handler.setLevel(level=logging.INFO)
    stream_handler.setFormatter(stream_formatter)
    logger.addHandler(stream_handler)
    # one line per level so the handler setup is immediately visible in both outputs
    logger.debug('debug first')
    logger.info('info first')
def start_serial_process(serialurl, baudrate, hw_flow_control, port):
    """Spawn the serial2tcp bridge subprocess and return its process handle.

    NOTE(review): the original docstring said this blocks until serial2tcp
    is ready; the code just spawns the process - callers retry the connect.
    """
    cmd = create_python_process_cmdline('serial2tcp.py')
    if hw_flow_control is True:
        cmd += ['-r']
    cmd += ' -b {} -p {} -P {}'.format(baudrate, serialurl, port).split()
    return create_process(cmd)
def create_python_process_cmdline(script):
    """Build an argv running `script` (relative to this module's dir) with the current interpreter."""
    return [sys.executable, os.path.join(module_dir, script)]
def create_python_process_cmdline_command(command):
    """Build an argv that runs `command` with the current interpreter via -c."""
    return [sys.executable, '-c', command]
class EmoToolClient(ClientProtocolMixin):
    """Emolog client protocol: a thin property layer over self.cylib.

    Most state (csv handler, counters) lives on self.cylib, which is
    presumably created by ClientProtocolMixin.__init__ -- defined elsewhere.
    """

    def __init__(self, ticks_per_second, verbose, dump, debug, csv_writer_factory=None):
        if debug:
            # effectively disable the ack timeout so a gdb session doesn't trip it
            print("timeout set to one hour for debugging (gdb)")
            ClientProtocolMixin.ACK_TIMEOUT_SECONDS = 3600.0
        super().__init__(verbose=verbose, dump=dump,
                         ticks_per_second=ticks_per_second,
                         csv_writer_factory=csv_writer_factory)

    @property
    def running(self):
        # True while the sampler capture is still active
        return self.cylib.running()

    @property
    def ticks_lost(self):
        return self.cylib.csv_handler.ticks_lost

    @property
    def samples_received(self):
        return self.cylib.csv_handler.samples_received

    @property
    def csv_filename(self):
        return self.cylib.csv_handler.csv_filename

    def reset(self, *args, **kw):
        self.last_samples_received = None # don't trigger the check_progress() watchdog on the next sample
        self.cylib.csv_handler.reset(*args, **kw)

    def register_listener(self, *args, **kw):
        # forward sample-batch callbacks (used by the TCP listener / GUI)
        self.cylib.csv_handler.register_listener(*args, **kw)

    def data_received(self, data):
        self.cylib.data_received(data)
async def start_transport(client, args):
    """Start the sample source (fake target or serial bridge) and connect `client` to it.

    Picks a random localhost TCP port, spawns the appropriate subprocess,
    polls until it accepts a connection, then binds the asyncio transport.
    Raises SystemExit for an unrecognized --fake value.
    """
    loop = get_event_loop()
    port = random.randint(10000, 50000)
    if args.fake is not None:
        if args.fake == 'gen':
            start_fake_sine(ticks_per_second=args.ticks_per_second, port=port, build_timestamp_value=args.fake_gen_build_timestamp_value)
        elif args.fake == 'bench':
            start_fake_bench(port)
        elif args.fake == 'pc' or os.path.exists(args.fake):
            exe = pc_executable if args.fake == 'pc' else args.fake
            start_pc(port=port, exe=exe, debug=args.debug)
        else:
            print("error: unfinished support for fake {fake}".format(fake=args.fake))
            raise SystemExit
    else:
        start_serial_process(serialurl=args.serial, baudrate=args.baud, hw_flow_control=args.hw_flow_control, port=port)
    # poll until the freshly spawned process starts listening (up to ~1s)
    attempt = 0
    while attempt < 10:
        attempt += 1
        await sleep(0.1)
        s = socket()
        try:
            s.connect(('127.0.0.1', port))
        except OSError:
            # bug fix: was a bare `except:` (also swallowed KeyboardInterrupt etc.)
            s.close()  # don't leak the failed socket
        else:
            break
    # NOTE: as in the original, if all attempts fail this still proceeds and
    # create_connection raises on the unusable socket
    client_transport, client2 = await loop.create_connection(lambda: client, sock=s)
    assert client2 is client
args = None  # NOTE(review): appears unused in this module - possibly legacy; verify before removing
def cancel_outstanding_tasks():
    """Cancel every asyncio task still pending (best effort; logs each one).

    Bug fix: Task.all_tasks() was removed in Python 3.9; prefer
    asyncio.all_tasks() (3.7+) and fall back for older interpreters.
    """
    import asyncio
    try:
        tasks = asyncio.all_tasks()
    except AttributeError:
        # Python < 3.7: only the classmethod exists
        tasks = Task.all_tasks()
    except RuntimeError:
        # asyncio.all_tasks() requires a running loop on 3.10+; nothing to cancel
        tasks = []
    for task in tasks:
        logger.warning('canceling task {}'.format(task))
        task.cancel()
def windows_try_getch():
    """Return one pending keypress (bytes) on Windows, or None when no key is waiting."""
    import msvcrt
    if not msvcrt.kbhit():
        return None  # be explicit
    return msvcrt.getch()
# Platform dispatch: on Windows we can poll the console for a keypress;
# elsewhere capture is stopped with Ctrl-C (KeyboardInterrupt) instead.
if sys.platform == 'win32':
    try_getch_message = "Press any key to stop capture early..."
    try_getch = windows_try_getch
else:
    try_getch_message = "Press Ctrl-C to stop capture early..."
    def try_getch():
        # no portable non-blocking getch on POSIX; always report "no key"
        return None
async def cleanup(args, client):
    """Best-effort shutdown: stop the sampler, close the transport, kill children.

    Safe to call even when the client never connected (no transport yet).
    """
    if not hasattr(client, 'transport') or client.transport is None:
        # never connected - just cancel whatever tasks are still pending
        cancel_outstanding_tasks()
        return
    if not args.no_cleanup:
        logger.info("sending sampler stop")
        try:
            await client.send_sampler_stop()
        except Exception:
            # bug fix: was a bare `except:` which also swallowed
            # KeyboardInterrupt and (3.8+) CancelledError
            logger.info("exception when sending sampler stop in cleanup()")
        client.exit_gracefully()
    if client.transport is not None:
        client.transport.close()
    kill_all_processes()
def parse_args(args=None):
    """Build and evaluate the emotool command line.

    Returns the parsed namespace after filling in fake-mode defaults
    (fake vars for 'gen', default ELF for 'pc') and default var files.
    Exits via SystemExit when a required argument is missing.
    """
    parser = argparse.ArgumentParser(
        description='Emolog protocol capture tool. Implements emolog client side, captures a given set of variables to a csv file')
    parser.add_argument('--fake', # TODO: can I have a hook for choices? i.e. choices=ChoicesOrExecutable['gen', 'pc', 'bench'],
                        help='debug only - fake a client - either generated or pc controller')
    # both fake timestamps default to "now" in milliseconds
    now_timestamp = int(datetime.now().timestamp() * 1000)
    parser.add_argument('--fake-elf-build-timestamp-value', type=int, default=now_timestamp, help='debug only - fake build timestamp value (address is fixed)')
    parser.add_argument('--fake-gen-build-timestamp-value', type=int, default=now_timestamp, help='debug only - fake build timestamp value (address is fixed)')
    parser.add_argument('--serial', default='auto', help='serial URL or device name') # see http://pythonhosted.org/pyserial/pyserial_api.html#serial.serial_for_url
    parser.add_argument('--baud', default=8000000, help='baudrate, using RS422 up to 12000000 theoretically', type=int)
    parser.add_argument('--hw_flow_control', default=False, action='store_true', help='use CTS/RTS signals for flow control')
    parser.add_argument('--elf', default=None, help='elf executable running on embedded side')
    parser.add_argument('--var', default=[], action='append',
                        help='add a single var, example "foo,1,0" = "varname,ticks,tickphase"')
    parser.add_argument('--snapshotfile', help='file containing variable definitions to be taken once at startup')
    parser.add_argument('--varfile', help='file containing variable definitions, identical to multiple --var calls')
    group = parser.add_mutually_exclusive_group()
    group.add_argument('--out', help='Output file name. ".csv" extension is added if missing. '
                                     'File is overwritten if already exists.')
    group.add_argument('--out_prefix', default='emo', help='Output file prefix. Output is saved to the first free '
                                                           '(not already existing) file of the format "prefix_xxx.csv", '
                                                           'where xxx is a sequential number starting from "001"')
    parser.add_argument('--csv-factory', help='advanced: module[.module]*.function to use as factory for csv file writing', default=None)
    # NOTE(review): --verbose stores False into dest='silent' (default=True),
    # i.e. logging is silent unless --verbose is given - confirm this is intended.
    parser.add_argument('--verbose', default=True, action='store_false', dest='silent',
                        help='turn on verbose logging; affects performance under windows')
    parser.add_argument('--verbose-kill', default=False, action='store_true')
    parser.add_argument('--log', default=None, help='log messages and other debug/info logs to this file')
    parser.add_argument('--runtime', type=float, default=3.0, help='quit after given seconds. use 0 for endless run.')
    parser.add_argument('--no-cleanup', default=False, action='store_true', help='do not stop sampler on exit')
    parser.add_argument('--dump')
    parser.add_argument('--ticks-per-second', default=1000000 / 50, type=float,
                        help='number of ticks per second. used in conjunction with runtime')
    parser.add_argument('--debug', default=False, action='store_true', help='produce more verbose debugging output')
    # Server - used for GUI access
    parser.add_argument('--listen', default=None, type=int, help='enable listening TCP port for samples') # later: add a command interface, making this suitable for interactive GUI
    parser.add_argument('--gui', default=False, action='store_true', help='launch graphing gui in addition to saving')
    # Embedded
    parser.add_argument('--embedded', default=False, action='store_true', help='debugging: be a fake embedded target')
    parser.add_argument('--check-timestamp', action='store_true', default=False, help='wip off by default for now')
    ret, unparsed = parser.parse_known_args(args=args)
    if ret.fake is None:
        # real target: an ELF is required to resolve variable addresses
        if not ret.elf and not ret.embedded:
            # elf required unless fake_sine in effect
            parser.print_usage()
            print("{e}: error: the following missing argument is required: --elf".format(e=sys.argv[0]))
            raise SystemExit
    else:
        if ret.fake == 'gen':
            # fill in fake vars
            ret.var = [
                # name, ticks, phase
                'a,1,0',
                'b,1,0',
                'c,1,0',
                'd,1,0',
                'e,1,0',
                'f,1,0',
                'g,1,0',
                'h,1,0',
            ]
        else:
            if ret.elf is None:
                if ret.fake == 'pc':
                    if not os.path.exists(pc_executable):
                        print("missing pc ELF file: {e}".format(e=pc_executable))
                        raise SystemExit
                    ret.elf = pc_executable
                else:
                    # fake is itself a path to an executable; use it as the ELF too
                    ret.elf = ret.fake
    if ret.varfile is None:
        # default var definitions shipped two levels above this module
        ret.varfile = os.path.join(module_dir, '..', '..', 'vars.csv')
        ret.snapshotfile = os.path.join(module_dir, '..', '..', 'snapshot_vars.csv')
    return ret
def bandwidth_calc(args, variables):
    """
    :param variables: list of dictionaries
    :return: average baud rate (considering 8 data bits, 1 start & stop bits)
    """
    per_second = args.ticks_per_second  # simplification: assume a packet every tick (upper bound)
    header_bytes = per_second * SamplerSample.empty_size()
    payload_bytes = 0
    for v in variables:
        payload_bytes += per_second / v['period_ticks'] * v['size']
    # 10 bus bits per byte: 8 data + start + stop
    return (header_bytes + payload_bytes) * 10
async def initialize_board(client, variables):
    """Run the version/stop/set-variables/start handshake, retrying on ack timeout.

    Returns True on success, False when all retries were exhausted.
    """
    logger.debug("about to send version")
    await client.send_version()
    max_retries = 3
    retries = max_retries
    while retries > 0:
        try:
            logger.debug("about to send sampler stop")
            await client.send_sampler_stop()
            logger.debug("about to send sampler set variables")
            await client.send_set_variables(variables)
            logger.debug("about to send sampler start")
            await client.send_sampler_start()
            logger.debug("client initiated, starting to log data at rate TBD")
            break
        except AckTimeout:
            retries -= 1
            logger.info("Ack Timeout. Retry {}".format(max_retries - retries))
    return retries != 0
def banner(s):
    """Print s framed above and below by '=' rules of matching width."""
    rule = "=" * len(s)
    print(rule)
    print(s)
    print(rule)
async def run_client(args, client, variables, allow_kb_stop):
    """Initialize the board, then poll until capture ends (or the user stops it).

    allow_kb_stop: when True, a keypress (Windows) or Ctrl-C can end capture early.
    Raises SystemExit when board initialization fails after retries.
    """
    if not await initialize_board(client=client, variables=variables):
        logger.error("Failed to initialize board, exiting.")
        raise SystemExit
    sys.stdout.flush()
    logger.info('initialized board')
    # poll faster when a finite runtime is set, so we stop close to the deadline
    dt = 0.1 if args.runtime is not None else 1.0
    if allow_kb_stop and try_getch_message:
        print(try_getch_message)
    client.start_logging_time = time()
    while client.running:
        if allow_kb_stop and try_getch():
            break
        await sleep(dt)
    await client.send_sampler_stop()
async def record_snapshot(args, client, csv_filename, varsfile, extra_vars=None):
    """Capture exactly one sample of the snapshot variables into csv_filename.

    Returns (elf_by_name, read_values):
        elf_by_name: variable name -> ELF variable dict.
        read_values: variable name -> string value read from the target
                     (empty dict when the snapshot produced no data).
    """
    if extra_vars is None:
        extra_vars = []
    defs = merge_vars_from_file_and_list(filename=varsfile, def_lines=extra_vars)
    names, variables = read_elf_variables(elf=args.elf, defs=defs, fake_build_timestamp=args.fake_elf_build_timestamp_value)
    elf_by_name = {x['name']: x for x in variables}
    # max_samples=1: a snapshot is a single-row capture
    client.reset(csv_filename=csv_filename, names=names, min_ticks=1, max_samples=1)
    await run_client(args, client, variables, allow_kb_stop=False)
    read_values = {}
    try:
        with open(csv_filename) as fd:
            lines = list(csv.reader(fd))
    except OSError:
        # bug fix: was `except IOError as io` - an unused binding that
        # shadowed the stdlib `io` module name (IOError is an alias of OSError)
        logger.warning("snapshot failed, no file created")
        lines = []
    if len(lines) < 2:
        logger.warning("snapshot failed, no data saved")
    else:
        # row 0 is the header, row 1 the single snapshot sample
        read_values = dict(zip(lines[0], lines[1]))
    return elf_by_name, read_values
# Per-machine settings file (e.g. output folder); read from the working directory.
CONFIG_FILE_NAME = 'local_machine_config.ini'
class SamplePassOn(Protocol):
    """Mirror batches of client samples to a TCP peer as length-prefixed pickles."""

    def __init__(self, client):
        self.client = client

    def connection_made(self, transport):
        self.transport = transport
        # from now on, every message batch the client sees is forwarded here
        self.client.register_listener(self.write_messages)

    def write_messages(self, messages):
        payload = dumps(messages)
        # little-endian int32 length prefix, then the pickled payload
        self.transport.write(pack('<i', len(payload)))
        self.transport.write(payload)
async def start_tcp_listener(client, port):
    """Serve the client's sample stream to listeners (e.g. a GUI) on localhost:port."""
    listener_loop = get_event_loop()
    await listener_loop.create_server(lambda: SamplePassOn(client), host='localhost', port=port)
    print("waiting on {port}".format(port=port))
async def amain_startup(args):
    """Early startup: validate config, set up logging, connect the client transport.

    Returns the connected EmoToolClient. Exits via SystemExit when the
    per-machine configuration file is missing.
    """
    if not os.path.exists(CONFIG_FILE_NAME):
        print("Configuration file {} not found. "
              "This file is required for specifying local machine configuration such as the output folder.\n"
              "Please start from the example {}.example.\n"
              "Exiting.".format(CONFIG_FILE_NAME, CONFIG_FILE_NAME))
        raise SystemExit
    setup_logging(args.log, args.silent)
    # TODO - fold this into window, make it the general IO object, so it decided to spew to stdout or to the GUI
    banner("Emotool {}".format(version()))
    client = EmoToolClient(ticks_per_second=args.ticks_per_second,
                           verbose=not args.silent, dump=args.dump, debug=args.debug,
                           csv_writer_factory=resolve(args.csv_factory))
    await start_transport(client=client, args=args)
    return client
def reasonable_timestamp_ms(timestamp):
    """
    checks that the timestamp is within 100 years and not zero
    this means a random value from memory will probably not be interpreted as a valid timestamp
    and a better error message could be printed
    """
    hundred_years_ms = 1000 * 3600 * 24 * 365 * 100
    if timestamp == 0:
        return False
    return timestamp < hundred_years_ms
def check_timestamp(params, elf_variables):
    """Verify that the build timestamp read from the target matches the ELF's.

    params: snapshot values read from the target (name -> string).
    elf_variables: name -> ELF variable dict (uses 'init_value' and 'address').
    Exits via SystemExit on any mismatch; prints a confirmation on success.
    """
    if BUILD_TIMESTAMP_VARNAME not in params:
        logger.error('timestamp not received from target')
        raise SystemExit
    read_value = int(params[BUILD_TIMESTAMP_VARNAME])
    if BUILD_TIMESTAMP_VARNAME not in elf_variables:
        logger.error('Timestamp variable not in ELF file. Did you add a pre-build step to generate it?')
        raise SystemExit
    elf_var = elf_variables[BUILD_TIMESTAMP_VARNAME]
    elf_value = elf_var['init_value']
    if elf_value is None or elf_var['address'] == 0:
        logger.error('Bad timestamp variable in ELF: init value = {value}, address = {address}'.format(value=elf_value, address=elf_var["address"]))
        raise SystemExit
    # bug fix: redundant second dictionary lookup replaced by a plain conversion
    elf_value = int(elf_value)
    if read_value != elf_value:
        # distinguish "no timestamp variable at all" from a stale build
        if not reasonable_timestamp_ms(read_value):
            logger.error("Build timestamp mismatch: the embedded target probably doesn't contain a timestamp variable")
            raise SystemExit
        if read_value < elf_value:
            logger.error('Build timestamp mismatch: target build timestamp is older than ELF')
        else:
            logger.error('Build timestamp mismatch: target build timestamp is newer than ELF')
        raise SystemExit
    print("Timestamp verified: ELF file and embedded target match")
async def amain(client, args):
    """Main capture flow: resolve variables, optional snapshot, run, print stats.

    Returns the client so callers can inspect counters / csv_filename.
    """
    defs = merge_vars_from_file_and_list(def_lines=args.var, filename=args.varfile)
    names, variables = read_elf_variables(elf=args.elf, defs=defs)
    config = ConfigParser()
    config.read(CONFIG_FILE_NAME)
    output_folder = config['folders']['output_folder']
    if args.out:
        if args.out[-4:] != '.csv':
            args.out = args.out + '.csv'
        csv_filename = os.path.join(output_folder, args.out)
    else: # either --out or --out_prefix must be specified
        csv_filename = next_available(output_folder, args.out_prefix)
    take_snapshot = args.check_timestamp or args.snapshotfile
    if take_snapshot:
        print("Taking snapshot of parameters")
        # snapshot goes next to the capture file, with a _params suffix
        snapshot_output_filename = csv_filename[:-4] + '_params.csv'
        (snapshot_elf_variables, params) = await record_snapshot(
            args=args, client=client,
            csv_filename=snapshot_output_filename,
            varsfile=args.snapshotfile,
            # TODO: why do we use 20000 in snapshot_vars.csv? ask Guy
            extra_vars = ['{var_name},100,50'.format(var_name=BUILD_TIMESTAMP_VARNAME)] if args.check_timestamp else [])
        print("parameters saved to: {}".format(snapshot_output_filename))
        if args.check_timestamp:
            check_timestamp(params, snapshot_elf_variables)
        print("")
    print("output file: {}".format(csv_filename))
    bandwidth_bps = bandwidth_calc(args=args, variables=variables)
    print("upper bound on bandwidth: {} Mbps out of {} ({:.3f}%)".format(
        bandwidth_bps / 1e6,
        args.baud / 1e6,
        100 * bandwidth_bps / args.baud))
    # sample at the gcd of all requested periods so every variable lands on a tick
    min_ticks = gcd(*(var['period_ticks'] for var in variables))
    max_samples = args.ticks_per_second * args.runtime if args.runtime else 0 # TODO - off by a factor of at least min_ticks_between_samples
    # TODO this corrects run-time if all vars are sampled at a low rate, but still incorrect in some cases e.g. (10, 13)
    max_samples = max_samples / min_ticks
    if max_samples > 0:
        print("running for {} seconds = {} samples".format(args.runtime, int(max_samples)))
    client.reset(csv_filename=csv_filename, names=names, min_ticks=min_ticks, max_samples=max_samples)
    if args.listen:
        await start_tcp_listener(client, args.listen)
    # wall-clock vs cpu-clock, to report the cpu utilization percentage below
    start_time = time()
    start_clock = perf_counter()
    await run_client(args=args, client=client, variables=variables, allow_kb_stop=True)
    logger.debug("stopped at time={} samples={}".format(time(), client.samples_received))
    setup_time = client.start_logging_time - start_time
    total_time = time() - start_time
    total_clock = perf_counter() - start_clock
    print("samples received: {samples_received}\nticks lost: {ticks_lost}\ntime run {total_time:#3.6} cpu %{percent} (setup time {setup_time:#3.6})".format(
        samples_received=client.samples_received,
        ticks_lost=client.ticks_lost,
        total_time=total_time,
        percent=int(total_clock * 100 / total_time),
        setup_time=setup_time,
    ))
    return client
def start_callback(args, loop):
    """Drive the event loop: startup, capture, then cleanup; returns the client.

    NOTE(review): the first handler is a bare `except:` - it also catches
    KeyboardInterrupt/SystemExit raised during startup; verify that is
    intended before narrowing it. An unhandled Exception from amain() is
    re-raised and skips cleanup (original behavior).
    """
    loop.set_debug(args.debug)
    try:
        client = loop.run_until_complete(amain_startup(args))
    except:
        traceback.print_exc()
        raise SystemExit
    try:
        client = loop.run_until_complete(amain(client=client, args=args))
    except KeyboardInterrupt:
        print("exiting on user ctrl-c")
    except Exception as e:
        logger.error("got exception {!r}".format(e))
        raise
    # runs after a normal finish or a ctrl-c (client may be partially set up)
    loop.run_until_complete(cleanup(args=args, client=client))
    return client
def main(cmdline=None):
    """Entry point: parse args, then run either embedded fake mode or the capture client.

    cmdline: optional list of argument strings; None means argparse reads sys.argv.
    """
    atexit.register(kill_all_processes)
    if cmdline is None:
        args = parse_args()
    else:
        args = parse_args(cmdline)
    util_verbose.kill = args.verbose_kill
    if args.embedded:
        # act as the fake embedded target (spawned by start_fake_sine)
        from .embedded import main as embmain
        embmain()
        return
    loop = get_event_loop()
    def exception_handler(loop, context):
        print("Async Exception caught: {context}".format(context=context))
        raise SystemExit
    loop.set_exception_handler(exception_handler)
    client = start_callback(args, loop)
    if client.csv_filename is None or not os.path.exists(client.csv_filename):
        print("no csv file created.")
# Allow running as a script as well as via the package entry point.
if __name__ == '__main__':
    main()
| 39.991497
| 180
| 0.671444
| 3,086
| 23,515
| 4.943292
| 0.195723
| 0.018027
| 0.025631
| 0.009046
| 0.13471
| 0.093871
| 0.055457
| 0.039987
| 0.034874
| 0.027663
| 0
| 0.009837
| 0.221858
| 23,515
| 587
| 181
| 40.059625
| 0.823861
| 0.073528
| 0
| 0.13082
| 0
| 0.004435
| 0.188462
| 0.009538
| 0
| 0
| 0
| 0.005111
| 0.002217
| 1
| 0.068736
| false
| 0.006652
| 0.055432
| 0.015521
| 0.179601
| 0.050998
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
88409ac26b662efb26f26a13bba8f1ae10c3260d
| 487
|
py
|
Python
|
test.py
|
exc4l/kanjigrid
|
9e7dc0dadb578fc7ee4129aca5abf0a3767bc6dd
|
[
"MIT"
] | 1
|
2021-03-23T14:10:59.000Z
|
2021-03-23T14:10:59.000Z
|
test.py
|
exc4l/kanjigrid
|
9e7dc0dadb578fc7ee4129aca5abf0a3767bc6dd
|
[
"MIT"
] | null | null | null |
test.py
|
exc4l/kanjigrid
|
9e7dc0dadb578fc7ee4129aca5abf0a3767bc6dd
|
[
"MIT"
] | null | null | null |
import kanjigrid

# Build a kanji grid image from test.txt and report which Jouyou glyphs are present.
gridder = kanjigrid.Gridder("Kanji", 40, "Header", 52)
grading = kanjigrid.Jouyou()

with open("test.txt", "r", encoding="utf-8") as fh:
    data = fh.read()
gridder.feed_text(data)

grid = gridder.make_grid(grading, outside_of_grading=True, stats=True, bar_graph=True)
grid.save("test.png")

# probe the grading set for the 2010 variant glyphs, one lookup per check
for glyph, label in (("𠮟", "𠮟"), ("塡", "塡"), ("叱", "叱 as replacement")):
    if glyph in grading.get_all_in_grading():
        print(label)
| 28.647059
| 86
| 0.702259
| 79
| 487
| 4.151899
| 0.531646
| 0.164634
| 0.109756
| 0.137195
| 0.265244
| 0.265244
| 0.265244
| 0
| 0
| 0
| 0
| 0.011792
| 0.129363
| 487
| 17
| 87
| 28.647059
| 0.761792
| 0
| 0
| 0
| 0
| 0
| 0.110656
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.071429
| 0
| 0.071429
| 0.214286
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
8843966a1736b059d72b2035589a76126f469706
| 12,738
|
py
|
Python
|
spectrl/rl/ars_discrete.py
|
luigiberducci/dirl
|
5f7997aea20dfb7347ebdee66de9bea4e6cd3c62
|
[
"MIT"
] | 6
|
2021-11-11T00:29:18.000Z
|
2022-03-18T13:56:51.000Z
|
spectrl/rl/ars_discrete.py
|
luigiberducci/dirl
|
5f7997aea20dfb7347ebdee66de9bea4e6cd3c62
|
[
"MIT"
] | null | null | null |
spectrl/rl/ars_discrete.py
|
luigiberducci/dirl
|
5f7997aea20dfb7347ebdee66de9bea4e6cd3c62
|
[
"MIT"
] | 4
|
2021-11-26T03:11:02.000Z
|
2022-01-13T02:32:29.000Z
|
import torch
import numpy as np
import time
from spectrl.util.rl import get_rollout, test_policy
class NNParams:
    """Container for the neural-network architecture hyperparameters.

    Attributes:
        state_dim: continuous state dimension (network input size)
        action_dim: action space dimension (network output size)
        action_bound: float bound on action magnitude
        hidden_dim: hidden layer width
        num_discrete_states: number of distinct discrete modes (one net per mode)
    """

    def __init__(self, state_dim, action_dim, action_bound, hidden_dim, num_discrete_states):
        (self.state_dim, self.action_dim, self.action_bound,
         self.hidden_dim, self.num_discrete_states) = (
            state_dim, action_dim, action_bound, hidden_dim, num_discrete_states)
class ARSParams:
    """Hyperparameters for augmented random search (ARS).

    Attributes:
        n_iters: number of training iterations (ending condition)
        n_samples: perturbation directions per iteration (N)
        n_top_samples: best directions kept for the update (b)
        delta_std: perturbation standard deviation (nu)
        lr: step size (alpha)
        min_lr: minimum step size
        log_interval: iterations between log lines (default 1)
    """

    def __init__(self, n_iters, n_samples, n_top_samples, delta_std, lr, min_lr, log_interval=1):
        (self.n_iters, self.n_samples, self.n_top_samples,
         self.delta_std, self.lr, self.min_lr, self.log_interval) = (
            n_iters, n_samples, n_top_samples, delta_std, lr, min_lr, log_interval)
class NNPolicy:
    '''
    Neural network policy with one small MLP per discrete state.

    params: NNParams
    '''

    def __init__(self, params):
        self.params = params
        # One (input, hidden, output) linear stack per discrete state.
        self.input_layers = []
        self.hidden_layers = []
        self.output_layers = []
        for _ in range(params.num_discrete_states):
            self.input_layers.append(
                torch.nn.Linear(params.state_dim, params.hidden_dim))
            self.hidden_layers.append(
                torch.nn.Linear(params.hidden_dim, params.hidden_dim))
            self.output_layers.append(
                torch.nn.Linear(params.hidden_dim, params.action_dim))
        # Input normalization statistics (identity until training updates them).
        self.mu = np.zeros(params.state_dim)
        self.sigma_inv = np.ones(params.state_dim)
        # ARS perturbs weights directly, so autograd is never needed.
        for p in self.parameters():
            p.requires_grad_(False)

    def get_input(self, state):
        '''
        Extract the network input from the full state.
        state is a pair (continuous state, discrete state); only the first
        state_dim components of the continuous part are used.
        '''
        continuous_state = state[0]
        return continuous_state[:self.params.state_dim]

    def get_action(self, state):
        '''
        Get the action to take in the current state.
        state: (np.array, int)
        '''
        discrete = state[1]
        # Normalize, convert to torch, then run the stack for this discrete state.
        x = (self.get_input(state) - self.mu) * self.sigma_inv
        x = torch.tensor(x, dtype=torch.float)
        x = torch.relu(self.input_layers[discrete](x))
        x = torch.relu(self.hidden_layers[discrete](x))
        out = torch.tanh(self.output_layers[discrete](x))
        # Scale the bounded tanh output to the environment's action range.
        return self.params.action_bound * out.detach().numpy()

    def parameters(self):
        '''
        Construct the set of parameters for the policy.
        Returns a flat list of torch parameters, grouped per discrete state
        as (input, hidden, output).
        '''
        ps = []
        for inp, hid, out in zip(self.input_layers,
                                 self.hidden_layers,
                                 self.output_layers):
            ps.extend(inp.parameters())
            ps.extend(hid.parameters())
            ps.extend(out.parameters())
        return ps
class NNPolicySimple:
    '''
    Neural network policy that only looks at the system state.
    Ignores the discrete state and uses only the first state_dim components
    of the continuous state.

    params: NNParams
    '''

    def __init__(self, params):
        self.params = params
        # A single three-layer MLP shared across all discrete states.
        self.input_layer = torch.nn.Linear(params.state_dim, params.hidden_dim)
        self.hidden_layer = torch.nn.Linear(params.hidden_dim, params.hidden_dim)
        self.output_layer = torch.nn.Linear(params.hidden_dim, params.action_dim)
        # Input normalization statistics (identity until training updates them).
        self.mu = np.zeros(params.state_dim)
        self.sigma_inv = np.ones(params.state_dim)

    def get_input(self, state):
        # Continuous component truncated to the network's input size.
        continuous_state = state[0]
        return continuous_state[:self.params.state_dim]

    def get_action(self, state):
        '''
        Get the action to take in the current state.
        state: (np.array, int)
        '''
        # Normalize, convert to torch, then run the shared MLP.
        x = (self.get_input(state) - self.mu) * self.sigma_inv
        x = torch.tensor(x, dtype=torch.float)
        x = torch.relu(self.input_layer(x))
        x = torch.relu(self.hidden_layer(x))
        out = torch.tanh(self.output_layer(x))
        # Scale the bounded tanh output to the environment's action range.
        return self.params.action_bound * out.detach().numpy()

    def parameters(self):
        '''
        Construct the set of parameters for the policy.
        Returns a flat list of torch parameters (input, hidden, output).
        '''
        ps = []
        for layer in (self.input_layer, self.hidden_layer, self.output_layer):
            ps.extend(layer.parameters())
        return ps
def ars(env, nn_policy, params):
    '''
    Run augmented random search.

    Parameters:
        env: gym.Env (state is expected to be a pair (np.array, int))
            Also expected to provide cum_reward() function.
        nn_policy: NNPolicy or NNPolicySimple (its weights and normalization
            statistics are updated in place at the end)
        params: ARSParams

    Returns:
        log_info: list of [num_steps, time_mins, exp_cum_reward, success_rate]
            entries, one per logging interval.
    '''
    best_policy = nn_policy
    best_success_rate = 0
    best_reward = -1e9
    log_info = []
    num_steps = 0
    start_time = time.time()
    # Step 1: Save original policy (best weights are copied back at the end)
    nn_policy_orig = nn_policy
    # Step 2: Initialize per-dimension state distribution estimates
    mu_sum = np.zeros(nn_policy.params.state_dim)
    sigma_sq_sum = np.ones(nn_policy.params.state_dim) * 1e-5
    n_states = 0
    # Step 3: Training iterations
    for i in range(params.n_iters):
        # Step 3a: Sample deltas
        deltas = []
        for _ in range(params.n_samples):
            # i) Sample delta
            delta = _sample_delta(nn_policy)
            # ii) Construct perturbed policies
            nn_policy_plus = _get_delta_policy(
                nn_policy, delta, params.delta_std)
            nn_policy_minus = _get_delta_policy(
                nn_policy, delta, -params.delta_std)
            # iii) Get rollouts
            sarss_plus = get_rollout(env, nn_policy_plus, False)
            sarss_minus = get_rollout(env, nn_policy_minus, False)
            num_steps += (len(sarss_plus) + len(sarss_minus))
            # iv) Estimate cumulative rewards
            r_plus = env.cum_reward(
                np.array([state for state, _, _, _ in sarss_plus]))
            r_minus = env.cum_reward(
                np.array([state for state, _, _, _ in sarss_minus]))
            # v) Save delta
            deltas.append((delta, r_plus, r_minus))
            # vi) Update estimates of normalization parameters.
            # Bug fix: sum per dimension (axis=0). The previous np.sum(states)
            # collapsed to a scalar, which made mu and sigma_inv identical
            # across all state dimensions instead of per-dimension statistics.
            states = np.array([nn_policy.get_input(state)
                               for state, _, _, _ in sarss_plus + sarss_minus])
            mu_sum += np.sum(states, axis=0)
            sigma_sq_sum += np.sum(np.square(states), axis=0)
            n_states += len(states)
        # Step 3b: Keep the top-b deltas by the better of the two rewards
        deltas.sort(key=lambda delta: -max(delta[1], delta[2]))
        deltas = deltas[:params.n_top_samples]
        # Step 3c: Compute the sum of the deltas weighted by their reward differences
        delta_sum = [torch.zeros(delta_cur.shape)
                     for delta_cur in deltas[0][0]]
        for j in range(params.n_top_samples):
            # i) Unpack values
            delta, r_plus, r_minus = deltas[j]
            # ii) Add delta to the sum
            for k in range(len(delta_sum)):
                delta_sum[k] += (r_plus - r_minus) * delta[k]
        # Step 3d: Compute standard deviation of rewards
        sigma_r = np.std([delta[1] for delta in deltas] +
                         [delta[2] for delta in deltas])
        # Step 3e: Compute step length (1e-8 guards against sigma_r == 0)
        delta_step = [(params.lr * params.delta_std / (params.n_top_samples * sigma_r + 1e-8))
                      * delta_sum_cur
                      for delta_sum_cur in delta_sum]
        # Step 3f: Update policy weights
        nn_policy = _get_delta_policy(nn_policy, delta_step, 1.0)
        # Step 3g: Update normalization parameters
        # NOTE(review): sigma_inv is 1/RMS rather than 1/std (no mean
        # subtraction in the second moment) — kept as in the original.
        nn_policy.mu = mu_sum / n_states
        nn_policy.sigma_inv = 1.0 / np.sqrt((sigma_sq_sum / n_states))
        # Step 3h: Logging and best-policy tracking
        if i % params.log_interval == 0:
            exp_cum_reward, success_rate = test_policy(env, nn_policy, 100, use_cum_reward=True)
            current_time = time.time() - start_time
            print('\nSteps taken after iteration {}: {}'.format(i, num_steps))
            print('Reward after iteration {}: {}'.format(i, exp_cum_reward))
            print('Success rate after iteration {}: {}'.format(i, success_rate))
            print('Time after iteration {}: {} mins'.format(i, current_time/60))
            log_info.append([num_steps, current_time/60, exp_cum_reward, success_rate])
            # save best policy (ties broken in favor of higher reward)
            if success_rate > best_success_rate or (success_rate == best_success_rate
                                                    and exp_cum_reward >= best_reward):
                best_policy = nn_policy
                best_success_rate = success_rate
                best_reward = exp_cum_reward
            # Decay the learning rate once the policy is reliably succeeding
            if success_rate > 80 and exp_cum_reward > 0:
                params.lr = max(params.lr/2, params.min_lr)
    nn_policy = best_policy
    # Step 4: Copy new weights and normalization parameters to original policy
    for param, param_orig in zip(nn_policy.parameters(), nn_policy_orig.parameters()):
        param_orig.data.copy_(param.data)
    nn_policy_orig.mu = nn_policy.mu
    nn_policy_orig.sigma_inv = nn_policy.sigma_inv
    return log_info
def _sample_delta(nn_policy):
'''
Construct random perturbations to neural network parameters.
nn_policy: NNPolicy or NNPolicySimple
Returns: [torch.tensor] (list of torch tensors that is the same shape as nn_policy.parameters())
'''
delta = []
for param in nn_policy.parameters():
delta.append(torch.normal(torch.zeros(param.shape, dtype=torch.float)))
return delta
def _get_delta_policy(nn_policy, delta, sign):
    '''
    Construct a copy of the policy with parameters shifted by sign * delta.

    Parameters:
        nn_policy: NNPolicy or NNPolicySimple
        delta: [torch.tensor] (same shapes as nn_policy.parameters())
        sign: float

    Returns: NNPolicy or NNPolicySimple

    Raises: Exception for any other policy class.
    '''
    # Step 1: Instantiate a fresh policy of the same architecture
    if isinstance(nn_policy, NNPolicySimple):
        perturbed = NNPolicySimple(nn_policy.params)
    elif isinstance(nn_policy, NNPolicy):
        perturbed = NNPolicy(nn_policy.params)
    else:
        raise Exception("Unrecognized neural network architecture")
    # Step 2: Carry over the input-normalization statistics unchanged
    perturbed.mu = nn_policy.mu
    perturbed.sigma_inv = nn_policy.sigma_inv
    # Step 3: Overwrite the fresh weights with the perturbed originals
    for src, dst, d in zip(nn_policy.parameters(),
                           perturbed.parameters(),
                           delta):
        dst.data.copy_(src.data + sign * d)
    return perturbed
| 33.87766
| 100
| 0.622861
| 1,637
| 12,738
| 4.632254
| 0.149664
| 0.051695
| 0.018858
| 0.01899
| 0.427667
| 0.379797
| 0.30542
| 0.28854
| 0.28854
| 0.267704
| 0
| 0.008514
| 0.289998
| 12,738
| 375
| 101
| 33.968
| 0.829943
| 0.270372
| 0
| 0.229885
| 0
| 0
| 0.019396
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.074713
| false
| 0
| 0.022989
| 0.005747
| 0.172414
| 0.022989
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
8844d83df31129aa57478e21727d0b2f1ba309a4
| 640
|
py
|
Python
|
backends/c-scpu/config.py
|
guoshzhao/antares
|
30a6338dd6ce4100922cf26ec515e615b449f76a
|
[
"MIT"
] | null | null | null |
backends/c-scpu/config.py
|
guoshzhao/antares
|
30a6338dd6ce4100922cf26ec515e615b449f76a
|
[
"MIT"
] | null | null | null |
backends/c-scpu/config.py
|
guoshzhao/antares
|
30a6338dd6ce4100922cf26ec515e615b449f76a
|
[
"MIT"
] | null | null | null |
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
import subprocess
def get_execution_parallism():
    """Degree of execution parallelism for the single-core CPU backend (always 1)."""
    return 1
def do_native_translation_v2(codeset, **kwargs):
    """Render a kernel codeset into standalone C++ source for the c-scpu backend.

    codeset: (kernel_name, in_args, out_args, body) where each arg is a
    (dtype, name) pair; kwargs['attrs'].blend is extra source blended in.
    """
    kernel_name, in_args, out_args, body = codeset
    # One cast-and-assign declaration per kernel argument, pulled from __args.
    decls = [f'{dtype}* {name} = ({dtype}*)__args[{index}];'
             for index, (dtype, name) in enumerate(in_args + out_args)]
    expand_args = ' '.join(decls)
    # Single-rank CPU backend: threadIdx.x degenerates to the __rank__ argument.
    full_body = f'''
#include <math.h>
#include <algorithm>
#define rsqrt(x) (1.0f / sqrt(x))
{kwargs['attrs'].blend}
extern "C" void {kernel_name}(int __rank__, void** __args) {{
  {expand_args}
  using namespace std;
  {body.replace('threadIdx.x', '__rank__')}
}}
'''
    return full_body
| 22.857143
| 111
| 0.673438
| 93
| 640
| 4.344086
| 0.612903
| 0.049505
| 0.044554
| 0.064356
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.012891
| 0.151563
| 640
| 27
| 112
| 23.703704
| 0.731123
| 0.10625
| 0
| 0
| 0
| 0
| 0.506151
| 0.126538
| 0
| 0
| 0
| 0
| 0
| 1
| 0.111111
| false
| 0
| 0.055556
| 0.055556
| 0.277778
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
8845d03ee4e193d770ba1a3bdc365691fd17435f
| 878
|
py
|
Python
|
src/10_reactive/db.py
|
rurumimic/concurrency-python
|
3eb7875dd4848872226f8035d295a31a40e32bf0
|
[
"MIT"
] | null | null | null |
src/10_reactive/db.py
|
rurumimic/concurrency-python
|
3eb7875dd4848872226f8035d295a31a40e32bf0
|
[
"MIT"
] | null | null | null |
src/10_reactive/db.py
|
rurumimic/concurrency-python
|
3eb7875dd4848872226f8035d295a31a40e32bf0
|
[
"MIT"
] | null | null | null |
import sqlite3
from collections import namedtuple
from functional import seq
# Demo: writing pyfunctional (`seq`) sequences into SQLite and reading back.
# Uses an in-memory database, so nothing persists between runs.
with sqlite3.connect(':memory:') as conn:
    conn.execute('CREATE TABLE user (id INT, name TEXT)')
    conn.commit()
    User = namedtuple('User', 'id name')
    # Insert with an explicit parameterized INSERT statement...
    seq([(1, 'pedro'), (2, 'fritz')]).to_sqlite3(
        conn, 'INSERT INTO user (id, name) VALUES (?, ?)')
    # ...or by naming the target table; tuples, namedtuples and dicts all work.
    seq([(3, 'sam'), (4, 'stan')]).to_sqlite3(conn, 'user')
    seq([User(name='tom', id=5), User(name='keiga', id=6)]).to_sqlite3(conn, 'user')
    seq([dict(name='david', id=7), User(name='jordan', id=8)]
        ).to_sqlite3(conn, 'user')
    print(list(conn.execute('SELECT * FROM user')))
    # Expected output:
    # [
    #     (1, 'pedro'), (2, 'fritz'),
    #     (3, 'sam'), (4, 'stan'),
    #     (5, 'tom'), (6, 'keiga'),
    #     (7, 'david'), (8, 'jordan')
    # ]
    # Reading back: seq.sqlite3 runs a query and wraps the rows in a sequence.
    users = seq.sqlite3(conn, 'SELECT * FROM user').to_list()
    print(users)
| 31.357143
| 84
| 0.555809
| 119
| 878
| 4.058824
| 0.386555
| 0.113872
| 0.10766
| 0.10559
| 0.082816
| 0
| 0
| 0
| 0
| 0
| 0
| 0.033189
| 0.210706
| 878
| 27
| 85
| 32.518519
| 0.663781
| 0.134396
| 0
| 0
| 0
| 0
| 0.240372
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.1875
| 0
| 0.1875
| 0.125
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
884b8595b246a25d1c4c0a76969e4887169352b3
| 3,171
|
py
|
Python
|
tests/plugins/remove/test_rm_cli.py
|
jtpavlock/moe
|
6f053c8c53f92686013657bda676b00f97edd230
|
[
"MIT"
] | 14
|
2021-09-04T11:42:18.000Z
|
2022-02-04T05:11:46.000Z
|
tests/plugins/remove/test_rm_cli.py
|
jtpavlock/Moe
|
6f053c8c53f92686013657bda676b00f97edd230
|
[
"MIT"
] | 56
|
2021-05-26T00:00:46.000Z
|
2021-08-08T17:14:31.000Z
|
tests/plugins/remove/test_rm_cli.py
|
jtpavlock/moe
|
6f053c8c53f92686013657bda676b00f97edd230
|
[
"MIT"
] | 1
|
2021-07-22T21:55:21.000Z
|
2021-07-22T21:55:21.000Z
|
"""Tests the ``remove`` plugin."""
from unittest.mock import patch
import pytest
import moe
@pytest.fixture
def mock_rm():
    """Mock the `remove_item()` api call.

    Yields the autospec'd mock so tests can assert on the calls it received.
    """
    with patch("moe.plugins.remove.remove_item", autospec=True) as mock_rm:
        yield mock_rm
@pytest.fixture
def tmp_rm_config(tmp_config):
    """A temporary config enabling the remove plugin together with the cli."""
    return tmp_config('default_plugins = ["cli", "remove"]')
class TestCommand:
    """Test the `remove` command."""

    def test_track(self, mock_track, mock_query, mock_rm, tmp_rm_config):
        """Tracks are removed from the database with valid query."""
        cli_args = ["remove", "*"]
        mock_query.return_value = [mock_track]
        moe.cli.main(cli_args, tmp_rm_config)
        # Track is the default query type when no flag is given.
        mock_query.assert_called_once_with("*", query_type="track")
        mock_rm.assert_called_once_with(mock_track)

    def test_album(self, mock_album, mock_query, mock_rm, tmp_rm_config):
        """Albums are removed from the database with valid query."""
        # -a switches the query to album type.
        cli_args = ["remove", "-a", "*"]
        mock_query.return_value = [mock_album]
        moe.cli.main(cli_args, tmp_rm_config)
        mock_query.assert_called_once_with("*", query_type="album")
        mock_rm.assert_called_once_with(mock_album)

    def test_extra(self, mock_extra, mock_query, mock_rm, tmp_rm_config):
        """Extras are removed from the database with valid query."""
        # -e switches the query to extra type.
        cli_args = ["remove", "-e", "*"]
        mock_query.return_value = [mock_extra]
        moe.cli.main(cli_args, tmp_rm_config)
        mock_query.assert_called_once_with("*", query_type="extra")
        mock_rm.assert_called_once_with(mock_extra)

    def test_multiple_items(
        self, mock_track_factory, mock_query, mock_rm, tmp_rm_config
    ):
        """All items returned from the query are removed."""
        cli_args = ["remove", "*"]
        mock_tracks = [mock_track_factory(), mock_track_factory()]
        mock_query.return_value = mock_tracks
        moe.cli.main(cli_args, tmp_rm_config)
        # Every queried track must have triggered a remove_item() call.
        for mock_track in mock_tracks:
            mock_rm.assert_any_call(mock_track)
        assert mock_rm.call_count == 2

    def test_exit_code(self, mock_query, mock_rm, tmp_rm_config):
        """Return a non-zero exit code if no items are removed."""
        cli_args = ["remove", "*"]
        # An empty query result should abort the command via SystemExit.
        mock_query.return_value = []
        with pytest.raises(SystemExit) as error:
            moe.cli.main(cli_args, tmp_rm_config)
        assert error.value.code != 0
        mock_rm.assert_not_called()
class TestPluginRegistration:
    """Test the `plugin_registration` hook implementation."""

    def test_no_cli(self, tmp_config):
        """Don't enable the remove cli plugin if the `cli` plugin is not enabled."""
        config = tmp_config(settings='default_plugins = ["remove"]')
        assert not config.plugin_manager.has_plugin("remove_cli")

    def test_cli(self, tmp_config):
        """Enable the remove cli plugin if the `cli` plugin is enabled."""
        config = tmp_config(settings='default_plugins = ["remove", "cli"]')
        assert config.plugin_manager.has_plugin("remove_cli")
| 33.03125
| 84
| 0.666667
| 439
| 3,171
| 4.501139
| 0.200456
| 0.04251
| 0.061235
| 0.060729
| 0.547571
| 0.492915
| 0.47419
| 0.298583
| 0.219636
| 0.219636
| 0
| 0.000803
| 0.214443
| 3,171
| 95
| 85
| 33.378947
| 0.792453
| 0.186692
| 0
| 0.188679
| 0
| 0
| 0.081414
| 0.011914
| 0
| 0
| 0
| 0
| 0.226415
| 1
| 0.169811
| false
| 0
| 0.056604
| 0
| 0.283019
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
8853e415ffd0f52c5a2f8419a9bf5ebfef325883
| 2,678
|
py
|
Python
|
examples/pytorch/dtgrnn/dataloading.py
|
ketyi/dgl
|
a1b859c29b63a673c148d13231a49504740e0e01
|
[
"Apache-2.0"
] | 9,516
|
2018-12-08T22:11:31.000Z
|
2022-03-31T13:04:33.000Z
|
examples/pytorch/dtgrnn/dataloading.py
|
ketyi/dgl
|
a1b859c29b63a673c148d13231a49504740e0e01
|
[
"Apache-2.0"
] | 2,494
|
2018-12-08T22:43:00.000Z
|
2022-03-31T21:16:27.000Z
|
examples/pytorch/dtgrnn/dataloading.py
|
ketyi/dgl
|
a1b859c29b63a673c148d13231a49504740e0e01
|
[
"Apache-2.0"
] | 2,529
|
2018-12-08T22:56:14.000Z
|
2022-03-31T13:07:41.000Z
|
import os
import ssl
from six.moves import urllib
import torch
import numpy as np
import dgl
from torch.utils.data import Dataset, DataLoader
def download_file(dataset):
    """Download `dataset` from the DGL S3 bucket into ./data/ (network I/O)."""
    print("Start Downloading data: {}".format(dataset))
    url = "https://s3.us-west-2.amazonaws.com/dgl-data/dataset/{}".format(
        dataset)
    print("Start Downloading File....")
    # NOTE(review): the unverified SSL context skips certificate checks —
    # confirm this is acceptable for the download host.
    context = ssl._create_unverified_context()
    response = urllib.request.urlopen(url, context=context)
    payload = response.read()
    with open("./data/{}".format(dataset), "wb") as handle:
        handle.write(payload)
class SnapShotDataset(Dataset):
    """Dataset of (x, y) snapshot pairs loaded from a .npz archive.

    Downloads `npz_file` into `path` on first use, then exposes the archive's
    'x' and 'y' arrays indexed along their first axis.
    """

    def __init__(self, path, npz_file):
        full_path = path + '/' + npz_file
        if not os.path.exists(full_path):
            # First use: create the target directory and fetch the archive.
            if not os.path.exists(path):
                os.mkdir(path)
            download_file(npz_file)
        archive = np.load(full_path)
        self.x = archive['x']
        self.y = archive['y']

    def __len__(self):
        return self.x.shape[0]

    def __getitem__(self, idx):
        # Accept torch tensor indices by converting them to plain Python.
        key = idx.tolist() if torch.is_tensor(idx) else idx
        return self.x[key, ...], self.y[key, ...]
def METR_LAGraphDataset():
    """Load (downloading on first use) the METR-LA traffic graph."""
    bin_path = 'data/graph_la.bin'
    if not os.path.exists(bin_path):
        if not os.path.exists('data'):
            os.mkdir('data')
        download_file('graph_la.bin')
    graphs, _ = dgl.load_graphs(bin_path)
    return graphs[0]
class METR_LATrainDataset(SnapShotDataset):
    """Training split of METR-LA; exposes mean/std of feature channel 0."""

    def __init__(self):
        super().__init__('data', 'metr_la_train.npz')
        # Normalization statistics computed over the first feature channel.
        self.mean = self.x[..., 0].mean()
        self.std = self.x[..., 0].std()
class METR_LATestDataset(SnapShotDataset):
    """Test split of METR-LA."""

    def __init__(self):
        super().__init__('data', 'metr_la_test.npz')
class METR_LAValidDataset(SnapShotDataset):
    """Validation split of METR-LA."""

    def __init__(self):
        super().__init__('data', 'metr_la_valid.npz')
def PEMS_BAYGraphDataset():
    """Load (downloading on first use) the PEMS-BAY traffic graph."""
    bin_path = 'data/graph_bay.bin'
    if not os.path.exists(bin_path):
        if not os.path.exists('data'):
            os.mkdir('data')
        download_file('graph_bay.bin')
    graphs, _ = dgl.load_graphs(bin_path)
    return graphs[0]
class PEMS_BAYTrainDataset(SnapShotDataset):
    """Training split of PEMS-BAY; exposes mean/std of feature channel 0."""

    def __init__(self):
        super().__init__('data', 'pems_bay_train.npz')
        # Normalization statistics computed over the first feature channel.
        self.mean = self.x[..., 0].mean()
        self.std = self.x[..., 0].std()
class PEMS_BAYTestDataset(SnapShotDataset):
    """Test split of PEMS-BAY."""

    def __init__(self):
        super().__init__('data', 'pems_bay_test.npz')
class PEMS_BAYValidDataset(SnapShotDataset):
    """Validation split of PEMS-BAY."""

    def __init__(self):
        super().__init__('data', 'pems_bay_valid.npz')
| 28.795699
| 78
| 0.639283
| 345
| 2,678
| 4.649275
| 0.249275
| 0.030549
| 0.048005
| 0.041147
| 0.445761
| 0.356608
| 0.225686
| 0.160848
| 0.160848
| 0.123441
| 0
| 0.003795
| 0.212845
| 2,678
| 92
| 79
| 29.108696
| 0.757116
| 0
| 0
| 0.231884
| 0
| 0
| 0.134055
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.173913
| false
| 0
| 0.101449
| 0.014493
| 0.434783
| 0.028986
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
88551bbfcb08a1b53c119a389c90207f6e61b6cd
| 1,552
|
py
|
Python
|
django_boto/tests.py
|
degerli/django-boto
|
930863b75c0f26eb10090a6802e16e1cf127b588
|
[
"MIT"
] | 54
|
2015-02-09T14:25:56.000Z
|
2021-09-03T21:11:29.000Z
|
django_boto/tests.py
|
degerli/django-boto
|
930863b75c0f26eb10090a6802e16e1cf127b588
|
[
"MIT"
] | 12
|
2015-01-10T06:39:56.000Z
|
2019-06-19T19:36:40.000Z
|
django_boto/tests.py
|
degerli/django-boto
|
930863b75c0f26eb10090a6802e16e1cf127b588
|
[
"MIT"
] | 18
|
2015-01-09T20:06:38.000Z
|
2019-02-22T12:33:44.000Z
|
# -*- coding: utf-8 -*-
import string
import random
import logging
import urllib2
from os import path
from django.test import TestCase
from django.core.files.base import ContentFile
from s3 import upload
from s3.storage import S3Storage
from settings import BOTO_S3_BUCKET
logger = logging.getLogger(__name__)
local_path = path.realpath(path.dirname(__file__))
def get_string(lngth):
    """Return a random alphabetic string of length `lngth`.

    NOTE: this file targets Python 2 (`xrange`, `string.letters`).
    """
    return ''.join(random.choice(string.letters) for _ in xrange(lngth))
class BotoTest(TestCase):
    """
    Testing Amazon S3.
    """

    def test_storage(self):
        """
        Storage testing.

        End-to-end check against the live S3 bucket: upload a random file,
        verify existence, URL, content and size via both HTTP and the
        storage API, then delete it. Requires network access and valid
        AWS credentials.
        """
        text = ''
        storage = S3Storage(host='s3.amazonaws.com')
        # Random payload and file name so repeated runs do not collide.
        file_length = random.randrange(300, 1300)
        text = get_string(file_length)
        filename_length = random.randrange(5, 12)
        filename = get_string(filename_length)
        self.assertFalse(storage.exists(filename))
        test_file = ContentFile(text)
        test_file.name = filename
        uploaded_url = upload(test_file, host='s3.amazonaws.com')
        self.assertTrue(storage.exists(filename))
        # The public URL is derived from the configured bucket name.
        url = 'http://' + BOTO_S3_BUCKET + '.s3.amazonaws.com/' + filename
        self.assertEqual(uploaded_url, url)
        # Fetch over plain HTTP and compare with what was uploaded.
        page = urllib2.urlopen(uploaded_url)
        self.assertEqual(text, page.read())
        self.assertEqual(len(text), storage.size(filename))
        self.assertEqual(url, storage.url(filename))
        # Clean up and confirm deletion.
        storage.delete(filename)
        self.assertFalse(storage.exists(filename))
| 23.164179
| 74
| 0.661727
| 183
| 1,552
| 5.464481
| 0.393443
| 0.06
| 0.042
| 0.036
| 0.072
| 0
| 0
| 0
| 0
| 0
| 0
| 0.019247
| 0.230026
| 1,552
| 66
| 75
| 23.515152
| 0.817573
| 0.037371
| 0
| 0.052632
| 0
| 0
| 0.039175
| 0
| 0
| 0
| 0
| 0
| 0.184211
| 1
| 0.052632
| false
| 0
| 0.263158
| 0
| 0.368421
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
8857049e6802ce1ea80b578b8e1834b184a88a8c
| 4,060
|
py
|
Python
|
src/roswire/ros1/bag/player.py
|
ChrisTimperley/roswire
|
3220583305dc3e90b8cf0a7653cbc1b9c7fdb83b
|
[
"Apache-2.0"
] | 4
|
2019-09-22T18:38:33.000Z
|
2021-04-02T01:37:10.000Z
|
src/roswire/ros1/bag/player.py
|
ChrisTimperley/roswire
|
3220583305dc3e90b8cf0a7653cbc1b9c7fdb83b
|
[
"Apache-2.0"
] | 208
|
2019-03-27T18:34:39.000Z
|
2021-07-26T20:36:07.000Z
|
src/roswire/ros1/bag/player.py
|
ChrisTimperley/roswire
|
3220583305dc3e90b8cf0a7653cbc1b9c7fdb83b
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
# http://wiki.ros.org/Bags/Format/2.0
__all__ = ("BagPlayer",)
import subprocess
import threading
from types import TracebackType
from typing import Optional, Type
import dockerblade
from loguru import logger
from ... import exceptions
class BagPlayer:
    """Plays back a ROS bag inside a container via ``rosbag play``.

    Parameters
    ----------
    fn_container: str
        path of the bag file inside the container.
    shell: dockerblade.Shell
        shell used to launch the playback process.
    files: dockerblade.FileSystem
        filesystem used to delete the bag after use (when requested).
    delete_file_after_use: bool
        if True, the bag file is removed when playback stops.
    """

    def __init__(
        self,
        fn_container: str,
        shell: dockerblade.Shell,
        files: dockerblade.FileSystem,
        *,
        delete_file_after_use: bool = False,
    ) -> None:
        # Serializes start()/stop() against concurrent callers.
        self.__lock = threading.Lock()
        self.__fn_container = fn_container
        self.__shell = shell
        self.__files = files
        self.__delete_file_after_use = delete_file_after_use
        self.__started = False
        self.__stopped = False
        self._process: Optional[dockerblade.popen.Popen] = None

    @property
    def started(self) -> bool:
        """Indicates whether or not playback has started."""
        return self.__started

    @property
    def stopped(self) -> bool:
        """Indicates whether or not playback has stopped."""
        return self.__stopped

    def __enter__(self) -> "BagPlayer":
        self.start()
        return self

    def __exit__(
        self,
        ex_type: Optional[Type[BaseException]],
        ex_val: Optional[BaseException],
        ex_tb: Optional[TracebackType],
    ) -> None:
        if ex_type is not None:
            # Bug fix: loguru has no stdlib-style ``exc_info`` kwarg — it was
            # silently ignored and the traceback lost. Attach the exception
            # via ``opt(exception=...)`` so it is actually logged.
            logger.opt(exception=(ex_type, ex_val, ex_tb)).error(
                "error occurred during bag playback"
            )
        if not self.stopped:
            self.stop()

    def finished(self) -> bool:
        """Checks whether playback has completed."""
        p = self._process
        return p.finished if p else False

    def wait(self, time_limit: Optional[float] = None) -> None:
        """Blocks until playback has finished.

        Parameters
        ----------
        time_limit: Optional[float] = None
            an optional time limit.

        Raises
        ------
        PlayerTimeout:
            if playback did not finish within the provided timeout.
        PlayerFailure:
            if an unexpected occurred during playback.
        """
        assert self._process
        try:
            self._process.wait(time_limit)
            retcode = self._process.returncode
            assert retcode is not None
            if retcode != 0:
                # Non-zero exit: surface the process output in the failure.
                out = "\n".join(self._process.stream)  # type: ignore
                raise exceptions.PlayerFailure(retcode, out)
        except subprocess.TimeoutExpired as error:
            raise exceptions.PlayerTimeout from error

    def start(self) -> None:
        """Starts playback from the bag.

        Raises
        ------
        PlayerAlreadyStarted:
            if the player has already started.
        """
        logger.debug("starting bag playback")
        with self.__lock:
            if self.__started:
                raise exceptions.PlayerAlreadyStarted
            self.__started = True
            # -q suppresses rosbag's progress output.
            command: str = f"rosbag play -q {self.__fn_container}"
            self._process = self.__shell.popen(
                command, stdout=False, stderr=False
            )
            logger.debug("started bag playback")

    def stop(self) -> None:
        """Stops playback from the bag.

        Raises
        ------
        PlayerAlreadyStopped:
            if the player has already been stopped.
        PlayerNotStarted:
            if playback was never started.
        """
        logger.debug("stopping bag playback")
        with self.__lock:
            if self.__stopped:
                raise exceptions.PlayerAlreadyStopped
            if not self.__started:
                raise exceptions.PlayerNotStarted
            assert self._process
            self._process.kill()
            out = "\n".join(list(self._process.stream))  # type: ignore
            # Bug fix: loguru formats with ``{}`` placeholders, not ``%s``;
            # the original logged the literal text "player output:\n%s".
            logger.debug("player output:\n{}", out)
            self._process = None
            if self.__delete_file_after_use:
                self.__files.remove(self.__fn_container)
            self.__stopped = True
        logger.debug("stopped bag playback")
| 29.852941
| 71
| 0.578818
| 422
| 4,060
| 5.338863
| 0.315166
| 0.053706
| 0.026631
| 0.031957
| 0.179316
| 0.061252
| 0.061252
| 0.035508
| 0
| 0
| 0
| 0.001472
| 0.330788
| 4,060
| 135
| 72
| 30.074074
| 0.827751
| 0.174138
| 0
| 0.113636
| 0
| 0
| 0.060991
| 0.006671
| 0
| 0
| 0
| 0
| 0.034091
| 1
| 0.102273
| false
| 0
| 0.079545
| 0
| 0.238636
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
885d4d75f5722e68e64b97b960999165b69c5ecc
| 1,244
|
py
|
Python
|
src/data processing - clinical notes and structured data/step5_note_level_tagging.py
|
arjun-parthi/SSRI-Project
|
62f610a594e5849ccf0f3c25cd6adcd63888ec2a
|
[
"MIT"
] | 2
|
2019-02-12T00:37:37.000Z
|
2021-03-25T05:40:06.000Z
|
src/data processing - clinical notes and structured data/step5_note_level_tagging.py
|
arjun-parthi/SSRI-Project
|
62f610a594e5849ccf0f3c25cd6adcd63888ec2a
|
[
"MIT"
] | null | null | null |
src/data processing - clinical notes and structured data/step5_note_level_tagging.py
|
arjun-parthi/SSRI-Project
|
62f610a594e5849ccf0f3c25cd6adcd63888ec2a
|
[
"MIT"
] | 1
|
2021-03-25T05:40:17.000Z
|
2021-03-25T05:40:17.000Z
|
import pandas as pd
import numpy as np
from collections import Counter
# Load negex output: one row per tagged text snippet within a clinical note.
data = pd.read_csv('out/negex_all.txt', sep="\t", header=None)
print(data.shape)
data.columns = ['PAT_DEID','NOTE_DEID','NOTE_DATE','ENCOUNTER_DATE','NOTE_CODE','TEXT_SNIPPET','lower_text','STATUS']
# Collapse snippet-level rows to one row per note: statuses joined by commas,
# snippets joined by a ' ##### ' separator.
df = data.groupby(['PAT_DEID','NOTE_DEID','NOTE_DATE','ENCOUNTER_DATE','NOTE_CODE'])['STATUS'].apply(','.join).reset_index()
df_text = data.groupby(['PAT_DEID','NOTE_DEID','NOTE_DATE','ENCOUNTER_DATE','NOTE_CODE'])['TEXT_SNIPPET'].apply(' ##### '.join).reset_index()
df_text_required = df_text[['NOTE_DEID','TEXT_SNIPPET']]
# Re-attach the concatenated snippet text to the per-note status rows.
df_fin = pd.merge(df, df_text_required, on='NOTE_DEID', how='inner')
df1 = df_fin.copy()
def check(l):
    """Majority vote over a row's comma-separated STATUS tags.

    l: row-like mapping with a 'STATUS' entry such as 'affirmed,negated,...'.
    Returns "Affirmed" when affirmed tags are at least as frequent as negated
    ones (ties count as affirmed), otherwise "Negated".
    """
    counts = Counter(l['STATUS'].split(','))
    if counts['negated'] > counts['affirmed']:
        return "Negated"
    return "Affirmed"
def majority_rule(var1, var2):
    """Write a majority-vote label column `var2` derived from column `var1`.

    Bug fix: the original ignored `var1` entirely (check() hard-codes the
    'STATUS' key); the source column is now honored, which is backward
    compatible with the existing call majority_rule('STATUS','STATUS_FINAL').

    NOTE(review): still reads and mutates the module-level `df`, mirroring
    the original's reliance on global state.
    """
    df[var2] = df.apply(lambda row: check({'STATUS': row[var1]}), axis=1)
    return df
# Derive the final note-level tag, re-attach the snippet text, and persist.
# NOTE(review): majority_rule operates on the module-level `df`, not on
# df1/df_fin — confirm this is the intended frame.
df1 = majority_rule('STATUS','STATUS_FINAL')
print(df1.shape)
df2 = pd.merge(df1, df_text_required, on='NOTE_DEID', how='inner')
df2.to_pickle("out/annotated_note_all.pkl")
| 34.555556
| 141
| 0.676045
| 186
| 1,244
| 4.295699
| 0.397849
| 0.060075
| 0.041302
| 0.05632
| 0.362954
| 0.362954
| 0.300375
| 0.300375
| 0.220275
| 0.220275
| 0
| 0.013825
| 0.127814
| 1,244
| 35
| 142
| 35.542857
| 0.722581
| 0.039389
| 0
| 0
| 0
| 0
| 0.293624
| 0.021812
| 0
| 0
| 0
| 0
| 0
| 1
| 0.074074
| false
| 0
| 0.111111
| 0
| 0.296296
| 0.074074
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
885d8583c03a1a8044c9ab014f78fb40213a58b5
| 821
|
py
|
Python
|
app.py
|
shaungarwood/co-voter-db
|
bcbc0d46459cc9913ed318b32b284a4139c75b74
|
[
"MIT"
] | null | null | null |
app.py
|
shaungarwood/co-voter-db
|
bcbc0d46459cc9913ed318b32b284a4139c75b74
|
[
"MIT"
] | null | null | null |
app.py
|
shaungarwood/co-voter-db
|
bcbc0d46459cc9913ed318b32b284a4139c75b74
|
[
"MIT"
] | null | null | null |
from flask import Flask
from flask import request
from flask import jsonify
from os import environ
import query
app = Flask(__name__)
# MongoDB connection settings come from the environment, falling back to
# local-development defaults when unset.
if 'MONGODB_HOST' in environ:
    mongodb_host = environ['MONGODB_HOST']
else:
    mongodb_host = "localhost"
if 'MONGODB_PORT' in environ:
    mongodb_port = environ['MONGODB_PORT']
else:
    mongodb_port = "27017"
# Shared voter-record query helper used by the request handlers below.
vr = query.VoterRecords(mongodb_host, mongodb_port)
@app.route('/search')
def search():
    """Handle /search?q=... by delegating to the voter-record query engine.

    Returns the query result as application/json, or a plain message with
    status 200 when no query parameter was supplied.
    """
    if request.args and 'q' in request.args:
        search_string = request.args['q']
        res = vr.determine_query_type(search_string)
        resp = app.make_response(res)
        resp.mimetype = 'application/json'
        # Bug fix: the original returned jsonify(resp), which tries to
        # JSON-serialize a Response object and raises TypeError at request
        # time; return the prepared response directly.
        return resp
    else:
        return "No query data received", 200
# Run the development server when executed directly (binds all interfaces).
if __name__ == '__main__':
    app.run(debug=False, host='0.0.0.0')
| 21.605263
| 52
| 0.686967
| 112
| 821
| 4.794643
| 0.410714
| 0.102421
| 0.083799
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.018405
| 0.205847
| 821
| 37
| 53
| 22.189189
| 0.805215
| 0
| 0
| 0.111111
| 0
| 0
| 0.151035
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.037037
| false
| 0
| 0.185185
| 0
| 0.296296
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
886191b83cc6306a7a234ebf3e4730d225e73536
| 692
|
py
|
Python
|
100-clean_web_static.py
|
cbarros7/AirBnB_clone_v2
|
b25d8facc07ac5be2092a9f6214d1ef8c32ce60e
|
[
"MIT"
] | null | null | null |
100-clean_web_static.py
|
cbarros7/AirBnB_clone_v2
|
b25d8facc07ac5be2092a9f6214d1ef8c32ce60e
|
[
"MIT"
] | null | null | null |
100-clean_web_static.py
|
cbarros7/AirBnB_clone_v2
|
b25d8facc07ac5be2092a9f6214d1ef8c32ce60e
|
[
"MIT"
] | 1
|
2021-08-11T05:20:27.000Z
|
2021-08-11T05:20:27.000Z
|
#!/usr/bin/python3
# Fabfile to delete out-of-date archives.
import os
from fabric.api import *
# Deployment targets for every fabric task defined in this file.
env.hosts = ['104.196.116.233', '54.165.130.77']
def do_clean(number=0):
    """Delete out-of-date archives locally and on the remote hosts.

    Args:
        number (int): how many of the most recent archives to keep.
            0 and 1 both keep only the single newest archive.
    """
    number = 1 if int(number) == 0 else int(number)

    # Local cleanup: keep the `number` newest archives in ./versions.
    # (Side-effect list comprehensions replaced with an explicit slice
    # delete and plain loops.)
    archives = sorted(os.listdir("versions"))
    del archives[-number:]
    with lcd("versions"):
        for archive in archives:
            local("rm ./{}".format(archive))

    # Remote cleanup: same policy against the deployed releases directory.
    with cd("/data/web_static/releases"):
        archives = run("ls -tr").split()
        archives = [a for a in archives if "web_static_" in a]
        del archives[-number:]
        for archive in archives:
            run("rm -rf ./{}".format(archive))
| 28.833333
| 62
| 0.606936
| 106
| 692
| 3.924528
| 0.54717
| 0.028846
| 0.036058
| 0.050481
| 0.382212
| 0.235577
| 0.134615
| 0
| 0
| 0
| 0
| 0.04797
| 0.216763
| 692
| 23
| 63
| 30.086957
| 0.719557
| 0.131503
| 0
| 0.142857
| 0
| 0
| 0.175084
| 0.042088
| 0
| 0
| 0
| 0
| 0
| 1
| 0.071429
| false
| 0
| 0.142857
| 0
| 0.214286
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
8863590d6524676746195e9a24531f9c96bd95d5
| 17,316
|
py
|
Python
|
dask/threaded.py
|
eriknw/dask
|
f654b47a61cbbddaf5d2f4d1a3e6e07373b86709
|
[
"BSD-3-Clause"
] | null | null | null |
dask/threaded.py
|
eriknw/dask
|
f654b47a61cbbddaf5d2f4d1a3e6e07373b86709
|
[
"BSD-3-Clause"
] | null | null | null |
dask/threaded.py
|
eriknw/dask
|
f654b47a61cbbddaf5d2f4d1a3e6e07373b86709
|
[
"BSD-3-Clause"
] | null | null | null |
"""
A threaded shared-memory scheduler for dask graphs.
This code is experimental and fairly ugly. It should probably be rewritten
before anyone really depends on it. It is very stateful and error-prone.
That being said, it is decently fast.
State
=====
Many functions pass around a ``state`` variable that holds the current state of
the computation. This variable consists of several other dictionaries and
sets, explained below.
Constant state
--------------
1. dependencies: {x: [a, b ,c]} a,b,c, must be run before x
2. dependents: {a: [x, y]} a must run before x or y
Changing state
--------------
### Data
1. cache: available concrete data. {key: actual-data}
2. released: data that we've seen, used, and released because it is no longer
needed
### Jobs
1. ready: A set of ready-to-run tasks
1. running: A set of tasks currently in execution
2. finished: A set of finished tasks
3. waiting: which tasks are still waiting on others :: {key: {keys}}
Real-time equivalent of dependencies
4. waiting_data: available data to yet-to-be-run-tasks :: {key: {keys}}
Real-time equivalent of dependents
Example
-------
>>> import pprint
>>> dsk = {'x': 1, 'y': 2, 'z': (inc, 'x'), 'w': (add, 'z', 'y')}
>>> pprint.pprint(start_state_from_dask(dsk)) # doctest: +NORMALIZE_WHITESPACE
{'cache': {'x': 1, 'y': 2},
'dependencies': {'w': set(['y', 'z']),
'x': set([]),
'y': set([]),
'z': set(['x'])},
'dependents': {'w': set([]),
'x': set(['z']),
'y': set(['w']),
'z': set(['w'])},
'finished': set([]),
'ready': set(['z']),
'released': set([]),
'running': set([]),
'waiting': {'w': set(['z'])},
'waiting_data': {'x': set(['z']),
'y': set(['w']),
'z': set(['w'])}}
Optimizations
=============
We build this scheduler with out-of-core array operations in mind. To this end
we have encoded some particular optimizations.
Compute to release data
-----------------------
When we choose a new task to execute we often have many options. Policies at
this stage are cheap and can significantly impact performance. One could
imagine policies that expose parallelism, drive towards a particular output,
etc.. Our current policy is the compute tasks that free up data resources.
See the functions ``choose_task`` and ``score`` for more information
Inlining computations
---------------------
We hold on to intermediate computations either in memory or on disk.
For very cheap computations that may emit new copies of the data, like
``np.transpose`` or possibly even ``x + 1`` we choose not to store these as
separate pieces of data / tasks. Instead we combine them with the computations
that require them. This may result in repeated computation but saves
significantly on space and computation complexity.
See the function ``inline`` for more information.
"""
from .core import istask, flatten, reverse_dict, get_dependencies, ishashable
from .utils import deepmap
from operator import add
from toolz import concat, partial
from multiprocessing.pool import ThreadPool
from .compatibility import Queue
from threading import Lock
import psutil
def inc(x):
    """Increment helper used by the doctest examples."""
    incremented = x + 1
    return incremented
def double(x):
    """Doubling helper used by the doctest examples."""
    doubled = x * 2
    return doubled
DEBUG = False
def start_state_from_dask(dsk, cache=None):
    """ Build the initial scheduler state for a dask graph.

    Returns a dict with the keys described in the module docstring:
    'dependencies', 'dependents', 'waiting', 'waiting_data', 'cache',
    'ready', 'running', 'finished' and 'released'.  Non-task values of
    ``dsk`` are seeded straight into ``cache``, and tasks whose
    dependencies are already satisfied start out in ``ready``.
    """
    cache = {} if cache is None else cache

    # Concrete (non-task) values are available immediately.
    for key, value in dsk.items():
        if not istask(value):
            cache[key] = value

    dependencies = {k: get_dependencies(dsk, k) for k in dsk}
    waiting = {k: v.copy() for k, v in dependencies.items() if v}
    dependents = reverse_dict(dependencies)

    # Anything already cached no longer blocks its dependents.
    for available in cache:
        for child in dependents[available]:
            waiting[child].remove(available)

    waiting_data = {k: v.copy() for k, v in dependents.items() if v}

    ready = {k for k, v in waiting.items() if not v}
    waiting = {k: v for k, v in waiting.items() if v}

    return {'dependencies': dependencies,
            'dependents': dependents,
            'waiting': waiting,
            'waiting_data': waiting_data,
            'cache': cache,
            'ready': ready,
            'running': set(),
            'finished': set(),
            'released': set()}
'''
Running tasks
-------------
When we execute tasks we both
1. Perform the actual work of collecting the appropriate data and calling the function
2. Manage administrative state to coordinate with the scheduler
'''
def _execute_task(arg, cache, dsk=None):
    """ Do the actual work of collecting data and executing a function

    Examples
    --------
    >>> cache = {'x': 1, 'y': 2}

    Compute tasks against a cache
    >>> _execute_task((add, 'x', 1), cache)  # Compute task in naive manner
    2
    >>> _execute_task((add, (inc, 'x'), 1), cache)  # Support nested computation
    3

    Also grab data from cache
    >>> _execute_task('x', cache)
    1

    Support nested lists
    >>> list(_execute_task(['x', 'y'], cache))
    [1, 2]
    >>> list(map(list, _execute_task([['x', 'y'], ['y', 'x']], cache)))
    [[1, 2], [2, 1]]

    >>> _execute_task('foo', cache)  # Passes through on non-keys
    'foo'
    """
    dsk = dsk or dict()
    if isinstance(arg, list):
        # Propagate dsk so nested list elements get the same
        # premature-deletion check as top-level arguments (the original
        # dropped dsk here).
        return (_execute_task(a, cache, dsk=dsk) for a in arg)
    elif istask(arg):
        func, args = arg[0], arg[1:]
        args2 = [_execute_task(a, cache, dsk=dsk) for a in args]
        return func(*args2)
    elif not ishashable(arg):
        return arg
    elif arg in cache:
        return cache[arg]
    elif arg in dsk:
        # The key exists in the graph but its data was already released.
        raise ValueError("Premature deletion of data. Key: %s" % str(arg))
    else:
        return arg
def execute_task(dsk, key, state, queue, results, lock):
    """
    Compute task and handle all administration

    Runs in a worker thread; the (key, task, result-or-exception,
    traceback) tuple is reported back to the scheduler via ``queue``.

    See also:
        _execute_task - actually execute task
    """
    # Pre-bind task so the except clause never hits an unbound local
    # (the original raised NameError if dsk[key] itself failed).
    task = None
    try:
        task = dsk[key]
        result = _execute_task(task, state['cache'], dsk=dsk)
        with lock:
            finish_task(dsk, key, result, state, results)
        result = key, task, result, None
    except Exception as e:
        import sys
        exc_type, exc_value, exc_traceback = sys.exc_info()
        result = key, task, e, exc_traceback
    queue.put(result)
    return
def finish_task(dsk, key, result, state, results):
    """
    Update execution state after a task finishes

    Mutates ``state``.  This should run atomically (with a lock).
    """
    state['cache'][key] = result
    if key in state['ready']:
        state['ready'].remove(key)

    # Each dependent now waits on one fewer task; those with nothing
    # left to wait on become ready.
    for dep in state['dependents'][key]:
        s = state['waiting'][dep]
        s.remove(key)
        if not s:
            del state['waiting'][dep]
            state['ready'].add(dep)

    # Data this task consumed may now be releasable if no other task
    # (and no requested result) still needs it.
    for dep in state['dependencies'][key]:
        if dep in state['waiting_data']:
            s = state['waiting_data'][dep]
            s.remove(key)
            if not s and dep not in results:
                if DEBUG:
                    from chest.core import nbytes
                    # Fixed: divide the summed byte count, not the map object
                    # (original `sum(map(...) / 1e6)` raised TypeError).
                    print("Key: %s\tDep: %s\t NBytes: %.2f\t Release" % (key, dep,
                        sum(map(nbytes, state['cache'].values())) / 1e6))
                assert dep in state['cache']
                release_data(dep, state)
                assert dep not in state['cache']
        elif dep in state['cache'] and dep not in results:
            release_data(dep, state)

    state['finished'].add(key)
    state['running'].remove(key)

    return state
def release_data(key, state):
    """ Remove data from temporary storage

    See Also
        finish_task
    """
    waiting_data = state['waiting_data']
    if key in waiting_data:
        # Nothing may still be waiting on this data when it is released.
        assert not waiting_data[key]
        del waiting_data[key]

    state['released'].add(key)
    del state['cache'][key]
def nested_get(ind, coll, lazy=False):
    """ Get nested index from collection

    When ``ind`` is a list, index each element (recursively) and return a
    tuple -- or a generator when ``lazy`` is true.

    Examples
    --------
    >>> nested_get(1, 'abc')
    'b'
    >>> nested_get([1, 0], 'abc')
    ('b', 'a')
    >>> nested_get([[1, 0], [0, 1]], 'abc')
    (('b', 'a'), ('a', 'b'))
    """
    if isinstance(ind, list):
        if lazy:
            return (nested_get(i, coll, lazy=lazy) for i in ind)
        else:
            # Removed an unreachable `return seq` that referenced an
            # undefined name.
            return tuple([nested_get(i, coll, lazy=lazy) for i in ind])
    else:
        return coll[ind]
'''
Task Selection
--------------
We often have a choice among many tasks to run next. This choice is both
cheap and can significantly impact performance.
Here we choose tasks that immediately free data resources.
'''
def score(key, state):
    """ Prefer to run tasks that remove need to hold on to data """
    wait = state['waiting_data']
    total = 0
    # Each dependency contributes more the closer it is to being freed.
    for dep in state['dependencies'][key]:
        total += 1. / len(wait[dep]) ** 2
    return total
def choose_task(state, score=score):
    """
    Select a task that maximizes scoring function

    Default scoring function selects tasks that free up the maximum number of
    resources.

    E.g. for ready tasks a, b with dependencies:

        {a: {x, y},
         b: {x, w}}

    and for data w, x, y, z waiting on the following tasks

        {w: {b, c}
         x: {a, b, c},
         y: {a}}

    We choose task a because it will completely free up resource y and
    partially free up resource x.  Task b only partially frees up resources x
    and w and completely frees none so it is given a lower score.

    See also:
        score
    """
    # Equivalent to partial(score, state=state) without the toolz helper.
    return max(state['ready'], key=lambda task: score(task, state=state))
'''
Inlining
--------
We join small cheap tasks on to others to avoid the creation of intermediaries.
'''
def inline(dsk, fast_functions=None):
    """ Inline cheap functions into larger operations

    Tasks whose head is one of ``fast_functions`` are folded into the
    tasks that depend on them, so no separate intermediate result is
    stored for them.

    >>> dsk = {'out': (add, 'i', 'd'),  # doctest: +SKIP
    ...        'i': (inc, 'x'),
    ...        'd': (double, 'y'),
    ...        'x': 1, 'y': 1}
    >>> inline(dsk, [inc])  # doctest: +SKIP
    {'out': (add, (inc, 'x'), 'd'),
     'd': (double, 'y'),
     'x': 1, 'y': 1}
    """
    if not fast_functions:
        return dsk

    dependencies = {k: get_dependencies(dsk, k) for k in dsk}
    dependents = reverse_dict(dependencies)

    def cheap(func):
        # Partials and curries expose the wrapped callable as .func
        return getattr(func, 'func', func) in fast_functions

    # Keep only keys that are outputs, plain data, or not cheap; their
    # values get cheap sub-tasks expanded in place.
    return {k: expand_value(dsk, fast_functions, k)
            for k, v in dsk.items()
            if not dependents[k]
            or not istask(v)
            or not cheap(v[0])}
def expand_key(dsk, fast, key):
    """Recursively replace references to cheap tasks with the tasks themselves.

    >>> dsk = {'out': (sum, ['i', 'd']),
    ...        'i': (inc, 'x'),
    ...        'd': (double, 'y'),
    ...        'x': 1, 'y': 1}
    >>> expand_key(dsk, [inc], 'd')
    'd'
    >>> expand_key(dsk, [inc], 'i')  # doctest: +SKIP
    (inc, 'x')
    >>> expand_key(dsk, [inc], ['i', 'd'])  # doctest: +SKIP
    [(inc, 'x'), 'd']
    """
    if isinstance(key, list):
        return [expand_key(dsk, fast, item) for item in key]

    def cheap(func):
        # Partials and curries expose the wrapped callable as .func
        return getattr(func, 'func', func) in fast

    if not ishashable(key):
        return key
    if key in dsk and istask(dsk[key]) and cheap(dsk[key][0]):
        task = dsk[key]
        expanded_args = tuple(expand_key(dsk, fast, k) for k in task[1:])
        return (task[0],) + expanded_args
    return key
def expand_value(dsk, fast, key):
    """Expand cheap sub-tasks inside the task stored at ``key``.

    >>> dsk = {'out': (sum, ['i', 'd']),
    ...        'i': (inc, 'x'),
    ...        'd': (double, 'y'),
    ...        'x': 1, 'y': 1}
    >>> expand_value(dsk, [inc], 'd')  # doctest: +SKIP
    (double, 'y')
    >>> expand_value(dsk, [inc], 'i')  # doctest: +SKIP
    (inc, 'x')
    >>> expand_value(dsk, [inc], 'out')  # doctest: +SKIP
    (sum, [(inc, 'x'), 'd'])
    """
    task = dsk[key]
    if not istask(task):
        return task
    head = task[0]
    expanded_args = tuple(expand_key(dsk, fast, arg) for arg in task[1:])
    return (head,) + expanded_args
'''
`get`
-----
The main function of the scheduler. Get is the main entry point.
'''
def get(dsk, result, nthreads=psutil.NUM_CPUS, cache=None, debug_counts=None, **kwargs):
    """ Threaded cached implementation of dask.get

    Parameters
    ----------
    dsk: dict
        A dask dictionary specifying a workflow
    result: key or list of keys
        Keys corresponding to desired data
    nthreads: integer of thread count
        The number of threads to use in the ThreadPool that will actually execute tasks
    cache: dict-like (optional)
        Temporary storage of results
    debug_counts: integer or None
        This integer tells how often the scheduler should dump debugging info

    Examples
    --------
    >>> dsk = {'x': 1, 'y': 2, 'z': (inc, 'x'), 'w': (add, 'z', 'y')}
    >>> get(dsk, 'w')
    4
    >>> get(dsk, ['w', 'y'])
    (4, 2)
    """
    # NOTE(review): psutil.NUM_CPUS was removed in psutil 2.0 -- confirm the
    # pinned psutil version, otherwise this default fails at import time.
    # ``**kwargs`` is accepted but ignored.
    if isinstance(result, list):
        result_flat = set(flatten(result))
    else:
        result_flat = set([result])
    # Keys the caller asked for; their data must never be released early.
    results = set(result_flat)

    pool = ThreadPool(nthreads)
    # Scheduler state (see start_state_from_dask) plus the queue on which
    # worker threads report finished tasks back to this loop.
    state = start_state_from_dask(dsk, cache=cache)
    queue = Queue()

    # lock for state dict updates
    # When a task completes, we need to update several things in the state dict.
    # To make sure the scheduler is in a safe state at all times, the state dict
    # needs to be updated by only one thread at a time.
    lock = Lock()
    # Mutable one-element list so the nested fire_task can update the counter.
    tick = [0]

    if not state['ready']:
        raise ValueError("Found no accessible jobs in dask")

    def fire_task():
        """ Fire off a task to the thread pool.

        Caller must hold ``lock`` (mutates state['ready']/state['running']).
        """
        # Update heartbeat
        tick[0] += 1
        # Emit visualization if called for
        if debug_counts and tick[0] % debug_counts == 0:
            visualize(dsk, state, filename='dask_%03d' % tick[0])
        # Choose a good task to compute
        key = choose_task(state)
        state['ready'].remove(key)
        state['running'].add(key)
        # Submit
        pool.apply_async(execute_task, args=[dsk, key, state, queue, results,
                                             lock])

    try:
        # Seed initial tasks into the thread pool
        with lock:
            while state['ready'] and len(state['running']) < nthreads:
                fire_task()

        # Main loop: wait on tasks to finish, insert new ones
        while state['waiting'] or state['ready'] or state['running']:
            key, finished_task, res, tb = queue.get()
            if isinstance(res, Exception):
                import traceback
                traceback.print_tb(tb)
                raise res
            with lock:
                while state['ready'] and len(state['running']) < nthreads:
                    fire_task()
    finally:
        # Clean up thread pool
        pool.close()
        pool.join()

    # Final reporting: drain any results still queued after the last task.
    while not queue.empty():
        key, finished_task, res, tb = queue.get()
        # print("Finished %s" % str(finished_task))

    if debug_counts:
        visualize(dsk, state, filename='dask_end')

    return nested_get(result, state['cache'])
'''
Debugging
---------
The threaded nature of this project presents challenges to normal unit-test
and debug workflows. Visualization of the execution state has value.
Our main mechanism is a visualization of the execution state as colors on our
normal dot graphs (see dot module).
'''
def visualize(dsk, state, filename='dask'):
    """ Visualize state of computation as dot graph """
    # Removed the unused ``dot_graph`` import; only the writer is needed.
    from dask.dot import write_networkx_to_dot
    g = state_to_networkx(dsk, state)
    write_networkx_to_dot(g, filename=filename)
def color_nodes(dsk, state):
    """Compute dot-graph color attributes for data and function nodes.

    gray = untouched, blue = released/finished, red = in cache/running.
    Returns (data_attributes, function_attributes).
    """
    # Everything starts gray; scheduler state overrides below.
    data = {key: {'color': 'gray'} for key in dsk}
    func = {key: {'color': 'gray'} for key in dsk}

    for key in state['released']:
        data[key] = {'color': 'blue'}
    for key in state['cache']:
        data[key] = {'color': 'red'}
    for key in state['finished']:
        func[key] = {'color': 'blue'}
    for key in state['running']:
        func[key] = {'color': 'red'}

    for key in dsk:
        data[key]['penwidth'] = 4
        func[key]['penwidth'] = 4

    return data, func
def state_to_networkx(dsk, state):
    """ Convert state to networkx for visualization

    See Also:
        visualize
    """
    from .dot import to_networkx
    data_attrs, func_attrs = color_nodes(dsk, state)
    return to_networkx(dsk, data_attributes=data_attrs,
                       function_attributes=func_attrs)
| 28.340426
| 88
| 0.569416
| 2,313
| 17,316
| 4.206658
| 0.194553
| 0.002878
| 0.003083
| 0.002467
| 0.221377
| 0.173381
| 0.141727
| 0.114388
| 0.101028
| 0.101028
| 0
| 0.006434
| 0.281994
| 17,316
| 610
| 89
| 28.386885
| 0.77616
| 0.433183
| 0
| 0.195455
| 0
| 0
| 0.069271
| 0
| 0
| 0
| 0
| 0
| 0.013636
| 1
| 0.090909
| false
| 0
| 0.059091
| 0.009091
| 0.290909
| 0.009091
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
8864cc357b1ab216b6ae36ff17356348ff1a4bee
| 6,163
|
py
|
Python
|
deprecated/test_01_job_cli.py
|
cloudmesh/cloudmesh-queue
|
8a299c8a4915916c9214d4b9e681da4a1b36bfd4
|
[
"Apache-2.0"
] | null | null | null |
deprecated/test_01_job_cli.py
|
cloudmesh/cloudmesh-queue
|
8a299c8a4915916c9214d4b9e681da4a1b36bfd4
|
[
"Apache-2.0"
] | 12
|
2020-12-18T09:57:49.000Z
|
2020-12-28T12:34:15.000Z
|
deprecated/test_01_job_cli.py
|
cloudmesh/cloudmesh-queue
|
8a299c8a4915916c9214d4b9e681da4a1b36bfd4
|
[
"Apache-2.0"
] | null | null | null |
###############################################################
# cms set host='juliet.futuresystems.org'
# cms set user=$USER
#
# pytest -v --capture=no tests/test_01_job_cli.py
# pytest -v tests/test_01_job_cli.py
# pytest -v --capture=no tests/test_01_job_cli.py::TestJob::<METHODNAME>
###############################################################
import pytest
from cloudmesh.common.Shell import Shell
from cloudmesh.common.debug import VERBOSE
from cloudmesh.common.util import HEADING
from cloudmesh.common.Benchmark import Benchmark
from cloudmesh.common.variables import Variables
from cloudmesh.configuration.Configuration import Configuration
from textwrap import dedent
from cloudmesh.common.util import path_expand
import oyaml as yaml
import re
import time
import getpass
# Module-level fixtures: enable benchmark debugging and resolve the jobset
# configuration file plus the remote host/user used by every test below.
Benchmark.debug()
variables = Variables()
print(variables)
variables["jobset"] = path_expand("./a.yaml")
configured_jobset = variables["jobset"]
# Fall back to public defaults when `cms set host` / `cms set user` was not run.
remote_host_ip = variables['host'] or 'juliet.futuresystems.org'
remote_host_user = variables['user'] or getpass.getuser()
@pytest.mark.incremental
class TestJob:
    """Incremental CLI tests for the ``cms job`` command group.

    NOTE(review): ``pytest.mark.incremental`` implies later tests depend on
    state created by earlier ones (jobs added, then run/killed/deleted), so
    the methods must execute in definition order.
    """

    def test_help(self):
        """`cms job help` prints usage information."""
        HEADING()
        Benchmark.Start()
        result = Shell.execute("cms job help", shell=True)
        Benchmark.Stop()
        VERBOSE(result)

        assert "Usage" in result
        assert "Description" in result

    def test_info(self):
        """`cms job info` reports the configured jobset file."""
        HEADING()
        Benchmark.Start()
        variables = Variables()
        configured_jobset = variables["jobset"]
        result = Shell.execute("cms job info", shell=True)
        Benchmark.Stop()
        VERBOSE(result)

        assert configured_jobset in result

    def test_template(self):
        """`cms job template` seeds job entries job1 and job2."""
        HEADING()
        Benchmark.Start()
        result = Shell.execute("cms job template --name='job[1-2]'", shell=True)
        Benchmark.Stop()
        VERBOSE(result)

        spec = Configuration(configured_jobset)
        assert spec['cloudmesh.jobset.hosts'] is not None
        jobs = spec['cloudmesh.jobset.jobs'].keys()
        assert 'job1' in jobs
        assert 'job2' in jobs

    def test_add_file(self):
        """`cms job add FILE` merges jobs from a YAML file into the jobset."""
        HEADING()
        # NOTE(review): the YAML is written to ../tests/other.yaml but the CLI
        # is invoked with plain 'other.yaml' -- verify the working-directory
        # assumption this relies on.
        job_str = dedent("""
            pytest_job:
              name: pytest_job
              directory: .
              ip: local
              input: ./data
              output: ./output/abcd
              status: ready
              gpu: ' '
              user: user
              arguments: -lisa
              executable: ls
              shell: bash
        """).strip()
        job = yaml.safe_load(job_str)
        with open('../tests/other.yaml', 'w') as fo:
            yaml.safe_dump(job, fo)
        Benchmark.Start()
        result = Shell.execute("cms job add 'other.yaml'", shell=True)
        Benchmark.Stop()
        VERBOSE(result)

        # Give the CLI time to persist the change before re-reading the spec.
        time.sleep(10)
        spec1 = Configuration(configured_jobset)
        jobs1 = spec1['cloudmesh.jobset.jobs'].keys()
        assert 'pytest_job' in jobs1

    def test_add_cli(self):
        """`cms job add` with flags creates job pytest_job1."""
        HEADING()
        Benchmark.Start()
        result = Shell.execute("cms job add --name='pytest_job1' "
                               f"--ip={remote_host_ip} "
                               "--executable='ls' "
                               "--arguments='-lisa' "
                               f"--user='{remote_host_user}' ",
                               shell=True)
        Benchmark.Stop()
        VERBOSE(result)

        spec = Configuration(configured_jobset)
        jobs = spec['cloudmesh.jobset.jobs'].keys()
        assert 'pytest_job1' in jobs

    def test_list(self):
        """`cms job list` shows one table row per configured job."""
        HEADING()
        Benchmark.Start()
        result = Shell.execute("cms job list", shell=True)
        Benchmark.Stop()
        # Count table rows of the form "| <number> |" in the CLI output.
        job_count_1 = len(re.findall(r"\|\s\d+\s+\|", result, re.MULTILINE))
        VERBOSE(result)

        spec = Configuration(configured_jobset)
        job_count_2 = len(spec['cloudmesh.jobset.jobs'].keys())
        assert job_count_1 == job_count_2

    def test_add_host(self):
        """`cms job hosts add` registers the remote execution host."""
        HEADING()
        # NOTE(review): Benchmark.Stop() is missing here, unlike the other
        # tests -- confirm whether that is intentional.
        Benchmark.Start()
        result = Shell.execute("cms job hosts add --hostname='juliet' "
                               f"--ip='{remote_host_ip}' "
                               "--cpu_count='12'", shell=True)
        VERBOSE(result)

        spec = Configuration(configured_jobset)
        host_list = spec['cloudmesh.jobset.hosts'].keys()
        assert 'juliet' in host_list

    def test_run(self):
        """`cms job run` submits pytest_job1 to a host."""
        HEADING()
        Benchmark.Start()
        result = Shell.execute("cms job run --name='pytest_job1'", shell=True)
        Benchmark.Stop()
        VERBOSE(result)

        # Give the submission time to be recorded in the jobset file.
        time.sleep(10)
        spec = Configuration(configured_jobset)
        job_status = spec['cloudmesh.jobset.jobs.pytest_job1.status']
        assert job_status == 'submitted'
        assert spec['cloudmesh.jobset.jobs.pytest_job1.submitted_to_ip'] \
               is not None

    def test_kill(self):
        """`cms job kill` stops the submitted job."""
        HEADING()
        Benchmark.Start()
        result = Shell.execute("cms job kill --name='pytest_job1'", shell=True)
        Benchmark.Stop()
        VERBOSE(result)

        time.sleep(10)
        spec = Configuration(configured_jobset)
        job_status = spec['cloudmesh.jobset.jobs.pytest_job1.status']
        assert job_status == 'killed'

    def test_reset(self):
        """`cms job reset` returns the job to the ready state."""
        HEADING()
        Benchmark.Start()
        result = Shell.execute("cms job reset --name='pytest_job1'", shell=True)
        Benchmark.Stop()
        VERBOSE(result)

        time.sleep(5)
        spec = Configuration(configured_jobset)
        job_status = spec['cloudmesh.jobset.jobs.pytest_job1.status']
        assert job_status == 'ready'

    def test_delete(self):
        """`cms job delete` removes the job from the jobset."""
        HEADING()
        Benchmark.Start()
        result = Shell.execute("cms job delete --name='pytest_job1'",
                               shell=True)
        Benchmark.Stop()
        VERBOSE(result)

        time.sleep(5)
        spec = Configuration(configured_jobset)
        jobs = spec['cloudmesh.jobset.jobs'].keys()
        assert 'pytest_job1' not in jobs

    def test_benchmark(self):
        """Emit the collected benchmark timings as CSV."""
        HEADING()
        Benchmark.print(csv=True)
| 28.013636
| 80
| 0.577154
| 668
| 6,163
| 5.197605
| 0.193114
| 0.0553
| 0.063364
| 0.066532
| 0.530818
| 0.497696
| 0.442108
| 0.407834
| 0.387673
| 0.247408
| 0
| 0.008945
| 0.292552
| 6,163
| 219
| 81
| 28.141553
| 0.787385
| 0.034723
| 0
| 0.43949
| 0
| 0
| 0.220426
| 0.071011
| 0
| 0
| 0
| 0
| 0.095541
| 1
| 0.076433
| false
| 0.012739
| 0.082803
| 0
| 0.165605
| 0.012739
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
88663926b411e82cb276e8ee0d40df6d2b4d5fe4
| 3,370
|
py
|
Python
|
source/pysqlizer-cli.py
|
slafi/pysqlizer
|
871ad922d42fd99a59dd33091ea3eaa4406542b4
|
[
"MIT"
] | null | null | null |
source/pysqlizer-cli.py
|
slafi/pysqlizer
|
871ad922d42fd99a59dd33091ea3eaa4406542b4
|
[
"MIT"
] | null | null | null |
source/pysqlizer-cli.py
|
slafi/pysqlizer
|
871ad922d42fd99a59dd33091ea3eaa4406542b4
|
[
"MIT"
] | 1
|
2020-01-05T05:36:58.000Z
|
2020-01-05T05:36:58.000Z
|
import argparse
import time
from pathlib import Path
from logger import get_logger
from csv_reader import CSVReader
from utils import infer_type, clear_console
from sql_generator import SQLGenerator
if __name__ == "__main__":
## Clear console
clear_console()
## get logger
logger = get_logger('pysqlizer')
# Parse command line arguments
parser = argparse.ArgumentParser()
parser.add_argument('-i', '--input', type=str, default='', help='Input CSV filename', metavar='infile', required=True)
parser.add_argument('-o', '--output', type=str, default='', help='Output SQL filename', metavar='outfile')
parser.add_argument('-t', '--table_name', type=str, default='', help='SQL table name', metavar='tname')
parser.add_argument('-d', '--db_name', type=str, default='', help='SQL database name', metavar='dbname')
parser.add_argument('-s', '--delimiter', type=str, default='', help='CSV file delimiter', metavar='delimiter')
parser.add_argument('-v', '--version', help='Show the program version', action='version', version='%(prog)s 1.0')
args = parser.parse_args()
#print(args)
logger.info('Starting PySQLizer...')
# Get arguments
input_file = args.input
output_file = args.output
table_name = args.table_name
database_name = args.db_name
delimiter = args.delimiter if args.delimiter else ','
## Check input file (type, existence and extension)
infile = Path(input_file)
if infile.is_dir():
logger.error('The file {} is a directory!'.format(input_file))
quit()
if not infile.exists():
logger.debug('The file {} does not exist!'.format(input_file))
quit()
if not infile.suffix.lower() == '.csv':
logger.error('The extension of the file {} is not CSV!'.format(input_file))
quit()
if output_file == '':
output_file = infile.stem
if table_name == '':
table_name = 'tname'
try:
logger.info('Reading CSV file: {}'.format(input_file))
start_time = time.perf_counter()
## Create CSV reader instance
csv_reader = CSVReader(input_file)
csv_reader.read_file(delimiter=delimiter)
csv_reader.extract_header_fields()
csv_reader.check_data_sanity()
end_time = time.perf_counter()
logger.info('Elapsed time: {}s'.format(end_time-start_time))
logger.info('Generating SQL instructions...')
start_time = time.perf_counter()
## Create SQL generator instance
sql_generator = SQLGenerator()
table_query = sql_generator.create_sql_table(table_name=table_name, columns=csv_reader.keys, db_name=database_name)
insert_query = sql_generator.insert_data(tablename=table_name, columns=csv_reader.keys, data=csv_reader.data)
end_time = time.perf_counter()
logger.info('Elapsed time: {}s'.format(end_time-start_time))
logger.info('Saving SQL file: {}'.format(output_file + '.sql'))
start_time = time.perf_counter()
sql_generator.save_sql_file(filename=output_file, table_structure_query=table_query, insert_query=insert_query)
end_time = time.perf_counter()
logger.info('Elapsed time: {}s'.format(end_time-start_time))
except Exception as e:
logger.error('{}'.format(e.args))
| 35.104167
| 123
| 0.663205
| 429
| 3,370
| 4.990676
| 0.272727
| 0.037833
| 0.047641
| 0.053246
| 0.229332
| 0.208314
| 0.129846
| 0.101822
| 0.101822
| 0.101822
| 0
| 0.000747
| 0.205638
| 3,370
| 95
| 124
| 35.473684
| 0.799029
| 0.054599
| 0
| 0.196721
| 0
| 0
| 0.156999
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.114754
| 0
| 0.114754
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
88672f1ee1e8b7ba396e5278ca480986acfefed4
| 854
|
py
|
Python
|
MergeIntervals56.py
|
Bit64L/LeetCode-Python-
|
64847cbb1adcaca4561b949e8acc52e8e031a6cb
|
[
"MIT"
] | null | null | null |
MergeIntervals56.py
|
Bit64L/LeetCode-Python-
|
64847cbb1adcaca4561b949e8acc52e8e031a6cb
|
[
"MIT"
] | null | null | null |
MergeIntervals56.py
|
Bit64L/LeetCode-Python-
|
64847cbb1adcaca4561b949e8acc52e8e031a6cb
|
[
"MIT"
] | null | null | null |
# Definition for an interval.
class Interval(object):
    """Closed interval with a start and an end point."""

    def __init__(self, s=0, e=0):
        # Defaults give the degenerate interval [0, 0].
        self.start = s
        self.end = e
class Solution(object):
    def merge(self, intervals):
        """Merge overlapping intervals (LeetCode 56).

        :type intervals: List[Interval]
        :rtype: List[Interval]

        Unlike the original, the caller's list is not mutated (no in-place
        ``sort`` or O(n) ``pop(0)``): we iterate a sorted copy.  Interval
        objects in the result may still have their ``end`` extended when
        they absorb an overlapping neighbour.
        """
        if not intervals:
            return []

        # Sort a copy by start so overlapping intervals are adjacent.
        ordered = sorted(intervals, key=lambda iv: iv.start)
        merged = [ordered[0]]
        for interval in ordered[1:]:
            last = merged[-1]
            if interval.start <= last.end:
                # Overlap: widen the last merged interval if needed.
                if interval.end > last.end:
                    last.end = interval.end
            else:
                merged.append(interval)
        return merged
# Quick manual check: [1, 4] and [2, 3] overlap, so a single interval remains.
solution = Solution()
ans = solution.merge([Interval(1, 4), Interval(2, 3)])
for merged in ans:
    print(merged.start, merged.end)
| 24.4
| 54
| 0.529274
| 103
| 854
| 4.349515
| 0.407767
| 0.046875
| 0.044643
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.016334
| 0.354801
| 854
| 34
| 55
| 25.117647
| 0.796733
| 0.09719
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.086957
| false
| 0
| 0
| 0
| 0.26087
| 0.043478
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
88681b8ce61bdcea470b1b26564a91d9e24035aa
| 221
|
py
|
Python
|
Training/mangement_system.py
|
Orleanslindsay/Python_Programming
|
dacc08090e9ebf9eb43aec127ee3e2e3cdcb4f55
|
[
"MIT"
] | 1
|
2021-08-16T10:25:01.000Z
|
2021-08-16T10:25:01.000Z
|
Training/mangement_system.py
|
Orleanslindsay/Python_Programming
|
dacc08090e9ebf9eb43aec127ee3e2e3cdcb4f55
|
[
"MIT"
] | null | null | null |
Training/mangement_system.py
|
Orleanslindsay/Python_Programming
|
dacc08090e9ebf9eb43aec127ee3e2e3cdcb4f55
|
[
"MIT"
] | null | null | null |
from tkinter import *
import mariadb
# Build the main window.
root = Tk()
root.title('SCHOOL MANAGEMENT')
root.geometry("900x700")

# Create 19 entry fields on every other grid row.  Keep references to the
# widgets so their contents can be read later -- the original assigned the
# None returned by grid() and discarded every Entry.
entries = []
counter = 2
for _ in range(1, 20):
    entry = Entry(root)
    entry.grid(row=counter, column=0)
    entries.append(entry)
    counter += 2

root.mainloop()
| 13
| 45
| 0.710407
| 34
| 221
| 4.617647
| 0.764706
| 0.101911
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.063158
| 0.140271
| 221
| 17
| 46
| 13
| 0.763158
| 0
| 0
| 0
| 0
| 0
| 0.108108
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.2
| 0
| 0.2
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
88688860861603e2b3b947fdc9d58f769c86e31d
| 2,742
|
py
|
Python
|
FlopyAdapter/MtPackages/SftAdapter.py
|
inowas/InowasFlopyAdapter
|
43ddf223778693ea5e7651d7a55bef56deff0ad5
|
[
"MIT"
] | null | null | null |
FlopyAdapter/MtPackages/SftAdapter.py
|
inowas/InowasFlopyAdapter
|
43ddf223778693ea5e7651d7a55bef56deff0ad5
|
[
"MIT"
] | null | null | null |
FlopyAdapter/MtPackages/SftAdapter.py
|
inowas/InowasFlopyAdapter
|
43ddf223778693ea5e7651d7a55bef56deff0ad5
|
[
"MIT"
] | 1
|
2020-09-27T23:26:14.000Z
|
2020-09-27T23:26:14.000Z
|
import flopy.mt3d as mt
class SftAdapter:
    """Adapter that builds a flopy ``Mt3dSft`` (stream flow transport)
    package from a plain option dict, filling unspecified keys with
    defaults."""

    # Raw user-supplied package configuration.
    _data = None

    def __init__(self, data):
        self._data = data

    def validate(self):
        """Validate the raw data (not implemented yet)."""
        # should be implemented
        # for key in content:
        # do something
        # return some hints
        pass

    def is_valid(self):
        """Report whether the raw data is valid (always True for now)."""
        # should be implemented
        # for key in content:
        # do something
        # return true or false
        return True

    def merge(self):
        """Merge user data over the defaults and return the combined dict.

        Stress-period data given as a list is normalised to a dict keyed
        by stress-period index (see ``to_dict``).
        """
        default = self.default()
        for key in self._data:
            if key == 'sf_stress_period_data':
                default[key] = self.to_dict(self._data[key])
                continue
            default[key] = self._data[key]
        return default

    def to_dict(self, data):
        """Convert a list of stress-period records to ``{index: record}``.

        Non-list input is passed through unchanged.
        """
        # isinstance (rather than ``type(data) == list``) also accepts
        # list subclasses.
        if isinstance(data, list):
            return {period: record for period, record in enumerate(data)}
        return data

    def get_package(self, _mt):
        """Instantiate the flopy Mt3dSft package from the merged options."""
        content = self.merge()
        return mt.Mt3dSft(
            _mt,
            **content
        )

    @staticmethod
    def default():
        """Default Mt3dSft keyword arguments.

        Presumably mirrors flopy's own defaults -- TODO confirm against
        the installed flopy version.
        """
        default = {
            "nsfinit": 0,
            "mxsfbc": 0,
            "icbcsf": 0,
            "ioutobs": None,
            "ietsfr": 0,
            "isfsolv": 1,
            "wimp": 0.5,
            "wups": 1.0,
            "cclosesf": 1e-06,
            "mxitersf": 10,
            "crntsf": 1.0,
            "iprtxmd": 0,
            "coldsf": 0.0,
            "dispsf": 0.0,
            "nobssf": 0,
            "obs_sf": None,
            "sf_stress_period_data": None,
            "unitnumber": None,
            "filenames": None,
            "dtype": None,
            "extension": 'sft'
        }
        return default

    @staticmethod
    def read_package(package):
        """Extract the adapter's option dict back out of an existing package."""
        content = {
            "nsfinit": package.nsfinit,
            "mxsfbc": package.mxsfbc,
            "icbcsf": package.icbcsf,
            "ioutobs": package.ioutobs,
            "ietsfr": package.ietsfr,
            "isfsolv": package.isfsolv,
            "wimp": package.wimp,
            "wups": package.wups,
            "cclosesf": package.cclosesf,
            "mxitersf": package.mxitersf,
            "crntsf": package.crntsf,
            "iprtxmd": package.iprtxmd,
            "coldsf": package.coldsf,
            "dispsf": package.dispsf,
            "nobssf": package.nobssf,
            "obs_sf": package.obs_sf,
            "sf_stress_period_data": package.sf_stress_period_data,
            "unitnumber": package.unitnumber,
            "filenames": package.filenames,
            "dtype": package.dtype,
            "extension": package.extension
        }
        return content
| 26.882353
| 67
| 0.486871
| 262
| 2,742
| 4.958015
| 0.293893
| 0.036952
| 0.04311
| 0.055427
| 0.084681
| 0.084681
| 0.084681
| 0.084681
| 0.084681
| 0.084681
| 0
| 0.014715
| 0.405179
| 2,742
| 101
| 68
| 27.148515
| 0.781729
| 0.056893
| 0
| 0.04878
| 0
| 0
| 0.128832
| 0.024447
| 0
| 0
| 0
| 0
| 0
| 1
| 0.097561
| false
| 0.012195
| 0.012195
| 0.012195
| 0.219512
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
8869c83d02e1a922baaa9130b61848763de1897f
| 2,089
|
py
|
Python
|
src/payoff_landscape.py
|
khozzy/phd
|
9a05572a6960d948320669c51e0c80bb9d037d4a
|
[
"CC-BY-4.0"
] | null | null | null |
src/payoff_landscape.py
|
khozzy/phd
|
9a05572a6960d948320669c51e0c80bb9d037d4a
|
[
"CC-BY-4.0"
] | null | null | null |
src/payoff_landscape.py
|
khozzy/phd
|
9a05572a6960d948320669c51e0c80bb9d037d4a
|
[
"CC-BY-4.0"
] | null | null | null |
import matplotlib.pyplot as plt
from matplotlib.ticker import MultipleLocator, FormatStrFormatter
from collections import namedtuple
from typing import Dict
from src.visualization import diminishing_reward_colors, PLOT_DPI
StateAction = namedtuple('StateAction', 'id state action')


def get_all_state_action(state_to_actions):
    """Flatten a {state: actions} mapping into StateAction tuples.

    States are numbered consecutively starting at 1; a state with an
    empty action collection is skipped and does not consume an id.
    All actions of one state share that state's id.
    """
    pairs = []
    next_id = 1
    for state, actions in state_to_actions.items():
        if not actions:
            continue
        pairs.extend(StateAction(next_id, state, action) for action in actions)
        next_id += 1
    return pairs
def plot_payoff_landscape(payoffs: Dict, rho: float, rho_text_location, plot_filename=None):
    """Plot the sorted payoff of every state-action pair for each algorithm.

    Args:
        payoffs: mapping of state-action pair -> {algorithm name: payoff value}.
        rho: average-reward estimate, rendered as a text annotation.
        rho_text_location: keyword arguments (e.g. x/y) forwarded to ``ax.text``
            to position the rho annotation.
        plot_filename: optional path; when given the figure is also saved.

    Returns:
        The matplotlib Figure. (The previous ``-> None`` annotation was
        wrong — the figure has always been returned.)
    """
    colors = diminishing_reward_colors()
    fig, ax = plt.subplots(figsize=(15, 10))

    x = range(1, len(payoffs) + 1)

    for alg in ['ACS2', 'AACS2_v1', 'AACS2_v2', 'Q-Learning', 'R-Learning']:
        # Sort each algorithm's payoffs so its curve is monotone increasing.
        y = sorted(v[alg] for v in payoffs.values())
        ax.scatter(x, y, color=colors[alg])
        ax.plot(x, y, label=alg, linewidth=2, color=colors[alg])

    # x-axis
    ax.xaxis.set_major_locator(MultipleLocator(5))
    ax.xaxis.set_minor_locator(MultipleLocator(1))
    ax.xaxis.set_major_formatter(FormatStrFormatter('%1.0f'))
    ax.xaxis.set_tick_params(which='major', size=10, width=2, direction='in')
    ax.xaxis.set_tick_params(which='minor', size=5, width=1, direction='in')
    ax.set_xlabel("State-action pairs")

    # y-axis
    ax.yaxis.set_major_locator(MultipleLocator(250))
    ax.yaxis.set_minor_locator(MultipleLocator(50))
    ax.yaxis.set_tick_params(which='major', size=10, width=2, direction='in')
    ax.yaxis.set_tick_params(which='minor', size=5, width=1, direction='in')
    ax.set_ylabel("Payoff value")

    # others (title was a placeholder-less f-string; plain literal suffices)
    ax.set_title("Payoff Landscape")
    ax.text(**rho_text_location, s=fr'$\rho={rho:.2f}$', color=colors['R-Learning'])
    ax.legend(loc='lower right', bbox_to_anchor=(1, 0), frameon=False)

    if plot_filename:
        # Save through the figure object for consistency with the ax-based API.
        fig.savefig(plot_filename, transparent=False, bbox_inches='tight', dpi=PLOT_DPI)

    return fig
| 34.245902
| 100
| 0.691719
| 299
| 2,089
| 4.662207
| 0.377926
| 0.055237
| 0.035868
| 0.05165
| 0.157819
| 0.157819
| 0.140603
| 0.140603
| 0.140603
| 0.140603
| 0
| 0.021412
| 0.17281
| 2,089
| 61
| 101
| 34.245902
| 0.785301
| 0.009574
| 0
| 0
| 0
| 0
| 0.090513
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.05
| false
| 0
| 0.125
| 0
| 0.225
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
886aabc8aa4ca1dc2dab928e6d2e967f6a38ad60
| 5,626
|
py
|
Python
|
Mods/Chatpp/Mod.py
|
3p0bleedthemoncrip/Tobify-Overlay-Download
|
00c0c8a7f3c7ccefce9387b9209dcc86e3b2abc9
|
[
"MIT"
] | null | null | null |
Mods/Chatpp/Mod.py
|
3p0bleedthemoncrip/Tobify-Overlay-Download
|
00c0c8a7f3c7ccefce9387b9209dcc86e3b2abc9
|
[
"MIT"
] | null | null | null |
Mods/Chatpp/Mod.py
|
3p0bleedthemoncrip/Tobify-Overlay-Download
|
00c0c8a7f3c7ccefce9387b9209dcc86e3b2abc9
|
[
"MIT"
] | null | null | null |
class MOD:
    """Chat++ mod: adds extra chat-message categories to the player
    detection algorithm (Hypixel and the Bedwars Practice server)."""

    def __init__(self, Globals):
        """ This adds additional message categories to the player detection algorithm """
        # data transfer variables
        self.Globals = Globals
        self.G = self.Globals  # short alias used throughout
        self.ModData = Globals.ModData["Chatpp"]
        self.backend = Globals.ui_backend
        self.frontend = Globals.ui_frontend

        # set mod data: name/version plus the default config values
        self.ModData.name = "Chatpp"
        self.ModData.version = "0.0.1"
        self.ModData.config = {
            "chat++-hypixel": True,
            "chat++-bedwars practice": False,
        }
        self.ModData.settings = {
            "chat++-hypixel": "Optimise for Hypixel", # config name : displayed name
            "chat++-bedwars practice": "Optimise for the Bedwars Practice server", # config name : displayed name
        }
        # scopes: the hooks this mod registers with the mod loader
        self.ModData.scopes = {
            "init": self.setup, # this is part of the setup for the backend ui
            "config-init": self.ModData.config, # this is a dictionary of all config items which the mod uses
            "config-settings": self.ModData.name, # this registers the mod for the settings menu
            "on-message": self.on_message, # this is called when a chat message appears
        }

    def setup(self, frontend, backend):
        """ This is the mod setup function """
        # Announce the mod and the scopes it registered, one per line.
        join_fragment = "\n - "
        print(
            f"{self.ModData.name} {self.ModData.version} has been loaded with scopes:{join_fragment}{join_fragment.join([scope for scope in self.ModData.scopes.keys()])}",
            end="\n\n")
        # keep references to the UI objects handed over by the loader
        self.frontend = frontend
        self.backend = backend

    def on_message(self, timestamp, message):
        """ This processes a message """
        # print(f"{timestamp} : '{message}'")

        # Hypixel
        if self.G.config["chat++-hypixel"]:
            # no Hypixel-specific categories implemented yet
            pass

        # Bedwars practice
        ranks = ["[Master]", "[Adept]", "[Trainee]"]
        if self.G.config["chat++-bedwars practice"]:
            # ranked users: strip the rank prefix; the next word is the name
            for rank in ranks:
                if f"{rank} " in message:
                    message = message.split(f"{rank} ")[1]
                    username = message.split(" ")[0]
                    self.add_user(username)
            # void message ("<victim> was hit into the void by <attacker>")
            if " was hit into the void by " in message:
                if message.endswith(" FINAL KILL!"):
                    username1 = message.split(" ")[0]
                    username2 = message.split(" ")[-3]
                else:
                    username1, *_, username2 = message.split(" ")
                self.add_user(username1)
                self.add_user(username2)
            # void message
            elif message.endswith(" fell into the void."):
                username = message.split(" ")[0]
                self.add_user(username)
            # lives remaining
            elif " has " in message and " lives" in message:
                username, *_ = message.split(" ")
                self.add_user(username)
            # elimination
            elif " has been eliminated" in message:
                username, *_ = message.split(" ")
                self.sub_user(username)
            # server join message (name is the third-to-last word)
            elif " has joined!" in message:
                *_, username, _, _ = message.split(" ")
                self.add_user(username)
            # server leave message
            elif " has left!" in message:
                *_, username, _, _ = message.split(" ")
                self.sub_user(username)
            # game leave message
            elif message.endswith(" has left the game!"):
                username = message.split(" ")[0]
                # NOTE(review): this branch *adds* the user on a leave
                # message, unlike the other leave handlers which call
                # sub_user — confirm whether that is intentional.
                self.add_user(username)
            # game start (connecting to lobby): reset the player list
            elif message.startswith("Connecting to "):
                self.G.lobby_players = []
            # game start (connection successful)
            elif message.startswith("Successfully connected to "):
                self.G.lobby_players = []
            # sending to lobby
            elif message.startswith("Sending you to "):
                self.G.lobby_players = []
            # remove "at" (false positive picked up from the discord line)
            elif message == "Join the discord for more info at: ":
                self.sub_user("at")
            # players in game: space-separated names after the colon
            elif message.startswith("Players in this game: "):
                players = message.split(": ")[-1].split(" ")
                for player in players:
                    self.add_user(player)
            # block sumo: gold block
            elif message.endswith(" has been on the centre gold block for 5 seconds!"):
                username = message.split(" ")[0]
                self.add_user(username)
            # bedwars
            elif message.startswith("BED DESTRUCTION > ") and " was dismantled by " in message:
                username = message.split(" ")[-1]
                self.add_user(username)
            # else:
            #     for p in self.G.lobby_players:
            #         if p in message:
            #             print(f"{timestamp} : '{message}'")

    def add_user(self, username):
        """ This adds a username to the player list """
        # de-duplicate: only append names not already tracked
        if username not in self.G.lobby_players:
            self.G.lobby_players.append(username)

    def sub_user(self, username):
        """ This removes a username from the player list """
        if username in self.G.lobby_players:
            # remove player
            self.G.lobby_players.remove(username)
            # run mod actions
            self.G.thread_chat_ctx.mod_on_player_leave(username)
| 38.534247
| 171
| 0.530928
| 598
| 5,626
| 4.914716
| 0.245819
| 0.057162
| 0.037428
| 0.046274
| 0.254168
| 0.119769
| 0.119769
| 0.119769
| 0.065328
| 0
| 0
| 0.005294
| 0.362069
| 5,626
| 145
| 172
| 38.8
| 0.813597
| 0.174902
| 0
| 0.21978
| 0
| 0.010989
| 0.169432
| 0.021616
| 0
| 0
| 0
| 0
| 0
| 1
| 0.054945
| false
| 0.010989
| 0
| 0
| 0.065934
| 0.010989
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
886c941fa641d07a3da73aedf1058de8f4d4b127
| 569
|
py
|
Python
|
newss.py
|
krishnansuki/daily-news
|
3b03ea4bcd0aed8ddf69d91128bfce1f3d9192c0
|
[
"Apache-2.0"
] | 1
|
2020-08-01T04:04:34.000Z
|
2020-08-01T04:04:34.000Z
|
newss.py
|
krishnansuki/daily-news
|
3b03ea4bcd0aed8ddf69d91128bfce1f3d9192c0
|
[
"Apache-2.0"
] | null | null | null |
newss.py
|
krishnansuki/daily-news
|
3b03ea4bcd0aed8ddf69d91128bfce1f3d9192c0
|
[
"Apache-2.0"
] | null | null | null |
import feedparser
def parseRSS(rss_url):
    """Fetch and parse the RSS feed at *rss_url* via feedparser."""
    parsed_feed = feedparser.parse(rss_url)
    return parsed_feed
def getHeadLines(rss_url):
    """Return the list of entry titles from the RSS feed at *rss_url*."""
    feed = parseRSS(rss_url)
    return [entry['title'] for entry in feed['items']]
allheadlines = []
# 'gl=IN' selects Indian news; swap in your own country code if needed.
newsurls = {
    'googlenews': 'https://news.google.com/news/rss/?h1=ta&ned=us&gl=IN',
}
for source_name, source_url in newsurls.items():
    allheadlines.extend(getHeadLines(source_url))
for headline in allheadlines:
    print(headline)
| 35.5625
| 172
| 0.692443
| 82
| 569
| 4.756098
| 0.585366
| 0.061538
| 0.071795
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.002155
| 0.184534
| 569
| 15
| 173
| 37.933333
| 0.838362
| 0.142355
| 0
| 0
| 0
| 0.066667
| 0.169851
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.133333
| false
| 0
| 0.066667
| 0.066667
| 0.333333
| 0.066667
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
886fdb38b86a90cbac81f513da517e4152656447
| 2,824
|
py
|
Python
|
examples/05_fields.py
|
johnaparker/MiePy
|
5c5bb5a07c8ab79e9e2a9fc79fb9779e690147be
|
[
"MIT"
] | 3
|
2016-05-30T06:45:29.000Z
|
2017-08-30T19:58:56.000Z
|
examples/05_fields.py
|
johnaparker/MiePy
|
5c5bb5a07c8ab79e9e2a9fc79fb9779e690147be
|
[
"MIT"
] | null | null | null |
examples/05_fields.py
|
johnaparker/MiePy
|
5c5bb5a07c8ab79e9e2a9fc79fb9779e690147be
|
[
"MIT"
] | 5
|
2016-12-13T02:05:31.000Z
|
2018-03-23T07:11:30.000Z
|
"""
Displaying the fields in an xy cross section of the sphere (x polarized light, z-propagating)
"""
import numpy as np
import matplotlib.pyplot as plt
import miepy
from mpl_toolkits.mplot3d import Axes3D
import matplotlib.cm as cm
# Silver (Ag) material from miepy's built-in database.
# NOTE(review): the space in "materials. Ag" is unusual but syntactically
# valid attribute access.
Ag = miepy.materials. Ag()

# calculate scattering coefficients, 800 nm illumination
radius = 200e-9 # 200 nm radius
lmax = 5 # Use up to 5 multipoles
sphere = miepy.single_mie_sphere(radius, Ag, 800e-9, lmax)

# create discretized xy plane (single z = 0 slice through the sphere centre)
x = np.linspace(-2*radius,2*radius,100)
y = np.linspace(-2*radius,2*radius,100)
z = np.array([radius*0.0])
X,Y,Z = np.meshgrid(x,y,z, indexing='xy')

# convert the Cartesian grid to spherical coordinates for the field functions
R = (X**2 + Y**2 + Z**2)**0.5
THETA = np.arccos(Z/R)
PHI = np.arctan2(Y,X)

# electric and magnetic field functions
E_func = sphere.E_field(index=0)
E = E_func(R,THETA,PHI).squeeze()
# intensity: |E|^2 summed over the three spherical field components (axis 0)
IE = np.sum(np.abs(E)**2, axis=0)
H_func = sphere.H_field(index=0)
H = H_func(R,THETA,PHI).squeeze()
IH = np.sum(np.abs(H)**2, axis=0)

# plot results: electric intensity on the left axes, magnetic on the right
fig,axes = plt.subplots(ncols=2, figsize=plt.figaspect(1/2.7))
for i,ax in enumerate(axes):
    plt.subplot(ax)
    I = IE if i == 0 else IH
    plt.pcolormesh(np.squeeze(X)*1e9,np.squeeze(Y)*1e9, I, shading="gouraud", cmap=cm.viridis)
    plt.colorbar(label='field intensity')
THETA = np.squeeze(THETA)
PHI = np.squeeze(PHI)
# overlay streamlines of the in-plane field components
for i,ax in enumerate(axes):
    F = E if i == 0 else H
    # project spherical field components onto Cartesian x and y
    Fx = F[0]*np.sin(THETA)*np.cos(PHI) + F[1]*np.cos(THETA)*np.cos(PHI) - F[2]*np.sin(PHI)
    Fy = F[0]*np.sin(THETA)*np.sin(PHI) + F[1]*np.cos(THETA)*np.sin(PHI) + F[2]*np.cos(PHI)
    step=10  # NOTE(review): unused variable
    ax.streamplot(np.squeeze(X)*1e9, np.squeeze(Y)*1e9, np.real(Fx), np.real(Fy), color='white', linewidth=1.0)
# shared axis cosmetics: nm units, square aspect
for ax in axes:
    ax.set(xlim=[-2*radius*1e9, 2*radius*1e9], ylim=[-2*radius*1e9, 2*radius*1e9],
           aspect='equal', xlabel="X (nm)", ylabel="Y (nm)")
axes[0].set_title("Electric Field")
axes[1].set_title("Magnetic Field")
plt.show()
# theta = np.linspace(0,np.pi,50)
# phi = np.linspace(0,2*np.pi,50)
# r = np.array([10000])
# R,THETA,PHI = np.meshgrid(r,theta,phi)
# X = R*np.sin(THETA)*np.cos(PHI)
# Y = R*np.sin(THETA)*np.sin(PHI)
# Z = R*np.cos(THETA)
# X = X.squeeze()
# Y = Y.squeeze()
# Z = Z.squeeze()
# E = E_func(R,THETA,PHI)
# I = np.sum(np.abs(E)**2, axis=0)
# I = np.squeeze(I)
# I -= np.min(I)
# I /= np.max(I)
# fig = plt.figure()
# ax = fig.add_subplot(111, projection='3d')
# shape = X.shape
# C = np.zeros((shape[0], shape[1], 4))
# cmap_3d = cm.viridis
# for i in range(shape[0]):
# for j in range(shape[1]):
# C[i,j,:] = cmap_3d(I[i,j])
# surf = ax.plot_surface(X*1e9, Y*1e9, Z*1e9, rstride=1, cstride=1,shade=False, facecolors=C,linewidth=.0, edgecolors='#000000', antialiased=False)
# m = cm.ScalarMappable(cmap=cmap_3d)
# m.set_array(I)
# plt.colorbar(m)
# surf.set_edgecolor('k')
# ax.set_xlabel('X')
| 29.726316
| 147
| 0.645892
| 532
| 2,824
| 3.390977
| 0.296992
| 0.034922
| 0.024945
| 0.026608
| 0.221175
| 0.197339
| 0.096452
| 0.047672
| 0
| 0
| 0
| 0.050501
| 0.151558
| 2,824
| 94
| 148
| 30.042553
| 0.702421
| 0.38881
| 0
| 0.047619
| 0
| 0
| 0.043969
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.119048
| 0
| 0.119048
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
887136302539945d1d8fc0fd52d9556bdb55e9ef
| 13,616
|
py
|
Python
|
pya2a/models.py
|
LvanWissen/pya2a
|
d8a7848ba408850aedd79d18ad2816524499f528
|
[
"MIT"
] | null | null | null |
pya2a/models.py
|
LvanWissen/pya2a
|
d8a7848ba408850aedd79d18ad2816524499f528
|
[
"MIT"
] | null | null | null |
pya2a/models.py
|
LvanWissen/pya2a
|
d8a7848ba408850aedd79d18ad2816524499f528
|
[
"MIT"
] | null | null | null |
import datetime
import dateutil.parser
import xml
import xml.etree.ElementTree
from pya2a.utils import parseRemark
class Entity:
    """Common base for all A2A model objects."""

    # XML namespace map used by every find()/findall() in the subclasses.
    NAMESPACE = {"a2a": "http://Mindbus.nl/A2A"}
class Person(Entity):
    """A2A person record parsed from an <a2a:Person> element.

    Optional child elements become attributes of the same name (Gender,
    Residence, Religion, Origin, BirthDate, BirthPlace, Profession,
    MaritalStatus); PersonRemark children are collected into ``Remarks``.
    Attributes never set read as None via ``__getattr__``.
    """
    def __init__(self, element: xml.etree.ElementTree.Element):
        # 'pid' uniquely identifies the person within the record.
        self.id = element.attrib['pid']
        self.relations = []

        ## PersonName (mandatory)
        pn = element.find('a2a:PersonName', namespaces=self.NAMESPACE)
        self.PersonName = PersonName(pn)

        # Gender
        if (el := element.find('a2a:Gender',
                               namespaces=self.NAMESPACE)) is not None:
            self.Gender = el.text

        # Residence
        if (el := element.find('a2a:Residence',
                               namespaces=self.NAMESPACE)) is not None:
            self.Residence = Place(el)

        # Religion (nested literal element)
        if (el := element.find('a2a:Religion',
                               namespaces=self.NAMESPACE)) is not None:
            self.Religion = el.find('a2a:ReligionLiteral',
                                    namespaces=self.NAMESPACE).text

        # Origin
        if (el := element.find('a2a:Origin',
                               namespaces=self.NAMESPACE)) is not None:
            self.Origin = Place(el)

        # Age (not parsed)

        # BirthDate
        if (el := element.find('a2a:BirthDate',
                               namespaces=self.NAMESPACE)) is not None:
            self.BirthDate = Date(el)

        # BirthPlace
        if (el := element.find('a2a:BirthPlace',
                               namespaces=self.NAMESPACE)) is not None:
            self.BirthPlace = Place(el)

        # Profession
        if (el := element.find('a2a:Profession',
                               namespaces=self.NAMESPACE)) is not None:
            self.Profession = el.text

        # MaritalStatus
        if (el := element.find('a2a:MaritalStatus',
                               namespaces=self.NAMESPACE)) is not None:
            # Bug fix: this branch previously assigned to self.Gender,
            # clobbering the value parsed above; store under its own name.
            self.MaritalStatus = el.text

        # PersonRemark: collect (Key, parsed Value) pairs into a dict.
        # findall() always returns a list, so this branch always runs and an
        # empty result yields Remarks == {} (kept for backward compatibility).
        if (els := element.findall('a2a:PersonRemark',
                                   namespaces=self.NAMESPACE)) is not None:
            remarks = []
            for el in els:
                remarkType = el.attrib['Key']
                remark = el.find('a2a:Value', namespaces=self.NAMESPACE).text
                remarks.append((remarkType, parseRemark(remark)))
            self.Remarks = dict(remarks)

    def __getattr__(self, attr):
        # Absent optional fields read as None rather than raising.
        return None
class PersonName(Entity):
    """Name parts of a person, mirrored from the XML children.

    Possible children include: A2A:PersonNameAlias, A2A:PersonNameFamilyName,
    A2A:PersonNameFirstName, A2A:PersonNameInitials, A2A:PersonNameLastName,
    A2A:PersonNameLiteral, A2A:PersonNameNickName, A2A:PersonNamePatronym,
    A2A:PersonNamePrefixLastName, A2A:PersonNameRemark, A2A:PersonNameTitle,
    A2A:PersonNameTitleOfNobility.
    """
    def __init__(self, element: xml.etree.ElementTree.Element):
        # Copy every child element onto the instance, keyed by its tag
        # with the namespace prefix stripped.
        ns_prefix = f"{{{self.NAMESPACE['a2a']}}}"
        for child in element:
            setattr(self, child.tag.replace(ns_prefix, ''), child.text)

    def __iter__(self):
        # Yield only the PersonName* attributes, skipping anything else.
        for attribute in vars(self):
            if attribute.startswith('PersonName'):
                yield getattr(self, attribute)

    def __getattr__(self, attr):
        # Missing name parts read as None instead of raising.
        return None
class Event(Entity):
    """A2A event record: type plus optional date/place/religion and remarks."""

    def __init__(self, element: xml.etree.ElementTree.Element):
        # 'eid' uniquely identifies the event within the record.
        self.id = element.attrib['eid']
        self.relations = []

        # EventType (mandatory)
        self.EventType = element.find('a2a:EventType',
                                      namespaces=self.NAMESPACE).text

        # EventDate
        if (el := element.find('a2a:EventDate',
                               namespaces=self.NAMESPACE)) is not None:
            self.EventDate = Date(el)

        # EventPlace
        if (el := element.find('a2a:EventPlace',
                               namespaces=self.NAMESPACE)) is not None:
            self.EventPlace = Place(el)

        # EventReligion (nested literal element)
        if (el := element.find('a2a:EventReligion',
                               namespaces=self.NAMESPACE)) is not None:
            self.EventReligion = el.find('a2a:ReligionLiteral',
                                         namespaces=self.NAMESPACE).text

        # EventRemark: collect (Key, parsed Value) pairs into a dict.
        # NOTE(review): findall() returns a list (never None), so this branch
        # always runs; an empty result yields Remarks == {}.
        if (els := element.findall('a2a:EventRemark',
                                   namespaces=self.NAMESPACE)) is not None:
            remarks = []
            for el in els:
                remarkType = el.attrib['Key']
                remark = el.find('a2a:Value', namespaces=self.NAMESPACE).text
                remarks.append((remarkType, parseRemark(remark)))
            self.Remarks = dict(remarks)

    def __getattr__(self, attr):
        # Absent optional fields read as None rather than raising.
        return None
class Object(Entity):
    """A2A object record; only the element id and a relation list are kept."""
    def __init__(self, element: xml.etree.ElementTree.Element):
        # 'oid' uniquely identifies the object within the record.
        self.id, self.relations = element.attrib['oid'], []
class Source(Entity):
    """
    A2A:EAC, A2A:EAD, A2A:RecordGUID, A2A:RecordIdentifier, A2A:SourceAvailableScans, A2A:SourceDate,
    A2A:SourceDigitalOriginal, A2A:SourceDigitalizationDate, A2A:SourceIndexDate, A2A:SourceLastChangeDate,
    A2A:SourcePlace, A2A:SourceReference, A2A:SourceRemark, A2A:SourceType
    """
    def __init__(self, element: xml.etree.ElementTree.Element):
        # SourcePlace (mandatory)
        self.SourcePlace = Place(
            element.find('a2a:SourcePlace', namespaces=self.NAMESPACE))

        # SourceIndexDate: mandatory From/To range, parsed to datetimes
        date_from = element.find('a2a:SourceIndexDate/a2a:From',
                                 namespaces=self.NAMESPACE).text
        self.IndexDateFrom = dateutil.parser.parse(date_from)
        date_to = element.find('a2a:SourceIndexDate/a2a:To',
                               namespaces=self.NAMESPACE).text
        self.IndexDateTo = dateutil.parser.parse(date_to)

        # SourceDate (optional)
        if (el := element.find('a2a:SourceDate',
                               namespaces=self.NAMESPACE)) is not None:
            self.SourceDate = Date(el)

        # SourceType (mandatory)
        self.SourceType = element.find('a2a:SourceType',
                                       namespaces=self.NAMESPACE).text

        # EAD (not parsed)
        # EAC (not parsed)

        # SourceReference (mandatory)
        self.SourceReference = SourceReference(
            element.find('a2a:SourceReference', namespaces=self.NAMESPACE))

        # SourceAvailableScans: wrap each child <a2a:Scan> in a Scan object
        if (el := element.find('a2a:SourceAvailableScans',
                               namespaces=self.NAMESPACE)) is not None:
            self.scans = [
                Scan(i)
                for i in el.findall('a2a:Scan', namespaces=self.NAMESPACE)
            ]
        else:
            self.scans = []

        # SourceDigitalizationDate (optional ISO date)
        if (el := element.find('a2a:SourceDigitalizationDate',
                               namespaces=self.NAMESPACE)) is not None:
            self.SourceDigitalizationDate = datetime.date.fromisoformat(
                el.text)

        # SourceLastChangeDate (mandatory ISO date)
        self.SourceLastChangeDate = datetime.date.fromisoformat(
            element.find('a2a:SourceLastChangeDate',
                         namespaces=self.NAMESPACE).text)

        # SourceRetrievalDate (optional ISO date)
        if (el := element.find('a2a:SourceRetrievalDate',
                               namespaces=self.NAMESPACE)) is not None:
            self.SourceRetrievalDate = datetime.date.fromisoformat(el.text)

        # SourceDigitalOriginal (not parsed)

        # RecordIdentifier (optional)
        if (el := element.find('a2a:RecordIdentifier',
                               namespaces=self.NAMESPACE)) is not None:
            self.identifier = el.text

        # RecordGUID: strip the Microsoft-style curly braces
        guid = element.find('a2a:RecordGUID', namespaces=self.NAMESPACE).text
        self.guid = guid.replace('{', '').replace('}', '') # m$

        # SourceRemark: remarks keyed by 'Key'; keys occurring more than once
        # (plus the hardcoded 'filename' key) are collected into lists of
        # their parsed 'Other' values instead of scalars.
        # NOTE(review): findall() returns a list (never None), so this branch
        # always runs; an empty result yields Remarks with only 'filename'.
        if (els := element.findall('a2a:SourceRemark',
                                   namespaces=self.NAMESPACE)) is not None:
            remarks = []
            for el in els:
                remarkType = el.attrib['Key']
                remark = el.find('a2a:Value', namespaces=self.NAMESPACE).text
                remarks.append((remarkType, parseRemark(remark)))
            remarkKeys = [i[0] for i in remarks]
            duplicateKeys = set(k for k in remarkKeys
                                if remarkKeys.count(k) > 1)
            duplicateKeys.add('filename')  # hardcode
            # scalar entries first, unique keys only
            remarkDict = dict(
                [i for i in remarks if i[0] not in duplicateKeys])
            # add the duplicate keys with list value
            for key in duplicateKeys:
                remarkDict[key] = [
                    i[1]['Other'] for i in remarks if i[0] == key
                ]
            self.Remarks = remarkDict
class Relation(Entity):
    """Base class for A2A relation elements; parses the relation type."""

    def __init__(self, element: xml.etree.ElementTree.Element):
        # RelationType is mandatory on every relation element.
        self.RelationType = element.find('a2a:RelationType',
                                         namespaces=self.NAMESPACE).text

        # ExtendedRelationType
        if (el := element.find('a2a:ExtendedRelationType',
                               namespaces=self.NAMESPACE)) is not None:
            self.ExtendedRelationType = el.text

    def __get__(self, value):
        # NOTE(review): non-standard descriptor signature, and self.value is
        # never assigned anywhere in this module, so invoking this would
        # raise AttributeError. Looks like dead code — confirm before removal.
        return self.value
class RelationEP(Relation):
    """Event-to-person relation, identified by key references."""
    def __init__(self, element: xml.etree.ElementTree.Element):
        super().__init__(element)
        find = element.find
        self.person = find('a2a:PersonKeyRef', namespaces=self.NAMESPACE).text
        self.event = find('a2a:EventKeyRef', namespaces=self.NAMESPACE).text
class RelationPP(Relation):
    """Person-to-person relation; holds the key refs of all persons involved."""
    def __init__(self, element: xml.etree.ElementTree.Element):
        super().__init__(element)
        refs = element.findall('a2a:PersonKeyRef', namespaces=self.NAMESPACE)
        self.persons = [ref.text for ref in refs]
class RelationPO(Relation):
    """Person-to-object relation, identified by key references."""
    def __init__(self, element: xml.etree.ElementTree.Element):
        super().__init__(element)
        find = element.find
        self.person = find('a2a:PersonKeyRef', namespaces=self.NAMESPACE).text
        self.object = find('a2a:ObjectKeyRef', namespaces=self.NAMESPACE).text
class RelationP(Relation):
    """Relation involving a single person key reference."""
    def __init__(self, element: xml.etree.ElementTree.Element):
        super().__init__(element)
        ref = element.find('a2a:PersonKeyRef', namespaces=self.NAMESPACE)
        self.person = ref.text
class RelationOO(Relation):
    """Object-to-object relation; holds the key refs of all objects involved."""
    def __init__(self, element: xml.etree.ElementTree.Element):
        super().__init__(element)
        refs = element.findall('a2a:ObjectKeyRef', namespaces=self.NAMESPACE)
        self.objects = [ref.text for ref in refs]
class RelationO(Relation):
    """Relation involving a single object key reference."""
    def __init__(self, element: xml.etree.ElementTree.Element):
        super().__init__(element)
        ref = element.find('a2a:ObjectKeyRef', namespaces=self.NAMESPACE)
        self.object = ref.text
class Place(Entity):
    """Location details, mirrored from the XML children.

    Possible children include: A2A:Block, A2A:Country, A2A:County,
    A2A:DescriptiveLocationIndicator, A2A:DetailPlaceRemark, A2A:HouseName,
    A2A:HouseNumber, A2A:HouseNumberAddition, A2A:Latitude, A2A:Longitude,
    A2A:Municipality, A2A:PartMunicipality, A2A:Place, A2A:Province,
    A2A:Quarter, A2A:State, A2A:Street.
    """
    def __init__(self, element: xml.etree.ElementTree.Element):
        # Mirror every child element as an attribute named after its tag,
        # with the namespace prefix stripped.
        ns_prefix = f"{{{self.NAMESPACE['a2a']}}}"
        for child in element:
            setattr(self, child.tag.replace(ns_prefix, ''), child.text)
class SourceReference(Entity):
    """Archival reference of a source; child elements become attributes."""
    def __init__(self, element: xml.etree.ElementTree.Element):
        # Tag name (namespace stripped) becomes the attribute name.
        ns_prefix = f"{{{self.NAMESPACE['a2a']}}}"
        for child in element:
            setattr(self, child.tag.replace(ns_prefix, ''), child.text)
class Scan(Entity):
    """A single scan entry of a source; child elements become attributes."""
    def __init__(self, element: xml.etree.ElementTree.Element):
        # Tag name (namespace stripped) becomes the attribute name.
        ns_prefix = f"{{{self.NAMESPACE['a2a']}}}"
        for child in element:
            setattr(self, child.tag.replace(ns_prefix, ''), child.text)
class Date(Entity):
    """A2A date element with best-effort conversion to a Python date.

    Child elements (Year, Month, Day, Hour, Minute, ...) become string
    attributes; ``self.date`` holds the converted value from ``_toISO()``.
    """
    def __init__(self, element: xml.etree.ElementTree.Element):
        # Calendar="" IndexDateTime="" are optional XML attributes.
        if 'Calendar' in element.attrib:
            self.calendar = element.attrib['Calendar']
        if 'IndexDateTime' in element.attrib:
            self.IndexDateTime = element.attrib['IndexDateTime']
        # Mirror each child element as an attribute named after its tag.
        for child in element:
            key = child.tag.replace(f"{{{self.NAMESPACE['a2a']}}}", '')
            value = child.text
            self.__setattr__(key, value)
        self.date = self._toISO()

    def _toISO(self):
        """Convert the parsed components to the richest representation possible.

        Returns a datetime.datetime, a datetime.date, a partial ISO string
        ("YYYY-MM" or "YYYY"), or None when no year component is present.
        """
        arguments = {
            k.lower(): int(v)
            for k, v in vars(self).items()
            if k.lower() in ('year', 'month', 'day', 'hour', 'minute')
        }
        if {'year', 'month', 'day', 'hour'}.issubset(arguments):
            return datetime.datetime(**arguments)
        elif {'year', 'month', 'day'}.issubset(arguments):
            return datetime.date(**arguments)
        elif {'year', 'month'}.issubset(arguments):
            return f"{arguments['year']}-{arguments['month']}"
        elif {'year'}.issubset(arguments):
            return f"{arguments['year']}"
        else:
            return None

    def __str__(self):
        # Bug fix: _toISO() may return a date/datetime (or None), but
        # __str__ must return a str — returning anything else raises
        # TypeError. Convert non-string results to their ISO form.
        value = self._toISO()
        if value is None:
            return ''
        if isinstance(value, str):
            return value
        return value.isoformat()
| 32.809639
| 107
| 0.563234
| 1,299
| 13,616
| 5.793687
| 0.146266
| 0.08464
| 0.134467
| 0.066436
| 0.537337
| 0.444592
| 0.43476
| 0.354106
| 0.327265
| 0.307069
| 0
| 0.011095
| 0.324838
| 13,616
| 414
| 108
| 32.888889
| 0.807571
| 0.101425
| 0
| 0.471545
| 0
| 0
| 0.088845
| 0.029064
| 0
| 0
| 0
| 0
| 0
| 1
| 0.093496
| false
| 0
| 0.020325
| 0.020325
| 0.227642
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
88737c6f1857632bec14f3d69ee844444dd65d17
| 2,221
|
py
|
Python
|
musiker_fille_bot.py
|
pranay414/musiker_fille_bot
|
55d87a3bfdbaf8b99b5ca86c6f7a433cd6280d42
|
[
"MIT"
] | null | null | null |
musiker_fille_bot.py
|
pranay414/musiker_fille_bot
|
55d87a3bfdbaf8b99b5ca86c6f7a433cd6280d42
|
[
"MIT"
] | 1
|
2017-12-24T11:18:07.000Z
|
2017-12-25T20:29:18.000Z
|
musiker_fille_bot.py
|
pranay414/musiker_fille_bot
|
55d87a3bfdbaf8b99b5ca86c6f7a433cd6280d42
|
[
"MIT"
] | null | null | null |
# - *- coding: utf- 8 - *-
""" Bot to suggest music from Spotify based on your mood.
"""
import spotipy, os
from spotipy.oauth2 import SpotifyClientCredentials
from telegram.ext import Updater, CommandHandler, MessageHandler, Filters
#from access_token import AUTH_TOKEN, CLIENT_ID, CLIENT_SECRET
# Initialise the spotipy client; CLIENT_ID / CLIENT_SECRET must be set in
# the environment (raises KeyError otherwise).
client_credentials_manager = SpotifyClientCredentials(client_id=os.environ['CLIENT_ID'], client_secret=os.environ['CLIENT_SECRET'])
sp = spotipy.Spotify(client_credentials_manager=client_credentials_manager)
# Define command handlers. They usually take two arguments bot and update
# In case of error handler they recieve TelegramError object in error
def start(bot, update):
    """Handler for /start: greet the user."""
    greeting = "I can help you find the best music from Spotify 😉"
    update.message.reply_text(greeting)
def help(bot, update):
    """Handler for /help: list the available commands."""
    commands = "You can control me by sending these commands:\n\n/start - start a conversation with bot\n/new - get new releases from Spotify\n/help - get help from bot"
    update.message.reply_text(commands)
def new(bot, update):
    """Handler for /new: reply with the 10 newest US releases on Spotify."""
    results = sp.new_releases(country='US',limit=10)
    lines = [
        ' ' + str(position) + ' ' + album['name'] + ' - ' + album['artists'][0]['name']
        for position, album in enumerate(results['albums']['items'], 1)
    ]
    update.message.reply_text('\n\n'.join(lines))
def sorry(bot, update):
    """Fallback handler for non-command messages: point the user to /help."""
    apology = "Sorry, I didn't get you. Type /help to get the list of available commands."
    update.message.reply_text(apology)
def main():
    """Start the bot: register handlers and poll Telegram until interrupted."""
    # Create event handler and pass it your bot's token (AUTH_TOKEN env var)
    updater = Updater(os.environ['AUTH_TOKEN'])
    # Get dispatcher to register handlers
    dispatcher = updater.dispatcher
    print("Bot started!")
    # On different commands - answer in Telegram
    dispatcher.add_handler(CommandHandler('start', start))
    dispatcher.add_handler(CommandHandler('help', help))
    dispatcher.add_handler(CommandHandler('new', new))
    # dispatcher.add_handler(CommandHandler(''))
    # On non-command text messages - fall back to the 'sorry' reply
    dispatcher.add_handler(MessageHandler(Filters.text, sorry))
    # Start the Bot
    updater.start_polling()
    # Run the bot until you press Ctrl-C
    updater.idle()
# Run the bot only when executed as a script (not on import).
if __name__ == '__main__':
    main()
| 38.964912
| 185
| 0.722647
| 305
| 2,221
| 5.154098
| 0.416393
| 0.041349
| 0.063613
| 0.05598
| 0.097328
| 0.05916
| 0
| 0
| 0
| 0
| 0
| 0.003224
| 0.162089
| 2,221
| 56
| 186
| 39.660714
| 0.840946
| 0.266096
| 0
| 0
| 0
| 0.034483
| 0.234122
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.172414
| false
| 0
| 0.103448
| 0
| 0.275862
| 0.034483
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
8875919d1f2a6e03d1eb055a54e6b7d341bfcdca
| 1,544
|
py
|
Python
|
tracker/utils/projects.py
|
dti-research/tracker
|
f2384c0c7b631aa9efd39bf606cda8b85187fcc6
|
[
"BSD-3-Clause"
] | 1
|
2019-07-25T18:02:37.000Z
|
2019-07-25T18:02:37.000Z
|
tracker/utils/projects.py
|
dti-research/tracker
|
f2384c0c7b631aa9efd39bf606cda8b85187fcc6
|
[
"BSD-3-Clause"
] | 10
|
2019-08-29T12:27:35.000Z
|
2020-01-04T18:40:48.000Z
|
tracker/utils/projects.py
|
dti-research/tracker
|
f2384c0c7b631aa9efd39bf606cda8b85187fcc6
|
[
"BSD-3-Clause"
] | null | null | null |
# Copyright (c) 2019, Danish Technological Institute.
# All rights reserved.
#
# This source code is licensed under the BSD-style license found in the
# LICENSE file in the root directory of this source tree.
# -*- coding: utf-8 -*-
""" Utility code to locate tracker projects
"""
from tracker.tracker_file import TrackerFile
from tracker.utils import cli
from tracker.utils import config
def is_cwd_project(cwd):
    """Tell whether *cwd* is inside a tracker project. Not yet implemented."""
    raise NotImplementedError
def get_project_names_and_dirs():
    """Return [{"name": ..., "path": ...}] for every configured project.

    Reports via cli.error when no projects are configured in the tracker
    file (and then returns None, matching the original implicit behaviour).
    """
    projects = TrackerFile().get("projects", {})
    if not projects:
        cli.error("No projects specified in {}".format(
            config.get_user_config_path()))
        return None
    # Each entry in `projects` is a one-item {name: settings} mapping.
    data = []
    for entry in projects:
        for name, settings in entry.items():
            data.append({
                "name": name,
                "path": settings.get("path", ""),
            })
    return data
def get_project_names():
    """Searches for Tracker projects at the Tracker home configuration file

    Returns:
        <list> -- List of project names
    """
    projects = TrackerFile().get("projects", {})
    names = []
    if projects:
        # Each project entry is a single-item dict; its (only) key is the name.
        for entry in projects:
            names.append(list(entry)[0])
    return names
def get_project_dir_by_name(name):
    """Return the configured path of the first project whose name contains *name*.

    Returns None when no project matches.
    """
    trackerfile = TrackerFile()
    data = trackerfile.get("projects")
    for d in data:
        # Each project entry is a single-item dict; k is the project name.
        k, _ = list(d.items())[0]
        # NOTE(review): substring match — "foo" also matches "foobar";
        # confirm whether exact matching is intended.
        if name in k:
            path = d[k]["path"]
            return path
| 21.746479
| 75
| 0.609456
| 188
| 1,544
| 4.893617
| 0.398936
| 0.078261
| 0.042391
| 0.047826
| 0.13913
| 0.113043
| 0
| 0
| 0
| 0
| 0
| 0.006352
| 0.286269
| 1,544
| 70
| 76
| 22.057143
| 0.828494
| 0.243523
| 0
| 0.243243
| 0
| 0
| 0.058927
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.108108
| false
| 0
| 0.081081
| 0
| 0.27027
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
8876c3bace11ab590dd97932baa4aa09e457abf7
| 2,580
|
py
|
Python
|
day06.py
|
AnthonyFloyd/2017-AdventOfCode-Python
|
ef66ed25fef416f1f5f269810e6039cab53dc6d0
|
[
"MIT"
] | null | null | null |
day06.py
|
AnthonyFloyd/2017-AdventOfCode-Python
|
ef66ed25fef416f1f5f269810e6039cab53dc6d0
|
[
"MIT"
] | null | null | null |
day06.py
|
AnthonyFloyd/2017-AdventOfCode-Python
|
ef66ed25fef416f1f5f269810e6039cab53dc6d0
|
[
"MIT"
] | null | null | null |
'''
Advent of Code 2017
Day 6: Memory Reallocation
'''
import unittest
# (input string, expected iterations to loop, expected loop size)
TEST_BANKS = ('0 2 7 0', 5, 4)
# Puzzle input: item counts for the 16 memory banks.
INPUT_BANKS = '0 5 10 0 11 14 13 4 11 8 8 7 1 4 12 11'
def findInfiniteLoop(memoryBanks):
    '''
    Finds the number of iterations required to detect an infinite loop with the given start condition.

    memoryBanks is a list of integers, representing a number of memory banks with items in each.
    Note: the list is redistributed in place (the caller's list is mutated),
    matching the original behaviour.

    Returns a tuple: (iterations until a repeated configuration is detected,
    size of the loop, i.e. iterations since that configuration first appeared).
    '''
    nBanks = len(memoryBanks)

    # Map each seen configuration to the iteration at which it appeared.
    # A dict gives O(1) membership tests and yields the loop size directly,
    # replacing the original O(n) list scan per iteration (O(n^2) overall).
    seen = {tuple(memoryBanks): 0}

    nIterations = 0
    while True:
        # Find the fullest bank (ties broken by lowest index) and empty it.
        maximumItems = max(memoryBanks)
        index = memoryBanks.index(maximumItems)
        memoryBanks[index] = 0
        nIterations += 1

        # Redistribute its items one at a time, wrapping around the banks.
        for _ in range(maximumItems):
            index = (index + 1) % nBanks
            memoryBanks[index] += 1

        state = tuple(memoryBanks)
        if state in seen:
            return (nIterations, nIterations - seen[state])
        seen[state] = nIterations
# Unit tests
class TestLoops(unittest.TestCase):
    '''
    Tests for Part 1 and Part 2
    '''

    def test_part1(self):
        '''
        Part 1: iterations until a repeated configuration is detected.
        '''
        banks = [int(n) for n in TEST_BANKS[0].strip().split()]
        iterations = findInfiniteLoop(banks)[0]
        self.assertEqual(iterations, TEST_BANKS[1])

    def test_part2(self):
        '''
        Part 2: size of the detected loop.
        '''
        banks = [int(n) for n in TEST_BANKS[0].strip().split()]
        loop_size = findInfiniteLoop(banks)[1]
        self.assertEqual(loop_size, TEST_BANKS[2])
if __name__ == '__main__':
    print('Advent of Code\nDay 6: Memory Reallocation\n')
    # Solve both parts in one pass over the real puzzle input.
    puzzle_banks = [int(n) for n in INPUT_BANKS.strip().split()]
    iterations, loopSize = findInfiniteLoop(puzzle_banks)
    print('Part 1: {0:d} iterations to infinite loop'.format(iterations))
    print('Part 2: The loop is {0:d} iterations'.format(loopSize))
| 27.446809
| 109
| 0.637597
| 343
| 2,580
| 4.746356
| 0.396501
| 0.027641
| 0.015356
| 0.021499
| 0.127764
| 0.127764
| 0.111794
| 0.081081
| 0.081081
| 0.081081
| 0
| 0.0314
| 0.271705
| 2,580
| 93
| 110
| 27.741935
| 0.835019
| 0.324031
| 0
| 0
| 0
| 0
| 0.107514
| 0
| 0
| 0
| 0
| 0
| 0.057143
| 1
| 0.085714
| false
| 0
| 0.028571
| 0
| 0.171429
| 0.085714
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
887a62af70424662df05268a24baf2a7aafc6529
| 1,757
|
py
|
Python
|
iucas/utils.py
|
rysdyk/django-iucas
|
d534800c6a1fc6cf3ea5e3f1c0d9bc0dc7a2b4db
|
[
"BSD-3-Clause"
] | null | null | null |
iucas/utils.py
|
rysdyk/django-iucas
|
d534800c6a1fc6cf3ea5e3f1c0d9bc0dc7a2b4db
|
[
"BSD-3-Clause"
] | null | null | null |
iucas/utils.py
|
rysdyk/django-iucas
|
d534800c6a1fc6cf3ea5e3f1c0d9bc0dc7a2b4db
|
[
"BSD-3-Clause"
] | 1
|
2020-01-16T20:25:52.000Z
|
2020-01-16T20:25:52.000Z
|
"""
Utility Methods for Authenticating against and using Indiana University CAS.
"""
import httplib2
from django.contrib.auth.models import User
from django.core.exceptions import ObjectDoesNotExist
from django.conf import settings
def validate_cas_ticket(casticket, casurl):
    """
    Takes a CAS Ticket and makes the out of bound GET request to
    cas.iu.edu to verify the ticket.

    :param casticket: ticket received from CAS after the user logged in.
    :param casurl: service URL the ticket was issued for.
    :returns: the CAS response body split into lines (two lines, ``yes``
        then the username, on success; a single ``no`` line on failure).
    """
    # Bug fix: the ticket itself must be passed to the validate endpoint.
    # The original URL only carried casurl, so the unused casticket argument
    # meant validation could never succeed.
    validate_url = 'https://%s/cas/validate?cassvc=IU&casticket=%s&casurl=%s' % \
        (settings.CAS_HOST, casticket, casurl,)
    # Optional custom CA bundle for the HTTPS connection.
    if hasattr(settings, 'CAS_HTTP_CERT'):
        h = httplib2.Http(ca_certs=settings.CAS_HTTP_CERT)
    else:
        h = httplib2.Http()
    resp, content = h.request(validate_url, "GET")
    return content.splitlines()
def get_cas_username(casticket, casurl):
    """
    Resolve a CAS ticket to a username.

    Validates the given casticket/casurl pair and returns the username of
    the logged in user, or None when the user is not authenticated.
    """
    lines = validate_cas_ticket(casticket, casurl)
    # A successful validation is exactly two lines: 'yes' then the username.
    if len(lines) == 2 and lines[0] == 'yes':
        return lines[1]
    return None
class IUCASBackend(object):
    """
    IU CAS authentication backend for Django.
    """

    def authenticate(self, ticket, casurl):
        # Validate the ticket out-of-band; success is exactly two lines:
        # 'yes' followed by the username.
        lines = validate_cas_ticket(ticket, casurl)
        if len(lines) != 2 or lines[0] != 'yes':
            return None
        username = lines[1]
        if not username:
            return None
        try:
            return User.objects.get(username__iexact=username)
        except User.DoesNotExist:
            # NOTE: when no matching User row exists, the raw username
            # string is returned (the caller handles account creation).
            return username

    def get_user(self, user_id):
        try:
            return User.objects.get(pk=user_id)
        except User.DoesNotExist:
            return None
| 30.293103
| 76
| 0.632328
| 221
| 1,757
| 4.932127
| 0.393665
| 0.033028
| 0.046789
| 0.047706
| 0.102752
| 0.049541
| 0.049541
| 0.049541
| 0.049541
| 0
| 0
| 0.007087
| 0.277177
| 1,757
| 57
| 77
| 30.824561
| 0.851181
| 0.194081
| 0
| 0.342105
| 0
| 0
| 0.04797
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.105263
| false
| 0
| 0.105263
| 0
| 0.473684
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|
887a99f77ebc5982239f9bc71d68f9e4f2afc02f
| 20,460
|
py
|
Python
|
blousebrothers/confs/views.py
|
sladinji/blousebrothers
|
461de3ba011c0aaed3f0014136c4497b6890d086
|
[
"MIT"
] | 1
|
2022-01-27T11:58:10.000Z
|
2022-01-27T11:58:10.000Z
|
blousebrothers/confs/views.py
|
sladinji/blousebrothers
|
461de3ba011c0aaed3f0014136c4497b6890d086
|
[
"MIT"
] | 5
|
2021-03-19T00:01:54.000Z
|
2022-03-11T23:46:21.000Z
|
blousebrothers/confs/views.py
|
sladinji/blousebrothers
|
461de3ba011c0aaed3f0014136c4497b6890d086
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
from django.http import JsonResponse
from decimal import Decimal
from datetime import datetime, timedelta
import re
import logging
from disqusapi import DisqusAPI
from django.contrib import messages
from django.apps import apps
from django.core.mail import mail_admins
from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect
from django.contrib.auth.mixins import LoginRequiredMixin
from django.shortcuts import redirect
from djng.views.mixins import JSONResponseMixin, allow_remote_invocation
from django.core.exceptions import ObjectDoesNotExist
from django.views.generic import (
DetailView,
ListView,
UpdateView,
CreateView,
FormView,
DeleteView,
TemplateView,
)
from django.conf import settings
import blousebrothers.classifier as cl
from blousebrothers.tools import get_disqus_sso
from blousebrothers.auth import (
BBConferencierReqMixin,
ConferenceWritePermissionMixin,
ConferenceReadPermissionMixin,
TestPermissionMixin,
BBLoginRequiredMixin,
)
from blousebrothers.tools import analyse_conf, get_full_url
from blousebrothers.confs.utils import get_or_create_product
from blousebrothers.users.charts import MonthlyLineChart
from blousebrothers.users.models import User
from .models import (
Conference,
Question,
Answer,
AnswerImage,
ConferenceImage,
QuestionImage,
QuestionExplainationImage,
Item,
Test,
TestAnswer,
)
from .forms import ConferenceForm, ConferenceFinalForm, RefundForm, ConferenceFormSimple
# Module-level logger, named after this module.
logger = logging.getLogger(__name__)
# Resolve the catalogue Product model through the app registry instead of a
# direct import.
Product = apps.get_model('catalogue', 'Product')
class ConferenceHomeView(LoginRequiredMixin, TemplateView):
    """Home dashboard for students who have finished at least one test."""
    template_name = 'confs/conference_home.html'

    def get(self, request, *args, **kwargs):
        # Users with no finished test are sent to the catalogue instead.
        if request.user.tests.filter(finished=True).count():
            return super().get(request, *args, **kwargs)
        return redirect(reverse('catalogue:index'))

    def get_context_data(self, *args, **kwargs):
        context = super().get_context_data(**kwargs)
        context['object'] = self.request.user
        # Re-fetch the user with answers prefetched so user.stats is cheap.
        stats_user = User.objects.prefetch_related("tests__answers").get(pk=self.request.user.pk)
        context.update(**stats_user.stats)
        chart = MonthlyLineChart()
        chart.context = context
        context['monthly_chart'] = chart
        return context
class ConferenceDetailView(ConferenceReadPermissionMixin, DetailView):
    """Read-only detail view of a conference, questions prefetched."""
    model = Conference

    def get_object(self, queryset=None):
        # Index lookups by slug, prefetching answers and images.
        return Conference.objects.prefetch_related(
            "questions__answers",
            "questions__images",
        ).get(slug=self.kwargs['slug'])

    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)
        context['meta'] = self.get_object().as_meta(self.request)
        if self.request.user.is_superuser:
            # Superusers additionally get a per-question speciality
            # classification of statement + question (+ explanation).
            intro = context['object'].statement
            specialities = []
            for question in context['object'].questions.all():
                text = str(intro) + " " + question.question
                if question.explaination:
                    text = text + " " + question.explaination
                specialities.append(cl.classifier(text))
            context['specialities'] = specialities
        return context
class ConferenceDeleteView(ConferenceWritePermissionMixin, BBConferencierReqMixin, DeleteView):
    """
    Confirmation view for conference deletion.  Objects are only flagged as
    deleted, never removed from the database (use the admin interface for
    that).
    """
    template_name = 'confs/conference_delete.html'
    model = Conference

    def delete(self, request, *args, **kwargs):
        """
        Soft-delete: set deleted=True on the object instead of removing it.
        """
        self.object = self.get_object()
        redirect_to = self.get_success_url()
        self.object.deleted = True
        self.object.save()
        return HttpResponseRedirect(redirect_to)

    def get_success_url(self):
        return reverse('confs:list')
class ConferenceUpdateView(ConferenceWritePermissionMixin, JSONResponseMixin, UpdateView):
    """
    Main AngularJS interface where you can edit questions, answers, images...

    Besides regular form handling, the Angular frontend calls the two
    ``allow_remote_invocation`` endpoints below: ``sync_data`` to persist
    edits and ``get_keywords`` for keyword suggestions.
    """
    template_name = 'confs/conference_update.html'
    form_class = ConferenceForm

    # send the user back to their own page after a successful update
    def get_redirect_url(self):
        return reverse('confs:detail',
                       kwargs={'slug': self.request.conf.slug})

    def get_object(self, queryset=None):
        return Conference.objects.get(slug=self.kwargs['slug'])

    def form_valid(self, form):
        context = self.get_context_data()
        formset = context['formset']
        if form.is_valid():
            self.object = form.save(commit=False)
            self.object.owner = self.request.user
            self.object.save()
        else:
            return self.render_to_response(self.get_context_data(form=form, formset=formset))
        if formset.is_valid():
            formset.save()
            return redirect(self.object.get_absolute_url())
        else:
            return self.render_to_response(self.get_context_data(form=form, formset=formset))

    @allow_remote_invocation
    def sync_data(self, edit_data):
        """
        Persist an edit payload sent by the Angular frontend.

        :param edit_data: 7-tuple of plain dicts/lists — the conference, the
            current question, its answers, and four image collections.
        :returns: result of ``analyse_conf`` on the updated conference.
        """
        conf, question, answers, images, qimages, ansimages, qexpimages = edit_data
        # Strip read-only relation keys before .update(); they may be absent,
        # hence the pop() defaults.  (Previously a bare ``except: pass``
        # swallowed *every* error here, hiding real failures.)
        conf.pop('items', None)
        conf.pop('specialities', None)
        conf_pk = conf.pop('pk')
        Conference.objects.filter(pk=conf_pk).update(**conf)
        question.pop('specialities', None)
        question.pop('items', None)
        Question.objects.filter(pk=question.pop('pk')).update(**question)
        for answer in answers:
            Answer.objects.filter(pk=answer.pop('pk')).update(**answer)
        for __, answers_images in ansimages.items():
            for answer_image in answers_images:
                AnswerImage.objects.filter(pk=answer_image.pop('pk')).update(**answer_image)
        for image in images:
            ConferenceImage.objects.filter(pk=image.pop('pk')).update(**image)
        for image in qimages:
            QuestionImage.objects.filter(pk=image.pop('pk')).update(**image)
        for image in qexpimages:
            QuestionExplainationImage.objects.filter(pk=image.pop('pk')).update(**image)
        return analyse_conf(Conference.objects.get(pk=conf_pk))

    @allow_remote_invocation
    def get_keywords(self, data):
        """
        Suggest item keywords that appear in the conference's full text.

        :param data: dict with the conference ``pk``.
        :returns: list of ``"keyword => item name"`` strings.
        """
        cf = Conference.objects.get(pk=data['pk'])
        txt = cf.get_all_txt()
        ret = []
        for item in Item.objects.all():
            for kw in item.kwords.all():
                # Keyword must appear as a whole word (non-word chars around it).
                if re.search(r'[^\w]' + kw.value + r'[^\w]', txt):
                    ret.append("{} => {}".format(kw.value, item.name))
                    break
        return ret
def ajax_switch_correction(request):
    """
    Ajax endpoint toggling whether a conference's correction is available.
    """
    enabled = request.GET['state'] == 'true'
    conf = request.user.created_confs.get(id=request.GET['conf_id'])
    conf.correction_dispo = enabled
    conf.save()
    return JsonResponse({'success': True})
def ajax_switch_for_sale(request):
    """
    Ajax endpoint toggling whether a conference is for sale.
    """
    for_sale = request.GET['state'] == 'true'
    conf = request.user.created_confs.get(id=request.GET['conf_id'])
    conf.for_sale = for_sale
    conf.save()
    return JsonResponse({'success': True})
class ConferenceListView(ListView):
    """Paginated list of conferences (all for superusers, own otherwise)."""
    model = Conference
    paginate_by = 10

    def get_queryset(self):
        user = self.request.user
        if user.is_superuser:
            qry = self.model.objects.order_by('-edition_progress')
        else:
            qry = self.model.objects.filter(owner=user).order_by('edition_progress')
        # Optional title search through the ?q= parameter.
        if self.request.GET.get('q', False):
            qry = qry.filter(title__icontains=self.request.GET['q'])
        qry = qry.prefetch_related('products__stats').prefetch_related('owner__sales')
        return qry.all()
class ConferenceCreateView(BBConferencierReqMixin, CreateView, FormView):
    """Create a conference along with its empty questions and answers."""
    template_name = 'confs/conference_form.html'
    form_class = ConferenceForm
    model = Conference

    def get_object(self, queryset=None):
        return Conference.objects.prefetch_related(
            "questions__answers",
            "questions__images",
        ).get(slug=self.kwargs['slug'])

    # send the user back to their own page after a successful update
    def get_redirect_url(self):
        return reverse('confs:detail',
                       kwargs={'slug': self.request.conf.slug})

    def get_success_url(self):
        return reverse('confs:update',
                       kwargs={'slug': self.object.slug})

    def form_valid(self, form):
        if not form.is_valid():
            return self.render_to_response(self.get_context_data(form=form))
        self.object = form.save(commit=False)
        self.object.owner = self.request.user
        self.object.save()
        # Pre-create the requested number of questions, five answers each.
        for question_index in range(form.cleaned_data['nb_questions']):
            question = Question.objects.create(conf=self.object, index=question_index)
            for answer_index in range(5):
                Answer.objects.create(question=question, index=answer_index)
        self.request.user.status = 'creat_conf_begin'
        self.request.user.conf_entam_url = get_full_url(self.request, 'confs:update', args=(self.object.slug,))
        self.request.user.save()
        return super().form_valid(form)
class ConferenceFinalView(ConferenceWritePermissionMixin, BBConferencierReqMixin, UpdateView):
    """
    Final edition step: attach items, set the price and publish.
    """
    template_name = 'confs/conference_final.html'
    form_class = ConferenceFinalForm
    model = Conference

    def get_success_url(self):
        return reverse('confs:test',
                       kwargs={'slug': self.object.slug})

    def get_object(self, queryset=None):
        """
        Update user status if required.
        """
        obj = super().get_object(queryset)
        if not obj.for_sale:
            self.request.user.status = 'creat_conf_100'
            self.request.user.save()
        else:
            self.request.user.conf_pub_url = get_full_url(self.request, 'confs:update', args=(obj.slug,))
            self.request.user.action = "publi"
            self.request.user.save()
        return obj

    def get_context_data(self, **kwargs):
        # Suggest items whose keywords appear as whole words (or at end of
        # text) in the conference text, excluding items already attached.
        items = []
        if self.object.items.count() == 0:
            self.object.set_suggested_items()
        else:
            txt = self.object.get_all_txt()
            for item in Item.objects.exclude(
                    id__in=self.object.items.all()
            ).all():
                for kw in item.kwords.all():
                    if re.search(r'[^\w]' + kw.value + r'([^\w]|$)', txt):
                        items.append(item)
                        break
        context = super().get_context_data(**{'items': items})
        return context

    def form_valid(self, form):
        """
        Create a Test instance for the user to be able to test his conference,
        and create a disqus thread with owner as thread creator.
        """
        if not Test.objects.filter(
                conf=self.object,
                student=self.request.user
        ).exists():
            Test.objects.create(conf=self.object, student=self.request.user)
        get_or_create_product(self.object)
        if self.object.for_sale:
            self.request.user.status = 'conf_publi_ok'
            self.request.user.save()
        if form.cleaned_data["free"]:
            self.object.price = 0
        else:
            self.object.price = Decimal('0.33')
        # Create disqus thread
        try:
            disqus = DisqusAPI(settings.DISQUS_SECRET_KEY, settings.DISQUS_PUBLIC_KEY)
            disqus.get("threads.create",
                       method='post',
                       forum='blousebrothers',
                       remote_auth=get_disqus_sso(self.object.owner),
                       title=self.object.title,
                       url=get_full_url(self.request, 'confs:result', args=(self.object.slug,)),
                       identifier=self.object.slug,
                       )
        except Exception as ex:
            # Bug fix: Exception.message does not exist on Python 3, so the
            # original ``ex.message`` raised AttributeError inside the
            # handler.  str(ex) keeps a pre-existing thread benign.
            if "thread already exists" in str(ex):
                pass
            else:
                logger.exception("PB CREATING THREAD")
        return super().form_valid(form)
class ConferenceEditView(ConferenceWritePermissionMixin, BBConferencierReqMixin, UpdateView):
    """Simple (non-Angular) edit form for a conference."""
    template_name = 'confs/conference_form.html'
    form_class = ConferenceFormSimple
    model = Conference

    def get_redirect_url(self):
        return reverse('confs:update', kwargs={'slug': self.request.conf.slug})

    def get_success_url(self):
        return reverse('confs:update', kwargs={'slug': self.object.slug})
class BuyedConferenceListView(LoginRequiredMixin, ListView):
    """Paginated list of the current user's tests (bought conferences)."""
    model = Test
    paginate_by = 10

    def get_queryset(self):
        tests = self.model.objects.filter(student=self.request.user).order_by('progress')
        # Optional title search through the ?q= parameter.
        if self.request.GET.get('q', False):
            tests = tests.filter(conf__title__icontains=self.request.GET['q'])
        return tests.all()
class TestUpdateView(TestPermissionMixin, JSONResponseMixin, UpdateView):
    """
    Main test view: the student answers a conference's questions here.
    """
    model = Test
    # No editable form fields: answers arrive through the remote-invocation
    # API (send_answers) below, not a Django form.
    fields = []

    def get(self, request, *args, **kwargs):
        # A finished test is read-only: send the student to the result page.
        self.object = self.get_object()
        if self.object.finished:
            return redirect(
                reverse('confs:result', kwargs={'slug': self.object.conf.slug})
            )
        else:
            return super().get(request, *args, **kwargs)

    def get_context_data(self, **kwargs):
        """
        Add time_taken var to context for timer initialization. time_taken units is
        milliseconds as angularjs timer needs.
        """
        tt = self.object.time_taken
        # Convert the stored time-of-day fields to elapsed milliseconds.
        time_taken = (tt.hour * 3600 + tt.minute * 60 + tt.second) * 1000 if tt else 0
        return super().get_context_data(time_taken=time_taken, **kwargs)

    def get_object(self, queryset=None):
        """
        TestAnswers are created here, when user starts his test.
        """
        conf = Conference.objects.get(slug=self.kwargs['slug'])
        # Conferences owned by "BlouseBrothers" get a Test row created on the
        # fly; for any other owner a Test must already exist (raises
        # Test.DoesNotExist otherwise).
        if conf.owner.username == "BlouseBrothers":
            test, __ = Test.objects.get_or_create(conf=conf, student=self.request.user)
        else:
            test = Test.objects.get(conf=conf, student=self.request.user)
        # First visit: pre-create one TestAnswer per question.
        if not test.answers.count():
            for question in conf.questions.all():
                TestAnswer.objects.create(question=question, test=test)
        return test

    @allow_remote_invocation
    def send_answers(self, data):
        """
        API to collect test's answers.
        :param data: {'answers': [0..4] => list of checked answers indexes,
                      'millis': time elapsed in milliseconds since test started,
                     }
        """
        answers = data["answers"]
        # 'millis' is total elapsed time; fromtimestamp(...).time() turns it
        # into a time-of-day object whose hour/minute/second fields are read
        # below.  NOTE(review): fromtimestamp is timezone-dependent — this
        # presumably assumes an epoch-aligned local timezone; confirm.
        time_taken = datetime.fromtimestamp(data["millis"]/1000.0).time()
        question = Question.objects.get(pk=answers[0]['question'])
        test = Test.objects.get(conf=question.conf, student=self.request.user)
        ta = TestAnswer.objects.get(test=test, question=question)
        # Store checked answer indexes as a comma-separated string.
        ta.given_answers = ','.join([str(answer['index']) for answer in answers if answer['correct']])
        if not ta.given_answers:
            raise Exception("NO ANSWER GIVEN")
        if test.time_taken:
            # Per-question time = running total minus the previous total
            # (both expressed in whole seconds).
            last_time = test.time_taken.hour * 3600 + test.time_taken.minute * 60 + test.time_taken.second
            this_time = time_taken.hour * 3600 + time_taken.minute * 60 + time_taken.second
            # NOTE(review): fromtimestamp yields a datetime here (not a
            # time as in the else-branch) — looks inconsistent; confirm the
            # model field accepts both.
            ta.time_taken = datetime.fromtimestamp(this_time - last_time)
        else:
            ta.time_taken = time_taken
        ta.save()
        test.time_taken = time_taken
        # Progress = share of questions with at least one recorded answer.
        test.progress = test.answers.exclude(given_answers='').count()/test.answers.count() * 100
        test.save()
        return {'success': True}
class TestResult(TestPermissionMixin, DetailView):
    """
    Result page of a test: score, correction and Disqus discussion.
    """
    model = Test

    def get_object(self, queryset=None):
        conf = Conference.objects.get(slug=self.kwargs['slug'])
        test = Test.objects.prefetch_related(
            "answers__question__answers",
            "answers__question__images",
        ).get(
            conf=conf, student=self.request.user)
        if not test.finished:
            # First visit after answering: flag the user as owing an
            # evaluation and remember the conference URL for follow-ups.
            self.request.user.status = "give_eval_notok"
            self.request.user.last_dossier_url = get_full_url(
                self.request,
                'confs:detail',
                args=(conf.slug,)
            )
            self.request.user.save()
        # set_score() runs on every visit — assumed idempotent; confirm.
        test.set_score()
        # Subscribe the student to the conference's Disqus thread; failures
        # here must never break the result page.
        try:
            disqus = DisqusAPI(settings.DISQUS_SECRET_KEY, settings.DISQUS_PUBLIC_KEY)
            thread = disqus.get('threads.details', method='get', forum='blousebrothers',
                                thread='ident:' + test.conf.slug)
            disqus.post('threads.subscribe',
                        method='post',
                        thread=thread['id'],
                        remote_auth=get_disqus_sso(test.student),
                        )
        except Exception:
            # Bug fix: narrowed from a bare ``except:`` which also caught
            # SystemExit/KeyboardInterrupt.
            logger.exception("Student Disqus thread subscription error")
        return test

    def get(self, *args, **kwargs):
        conf = Conference.objects.get(slug=self.kwargs['slug'])
        product = Product.objects.get(conf=conf)
        try:
            return super().get(*args, **kwargs)
        except ObjectDoesNotExist:
            # No test for this user yet: send them to the product page.
            return redirect(product.get_absolute_url())

    def get_context_data(self, **kwargs):
        ctx = super().get_context_data(**kwargs)
        try:
            product = Product.objects.get(conf=self.object.conf)
            ctx.update(product=product)
        except ObjectDoesNotExist:
            # Bug fix: narrowed from a bare ``except:`` — only a missing
            # product should fall back to None; other errors now surface.
            ctx.update(product=None)
        return ctx
class TestResetView(TestPermissionMixin, UpdateView):
    """Let subscribers restart a finished test from scratch."""
    model = Test
    fields = ['id']

    def form_valid(self, form):
        # Only users with a full-access subscription may actually reset.
        if self.request.user.has_full_access:
            self.object.finished = False
            self.object.progress = 0
            self.object.answers.all().delete()
            self.object.save()
        return super().form_valid(form)

    def get_success_url(self):
        if self.request.user.has_full_access:
            return reverse('confs:test',
                           kwargs={'slug': self.object.conf.slug})
        # Non-subscribers are invited to subscribe instead.
        messages.info(self.request,
                      "Merci de souscrire à un abonnement pour pouvoir recommencer un dossier.")
        return reverse('users:subscription', kwargs={'sub_id': 0})

    def get_object(self, queryset=None):
        conf = Conference.objects.get(slug=self.kwargs['slug'])
        return Test.objects.get(conf=conf, student=self.request.user)
class RefundView(TestPermissionMixin, UpdateView):
    """Let a student request a refund for a bought conference by email."""
    model = Test
    form_class = RefundForm
    template_name = 'confs/refund_form.html'
    # Plain-text body of the notification mail sent to the site admins.
    email_template = '''
DEMANDE DE REMBOURSEMENT DE CONF
Nom : {}
Email : {}
Lien : {}
Conf : {}
Msg : {}'''

    def form_valid(self, form):
        # Notify the admins with enough context to handle the request.
        body = self.email_template.format(
            self.request.user.username,
            self.request.user.email,
            get_full_url(self.request, 'dashboard:user-detail', args=(self.request.user.id,)),
            get_full_url(self.request, 'confs:detail', args=(self.object.conf.slug,)),
            form.cleaned_data['msg'],
        )
        mail_admins('Demande de remboursement', body)
        return super().form_valid(form)

    def get_object(self, queryset=None):
        conf = Conference.objects.get(slug=self.kwargs['slug'])
        return Test.objects.get(conf=conf, student=self.request.user)

    def get_success_url(self):
        messages.success(self.request, "Ta demande à bien été transmise, on te recontacte très vite.")
        return reverse('catalogue:index')
| 36.34103
| 115
| 0.62219
| 2,319
| 20,460
| 5.3605
| 0.169038
| 0.046899
| 0.041026
| 0.015928
| 0.385649
| 0.339313
| 0.305044
| 0.25187
| 0.202317
| 0.189285
| 0
| 0.00334
| 0.26828
| 20,460
| 562
| 116
| 36.405694
| 0.826999
| 0.060704
| 0
| 0.369021
| 0
| 0
| 0.077301
| 0.013455
| 0
| 0
| 0
| 0
| 0
| 1
| 0.08656
| false
| 0.004556
| 0.059226
| 0.015945
| 0.341686
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
1
| 0
|