content
stringlengths 0
1.05M
| origin
stringclasses 2
values | type
stringclasses 2
values |
|---|---|---|
# Read one line containing two space-separated integers and print their sum.
first, second = (int(token) for token in input().split(' '))
print(str(first + second))
|
nilq/baby-python
|
python
|
# Source: https://stackoverflow.com/questions/9282967/how-to-open-a-file-using-the-open-with-statement
def filter(txt, oldfile, newfile):
    '''\
    Read a list of names from a file line by line into an output file.
    If a line begins with a particular name, insert a string of text
    after the name before appending the line to the output file.

    NOTE(review): `filter` shadows the built-in of the same name.
    Returns None; the result is the written output file.
    '''
    # Fix: the output file was opened without an explicit encoding while the
    # input is read as utf-8, which could mis-encode text on platforms whose
    # default encoding is not utf-8. Both files now use utf-8.
    with open(newfile, 'w', encoding='utf-8') as outfile, \
            open(oldfile, 'r', encoding='utf-8') as infile:
        for line in infile:
            if line.startswith(txt):
                # Keep only the matched name and append the praise text,
                # discarding the rest of the original line.
                line = line[0:len(txt)] + ' - Truly a great person!\n'
            outfile.write(line)
# input the name you want to check against
text = input('Please enter the name of a great person: ')
# NOTE(review): `filter` (defined above) returns None, so `letsgo` is always
# None; the call is used only for its file-writing side effect.
letsgo = filter(text,'Spanish', 'Spanish2')
|
nilq/baby-python
|
python
|
"""
SLM test for the cortex-to-hippocampus connectivity for individual subfields
usage: $ python s16_cortex_testSLM.py LSUB
"""
import os, sys
import h5py
import numpy as np
from numpy import genfromtxt
# definde data directories
ddir = '../data/' # data dir
cordir = '../data/tout_cortex/'
odir = '../data/tout_group'
# final subject list after QC
subjlist = os.path.join(ddir, 'subjectListS900_QC_gr.txt'); # 709 subjects
f = open(subjlist); mylist = f.read().split("\n"); f.close()
mylist = mylist[:-1]
totnum = len(mylist)
labeling_file = '../data/tout_group/glasser.csv'
mylabel = genfromtxt(labeling_file)
print('We have now %i subjects... ' % totnum)
# subfield = 'LSUB'
subfield = sys.argv[1]
# here we go
C360_all = np.zeros((len(mylist), 360))
i = 0
for subjID in mylist:
subjsub= os.path.join(cordir, subjID + '_cortex_%s.h5' % (subfield))
with h5py.File(subjsub, "r") as f:
subjdata = np.array(f[subjID])
C360_all[i, :] = subjdata.T
i +=1
print(C360_all.shape, C360_all.mean(axis=0).max())
# labeling from 360 to 64k points
C64k_all = np.zeros((len(mylist), 64984))
for i in range(0, len(mylist)):
for j in range(1,360+1):
C64k_all[i, np.where(mylabel == j)] = C360_all[i,(j-1)]
print(C64k_all.shape, C64k_all.mean(axis=0).max())
from brainspace.datasets import load_conte69
from brainspace.mesh import mesh_elements
# load poly data for 64k surface (for the test & plotting)
surf_lh, surf_rh = load_conte69()
# write surface coordinates and triangles in a dictionary
lh_coord = np.array(mesh_elements.get_points(surf_lh)).T
rh_coord = np.array(mesh_elements.get_points(surf_rh)).T
lh_tri = np.array(mesh_elements.get_cells(surf_lh))
rh_tri = np.array(mesh_elements.get_cells(surf_rh))
D = {}
D['coord'] = np.concatenate((lh_coord, rh_coord), axis=1) # (3, 64984)
D['tri'] = np.concatenate((lh_tri, rh_tri + lh_coord.shape[1])) # (129960, 3)
# run slm
from brainstat.stats.terms import FixedEffect
from brainstat.stats.SLM import SLM
Y = C64k_all
contrast = np.ones((len(mylist),1))
term_ = FixedEffect(contrast)
model_ = 1 + term_
slm = SLM(model_, contrast = contrast)
slm.fit(Y)
Tvals = slm.t
Tvals.shape
h = h5py.File(os.path.join(odir, 'Tvals_cortex709_%s.h5' % (subfield)), 'w')
h.create_dataset('data', data = Tvals)
h.close()
|
nilq/baby-python
|
python
|
import csbuilder
from csbuilder.standard import Protocols, Roles, States


# Register the SFT protocol with csbuilder under protocol id 8888.
@csbuilder.protocols
class SFTProtocols(Protocols):
    SFT = 8888


# The two endpoint roles participating in the SFT protocol.
@csbuilder.roles(protocol=SFTProtocols.SFT)
class SFTRoles(Roles):
    SENDER = 0
    RECEIVER = 1


# State values for the sending side of the protocol state machine.
@csbuilder.states(SFTProtocols.SFT,SFTRoles.SENDER)
class SFTSenderStates(States):
    IGNORE = 0
    REQUEST = 1
    INFO = 2
    SEND = 3
    DENY = 4


# State values for the receiving side of the protocol state machine.
@csbuilder.states(SFTProtocols.SFT, SFTRoles.RECEIVER)
class SFTReceiverStates(States):
    IGNORE = 0
    ACCEPT = 1
    DENY = 2
    REQUIRE = 3
    SUCCESS = 4
    FAILURE = 5
    REQUEST = 6
|
nilq/baby-python
|
python
|
from django.core.management.base import BaseCommand
from django.db.models import Count
from project.pastebin.models import Country
class Command(BaseCommand):
    """Print the five countries whose users created the most pastes."""

    help = 'countries statistics'

    def handle(self, *args, **kwargs):
        # Rank countries by the number of pastes their users created and keep
        # only the id/count/title of the top five.
        countries = Country.objects.annotate(
            pastes_count=Count('users__pastes')).order_by(
            '-pastes_count')[:5].values('id', 'pastes_count', 'title')
        # Fix: the original used a list comprehension purely for its side
        # effect, building a throwaway list of Nones; a plain loop is the
        # idiomatic form.
        for country in countries:
            print(country)
|
nilq/baby-python
|
python
|
"""Hass cmd."""
def breaking_change(number, cli=False):
    """Create breaking_change list for HA.

    Scrapes the home-assistant.io release-notes post for release *number*
    and collects every line flagged "(breaking change)".

    Args:
        number: release number as a string (e.g. "100").
        cli: when True, also pretty-print the collected data as JSON.

    Returns:
        dict with "version" and "data" keys, or None when no release post
        matching *number* is found.
    """
    import json
    import os

    import requests
    from github import Github

    comp_base = "https://www.home-assistant.io/components/"
    pull_base = "https://github.com/home-assistant/home-assistant/pull/"
    github = Github(os.environ["GHTOKEN"])
    repo = github.get_repo("home-assistant/home-assistant.io")
    posts = repo.get_dir_contents("source/_posts", "current")

    # Find the release post whose file name ends with the requested number.
    this_post = None
    for post in posts:
        if "release" in post.path:
            name = post.path.split("/")[-1].split(".")[0]
            rel_number = name.split("-")[-1]
            if rel_number == number:
                this_post = post.html_url
    if this_post is None:
        print("Release for", number, "not found")
        return None

    url = this_post
    url_data = requests.get(url).text.split("\n")

    # Raw HTML lines that mention a breaking change.
    raw_changes = [line for line in url_data if "(breaking change)" in line]

    changes = {}
    changes["version"] = "0.{}.x".format(url.split(".markdown")[0].split("-")[-1])
    changes["data"] = []
    control = []  # pull-request numbers already processed (de-duplication)
    for change in raw_changes:
        # Fix: guard clause instead of `if ...: pass / else:`.
        if change[0:3] == "<p>":
            continue
        this = {}
        # Pull-request number, when the line links to one.
        # Fix: bare `except:` narrowed to IndexError (raised by the [1]
        # subscript when the marker is absent).
        try:
            pull = str(change)
            pull = pull.split("home-assistant/home-assistant/pull/")[1]
            pull = pull.split('"')[0]
        except IndexError:
            pull = None
        if pull not in control and pull is not None:
            prlink = "{}{}".format(pull_base, pull)
            # Component name, when the line links to its documentation source.
            try:
                split = '<a href="/home-assistant/home-assistant.io/blob/'
                split += "current/components/"
                component = str(change)
                component = component.split(split)[1]
                component = component.split('">')[0]
            except IndexError:
                component = None
            doclink = "{}{}".format(comp_base, component)
            # Human-readable description: the first <li> item when present.
            if len(change.split("<li>")) == 1:
                desc = change.split("<li>")[0]
            else:
                desc = change.split("<li>")[1]
            desc = desc.split("(<a ")[0]
            desc = desc.replace("</code>", "")
            desc = desc.replace('<code class="highlighter-rouge">', "")
            desc = desc.replace("\u2019", "`")
            desc = desc.replace("\u201c", "")
            desc = desc.replace("\u201d", "")
            this["pull_request"] = pull
            this["prlink"] = prlink
            this["component"] = component
            this["doclink"] = doclink
            this["description"] = desc
            changes["data"].append(this)
            control.append(pull)
    if cli:
        data = json.dumps(changes, sort_keys=True, indent=4, ensure_ascii=True)
        print(data)
    return changes
|
nilq/baby-python
|
python
|
import logging
import boto3
import os
import pandas as pd
import argparse
from datetime import datetime
from dataactcore.models.domainModels import DUNS
from dataactcore.utils.parentDuns import sam_config_is_valid
from dataactcore.utils.duns import load_duns_by_row
from dataactvalidator.scripts.loader_utils import clean_data
from dataactvalidator.health_check import create_app
from dataactcore.interfaces.db import GlobalDB
from dataactcore.logging import configure_logging
from dataactcore.config import CONFIG_BROKER
import dataactcore.utils.parentDuns
logger = logging.getLogger(__name__)

# CSV column header name in DUNS file
column_headers = [
    "awardee_or_recipient_uniqu",  # DUNS Field
    "registration_date",  # Registration_Date
    "expiration_date",  # Expiration_Date
    "last_sam_mod_date",  # Last_Update_Date
    "activation_date",  # Activation_Date
    "legal_business_name"  # Legal_Business_Name
]

# Location/business attributes filled in from the SAM service; the values are
# the defaults used when SAM returns no data for a DUNS.
props_columns = {
    'address_line_1': None,
    'address_line_2': None,
    'city': None,
    'state': None,
    'zip': None,
    'zip4': None,
    'country_code': None,
    'congressional_district': None,
    'business_types_codes': []
}

# Identity mapping: the file's columns already match the DUNS model columns.
column_mappings = {x: x for x in column_headers + list(props_columns.keys())}
def remove_existing_duns(data, sess):
    """ Remove rows from file that already have an entry in broker database.
        We should only update missing DUNS.

        Args:
            data: dataframe representing a list of duns
            sess: the database session

        Returns:
            a new dataframe with the DUNS removed that already exist in the database
    """
    duns_in_file = list(data['awardee_or_recipient_uniqu'].unique())
    # Fix: the query used to be assembled by string concatenation of the DUNS
    # values. A parameterized query avoids SQL-injection/quoting problems if a
    # value ever contains unexpected characters.
    sql_query = "SELECT awardee_or_recipient_uniqu FROM duns " \
                "WHERE awardee_or_recipient_uniqu = ANY(%(duns_list)s)"
    db_duns = pd.read_sql(sql_query, sess.bind, params={'duns_list': duns_in_file})
    missing_duns = data[~data['awardee_or_recipient_uniqu'].isin(db_duns['awardee_or_recipient_uniqu'])]
    return missing_duns
def clean_duns_csv_data(data):
    """Normalize a raw DUNS dataframe for insertion into the DUNS table.

    Thin convenience wrapper that applies the generic ``clean_data`` routine
    with this module's column mapping and no extra options.

    Args:
        data: dataframe of raw DUNS records

    Returns:
        a cleaned dataframe ready to be imported into the database
    """
    return clean_data(data, DUNS, column_mappings, {})
def batch(iterable, n=1):
    """Yield successive slices of *iterable*, each of length at most *n*.

    Args:
        iterable: the sequence to be batched (must support len() and slicing)
        n: the maximum size of each batch

    Yields:
        consecutive slices of *iterable* of size n (the last may be shorter)
    """
    total = len(iterable)
    for start in range(0, total, n):
        yield iterable[start:start + n]
def update_duns_props(df, client):
    """ Returns same dataframe with address data updated

        Args:
            df: the dataframe containing the duns data
            client: the connection to the SAM service

        Returns:
            a merged dataframe with the duns updated with location info from SAM
    """
    all_duns = df['awardee_or_recipient_uniqu'].tolist()
    columns = ['awardee_or_recipient_uniqu'] + list(props_columns.keys())
    prop_frames = [pd.DataFrame(columns=columns)]
    # SAM service only takes in batches of 100
    for duns_list in batch(all_duns, 100):
        duns_props_batch = dataactcore.utils.parentDuns.get_location_business_from_sam(client, duns_list)
        # Adding in blank rows for DUNS where location data was not found, so
        # the merge below keeps every incoming DUNS.
        added_duns_list = []
        if not duns_props_batch.empty:
            added_duns_list = [str(duns) for duns in duns_props_batch['awardee_or_recipient_uniqu'].tolist()]
        empty_duns_rows = []
        for duns in (set(added_duns_list) ^ set(duns_list)):
            empty_duns_row = props_columns.copy()
            empty_duns_row['awardee_or_recipient_uniqu'] = duns
            empty_duns_rows.append(empty_duns_row)
        # Fix: DataFrame.append was deprecated and removed in pandas 2.0;
        # pd.concat is the supported equivalent.
        duns_props_batch = pd.concat([duns_props_batch, pd.DataFrame(empty_duns_rows)])
        prop_frames.append(duns_props_batch)
    duns_props_df = pd.concat(prop_frames)
    return pd.merge(df, duns_props_df, on=['awardee_or_recipient_uniqu'])
def run_duns_batches(file, sess, client, block_size=10000):
    """ Updates DUNS table in chunks from csv file

        Args:
            file: path to the DUNS export file to use
            sess: the database connection
            client: the connection to the SAM service
            block_size: the size of the batches to read from the DUNS export file.
    """
    logger.info("Retrieving total rows from duns file")
    start = datetime.now()
    # First pass over the file only counts the rows (header row skipped).
    row_count = len(pd.read_csv(file, skipinitialspace=True, header=None, encoding='latin1', quotechar='"',
                                dtype=str, names=column_headers, skiprows=1))
    logger.info("Retrieved row count of {} in {} s".format(row_count, (datetime.now()-start).total_seconds()))
    # Second pass streams the file in block_size-row chunks.
    duns_reader_obj = pd.read_csv(file, skipinitialspace=True, header=None, encoding='latin1', quotechar='"',
                                  dtype=str, names=column_headers, iterator=True, chunksize=block_size, skiprows=1)
    for duns_df in duns_reader_obj:
        start = datetime.now()
        # Remove rows where awardee_or_recipient_uniqu is null
        duns_df = duns_df[duns_df['awardee_or_recipient_uniqu'].notnull()]
        duns_to_load = remove_existing_duns(duns_df, sess)
        duns_count = 0
        # Only update database if there are DUNS from file missing in database
        if not duns_to_load.empty:
            duns_count = duns_to_load.shape[0]
            # get address info for incoming duns
            duns_to_load = update_duns_props(duns_to_load, client)
            duns_to_load = clean_duns_csv_data(duns_to_load)
            models = {}
            load_duns_by_row(duns_to_load, sess, models, None)
        # Commit once per chunk so progress is persisted incrementally.
        sess.commit()
        logger.info("Finished updating {} DUNS rows in {} s".format(duns_count,
                                                                    (datetime.now()-start).total_seconds()))
def main():
    """ Loads DUNS from the DUNS export file (comprised of DUNS pre-2014). """
    parser = argparse.ArgumentParser(description='Adding historical DUNS to Broker.')
    parser.add_argument('-size', '--block_size', help='Number of rows to batch load', type=int,
                        default=10000)
    args = parser.parse_args()

    sess = GlobalDB.db().session
    client = sam_config_is_valid()

    logger.info('Retrieving historical DUNS file')
    start = datetime.now()
    if CONFIG_BROKER["use_aws"]:
        # A presigned URL lets pandas stream the CSV straight from S3.
        s3_client = boto3.client('s3', region_name=CONFIG_BROKER['aws_region'])
        duns_file = s3_client.generate_presigned_url('get_object', {'Bucket': CONFIG_BROKER['archive_bucket'],
                                                                    'Key': "DUNS_export_deduped.csv"}, ExpiresIn=10000)
    else:
        duns_file = os.path.join(CONFIG_BROKER["broker_files"], "DUNS_export_deduped.csv")

    if not duns_file:
        raise OSError("No DUNS_export_deduped.csv found.")
    logger.info("Retrieved historical DUNS file in {} s".format((datetime.now()-start).total_seconds()))

    try:
        run_duns_batches(duns_file, sess, client, args.block_size)
        # Fix: only log success when the run actually completed; the old code
        # logged "complete" even after an exception.
        logger.info("Updating historical DUNS complete")
    except Exception as e:
        logger.exception(e)
        sess.rollback()
    finally:
        # Fix: ensure the session is closed on both success and failure.
        sess.close()


if __name__ == '__main__':
    # Fix: the original created two nested application contexts via two
    # separate create_app() calls; a single context is sufficient.
    configure_logging()
    with create_app().app_context():
        main()
|
nilq/baby-python
|
python
|
from datetime import datetime, timedelta
from cymepy.common import DATE_FORMAT
import math
import os
class Solver:
    """Thin wrapper around CYME's load-flow solvers.

    Two simulation types are supported via project settings:
      * "Static" - a single load flow solved on demand through ``resolve``.
      * "QSTS"   - quasi-static time series advanced step-by-step through
                   ``increment``, optionally driven by CYME's internal
                   profile manager.
    """

    def __init__(self, cymepy, settings, logger):
        """Create the solver object and parse the simulation time window.

        Args:
            cymepy: the CYME python API module.
            settings: project settings dict (time step, window, sim type).
            logger: project logger instance.
        """
        self.Settings = settings
        self._Logger = logger
        self.cymepy = cymepy
        self._mStepRes = settings['project']['time_step_min']  # step size in minutes
        StartTimeMin = settings['project']['start_time']
        EndTimeMin = settings['project']['end_time']
        self._Time = datetime.strptime(StartTimeMin, DATE_FORMAT)
        self._StartTime = self._Time
        self._EndTime = datetime.strptime(EndTimeMin, DATE_FORMAT)
        if settings['project']["simulation_type"] == "QSTS":
            if self.Settings['profiles']["use_internal_profile_manager"]:
                # CYME manages load profiles itself; drive it one time point
                # at a time via "SingleTimeMode".
                self.solverObj = cymepy.sim.LoadFlowWithProfiles()
                self.solverObj.SetValue("SingleTimeMode", "Parameters.TimeParametersMode")
                self.loadflowSettings(cymepy.sim.LoadFlow())
            else:
                self.solverObj = cymepy.sim.LoadFlow()
                self.loadflowSettings(self.solverObj)
        elif settings['project']["simulation_type"] == "Static":
            self.solverObj = cymepy.sim.LoadFlow()
            self.loadflowSettings(self.solverObj)
        self._Logger.debug("Solver object created.")

    def loadflowSettings(self, lf):
        """Apply common load-flow configuration from project settings."""
        lf.SetValue('VoltageDropUnbalanced', 'ParametersConfigurations[0].AnalysisMode')
        lf.SetValue(self.Settings['project']["max_iter"],
                    'ParametersConfigurations[0].MaximumIterations')
        lf.SetValue(self.Settings['project']["error_tolerance"],
                    'ParametersConfigurations[0].VoltageTolerance')

    def increment(self):
        """Advance one QSTS time step and run the load flow.

        Raises:
            Exception: when called for a "Static" simulation.
        """
        if self.Settings['project']["simulation_type"] == "QSTS":
            if self.Settings['profiles']["use_profiles"]:
                if self.Settings['profiles']["use_internal_profile_manager"]:
                    # Point CYME's internal profile manager at the current time.
                    self.solverObj.SetValue(int(self._Time.timestamp()), "Parameters.SingleTime")
                    self.solverObj.Run()
                else:
                    self.solverObj.Run()
            self._Time = self._Time + timedelta(minutes=self._mStepRes)
            self._Logger.debug(f"CYMEPY time: {self._Time}")
        elif self.Settings['project']["simulation_type"] == "Static":
            # Bug fix: the original message said "QSTS mode" although this is
            # raised for Static simulations.
            raise Exception("'increment' method cannot be used in 'Static' mode")

    def resolve(self):
        """Re-run the load flow at the current time point."""
        self.solverObj.Run()
        self._Logger.debug(f"Resolving at time: {self._Time}")

    def SimulationSteps(self):
        """Return (number of steps, start time, end time) of the window."""
        Minutes = (self._EndTime - self._StartTime).total_seconds() / 60.0
        Steps = math.ceil(Minutes / self._mStepRes)
        return Steps, self._StartTime, self._EndTime

    def GetTotalSeconds(self):
        """Return seconds elapsed since the simulation start."""
        return (self._Time - self._StartTime).total_seconds()

    def GetDateTime(self):
        """Return the current simulation timestamp."""
        return self._Time
|
nilq/baby-python
|
python
|
from flask import Flask, render_template, request
from github_api import GithubUser
from pprint import pprint
app = Flask('git connect')
# username -> ([logins who "loved" this user], [mutual matches]) --
# presumably; verify against handle_data()/dislike_love() usage.
MATCHED_PROFILES = {}
# NOTE(review): appears unused in this file -- confirm before removing.
userororg = 'user'
# Currently logged-in GithubUser. Module-global session state: this app can
# only track one logged-in user at a time.
ghuser = None
@app.route('/')
def hello():
    """Render the landing page with an empty error banner."""
    landing_template = 'app/index.html'
    return render_template(landing_template, err='')
@app.route('/login', methods=['GET', 'POST'])
def handle_data():
    """Log a user in via GitHub credentials and show their profile."""
    global ghuser
    if request.method == 'POST':
        username = request.form['username']
        password = request.form['password']
        try:
            ghuser = GithubUser(username, password)
        except Exception:
            # Any failure while creating the GitHub client is treated as a
            # bad login.
            return render_template('app/index.html', err='Invalid auth')
        if MATCHED_PROFILES.get(username, None) is None:
            # First login: create empty (loved-by, matches) state.
            MATCHED_PROFILES[username] = ([], [])
        else:
            # Returning user: restore their existing matches.
            ghuser.active_matches = MATCHED_PROFILES[username][1]
        # , username=ghuser.username, location=ghuser.location, bio=ghuser.bio, repocount=ghuser.repo_count, l1=ghuser.lang_name[0], l2=ghuser.lang_name[1], l3=ghuser.lang_name[2]
        return render_template('app/profile.html', user_name=ghuser.username, location=ghuser.location, bio=ghuser.bio, repo_count=ghuser.repo_count, lang0=ghuser.lang_name[0][0], lang1=ghuser.lang_name[1][0], lang2=ghuser.lang_name[2][0], match_count=len(MATCHED_PROFILES[ghuser.username][1]), avatar_url=ghuser.avatar)
    elif request.method == 'GET':
        return render_template('app/index.html')
    else:
        return 'Please try again, this time using GET or POST'
@app.route('/profile')
def user():
    """Show the logged-in user's profile and their most recent match."""
    global ghuser
    if ghuser is None:
        return render_template('app/index.html', err='Please log in')
    pprint(MATCHED_PROFILES)
    if len(MATCHED_PROFILES[ghuser.username][1]) > 0:
        # Show the first mutual match and link to their GitHub page.
        match = MATCHED_PROFILES[ghuser.username][1][0]
        match_url = 'https://github.com/%s' % MATCHED_PROFILES[ghuser.username][1][0]
    else:
        match = ''
        match_url = '#'
    return render_template('app/profile.html', match=match, match_url=match_url, user_name=ghuser.username, location=ghuser.location, bio=ghuser.bio, repo_count=ghuser.repo_count, lang0=ghuser.lang_name[0][0], lang1=ghuser.lang_name[1][0], lang2=ghuser.lang_name[2][0], match_count=len(MATCHED_PROFILES[ghuser.username][1]), avatar_url=ghuser.avatar)
@app.route('/explore', methods=['GET', 'POST'])
def dislike_love():
    """Swipe endpoint: record a like/dislike, then show the next candidate."""
    global ghuser
    if ghuser is None:
        return render_template('app/index.html', err='Please log in')
    pprint(MATCHED_PROFILES)
    if request.method == 'POST':
        if request.form['submit'] == 'dislike':
            pass # do nothing
        elif request.form['submit'] == 'love':
            # matched user
            muser = ghuser.matches[ghuser.mindex-1]['login']
            if MATCHED_PROFILES.get(muser, None) is None:
                # First time anyone loves muser: record this user as a lover.
                MATCHED_PROFILES[muser] = ([ghuser.username], [])
            elif muser in MATCHED_PROFILES[ghuser.username][0]:
                # muser already loved us -> mutual match for both sides.
                MATCHED_PROFILES[muser][1].append(ghuser.username)
                MATCHED_PROFILES[ghuser.username][1].append(muser)
            # NOTE(review): when muser already exists but the like is not yet
            # mutual, this like is never appended to MATCHED_PROFILES[muser][0],
            # so a later reciprocal like cannot become a match -- confirm.
        else:
            return 'unvalid'
        match = ghuser.get_match()
        murl = 'https://github.com/%s' % match['login']
        return render_template("app/explore.html",murl=murl, followers=match['followers'], repos=match['repos'], avatar_url=match['avatar_url'], user_name=match['login'], location=ghuser.location)
    elif request.method == 'GET':
        match = ghuser.get_match()
        murl = 'https://github.com/%s' % match['login']
        return render_template("app/explore.html",murl=murl, followers=match['followers'], repos=match['repos'], avatar_url=match['avatar_url'], user_name=match['login'], location=ghuser.location)
    else:
        return 'BA MUI, GET sau POST'
if __name__ == '__main__':
    # Development server only; use a real WSGI server in production.
    app.run()
|
nilq/baby-python
|
python
|
import urllib2
import logging
from random import choice, randint
from os.path import exists
from time import sleep
from os import getenv
logging.basicConfig(
format='[%(asctime)s] %(levelname)s [%(name)s:%(lineno)s] %(message)s',
datefmt="%d/%b/%Y %H:%M:%S",
level=getenv('LOG_LEVEL', logging.DEBUG)
)
logger = logging.getLogger(__name__)
class EasyScrapper(object):
    """
    Simple and Fast Scrapper base object to implement dataset scrappers.
    It provides a set of method to make it faster and better.

    Support:
    - User Agents
    - Web Proxies

    NOTE: this module targets Python 2 (it relies on `urllib2`).
    """
    DEFAULT_USER_AGENTS = [
        'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/54.0.2840.71 Safari/537.36',
        'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:40.0) Gecko/20100101 Firefox/40.1',
        'Mozilla/5.0 (Windows NT 6.3; rv:36.0) Gecko/20100101 Firefox/36.0',
        'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_3) AppleWebKit/537.75.14 (KHTML, like Gecko) Version/7.0.3 Safari/7046A194A',
        'Mozilla/5.0 (iPad; CPU OS 6_0 like Mac OS X) AppleWebKit/536.26 (KHTML, like Gecko) Version/6.0 Mobile/10A5355d Safari/8536.25',
        'Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2228.0 Safari/537.36'
    ]
    DEFAULT_PROXIES = [
        '97.77.104.22:80',
        '188.213.143.119:8118',
        '47.88.137.179:8080',
        '12.41.141.10:8080'
    ]

    def __init__(self, proxies_filename='./proxies.txt', user_agents_filename='./user_agents.txt'):
        """
        Initialize the objects and all the needed resources.

        :param proxies_filename filename to the proxy list separated by new line
        :param user_agents_filename filename to the user agents list separated by new line
        """
        self.proxies = self.load_data_list(proxies_filename, self.DEFAULT_PROXIES)
        self.user_agents = self.load_data_list(user_agents_filename, self.DEFAULT_USER_AGENTS)

    def load_data_list(self, filename, defaults):
        """
        Load an array from a file that contains one value per line.

        :param filename the filename containing the list
        :param defaults the default list if the file does not exist.
        :returns the array with all the loaded elements (empty lines removed)
        """
        logger.info("Loading data from {}...".format(filename))
        all_data = []
        if exists(filename):
            with open(filename, 'r+') as fp:
                data = fp.read()
                all_data = filter(None, data.split("\n"))
        else:
            return defaults
        return all_data

    def save_data_list(self, filename, data):
        """
        Save a list into a file, one element per line.

        :param filename the filename to save/create/override
        :param data the list to save
        """
        logger.info("Saving data to {}...".format(filename))
        with open(filename, 'w+') as fp:
            fp.write("\n".join(data))

    def sleep(self, seconds_from, seconds_to):
        """
        Sleep a random number of seconds between seconds_from and seconds_to.

        :param seconds_from lower limit for the seconds to sleep
        :param seconds_to upper limit for the seconds to sleep
        """
        time_to_sleep = randint(seconds_from, seconds_to)
        logger.info("Going to sleep for {} seconds...".format(time_to_sleep))
        sleep(time_to_sleep)

    def download_data(self, url, referer='http://www.google.com/', use_proxy=False, retries=1):
        """
        Download all the data from the url faking the referer and user-agent,
        optionally through a random proxy, retrying on failure.

        :param url the url of the file to download
        :param referer the url to send as referer
        :param use_proxy if TRUE download through a random proxy from the proxy list
        :param retries number of attempts before giving up
        :returns the url data
        :raises Exception when every attempt fails
        """
        iteration = 1
        while iteration <= retries:
            try:
                the_proxy = choice(self.proxies)
                if use_proxy:
                    logger.info("Downloading {} through {} and retry {}/{} times.".format(url, the_proxy, iteration, retries))
                else:
                    logger.info("Downloading {} and retry {}/{} times.".format(url, iteration, retries))
                if use_proxy:
                    # Enable Proxies
                    urllib2.install_opener(
                        urllib2.build_opener(
                            urllib2.ProxyHandler({'http': the_proxy})
                        )
                    )
                req = urllib2.Request(url, headers={
                    'referer': referer,
                    'User-Agent': choice(self.user_agents)
                })
                data = urllib2.urlopen(req).read()
                if use_proxy:
                    # Disable all proxies
                    urllib2.install_opener(
                        urllib2.build_opener(
                            urllib2.ProxyHandler({})
                        )
                    )
                return data
            except Exception:
                iteration += 1
                logger.error("Download failed. Retry: {}".format(iteration))
        raise Exception("Download failed: {}".format(url))

    def start(self, *args, **kwargs):
        """
        Method to override and create all the needed logic you need.
        """
        # Bug fix: `raise NotImplemented` raises the NotImplemented singleton,
        # which is a TypeError in Python 3 and wrong in Python 2 as well; the
        # correct exception class is NotImplementedError.
        raise NotImplementedError
|
nilq/baby-python
|
python
|
"""
polarAWB.py
Copyright (c) 2022 Sony Group Corporation
This software is released under the MIT License.
http://opensource.org/licenses/mit-license.php
"""
import json
from pathlib import Path
import shutil
import numpy as np
from myutils.imageutils import MAX_16BIT, my_read_image, my_write_image
from myutils.datautils import macbeth_position_txt_parse, compute_gt_illum, calc_ang_error
import myutils.polarutils as plutil
import myutils.weighturils as weutil
import myutils.wbutils as wbutil
if __name__ == "__main__":
params = json.load(open("parameters.json", "r"))
input_path = Path("images").joinpath(params["input_folder"])
result_path = Path("results").joinpath(input_path.name)
result_path.mkdir(parents=True, exist_ok=True)
shutil.copy("parameters.json", result_path)
with open(input_path.joinpath("macbeth_position.txt"), "r") as f:
lines = f.readlines()
for line in lines:
scene_name, x, y, w, h = macbeth_position_txt_parse(line)
imean_path = input_path.joinpath("{}_imean.png".format(scene_name))
i000_path = input_path.joinpath("{}_i000.png".format(scene_name))
i045_path = input_path.joinpath("{}_i045.png".format(scene_name))
i090_path = input_path.joinpath("{}_i090.png".format(scene_name))
i135_path = input_path.joinpath("{}_i135.png".format(scene_name))
macbeth_path = input_path.joinpath("{}_macbeth.png".format(scene_name))
imean = my_read_image(imean_path) / MAX_16BIT
i000 = my_read_image(i000_path) / MAX_16BIT
i045 = my_read_image(i045_path) / MAX_16BIT
i090 = my_read_image(i090_path) / MAX_16BIT
i135 = my_read_image(i135_path) / MAX_16BIT
macbeth = my_read_image(macbeth_path)
s0, s1, s2 = plutil.calc_s0s1s2_from_fourPolar(i000, i045, i090, i135)
dolp = plutil.calc_dolp_from_s0s1s2(s0, s1, s2)
aolp = plutil.calc_aolp_from_s1s2(s1, s2)
# Weights
w_valid = weutil.valid_weight_fourPolar(i000, i045, i090, i135, th=params["valid_th"])
w_dolp = weutil.sigmoid(
np.mean(dolp, axis=2), alpha=params["w_dolp_a"], center=params["w_dolp_b"])
w_dolp_ach = weutil.rg_bg_sigmoid_weight_achromatic(
dolp, alpha=params["w_dolp_ach_a"], center=params["w_dolp_ach_b"], normalize=True)
w_aolp_ach = weutil.rg_bg_sigmoid_weight_achromatic_phase(
aolp, alpha=params["w_aolp_ach_a"], center=params["w_aolp_ach_b"])
w_dolp_ch = weutil.rg_bg_sigmoid_weight_chromatic(
dolp, alpha=params["w_dolp_ch_a"], center=params["w_dolp_ch_b"], normalize=True)
w_aolp_ch = weutil.rg_bg_sigmoid_weight_achromatic_phase(
aolp, alpha=params["w_aolp_ch_a"], center=params["w_aolp_ch_b"])
weight_achromatic = w_valid * w_dolp * w_dolp_ach * w_aolp_ach
weight_chromatic = w_valid * w_dolp * w_dolp_ch * w_aolp_ch
# WB.
illum_est = wbutil.polarAWB(dolp, imean, weight_achromatic, weight_chromatic, params["alpha"])
# Compute Error.
illum_gt = compute_gt_illum(macbeth, x, y, w, h)
err_deg = calc_ang_error(illum_est, illum_gt)
with open(result_path.joinpath("error.txt"), "a") as f2:
f2.write("{}'s Error: {:.3f}\n".format(scene_name, err_deg))
# Save White-balanced Images.
macbeth_wb = np.copy(imean)
polar_wb = np.copy(imean)
polar_wb[..., 0] /= illum_est[..., 0]
polar_wb[..., 2] /= illum_est[..., 2]
polar_wb = np.clip(polar_wb, 0, 1) * MAX_16BIT
my_write_image(result_path.joinpath("{}_PolarWB.png".format(scene_name)), polar_wb)
r_gain = illum_gt[1] / illum_gt[0]
b_gain = illum_gt[1] / illum_gt[2]
macbeth_wb[..., 0] *= r_gain
macbeth_wb[..., 2] *= b_gain
macbeth_wb = np.clip(macbeth_wb, 0, 1) * MAX_16BIT
my_write_image(result_path.joinpath("{}_MacbethWB.png".format(scene_name)), macbeth_wb)
|
nilq/baby-python
|
python
|
# !/usr/bin/env python
# -*- coding: utf-8 -*-
from collections import OrderedDict, defaultdict
from traceback import print_exc
import wx
from wx import EVT_MENU
from .Controls import CheckBox, RadioButton, Row, StaticText
class FormDialog(wx.Dialog):
    """Dialog shell that hosts a Form panel plus a row of action buttons."""

    def __init__(
        self,
        parent,
        panel=None,
        title="Unnamed Dialog",
        modal=False,
        sizes=(-1, -1),
        offset=None,
        gap=3,
        position=None,
        **kwargs
    ):
        wx.Dialog.__init__(
            self, parent, -1, title, style=wx.DEFAULT_DIALOG_STYLE | wx.RESIZE_BORDER
        )
        if panel is not None:
            self.SetTitle(title)
            # `panel` is a Form subclass; instantiate it with this dialog as parent.
            self.panel = panel(self, gap=gap, **kwargs)
            self.panel.SetSizeHints(*sizes)
            # Three-row layout: form panel, separator line, button row.
            ds = wx.GridBagSizer(self.panel.gap, self.panel.gap)
            ds.Add(self.panel, (0, 0), (1, 1), wx.EXPAND | wx.ALL, self.panel.gap)
            ds.Add(
                wx.StaticLine(self),
                (1, 0),
                (1, 1),
                wx.EXPAND | wx.RIGHT | wx.LEFT,
                self.panel.gap,
            )
            if "AddButtons" in self.panel.form:
                # Custom buttons: label -> wx id; a matching on<Label> method
                # on the panel is bound automatically.
                self.bs = wx.GridBagSizer()
                self.bs.AddGrowableCol(0)
                for col, (label, wx_id) in enumerate(self.panel.form["AddButtons"].items(), start=1):
                    button = wx.Button(self, label=label, id=wx_id)
                    self.bs.Add(button, (0, col))
                    if hasattr(self.panel, f"on{label}"):
                        self.Bind(wx.EVT_BUTTON, getattr(self.panel, f"on{label}"), id=wx_id)
            else:
                # Standard OK/Cancel button sizer (overridable via form["Buttons"]).
                self.bs = self.CreateButtonSizer(
                    self.panel.form.get("Buttons", wx.OK | wx.CANCEL)
                )
                self.Bind(wx.EVT_BUTTON, self.panel.onOk, id=wx.ID_OK)
                self.Bind(wx.EVT_BUTTON, self.panel.onClose, id=wx.ID_CANCEL)
            ds.Add(self.bs, (2, 0), (1, 1), wx.ALIGN_RIGHT | wx.ALL, self.panel.gap)
            ds.AddGrowableCol(0)
            ds.AddGrowableRow(0)
            self.SetSizerAndFit(ds)
            if position is None:
                self.Center()
            else:
                self.SetPosition(position)
            if offset:
                # Shift the dialog by `offset` pixels in both directions.
                newpos = map(lambda x: x + offset, self.GetPosition())
                self.SetPosition(wx.Point(*newpos))
            # Give focus to the first interactive (non-label/check/radio) element.
            for wrapper in self.panel.elements.values():
                if not isinstance(wrapper, (RadioButton, CheckBox, StaticText)):
                    wrapper.SetFocus()
                    break
            if modal:
                self.res = self.ShowModal()
            else:
                self.Show()

    def FocusNext(self):
        """Raise the next open FormDialog (if any), then close this one."""
        for child in reversed(wx.GetTopLevelWindows()[0].GetChildren()):
            if isinstance(child, FormDialog) and child is not self:
                child.Raise()
                break
        self.Destroy()
class Form(wx.Panel):
    """Declarative form panel driven by a `form` dict defined on subclasses."""

    # Flags for containers.
    D = DEFAULT_FLAGS = 0     # plain section
    G = GROWABLE = 1          # section stretches with the dialog
    NC = NO_CONTAINER = 2     # no StaticBox frame around the section
    R = RIGHT_ALIGN = 4       # right-align contents
    VC = VERTICAL_ENTER = wx.EXPAND | wx.ALL  # default sizer flags for sections
def __init__(
    self, parent=None, id=-1, gap=3, sizes=(-1, -1), *args
):  # @ReservedAssignment
    """Build the panel from the subclass's `form` definition, if present."""
    wx.Panel.__init__(self, parent, id)
    self.SetSizeHints(*sizes)
    self.gap = gap
    self.elements = OrderedDict([])   # element name -> widget wrapper
    self.ATables = defaultdict(list)  # element name -> accelerator entries
    if hasattr(self, "form"):
        # Before building verify that several required sections exist in the form
        # definition object.
        if "Defaults" not in self.form:
            self.form["Defaults"] = {}
        if "Disabled" not in self.form:
            self.form["Disabled"] = []
        if "Validators" not in self.form:
            self.form["Validators"] = {}
        if "Options" not in self.form:
            self.form["Options"] = {}
        # Allow sub classes to add their own values or defaults.
        self.loadDefaults()
        self.loadOptions()
        self.build()
        if sizes == (-1, -1):
            # No explicit size given: fit the parent to its best size.
            self.Parent.SetSize(self.Parent.GetBestVirtualSize())
        if "Title" in self.form and hasattr(parent, "SetTitle"):
            parent.SetTitle(self.form["Title"])
        self.bind()
def __iter__(self):
    """Yield ``(name, value)`` pairs for every element on the form."""
    for name in self.elements:
        yield name, self[name]
def __getitem__(self, key):
    """Return element *key*'s value translated to its machine form.

    Best-effort lookup: returns None when the element is missing or its
    widget cannot report a value.
    """
    try:
        return self.h2m(key, self.elements[key].GetValue())
    except Exception:
        # Fix: narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
        # are no longer swallowed; the best-effort behavior is preserved.
        return
def __setitem__(self, key, value=""):
    """Set element *key* to *value*, translated to its human-readable form."""
    try:
        return self.elements[key].SetValue(self.m2h(key, value))
    except Exception:
        # Fix: narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
        # are no longer swallowed; failures are still only logged.
        print_exc()
def HumanToMachine(self, name, value=""):
    """Map a displayed (human) *value* of element *name* to its stored form.

    Uses the reverse lookup table at form["Translations"][name][1]; the
    value passes through unchanged when no translation is defined.
    """
    if "Translations" in self.form and name in self.form["Translations"]:
        value = self.form["Translations"][name][1].get(value, value)
    return value

h2m = HumanToMachine
def MachineToHuman(self, name, value=""):
    """Map a stored (machine) *value* of element *name* to its displayed form.

    Uses the forward lookup table at form["Translations"][name][0]; the
    value passes through unchanged when no translation is defined.
    """
    if "Translations" in self.form and name in self.form["Translations"]:
        value = self.form["Translations"][name][0].get(value, value)
    return value

m2h = MachineToHuman
def Bind(self, evtType, evtFunc, evtSrc, call=False, *args, **kwargs):
    """
    I rewrote Bind a little bit to simplify binding events using the names
    that you assign to individual elements. The call signature is the
    same, and it only triggers when you pass the *wrong* type argument
    as the event source, so it shouldn't affect existing Bind calls.
    """
    # A string event source is resolved to the named form element.
    if isinstance(evtSrc, str):
        evtSrc = self.elements[evtSrc]
    # if isinstance(evtType, wx.CommandEvent):
    evtSrc.Bind(evtType, evtFunc)
    # else:
    #     super(Form, self).Bind(evtType, evtFunc, evtSrc, *args, **kwargs)
    if call:
        # Invoke the handler immediately (with no event) to initialize state.
        evtFunc()
def Accel(self, key, func, elem, kind=wx.ACCEL_NORMAL):
    """
    This convenience function is provided to simplify Accelerator Table
    creation. It builds Accelerator Tables over repeated calls for
    the windows indicated by `elem`. The tables will be set in the
    bind method (the default behavior).
    """
    # Entries are only collected here; Form.bind() installs them.
    self.ATables[elem].append((kind, key, func))
def build(self):
    """
    The Build Method automates sizer creation and element placement by parsing
    a properly constructed object.
    """
    # The Main Sizer for the Panel.
    panelSizer = wx.BoxSizer(wx.VERTICAL)
    # Pass the outermost Parts and the container to the OrderedDict Parser.
    self.parseContainer(self.form["Parts"], panelSizer)
    self.SetSizerAndFit(panelSizer)
def bind(self):
    """Hook up close handling and install any accelerator tables queued
    by Accel(); called once after build()."""
    # Attempt to accommodate non-dialog parents.
    if not isinstance(self.Parent, FormDialog):
        self.Parent.Bind(wx.EVT_CLOSE, self.onClose)
    for name, table in self.ATables.items():
        if table:
            at = []
            for kind, key, func in table:
                # NOTE(review): the key code doubles as the wx command id
                # here — confirm ids cannot collide across entries.
                at.append((kind, key, key))
                EVT_MENU(self.elements[name], key, func)
            self.elements[name].SetAcceleratorTable(wx.AcceleratorTable(at))
def parseContainer(self, container, outerSizer, pos=None, span=None):
    """Recursively lay out an OrderedDict of named sections.

    Each (title, blocks) item becomes a vertical region; the collected
    regions are added to *outerSizer* — at grid position pos/span when
    the outer sizer is a GridBagSizer, appended otherwise.
    """
    sectionSizer = wx.BoxSizer(wx.VERTICAL)
    for section in container.items():
        region, proportion = self.parseSection(section)
        sectionSizer.Add(region, proportion, flag=Form.VC, border=self.gap)
    if isinstance(outerSizer, wx.GridBagSizer):
        outerSizer.Add(
            sectionSizer, pos, span, border=self.gap, flag=wx.ALIGN_CENTER_VERTICAL
        )
        # NOTE(review): 'proportion' is whatever the *last* section
        # returned — confirm growing the cell on that basis is intended.
        if proportion:
            row, col = pos
            outerSizer.AddGrowableRow(row)
            outerSizer.AddGrowableCol(col)
    else:
        outerSizer.Add(sectionSizer, 1, flag=Form.VC, border=self.gap)
def parseSection(self, section):
    """Turn one (title, blocks) pair into a sizer.

    The title may be a plain string or a (display, flags) tuple.
    Form.NC suppresses the StaticBox frame; Form.G makes the section
    greedy (proportion 1) in its parent.  Returns (sizer, proportion).
    """
    container, blocks = section
    if isinstance(container, tuple):
        display, flags = container
    else:
        # String instead of tuple.
        flags = Form.D
        display = container
    self.flags = flags
    sizerProportion = 1 if flags & Form.G else 0
    if flags & Form.NC:
        sectionSizer = wx.BoxSizer(wx.VERTICAL)
    else:
        box = wx.StaticBox(self, -1, display)
        sectionSizer = wx.StaticBoxSizer(box, wx.VERTICAL)
    for block in blocks:
        self.parseBlock(block, sectionSizer)
    return sectionSizer, sizerProportion
def parseBlock(self, block, sectionSizer):
    """
    The form structure is a list of rows (blocks) in the form. Each row
    consists of a single element, a row of elements, or a sub-grid of
    elements. These are represented by dictionaries, tuples, or lists,
    respectively and are each processed differently.
    """
    proportion = 0
    if isinstance(block, OrderedDict):
        # Nested container: recurse (it adds itself to sectionSizer).
        return self.parseContainer(block, sectionSizer)
    if isinstance(block, list):
        item = self.makeGrid(block)
    elif isinstance(block, (tuple, Row)):
        proportion = getattr(block, "proportion", proportion)
        item = self.makeRow(block)
    else:
        # A single widget declarator.
        proportion = block.proportion
        item = self.makeWidget(block)
    sectionSizer.Add(item, proportion, flag=Form.VC, border=self.gap)
def makeRow(self, fields):
    """
    In the form structure a tuple signifies a row of elements. These items
    will be arranged horizontally without dependency on other rows. Each
    item may provide a proportion property which can cause that element to
    expand horizontally to fill space.
    """
    rowSizer = wx.BoxSizer(wx.HORIZONTAL)
    for item in fields:
        self.parseBlock(item, rowSizer)
    return rowSizer
def makeGrid(self, rows):
    """
    In the form structure a list signifies a grid of elements (equal width
    columns, rows with similar numbers of elements, etc).
    """
    sizer = wx.GridBagSizer(0, 0)
    for row, fields in enumerate(rows):
        for col, field in enumerate(fields):
            # Each item may specify that its row or column 'grow' or expand to fill
            # the available space in the form. Spans or specific positions are also
            # possible.
            flags = getattr(field, "flags", wx.ALL)
            rowGrowable = getattr(field, "rowGrowable", False)
            colGrowable = getattr(field, "colGrowable", True)
            span = getattr(field, "span", (1, 1))
            # NOTE(review): the 'or row'/'or col' fallback means an explicit
            # rowpos/colpos of 0 is silently ignored — confirm intended.
            pos = (
                getattr(field, "rowpos", row) or row,
                getattr(field, "colpos", col) or col,
            )
            if isinstance(field, OrderedDict):
                self.parseContainer(field, sizer, pos, span)
            else:
                element = self.makeWidget(field)
                sizer.Add(
                    element,
                    pos,
                    span,
                    border=self.gap,
                    flag=wx.ALIGN_CENTER_VERTICAL | flags,
                )
            if (
                rowGrowable
                and row < sizer.GetRows()
                and not sizer.IsRowGrowable(row)
            ):
                sizer.AddGrowableRow(row)
            if (
                colGrowable
                and col < sizer.GetCols()
                and not sizer.IsColGrowable(col)
            ):
                sizer.AddGrowableCol(col)
    return sizer
def makeWidget(self, declarator):
    """
    This function actually creates the widgets that make up the form.
    Each element should provide a `make` method which takes as an argument
    it's parent, and returns a wx item (sizer, form element, etc).
    Other methods for each widget (defined with placeholders on
    the wxPlaceholder Class) are
        GetValue
        SetValue
        SetValidator
        SetOptions
    """
    # Attach the elements container to the declarator.
    declarator._elements = self.elements
    element = declarator.make(self)
    if declarator.name:
        # Register the declarator under its name for later lookup.
        self.elements[declarator.name] = declarator
    # Disable if requested.
    if declarator.name in self.form["Disabled"]:
        declarator.Enable(False)
    # Options need to exist early.
    if hasattr(declarator, "SetOptions"):
        declarator.SetOptions(self.form["Options"].get(declarator.name, []))
    # We need to use the existing value if there isn't one in defaults
    # to prevent StaticText's from ending up blank.
    value = self.form["Defaults"].get(declarator.name, declarator.GetValue())
    # Assign or populate any fields requiring it.
    declarator.SetValue(self.m2h(declarator.name, value))
    declarator.SetValidator(self.form["Validators"].get(declarator.name, None))
    return element
def loadDefaults(self):
    # Hook for subclasses: populate self.form["Defaults"] before build().
    pass
def loadOptions(self):
    # Hook for subclasses: populate self.form["Options"] before build().
    pass
def onOk(self, evt):
    """Treat OK as a close: let the event propagate, then run onClose."""
    evt.Skip()
    self.onClose(evt)
def onClose(self, evt):
    """Propagate the close event; when owned by a FormDialog, hand focus
    on to the next dialog in the chain."""
    evt.Skip()
    parent = self.Parent
    if isinstance(parent, FormDialog):
        parent.FocusNext()
def fieldValidate(self):
    """Run every registered validator and report failures in a dialog.

    Returns True when all validated fields pass, or when the form
    declares no "Validators" section at all.
    """
    if "Validators" not in self.form:
        return True
    validators = self.form["Validators"]
    success = True
    messages = []
    for name, field in self.elements.items():
        if name not in validators:
            continue
        ok, msgs = field.Validate()
        if not ok:
            success = False
            messages.extend(msgs)
    if messages:
        text = "\r\n".join(messages)
        wx.MessageDialog(self, text, "Form Field Error", wx.OK).ShowModal()
    return success
if __name__ == "__main__":
    # Demo driver: opens each demo form in sequence (FormDialog is modal,
    # so each one blocks until dismissed).
    from src.pyform.Demos import (
        DemoForm,
        DemoFormGrowable,
        DemoNested,
        DemoNestedHorizontal,
        ComplicatedDemo,
        ComprehensiveDemo,
        AlternateDeclaration,
        GridDemos,
        DemoLeftStacked,
        NonDialog,
    )
    # NOTE(review): wx.PySimpleApp is the classic (2.8-era) wx API and is
    # removed in modern wxPython — confirm the targeted wx version.
    app = wx.PySimpleApp()
    f = wx.Frame(None)
    NonDialog(f)
    f.Show()
    FormDialog(parent=f, panel=DemoForm)
    FormDialog(parent=f, panel=DemoFormGrowable)
    FormDialog(parent=f, panel=DemoNested)
    FormDialog(parent=f, panel=DemoNestedHorizontal)
    FormDialog(parent=f, panel=ComplicatedDemo)
    FormDialog(parent=f, panel=ComprehensiveDemo)
    FormDialog(parent=f, panel=AlternateDeclaration)
    FormDialog(parent=f, panel=GridDemos)
    FormDialog(parent=f, panel=DemoLeftStacked, gap=1)
    app.MainLoop()
|
nilq/baby-python
|
python
|
'''
Integration Test for creating KVM VM with all nodes shutdown and recovered.
@author: Quarkonics
'''
import zstackwoodpecker.test_util as test_util
import zstackwoodpecker.test_state as test_state
import zstackwoodpecker.test_lib as test_lib
import zstackwoodpecker.operations.resource_operations as res_ops
import zstackwoodpecker.zstack_test.zstack_test_vm as test_vm_header
import test_stub
import time
import os
vm = None
def test():
    """Power off both ZStack management nodes, recover them, and verify
    HA can be re-installed and a VM created afterwards.

    Reads node credentials/addresses from the environment: nodeUserName,
    nodePassword, zstackHaVip, node1Ip, node2Ip, nodeRecoverScript.
    """
    global vm
    cmd = "init 0"
    host_username = os.environ.get('nodeUserName')
    host_password = os.environ.get('nodePassword')
    zstack_ha_vip = os.environ.get('zstackHaVip')
    node1_ip = os.environ.get('node1Ip')
    test_util.test_logger("shutdown node: %s" % (node1_ip))
    rsp = test_lib.lib_execute_ssh_cmd(node1_ip, host_username, host_password, cmd, 180)
    node2_ip = os.environ.get('node2Ip')
    test_util.test_logger("shutdown node: %s" % (node2_ip))
    rsp = test_lib.lib_execute_ssh_cmd(node2_ip, host_username, host_password, cmd, 180)
    # Bring both nodes back with the external recovery script.
    test_util.test_logger("recover node: %s" % (node1_ip))
    os.system('bash -ex %s %s' % (os.environ.get('nodeRecoverScript'), node1_ip))
    test_util.test_logger("recover node: %s" % (node2_ip))
    os.system('bash -ex %s %s' % (os.environ.get('nodeRecoverScript'), node2_ip))
    test_util.test_dsc('Delete /var/lib/zstack/ha/ha.yaml, recover ha with zstack-ctl recover_ha, expect to fail')
    cmd = "rm /var/lib/zstack/ha/ha.yaml"
    rsp = test_lib.lib_execute_ssh_cmd(node1_ip, host_username, host_password, cmd, 180)
    if not rsp:
        # first node did not respond; try the second one
        rsp = test_lib.lib_execute_ssh_cmd(node2_ip, host_username, host_password, cmd, 180)
    cmd = "zstack-ctl recover_ha"
    rsp = test_lib.lib_execute_ssh_cmd(node1_ip, host_username, host_password, cmd, 180)
    if not rsp:
        rsp = test_lib.lib_execute_ssh_cmd(node2_ip, host_username, host_password, cmd, 180)
    # Without ha.yaml, recover_ha is expected to fail.
    if rsp == False:
        test_util.test_logger("Cannot recover ha without /var/lib/zstack/ha/ha.yaml when use zstack-ctl recover_ha, expect to False")
    else:
        test_util.test_fail('Expect to False, but get the different result when recover ha without /var/lib/zstack/ha/ha.yaml by using zstack-ctl recover_ha')
    test_util.test_dsc('Recover with zstack-ctl install_ha, expect to pass')
    cmd = "zstack-ctl install_ha --host1-info %s:%s@%s --host2-info %s:%s@%s --vip %s --recovery-from-this-host" % \
        (host_username, host_password, node1_ip, host_username, host_password, node2_ip, zstack_ha_vip)
    rsp = test_lib.lib_execute_ssh_cmd(node1_ip, host_username, host_password, cmd, 180)
    if not rsp:
        rsp = test_lib.lib_execute_ssh_cmd(node2_ip, host_username, host_password, cmd, 180)
    # Give the management services time to come back up before probing.
    time.sleep(180)
    test_stub.exercise_connection(600)
    vm = test_stub.create_basic_vm()
    vm.check()
    vm.destroy()
    test_util.test_pass('Create VM Test Success')
#Will be called only if exception happens in test().
def error_cleanup():
    """Best-effort teardown: destroy the test VM if test() left one behind.

    Called only when test() raises; any destroy failure is deliberately
    ignored so the original exception is not masked.
    """
    global vm
    if vm:
        try:
            vm.destroy()
        except Exception:  # was a bare except: keep best-effort, but let SystemExit/KeyboardInterrupt through
            pass
|
nilq/baby-python
|
python
|
# Seed the notebook with two user-supplied integers.
notebook_list = list()
first_entry = int(input("Enter your first value in the list: "))
second_entry = int(input("Enter your second value in the list: "))
notebook_list.append(first_entry)
notebook_list.append(second_entry)
# for i in range(5):
#     val=(int(input("enter a value ")))
#     arr.append(val)
#     print (arr)
# Menu prompt: 1 = add, 2 = delete, 3 = update (dispatched further below).
choise = input("If you want to add an entry press 1, If you want to delete an entry press 2, If you want to update an entry press 3 ")
def addition(entry):
    """Append *entry* to the global notebook_list, print and return it.

    Fix: the original ended with `return print(...)`, which always
    returns None; now the updated list is returned (callers that
    ignored the old None return are unaffected).
    """
    notebook_list.append(entry)
    print(notebook_list)
    return notebook_list
def removal(entry_index):
    """Delete the entry at position *entry_index* from notebook_list.

    Bug fix: the original called list.remove(notebook_list[entry_index]),
    which deletes the first occurrence of that *value* — the wrong entry
    whenever the list contains duplicates. `del` removes exactly the
    requested index.
    """
    del notebook_list[entry_index]
    print(notebook_list)
    return notebook_list
def update(val_old, val_new, notebook_list):
    """Replace every occurrence of *val_old* in *notebook_list* with
    *val_new* (in place) and print the result.

    Bug fix: the original used '==' (a no-op comparison) where '='
    (assignment) was intended, so the list was never actually modified.
    """
    for index in range(len(notebook_list)):
        if notebook_list[index] == val_old:
            notebook_list[index] = val_new
    print(notebook_list)
# Dispatch on the user's menu choice.
if choise == "1":
    new_entry = int(input("Enter the entry you wish to add"))
    addition(new_entry)
elif choise == "2":
    entry_index = int(input("Which entry number do you want to delete. NOTE: 0 is the first element"))
    removal(entry_index)
elif choise == "3":
    val_old = int(input("Type the entry you want to change!"))
    val_new = int(input("What do you want to change it with?"))
    # Bug fix: was update(val_new, val_new, ...) — the old value the user
    # typed was never passed, so nothing could ever match.
    update(val_old, val_new, notebook_list)
|
nilq/baby-python
|
python
|
#!/usr/bin/python
#partially based on: http://john.nachtimwald.com/2009/08/15/qtextedit-with-line-numbers/ (MIT license)
from __future__ import print_function
import sys, os, subprocess
from ..share import (Share, Signal, dbg_print, QtCore, QtGui, QtSvg, temp_dir)
##LMY: from highlighter import PythonHighlighter
class Editor(QtGui.QPlainTextEdit):
    """Plain-text editor widget (ABC-notation oriented) with a line-number
    gutter, autosave-on-lull, drag-and-drop loading and ctrl-wheel zoom."""
    headerText = 'Edit'
    prevCursorPos = -1
    currentLineColor = None
    editBecomesActive = Signal()
    specialSaveFileName = None
    fileName = None
    highlighter = None
    pointSizeF = 11.0
    cursorWidth = 8

    def __init__(self, book=None, **kw):
        self.book = book
        QtGui.QPlainTextEdit.__init__(self, **kw)
        self.lineNumberArea = self.LineNumberArea(self)
        self.viewport().installEventFilter(self)
        self.newDocument = True
        self.path = ''
        css = '''
QPlainTextEdit {
    font-family: monospace;
    font-size: 10;
    color: black;
    background-color: white;
    selection-color: white;
    selection-background-color: #437DCD;
}'''
        self.setStyleSheet(css)
        font = self.font()
        font.setPointSize(self.pointSizeF)
        self.setFont(font)
        self.setCursorWidth(self.cursorWidth)
        self.setWindowTitle('title')
        self.textChanged.connect(self.handleTextChanged)
        self.editBecomesActive.connect(self.handleTextChanged)
        self.setLineWrapMode(QtGui.QPlainTextEdit.NoWrap)
        self.cursorPositionChanged.connect(self.handleCursorMove)
        self.originalText = None
        self.haveLoadedFile = False

    def Quote(self):
        """Wrap the note letter (A-G) just left of the cursor in quotes."""
        tC = self.textCursor()
        c0 = '#'  # dummy non-match!
        while c0 not in "ABCDEFG":
            tC.movePosition(tC.Left, tC.KeepAnchor)
            sel = tC.selectedText()
            c0 = sel[0]
        tC.removeSelectedText()
        tC.insertText('"' + sel + '"')

    def handleCursorMove(self):
        # Reset the inactivity countdown; handleLull fires when it expires.
        self.book.counted = self.book.latency
        return

    def moveToRowCol(self, row=1, col=0):
        """Place the cursor at (row, col) (1-based row) and take focus."""
        block = self.document().findBlockByLineNumber(row - 1)
        desiredPosition = block.position() + col
        dbg_print('AbcEditor.moveToRowCol', row, col,
                  'desiredPosition', desiredPosition)
        tc = self.textCursor()
        tc.setPosition(desiredPosition)
        self.setTextCursor(tc)
        self.setFocus()
        if self.highlighter:
            self.highlighter.rehighlight()

    def highlight(self, tc):
        """Tell the book where the cursor has settled (row, col)."""
        # n.b. unfortunate name - no relation to highlighter!
        blockNumber = tc.blockNumber()
        # Common.blockNumber = blockNumber
        col0 = col = tc.positionInBlock()
        l = tc.block().length()
        dbg_print("autoTrack", l)
        blockText = tc.block().text()
        if 0:  # under review since new approach to syntax highlighting:
            while col and ((col >= (l - 1))
                    or not (str(blockText[col]).lower() in 'abcdefg^_=')):
                col -= 1
        dbg_print('editor.highlight: row=%d, col=%d' % (blockNumber, col))
        self.book.settledAt.emit(blockNumber + 1, col)
        if 0:  # under review since new approach to syntax highlighting:
            hi_selection = QtGui.QTextEdit.ExtraSelection()
            hi_selection.format.setBackground(self.palette().alternateBase())
            hi_selection.format.setProperty(QtGui.QTextFormat.FullWidthSelection,
                                            True)
            if self.currentLineColor is not None:
                hi_selection.format.setBackground(self.currentLineColor)
                # setFontUnderline(True)
            hi_selection.cursor = tc
            self.setExtraSelections([hi_selection])
            hi_selection.cursor.clearSelection()

    def handleTextChanged(self):
        # Any edit restarts the lull countdown.
        self.book.counted = self.book.latency
        dbg_print('handleTextChanged', self.book.counted)

    def handleLull(self, force=False):
        """Typing paused: autosave a modified document and re-highlight
        around the (possibly moved) cursor."""
        if force or self.document().isModified():
            dbg_print("autoSave")
            # (removed two dead statements that built an unused
            # 'autosave_'-joined name; the real target is built below)
            self.saveFile(
                fileName=temp_dir + '/autosave_' + os.path.split(self.fileName)[1])
        tc = self.textCursor()
        position = tc.position()
        if position != self.prevCursorPos:
            self.prevCursorPos = position
            self.highlight(tc)
        if self.highlighter:
            self.highlighter.rehighlight()

    def newFile(self, fileName='new.abc'):
        """Start an empty document under *fileName*."""
        self.clear()
        self.setFileName(fileName)
        self.book.fileLoaded.emit(self, fileName)

    def closeFile(self):
        self.clear()
        self.haveLoadedFile = False

    def cloneAnyFile(self):
        """Ask for an .abc file and open it in a brand-new process."""
        fileName = QtGui.QFileDialog.getOpenFileName(self,
                                                     "Choose a data file",
                                                     '', '*.abc')[0]
        dbg_print("cloneAnyFile 2", fileName)
        self.loadFile(fileName, newInstance=True)

    def restart(self):
        # NOTE(review): reloads the file and then exits the process; the
        # actual restart appears to rely on an external supervisor — confirm.
        self.loadFile(self.fileName)
        sys.exit(0)

    def loadFile(self, fileName, newInstance=None, row=1, col=0):
        """Load *fileName* into this editor (or spawn a new process for it
        when *newInstance* is true), then move the cursor to (row, col)."""
        dbg_print("Editor.loadFile", fileName, newInstance, row, col)
        if newInstance is None:
            newInstance = False  # self.haveLoadedFile
        if newInstance:
            dbg_print("need to create new instance for", fileName)
            sys.argv[1:] = fileName,
            subprocess.Popen(sys.argv)
            return
        self.setFileName(fileName)
        f = QtCore.QFile(fileName)
        if not f.open(QtCore.QFile.ReadOnly | QtCore.QFile.Text):
            return
        self.highlighter = None  # default, half-expecting to be overwritten by per-extension handler
        self.book.fileLoaded.emit(self, fileName)
        self.readAll(f)
        f.close()
        dbg_print("Loaded %s" % fileName)
        self.moveToRowCol(row, col)  # primarily to gain focus!
        # self.document().setModified(True)  # force rewrite of Score
        self.book.fileSaved.emit(fileName)  # ???

    def setFileName(self, fileName=None):
        """Remember *fileName* and chdir alongside it."""
        if fileName is not None:
            self.fileName = fileName
        title = "%s - %s" % (self.headerText, os.path.abspath(self.fileName))
        dbg_print(title)
        # self.book.dock.setWindowTitle(title)
        self.haveLoadedFile = True
        _dirname, _endname = os.path.split(self.fileName)
        if _dirname:
            os.chdir(_dirname)

    def readAll(self, f):
        """Replace the buffer with the full contents of open QFile *f*."""
        dbg_print('readAll', self, f)
        stream = QtCore.QTextStream(f)
        text = stream.readAll()
        self.setPlainText(text)

    def saveFile(self, fileName=None,):
        """Write the buffer to *fileName* (or the current file) and emit
        fileSaved; a non-None fileName marks a temporary/special save."""
        self.specialSaveFileName = fileName  # None if save is requested by user as opposed to temporary for score generation
        if fileName is None:
            fileName = self.fileName
        if fileName is None:
            return
        # Use a context manager so the handle is closed even on write errors
        # (the old 'if not out' guard was dead code: open() raises instead).
        with open(fileName, 'w') as out:
            self.writeAll(out)
        dbg_print("Saved %s " % fileName)
        self.document().setModified(False)
        self.book.fileSaved.emit(fileName)
        return

    def transpose(self):
        """Ask for a semitone offset, write a transposed copy via abc2abc,
        and open the new file."""
        # NOTE(review): getInteger / tuple-returning getSaveFileName follow
        # the legacy PyQt4-style API used throughout this module — confirm.
        semitones, ok = QtGui.QInputDialog.getInteger(self,
                                                      "Transpose (automatic clef change(s))",
                                                      "semitones (+/- for up/down:)", 0, -24, 24, 1)
        if not ok:
            return
        newFileName, ok = QtGui.QFileDialog.getSaveFileName(self, "write tansposed to file",
                                                            "transposed.abc",
                                                            "(*.abc)")
        if not ok:
            return
        transposedText = Share.abcRaft.abc2abc.process(self.fileName,
                                                       transpose=semitones)
        with open(newFileName, 'w') as transposed_file:
            transposed_file.write(transposedText)
        self.book.openThemAll((newFileName,))

    def writeAll(self, out):
        """Dump the whole buffer to the writable file object *out*."""
        text = self.toPlainText()
        # dbg_print('len(text)=', len(text))
        out.write(text)

    def reloadFile(self):
        dbg_print("ReloadFile", self.fileName)
        self.loadFile(self.fileName)

    def saveFileAs(self, fileName=None, show=True):
        """
        save the current panel contents to a new file.
        """
        if fileName is None:
            files = QtGui.QFileDialog.getSaveFileName(self,
                                                      "Save source to file as", '', '*.abc')
            if not files:
                return
            fileName = files[0]
        # NOTE(review): when show is False the new name is never recorded,
        # so saveFile() below still writes the *old* self.fileName — confirm.
        if show:
            self.setFileName(fileName)
        self.saveFile()
        self.book.setTabText(self.book.currentIndex(), os.path.split(fileName)[1])

    def resizeEvent(self, e):
        # Keep the gutter as tall as the editor.
        self.lineNumberArea.setFixedHeight(self.height())
        QtGui.QPlainTextEdit.resizeEvent(self, e)

    def eventFilter(self, object, event):
        # Repaint the gutter whenever the viewport receives any event.
        if object is self.viewport():
            self.lineNumberArea.update()
            return False
        # Bug fix: the unbound superclass call was missing 'self' and
        # raised a TypeError for any non-viewport event source.
        return QtGui.QPlainTextEdit.eventFilter(self, object, event)

    def keyPressEvent(self, event):
        """Reimplement Qt method"""
        key = event.key()
        # print (type(event))
        meta = event.modifiers() & QtCore.Qt.MetaModifier
        ctrl = event.modifiers() & QtCore.Qt.ControlModifier
        shift = event.modifiers() & QtCore.Qt.ShiftModifier
        plain = not (meta or ctrl or shift)
        if key == QtCore.Qt.Key_Insert and plain:
            # Toggle insert/overwrite; note the event still falls through
            # to the default handler below.
            self.setOverwriteMode(not self.overwriteMode())
        if key == QtCore.Qt.Key_Tab and plain and self.highlighter:
            return self.autoComplete(event)
        else:
            QtGui.QPlainTextEdit.keyPressEvent(self, event)

    def autoComplete(self, event):
        """Expand the snippet keyed by the text left of the cursor."""
        print('autoComplete')
        tc = self.textCursor()
        snippet = self.highlighter.getSnippet(tc)
        for i, piece in enumerate(snippet):
            tc.insertText(piece)
            if i == 0:
                # Remember where the first piece ended; the cursor is
                # parked there once all pieces are inserted.
                pos = tc.position()
        tc.setPosition(pos)
        self.setTextCursor(tc)

    def getSnippet(self, tc):  # ------ Drag and drop
        """Consume the word left of the cursor and return its snippet."""
        # NOTE(review): relies on self.snippets, which is not defined in
        # this class — presumably supplied by a subclass/mixin; confirm.
        col0 = col = tc.positionInBlock()
        block = tc.block()
        l = block.length()
        print("ABC get snippet", l)
        blockText = block.text()
        while col and ((col >= (l - 1))
                or not (str(blockText[col - 1]) in ' |!]')):
            tc.deletePreviousChar()
            col -= 1
        key = blockText[col:col0]
        print("autoComplete key %d:%d '%s'" % (col, col0, key))
        return self.snippets.get(key, ("!%s!" % key,))

    def dragEnterEvent(self, event):
        """Reimplement Qt method
        Inform Qt about the types of data that the widget accepts"""
        source = event.mimeData()
        if source.hasUrls():
            if 1:  # mimedata2url(source, extlist=EDIT_EXT):
                print("dragEnterEvent", "hasUrls")
                event.acceptProposedAction()
            else:
                event.ignore()
        elif source.hasText():
            print("dragEnterEvent", "hasText")
            event.acceptProposedAction()
        else:
            event.ignore()

    def dragMoveEvent(self, event):
        event.acceptProposedAction()

    def dropEvent(self, event):
        """Reimplement Qt method
        Unpack dropped data and handle it"""
        source = event.mimeData()
        if source.hasUrls():
            # paths = map(filenameFromUrl, source.urls())
            paths = [url.path() for url in source.urls()]
            print("dropEvent", "hasUrls", source.urls(), paths)
            self.book.filenamesDropped.emit(paths)
        elif source.hasText():
            print("dropEvent", "hasText")
            # editor = self.get_current_editor()
            # if editor is not None:
            #     editor.insert_text( source.text() )
        event.acceptProposedAction()

    def mousePressEvent(self, mouseEvent):
        if (mouseEvent.button() in (QtCore.Qt.LeftButton, QtCore.Qt.RightButton)):
            QtGui.QPlainTextEdit.mousePressEvent(self, mouseEvent)
        print(mouseEvent.button())
        return

    def wheelEvent(self, event):
        """Ctrl+wheel zooms the editor font; anything else scrolls."""
        modifiers = QtGui.QApplication.keyboardModifiers()
        if modifiers != QtCore.Qt.ControlModifier:
            return QtGui.QPlainTextEdit.wheelEvent(self, event)
        dbg_print("Editor.wheelEvent, delta = ", event.delta())
        new_sizeF = self.pointSizeF + (event.delta() / 100.0)
        if new_sizeF > 0:
            self.pointSizeF = new_sizeF
            # Bug fix: self.font() returns a *copy*, so mutating it in place
            # had no visible effect; apply the resized font with setFont().
            font = self.font()
            font.setPointSizeF(new_sizeF)
            self.setFont(font)
        event.accept()

    class LineNumberArea(QtGui.QWidget):
        """Gutter widget that paints line numbers beside the editor and
        boxes the current line's number in red."""

        def __init__(self, editor):
            QtGui.QWidget.__init__(self, editor)
            self.edit = editor
            self.highest_line = 0
            css = '''
QWidget {
    font-family: monospace;
    font-size: 10;
    color: black;
}'''
            self.setStyleSheet(css)

        def update(self, *args):
            """Resize the gutter to fit the widest number, then repaint."""
            width = QtGui.QFontMetrics(
                self.edit.document().defaultFont()).width(
                    str(self.highest_line)) + 10
            if self.width() != width:
                self.setFixedWidth(width)
                self.edit.setViewportMargins(width, 0, 0, 0)
            QtGui.QWidget.update(self, *args)

        def paintEvent(self, event):
            """Draw one number per visible block; track the highest line."""
            page_bottom = self.edit.viewport().height()
            font_metrics = QtGui.QFontMetrics(
                self.edit.document().defaultFont())
            current_block = self.edit.document().findBlock(
                self.edit.textCursor().position())
            painter = QtGui.QPainter(self)
            painter.fillRect(self.rect(), QtCore.Qt.lightGray)
            block = self.edit.firstVisibleBlock()
            viewport_offset = self.edit.contentOffset()
            line_count = block.blockNumber()
            painter.setFont(self.edit.document().defaultFont())
            while block.isValid():
                line_count += 1
                # The top left position of the block in the document
                position = self.edit.blockBoundingGeometry(block).topLeft() + viewport_offset
                # Check if the position of the block is out side of the visible area
                if position.y() > page_bottom:
                    break
                # We want the line number for the selected line to be bold.
                bold = False
                x = self.width() - font_metrics.width(str(line_count)) - 3
                y = round(position.y()) + font_metrics.ascent() + font_metrics.descent() - 1
                if block == current_block:
                    bold = True
                    font = painter.font()
                    font.setBold(True)
                    painter.setFont(font)
                    pen = painter.pen()
                    painter.setPen(QtCore.Qt.red)
                    painter.drawRect(0, y - 14, self.width() - 2, 20)
                    painter.setPen(pen)
                # Draw the line number right justified at the y position of the
                # line. 3 is a magic padding number. drawText(x, y, text).
                painter.drawText(x, y, str(line_count))
                # Remove the bold style if it was set previously.
                if bold:
                    font = painter.font()
                    font.setBold(False)
                    painter.setFont(font)
                block = block.next()
            self.highest_line = line_count
            painter.end()
            QtGui.QWidget.paintEvent(self, event)
|
nilq/baby-python
|
python
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import yaml
import json
import requests
from copy import deepcopy
from lxml import html
from dateutil.parser import ParserError, parse
# loading external configuration
CONFIG = yaml.safe_load(open(os.path.join(os.path.dirname(os.path.realpath(__file__)), 'config.yml')))
URL_TPL = "https://www.eliteprospects.com/search/player?q=%s"
PLR_TPL = "https://www.eliteprospects.com/player/"
DOB_URL_TPL = "dob=%s"
POS_URL_TPL = "position=%s"
def get_ep_info_for_player(plr):
    """
    Gets information from Eliteprospects for specified player.

    *plr* is a dict with at least 'first_name', 'last_name' and
    'player_id'; 'dob' and 'position' narrow the search when present.
    Returns an (ep_id, ep_dob) tuple, or (None, None) when no single
    unambiguous candidate is found.
    """
    full_name = " ".join((plr['first_name'], plr['last_name']))
    # searching by full name and (optionally) player dob first
    search_name = full_name.replace(" ", "+")
    url = URL_TPL % search_name
    # adding date of birth to search string (if available)
    if 'dob' in plr and plr['dob']:
        dob = parse(plr['dob']).date()
        url = "&".join((url, DOB_URL_TPL % dob))
    else:
        dob = None
    # adding position to search string (if available)
    if 'position' in plr:
        # only the position's first letter is used by the EP search
        url = "&".join((url, POS_URL_TPL % plr['position'][0]))
    trs = get_trs_from_ep_plr_search(url)
    # alternatively searching by last name and date of birth
    if not trs and dob:
        url = URL_TPL % plr['last_name']
        url = "&".join((url, DOB_URL_TPL % dob))
        trs = get_trs_from_ep_plr_search(url)
    if not trs:
        print("\t-> No Eliteprospects candidate found for %s [%d]" % (full_name, plr['player_id']))
        return None, None
    if len(trs) > 1:
        # ambiguous match: list all candidates and give up
        print("\t-> Multiple Eliteprospects candidates found for %s [%d]" % (full_name, plr['player_id']))
        for tr in trs:
            ep_id, ep_dob = get_ep_id_dob_from_tr(tr, plr, False)
            print("\t\t-> %s (%s)" % (ep_id, ep_dob))
        return None, None
    ep_id, ep_dob = get_ep_id_dob_from_tr(trs.pop(0), plr)
    return ep_id, ep_dob
def get_trs_from_ep_plr_search(url):
    """
    Gets table rows of interest from Eliteprospects player search page.

    Fetches *url* and returns the <tr> elements of the player result
    table (an empty list when no player matched).
    """
    r = requests.get(url)
    doc = html.fromstring(r.text)
    # The trailing space inside the class attribute matches the page markup.
    # NOTE(review): .pop(0) raises IndexError if the table is missing
    # entirely (e.g. page layout change) — confirm callers expect that.
    res_tbl = doc.xpath("//table[@class='table table-condensed table-striped players ']").pop(0)
    trs = res_tbl.xpath("tbody/tr/td[@class='name']/ancestor::tr")
    return trs
def get_ep_id_dob_from_tr(tr, plr, verbose=True):
    """
    Gets player id and date of birth from search result table row on Eliteprospects player search page.

    *tr* is an lxml table-row element from get_trs_from_ep_plr_search();
    returns (ep_id, ep_dob) where ep_dob is a date or None when the
    page's value cannot be parsed.
    """
    orig_full_name = " ".join((plr['first_name'], plr['last_name']))
    name_and_pos = tr.xpath("td[@class='name']/span/a/text()").pop(0)
    if verbose:
        print("[%d]: %s (%s) -> %s" % (plr['player_id'], orig_full_name, plr['position'], name_and_pos))
    ep_id = tr.xpath("td[@class='name']/span/a/@href").pop(0)
    # strip the site prefix so only the player slug/id remains
    ep_id = ep_id.replace(PLR_TPL, "")
    ep_dob = tr.xpath("td[@class='date-of-birth']/span[@class='hidden-xs']/text()").pop(0)
    try:
        ep_dob = parse(ep_dob).date()
    except ParserError:
        print("Unable to parse date of birth %s" % ep_dob)
        ep_dob = None
    return ep_id, ep_dob
if __name__ == '__main__':
    # Map every known player to an Eliteprospects id (and, where missing,
    # a date of birth), resuming from previously saved JSON data sets.
    all_players_src_path = os.path.join(CONFIG['tgt_processing_dir'], 'del_players.json')
    # Use context managers so every file handle is closed deterministically
    # (the originals leaked handles via bare open(...).read()/write()).
    with open(all_players_src_path) as f:
        players = json.loads(f.read())
    print("%d players loaded from repository of all players" % len(players))
    # loading possibly existing Eliteprospects data sets
    # player ids
    tgt_id_path = os.path.join(CONFIG['tgt_processing_dir'], 'ep_ids.json')
    if os.path.isfile(tgt_id_path):
        with open(tgt_id_path) as f:
            ep_ids = json.loads(f.read())
    else:
        ep_ids = dict()
    # dates of birth
    tgt_dob_path = os.path.join(CONFIG['tgt_processing_dir'], 'ep_dobs.json')
    if os.path.isfile(tgt_dob_path):
        with open(tgt_dob_path) as f:
            ep_dobs = json.loads(f.read())
    else:
        ep_dobs = dict()
    for plr in list(players.values())[:]:
        # skip players already mapped in a previous run
        if str(plr['player_id']) in ep_ids:
            continue
        # retrieving player id and date of birth from Eliteprospects
        ep_id, ep_dob = get_ep_info_for_player(plr)
        if ep_id:
            ep_ids[str(plr['player_id'])] = ep_id
        if ep_dob and 'dob' not in plr:
            ep_dobs[str(plr['player_id'])] = ep_dob
    # keep the JSON output stably sorted by player id
    ep_ids = dict(sorted(ep_ids.items()))
    ep_dobs = dict(sorted(ep_dobs.items()))
    with open(tgt_id_path, 'w') as f:
        f.write(json.dumps(ep_ids, indent=2, default=str))
    with open(tgt_dob_path, 'w') as f:
        f.write(json.dumps(ep_dobs, indent=2, default=str))
|
nilq/baby-python
|
python
|
#!/usr/bin/env python3
from putarm_ur3e_moveit_config.srv import GoToObj,GoToObjResponse
import sys
import copy
import rospy
import moveit_commander
import moveit_msgs.msg
import geometry_msgs.msg
from math import pi
from std_msgs.msg import String
from moveit_commander.conversions import pose_to_list
import tf
import numpy as np
def all_close(goal, actual, tolerance):
    """Return True when every value in *goal* is within *tolerance* of its
    counterpart in *actual*.

    @param: goal A list of floats, a Pose or a PoseStamped
    @param: actual A list of floats, a Pose or a PoseStamped
    @param: tolerance A float
    @returns: bool
    """
    if type(goal) is list:
        for index, target in enumerate(goal):
            if abs(actual[index] - target) > tolerance:
                return False
        return True
    if type(goal) is geometry_msgs.msg.PoseStamped:
        return all_close(goal.pose, actual.pose, tolerance)
    if type(goal) is geometry_msgs.msg.Pose:
        return all_close(pose_to_list(goal), pose_to_list(actual), tolerance)
    return True
class GotoObject(object):
    """ROS/MoveIt service node that moves the 'manipulator' group's end
    effector to a requested pose along a straight-line Cartesian path."""

    def __init__(self):
        super(GotoObject, self).__init__()
        # Bug fix: moveit_commander is the imported *module*, not an
        # attribute of self — 'self.moveit_commander' raised AttributeError.
        moveit_commander.roscpp_initialize(sys.argv)
        self.robot = moveit_commander.RobotCommander()
        self.scene = moveit_commander.PlanningSceneInterface()
        self.planning_group_name = "manipulator"
        self.planning_move_group = moveit_commander.MoveGroupCommander(self.planning_group_name)
        self.display_trajectory_publisher = rospy.Publisher('/move_group/display_planned_path',
                                                            moveit_msgs.msg.DisplayTrajectory,
                                                            queue_size=20)
        self.planning_frame = self.planning_move_group.get_planning_frame()
        self.eef_link = self.planning_move_group.get_end_effector_link()
        self.group_names = self.robot.get_group_names()
        s = rospy.Service('goto_object_service', GoToObj, self.goto_object)
        rospy.loginfo("Ready to goto")
        rospy.spin()

    def plan_cartesian_path(self, scale=1):
        """Plan (without executing) a fixed three-segment Cartesian demo
        path starting at the current pose; returns (plan, fraction)."""
        # Bug fix: self.move_group was never assigned anywhere; the
        # MoveGroupCommander is stored in self.planning_move_group.
        move_group = self.planning_move_group
        waypoints = []
        wpose = move_group.get_current_pose().pose
        wpose.position.z -= scale * 0.1  # First move down (z)
        wpose.position.y += scale * 0.2  # and sideways (y)
        waypoints.append(copy.deepcopy(wpose))
        wpose.position.x += scale * 0.1  # Second move forward/backwards in (x)
        waypoints.append(copy.deepcopy(wpose))
        wpose.position.y -= scale * 0.1  # Third move sideways (y)
        waypoints.append(copy.deepcopy(wpose))
        (plan, fraction) = move_group.compute_cartesian_path(
            waypoints,  # waypoints to follow
            0.01,       # eef_step
            0.0         # jump_threshold
        )
        # Note: We are just planning, not asking move_group to actually move the robot yet:
        return plan, fraction

    def display_trajectory(self, plan):
        """Publish *plan* so RViz can visualise the pending trajectory."""
        robot = self.robot
        display_trajectory_publisher = self.display_trajectory_publisher
        display_trajectory = moveit_msgs.msg.DisplayTrajectory()
        display_trajectory.trajectory_start = robot.get_current_state()
        display_trajectory.trajectory.append(plan)
        # Publish
        display_trajectory_publisher.publish(display_trajectory)

    def execute_plan(self, plan):
        """Execute a previously computed *plan*, blocking until done."""
        move_group = self.planning_move_group
        move_group.execute(plan, wait=True)

    def goto_object(self, req):
        """Service callback: interpolate a straight line from the current
        pose to req.pose and execute it as a Cartesian path."""
        goal_position = req.pose.position
        # goal_position.z -= 0.05
        current_pose = self.planning_move_group.get_current_pose().pose
        current_position = current_pose.position
        no_samples = 50
        x_linspace = np.linspace(current_position.x, goal_position.x, num=no_samples)
        y_linspace = np.linspace(current_position.y, goal_position.y, num=no_samples)
        z_linspace = np.linspace(current_position.z, goal_position.z, num=no_samples)
        waypoints = []
        new_pose = current_pose
        # Hold the requested orientation for the whole motion.
        new_pose.orientation = req.pose.orientation
        for i in range(no_samples):
            new_pose.position.x = x_linspace[i]
            new_pose.position.y = y_linspace[i]
            new_pose.position.z = z_linspace[i]
            waypoints.append(copy.deepcopy(new_pose))
        (plan, fraction) = self.planning_move_group.compute_cartesian_path(
            waypoints,  # waypoints to follow
            0.01,       # eef_step
            0.0,        # jump_threshold
            avoid_collisions=True)
        output = self.planning_move_group.execute(plan, wait=True)
        return GoToObjResponse(output)
|
nilq/baby-python
|
python
|
from os import environ
from http.server import BaseHTTPRequestHandler, HTTPServer
from urllib.parse import urlparse, parse_qs
import os
import json
from flask import Flask, jsonify, request
import requests
from flask_cors import CORS
def fetch_location():
    """Geocode the location named in the active Flask request.

    Reads the 'data' query parameter, queries the Google Geocoding API
    with the GEOCODE_API_KEY environment variable, and returns a
    Location built from the first result.
    """
    query = request.args.get('data')
    GEOCODE_API_KEY = os.environ.get('GEOCODE_API_KEY')
    url = f'https://maps.googleapis.com/maps/api/geocode/json?address={query}&key={GEOCODE_API_KEY}'
    locations = requests.get(url).json()
    # NOTE(review): assumes the API returned at least one result — an
    # empty 'results' list raises IndexError; confirm upstream handling.
    new_location = Location(query, locations['results'][0])
    return new_location
class Location():
    """One geocoding result: the raw search string plus the formatted
    address and coordinates picked out of a Google Geocoding result."""

    def __init__(self, query, query_result):
        coordinates = query_result['geometry']['location']
        self.search_query = query
        self.formatted_query = query_result['formatted_address']
        self.latitude = coordinates['lat']
        self.longitude = coordinates['lng']

    def serialize(self):
        """Return this location's attributes as a plain dict."""
        return vars(self)
|
nilq/baby-python
|
python
|
def emulate_catchup(replica, ppSeqNo=100):
    """Pretend a catch-up round just finished on *replica* at the given
    (viewNo, ppSeqNo) 3PC key."""
    caught_up_key = (replica.viewNo, ppSeqNo)
    replica.on_catch_up_finished(last_caught_up_3PC=caught_up_key,
                                 master_last_ordered_3PC=replica.last_ordered_3pc)
def emulate_select_primaries(replica):
    """Pretend primary selection happened with some other node as primary,
    then run the non-master post-view-change setup."""
    new_primary = 'SomeAnotherNode'
    replica.primaryName = new_primary
    replica._setup_for_non_master_after_view_change(replica.viewNo)
def expect_suspicious(replica, suspicious_code):
    """Patch replica.node so that any reported suspicion must carry
    *suspicious_code*; the exception is then re-raised for the caller."""
    def checking_handler(ex):
        assert ex.code == suspicious_code
        raise ex
    replica.node.reportSuspiciousNodeEx = checking_handler
def register_pp_ts(replica, pp, sender):
    """Record the receive timestamp of a PRE-PREPARE from *sender* under
    its (viewNo, ppSeqNo) 3PC key."""
    three_pc_key = (pp.viewNo, pp.ppSeqNo)
    pp_key = (pp, sender)
    timestamp = replica.get_time_for_3pc_batch()
    replica.pre_prepare_tss[three_pc_key][pp_key] = timestamp
|
nilq/baby-python
|
python
|
""" A tomography library for fusion devices
See:
https://github.com/ToFuProject/datastock
"""
# Built-in
import os
import subprocess
from codecs import open
# ... setup tools
from setuptools import setup, find_packages
# ... local script
import _updateversion as up
# == Getting version =====================================================
# Absolute path of the directory containing this setup.py; used below to
# locate the README file.
_HERE = os.path.abspath(os.path.dirname(__file__))
# Derive the package version via the local _updateversion helper script.
version = up.updateversion()
print("")
print("Version for setup.py : ", version)
print("")
# =============================================================================
# Get the long description from the README file
# Get the readme file whatever its extension (md vs rst)
_README_CANDIDATES = [
    ff
    for ff in os.listdir(_HERE)
    if len(ff) <= 10 and ff[:7] == "README."
]
if len(_README_CANDIDATES) != 1:
    # Explicit error instead of `assert`: assertions are stripped when
    # Python runs with -O, which would let a missing/ambiguous README
    # slip through silently.
    raise RuntimeError(
        f"Expected exactly one README.* file in {_HERE}, "
        f"found: {_README_CANDIDATES}"
    )
_README = _README_CANDIDATES[0]
with open(os.path.join(_HERE, _README), encoding="utf-8") as f:
    long_description = f.read()
# Tell PyPI how to render the long description.
if _README.endswith(".md"):
    long_description_content_type = "text/markdown"
else:
    long_description_content_type = "text/x-rst"
# =============================================================================
# =============================================================================
# Compiling files
# Package metadata and build configuration, passed straight to setuptools.
setup(
    name="datastock",
    version=f"{version}",
    # Use scm to get code version from git tags
    # cf. https://pypi.python.org/pypi/setuptools_scm
    # Versions should comply with PEP440. For a discussion on single-sourcing
    # the version across setup.py and the project code, see
    # https://packaging.python.org/en/latest/single_source_version.html
    # The version is stored only in the setup.py file and read from it (option
    # 1 in https://packaging.python.org/en/latest/single_source_version.html)
    use_scm_version=False,
    # Description of what library does
    description="A python library for generic class and data handling",
    long_description=long_description,
    long_description_content_type=long_description_content_type,
    # The project's main homepage.
    url="https://github.com/ToFuProject/datastock",
    # Author details
    author="Didier VEZINET",
    author_email="didier.vezinet@gmail.com",
    # Choose your license
    license="MIT",
    # See https://pypi.python.org/pypi?%3Aaction=list_classifiers
    classifiers=[
        # How mature is this project? Common values are
        # 3 - Alpha
        # 4 - Beta
        # 5 - Production/Stable
        "Development Status :: 4 - Beta",
        # Indicate who your project is intended for
        "Intended Audience :: Science/Research",
        "Topic :: Scientific/Engineering :: Physics",
        # Pick your license as you wish (should match "license" above)
        "License :: OSI Approved :: MIT License",
        # Specify the Python versions you support here. In particular, ensure
        # that you indicate whether you support Python 2, Python 3 or both.
        "Programming Language :: Python :: 3.6",
        "Programming Language :: Python :: 3.7",
        "Programming Language :: Python :: 3.8",
        # In which language most of the code is written ?
        "Natural Language :: English",
    ],
    # What does your project relate to?
    keywords="data analysis class container generic interactive plot",
    # You can just specify the packages manually here if your project is
    # simple. Or you can use find_packages().
    packages=find_packages(
        exclude=[
            "doc",
        ]
    ),
    # Alternatively, if you want to distribute just a my_module.py, uncomment
    # this:
    #   py_modules=["my_module"],
    # List run-time dependencies here.  These will be installed by pip when
    # your project is installed. For an analysis of "install_requires" vs pip's
    # requirements files see:
    # https://packaging.python.org/en/latest/requirements.html
    install_requires=[
        "numpy",
        "scipy",
        "matplotlib",
    ],
    python_requires=">=3.6",
    # List additional groups of dependencies here (e.g. development
    # dependencies). You can install these using the following syntax,
    # for example:
    # $ pip install -e .[dev,test]
    extras_require={
        "dev": [
            "check-manifest",
            "coverage",
            "pytest",
            "sphinx",
            "sphinx-gallery",
            "sphinx_bootstrap_theme",
        ]
    },
    # If there are data files included in your packages that need to be
    # installed, specify them here.  If using Python 2.6 or less, then these
    # have to be included in MANIFEST.in as well.
    # package_data={
    #     # If any package contains *.txt, *.rst or *.npz files, include them:
    #     '': ['*.txt', '*.rst', '*.npz'],
    #     # And include any *.csv files found in the 'ITER' package, too:
    #     'ITER': ['*.csv'],
    # },
    # package_data={},
    # include_package_data=True,
    # Although 'package_data' is the preferred approach, in some case you may
    # need to place data files outside of your packages. See:
    # http://docs.python.org/3.4/distutils/setupscript.html
    # installing-additional-files # noqa
    # In this case, 'data_file' will be installed into '<sys.prefix>/my_data'
    # data_files=[('my_data', ['data/data_file'])],
    # executable scripts can be declared here
    # They can be python or non-python scripts
    # scripts=[
    # ],
    # entry_points point to functions in the package
    # Theye are generally preferable over scripts because they provide
    # cross-platform support and allow pip to create the appropriate form
    # of executable for the target platform.
    # entry_points={},
    # include_dirs=[np.get_include()],
    py_modules=['_updateversion'],
)
|
nilq/baby-python
|
python
|
# -*- coding: utf-8 -*-
"""
Created on Tue Dec 24 20:47:24 2019
@author: elif.ayvali
"""
import pandas as pd
import numpy as np
import matplotlib.collections as mc
import matplotlib.pyplot as plt
from matplotlib.lines import Line2D
from matplotlib.patches import Rectangle
def create_uniform_grid(low, high, bins=(10, 10)):
    """Define a uniformly-spaced grid that can be used to discretize a space.

    Parameters
    ----------
    low : array_like
        Lower bounds for each dimension of the continuous space.
    high : array_like
        Upper bounds for each dimension of the continuous space.
    bins : tuple
        Number of bins along each corresponding dimension.

    Returns
    -------
    grid : list of array_like
        A list of arrays containing the (bins[dim] - 1) *interior* split
        points for each dimension; the outer edges low/high are excluded.
    """
    # np.linspace produces bins+1 edges; drop the first and last so only the
    # interior split points remain (what np.digitize expects).
    # BUG FIX: removed a leftover debug `print(grid)` statement.
    return [
        np.linspace(low[dim], high[dim], bins[dim] + 1)[1:-1]
        for dim in range(len(bins))
    ]
def discretize(sample, grid):
    """Map a continuous sample to per-dimension bin indices.

    Parameters
    ----------
    sample : array_like
        A single sample from the (original) continuous space.
    grid : list of array_like
        Interior split points for each dimension.

    Returns
    -------
    list of int
        One bin index per dimension of the sample.
    """
    # np.digitize returns the index of the bin the value falls into,
    # relative to that dimension's split points.
    return [int(np.digitize(value, splits)) for value, splits in zip(sample, grid)]
def discretize_tile(sample, grid):
    """Map a continuous sample to a tuple of per-dimension bin indices.

    Same as :func:`discretize` but returns a (hashable) tuple, suitable
    for indexing into a tiling's Q-table.
    """
    indices = (int(np.digitize(value, splits)) for value, splits in zip(sample, grid))
    return tuple(indices)
def visualize_samples(samples, discretized_samples, grid, low=None, high=None):
    """Visualize original and discretized samples on a given 2-dimensional grid.

    ``samples`` and ``discretized_samples`` are indexed as 2-D arrays
    (columns 0 and 1), so this works for 2-D spaces only.
    """
    fig, ax = plt.subplots(figsize=(10, 10))

    # Show grid
    ax.xaxis.set_major_locator(plt.FixedLocator(grid[0]))
    ax.yaxis.set_major_locator(plt.FixedLocator(grid[1]))
    ax.grid(True)

    # If bounds (low, high) are specified, use them to set axis limits
    if low is not None and high is not None:
        ax.set_xlim(low[0], high[0])
        ax.set_ylim(low[1], high[1])
    else:
        # Otherwise use first, last grid locations as low, high (for further mapping discretized samples)
        low = [splits[0] for splits in grid]
        high = [splits[-1] for splits in grid]

    # Map each discretized sample (which is really an index) to the center of corresponding grid cell
    grid_extended = np.hstack((np.array([low]).T, grid, np.array([high]).T))  # add low and high ends
    grid_centers = (grid_extended[:, 1:] + grid_extended[:, :-1]) / 2  # compute center of each grid cell
    # BUG FIX: np.stack requires an actual sequence of arrays; passing a
    # generator is an error on modern NumPy, so build the list explicitly.
    locs = np.stack([grid_centers[i, discretized_samples[:, i]] for i in range(len(grid))]).T

    ax.plot(samples[:, 0], samples[:, 1], 'o')  # plot original samples
    ax.plot(locs[:, 0], locs[:, 1], 's')  # plot discretized samples in mapped locations
    ax.add_collection(mc.LineCollection(list(zip(samples, locs)), colors='orange'))  # add a line connecting each original-discretized sample
    ax.legend(['original', 'discretized'])
def visualize_tilings(tilings):
    """Plot each tiling as a grid of vertical/horizontal lines.

    Returns the matplotlib Axes so callers can draw on it afterwards.
    """
    prop_cycle = plt.rcParams['axes.prop_cycle']
    colors = prop_cycle.by_key()['color']
    linestyles = ['-', '--', ':']
    legend_lines = []

    fig, ax = plt.subplots(figsize=(10, 10))
    for i, grid in enumerate(tilings):
        for x in grid[0]:
            l = ax.axvline(x=x, color=colors[i % len(colors)], linestyle=linestyles[i % len(linestyles)], label=i)
        for y in grid[1]:
            l = ax.axhline(y=y, color=colors[i % len(colors)], linestyle=linestyles[i % len(linestyles)])
        legend_lines.append(l)
    # BUG FIX: ax.grid('off') passes a truthy string, which actually
    # *enables* the default grid; pass False to hide it as intended.
    ax.grid(False)
    ax.legend(legend_lines, ["Tiling #{}".format(t) for t in range(len(legend_lines))], facecolor='white', framealpha=0.9)
    ax.set_title("Tilings")
    return ax  # return Axis object to draw on later, if needed
def create_tiling_grid(low, high, bins=(10, 10), offsets=(0.0, 0.0)):
    """Define a uniformly-spaced, offset grid for tile-coding a space.

    Parameters
    ----------
    low, high : array_like
        Lower/upper bounds for each dimension of the continuous space.
    bins : tuple
        Number of bins (tiles) along each corresponding dimension.
    offsets : tuple
        Amount by which each dimension's split points are shifted.

    Returns
    -------
    grid : list of array_like
        For each dimension, the (bins[dim] - 1) interior split points,
        shifted by that dimension's offset.

    Example
    -------
    low=[-1.0, -5.0], high=[1.0, 5.0], bins=(10, 10), offsets=(-0.1, 0.5)
    yields 9 split points per dimension, the first dimension shifted by
    -0.1 and the second by +0.5.
    """
    grid = []
    for dim in range(len(bins)):
        edges = np.linspace(low[dim], high[dim], bins[dim] + 1)
        grid.append(edges[1:-1] + offsets[dim])
    print("Tiling: [<low>, <high>] / <bins> + (<offset>) => <splits>")
    for l, h, b, o, splits in zip(low, high, bins, offsets, grid):
        print("    [{}, {}] / {} + ({}) => {}".format(l, h, b, o, splits))
    return grid
def create_tilings(low, high, tiling_specs):
    """Create multiple tilings from the provided specifications.

    Parameters
    ----------
    low, high : array_like
        Lower/upper bounds for each dimension of the continuous space.
    tiling_specs : list of tuples
        A sequence of (bins, offsets) pairs, each forwarded to
        create_tiling_grid().

    Returns
    -------
    list
        One grid (as produced by create_tiling_grid()) per specification.
    """
    tilings = []
    for bins, offsets in tiling_specs:
        tilings.append(create_tiling_grid(low, high, bins, offsets))
    return tilings
def tile_encode(sample, tilings, flatten=False):
    """Encode a continuous sample using tile-coding.

    Parameters
    ----------
    sample : array_like
        A single sample from the (original) continuous space.
    tilings : list
        Grids as produced by create_tiling_grid().
    flatten : bool
        If true, concatenate the per-tiling index tuples into one array.

    Returns
    -------
    list of tuple, or array_like when flatten is True
        One index tuple per tiling (or their concatenation).
    """
    encoded = [discretize_tile(sample, tiling_grid) for tiling_grid in tilings]
    if flatten:
        return np.concatenate(encoded)
    return encoded
def visualize_encoded_samples(samples, encoded_samples, tilings, low=None, high=None):
    """Visualize samples by activating the respective tiles.

    Shades, for every sample, the tile it activates in each tiling
    (one translucent rectangle per tiling, colour-coded per tiling).
    NOTE(review): indexing below uses columns 0 and 1 only, so this
    assumes a 2-D state space.
    """
    samples = np.array(samples)  # for ease of indexing
    # Show tiling grids
    ax = visualize_tilings(tilings)
    # If bounds (low, high) are specified, use them to set axis limits
    if low is not None and high is not None:
        ax.set_xlim(low[0], high[0])
        ax.set_ylim(low[1], high[1])
    else:
        # Pre-render (invisible) samples to automatically set reasonable axis limits, and use them as (low, high)
        ax.plot(samples[:, 0], samples[:, 1], 'o', alpha=0.0)
        low = [ax.get_xlim()[0], ax.get_ylim()[0]]
        high = [ax.get_xlim()[1], ax.get_ylim()[1]]
    # Map each encoded sample (which is really a list of indices) to the corresponding tiles it belongs to
    tilings_extended = [np.hstack((np.array([low]).T, grid, np.array([high]).T)) for grid in tilings]  # add low and high ends
    tile_centers = [(grid_extended[:, 1:] + grid_extended[:, :-1]) / 2 for grid_extended in tilings_extended]  # compute center of each tile
    tile_toplefts = [grid_extended[:, :-1] for grid_extended in tilings_extended]  # compute topleft of each tile
    tile_bottomrights = [grid_extended[:, 1:] for grid_extended in tilings_extended]  # compute bottomright of each tile
    prop_cycle = plt.rcParams['axes.prop_cycle']
    colors = prop_cycle.by_key()['color']
    for sample, encoded_sample in zip(samples, encoded_samples):
        for i, tile in enumerate(encoded_sample):
            # Shade the entire tile with a rectangle
            topleft = tile_toplefts[i][0][tile[0]], tile_toplefts[i][1][tile[1]]
            bottomright = tile_bottomrights[i][0][tile[0]], tile_bottomrights[i][1][tile[1]]
            ax.add_patch(Rectangle(topleft, bottomright[0] - topleft[0], bottomright[1] - topleft[1],
                                   color=colors[i], alpha=0.33))
            # In case sample is outside tile bounds, it may not have been highlighted properly
            if any(sample < topleft) or any(sample > bottomright):
                # So plot a point in the center of the tile and draw a connecting line
                cx, cy = tile_centers[i][0][tile[0]], tile_centers[i][1][tile[1]]
                ax.add_line(Line2D([sample[0], cx], [sample[1], cy], color=colors[i]))
                ax.plot(cx, cy, 's', color=colors[i])
    # Finally, plot original samples
    ax.plot(samples[:, 0], samples[:, 1], 'o', color='r')
    ax.margins(x=0, y=0)  # remove unnecessary margins
    ax.set_title("Tile-encoded samples")
    return ax
class QTable:
    """Simple dense Q-table over a discrete state/action space."""

    def __init__(self, state_size, action_size):
        """Allocate a zero-initialised Q-table.

        Parameters
        ----------
        state_size : tuple
            Number of discrete values along each dimension of state space.
        action_size : int
            Number of discrete actions in action space.
        """
        self.state_size = state_size
        self.action_size = action_size
        # One Q-value per (state..., action) combination.
        table_shape = tuple(state_size) + (action_size,)
        self.q_table = np.zeros(shape=table_shape)
        print("QTable(): size =", self.q_table.shape)
class TiledQTable:
    """Composite Q-table with an internal tile coding scheme."""
    def __init__(self, low, high, tiling_specs, action_size):
        """Create tilings and initialize internal Q-table(s).
        Parameters
        ----------
        low : array_like
            Lower bounds for each dimension of state space.
        high : array_like
            Upper bounds for each dimension of state space.
        tiling_specs : list of tuples
            A sequence of (bins, offsets) to be passed to create_tilings() along with low, high.
        action_size : int
            Number of discrete actions in action space.
        """
        self.tilings = create_tilings(low, high, tiling_specs)
        # A tiling with k interior split points per dimension has k+1 bins.
        self.state_sizes = [tuple(len(splits)+1 for splits in tiling_grid) for tiling_grid in self.tilings]
        self.action_size = action_size
        # One independent Q-table per tiling; get() averages across them.
        self.q_tables = [QTable(state_size, self.action_size) for state_size in self.state_sizes]
        print("TiledQTable(): no. of internal tables = ", len(self.q_tables))
    def get(self, state, action):
        """Get Q-value for given <state, action> pair.
        Parameters
        ----------
        state : array_like
            Vector representing the state in the original continuous space.
        action : int
            Index of desired action.
        Returns
        -------
        value : float
            Q-value of given <state, action> pair, averaged from all internal Q-tables.
        """
        # Encode state to get tile indices
        encoded_state = tile_encode(state, self.tilings)
        # Retrieve q-value for each tiling, and return their average
        value = 0.0
        for idx, q_table in zip(encoded_state, self.q_tables):
            # idx is a tuple of per-dimension bin indices; appending the
            # action index addresses one cell of the dense table.
            value += q_table.q_table[tuple(idx + (action,))]
        value /= len(self.q_tables)
        return value
    def update(self, state, action, value, alpha=0.1):
        """Soft-update Q-value for given <state, action> pair to value.
        Instead of overwriting Q(state, action) with value, perform soft-update:
            Q(state, action) = alpha * value + (1.0 - alpha) * Q(state, action)
        Parameters
        ----------
        state : array_like
            Vector representing the state in the original continuous space.
        action : int
            Index of desired action.
        value : float
            Desired Q-value for <state, action> pair.
        alpha : float
            Update factor to perform soft-update, in [0.0, 1.0] range.
        """
        # Encode state to get tile indices
        encoded_state = tile_encode(state, self.tilings)
        # Update q-value for each tiling by update factor alpha
        for idx, q_table in zip(encoded_state, self.q_tables):
            value_ = q_table.q_table[tuple(idx + (action,))]  # current value
            q_table.q_table[tuple(idx + (action,))] = alpha * value + (1.0 - alpha) * value_
|
nilq/baby-python
|
python
|
import sys
from je_web_runner import get_desired_capabilities
from je_web_runner import get_desired_capabilities_keys
from je_web_runner import get_webdriver_manager
# Smoke test: list the desired-capability presets exposed by je_web_runner,
# then start and immediately quit a Firefox webdriver.
try:
    print(get_desired_capabilities_keys())
    # Print every known capability preset, one per key.
    for keys in get_desired_capabilities_keys():
        print(get_desired_capabilities(keys))
    driver_wrapper = get_webdriver_manager("firefox", capabilities=get_desired_capabilities("firefox"))
    driver_wrapper.quit()
except Exception as error:
    # Any failure (missing browser/driver, bad capability name) exits
    # non-zero so CI notices.
    print(repr(error), file=sys.stderr)
    sys.exit(1)
|
nilq/baby-python
|
python
|
# Generated by Django 3.1.8 on 2021-04-07 15:54
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated migration: replaces the `event_tag_name` column on the
    # event_tag model with a `text` column. `preserve_default=False` because
    # the default exists only to backfill existing rows during migration.

    dependencies = [
        ('unievents', '0014_auto_20210407_1416'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='event_tag',
            name='event_tag_name',
        ),
        migrations.AddField(
            model_name='event_tag',
            name='text',
            field=models.TextField(db_column='text', default=None),
            preserve_default=False,
        ),
    ]
|
nilq/baby-python
|
python
|
from . import db
from flask import current_app
from flask_login import UserMixin, AnonymousUserMixin
from werkzeug.security import generate_password_hash, check_password_hash
from datetime import datetime
import hashlib, os
import markdown
class User(UserMixin, db.Model):
    """Blog user account with hashed-password authentication."""
    __tablename__ = 'user'
    id = db.Column(db.Integer, primary_key=True)
    username = db.Column(db.String(64), unique=True, index=True)
    email = db.Column(db.String(64), unique=True, index=True)
    password_hash = db.Column(db.String(128))  # only the salted hash is stored
    member_since = db.Column(db.DateTime(), default=datetime.utcnow)
    last_seen = db.Column(db.DateTime(), default=datetime.utcnow)
    status = db.Column(db.Boolean, default=False)
    role = db.Column(db.Boolean, default=False)  # True => administrator (see is_admin)
    articles = db.relationship('Article', backref='author', lazy='dynamic')

    @property
    def password(self):
        # BUG FIX: reading a write-only attribute is an attribute-access
        # error, not an arithmetic one; AttributeError is the conventional
        # exception for write-only password properties.
        raise AttributeError('非明文密码,不可读。')

    @password.setter
    def password(self, password):
        # Hash on assignment; plaintext is never persisted.
        self.password_hash = generate_password_hash(password=password)

    def verify_password(self, password):
        """Return True if *password* matches the stored hash."""
        return check_password_hash(self.password_hash, password=password)

    def is_admin(self):
        """Return True when this account has the admin role."""
        return self.role

    def ping(self):
        """Refresh last_seen; the caller is responsible for committing."""
        self.last_seen = datetime.utcnow()
        db.session.add(self)

    def is_author(self):
        """Return this user's first article (truthy) or None."""
        return Article.query.filter_by(author_id=self.id).first()

    def __repr__(self):
        return '<User %r>' % self.username
class AnonymousUser(AnonymousUserMixin):
    # Anonymous visitors are never administrators.
    def is_admin(self):
        return False
class Category(db.Model):
    """Article category (one-to-many with Article)."""
    __tablename__ = 'category'
    id = db.Column(db.Integer, primary_key=True)
    title = db.Column(db.String(64),unique=True,index=True)
    name = db.Column(db.String(64), unique=True, index=True)
    desp = db.Column(db.String(300))  # description text (max 300 chars)
    articles = db.relationship('Article', backref='category', lazy='dynamic')
    def __repr__(self):
        return '<Name %r>' % self.name
# Association table for the many-to-many Article <-> Tag relationship.
article_tag = db.Table('article_tag',
            db.Column('article_id',db.Integer,db.ForeignKey('article.id'),primary_key=True),
            db.Column('tag_id',db.Integer,db.ForeignKey('tag.id'),primary_key=True))
class Tag(db.Model):
    """Article tag (many-to-many with Article via article_tag)."""
    __tablename__ = 'tag'
    id = db.Column(db.Integer, primary_key=True, autoincrement=True)
    name = db.Column(db.String(64),nullable=False, unique=True, index=True)
    def __repr__(self):
        return '<Name %r>' % self.name
class Article(db.Model):
    """A blog article written in Markdown."""
    __tablename__ = 'article'
    id = db.Column(db.Integer, primary_key=True)
    title = db.Column(db.String(120), index=True)
    name = db.Column(db.String(64),index=True,unique=True)
    content = db.Column(db.Text)  # raw Markdown source
    content_html = db.Column(db.Text)  # rendered HTML cache
    summary = db.Column(db.String(300))
    thumbnail = db.Column(db.String(200))
    state = db.Column(db.Integer,default=0)  # 1 appears to mean published (see previous/next filters)
    vc = db.Column(db.Integer,default=0)  # view counter
    timestamp = db.Column(db.DateTime, index=True, default=datetime.now)
    author_id = db.Column(db.Integer, db.ForeignKey('user.id'))
    category_id = db.Column(db.Integer, db.ForeignKey('category.id'))
    tags = db.relationship('Tag',secondary=article_tag,backref=db.backref('articles',lazy='dynamic'),lazy='dynamic')

    def content_to_html(self):
        """Render the Markdown content to HTML."""
        return markdown.markdown(self.content, extensions=[
            'markdown.extensions.extra',
            'markdown.extensions.codehilite',
        ])

    @property
    def author(self):
        """Return the author (User) object."""
        return User.query.get(self.author_id)

    @property
    def category(self):
        """Return the article's Category object."""
        return Category.query.get(self.category_id)

    @property
    def category_name(self):
        """Return the category name (for flask-wtf's obj= population)."""
        return Category.query.get(self.category_id).name

    @property
    def previous(self):
        """Previous published article, for paginated navigation."""
        a = self.query.filter(Article.state==1,Article.id < self.id). \
            order_by(Article.timestamp.desc()).first()
        return a

    @property
    def next(self):
        """Next published article, for paginated navigation."""
        a = self.query.filter(Article.state==1,Article.id > self.id). \
            order_by(Article.timestamp.asc()).first()
        return a

    @property
    def tag_names(self):
        """Return the article's tag names joined by ', ' (used when editing)."""
        tags = []
        for tag in self.tags:
            tags.append(tag.name)
        return ', '.join(tags)

    @property
    def thread_key(self):  # used by the comment plugin
        # BUG FIX: hashlib.new() has no `string` keyword and Python 3
        # hashes require bytes; encode the id before hashing.
        return hashlib.md5(str(self.id).encode('utf-8')).hexdigest()

    def __repr__(self):
        return '<Title %r>' % self.title
class Recommend(db.Model):
    '''
    Recommended item shown on the site.
    '''
    __tablename__ = 'recommend'
    id = db.Column(db.Integer, primary_key=True)
    title = db.Column(db.String(120))
    img = db.Column(db.String(200))  # image URL/path
    url = db.Column(db.String(200))  # link target
    sn = db.Column(db.Integer,default=0)  # NOTE(review): presumably a sort/sequence number -- confirm
    state = db.Column(db.Integer, default=1)
    timestamp = db.Column(db.DateTime, default=datetime.now)
class AccessLog(db.Model):
    '''
    Request access log.
    '''
    __tablename__ = 'access_log'
    id = db.Column(db.Integer, primary_key=True)
    ip = db.Column(db.String(20))
    url = db.Column(db.String(120))
    timestamp = db.Column(db.DateTime, default=datetime.now)
    remark = db.Column(db.String(32))
class Picture(db.Model):
    '''
    Uploaded picture.
    '''
    __tablename__ = 'picture'
    id = db.Column(db.Integer, primary_key = True)
    name = db.Column(db.String(64))
    timestamp = db.Column(db.DateTime, default=datetime.now)
    url = db.Column(db.String(120))
    remark = db.Column(db.String(32))
class InvitationCode(db.Model):
    '''
    Invitation code for registration.
    '''
    __tablename__ = 'invitation_code'
    id = db.Column(db.Integer, primary_key = True)
    code = db.Column(db.String(64),unique = True, nullable=False)
    user = db.Column(db.String(64))  # NOTE(review): presumably the user who redeemed the code -- confirm
    state = db.Column(db.Boolean, default=True)  # True appears to mean still usable -- confirm
class OnlineTool(db.Model):
    '''
    Online tool entry listed on the site.
    '''
    __tablename__ = 'online_tool'
    id = db.Column(db.Integer, primary_key=True)
    title = db.Column(db.String(120))
    desp = db.Column(db.String(120))  # description
    img = db.Column(db.String(200))  # image URL/path
    url = db.Column(db.String(200))  # link target
    sn = db.Column(db.Integer,default=0)  # NOTE(review): presumably a sort/sequence number -- confirm
    state = db.Column(db.Integer, default=1)
    timestamp = db.Column(db.DateTime, default=datetime.now)
|
nilq/baby-python
|
python
|
import colander
from cryptography.fernet import Fernet
class EncryptedExportField(colander.String):
    """
    Serialize non-encrypted appstruct into encrypted cstruct.
    """

    def __init__(self, fernet_key, *args, **kwargs):
        self.fernet_key = fernet_key
        self.fernet = Fernet(fernet_key)
        super().__init__(*args, **kwargs)

    def serialize(self, node, appstruct):
        """Encrypt the plaintext appstruct for export."""
        v = super().serialize(node, appstruct)
        if v is colander.null:
            return v
        if v.strip():
            # BUG FIX: removed leftover debug code that raised a bare
            # Exception whenever the value was the literal string "data".
            return self.fernet.encrypt(v.encode("utf8")).decode("utf8")
        return colander.null

    def deserialize(self, node, cstruct):
        """Decrypt an encrypted cstruct back to plaintext."""
        v = super().deserialize(node, cstruct)
        if v is colander.null:
            return v
        # decrypt (the original comment said "encrypt"; this path decrypts)
        if v.strip():
            v = self.fernet.decrypt(v.encode("utf8")).decode("utf8")
            return v
        return colander.null
class EncryptedStoreField(colander.String):
    """
    Deserialize non-encrypted cstruct into encrypted appstruct.
    """

    def __init__(self, fernet_key, *args, **kwargs):
        self.fernet_key = fernet_key
        self.fernet = Fernet(fernet_key)
        super().__init__(*args, **kwargs)

    def serialize(self, node, appstruct):
        """ Decrypt appstruct """
        v = super().serialize(node, appstruct)
        if v is colander.null:
            return v
        if v.strip():
            v = self.fernet.decrypt(v.encode("utf8")).decode("utf8")
            return v
        return colander.null

    def deserialize(self, node, cstruct):
        """ Encrypt cstruct """
        v = super().deserialize(node, cstruct)
        # BUG FIX: guard against the null sentinel before calling str
        # methods (colander.null has no .strip()); this matches serialize()
        # and the sibling EncryptedExportField.
        if v is colander.null:
            return v
        if v.strip():
            return self.fernet.encrypt(v.encode("utf8")).decode("utf8")
        return colander.null
|
nilq/baby-python
|
python
|
import torch
import torch.nn as nn
from packaging import version
from mmcv.cnn import kaiming_init, normal_init
from .registry import INPUT_MODULES
from .utils import build_norm_layer
def conv1x1(in_planes, out_planes, stride=1):
    """Return a bias-free 1x1 convolution from in_planes to out_planes."""
    return nn.Conv2d(
        in_planes,
        out_planes,
        kernel_size=1,
        stride=stride,
        bias=False,
    )
@INPUT_MODULES.register_module
class Conv1x1Block(nn.Module):
    """
    Conv1x1 => Batch Norm => RELU input module
    """

    def __init__(self, in_channels, out_channels):
        super(Conv1x1Block, self).__init__()
        self.net = nn.Sequential(
            conv1x1(in_channels, out_channels),
            nn.BatchNorm2d(out_channels),
            nn.ReLU(inplace=True)
        )

    def init_weights(self, init_linear='normal'):
        """Kaiming-init convolutions; unit-init norm layers.

        `init_linear` is accepted for interface compatibility but is
        currently unused.
        """
        for m in self.modules():
            if isinstance(m, nn.Conv2d):
                nn.init.kaiming_normal_(m.weight, mode='fan_out', nonlinearity='relu')
            # BUG FIX: `nn.BatchNorm` does not exist and raised
            # AttributeError here; check the concrete norm classes instead.
            elif isinstance(m, (nn.BatchNorm1d, nn.BatchNorm2d, nn.SyncBatchNorm, nn.GroupNorm)):
                nn.init.constant_(m.weight, 1)
                nn.init.constant_(m.bias, 0)

    def forward(self, x):
        return self.net(x)
|
nilq/baby-python
|
python
|
# -*- coding: utf-8 -*-
# ----------------------------------------------------------------------
# Copyright © 2010, RedJack, LLC.
# All rights reserved.
#
# Please see the LICENSE.txt file in this distribution for license
# details.
# ----------------------------------------------------------------------
import unittest
from ipset.c import *
# Raw address byte sequences used as test fixtures (192.168.1.100 and a
# link-local IPv6 address).
# NOTE(review): written for Python 2, where these str literals are byte
# strings; under Python 3 they are text and the C wrapper may expect
# bytes -- confirm before porting.
IPV4_ADDR_1 = \
    "\xc0\xa8\x01\x64"
IPV6_ADDR_1 = \
    "\xfe\x80\x00\x00\x00\x00\x00\x00\x02\x1e\xc2\xff\xfe\x9f\xe8\xe1"
class TestSet(unittest.TestCase):
    """Smoke tests for the ipset set wrapper (create/insert/compare)."""

    def test_set_starts_empty(self):
        s = ipset.ipset_new()
        # `assert_` is a long-deprecated alias removed in Python 3.12;
        # use assertTrue instead.
        self.assertTrue(ipset.ipset_is_empty(s))
        ipset.ipset_free(s)

    def test_empty_sets_equal(self):
        s1 = ipset.ipset_new()
        s2 = ipset.ipset_new()
        self.assertTrue(ipset.ipset_is_equal(s1, s2))
        ipset.ipset_free(s1)
        ipset.ipset_free(s2)

    def test_ipv4_insert(self):
        s = ipset.ipset_new()
        ipset.ipset_ipv4_add(s, IPV4_ADDR_1)
        self.assertFalse(ipset.ipset_is_empty(s))
        ipset.ipset_free(s)

    def test_ipv4_insert_network(self):
        s = ipset.ipset_new()
        ipset.ipset_ipv4_add_network(s, IPV4_ADDR_1, 24)
        self.assertFalse(ipset.ipset_is_empty(s))
        ipset.ipset_free(s)

    def test_ipv6_insert(self):
        s = ipset.ipset_new()
        ipset.ipset_ipv6_add(s, IPV6_ADDR_1)
        self.assertFalse(ipset.ipset_is_empty(s))
        ipset.ipset_free(s)

    def test_ipv6_insert_network(self):
        s = ipset.ipset_new()
        ipset.ipset_ipv6_add_network(s, IPV6_ADDR_1, 32)
        self.assertFalse(ipset.ipset_is_empty(s))
        ipset.ipset_free(s)
class TestMap(unittest.TestCase):
    """Smoke tests for the ipmap wrapper (create/insert/compare)."""

    def test_map_starts_empty(self):
        s = ipset.ipmap_new(0)
        # `assert_` is a long-deprecated alias removed in Python 3.12;
        # use assertTrue instead.
        self.assertTrue(ipset.ipmap_is_empty(s))
        ipset.ipmap_free(s)

    def test_empty_maps_equal(self):
        s1 = ipset.ipmap_new(0)
        s2 = ipset.ipmap_new(0)
        self.assertTrue(ipset.ipmap_is_equal(s1, s2))
        ipset.ipmap_free(s1)
        ipset.ipmap_free(s2)

    def test_ipv4_insert(self):
        s = ipset.ipmap_new(0)
        ipset.ipmap_ipv4_set(s, IPV4_ADDR_1, 1)
        self.assertFalse(ipset.ipmap_is_empty(s))
        ipset.ipmap_free(s)

    def test_ipv4_insert_network(self):
        s = ipset.ipmap_new(0)
        ipset.ipmap_ipv4_set_network(s, IPV4_ADDR_1, 24, 1)
        self.assertFalse(ipset.ipmap_is_empty(s))
        ipset.ipmap_free(s)

    def test_ipv6_insert(self):
        s = ipset.ipmap_new(0)
        ipset.ipmap_ipv6_set(s, IPV6_ADDR_1, 1)
        self.assertFalse(ipset.ipmap_is_empty(s))
        ipset.ipmap_free(s)

    def test_ipv6_insert_network(self):
        s = ipset.ipmap_new(0)
        ipset.ipmap_ipv6_set_network(s, IPV6_ADDR_1, 32, 1)
        self.assertFalse(ipset.ipmap_is_empty(s))
        ipset.ipmap_free(s)
|
nilq/baby-python
|
python
|
#-*- coding: utf-8 -*-
#!/usr/bin/env python
from os import path
# dirs
# BASE_DIR is this file's absolute directory with a trailing '/', so the
# *_DIR constants below can be built by plain concatenation.
BASE_DIR = path.dirname(path.realpath(__file__)) + '/'
MODULES_DIR = BASE_DIR + 'modules/'
AUDIO_DIR = BASE_DIR + 'audio/'
# api url (server)
SERVER_API_URL = 'http://localhost:3000/'
# voice lang
# NOTE(review): 'en-EN' is an unusual locale tag; most speech APIs expect
# 'en-US' or 'en-GB' -- confirm this value works with the consumer.
LANG = 'en-EN'
|
nilq/baby-python
|
python
|
""" Main entrypoint for starttls-policy CLI tool """
import argparse
import os
from starttls_policy_cli import configure
# Maps the --generate CLI choice to the configuration-generator class
# responsible for that MTA.
GENERATORS = {
    "postfix": configure.PostfixGenerator,
}
def _argument_parser():
    """Build the CLI argument parser for the starttls-policy tool."""
    parser = argparse.ArgumentParser(
        description="Generates MTA configuration file according to STARTTLS-Everywhere policy",
        formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    parser.add_argument(
        "-g", "--generate",
        choices=GENERATORS,
        dest="generate",
        required=True,
        help="The MTA you want to generate a configuration file for.",
    )
    # TODO: decide whether to use /etc/ for policy list home
    parser.add_argument(
        "-d", "--policy-dir",
        dest="policy_dir",
        default="/etc/starttls-policy/",
        help="Policy file directory on this computer.",
    )
    early_adopter_help = (
        "Early Adopter mode. Processes all \"testing\" domains in policy list "
        "same way as domains in \"enforce\" mode, effectively requiring strong TLS "
        "for domains in \"testing\" mode too. This mode is useful for participating"
        " in tests of recently added domains with real communications and earlier "
        "security hardening at the cost of increased probability of delivery "
        "degradation. Use this mode with awareness about all implications."
    )
    parser.add_argument(
        "-e", "--early-adopter",
        action="store_true",
        dest="early_adopter",
        help=early_adopter_help,
    )
    return parser
def _ensure_directory(directory):
if not os.path.exists(directory):
os.makedirs(directory)
def _generate(arguments):
    """Run the selected generator against the configured policy directory."""
    _ensure_directory(arguments.policy_dir)
    generator_cls = GENERATORS[arguments.generate]
    config_generator = generator_cls(arguments.policy_dir, arguments.early_adopter)
    config_generator.generate()
    config_generator.manual_instructions()
def main():
    """ Entrypoint for CLI tool. """
    args = _argument_parser().parse_args()
    _generate(args)

if __name__ == "__main__":
    main()  # pragma: no cover
|
nilq/baby-python
|
python
|
import numpy as np
class ValueLog():
    """Aggregating key/value log.

    Values are collected per key (optionally nested under groups) and
    reduced with a numpy aggregation function when get() is called.
    """

    def __init__(self):
        self.log_values = {}

    def log(self, key, val, agg="mean", scope="get", group=None,
            precision=None):
        """Log a single value under *key*.

        Args:
            key: Key in the resulting log dictionary.
            val: Value to record.
            agg: Name of a numpy reduction (mean/max/sum/...), applied on
                get(). "auto" picks a mode from the key's suffix.
            scope: Aggregation/reset scope: "get" resets on every get(),
                None never resets, an integer keeps only that many of the
                most recent values.
            group: Optional nested sub-group path, e.g. "training->general".
            precision: Optional number of decimals to round the aggregate to.

        Note: agg/scope/precision must be identical for every value logged
        under the same key+group.
        """
        container = self.log_values
        if group is not None:
            for part in group.split("->"):
                container = container.setdefault(part, {})
        entry = container.get(key)
        if entry is None:
            entry = {
                "data": [],
                "scope": scope,
                "agg": agg,
                "precision": precision
            }
            container[key] = entry
        else:
            assert entry['agg'] == agg
            assert entry['precision'] == precision
            assert entry['scope'] == scope
        entry['data'].append(val)
        # An integer scope keeps only the trailing window of values.
        effective_scope = entry['scope']
        if isinstance(effective_scope, int):
            entry['data'] = entry['data'][-int(effective_scope):]

    def log_dict(self, source, agg="auto", group=None):
        """Log every leaf of *source*, mirroring its nesting as groups."""
        for key, val in source.items():
            if isinstance(val, dict):
                child_group = key if group is None else group + "->" + key
                self.log_dict(val, agg=agg, group=child_group)
            else:
                self.log(key, val, group=group, agg=agg)

    def _get_aggregator_for_key(self, key, agg_mode):
        """Resolve *agg_mode* ("auto" inspects the key suffix) to a numpy fn."""
        if agg_mode == "auto":
            # 'auto' picks the mode from the key suffix (e.g. 'reward_max'
            # -> max); anything unrecognized falls back to mean.
            candidate = key.split("_")[-1]
            if candidate in ('min', 'mean', 'median', 'max', 'std', 'sum'):
                agg_mode = candidate
            else:
                agg_mode = "mean"
        return getattr(np, agg_mode)

    def _aggregate_log_values(self, source, dest):
        """Recursively reduce *source* entries into *dest*.

        Entries whose scope is 'get' are removed from *source* afterwards.
        """
        to_delete = []
        for key, item in source.items():
            if "data" not in item:
                # No raw data -> this is a sub-group; recurse.
                dest[key] = {}
                self._aggregate_log_values(item, dest[key])
                continue
            reduce_fn = self._get_aggregator_for_key(key, item['agg'])
            value = reduce_fn(item['data'])
            if item['precision'] is not None:
                value = round(value, item['precision'])
            dest[key] = value
            if item['scope'] == 'get':
                to_delete.append(key)
        for key in to_delete:
            del source[key]

    def get(self):
        """Aggregate and return all logged values; reset 'get'-scoped keys."""
        aggregated = {}
        self._aggregate_log_values(self.log_values, aggregated)
        return aggregated
|
nilq/baby-python
|
python
|
from time import sleep
import threading
import datetime
import paho.mqtt.client as mqtt
#### CONSTANTS ####
#MQTTServer="home.bodhiconnolly.com"
MQTTServer="192.168.1.100"
MQTTPort=1882
waitTime=datetime.timedelta(milliseconds=50)
ledTopic="room/lights/strips/"
functionTopic="room/function/#"
systemTopic="system/functions/room"
lastTime=datetime.datetime.now()
#### MQTT SENDING ####
def sendMQTT(topic,message):
    """Publish *message* on *topic* via the module-level MQTT client."""
    client.publish(topic,message)
def setRGB(r=None,g=None,b=None):
    """Publish each colour channel that was supplied to its MQTT strip topic.

    Channels left as None are not published (their current value is kept).
    """
    for channel, value in (("r", r), ("g", g), ("b", b)):
        if value is not None:
            sendMQTT(ledTopic + channel, value)
def setRGBWait(r=None,g=None,b=None):
    """Rate-limited setRGB: silently drops updates that arrive within
    waitTime of the last accepted one, to avoid flooding the broker."""
    global lastTime
    if datetime.datetime.now()-lastTime>waitTime:
        setRGB(r,g,b)
        lastTime=datetime.datetime.now()
def updateStatus(function,wake=None):
    """Placeholder hook for reporting the controller's current function and
    wake state; intentionally a no-op for now."""
    pass
#### THREAD FUNCTIONS ####
class StoppableThread(threading.Thread):
    """Thread base class with a cooperative stop flag.

    Subclasses poll self._stop in their run loops and exit when it is set;
    calling stop() only requests termination, it does not force it.
    """
    def __init__(self):
        super(StoppableThread, self).__init__()
        self._stop = threading.Event()
    def stop(self):
        """Ask the thread to finish at its next checkpoint."""
        self._stop.set()
class FadeThread(StoppableThread):
    """Worker thread that cycles the LED strip around the colour wheel
    until stop() is called.

    fadeSpeed is the total time for one 255-step ramp; each step sleeps
    fadeSpeed/255 seconds.
    """
    def __init__(self,fadeSpeed):
        super(FadeThread, self).__init__()
        # 255.0 keeps this a true division on Python 2 even for int input.
        self.fadeSpeed=fadeSpeed/255.0
    def run(self):
        updateStatus("fade")
        print("Starting fade")
        setRGB(0,0,255)  # start from blue, matching the first ramp below
        while not self._stop.isSet():
            self.fade(self.fadeSpeed)
    def _ramp(self, channel, start, stop, step, delay):
        """Ramp one colour channel, aborting early when stop() is requested.

        Returns False when aborted so the caller can bail out of the cycle.
        """
        for value in range(start, stop, step):
            if self._stop.isSet():
                return False
            setRGBWait(**{channel: value})
            sleep(delay)
        return True
    def fade(self,fadespeed):
        """One full trip around the colour wheel (the original six
        copy-pasted loops, expressed as a ramp table)."""
        for channel, start, stop, step in (
                ("r", 0, 256, 1),     # blue -> magenta
                ("b", 255, -1, -1),   # magenta -> red
                ("g", 0, 256, 1),     # red -> yellow
                ("r", 255, -1, -1),   # yellow -> green
                ("b", 0, 256, 1),     # green -> cyan
                ("g", 255, -1, -1)):  # cyan -> blue
            if not self._ramp(channel, start, stop, step, fadespeed):
                return
    def setSpeed(self, fadeSpeed):
        """Update the per-step delay for subsequent ramps.

        BUG FIX: 'self' was missing from the signature, so any call would
        have raised immediately; 255.0 also avoids Python 2 integer
        division when an int is passed.
        """
        self.fadeSpeed = fadeSpeed / 255.0
class SleepThread(StoppableThread):
    """Worker thread that dims the (white) LED strip down to off, slowing
    the ramp as it gets darker to mimic falling asleep.

    sleepTime is the nominal total duration; the per-step delay is
    sleepTime/255, scaled per brightness band below.
    NOTE(review): on Python 2, an int sleepTime makes sleepDelay integer
    division (0 for sleepTime < 255) -- callers pass float(payload).
    """
    def __init__(self,sleepTime):
        super(SleepThread, self).__init__()
        self.sleepTime=sleepTime
    def run(self):
        updateStatus("sleep")
        print "Starting sleep"
        self.ledSleep(self.sleepTime)
    def ledSleep(self,sleepTime):
        """Ramp all three channels from 255 down to 0, honouring stop()."""
        sleepDelay=(sleepTime)/255
        # Bright range fades fast (0.2x delay per step)...
        for i in range(255,100,-1):
            if not self._stop.isSet():
                setRGBWait(i,i,i)
                sleep(sleepDelay*0.2)
            else:
                break
        for i in range(100,50,-1):
            if not self._stop.isSet():
                setRGBWait(i,i,i)
                sleep(sleepDelay*1)
            else:
                break
        for i in range(50,10,-1):
            if not self._stop.isSet():
                setRGBWait(i,i,i)
                sleep(sleepDelay*2)
            else:
                break
        # ...and the dimmest range fades slowest (10x delay per step).
        for i in range(10,-1,-1):
            if not self._stop.isSet():
                setRGBWait(i,i,i)
                sleep(sleepDelay*10)
            else:
                break
        updateStatus("none","asleep")
class WakeThread(StoppableThread):
    """Worker thread that brightens the strip from off to full white,
    starting slowly and accelerating -- a sunrise-style wake-up ramp."""
    def __init__(self,wakeTime):
        super(WakeThread, self).__init__()
        self.wakeTime=wakeTime
    def run(self):
        print "Starting wake"
        updateStatus("wake")
        self.wake(self.wakeTime)
    def wake(self,sleepTime):
        # NOTE: the sleepTime parameter is unused; the duration actually
        # comes from self.wakeTime (kept as-is for interface stability).
        wakeDelay=(self.wakeTime)/255
        # Dimmest range brightens slowest (10x delay per step)...
        for i in range(1,11,1):
            if not self._stop.isSet():
                setRGBWait(i,i,i)
                sleep(wakeDelay*10)
            else:
                break
        for i in range(11,51,1):
            if not self._stop.isSet():
                setRGBWait(i,i,i)
                sleep(wakeDelay*2)
            else:
                break
        for i in range(51,101,1):
            if not self._stop.isSet():
                setRGBWait(i,i,i)
                sleep(wakeDelay*1)
            else:
                break
        # ...and the bright range brightens fastest (0.2x delay per step).
        for i in range(101,255,1):
            if not self._stop.isSet():
                setRGBWait(i,i,i)
                sleep(wakeDelay*0.2)
            else:
                break
        # Force full white at the end (the rate limiter may have dropped
        # the final steps of the ramp).
        setRGB(255,255,255)
        updateStatus("none","awake")
class FastWakeThread(StoppableThread):
    """Like WakeThread, but a single linear 0->255 white ramp with a
    constant per-step delay of wakeTime/255."""
    def __init__(self,wakeTime):
        super(FastWakeThread, self).__init__()
        self.wakeTime=wakeTime
    def run(self):
        print "Starting fast wake"
        updateStatus("wake")
        self.wake(self.wakeTime)
    def wake(self,sleepTime):
        # NOTE: sleepTime is unused; duration comes from self.wakeTime.
        wakeDelay=(self.wakeTime)/255
        for i in range(1,256,1):
            if not self._stop.isSet():
                setRGBWait(i,i,i)
                sleep(wakeDelay)
            else:
                break
        # Guarantee full white even if the rate limiter dropped late steps.
        setRGB(255,255,255)
        updateStatus("none","awake")
#### CONTROLLING OBJECT ####
class ledController(object):
    """MQTT glue: owns one worker thread per lighting effect and swaps the
    active effect in response to room/function/<name> messages."""
    def __init__(self):
        # Dummy instances so stopThreads() can always be called safely;
        # real threads replace these on the first matching message.
        self.fadeThread=FadeThread(1)
        self.sleepThread=SleepThread(1)
        self.wakeThread=WakeThread(1)
        self.fastwakeThread=FastWakeThread(1)
    def stopThreads(self):
        """Request every effect thread to stop (cooperative, non-blocking)."""
        self.fadeThread.stop()
        self.sleepThread.stop()
        self.wakeThread.stop()
        self.fastwakeThread.stop()
    def on_connect(self, client, userdata, flags, rc):
        """MQTT connect callback: subscribe to function commands and announce."""
        print("Connected with result code "+str(rc))
        client.subscribe(functionTopic)
        client.publish(systemTopic,"Function Controller On")
    def parseMessage(self, client, userdata, msg):
        """MQTT message callback: stop the current effect, then start the
        one named in the topic with the payload as its duration/speed."""
        print msg.topic+" "+str(msg.payload)
        topic=msg.topic.split("/")
        payload=msg.payload
        if topic[0]=='room' and topic[1]=='function':
            # Any command (including 'stop') first cancels the running effect.
            self.stopThreads()
            updateStatus("none")
            if topic[2]=='sleep':
                self.sleepThread=SleepThread(float(payload))
                self.sleepThread.start()
            elif topic[2]=='wake':
                self.wakeThread=WakeThread(float(payload))
                self.wakeThread.start()
            elif topic[2]=='fastwake':
                self.fastwakeThread=FastWakeThread(float(payload))
                self.fastwakeThread.start()
            elif topic[2]=='fade':
                self.fadeThread=FadeThread(float(payload))
                self.fadeThread.start()
            elif topic[2]=='stop':
                # Stop-only command: threads were already cancelled above.
                pass
            else:
                print "Not a valid function: " + str(topic)
#### RUNTIME ####
if __name__ == "__main__":
    # Wire the controller callbacks into a blocking MQTT client loop.
    l=ledController()
    client = mqtt.Client()
    client.on_connect = l.on_connect
    client.on_message = l.parseMessage
    client.connect(MQTTServer, MQTTPort, 60)
    # loop_forever() blocks; all work happens in the MQTT callbacks and
    # the worker threads they spawn.
    client.loop_forever()
|
nilq/baby-python
|
python
|
import json
import string
import random
import os
import httplib2
import requests
# Flask Imports
from flask import Flask, render_template, request, redirect, url_for, jsonify
from flask import abort, g, flash, Response, make_response
from flask import session as login_session
from flask_httpauth import HTTPBasicAuth
# SQLAlchemy imports
from models import Items, Users, Base
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import relationship, sessionmaker
from sqlalchemy import create_engine
from sqlalchemy import desc
auth = HTTPBasicAuth()
engine = create_engine('sqlite:///items.db')
Base.metadata.bind = engine
DBSession = sessionmaker(bind=engine)
db = DBSession()
app = Flask(__name__)
def redirect_url(default='/'):
    """Pick the best post-action redirect target: the explicit ?next=
    parameter first, then the HTTP referrer, then *default* via url_for."""
    target = request.args.get('next') or request.referrer
    return target if target else url_for(default)
#############################
# User Login and Registration
############################
@auth.verify_password
def verify_password(username, password):
    """HTTPBasicAuth callback: validate credentials against the Users table.

    On success the matching user is stashed on flask.g for the request.
    """
    user = db.query(Users).filter_by(username=username).first()
    if not user or not user.verify_password(password):
        return False
    g.user = user
    return True
def createUser(login_session):
    """Create a Users row from the session's username and return its id."""
    newUser = Users(username=login_session['username'])
    db.add(newUser)
    db.commit()
    # Re-query to pick up the database-assigned primary key.
    user = db.query(Users).filter_by(username=login_session['username']).first()
    return user.id
def getUserInfo(user_id):
    """Return the Users row with the given id (None if it does not exist)."""
    user = db.query(Users).filter_by(id=user_id).first()
    return user
@app.route('/signup', methods=['GET', 'POST'])
def signup():
    """Render the signup form (GET) or register a new user (POST).

    POST validates the credentials, rejects duplicate usernames, stores
    the new user with a hashed password, and redirects to the home page.
    """
    if request.method == 'GET':
        return render_template('signup.html',
                               login_session=login_session)
    elif request.method == 'POST':
        username = request.form.get('username')
        password = request.form.get('password')
        verifyPass = request.form.get('verifyPassword')
        # BUG FIX: the original only checked `is None`, so *empty-string*
        # usernames/passwords slipped through; `not x` rejects both missing
        # and blank fields, plus mismatched password confirmation.
        if not username or not password or password != verifyPass:
            flash('You must enter a valid username and password')
            return render_template('signup.html')
        # Check if user is already in database
        user = db.query(Users).filter_by(username=username).first()
        if user:
            flash('The user "%s" is already registered, please login to continue' % user.username)
            return render_template('signup.html')
        else:
            user = Users(username=username)
            user.hash_password(password)
            db.add(user)
            db.commit()
            flash('User %s has been created, please login to continue' % user.username)
            return redirect(url_for('index'))
    else:
        return redirect(url_for('index'))
@app.route('/login', methods=['GET', 'POST'])
def login():
    """Render the login form (GET) or authenticate a user (POST).

    A successful POST stores username/user_id in the session and on
    flask.g, then redirects home; failure re-renders the form.
    """
    if request.method == 'GET':
        return render_template('login.html',
                               login_session=login_session)
    elif request.method == 'POST':
        username = request.form.get('username')
        password = request.form.get('password')
        if verify_password(username, password):
            user = db.query(Users).filter_by(username=username).first()
            login_session['username'] = user.username
            login_session['user_id'] = user.id
            flash("Welcome, %s" % user.username)
            g.user = user
            return redirect(url_for('index'))
        else:
            flash('Wrong Username or Password')
            return render_template('login.html')
    else:
        return redirect(url_for('index'))
@app.route('/logout')
def logout():
    """Clear the login session and send the user back to the home page.

    BUG FIX: the original used `del login_session[...]`, which raised
    KeyError (a 500 error) when /logout was hit while not logged in;
    pop() with a default makes logout idempotent.
    """
    login_session.pop('username', None)
    login_session.pop('user_id', None)
    flash('You have been logged out')
    return redirect(url_for('index'))
##################
# VIEWS
##################
@app.route('/')
def index():
    """Home page: list the logged-in user's key/value pairs, or an empty
    page for anonymous visitors."""
    if ('username' in login_session):
        # Only show items added by this user
        items = db.query(Items).filter_by(author_id=login_session['user_id']).all()
        return render_template('home.html', items=items, login_session=login_session)
    else:
        # Don't show items when user is not logged in
        return render_template('home.html',
                               login_session=login_session)
# Method for Setting/Adding new Key Value Pairs
@app.route('/add', methods=['GET', 'POST'])
def addItem():
if request.method == 'GET':
# Make sure only logged in users can access this page
if ('username' in login_session):
return render_template('addItem.html',
login_session=login_session)
else:
flash('Please login in order to add key/value pairs')
return redirect(url_for('login'))
elif request.method == 'POST':
# Make sure only logged in users are adding key/value pairs
if ('username' in login_session):
key = request.form.get('key')
value = request.form.get('value')
item = db.query(Items).filter_by(key=key).first()
# Make sure key is unique/not already added
if item:
flash('"%s" has already been added' % item.key)
return redirect(url_for('addItem'))
if key is not None and key != '':
item = Items(key=key)
if value is not None and value != '':
item.value = value
item.author = getUserInfo(login_session['user_id'])
else:
flash('You need to provide a proper Key/Value pair')
return redirect(url_for('addItem'))
db.add(item)
db.commit()
flash('Item Added!')
return redirect(url_for('index'))
else:
flash('Please login in order to add key/value pairs')
return redirect(url_for('login'))
else:
return redirect(url_for('index'))
@app.route('/edit/<item_key>', methods=['GET', 'POST'])
def editItem(item_key):
    """Render the edit form (GET) or update a key/value pair (POST).

    Only the logged-in author of the pair may view or change it.
    """
    if request.method == 'GET':
        if ('username' in login_session):
            # find key/value pair that we want to edit
            item = db.query(Items).filter_by(key=item_key).first()
            # Guard against unknown keys (previously an AttributeError/500).
            if item is None:
                flash('No such key/value pair')
                return redirect(url_for('index'))
            # Make sure user is editing only their key/value pair
            if (item.author.username == login_session['username']):
                return render_template('editItem.html',
                                       item=item,
                                       login_session=login_session)
            # BUG FIX: the original fell off the end here (returned None,
            # a Flask 500) when a logged-in user opened someone else's pair.
            flash('You are not allowed to edit this')
            return redirect(url_for('index'))
        else:
            flash('Please login to edit key/value pairs')
            return redirect(url_for('login'))
    elif request.method == 'POST':
        # Make sure only a logged in user is requesting edit
        if ('username' in login_session):
            key = request.form.get('key')
            value = request.form.get('value')
            item = db.query(Items).filter_by(key=item_key).first()
            if item is None:
                flash('No such key/value pair')
                return redirect(url_for('index'))
            # Make sure only user that added this item can edit this
            if (item.author_id != login_session['user_id']):
                flash('You are not allowed to edit this')
                return redirect(url_for('index'))
            # Update the Key/Value pair (blank fields leave the old value).
            if key is not None and key != '':
                item.key = key
            if value is not None and value != '':
                item.value = value
            # Commit changes to the Database
            db.add(item)
            db.commit()
            flash('Key/value pair has been updated')
            return redirect(url_for('index'))
        else:
            flash('Please login to edit key/value pairs')
            return redirect(url_for('login'))
    else:
        return redirect(url_for('index'))
@app.route('/delete/<item_key>', methods=['GET', 'POST'])
def deleteItem(item_key):
    """Render the delete-confirmation page (GET) or delete the pair (POST).

    Only the pair's author may delete it.
    """
    if request.method == 'GET':
        if ('username' in login_session):
            # find key/value pair that we want to edit
            item = db.query(Items).filter_by(key=item_key).first()
            # Make sure user is deleting only their key/value pair
            if (item.author.username == login_session['username']):
                return render_template('deleteItem.html',
                                       item=item,
                                       login_session=login_session)
            # NOTE(review): on author mismatch this falls through to the
            # final else and redirects home without any flash message.
        else:
            flash('Please login to delete key/value pairs')
            return redirect(url_for('login'))
    if request.method=='POST':
        item = db.query(Items).filter_by(key=item_key).first()
        # Make sure the right user is requesting the delete
        # NOTE(review): an unauthenticated POST raises KeyError here
        # (no 'username' in the session) -- confirm whether a login
        # check should precede this, as in the other views.
        if login_session['username'] != item.author.username:
            flash('You do not have the permission to delete that')
            return redirect(url_for('index'))
        # Delete item and commit changes
        db.delete(item)
        db.commit()
        flash('Key/Value pair deleted')
        return redirect(url_for('index'))
    else:
        return redirect(url_for('index'))
if __name__ == '__main__':
    # NOTE(review): a hard-coded secret key and debug=True are acceptable
    # only for local development -- do not deploy with these settings.
    app.secret_key = 'super_secret_key'
    app.debug = True
    app.run(host='0.0.0.0', port=3000)
|
nilq/baby-python
|
python
|
# BaseOperator.py
#
# Base class for all machines and human operators.
#
# Attributes:
# name
# states: a list of states that this operator can be in, at any time. For example: ["busy", idle"]
# start_time: the time at which the behavior starts.
#
# Member functions:
# methods to change state, and print the fraction of time spent in each state.
#
# Author: Neha Karanjkar
# Date: 20 Nov 2017
import random
import simpy
class BaseOperator(object):
    """Base class for machines and human operators in a simpy simulation.

    Tracks which of a set of named states the operator is in, how much
    simulated time has been spent in each state, and (optionally) a power
    rating per state from which energy consumption is derived.
    """
    def __init__(self, env, name):
        # env: simpy environment -- only env.now is read by this class.
        self.env=env
        self.name=name
        #start_time
        self.start_time=0
        #default states:
        self.states = ["none"]
        #power rating of the machine/operator for each state
        self.power_ratings = [0.0]
        self.time_spent_in_state = [0.0 for s in self.states]
        # current state
        self.current_state = "none"
        # variable to remember the time instant
        # at which the last state change occured.
        self.state_change_timestamp = 0.0
    # function (to be called inside the constructor of all derived classes)
    # to define the set of states for a particular type of machine.
    # Optionally the power (in watts) for each state can also be specified.
    def define_states(self,states, start_state):
        """Replace the state set and reset all per-state accounting."""
        self.states = states
        self.time_spent_in_state = [0.0 for s in states]
        assert(start_state in states)
        self.current_state=start_state
        self.power_ratings = [0.0 for s in states]
    def set_power_ratings(self, power_ratings):
        """Set the per-state power draw (watts), one value per state.

        All ratings must be strictly positive.
        """
        assert (len(power_ratings)==len(self.states))
        for p in power_ratings:
            assert(p>0)
        self.power_ratings = power_ratings
    # function to record the time spent in the current state
    # since the last timestamp
    def update_time_spent_in_current_state(self):
        """Flush elapsed simulated time into the current state's bucket."""
        i = self.states.index(self.current_state)
        self.time_spent_in_state[i] += self.env.now - self.state_change_timestamp
    # change state
    def change_state(self, new_state):
        """Switch to new_state, first crediting time to the old state."""
        prev_state = self.current_state
        self.update_time_spent_in_current_state()
        self.current_state = new_state
        self.state_change_timestamp=self.env.now
        # Only announce real transitions, not same-state refreshes.
        if(new_state!=prev_state):
            print("T=", self.env.now+0.0, self.name, "changed state to ",new_state)
    def get_utilization(self):
        """Return the percentage of total time spent in each state.

        Asserts that some time has elapsed (total must be > 0).
        """
        utilization = []
        self.update_time_spent_in_current_state()
        total_time = sum(self.time_spent_in_state)
        assert (total_time>0)
        for i in range(len(self.states)):
            t = self.time_spent_in_state[i]
            t_percent = self.time_spent_in_state[i]/total_time*100.0
            utilization.append(t_percent)
        return utilization
    # print time spent in each state
    def print_utilization(self):
        """Print per-state utilization percentages on one line."""
        u = self.get_utilization()
        print(self.name,":",end=' ')
        for i in range(len(self.states)):
            print(self.states[i], "=",end=' ')
            print("{0:.2f}".format(u[i])+"%",end=' ')
        print("")
    # calculate energy consumption (in joules)
    # for each state that the machine was in.
    def get_energy_consumption(self):
        """Return energy (joules) per state: power rating x time in state."""
        e = []
        for i in range(len(self.states)):
            e.append(self.power_ratings[i]*self.time_spent_in_state[i])
        return e
    # print energy consumption
    def print_energy_consumption(self):
        """Print each state's share of the total energy and the total in kJ."""
        e = self.get_energy_consumption()
        total_e = sum(e)
        # max(...) guards the division when no energy was consumed at all.
        denominator = max(sum(e),1.0)
        print(self.name,": (",end=' ')
        for i in range(len(self.states)):
            print(self.states[i], "=",end=' ')
            e_percent = e[i]/denominator*100.0
            print("{0:.2f}".format(e_percent)+"%",end=' ')
        print (") Total energy = ","{0:.2f}".format(total_e/1e3)," Kilo Joules.",end=' ')
        print("")
|
nilq/baby-python
|
python
|
# coding=utf-8
import os
import unittest
from parameterized import parameterized
from conans.client.conf import default_settings_yml
from conans.model.editable_cpp_info import EditableCppInfo
from conans.model.settings import Settings
def _make_abs(base_path, *args):
p = os.path.join(*args)
if base_path:
p = os.path.join(base_path, p)
p = os.path.abspath(p)
return p
class WorkOnItemsTest(unittest.TestCase):
    """Unit tests for EditableCppInfo._work_on_item path expansion.

    Each case runs twice via parameterized.expand: once with no base path
    (relative results) and once anchored at this file's directory.
    """
    def test_empty(self):
        """An empty path entry resolves to the current directory."""
        self.assertEqual(".", EditableCppInfo._work_on_item("", None, None, None))
    @parameterized.expand([(False,), (True,)])
    def test_basic(self, make_abs):
        """Relative entries (incl. spaces, '..' and trailing slash) resolve."""
        base_path = os.path.dirname(__file__) if make_abs else None
        self.assertIn(_make_abs(base_path, '.'),
                      EditableCppInfo._work_on_item(".", base_path, None, None))
        self.assertIn(_make_abs(base_path, 'src', 'include'),
                      EditableCppInfo._work_on_item("src/include", base_path, None, None))
        self.assertIn(_make_abs(base_path, '..', 'relative', 'include'),
                      EditableCppInfo._work_on_item("../relative/include", base_path, None, None))
        self.assertIn(_make_abs(base_path, 'src', 'path with spaces', 'include'),
                      EditableCppInfo._work_on_item("src/path with spaces/include",
                                                    base_path, None, None))
        self.assertIn(_make_abs(base_path, 'ending-slash', 'include'),
                      EditableCppInfo._work_on_item("ending-slash/include/", base_path, None, None))
    @parameterized.expand([(False,), (True,)])
    def test_windows(self, make_abs):
        """Windows drive-letter paths normalize regardless of slash style."""
        base_path = os.path.dirname(__file__) if make_abs else None
        self.assertIn(os.path.join('C:' + os.sep, 'Windows-single-slash', 'include'),
                      EditableCppInfo._work_on_item("C:\Windows-single-slash\include",
                                                    base_path, None, None))
        self.assertIn(os.path.join('D:' + os.sep, 'Windows-double-slash', 'include'),
                      EditableCppInfo._work_on_item("D:\\Windows-double-slash\\include",
                                                    base_path, None, None))
    @parameterized.expand([(False,), (True,)])
    def test_unix(self, make_abs):
        """Absolute POSIX paths are kept absolute (base path ignored)."""
        base_path = os.path.dirname(__file__) if make_abs else None
        self.assertIn(os.path.join(os.sep, 'abs', 'path', 'include'),
                      EditableCppInfo._work_on_item("/abs/path/include", base_path, None, None))
    @parameterized.expand([(False,), (True,)])
    def test_placeholders(self, make_abs):
        """{settings.*} placeholders are substituted from a Settings object."""
        base_path = os.path.dirname(__file__) if make_abs else None
        settings = Settings.loads(default_settings_yml)
        settings.compiler = 'Visual Studio'
        settings.compiler.version = '14'
        settings.build_type = 'Debug'
        self.assertIn(_make_abs(base_path, 'src', 'Visual Studio14', 'Debug', 'include'),
                      EditableCppInfo._work_on_item("src/{settings.compiler}{settings.compiler.version}/{settings.build_type}/include",
                                                    base_path=base_path, settings=settings,
                                                    options=None))
        self.assertIn(os.path.join('C:' + os.sep, 'Visual Studio', 'include'),
                      EditableCppInfo._work_on_item("C:\\{settings.compiler}\\include\\",
                                                    base_path=base_path, settings=settings,
                                                    options=None))
        self.assertIn(os.path.join(os.sep, 'usr', 'path with spaces', 'Visual Studio', 'dir'),
                      EditableCppInfo._work_on_item("/usr/path with spaces/{settings.compiler}/dir",
                                                    base_path=base_path, settings=settings,
                                                    options=None))
|
nilq/baby-python
|
python
|
from dags.spark_common import SparkJobCfg, spark_job, user_defined_macros, EntityPattern
from dags.spark_common import dag_schema_path, hadoop_options, LOCAL_INPUT, LOCAL_DATAWAREHOUSE
from datetime import timedelta
from airflow import DAG
args = {
    'owner': 'alexey',
    'start_date': '2021-06-10'
}
# Manually-triggered DAG (schedule_interval=None) that loads data into Hudi.
dag = DAG(
    'spark_hudi',
    schedule_interval=None,
    dagrun_timeout=timedelta(minutes=60),
    default_args=args,
    user_defined_macros=user_defined_macros,
    max_active_runs=1)
# Entities to ingest -- presumably (name, pattern, key column, ordering
# column); confirm against the EntityPattern definition in spark_common.
entity_patterns = [
    EntityPattern("orders", "orders", "orderId", "last_update_time"),
]
cfg = SparkJobCfg(
    input_path=LOCAL_INPUT,
    output_path=LOCAL_DATAWAREHOUSE,
    entity_patterns=entity_patterns,
    reader_options=["header:true"],
    hadoop_options=hadoop_options(),
    partition_by=["year", "month", "day"],
    input_schema_path=dag_schema_path,
    output_format="hudi",
    trigger_interval=-1  # NOTE(review): -1 presumably means one-shot/batch -- confirm
)
# The job jar location is resolved at render time from the Airflow
# connection's "extra" JSON blob.
JAR_PATH = "{{fromjson(connection.etl_jobs_emr_jar.extra)['path']}}"
load_to_table = spark_job('load_to_table', cfg, 'etljobs.emr.HudiIngestor', dag, None, True, JAR_PATH)
# Single-task DAG: the bare reference keeps the task expression explicit.
load_to_table
if __name__ == "__main__":
    dag.cli()
|
nilq/baby-python
|
python
|
from .mesh import import_mesh
from .curve import import_curve
from .brep import import_brep
#from .default import import_default
|
nilq/baby-python
|
python
|
#!/usr/bin/python
import sdk_common
# Block in charge of tagging the release
class SDKNewsAndTag(sdk_common.BuildStep):
    """Release build step: generates the changelog via towncrier, commits
    it, and tags the repository on GitHub (unless already tagged)."""
    def __init__(self, logger=None):
        super(SDKNewsAndTag, self).__init__('SDK News & Tag', logger)
        self.branch_name = self.common_config.get_config().get_branch_name()
        self.github_token = self.common_config.get_config().get_github_token()
        self.url_with_token = self.common_config.get_config().get_origin_url_combined_with_token()
        self.version = self.common_config.get_config().get_version()
        self.is_commit_already_tagged = self.common_config.get_config().is_commit_tagged()
        # Tag only release builds of still-untagged commits from public repos.
        self.should_tag = (
            not self.is_commit_already_tagged) and self.common_config.get_config().is_for_release() and (
            not self.common_config.get_config().is_from_private())
        self.news_folder = self.common_config.get_config().get_news_folder()
        self.changelog = self.common_config.get_config().get_changelog_file()
        self.property_file = self.common_config.get_config().get_project_property_file()
    def execute(self):
        """Run the step; returns True on success, False on any failure."""
        self.print_title()
        try:
            self.log_info("Generating the changelog")
            if self.is_commit_already_tagged:
                self.log_info(
                    "The commit was already tagged [%s]. No need to generate the changelog file" % self.fetch_commit_tag())
                return True
            if self.news_folder:
                # NOTE(review): "--name="" is two adjacent string literals, so
                # the command actually runs with a bare `--name=` (no quotes)
                # -- confirm this matches the towncrier invocation intended.
                self.check_shell_command_output("towncrier --yes --name="" --version=%s" % self.version,
                                                self.news_folder)
        except:
            self.log_error('Failed to generate the changelog file')
            return False
        try:
            if not self.should_tag:
                self.log_info("No need for tagging.")
                if self.is_commit_already_tagged:
                    self.log_info(
                        "The commit was already tagged [%s]" % self.fetch_commit_tag())
                return True
            self.tag_github()
        except:
            self.log_error('Failed to tag the repository')
            return False
        self.log_info("Done.")
        return True
    def fetch_commit_tag(self):
        """Return the tag already attached to the current commit (if any)."""
        return self.common_config.get_config().get_commit_tag()
    def tag_github(self):
        """Commit the changelog/metadata, tag the release, and push.

        Requires a GitHub token (embedded in the remote URL) and a
        configured git identity; raises when either is missing.
        """
        self.log_info("Committing the changelog")
        if not self.common_config.get_config().get_user_name() or not self.common_config.get_config().get_user_email():
            self.git_setup_env()
        if not self.url_with_token:
            if not self.github_token:
                raise Exception("The GitHub token has not been set properly")
            else:
                raise Exception("The remote URL could not be resolved")
        self.git_set_remote_url(self.url_with_token)
        self.git_set_upstream_branch(self.branch_name)
        # Stage whichever release artefacts this project actually produces.
        if self.news_folder:
            self.git_add_folder( self.news_folder)
        if self.changelog:
            self.git_add_file(self.changelog)
        if self.property_file:
            self.git_add_file(self.property_file)
        self.git_commit(':checkered_flag: Release %s' % self.version)
        self.log_info("Tagging the project")
        self.git_tag(self.version,'SDK Release')
        self.log_info("Pushing changes back to GitHub")
        self.git_push_and_follow_tags()
        self.log_info("Marking this commit as latest")
        # 'latest' is a moving tag, so it is (re)applied and force-pushed.
        self.git_soft_tag('latest')
        self.git_force_push_tags()
|
nilq/baby-python
|
python
|
from .ast_transformers import InvertGenerator, transformAstWith
from .descriptor_magic import \
wrapMethodAndAttachDescriptors, BindingExtensionDescriptor
import six
import inspect
def coroutine(func):
    """Decorator: call the generator function and advance the resulting
    generator to its first yield, so it is immediately ready to receive
    values via .send()."""
    def start(*args, **kwargs):
        gen = func(*args, **kwargs)
        six.next(gen)
        return gen
    return start
def _funcIsMethod(stackFromFunc):
    """ Determine whether a function being decorated is actually a method of a
    class, given the stack frames above the decorator invocation. """
    # Frame record layout: (frame, filename, lineno, function_name,
    # code_context, index).
    funcFrame = stackFromFunc[0]
    # For a method defined in a class body, the enclosing frame's "function"
    # name is the class name being defined...
    potentialClassName = funcFrame[3]
    nextFrame = stackFromFunc[1]
    # ...and the frame above that is module level whose source line starts
    # with 'class <Name>'.
    return nextFrame[3] == '<module>' and \
        nextFrame[4][0].startswith('class ' + potentialClassName)
def hasInvertibleMethods(cls):
    """ Class decorator that transforms methods that have been marked with
    "invertibleGenerator" """
    # NOTE(review): currently a stub -- marked methods are detected but not
    # yet transformed (the needed globals/locals are not available here).
    #frames = inspect.stack()
    #from pprint import PrettyPrinter
    #globs = map(lambda frame: frame[0].f_globals, frames)
    #locs = map(lambda frame: frame[0].f_locals, frames)
    #pp = PrettyPrinter(indent=4)
    #for (glob, loc) in zip(globs, locs):
    #print "GLOBALS:"
    #pp.pprint(glob)
    #print "LOCALS:"
    #pp.pprint(loc)
    for name, method in six.iteritems(cls.__dict__):
        if hasattr(method, "markForConversion"):
            # TODO: transform and wrap
            # But need globals/locals
            pass
    return cls
def _makeInvertibleUsingFrame(frame, func):
    """ Add a co method to a generator function, that is the equivalent
    coroutine. """
    # Re-compile func's AST with the InvertGenerator transform, evaluating
    # it in the caller's globals/locals so free names still resolve, then
    # prime the result as a coroutine.
    return coroutine(
        transformAstWith(
            frame[0].f_globals,
            frame[0].f_locals,
            [InvertGenerator])(func)
    )
def invertibleGenerator(func):
    """ Add a co method to a generator function, that is the equivalent
    coroutine. """
    # The caller's frame supplies the globals/locals needed to re-evaluate
    # the transformed function in its original scope.
    frames = inspect.stack()
    nextFrame = frames[1]
    transformedFunc = _makeInvertibleUsingFrame(nextFrame, func)
    if _funcIsMethod(frames[1:]):
        # TODO: either remove, or use in class decorator
        func.markForConversion = True
        # Methods need a descriptor so .co binds to the instance correctly.
        return wrapMethodAndAttachDescriptors({
            'co': BindingExtensionDescriptor(transformedFunc)
        })(func)
    else:
        # Plain functions get the coroutine attached as a simple attribute.
        func.co = transformedFunc
        return func
|
nilq/baby-python
|
python
|
import re
import xmlsec
from lxml import etree
def parse_tbk_error_message(raw_message):
    """Extract (error_text, code) from a TBK error payload.

    The payload embeds its human-readable message in an HTML comment,
    optionally suffixed with a numeric code in parentheses, e.g.
    "<!-- some error(274) -->".  Returns (message, -1) when no code (or
    no comment at all) is present.
    """
    comment = re.search(r'<!--(.+?)-->', raw_message)
    if comment is None:
        return raw_message, -1
    message = comment.group(1).strip()
    detail = re.search(r'(.+?)\((\d+?)\)', message)
    if detail is None:
        return message, -1
    return detail.group(1), int(detail.group(2))
def get_key_format_value(key_format):
    """Map a key-format name (e.g. 'PEM', 'DER') to its xmlsec.KeyFormat value.

    Raises ValueError for names xmlsec does not define.
    """
    try:
        return getattr(xmlsec.KeyFormat, key_format)
    except AttributeError:
        raise ValueError("Key format {} unsupported".format(key_format))
def load_key_from_data(key_data, cert_data=None, password=None, key_format='PEM'):
    """Build an xmlsec.Key from in-memory key bytes, optionally attaching a
    certificate (assumed to be in the same format as the key)."""
    key_format = get_key_format_value(key_format)
    key = xmlsec.Key.from_memory(key_data, key_format, password)
    if cert_data:
        key.load_cert_from_memory(cert_data, key_format)
    return key
def xml_to_string(tree):
    """Serialize an lxml element/tree to a unicode XML string."""
    return etree.tostring(tree).decode('utf-8')
def create_xml_element(tag_name, nsmap=None):
    """Create a bare lxml element with an optional namespace map."""
    return etree.Element(tag_name, nsmap=nsmap)
|
nilq/baby-python
|
python
|
import os
import djcelery
# Configure the Django/Celery environment before the WSGI app is created.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'findingaids.settings')
# Writable egg cache for the web-server user.
os.environ['PYTHON_EGG_CACHE'] = '/tmp'
os.environ['VIRTUAL_ENV'] = '/home/httpd/findingaids/env/'
djcelery.setup_loader()
# from django.core.handlers.wsgi import WSGIHandler
# application = WSGIHandler()
from django.core.wsgi import get_wsgi_application
# WSGI entry point used by the web server.
application = get_wsgi_application()
|
nilq/baby-python
|
python
|
from acme import Product
import random
ADJECTIVES = ['Awesome', 'Shiny', 'Impressive', 'Portable', 'Improved']
NOUNS = ['Anvil', 'Catapult', 'Disguise', 'Mousetrap', '???']
def generate_products(n=30, price_range=(5, 10), weight_range=(5, 100)):
    """Generate n randomly named/priced/weighted Products.

    Prices and weights are drawn inclusively from the given (low, high)
    ranges; flammability is uniform in [0.0, 2.5].
    """
    low_price, high_price = price_range
    low_weight, high_weight = weight_range
    batch = []
    for _ in range(n):
        name = random.choice(ADJECTIVES) + ' ' + random.choice(NOUNS)
        price = random.randrange(low_price, high_price + 1)
        weight = random.randrange(low_weight, high_weight + 1)
        flammability = random.uniform(0.0, 2.5)
        batch.append(Product(name, price, weight, flammability))
    return batch
def inventory_report(prod_list):
    """Print summary statistics (count, average price/weight/flammability)
    for the unique products in prod_list and return the three averages.

    Returns None (after printing a notice) for an empty list instead of
    dividing by zero.
    """
    unique = list(set(prod_list))
    count = len(unique)
    if count == 0:
        print('There are no products to report on.')
        return None
    total_price = 0
    total_weight = 0
    total_flammability = 0
    stealable_count = 0
    for item in unique:
        total_price += item.price
        total_weight += item.weight
        total_flammability += item.flammability
        # BUG FIX: the original compared a running int counter against the
        # string 'Not so stealable...', which is always unequal, so *every*
        # product counted as stealable.  Ask the product itself instead.
        # NOTE(review): assumes acme.Product exposes a stealability() method
        # returning 'Not so stealable...' for low-risk items -- confirm.
        if item.stealability() != 'Not so stealable...':
            stealable_count += 1
    avg_price = total_price / count
    avg_weight = total_weight / count
    avg_flammability = total_flammability / count
    print(f'There are {count} unique products in this list. The average price is {avg_price}, '
          f'average weight is {avg_weight},'
          f'and the average flammability is {avg_flammability}.')
    if stealable_count >= count / 2:
        print('Many of these items are highly stealable!')
    return avg_price, avg_weight, avg_flammability
if __name__ == '__main__':
    # Smoke-run: report on a freshly generated default batch of products.
    inventory_report(generate_products())
|
nilq/baby-python
|
python
|
import sys, os
# Make the project and its Django settings importable from this scratch
# script (developer-machine absolute paths).
sys.path.append('/Users/syrus/Proyectos/exercita/website/')
sys.path.append('/Users/syrus/Sites/exercita/')
os.environ['DJANGO_SETTINGS_MODULE'] = 'settings'
TEMPLATE_EXAMPLE = '''<ul>
{% for gender in gender_list %}
<li>{{ gender.grouper }}
<ul>
{% for item in gender.list %}
<li>{{ item.first_name|default:a|join:", " }} {{ item.last_name|center:a }}</li>
{% endfor %}
</ul>
</li>
{% endfor %}
</ul>'''
TEMPLATE_EXAMPLE2 = '''{% extends "admin/base.html" %}
{% load i18n %}
{% block title %}{{ title }} | {% trans 'Django site admin'|default:'a' %}{% endblock %}
{% block branding %}
<h1 id="site-name">{% trans 'Django administration' %}</h1>
{% endblock %}
{% filter force_escape %}
Autoescapar
{% endfilter %}
{% autoescape on %}asd{% endautoescape %}
{% block nav-global %}{% endblock %}
{% trans 'hola' %}
{% for user in users %}
<li class="{% cycle 'odd' 'even' %}">{{ user }}</li>
<li class="{% cycle 'odd' 'eveojkn' as b %}">{{ user }}</li>
<li class="{% cycle b %}">{{ user }}b</li>
{% include "admin/base.html" with a='1' r='2' aa='1' %}
{% empty %}
Vacio
{% endfor %}
{% ifequal a b %}a{% endifequal %}
{% ifequal a b %}a{% else %}b{% endifequal %}
{% firstof a b 'hola' %}
{% if a|length > 0 or b and c|default:'b' <= c|default:'a' and not b%}
asd
{% endif %}
{% blocktrans with amount=article.price count years=i.length %}
That will cost $ {{ amount }} per year.
{% plural %}
That will cost $ {{ amount }} per {{ years }} years.
{% endblocktrans %}
{% regroup people|dictsort:"gender" by gender_by as gender_list %}
{% url path.to.view arg arg2 as the_url %}
{% url app_views.client client.id %}
'''
TEMPLATE_ADMIN = '''{% extends "admin/base_site.html" %}
{% load i18n %}
{% block breadcrumbs %}<div class="breadcrumbs"><a href="/">{% trans "Home" %}</a> › {% trans "Server error" %}</div>{% endblock %}
{% block title %}{% trans 'Server error (500)' %}{% endblock %}
{% block content %}
<h1>{% trans 'Server Error <em>(500)</em>' %}</h1>
<p>{% trans "There's been an error. It's been reported to the site administrators via e-mail and should be fixed shortly. Thanks for your patience." %}</p>
{% endblock %}
'''
#from django.template.loader import *
from django.template import Template
from djinja.template.utils import DjinjaAdapter
# Adapt a parsed Django template and dump the converted source (Python 2).
dj = DjinjaAdapter(Template(TEMPLATE_EXAMPLE2))
print dj.process()
|
nilq/baby-python
|
python
|
"""
My purpose in life is to take the NWS AWIPS Geodata Zones Shapefile and
dump them into the PostGIS database! I was bootstraped like so:
python ugcs_update.py z_16mr06 2006 03 16
python ugcs_update.py z_11mr07 2007 03 11
python ugcs_update.py z_31my07 2007 05 31
python ugcs_update.py z_01au07 2007 08 01
python ugcs_update.py z_5sep07 2007 09 05
python ugcs_update.py z_25sep07 2007 09 25
python ugcs_update.py z_01ap08 2008 04 01
python ugcs_update.py z_09se08 2008 09 09
python ugcs_update.py z_03oc08 2008 10 03
python ugcs_update.py z_07my09 2009 05 07
python ugcs_update.py z_15jl09 2009 07 15
python ugcs_update.py z_22jl09 2009 07 22
python ugcs_update.py z_04au11 2011 08 04
python ugcs_update.py z_13oc11 2011 10 13
python ugcs_update.py z_31my11 2011 05 31
python ugcs_update.py z_15de11 2011 12 15
python ugcs_update.py z_23fe12 2012 02 23
python ugcs_update.py z_03ap12 2012 04 03
python ugcs_update.py z_12ap12 2012 04 12
python ugcs_update.py z_07jn12 2012 06 07
python ugcs_update.py z_11oc12 2012 10 11
python ugcs_update.py z_03de13a 2013 12 03
python ugcs_update.py z_05fe14a 2014 02 05
"""
import sys
import os
import zipfile
import requests
import geopandas as gpd
from shapely.geometry import MultiPolygon
from pyiem.util import utc, logger
# Put the pywwa library into sys.path
sys.path.insert(
0, os.path.join(os.path.dirname(os.path.abspath(__file__)), "../parsers")
)
# pylint: disable=wrong-import-position
from pywwa.database import get_sync_dbconn # noqa: E402
LOG = logger()
# Change Directory to /tmp, so that we can rw
os.chdir("/tmp")
def do_download(zipfn):
    """Fetch ``zipfn`` from the NWS website (unless cached) and unzip it.

    Returns the filename of the extracted ``.shp`` member, or ``None`` when
    the archive contained no shapefile.
    """
    if not os.path.isfile(zipfn):
        # County archives live in a different web folder than WSOM zones.
        folder = "County" if zipfn.startswith("c_") else "WSOM"
        resp = requests.get(
            ("https://www.weather.gov/source/gis/Shapefiles/%s/%s")
            % (folder, zipfn)
        )
        LOG.info("Downloading %s ...", zipfn)
        with open(zipfn, "wb") as localfh:
            localfh.write(resp.content)
    LOG.info("Unzipping")
    shpfn = None
    with zipfile.ZipFile(zipfn, "r") as zfp:
        for member in zfp.namelist():
            LOG.info("Extracting %s", member)
            with open(member, "wb") as outfh:
                outfh.write(zfp.read(member))
            if member.endswith("shp"):
                shpfn = member
    return shpfn
def new_poly(geo):
    """Return *geo* with its largest polygon first.

    Downstream plotting simplification only considers the first polygon of
    a multi-polygon, so the biggest one must lead.

    Args:
        geo: a shapely ``Polygon`` or ``MultiPolygon``.

    Returns:
        The ``Polygon`` unchanged, or a ``MultiPolygon`` whose parts are
        ordered by descending area.
    """
    if geo.geom_type == "Polygon":
        return geo
    # Shapely 2.0 removed direct iteration over multi-part geometries, so
    # go through the ``.geoms`` accessor (also available on 1.x).  Sorting
    # by descending area guarantees the biggest polygon comes first, which
    # is the only ordering property the old insert-at-front logic ensured.
    polys = sorted(geo.geoms, key=lambda poly: poly.area, reverse=True)
    return MultiPolygon(polys)
def db_fixes(cursor, valid):
    """Fix some issues in the database

    Post-import cleanup: repair invalid geometries, refresh the derived
    simple_geom / centroid / area2163 columns, and verify that the
    simplification did not destroy any geometry.

    Args:
        cursor: database cursor used for the updates.
        valid: ``begin_ts`` of the rows that were just inserted.
    """
    # Repair any invalid geometries among still-active rows from this import.
    cursor.execute(
        "update ugcs SET geom = st_makevalid(geom) where end_ts is null "
        "and not st_isvalid(geom) and begin_ts = %s",
        (valid,),
    )
    LOG.info("Fixed %s entries that were ST_Invalid()", cursor.rowcount)
    # Recompute derived columns for the new rows (plus any row missing an
    # area, so older rows get backfilled too).
    cursor.execute(
        """
    UPDATE ugcs SET simple_geom = ST_Multi(
        ST_Buffer(ST_SnapToGrid(geom, 0.01), 0)
    ),
    centroid = ST_Centroid(geom),
    area2163 = ST_area( ST_transform(geom, 2163) ) / 1000000.0
    WHERE begin_ts = %s or area2163 is null
    """,
        (valid,),
    )
    LOG.info(
        "Updated simple_geom,centroid,area2163 for %s rows", cursor.rowcount
    )
    # Check the last step that we don't have empty geoms, which happened once
    def _check():
        """Do the check."""
        # Selects rows whose simplified geometry is missing, empty, or lost
        # more than 10% of the original area; caller reads cursor.rowcount.
        cursor.execute(
            """
            SELECT end_ts from ugcs
            where begin_ts = %s and (
                simple_geom is null or
                ST_IsEmpty(simple_geom) or
                ST_Area(simple_geom) / ST_Area(geom) < 0.9
            )
            """,
            (valid,),
        )
    _check()
    if cursor.rowcount > 0:
        LOG.info(
            "%s rows with empty, too small simple_geom, decreasing tolerance",
            cursor.rowcount,
        )
        # Retry the simplification with a much finer snap grid.
        cursor.execute(
            """
            UPDATE ugcs
            SET simple_geom = ST_Multi(
                ST_Buffer(ST_SnapToGrid(geom, 0.0001), 0)
            )
            WHERE begin_ts = %s and (
                simple_geom is null or
                ST_IsEmpty(simple_geom) or
                ST_Area(simple_geom) / ST_Area(geom) < 0.9
            )
            """,
            (valid,),
        )
        _check()
        if cursor.rowcount > 0:
            LOG.info(
                "Found %s rows with empty simple_geom, FIXME SOMEHOW!",
                cursor.rowcount,
            )
def truncate(cursor, valid, ugc, source):
    """Stop the bleeding.

    Close out the currently-active database entry for ``ugc`` within
    ``source`` by setting its ``end_ts`` to ``valid``.

    Returns:
        int: number of rows updated (0 when no active entry existed).
    """
    sql = (
        "UPDATE ugcs SET end_ts = %s WHERE ugc = %s and end_ts is null "
        "and source = %s"
    )
    params = (valid, ugc, source)
    cursor.execute(sql, params)
    return cursor.rowcount
def workflow(argv, pgconn, cursor):
    """Go Main Go

    Download the requested NWS shapefile, diff it against the currently
    active ``ugcs`` rows, truncate superseded entries and insert the new
    geometries.

    Args:
        argv (list): [script, zipfile stem, year, month, day]
        pgconn: live database connection (read side).
        cursor: database cursor (write side).
    """
    # NWS correspondence indicates the date on the website is assumed to be
    # an implementation time at 18 z of that date.
    valid = utc(int(argv[2]), int(argv[3]), int(argv[4]), 18)
    zipfn = "%s.zip" % (argv[1],)
    shpfn = do_download(zipfn)
    # track domain
    source = zipfn[:2].replace("_", "")
    LOG.info("Processing, using '%s' as the database source", source)
    df = gpd.read_file(shpfn)
    # Ensure CRS is set
    df["geometry"] = df["geometry"].set_crs("EPSG:4326", allow_override=True)
    if df.empty:
        LOG.info("Abort, empty dataframe from shapefile read.")
        sys.exit()
    # make all columns upper
    df.columns = [x.upper() if x != "geometry" else x for x in df.columns]
    # Compute the ugc column.  Marine zones ship a ready-made ID, counties
    # build from STATE + 'C' + FIPS, land zones from STATE + 'Z' + ZONE.
    if zipfn[:2] in ("mz", "oz", "hz"):
        df["STATE"] = ""
        df["ugc"] = df["ID"]
        wfocol = "WFO"
    elif zipfn.startswith("c_"):
        geo_type = "C"
        df["ugc"] = df["STATE"] + geo_type + df["FIPS"].str.slice(-3)
        df["NAME"] = df["COUNTYNAME"]
        wfocol = "CWA"
    else:
        geo_type = "Z"
        df["ugc"] = df["STATE"] + geo_type + df["ZONE"]
        wfocol = "CWA"
    # Check that UGCs are not all null
    if df["ugc"].isna().all():
        LOG.info("Abort as all ugcs are null")
        sys.exit()
    postgis = gpd.read_postgis(
        "SELECT * from ugcs where end_ts is null and source = %s",
        pgconn,
        params=(source,),
        geom_col="geom",
        index_col="ugc",
    )
    # Track which active database rows are still present in the new file.
    postgis["covered"] = False
    LOG.info(
        "Loaded %s '%s' type rows from the database",
        len(postgis.index),
        source,
    )
    # Compute the area and then sort to order duplicated UGCs :/
    # Database stores as sq km
    df["area2163"] = df["geometry"].to_crs(2163).area / 1e6
    df.sort_values(by="area2163", ascending=False, inplace=True)
    # Keep only the largest-area entry for each duplicated UGC.
    gdf = df.groupby("ugc").nth(0)
    LOG.info(
        "Loaded %s/%s unique entries from %s",
        len(gdf.index),
        len(df.index),
        shpfn,
    )
    countnew = 0
    countdups = 0
    for ugc, row in gdf.iterrows():
        if ugc in postgis.index:
            postgis.at[ugc, "covered"] = True
            # Some very small number, good enough
            current = postgis.loc[ugc]
            if isinstance(current, gpd.GeoDataFrame):
                LOG.info("abort, more than one %s found in postgis", ugc)
                sys.exit()
            dirty = False
            # arb size decision
            if abs(row["area2163"] - current["area2163"]) > 0.2:
                dirty = True
                # BUGFIX: areas are floats; the old "%.2d" conversion
                # truncated them to zero-padded integers.
                LOG.debug(
                    "%s updating sz diff %.2f -> %.2f",
                    ugc,
                    current["area2163"],
                    row["area2163"],
                )
            elif row["NAME"] != current["name"]:
                dirty = True
                LOG.debug(
                    "%s updating due to name change %s -> %s",
                    ugc,
                    current["name"],
                    row["NAME"],
                )
            elif row[wfocol] != current["wfo"]:
                dirty = True
                LOG.debug(
                    "%s updating due to wfo change %s -> %s",
                    ugc,
                    current["wfo"],
                    row[wfocol],
                )
            if not dirty:
                countdups += 1
                continue
        res = truncate(cursor, valid, ugc, source)
        LOG.info(
            "%s creating new entry for %s",
            "Truncating old" if res > 0 else "",
            ugc,
        )
        # Finally, insert the new geometry.  Brand-new UGCs are backdated
        # to 1980 so history queries find them; replacements use `valid`.
        cursor.execute(
            "INSERT into ugcs (ugc, name, state, begin_ts, wfo, geom, "
            "source) VALUES (%s, %s, %s, %s, %s, "
            "ST_Multi(ST_SetSRID(ST_GeomFromEWKT(%s),4326)), %s)",
            (
                ugc,
                row["NAME"].strip(),
                row["STATE"],
                "1980-01-01" if res == 0 else valid,
                row[wfocol],
                new_poly(row["geometry"]).wkt,
                source,
            ),
        )
        countnew += 1
    # Anything still active in the database but absent from the new file
    # has been retired by the NWS, so end-date it.
    for ugc, _row in postgis[~postgis["covered"]].iterrows():
        LOG.info("%s not found in update, truncating.", ugc)
        truncate(cursor, valid, ugc, source)
    LOG.info("NEW: %s Dups: %s", countnew, countdups)
    db_fixes(cursor, valid)
def main(argv):
    """Go Main Go

    Validate the command line, open the database, run the update workflow,
    then commit and clean up.
    """
    if len(argv) != 5:
        LOG.info("ERROR: You need to specify the file date to download + date")
        LOG.info("Example: python ugcs_update.py z_01dec10 2010 12 01")
        sys.exit(0)
    conn = get_sync_dbconn("postgis")
    cur = conn.cursor()
    workflow(argv, conn, cur)
    cur.close()
    conn.commit()
    conn.close()
    LOG.info("Done!")
if __name__ == "__main__":
# Get the name of the file we wish to download
main(sys.argv)
|
nilq/baby-python
|
python
|
import requests
import logging
from lxml import html
class HTDownloader():
    """Lazy downloader for one HathiTrust page image.

    Only stores the request parameters; nothing is fetched until get().
    """

    def __init__(self, htid, res, i):
        self.htid = htid
        self.i = i
        self.res = res

    def get(self):
        """Fetch this page, returning a (content, content_type) tuple."""
        logging.debug("Download image: {}".format(self.i))
        return down_img(self.htid, self.i, self.res)
def get_hathi_section_element(hid):
    """Fetch the HathiTrust page-turner page for *hid* and return its
    <section id="section"> element (raises IndexError when absent)."""
    resp = requests.get("https://babel.hathitrust.org/cgi/pt?id=" + hid)
    doc = html.fromstring(resp.content)
    return doc.xpath('.//section[@id="section"]')[0]
def down_img(htid, i, res):
    """Download page *i* of HathiTrust volume *htid* at size *res*.

    Returns:
        tuple: (raw image bytes, Content-Type header value)
    """
    template = "https://babel.hathitrust.org/cgi/imgsrv/image?id={htid};seq={seq};size={res};rotation={rot}"
    resp = requests.get(template.format(htid=htid, seq=i, res=res, rot=0))
    ctype = resp.headers['content-type']
    logging.debug("Page {}: {}".format(i, ctype))
    return resp.content, ctype
def dl_images(htid, res=10000):
    """Yield (htid, page_number, HTDownloader) for every page of *htid*.

    Performs a single upfront request to learn the page count; each page's
    actual download is deferred to its HTDownloader.
    """
    section = get_hathi_section_element(htid)
    total = int(section.attrib['data-total-seq'])
    logging.debug("Num pages: {}".format(total))
    seq = 1
    while seq <= total:
        yield htid, seq, HTDownloader(htid, res, seq)
        seq += 1
|
nilq/baby-python
|
python
|
import asyncio
from contextlib import asynccontextmanager
from sys import version_info
from typing import AsyncIterator
import pytest
from aioredis import create_redis_pool
from aiohttp_client_cache.backends.redis import DEFAULT_ADDRESS, RedisBackend, RedisCache
from aiohttp_client_cache.session import CachedSession
from test.integration import BaseBackendTest, BaseStorageTest
def is_db_running():
    """Test if a Redis server is running locally on the default port"""

    async def _ping_server():
        # Connect, issue a trivial INFO command, then shut down cleanly.
        pool = await create_redis_pool(DEFAULT_ADDRESS)
        await pool.info()
        pool.close()
        await pool.wait_closed()

    try:
        asyncio.run(_ping_server())
    except OSError:
        # Connection refused / unreachable -> no server listening.
        return False
    return True
# Module-wide marks: every test runs under asyncio, and the whole module is
# skipped when running on Python 3.10+ (presumably the pinned aioredis does
# not support it -- confirm) or when no local Redis server answers.
pytestmark = [
    pytest.mark.asyncio,
    pytest.mark.skipif(
        version_info >= (3, 10) or not is_db_running(),
        reason='Redis server required for integration tests',
    ),
]
class TestRedisCache(BaseStorageTest):
    """Run the shared storage test suite against the Redis cache backend."""
    storage_class = RedisCache
    picklable = True  # this backend stores pickled values
class TestRedisBackend(BaseBackendTest):
    """Run the shared backend integration tests against RedisBackend."""
    backend_class = RedisBackend
    @asynccontextmanager
    async def init_session(self, **kwargs) -> AsyncIterator[CachedSession]:
        # Delegate to the base fixture, then explicitly close the cache's
        # Redis connection once the test body is done with the session.
        async with super().init_session(**kwargs) as session:
            yield session
        await session.cache.close()
|
nilq/baby-python
|
python
|
from django.contrib import admin
from .models import Book, Author, Publisher, Loaned
# Register your models here.
class BookAdmin(admin.ModelAdmin):
    """Admin options for Book: name-ordered, name-searchable change list
    that also shows when each book was added."""
    list_display = ('name', 'date_added')
    search_fields = ["name"]
    ordering = ["name"]
# Expose the library models in the Django admin; Book uses the customized
# BookAdmin options, the other models use the defaults.
admin.site.register(Book, BookAdmin)
admin.site.register(Author)
admin.site.register(Publisher)
admin.site.register(Loaned)
|
nilq/baby-python
|
python
|
from .manage import *
|
nilq/baby-python
|
python
|
#!/usr/bin/env python
##
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test the protocol parsing logic."""
from __future__ import absolute_import, division, print_function
import json
import unittest
import avro.protocol
import avro.schema
# Python 2/3 compatibility shims: Python 3 has no ``unicode`` or
# ``basestring`` builtins, so alias them to the closest equivalents.
try:
    unicode
except NameError:
    unicode = str
try:
    basestring  # type: ignore
except NameError:
    basestring = (bytes, unicode)
class TestProtocol(object):
    """A proxy for a protocol string that provides useful test metadata."""

    def __init__(self, data, name='', comment=''):
        # Accept either a ready-made JSON string or any JSON-serializable
        # structure (dict/list), normalizing to a string up front.
        self.data = data if isinstance(data, basestring) else json.dumps(data)
        self.name = name or self.data
        self.comment = comment

    def parse(self):
        """Parse this protocol text with the avro library."""
        return avro.protocol.parse(str(self))

    def __str__(self):
        return str(self.data)
class ValidTestProtocol(TestProtocol):
    """A proxy for a valid protocol string that provides useful test metadata."""
    valid = True  # parse() is expected to succeed for these examples
class InvalidTestProtocol(TestProtocol):
    """A proxy for an invalid protocol string that provides useful test metadata."""
    valid = False  # parse() is expected to raise for these examples
# Canonical minimal protocol; TestMisc uses it for its namespace assertions.
HELLO_WORLD = ValidTestProtocol({
    "namespace": "com.acme",
    "protocol": "HelloWorld",
    "types": [
        {"name": "Greeting", "type": "record", "fields": [
            {"name": "message", "type": "string"}]},
        {"name": "Curse", "type": "error", "fields": [
            {"name": "message", "type": "string"}]}
    ],
    "messages": {
        "hello": {
            "request": [{"name": "greeting", "type": "Greeting" }],
            "response": "Greeting",
            "errors": ["Curse"]
        }
    }
})
EXAMPLES = [HELLO_WORLD, ValidTestProtocol({
"namespace": "org.apache.avro.test",
"protocol": "Simple",
"types": [
{"name": "Kind", "type": "enum", "symbols": ["FOO","BAR","BAZ"]},
{"name": "MD5", "type": "fixed", "size": 16},
{"name": "TestRecord", "type": "record", "fields": [
{"name": "name", "type": "string", "order": "ignore"},
{"name": "kind", "type": "Kind", "order": "descending"},
{"name": "hash", "type": "MD5"}
]},
{"name": "TestError", "type": "error", "fields": [{"name": "message", "type": "string"}]}
],
"messages": {
"hello": {
"request": [{"name": "greeting", "type": "string"}],
"response": "string"
}, "echo": {
"request": [{"name": "record", "type": "TestRecord"}],
"response": "TestRecord"
}, "add": {
"request": [{"name": "arg1", "type": "int"}, {"name": "arg2", "type": "int"}],
"response": "int"
}, "echoBytes": {
"request": [{"name": "data", "type": "bytes"}],
"response": "bytes"
}, "error": {
"request": [],
"response": "null",
"errors": ["TestError"]
}
}
}), ValidTestProtocol({
"namespace": "org.apache.avro.test.namespace",
"protocol": "TestNamespace",
"types": [
{"name": "org.apache.avro.test.util.MD5", "type": "fixed", "size": 16},
{"name": "TestRecord", "type": "record", "fields": [
{"name": "hash", "type": "org.apache.avro.test.util.MD5"}
]},
{"name": "TestError", "namespace": "org.apache.avro.test.errors", "type": "error",
"fields": [ {"name": "message", "type": "string"}]}
],
"messages": {
"echo": {
"request": [{"name": "record", "type": "TestRecord"}],
"response": "TestRecord"
}, "error": {
"request": [],
"response": "null",
"errors": ["org.apache.avro.test.errors.TestError"]
}
}
}), ValidTestProtocol({
"namespace": "org.apache.avro.test.namespace",
"protocol": "TestImplicitNamespace",
"types": [
{"name": "org.apache.avro.test.util.MD5", "type": "fixed", "size": 16},
{"name": "ReferencedRecord", "type": "record",
"fields": [ {"name": "foo", "type": "string"}]},
{"name": "TestRecord", "type": "record",
"fields": [{"name": "hash", "type": "org.apache.avro.test.util.MD5"},
{"name": "unqualified", "type": "ReferencedRecord"}]
},
{"name": "TestError", "type": "error", "fields": [{"name": "message", "type": "string"}]}
],
"messages": {
"echo": {
"request": [{"name": "qualified", "type": "org.apache.avro.test.namespace.TestRecord"}],
"response": "TestRecord"
}, "error": {
"request": [],
"response": "null",
"errors": ["org.apache.avro.test.namespace.TestError"]
}
}
}), ValidTestProtocol({
"namespace": "org.apache.avro.test.namespace",
"protocol": "TestNamespaceTwo",
"types": [
{"name": "org.apache.avro.test.util.MD5", "type": "fixed", "size": 16},
{"name": "ReferencedRecord", "type": "record",
"namespace": "org.apache.avro.other.namespace",
"fields": [{"name": "foo", "type": "string"}]},
{"name": "TestRecord", "type": "record",
"fields": [{"name": "hash", "type": "org.apache.avro.test.util.MD5"},
{"name": "qualified",
"type": "org.apache.avro.other.namespace.ReferencedRecord"}]
},
{"name": "TestError",
"type": "error", "fields": [{"name": "message", "type": "string"}]}],
"messages": {
"echo": {
"request": [{"name": "qualified", "type": "org.apache.avro.test.namespace.TestRecord"}],
"response": "TestRecord"
}, "error": {
"request": [],
"response": "null",
"errors": ["org.apache.avro.test.namespace.TestError"]
}
}
}), ValidTestProtocol({
"namespace": "org.apache.avro.test.namespace",
"protocol": "TestValidRepeatedName",
"types": [
{"name": "org.apache.avro.test.util.MD5", "type": "fixed", "size": 16},
{"name": "ReferencedRecord", "type": "record",
"namespace": "org.apache.avro.other.namespace",
"fields": [{"name": "foo", "type": "string"}]},
{"name": "ReferencedRecord", "type": "record",
"fields": [{"name": "bar", "type": "double"}]},
{"name": "TestError",
"type": "error", "fields": [{"name": "message", "type": "string"}]}],
"messages": {
"echo": {
"request": [{"name": "qualified", "type": "ReferencedRecord"}],
"response": "org.apache.avro.other.namespace.ReferencedRecord"},
"error": {
"request": [],
"response": "null",
"errors": ["org.apache.avro.test.namespace.TestError"]}
}
}), InvalidTestProtocol({
"namespace": "org.apache.avro.test.namespace",
"protocol": "TestInvalidRepeatedName",
"types": [
{"name": "org.apache.avro.test.util.MD5", "type": "fixed", "size": 16},
{"name": "ReferencedRecord", "type": "record",
"fields": [ {"name": "foo", "type": "string"}]},
{"name": "ReferencedRecord", "type": "record",
"fields": [ {"name": "bar", "type": "double"}]},
{"name": "TestError",
"type": "error", "fields": [{"name": "message", "type": "string"}]}],
"messages": {
"echo": {
"request": [{"name": "qualified", "type": "ReferencedRecord"}],
"response": "org.apache.avro.other.namespace.ReferencedRecord"
}, "error": {
"request": [],
"response": "null",
"errors": ["org.apache.avro.test.namespace.TestError"]
}
}
}),
ValidTestProtocol({
"namespace": "org.apache.avro.test",
"protocol": "BulkData",
"types": [],
"messages": {
"read": {
"request": [],
"response": "bytes"
}, "write": {
"request": [ {"name": "data", "type": "bytes"} ],
"response": "null"
}
}
}), ValidTestProtocol({
"protocol": "API",
"namespace": "xyz.api",
"types": [{
"type": "enum",
"name": "Symbology",
"namespace": "xyz.api.product",
"symbols": ["OPRA", "CUSIP", "ISIN", "SEDOL"]
}, {
"type": "record",
"name": "Symbol",
"namespace": "xyz.api.product",
"fields": [{"name": "symbology", "type": "xyz.api.product.Symbology"},
{"name": "symbol", "type": "string"}]
}, {
"type": "record",
"name": "MultiSymbol",
"namespace": "xyz.api.product",
"fields": [{"name": "symbols",
"type": {"type": "map", "values": "xyz.api.product.Symbol"}}]
}],
"messages": {}
}),
]
# Subset of EXAMPLES expected to parse cleanly; used by the error-schema
# and round-trip test generators below.
VALID_EXAMPLES = [e for e in EXAMPLES if e.valid]
class TestMisc(unittest.TestCase):
    """Spot checks of namespace handling on the HELLO_WORLD protocol."""
    def test_inner_namespace_set(self):
        """Types declared inside a protocol inherit the protocol namespace."""
        print('')
        print('TEST INNER NAMESPACE')
        print('===================')
        print('')
        proto = HELLO_WORLD.parse()
        self.assertEqual(proto.namespace, "com.acme")
        greeting_type = proto.types_dict['Greeting']
        self.assertEqual(greeting_type.namespace, 'com.acme')
    def test_inner_namespace_not_rendered(self):
        """Inherited namespaces must not be re-serialized on inner types."""
        proto = HELLO_WORLD.parse()
        self.assertEqual('com.acme.Greeting', proto.types[0].fullname)
        self.assertEqual('Greeting', proto.types[0].name)
        # but there shouldn't be 'namespace' rendered to json on the inner type
        self.assertFalse('namespace' in proto.to_json()['types'][0])
class ProtocolParseTestCase(unittest.TestCase):
    """Enable generating parse test cases over all the valid and invalid example protocols."""
    def __init__(self, test_proto):
        """Ignore the normal signature for unittest.TestCase because we are generating
        many test cases from this one class. This is safe as long as the autoloader
        ignores this class. The autoloader will ignore this class as long as it has
        no methods starting with `test_`.
        """
        # Dispatch to the method matching the example's validity flag.
        super(ProtocolParseTestCase, self).__init__(
            'parse_valid' if test_proto.valid else 'parse_invalid')
        self.test_proto = test_proto
    def parse_valid(self):
        """Parsing a valid protocol should not error."""
        try:
            self.test_proto.parse()
        except avro.protocol.ProtocolParseException:
            self.fail("Valid protocol failed to parse: {!s}".format(self.test_proto))
    def parse_invalid(self):
        """Parsing an invalid schema should error."""
        try:
            self.test_proto.parse()
        except (avro.protocol.ProtocolParseException, avro.schema.SchemaParseException):
            # Expected: either parse-stage exception counts as a rejection.
            pass
        else:
            self.fail("Invalid protocol should not have parsed: {!s}".format(self.test_proto))
class ErrorSchemaTestCase(unittest.TestCase):
    """Enable generating error schema test cases across all the valid test protocols."""
    def __init__(self, test_proto):
        """Ignore the normal signature for unittest.TestCase because we are generating
        many test cases from this one class. This is safe as long as the autoloader
        ignores this class. The autoloader will ignore this class as long as it has
        no methods starting with `test_`.
        """
        super(ErrorSchemaTestCase, self).__init__('check_error_schema_exists')
        self.test_proto = test_proto
    def check_error_schema_exists(self):
        """Protocol messages should always have at least a string error schema."""
        p = self.test_proto.parse()
        # Every message, even without declared errors, gets an implicit one.
        for k, m in p.messages.items():
            self.assertIsNotNone(m.errors, "Message {} did not have the expected implicit "
                                 "string error schema.".format(k))
class RoundTripParseTestCase(unittest.TestCase):
    """Enable generating round-trip parse test cases over all the valid test protocols."""
    def __init__(self, test_proto):
        """Ignore the normal signature for unittest.TestCase because we are generating
        many test cases from this one class. This is safe as long as the autoloader
        ignores this class. The autoloader will ignore this class as long as it has
        no methods starting with `test_`.
        """
        super(RoundTripParseTestCase, self).__init__('parse_round_trip')
        self.test_proto = test_proto
    def parse_round_trip(self):
        """The string of a Schema should be parseable to the same Schema."""
        parsed = self.test_proto.parse()
        # Re-serialize and re-parse; the two objects must compare equal.
        round_trip = avro.protocol.parse(str(parsed))
        self.assertEqual(parsed, round_trip)
def load_tests(loader, default_tests, pattern):
    """Generate test cases across many test schema.

    Implements unittest's ``load_tests`` protocol: build the suite by hand
    from the generator TestCase classes above (their constructors take a
    protocol example instead of a method name, so the autoloader must not
    pick them up).
    """
    suite = unittest.TestSuite()
    suite.addTests(loader.loadTestsFromTestCase(TestMisc))
    suite.addTests(ProtocolParseTestCase(ex) for ex in EXAMPLES)
    # BUGFIX: ErrorSchemaTestCase was defined but never scheduled, so the
    # implicit-error-schema checks never ran.
    suite.addTests(ErrorSchemaTestCase(ex) for ex in VALID_EXAMPLES)
    suite.addTests(RoundTripParseTestCase(ex) for ex in VALID_EXAMPLES)
    return suite
if __name__ == '__main__':
unittest.main()
|
nilq/baby-python
|
python
|
import blocksci
import re
import sys
# One-off export script: parse a raw dump of Stack Overflow bitcoin address
# strings with BlockSci and emit CSV rows (address, internal id, type).
chain = blocksci.Blockchain("/home/hturki/bitcoin-blocksci.bak")
# BUGFIX: close the file handle promptly instead of leaking it via
# open(...).read().
with open("/home/hturki/stackoverflow_addr_raw.txt", "r") as fh:
    address_file = fh.read()
# The raw dump is a Python-repr'd list; strip the brackets/quotes and split
# on the separator between quoted items.
addresses = address_file[1:-1].split("', '")
blocksci_addresses = {}
# Addresses known (empirically) to crash BlockSci's parser; skipped below.
bad_addresses = {addresses[10], addresses[18], addresses[85], addresses[204], addresses[298], addresses[302], addresses[314], addresses[340], addresses[393], addresses[500], addresses[549], addresses[715], addresses[729], addresses[736], addresses[776], addresses[1033], addresses[1131], addresses[1136], addresses[1186]}
for address in addresses:
    # Only 34-character strings matching the loose base58-ish pattern are
    # treated as addresses.
    if len(address) != 34 or re.match(r"[a-zA-Z1-9]{27,35}$", address) is None:
        print("%s not an address" % address)
    elif address in bad_addresses:
        print("%s makes BlockSci segfault" % address)
    else:
        blocksci_addresses[address] = blocksci.Address.from_string(address)
        print("%s parsed correctly" % address)
    sys.stdout.flush()
# Strip the "address_type." enum prefix when printing the type name.
prefix = len("address_type.")
for address, blocksci_address in blocksci_addresses.items():
    if blocksci_address is not None:
        print("%s,%d,%s" % (address, blocksci_address.address_num, str(blocksci_address.type)[prefix:]))
|
nilq/baby-python
|
python
|
#!/usr/bin/env python
'''
We get the lidar point cloud and use it to determine if there are any obstacles ahead
Author:
Sleiman Safaoui
Email:
snsafaoui@gmail.com
Github:
The-SS
Date:
Oct 3, 2018
'''
# python
from __future__ import print_function
import numpy as np
import copy
import math
from numpy import pi
# ROS
import rospy
from sensor_msgs.msg import LaserScan
class ScanSub:
    '''
    Subscribes to the lidar laser scan topic
    '''

    def __init__(self):
        # Most recent LaserScan message; [] until the first one arrives.
        self.scan_data = []
        self.scan_sub = rospy.Subscriber(
            "/scan", LaserScan, self.callback, queue_size=1)

    def callback(self, data):
        '''ROS subscriber callback: remember the latest message.'''
        self.scan_data = data

    def get_scan(self):
        '''Return the most recently received scan ([] if none yet).'''
        return self.scan_data
class ScanDetect:
    '''
    Uses the obtained laser scan to determine if there are any obstacles ahead
    '''
    def __init__(self, ang_range = 40.):
        # ang_range: total sweep (degrees) centered on the forward direction.
        self.ang_range = ang_range #math.radians(ang_range) # range of angles to sweep in radian(about forward)
        #self.ang_min = -float(self.ang_range)/2.0 # lower bound for ang_range
        #self.ang_max = +float(self.ang_range)/2.0 # upper bound for ang_range
        self.scan = [] # scan data
        self.detected_points = [] # ranges detected in the area to scan
        self.detected_points_ang = [] # angles of points detected in the area to scan
    def scan_area(self, scan):
        # Extract the ranges (and their angles) falling within ang_range
        # about forward.  Returns ([], []) on missing/invalid scan data.
        if scan == []: # no data
            return [],[]
        self.scan = scan
        self.detected_points = [] # reset detected points
        self.detected_points_ang = [] # reset detected points
        if (scan.angle_min == scan.angle_max): # no lidar data
            print("Lidar data invalid")
            return [],[]
        # Near-full sweep requested: return everything without filtering.
        if (self.ang_range > 350):
            self.detected_points = scan.ranges
            self.detected_points_ang = np.arange(0, 360, scan.angle_increment).tolist()
            return self.detected_points, self.detected_points_ang
        # The sweep is split into two halves: [0, half_ang] degrees and
        # [360 - half_ang, 360) degrees, wrapping around forward (0 deg).
        half_ang = float(self.ang_range)/2.0
        print(half_ang)
        first_half_end = 0.0 + half_ang # first half angle interval: 0 --> first_half end
        print(first_half_end)
        second_half_start = math.degrees(2 * pi) - half_ang # second half angle interval: second_half_start --> 2*PI
        print(second_half_start)
        first_half_cnt = math.floor((first_half_end - 0.0) / math.degrees(scan.angle_increment)) + 1 # number of angle increments in first half
        second_half_cnt = math.floor((math.degrees(2* pi) - second_half_start) / math.degrees(scan.angle_increment)) # number of angle increments in second half
        if (len(scan.ranges) < (first_half_cnt + second_half_cnt)):
            print ("Invalid increment count")
            return [], []
        for i in range(0, int(first_half_cnt)):
            print(i)
            self.detected_points.append(scan.ranges[i])
            self.detected_points_ang.append(i * scan.angle_increment)
        # NOTE(review): these loop bounds use an angle in degrees as a
        # ranges[] index -- looks suspect; verify the intended indexing.
        for i in range(int(math.ceil(second_half_start)), int(math.ceil(second_half_start) + second_half_cnt)):
            print(i)
            print(int(math.ceil(second_half_start)))
            print(int(math.ceil(second_half_start) + second_half_cnt))
            self.detected_points.append(scan.ranges[i])
            self.detected_points_ang.append(i * scan.angle_increment)
        return self.detected_points, self.detected_points_ang
    # NOTE(review): the string below is dead code -- an earlier index-based
    # implementation kept as a class-level string literal; safe to delete.
    '''
    ang_min = self.ang_min
    ang_max = self.ang_max
    if (ang_min < scan.angle_min):
        ang_min_idx = 0
        ang_min = scan.angle_min
    else:
        ang_min_idx = math.ceil((ang_min-scan.angle_min)/scan.angle_increment) + 1 # number of increments between the lidar min. angle and the desired min. angle
        ang_min = ang_min_idx * scan.angle_increment
    if (ang_max > scan.angle_max):
        ang_max_idx = len(scan.ranges)
        ang_max = scan.angle_max
    else:
        ang_max_idx = math.floor((ang_max-scan.angle_min)/scan.angle_increment) + 1 # number of increments between the lidar min. angle and the desired max. angle
        ang_max = ang_max_idx * scan.angle_increment
    if ang_min_idx > ang_max_idx:
        return [],[]
    for i in range(int(ang_min_idx), int(ang_max_idx)+1):
        self.detected_points.append(scan.ranges[i])
        self.detected_points_ang.append(scan.angle_min + i * math.degrees(scan.angle_increment))
    #return self.detected_points, self.detected_points_ang
    return scan.ranges, []
    '''
class ScanPub:
    '''
    Publishes data about lidar detection
    '''
    def __init__(self):
        # Placeholder: the ROS publisher wiring is not implemented yet.
        self.pub_data = 0.0
        # self.scan_pub = rospy.
def main():
    """Poll the /scan subscriber and run obstacle detection on new scans."""
    rospy.init_node("lidar_detect")
    rate = rospy.Rate(15)  # NOTE(review): never slept on -- loop busy-spins
    nodename = "/lidar_detect"  # NOTE(review): unused
    # Sequence number of the last processed scan; -1 means none yet.
    old_seq = -1
    # Initialize nodes
    scan_sub = ScanSub()
    scan_detect = ScanDetect()
    scan_pub = ScanPub()
    while not rospy.is_shutdown():
        scan = scan_sub.get_scan() #get laser scan
        if (scan != []): # if scan was obtained
            if (scan.header.seq != old_seq): # new data obtained
                old_seq = scan.header.seq
                #detect using scan
                dists, angs = scan_detect.scan_area(scan)
                print ('dists', dists)
                print ('angs', angs)
                # publish result data
if __name__ == "__main__":
try:
main()
except rospy.ROSInterruptException as e:
rospy.logfatal("ROS interrupt. Shutting down lidar_detect node")
print (e)
pass
|
nilq/baby-python
|
python
|
from rdflib import Literal
from .namespaces import BRICK, TAG, OWL
parameter_definitions = {
"Parameter": {
"tags": [TAG.Point, TAG.Parameter],
"subclasses": {
"Delay_Parameter": {
"tags": [TAG.Point, TAG.Delay, TAG.Parameter],
"subclasses": {
"Alarm_Delay_Parameter": {
"tags": [TAG.Point, TAG.Alarm, TAG.Delay, TAG.Parameter],
},
},
},
"Humidity_Parameter": {
"tags": [TAG.Point, TAG.Humidity, TAG.Parameter],
"subclasses": {
"High_Humidity_Alarm_Parameter": {
"tags": [
TAG.Point,
TAG.High,
TAG.Humidity,
TAG.Alarm,
TAG.Parameter,
],
},
"Low_Humidity_Alarm_Parameter": {
"tags": [
TAG.Point,
TAG.Low,
TAG.Humidity,
TAG.Alarm,
TAG.Parameter,
],
},
},
},
"Load_Parameter": {
"tags": [TAG.Point, TAG.Load, TAG.Parameter],
"subclasses": {
"Max_Load_Setpoint": {
"tags": [
TAG.Point,
TAG.Max,
TAG.Load,
TAG.Parameter,
TAG.Setpoint,
],
},
},
},
"Temperature_Parameter": {
"tags": [TAG.Point, TAG.Temperature, TAG.Parameter],
"subclasses": {
"High_Temperature_Alarm_Parameter": {
"tags": [
TAG.Point,
TAG.High,
TAG.Temperature,
TAG.Alarm,
TAG.Parameter,
],
},
"Low_Temperature_Alarm_Parameter": {
"tags": [
TAG.Point,
TAG.Low,
TAG.Temperature,
TAG.Alarm,
TAG.Parameter,
],
},
"Low_Freeze_Protect_Temperature_Parameter": {
"tags": [
TAG.Point,
TAG.Low,
TAG.Freeze,
TAG.Protect,
TAG.Temperature,
TAG.Parameter,
],
},
"Lockout_Temperature_Differential_Parameter": {
"tags": [
TAG.Point,
TAG.Lockout,
TAG.Temperature,
TAG.Differential,
TAG.Sensor,
],
"subclasses": {
"Outside_Air_Lockout_Temperature_Differential_Parameter": {
"tags": [
TAG.Point,
TAG.Outside,
TAG.Air,
TAG.Lockout,
TAG.Temperature,
TAG.Differential,
TAG.Parameter,
],
"subclasses": {
"Low_Outside_Air_Lockout_Temperature_Differential_Parameter": {
"tags": [
TAG.Point,
TAG.Low,
TAG.Outside,
TAG.Air,
TAG.Lockout,
TAG.Temperature,
TAG.Differential,
TAG.Parameter,
],
},
"High_Outside_Air_Lockout_Temperature_Differential_Parameter": {
"tags": [
TAG.Point,
TAG.High,
TAG.Outside,
TAG.Air,
TAG.Lockout,
TAG.Temperature,
TAG.Differential,
TAG.Parameter,
],
},
},
},
},
},
},
},
"PID_Parameter": {
"tags": [TAG.Point, TAG.Parameter, TAG.PID],
"subclasses": {
"Gain_Parameter": {
"tags": [TAG.Point, TAG.Parameter, TAG.PID, TAG.Gain],
"subclasses": {
"Integral_Gain_Parameter": {
"tags": [
TAG.Point,
TAG.Parameter,
TAG.PID,
TAG.Gain,
TAG.Integral,
],
"subclasses": {
"Supply_Air_Integral_Gain_Parameter": {
"tags": [
TAG.Point,
TAG.Supply,
TAG.Air,
TAG.Integral,
TAG.Gain,
TAG.Parameter,
TAG.PID,
],
}
},
},
"Proportional_Gain_Parameter": {
"tags": [
TAG.Point,
TAG.Parameter,
TAG.PID,
TAG.Gain,
TAG.Proportional,
],
"subclasses": {
"Supply_Air_Proportional_Gain_Parameter": {
"tags": [
TAG.Point,
TAG.Parameter,
TAG.PID,
TAG.Gain,
TAG.Proportional,
TAG.Supply,
TAG.Air,
],
},
},
},
"Derivative_Gain_Parameter": {
"tags": [
TAG.Point,
TAG.Parameter,
TAG.PID,
TAG.Gain,
TAG.Derivative,
],
},
},
},
"Step_Parameter": {
"tags": [TAG.Point, TAG.Parameter, TAG.Step],
"subclasses": {
"Differential_Pressure_Step_Parameter": {
"subclasses": {
"Chilled_Water_Differential_Pressure_Step_Parameter": {
"tags": [
TAG.Point,
TAG.Chilled,
TAG.Water,
TAG.Differential,
TAG.Pressure,
TAG.Step,
TAG.Parameter,
],
}
},
"tags": [
TAG.Point,
TAG.Differential,
TAG.Pressure,
TAG.Step,
TAG.Parameter,
],
},
"Static_Pressure_Step_Parameter": {
"subclasses": {
"Air_Static_Pressure_Step_Parameter": {
"tags": [
TAG.Point,
TAG.Air,
TAG.Static,
TAG.Pressure,
TAG.Step,
TAG.Parameter,
],
"subclasses": {
"Discharge_Air_Static_Pressure_Step_Parameter": {
"tags": [
TAG.Point,
TAG.Discharge,
TAG.Air,
TAG.Static,
TAG.Pressure,
TAG.Step,
TAG.Parameter,
],
},
},
}
},
"tags": [
TAG.Point,
TAG.Static,
TAG.Pressure,
TAG.Step,
TAG.Parameter,
],
},
"Temperature_Step_Parameter": {
"subclasses": {
"Air_Temperature_Step_Parameter": {
"tags": [
TAG.Point,
TAG.Air,
TAG.Temperature,
TAG.Step,
TAG.Parameter,
],
"subclasses": {
"Discharge_Air_Temperature_Step_Parameter": {
"tags": [
TAG.Point,
TAG.Discharge,
TAG.Air,
TAG.Temperature,
TAG.Step,
TAG.Parameter,
],
},
"Supply_Air_Temperature_Step_Parameter": {
OWL.equivalentClass: BRICK[
"Discharge_Air_Temperature_Step_Parameter"
],
"tags": [
TAG.Point,
TAG.Supply,
TAG.Air,
TAG.Temperature,
TAG.Step,
TAG.Parameter,
],
},
},
}
},
"parents": [BRICK.Temperature_Parameter],
"tags": [
TAG.Point,
TAG.Temperature,
TAG.Step,
TAG.Parameter,
],
},
},
},
"Time_Parameter": {
"tags": [TAG.Point, TAG.Parameter, TAG.Time],
"subclasses": {
"Integral_Time_Parameter": {
"tags": [
TAG.Point,
TAG.Parameter,
TAG.PID,
TAG.Time,
TAG.Integral,
],
"subclasses": {
"Air_Temperature_Integral_Time_Parameter": {
"tags": [
TAG.Point,
TAG.Air,
TAG.Temperature,
TAG.Parameter,
TAG.PID,
TAG.Time,
TAG.Integral,
],
"parents": [BRICK.Temperature_Parameter],
"subclasses": {
"Cooling_Discharge_Air_Temperature_Integral_Time_Parameter": {
"tags": [
TAG.Point,
TAG.Cool,
TAG.Discharge,
TAG.Air,
TAG.Temperature,
TAG.Integral,
TAG.Time,
TAG.Parameter,
TAG.PID,
],
},
"Cooling_Supply_Air_Temperature_Integral_Time_Parameter": {
OWL.equivalentClass: BRICK[
"Cooling_Discharge_Air_Temperature_Integral_Time_Parameter"
],
"tags": [
TAG.Point,
TAG.Cool,
TAG.Supply,
TAG.Air,
TAG.Temperature,
TAG.Integral,
TAG.Time,
TAG.Parameter,
TAG.PID,
],
},
"Heating_Discharge_Air_Temperature_Integral_Time_Parameter": {
"tags": [
TAG.Point,
TAG.Heat,
TAG.Discharge,
TAG.Air,
TAG.Temperature,
TAG.Integral,
TAG.Time,
TAG.Parameter,
TAG.PID,
],
},
"Heating_Supply_Air_Temperature_Integral_Time_Parameter": {
OWL.equivalentClass: BRICK[
"Heating_Discharge_Air_Temperature_Integral_Time_Parameter"
],
"tags": [
TAG.Point,
TAG.Heat,
TAG.Supply,
TAG.Air,
TAG.Temperature,
TAG.Integral,
TAG.Time,
TAG.Parameter,
TAG.PID,
],
},
},
},
"Differential_Pressure_Integral_Time_Parameter": {
"tags": [
TAG.Point,
TAG.Differential,
TAG.Pressure,
TAG.Integral,
TAG.Time,
TAG.Parameter,
TAG.PID,
],
"subclasses": {
"Hot_Water_Differential_Pressure_Integral_Time_Parameter": {
"tags": [
TAG.Point,
TAG.Hot,
TAG.Water,
TAG.Differential,
TAG.Pressure,
TAG.Integral,
TAG.Time,
TAG.Parameter,
TAG.PID,
],
},
"Chilled_Water_Differential_Pressure_Integral_Time_Parameter": {
"tags": [
TAG.Point,
TAG.Chilled,
TAG.Water,
TAG.Differential,
TAG.Pressure,
TAG.Integral,
TAG.Time,
TAG.Parameter,
TAG.PID,
],
},
"Discharge_Water_Differential_Pressure_Integral_Time_Parameter": {
"tags": [
TAG.Point,
TAG.Discharge,
TAG.Water,
TAG.Differential,
TAG.Pressure,
TAG.Integral,
TAG.Time,
TAG.Parameter,
TAG.PID,
],
},
"Supply_Water_Differential_Pressure_Integral_Time_Parameter": {
"tags": [
TAG.Point,
TAG.Supply,
TAG.Water,
TAG.Differential,
TAG.Pressure,
TAG.Integral,
TAG.Time,
TAG.Parameter,
TAG.PID,
],
},
},
},
"Exhaust_Air_Flow_Integral_Time_Parameter": {
"subclasses": {
"Exhaust_Air_Stack_Flow_Integral_Time_Parameter": {
"tags": [
TAG.Point,
TAG.Exhaust,
TAG.Air,
TAG.Stack,
TAG.Flow,
TAG.Integral,
TAG.Time,
TAG.Parameter,
TAG.PID,
],
}
},
"tags": [
TAG.Point,
TAG.Exhaust,
TAG.Air,
TAG.Flow,
TAG.Integral,
TAG.Time,
TAG.Parameter,
TAG.PID,
],
},
"Static_Pressure_Integral_Time_Parameter": {
"tags": [
TAG.Point,
TAG.Static,
TAG.Pressure,
TAG.Integral,
TAG.Time,
TAG.Parameter,
TAG.PID,
],
"subclasses": {
"Discharge_Air_Static_Pressure_Integral_Time_Parameter": {
"tags": [
TAG.Point,
TAG.Discharge,
TAG.Air,
TAG.Static,
TAG.Pressure,
TAG.Integral,
TAG.Time,
TAG.Parameter,
TAG.PID,
],
},
"Supply_Air_Static_Pressure_Integral_Time_Parameter": {
OWL.equivalentClass: BRICK[
"Discharge_Air_Static_Pressure_Integral_Time_Parameter"
],
"tags": [
TAG.Point,
TAG.Supply,
TAG.Air,
TAG.Static,
TAG.Pressure,
TAG.Integral,
TAG.Time,
TAG.Parameter,
TAG.PID,
],
},
},
},
"Supply_Water_Differential_Pressure_Integral_Time_Parameter": {
"tags": [
TAG.Point,
TAG.Supply,
TAG.Water,
TAG.Differential,
TAG.Pressure,
TAG.Integral,
TAG.Time,
TAG.Parameter,
TAG.PID,
],
},
"Supply_Water_Temperature_Integral_Time_Parameter": {
"parents": [BRICK.Temperature_Parameter],
"tags": [
TAG.Point,
TAG.Supply,
TAG.Water,
TAG.Temperature,
TAG.Integral,
TAG.Time,
TAG.Parameter,
TAG.PID,
],
},
},
},
"Derivative_Time_Parameter": {
"tags": [
TAG.Point,
TAG.Parameter,
TAG.PID,
TAG.Time,
TAG.Derivative,
],
},
},
},
"Proportional_Band_Parameter": {
"tags": [
TAG.Point,
TAG.Parameter,
TAG.PID,
TAG.Proportional,
TAG.Band,
TAG.Parameter,
TAG.PID,
],
"subclasses": {
"Differential_Pressure_Proportional_Band": {
"tags": [
TAG.Point,
TAG.Differential,
TAG.Pressure,
TAG.Proportional,
TAG.Band,
TAG.PID,
],
"subclasses": {
"Hot_Water_Differential_Pressure_Proportional_Band_Parameter": {
"tags": [
TAG.Point,
TAG.Hot,
TAG.Water,
TAG.Differential,
TAG.Pressure,
TAG.Proportional,
TAG.Band,
TAG.Parameter,
TAG.PID,
],
},
"Chilled_Water_Differential_Pressure_Proportional_Band_Parameter": {
"tags": [
TAG.Point,
TAG.Chilled,
TAG.Water,
TAG.Differential,
TAG.Pressure,
TAG.Proportional,
TAG.Band,
TAG.Parameter,
TAG.PID,
],
},
"Discharge_Water_Differential_Pressure_Proportional_Band_Parameter": {
"tags": [
TAG.Point,
TAG.Discharge,
TAG.Water,
TAG.Differential,
TAG.Pressure,
TAG.Proportional,
TAG.Band,
TAG.Parameter,
TAG.PID,
],
},
"Supply_Water_Differential_Pressure_Proportional_Band_Parameter": {
"tags": [
TAG.Point,
TAG.Supply,
TAG.Water,
TAG.Differential,
TAG.Pressure,
TAG.Proportional,
TAG.Band,
TAG.Parameter,
TAG.PID,
],
},
},
},
"Discharge_Air_Temperature_Proportional_Band_Parameter": {
"tags": [
TAG.Point,
TAG.Discharge,
TAG.Air,
TAG.Temperature,
TAG.Proportional,
TAG.Band,
TAG.Parameter,
TAG.PID,
],
"parents": [BRICK.Temperature_Parameter],
"subclasses": {
"Heating_Discharge_Air_Temperature_Proportional_Band_Parameter": {
"tags": [
TAG.Point,
TAG.Heat,
TAG.Discharge,
TAG.Air,
TAG.Temperature,
TAG.Proportional,
TAG.Band,
TAG.Parameter,
TAG.PID,
],
},
"Cooling_Discharge_Air_Temperature_Proportional_Band_Parameter": {
"tags": [
TAG.Point,
TAG.Cool,
TAG.Discharge,
TAG.Air,
TAG.Temperature,
TAG.Proportional,
TAG.Band,
TAG.Parameter,
TAG.PID,
],
},
},
},
"Supply_Air_Temperature_Proportional_Band_Parameter": {
OWL.equivalentClass: BRICK[
"Discharge_Air_Temperature_Proportional_Band_Parameter"
],
"tags": [
TAG.Point,
TAG.Supply,
TAG.Air,
TAG.Temperature,
TAG.Proportional,
TAG.Band,
TAG.Parameter,
TAG.PID,
],
"parents": [BRICK.Temperature_Parameter],
"subclasses": {
"Cooling_Supply_Air_Temperature_Proportional_Band_Parameter": {
OWL.equivalentClass: BRICK[
"Cooling_Discharge_Air_Temperature_Proportional_Band_Parameter"
],
"tags": [
TAG.Point,
TAG.Cool,
TAG.Supply,
TAG.Air,
TAG.Temperature,
TAG.Proportional,
TAG.Band,
TAG.Parameter,
TAG.PID,
],
},
"Heating_Supply_Air_Temperature_Proportional_Band_Parameter": {
OWL.equivalentClass: BRICK[
"Heating_Discharge_Air_Temperature_Proportional_Band_Parameter"
],
"tags": [
TAG.Point,
TAG.Heat,
TAG.Supply,
TAG.Air,
TAG.Temperature,
TAG.Proportional,
TAG.Band,
TAG.Parameter,
TAG.PID,
],
},
},
},
"Exhaust_Air_Flow_Proportional_Band_Parameter": {
"tags": [
TAG.Point,
TAG.Exhaust,
TAG.Air,
TAG.Flow,
TAG.Proportional,
TAG.Band,
TAG.Parameter,
TAG.PID,
],
"subclasses": {
"Exhaust_Air_Stack_Flow_Proportional_Band_Parameter": {
"tags": [
TAG.Point,
TAG.Exhaust,
TAG.Air,
TAG.Stack,
TAG.Flow,
TAG.Proportional,
TAG.Band,
TAG.Parameter,
TAG.PID,
],
},
},
},
"Static_Pressure_Proportional_Band_Parameter": {
"subclasses": {
"Discharge_Air_Static_Pressure_Proportional_Band_Parameter": {
"tags": [
TAG.Point,
TAG.Discharge,
TAG.Air,
TAG.Static,
TAG.Pressure,
TAG.Proportional,
TAG.Band,
TAG.Parameter,
TAG.PID,
],
},
"Exhaust_Air_Static_Pressure_Proportional_Band_Parameter": {
"tags": [
TAG.Point,
TAG.Exhaust,
TAG.Air,
TAG.Static,
TAG.Pressure,
TAG.Proportional,
TAG.Band,
TAG.Parameter,
TAG.PID,
],
},
"Supply_Air_Static_Pressure_Proportional_Band_Parameter": {
OWL.equivalentClass: BRICK[
"Discharge_Air_Static_Pressure_Proportional_Band_Parameter"
],
"tags": [
TAG.Point,
TAG.Supply,
TAG.Air,
TAG.Static,
TAG.Pressure,
TAG.Proportional,
TAG.Band,
TAG.Parameter,
TAG.PID,
],
},
},
"tags": [
TAG.Point,
TAG.Static,
TAG.Pressure,
TAG.Proportional,
TAG.Band,
TAG.Parameter,
TAG.PID,
],
},
"Supply_Water_Temperature_Proportional_Band_Parameter": {
"parents": [BRICK.Temperature_Parameter],
"tags": [
TAG.Point,
TAG.Supply,
TAG.Water,
TAG.Temperature,
TAG.Proportional,
TAG.Band,
TAG.Parameter,
TAG.PID,
],
},
"Discharge_Water_Temperature_Proportional_Band_Parameter": {
"parents": [BRICK.Temperature_Parameter],
"tags": [
TAG.Point,
TAG.Discharge,
TAG.Water,
TAG.Temperature,
TAG.Proportional,
TAG.Band,
TAG.Parameter,
TAG.PID,
],
},
},
},
},
},
"Tolerance_Parameter": {
"tags": [TAG.Point, TAG.Tolerance, TAG.Parameter],
"subclasses": {
"Humidity_Tolerance_Parameter": {
"tags": [TAG.Point, TAG.Tolerance, TAG.Parameter, TAG.Humidity],
"parents": [BRICK.Humidity_Parameter],
},
"Temperature_Tolerance_Parameter": {
"parents": [BRICK.Temperature_Parameter],
"tags": [
TAG.Point,
TAG.Tolerance,
TAG.Parameter,
TAG.Temperature,
],
},
},
},
"Limit": {
"tags": [TAG.Point, TAG.Parameter, TAG.Limit],
"subclasses": {
"Close_Limit": {
"tags": [TAG.Point, TAG.Close, TAG.Parameter, TAG.Limit],
},
"Speed_Setpoint_Limit": {
"tags": [
TAG.Point,
TAG.Speed,
TAG.Limit,
TAG.Parameter,
TAG.Setpoint,
],
"subclasses": {
"Max_Speed_Setpoint_Limit": {
"tags": [
TAG.Point,
TAG.Max,
TAG.Speed,
TAG.Limit,
TAG.Parameter,
TAG.Setpoint,
],
"parents": [BRICK.Max_Limit],
},
"Min_Speed_Setpoint_Limit": {
"tags": [
TAG.Point,
TAG.Min,
TAG.Speed,
TAG.Limit,
TAG.Parameter,
TAG.Setpoint,
],
"parents": [BRICK.Min_Limit],
},
},
},
"Air_Temperature_Setpoint_Limit": {
"tags": [
TAG.Point,
TAG.Air,
TAG.Temperature,
TAG.Limit,
TAG.Setpoint,
],
"parents": [BRICK.Temperature_Parameter],
"subclasses": {
"Discharge_Air_Temperature_Setpoint_Limit": {
"tags": [
TAG.Point,
TAG.Discharge,
TAG.Air,
TAG.Temperature,
TAG.Limit,
TAG.Setpoint,
],
"subclasses": {
"Max_Discharge_Air_Temperature_Setpoint_Limit": {
"tags": [
TAG.Point,
TAG.Max,
TAG.Discharge,
TAG.Air,
TAG.Temperature,
TAG.Limit,
TAG.Setpoint,
],
"parents": [
BRICK.Max_Temperature_Setpoint_Limit
],
},
"Min_Discharge_Air_Temperature_Setpoint_Limit": {
"tags": [
TAG.Point,
TAG.Min,
TAG.Discharge,
TAG.Air,
TAG.Temperature,
TAG.Limit,
TAG.Setpoint,
],
"parents": [
BRICK.Min_Temperature_Setpoint_Limit
],
},
},
},
},
},
"Air_Flow_Setpoint_Limit": {
"tags": [
TAG.Point,
TAG.Air,
TAG.Flow,
TAG.Limit,
TAG.Parameter,
TAG.Setpoint,
],
"subclasses": {
"Max_Air_Flow_Setpoint_Limit": {
"tags": [
TAG.Point,
TAG.Max,
TAG.Air,
TAG.Flow,
TAG.Limit,
TAG.Parameter,
TAG.Setpoint,
],
},
"Min_Air_Flow_Setpoint_Limit": {
"tags": [
TAG.Point,
TAG.Min,
TAG.Air,
TAG.Flow,
TAG.Limit,
TAG.Parameter,
TAG.Setpoint,
],
},
},
},
"Current_Limit": {
"tags": [TAG.Point, TAG.Current, TAG.Limit, TAG.Parameter],
},
"Position_Limit": {
"tags": [TAG.Point, TAG.Position, TAG.Limit],
"subclasses": {
"Max_Position_Setpoint_Limit": {
"tags": [
TAG.Point,
TAG.Max,
TAG.Position,
TAG.Limit,
TAG.Setpoint,
],
"parents": [BRICK.Max_Limit],
},
"Min_Position_Setpoint_Limit": {
"tags": [
TAG.Point,
TAG.Min,
TAG.Position,
TAG.Limit,
TAG.Setpoint,
],
"parents": [BRICK.Min_Limit],
},
},
},
"Differential_Pressure_Setpoint_Limit": {
"tags": [
TAG.Point,
TAG.Differential,
TAG.Pressure,
TAG.Limit,
TAG.Parameter,
TAG.Setpoint,
],
"subclasses": {
"Max_Chilled_Water_Differential_Pressure_Setpoint_Limit": {
"tags": [
TAG.Point,
TAG.Max,
TAG.Chilled,
TAG.Water,
TAG.Differential,
TAG.Pressure,
TAG.Limit,
TAG.Parameter,
TAG.Setpoint,
],
},
"Min_Chilled_Water_Differential_Pressure_Setpoint_Limit": {
"tags": [
TAG.Point,
TAG.Min,
TAG.Chilled,
TAG.Water,
TAG.Differential,
TAG.Pressure,
TAG.Limit,
TAG.Parameter,
TAG.Setpoint,
],
},
"Max_Hot_Water_Differential_Pressure_Setpoint_Limit": {
"tags": [
TAG.Point,
TAG.Max,
TAG.Hot,
TAG.Water,
TAG.Differential,
TAG.Pressure,
TAG.Limit,
TAG.Parameter,
TAG.Setpoint,
],
},
"Min_Hot_Water_Differential_Pressure_Setpoint_Limit": {
"tags": [
TAG.Point,
TAG.Min,
TAG.Hot,
TAG.Water,
TAG.Differential,
TAG.Pressure,
TAG.Limit,
TAG.Parameter,
TAG.Setpoint,
],
},
},
},
"Fresh_Air_Setpoint_Limit": {
"tags": [
TAG.Point,
TAG.Fresh,
TAG.Air,
TAG.Limit,
TAG.Setpoint,
],
"subclasses": {
"Min_Fresh_Air_Setpoint_Limit": {
"tags": [
TAG.Point,
TAG.Min,
TAG.Fresh,
TAG.Air,
TAG.Limit,
TAG.Setpoint,
],
"parents": [BRICK.Min_Limit],
},
},
},
"Ventilation_Air_Flow_Ratio_Limit": {
"tags": [
TAG.Point,
TAG.Ventilation,
TAG.Air,
TAG.Ratio,
TAG.Limit,
],
},
"Static_Pressure_Setpoint_Limit": {
"tags": [
TAG.Point,
TAG.Static,
TAG.Pressure,
TAG.Limit,
TAG.Parameter,
TAG.Setpoint,
],
"subclasses": {
"Min_Static_Pressure_Setpoint_Limit": {
"tags": [
TAG.Point,
TAG.Min,
TAG.Static,
TAG.Pressure,
TAG.Limit,
TAG.Parameter,
TAG.Setpoint,
],
},
"Max_Static_Pressure_Setpoint_Limit": {
"tags": [
TAG.Point,
TAG.Max,
TAG.Static,
TAG.Pressure,
TAG.Limit,
TAG.Parameter,
TAG.Setpoint,
],
},
"High_Static_Pressure_Cutout_Setpoint_Limit": {
"tags": [
TAG.Point,
TAG.High,
TAG.Static,
TAG.Pressure,
TAG.Cutout,
TAG.Limit,
TAG.Setpoint,
],
},
},
},
"Max_Limit": {
"tags": [TAG.Point, TAG.Max, TAG.Limit, TAG.Parameter],
"subclasses": {
"Max_Speed_Setpoint_Limit": {
"tags": [
TAG.Point,
TAG.Max,
TAG.Speed,
TAG.Limit,
TAG.Parameter,
TAG.Setpoint,
],
},
"Max_Discharge_Air_Static_Pressure_Setpoint_Limit": {
"tags": [
TAG.Point,
TAG.Max,
TAG.Discharge,
TAG.Air,
TAG.Static,
TAG.Pressure,
TAG.Limit,
TAG.Parameter,
TAG.Setpoint,
],
},
"Max_Supply_Air_Static_Pressure_Setpoint_Limit": {
OWL.equivalentClass: BRICK[
"Max_Discharge_Air_Static_Pressure_Setpoint_Limit"
],
"tags": [
TAG.Point,
TAG.Max,
TAG.Supply,
TAG.Air,
TAG.Static,
TAG.Pressure,
TAG.Limit,
TAG.Parameter,
TAG.Setpoint,
],
},
"Max_Chilled_Water_Differential_Pressure_Setpoint_Limit": {
"tags": [
TAG.Point,
TAG.Max,
TAG.Chilled,
TAG.Water,
TAG.Differential,
TAG.Pressure,
TAG.Limit,
TAG.Parameter,
TAG.Setpoint,
],
},
"Max_Hot_Water_Differential_Pressure_Setpoint_Limit": {
"tags": [
TAG.Point,
TAG.Max,
TAG.Hot,
TAG.Water,
TAG.Differential,
TAG.Pressure,
TAG.Limit,
TAG.Parameter,
TAG.Setpoint,
],
},
"Max_Static_Pressure_Setpoint_Limit": {
"tags": [
TAG.Point,
TAG.Max,
TAG.Static,
TAG.Pressure,
TAG.Limit,
TAG.Parameter,
TAG.Setpoint,
],
"subclasses": {
"Max_Discharge_Air_Static_Pressure_Setpoint_Limit": {
"tags": [
TAG.Point,
TAG.Max,
TAG.Discharge,
TAG.Air,
TAG.Static,
TAG.Pressure,
TAG.Limit,
TAG.Parameter,
TAG.Setpoint,
],
},
"Max_Supply_Air_Static_Pressure_Setpoint_Limit": {
OWL.equivalentClass: BRICK[
"Max_Discharge_Air_Static_Pressure_Setpoint_Limit"
],
"tags": [
TAG.Point,
TAG.Max,
TAG.Supply,
TAG.Air,
TAG.Static,
TAG.Pressure,
TAG.Limit,
TAG.Parameter,
TAG.Setpoint,
],
},
},
},
"Max_Temperature_Setpoint_Limit": {
"tags": [
TAG.Point,
TAG.Max,
TAG.Temperature,
TAG.Limit,
TAG.Setpoint,
],
"parents": [BRICK.Temperature_Parameter],
},
"Max_Air_Flow_Setpoint_Limit": {
"tags": [
TAG.Point,
TAG.Max,
TAG.Air,
TAG.Flow,
TAG.Limit,
TAG.Parameter,
TAG.Setpoint,
],
"subclasses": {
"Max_Cooling_Supply_Air_Flow_Setpoint_Limit": {
OWL.equivalentClass: BRICK[
"Max_Cooling_Discharge_Air_Flow_Setpoint_Limit"
],
"tags": [
TAG.Point,
TAG.Max,
TAG.Cool,
TAG.Supply,
TAG.Air,
TAG.Flow,
TAG.Limit,
TAG.Parameter,
TAG.Setpoint,
],
"subclasses": {
"Max_Occupied_Cooling_Supply_Air_Flow_Setpoint_Limit": {
OWL.equivalentClass: BRICK[
"Max_Occupied_Cooling_Discharge_Air_Flow_Setpoint_Limit"
],
"tags": [
TAG.Point,
TAG.Max,
TAG.Occupied,
TAG.Cool,
TAG.Supply,
TAG.Air,
TAG.Flow,
TAG.Limit,
TAG.Parameter,
TAG.Setpoint,
],
},
"Max_Unoccupied_Cooling_Supply_Air_Flow_Setpoint_Limit": {
OWL.equivalentClass: BRICK[
"Max_Unoccupied_Cooling_Discharge_Air_Flow_Setpoint_Limit"
],
"tags": [
TAG.Point,
TAG.Max,
TAG.Unoccupied,
TAG.Cool,
TAG.Supply,
TAG.Air,
TAG.Flow,
TAG.Limit,
TAG.Parameter,
TAG.Setpoint,
],
},
},
},
"Max_Cooling_Discharge_Air_Flow_Setpoint_Limit": {
"tags": [
TAG.Point,
TAG.Max,
TAG.Cool,
TAG.Discharge,
TAG.Air,
TAG.Flow,
TAG.Limit,
TAG.Parameter,
TAG.Setpoint,
],
"subclasses": {
"Max_Occupied_Cooling_Discharge_Air_Flow_Setpoint_Limit": {
"tags": [
TAG.Point,
TAG.Max,
TAG.Occupied,
TAG.Cool,
TAG.Discharge,
TAG.Air,
TAG.Flow,
TAG.Limit,
TAG.Parameter,
TAG.Setpoint,
],
},
"Max_Unoccupied_Cooling_Discharge_Air_Flow_Setpoint_Limit": {
"tags": [
TAG.Point,
TAG.Max,
TAG.Unoccupied,
TAG.Cool,
TAG.Discharge,
TAG.Air,
TAG.Flow,
TAG.Limit,
TAG.Parameter,
TAG.Setpoint,
],
},
},
},
"Max_Heating_Supply_Air_Flow_Setpoint_Limit": {
OWL.equivalentClass: BRICK[
"Max_Heating_Discharge_Air_Flow_Setpoint_Limit"
],
"tags": [
TAG.Point,
TAG.Max,
TAG.Heat,
TAG.Supply,
TAG.Air,
TAG.Flow,
TAG.Limit,
TAG.Parameter,
TAG.Setpoint,
],
"subclasses": {
"Max_Occupied_Heating_Supply_Air_Flow_Setpoint_Limit": {
OWL.equivalentClass: BRICK[
"Max_Occupied_Heating_Discharge_Air_Flow_Setpoint_Limit"
],
"tags": [
TAG.Point,
TAG.Max,
TAG.Occupied,
TAG.Heat,
TAG.Supply,
TAG.Air,
TAG.Flow,
TAG.Limit,
TAG.Parameter,
TAG.Setpoint,
],
},
"Max_Unoccupied_Heating_Supply_Air_Flow_Setpoint_Limit": {
OWL.equivalentClass: BRICK[
"Max_Unoccupied_Heating_Discharge_Air_Flow_Setpoint_Limit"
],
"tags": [
TAG.Point,
TAG.Max,
TAG.Unoccupied,
TAG.Heat,
TAG.Supply,
TAG.Air,
TAG.Flow,
TAG.Limit,
TAG.Parameter,
TAG.Setpoint,
],
},
},
},
"Max_Heating_Discharge_Air_Flow_Setpoint_Limit": {
"tags": [
TAG.Point,
TAG.Max,
TAG.Heat,
TAG.Discharge,
TAG.Air,
TAG.Flow,
TAG.Limit,
TAG.Parameter,
TAG.Setpoint,
],
"subclasses": {
"Max_Occupied_Heating_Discharge_Air_Flow_Setpoint_Limit": {
"tags": [
TAG.Point,
TAG.Max,
TAG.Occupied,
TAG.Heat,
TAG.Discharge,
TAG.Air,
TAG.Flow,
TAG.Limit,
TAG.Parameter,
TAG.Setpoint,
],
},
"Max_Unoccupied_Heating_Discharge_Air_Flow_Setpoint_Limit": {
"tags": [
TAG.Point,
TAG.Max,
TAG.Unoccupied,
TAG.Heat,
TAG.Discharge,
TAG.Air,
TAG.Flow,
TAG.Limit,
TAG.Parameter,
TAG.Setpoint,
],
},
},
},
},
},
},
},
"Min_Limit": {
"tags": [TAG.Point, TAG.Min, TAG.Limit, TAG.Parameter],
"subclasses": {
"Min_Speed_Setpoint_Limit": {
"tags": [
TAG.Point,
TAG.Min,
TAG.Speed,
TAG.Limit,
TAG.Parameter,
TAG.Setpoint,
],
},
"Min_Hot_Water_Differential_Pressure_Setpoint_Limit": {
"tags": [
TAG.Point,
TAG.Min,
TAG.Hot,
TAG.Water,
TAG.Differential,
TAG.Pressure,
TAG.Limit,
TAG.Parameter,
TAG.Setpoint,
],
},
"Min_Chilled_Water_Differential_Pressure_Setpoint_Limit": {
"tags": [
TAG.Point,
TAG.Min,
TAG.Chilled,
TAG.Water,
TAG.Differential,
TAG.Pressure,
TAG.Limit,
TAG.Parameter,
TAG.Setpoint,
],
},
"Min_Discharge_Air_Static_Pressure_Setpoint_Limit": {
"tags": [
TAG.Point,
TAG.Min,
TAG.Discharge,
TAG.Air,
TAG.Static,
TAG.Pressure,
TAG.Limit,
TAG.Parameter,
TAG.Setpoint,
],
},
"Min_Supply_Air_Static_Pressure_Setpoint_Limit": {
OWL.equivalentClass: BRICK[
"Min_Discharge_Air_Static_Pressure_Setpoint_Limit"
],
"tags": [
TAG.Point,
TAG.Min,
TAG.Supply,
TAG.Air,
TAG.Static,
TAG.Pressure,
TAG.Limit,
TAG.Parameter,
TAG.Setpoint,
],
},
"Min_Temperature_Setpoint_Limit": {
"tags": [
TAG.Point,
TAG.Min,
TAG.Temperature,
TAG.Limit,
TAG.Setpoint,
],
"parents": [BRICK.Temperature_Parameter],
},
"Min_Static_Pressure_Setpoint_Limit": {
"tags": [
TAG.Point,
TAG.Min,
TAG.Static,
TAG.Pressure,
TAG.Limit,
TAG.Parameter,
TAG.Setpoint,
],
"subclasses": {
"Min_Discharge_Air_Static_Pressure_Setpoint_Limit": {
"tags": [
TAG.Point,
TAG.Min,
TAG.Discharge,
TAG.Air,
TAG.Static,
TAG.Pressure,
TAG.Limit,
TAG.Parameter,
TAG.Setpoint,
],
},
"Min_Supply_Air_Static_Pressure_Setpoint_Limit": {
OWL.equivalentClass: BRICK[
"Min_Discharge_Air_Static_Pressure_Setpoint_Limit"
],
"tags": [
TAG.Point,
TAG.Min,
TAG.Supply,
TAG.Air,
TAG.Static,
TAG.Pressure,
TAG.Limit,
TAG.Parameter,
TAG.Setpoint,
],
},
},
},
"Min_Air_Flow_Setpoint_Limit": {
"tags": [
TAG.Point,
TAG.Min,
TAG.Air,
TAG.Flow,
TAG.Limit,
TAG.Parameter,
TAG.Setpoint,
],
"subclasses": {
"Min_Outside_Air_Flow_Setpoint_Limit": {
"tags": [
TAG.Point,
TAG.Min,
TAG.Outside,
TAG.Air,
TAG.Flow,
TAG.Limit,
TAG.Parameter,
TAG.Setpoint,
],
},
"Min_Cooling_Supply_Air_Flow_Setpoint_Limit": {
OWL.equivalentClass: BRICK[
"Min_Cooling_Discharge_Air_Flow_Setpoint_Limit"
],
"tags": [
TAG.Point,
TAG.Min,
TAG.Cool,
TAG.Supply,
TAG.Air,
TAG.Flow,
TAG.Limit,
TAG.Parameter,
TAG.Setpoint,
],
"subclasses": {
"Min_Occupied_Cooling_Supply_Air_Flow_Setpoint_Limit": {
OWL.equivalentClass: BRICK[
"Min_Occupied_Cooling_Discharge_Air_Flow_Setpoint_Limit"
],
"tags": [
TAG.Point,
TAG.Min,
TAG.Occupied,
TAG.Cool,
TAG.Supply,
TAG.Air,
TAG.Flow,
TAG.Limit,
TAG.Parameter,
TAG.Setpoint,
],
},
"Min_Unoccupied_Cooling_Supply_Air_Flow_Setpoint_Limit": {
OWL.equivalentClass: BRICK[
"Min_Unoccupied_Cooling_Discharge_Air_Flow_Setpoint_Limit"
],
"tags": [
TAG.Point,
TAG.Min,
TAG.Unoccupied,
TAG.Cool,
TAG.Supply,
TAG.Air,
TAG.Flow,
TAG.Limit,
TAG.Parameter,
TAG.Setpoint,
],
},
},
},
"Min_Cooling_Discharge_Air_Flow_Setpoint_Limit": {
"tags": [
TAG.Point,
TAG.Min,
TAG.Cool,
TAG.Discharge,
TAG.Air,
TAG.Flow,
TAG.Limit,
TAG.Parameter,
TAG.Setpoint,
],
"subclasses": {
"Min_Occupied_Cooling_Discharge_Air_Flow_Setpoint_Limit": {
"tags": [
TAG.Point,
TAG.Min,
TAG.Occupied,
TAG.Cool,
TAG.Discharge,
TAG.Air,
TAG.Flow,
TAG.Limit,
TAG.Parameter,
TAG.Setpoint,
],
},
"Min_Unoccupied_Cooling_Discharge_Air_Flow_Setpoint_Limit": {
"tags": [
TAG.Point,
TAG.Min,
TAG.Unoccupied,
TAG.Cool,
TAG.Discharge,
TAG.Air,
TAG.Flow,
TAG.Limit,
TAG.Parameter,
TAG.Setpoint,
],
},
},
},
"Min_Heating_Supply_Air_Flow_Setpoint_Limit": {
OWL.equivalentClass: BRICK[
"Min_Heating_Discharge_Air_Flow_Setpoint_Limit"
],
"tags": [
TAG.Point,
TAG.Min,
TAG.Heat,
TAG.Supply,
TAG.Air,
TAG.Flow,
TAG.Limit,
TAG.Parameter,
TAG.Setpoint,
],
"subclasses": {
"Min_Occupied_Heating_Supply_Air_Flow_Setpoint_Limit": {
OWL.equivalentClass: BRICK[
"Min_Occupied_Heating_Discharge_Air_Flow_Setpoint_Limit"
],
"tags": [
TAG.Point,
TAG.Min,
TAG.Occupied,
TAG.Heat,
TAG.Supply,
TAG.Air,
TAG.Flow,
TAG.Limit,
TAG.Parameter,
TAG.Setpoint,
],
},
"Min_Unoccupied_Heating_Supply_Air_Flow_Setpoint_Limit": {
OWL.equivalentClass: BRICK[
"Min_Unoccupied_Heating_Discharge_Air_Flow_Setpoint_Limit"
],
"tags": [
TAG.Point,
TAG.Min,
TAG.Unoccupied,
TAG.Heat,
TAG.Supply,
TAG.Air,
TAG.Flow,
TAG.Limit,
TAG.Parameter,
TAG.Setpoint,
],
},
},
},
"Min_Heating_Discharge_Air_Flow_Setpoint_Limit": {
"tags": [
TAG.Point,
TAG.Min,
TAG.Heat,
TAG.Discharge,
TAG.Air,
TAG.Flow,
TAG.Limit,
TAG.Parameter,
TAG.Setpoint,
],
"subclasses": {
"Min_Occupied_Heating_Discharge_Air_Flow_Setpoint_Limit": {
"tags": [
TAG.Point,
TAG.Min,
TAG.Occupied,
TAG.Heat,
TAG.Discharge,
TAG.Air,
TAG.Flow,
TAG.Limit,
TAG.Parameter,
TAG.Setpoint,
],
},
"Min_Unoccupied_Heating_Discharge_Air_Flow_Setpoint_Limit": {
"tags": [
TAG.Point,
TAG.Min,
TAG.Unoccupied,
TAG.Heat,
TAG.Discharge,
TAG.Air,
TAG.Flow,
TAG.Limit,
TAG.Parameter,
TAG.Setpoint,
],
},
},
},
},
},
},
},
},
},
},
}
}
|
nilq/baby-python
|
python
|
input = """
male(john). republican(john).
male(matt). republican(matt).
female(joana). republican(joana).
female(luise). democrat(luise).
moreMaleRepublicans :-
#count{X:republican(X), female(X)} < N,
#count{Y: republican(Y), male(Y)} = N.
"""
output = """
male(john). republican(john).
male(matt). republican(matt).
female(joana). republican(joana).
female(luise). democrat(luise).
moreMaleRepublicans :-
#count{X:republican(X), female(X)} < N,
#count{Y: republican(Y), male(Y)} = N.
"""
|
nilq/baby-python
|
python
|
from auto_yolo import envs
from yolo_air_stage1 import durations, distributions, config
# Human-readable experiment description recorded alongside the results.
readme = "Running simple on addition task."
# Launch the "simple" algorithm on the arithmetic2 (addition) task, reusing
# the stage-1 config, durations and hyperparameter distributions.
envs.run_experiment(
    "addition-stage1", config, readme, alg="simple",
    task="arithmetic2", durations=durations, distributions=distributions
)
|
nilq/baby-python
|
python
|
from singledispatch import singledispatch
from sqlalchemy import types
from sqlalchemy.dialects import postgresql
from sqlalchemy.orm import interfaces
from graphene import (ID, Boolean, Dynamic, Enum, Field, Float, Int, List,
String)
from graphene.types.json import JSONString
try:
from sqlalchemy_utils import ChoiceType, JSONType, ScalarListType, TSVectorType
except ImportError:
ChoiceType = JSONType = ScalarListType = TSVectorType = object
def get_column_doc(column):
    """Return the SQLAlchemy column's ``doc`` string, or None when absent."""
    doc = getattr(column, "doc", None)
    return doc
def is_column_nullable(column):
    """Treat a column as nullable unless it explicitly says otherwise."""
    nullable = getattr(column, "nullable", True)
    return True if nullable else False
def convert_sqlalchemy_relationship(relationship, registry, connection_field_factory):
    """Convert a SQLAlchemy relationship into a lazily-resolved graphene field.

    The graphene ``Dynamic`` wrapper defers type lookup until schema build
    time, so the related model's type may be registered after this call.
    """
    direction = relationship.direction
    model = relationship.mapper.entity

    def resolve_field():
        target_type = registry.get_type_for_model(model)
        if not target_type:
            # Related model was never registered; omit the field.
            return None
        if direction == interfaces.MANYTOONE or not relationship.uselist:
            return Field(target_type)
        if direction in (interfaces.ONETOMANY, interfaces.MANYTOMANY):
            if target_type._meta.connection:
                return connection_field_factory(relationship, registry)
            return Field(List(target_type))

    return Dynamic(resolve_field)
def convert_sqlalchemy_hybrid_method(hybrid_item):
    """Expose a hybrid method as an optional String field, reusing its docstring."""
    description = getattr(hybrid_item, "__doc__", None)
    return String(description=description, required=False)
def convert_sqlalchemy_composite(composite, registry):
    """Convert a SQLAlchemy composite field via a registered converter.

    Raises an Exception when no converter has been registered for the
    composite's class (see ``convert_sqlalchemy_composite.register``).
    """
    converter = registry.get_converter_for_composite(composite.composite_class)
    if not converter:
        try:
            # %s-formatting the composite may itself raise AttributeError when
            # the field is not yet attached to a mapped class (no parent), so
            # the formatting is attempted inside a try block.
            raise Exception(
                "Don't know how to convert the composite field %s (%s)"
                % (composite, composite.composite_class)
            )
        except AttributeError:
            # handle fields that are not attached to a class yet (don't have a parent)
            # %r avoids the parent-dependent __str__ and is always safe.
            raise Exception(
                "Don't know how to convert the composite field %r (%s)"
                % (composite, composite.composite_class)
            )
    return converter(composite, registry)
def _register_composite_class(cls, registry=None):
if registry is None:
from .registry import get_global_registry
registry = get_global_registry()
def inner(fn):
registry.register_composite_converter(cls, fn)
return inner
convert_sqlalchemy_composite.register = _register_composite_class
def convert_sqlalchemy_column(column, registry=None):
    """Dispatch on the column's SQLAlchemy type to build the graphene field."""
    column_type = getattr(column, "type", None)
    return convert_sqlalchemy_type(column_type, column, registry)
@singledispatch
def convert_sqlalchemy_type(type, column, registry=None):
    """Fallback converter: no handler is registered for this column type."""
    message = "Don't know how to convert the SQLAlchemy field %s (%s)" % (
        column,
        column.__class__,
    )
    raise Exception(message)
@convert_sqlalchemy_type.register(types.Date)
@convert_sqlalchemy_type.register(types.Time)
@convert_sqlalchemy_type.register(types.String)
@convert_sqlalchemy_type.register(types.Text)
@convert_sqlalchemy_type.register(types.Unicode)
@convert_sqlalchemy_type.register(types.UnicodeText)
@convert_sqlalchemy_type.register(postgresql.UUID)
@convert_sqlalchemy_type.register(postgresql.INET)
@convert_sqlalchemy_type.register(postgresql.CIDR)
@convert_sqlalchemy_type.register(TSVectorType)
def convert_column_to_string(type, column, registry=None):
    """Textual, date/time-of-day and network column types map onto String."""
    required = not is_column_nullable(column)
    return String(description=get_column_doc(column), required=required)
@convert_sqlalchemy_type.register(types.DateTime)
def convert_column_to_datetime(type, column, registry=None):
    """Map a DateTime column onto graphene's DateTime scalar."""
    # Imported lazily, matching the original module's import placement.
    from graphene.types.datetime import DateTime

    required = not is_column_nullable(column)
    return DateTime(description=get_column_doc(column), required=required)
@convert_sqlalchemy_type.register(types.SmallInteger)
@convert_sqlalchemy_type.register(types.Integer)
def convert_column_to_int_or_id(type, column, registry=None):
    """Primary-key integer columns become IDs; all other integers become Ints."""
    field_class = ID if column.primary_key else Int
    return field_class(
        description=get_column_doc(column),
        required=not is_column_nullable(column),
    )
@convert_sqlalchemy_type.register(types.Boolean)
def convert_column_to_boolean(type, column, registry=None):
    """Map a Boolean column onto a graphene Boolean."""
    required = not is_column_nullable(column)
    return Boolean(description=get_column_doc(column), required=required)
@convert_sqlalchemy_type.register(types.Float)
@convert_sqlalchemy_type.register(types.Numeric)
@convert_sqlalchemy_type.register(types.BigInteger)
def convert_column_to_float(type, column, registry=None):
    """Float, Numeric and BigInteger columns all map onto a graphene Float."""
    required = not is_column_nullable(column)
    return Float(description=get_column_doc(column), required=required)
@convert_sqlalchemy_type.register(types.Enum)
def convert_enum_to_enum(type, column, registry=None):
    """Convert a SQLAlchemy Enum column into a graphene Enum Field."""
    enum_class = getattr(type, 'enum_class', None)
    if enum_class:  # Check if an enum.Enum type is used
        graphene_type = Enum.from_enum(enum_class)
    else:  # Nope, just a list of string options
        # Each option becomes a (name, value) pair with name == value.
        items = zip(type.enums, type.enums)
        graphene_type = Enum(type.name, items)
    return Field(
        graphene_type,
        description=get_column_doc(column),
        required=not (is_column_nullable(column)),
    )
@convert_sqlalchemy_type.register(ChoiceType)
def convert_column_to_enum(type, column, registry=None):
    """Build a graphene Enum named ``TABLE_COLUMN`` from a ChoiceType's choices."""
    enum_name = "{}_{}".format(column.table.name, column.name).upper()
    return Enum(enum_name, type.choices, description=get_column_doc(column))
@convert_sqlalchemy_type.register(ScalarListType)
def convert_scalar_list_to_list(type, column, registry=None):
    """A scalar-list column is exposed as a list of strings."""
    doc = get_column_doc(column)
    return List(String, description=doc)
@convert_sqlalchemy_type.register(postgresql.ARRAY)
def convert_postgres_array_to_list(_type, column, registry=None):
    """Convert a PostgreSQL ARRAY column into a graphene List of its item type."""
    # The item type is read from ``column.type`` (equivalent to ``_type``
    # here), and ``registry`` is not forwarded to the recursive call --
    # TODO(review): confirm dropping the registry is intentional.
    graphene_type = convert_sqlalchemy_type(column.type.item_type, column)
    # ``graphene_type`` is an instance; List() wants the class, hence type().
    inner_type = type(graphene_type)
    return List(
        inner_type,
        description=get_column_doc(column),
        required=not (is_column_nullable(column)),
    )
@convert_sqlalchemy_type.register(postgresql.HSTORE)
@convert_sqlalchemy_type.register(postgresql.JSON)
@convert_sqlalchemy_type.register(postgresql.JSONB)
def convert_json_to_string(type, column, registry=None):
    """HSTORE/JSON/JSONB columns are exposed as a JSONString scalar."""
    required = not is_column_nullable(column)
    return JSONString(description=get_column_doc(column), required=required)
@convert_sqlalchemy_type.register(JSONType)
def convert_json_type_to_string(type, column, registry=None):
    """sqlalchemy_utils JSONType columns are exposed as a JSONString scalar."""
    required = not is_column_nullable(column)
    return JSONString(description=get_column_doc(column), required=required)
|
nilq/baby-python
|
python
|
#!/usr/bin/python
# TrayIcon
# Access to various monitoring capabilities (HIDS, dashboard, ip configuration, network recognition, etc.)
# Alerting plugin (IM notifications, irssi, OSSEC) that helps display and monitor notification information)
# Daemon plugin (sort of tail -f over selected files, RSS gathering, correlation)
# Knowledge base: regex, xpath and pattern recognition for classification
# Correlation and (help to the alerting plugin) some rules to notify additional messages (security, twitter)
# Social feeds integration (at least for Twitter) and we can imagine some sort of lexicometric tricks over it with correlation and alerting
# import sys
# from PyQt4 import QtGui
#
# class SystemTrayIcon(QtGui.QSystemTrayIcon):
# def __init__(self, icon, parent=None):
# QtGui.QSystemTrayIcon.__init__(self, icon, parent)
# self.menu = QtGui.QMenu(parent)
# exitAction = self.menu.addAction("Exit")
# self.setContextMenu(self.menu)
#
# def main():
# app = QtGui.QApplication(sys.argv)
# style = app.style()
# icon = QtGui.QIcon(style.standardPixmap(QtGui.QStyle.SP_FileIcon))
# trayIcon = SystemTrayIcon(icon)
#
# trayIcon.show()
# sys.exit(app.exec_())
#
# if __name__ == '__main__':
# main()
# TO BE CONTINUED... ?
|
nilq/baby-python
|
python
|
"""
Pipeline object class for EmrActivity
"""
from .activity import Activity
from ..config import Config
from .schedule import Schedule
from ..utils import constants as const
from ..utils.exceptions import ETLInputError
# Shared dataduct configuration; MAX_RETRIES falls back to const.ZERO when
# the etl section does not define it.
config = Config()
MAX_RETRIES = config.etl.get('MAX_RETRIES', const.ZERO)
class EmrActivity(Activity):
    """Pipeline activity that runs an EMR step (AWS Data Pipeline EmrActivity)."""
    def __init__(self,
                 id,
                 resource,
                 schedule,
                 input_node,
                 emr_step_string,
                 output_node=None,
                 additional_files=None,
                 max_retries=None,
                 depends_on=None):
        """Constructor for the EmrActivity class

        Args:
            id(str): id of the object
            resource(Ec2Resource / EMRResource): resource to run the activity on
            schedule(Schedule): schedule of the pipeline
            input_node(S3Node): input node for the emr job
            emr_step_string(list of str): command string to be executed
            output_node(S3Node): output_node for the emr job
            additional_files(list of S3File): Additional files required for emr
            max_retries(int): number of retries for the activity
            depends_on(list of activities): dependent pipeline steps

        Raises:
            ETLInputError: if ``schedule`` is not a Schedule instance.
        """
        # Validate inputs
        if not isinstance(schedule, Schedule):
            raise ETLInputError(
                'Input schedule must be of the type Schedule')
        # Set default values
        if depends_on is None:
            depends_on = []
        if max_retries is None:
            max_retries = MAX_RETRIES  # module-level default from config
        super(EmrActivity, self).__init__(
            id=id,
            type='EmrActivity',
            maximumRetries=max_retries,
            dependsOn=depends_on,
            runsOn=resource,
            schedule=schedule,
            step=emr_step_string,
            output=output_node,
            input=input_node,
        )
        self.add_additional_files(additional_files)
nilq/baby-python
|
python
|
from django.test import TestCase
from . models import Urls, Statistics
class UrlsTestClass(TestCase):
    '''
    Tests for the Urls model and its class/instance methods.
    '''
    def setUp(self):
        '''
        Create a fresh Urls instance before each test.
        '''
        self.url = Urls(short_id='hrhje', httpurl='http://google.com')

    def test_isinstance(self):
        '''
        The fixture should be an instance of Urls.
        '''
        self.assertTrue(isinstance(self.url, Urls))

    def test_save_url(self):
        '''
        save_url() should persist the instance.
        '''
        self.url.save_url()
        saved_urls = Urls.objects.all()
        self.assertTrue(len(saved_urls) > 0)

    def test_count_unique(self):
        '''
        count_unique() should count the saved rows for a given url.
        '''
        self.url.save()
        # Renamed from ``all`` to avoid shadowing the builtin.
        unique_count = Urls.count_unique('http://google.com')
        self.assertEqual(unique_count, 1)

    def test_shortcode_exist(self):
        '''
        shortcode_exist() should find a saved shortcode.
        '''
        self.url.save()
        is_existent = Urls.shortcode_exist('hrhje')
        self.assertTrue(is_existent)

    def test_url_exist(self):
        '''
        url_exist() should find a saved url.
        '''
        self.url.save()
        is_existent = Urls.url_exist('http://google.com')
        self.assertTrue(is_existent)

    def test_code_generator(self):
        '''
        code_generator() should run while existing codes remain findable.
        '''
        self.url.save()
        self.url.code_generator()
        is_existent = self.url.shortcode_exist('hrhje')
        self.assertTrue(is_existent)

    def test_get_url_by_shorcode(self):
        '''
        get_url_by_shorcode() should return the row with the given shortcode.
        '''
        self.url.save()
        url = Urls.get_url_by_shorcode('hrhje')
        self.assertEqual(url.short_id, 'hrhje')

    def test_get_shortcode_by_url(self):
        '''
        get_shortcode_by_url() should return the row for the given url.
        '''
        self.url.save()
        requested_url = Urls.get_shortcode_by_url('http://google.com')
        self.assertEqual(requested_url.short_id, 'hrhje')
class OtherFunctionsTestClass(TestCase):
    '''
    Tests for helper functionality of the app that is not tied to a
    single model method.
    '''
    def setUp(self):
        '''
        Build a fresh (not yet saved) Urls instance before each test.
        '''
        self.url = Urls(short_id = 'hrhje', httpurl ='http://google.com')
    def test_Url_Validator(self):
        # TODO: implement — currently a placeholder that always passes.
        pass
class StatisticsTestClass(TestCase):
    '''
    Tests for the Statistics model: click totals and popularity index.
    '''
    def setUp(self):
        '''
        Build a fresh (not yet saved) Statistics instance before each test.
        '''
        self.statistic = Statistics(name='statistics')
    def test_isinstance(self):
        '''
        The fixture object is an instance of Statistics.
        '''
        self.assertTrue(isinstance(self.statistic,Statistics))
    def test_get_total_clicks(self):
        '''
        get_total_clicks reflects the saved total_clicks value.
        '''
        self.statistic.save()
        self.statistic.total_clicks +=1
        self.statistic.save()
        self.assertTrue(Statistics.get_total_clicks() == 1)
    def test_calculate_popularity(self):
        '''
        calculate_popularity(1) yields 2 after two extra clicks.
        '''
        self.statistic.save()
        self.statistic.total_clicks +=2
        # NOTE(review): calculate_popularity is called before the +2 is
        # saved; confirm whether it reads the in-memory or stored value.
        calculated_index = Statistics.calculate_popularity(1)
        self.statistic.save()
        self.assertTrue(calculated_index == 2)
|
nilq/baby-python
|
python
|
import ipdb
import numpy as np
import os
from multiprocessing import Process, Queue, Lock
from moviepy.video.io.VideoFileClip import VideoFileClip as Video
import skvideo.measure as skv
from glob import glob
import csv
from tqdm import tqdm
def job(item):
    """Extract frames from one video file as BMP images at 8 fps.

    `item` is a (filename, input_root, output_root) tuple; the output
    directory mirrors the input path with the extension dropped.
    """
    fn, indir, outdir = item
    outdir = os.path.splitext(fn.replace(indir, outdir))[0]
    if not os.path.isdir(outdir):
        os.makedirs(outdir)
    vid = Video(fn)
    vid.write_images_sequence(os.path.join(outdir, '%06d.bmp'), fps=8, verbose=False, logger=None)
    vid.close()
def worker(inq, outq, lock):
    """Consume items from inq until a None sentinel arrives, running
    job() on each and signalling each completion on outq.

    `lock` is accepted for interface compatibility but not used here.
    """
    while True:
        task = inq.get()
        if task is None:
            break
        job(task)
        outq.put(0)
if __name__ == "__main__":
    # Fan the video list out to `nproc` worker processes via a task
    # queue, then count completions on outq to drive the progress bar.
    inq = Queue()
    outq = Queue()
    lock = Lock()
    nproc = 40
    #basepath = "PATH/TO/scenes"
    basepath = "YOUR PATH HERE"
    outdir = "YOUR PATH HERE"
    data=glob(os.path.join(basepath, '**/*.mp4'), recursive=True)
    for item in data:
        inq.put((item, basepath, outdir))
    # One None sentinel per worker tells it to exit.
    for i in range(nproc):
        inq.put(None)
    for i in range(nproc):
        Process(target=worker, args=(inq, outq, lock)).start()
    # Each finished job puts one token on outq; tqdm shows progress.
    for item in tqdm(data):
        outq.get()
|
nilq/baby-python
|
python
|
from biocrnpyler import *
# Rate constants — names suggest binding, unbinding, transcription,
# translation, degradation (confirm against BasicExtract's parameter docs).
kb, ku, ktx, ktl, kdeg = 100, 10, 3, 2, 1
parameters = {"kb": kb, "ku": ku, "ktx": ktx, "ktl": ktl, "kdeg": kdeg}
# Build a TX-TL extract mixture containing one constitutive DNA assembly.
myMixture = BasicExtract(name="txtl", parameters=parameters)
A1 = DNAassembly(name="G1", promoter="pBest",
                 rbs="BCD2", transcript="T1", protein="GFP", initial_concentration=10, parameter_warnings = False)
# Note: Protein and Transcript strings (or chemical_reaction_network.specie objects) are optional parameters
# DNAassemblies default to using their name for their transcript and protein products.
myMixture.add_components(A1)
# Compile the mixture into a chemical reaction network and export SBML.
myCRN = myMixture.compile_crn()
print("\n" + repr(A1))
print("\n" + repr(myMixture))
print("\n" + repr(myCRN))
#print("\nmyMixture.parameters", myMixture.parameters)
#print("\ndna_assembly.parameters", A1.parameters)
file_name = "constitutive_expression_test.xml"
f = myCRN.write_sbml_file(file_name)
nilq/baby-python
|
python
|
from django.db import models
from django.conf import settings
from django import forms
# Create your models here.
class Dataset(models.Model):
    """A user-owned dataset: an uploaded data file plus its metadata."""
    name = models.CharField(max_length=200, null=True, blank=True)
    # PROTECT prevents deleting a user who still owns datasets.
    owner = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.PROTECT)
    columns = models.IntegerField()  # TODO: add validator limiting to 10
    data = models.FileField()
    description = models.TextField(max_length=5000)
    stars = models.IntegerField(default=0)
class Attribute(models.Model):
    """A single named, typed column belonging to a Dataset."""
    # Bug fix: on_delete is a required ForeignKey argument (Django 2.0+);
    # CASCADE removes attributes together with their dataset.
    dataset = models.ForeignKey(Dataset, on_delete=models.CASCADE)
    # Bug fix: max_length is mandatory for CharField (system check fields.E120).
    name = models.CharField(max_length=200)
    datatype = models.CharField(max_length=50)
# Forms
class DatasetForm(forms.ModelForm):
    """Form for creating a Dataset.

    Bug fix: the original declared django.db.models fields on the form;
    model fields are not recognised as form fields and would be ignored,
    so the declarations are replaced with their django.forms equivalents.
    """
    # required=False mirrors the model's null=True/blank=True.
    name = forms.CharField(max_length=200, required=False)
    columns = forms.IntegerField()  # TODO: add validator limiting to 10
    description = forms.CharField(max_length=5000, widget=forms.Textarea)
class DataForm(forms.ModelForm):
    """Dynamic form with one (name, datatype) field pair per column."""

    # ChoiceField expects (value, label) pairs, not bare strings.
    DATATYPE_CHOICES = [('image', 'image'), ('text', 'text'), ('integer', 'integer')]

    def __init__(self, columns):
        # Bug fix: the original called `super.__init__(self)` — that is the
        # builtin `super` type, not a call to the parent initialiser.
        super().__init__()
        for i in range(columns):
            # Bug fix: form fields must come from django.forms, not
            # django.db.models.
            self.fields['name' + str(i)] = forms.CharField(max_length=200)
            self.fields['datatype' + str(i)] = forms.ChoiceField(choices=self.DATATYPE_CHOICES)

    def save(self, columns, dataset):
        """Create one Attribute per column from the submitted values."""
        for i in range(columns):
            # Bug fix: read submitted values from cleaned_data; self.fields
            # holds the Field objects themselves, not the user input.
            Attribute.objects.create(
                dataset=dataset,
                name=self.cleaned_data['name' + str(i)],
                datatype=self.cleaned_data['datatype' + str(i)]
            )
|
nilq/baby-python
|
python
|
"""
OpenVINO DL Workbench
Class for annotate dataset job
Copyright (c) 2021 Intel Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from sqlalchemy.orm import Session
from config.constants import OPENVINO_ROOT_PATH
from wb.main.enumerates import JobTypesEnum
from wb.main.jobs.accuracy_analysis.annotate_datset.annotate_dataset_job import AnnotateDatasetJob
from wb.main.models import AnnotateDatasetJobModel
class LocalAnnotateDatasetJob(AnnotateDatasetJob):
    """Annotate-dataset job variant that runs on the local machine."""
    job_type = JobTypesEnum.annotate_dataset

    def _set_paths(self, session: Session):
        """
        Set job paths for local and remote use-cases.
        This method mutates `job_bundle_path`, `_openvino_path` and `_venv_path` fields
        """
        accuracy_job_model: AnnotateDatasetJobModel = self.get_job_model(session)
        self.job_bundle_path = str(self.get_dataset_annotation_path(accuracy_job_model))
        # Locally the system-wide OpenVINO install is used directly and no
        # virtualenv is required.
        self._openvino_path = OPENVINO_ROOT_PATH
        self._venv_path = None

    def collect_artifacts(self):
        # Nothing to collect for the local use-case.
        pass
|
nilq/baby-python
|
python
|
from bs4 import BeautifulSoup
from contextlib import suppress
RUN_EXAMPLE = 2
class Match:
    """
    Holds both team names, the match state and both scores for one match.
    """

    def __init__(self, team1: str, team2: str, state, _, score1: int, score2: int):
        # The 4th positional value is discarded; a trailing "GOAL" marker
        # is stripped from team names.
        self.team1 = self._sanitize(team1)
        self.team2 = self._sanitize(team2)
        self.state = state
        self.score1 = score1
        self.score2 = score2

    @staticmethod
    def _sanitize(team_name: str):
        suffix = "GOAL"
        if team_name.endswith(suffix):
            return team_name[:-len(suffix)]
        return team_name

    def __str__(self):
        return "{} vs {}, {} ({}, {})".format(self.team1, self.team2, self.state, self.score1, self.score2)

    def __repr__(self):
        return self.__str__()
def open_example_page():
    """
    For testing purposes.
    Reads the local bet365.html file and parses it.
    Activate by using the flag --testing

    Returns the raw HTML as a string.
    """
    with open("bet365.html", "r", encoding="utf8") as file:
        contents = file.read()
    return contents
def chunks(l, n):
    """Yield successive n-sized chunks from l (last one may be shorter)."""
    start = 0
    while start < len(l):
        yield l[start:start + n]
        start += n
def parse_bet365(page=RUN_EXAMPLE):
    """Parse a bet365 in-play HTML page into a list of Match objects.

    Team names and scores are collected in document order from divs with
    the ipo-TeamStack_Team / ipo-TeamPoints_TeamScore* classes; every
    six collected strings are assumed to describe one match.
    """
    if page == RUN_EXAMPLE:
        page = open_example_page()
    else:
        pass
        #print("got\n " + page)
    soup = BeautifulSoup(page, 'html.parser')
    rows = soup.find_all('div')
    items = []
    for row in rows:
        if row.has_attr('class'):
            # Rows with an empty class list would raise IndexError; skip them.
            with suppress(IndexError):
                if "ipo-TeamStack_Team" in row['class']:
                    items.append(row.text)
                elif any(x.startswith("ipo-TeamPoints_TeamScore") for x in row['class']):
                    items.append(row.text)
    return [Match(*x) for x in chunks(items, 6)]
|
nilq/baby-python
|
python
|
from django.apps import AppConfig
class AwewardsConfig(AppConfig):
    """Django application configuration for the awewards app."""
    default_auto_field = 'django.db.models.BigAutoField'
    name = 'awewards'
|
nilq/baby-python
|
python
|
from setuptools import setup, find_packages
# Read runtime dependencies from requirements.txt so install_requires
# matches the development environment.
with open("requirements.txt") as f:
    requirements = f.readlines()
long_description = "Automated tool to provision Greengrass 2.0"
setup(
    name="ggv2_provisioner",
    version="0.0.8",
    author="Gavin Adams",
    author_email="gavinaws@amazon.com",
    url="https://github.com/gadams999/greengrassv2-provisioner",
    description="Greengrass 2.0 command line provisioner",
    long_description=long_description,
    long_description_content_type="text/markdown",
    license="Apache-2.0",
    packages=find_packages(),
    entry_points={"console_scripts": ["ggv2-provisioner = ggv2_provisioner:main"]},
    classifiers=[
        "Programming Language :: Python :: 3",
        "License :: OSI Approved :: Apache Software License",
        "Operating System :: OS Independent",
    ],
    # NOTE(review): ">3.6" excludes Python 3.6 itself; if 3.6 should be
    # supported, this was presumably meant to be ">=3.6" — confirm.
    python_requires=">3.6",
    keywords="greengrass ggv2 provision provisioner",
    install_requires=requirements,
    zip_safe=False,
)
|
nilq/baby-python
|
python
|
'''
Load the CIOD module tables from DICOM Standard PS3.3, Annex A.
All CIOD tables are defined in chapter A of the DICOM Standard.
Output the tables in JSON format, one entry per CIOD.
'''
from typing import List, Tuple
import sys
import re
from bs4 import Tag
from dicom_standard import parse_lib as pl
from dicom_standard import parse_relations as pr
from dicom_standard.macro_utils import MetadataTableType
from dicom_standard.table_utils import (
StringifiedTableListType,
TableDictType,
get_chapter_tables,
tables_to_json,
get_short_standard_link,
get_table_description,
table_to_dict,
)
CHAPTER_IDS = ['chapter_A', 'chapter_F']
# Standard workaround: Include upper case "S" to catch typo in Table A.39.19-1
# http://dicom.nema.org/medical/dicom/current/output/chtml/part03/sect_A.35.19.3.html
TABLE_SUFFIX = re.compile(".*IOD Module[sS]$")
COLUMN_TITLES_WITH_IE = ['informationEntity', 'module', 'referenceFragment', 'usage']
COLUMN_TITLES_NO_IE = ['module', 'referenceFragment', 'usage', 'description']
def is_valid_ciod_table(table_div: Tag) -> bool:
    """Return True when the table's name matches TABLE_SUFFIX
    (names ending in "IOD Modules", including the known "ModuleS" typo)."""
    return TABLE_SUFFIX.match(pr.table_name(table_div)) is not None
def ciod_table_to_dict(table: StringifiedTableListType) -> List[TableDictType]:
    """Convert a stringified CIOD table into row dicts, choosing column
    titles based on whether the table has an "Information Entity" column."""
    # Table F.3-1 (the only table in section F) has no "Information Entity" column, so we check for the href in the second column
    # http://dicom.nema.org/dicom/2013/output/chtml/part03/sect_F.3.html#table_F.3-1
    sect_f_table = 'href' in table[0][1]
    column_titles = COLUMN_TITLES_NO_IE if sect_f_table else COLUMN_TITLES_WITH_IE
    return table_to_dict(table, column_titles)
def get_table_with_metadata(table_with_tdiv: Tuple[List[TableDictType], Tag]) -> MetadataTableType:
    """Wrap a parsed CIOD table with its name, slug id, description and
    a link back to the DICOM standard."""
    table, tdiv = table_with_tdiv
    clean_name = pl.clean_table_name(pr.table_name(tdiv))
    table_description = get_table_description(tdiv)
    return {
        'name': clean_name,
        'modules': table,
        'id': pl.create_slug(clean_name),
        'description': str(table_description),
        'linkToStandard': get_short_standard_link(tdiv)
    }
if __name__ == "__main__":
    # Usage: python <script> <part03.html> — parse every CIOD table in
    # chapters A and F and emit them as pretty-printed JSON.
    standard = pl.parse_html_file(sys.argv[1])
    tables = []
    tdivs = []
    for chapter_id in CHAPTER_IDS:
        chapter_tables, chapter_tdivs = get_chapter_tables(standard, chapter_id, is_valid_ciod_table)
        tables += chapter_tables
        tdivs += chapter_tdivs
    parsed_table_data = tables_to_json(tables, tdivs, ciod_table_to_dict, get_table_with_metadata)
    pl.write_pretty_json(parsed_table_data)
|
nilq/baby-python
|
python
|
import requests
import json
import yaml
def checkDomains(domains):
    """Query the VirusTotal v2 URL-report endpoint for each domain.

    Returns the list of JSON reports collected so far; stops at the
    first request failure (typically the free-tier rate limit).
    """
    url = 'https://www.virustotal.com/vtapi/v2/url/report'
    scans = []
    for dom in domains:
        params = {'apikey':getApiKey('vt'), 'resource':dom}
        try:
            response = requests.get(url, params=params)
            scans.append(response.json())
        except Exception as e:
            # Deliberate best-effort: report and stop checking the rest.
            print("It was not possible to check the {} domain.\nMaybe we hit VT free limit? Try upgrading your API license".format(dom))
            break
    return scans
def checkAbuseIP(ips):
    """Look up each IP on AbuseIPDB over a 90-day window.

    Returns a dict {ip: isWhitelisted}; IPs whose lookup fails are
    omitted (the error is printed and the loop continues).
    """
    checkedIPs = {}
    for ip in ips:
        url = 'https://api.abuseipdb.com/api/v2/check'
        querystring = {
            'ipAddress': ip,
            'maxAgeInDays': '90'
        }
        headers = {
            'Accept': 'application/json',
            'Key': getApiKey('abuseipdb')
        }
        try:
            response = requests.request(method='GET', url=url, headers=headers, params=querystring)
            whitelisted = json.loads(response.text)['data']['isWhitelisted']
            checkedIPs[ip] = whitelisted
        except Exception as e:
            print(e)
    return checkedIPs
def getApiKey(provider):
    """Read the API key for `provider` ('vt' or 'abuseipdb') from the
    netlyzer YAML config; returns None for unknown providers."""
    with open("/opt/netlyzer/config.yml", 'r') as ymlfile:
        cfg = yaml.load(ymlfile, Loader=yaml.FullLoader)
    key_names = {"vt": "vtApiKey", "abuseipdb": "abuseIPDBKey"}
    if provider in key_names:
        return cfg['api'][key_names[provider]]
|
nilq/baby-python
|
python
|
from torchvision import models
import torch.nn as nn
class model(nn.Module):
    """Multi-branch embedding network.

    One MLP encoder per input domain (selected by index in forward)
    followed by a shared projection head. `input_dim` is a list of
    per-domain feature sizes; every branch maps to `output_dim`.
    """

    def __init__(self, input_dim, output_dim):
        super(model, self).__init__()
        self.restored = False
        self.input_dim = input_dim
        self.output_dim = output_dim

        def make_encoder(dim):
            # widen to 2*dim, process, narrow back to dim, project to
            # output_dim; each Linear is followed by BatchNorm1d and an
            # in-place LeakyReLU(0.1).
            return nn.Sequential(
                nn.Linear(dim, 2 * dim),
                nn.BatchNorm1d(2 * dim),
                nn.LeakyReLU(0.1, True),
                nn.Linear(2 * dim, 2 * dim),
                nn.BatchNorm1d(2 * dim),
                nn.LeakyReLU(0.1, True),
                nn.Linear(2 * dim, dim),
                nn.BatchNorm1d(dim),
                nn.LeakyReLU(0.1, True),
                nn.Linear(dim, self.output_dim),
                nn.BatchNorm1d(self.output_dim),
                nn.LeakyReLU(0.1, True),
            )

        # One encoder per domain, in the order the domains are listed.
        self.feature = nn.ModuleList(make_encoder(d) for d in input_dim)
        # Shared head applied after every branch.
        self.feature_show = nn.Sequential(
            nn.Linear(output_dim, output_dim),
            nn.BatchNorm1d(output_dim),
            nn.LeakyReLU(0.1, True),
            nn.Linear(output_dim, output_dim),
            nn.BatchNorm1d(output_dim),
            nn.LeakyReLU(0.1, True),
            nn.Linear(output_dim, output_dim),
        )

    def forward(self, input_data, domain):
        """Encode a batch from the given domain index and project it."""
        encoded = self.feature[domain](input_data)
        return self.feature_show(encoded)
|
nilq/baby-python
|
python
|
INSTRUCTIONS = """
"""
from utils.decorators import time_this
@time_this
def solution(inputs):
    """
    Solve the puzzle for `inputs` (implementation pending); the
    time_this decorator measures execution time.
    """
# Concrete inputs for exercising solution(); fill in per problem.
test_case_inputs = [
]
|
nilq/baby-python
|
python
|
class SilkObject:
    """Base class for Silk data objects."""
    # Empty __slots__: this base adds no __dict__, so subclasses can opt
    # into fully slot-based storage.
    __slots__ = []
    def __ne__(self, other):
        # Explicit != in terms of ==. Python 3 derives this automatically;
        # presumably kept for Python 2 compatibility — confirm before removing.
        return not self.__eq__(other)
class SilkStringLike(SilkObject):
    """Marker base class for Silk objects that behave like strings."""
    __slots__ = []
from . import primitives
|
nilq/baby-python
|
python
|
from .transpose import transpose
|
nilq/baby-python
|
python
|
from functools import partial
from flask import Blueprint, current_app, g
from api.client import SecurityTrailsClient, ST_OBSERVABLE_TYPES
from api.mappings import Mapping
from api.schemas import ObservableSchema
from api.utils import get_json, jsonify_data, get_key, jsonify_result
enrich_api = Blueprint('enrich', __name__)
get_observables = partial(get_json, schema=ObservableSchema(many=True))
@enrich_api.route('/deliberate/observables', methods=['POST'])
def deliberate_observables():
    """Deliberation is not supported by this module; return empty data."""
    return jsonify_data({})
@enrich_api.route('/observe/observables', methods=['POST'])
def observe_observables():
    """Build sightings for each posted observable via SecurityTrails.

    Sightings are accumulated on flask.g; a KeyError while extracting a
    record is treated as a SecurityTrails schema change and reported as
    a single fatal error.
    """
    key = get_key()
    observables = get_observables()
    client = SecurityTrailsClient(current_app.config['API_URL'],
                                  key,
                                  current_app.config['USER_AGENT'],
                                  current_app.config['NUMBER_OF_PAGES'],
                                  current_app.config['GET_ALL_PAGES'])
    g.sightings = []
    try:
        for observable in observables:
            mapping = Mapping.for_(observable)
            # Observable types without a mapping are skipped.
            if mapping:
                client_data = client.get_data(observable)
                for record in client_data:
                    refer_link = client.refer_link(
                        current_app.config['UI_URL'], observable
                    )
                    sighting = mapping.extract_sighting(record, refer_link)
                    if sighting:
                        g.sightings.append(sighting)
    except KeyError:
        g.errors = [{
            'type': 'fatal',
            'code': 'key error',
            'message': 'The data structure of SecurityTrails '
                       'has changed. The module is broken.'
        }]
    return jsonify_result()
@enrich_api.route('/refer/observables', methods=['POST'])
def refer_observables():
    """Return SecurityTrails search links for each supported observable."""
    observables = get_observables()
    ui_url = current_app.config['UI_URL']
    data = []
    for observable in observables:
        # Unsupported observable types are silently skipped.
        type_ = ST_OBSERVABLE_TYPES.get(observable['type'])
        if type_:
            data.append(
                {
                    'id': (
                        'ref-securitytrails-search-{type}-{value}'.format(
                            **observable)
                    ),
                    'title': f'Search for this {type_}',
                    'description': (
                        f'Lookup this {type_} on SecurityTrails'
                    ),
                    'url': SecurityTrailsClient.refer_link(ui_url, observable),
                    'categories': ['Search', 'SecurityTrails'],
                }
            )
    return jsonify_data(data)
|
nilq/baby-python
|
python
|
from sqlite3 import dbapi2 as sqlite3
from flask import Flask, request, session, g, redirect, url_for, abort, \
render_template, flash, _app_ctx_stack
import requests, os
from bs4 import BeautifulSoup
# configuration
# NOTE(review): the plain assignments below can never raise ImportError,
# so the except branch is dead code and the hard-coded development values
# are always used. A config-module import was presumably intended here —
# confirm before relying on the environment-variable fallback.
try:
    DATABASE = 'simply-billboard.db'
    DEBUG = True
    SECRET_KEY = 'development key'
    USERNAME = 'admin'
    PASSWORD = 'default'
except ImportError:
    SECRET_KEY = os.environ.get('SECRET_KEY')
    USERNAME = os.environ.get('USERNAME')
    PASSWORD = os.environ.get('PASSWORD')
    DEBUG = False
# create our little application :)
app = Flask(__name__)
app.config.from_object(__name__)
def init_db():
    """Creates the database tables and loads the current Hot 100 chart."""
    with app.app_context():
        db = get_db()
        with app.open_resource('schema.sql', mode='r') as f:
            db.cursor().executescript(f.read())
        # Local import: the module name collides with the billboard()
        # view function below, so it is only bound inside this scope.
        import billboard
        chart = billboard.ChartData('hot-100', date=None, fetch=True, all=False)
        for x in range(0, 100):
            db.execute('INSERT INTO billboard100 (title, artist, peakPos, lastPos, weeks, rankChange) VALUES (?, ?, ?, ?, ?, ?)',
                       [chart[x].title, chart[x].artist, chart[x].peakPos, chart[x].lastPos, chart[x].weeks, chart[x].change])
        db.commit()
def get_db():
    """Opens a new database connection if there is none yet for the
    current application context.
    """
    # NOTE(review): _app_ctx_stack is a Flask internal API; newer Flask
    # versions remove it — confirm the pinned Flask version.
    top = _app_ctx_stack.top
    if not hasattr(top, 'sqlite_db'):
        sqlite_db = sqlite3.connect(app.config['DATABASE'])
        # Row factory gives dict-like row access in templates.
        sqlite_db.row_factory = sqlite3.Row
        top.sqlite_db = sqlite_db
    return top.sqlite_db
@app.teardown_appcontext
def close_db_connection(exception):
    """Closes the database again at the end of the request."""
    top = _app_ctx_stack.top
    if hasattr(top, 'sqlite_db'):
        top.sqlite_db.close()
@app.route('/')
def billboard():
    """Render the Hot 100 table ordered by chart rank."""
    db = get_db()
    cur = db.execute('SELECT * FROM billboard100 ORDER BY rank')
    entries = cur.fetchall()
    return render_template('billboard.html', entries=entries)
if __name__ == '__main__':
    # NOTE(review): init_db() re-runs schema.sql and re-inserts the chart
    # on every start; confirm the schema drops/recreates the table first.
    init_db()
    port = int(os.environ.get("PORT", 5000))
    app.run(host='0.0.0.0', port=port)
nilq/baby-python
|
python
|
#!/usr/bin/env python
# Copyright 2015 Dmitriy Robota.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import atexit
from xvfbwrapper import Xvfb
from robot.api import logger
from XvfbRobot.version import VERSION
__version__ = VERSION
class XvfbRobot(object):
    """
    A robot library for creating virtual display on demand
    """
    ROBOT_LIBRARY_SCOPE = "GLOBAL"
    ROBOT_LIBRARY_VERSION = VERSION
    # Shared across the GLOBAL library scope: only one virtual display
    # is ever created per test run.
    _display = None

    def start_virtual_display(self, width=1440, height=900,
                              colordepth=24, **kwargs):
        """Starts virtual display which will be
        destroyed after test execution will be end

        *Arguments:*
        - width: a width to be set in pixels
        - height: a height to be set in pixels
        - color_depth: a color depth to be used
        - kwargs: extra parameters

        *Example:*
        | Start Virtual Display |
        | Start Virtual Display | 1920 | 1080 |
        | Start Virtual Display | ${1920} | ${1080} | ${16} |
        """
        if self._display is None:
            logger.info("Using virtual display: '{0}x{1}x{2}'".format(
                width, height, colordepth))
            self._display = Xvfb(int(width), int(height),
                                 int(colordepth), **kwargs)
            self._display.start()
            # Tear the display down when the interpreter exits.
            atexit.register(self._display.stop)
nilq/baby-python
|
python
|
from settings import *
class BonusBox:
    """A collectible box drawn as a small rectangle on the game canvas.

    `data` is a dict of string fields (boxID, x, y, type); `gui` supplies
    the canvas, the world-to-pixel `scale`, and the shared `bonusBoxes`
    registry list.
    """

    # Fill colours per box type; unknown types fall back to DEFAULT_COLOR.
    TYPE_COLORS = {
        1: "orange",   # cargo
        2: "yellow",
        21: "green",
    }
    DEFAULT_COLOR = "gray"

    def __init__(self, data, gui):
        self.boxID = int(data["boxID"])
        self.x = int(data["x"])
        self.y = int(data["y"])
        self.type = int(data["type"])
        self.size = 2
        self.gui = gui
        # Bug fix: the original left self.color unset for unknown types,
        # which crashed below with AttributeError; fall back to a default.
        self.color = self.TYPE_COLORS.get(self.type, self.DEFAULT_COLOR)
        self.gui.bonusBoxes.append(self)
        self.guiObj = self.gui.canvas.create_rectangle(
            (self.x/100 * self.gui.scale)-self.size,
            (self.y/100 * self.gui.scale)-self.size,
            (self.x/100 * self.gui.scale)+self.size,
            (self.y/100 * self.gui.scale)+self.size,
            fill=self.color
        )

    def hide(self):
        """Erase the rectangle from the canvas."""
        # self.gui.setColor(self.guiObj, "black")
        self.gui.canvas.delete(self.guiObj)

    def show(self):
        """Restore the rectangle's fill colour."""
        self.gui.setColor(self.guiObj, self.color)

    def remove(self):
        """Deregister the box and erase it from the canvas."""
        if self in self.gui.bonusBoxes:
            self.gui.bonusBoxes.remove(self)
        self.hide()
nilq/baby-python
|
python
|
# Copyright (c) 2016, Ethan White
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the <organization> nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL <COPYRIGHT HOLDER> BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import pyqrcode
import math
"""
This file aims to turn a QR code produced by pyqrcode's `text()` method
into something similar to the output of `qrencode -t utf8`, thus
allowing it to take up half the space in each direction and fit on an
80x24 terminal.
"""
class QRMatrix:
    """Boolean matrix view over pyqrcode's text() output lines.

    Each line is a string of '0'/'1' characters; indexing with an
    (i, j) tuple reads lines[i][j] and returns True for dark modules,
    False for light or out-of-range ones.
    """

    def __init__(self, lines):
        self.lines = lines

    def __getitem__(self, param):
        if type(param) is not tuple:
            raise ValueError("Expected tuple")
        x, y = param
        try:
            return self.lines[x][y] == "1"
        except IndexError:
            # Out-of-range modules read as light (False).
            return False

    def get_width(self):
        """Number of characters per line."""
        return len(self.lines[0])

    def get_height(self):
        """Number of lines."""
        return len(self.lines)

    def get_size(self):
        # Bug fix: the original called the bare names get_width() /
        # get_height(), raising NameError — they are methods on self.
        return self.get_width(), self.get_height()

    width = property(get_width)
    height = property(get_height)
    size = property(get_size)
class QRWrapper:
    """Renders a QR code in half-height form using Unicode block
    characters, similar to the output of `qrencode -t utf8`."""

    def __init__(self, data):
        self.matrix = QRMatrix(pyqrcode.create(data, error="L").text().split("\n"))

    def _get_display_char(self, top, bottom):
        # Dark modules are drawn as background (space) and light modules
        # as full/half blocks; each output char covers two matrix rows.
        if top:
            return " " if bottom else "\u2584"
        return "\u2580" if bottom else "\u2588"

    def compact_repr(self):
        """Return the code as text, two matrix rows per output line."""
        out = []
        for row in range(self.matrix.height // 2):
            chars = [
                self._get_display_char(self.matrix[col, 2 * row], self.matrix[col, 2 * row + 1])
                for col in range(self.matrix.width)
            ]
            out.append("".join(chars))
        return "\n".join(out)
if __name__ == "__main__":
    # Manual smoke test: print a sample QR code to the terminal.
    print(QRWrapper("Just for debugging!").compact_repr())
|
nilq/baby-python
|
python
|
from validator.rule_pipe_validator import RulePipeValidator as RPV
from validator import rules as R
from validator import Validator, validate, validate_many, rules as R
def test_rpv_001_simple():
    """Size(10) on the string "10" passes when preceded by Integer(),
    and fails without it."""
    data = "10"
    # with integer
    rules = [R.Integer(), R.Size(10)]
    rpv = RPV(data, rules)
    assert rpv.execute()
    # without integer
    rules = [R.Size(10)]
    rpv = RPV(data, rules)
    assert not rpv.execute()
def test_rpv_002_simple():
    """Size(10) on a 10-element list passes with List(), and fails when
    the pipe starts with Integer()."""
    data = [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
    # with list
    rules = [R.List(), R.Size(10)]
    rpv = RPV(data, rules)
    assert rpv.execute()
    # without list
    rules = [R.Integer(), R.Size(10)]
    rpv = RPV(data, rules)
    assert not rpv.execute()
def test_rpv_003_simple():
    """String-syntax rules on an int value: size matches the numeric
    value (with or without an explicit integer rule)."""
    request = {"age": 23}
    rule = {"age": "integer|size:23"}
    result = Validator(request, rule).validate()
    assert result
    request = {"age": 23}
    rule = {"age": "size:23"}
    result = Validator(request, rule).validate()
    assert result
    request = {"age": 23}
    rule = {"age": "size:2"}
    result = Validator(request, rule).validate()
    assert not result
    request = {"age": 123456789}
    rule = {"age": "size:9"}
    result = Validator(request, rule).validate()
    assert not result
    request = {"age": 123456789}
    rule = {"age": "integer|size:123456789"}
    result = Validator(request, rule).validate()
    assert result
def test_rpv_004_simple():
    """String-syntax rules on a numeric string: bare size checks the
    string length, while integer|size checks the numeric value."""
    request = {"age": "23"}
    rule = {"age": "integer|size:23"}
    result = Validator(request, rule).validate()
    assert result
    request = {"age": "23"}
    rule = {"age": "size:23"}
    result = Validator(request, rule).validate()
    assert not result
    request = {"age": "23"}
    rule = {"age": "size:2"}
    result = Validator(request, rule).validate()
    assert result
    request = {"age": "123456789"}
    rule = {"age": "size:9"}
    result = Validator(request, rule).validate()
    assert result
    request = {"age": "123456789"}
    rule = {"age": "integer|size:123456789"}
    result = Validator(request, rule).validate()
    assert result
def test_rpv_005_simple():
    """Size on a list checks its length; an integer rule on a list
    fails the pipe."""
    request = {"args": [1, 2, 3]}
    rule = {"args": "size:3"}
    result = Validator(request, rule).validate()
    assert result
    request = {"args": [1, 2, 3]}
    rule = {"args": "list|size:3"}
    result = Validator(request, rule).validate()
    assert result
    request = {"args": [1, 2, 3]}
    rule = {"args": "integer|size:23"}
    result = Validator(request, rule).validate()
    assert not result
|
nilq/baby-python
|
python
|
import os
import random
class Hangman():
    """Console hangman game; constructing an instance runs the whole
    game loop until the word is solved or the lives run out."""
    def __init__(self):
        self.word = self.pick_random_word()
        self.word = self.word.upper()
        # One "-" placeholder per letter of the secret word.
        self.hidden_word = ["-" for character in self.word]
        # Letters still to uncover; reaching 0 means the player won.
        self.word_length = len(self.word)
        self.used_letters = []
        self.running = True
        self.lives = 10
        self.finished = False
        while self.running == True:
            self.handle_game()
    def pick_random_word(self):
        """Return a random word from the word-list file (one per line)."""
        # Replace with the correct path
        filename = ".../Hangman/GermanWords/Words.txt"
        with open(filename, encoding="utf8") as file:
            content = file.readlines()
        content = [x.strip() for x in content]
        return (content[random.randrange(0, len(content))])
    def get_input(self):
        """Prompt for a guess and return it upper-cased."""
        self.guess = input("Take a guess: \t")
        self.guess = self.guess.upper()
        return str(self.guess)
    def check_if_in_word(self, input):
        """Reveal every not-yet-revealed occurrence of `input` and
        return how many were revealed.

        NOTE(review): the parameter shadows the builtin input() here.
        """
        same = 0
        for i in range(len(self.word)):
            if self.word[i] == input and self.hidden_word[i] != input:
                self.update_output(i, input)
                same += 1
        return(same)
    def judge_answer(self, input):
        """Return True when the guess is invalid: empty, multi-character,
        or already used."""
        if len(input) == 0 or len(input) > 1:
            return True
        for character in self.used_letters:
            if character == input:
                return True
        return False
    def add_to_used(self, input):
        """Record a valid, previously unused guess."""
        if not self.judge_answer(input):
            self.used_letters.append(input)
    def draw_word(self, won):
        """Clear the screen and redraw game state; `won` is unused
        (self.finished controls the final reveal)."""
        os.system("cls")  # Windows-only clear ("clear" on POSIX)
        output_word = " ".join(self.hidden_word)
        output_letters = " ".join(self.used_letters)
        print(output_word)
        print("\nUsed:", output_letters)
        print("\nLives", self.lives, "\n\n")
        if self.finished == True:
            print(self.word, "\n")
    def update_output(self, place, letter):
        """Reveal `letter` at index `place` of the hidden word."""
        self.hidden_word[place] = letter
    def handle_game(self):
        """One turn: draw, read a guess, update reveals and lives; end
        the game when the word is complete or the lives reach zero."""
        if self.word_length > 0 and self.lives > 0:
            self.draw_word(self.finished)
            input = self.get_input()
            same_characters = self.check_if_in_word(input)
            self.word_length -= same_characters
            # Lose a life only on a wrong, valid, first-time guess.
            if same_characters <= 0 and not self.judge_answer(input):
                self.lives -=1
            self.add_to_used(input)
        else:
            self.finished = True
            self.draw_word(self.finished)
            self.running = False
# Launch the game immediately when the module is run.
Hangman()
|
nilq/baby-python
|
python
|
def test_add_pet(client, jwt):
    """POST /pets with a bearer token creates a pet and echoes its fields."""
    r = client.post(
        "/pets",
        json=dict(
            pet_type="cat",
            name="tospik",
            breed="persian",
            owner="emreisikligil"
        ),
        headers=dict(Authorization=f"Bearer {jwt}")
    )
    assert r.status_code == 201
    body = r.json
    assert body["id"]
    assert body["name"] == "tospik"
    assert body["breed"] == "persian"
    assert body["owner"] == "emreisikligil"
def test_get_pets(client, jwt):
    """GET /pets returns the pet created by the previous test.

    NOTE(review): depends on test_add_pet having run first in the same
    session — test ordering is load-bearing here.
    """
    r = client.get(
        "/pets",
        headers=dict(Authorization=f"Bearer {jwt}")
    )
    assert r.status_code == 200
    body = r.json
    assert len(body) == 1
    assert body[0]["id"]
    assert body[0]["name"] == "tospik"
    assert body[0]["breed"] == "persian"
    assert body[0]["owner"] == "emreisikligil"
|
nilq/baby-python
|
python
|
import asyncio
import youtube_dl
import urllib.request
import datetime
from bot_client import *
# Module-level playback state shared with the bot commands.
# NOTE(review): `global` at module scope is a no-op; the statements are
# kept only as documentation of intent.
global queue
queue = []         # pending tracks as [url, title, duration] lists
global nowPlaying
nowPlaying = []    # info about the currently playing track
global ytdl_opts
ytdl_opts = {
    'format': 'bestaudio/best',
    #'ignoreerrors': True,
    #'no_warnings': True,
    #'debug_printtraffic': True,
    'cookiefile': 'youtube.com_cookies.txt',
    'cachedir': False,
    #'quiet': True,
    #'verbose': True,
}
global ytdl
def ytdl_init():
    """(Re)create the shared YoutubeDL instance from ytdl_opts."""
    global ytdl
    ytdl = youtube_dl.YoutubeDL(ytdl_opts)
# Passed to discord.FFmpegPCMAudio for every source created below.
ffmpeg_options = { # these options fix a common disconnection bug
    'before_options': '-reconnect 1 -reconnect_streamed 1 -reconnect_delay_max 5',
    'options': '-vn -sn'
}
class ytdl_source(discord.PCMVolumeTransformer):
    """Volume-adjustable FFmpeg audio source built from youtube-dl data."""
    def __init__(self, source, *, data, volume=0.5):
        super().__init__(source, volume)
        self.data = data
        self.title = data.get('title')
        self.url = data.get('url')
    @classmethod
    async def from_url(cls, url, *, loop=None, stream=False):
        """Resolve `url` via prepare_data and wrap it in an FFmpeg source.

        On any failure the error is handled by skipping to the next
        queued item (play_next()) and None is returned.
        """
        try:
            loop = loop or asyncio.get_event_loop()
            data = await prepare_data(url, loop, stream)
            if data is None:
                play_next()
                return
            # When streaming, play the direct media URL; otherwise play
            # the file youtube-dl downloaded.
            filename = data['url'] if stream else ytdl.prepare_filename(data)
            return cls(discord.FFmpegPCMAudio(filename, **ffmpeg_options), data=data)
        except Exception as e:
            print('Exception in from_url:', e)
            play_next()
async def prepare_data(url, loop, stream):
    """Extract media info for `url` with youtube-dl, verifying that the
    resolved direct URL is reachable.

    On an HTTP 403 / "No video formats found" failure, retries once
    with a cookie-free YoutubeDL instance. Returns the info dict, or
    None when extraction ultimately fails.
    """
    data = None
    while True:
        try:
            loop = loop or asyncio.get_event_loop()
            data = await loop.run_in_executor(None, lambda: ytdl.extract_info(url, download=not stream))
            if 'entries' in data:
                # take first item from a playlist
                data = data['entries'][0]
            # print(data['url']) # the generated direct url
            # Probe the direct URL so a dead link fails here, not in FFmpeg.
            my_data = urllib.request.urlopen(data['url'])
            response = my_data.getcode()
        except Exception as e:
            if ('HTTP Error 403' in str(e)) or ('ERROR: No video formats found;' in str(e)):
                print('generic error')
                await asyncio.sleep(1)
                try: # retry once without the cookie file
                    ytdl_opts2 = {
                        'format': 'bestaudio/best',
                        # 'ignoreerrors': True,
                        #'no_warnings': True,
                        'debug_printtraffic': True,
                        # 'nocheckcertificate': True,
                        'cachedir': False,
                        # 'quiet': True,
                        'verbose': True
                    }
                    ytdl2 = youtube_dl.YoutubeDL(ytdl_opts2)
                    data = await loop.run_in_executor(None, lambda: ytdl2.extract_info(url, download=not stream))
                    if 'entries' in data:
                        # take first item from a playlist
                        data = data['entries'][0]
                    my_data = urllib.request.urlopen(data['url'])
                    response = my_data.getcode()
                except Exception as e2:
                    print('printing e2 exception: ' + str(e2))
                    return
                else:
                    break
            else:
                print('Printing error: ' + str(e))
                return
        else:
            break
    return data
async def add_playlist(url : str, message):
    """Flat-extract a YouTube playlist and append every entry to the
    global queue as [url, title, duration], then kick off playback."""
    ydl_opts = {
        'format': 'bestaudio/best',
        # 'ignoreerrors': True,
        'no_warnings': True,
        'cookiefile': 'youtube.com_cookies.txt',
        #'nocheckcertificate': True,
        'extract_flat': 'in_playlist',
        #'skip_download': True,
        'cachedir': False,
        'quiet': True
    }
    info = None
    with youtube_dl.YoutubeDL(ydl_opts) as ydl:
        try:
            info = ydl.extract_info(url, download=False)
        except Exception:
            print('error on first info get')
    if info is None:
        # Bug fix: the original called asyncio.sleep(0.5) without await,
        # so the coroutine never ran and no delay happened.
        await asyncio.sleep(0.5)
        with youtube_dl.YoutubeDL(ydl_opts) as ydl:
            try:
                info = ydl.extract_info(url, download=False)
            except Exception:
                print('error on second info get')
    if info is None:
        # Both attempts failed; report instead of crashing on info['entries'].
        await message.channel.send('could not read playlist info, please try again')
        return
    global queue
    videos = len(info['entries'])
    for i in range(videos):
        duration = 'unknown'
        if info['entries'][i]['duration'] is not None:
            seconds = int(info['entries'][i]['duration'])
            duration = str(datetime.timedelta(seconds=seconds))
        tempList = ["http://www.youtube.com/watch?v="+info['entries'][i]['url'], info['entries'][i]['title'], duration]
        queue.append(tempList)
    await message.channel.send('successfully added '+str(videos)+' videos to queue')
    play_next()
    return
async def add_video(url : str, message):
    """Resolve a single YouTube video URL and append it to the queue.

    Parameters
    ----------
    url : str
        Video URL to resolve.
    message : discord.Message
        The command message; used to reply in the originating channel.
    """
    ydl_opts = {
        'format': 'bestaudio/best',
        'no_warnings': True,
        'cookiefile': 'youtube.com_cookies.txt',
        'cachedir': False,
        'quiet': True
    }
    info = None
    # CONSISTENCY: wrap extraction in try/except like add_playlist does, so a
    # transient extractor failure triggers the retry instead of crashing.
    with youtube_dl.YoutubeDL(ydl_opts) as ydl:
        try:
            info = ydl.extract_info(url, download=False)
        except Exception:
            print('error on first info get')
    if info is None:  # if there's a lot of latency, try again after .5 secs
        # BUG FIX: the original called asyncio.sleep(0.5) without `await`,
        # which does not sleep at all.
        await asyncio.sleep(0.5)
        with youtube_dl.YoutubeDL(ydl_opts) as ydl:
            try:
                info = ydl.extract_info(url, download=False)
            except Exception:
                print('error on second info get')
    if info is None:
        # ROBUSTNESS: avoid TypeError on info['duration'] when both attempts fail.
        await message.channel.send('could not load video information')
        return
    seconds = int(info['duration'])
    duration = str(datetime.timedelta(seconds=seconds))
    temp_list = [url, info['title'], duration]
    global queue
    queue.append(temp_list)
    await message.channel.send("successfully added video to queue")
    play_next()
    return
async def search_and_paste_link(item : str, message):
    """Search YouTube for *item* and post the first result's URL in chat.

    Unlike search_video, nothing is queued -- the link is only pasted into
    the channel the command came from.
    """
    query = "ytsearch:%s" % item
    options = {
        'format': 'bestaudio/best',
        'no_warnings': True,
        'cookiefile': 'youtube.com_cookies.txt',
        'extract_flat': 'in_playlist',
        'cachedir': False,
        'quiet': True,
        'noplaylist': True
    }
    with youtube_dl.YoutubeDL(options) as ydl:
        result = ydl.extract_info(query, download=False)
    if result is None:
        # under heavy latency the first lookup can come back empty; retry once
        await asyncio.sleep(0.3)
        with youtube_dl.YoutubeDL(options) as ydl:
            result = ydl.extract_info(query, download=False)
    print(result)
    await asyncio.sleep(0.3)
    link = str("http://www.youtube.com/watch?v=") + str(result['entries'][0]['url'])
    await message.channel.send(link)
async def search_video(item : str, message):
    """Search YouTube for *item* and queue the first hit, if any.

    Appends a ``[url, title, duration]`` triple to the global queue, reports
    the outcome in the channel and kicks off playback.
    """
    query = "ytsearch:%s" % item
    options = {
        'format': 'bestaudio/best',
        'no_warnings': True,
        'cookiefile': 'youtube.com_cookies.txt',
        'extract_flat': 'in_playlist',
        'cachedir': False,
        'quiet': True,
        'noplaylist': True
    }
    with youtube_dl.YoutubeDL(options) as ydl:
        result = ydl.extract_info(query, download=False)
    if result is None:
        # retry once after a short pause in case of heavy latency
        await asyncio.sleep(0.3)
        with youtube_dl.YoutubeDL(options) as ydl:
            result = ydl.extract_info(query, download=False)
    print(result)
    await asyncio.sleep(0.3)
    if len(result['entries']) == 0:
        await message.channel.send("couldn't find a suitable result")
        return
    entry = result['entries'][0]
    seconds = 0
    if entry['duration'] is not None:
        seconds = int(entry['duration'])
    # a zero-length duration means the extractor did not report one
    duration = "unknown" if seconds == 0 else str(datetime.timedelta(seconds=seconds))
    global queue
    queue.append(["http://www.youtube.com/watch?v=" + entry['url'], entry['title'], duration])
    await message.channel.send("successfully added video to queue")
    play_next()
    return
def play_next():
    # Pop the next queued song and start it on the bot's voice client.
    # Called both directly after enqueueing and as the `after=` callback of
    # discord's voice player (a plain thread), hence run_coroutine_threadsafe
    # below instead of a direct await.
    voice = discord.utils.get(client.voice_clients)
    global queue
    try:
        if not voice.is_playing() and len(queue) > 0:
            song = queue[0]
            queue.pop(0)
            global nowPlaying
            # nowPlaying is a shared [url, title, duration] list; mutate it
            # in place so other readers keep the same object.
            nowPlaying.clear()
            nowPlaying.extend(song)
            asyncio.run_coroutine_threadsafe(play_this_url(nowPlaying[0]), loop=client.loop)
    except AttributeError:
        # voice is None (bot not connected) -> voice.is_playing() raises.
        # NOTE(review): this retries immediately and recursively; if the bot
        # never connects and the queue stays non-empty the recursion is
        # unbounded -- confirm this is intended.
        if len(queue) > 0:
            print('trying again here')
            play_next()
    return
async def play_this_url(url : str):
    """Create an audio source for *url* and hand it to the voice client.

    On failure the error is printed and playback advances to the next
    queued song instead of stalling.
    """
    voice = discord.utils.get(client.voice_clients)
    try:
        source = await ytdl_source.from_url(url, loop=client.loop, stream=True)
        if source is None:
            return
        # chain playback: when this track ends, pull the next queued one
        voice.play(source, after=lambda er: play_next())
    except Exception as exc:
        print('Exception in play_this_url:', exc)
        play_next()
    return
|
nilq/baby-python
|
python
|
# --- import --------------------------------------------------------------------------------------
import os
import numpy as np
import WrightTools as wt
from . import _pulse
from ._scan import Scan
# --- define --------------------------------------------------------------------------------------
here = os.path.abspath(os.path.dirname(__file__))
# integration defaults
timestep = 4.0
early_buffer = 100.0
late_buffer = 400.0
# --- class ---------------------------------------------------------------------------------------
class Experiment:
    """Description of a WrightSim experiment: axes, pulses, integration settings."""

    def __init__(self, axes, name, pm, pulse_class):
        self.axes = axes
        for axis in self.axes:
            # expose each axis as an attribute, e.g. ``experiment.d2``
            setattr(self, axis.name, axis)
        self.name = name
        self.pm = pm
        self.npulses = len(pm)
        # integration settings start from the module-level defaults and may
        # be overridden per instance
        self.timestep = timestep
        self.early_buffer = early_buffer
        self.late_buffer = late_buffer
        # one pulse object per phase-matching entry
        self.pulse_class = pulse_class
        self.pulses = [self.pulse_class() for _ in self.pm]

    def __repr__(self):
        return "<WrightSim.Experiment object '{}' at {}>".format(self.name, id(self))

    @property
    def active_axes(self):
        """Axes currently marked active."""
        return [axis for axis in self.axes if axis.active]

    @property
    def axis_names(self):
        """Names of all axes, in definition order."""
        return [axis.name for axis in self.axes]

    def run(self, hamiltonian, mp=True):
        """Run the experiment.

        Parameters
        ----------
        hamiltonian : WrightSim Hamiltonian
            Hamiltonian.
        mp : boolean (optional)
            Toggle CPU multiprocessing. Default is True.

        Returns
        -------
        WrightSim Scan
            Scan that was run.
        """
        scan = Scan(self, hamiltonian)
        scan.run(mp=mp)
        return scan

    def set_axis(self, axis_name, points):
        """Activate one experimental axis and define its scan points.

        Parameters
        ----------
        axis_name : string
            Name of axis.
        points : 1D array-like
            Points (in native units) to scan over.
        """
        # TODO: is there a way to prevent incompatible axes being simultaniously activated?
        target = self.axes[self.axis_names.index(axis_name)]
        target.points = points
        target.active = True
|
nilq/baby-python
|
python
|
import torch
from torch import nn
from torch.nn import functional as F
from typing import List
from resnet_layer import ResidualLayer
class ConvDecoder(nn.Module):
    """Convolutional decoder mapping a latent code map back to an image.

    Architecture: a 1x1 conv into the deepest hidden width, six residual
    layers, transposed-conv upsampling stages, and a final transposed conv
    to ``in_channels`` squashed with tanh.

    Args:
        in_channels: number of channels of the reconstructed image.
        embedding_dim: channel dimension of the latent input.
        hidden_dims: widths of the hidden stages; defaults to [128, 256].
        img_size: nominal output image size (kept for API compatibility).
        activation: activation-layer class used between convolutions.
    """

    def __init__(self,
                 in_channels: int,
                 embedding_dim: int,
                 hidden_dims: List = None,
                 img_size: int = 32,
                 activation=nn.LeakyReLU,
                 **kwargs) -> None:
        super().__init__()
        # BUG FIX: the original signature used a mutable default ([128, 256])
        # that was then mutated in place by hidden_dims.reverse(), so every
        # second default-constructed decoder was built with flipped widths.
        # Copy caller-supplied lists for the same reason.
        hidden_dims = [128, 256] if hidden_dims is None else list(hidden_dims)
        # Build Decoder
        modules = []
        # project the embedding into the deepest hidden width
        modules.append(
            nn.Sequential(
                nn.Conv2d(embedding_dim,
                          hidden_dims[-1],
                          kernel_size=1,
                          stride=1,
                          padding=0),
                activation())
        )
        # residual trunk at constant width
        for _ in range(6):
            modules.append(ResidualLayer(hidden_dims[-1], hidden_dims[-1]))
        modules.append(activation())
        # upsampling stages, deepest width first
        hidden_dims.reverse()
        for i in range(len(hidden_dims) - 1):
            # first stage uses kernel 3 / output_padding 1, later ones kernel 4
            kernel_size = 3 if i == 0 else 4
            output_padding = 1 if i == 0 else 0
            modules.append(
                nn.Sequential(
                    nn.ConvTranspose2d(hidden_dims[i],
                                       hidden_dims[i + 1],
                                       kernel_size=kernel_size,
                                       stride=2,
                                       padding=1,
                                       output_padding=output_padding),
                    activation())
            )
        # final upsample to image space, squashed to [-1, 1]
        modules.append(
            nn.Sequential(
                nn.ConvTranspose2d(hidden_dims[-1],
                                   out_channels=in_channels,
                                   kernel_size=4,
                                   stride=2,
                                   padding=1,
                                   output_padding=0),
                nn.Tanh()))
        self.decoder = nn.Sequential(*modules)

    def forward(self, x):
        """Decode latent map ``x`` to an image tensor in [-1, 1]."""
        return self.decoder(x)
|
nilq/baby-python
|
python
|
from dassl.engine import TRAINER_REGISTRY
from dassl.engine.trainer import TrainerMultiAdaptation
from dassl.data import DataManager
from dassl.utils import MetricMeter
from torch.utils.data import Dataset as TorchDataset
from dassl.optim import build_optimizer, build_lr_scheduler
from dassl.utils import count_num_param
import torch
import torch.nn as nn
from torch.nn import functional as F
from dassl.engine.trainer import SimpleNet
import numpy as np
from dassl.modeling import build_layer
from dassl.modeling.ops import ReverseGrad
import torchmetrics
from dassl.utils.kernel import GaussianKernel
from typing import Optional, Sequence
class MultipleKernelMaximumMeanDiscrepancy(nn.Module):
    r"""The Multiple Kernel Maximum Mean Discrepancy (MK-MMD) used in
    `Learning Transferable Features with Deep Adaptation Networks (ICML 2015) <https://arxiv.org/pdf/1502.02791>`_

    Given source domain :math:`\mathcal{D}_s` of :math:`n_s` labeled points and target domain :math:`\mathcal{D}_t`
    of :math:`n_t` unlabeled points drawn i.i.d. from P and Q respectively, the deep networks will generate
    activations as :math:`\{z_i^s\}_{i=1}^{n_s}` and :math:`\{z_i^t\}_{i=1}^{n_t}`.
    The MK-MMD :math:`D_k (P, Q)` between probability distributions P and Q is defined as

    .. math::
        D_k(P, Q) \triangleq \| E_p [\phi(z^s)] - E_q [\phi(z^t)] \|^2_{\mathcal{H}_k},

    :math:`k` is a kernel function in the function space

    .. math::
        \mathcal{K} \triangleq \{ k=\sum_{u=1}^{m}\beta_{u} k_{u} \}

    where :math:`k_{u}` is a single kernel.

    Using kernel trick, MK-MMD can be computed as

    .. math::
        \hat{D}_k(P, Q) &=
        \dfrac{1}{n_s^2} \sum_{i=1}^{n_s}\sum_{j=1}^{n_s} k(z_i^{s}, z_j^{s})\\
        &+ \dfrac{1}{n_t^2} \sum_{i=1}^{n_t}\sum_{j=1}^{n_t} k(z_i^{t}, z_j^{t})\\
        &- \dfrac{2}{n_s n_t} \sum_{i=1}^{n_s}\sum_{j=1}^{n_t} k(z_i^{s}, z_j^{t}).\\

    Args:
        kernels (tuple(torch.nn.Module)): kernel functions.
        linear (bool): whether use the linear version of DAN. Default: False

    Inputs:
        - z_s (tensor): activations from the source domain, :math:`z^s`
        - z_t (tensor): activations from the target domain, :math:`z^t`

    Shape:
        - Inputs: :math:`(minibatch, *)` where * means any dimension
        - Outputs: scalar

    .. note::
        Activations :math:`z^{s}` and :math:`z^{t}` must have the same shape.

    .. note::
        The kernel values will add up when there are multiple kernels.

    Examples::
        # >>> from dalib.modules.kernels import GaussianKernel
        # >>> feature_dim = 1024
        # >>> batch_size = 10
        # >>> kernels = (GaussianKernel(alpha=0.5), GaussianKernel(alpha=1.), GaussianKernel(alpha=2.))
        # >>> loss = MultipleKernelMaximumMeanDiscrepancy(kernels)
        # >>> # features from source domain and target domain
        # >>> z_s, z_t = torch.randn(batch_size, feature_dim), torch.randn(batch_size, feature_dim)
        # >>> output = loss(z_s, z_t)
    """
    def __init__(self, kernels: Sequence[nn.Module], linear: Optional[bool] = False):
        super(MultipleKernelMaximumMeanDiscrepancy, self).__init__()
        self.kernels = kernels
        # weighting matrix is built lazily on first forward() and reused
        # across calls as long as the batch size stays the same
        self.index_matrix = None
        self.linear = linear

    def forward(self, z_s: torch.Tensor, z_t: torch.Tensor) -> torch.Tensor:
        # stack both domains so a single kernel evaluation covers all pairs
        features = torch.cat([z_s, z_t], dim=0)
        batch_size = int(z_s.size(0))
        # assumes z_s and z_t share the same batch size -- the index matrix
        # is sized 2*batch_size from z_s alone
        self.index_matrix = _update_index_matrix(batch_size, self.index_matrix, self.linear).to(z_s.device)
        # print("index matrix : ",self.index_matrix)
        kernel_matrix = sum([kernel(features) for kernel in self.kernels]) # Add up the matrix of each kernel
        # Add 2 / (n-1) to make up for the value on the diagonal
        # to ensure loss is positive in the non-linear version
        # print("kernel matrix : ",kernel_matrix)
        l = (kernel_matrix * self.index_matrix).sum()
        # print("l : ",l)
        loss = (l + 2. / float(batch_size - 1))
        return loss
def _update_index_matrix(batch_size: int, index_matrix: Optional[torch.Tensor] = None,
linear: Optional[bool] = True) -> torch.Tensor:
r"""
Update the `index_matrix` which convert `kernel_matrix` to loss.
If `index_matrix` is a tensor with shape (2 x batch_size, 2 x batch_size), then return `index_matrix`.
Else return a new tensor with shape (2 x batch_size, 2 x batch_size).
"""
if index_matrix is None or index_matrix.size(0) != batch_size * 2:
index_matrix = torch.zeros(2 * batch_size, 2 * batch_size)
if linear:
for i in range(batch_size):
s1, s2 = i, (i + 1) % batch_size
t1, t2 = s1 + batch_size, s2 + batch_size
index_matrix[s1, s2] = 1. / float(batch_size)
index_matrix[t1, t2] = 1. / float(batch_size)
index_matrix[s1, t2] = -1. / float(batch_size)
index_matrix[s2, t1] = -1. / float(batch_size)
else:
for i in range(batch_size):
for j in range(batch_size):
if i != j:
index_matrix[i][j] = 1. / float(batch_size * (batch_size - 1))
index_matrix[i + batch_size][j + batch_size] = 1. / float(batch_size * (batch_size - 1))
for i in range(batch_size):
for j in range(batch_size):
index_matrix[i][j + batch_size] = -1. / float(batch_size * batch_size)
index_matrix[i + batch_size][j] = -1. / float(batch_size * batch_size)
return index_matrix
@TRAINER_REGISTRY.register()
class MultiDatasetDan(TrainerMultiAdaptation):
    """Multi-source adaptation trainer with DAN-style MK-MMD feature alignment.

    Trains a shared feature extractor plus one classifier per source domain
    and one target classifier; labeled-target features are aligned with
    unlabeled-target features through a multiple-kernel MMD penalty weighted
    by ``trade_off``.
    """
    def __init__(self, cfg,require_parameter=None):
        super().__init__(cfg,require_parameter)
        self.bce = nn.BCEWithLogitsLoss()
        self.max_epoch = self.cfg.OPTIM.MAX_EPOCH
        # weight of the MK-MMD transfer term in the total training loss
        self.trade_off = cfg.LIGHTNING_MODEL.TRAINER.DAN.trade_off
        print("trade off ratio : ", self.trade_off)
        alpha = cfg.LIGHTNING_MODEL.TRAINER.DAN.GaussianKernel.alpha
        sigma = cfg.LIGHTNING_MODEL.TRAINER.DAN.GaussianKernel.sigma
        track_running_stats = cfg.LIGHTNING_MODEL.TRAINER.DAN.GaussianKernel.track_running_stats
        linear = cfg.LIGHTNING_MODEL.TRAINER.DAN.linear
        # kernels are parameterized either by alpha (when no sigma values are
        # configured) or by explicit sigma values
        if len(sigma) == 0:
            # sigma = None
            # define loss function
            print("alpha range : ", alpha)
            self.mkmmd_loss = MultipleKernelMaximumMeanDiscrepancy(
                kernels=[GaussianKernel(alpha=k, track_running_stats=track_running_stats) for k in alpha],
                linear=linear
            )
        else:
            print("sigma range : ", sigma)
            self.mkmmd_loss = MultipleKernelMaximumMeanDiscrepancy(
                kernels=[GaussianKernel(sigma=s, track_running_stats=track_running_stats) for s in sigma],
                linear=linear
            )

    def build_model(self):
        """Build the shared feature extractor and all classifier heads."""
        cfg = self.cfg
        print("Params : ", cfg.LIGHTNING_MODEL.COMPONENTS.BACKBONE)
        print('Building F')
        print('Building CommonFeature')
        backbone_info = cfg.LIGHTNING_MODEL.COMPONENTS.BACKBONE
        FC_info = cfg.LIGHTNING_MODEL.COMPONENTS.LAST_FC
        self.CommonFeature = SimpleNet(backbone_info, FC_info, 0, **cfg.LIGHTNING_MODEL.COMPONENTS.BACKBONE.PARAMS)
        freeze_common_feature = cfg.LIGHTNING_MODEL.COMPONENTS.BACKBONE.FREEZE if cfg.LIGHTNING_MODEL.COMPONENTS.BACKBONE.FREEZE else False
        if freeze_common_feature:
            # optionally freeze the backbone so only the heads train
            for parameter in self.CommonFeature.parameters():
                parameter.requires_grad = False
            print("freeze feature extractor : ",)
        self.fdim = self.CommonFeature.fdim
        print('Building Target Classifier')
        self.TargetClassifier = self.create_classifier(self.fdim, self.num_classes, FC_info=FC_info)
        print('Building SourceClassifiers')
        print("source domains label size : ", self.source_domains_label_size)
        # one classifier head per source domain, each sized to that domain's
        # own label space
        source_classifier_list = []
        for num_class in self.source_domains_label_size:
            source_classifier = self.create_classifier(self.fdim, num_class, FC_info=FC_info)
            source_classifier_list.append(source_classifier)
        self.SourceClassifiers = nn.ModuleList(
            source_classifier_list
        )

    def forward(self, input, return_feature=False):
        """Run inference through the target head; returns class probabilities
        (and logits when ``return_feature`` is True)."""
        f_target = self.CommonFeature(input)
        logits_target = self.TargetClassifier(f_target)
        probs = F.softmax(logits_target, dim=1)
        if return_feature:
            return probs, logits_target
        return probs

    def configure_optimizers(self):
        """Build one optimizer + scheduler over all trainable components."""
        params = list(self.CommonFeature.parameters()) + \
                 list(self.TargetClassifier.parameters()) + \
                 list(self.SourceClassifiers.parameters())
        opt_cfg = self.cfg.OPTIM
        opt = build_optimizer(params,opt_cfg)
        scheduler = build_lr_scheduler(optimizer=opt,optim_cfg=opt_cfg)
        optimizers = [opt]
        lr_schedulers=[scheduler]
        return optimizers, lr_schedulers

    def share_step(self,batch,train_mode = True,weight=None):
        """Common forward/loss computation used by train/val/test steps."""
        input, label, domain = self.parse_target_batch(batch)
        f_target = self.CommonFeature(input)
        logits_target = self.TargetClassifier(f_target)
        loss_target = self.loss_function(logits_target, label, train=train_mode,weight=weight)
        return loss_target, logits_target,f_target, label

    def parse_batch_train(self, batch):
        """Split the combined loader batch into target / unlabeled / source parts."""
        target_batch = batch["target_loader"]
        unlabel_batch = batch["unlabel_loader"]
        list_source_batches = batch["source_loader"]
        return target_batch,unlabel_batch,list_source_batches

    def on_train_epoch_start(self) -> None:
        # during the pretraining phase different source/target loss ratios apply
        if self.source_pretrain_epochs > self.current_epoch:
            self.target_ratio = self.cfg.LIGHTNING_MODEL.TRAINER.EXTRA.PRETRAIN_TARGET_LOSS_RATIO
            self.source_ratio = self.cfg.LIGHTNING_MODEL.TRAINER.EXTRA.PRETRAIN_SOURCE_LOSS_RATIO
        else:
            self.target_ratio = self.cfg.LIGHTNING_MODEL.TRAINER.EXTRA.TARGET_LOSS_RATIO
            self.source_ratio = self.cfg.LIGHTNING_MODEL.TRAINER.EXTRA.SOURCE_LOSS_RATIO

    def training_step(self, batch, batch_idx):
        """One optimization step: weighted source + target classification loss
        plus the MK-MMD transfer loss between target and unlabeled features."""
        target_batch, unlabel_batch ,list_source_batches = self.parse_batch_train(batch)
        list_input_u, list_label_u, domain_u = self.parse_source_batches(list_source_batches)
        # average per-domain classification loss over the source datasets
        loss_source = 0
        for u, y, d in zip(list_input_u, list_label_u, domain_u):
            # print("check range for source data : {} - {}".format(u.max(),u.min()))
            f = self.CommonFeature(u)
            logits = self.SourceClassifiers[d](f)
            domain_weight = self.source_domains_class_weight[d]
            loss_source += self.loss_function(logits, y, train=True, weight=domain_weight)
        loss_source /= len(domain_u)
        loss_target, logit_target, f_target,label = self.share_step(target_batch, train_mode=True,
                                                                    weight=self.class_weight)
        y_pred = F.softmax(logit_target, dim=1)
        y = label
        acc = self.train_acc(y_pred, y)
        total_loss = self.source_ratio*loss_source+self.target_ratio*loss_target
        # align labeled-target and unlabeled-target feature distributions
        f_unlabel = self.CommonFeature(unlabel_batch)
        transfer_loss = self.mkmmd_loss(f_target, f_unlabel)
        total_loss = total_loss + self.trade_off * transfer_loss
        self.log('Train_acc', acc, on_step=False, on_epoch=True, prog_bar=True, logger=True)
        self.log('Train_loss', total_loss, on_step=False, on_epoch=True, prog_bar=True, logger=True)
        self.log('Train_source_loss', loss_source, on_step=False, on_epoch=True, prog_bar=True, logger=True)
        self.log('Train_target_loss', loss_target, on_step=False, on_epoch=True, prog_bar=True, logger=True)
        self.log('transfer_loss', transfer_loss, on_step=False, on_epoch=True, prog_bar=True, logger=True)
        return {'loss': total_loss}

    def validation_step(self, batch, batch_idx, dataset_idx: Optional[int] = None):
        # dataloader 0 is the validation split; any other index is logged as test
        loss, logit, _,y = self.share_step(batch,train_mode=False)
        y_pred = F.softmax(logit, dim=1)
        if dataset_idx == 0 :
            acc = self.valid_acc(y_pred, y)
            log = {
                "val_loss": loss*self.non_save_ratio,
                "val_acc": acc,
            }
            self.log_dict(log, on_step=False, on_epoch=True, prog_bar=True, logger=True,add_dataloader_idx=False)
        else:
            acc = self.test_acc(y_pred, y)
            log = {
                "test_loss": loss,
                "test_acc": acc
            }
            self.log_dict(log, on_step=False, on_epoch=True, prog_bar=False, logger=True,add_dataloader_idx=False)
        return {'loss': loss}

    def test_step(self, batch, batch_idx, dataset_idx: Optional[int] = None):
        """Return loss plus predictions so the caller can aggregate metrics."""
        loss, logit, _,y = self.share_step(batch,train_mode=False)
        y_pred = F.softmax(logit,dim=1)
        return {'loss': loss,'y_pred':y_pred,'y':y}
|
nilq/baby-python
|
python
|
'''
A data model focused on material objects.
'''
import synapse.lib.module as s_module
class MatModule(s_module.CoreModule):
    """Synapse CoreModule providing the ``mat`` (material object) data model."""

    def getModelDefs(self):
        # Returns ((name, model), ...) tuples consumed by the synapse core.
        # ``types`` declares the value types; ``forms`` declares the node
        # forms and their secondary properties.
        modl = {
            'types': (
                ('mat:item', ('guid', {}), {'doc': 'A GUID assigned to a material object.'}),
                ('mat:spec', ('guid', {}), {'doc': 'A GUID assigned to a material specification.'}),
                # composite types linking a spec/item to an image file
                ('mat:specimage', ('comp', {'fields': (('spec', 'mat:spec'), ('file', 'file:bytes'))}), {}),
                ('mat:itemimage', ('comp', {'fields': (('item', 'mat:item'), ('file', 'file:bytes'))}), {}),
                # TODO add base types for mass / volume
            ),
            'forms': (
                ('mat:item', {}, (
                    ('name', ('str', {'lower': True}), {'doc': 'The human readable name of the material item.'}),
                    ('spec', ('mat:spec', {}), {
                        'doc': 'The mat:spec of which this item is an instance.',
                    }),
                    ('place', ('geo:place', {}), {'doc': 'The most recent place the item is known to reside.'}),
                    ('latlong', ('geo:latlong', {}), {'doc': 'The last known lat/long location of the node.'}),
                    ('loc', ('loc', {}), {
                        'doc': 'The geo-political location string for the node.',
                    }),
                    # TODO add baseline things like dimensions / mass / etc?
                )),
                ('mat:spec', {}, (
                    ('name', ('str', {'lower': True}), {'doc': 'The human readable name of the material spec.'}),
                )),
                ('mat:itemimage', {}, (
                    ('item', ('mat:item', {}), {'doc': 'The item contained within the image file.'}),
                    ('file', ('file:bytes', {}), {'doc': 'The file containing an image of the item.'}),
                )),
                ('mat:specimage', {}, (
                    ('spec', ('mat:spec', {}), {'doc': 'The spec contained within the image file.'}),
                    ('file', ('file:bytes', {}), {'doc': 'The file containing an image of the spec.'}),
                )),
            ),
        }
        name = 'mat'
        return ((name, modl), )
|
nilq/baby-python
|
python
|
from .test_utils import *
print('#############################################')
print('# TESTING OF MainDeviceVars MODEL FUNCTIONS #')
print('#############################################')
@tag('maindevicevars')
class MainDeviceVarsModelTests(TestCase):
def setUp(self):
from utils.BBDD import getRegistersDBInstance
self.DB=getRegistersDBInstance()
self.DB.dropTable(table='MainVariables')
self.signal_was_called = False
self.signaltimestamp=None
self.signalTag=None
self.signalValue=None
def handler(sender, **kwargs):
self.signal_was_called = True
self.signaltimestamp=kwargs['timestamp']
self.signalTag=kwargs['Tags'][0]
self.signalValue=kwargs['Values'][0]
self.handler=handler
pass
def __init__(self,*args,**kwargs):
super().__init__(*args,**kwargs)
# INDIVIDUAL FUNCTIONS TESTING
def test_store2DB(self):
'''
storeDB: method provided to perform the foloowing steps:
- Validate the input data for the GPIO
- Saves the instance into the DB
- Introduces a first register into the registers DB with the current value reading it for Inputs, and forcing it in Outputs
'''
print('## TESTING THE OPERATION OF THE store2DB METHOD ##')
SignalVariableValueUpdated.connect(self.handler)
instance=MainDeviceVars(**MainDeviceVarDict)
now=timezone.now().replace(microsecond=0).replace(tzinfo=None)
instance.store2DB()
SignalVariableValueUpdated.disconnect(self.handler)
# checks values from the signal
self.assertAlmostEqual(self.signaltimestamp,timezone.now().replace(microsecond=0),delta=datetime.timedelta(seconds=1))# signal timestamp value is dated now
self.assertEqual(self.signalValue,MainDeviceVarDict['Value'])
self.assertEqual(self.signalTag,str(instance.pk))
# checks that store2DB creates the corresponding table in the registers DB and introduces a first record with the current value
self.assertEqual(instance.Value,MainDeviceVarDict['Value'])
self.assertTrue(self.DB.checkIfTableExist(table=instance.getRegistersDBTable()))
latest=instance.getLatestData(localized=False)
self.assertAlmostEqual(latest[instance.getRegistersDBTag()]['timestamp'],now,delta=datetime.timedelta(seconds=1))# latest value is dated now
self.assertEqual(latest[instance.getRegistersDBTag()]['value'],MainDeviceVarDict['Value'])# latest value is the same as in the dict
self.DB.dropTable(table=instance.getRegistersDBTable())
def test_updateValue(self):
'''
updateValue: method that handles the creation of registers DB rows. It has several alternative operational paths:
- The standard one is when the pre-initialized parameters are defaulted. In this situation, it checks if newValue is different from the previous one
and in case so, it introduces a row with the previous value, and a second one with the newValue. Both these rows are separated 1 second in the timestamps
to provide step-like plots.
- If a timestamp is provided, only one row is created with the passed timestamp if and only if newValue is different from the previous one.
- If force=True, it generates the row independently of the newValue.
Independently of the operational path followed, this method also sets up the value of the GPIO in case it is an output.
'''
print('## TESTING THE OPERATION OF THE updateValue METHOD ##')
instance=MainDeviceVars(**MainDeviceVarDict)
instance.save() # to avoid the creation of the DB tables and insertion of the first row that function store2DB does...
print(' -> Tested standard path')
now=timezone.now().replace(microsecond=0).replace(tzinfo=None)
SignalVariableValueUpdated.connect(self.handler)
instance.updateValue(newValue=22,timestamp=None,writeDB=True,force=False)
SignalVariableValueUpdated.disconnect(self.handler)
# checks values from the signal
self.assertAlmostEqual(self.signaltimestamp,timezone.now().replace(microsecond=0),delta=datetime.timedelta(seconds=1))# signal timestamp value is dated now
self.assertEqual(self.signalValue,instance.Value)
self.assertEqual(self.signalTag,str(instance.pk))
table=instance.getRegistersDBTable()
vars='"timestamp","'+instance.getRegistersDBTag()+'"'
sql='SELECT '+vars+' FROM "'+ table +'" ORDER BY timestamp DESC LIMIT 2'
rows=self.DB.executeTransaction(SQLstatement=sql)
self.assertEqual(rows[1][1],MainDeviceVarDict['Value'])# previous to latest value equals the previous Value
self.assertEqual(rows[0][1],22) # latest value equals the newValue
self.assertEqual(rows[0][0]-rows[1][0],datetime.timedelta(seconds=1))# checks that it inserts two rows with 1 second difference
self.assertAlmostEqual(rows[0][0],now,delta=datetime.timedelta(seconds=1))# checks that the latest value is dated now
print(' -> Tested update with timestamp')
now=timezone.now().replace(microsecond=0)+datetime.timedelta(seconds=10)
SignalVariableValueUpdated.connect(self.handler)
instance.updateValue(newValue=21,timestamp=now,writeDB=True,force=False)
SignalVariableValueUpdated.disconnect(self.handler)
# checks values from the signal
self.assertAlmostEqual(self.signaltimestamp,timezone.now()+datetime.timedelta(seconds=10),delta=datetime.timedelta(seconds=1))# signal timestamp value is dated now
self.assertEqual(self.signalValue,instance.Value)
self.assertEqual(self.signalTag,str(instance.pk))
latest=instance.getLatestData(localized=False)
self.assertEqual(latest[instance.getRegistersDBTag()]['timestamp'],now.replace(tzinfo=None))# latest value is dated now
self.assertEqual(latest[instance.getRegistersDBTag()]['value'],21)# latest value is dated now
self.DB.dropTable(table=instance.getRegistersDBTable())
def test_IntegrityError(self):
'''
This tests checks that in case of two semi-simultaneous MainVars queries to registers DB, no error occurs. In fact, the
DB driver handles it by updating the conflicting row.
'''
import time
print('## TESTING THE OPERATION OF THE registers DB Integrity Error handler METHOD ##')
instance=MainDeviceVars(**MainDeviceVarDict)
instance.store2DB()
newDict=editDict(keys=['Value','Label'], newValues=[15,'Test MainVar 2'], Dictionary=MainDeviceVarDict)
instance2=MainDeviceVars(**newDict)
time.sleep(1)
instance2.store2DB()
time.sleep(1)
now=timezone.now().replace(microsecond=0).replace(tzinfo=None)
newValue1=21
newValue2=16
instance.updateValue(newValue=newValue1,timestamp=now,writeDB=True,force=False)
instance2.updateValue(newValue=newValue2,timestamp=now,writeDB=True,force=False)
table=instance.getRegistersDBTable()
vars='"timestamp","'+instance.getRegistersDBTag()+'"'+ ',"'+instance2.getRegistersDBTag()+'"'
sql='SELECT '+vars+' FROM "'+ table +'" ORDER BY timestamp ASC'
rows=self.DB.executeTransaction(SQLstatement=sql)
# initialization
self.assertEqual(rows[0][1],MainDeviceVarDict['Value']) # initial value of instance
self.assertEqual(rows[0][2],None) # instance2 not yet created
self.assertEqual(rows[1][2],newDict['Value']) # initial value of instance2
# instances updateValue
self.assertEqual(rows[2][1],newValue1) # new value of instance
self.assertEqual(rows[2][2],newValue2) # initial value of instance2
# time span
for i in range(0,2):
self.assertEqual(rows[i+1][0]-rows[i][0],datetime.timedelta(seconds=1))# checks that it inserts two rows with 1 second difference
self.DB.dropTable(table=instance.getRegistersDBTable())
self.DB.dropTable(table=instance2.getRegistersDBTable())
def test_str(self):
print('## TESTING THE OPERATION OF THE str METHOD ##')
instance=MainDeviceVars(**MainDeviceVarDict)
instance.store2DB()
self.assertEqual(str(instance),instance.Label)
self.DB.dropTable(table=instance.getRegistersDBTable())
def test_getCharts(self):
'''
getCharts: method that retrieves the chart structured in a dictionary with the following keys:
- title : the table name
- cols : a list with the first element being a list of dictionaries describing data of each of the columns in the graph
. label : human readable label for the variable (a list of 8 elements in case of digital variables)
. name : the name of the variable
. type : the type (digital, analog, datetime) of the variable
. plottype : the type of plot desired for the variable
- rows : a list of the row values of the graph. Each row is a list with the first element being a unix timestamp and the following ones are the values of the variables.
- statistics: a dictionary with teh following keys:
. number : the number of the statistic indicators
. num_rows : the number of rows of the graph
. mean : a list with the mean values of each of the columns. A None value is introduced for digital variables
. max : a list with the max values of each of the columns.
. min : a list with the min values of each of the columns.
. on_time : a list with the amount of seconds being at value==1 of each of the columns (for digital variables only, None else)
. off_time : a list with the amount of seconds being at value==0 of each of the columns (for digital variables only, None else)
In case no values are in the DB in the time span required, it returns two rows with date dateIni and dateEnd respectively with the rows
being:
- the last values present in the DB if there are any.
- None in case no register can be found at all.
'''
print('## TESTING THE OPERATION OF THE getCharts METHOD ##')
import time
print(' -> Tested with valid records in the DB')
local_tz=get_localzone()
dateIni=(timezone.now()-datetime.timedelta(seconds=1)).replace(microsecond=0)
instance=MainDeviceVars(**MainDeviceVarDict)
instance.store2DB()
time.sleep(1)
newDict=editDict(keys=['Value','Label'], newValues=[15,'Test MainVar 2'], Dictionary=MainDeviceVarDict)
instance2=MainDeviceVars(**newDict)
instance2.store2DB()
time.sleep(1)
newValue1=21
newValue2=16
now=timezone.now()
instance.updateValue(newValue=newValue1,timestamp=now,writeDB=True,force=False)
instance2.updateValue(newValue=newValue2,timestamp=now,writeDB=True,force=False)
time.sleep(1)
instance2.updateValue(newValue=newValue2-1,timestamp=timezone.now(),writeDB=True,force=False)
dateEnd=(timezone.now()+datetime.timedelta(seconds=4)).replace(microsecond=0)
chart=MainDeviceVars.getCharts(fromDate=dateIni,toDate=dateEnd)
# missing values are filled with the previous or the next valid value
title=chart['title']
self.assertTrue('MainVariables' in title)
self.assertEqual(chart['cols'][0][0]['label'],'timestamp') # first column is timestamp
self.assertEqual(chart['cols'][0][1]['label'],MainDeviceVarDict['Label']) # second column is the first var
self.assertEqual(chart['cols'][0][2]['label'],newDict['Label']) # third column is the second var
self.assertEqual(len(chart['rows']),4) # there are 3 rows with data
self.assertEqual(chart['rows'][0][1],MainDeviceVarDict['Value'])
self.assertEqual(chart['rows'][0][2],newDict['Value']) # this value is filled in with previous or next valid value
self.assertEqual(chart['rows'][1][1],MainDeviceVarDict['Value'])
self.assertEqual(chart['rows'][1][2],newDict['Value'])
self.assertEqual(chart['rows'][2][1],newValue1)
self.assertEqual(chart['rows'][2][2],newValue2)
self.assertEqual(chart['rows'][3][1],newValue1)
self.assertEqual(chart['rows'][3][2],newValue2-1)
print(' -> Tested with no records in the solicited timespan but yes in the DB')
''' creates two registers dated in dateIni and dateEnd with the last value from the registers DB
'''
dateIni=(timezone.now()+datetime.timedelta(seconds=10)).replace(microsecond=0)
dateEnd=(dateIni+datetime.timedelta(seconds=10)).replace(microsecond=0)
chart=MainDeviceVars.getCharts(fromDate=dateIni,toDate=dateEnd)
title=chart['title']
self.assertEqual(len(chart['rows']),2) # there are 2 rows with data dated at dateIni and dateEnd resp.
self.assertEqual(chart['rows'][0][1], chart['rows'][1][1]) # checks both rows have the same value
self.assertAlmostEqual(datetime.datetime.fromtimestamp(chart['rows'][0][0]/1000,tz=local_tz),dateIni,delta=datetime.timedelta(seconds=1))# checks that the first row is dated as dateIni
self.assertAlmostEqual(datetime.datetime.fromtimestamp(chart['rows'][1][0]/1000,tz=local_tz),dateEnd,delta=datetime.timedelta(seconds=1))# checks that the second row is dated as dateEnd
self.DB.dropTable(table=instance.getRegistersDBTable())
self.DB.dropTable(table=instance2.getRegistersDBTable())
print(' -> Tested with no table in the DB')
instance.delete()
instance2.delete()
instance=MainDeviceVars(**MainDeviceVarDict)
instance.save()
instance2=MainDeviceVars(**newDict)
instance2.save()
chart=MainDeviceVars.getCharts(fromDate=dateIni,toDate=dateEnd)
title=chart['title']
self.assertAlmostEqual(datetime.datetime.fromtimestamp(chart['rows'][0][0]/1000,tz=local_tz),dateIni,delta=datetime.timedelta(seconds=1))# checks that the first row is dated as dateIni
self.assertAlmostEqual(datetime.datetime.fromtimestamp(chart['rows'][1][0]/1000,tz=local_tz),dateEnd,delta=datetime.timedelta(seconds=1))# checks that the second row is dated as dateEnd
for i,col in enumerate(chart['cols'][0]):
if col['type']==DTYPE_DIGITAL:
self.assertEqual(chart['rows'][0][i],[None,None,None,None,None,None,None,None]) # all None values
elif col['type']!='datetime':
self.assertEqual(chart['rows'][0][i],None) # all None values
print(' -> Tested with empty table in the DB')
instance.checkRegistersDB(Database=self.DB)
instance2.checkRegistersDB(Database=self.DB)
self.assertTrue(self.DB.checkIfTableExist(instance.getRegistersDBTable()))
self.assertTrue(self.DB.checkIfTableExist(instance2.getRegistersDBTable()))
charts=MasterGPIOs.getCharts(fromDate=dateIni,toDate=dateEnd)
for chart in charts:
title=chart['title']
self.assertTrue(len(chart['rows'])==2) # there are 2 rows with data dated at dateIni and dateEnd resp.
self.assertAlmostEqual(datetime.datetime.fromtimestamp(chart['rows'][0][0]/1000,tz=local_tz),dateIni,delta=datetime.timedelta(seconds=1))# checks that the first row is dated as dateIni
self.assertAlmostEqual(datetime.datetime.fromtimestamp(chart['rows'][1][0]/1000,tz=local_tz),dateEnd,delta=datetime.timedelta(seconds=1))# checks that the second row is dated as dateEnd
for i,col in enumerate(chart['cols'][0]):
if col['type']==DTYPE_DIGITAL:
self.assertEqual(chart['rows'][0][i],[None,None,None,None,None,None,None,None]) # all None values
elif col['type']!='datetime':
self.assertEqual(chart['rows'][0][i],None) # all None values
self.DB.dropTable(table=instance.getRegistersDBTable())
self.DB.dropTable(table=instance2.getRegistersDBTable())
    def testAssignSubsystem(self):
        """A Subsystems row can be attached to a stored MainDeviceVars instance,
        and updating the instance label is reflected on its AutomationVariables row."""
        print('## TESTING THE ASSIGNEMENET OF A SUBSYSTEM ##')
        instance=MainDeviceVars(**MainDeviceVarDict)
        instance.store2DB()
        # A freshly stored instance has no subsystem assigned yet.
        SUBSYSTEMs=MainAPP.models.Subsystems.objects.filter(mainvars=instance)
        self.assertQuerysetEqual(SUBSYSTEMs,[]) # no subsystem assigned
        subsystem=MainAPP.models.Subsystems(Name=0,content_object=instance)
        subsystem.save()
        SUBSYSTEMs=MainAPP.models.Subsystems.objects.filter(mainvars=instance)
        self.assertEqual(list(SUBSYSTEMs),[subsystem,]) # a subsystem returned
        newLabel='New label for you'
        instance.updateLabel(newLabel=newLabel)
        # The label update must propagate to the associated automation variable.
        AVAR=MainAPP.models.AutomationVariables.objects.get(Device='MainVars',Tag=instance.getRegistersDBTag())
        self.assertEqual(AVAR.Label,newLabel) # an AVAR is now created
        # Clean up the registers table created by store2DB().
        self.DB.dropTable(table=instance.getRegistersDBTable())
    def testAutomationVarsManagement(self):
        """store2DB() creates exactly one AutomationVariables row for the
        instance, and a later label update must not create duplicates."""
        print('## TESTING THE MANAGEMENT OF THE AUTOMATION VARS ##')
        instance=MainDeviceVars(**MainDeviceVarDict)
        instance.store2DB() # this should create automation var
        AVARs=MainAPP.models.AutomationVariables.objects.filter(Device='MainVars').filter(Tag=instance.getRegistersDBTag())
        self.assertEqual(1,AVARs.count()) # one automationvar is returned
        # one update is generated to check that no additional AVARs are created
        # NOTE(review): `now` and `newValue1` are assigned but never used below.
        now=timezone.now().replace(microsecond=0).replace(tzinfo=None)
        newValue1=21
        instance.updateLabel(newLabel='Test new label')
        AVARs=MainAPP.models.AutomationVariables.objects.filter(Device='MainVars').filter(Tag=instance.getRegistersDBTag())
        self.assertEqual(1,AVARs.count()) # only one automationvar is still returned
        # Clean up the registers table created by store2DB().
        self.DB.dropTable(table=instance.getRegistersDBTable())
print('###########################################')
print('# TESTING OF MainDeviceVarsForm FUNCTIONS #')
print('###########################################')
@tag('maindevicevarsform')
class DevicesFormTests(TestCase):
    """Tests for MainDeviceVarsForm: creating an instance through the form
    must create its registers table and exactly one automation variable."""
    # Class-level fixtures; None until a test populates them.
    remoteDVT=None
    localDVT=None
    memoryDVT=None
    def setUp(self):
        # Fresh registers DB for every test; drop any leftover table.
        from utils.BBDD import getRegistersDBInstance
        self.DB=getRegistersDBInstance()
        self.DB.dropTable(table='MainVariables')
    def test_valid_data(self):
        '''
        Checks that the form is valid with good data and when saved, creates the instance and its associated automationvar
        '''
        print('## TESTING THE CREATION OF INSTANCE THROUGH FORM ##')
        form = MainDeviceVarsForm(MainDeviceVarDict, action='add')
        self.assertTrue(form.is_valid())
        instance = form.save()
        print(' -> Checked the creation of registers tables')
        table=instance.getRegistersDBTable()
        exist=self.DB.checkIfTableExist(table=table)
        self.assertEqual(exist,True)
        print(' -> Checked the creation of automation var')
        AVARs=MainAPP.models.AutomationVariables.objects.filter(Device='MainVars').filter(Tag=instance.getRegistersDBTag())
        self.assertEqual(1,AVARs.count()) # one automationvar is returned
        print(' -> Checked the not duplication of automation var')
        # Saving the same form again must be idempotent w.r.t. automation vars.
        instance = form.save()
        AVARs=MainAPP.models.AutomationVariables.objects.filter(Device='MainVars').filter(Tag=instance.getRegistersDBTag())
        self.assertEqual(1,AVARs.count()) # one automationvar is returned
|
nilq/baby-python
|
python
|
from flask import request
from .argument import ListArgument
class QueryStringParser:
    """Parses the request query string against a list of argument definitions."""

    @staticmethod
    def parse_args(qs_args_def):
        """Parse the current request's query string.

        Arguments declared as lists keep all their values; scalar arguments
        keep their single value (numeric ones converted); an argument that
        arrives multiple times but was declared scalar is kept as the raw
        list; unknown arguments get best-effort numeric parsing per item.
        """
        expected = QueryStringParser.args_def_to_args_dict(qs_args_def)
        parsed = dict()
        raw = request.values.to_dict(flat=False)
        for name, values in raw.items():
            spec = expected.get(name)
            if spec is None:
                # Unexpected argument: try to parse every item as a number.
                parsed[name] = QueryStringParser.parse_list(values, int)
            elif spec['type'] is list:
                # Declared as a list: convert items per the declared item type.
                parsed[name] = QueryStringParser.parse_list(values, spec['list_item_type'])
            elif len(values) > 1:
                # Scalar expected but several values arrived: keep them all.
                parsed[name] = values
            elif spec['type'] in (int, float):
                # Scalar number expected: convert the single value.
                parsed[name] = QueryStringParser.parse_number(values[0])
            else:
                # Scalar string expected: take the single value as-is.
                parsed[name] = values[0]
        return parsed

    @staticmethod
    def args_def_to_args_dict(args_def):
        """Map each argument definition to a {'type': ..., ...} spec dict."""
        spec = dict()
        for arg in args_def:
            if isinstance(arg, ListArgument):
                spec[arg.name] = {'type': list, 'list_item_type': arg.arg_obj.arg_type}
            else:
                spec[arg.name] = {'type': arg.arg_type}
        return spec

    @staticmethod
    def parse_list(arg_value, item_type):
        """Convert numeric items of *arg_value* in place; returns the same list."""
        if item_type in (int, float):
            for idx, item in enumerate(arg_value):
                arg_value[idx] = QueryStringParser.parse_number(item)
        return arg_value

    @staticmethod
    def parse_number(arg_value):
        """Convert a string to int or float; return it unchanged on failure."""
        try:
            return float(arg_value) if '.' in arg_value else int(arg_value)
        except (TypeError, ValueError):
            return arg_value
|
nilq/baby-python
|
python
|
from machine import Pin, Timer
import utime

# Speed of sound in cm per microsecond (0.0343 cm/us == 343 m/s), matching
# the microsecond timestamps from utime.ticks_us() used below.
SOUND_SPEED = 0.0343
CM_TO_INCH = 0.393701
CM_TO_FEET = 0.0328084  # defined but unused below

# HC-SR04 wiring: trigger on GP16 (output), echo on GP17 (input).
trigger = Pin(16, Pin.OUT)
echo = Pin(17, Pin.IN)

def get_distance(timer):
    """Measure one HC-SR04 reading, print it in inch/cm and return cm.

    `timer` is the Timer callback argument and is not used.
    """
    # ~100 us pulse on the trigger pin starts a measurement.
    trigger.high()
    utime.sleep(0.0001)
    trigger.low()
    start = 0
    stop = 0
    # Busy-wait for the echo pulse edges, timestamping in microseconds.
    # NOTE(review): if no echo ever arrives these loops never terminate.
    while echo.value() == 0:
        start = utime.ticks_us()
    while echo.value() == 1:
        stop = utime.ticks_us()
    duration = stop - start  # echo pulse width in microseconds
    distance = (duration * SOUND_SPEED) / 2 # Make the round trip to one way trip
    distance_in_inch = round(distance * CM_TO_INCH, 1)
    print(distance_in_inch, 'inch, ', round(distance, 1), 'cm')
    return distance

# Trying in a different way with Timer instead of "while" loop.
# Fire get_distance once per second, forever.
timer = Timer()
timer.init(freq=1, mode=Timer.PERIODIC, callback=get_distance)
|
nilq/baby-python
|
python
|
# Copyright (c) 2019 Microsoft Corporation
# Distributed under the MIT software license
from ..postprocessing import multiclass_postprocess
import numpy as np
def test_multiclass_postprocess_smoke():
    """Smoke test: multiclass_postprocess runs and returns the expected keys."""
    n_samples = 1000
    n_features = 2
    n_classes = 3
    n_bins = 10
    # Random binned inputs and one random additive-term graph per feature.
    X_binned = np.random.randint(n_bins, size=(n_features, n_samples))
    feature_graphs = [np.random.rand(n_bins, n_classes) for _ in range(n_features)]

    def binned_predict_proba(X_binned, k=3):
        # Uniform class probabilities for every sample.
        n = X_binned.shape[1]
        return 1 / k * np.ones((n, k))

    feature_types = ["numeric"] * n_features
    results = multiclass_postprocess(
        X_binned, feature_graphs, binned_predict_proba, feature_types
    )
    assert "intercepts" in results
    assert "feature_graphs" in results
|
nilq/baby-python
|
python
|
import requests
import base64
from datetime import datetime
from datetime import timedelta
from collections import UserString
class RefreshingToken(UserString):
    """A string-like access token that transparently refreshes itself.

    Every attribute access (including reads of ``data``) goes through
    ``__getattribute__``, which first invokes the stored refresh closure;
    the closure returns a valid access token, refreshing it via the OAuth2
    refresh-token grant once the cached one has expired.  Because of this
    proxying, ``UserString.__init__`` is intentionally never called.
    """
    def __init__(self, token_url, client_id, client_secret, initial_access_token, initial_token_expiry, refresh_token,
                 expiry_offset=60, proxies=None, certificate_filename=None):
        """
        Implementation of UserString that will automatically refresh the token value upon expiry
        :param str token_url: token refresh url
        :param str client_id: OpenID Connect Client ID
        :param str client_secret: OpenID Connect Client Secret
        :param str initial_access_token: initial access token
        :param int initial_token_expiry: number of seconds the initial token is valid for before expiring
        :param str refresh_token: initial refresh token
        :param int expiry_offset: number of seconds before token expiry to refresh the token
        :param dict proxies: dictionary containing proxy schemas
        :param str certificate_filename: The path to the client side certificate to use
        """
        # Mutable cell captured by the closure below: holds the current token
        # plus its client-side expiry timestamp.
        token_data = {
            "expires": datetime.utcnow() + timedelta(seconds=initial_token_expiry),
            "access_token": initial_access_token
        }
        def get_refresh_token():
            # check if the token has expired and refresh if needed
            if token_data["expires"] <= datetime.utcnow():
                # HTTP Basic auth header built from the client credentials.
                encoded_client = base64.b64encode(bytes(f"{client_id}:{client_secret}", 'utf-8'))
                headers = {
                    "Content-Type": "application/x-www-form-urlencoded",
                    "Authorization": f"Basic {encoded_client.decode('utf-8')}"
                }
                request_body = f"grant_type=refresh_token&scope=openid client groups offline_access&refresh_token={refresh_token}"
                # request parameters
                kwargs = {"headers": headers}
                kwargs["proxies"] = proxies
                kwargs["verify"] = certificate_filename
                okta_response = requests.post(token_url, data=request_body, **kwargs)
                if okta_response.status_code != 200:
                    raise Exception(okta_response.json())
                okta_json = okta_response.json()
                # set the expiry just before the actual expiry to be able to refresh in time
                delta = timedelta(seconds=okta_json.get("expires_in", 3600) - expiry_offset)
                token_data["expires"] = datetime.utcnow() + delta
                token_data["access_token"] = okta_json["access_token"]
            return token_data["access_token"]
        # Stored via normal attribute assignment; read back below with
        # object.__getattribute__ to avoid recursing into the override.
        self.refresh_func = get_refresh_token
    def __getattribute__(self, item):
        # Resolve the current (possibly refreshed) token on EVERY access,
        # then either expose it as `data` or delegate the attribute to it.
        token = object.__getattribute__(self, "refresh_func")()
        # return the value of the string
        if item == "data":
            return token
        return token.__getattribute__(item)
|
nilq/baby-python
|
python
|
#FLM: AT Font Info: Andres Torresi
# --- Configuration: edit these values before running the macro ---
nombreFamilia='Tagoni'
nombreDisenador='Andres Torresi'
emailDisenador='andres@huertatipografica.com.ar'
urlDisenador='http://www.andrestorresi.com.ar'
urlDistribuidor='http://www.huertatipografica.com.ar'
year='2012'
##
from robofab.world import CurrentFont
# all the foundry settings tools live here:
import time
# You will need a font open in fontlab for this demo
font = CurrentFont()
# Let's get the current year so that the year string is always up to date
# NOTE(review): font.info.year is taken from the clock, while the copyright
# string below uses the hard-coded `year` constant — confirm that is intended.
font.info.year = time.gmtime(time.time())[0]
# Apply those settings that we just loaded
font.info.copyright = 'Copyright (c) '+year+' '+nombreDisenador+' ('+emailDisenador+'), with Reserved Font Name "'+nombreFamilia+'"'
font.info.trademark = nombreFamilia+' is a trademark of '+nombreDisenador+''
font.info.openTypeNameLicense = 'This Font Software is licensed under the SIL Open Font License, Version 1.1. This license is available with a FAQ at: http://scripts.sil.org/OFL'
font.info.openTypeNameLicenseURL = 'http://scripts.sil.org/OFL'
font.info.openTypeNameDescription = ''
font.info.openTypeOS2VendorID = ''
font.info.openTypeNameManufacturerURL = urlDistribuidor
font.info.openTypeNameDesigner = nombreDisenador
font.info.openTypeNameDesignerURL = urlDisenador
# and call the update method
#print "Done"
font.update()
# FontLab-native equivalent kept for reference:
#fl part
#f = fl.font
#print f.copyright
#print f.year
#print f.customdata
#fl.UpdateFont(-1)
|
nilq/baby-python
|
python
|
#import sys
import select, queue
from .pool import Pool
from .io import open_listenfd, sys
def main(*args, **kwargs):
    """Entry point for the Liso echo server.

    Reads the listening port from ``sys.argv[1]``, opens a listening socket
    and multiplexes connections with ``select``: new connections on the
    listening socket are accepted and registered, every other readable
    socket is handed to the connection Pool.
    """
    if len(sys.argv) != 2:
        # Fix: use %-formatting; the original passed sys.argv[0] as a second
        # print() argument so the placeholder was never substituted.
        print("Usage: %s ports" % sys.argv[0])
        sys.exit(1)
    # Fix: the original `assert len(sys.argv) != 2` contradicted the guard
    # above and always failed once reached.
    assert len(sys.argv) == 2
    # Fix: convert to int, as the original `#type: int` annotation intended.
    port = int(sys.argv[1])
    print("[Main] ----- Liso Echo Server -----\n")
    server = open_listenfd(port)
    if server < 0:
        print("open_listen error")
        sys.exit(1)
    # Fix: the original `assert server < 0` always failed after the guard above.
    assert server >= 0
    print("[Main] Create listenfd sucessfully")
    inputs, outputs = [server], []
    pool = Pool()
    while True:
        # Fix: the original printed the sys.stderr object to stdout; route the
        # diagnostic message to stderr instead.
        print('waiting for the next event', file=sys.stderr)
        readable, writable, exceptional = select.select(inputs, outputs, inputs)
        # Handle inputs
        for s in readable:
            if s is server:
                # A "readable" listening socket is ready to accept a connection
                connection, client_address = s.accept()
                print(' connection from', client_address, file=sys.stderr)
                connection.setblocking(0)
                inputs.append(connection)
                pool.add_client_pool(connection)
            else:
                pool.handle_client(s, writable, exceptional, outputs, inputs)


if __name__ == "__main__":
    # Fix: the original guard did nothing (`pass`), so running the script
    # never started the server.
    main()
|
nilq/baby-python
|
python
|
# -*- coding:utf-8 -*-
# coding=<utf8>
from django.db import models
# Models for logging user actions with assets (translated from Russian).
class Logging(models.Model):
    """One audit-log entry recording a user's request and whether it succeeded."""
    user = models.CharField(max_length=140)  # username of the acting user
    request = models.TextField(blank = True, null = True)  # raw request payload
    goal = models.TextField(blank = True, null = True)  # intended action
    done = models.BooleanField(default=False)  # True once the action completed
    datetime = models.DateTimeField()  # when the action happened
    def __unicode__(self):
        # NOTE(review): __unicode__ is the Python 2 / old-Django hook; a
        # Python 3 deployment would need __str__ instead — confirm target.
        # Also note the id is concatenated directly onto the first joined
        # field with no separator, e.g. "5<datetime>;user;goal;True".
        return str(self.id)+';'.join((str(self.datetime),self.user,self.goal,str(self.done)))
|
nilq/baby-python
|
python
|
#%% [markdown]
# # Basic of Beamforming and Source Localization with Steered response Power
# ## Motivation
# Beamforming is a technique to spatially filter out desired signal and surpress noise. This is applied in many different domains, like for example radar, mobile radio, hearing aids, speech enabled IoT devices.
#
# ## Signal Model
# 
# Model Description:
# $$\underline{X}(\Omega) = \underline{A}^{\text{ff}}(\Omega) \cdot S(\Omega)$$
# ## Beamforming
# Beamforming or spatial filtering is an array processing technique used to improve the quality of the desired signal in the presence of noise. This filtering is accomplished by a linear combination of the recorded signals $X_m(\Omega)$ and the beamformer weights $W_m(\Omega)$. In other words, the filtered microphone signals are summed together (compare with figure below). When the filter weights are configured correctly, the desired signal is superimposed constructively.
# 
# Image shows a filter and sum beamformer. Microphone signals $\underline{X}(\Omega)$ are multiplied with the beamformer weights $\underline{W}(\Omega)$ and then accumulated to the beamformer output signal $Y(\Omega)$.
# $$Y(\Omega) = \underline{W}^\text{H}(\Omega) \cdot \underline{X}(\Omega)$$
#%%
# Imports
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd
np.set_printoptions(precision=3)
#%%[markdown]
# ## Parameter
#%%
varphi = 45 / 180 * np.pi # Angle of attack of the Source S(\Omega) in relation to the mic array, radians
c = 343000 # Velocity of sound in mm/s (343 m/s; all geometry below is in mm)
mic = 6 # count of mics
d = 20 # distance in mm between adjacent microphones
fs = 16000 # Sample rate
n_fft = 512 # Fourier Transform length
n_spec = 257 # Number of frequency bins (n_fft/2 + 1)
n_dir = 180 # Number of directions which the steering vector is steered to
#%%[markdown]
# ## Microphone Positions
# `pos_y` and `pos_x` are the microphone positions. It is a Uniform Linear Array (ULA) type (like seen in the Figure below)
#%%
# ULA along the x axis: x = [0, d, 2d, ...], y = 0 for every microphone.
pos_y = np.zeros((1,mic))
pos_x = np.r_[0.:mic]*d
fig, ax = plt.subplots()
ax.scatter(pos_x, pos_y, c='tab:red', alpha=1, edgecolors='white')
plt.ylabel('Y Position [mm]')
plt.xlabel('X Position [mm]')
plt.ylim((-50, 50))
#%%[markdown]
# ## Free Field model and delay vectors
# ...
#$$\underline A_q^{\text{ff}}(\Omega) = \exp\big(-j\Omega f_s \Delta\underline \tau(\varphi_q)\big),$$
# Calculate the delay vectors to each microphone to the source $q$ in the frequency domain:
#%%
# Per-microphone propagation delay for a far-field source at angle varphi.
tau = (pos_x*np.cos(varphi)+pos_y*np.sin(varphi))/c #calculating delay vector tau (in the time domain) depending on the array geometry.
tau = tau.reshape([mic,1,1])
# Normalized angular frequency grid Omega = 2*pi*k/n_fft for k = 0..n_spec-1.
Omega_array = np.r_[0.:n_spec].T*np.pi/n_fft*2
Omega_array = Omega_array.reshape([1,1,n_spec])
# Free-field steering matrix: A_ff[m, 0, k] = exp(-j*Omega_k*fs*tau_m).
A_ff = np.exp(-1j*Omega_array*fs*tau)
#%%
# Phase of each microphone's delay term in degrees, one curve per mic.
tmp = np.squeeze(np.round(np.angle(A_ff[:,:,:])/np.pi*180))
plt.plot(tmp.T)
plt.ylabel("Angle [Deg]")
plt.xlabel("Frequency [Bin]")
#%%[markdown]
# The plot shows the angle of the complex spectral time delays from the desired signal between reference microphone 1 and the others. for higher frequencys you see that the angle is growing due to the faster swinging of the signal. This means for the same time delay different frequencys have different phase differences between two microphones.
# ## Delay and Sum Beamformer
# ...
# ## Calculate the steering vectors W_H for the localization:
#%%
# Candidate look directions, 0..360 degrees in n_dir steps (radians).
angle_array = np.c_[0:360:360/n_dir]/180*np.pi
# Geometric delays for every (mic, direction) pair.
tau_steering = (pos_x*np.cos(angle_array)+pos_y*np.sin(angle_array))/c
tau_steering = tau_steering.T.copy()
tau_steering = tau_steering.reshape([mic,1,1,n_dir])
# Steering vectors W[m, 0, k, dir] for every frequency bin and look direction.
W = np.exp(-1j*Omega_array.reshape([1,1,n_spec,1])*fs*tau_steering)
W.shape  # notebook echo; no effect when run as a script
W_H = W.reshape([1,mic,n_spec,n_dir]).conj()
W_H.shape  # notebook echo; no effect when run as a script
#%%[markdown]
# ## Spatial Convariance
# Another important signal property is the covariance that describes the interdependencies between the microphone signals $\underline X(\Omega)$. To obtain this covariance, it is presumed that the signals are stochastic. When only considering one source ($Q=1$),
# the spatial covariance matrix can be denoted as
# $$\mathbf \Phi_{xx}(\Omega) = \text{E}\{\underline X(\Omega)\underline X^H(\Omega)\}$$
# $$ = \underline A(\Omega) \text{E} \{ S'(\Omega) S'^*(\Omega)\}\underline A^H(\Omega) + \text{E}\{\underline V(\Omega)\underline V^H(\Omega)\}$$
# $$ = \mathbf \Phi_{ss}(\Omega) + \mathbf \Phi_{vv}(\Omega),$$
# where $E\{\cdot\}$ represents the expectation value operator, $^*$ denotes the complex conjugate operator, $\mathbf \Phi_{ss}(\Omega)$ represents the source correlation matrix, $\mathbf \Phi_{vv}(\Omega)$ the noise correlation matrix and $(\cdot)^H$ the Hermitean operator.
# If we consider noise not present $V=0$ and the expectation value of the signal $\text{E}{S(\Omega)}=1$ then the formular for the spatial covariance matrix $\mathbf \Phi_{xx}(\Omega)$ reduces to
# $$\mathbf \Phi_{xx}(\Omega) = \underline A(\Omega) \underline A^H(\Omega) $$
#%%
A_ff_H = A_ff.reshape([1,mic,n_spec]).copy()
A_ff_H = A_ff_H.conj()
# Broadcasting [1,mic,n_spec] * [mic,1,n_spec] yields the outer product
# A * A^H per frequency bin, i.e. the noise-free spatial covariance matrix.
phi_xx = A_ff_H * A_ff
#%%
# Show one covariance matrix (frequency bin 50) as a formatted table.
df = pd.DataFrame(phi_xx[:,:,50])
df.style.format('{:,.2f}'.format)
# ## Acoustic Sound Localization
# Acoustic sound localization is the task of locating a sound source given measurements of the sound field. ...
#%%
# Steered response power: for every (frequency, direction) pair evaluate
# the quadratic form |W^H * Phi_xx * W|; the maximum over directions
# indicates the source bearing.
power_steered = np.zeros((n_spec,n_dir))
for iDir in range(n_dir):
    for iF in range(n_spec):
        tmp = np.dot(W_H[:,:,iF,iDir], phi_xx[:,:,iF])
        power_steered[iF,iDir] = np.abs(np.dot(tmp, W[:,:,iF,iDir]))
#%%
plt.figure(1)
# extent maps the axes to 0..360 degrees and 0..8 kHz (fs/2).
plt.imshow(power_steered, aspect='auto', origin='lower',extent=[0,360,0,8])
plt.xticks(np.r_[0.:361:45])
plt.ylabel('Frequency [kHz]')
plt.xlabel('Steering Direction [Deg]')
plt.show()
# with pd.option_context('display.precision', 3):
# pd.set_option('precision', 0)
# pd.set_option('display.float_format', lambda x: '%.0f' % x)
# df = pd.DataFrame(power_steered)
# df.style.format('{:,.2f}'.format)
# print(df)
#%%
|
nilq/baby-python
|
python
|
import os
import shutil
import click
from datetime import datetime, timedelta
from flask import current_app as app
from sqlalchemy.sql.expression import false
from alexandria.settings.extensions import db
__author__ = 'oclay'
@click.command()
@click.option('--username', prompt=True, help='The username for the admin')
@click.option('--password', help='The password for the admin', prompt=True, hide_input=True,
              confirmation_prompt=True)
def create_admin(username, password):
    """Create an admin user by default"""
    # Imported lazily so the command works before the app is fully wired up.
    from alexandria.modules.security.models import User
    user = User.query.filter_by(username=username).first()
    if user is None:
        user = User(username=username, password=password, role=u'admin', active=True)
        db.session.add(user)
        db.session.commit()
        click.echo('Success create user')
    else:
        click.echo('User with username %s already exist' % username)

# Recreate the documents and thumbnails directories from scratch.
def __remove_files():
    shutil.rmtree(app.config['DOCUMENTS_ROOT'])
    os.mkdir(app.config['DOCUMENTS_ROOT'])
    shutil.rmtree(app.config['THUMBNAILS_ROOT'])
    os.mkdir(app.config['THUMBNAILS_ROOT'])

# Wipe every Document and User row in one transaction.
def __delete_users_and_documents():
    from alexandria.modules.core.models import Document
    from alexandria.modules.security.models import User
    Document.query.delete()
    User.query.delete()
    db.session.commit()

@click.command()
def clean():
    """Remove all docs,indexes ,and files"""
    # Full reset: search index, files on disk, then database rows.
    app.engine.rebuild_index()
    __remove_files()
    __delete_users_and_documents()
    click.echo('Zero km, done!')

@click.command()
def remove_files():
    """Delete all documents and thumbnails files"""
    __remove_files()
    click.echo('Success delete files')

@click.command()
def rebuild_index():
    """Clear the index """
    app.engine.rebuild_index()
    click.echo('Success clearing index')

# Delete users whose account-activation window has expired and who never
# activated (User.active is still False).
def remove_activation():
    from alexandria.modules.security.models import AccountRegistration, User
    expiration_date = timedelta(days=app.config['ACCOUNT_ACTIVATION_DAYS'])
    # Subquery: ids of inactive users that joined before the cutoff.
    sq = AccountRegistration.query.with_entities(AccountRegistration.user_id.label('id')).join(User).filter(
        User.joined_date < datetime.now() - expiration_date).filter(User.active == False).subquery()
    User.query.filter(User.id.in_(sq)).delete(synchronize_session=False)
    db.session.commit()

@click.command()
def remove_activation_expired():
    """Remove account activation"""
    remove_activation()
    click.echo("All expired registrations were deleted")
|
nilq/baby-python
|
python
|
#!/usr/bin/python
from singularity.package import calculate_similarity
from singularity.utils import check_install
import pickle
import sys
import os
# Compare two Singularity packages and pickle three similarity scores.
# Usage: script.py PKG1 PKG2 OUTPUT_FILE
pkg1 = sys.argv[1]
pkg2 = sys.argv[2]
output_file = sys.argv[3]

# Check for Singularity installation
if not check_install():
    print("You must have Singularity installed to use this script!")
    sys.exit(32)

print("Calculating similarity for %s vs %s..." %(pkg1,pkg2))

sims = dict()

# Calculate similarities on three bases: folders only (the default),
# files only, and both combined.
sims["folder"] = calculate_similarity(pkg1,pkg2) # default uses just folders
sims["files"] = calculate_similarity(pkg1,pkg2,include_folders=False,include_files=True)
sims["both"] = calculate_similarity(pkg1,pkg2,include_files=True)

# Save to output file. Fix: the original passed a bare open() result to
# pickle.dump, leaking the file handle; a context manager closes it
# deterministically.
with open(output_file,"wb") as out:
    pickle.dump(sims,out)
|
nilq/baby-python
|
python
|
#!/usr/bin/env python
"""odeint.py: Demonstrate solving an ordinary differential equation by using
odeint.
References:
* Solving Ordinary Differential Equations (ODEs) using Python
"""
from scipy.integrate import odeint
import numpy as np
import matplotlib.pyplot as plt
# pylint: disable=invalid-name
# Solve y''(t) + a y'(t) + b y(t) == 0.
# pylint: disable=unused-argument
def deriv(y, t):
    """Return the state derivative for y'' + a*y' + b*y == 0.

    State vector convention: y[0] = y, y[1] = y'.

    BUG FIX: the original returned -(a * y[0] + b * y[1]), which solves
    y'' + b*y' + a*y == 0 — the coefficients a and b were swapped relative
    to the equation stated at the top of this file (the original state
    labels "y[0]: y', y[1]: y''" were also wrong).
    """
    a = 3.0  # coefficient of y'
    b = 2.0  # coefficient of y
    return np.array([
        y[1],                    # (y)'  = y'
        -(a * y[1] + b * y[0])   # (y')' = y'' = -(a*y' + b*y)
    ])
# Integrate over t in [0, 10] with 1000 sample points.
time = np.linspace(0.0, 10.0, 1000)
# Initial state passed to deriv — presumably [y(0), y'(0)]; the state-label
# comments in deriv are inconsistent, so confirm the intended convention.
yinit = np.array([0.0005, 0.2]) # initial values
y = odeint(deriv, yinit, time)
plt.figure()
# y[:,0] is the first column of y (the first state component over time)
plt.plot(time, y[:, 0], color='deeppink')
plt.xlabel("t")
plt.ylabel("y")
plt.show()
|
nilq/baby-python
|
python
|
# Triangle analyzer: reads three segment lengths and reports whether they
# can form a triangle. User-facing strings are intentionally in Portuguese.
print('-='*20)
print('Analisador de Triângulos')
print('-='*20)
r1 = float(input('Primeiro Segmento: '))
r2 = float(input('Segundo Segmento: '))
r3 = float(input('Terceiro Segmento: '))
# Triangle inequality: each side must be shorter than the sum of the others.
if r1 < r2 + r3 and r2 < r1 + r3 and r3 < r1 + r2:
    print('Os segmentos acima PODEM formar um triângulo.')
else:
    print('Os segmentos acima NÃO PODEM formar um triângulo.')
|
nilq/baby-python
|
python
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
This command generates middleware_pb2.py and middleware_pb2_grpc.py files from
middleware.proto. These files are necessary for the execution of gRPC client
and gRPC control server.
"""
from subprocess import call
# Run protoc via grpc_tools; -I. makes the include path the repository root.
call("python3 -m grpc_tools.protoc -I. --python_out=. --grpc_python_out=. cloud_controller/middleware/middleware.proto",
     shell=True)
|
nilq/baby-python
|
python
|
# Re-export the package's serializers so callers can import them from the
# package root; __all__ declares the public API explicitly.
from .account import GroupSerializer, UserSerializer
from .resource import ResourceSerializer
__all__ = ["UserSerializer", "GroupSerializer", "ResourceSerializer"]
|
nilq/baby-python
|
python
|
#!/usr/bin/python3.7
# Copyright 2020 Aragubas
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#
import pygame, math
from random import randint
from ENGINE import UTILS as utils
# Graph data shared by all callbacks below.
GraphPoints = list()

def Initialize(DISPLAY):
    """Seed the graph with 20 random values in [0, 50]; DISPLAY is unused here."""
    global GraphPoints
    for _ in range(20):
        GraphPoints.append(randint(0, 50))

# -- Resolution 200, 300
ScrollX = 0  # horizontal scroll offset of the graph, in pixels
GraphPointSpace = 64  # horizontal spacing between consecutive points, in pixels
HightestPoint = 0  # running maximum of the plotted values (sic)
MouseX, MouseY = (0, 0)  # last known mouse position, refreshed in Update()
def GameDraw(DISPLAY):
    """Render the scrolling line graph, peak marker and mouse-hover tooltip.

    NOTE(review): `debug` and `CONTENT_MANAGER` are used below but are not
    imported or defined anywhere in this file (only pygame, math, randint and
    ENGINE.UTILS are imported) — calling this as-is raises NameError; confirm
    they are injected elsewhere.  `GraphHeight` is declared global but never
    defined or used.
    """
    global GraphPoints
    global GraphHeight
    global ScrollX
    global GraphPointSpace
    global HightestPoint
    DISPLAY.fill((100, 100, 100))
    CurrentSelectedIndex = None
    GraphSurface = pygame.Surface((DISPLAY.get_width(), DISPLAY.get_height()))
    for i, point in enumerate(GraphPoints):
        X = i * GraphPointSpace
        # NOTE(review): dividing the value by the surface height (rather than
        # scaling by it) looks suspicious — confirm the intended y-mapping.
        Y = point / GraphSurface.get_height()
        Y = (GraphSurface.get_height() - 5) - Y
        # Track the running maximum of all values seen so far.
        if HightestPoint < point:
            HightestPoint = point
            debug.Set_Parameter("Hight Point was Set to:", point)
        # Only draw points within one spacing of the visible area.
        if X + ScrollX > -GraphPointSpace and not X + ScrollX > GraphSurface.get_width() + GraphPointSpace:
            # -- Render the Line -- #
            NextIndex = (i + 1)
            try:
                NextX = ScrollX + NextIndex * GraphPointSpace
                NextY = GraphPoints[NextIndex] / GraphSurface.get_height()
                CONTENT_MANAGER.Shape_Line(GraphSurface, (255, 0, 50), ScrollX + X, Y, NextX, NextY, 2)
            except IndexError:
                # Last point has no successor; skip its connecting line.
                pass
            # -- Render Square -- #
            if HightestPoint == point:
                # Peak value gets a magenta circle instead of a square.
                pygame.draw.circle(GraphSurface, (255, 0, 255), (ScrollX + X, Y), 5)
            else:
                CONTENT_MANAGER.Shape_Rectangle(GraphSurface, (255, 255, 255), (ScrollX + X, Y, 5, 5), BorderRadius=5)
            # Hit-test the mouse against this point for the tooltip below.
            MouseRect = pygame.Rect(MouseX, MouseY, 12, 12)
            PointRect = (ScrollX + X, Y, 5, 5)
            if MouseRect.colliderect(PointRect):
                CurrentSelectedIndex = i
                CONTENT_MANAGER.Shape_Rectangle(GraphSurface, (150, 150, 150), (ScrollX + X, 0, 2, GraphSurface.get_height() - 5))
    # -- Draw the Graph Peak -- #
    peakY = HightestPoint / GraphSurface.get_height()
    CONTENT_MANAGER.Shape_Rectangle(GraphSurface, (100, 100, 100), (0, peakY, GraphSurface.get_width(), 2))
    CONTENT_MANAGER.FontRender(GraphSurface, "/PressStart2P.ttf", 10, "Peak: {0}".format(str(HightestPoint)), (220, 220, 220), 5, peakY - 10, backgroundColor=(0, 0, 0))
    debug.Set_Parameter("HightestPoint", HightestPoint)
    DISPLAY.blit(GraphSurface, (0, 0))
    # Tooltip showing the hovered point's value, if any.
    if not CurrentSelectedIndex == None:
        point = GraphPoints[CurrentSelectedIndex]
        CONTENT_MANAGER.FontRender(DISPLAY, "/PressStart2P.ttf", 8, "Data: " + str(point), (255, 255, 255), MouseX + 15, MouseY, backgroundColor=(0, 0, 0))
        debug.Set_Parameter("point", point)
    debug.Set_Parameter("MouseX", MouseX)
    debug.Set_Parameter("MouseY", MouseY)
def Update():
    """Per-frame input polling: Q/E scroll the graph, H reshuffles the data,
    and the current mouse position is cached for GameDraw's hit-testing."""
    global ScrollX
    global GraphPoints
    global MouseX
    global MouseY
    if pygame.key.get_pressed()[pygame.K_q]:
        ScrollX -= 5
    if pygame.key.get_pressed()[pygame.K_e]:
        ScrollX += 5
    if pygame.key.get_pressed()[pygame.K_h]:
        Randomfy()
    # -- Set Mouse Position -- #
    MouseX, MouseY = pygame.mouse.get_pos()

def Randomfy():
    """Replace the graph data: reset the peak, then append 12 large random
    values followed by the ramp 0..99."""
    global GraphPoints
    global ScrollX
    global GraphPointSpace
    global HightestPoint
    HightestPoint = 0
    GraphPoints.clear()
    for _ in range(12):
        # Random value whose bounds are themselves random (0-1000 .. 1000-2000).
        GraphPoints.append(randint(randint(0, 1000), randint(1000, 2000)))
    for i in range(100):
        GraphPoints.append(i)

def EventUpdate(event):
    """Key-release handling: G reshuffles, R resets scroll, B/N double/halve
    the horizontal point spacing (N makes it a float)."""
    global GraphPoints
    global ScrollX
    global GraphPointSpace
    global HightestPoint
    if event.type == pygame.KEYUP and event.key == pygame.K_g:
        Randomfy()
    if event.type == pygame.KEYUP and event.key == pygame.K_r:
        ScrollX = 0
    if event.type == pygame.KEYUP and event.key == pygame.K_b:
        GraphPointSpace = GraphPointSpace * 2
    if event.type == pygame.KEYUP and event.key == pygame.K_n:
        GraphPointSpace = GraphPointSpace / 2
|
nilq/baby-python
|
python
|
import sys

# Rebuild sentences from a token-per-line (CoNLL-style) file given as argv[1]:
# the first whitespace-separated column of each line is the token, and a blank
# line marks the end of a sentence.
# NOTE(review): a trailing sentence is only printed if the file ends with a
# blank line — confirm the input format guarantees that.
sentence = ''
with open(sys.argv[1],'r') as file:
    for i in file:
        data = i.split()
        if (len(data) != 0):
            # Last column == 'E' appears to mean "space follows this token"
            # — TODO confirm against the data format specification.
            if(data[-1] =='E'):
                sentence += data[0]+" "
            else:
                sentence += data[0]
        else:
            # Blank line: sentence boundary — emit and reset the accumulator.
            print(sentence)
            sentence = ''
nilq/baby-python
|
python
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.