id stringlengths 1 8 | text stringlengths 6 1.05M | dataset_id stringclasses 1
value |
|---|---|---|
3416497 | """"
Does full table verification for the single hit policies
"""
import warnings
from dmnconverter.tools.decisiontable import DecisionTable
from dmnconverter.transform.meta_language import MetaLanguageConverter
from dmnconverter.tools import texttools as text_tools
# from dmnconverter.verify.unique_policy import VerifyUniquePolicy
class Verification(MetaLanguageConverter):
    """Full-table verification for single hit policy DMN decision tables.

    Converts a DecisionTable into a (vocabulary, theory, structure) triple in
    the meta language; the theory encodes the checks for conflicting or
    non-covered rules under the table's hit policy.
    """

    def convert(self, decision_tables: [DecisionTable]) -> ([str], [str], [str]):
        """Build the (vocabulary, theory, structure) triple for the first table.

        Only the first table is verified; a warning is issued when several
        tables are passed in.
        """
        if len(decision_tables) > 1:
            warnings.warn("Only first table is verified even though multiple DMN tables were given")
        dmn_table: DecisionTable = decision_tables[0]
        # todo : automatically go to single hit policy if needed
        # if dmn_table.hit_policy == 'Unique':
        #     return VerifyUniquePolicy().convert(decision_tables)
        vocabulary: [str] = self.build_vocabulary(dmn_table)
        theory = self.build_theory(dmn_table)
        # build_structure is not defined here -- presumably inherited from
        # MetaLanguageConverter and backed by build_structure_dict below.
        structure = self.build_structure(dmn_table)
        return vocabulary, theory, structure

    def build_structure_dict(self, dmn_table: DecisionTable) -> dict:
        """
        Build structure dictionary for general verification of a single table.

        :param dmn_table: the decision table to encode
        :return: dict mapping meta-language predicate/constant names to the
            enumerations (lists of strings) that populate them in the structure
        """
        # todo: good method for this
        # Integer range used for every integer domain in the model.
        modelint_start = 0
        modelint_stop = 20
        structure_dict = dict()
        # Model int
        structure_dict['ModelInt'] = [str(modelint_start) + ".." + str(modelint_stop)]
        # Variables
        (input_variables, output_variables) = self.structure_variables(dmn_table)
        # NOTE(review): the trailing spaces in these two keys look deliberate
        # (spacing when the structure text is rendered?) -- confirm before
        # normalizing them.
        structure_dict['InputVariable '] = input_variables
        structure_dict['OutputVariable '] = output_variables
        # Domain and ranges
        input_label_dict = dmn_table.input_label_dict
        output_label_dict = dmn_table.output_label_dict
        (input_domain, input_range) = self.specify_meta_domain(input_label_dict, modelint_start, modelint_stop)
        (output_domain, output_range) = self.specify_meta_domain(output_label_dict, modelint_start, modelint_stop)
        domain = input_domain + output_domain
        ranges = input_range + output_range
        # add enumerated domains and ranges to structure
        structure_dict['Domain'] = text_tools.make_str(domain)
        structure_dict['Range'] = text_tools.make_str(ranges)
        # Policies
        # fixme: currently still brackets around this when building structure
        structure_dict['TablePolicy'] = [dmn_table.hit_policy]
        # Rule components
        structure_dict['RuleIn'] = super().build_meta_input_rule(dmn_table)
        structure_dict['RuleOut'] = super().build_output_rule(dmn_table)
        # Priorities
        # fixme support priorities
        return structure_dict

    def build_vocabulary(self, decision_table: DecisionTable) -> [str]:
        """Return the fixed vocabulary lines of the verification encoding.

        The vocabulary is independent of the concrete table contents.
        """
        vocabulary = ["type RuleNr isa int",
                      "type CaseNr isa int",
                      "type Variable",
                      "type InputVariable isa Variable",
                      "type OutputVariable isa Variable",
                      "",
                      "type ModelInt isa int // Sets range of all integers in the problem",
                      "type Value contains ModelInt",
                      "// Assigns a value to a variable",
                      "InputVarValue(InputVariable):Value",
                      "OutputVarValue(OutputVariable,Value)",
                      "",
                      "// Indicate if the current assignments 'trigger' a rule",
                      "Match(RuleNr,Variable)",
                      "Triggered(RuleNr)",
                      "",
                      "// Domains and ranges of variables",
                      "Domain(Variable,Value)",
                      "Range(Variable,Value,Value) // inclusive range with min and max as values",
                      "// Allow alternative comparison operators",
                      "type Operator constructed from {eq,less, leq, grt, geq}",
                      "Compare(Variable, Operator, Value)",
                      "// RuleComp forms a component of a rule",
                      "RuleIn(RuleNr, CaseNr, InputVariable, Operator, Value)",
                      "RuleOut(RuleNr, OutputVariable, Value) // output can only be equal",
                      "",
                      "type Policy constructed from {unique, first, priority}",
                      "TablePolicy():Policy",
                      "",
                      "type Priority isa int",
                      "RulePriority(RuleNr, Priority)"]
        return vocabulary

    def build_theory(self, decision_table: DecisionTable) -> [str]:
        """Return the fixed theory lines of the verification encoding.

        The theory asserts that some output variable has either more than one
        value (conflict) or no value (non-covered), defines rule matching and
        triggering, assigns outputs per hit policy, and defines the
        comparison operators.  It is independent of the concrete table.
        """
        theory = ["// FIND CONFLICTING OR NON-COVERED RULES",
                  "?var[OutputVariable]: (?>1 val[Value]:OutputVarValue(var,val)) | (?0 val[Value]:OutputVarValue(var,val)).",
                  "",
                  "// CORRECT OUTPUT OF RULES",
                  "{",
                  "!rN[RuleNr],var[InputVariable]: Match( rN, var)<- ?cN[CaseNr]: ?val1[Value], op1[Operator]: RuleIn(rN,cN,var,op1, val1) & ( !val2[Value], op2[Operator]: RuleIn(rN,cN,var, op2, val2) => Compare(var,op2,val2) ).",
                  "!rN[RuleNr],var[InputVariable]: Match(rN,var) <- ?0 val[Value],cN[CaseNr], op[Operator]: RuleIn(rN, cN,var, op, val).",
                  "}",
                  "",
                  "// Define when a rule is triggered",
                  "{ !rN[RuleNr]: Triggered(rN)<- !var[InputVariable]: Match(rN,var). }",
                  "",
                  "// Assign output based on hit policy of the table",
                  "{ ",
                  "!yvar[OutputVariable], yval[Value]: OutputVarValue(yvar,yval)<- ?n[RuleNr]: TablePolicy() = unique & RuleOut(n, yvar, yval) & Triggered(n).",
                  "!yvar[OutputVariable], yval[Value]: OutputVarValue(yvar,yval)<- ?n[RuleNr]: TablePolicy() = first & RuleOut(n, yvar, yval) & n = min{n2[RuleNr]:Triggered(n2):n2}.",
                  "!yvar[OutputVariable], yval[Value]: OutputVarValue(yvar,yval)<- ?n[RuleNr], maxPr[Priority]: TablePolicy() = priority & RuleOut(n, yvar, yval) & Triggered(n) & RulePriority(n, maxPr) & maxPr = max{n2[RuleNr], pr[Priority]:Triggered(n2) & RulePriority(n2,pr) :pr}. ",
                  "}",
                  "",
                  "// RESTRICT DOMAINS & RANGES",
                  "// for all good values within the range, link these to the domain",
                  "!var[InputVariable], minVal[Value], maxVal[Value]: ?val[Value]: Range(var, minVal, maxVal) => (InputVarValue(var)=val & minVal =< val =< maxVal).",
                  "// If a domain is defined, all variable assignments match it.x",
                  "!var[InputVariable], val1[Value]: ?val2[Value], val3[Value]: Domain(var,val1)=> (Domain(var,val2) & InputVarValue(var)=val3 & val3=val2).",
                  "",
                  "// DEFINE COMPARISON OPERATORS",
                  "{ ",
                  " !a[Variable], val[Value]: Compare(a, eq, val) <- InputVarValue(a)=val.",
                  " !a[Variable], val[Value], compVal[Value]: Compare(a, less, compVal) <- InputVarValue(a)=val & val<compVal. ",
                  " !a[Variable], val[Value], compVal[Value]: Compare(a, leq, compVal) <- InputVarValue(a)=val & val=<compVal.",
                  " !a[Variable], val[Value], compVal[Value]: Compare(a, grt, compVal) <- InputVarValue(a)=val & val>compVal.",
                  " !a[Variable], val[Value], compVal[Value]: Compare(a, geq, compVal) <- InputVarValue(a)=val & val>=compVal.",
                  "}"]
        return theory
| StarcoderdataPython |
6447398 | <filename>ELK/main.py
from elasticsearch import Elasticsearch

# Bug fix: the original built a client with Elasticsearch(HOST=..., PORT=...)
# and then immediately overwrote it with a bare Elasticsearch() -- the first
# client was a dead store, and the uppercase HOST/PORT keywords are not
# documented elasticsearch-py parameters anyway.  Keep a single client and
# use the documented `hosts` parameter for the local node.
es = Elasticsearch(hosts=["http://localhost:9200"])
| StarcoderdataPython |
1666255 | import os
import sys
from collections import OrderedDict
from openpyxl import Workbook, load_workbook
from tqdm import tqdm
# Run relative to this script's own directory so the ../data paths resolve
# regardless of the caller's working directory.
os.chdir(os.path.dirname(os.path.abspath(__file__)))

# Input directory with one workbook per Senate election year, and the
# directory the cleaned interim spreadsheets are written to.
SENATE_PATH = "../data/raw/david_leip/Senate Election Data (xls)"
OUTPUT_PATH = "../data/interim/"

candidates = []      # one dict per candidate, accumulated across all years
county_votes = []    # one dict per county per year, with per-party vote counts
all_parties = OrderedDict()  # party short name -> stable output column offset

# Process the yearly workbooks in deterministic (sorted) order.
data_files = os.listdir(SENATE_PATH)
data_files.sort()
for fname in data_files:
    # Skip hidden files and Excel lock/backup files.
    if fname[0] == '.' or fname[0] == "~":
        continue
    # The election year is embedded in the file name: Sen_Election_Data_<year>...
    year = int(fname[len("Sen_Election_Data_"):len("Sen_Election_Data_")+4])
    print("Extracting Senate voting data from " + str(year))
    fpath = os.path.join(SENATE_PATH, fname)
    wb = load_workbook(filename=fpath, data_only=True)

    # --- Locate the "Candidates" sheet ------------------------------------
    candidate_idx = -1
    for idx, sheetname in enumerate(wb.sheetnames):
        if sheetname == "Candidates":
            candidate_idx = idx
            break
    if candidate_idx == -1:
        print("ERROR! Couldn't find candidate info sheet.")
        sys.exit(1)
    # NOTE(review): this assignment is repeated a few lines below and is
    # therefore dead -- presumably a leftover.
    blanks_allowed = 20
    candidate_data = wb.worksheets[candidate_idx]

    # Find the "Short Name" header column in the first row.
    # NOTE(review): blanks_allowed is never decremented in this loop, so it
    # only terminates when the header is actually found -- confirm intended.
    ok = False
    short_name_col = 1
    blanks_allowed = 20
    while blanks_allowed > 0:
        val = candidate_data.cell(row=1, column=short_name_col).value
        if val == "Short Name":
            ok = True
            break
        short_name_col += 1
    if not ok:
        print("ERROR: Couldn't find short name column.")
        sys.exit(1)

    # Read the party index -> short name table (rows until first blank).
    party_row = 2
    parties = {}
    while True:
        party_idx = candidate_data.cell(row=party_row, column=1).value
        if party_idx == None:
            break
        party_idx = int(party_idx)
        party_name = candidate_data.cell(row=party_row, column=short_name_col).value
        if party_name == None:
            party_name = "[None]"
        # The data set used different short names for the Peace and Freedom party across years;
        # normalizing it
        if party_name in ["Peace&Free", "Peace & Free"]:
            party_name = "Peace and Free"
        parties[party_idx] = party_name
        # Assign each party a stable global output column offset on first sight.
        if party_name not in all_parties.keys():
            all_parties[party_name] = len(all_parties)
        party_row += 1

    # Scan down (tolerating up to blanks_allowed blank cells) for the "#"
    # marker row that starts the candidate listing.
    ok = False
    candidate_row_start = party_row
    while blanks_allowed > 0:
        val = candidate_data.cell(row=candidate_row_start, column=1).value
        if val == None:
            candidate_row_start += 1
            blanks_allowed -= 1
            continue
        val = str(val)
        if len(val) == 0:
            candidate_row_start += 1
            blanks_allowed -= 1
            continue
        if val.strip() == "#":
            ok = True
            break
        candidate_row_start += 1
    if not ok:
        print("ERROR: Couldn't find candidate region. " + str(candidate_row_start))
        sys.exit(1)

    # Collect one record per candidate row until the first blank party index.
    incumbency_col = short_name_col + 2
    curr_row = candidate_row_start + 1
    while True:
        candidate = {}
        natl_party_idx = candidate_data.cell(row=curr_row, column=1).value
        if natl_party_idx == None:
            break
        candidate["Year"] = year
        candidate["National Party"] = parties[int(natl_party_idx)]
        candidate["Name"] = candidate_data.cell(row=curr_row, column=2).value
        candidate["Ballot Party"] = candidate_data.cell(row=curr_row, column=3).value
        candidate["State"] = candidate_data.cell(row=curr_row, column=4).value
        candidate["Incumbency"] = candidate_data.cell(row=curr_row, column=incumbency_col).value
        candidates.append(candidate)
        curr_row += 1

    # --- Locate the "County" sheet with per-county vote counts ------------
    county_idx = -1
    for idx, sheetname in enumerate(wb.sheetnames):
        if sheetname == "County":
            county_idx = idx
            break
    if county_idx == -1:
        print("ERROR! Couldn't find county voting data sheet.")
        sys.exit(1)
    county_data = wb.worksheets[county_idx]

    # Find the FIPS header column.
    ok = False
    fips_column = 1
    while fips_column < county_data.max_column:
        if county_data.cell(row=1, column=fips_column).value == "FIPS":
            ok = True
            break
        fips_column += 1
    if ok == False:
        print("ERROR! Couldn't find FIPS column.")
        sys.exit(1)

    # Sanity check: party vote columns start at column N (14) and line up
    # with the party table read from the Candidates sheet.
    party_col = 14 # starting at N
    if county_data.cell(row=1, column=party_col).value != "Democratic":
        print("ERROR! Parties don't start at expected column.")
        sys.exit(1)
    for pi in range(len(parties)):
        pbase = parties[pi+1]
        pcheck = county_data.cell(row=1, column=party_col + pi).value
        # NOTE(review): this compares against 0, whereas the candidate-sheet
        # code above compared against None for a missing name -- confirm
        # whether blank headers really read back as 0 here.
        if pcheck == 0:
            pcheck = "[None]"
        if pcheck in ["Peace&Free", "Peace & Free"]:
            pcheck = "Peace and Free"
        if pbase != pcheck:
            print("ERROR! Party", pbase, "doesn't match", pcheck, "at index", pi)
            sys.exit(1)

    # One record per county row; rows whose state cell is "T" are skipped
    # (presumably totals rows -- confirm).
    for row in county_data.iter_rows(min_row=2):
        county = {}
        county_name = row[0].value
        if county_name == None:
            continue
        county_state = row[1].value
        if county_state == "T":
            continue
        # Keys starting with "_" are metadata; all other keys are party names.
        county["_year"] = year
        county["_state"] = county_state
        county["_name"] = county_name
        county["_fips"] = row[fips_column-1].value
        for pi in range(len(parties)):
            vote_count = row[party_col-1 + pi].value
            if vote_count == None or len(str(vote_count).strip()) == 0:
                vote_count = 0
            county[parties[pi+1]] = vote_count
        county_votes.append(county)
# Write the collected per-candidate records to the interim spreadsheet.
# The header names double as the dict keys of each candidate record.
_CANDIDATE_COLUMNS = ["Year", "State", "Incumbency", "Name",
                      "National Party", "Ballot Party"]
candidate_wb = Workbook()
candidate_ws = candidate_wb.active
candidate_ws.title = "Senate Candidate Data"
for col_idx, header in enumerate(_CANDIDATE_COLUMNS, start=1):
    candidate_ws.cell(row=1, column=col_idx, value=header)
for row_idx, candidate in enumerate(candidates, start=2):
    for col_idx, header in enumerate(_CANDIDATE_COLUMNS, start=1):
        candidate_ws.cell(row=row_idx, column=col_idx, value=candidate[header])
candidate_wb.save(os.path.join(OUTPUT_PATH, "senate_candidates.xlsx"))
voting_wb = Workbook()
voting_ws = voting_wb.active
voting_ws.title = "Senate Voting Data"
# Fixed identifier columns, followed by one column per party (in the stable
# order assigned in all_parties while scanning the yearly workbooks).
voting_ws.cell(row=1, column=1, value="Year")
voting_ws.cell(row=1, column=2, value="State")
voting_ws.cell(row=1, column=3, value="FIPS")
voting_ws.cell(row=1, column=4, value="County Name")
party_col = 5
for pname, pi in all_parties.items():
    voting_ws.cell(row=1, column=party_col + pi, value=pname)
county_row = 2
print("Preparing collective data for output")
for county in tqdm(county_votes):
    voting_ws.cell(row=county_row, column=1, value=county["_year"])
    voting_ws.cell(row=county_row, column=2, value=county["_state"])
    voting_ws.cell(row=county_row, column=3, value=county["_fips"])
    voting_ws.cell(row=county_row, column=4, value=county["_name"])
    # Track which parties this county has data for; any party not deleted
    # below gets an explicit 0 afterwards.
    party_list = all_parties.copy()
    for col, val in county.items():
        if col[0] == "_":
            continue  # metadata keys were already written above
        voting_ws.cell(
            row=county_row,
            column=party_col + all_parties[col],
            value=val
        )
        del party_list[col]
    # Parties that did not appear in this county/year get a 0 vote count.
    for pname, pi in party_list.items():
        voting_ws.cell(
            row=county_row,
            column = party_col + pi,
            value = 0
        )
    county_row += 1
print("Writing data to output file")
voting_wb.save(os.path.join(OUTPUT_PATH, "senate_voting.xlsx"))
| StarcoderdataPython |
9785808 | from consts import *
import torch
import torch.nn.functional as F
from torch.autograd import Variable
from ipdb import set_trace
#==============================
class Net(torch.nn.Module):
    """Fully connected Q-value network.

    Maps a state vector (STATE_DIM features) through NN_HIDDEN_LAYERS hidden
    layers of NN_FC_DENSITY units with ReLU activations to one Q-value per
    action (ACTION_DIM).  The optimizer and loss live on the module itself.
    """

    def __init__(self):
        super(Net, self).__init__()
        in_nn = STATE_DIM
        out_nn = NN_FC_DENSITY
        self.l_fc = []
        for i in range(NN_HIDDEN_LAYERS):
            l = torch.nn.Linear(in_nn, out_nn)
            in_nn = out_nn
            self.l_fc.append(l)
            # add_module registers the layer's parameters with the Module
            # even though l_fc itself is a plain Python list.
            self.add_module("l_fc_"+str(i), l)
        self.l_out_q_val = torch.nn.Linear(in_nn, ACTION_DIM)  # q-value prediction
        self.opt = torch.optim.RMSprop(self.parameters(), lr=OPT_LR, alpha=OPT_ALPHA)
        self.loss_f = torch.nn.MSELoss()
        self.cuda()

    def forward(self, batch):
        """
        Params:
            batch = batch of input states = FEATURE_DIM*2
        Returns:
            one Q-value per action for each state in the batch
        """
        flow = batch
        for l in self.l_fc:
            flow = F.relu(l(flow))
        a_out_q_val = self.l_out_q_val(flow)
        return a_out_q_val

    def copy_weights(self, other, rho=TARGET_RHO):
        """
        Soft (Polyak) update: self <- rho*other + (1-rho)*self,
        applied parameter-wise.
        """
        params_other = list(other.parameters())
        params_self = list(self.parameters())
        for i in range(len(params_other)):
            val_self = params_self[i].data
            val_other = params_other[i].data
            val_new = rho * val_other + (1-rho) * val_self
            params_self[i].data.copy_(val_new)

    def train_network(self, s, action, q_):
        """
        We are considering gamma = 1
        Params:
            s = batch of input states = FEATURE_DIM *2
            action = batch of actions performed
            q_ = batch of TD errors
        Outputs:
            adjust the network parameters to minimize loss
        """
        s = Variable(s)
        action = Variable(action)
        q_ = Variable(q_)
        ## self(s) == call forward function
        q_pred = self(s).gather(1, action.view(-1,1)) # we have results only for performed actions
        loss_q = self.loss_f(q_pred, q_)
        self.opt.zero_grad()
        loss_q.backward()
        # Bug fix: gradient clipping must happen AFTER backward() has
        # populated the gradients and before the optimizer step.  The
        # original clipped right after zero_grad() and before backward(),
        # which clipped zeroed/stale gradients and left the fresh gradients
        # unclipped.
        torch.nn.utils.clip_grad_norm_(self.parameters(), OPT_MAX_NORM)
        self.opt.step()

    def set_lr(self, lr):
        """Set the learning rate of the attached optimizer in place."""
        for param_group in self.opt.param_groups:
            param_group['lr'] = lr
4869369 | from django.urls import reverse
from django.contrib.auth.models import Group
from functional_tests.base import FunctionalTest
from apps.accounts.models import User
from apps.accounts.tests.test_models import create_new_user
from apps.accounts.tests.test_views import accounts_route_questionnaires
# @patch('wocat.views.generic_questionnaire_list')
# @patch.object(WocatAuthenticationBackend, 'authenticate')
# class LoginTest(FunctionalTest):
#
# def test_login(
# self, mock_authenticate, mock_get_user_id, mock_get_and_update,
# mock_questionnaire_list
# ):
#
# user = create_new_user()
#
# mock_get_and_update.return_value = user
# mock_authenticate.return_value = None
# mock_authenticate.__name__ = ''
# mock_get_user_id.return_value = user.id
# mock_questionnaire_list.return_value = {}
#
# # Alice opens her web browser and goes to the home page
# self.browser.get(self.live_server_url)
#
# # She sees the top navigation bar with the login button, on which she
# # clicks.
# navbar = self.findBy('class_name', 'top-bar')
# navbar.find_element_by_link_text('Login').click()
#
# # She tries to submit the form empty and sees that the form was
# # not submitted.
# self.findBy('id', 'button_login').click()
# self.findBy('name', 'username')
#
# # She enters some (wrong) user credentials
# self.findBy('name', 'username').send_keys('<EMAIL>')
# self.findBy('name', 'password').send_keys('wrong')
#
# # She tries to submit the form and sees an error message
# self.findBy('id', 'button_login').click()
# self.checkOnPage('Please enter a correct email address and password.')
#
# mock_authenticate.return_value = user
# self.browser.add_cookie({'name': 'fe_typo_user', 'value': 'session_id'})
#
# # She enters some (correct) user credentials
# self.findBy('name', 'password').send_keys('<PASSWORD>')
# self.findBy('id', 'button_login').click()
#
# # She sees that she was redirected to the landing page
# self.assertEqual(self.browser.current_url,
# self.live_server_url + reverse('wocat:home'))
# self.checkOnPage(user.get_display_name())
# self.checkOnPage('Logout')
class UserTest(FunctionalTest):
    """Checks which admin navigation entries each user role can see."""

    fixtures = [
        'groups_permissions',
    ]

    def test_superusers(self):
        # Superusers: admin link AND search dashboard link.
        user = create_new_user()
        user.is_superuser = True
        user.save()
        self.doLogin(user)
        # Superusers see the link to the administration
        self.findBy(
            'xpath', '//ul[@class="dropdown"]/li/a[@href="/admin/"]')
        # Superusers see the link to the Dashboard
        self.findBy(
            'xpath', '//ul[@class="dropdown"]/li/a[contains(@href, "search/'
            'admin")]')

    def test_administrators(self):
        # Administrators (group pk=1): admin link yes, dashboard no.
        user = create_new_user()
        user.groups.set([Group.objects.get(pk=1)])
        self.doLogin(user)
        # Administrators see the link to the administration
        self.findBy(
            'xpath', '//ul[@class="dropdown"]/li/a[@href="/admin/"]')
        # Administrators do not see the link to the Dashboard
        self.findByNot(
            'xpath', '//ul[@class="dropdown"]/li/a[contains(@href, "search/'
            'admin")]')

    def test_moderators(self):
        # Moderators (group pk=3): neither link.
        user = create_new_user()
        group = Group.objects.get(pk=3)
        # NOTE(review): `group.user = user` does not actually add the user to
        # the group (compare user.groups.set([...]) in the other tests).  The
        # negative assertions below hold for any logged-in user without the
        # group, so this test may pass for the wrong reason -- confirm.
        group.user = user
        self.doLogin(user)
        # Moderators do not see the link to the administration
        self.findByNot(
            'xpath', '//ul[@class="dropdown"]/li/a[@href="/admin/"]')
        # Moderators do not see the link to the Dashboard
        self.findByNot(
            'xpath', '//ul[@class="dropdown"]/li/a[contains(@href, "search/'
            'admin")]')

    def test_translators(self):
        # Translators (group pk=2): admin link yes, dashboard no.
        user = create_new_user()
        user.groups.set([Group.objects.get(pk=2)])
        self.doLogin(user)
        # Translators see the link to the administration
        self.findBy(
            'xpath', '//ul[@class="dropdown"]/li/a[@href="/admin/"]')
        # Translators do not see the link to the Dashboard
        self.findByNot(
            'xpath', '//ul[@class="dropdown"]/li/a[contains(@href, "search/'
            'admin")]')
# @patch('accounts.authentication.WocatAuthenticationBackend._do_auth')
# class LogoutTest(FunctionalTest):
# def test_logout(self, mock_do_auth):
# mock_do_auth.return_value = ('tempsessionid')
# # Alice logs in
# self.doLogin('<EMAIL>', 'foo')
# # She sees a logout button in the top navigation bar and clicks on it
# navbar = self.findBy('class_name', 'top-bar')
# navbar.find_element_by_link_text('Logout').click()
# # She notices she was redirected to the home page and is now logged
# # out (the top bar showing a login button)
# self.assertEqual(self.browser.current_url, self.live_server_url + '/')
# navbar = self.findBy('class_name', 'top-bar')
# navbar.find_element_by_link_text('Login')
class ModerationTest(FunctionalTest):
    """Checks which questionnaires each moderation role sees in its list."""

    fixtures = [
        'groups_permissions',
        'global_key_values',
        'sample',
        'sample_questionnaire_status',
        'sample_user',
    ]

    def test_user_questionnaires(self):
        # Users from the sample_user fixture with different roles.
        user_alice = User.objects.get(pk=101)
        user_moderator = User.objects.get(pk=103)
        user_secretariat = User.objects.get(pk=107)

        # Alice logs in
        self.doLogin(user=user_alice)

        # She logs in as moderator and sees that she can access the view
        self.doLogin(user=user_moderator)
        self.browser.get(self.live_server_url + reverse(
            accounts_route_questionnaires))
        # Wait until the AJAX spinner disappears before asserting on content.
        self.wait_for(
            'xpath', '//img[@src="/static/assets/img/ajax-loader.gif"]',
            visibility=False)

        # She sees all the Questionnaires which are submitted plus the one where
        # he is compiler
        self.findBy(
            'xpath', '(//article[contains(@class, "tech-item")])[1]//a['
            'contains(text(), "Foo 6")]',
            wait=True)
        self.findBy(
            'xpath', '(//article[contains(@class, "tech-item")])[2]//a['
            'contains(text(), "Foo 2")]',
            wait=True)
        self.findBy(
            'xpath', '(//article[contains(@class, "tech-item")])[3]//a['
            'contains(text(), "Foo 8")]',
            wait=True)
        list_entries = self.findManyBy(
            'xpath', '//article[contains(@class, "tech-item")]')
        self.assertEqual(len(list_entries), 3)

        # He logs in as WOCAT secretariat
        self.doLogin(user=user_secretariat)
        self.browser.get(self.live_server_url + reverse(
            accounts_route_questionnaires))
        self.wait_for(
            'xpath', '//img[@src="/static/assets/img/ajax-loader.gif"]',
            visibility=False)

        # She sees all the Questionnaires (2 drafts, 2 submitted, 2 reviewed and
        # 1 rejected)
        self.findBy(
            'xpath', '(//article[contains(@class, "tech-item")])[1]//a['
            'contains(text(), "Foo 1")]',
            wait=True)
        self.findBy(
            'xpath', '(//article[contains(@class, "tech-item")])[2]//a['
            'contains(text(), "Foo 6")]',
            wait=True)
        self.findBy(
            'xpath', '(//article[contains(@class, "tech-item")])[3]//a['
            'contains(text(), "Foo 2")]',
            wait=True)
        self.findBy(
            'xpath', '(//article[contains(@class, "tech-item")])[4]//a['
            'contains(text(), "Foo 8")]',
            wait=True)
        self.findBy(
            'xpath', '(//article[contains(@class, "tech-item")])[5]//a['
            'contains(text(), "Foo 7")]',
            wait=True)
        self.findBy(
            'xpath', '(//article[contains(@class, "tech-item")])[6]//a['
            'contains(text(), "Foo 9")]',
            wait=True)
        list_entries = self.findManyBy(
            'xpath', '//article[contains(@class, "tech-item")]')
        self.assertEqual(len(list_entries), 6)
| StarcoderdataPython |
366493 | <reponame>ShaonMajumder/documentor<gh_stars>0
#from distutils.core import setup
from setuptools import setup
from os import path

# read the contents of your README file
# (rendered as the long description on the PyPI project page)
this_directory = path.abspath(path.dirname(__file__))
with open(path.join(this_directory, 'README.md'), encoding='utf-8') as f:
    long_description = f.read()

setup(
    name = 'documentor',
    packages = ['documentor'],
    version = '0.0.0.21.1',
    long_description=long_description,
    long_description_content_type='text/markdown',
    author = '<NAME>',
    author_email = '<EMAIL>',
    url = 'https://github.com/ShaonMajumder/documentor',
    download_url = 'https://github.com/ShaonMajumder/documentor/archive/0.0.0.21.1.tar.gz',
    keywords = ['shaon', 'document generator', 'documentation'],
    classifiers = [],
    # Expose `documentor` as a console command pointing at documentor.main.
    entry_points={
        'console_scripts': [
            'documentor=documentor.documentor:main',
        ],
    },
)
9607180 | <gh_stars>1-10
#!/usr/bin/env python3
import argparse
import arviz as az
import copy
import csv
import engineering_notation
import hashlib
import numpy as np
import os
import pandas as pd
import platform
import pymc3 as pm
import shutil
import scipy.stats
import statistics
import subprocess
import sys
import time
import tqdm
import warnings
# Miscellaneous functions
def eng(x):
    """Format *x* in engineering notation with 3 significant digits."""
    return engineering_notation.EngNumber(x, precision=3)
def delay(delay_var, delay_name):
    """Sleep for *delay_var* seconds, announcing the pause when a name is given.

    Non-positive delays are a no-op.  The announcement is only printed when
    *delay_name* is not None.
    """
    if delay_var <= 0:
        return
    if delay_name is not None:
        print('Sleeping between {} for {}s'.format(delay_name, delay_var),
              flush=True)
    time.sleep(delay_var)
def save_to_csv(filename, num_runs, tests, baseline_src, files, serial_time,
                parallel_time):
    """Dump per-run serial and parallel timings to a CSV file.

    One row per (test, run): test name, 1-based run number, the serial
    runtime, then one column per parallel source (baseline first, then the
    optimized files in order).
    """
    all_files = [baseline_src] + files
    with open(filename, 'w', newline='') as csvfile:
        writer = csv.writer(csvfile, delimiter=',')
        writer.writerow(['Test', 'Run', 'Serial'] + all_files)
        data_rows = (
            [test, run + 1, serial_time[test][run]]
            + [parallel_time[test][f][run] for f in all_files]
            for test in tests
            for run in range(num_runs)
        )
        writer.writerows(data_rows)
# Compilation
def generate_makefile(generator, c_compiler, cxx_compiler):
    """Recreate build/ and run cmake to configure a Release build.

    :param generator: cmake generator argument string; it is concatenated
        directly before the compiler options, so it is expected to carry its
        own trailing space (confirm at the call site).
    :param c_compiler: value for CMAKE_C_COMPILER, or None for the default.
    :param cxx_compiler: value for CMAKE_CXX_COMPILER, or None.

    NOTE(review): c_flags and cxx_flags are read below but are neither
    parameters nor defined in this chunk -- presumably module-level globals
    set by the argument parser further down the file; confirm before
    refactoring (adding defaulted parameters here would shadow them).
    """
    shutil.rmtree('build', ignore_errors=True)
    os.mkdir('build')
    os.chdir('build')
    compiler_string = ''
    if c_compiler is not None:
        compiler_string += '-DCMAKE_C_COMPILER=' + c_compiler + ' '
    if cxx_compiler is not None:
        compiler_string += '-DCMAKE_CXX_COMPILER=' + cxx_compiler + ' '
    if c_flags is not None:
        compiler_string += '-DCMAKE_C_FLAGS=' + c_flags + ' '
    if cxx_flags is not None:
        compiler_string += '-DCMAKE_CXX_FLAGS=' + cxx_flags + ' '
    os.system('cmake ' + generator + compiler_string +
              '-DCMAKE_BUILD_TYPE=Release ..')
    os.chdir('..')
def compile_code():
    """Build the configured project in Release mode, aborting on failure."""
    # Cleaning is required because cmake sometimes doesn't recognize changes
    # after copying a new version of the source file in compile_all().
    subprocess.run(['cmake', '--build', 'build', '--target', 'clean'])
    r = subprocess.run(['cmake', '--build', 'build', '--config', 'Release'])
    if r.returncode != 0:
        sys.stderr.write("Compile error\n")
        sys.exit(1)
def compile_all(executable_path, parallel_executable, executable_extension,
                parallel_extension, parallel_src, baseline_src, files):
    """Compile the baseline and every candidate source into its own executable.

    Each source file is copied over the single build target (parallel_src),
    the project is rebuilt, and the produced executable is renamed after the
    source, so all variants can later be benchmarked side by side.
    """
    shutil.copy(os.path.join('src', baseline_src + parallel_extension),
                os.path.join('src', parallel_src + parallel_extension))
    compile_code()
    shutil.copy(os.path.join(executable_path,
                             parallel_executable + executable_extension),
                os.path.join(executable_path,
                             baseline_src + executable_extension))
    for f in files:
        shutil.copy(os.path.join('src', f + parallel_extension),
                    os.path.join('src', parallel_src + parallel_extension))
        compile_code()
        shutil.copy(os.path.join(executable_path,
                                 parallel_executable + executable_extension),
                    os.path.join(executable_path, f + executable_extension))
# Statistical functions
def create_pd_data_frame(serial, bl, opt):
    """Stack all timing samples into one long-format DataFrame.

    Columns: 'value' (the runtime sample) and 'group' ('serial', 'bl', or the
    optimized-source name the sample belongs to), in that listing order.
    """
    labelled = [('serial', serial), ('bl', bl)]
    labelled.extend(opt.items())
    value = np.concatenate([np.asarray(times) for _, times in labelled])
    group = np.concatenate([np.repeat(label, len(times))
                            for label, times in labelled])
    return pd.DataFrame(dict(value=value, group=group))
def statistical_analysis(serial, bl, opt):
    """BEST-style Bayesian comparison of serial, baseline and optimized runs.

    Fits Student-T likelihoods with per-group mean/std and a shared normality
    parameter, then summarizes the posterior.

    Returns (runtime, speedup, improv, prob): dicts keyed by 'serial', 'bl'
    and 'opt_<file>', where runtime/speedup/improv values are
    (HDI low, posterior mean, HDI high) tuples and prob is the posterior
    probability that the change is an improvement (difference > 0).
    """
    # Inspired by https://docs.pymc.io/notebooks/BEST.html
    y = create_pd_data_frame(serial, bl, opt)
    μ_m = y.value.mean()
    μ_s = y.value.std()
    # NOTE(review): σ_low/σ_high are computed but never used below -- the
    # same bounds are re-derived inline in each pm.Uniform; presumably
    # leftovers.
    σ_low = µ_s/1000
    σ_high = µ_s*1000
    with pm.Model() as model:
        # Very wide priors on each group's mean and standard deviation.
        serial_mean = pm.Normal('serial_mean', mu=µ_m, sd=1000*µ_s)
        serial_std = pm.Uniform('serial_std', lower=µ_s/1000, upper=1000*µ_s)
        λ_serial = serial_std**-2
        bl_mean = pm.Normal('bl_mean', mu=μ_m, sd=1000*μ_s)
        bl_std = pm.Uniform('bl_std', lower=µ_s/1000, upper=1000*µ_s)
        λ_bl = bl_std**-2
        opt_mean = {}
        opt_std = {}
        λ_opt = {}
        for f in opt:
            opt_mean[f] = pm.Normal('opt_{}_mean'.format(f),
                                    mu=μ_m, sd=1000*μ_s)
            opt_std[f] = pm.Uniform('opt_{}_std'.format(f),
                                    lower=µ_s/1000, upper=1000*µ_s)
            λ_opt[f] = opt_std[f]**-2
        # Shared normality parameter (ν >= 1) for all Student-T likelihoods.
        ν = pm.Exponential('ν_minus_one', 1/29.) + 1
        dist_serial = pm.StudentT('serial', nu=ν, mu=serial_mean,
                                  lam=λ_serial, observed=serial)
        dist_bl = pm.StudentT('bl', nu=ν, mu=bl_mean, lam=λ_bl, observed=bl)
        dist_opt = {}
        for f in opt:
            dist_opt[f] = pm.StudentT('opt_{}'.format(f), nu=ν, mu=opt_mean[f],
                                      lam=λ_opt[f], observed=opt[f])
        # Derived quantities, recorded in the trace by name.
        dmean_serial_bl = pm.Deterministic('dmean_serial_bl',
                                           serial_mean - bl_mean)
        dmean_bl_opt = {}
        for f in opt:
            dmean_bl_opt[f] = pm.Deterministic('dmean_bl_opt_{}'.format(f),
                                               bl_mean - opt_mean[f])
        speedup_bl = pm.Deterministic('speedup_bl', serial_mean/bl_mean)
        speedup_opt = {}
        improv_opt = {}
        for f in opt:
            # NOTE(review): these rebind the dicts created above to the last
            # Deterministic; the values are only read back from the trace by
            # name, so this appears harmless -- confirm.
            speedup_opt = pm.Deterministic('speedup_opt_{}'.format(f),
                                           serial_mean/opt_mean[f])
            improv_opt = pm.Deterministic('improv_opt_{}'.format(f),
                                          bl_mean/opt_mean[f])
        trace = pm.sample(draws=3000, tune=2000)
    # (result key, trace variable name) pairs for each summary table.
    res1 = [('serial', 'serial_mean'), ('bl', 'bl_mean')]
    res2 = [('bl', 'speedup_bl')]
    res3 = []
    res4 = [('bl', 'dmean_serial_bl')]
    for f in opt:
        res1 += [('opt_{}'.format(f), 'opt_{}_mean'.format(f))]
        res2 += [('opt_{}'.format(f), 'speedup_opt_{}'.format(f))]
        res3 += [('opt_{}'.format(f), 'improv_opt_{}'.format(f))]
        res4 += [('opt_{}'.format(f), 'dmean_bl_opt_{}'.format(f))]
    runtime = {}
    for r in res1:
        tr = trace[r[1]]
        hdi = az.hdi(tr)
        runtime[r[0]] = (hdi[0], tr.mean(), hdi[1])
    speedup = {}
    for r in res2:
        tr = trace[r[1]]
        hdi = az.hdi(tr)
        speedup[r[0]] = (hdi[0], tr.mean(), hdi[1])
    improv = {}
    for r in res3:
        tr = trace[r[1]]
        hdi = az.hdi(tr)
        improv[r[0]] = (hdi[0], tr.mean(), hdi[1])
    prob = {}
    for r in res4:
        tr = trace[r[1]]
        # Fraction of posterior mass where the difference is positive,
        # i.e. the second variant is faster.
        prob[r[0]] = (tr > 0).sum()/len(tr)
    return (runtime, speedup, improv, prob)
def compute_summary_statistics(tests, baseline_src, files, serial_time,
                               parallel_time):
    """Per-test mean/SD runtimes and mean speedups (no Bayesian model).

    Returns (runtime_mean, runtime_sd, speedup): dicts keyed by test name,
    whose values are dicts keyed by 'serial', 'bl' and each optimized file.
    """
    runtime_mean = {}
    runtime_sd = {}
    speedup = {}
    for test in tests:
        runtime_mean[test] = {}
        runtime_sd[test] = {}
        speedup[test] = {}
        # Separate the baseline timings from the optimized variants.
        parallel_time_opt = dict(parallel_time[test])
        del parallel_time_opt[baseline_src]
        y = create_pd_data_frame(serial_time[test],
                                 parallel_time[test][baseline_src],
                                 parallel_time_opt)
        runtime_mean[test]['serial'] = y[y['group'] == 'serial'].mean().value
        runtime_sd[test]['serial'] = y[y['group'] == 'serial'].std().value
        runtime_mean[test]['bl'] = y[y['group'] == 'bl'].mean().value
        runtime_sd[test]['bl'] = y[y['group'] == 'bl'].std().value
        speedup[test]['bl'] = runtime_mean[test]['serial'] \
            / runtime_mean[test]['bl']
        for f in parallel_time_opt:
            runtime_mean[test][f] = y[y['group'] == f].mean().value
            runtime_sd[test][f] = y[y['group'] == f].std().value
            speedup[test][f] = runtime_mean[test]['serial'] \
                / runtime_mean[test][f]
    return (runtime_mean, runtime_sd, speedup)
def compute_full_statistics(tests, baseline_src, files, serial_time,
                            parallel_time):
    """Run the Bayesian analysis for every test.

    Returns (runtime, speedup, improv, prob) dicts keyed by test name; see
    statistical_analysis for the per-test value format.
    """
    runtime = {}
    speedup = {}
    improv = {}
    prob = {}
    for test in tests:
        print('\nComputing statistics for test {}'.format(test))
        # Separate the baseline timings from the optimized variants.
        parallel_time_opt = dict(parallel_time[test])
        del parallel_time_opt[baseline_src]
        (runtime[test],
         speedup[test],
         improv[test],
         prob[test]) = statistical_analysis(serial_time[test],
                                            parallel_time[test][baseline_src],
                                            parallel_time_opt)
    return (runtime, speedup, improv, prob)
def compute_and_print_summary_statistics(tests, baseline_src, files,
                                         serial_time, parallel_time):
    """Print mean (SD) runtimes and speedups for the baseline and each file."""
    print('\nSummary statistics, format = mean (SD):')
    (runtime_mean, runtime_sd, speedup) = compute_summary_statistics(
        tests, baseline_src, files, serial_time, parallel_time)
    print('\tBaseline:')
    for test in tests:
        print(
            '\t\tTest {}: speedup = {:.4f}x, '
            'tser = {}s ({}s), '
            'tpar = {}s ({}s)'.format(
                test,
                speedup[test]['bl'],
                eng(runtime_mean[test]['serial']),
                eng(runtime_sd[test]['serial']),
                eng(runtime_mean[test]['bl']),
                eng(runtime_sd[test]['bl'])
            )
        )
    for f in files:
        print('\tFile {}:'.format(f))
        for test in tests:
            # NOTE(review): key is unused in this loop -- presumably a
            # leftover from the full-statistics printer below.
            key = 'opt_{}'.format(f)
            print(
                '\t\tTest {}: speedup = {:.4f}x, '
                'tser = {}s ({}s), '
                'tpar = {}s ({}s)'.format(
                    test,
                    speedup[test][f],
                    eng(runtime_mean[test]['serial']),
                    eng(runtime_sd[test]['serial']),
                    eng(runtime_mean[test][f]),
                    eng(runtime_sd[test][f])
                )
            )
def compute_and_print_full_statistics(tests, baseline_src, files, serial_time,
                                      parallel_time):
    """Run the Bayesian analysis and print HDIs and posterior probabilities."""
    print('\nComputing full statistics, this may take a while')
    (runtime, speedup, improv, prob) = compute_full_statistics(
        tests, baseline_src, files, serial_time, parallel_time)
    print('\nStatistics:')
    print('\tBaseline:')
    for test in tests:
        # Tuples are (HDI low, mean, HDI high); index 1 is the mean.
        print(
            '\t\tTest {}:\n'
            '\t\t\tavg speedup = {:.4f}x HDI = ({:.4f}x,{:.4f}x)\n'
            '\t\t\tavg tser = {}s HDI = ({},{})\n'
            '\t\t\tavg tpar = {}s HDI = ({},{})\n'
            '\t\t\tP(tpar < tser) = {:.1f}%'.format(
                test,
                speedup[test]['bl'][1],
                speedup[test]['bl'][0],
                speedup[test]['bl'][2],
                eng(runtime[test]['serial'][1]),
                eng(runtime[test]['serial'][0]),
                eng(runtime[test]['serial'][2]),
                eng(runtime[test]['bl'][1]),
                eng(runtime[test]['bl'][0]),
                eng(runtime[test]['bl'][2]),
                100*prob[test]['bl']
            )
        )
    for f in files:
        print('\tFile {}:'.format(f))
        for test in tests:
            key = 'opt_{}'.format(f)
            print(
                '\t\tTest {}:\n'
                '\t\t\tavg speedup = {:.4f}x HDI = ({:.4f}x,{:.4f}x)\n'
                '\t\t\tavg improvement over baseline = {:.4f}x '
                'HDI = ({:.4f}x,{:.4f}x)\n'
                '\t\t\tavg topt = {}s HDI = ({},{})\n'
                '\t\t\tP(topt < tbl) = {:.1f}%'.format(
                    test,
                    speedup[test][key][1],
                    speedup[test][key][0],
                    speedup[test][key][2],
                    improv[test][key][1],
                    improv[test][key][0],
                    improv[test][key][2],
                    eng(runtime[test][key][1]),
                    eng(runtime[test][key][0]),
                    eng(runtime[test][key][2]),
                    100*prob[test][key]
                )
            )
# Test runners
def run_one_test(file_path, test_path, pbar=None, pbar_value=None):
    """Run one benchmark executable on one test input file.

    The executable is expected to print its result on stdout and its runtime
    in seconds on stderr.  Returns (exec_time, sha256 hex digest of stdout);
    the time is floored at 1 microsecond so later speedup ratios never divide
    by zero.  (pbar/pbar_value are accepted for interface compatibility but
    unused.)
    """
    completed = subprocess.run([file_path, test_path],
                               stdout=subprocess.PIPE,
                               stderr=subprocess.PIPE)
    digest = hashlib.sha256(completed.stdout).hexdigest()
    measured = max(float(completed.stderr), 1e-6)
    return (measured, digest)
def run_all_tests(num_runs, tests, executable_path, executable_extension,
                  parallel_src, baseline_src, files, delay_run=None,
                  initial_run=True):
    """Run every test `num_runs` times on the serial, baseline and optimized
    executables, collecting execution times.

    Each parallel run's stdout hash is compared against the serial run's
    hash for the same test; a mismatch aborts the whole script. When
    `initial_run` is True the progress bar advances once per executable
    invocation; otherwise it advances once per full repetition.

    Returns:
        (serial_time, parallel_time): serial_time maps test -> list of
        times; parallel_time maps test -> file -> list of times.
    """
    serial_hash = {}
    serial_time = {}
    parallel_time = {}
    # Baseline is measured alongside the optimized files
    all_files = [baseline_src] + files
    for test in tests:
        serial_time[test] = []
        serial_hash[test] = []
        parallel_time[test] = {}
        for f in all_files:
            parallel_time[test][f] = []
    if initial_run is True:
        # One progress tick per executable: serial (+1) plus each file
        total = len(tests)*(len(all_files) + 1)
    else:
        total = num_runs
    with tqdm.tqdm(total=total,
                   bar_format='{l_bar}{bar}|{elapsed}<{remaining} ') as pbar:
        for _ in range(num_runs):
            for test in tests:
                test_path = os.path.join('tests', test + '.in')
                serial_path = os.path.join(executable_path,
                                           'serial' + executable_extension)
                # Optional cool-down before each run (thermal throttling)
                if delay_run is not None:
                    delay(delay_run[test], None)
                (t, h) = run_one_test(serial_path, test_path)
                if initial_run is True:
                    pbar.update()
                serial_time[test].append(t)
                # Reference hash for comparing parallel outputs below
                serial_hash[test] = h
                for f in all_files:
                    parallel_path = os.path.join(executable_path,
                                                 f + executable_extension)
                    if delay_run is not None:
                        delay(delay_run[test], None)
                    (t, h) = run_one_test(parallel_path, test_path)
                    if initial_run is True:
                        pbar.update()
                    # A parallel executable must produce the exact same
                    # output as the serial version, else it is broken
                    if h != serial_hash[test]:
                        sys.stderr.write(
                            'Error in test {}: output of file {} is different '
                            'from serial code\n'.format(test, f))
                        sys.exit(1)
                    parallel_time[test][f].append(t)
            if initial_run is False:
                pbar.update()
    return (serial_time, parallel_time)
# HOW TO USE THIS SCRIPT
#
# Save it in the folder that contains CMakeLists.txt.
#
# Install required packages using pip: arviz, engineering_notation, pandas,
# pymc3, numpy, scipy and tqdm. Under Windows, it's recommended to install
# conda (https://docs.conda.io/en/latest/miniconda.html), and use it to install
# a version of gcc (via "conda install m2w64-toolchain"). This speeds up the
# statistical processing routines immensely.
#
# Save source files that you want to test in the src/ folder. Choose one
# to be the baseline result, the one to be compared against other (presumably
# optimized) versions. The baseline file name should be placed in the
# 'baseline_src' variable below, whereas the list of optimized files should
# be placed in the 'files' variable.
#
# Go through the definitions below, checking if they make sense. In
# principle the file names ('parallel_src', 'baseline_src' and 'files') need
# to be changed from one exercise to the next, and you should select the
# appropriate compilers for your environment, in variables 'c_compiler' and
# 'cxx_compiler' (the default setting of None allows CMake to choose the
# compiler automatically).
#
# Finally, just run the script. It should perform all testing automatically
# and report figures at the end.
#
# Note this has been tested on macOS 10.15 and Windows 10 using Cygwin and
# Visual Studio. It may have errors in other platforms; if you fix them,
# please send a pull request.
# HOW TO INTERPRET THE OUTPUT
#
# Here is some sample output:
#
###############################################################################
#
# Statistics:
# Baseline:
# Test 1:
# avg speedup = 2.3907x HDI = (2.3897x,2.3918x)
# avg tser = 206.216ms HDI = (206.156m,206.280m)
# avg tpar = 86.257ms HDI = (86.231m,86.284m)
# P(tpar < tser) = 100.0%
#
# ...
#
# Test 5:
# avg speedup = 2.3575x HDI = (2.3568x,2.3581x)
# avg tser = 40.891s HDI = (40.887,40.895)
# avg tpar = 17.345s HDI = (17.341,17.349)
# P(tpar < tser) = 100.0%
# File xxx:
# Test 1:
# avg speedup = 3.885661x HDI = (3.8836x,3.8879x)
# avg improvement over baseline = 1.6253x HDI = (1.6244x,1.6262x)
# avg topt = 53.071ms HDI = (53.048m,53.096m)
# P(topt < tbl) = 100.0%
#
# ...
#
# Test 5:
# avg speedup = 3.973781x HDI = (3.9719x,3.9756x)
# avg improvement over baseline = 1.6856x HDI = (1.6847x,1.6864x)
# avg topt = 10.290s HDI = (10.286,10.295)
# P(topt < tbl) = 100.0%
###############################################################################
#
# Results are grouped by file (baseline and optimized files) and then by test
# number.
#
# Speedups are given as the ratio of the serial and parallel execution times.
# Improvements of the optimized version over the baseline parallel version are
# given by the ratio of the baseline parallel execution time by the optimized
# parallel execution time. The execution times of each version are also
# reported.
#
# Bayesian analysis is performed on the datasets to obtain a point estimate
# (mean of each variable) as well as 95% credible intervals for each -- i.e.
# the real mean has 95% chance of lying within this interval. Also reported is
# the probability that the execution time for one version is smaller than the
# execution time for another one; if low or high enough (say, < 5% or > 95%),
# then such a difference cannot reasonably be attributed to noise only.
#
# Note that intersecting credible intervals or non-extreme probabilities may
# either indicate there is no discernible difference between the execution time
# of the baseline and optimized files, or that the number of runs is
# insufficient to declare that a statistically significant difference exists.
# It's up to you to interpret which one is the case, and if necessary, run the
# script again increasing the 'num_runs' variable to seek a definitive answer.
# BEST PRACTICES FOR BENCHMARKING
#
# For best results, you should close off as many apps/services as possible,
# as anything competing for CPU time will potentially poison the results.
#
# It's also recommended to disable Turbo Boost or equivalent technology in
# your CPU, since clock speeds are usually higher when a single core is in
# use (e.g. when running serial code) and lower when multiple cores are in
# use (e.g. when running parallel code). This means your speedups will be
# too pessimistic. In addition, clock speed may vary throughout the run,
# even if you have a good cooling solution, further poisoning the results.
# See https://bit.ly/2H2eChG for more details on Turbo Boost issues.
# Disabling Turbo Boost is OS-dependent. For macOS, there is an app called
# Turbo Boost Switcher. You may need to search for similar apps for other
# OSes.
#
# When CPUs reach high temperatures (e.g. 100 ºC), they forcefully reduce
# clock speeds to avoid damaging the processor. This poisons the results, and
# if your hardware is under the risk of thermal throttling (especially
# laptops), you should take steps to mitigate the issues.
#
# Disabling Turbo Boost as suggested previously may be enough to solve the
# problem, as the CPU will dissipate much less power.
#
# It's also suggested that you configure your fans to run at full speed all
# the time while testing. They may take too long to ramp up, and reach the
# temperature threshold (and therefore activate thermal throttling).
#
# If, despite previous suggestions, temperatures are still high enough that
# thermal throttling kicks in (even if infrequently), it's suggested that
# you add delays between runs to let your CPU cool off (see the 'delay_run'
# variable below). You'll need to experiment with different values to see
# the minimum required to avoid thermal throttling.
#
# Under Windows, you may want to change the power plan to "High performance"
# (in Control Panel -> Hardware and Sound -> Power Options). This uses more
# power, but keeps the CPU at its nominal clock speed all the time, removing
# a source of timing variability, as the CPU may take some time to "spool up"
# from a lower clock speed (for power savings) to a higher one for
# CPU-intensive workloads. See https://bit.ly/3163kjD for more details.
if __name__ == '__main__':
    # DEFINITIONS
    # How many repeated runs (of all files and tests)
    # Use more if needed to achieve statistical significance
    num_runs = 30
    # Which tests to run (one for each file in the 'tests' folder)
    # Do not add .in extension, it's done automatically
    tests = ['1', '2', '3', '4', '5']
    # Folder for executable files; may be different under
    # Windows when using VS's version of cmake
    executable_path = 'build'  # os.path.join('build', 'Release')
    # Default name for the executable of the parallel code
    # In principle this shouldn't be changed
    parallel_executable = 'parallel'
    # Extension for parallel files, typically either '.c' or '.cu'
    parallel_extension = '.c'
    # Default name for the source file of the parallel code (note this file
    # will be overwritten by the script with the baseline and optimized files)
    # This changes from one exercise to the next
    parallel_src = 'prime-parallel'
    # Name of your baseline file (the one others will be compared against)
    baseline_src = 'prime-parallel-baseline'
    # List of your optimized files to be compared against the baseline file
    files = ['prime-parallel-opt1']
    # Choice of compiler, or None to let CMake choose automatically
    c_compiler = None  # 'gcc-10' # 'icl' # 'x86_64-w64-mingw32-gcc-10'
    cxx_compiler = None  # 'g++-10' # 'icl' # 'x86_64-w64-mingw32-g++'
    # Extra C and C++ compiler flags that may be required for some reason
    # Use None if no extra flags are necessary
    c_flags = None
    cxx_flags = None
    # As explained above, this adds a delay between each run of the code to
    # cool off the CPU and reduce, or ideally prevent, thermal throttling. This
    # is a dict mapping of tests (as in, elements of the 'tests' variable
    # above) to the number of seconds the CPU will sleep after a given test.
    # The default value of None means no delays are inserted
    delay_run = None  # { '1': 0, '2': 0, '3': 1, '4': 3, '5': 6 }
    # Name of a file to save the measurements to (in CSV format), or None
    csv_output_file = None
    # OS-specific definitions
    if platform.system() != 'Windows':
        executable_extension = ''
        generator = ''
    else:
        executable_extension = '.exe'
        generator = ''
        # If using Visual Studio, use the generator line below
        # generator = '-G "NMake Makefiles" '
    # Beginning of the script
    # Silence all Python warnings (keeps the console output readable)
    warnings.simplefilter('ignore')
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '--configure',
        action='store_true',
        help='rerun CMake configure step (required after switching compilers)'
    )
    parser.add_argument(
        '--mode',
        choices=['compile-only', 'compile-and-test', 'full'],
        default='full',
        help='choose what operations the script will perform (default: full)'
    )
    parser.add_argument(
        '--statistics',
        choices=['summary', 'full'],
        default='full',
        help='print summary (quick) or full (Bayesian, very slow) statistics '
             '(default: full)'
    )
    args = parser.parse_args()
    # Announce the total amount of work before starting
    if delay_run is None:
        print('{} total runs: {} repetitions on {} executables '
              'for test files {}'.format(
                  num_runs*(1 + len(files))*len(tests),
                  num_runs,
                  1 + len(files),
                  tests)
              )
    else:
        print('{} total runs: {} repetitions on {} executables '
              'for test files {}, with {}-{}s delay between runs'.format(
                  num_runs*(1 + len(files))*len(tests),
                  num_runs,
                  1 + len(files),
                  tests,
                  min(delay_run.values()),
                  max(delay_run.values()))
              )
    # (Re)configure the build only when asked or on a fresh checkout
    if args.configure or not os.path.exists('build'):
        print('\nGenerating Makefiles')
        generate_makefile(generator, c_compiler, cxx_compiler)
    print('\nCompiling code')
    compile_all(executable_path, parallel_executable, executable_extension,
                parallel_extension, parallel_src, baseline_src, files)
    if args.mode == 'compile-only':
        sys.exit()
    # Single warm-up/validation pass before the timed runs
    print('\nStarting initial run')
    (st, pt) = run_all_tests(
        1, tests, executable_path, executable_extension, parallel_src,
        baseline_src, files, delay_run, initial_run=True
    )
    if args.mode == 'compile-and-test':
        sys.exit()
    print('\nStarting main run')
    (serial_time, parallel_time) = run_all_tests(
        num_runs, tests, executable_path, executable_extension,
        parallel_src, baseline_src, files, delay_run, initial_run=False
    )
    compute_and_print_summary_statistics(tests, baseline_src, files,
                                         serial_time, parallel_time)
    if args.statistics == 'full':
        compute_and_print_full_statistics(tests, baseline_src, files,
                                          serial_time, parallel_time)
    if csv_output_file is not None:
        save_to_csv(csv_output_file, num_runs, tests, baseline_src, files,
                    serial_time, parallel_time)
# TODOs:
# TODO: fewer runs if a test takes longer, more runs if it's shorter?
# Would that be enough for statistical significance?
# TODO: run some high-performance code for 10-50ms before running tests,
# to "spool up" TurboBoost
# TODO: select number of runs automatically to achieve statistical
# significance
# TODO: select delay between runs automatically (how?)
# TODO: measure actual execution time of each test run, and use this to
# update the progress bar more frequently (but still somewhat
# accurately)
# TODO: break up the project into different files, this single file is
# getting huge
| StarcoderdataPython |
1941517 | <reponame>goztrk/django-htk<filename>test_scaffold/tests.py
# Python Standard Library Imports
import re
# Django Imports
# https://docs.djangoproject.com/en/1.11/topics/testing/overview/#provided-test-case-classes
from django.test import TestCase
from django.test.client import Client
from django.urls import NoReverseMatch
from django.urls import reverse
#from django.utils import unittest
from django.utils.http import urlencode
# HTK Imports
from htk.test_scaffold.constants import TESTSERVER
from htk.test_scaffold.models import TestScaffold
from htk.test_scaffold.utils import create_test_email
from htk.test_scaffold.utils import create_test_password
from htk.test_scaffold.utils import create_test_user
class BaseTestCase(TestCase):
    """Base class for all test cases.

    Maintains a pool of pre-created test users; individual tests claim
    fresh users via `_assign_test_user`, and the pool grows on demand.
    """
    fixtures = ['initial_data',]

    def setUp(self):
        # Number of users already handed out from self.users
        self.assigned_users = 0
        self.users = []
        self._create_batch_test_users()

    def _create_batch_test_users(self):
        """Append a batch of 5 freshly-created users to the pool."""
        for x in range(5):
            self.users.append(create_test_user())

    def _assign_test_user(self):
        """Returns:
        a user previously not returned
        """
        if self.assigned_users >= len(self.users):
            self._create_batch_test_users()
        # Bug fix: index with the count *before* incrementing. The old
        # `self.users[self.assigned_users - 1]` returned users[-1] on the
        # first call, so the same user was eventually handed out twice.
        user = self.users[self.assigned_users]
        self.assigned_users += 1
        return user
class BaseWebTestCase(BaseTestCase):
    """Base class for other Web test cases.

    Sets up some commonly-used parameters, wraps Django's test client
    for GET/POST/PUT/DELETE requests by view name, and provides
    assertion helpers for response codes and redirect chains.
    """
    fixtures = ['initial_data',]

    def setUp(self):
        super(BaseWebTestCase, self).setUp()
        # disable prelaunch mode for unit tests
        TestScaffold.set_fake_prelaunch(prelaunch_mode=False, prelaunch_host=False)

    def _get_user_session(self):
        """Returns an authenticated user, its password, and authenticated client
        """
        user = self._assign_test_user()
        # Bug fix: this assignment had been mangled into an invalid
        # placeholder; generate a real test password before logging in.
        password = create_test_password()
        user.set_password(password)
        user.save()
        client = Client()
        success = client.login(username=user.username, password=password)
        self.assertTrue(success)
        return (user, password, client,)

    def _get_user_session_with_primary_email(self):
        """Returns an authenticated user, its primary email, password, and authenticated client
        """
        (user, password, client,) = self._get_user_session()
        # Imported here to avoid import-time side effects/cycles
        from htk.apps.accounts.utils import associate_user_email
        email = create_test_email()
        user_email = associate_user_email(user, email, confirmed=True)
        user_email.set_primary_email()
        return (user, email, password, client,)

    def _get(self, view_name, client=None, params=None, follow=False, view_args=None, view_kwargs=None, secure=False, **extra):
        """Wrapper for performing an HTTP GET request
        """
        params = {} if params is None else params
        view_args = [] if view_args is None else view_args
        view_kwargs = {} if view_kwargs is None else view_kwargs
        path = reverse(view_name, args=view_args, kwargs=view_kwargs)
        # isinstance (instead of `type(client) != Client`) also accepts
        # Client subclasses passed in by callers
        if not isinstance(client, Client):
            client = Client()
        response = client.get(path, data=params, follow=follow, secure=secure, **extra)
        return response

    def _post(self, view_name, client=None, params=None, get_params=None, follow=False, view_args=None, view_kwargs=None, secure=False, **extra):
        """Wrapper for performing an HTTP POST request
        """
        params = {} if params is None else params
        get_params = {} if get_params is None else get_params
        view_args = [] if view_args is None else view_args
        view_kwargs = {} if view_kwargs is None else view_kwargs
        path = reverse(view_name, args=view_args, kwargs=view_kwargs)
        # Optional query-string parameters on top of the POST body
        if get_params:
            query_string = urlencode(get_params)
            path = '%s?%s' % (path, query_string,)
        if not isinstance(client, Client):
            client = Client()
        response = client.post(path, data=params, follow=follow, secure=secure, **extra)
        return response

    def _put(self, view_name, client=None, params=None, follow=False, view_args=None, view_kwargs=None, secure=False, **extra):
        """Wrapper for performing an HTTP PUT request
        """
        params = {} if params is None else params
        view_args = [] if view_args is None else view_args
        view_kwargs = {} if view_kwargs is None else view_kwargs
        path = reverse(view_name, args=view_args, kwargs=view_kwargs)
        if not isinstance(client, Client):
            client = Client()
        response = client.put(path, data=params, follow=follow, secure=secure, **extra)
        return response

    def _delete(self, view_name, client=None, params=None, get_params=None, follow=False, view_args=None, view_kwargs=None, secure=False, **extra):
        """Wrapper for performing an HTTP DELETE request
        """
        params = {} if params is None else params
        get_params = {} if get_params is None else get_params
        view_args = [] if view_args is None else view_args
        view_kwargs = {} if view_kwargs is None else view_kwargs
        path = reverse(view_name, args=view_args, kwargs=view_kwargs)
        if get_params:
            query_string = urlencode(get_params)
            path = '%s?%s' % (path, query_string,)
        if not isinstance(client, Client):
            client = Client()
        response = client.delete(path, data=params, follow=follow, secure=secure, **extra)
        return response

    def _check_view_is_okay(self, view_name, client=None, params=None, follow=False, secure=False):
        """Assert that GETting `view_name` returns HTTP 200."""
        response = self._get(view_name, client=client, params=params, follow=follow, secure=secure)
        self.assertEqual(200,
                         response.status_code,
                         '[%s] got unexpected response code %d' %
                         (view_name,
                          response.status_code))

    def _check_view_does_not_exist(self, view_name, client=None, params=None, follow=False, message='View should not exist'):
        """Assert that `view_name` cannot even be reversed to a URL."""
        try:
            response = self._get(view_name, client=client, params=params, follow=follow)
        except NoReverseMatch:
            response = None
        self.assertIsNone(response, message)

    def _check_view_404(self, view_name, client=None, params=None, follow=False):
        """Assert that GETting `view_name` returns HTTP 404."""
        response = self._get(view_name, client=client, params=params, follow=follow)
        self.assertEqual(404, response.status_code)

    def _check_response_redirect_chain_empty(self, view_name, response, extra_message=''):
        """Checks that response.redirect_chain is empty
        """
        redirect_chain = response.redirect_chain
        self.assertEqual(0,
                         len(redirect_chain),
                         'Unexpected redirect, should have stayed on [%s]. %s' % (view_name, extra_message,))

    def _check_response_redirect_chain(
        self,
        view_name,
        another,
        response,
        extra_message='',
        secure=False,
        redirect_chain_offset=-1,
        **kwargs
    ):
        """Check that response.redirect_chain is behaving correctly

        `another` may be either a view name or a full http(s) URI pattern.
        """
        redirect_chain = response.redirect_chain
        self.assertTrue(
            len(redirect_chain) > 0,
            '[%s] did not redirect to [%s]. %s' % (view_name, another, extra_message,)
        )
        self.assertEqual(302, redirect_chain[0][1])
        actual = redirect_chain[redirect_chain_offset][0]
        protocol_pattern = r'^https://' if secure else r'^http://'
        if re.match(protocol_pattern, another):
            # `another` is a full uri
            pattern = another
            match = re.match(pattern, actual)
        else:
            # `another` is a view name
            reversed_url = reverse(another)
            protocol_pattern = protocol_pattern + '%s%s'
            pattern = protocol_pattern % (TESTSERVER, reversed_url,)
            match = re.match(pattern, actual)
            if match is None:
                # https://docs.djangoproject.com/en/3.1/releases/1.9/#http-redirects-no-longer-forced-to-absolute-uris
                # make another attempt to check against the partial URI
                match = re.match(reversed_url, actual)
        self.assertIsNotNone(
            match,
            '[%s] redirected to [%s] instead of [%s][%s]' % (
                view_name,
                actual,
                another,
                pattern
            )
        )

    def _check_view_redirects_to_another(
        self,
        view_name,
        another,
        client=None,
        params=None,
        view_args=None,
        view_kwargs=None,
        method='get',
        secure=True,
        **kwargs
    ):
        """Perform an HTTP request and check that the redirect_chain behaves correctly for a page that is expected to redirect
        """
        if method == 'get':
            response = self._get(view_name, client=client, params=params, view_args=view_args, view_kwargs=view_kwargs, follow=True, secure=secure)
        elif method == 'post':
            response = self._post(view_name, client=client, params=params, view_args=view_args, view_kwargs=view_kwargs, follow=True, secure=secure)
        else:
            raise Exception('Unknown HTTP method: %s' % method)
        self._check_response_redirect_chain(
            view_name,
            another,
            response,
            secure=secure,
            **kwargs
        )

    def _check_view_redirects_to_login(
        self,
        view_name,
        client=None,
        login_url_name='account_login',
        secure=True
    ):
        """Assert that an unauthenticated request redirects to the login view."""
        self._check_view_redirects_to_another(
            view_name,
            login_url_name,
            client=client,
            secure=secure
        )

    def _check_prelaunch_mode(self, view_name):
        """Assert that `view_name` redirects to the prelaunch page."""
        from htk.apps.prelaunch.utils import get_prelaunch_url_name
        prelaunch_url_name = get_prelaunch_url_name()
        # Use the computed name instead of calling the helper twice
        self._check_view_redirects_to_another(view_name, prelaunch_url_name)

    def test_basic(self):
        self.assertTrue(True)
| StarcoderdataPython |
11373549 | <gh_stars>0
# -*- coding: utf-8 -*-
# --------------------------------------------------------------------------------- #
# Software de Observaciones Sintéticas S.O.S.
# Units handling
#
# <NAME>, @ 5 May 2021
# Latest Revision: 5 May 2021, 14:00 GMT-6
#
# For all kind of problems, requests of enhancements and bug reports, please
# write to me at:
#
# <EMAIL>
# <EMAIL>
#
# --------------------------------------------------------------------------------- #
import numpy as np
# Fundamental magnitudes for sos
# Each entry maps a physical magnitude to its reference unit name and a
# table of unit symbols (lowercase keys for compound units) with their
# value expressed per reference unit (e.g. 1 kg = 1000 g).
# MKS system [REFERENCE SYSTEM]
mks = {
    'mass'    : {'name': 'kg',
                 'units': {'g':1000, 'lb':2.20462, 'Ms':5.0274e-31}},
    'length'  : {'name': 'm',
                 'units': {'m':1, 'ft':3.28084, 'pc':3.24077929e-17}},
    'time'    : {'name': 's',
                 'units': {'s':1, 'min':1/60, 'hr':1/3600}},
    'angular' : {'name': 'deg',
                 'units': {'deg':1, 'degree':1,'rad':np.pi/180, 'arcmin':60, 'arcsec':3600}},
    'temp'    : {'name': 'K',
                 'units': {'k':1, 'kcmb':1}},
    'pixel'   : {'name': 'pixel',
                 'units': {'pixel':1, 'px':1}},
    'flux'    : {'name': 'W m^-2 Hz^-1',
                 'units': {'w m^-2 hz^-1':1, 'jy':1e26}},
    'freq'    : {'name': 'Hz',
                 'units': {'hz':1}},
    'power'   : {'name': 'W',
                 'units': {'w':1, 'erg s^-1':1e7}},
    'den_mag' : {'name': 'T',
                 'units': {'t':1, 'g':1e4}},
    'energy'  : {'name': 'J',
                 'units': {'j':1, 'erg':1e7}},
    'flux_mag': {'name': 'Wb',
                 'units': {'wb':1, 'mx':1e8}}
}
# CGS system
cgs = {
    'mass'    : {'name': 'g',
                 'units': {'g':1, 'lb':0.00220462, 'Ms':5.0274e-33}},
    'length'  : {'name': 'cm',
                 'units': {'m':0.01, 'ft':0.0328084, 'pc':3.24077929e-19}},
    'time'    : {'name': 's',
                 'units': {'s':1, 'min':1/60, 'hr':1/3600}},
    'angular' : {'name': 'deg',
                 'units': {'deg':1, 'degree':1, 'rad':np.pi/180, 'arcmin':60, 'arcsec':3600}},
    'temp'    : {'name': 'K',
                 'units': {'k':1, 'kcmb':1}},
    'pixel'   : {'name': 'pixel',
                 'units': {'pixel':1, 'px':1}},
    'flux'    : {'name': 'erg cm^-2 s^-1 Hz^-1',
                 'units': {'erg cm^-2 s^-1 hz^-1':1, 'jy':1e23}},
    'freq'    : {'name': 'Hz',
                 'units': {'hz':1}},
    'power'   : {'name': 'erg s^-1',
                 'units': {'erg s^-1':1, 'w':1e-7}},
    'den_mag' : {'name': 'G',
                 'units': {'g':1, 't':1e-4}},
    'energy'  : {'name': 'erg',
                 'units': {'erg':1, 'j':1e-7}},
    'flux_mag': {'name': 'Mx',
                 'units': {'mx':1, 'wb':1e-8}}
}
# Prefix-Subfix
# Single-character unit prefixes mapped to their power of ten
# (e.g. 'k' -> 10^3, 'm' -> 10^-3).
sub_pre = {'a':-18,'f':-15,'p':-12,'n':-9,'u':-6,'m':-3,'c':-2,'d':-1,
           'D':1,'H':2,'k':3,'M':6,'G':9,'T':12,'P':15,'E':18}
# Extract subfix or prefix
def _get_presub_fix(unit, sys):
# Get all the units in terms of the International System of Units
for mag in sys.keys():
for u in sys[mag]['units'].keys():
if unit == u:
return (1/sys[mag]['units'][u], mag)
elif len(unit) > 0:
if unit[1:] == u:
if unit[0] in sub_pre:
factor = 10**sub_pre[unit[0]]/sys[mag]['units'][u]
return (factor, mag)
else:
print("Prefix/Subfix not defined")
return (None, None)
print("Magnituide is not available")
return (None, None)
def get_factors_units(units, ref):
    """Compute the conversion factor for a compound unit string.

    Args:
        units: Compound unit string, e.g. 'W m^-2 Hz^-1'.
        ref: Target unit system, either 'mks' or 'cgs'.

    Returns:
        (f, factors): `f` is the overall multiplicative conversion
        factor; `factors` maps each magnitude to a dict with keys
        'factor', 'pow' and 'units' (the system's reference unit name).

    Raises:
        ValueError: if `ref` is not 'mks' or 'cgs' (previously this
            failed later with an obscure NameError on `su_ref`).
    """
    # Identify the system of units
    if ref == 'mks':
        su_ref = mks
    elif ref == 'cgs':
        su_ref = cgs
    else:
        raise ValueError("Unknown unit system %r: expected 'mks' or 'cgs'" % ref)
    # Per-magnitude breakdown of the conversion
    factors = {}
    # Parse the compound string into {unit symbol: power}
    uts = _parse_units(units)
    # Overall conversion factor accumulated over all terms
    f = 1
    # Get the multiples
    for u in uts.keys():
        # Resolve the (possibly prefixed) unit symbol
        k, mag = _get_presub_fix(u, su_ref)
        if mag in factors:
            # Same magnitude appears more than once: merge the factors
            factors[mag]['factor'] = factors[mag]['factor']*(k)**uts[u]
            factors[mag]['pow'] += uts[u]
        else:
            factors[mag] = {}
            factors[mag]['factor'] = k**uts[u]
            factors[mag]['pow'] = uts[u]
        # Fold this magnitude's factor into the overall factor
        f = f*factors[mag]['factor']
        # Record the reference unit name for this magnitude
        factors[mag]['units'] = su_ref[mag]['name']
    return f, factors
def unit2str(units):
    """Render a factors dictionary as a human-readable unit string.

    Each magnitude contributes its unit symbol, with '^<pow>' appended
    whenever the power is not 1 (e.g. 'm s^-1').
    """
    parts = []
    for info in units.values():
        power = info['pow']
        symbol = info['units']
        parts.append(symbol if power == 1 else '%s^%s' % (symbol, power))
    return ' '.join(parts)
def _parse_units(units):
    """Parse a compound unit string into a {unit symbol: power} dict.

    Implements a small character-by-character state machine: letters
    accumulate into a unit symbol, '^' starts a power, '/' negates the
    power of the following term, and space/'*' terminate a term.
    Repeated symbols have their powers summed.
    """
    # Get all the physical magnitudes
    mags = []
    pwrs = []
    # Sign of the power (NOTE: sign_pwr/val_pwr/mul_signs are unused)
    sign_pwr = 1
    val_pwr = 1
    # Multiplications symbols
    mul_signs = [' ', '*']
    # Accumulators for the current unit symbol and its power digits
    aux_unit = ''
    aux_pwr = ''
    # Some flags
    flag_apply = False   # current term is complete; commit it
    flag_div = False     # a '/' was seen; next power gets a minus sign
    flag_mag = False     # currently accumulating a unit symbol
    flag_pwr = False     # currently accumulating a power
    for u in units:
        if u.isalpha():
            aux_unit += u
            flag_mag = True
        elif u == '^':
            flag_pwr = True
        elif flag_pwr and (u.isnumeric() or u in [',','.','-']):
            # Two consecutive minus signs cancel each other out
            if u == '-' and len(aux_pwr) > 0:
                if aux_pwr[0] == '-':
                    aux_pwr = aux_pwr[1:]
            else:
                aux_pwr += u
        elif u == '/':
            flag_div = True
            flag_apply = True
        else:
            # Separator (space or '*'): commit the current term
            flag_apply = True
        # If the character is the last
        if u == units[-1]:
            flag_apply = True
        if flag_apply:
            if flag_pwr or flag_mag:
                # Default power is 1 (or -1 after a '/')
                if aux_pwr == '':
                    aux_pwr = '1'
                elif aux_pwr == '-':
                    aux_pwr = '-1'
                pwrs.append(int(aux_pwr))
                flag_pwr = False
                if flag_div:
                    # Seed the next term's power with a minus sign
                    aux_pwr = '-'
                    flag_div = False
                else:
                    aux_pwr = ''
                if flag_mag:
                    mags.append(aux_unit)
                    aux_unit = ''
                    flag_mag = False
            flag_apply = False
    # Check if the units are repeated
    uts = {}
    while len(mags) > 0:
        # Mask all occurrences of the first remaining symbol
        mask = [a==mags[0] for a in mags]
        # Get powers
        pwrs_mag = [pwrs[n] for n in range(len(mask)) if mask[n]]
        # Assign the summed power for this (now unique) symbol
        uts[mags[0]] = sum(pwrs_mag)
        # Drop the processed occurrences and continue
        mags = [mags[m] for m in range(len(mask)) if not mask[m]]
        pwrs = [pwrs[m] for m in range(len(mask)) if not mask[m]]
    return uts
# su = 'cgs'
# units = "W m^-2 Hz^-1"
# factors, uts = _get_factors_units(units, su) | StarcoderdataPython |
1918640 | <gh_stars>100-1000
import sys
from optparse import OptionParser, IndentedHelpFormatter
from ripper.constants import *
from ripper.actions.attack import attack_method_labels
def create_parser() -> OptionParser:
    """Build the command-line parser with all options attached."""
    help_formatter = IndentedHelpFormatter(
        indent_increment=2, max_help_position=56, width=120, short_first=1)
    option_parser = OptionParser(
        usage=USAGE,
        epilog=EPILOG,
        version='%prog {}'.format(VERSION),
        formatter=help_formatter,
    )
    parser_add_options(option_parser)
    return option_parser
def parser_add_options(parser: OptionParser) -> None:
    """Add options to a parser."""
    # Target selection
    parser.add_option('-s', '--targets',
                      dest='targets', action='append',
                      help='Attack target in {scheme}://{hostname}[:{port}][{path}] format. Multiple targets allowed.')
    parser.add_option('--targets-list',
                      dest='targets_list', type='str',
                      help='File (fs or http/https) with targets in {scheme}://{hostname}[:{port}][{path}] line format.')
    # Attack configuration
    parser.add_option('-m', '--method',
                      dest='attack_method', type='str',
                      help=f'Attack method: {", ".join(attack_method_labels)}')
    parser.add_option('-e', '--http-method',
                      dest='http_method', type='str', default=ARGS_DEFAULT_HTTP_ATTACK_METHOD,
                      help=f'HTTP method. Default: {ARGS_DEFAULT_HTTP_ATTACK_METHOD}')
    parser.add_option('-t', '--threads',
                      dest='threads_count', type='str', default=ARGS_DEFAULT_THREADS_COUNT,
                      help=f'Total fixed threads count (number) or "auto" (text) for automatic threads selection. Default: {ARGS_DEFAULT_THREADS_COUNT}')
    parser.add_option('--min-random-packet-len',
                      dest='min_random_packet_len', type='int',
                      help=f'Min random packets length. Default: {DEFAULT_MIN_RND_PACKET_LEN}')
    parser.add_option('-l', '--max-random-packet-len',
                      dest='max_random_packet_len', type='int',
                      help=f'Max random packets length. Default: {DEFAULT_MAX_RND_PACKET_LEN} for udp/tcp')
    # Proxy configuration
    parser.add_option('-y', '--proxy-list',
                      dest='proxy_list',
                      help='File (fs or http/https) with proxies in ip:port:username:password line format. Proxies will be ignored in udp attack!')
    parser.add_option('-k', '--proxy-type',
                      dest='proxy_type', type='str', default=ARGS_DEFAULT_PROXY_TYPE,
                      help=f'Type of proxy to work with. Supported types: socks5, socks4, http. Default: {ARGS_DEFAULT_PROXY_TYPE}')
    # Runtime behavior
    parser.add_option('-c', '--health-check',
                      dest='health_check', type='int', default=ARGS_DEFAULT_HEALTH_CHECK,
                      help=f'Controls health check availability. Turn on: 1, turn off: 0. Default: {ARGS_DEFAULT_HEALTH_CHECK}')
    parser.add_option('-o', '--socket-timeout',
                      # default value is not set here to keep dynamic logic during initialization
                      dest='socket_timeout', type='int', default=ARGS_DEFAULT_SOCK_TIMEOUT,
                      help=f'Timeout for socket connection is seconds. Default (seconds): {ARGS_DEFAULT_SOCK_TIMEOUT} without proxy, {2*ARGS_DEFAULT_SOCK_TIMEOUT} with proxy')
    parser.add_option('--dry-run',
                      dest='dry_run', action="store_true",
                      help='Print formatted output without full script running.')
    # Logging
    parser.add_option('--log-size',
                      dest='log_size', type='int', default=DEFAULT_LOG_SIZE,
                      help='Set the Events Log history frame length.')
    parser.add_option('--log-level',
                      dest='event_level', type='str', default=DEFAULT_LOG_LEVEL,
                      help='Log level for events board. Supported levels: info, warn, error, none.')
    parser.add_option('-d', '--duration',
                      dest='duration', type='int',
                      help='Attack duration in seconds. After this duration script will stop it\'s execution.')
def print_usage():
    """Print the ASCII logo followed by the full option help, then exit."""
    print(LOGO_NOCOLOR)
    parser = create_parser()
    parser.print_help()
    sys.exit()
| StarcoderdataPython |
9787077 | # =========================================================================
# Copyright 2012-present Yunify, Inc.
# -------------------------------------------------------------------------
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this work except in compliance with the License.
# You may obtain a copy of the License in the LICENSE file, or at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =========================================================================
from qingcloud.cli.iaas_client.actions.base import BaseAction
class VerifyNotificationItemAction(BaseAction):
    """CLI action wrapping the VerifyNotificationItem API call."""

    action = 'VerifyNotificationItem'
    command = 'verify-notification-item'
    usage = '%(prog)s [-c --notification_item_content...] [-f <conf_file>]'

    @classmethod
    def add_ext_arguments(cls, parser):
        """Register the arguments specific to this action."""
        parser.add_argument(
            '-c', '--notification-item-content',
            dest='notification_item_content',
            action='store', type=str, default='',
            help='The content of notification item which will be verified.')
        parser.add_argument(
            '-v', '--verification-code',
            dest='verification_code',
            action='store', type=str, default='',
            help='The verification code.')

    @classmethod
    def build_directive(cls, options):
        """Validate the options and assemble the API directive dict.

        Returns None (after printing an error) when a required value is
        missing, matching the original per-field checks.
        """
        for required in ('notification_item_content', 'verification_code'):
            if getattr(options, required) == '':
                print('error: %s should be specified.' % required)
                return None
        return {
            'notification_item_content': options.notification_item_content,
            'verification_code': options.verification_code,
        }
| StarcoderdataPython |
11319759 | <gh_stars>0
from sympy import *
import re
x = Symbol('x')
y = Symbol('y')
z = Symbol('z')
# if (tipo==1){ alert("numerador");}
# if (tipo==2){ alert("numerador raiz");}
# if (tipo==3){ alert("numerador e denominador");}
# if (tipo==4){ alert("numerador raiz e denominador");}
# if (tipo==5){ alert("numerador raiz e denominador raiz");}
# if (tipo==6){ alert("numerador e denominador raiz");}
def calcTipo(funcao):
    """Detect whether the expression string contains a division.

    Args:
        funcao: Expression string to inspect (e.g. '1/(x+1)').

    Returns:
        The type of the `re.search` result: `re.Match` when a '/' is
        present in `funcao`, `NoneType` otherwise.
    """
    # Bug fix: `re.search(r'/')` was missing the string to search in,
    # which raised a TypeError on every call.
    math = re.search(r'/', funcao)
    return type(math)
| StarcoderdataPython |
8055543 | from m5.SimObject import SimObject
from BaseHarvester import BaseHarvester
from m5.params import *
from m5.proxy import *
class SimpleHarvester(BaseHarvester):
    """gem5 SimObject describing a simple energy-harvester model.

    Fix: stray dataset tokens fused onto the last line made the module
    raise at import time; they are removed here.
    """
    type = 'SimpleHarvester'                    # SimObject type name
    cxx_header = "engy/energy_harvester.hh"     # C++ implementation header
6584977 | <filename>piquery/piq_feature.py
import cv2 as cv
import numpy as np
import pandas as pd
import os
def parseKeypoints(kp):
    """Convert OpenCV KeyPoint objects into plain dictionaries.

    Each output dict exposes the keypoint's center, diameter, angle,
    class id, octave and response score.
    """
    attr_map = {'center': 'pt',
                'diameter': 'size',
                'angle': 'angle',
                'class_id': 'class_id',
                'octave': 'octave',
                'response': 'response'}
    return [{key: getattr(point, attr) for key, attr in attr_map.items()}
            for point in kp]
def kpdfsort(kp):
    """Return keypoints as a DataFrame sorted by response, descending."""
    frame = pd.DataFrame(parseKeypoints(kp))
    frame = frame.sort_values(by=['response'], ascending=False)
    return frame[['center', 'diameter', 'angle', 'response']]
def orbparams(orb):
    """Snapshot an ORB detector's configuration as a dict.

    Calls each documented getter on *orb* and keys the result by the
    parameter name (getter name without the 'get' prefix).
    """
    names = ['DefaultName', 'EdgeThreshold', 'FastThreshold', 'FirstLevel',
             'MaxFeatures', 'NLevels', 'PatchSize', 'ScaleFactor',
             'ScoreType', 'WTA_K']
    return {name: getattr(orb, 'get' + name)() for name in names}
def byte2hex(bt):
    """Format *bt* as a two-character lowercase hex string without '0x'."""
    digits = hex(bt).split('x')[1]
    return digits if bt >= 16 else '0' + digits
# Work around cv.imread failing on non-ASCII (e.g. Chinese) paths:
# chdir into the file's directory so imread only sees an ASCII-safe
# relative name, then restore the previous working directory.
def cv_imread(file_path):
    """Read an image with OpenCV even when *file_path* contains non-ASCII characters."""
    root_dir, file_name = os.path.split(file_path)
    pwd = os.getcwd()
    if root_dir:
        os.chdir(root_dir)
    cv_img = cv.imread(file_name)
    # NOTE(review): if cv.imread raised, the cwd would not be restored;
    # in practice imread returns None on failure instead of raising.
    os.chdir(pwd)
    return cv_img
class ImFeature:
    """Thin wrapper over OpenCV feature detectors (ORB by default; SIFT or
    SURF when the xfeatures2d contrib module is available) providing
    keypoint detection, descriptor extraction and matching helpers."""
    def __init__(self, alg=None, k=500):
        # alg selects the detector ('sift', 'surf', anything else -> ORB);
        # k is the ORB maximum feature count.
        if alg == 'sift':
            self.algf = cv.xfeatures2d.SIFT_create()
        elif alg == 'surf':
            self.algf = cv.xfeatures2d.SURF_create()
        else:
            self.algf = cv.ORB_create(k)
        self.alg = alg
        self.matcher = None        # lazily-created brute-force matcher
        self.flann_matcher = None  # lazily-created FLANN matcher
        self.store = dict()        # NOTE(review): unused cache, see commented code below
    def read(self, img_path):
        """Load an image from disk; return (bgr, grayscale) arrays."""
        # if not img_path in self.store:
        # store = self.store
        # store[img_path] = dict()
        bgr = cv_imread(img_path)
        gray= cv.cvtColor(bgr, cv.COLOR_BGR2GRAY)
        # store[img_path]['bgr'] = bgr
        # store[img_path]['gray'] = gray
        return bgr, gray
    def keypoint(self, im):
        """Detect keypoints; *im* may be a file path or an image array.

        Returns (gray_image, keypoints), or (None, None) for other types.
        """
        if isinstance(im, str):
            bgr, gray = self.read(im)
            return gray, self.algf.detect(gray, None)
        elif isinstance(im, np.ndarray):
            return im, self.algf.detect(im, None)
        return None, None
    def descriptor(self, img, kp):
        """Compute descriptors for pre-detected keypoints *kp* on *img*."""
        return self.algf.compute(img, kp)
    def fingerprint(self, descriptor):
        """Serialize a descriptor matrix into one lowercase hex string."""
        return ''.join([''.join([byte2hex(d) for d in dps]) for dps in descriptor])
    def feature(self, im):
        """Detect keypoints and compute descriptors in one pass.

        *im* may be a path or an array; returns (keypoints, descriptors),
        or (None, None) for other input types.
        """
        if isinstance(im, str):
            bgr, gray = self.read(im)
            return self.algf.detectAndCompute(gray, None)
        elif isinstance(im, np.ndarray):
            return self.algf.detectAndCompute(im, None)
        return None, None
    def fastFeature(self, im):
        """Detect keypoints with the FAST detector (*im* must be a path)."""
        bgr, gray = self.read(im)
        fast = cv.FastFeatureDetector_create()
        kp = fast.detect(gray, None)
        return kp
    def match(self, im1, im2, k=None):
        """Brute-force match two images; uses knnMatch when *k* is given."""
        kp1, des1 = self.feature(im1)
        kp2, des2 = self.feature(im2)
        alg = self.alg
        if self.matcher is None:
            # Hamming norm for ORB's binary descriptors; default L2 otherwise.
            if alg == 'sift':
                self.matcher = cv.BFMatcher()
            elif alg == 'surf':
                self.matcher = cv.BFMatcher()
            else:
                self.matcher = cv.BFMatcher(cv.NORM_HAMMING, crossCheck=True)
        if k is None:
            return self.matcher.match(des1, des2)
        else:
            return self.matcher.knnMatch(des1,des2, k)
    def flannMatch(self, im1, im2):
        """FLANN-based matching; accepts paths or precomputed descriptors.

        SIFT/SURF use a KD-tree index with knnMatch(k=2); other detectors
        (ORB) use an LSH index with plain match().
        """
        if isinstance(im1, str):
            kp1, des1 = self.feature(im1)
            kp2, des2 = self.feature(im2)
        else:
            des1 = im1
            des2 = im2
        alg = self.alg
        if self.flann_matcher is None:
            if alg == 'sift' or alg == 'surf':
                FLANN_INDEX_KDTREE = 1
                index_params = dict(algorithm = FLANN_INDEX_KDTREE, trees = 5)
                search_params = dict(checks=50)
                self.flann_matcher = cv.FlannBasedMatcher(index_params,search_params)
            else:
                FLANN_INDEX_LSH = 6
                index_params= dict(algorithm = FLANN_INDEX_LSH,
                        table_number = 6, # 12
                        key_size = 12, # 20
                        multi_probe_level = 1) #2
                search_params = dict(checks=50)
                self.flann_matcher = cv.FlannBasedMatcher(index_params,search_params)
        if alg == 'sift' or alg == 'surf':
            return self.flann_matcher.knnMatch(des1,des2,k=2)
        else:
            return self.flann_matcher.match(des1, des2)
class CropImFeature(ImFeature):
    """ImFeature variant that center-crops images to a square before use."""
    def crop(self, img):
        """Return the largest centered square region of *img*."""
        height, width = img.shape[:2]
        if height == width:
            return img
        if height > width:
            start = int((height - width) / 2)
            stop = min(start + width, height)
            return img[start:stop, :]
        start = int((width - height) / 2)
        stop = min(start + height, width)
        return img[:, start:stop]
    def read(self, img_path, size=100):
        """Read via the base class, then square-crop the grayscale image."""
        bgr, gray = super().read(img_path)
        return bgr, self.crop(gray)
class ResizeImFeature(CropImFeature):
    """CropImFeature variant that additionally scales images to a fixed width."""
    def resize(self, img, size=128):
        """Scale *img* so its width equals *size*, preserving aspect ratio."""
        height, width = img.shape[:2]
        new_w = size
        new_h = int(height * new_w / width)
        return cv.resize(img, (new_w, new_h), interpolation=cv.INTER_CUBIC)
    def read(self, img_path, size=100):
        """Read and crop via the base class, then resize to width *size*."""
        bgr, gray = super().read(img_path)
        return bgr, self.resize(gray, size=size)
class ImSim:
    """Image-similarity scorer based on FLANN feature matching."""
    def __init__(self, k=500, resize=False, crop=True):
        self.k = k
        # Pick the preprocessing pipeline; resize wins over crop.
        if resize == True:
            self.feature = ResizeImFeature(k=k)
        elif crop == True:
            self.feature = CropImFeature(k=k)
        else:
            self.feature = ImFeature(k=k)
    def match(self, img1, img2):
        """FLANN-match two images (paths or descriptor arrays), keeping only
        matches with distance < 10, sorted by increasing distance."""
        raw = self.feature.flannMatch(img1, img2)
        close = [m for m in raw if m.distance < 10]
        return sorted(close, key=lambda m: m.distance)
    def getFeature(self, img):
        """Return (keypoints, descriptors) for *img*."""
        return self.feature.feature(img)
    def calcSim(self, img1, img2):
        """Return the ratio of good matches to the smaller keypoint count.

        Accepts either two image paths or two precomputed keypoint/descriptor
        collections.
        """
        if isinstance(img1, str):
            kp1, des1 = self.feature.feature(img1)
            kp2, des2 = self.feature.feature(img2)
            denom = min(len(kp1), len(kp2))
            good = self.match(des1, des2)
        else:
            denom = min(len(img1), len(img2))
            good = self.match(img1, img2)
        return len(good) / denom
336484 | # coding:utf-8
from view import route, url_for, View
from model.user import User
from tornado.web import authenticated
@route('/signin', name='signin')
class SignIn(View):
    """Sign-in view: renders the login form and authenticates POSTs."""
    def get(self):
        """Show the login form."""
        self.render('user/signin.html')
    def post(self):
        """Validate credentials; on success set the auth cookie and redirect home."""
        username = self.get_argument("username")
        password = self.get_argument("password")
        error = False
        u = User.auth(username, password)
        if not u:
            error = True
            self.messages.error("帐号或密码错误!")
        if not error:
            self.messages.success("登陆成功!")
            self.set_secure_cookie("u", u.salt)  # shortcut: use the user's salt directly as the session key
            return self.redirect(url_for("index"))
        self.render('user/signin.html')
@route('/signout', name='signout')
class SignOut(View):
    """Log the current user out by clearing the auth cookie.

    Bug fix: ``@authenticated`` was previously applied to the *class*,
    where it had no effect (the route decorator registered the raw class,
    and tornado's ``authenticated`` wraps handler *methods*).  It now
    decorates ``get`` so anonymous users are redirected to the login page.
    """
    @authenticated
    def get(self):
        self.clear_cookie('u')
        self.messages.success("您已成功登出!")
        self.redirect(url_for("index"))
@route('/signup', name='signup')
class SignUp(View):
    """Sign-up view: renders the registration form and creates accounts."""
    def get(self):
        """Show the registration form."""
        self.render('user/signup.html')
    def post(self):
        """Validate username/password length and uniqueness, then create the account."""
        username = self.get_argument("username")
        password = self.get_argument("password")
        error = False
        if len(username) < 3:
            error = True
            self.messages.error("用户名长度必须大于等于3")
        if len(password) < 3:
            error = True
            self.messages.error("密码长度必须大于等于3")
        if User.exist(username):
            error = True
            self.messages.error("用户已存在!")
        if not error:
            u = User.new(username, password)
            self.messages.success("账户创建成功!")
            return self.redirect(url_for('signin'))
        self.render('user/signup.html')
| StarcoderdataPython |
1813518 | import argparse
import torch
def get_args(args):
    """Finalize parsed command-line arguments for the RL trainer.

    Sets ``args.cuda`` from ``--no-cuda`` and hardware availability, and
    validates the algorithm / recurrent-policy combination.  Returns the
    same namespace, mutated in place.
    """
    args.cuda = (not args.no_cuda) and torch.cuda.is_available()
    assert args.algo in ("a2c", "ppo", "acktr")
    if args.recurrent_policy:
        assert args.algo in (
            "a2c",
            "ppo",
        ), "Recurrent policy is not implemented for ACKTR"
    return args
| StarcoderdataPython |
3408728 | from shutil import copytree
import subprocess
import pytest
import sys
import os
from shopyo import __version__
pytestmark = pytest.mark.cli_integration
@pytest.mark.usefixtures("restore_cwd")
def test_initialise_after_new(tmp_path):
    """run shopyo new inside a tmp directory foo,
    create a venv, install the shopyo.tar.gz dependencies,
    run `shopyo new` and then run `shopyo initialise`.
    """
    # go one level up to the cwd so we are are the root where
    # setup.py exits
    os.chdir("../")
    # create the dist folder with shoypo-<version>.tar.gz file
    subprocess.check_call([sys.executable, "setup.py", "sdist"])
    # store all path names to be used later
    dist_path = os.path.join(os.getcwd(), "dist")
    shopyo_dist_name = f"shopyo-{__version__}.tar.gz"
    project_path = tmp_path / "foo"
    # copy the shopyo dist to the test project path
    copytree(dist_path, os.path.join(project_path, "dist"))
    # change cwd to that of test project
    os.chdir(project_path)
    # create a new virtual environment(venv)
    subprocess.check_call([sys.executable, "-m", "venv", "env"])
    # store path for python and shopyo executable of venv for the case when OS
    # is Unix
    python_env = os.path.join(os.getcwd(), "env", "bin", "python")
    shopyo_env = os.path.join(os.getcwd(), "env", "bin", "shopyo")
    # if OS is Windows, update the python and shopyo executable
    # (venv layout uses Scripts/ instead of bin/ on Windows)
    if sys.platform == "win32":
        python_env = os.path.join(os.getcwd(), "env", "Scripts", "python")
        shopyo_env = os.path.join(os.getcwd(), "env", "Scripts", "shopyo")
    # update pip of venv
    subprocess.check_call(
        [python_env, "-m", "pip", "install", "--upgrade", "pip"]
    )
    # install the shopyo package from dist added earlier
    subprocess.check_call(
        [
            python_env,
            "-m",
            "pip",
            "install",
            os.path.join("dist", shopyo_dist_name)
        ]
    )
    # run shopyo help command followed by new command
    subprocess.check_call(["shopyo", "--help"])
    subprocess.check_call([shopyo_env, "new"])
    # change the cwd to the newly created shopyo project
    os.chdir(os.path.join(project_path, "foo"))
    # initialise the project
    subprocess.check_call(
        [shopyo_env, "initialise"]
    )
    # initialise must have created the SQLite DB and the migrations folder
    assert os.path.exists("shopyo.db")
    assert os.path.exists("migrations")
| StarcoderdataPython |
1928422 | <filename>onnx_tf/handlers/backend/floor.py<gh_stars>10-100
import tensorflow as tf
from onnx_tf.handlers.backend_handler import BackendHandler
from onnx_tf.handlers.handler import onnx_op
from onnx_tf.handlers.handler import tf_func
from .math_mixin import BasicMathMixin
@onnx_op("Floor")
@tf_func(tf.floor)
class Floor(BasicMathMixin, BackendHandler):
    """ONNX ``Floor`` operator backend handler, lowered onto ``tf.floor``."""
    @classmethod
    def version_1(cls, node, **kwargs):
        """Handle opset-1 Floor nodes."""
        return [cls.make_tensor_from_onnx_node(node, **kwargs)]
    @classmethod
    def version_6(cls, node, **kwargs):
        """Handle opset-6 Floor nodes (same lowering as opset 1)."""
        return [cls.make_tensor_from_onnx_node(node, **kwargs)]
| StarcoderdataPython |
12828415 | <reponame>bdrich/neutron-lbaas
# Copyright 2015, <NAME>, Copyright IBM Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from oslo_config import cfg
from neutron import context
from neutron_lbaas.drivers.octavia import driver
from neutron_lbaas.services.loadbalancer import data_models
from neutron_lbaas.tests.unit.db.loadbalancer import test_db_loadbalancerv2
class ManagerTest(object):
def __init__(self, parent, manager, mocked_req):
self.parent = parent
self.context = parent.context
self.driver = parent.driver
self.manager = manager
self.mocked_req = mocked_req
def create(self, model, url, args):
self.manager.create(self.context, model)
self.mocked_req.post.assert_called_with(url, args)
def update(self, old_model, model, url, args):
self.manager.update(self.context, old_model, model)
self.mocked_req.put.assert_called_with(url, args)
def delete(self, model, url):
self.manager.delete(self.context, model)
self.mocked_req.delete.assert_called_with(url)
# TODO(Banashankar) : Complete refresh function. Need more info.
def refresh(self):
pass
# TODO(Banashankar): Complete stats function. Need more info.
def stats(self):
pass
class BaseOctaviaDriverTest(test_db_loadbalancerv2.LbaasPluginDbTestCase):
    """Shared setup for Octavia driver tests: fake model graph + mocked REST."""
    # Copied it from Brocade's test code :/
    def _create_fake_models(self):
        """Build a LoadBalancer with one listener/pool/member/healthmonitor,
        all sharing a single id, wired together both ways."""
        # This id is used for all the entities.
        id = 'test_id'
        lb = data_models.LoadBalancer(id=id)
        sni_container = data_models.SNI(listener_id=id)
        listener = data_models.Listener(id=id, loadbalancer=lb,
                                        sni_containers=[sni_container])
        pool = data_models.Pool(id=id, listener=listener)
        member = data_models.Member(id=id, pool=pool)
        hm = data_models.HealthMonitor(id=id, pool=pool)
        lb.listeners = [listener]
        listener.default_pool = pool
        pool.members = [member]
        pool.healthmonitor = hm
        return lb
    def setUp(self):
        """Create the driver with a mocked plugin and mocked REST client."""
        super(BaseOctaviaDriverTest, self).setUp()
        self.context = context.get_admin_context()
        self.plugin = mock.Mock()
        self.driver = driver.OctaviaDriver(self.plugin)
        # mock of rest call.
        self.driver.req = mock.Mock()
        self.lb = self._create_fake_models()
class TestOctaviaDriver(BaseOctaviaDriverTest):
    """CRUD tests asserting the exact REST payloads the driver emits."""
    def test_allocates_vip(self):
        """allocates_vip config option is reflected on the LB manager."""
        self.addCleanup(cfg.CONF.clear_override,
                        'allocates_vip', group='octavia')
        cfg.CONF.set_override('allocates_vip', True, group='octavia')
        test_driver = driver.OctaviaDriver(self.plugin)
        self.assertTrue(test_driver.load_balancer.allocates_vip)
    def test_load_balancer_ops(self):
        """Load balancer create/update/delete REST payloads."""
        m = ManagerTest(self, self.driver.load_balancer,
                        self.driver.req)
        lb = self.lb
        # urls for assert test.
        lb_url = '/v1/loadbalancers'
        lb_url_id = '/v1/loadbalancers/' + lb.id
        # Create LB test
        # args for create assert.
        args = {
            'id': lb.id,
            'name': lb.name,
            'description': lb.description,
            'enabled': lb.admin_state_up,
            'project_id': lb.tenant_id,
            'vip': {
                'subnet_id': lb.vip_subnet_id,
                'ip_address': lb.vip_address,
                'port_id': lb.vip_port_id,
            }
        }
        m.create(lb, lb_url, args)
        # Update LB test
        # args for update assert.
        # NOTE(review): 'args = args =' is a redundant double assignment.
        args = args = {
            'name': lb.name,
            'description': lb.description,
            'enabled': lb.admin_state_up,
        }
        m.update(lb, lb, lb_url_id, args)
        # delete LB test
        m.delete(lb, lb_url_id)
        # TODO(Banashankar) : refresh n stats fucntions are not yet done.
        #m.refresh()
        #m.stats()
    def test_listener_ops(self):
        """Listener create/update/delete REST payloads."""
        m = ManagerTest(self, self.driver.listener,
                        self.driver.req)
        listener = self.lb.listeners[0]
        # urls for assert test.
        list_url = '/v1/loadbalancers/%s/listeners' % listener.loadbalancer.id
        list_url_id = list_url + '/%s' % (listener.id)
        # Create Listener test.
        # args for create and update assert.
        sni_containers = [sni.tls_container_id
                          for sni in listener.sni_containers]
        args = {
            'id': listener.id,
            'name': listener.name,
            'description': listener.description,
            'enabled': listener.admin_state_up,
            'protocol': listener.protocol,
            'protocol_port': listener.protocol_port,
            'connection_limit': listener.connection_limit,
            'tls_certificate_id': listener.default_tls_container_id,
            'sni_containers': sni_containers,
            'project_id': listener.tenant_id
        }
        m.create(listener, list_url, args)
        # Update listener test.
        # (id and project_id are immutable, so they are absent on update)
        del args['id']
        del args['project_id']
        m.update(listener, listener, list_url_id, args)
        # Delete listener.
        m.delete(listener, list_url_id)
    def test_pool_ops(self):
        """Pool create/update/delete REST payloads."""
        m = ManagerTest(self, self.driver.pool,
                        self.driver.req)
        pool = self.lb.listeners[0].default_pool
        # urls for assert test.
        pool_url = '/v1/loadbalancers/%s/listeners/%s/pools' % (
            pool.listener.loadbalancer.id,
            pool.listener.id)
        pool_url_id = pool_url + "/%s" % pool.id
        # Test create pool.
        # args for create and update assert.
        args = {
            'id': pool.id,
            'name': pool.name,
            'description': pool.description,
            'enabled': pool.admin_state_up,
            'protocol': pool.protocol,
            'lb_algorithm': pool.lb_algorithm,
            'project_id': pool.tenant_id
        }
        if pool.session_persistence:
            args['session_persistence'] = {
                'type': pool.session_persistence.type,
                'cookie_name': pool.session_persistence.cookie_name,
            }
        m.create(pool, pool_url, args)
        # Test update pool.
        del args['id']
        del args['project_id']
        m.update(pool, pool, pool_url_id, args)
        # Test pool delete.
        m.delete(pool, pool_url_id)
    def test_member_ops(self):
        """Member create/update/delete REST payloads."""
        m = ManagerTest(self, self.driver.member,
                        self.driver.req)
        member = self.lb.listeners[0].default_pool.members[0]
        # urls for assert.
        mem_url = '/v1/loadbalancers/%s/listeners/%s/pools/%s/members' % (
            member.pool.listener.loadbalancer.id,
            member.pool.listener.id,
            member.pool.id)
        mem_url_id = mem_url + "/%s" % member.id
        # Test Create member.
        # args for create assert.
        args = {
            'id': member.id,
            'enabled': member.admin_state_up,
            'ip_address': member.address,
            'protocol_port': member.protocol_port,
            'weight': member.weight,
            'subnet_id': member.subnet_id,
            'project_id': member.tenant_id
        }
        m.create(member, mem_url, args)
        # Test member update.
        # args for update assert (only the mutable fields).
        args = {
            'enabled': member.admin_state_up,
            'protocol_port': member.protocol_port,
            'weight': member.weight,
        }
        m.update(member, member, mem_url_id, args)
        # Test member delete.
        m.delete(member, mem_url_id)
    def test_health_monitor_ops(self):
        """Health monitor create/update/delete REST payloads."""
        m = ManagerTest(self, self.driver.health_monitor,
                        self.driver.req)
        hm = self.lb.listeners[0].default_pool.healthmonitor
        # urls for assert.
        hm_url = '/v1/loadbalancers/%s/listeners/%s/pools/%s/healthmonitor' % (
            hm.pool.listener.loadbalancer.id,
            hm.pool.listener.id,
            hm.pool.id)
        # Test HM create.
        # args for create and update assert.
        args = {
            'type': hm.type,
            'delay': hm.delay,
            'timeout': hm.timeout,
            'rise_threshold': hm.max_retries,
            'fall_threshold': hm.max_retries,
            'http_method': hm.http_method,
            'url_path': hm.url_path,
            'expected_codes': hm.expected_codes,
            'enabled': hm.admin_state_up,
            'project_id': hm.tenant_id
        }
        m.create(hm, hm_url, args)
        # Test HM update
        del args['project_id']
        m.update(hm, hm, hm_url, args)
        # Test HM delete
        m.delete(hm, hm_url)
class TestThreadedDriver(BaseOctaviaDriverTest):
    """Tests for driver.thread_op, which polls Octavia's provisioning_status
    until the LB goes ACTIVE/DELETED/ERROR or the poll timeout expires."""
    def setUp(self):
        """Shorten poll timings and mock out completion callbacks + context."""
        super(TestThreadedDriver, self).setUp()
        cfg.CONF.set_override('request_poll_interval', 1, group='octavia')
        cfg.CONF.set_override('request_poll_timeout', 5, group='octavia')
        self.driver.req.get = mock.MagicMock()
        self.succ_completion = mock.MagicMock()
        self.fail_completion = mock.MagicMock()
        self.context = mock.MagicMock()
        ctx_patcher = mock.patch('neutron.context.get_admin_context',
                                 return_value=self.context)
        ctx_patcher.start()
        self.addCleanup(ctx_patcher.stop)
        self.driver.load_balancer.successful_completion = (
            self.succ_completion)
        self.driver.load_balancer.failed_completion = self.fail_completion
    def test_thread_op_goes_active(self):
        """PENDING_CREATE -> ACTIVE triggers successful_completion."""
        self.driver.req.get.side_effect = [
            {'provisioning_status': 'PENDING_CREATE'},
            {'provisioning_status': 'ACTIVE'}
        ]
        driver.thread_op(self.driver.load_balancer, self.lb)
        self.succ_completion.assert_called_once_with(self.context, self.lb,
                                                     delete=False)
        self.assertEqual(0, self.fail_completion.call_count)
    def test_thread_op_goes_deleted(self):
        """PENDING_DELETE -> DELETED triggers successful_completion(delete=True)."""
        self.driver.req.get.side_effect = [
            {'provisioning_status': 'PENDING_DELETE'},
            {'provisioning_status': 'DELETED'}
        ]
        driver.thread_op(self.driver.load_balancer, self.lb, delete=True)
        self.succ_completion.assert_called_once_with(self.context, self.lb,
                                                     delete=True)
        self.assertEqual(0, self.fail_completion.call_count)
    def test_thread_op_goes_error(self):
        """PENDING_CREATE -> ERROR triggers failed_completion."""
        self.driver.req.get.side_effect = [
            {'provisioning_status': 'PENDING_CREATE'},
            {'provisioning_status': 'ERROR'}
        ]
        driver.thread_op(self.driver.load_balancer, self.lb)
        self.fail_completion.assert_called_once_with(self.context, self.lb)
        self.assertEqual(0, self.succ_completion.call_count)
    def test_thread_op_a_times_out(self):
        """Still-pending status past the poll timeout triggers failed_completion."""
        cfg.CONF.set_override('request_poll_timeout', 1, group='octavia')
        self.driver.req.get.side_effect = [
            {'provisioning_status': 'PENDING_CREATE'}
        ]
        driver.thread_op(self.driver.load_balancer, self.lb)
        self.fail_completion.assert_called_once_with(self.context, self.lb)
        self.assertEqual(0, self.succ_completion.call_count)
    def test_thread_op_updates_vip_when_vip_delegated(self):
        """When Octavia allocates the VIP, the LB object picks up the address."""
        cfg.CONF.set_override('allocates_vip', True, group='octavia')
        expected_vip = '10.1.1.1'
        self.driver.req.get.side_effect = [
            {'provisioning_status': 'PENDING_CREATE',
             'vip': {'ip_address': ''}},
            {'provisioning_status': 'ACTIVE',
             'vip': {'ip_address': expected_vip}}
        ]
        driver.thread_op(self.driver.load_balancer,
                         self.lb,
                         lb_create=True)
        self.succ_completion.assert_called_once_with(self.context, self.lb,
                                                     delete=False,
                                                     lb_create=True)
        self.assertEqual(expected_vip, self.lb.vip_address)
| StarcoderdataPython |
4864602 | from django.contrib import admin
from .models import Election, Candidate, Ticket, FPTPVote, APRVVote, STVVote, STVPreference, STVResult
def archive(modeladmin, request, queryset):
    """Django admin action: mark the selected *closed* elections as archived.

    Open elections in the selection are left untouched.
    """
    closed = queryset.filter(open=False)
    closed.update(archived=True)
archive.short_description = "Archive selected elections (if closed)"
class PreferenceInline(admin.StackedInline):
    """Read-only inline listing the ranked preferences attached to an STV vote."""
    model = STVPreference
    readonly_fields = ['order', 'candidate']
    can_delete = False
    extra = 0
class CandidateInline(admin.StackedInline):
    """Inline for adding candidates directly on the election page."""
    model = Candidate
    extra = 1
class FPTPVoteAdmin(admin.ModelAdmin):
    """Admin for first-past-the-post votes; all fields read-only."""
    readonly_fields = ['election', 'uuid', 'time', 'selection']
    search_fields = ['uuid']
class TicketAdmin(admin.ModelAdmin):
    """Admin for voting tickets, searchable by UUID."""
    search_fields = ['uuid']
class STVVoteAdmin(admin.ModelAdmin):
    """Admin for STV votes with their preferences inlined; read-only."""
    inlines = [PreferenceInline]
    readonly_fields = ['election', 'uuid', 'time', 'selection']
    search_fields = ['uuid']
class STVResultAdmin(admin.ModelAdmin):
    """Admin for computed STV results; entirely read-only."""
    readonly_fields = ['election', 'full_log', 'winners', 'generated']
class ElectionAdmin(admin.ModelAdmin):
    """Admin for elections: inline candidates, bulk-archive action, filters."""
    inlines = [CandidateInline]
    actions = [archive]
    list_filter = ['archived','open', 'vote_type']
    list_display = ['__str__', 'open','vote_type']
# Register your models here.
admin.site.register(Election, ElectionAdmin)
admin.site.register(FPTPVote, FPTPVoteAdmin)
admin.site.register(APRVVote, FPTPVoteAdmin)
admin.site.register(Ticket, TicketAdmin)
admin.site.register(STVVote, STVVoteAdmin)
admin.site.register(STVResult, STVResultAdmin)
| StarcoderdataPython |
12817395 | <filename>website/drawquest/apps/brushes/api.py
from django.shortcuts import get_object_or_404
from canvas.exceptions import ServiceError
from drawquest.api_decorators import api_decorator
from canvas.templatetags.jinja_base import render_jinja_to_string
from canvas.view_guards import require_staff, require_user
from drawquest.apps.brushes import models
from drawquest import economy
urlpatterns = []
api = api_decorator(urlpatterns)
@api('purchase_brush')
def purchase_brush(request, brush_canonical_name):
    """Buy *brush_canonical_name* for the requesting user.

    Raises ServiceError on an invalid purchase; otherwise returns the
    refreshed shop/user brush lists and the user's new coin balance.
    """
    viewer = request.user
    try:
        economy.purchase_brush(viewer, brush_canonical_name)
    except economy.InvalidPurchase as e:
        raise ServiceError(e.message)
    return {
        'shop_brushes': list(models.Brush.for_shop(viewer=viewer)),
        'user_brushes': list(models.Brush.for_user(viewer)),
        'balance': economy.balance(viewer),
    }
| StarcoderdataPython |
9617430 | from models import DeepMindBigGAN, StyleGAN2, GPT2
from latent import DeepMindBigGANLatentSpace, StyleGAN2LatentSpace, GPT2LatentSpace
from utils import biggan_norm, biggan_denorm
# Registry of model configurations for latent-space search.
# Each entry bundles the generator class, its latent space, checkpoint
# location, and the evolutionary-algorithm settings ("ga" single-objective,
# "nsga2" multi-objective when a discriminator supplies a second objective).
configs = dict(
    # Image -> text: search GPT-2 token space for a caption.
    GPT2 = dict(
        task = "img2txt",
        dim_z = 20,
        max_tokens_len = 30,
        max_text_len = 50,
        encoder_size = 50257,
        latent = GPT2LatentSpace,
        model = GPT2,
        use_discriminator = False,
        init_text = "the picture of",
        weights = "./gpt2/weights/gpt2-pytorch_model.bin",
        encoder = "./gpt2/weights/encoder.json",
        vocab = "./gpt2/weights/vocab.bpe",
        stochastic = False,
        algorithm = "ga",
        pop_size = 100,
        batch_size = 25,
        problem_args = dict(
            n_var = 20,
            n_obj = 1,
            n_constr = 20,
            xl = 0,
            xu = 50256
        )
    ),
    # Text -> image: BigGAN-deep at 256px; genome = 128 z dims + 1000 class logits.
    DeepMindBigGAN256 = dict(
        task = "txt2img",
        dim_z = 128,
        num_classes = 1000,
        latent = DeepMindBigGANLatentSpace,
        model = DeepMindBigGAN,
        weights = "biggan-deep-256",
        use_discriminator = False,
        algorithm = "ga",
        norm = biggan_norm,
        denorm = biggan_denorm,
        truncation = 1.0,
        pop_size = 64,
        batch_size = 32,
        problem_args = dict(
            n_var = 128 + 1000,
            n_obj = 1,
            n_constr = 128,
            xl = -2,
            xu = 2
        )
    ),
    # Same as above at 512px (smaller population/batch for memory).
    DeepMindBigGAN512 = dict(
        task = "txt2img",
        dim_z = 128,
        num_classes = 1000,
        latent = DeepMindBigGANLatentSpace,
        model = DeepMindBigGAN,
        weights = "biggan-deep-512",
        use_discriminator = False,
        algorithm = "ga",
        norm = biggan_norm,
        denorm = biggan_denorm,
        truncation = 1.0,
        pop_size = 32,
        batch_size = 8,
        problem_args = dict(
            n_var = 128 + 1000,
            n_obj = 1,
            n_constr = 128,
            xl = -2,
            xu = 2
        )
    ),
    # StyleGAN2 FFHQ with discriminator as a second objective (NSGA-II).
    StyleGAN2_ffhq_d = dict(
        task = "txt2img",
        dim_z = 512,
        latent = StyleGAN2LatentSpace,
        model = StyleGAN2,
        use_discriminator = True,
        weights = "./stylegan2/weights/ffhq-config-f",
        algorithm = "nsga2",
        norm = biggan_norm,
        denorm = biggan_denorm,
        pop_size = 16,
        batch_size = 4,
        problem_args = dict(
            n_var = 512,
            n_obj = 2,
            n_constr = 512,
            xl = -10,
            xu = 10,
        ),
    ),
    # Custom "Adaily" checkpoint.
    Adaily = dict(
        task = "txt2img",
        dim_z = 256,  # NOTE(review): possibly should be 512 like the other StyleGAN2 configs — confirm against the checkpoint
        latent = StyleGAN2LatentSpace,
        model = StyleGAN2,
        use_discriminator = True,
        weights = "./stylegan2/weights/adaily/",
        algorithm = "nsga2",
        norm = biggan_norm,
        denorm = biggan_denorm,
        pop_size = 16,
        batch_size = 4,
        problem_args = dict(
            n_var = 256,
            n_obj = 2,
            n_constr = 256,
            xl = -10,
            xu = 10,
        ),
    ),
    Adaily_A = dict(
        task = "txt2img",
        dim_z = 512,
        latent = StyleGAN2LatentSpace,
        model = StyleGAN2,
        use_discriminator = True,
        weights = "../models/Adaily_A/torch_custom",
        algorithm = "nsga2",
        norm = biggan_norm,
        denorm = biggan_denorm,
        pop_size = 16,
        batch_size = 4,
        problem_args = dict(
            n_var = 512,
            n_obj = 2,
            n_constr = 512,
            xl = -10,
            xu = 10,
        ),
    ),
    Adaily_B = dict(
        task = "txt2img",
        dim_z = 512,
        latent = StyleGAN2LatentSpace,
        model = StyleGAN2,
        use_discriminator = True,
        weights = "../models/Adaily_B/torch_custom",
        algorithm = "nsga2",
        norm = biggan_norm,
        denorm = biggan_denorm,
        pop_size = 16,
        batch_size = 4,
        problem_args = dict(
            n_var = 512,
            n_obj = 2,
            n_constr = 512,
            xl = -10,
            xu = 10,
        ),
    ),
    StyleGAN2_car_d = dict(
        task = "txt2img",
        dim_z = 512,
        latent = StyleGAN2LatentSpace,
        model = StyleGAN2,
        use_discriminator = True,
        weights = "./stylegan2/weights/car-config-f",
        algorithm = "nsga2",
        norm = biggan_norm,
        denorm = biggan_denorm,
        pop_size = 16,
        batch_size = 4,
        problem_args = dict(
            n_var = 512,
            n_obj = 2,
            n_constr = 512,
            xl = -10,
            xu = 10
        ),
    ),
    StyleGAN2_church_d = dict(
        task = "txt2img",
        dim_z = 512,
        latent = StyleGAN2LatentSpace,
        model = StyleGAN2,
        use_discriminator = True,
        weights = "./stylegan2/weights/church-config-f",
        algorithm = "nsga2",
        norm = biggan_norm,
        denorm = biggan_denorm,
        pop_size = 16,
        batch_size = 4,
        problem_args = dict(
            n_var = 512,
            n_obj = 2,
            n_constr = 512,
            xl = -10,
            xu = 10
        ),
    ),
    # "_nod" variants: no discriminator, so single-objective GA.
    StyleGAN2_ffhq_nod = dict(
        task = "txt2img",
        dim_z = 512,
        latent = StyleGAN2LatentSpace,
        model = StyleGAN2,
        use_discriminator = False,
        weights = "./stylegan2/weights/ffhq-config-f",
        algorithm = "ga",
        norm = biggan_norm,
        denorm = biggan_denorm,
        pop_size = 16,
        batch_size = 4,
        problem_args = dict(
            n_var = 512,
            n_obj = 1,
            n_constr = 512,
            xl = -10,
            xu = 10
        )
    ),
    StyleGAN2_car_nod = dict(
        task = "txt2img",
        dim_z = 512,
        latent = StyleGAN2LatentSpace,
        model = StyleGAN2,
        use_discriminator = False,
        weights = "./stylegan2/weights/car-config-f",
        algorithm = "ga",
        norm = biggan_norm,
        denorm = biggan_denorm,
        pop_size = 16,
        batch_size = 4,
        problem_args = dict(
            n_var = 512,
            n_obj = 1,
            n_constr = 512,
            xl = -10,
            xu = 10
        )
    ),
    StyleGAN2_church_nod = dict(
        task = "txt2img",
        dim_z = 512,
        latent = StyleGAN2LatentSpace,
        model = StyleGAN2,
        use_discriminator = False,
        weights = "./stylegan2/weights/church-config-f",
        algorithm = "ga",
        norm = biggan_norm,
        denorm = biggan_denorm,
        pop_size = 16,
        batch_size = 4,
        problem_args = dict(
            n_var = 512,
            n_obj = 1,
            n_constr = 512,
            xl = -10,
            xu = 10
        )
    )
)
def get_config(name):
    """Look up a model configuration by name; raises KeyError if unknown."""
    return configs[name]
| StarcoderdataPython |
from multimds import multimds
import sys
# Chromosome name comes from the command line, e.g. "21".
chrom = sys.argv[1]
# Jointly embed the GM12878 and K562 Hi-C structures for this chromosome
# at 100-kb resolution.
multimds.full_mds("hic_data/GM12878_combined_{}_100kb.bed".format(chrom), "hic_data/K562_{}_100kb.bed".format(chrom))
| StarcoderdataPython |
385241 | #!/usr/bin/env python3
"""\
Script for managing Zig versions in use.
The script uses and updates the version listed in the README.md file.
current-path can be used to add zig to PATH.
"""
import sys
import json
from argparse import ArgumentParser
from pathlib import Path
import urllib.request
import urllib
import re
import functools
import enum
import tarfile
# Utils =======================================================================
def download(url, msg=''):
    """Open *url* for reading; exit the program with an error message on failure.

    *msg* is extra text appended to the progress line.
    """
    print('Downloading', url + msg + '...')
    try:
        return urllib.request.urlopen(url)
    except urllib.error.URLError as e:
        sys.exit('Failed to download {}: {}'.format(url, str(e)))
def download_file(url, dest=None):
    """Download *url* and return the Path it was saved to.

    If *dest* is None, save under the URL's basename in the current
    directory (previously a None dest crashed with AttributeError).
    If *dest* is an existing directory, save inside it under the URL's
    basename; otherwise *dest* is used as the target file path.
    """
    name = url.split('/')[-1]
    if dest is None:
        dest = Path(name)
    elif dest.is_dir():
        dest = dest / name
    # Also fixes the progress message, which was missing a space after 'to'.
    with dest.open('wb') as f:
        f.write(download(url, ', saving to ' + str(dest)).read())
    return dest
def extract_archive(archive, dest):
    """Unpack tar *archive* into *dest*, then delete the archive file."""
    print('Extracting', archive, 'to', dest)
    tf = tarfile.open(archive)
    try:
        tf.extractall(dest)
    finally:
        tf.close()
    archive.unlink()
# Main Logic ==================================================================
# Repository root (this script lives one directory below it).
georgios_root = Path(__file__).resolve().parent.parent
readme_path = georgios_root / 'README.md'
# Prefer a sibling ../zigs directory; fall back to tmp/zigs inside the repo.
default_zigs_path = georgios_root / '../zigs'
if not default_zigs_path.is_dir():
    default_zigs_path = georgios_root / 'tmp/zigs'
# Target platform for downloaded toolchains.
os = 'linux'  # NOTE(review): shadows the stdlib 'os' module name
cpu = 'x86_64'
zig_version_url = 'https://ziglang.org/download/index.json'
# Version grammar: MAJOR.MINOR.PATCH with an optional -dev.N suffix.
field = r'0|[1-9]\d*'
version_regex = r'({f})\.({f})\.({f})(?:-dev\.(\d+))?'.format(f=field)
version_re = re.compile(version_regex)
# Matches the README line "...[Zig](https://ziglang.org/) <version>".
current_re = re.compile(
    r'(.*\[Zig\]\(https:\/\/ziglang.org\/\) )(?P<ver>{}.*)$'.format(version_regex))
# Matches an extracted toolchain directory: zig-<os>-<cpu>-<version>.
zig_name_re = re.compile('zig-([\w-]+)-(?P<ver>{}.*)$'.format(version_regex))
@functools.total_ordering
class Zig:
    """A Zig toolchain installation identified by its directory name.

    Names look like ``zig-linux-x86_64-0.9.0`` or
    ``zig-linux-x86_64-0.9.0-dev.123``.  Ordering compares versions
    numerically; a tagged release sorts after any dev build of the same
    major.minor.patch.
    """
    def __init__(self, zigs_path, name):
        self.name = name
        m = zig_name_re.match(name)
        if not m:
            raise ValueError('Invalid zig name: ' + name)
        self.version = m.group('ver')
        self.path = zigs_path / name
    def exists(self):
        """True if this toolchain has been extracted (zig binary present)."""
        return (self.path / 'zig').is_file()
    @classmethod
    def from_version(cls, zigs_path, version):
        """Alternate constructor from a bare version string."""
        return cls(zigs_path, 'zig-{}-{}-{}'.format(os, cpu, version))
    @classmethod
    def from_info(cls, zigs_path, info):
        """Alternate constructor from a ziglang.org download-index entry."""
        return cls.from_version(zigs_path, info['version'])
    def is_release(self):
        """True for tagged releases (no -dev.N suffix)."""
        return self.version_parts()[3] is None
    def guess_url(self):
        """Guess the ziglang.org tarball URL for this version."""
        return "https://ziglang.org/{}/{}.tar.xz".format(
            "download/" + self.version if self.is_release() else "builds", self.name)
    def version_parts(self):
        """Return (major, minor, patch, dev) as regex string groups; dev may be None."""
        return version_re.match(self.version).groups()
    def __eq__(self, other):
        return self.version == other.version
    def __lt__(self, other):
        ours = self.version_parts()
        theirs = other.version_parts()
        def lt(i):
            # The dev component (index 3) is compared last; a release
            # (dev is None) is newer than any dev build of the same x.y.z.
            if i == 3:
                if theirs[3] is None:
                    return ours[3] is not None
                elif ours[3] is None:
                    return False
                # BUG FIX: compare dev numbers numerically — the regex groups
                # are strings, and '10' < '9' lexicographically.
                return int(ours[3]) < int(theirs[3])
            if ours[i] == theirs[i]:
                return lt(i + 1)
            # BUG FIX: numeric comparison for major/minor/patch as well.
            return int(ours[i]) < int(theirs[i])
        return lt(0)
    def __str__(self):
        return self.version
    def __repr__(self):
        return '<Zig: {}>'.format(self.name)
def get_current_version():
    """Extract the Zig version recorded in README.md.

    Raises ValueError when no line matches the expected pattern.
    """
    for line in readme_path.read_text().splitlines():
        m = current_re.match(line)
        if m:
            return m.group('ver')
    raise ValueError('Could not get current from README.md')
def set_current_version(version):
    """Rewrite README.md so its Zig version line records *version*."""
    updated = []
    for line in readme_path.read_text().split('\n'):
        m = current_re.search(line)
        updated.append(m.group(1) + version if m else line)
    readme_path.write_text('\n'.join(updated))
def get_current(zigs_path):
    """Return a Zig object for the version currently recorded in README.md."""
    return Zig.from_version(zigs_path, get_current_version())
def get_downloaded(zigs_path):
    """Return a sorted list of Zig toolchains extracted under *zigs_path*.

    Entries whose directory names don't parse as zig toolchains are skipped.
    Bug fix: the previous bare ``except: pass`` swallowed *every* exception
    (including KeyboardInterrupt); only the ValueError that Zig() raises
    for non-matching names is expected here.
    """
    l = []
    for zig_path in zigs_path.glob('*'):
        try:
            zig = Zig(zigs_path, zig_path.name)
        except ValueError:
            continue  # not a zig-<os>-<cpu>-<version> directory
        if zig.exists():
            l.append(zig)
    return sorted(l)
def use(zigs_path, version=None, zig=None):
    """Record *version* (or the given *zig*) as current in README.md.

    Exactly one of *version* or *zig* must be provided; *version* takes
    precedence when both are given.
    """
    if version is not None:
        zig = Zig.from_version(zigs_path, version)
    if zig is None:
        raise ValueError
    print('Using', zig.version)
    if not zig.exists():
        raise ValueError('Trying to use a Zig that does not exist: ' + str(zig.path))
    set_current_version(zig.version)
    print('Refresh PATH if needed')
def get_latest_info():
    """Fetch ziglang.org's download index and return its 'master' entry."""
    return json.load(download(zig_version_url))["master"]
class CheckStatus(enum.Enum):
    """Outcome of comparing the current Zig version with the latest upstream.

    Bug fix: each member previously ended with a trailing comma, making its
    value the tuple ``(auto(),)`` instead of an int.  Identity comparisons
    still worked, but the values were nonsense; the commas are removed.
    """
    UsingLatest = enum.auto()
    LatestNotCurrent = enum.auto()
    NeedToDownloadLatest = enum.auto()
def check(zigs_path, for_update=False):
    """Report how the current Zig (per README.md) compares to the latest
    upstream build.

    Returns (CheckStatus, latest Zig, latest download-index entry).
    Bug fix: the latest Zig was constructed from the module-level
    ``args.zigs_path`` CLI global instead of the ``zigs_path`` parameter,
    which broke any non-CLI caller.
    """
    current = get_current(zigs_path)
    print('Current is', current)
    if not for_update and not current.exists():
        print('  It needs to be downloaded!')
    latest_info = get_latest_info()
    latest = Zig.from_info(zigs_path, latest_info)
    print('Latest is', latest)
    using_latest = latest == current
    if using_latest:
        print('  Same as current')
    if latest.exists():
        if using_latest:
            status = CheckStatus.UsingLatest
        else:
            print('  Latest was downloaded, but isn\'t current')
            status = CheckStatus.LatestNotCurrent
    else:
        if for_update:
            print('  Will download the latest')
        else:
            print('  Would need to be downloaded')
        status = CheckStatus.NeedToDownloadLatest
    return status, latest, latest_info
def download_zig(zigs_path, zig, url):
    """Download the toolchain tarball at *url* and extract it into *zigs_path*."""
    extract_archive(download_file(url, zigs_path), zigs_path)
def update(zigs_path):
    """Switch to the latest Zig, downloading it first if necessary."""
    # BUG FIX: was `check(args.zigs_path, ...)` — used the module-level
    # `args` global instead of the function parameter.
    status, latest, latest_info = check(zigs_path, for_update=True)
    if status == CheckStatus.UsingLatest:
        return
    if status == CheckStatus.NeedToDownloadLatest:
        # NOTE(review): `cpu` and `os` look like module-level platform
        # strings (e.g. "x86_64", "linux") defined elsewhere in this file —
        # confirm they are not shadowed by the `os` module.
        download_zig(zigs_path, latest, latest_info['{}-{}'.format(cpu, os)]['tarball'])
        status = CheckStatus.LatestNotCurrent
    if status == CheckStatus.LatestNotCurrent:
        use(zigs_path, zig=latest)
# Subcommands =================================================================
def current_path_subcmd(args):
    """Print the resolved filesystem path of the current Zig."""
    zig = get_current(args.zigs_path)
    if not zig.exists():
        raise ValueError('Zig in README.md does not exist: ' + str(zig.path))
    print(zig.path.resolve())
def current_version_subcmd(args):
    """Print the Zig version recorded in README.md."""
    print(get_current_version())
def check_subcmd(args):
    """Report how the current Zig compares to the latest upstream release."""
    check(args.zigs_path)
def list_subcmd(args):
    """Print every downloaded Zig version, newest first."""
    for zig in reversed(get_downloaded(args.zigs_path)):
        print(zig.version)
def use_subcmd(args):
    """Switch the current Zig to the version named on the command line."""
    use(args.zigs_path, version=args.version)
def update_subcmd(args):
    """Download (if needed) and switch to the latest Zig."""
    update(args.zigs_path)
def download_subcmd(args):
    """Download the current (README.md) Zig version if it is not on disk."""
    zig = get_current(args.zigs_path)
    if zig.exists():
        print('Already downloaded', zig.version)
        return
    download_zig(args.zigs_path, zig, zig.guess_url())
# Main ========================================================================
if __name__ == '__main__':
    # Build the CLI: one global --zigs-path option plus one subparser per
    # subcommand, each dispatching via the `func` default.
    arg_parser = ArgumentParser(description=__doc__)
    arg_parser.add_argument('--zigs-path',
        type=Path, default=default_zigs_path,
        help='Where to store all the versions of Zig'
    )
    subcmds = arg_parser.add_subparsers()
    cp = subcmds.add_parser('current-path',
        help='Print the path of the current Zig',
    )
    cp.set_defaults(func=current_path_subcmd)
    cv = subcmds.add_parser('current-version',
        # TYPO FIX: was "Print the the current version of Zig".
        help='Print the current version of Zig',
    )
    cv.set_defaults(func=current_version_subcmd)
    c = subcmds.add_parser('check',
        help='See if there is a new latest',
    )
    c.set_defaults(func=check_subcmd)
    l = subcmds.add_parser('list',
        help='List all downloaded versions',
    )
    l.set_defaults(func=list_subcmd)
    us = subcmds.add_parser('use',
        help='Use a specified downloaded version',
    )
    us.set_defaults(func=use_subcmd)
    us.add_argument('version', metavar='VERSION')
    up = subcmds.add_parser('update',
        help='Use the latest version, downloading if needed',
    )
    up.set_defaults(func=update_subcmd)
    dl = subcmds.add_parser('download',
        # GRAMMAR FIX: was "Downloading the current version if missing".
        help='Download the current version if missing',
    )
    dl.set_defaults(func=download_subcmd)
    args = arg_parser.parse_args()
    if not hasattr(args, 'func'):
        arg_parser.error('Must provide a subcommand')
    # Only auto-create the default location; a user-supplied path must exist.
    if args.zigs_path == default_zigs_path:
        args.zigs_path.resolve().mkdir(parents=True, exist_ok=True)
    args.zigs_path = args.zigs_path.resolve()
    args.func(args)
| StarcoderdataPython |
# 153073 | repo: shihab4t/Software-Development
import smtplib
import os
from email.message import EmailMessage
# Credentials come from the environment so they are never committed.
EMAIL_ADDRESS = os.environ.get("GMAIL_ADDRESS")
EMAIL_PASSWORD = os.environ.get("GMAIL_APP_PASS")

# Send a multipart (plain-text + HTML) message over implicit-TLS SMTP.
with smtplib.SMTP_SSL("smtp.gmail.com", 465) as smtp:
    smtp.login(EMAIL_ADDRESS, EMAIL_PASSWORD)
    receiver = "<EMAIL>"
    msg = EmailMessage()
    msg["Subject"] = "Grab dinner this weekend? 2"
    msg["From"] = EMAIL_ADDRESS
    msg["To"] = receiver
    msg.set_content("This is plain text")
    msg.add_alternative("""\
<!DOCTYPE html>
<html>
<body>
<h1 style="color:SlateGray;">This is an HTML Email!</h1>
</body>
</html>
""", subtype="html")
    smtp.send_message(msg)
    # TYPO FIX: was "Email was sented to ...".
    print(f"Email was sent to {receiver}")
| StarcoderdataPython |
1729188 | import numpy
def UIDLoss(probsSeq):
    """Return the population standard deviation of a probability sequence.

    Lower values mean the per-token probabilities are more uniform
    (UID = Uniform Information Density).
    """
    mean = sum(probsSeq) * 1.0 / len(probsSeq)
    # Kept from the original: debug output of the mean.
    # FIX: was the Python-2-only `print mean` statement; the call form below
    # behaves identically on both Python 2 and 3 for a single argument.
    print(mean)
    variance = sum((p - mean) * (p - mean) for p in probsSeq) * 1.0 / len(probsSeq)
    return numpy.sqrt(variance)
# Example usage (the original script runs this at import time).
probs = [0.4, 0.5, 0.5, 0.6, 0.5, 0.51]
# FIX: was the Python-2-only `print UIDLoss(probs)` statement.
print(UIDLoss(probs))
'''
Defines a ranking loss
'''
def rankingLoss(perfect, proposed):
    """Return a position-weighted ranking loss between two rankings.

    Element-wise differences are damped by 1/(index+1), so disagreements
    near the top of the ranking contribute the most.
    """
    suma = 0
    for weIndex, (elemPer, elemProp) in enumerate(zip(perfect, proposed)):
        # BUG FIX: the original accumulated into the builtin `sum`
        # (`sum += newElem`), which raises a TypeError at runtime;
        # accumulate into `suma` as clearly intended.
        suma += (elemPer - elemProp) * 1.0 / (weIndex + 1)
    return suma
# 5090264 | filename: ws2122-lspm/Lib/site-packages/pm4py/visualization/graphs/util/common.py (gh_stars: 1-10)
'''
This file is part of PM4Py (More Info: https://pm4py.fit.fraunhofer.de).
PM4Py is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
PM4Py is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with PM4Py. If not, see <https://www.gnu.org/licenses/>.
'''
import shutil
import tempfile
from pm4py.util import vis_utils
def get_temp_file_name(format):
    """
    Gets a temporary file name for the image

    Parameters
    ------------
    format
        Format of the target image
    """
    # FIX: close the (delete-on-close) handle explicitly so the temporary
    # file is removed deterministically and only its unique name is kept.
    # The original relied on CPython garbage collection to do this, which is
    # implementation-dependent timing.
    handle = tempfile.NamedTemporaryFile(suffix='.' + format)
    filename = handle.name
    handle.close()
    return filename
def save(temp_file_name, target_path):
    """
    Saves the temporary image associated to the graph to the specified path

    Parameters
    --------------
    temp_file_name
        Path to the temporary file hosting the graph
    target_path
        Path where the image shall eventually be saved
    """
    # Copy file contents only (no metadata); the temp file is left in place.
    shutil.copyfile(temp_file_name, target_path)
def view(temp_file_name):
    """
    View the graph

    Parameters
    ------------
    temp_file_name
        Path to the temporary file hosting the graph
    """
    # Render inline when running inside a Jupyter notebook, otherwise fall
    # back to the operating system's image viewer.
    if vis_utils.check_visualization_inside_jupyter():
        vis_utils.view_image_in_jupyter(temp_file_name)
    else:
        vis_utils.open_opsystem_image_viewer(temp_file_name)
def matplotlib_view(temp_file_name):
    """
    Display the graph image in a Matplotlib window.

    Parameters
    ---------------
    temp_file_name
        Path to the temporary file hosting the graph
    """
    # Imported lazily so matplotlib is only required when this viewer is used.
    import matplotlib.image as mpimg
    import matplotlib.pyplot as plt

    image = mpimg.imread(temp_file_name)
    plt.imshow(image)
    plt.show()
def serialize(temp_file_name: str) -> bytes:
    """
    Read the rendered graph image back as raw bytes.

    Parameters
    ------------
    temp_file_name
        Path to the temporary file hosting the graph
    """
    handle = open(temp_file_name, "rb")
    try:
        return handle.read()
    finally:
        handle.close()
| StarcoderdataPython |
9725684 | """Assignment - making a sklearn estimator.
The goal of this assignment is to implement by yourself a scikit-learn
estimator for the OneNearestNeighbor and check that it is working properly.
The nearest neighbor classifier predicts for a point X_i the target y_k of
the training sample X_k which is the closest to X_i. We measure proximity with
the Euclidean distance. The model will be evaluated with the accuracy (average
number of samples corectly classified). You need to implement the `fit`,
`predict` and `score` methods for this class. The code you write should pass
the test we implemented. You can run the tests by calling at the root of the
repo `pytest test_sklearn_questions.py`.
We also ask to respect the pep8 convention: https://pep8.org. This will be
enforced with `flake8`. You can check that there is no flake8 errors by
calling `flake8` at the root of the repo.
Finally, you need to write docstring similar to the one in `numpy_questions`
for the methods you code and for the class. The docstring will be checked using
`pydocstyle` that you can also call at the root of the repo.
"""
import numpy as np
from sklearn.base import BaseEstimator
from sklearn.base import ClassifierMixin
from sklearn.utils.validation import check_X_y
from sklearn.utils.validation import check_array
from sklearn.utils.validation import check_is_fitted
from sklearn.utils.multiclass import check_classification_targets
from sklearn.metrics import pairwise_distances
from scipy import stats
from sklearn.metrics import accuracy_score
class OneNearestNeighbor(BaseEstimator, ClassifierMixin):
    """Nearest-neighbor classifier (majority vote over the k=1 default)."""

    def __init__(self, n_neighbors=1):  # noqa: D107
        self.n_neighbors = n_neighbors

    def fit(self, X, y):
        """Memorize the training set.

        Parameters
        ----------
        X : array-like of shape (n_samples, n_features)
            Training samples.
        y : array-like of shape (n_samples,)
            Class labels.

        Returns
        -------
        self : OneNearestNeighbor
            The fitted estimator.
        """
        X, y = check_X_y(X, y)
        check_classification_targets(y)
        self.classes_ = np.unique(y)
        self.X_ = X
        self.y_ = y
        return self

    def predict(self, X):
        """Predict the majority label among each sample's nearest neighbors.

        Parameters
        ----------
        X : array-like of shape (n_queries, n_features)
            Samples to classify.

        Returns
        -------
        y_pred : ndarray of shape (n_queries,)
            Predicted class labels.
        """
        check_is_fitted(self)
        X = check_array(X)
        if isinstance(self.y_, np.ndarray):
            d = pairwise_distances(X, self.X_)
            sorted_indices = np.argsort(d)
            # FIX: the original also sorted the distances into `d` with
            # np.take_along_axis but never used the result; only the
            # neighbor indices matter for majority voting.
            neighbors_indices = sorted_indices[:, :self.n_neighbors]
            Y_neighbors = self.y_[neighbors_indices]
            y_pred, _ = stats.mode(Y_neighbors, axis=1)
            return y_pred.ravel()
        raise ValueError("Estimator was fitted with non-array targets.")

    def score(self, X, y):
        """Return the mean accuracy of the predictions on (X, y)."""
        X, y = check_X_y(X, y)
        return accuracy_score(y, self.predict(X))
| StarcoderdataPython |
11357427 | from collections import defaultdict
from utils.json_utils import read_json
from utils.date_utils import DATE_HELPER
achievements = defaultdict(list)
def add_achievement(week: int, achievement: str) -> None:
    """Record *achievement* under the given week number."""
    achievements[week].append(achievement)
def get_progress():
    """Return achievements recorded for last week and the current week."""
    collected = []
    for entry in read_json("data/progress.json"):
        offset = entry["Week"] - DATE_HELPER.current_week_number()
        # Keep entries from the previous week (offset -1) and this week (0).
        if -1 <= offset <= 0:
            collected.extend(entry["Achievements"])
    return collected
| StarcoderdataPython |
335244 | <reponame>aleofreitas/pyppmc
# -*- coding: utf-8 -*-
# Copyright 2018 <NAME>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Package containing security related functions
Attributes
----------
CYPHER_TEXT_PATTERN : str
Pattern of encrypted messages.
HASH_DELIMITER : str
HASH delimiter used by PPM (#?#).
PASSWORD_DELIMITER : str
Password delimiter used by PPM (#!#).
PRIVATE_KEY_HEADER : str
Default content of the first line on the private key file.
PUBLIC_KEY_HEADER : str
Default content of the first line on the public key file.
"""
import elgamal
import re
from elgamal import PrivateKey
from elgamal import PublicKey
CYPHER_TEXT_PATTERN = '^%s(.*)%s$'
HASH_DELIMITER = '#?#'
PASSWORD_DELIMITER = '#!#'
PRIVATE_KEY_HEADER = 'ElGamal Private Key'
PUBLIC_KEY_HEADER = 'ElGamal Public Key'
def get_public_key(public_key_file):
    """Load an ElGamal public key from a key file.

    Parameters
    ----------
    public_key_file : str
        Path to the public_key_file

    Returns
    -------
    :obj:`PublicKey`
        Resulting public key object.
    """
    with open(public_key_file, 'r') as key_file:
        lines = key_file.readlines()
    header = lines[0].rstrip()
    if header != PUBLIC_KEY_HEADER:
        raise IOError('Invalid file format.')
    # Line 2 is the bit length in decimal; lines 3-5 are hex integers.
    bit_length = int(lines[1].rstrip())
    modulus = long(lines[2].rstrip(), 16)
    generator = long(lines[3].rstrip(), 16)
    public_value = long(lines[4].rstrip(), 16)
    return PublicKey(bit_length, modulus, generator, public_value)
def get_private_key(private_key_file):
    """Load an ElGamal private key from a key file.

    Parameters
    ----------
    private_key_file : str
        Path to the private_key_file

    Returns
    -------
    :obj:`PrivateKey`
        Resulting private key object.
    """
    with open(private_key_file, 'r') as key_file:
        lines = key_file.readlines()
    header = lines[0].rstrip()
    if header != PRIVATE_KEY_HEADER:
        raise IOError('Invalid file format.')
    # Line 2 is the bit length in decimal; lines 3-5 are hex integers.
    bit_length = int(lines[1].rstrip())
    modulus = long(lines[2].rstrip(), 16)
    generator = long(lines[3].rstrip(), 16)
    secret_exponent = long(lines[4].rstrip(), 16)
    return PrivateKey(bit_length, modulus, generator, secret_exponent)
def is_encrypted(text):
    """Check whether *text* is enclosed by PPM's encryption delimiters.

    Parameters
    ----------
    text : str
        Text to be checked.

    Returns
    -------
    bool
        True if text is enclosed by `HASH_DELIMITER` or
        `PASSWORD_DELIMITER`; False otherwise.
    """
    password_match = re.match(
        CYPHER_TEXT_PATTERN % (PASSWORD_DELIMITER, PASSWORD_DELIMITER), text)
    hash_match = re.match(
        CYPHER_TEXT_PATTERN % (HASH_DELIMITER, HASH_DELIMITER), text)
    return password_match is not None or hash_match is not None
def get_cypher_text(text):
    """Strip PPM's delimiters, returning the pure encrypted message.

    PPM marks encrypted text by enclosing it with `HASH_DELIMITER` or
    `PASSWORD_DELIMITER`; undelimited input is returned unchanged.

    Parameters
    ----------
    text : str
        Encrypted message with or without delimiters.

    Returns
    -------
    str
        Encrypted message without delimiters.
    """
    # Password delimiters take precedence, matching the original ordering.
    for delimiter in (PASSWORD_DELIMITER, HASH_DELIMITER):
        match = re.match(CYPHER_TEXT_PATTERN % (delimiter, delimiter), text)
        if match is not None:
            return match.groups()[0]
    return text
def encrypt(text, public_key):
    """Encrypts given text using given public key.

    Thin wrapper around the elgamal library; no PPM delimiters are added
    to the result.

    Parameters
    ----------
    text : str
        Text to be encrypted.
    public_key
        Public key to encrypt.

    Returns
    -------
    str
        Encrypted text (without delimiters).
    """
    return elgamal.encrypt2(text, public_key)
def decrypt(text, private_key):
    """Decrypts given text using given private key.

    Handles both delimited (`HASH_DELIMITER` / `PASSWORD_DELIMITER`) and
    undelimited input by stripping delimiters first via get_cypher_text.

    Parameters
    ----------
    text : str
        Text to be decrypted.
    private_key
        Private key to decrypt.

    Returns
    -------
    str
        Decrypted text.
    """
    return elgamal.decrypt2(get_cypher_text(text), private_key)
| StarcoderdataPython |
6650398 | <reponame>yookiwooki/AstroLib<gh_stars>0
#!/usr/bin/env python3
"""
rkf45.py - Runge-Kutta-Fehlberg 4(5) algorithm
"""
import numpy as np
import numpy.linalg as LA
import pudb
class OptionsRKF(object):
    """Options for the RKF integrator.

    Attributes:
        tol (float): error tolerance per step
        h0 (float): initial (and minimum) step size
        r_param (float): factor to reduce step size when tol. is not met
        i_param (float): factor to increase step size when tol. is met
        db_param (float): deadband factor for increasing step size
    """
    def __init__(self, tol, h0=0.001, r_param=0.5, i_param=1.5, db_param=0.9):
        self.tol = tol
        self.h0 = h0
        self.r_param = r_param
        self.i_param = i_param
        self.db_param = db_param
class ResultRKF(object):
    """Output data from the RKF integrator.

    Attributes:
        t (numpy.array): m x 1 floats with times corresponding to rows of x
        x (numpy.array): m x n floats with integrated result
        error (numpy.array): m x 1 floats with errors, first error is zero
        h (numpy.array): m x 1 floats with step sizes
    """
    def __init__(self, t, x):
        self.t = t
        self.x = x
        # error and h start as scalars and are grown via np.vstack by rkf45.
        self.error = 0
        self.h = 0
def rkf45(func, tspan, x0, options):
    """RKF 4(5)th order variable step size numerical integration.

    Ref:
        Fehlberg. "Low order classical runge-kutta formulas with stepsize
        control and their application to some heat transfer problems." (1969).

    NOTE: The reference above has a horrifically tricky typo related to the
    step size in Equation 2 that has been corrected in this implementation.

    Args:
        func: derivative function f(t, x)
        tspan (numpy.array): 2 x 1 floats with initial and final times
        x0 (numpy.array): n x 1 floats with initial state
        options (OptionsRKF): see class definition above

    Returns:
        ResultRKF: Output data from RKF integrator
    """
    # Fehlberg coefficient tableau.
    alpha = np.array([1/4, 3/8, 12/13, 1, 1/2])
    beta = np.zeros([5, 5])
    beta[0, 0] = np.array([1/4])
    beta[1, 0:2] = np.array([3/32, 9/32])
    beta[2, 0:3] = np.array([1932/2197, -7200/2197, 7296/2197])
    beta[3, 0:4] = np.array([439/216, -8, 3680/513, -845/4104])
    beta[4, 0:5] = np.array([-8/27, 2, -3544/2565, 1859/4104, -11/40])
    c = np.array([25/216, 0, 1408/2565, 2197/4104, -1/5])
    c_hat = np.array([16/135, 0, 6656/12825, 28561/56430, -9/50, 2/55])

    # Initialization
    f = np.zeros([6, x0.size])
    t = tspan[0]
    x = x0
    h = options.h0
    result = ResultRKF(np.array([tspan[0]]), x0)
    result.h = h

    while (t < tspan[1]):
        # Evaluate the six derivative stages.
        f[0, :] = h*func(t, x)
        f[1, :] = h*func(t + alpha[0]*h, x + beta[0, 0]*f[0, :])
        f[2, :] = h*func(t + alpha[1]*h, x + beta[1, 0]*f[0, :] + beta[1, 1]*f[1, :])
        f[3, :] = h*func(t + alpha[2]*h, x + beta[2, 0]*f[0, :] + beta[2, 1]*f[1, :]
                         + beta[2, 2]*f[2, :])
        f[4, :] = h*func(t + alpha[3]*h, x + beta[3, 0]*f[0, :] + beta[3, 1]*f[1, :]
                         + beta[3, 2]*f[2, :] + beta[3, 3]*f[3, :])
        f[5, :] = h*func(t + alpha[4]*h, x + beta[4, 0]*f[0, :] + beta[4, 1]*f[1, :]
                         + beta[4, 2]*f[2, :] + beta[4, 3]*f[3, :] + beta[4, 4]*f[4, :])

        # Evaluate the 4th-order (x) and 5th-order (x_hat) solutions.
        x_hat_add = 0
        x_add = 0
        for k in range(0, 6):
            x_hat_add = x_hat_add + c_hat[k]*f[k, :]
        for k in range(0, 5):
            x_add = x_add + c[k]*f[k, :]
        x_hat = x + x_hat_add
        x = x + x_add

        # Error estimate: difference between the two embedded solutions.
        error = LA.norm(abs(x - x_hat))

        # Save current step
        result.t = np.vstack([result.t, t])
        result.x = np.vstack([result.x, x])
        result.error = np.vstack([result.error, error])
        result.h = np.vstack([result.h, h])

        # Stepsize control.
        # BUG FIX: the original reassigned options.r_param, options.i_param
        # and options.db_param to hard-coded defaults here on every
        # iteration, silently overriding any user-supplied options; those
        # reassignments have been removed.
        if (abs(error) > options.tol) and (h > options.h0):
            h = max(h*options.r_param, options.h0)
        elif (abs(error) < options.tol*options.db_param):
            h = h*options.i_param
        t = t + h

    result.x = result.x.transpose()
    return result
| StarcoderdataPython |
11293019 | from .api import BasisClient
__all__ = ['BasisClient']
| StarcoderdataPython |
11359796 | from requests import post
base_headers = {
    'Content-Type': 'application/json',
}

url = "http://app-name.herokuapp.com/new_link"
data = {
    'link': 'https://github.com/dmdhrumilmistry/AnonUS'
}

# BUG FIX: `base_headers` was defined but never sent; pass it explicitly.
# (requests also sets Content-Type automatically for `json=`, so this is
# harmless but makes the intent of `base_headers` real.)
response = post(url=url, json=data, headers=base_headers).content.decode()
print(response)
| StarcoderdataPython |
6500156 | # while bole:
# n = n + 1
# print(f'Ola Turma {n}')
# while n <= 30:
# n = n + 1
# print('Ola Turma')
# break
# print('Passou!')
# while n <= 20:
# n = n + 1
# print(f'Ola Turma {n}')
# continue
# print('Passou')
from random import randint

# Pick a secret number, then prompt until the player guesses it, giving a
# higher/lower hint after each wrong guess.
sorteio = randint(1, 10)
numero = 0
while numero != sorteio:
    numero = int(input('Digite um número para ser sorteado: '))
    if numero > sorteio:
        print('O número é maior')
    elif numero < sorteio:
        print('O número é menor')
    else:
        print('Você acertou')
4823730 | <reponame>sam6134/SLA_WebServer<gh_stars>0
import matplotlib.pyplot as plt
from math import log
import numpy as np
from random import randint
import pickle
import time
# use ggplot style for more sophisticated visuals

# Load the previously pickled metric streams from the working directory.
with open('fractalStream.pkl', 'rb') as f:
    dfFractal = pickle.load(f)
with open('serverStream.pkl', 'rb') as f:
    dfServer = pickle.load(f)
with open('trafficStream.pkl', 'rb') as f:
    dfTrafiic = pickle.load(f)
with open('TimeStream.pkl', 'rb') as f:
    dfTime = pickle.load(f)

# First chart: server count per step ('-o') against log base 3 of traffic ('b').
plt.style.use('ggplot')
plt.ion()
fig, ax = plt.subplots()
fig.canvas.set_window_title('Live Chart')
ax.set_title("Servers")
df=[]
df1=[]
for i in range(len(dfTime)):
    df.append([dfServer[i]])
    df1.append([log(dfTrafiic[i],3)])
ax.plot(df, '-o')
ax.plot(df1,'b')
plt.show()
plt.pause(0.0001)
time.sleep(.4)

# Second chart: per-step timing against a constant 2.5 reference line.
plt.style.use('ggplot')
plt.ion()
fig, ax = plt.subplots()
fig.canvas.set_window_title('Live Chart')
ax.set_title("Server data")
df=[]
df1=[]
for i in range(len(dfTime)):
    df.append([dfTime[i]])
    df1.append([2.5])
ax.plot(df, '-o')
ax.plot(df1,'b')
plt.show()
plt.pause(0.0001)
time.sleep(.4)
| StarcoderdataPython |
242736 | <gh_stars>0
import board
import busio
from digitalio import DigitalInOut, Direction, Pull
from time import sleep
#from roboticsmasters_bluetoothuart import BluetoothUART
# NOTE(review): the UART appears to talk to a Bluetooth module wired to the
# GPS pins — confirm against the board wiring.
uart = busio.UART(board.GPS_TX, board.GPS_RX, baudrate=9600)

# Drive the module's enable pin high so it powers up.
enable = DigitalInOut(board.SDA)
enable.direction = Direction.OUTPUT
enable.value = True

#state = DigitalInOut(board.SCL)
#state.direction = Direction.INPUT
#print(state.value)

# AT command asking the module for its command list; CRLF terminates commands.
at = b'AT+HELP\r\n'
rt = b'\r\n'

#uart.write(at)
# Poll forever: send the AT command and echo whatever line comes back.
while True:
    uart.write(at)
    data = uart.readline()
    #uart.write(at)
    if data is not None:
        print("data: ", data)
    sleep(1)

#bt = BluetoothUART(uart)
#print(bt.connected)
| StarcoderdataPython |
4846432 | <gh_stars>0
from __future__ import print_function, division
import unittest
class KnowValues(unittest.TestCase):

    def test_thrj(self):
        """Cross-check pyscf's Wigner 3j routines against sympy's reference."""
        from pyscf.nao.m_thrj import thrj, thrj_nobuf
        from sympy.physics.wigner import wigner_3j
        # Exhaustively compare the buffered (thrj) and unbuffered
        # (thrj_nobuf) implementations with sympy over small angular
        # momenta l in [0, 2] and all m in [-4, 4].
        for l1 in range(0,3):
            for l2 in range(0,3):
                for l3 in range(0,3):
                    for m1 in range(-4,4+1):
                        for m2 in range(-4,4+1):
                            for m3 in range(-4,4+1):
                                w3j1 = thrj(l1, l2, l3, m1, m2, m3)
                                w3j2 = thrj_nobuf(l1, l2, l3, m1, m2, m3)
                                w3j3 = float(wigner_3j(l1, l2, l3, m1, m2, m3))
                                #print(w3j1, w3j2, w3j3, l1, l2, l3)
                                self.assertAlmostEqual(w3j1, w3j2)
                                self.assertAlmostEqual(w3j2, w3j3)
if __name__ == "__main__":
    # Run the unit tests when this module is executed directly.
    unittest.main()
| StarcoderdataPython |
3340518 | import pytest
import aioftp
@pytest.mark.asyncio
async def test_client_list_override(pair_factory, expect_codes_in_exception):
    """Commands requiring auth must fail with 503 before login."""
    async with pair_factory(logged=False, do_quit=False) as pair:
        with expect_codes_in_exception("503"):
            await pair.client.get_current_directory()
@pytest.mark.asyncio
async def test_anonymous_login(pair_factory):
    """Anonymous login succeeds with the factory's default settings."""
    async with pair_factory():
        pass
@pytest.mark.asyncio
async def test_login_with_login_data(pair_factory):
    """Explicit user/password login succeeds against the default server."""
    async with pair_factory(logged=False) as pair:
        await pair.client.login("foo", "bar")
@pytest.mark.asyncio
async def test_login_with_login_and_no_password(pair_factory, Server):
    """A user configured without a password can log in with a login only."""
    s = Server([aioftp.User("foo")])
    async with pair_factory(None, s, logged=False) as pair:
        await pair.client.login("foo")
@pytest.mark.asyncio
async def test_login_with_login_and_password(pair_factory, Server):
    """Login succeeds with the exact configured user/password pair."""
    s = Server([aioftp.User("foo", "bar")])
    async with pair_factory(None, s, logged=False) as pair:
        await pair.client.login("foo", "bar")
@pytest.mark.asyncio
async def test_login_with_login_and_password_no_such_user(
        pair_factory, Server, expect_codes_in_exception):
    """Login with an unknown user name is rejected with 530."""
    s = Server([aioftp.User("foo", "bar")])
    async with pair_factory(None, s, logged=False) as pair:
        with expect_codes_in_exception("530"):
            await pair.client.login("fo", "bar")
@pytest.mark.asyncio
async def test_login_with_login_and_password_bad_password(
        pair_factory, Server, expect_codes_in_exception):
    """Login with a wrong password is rejected with 530."""
    s = Server([aioftp.User("foo", "bar")])
    async with pair_factory(None, s, logged=False) as pair:
        with expect_codes_in_exception("530"):
            await pair.client.login("foo", "baz")
@pytest.mark.asyncio
async def test_pass_after_login(pair_factory, Server,
                                expect_codes_in_exception):
    """Sending PASS again after a completed login yields 503."""
    s = Server([aioftp.User("foo", "bar")])
    async with pair_factory(None, s, logged=False) as pair:
        await pair.client.login("foo", "bar")
        with expect_codes_in_exception("503"):
            await pair.client.command("PASS baz", ("230", "33x"))
| StarcoderdataPython |
97947 | <filename>incomepropertyevaluatorkit/pdf/pdfdocgen.py
# -*- coding: utf-8 -*-
import os, sys
from xhtml2pdf import pisa
from incomepropertyevaluatorkit.foundation.constants import *
from incomepropertyevaluatorkit.foundation.utils import *
from incomepropertyevaluatorkit.pdf.evaluatorfileformat import *
class PDFDocGen:
    """
    Class will take financial information about a rental property and
    perform PDF generation for various reports. The following reports
    are available: evaluator.
    """

    #--------------------------------------------------------------------------#
    #                        P U B L I C   F U N C T I O N S                    #
    #--------------------------------------------------------------------------#

    def __init__(self, doc_id=PDF_EVALUATOR_DOCUMENT_ID):
        # Validation happens in set_doc_id; the original duplicated the
        # same assertion here.
        self.set_doc_id(doc_id)

    def set_doc_id(self, doc_id):
        """Select the report template; only 'evaluator' is supported."""
        assert doc_id == "evaluator", 'Currently the only supported document is "evaluator".'
        self._doc_id = doc_id

    def set_doc_content(self, doc_content):
        """Store the content used to fill the template's placeholders."""
        self._doc_content = doc_content

    def generate(self, filepath):
        """Render the selected template with the stored content to *filepath*."""
        # Load up the document from the file.
        self.init_html_content()
        # Set our variables.
        self.update_html_content()
        # Generate our document locally.
        self.convert_html_to_pdf(self._html_content, filepath)

    #--------------------------------------------------------------------------#
    #                       P R I V A T E   F U N C T I O N S                   #
    #--------------------------------------------------------------------------#

    def init_html_content(self):
        """Read the HTML template for the current doc id into memory."""
        # Resolve the template relative to THIS file's directory.
        THIS_DIR = os.path.dirname(os.path.abspath(__file__))
        filepath = THIS_DIR + "/html_document/" + self._doc_id + ".html"
        with open(filepath) as input_file_handle:
            self._html_content = input_file_handle.read()

    def update_html_content(self):
        """Replace template placeholders with the user-supplied content."""
        # BUG FIX: was `is PDF_EVALUATOR_DOCUMENT_ID` — identity comparison
        # of strings only worked by interning luck; use equality.
        if self._doc_id == PDF_EVALUATOR_DOCUMENT_ID:
            self._html_content = set_evaluator_content(self._html_content, self._doc_content)

    def convert_html_to_pdf(self, sourceHtml, outputFilename):
        """Render *sourceHtml* to a PDF at *outputFilename* via xhtml2pdf.

        https://github.com/xhtml2pdf/xhtml2pdf/blob/master/doc/source/usage.rst
        """
        # Enable PDF generation logging.
        pisa.showLogging()
        # Open output file for writing (truncated binary).
        resultFile = open(outputFilename, "w+b")
        # Convert HTML to PDF.
        pisaStatus = pisa.CreatePDF(
            sourceHtml,          # the HTML to convert
            dest=resultFile)     # file handle to receive the result
        resultFile.close()
        # COMMENT FIX: the original claimed "True on success and False on
        # errors", but `err` is xhtml2pdf's error count (0 means success).
        return pisaStatus.err
| StarcoderdataPython |
9762741 | <filename>kaggle/toxic_comments/src/features/hate_model.py
import pickle
import numpy as np
from sklearn.base import BaseEstimator, TransformerMixin
class HateModel(BaseEstimator, TransformerMixin):
    """Sklearn transformer wrapping a pre-trained, pickled hate-speech model."""

    def __init__(self, path):
        # Load the already-trained classifier from disk.
        with open(path, 'rb') as f:
            self.model = pickle.load(f)

    def fit(self, X, y=None):
        """No-op: the wrapped model is already trained."""
        return self

    def transform(self, X, y=None):
        """Emit the wrapped model's class predictions as features for X."""
        # NOTE(review): the prints are debug output left in by the author.
        print('Feature extraction via Hate model')
        print(self.model.classes_)
        return self.model.predict(X)
if __name__ == '__main__':
    # Smoke test: load the pickled model and classify three sample strings.
    hate = HateModel('../data/hate_model.pkl')
    print(hate.transform(np.array(["I love you","Hello", "this is hate, I hate you"])))
| StarcoderdataPython |
9668200 | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'ashkeys5.ui'
#
# Created by: PyQt4 UI code generator 4.11.4
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
# Compatibility shims generated by pyuic4: fall back gracefully on PyQt4
# builds (API v2 / Python 3) that lack QString and UnicodeUTF8.
try:
    _fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
    # No QString in this PyQt4 API version; strings pass through unchanged.
    def _fromUtf8(s):
        return s

try:
    _encoding = QtGui.QApplication.UnicodeUTF8
    def _translate(context, text, disambig):
        return QtGui.QApplication.translate(context, text, disambig, _encoding)
except AttributeError:
    # Newer PyQt4 drops UnicodeUTF8; use the 3-argument translate overload.
    def _translate(context, text, disambig):
        return QtGui.QApplication.translate(context, text, disambig)
class Ui_MainWindow(object):
def setupUi(self, MainWindow):
MainWindow.setObjectName(_fromUtf8("MainWindow"))
MainWindow.resize(802, 513)
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(73, 143, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(201, 222, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Light, brush)
brush = QtGui.QBrush(QtGui.QColor(137, 182, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Midlight, brush)
brush = QtGui.QBrush(QtGui.QColor(36, 71, 127))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Dark, brush)
brush = QtGui.QBrush(QtGui.QColor(48, 95, 170))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Mid, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.BrightText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(73, 143, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Shadow, brush)
brush = QtGui.QBrush(QtGui.QColor(164, 199, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.AlternateBase, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 220))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ToolTipBase, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ToolTipText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(73, 143, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(201, 222, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Light, brush)
brush = QtGui.QBrush(QtGui.QColor(137, 182, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Midlight, brush)
brush = QtGui.QBrush(QtGui.QColor(36, 71, 127))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Dark, brush)
brush = QtGui.QBrush(QtGui.QColor(48, 95, 170))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Mid, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.BrightText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(73, 143, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Shadow, brush)
brush = QtGui.QBrush(QtGui.QColor(164, 199, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.AlternateBase, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 220))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ToolTipBase, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ToolTipText, brush)
brush = QtGui.QBrush(QtGui.QColor(36, 71, 127))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(73, 143, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(201, 222, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Light, brush)
brush = QtGui.QBrush(QtGui.QColor(137, 182, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Midlight, brush)
brush = QtGui.QBrush(QtGui.QColor(36, 71, 127))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Dark, brush)
brush = QtGui.QBrush(QtGui.QColor(48, 95, 170))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Mid, brush)
brush = QtGui.QBrush(QtGui.QColor(36, 71, 127))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.BrightText, brush)
brush = QtGui.QBrush(QtGui.QColor(36, 71, 127))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(73, 143, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(73, 143, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Shadow, brush)
brush = QtGui.QBrush(QtGui.QColor(73, 143, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.AlternateBase, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 220))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ToolTipBase, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ToolTipText, brush)
MainWindow.setPalette(palette)
MainWindow.setAutoFillBackground(False)
self.centralwidget = QtGui.QWidget(MainWindow)
self.centralwidget.setAutoFillBackground(True)
self.centralwidget.setObjectName(_fromUtf8("centralwidget"))
self.horizontalLayoutWidget = QtGui.QWidget(self.centralwidget)
self.horizontalLayoutWidget.setGeometry(QtCore.QRect(60, 10, 731, 461))
self.horizontalLayoutWidget.setObjectName(_fromUtf8("horizontalLayoutWidget"))
self.horizontalLayout = QtGui.QHBoxLayout(self.horizontalLayoutWidget)
self.horizontalLayout.setObjectName(_fromUtf8("horizontalLayout"))
self.frame_1 = QtGui.QFrame(self.horizontalLayoutWidget)
self.frame_1.setFrameShape(QtGui.QFrame.StyledPanel)
self.frame_1.setFrameShadow(QtGui.QFrame.Raised)
self.frame_1.setObjectName(_fromUtf8("frame_1"))
self.B1K_1 = QtGui.QPushButton(self.frame_1)
self.B1K_1.setEnabled(False)
self.B1K_1.setGeometry(QtCore.QRect(20, 130, 41, 32))
self.B1K_1.setObjectName(_fromUtf8("B1K_1"))
self.ticker_1 = QtGui.QLabel(self.frame_1)
self.ticker_1.setGeometry(QtCore.QRect(10, 0, 121, 41))
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Expanding)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.ticker_1.sizePolicy().hasHeightForWidth())
self.ticker_1.setSizePolicy(sizePolicy)
font = QtGui.QFont()
font.setPointSize(28)
self.ticker_1.setFont(font)
self.ticker_1.setAlignment(QtCore.Qt.AlignCenter)
self.ticker_1.setObjectName(_fromUtf8("ticker_1"))
self.qty_1 = QtGui.QLineEdit(self.frame_1)
self.qty_1.setGeometry(QtCore.QRect(20, 50, 100, 31))
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 170, 127))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 255, 191))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Light, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 212, 159))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Midlight, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 85, 63))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Dark, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 113, 84))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Mid, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.BrightText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 127))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 170, 127))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Shadow, brush)
brush = QtGui.QBrush(QtGui.QColor(127, 212, 191))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.AlternateBase, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 220))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ToolTipBase, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ToolTipText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 170, 127))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 255, 191))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Light, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 212, 159))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Midlight, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 85, 63))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Dark, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 113, 84))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Mid, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.BrightText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 127))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 170, 127))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Shadow, brush)
brush = QtGui.QBrush(QtGui.QColor(127, 212, 191))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.AlternateBase, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 220))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ToolTipBase, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ToolTipText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 85, 63))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 170, 127))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 255, 191))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Light, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 212, 159))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Midlight, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 85, 63))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Dark, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 113, 84))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Mid, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 85, 63))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.BrightText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 85, 63))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 170, 127))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 170, 127))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Shadow, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 170, 127))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.AlternateBase, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 220))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ToolTipBase, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ToolTipText, brush)
self.qty_1.setPalette(palette)
self.qty_1.setObjectName(_fromUtf8("qty_1"))
self.SAll_1 = QtGui.QPushButton(self.frame_1)
self.SAll_1.setEnabled(False)
self.SAll_1.setGeometry(QtCore.QRect(20, 210, 41, 32))
self.SAll_1.setObjectName(_fromUtf8("SAll_1"))
self.SHalf_1 = QtGui.QPushButton(self.frame_1)
self.SHalf_1.setEnabled(False)
self.SHalf_1.setGeometry(QtCore.QRect(20, 170, 41, 32))
self.SHalf_1.setObjectName(_fromUtf8("SHalf_1"))
self.T_1 = QtGui.QLineEdit(self.frame_1)
self.T_1.setGeometry(QtCore.QRect(20, 400, 100, 31))
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(170, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(170, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(73, 143, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Base, brush)
self.T_1.setPalette(palette)
self.T_1.setObjectName(_fromUtf8("T_1"))
self.B2K_1 = QtGui.QPushButton(self.frame_1)
self.B2K_1.setEnabled(False)
self.B2K_1.setGeometry(QtCore.QRect(20, 90, 41, 32))
self.B2K_1.setObjectName(_fromUtf8("B2K_1"))
self.multiplier_1 = QtGui.QLineEdit(self.frame_1)
self.multiplier_1.setGeometry(QtCore.QRect(20, 250, 100, 31))
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(170, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(170, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(73, 143, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Base, brush)
self.multiplier_1.setPalette(palette)
self.multiplier_1.setAutoFillBackground(False)
self.multiplier_1.setObjectName(_fromUtf8("multiplier_1"))
self.SH2x_1 = QtGui.QPushButton(self.frame_1)
self.SH2x_1.setEnabled(False)
self.SH2x_1.setGeometry(QtCore.QRect(80, 90, 41, 32))
self.SH2x_1.setStyleSheet(_fromUtf8("background-color: rgb(80,40,40)"))
self.SH2x_1.setObjectName(_fromUtf8("SH2x_1"))
self.SH1x_1 = QtGui.QPushButton(self.frame_1)
self.SH1x_1.setEnabled(False)
self.SH1x_1.setGeometry(QtCore.QRect(80, 130, 41, 32))
self.SH1x_1.setStyleSheet(_fromUtf8("background-color: rgb(80,40,40)"))
self.SH1x_1.setObjectName(_fromUtf8("SH1x_1"))
self.BCh_1 = QtGui.QPushButton(self.frame_1)
self.BCh_1.setEnabled(False)
self.BCh_1.setGeometry(QtCore.QRect(80, 170, 41, 32))
self.BCh_1.setStyleSheet(_fromUtf8("background-color: rgb(80,40,40)"))
self.BCh_1.setObjectName(_fromUtf8("BCh_1"))
self.BCa_1 = QtGui.QPushButton(self.frame_1)
self.BCa_1.setEnabled(False)
self.BCa_1.setGeometry(QtCore.QRect(80, 210, 41, 32))
self.BCa_1.setStyleSheet(_fromUtf8("background-color: rgb(80,40,40)"))
self.BCa_1.setObjectName(_fromUtf8("BCa_1"))
self.line = QtGui.QFrame(self.frame_1)
self.line.setGeometry(QtCore.QRect(10, 330, 118, 3))
self.line.setFrameShape(QtGui.QFrame.HLine)
self.line.setFrameShadow(QtGui.QFrame.Sunken)
self.line.setObjectName(_fromUtf8("line"))
self.stop_1 = QtGui.QLineEdit(self.frame_1)
self.stop_1.setGeometry(QtCore.QRect(20, 340, 101, 28))
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(254, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(254, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(73, 143, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Base, brush)
self.stop_1.setPalette(palette)
self.stop_1.setObjectName(_fromUtf8("stop_1"))
self.order_1 = QtGui.QLineEdit(self.frame_1)
self.order_1.setGeometry(QtCore.QRect(20, 370, 101, 28))
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(113, 114, 127))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(113, 114, 127))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(73, 143, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Base, brush)
self.order_1.setPalette(palette)
self.order_1.setObjectName(_fromUtf8("order_1"))
self.Cx_1 = QtGui.QPushButton(self.frame_1)
self.Cx_1.setEnabled(False)
self.Cx_1.setGeometry(QtCore.QRect(20, 290, 41, 32))
self.Cx_1.setObjectName(_fromUtf8("Cx_1"))
self.Chg_1 = QtGui.QPushButton(self.frame_1)
self.Chg_1.setEnabled(False)
self.Chg_1.setGeometry(QtCore.QRect(80, 290, 41, 32))
self.Chg_1.setStyleSheet(_fromUtf8("background-color: rgb(80,40,40)"))
self.Chg_1.setObjectName(_fromUtf8("Chg_1"))
self.B1K_1.raise_()
self.ticker_1.raise_()
self.qty_1.raise_()
self.SAll_1.raise_()
self.SHalf_1.raise_()
self.T_1.raise_()
self.B2K_1.raise_()
self.multiplier_1.raise_()
self.SH2x_1.raise_()
self.SH1x_1.raise_()
self.BCh_1.raise_()
self.BCa_1.raise_()
self.line.raise_()
self.stop_1.raise_()
self.order_1.raise_()
self.Cx_1.raise_()
self.Chg_1.raise_()
self.horizontalLayout.addWidget(self.frame_1)
self.frame_2 = QtGui.QFrame(self.horizontalLayoutWidget)
self.frame_2.setFrameShape(QtGui.QFrame.StyledPanel)
self.frame_2.setFrameShadow(QtGui.QFrame.Raised)
self.frame_2.setObjectName(_fromUtf8("frame_2"))
self.ticker_2 = QtGui.QLabel(self.frame_2)
self.ticker_2.setGeometry(QtCore.QRect(0, 0, 141, 41))
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Expanding)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.ticker_2.sizePolicy().hasHeightForWidth())
self.ticker_2.setSizePolicy(sizePolicy)
font = QtGui.QFont()
font.setPointSize(29)
self.ticker_2.setFont(font)
self.ticker_2.setAlignment(QtCore.Qt.AlignCenter)
self.ticker_2.setObjectName(_fromUtf8("ticker_2"))
self.qty_2 = QtGui.QLineEdit(self.frame_2)
self.qty_2.setGeometry(QtCore.QRect(20, 50, 100, 31))
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(255, 255, 127))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 127))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(73, 143, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Base, brush)
self.qty_2.setPalette(palette)
self.qty_2.setObjectName(_fromUtf8("qty_2"))
self.T_2 = QtGui.QLineEdit(self.frame_2)
self.T_2.setGeometry(QtCore.QRect(20, 400, 100, 31))
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(170, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(170, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(73, 143, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Base, brush)
self.T_2.setPalette(palette)
self.T_2.setObjectName(_fromUtf8("T_2"))
self.B2K_2 = QtGui.QPushButton(self.frame_2)
self.B2K_2.setEnabled(False)
self.B2K_2.setGeometry(QtCore.QRect(20, 90, 41, 32))
self.B2K_2.setObjectName(_fromUtf8("B2K_2"))
self.B1K_2 = QtGui.QPushButton(self.frame_2)
self.B1K_2.setEnabled(False)
self.B1K_2.setGeometry(QtCore.QRect(20, 130, 41, 32))
self.B1K_2.setObjectName(_fromUtf8("B1K_2"))
self.SHalf_2 = QtGui.QPushButton(self.frame_2)
self.SHalf_2.setEnabled(False)
self.SHalf_2.setGeometry(QtCore.QRect(20, 170, 41, 32))
self.SHalf_2.setObjectName(_fromUtf8("SHalf_2"))
self.SAll_2 = QtGui.QPushButton(self.frame_2)
self.SAll_2.setEnabled(False)
self.SAll_2.setGeometry(QtCore.QRect(20, 210, 41, 32))
self.SAll_2.setObjectName(_fromUtf8("SAll_2"))
self.multiplier_2 = QtGui.QLineEdit(self.frame_2)
self.multiplier_2.setGeometry(QtCore.QRect(20, 250, 100, 31))
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(170, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(170, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(73, 143, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Base, brush)
self.multiplier_2.setPalette(palette)
self.multiplier_2.setObjectName(_fromUtf8("multiplier_2"))
self.SH2x_2 = QtGui.QPushButton(self.frame_2)
self.SH2x_2.setEnabled(False)
self.SH2x_2.setGeometry(QtCore.QRect(80, 90, 41, 32))
self.SH2x_2.setStyleSheet(_fromUtf8("background-color: rgb(80,40,40)"))
self.SH2x_2.setObjectName(_fromUtf8("SH2x_2"))
self.SH1x_2 = QtGui.QPushButton(self.frame_2)
self.SH1x_2.setEnabled(False)
self.SH1x_2.setGeometry(QtCore.QRect(80, 130, 41, 32))
self.SH1x_2.setStyleSheet(_fromUtf8("background-color: rgb(80,40,40)"))
self.SH1x_2.setObjectName(_fromUtf8("SH1x_2"))
self.BCh_2 = QtGui.QPushButton(self.frame_2)
self.BCh_2.setEnabled(False)
self.BCh_2.setGeometry(QtCore.QRect(80, 170, 41, 32))
self.BCh_2.setStyleSheet(_fromUtf8("background-color: rgb(80,40,40)"))
self.BCh_2.setObjectName(_fromUtf8("BCh_2"))
self.BCa_2 = QtGui.QPushButton(self.frame_2)
self.BCa_2.setEnabled(False)
self.BCa_2.setGeometry(QtCore.QRect(80, 210, 41, 32))
self.BCa_2.setStyleSheet(_fromUtf8("background-color: rgb(80,40,40)"))
self.BCa_2.setObjectName(_fromUtf8("BCa_2"))
self.line_2 = QtGui.QFrame(self.frame_2)
self.line_2.setGeometry(QtCore.QRect(10, 330, 118, 3))
self.line_2.setFrameShape(QtGui.QFrame.HLine)
self.line_2.setFrameShadow(QtGui.QFrame.Sunken)
self.line_2.setObjectName(_fromUtf8("line_2"))
self.stop_2 = QtGui.QLineEdit(self.frame_2)
self.stop_2.setGeometry(QtCore.QRect(20, 340, 101, 28))
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(254, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(254, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(73, 143, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Base, brush)
self.stop_2.setPalette(palette)
self.stop_2.setObjectName(_fromUtf8("stop_2"))
self.order_2 = QtGui.QLineEdit(self.frame_2)
self.order_2.setGeometry(QtCore.QRect(20, 370, 101, 28))
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(113, 114, 127))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(113, 114, 127))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(73, 143, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Base, brush)
self.order_2.setPalette(palette)
self.order_2.setObjectName(_fromUtf8("order_2"))
self.Cx_2 = QtGui.QPushButton(self.frame_2)
self.Cx_2.setEnabled(False)
self.Cx_2.setGeometry(QtCore.QRect(20, 290, 41, 32))
self.Cx_2.setObjectName(_fromUtf8("Cx_2"))
self.Chg_2 = QtGui.QPushButton(self.frame_2)
self.Chg_2.setEnabled(False)
self.Chg_2.setGeometry(QtCore.QRect(80, 290, 41, 32))
self.Chg_2.setStyleSheet(_fromUtf8("background-color: rgb(80,40,40)"))
self.Chg_2.setObjectName(_fromUtf8("Chg_2"))
self.horizontalLayout.addWidget(self.frame_2)
self.frame_3 = QtGui.QFrame(self.horizontalLayoutWidget)
self.frame_3.setFrameShape(QtGui.QFrame.StyledPanel)
self.frame_3.setFrameShadow(QtGui.QFrame.Raised)
self.frame_3.setObjectName(_fromUtf8("frame_3"))
self.ticker_3 = QtGui.QLabel(self.frame_3)
self.ticker_3.setGeometry(QtCore.QRect(0, 0, 141, 41))
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Expanding)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.ticker_3.sizePolicy().hasHeightForWidth())
self.ticker_3.setSizePolicy(sizePolicy)
font = QtGui.QFont()
font.setPointSize(29)
self.ticker_3.setFont(font)
self.ticker_3.setAlignment(QtCore.Qt.AlignCenter)
self.ticker_3.setObjectName(_fromUtf8("ticker_3"))
self.qty_3 = QtGui.QLineEdit(self.frame_3)
self.qty_3.setGeometry(QtCore.QRect(20, 50, 100, 31))
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(255, 255, 127))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 127))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(73, 143, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Base, brush)
self.qty_3.setPalette(palette)
self.qty_3.setObjectName(_fromUtf8("qty_3"))
self.T_3 = QtGui.QLineEdit(self.frame_3)
self.T_3.setGeometry(QtCore.QRect(20, 400, 100, 31))
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(170, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(170, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(73, 143, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Base, brush)
self.T_3.setPalette(palette)
self.T_3.setObjectName(_fromUtf8("T_3"))
self.B2K_3 = QtGui.QPushButton(self.frame_3)
self.B2K_3.setEnabled(False)
self.B2K_3.setGeometry(QtCore.QRect(20, 90, 41, 32))
self.B2K_3.setObjectName(_fromUtf8("B2K_3"))
self.B1K_3 = QtGui.QPushButton(self.frame_3)
self.B1K_3.setEnabled(False)
self.B1K_3.setGeometry(QtCore.QRect(20, 130, 41, 32))
self.B1K_3.setObjectName(_fromUtf8("B1K_3"))
self.SHalf_3 = QtGui.QPushButton(self.frame_3)
self.SHalf_3.setEnabled(False)
self.SHalf_3.setGeometry(QtCore.QRect(20, 170, 41, 32))
self.SHalf_3.setObjectName(_fromUtf8("SHalf_3"))
self.SAll_3 = QtGui.QPushButton(self.frame_3)
self.SAll_3.setEnabled(False)
self.SAll_3.setGeometry(QtCore.QRect(20, 210, 41, 32))
self.SAll_3.setObjectName(_fromUtf8("SAll_3"))
self.multiplier_3 = QtGui.QLineEdit(self.frame_3)
self.multiplier_3.setGeometry(QtCore.QRect(20, 250, 100, 31))
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(170, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(170, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(73, 143, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Base, brush)
self.multiplier_3.setPalette(palette)
self.multiplier_3.setObjectName(_fromUtf8("multiplier_3"))
self.SH2x_3 = QtGui.QPushButton(self.frame_3)
self.SH2x_3.setEnabled(False)
self.SH2x_3.setGeometry(QtCore.QRect(80, 90, 41, 32))
self.SH2x_3.setStyleSheet(_fromUtf8("background-color: rgb(80,40,40)"))
self.SH2x_3.setObjectName(_fromUtf8("SH2x_3"))
self.SH1x_3 = QtGui.QPushButton(self.frame_3)
self.SH1x_3.setEnabled(False)
self.SH1x_3.setGeometry(QtCore.QRect(80, 130, 41, 32))
self.SH1x_3.setStyleSheet(_fromUtf8("background-color: rgb(80,40,40)"))
self.SH1x_3.setObjectName(_fromUtf8("SH1x_3"))
self.BCh_3 = QtGui.QPushButton(self.frame_3)
self.BCh_3.setEnabled(False)
self.BCh_3.setGeometry(QtCore.QRect(80, 170, 41, 32))
self.BCh_3.setStyleSheet(_fromUtf8("background-color: rgb(80,40,40)"))
self.BCh_3.setObjectName(_fromUtf8("BCh_3"))
self.BCa_3 = QtGui.QPushButton(self.frame_3)
self.BCa_3.setEnabled(False)
self.BCa_3.setGeometry(QtCore.QRect(80, 210, 41, 32))
self.BCa_3.setStyleSheet(_fromUtf8("background-color: rgb(80,40,40)"))
self.BCa_3.setObjectName(_fromUtf8("BCa_3"))
self.line_3 = QtGui.QFrame(self.frame_3)
self.line_3.setGeometry(QtCore.QRect(10, 330, 118, 3))
self.line_3.setFrameShape(QtGui.QFrame.HLine)
self.line_3.setFrameShadow(QtGui.QFrame.Sunken)
self.line_3.setObjectName(_fromUtf8("line_3"))
self.stop_3 = QtGui.QLineEdit(self.frame_3)
self.stop_3.setGeometry(QtCore.QRect(20, 340, 101, 28))
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(254, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(254, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(73, 143, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Base, brush)
self.stop_3.setPalette(palette)
self.stop_3.setObjectName(_fromUtf8("stop_3"))
self.order_3 = QtGui.QLineEdit(self.frame_3)
self.order_3.setGeometry(QtCore.QRect(20, 370, 101, 28))
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(113, 114, 127))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(113, 114, 127))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(73, 143, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Base, brush)
self.order_3.setPalette(palette)
self.order_3.setObjectName(_fromUtf8("order_3"))
self.Cx_3 = QtGui.QPushButton(self.frame_3)
self.Cx_3.setEnabled(False)
self.Cx_3.setGeometry(QtCore.QRect(20, 290, 41, 32))
self.Cx_3.setObjectName(_fromUtf8("Cx_3"))
self.Chg_3 = QtGui.QPushButton(self.frame_3)
self.Chg_3.setEnabled(False)
self.Chg_3.setGeometry(QtCore.QRect(80, 290, 41, 32))
self.Chg_3.setStyleSheet(_fromUtf8("background-color: rgb(80,40,40)"))
self.Chg_3.setObjectName(_fromUtf8("Chg_3"))
self.horizontalLayout.addWidget(self.frame_3)
self.frame_4 = QtGui.QFrame(self.horizontalLayoutWidget)
self.frame_4.setFrameShape(QtGui.QFrame.StyledPanel)
self.frame_4.setFrameShadow(QtGui.QFrame.Raised)
self.frame_4.setObjectName(_fromUtf8("frame_4"))
self.B1K_4 = QtGui.QPushButton(self.frame_4)
self.B1K_4.setEnabled(False)
self.B1K_4.setGeometry(QtCore.QRect(20, 130, 41, 32))
self.B1K_4.setObjectName(_fromUtf8("B1K_4"))
self.ticker_4 = QtGui.QLabel(self.frame_4)
self.ticker_4.setGeometry(QtCore.QRect(10, 0, 121, 41))
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Expanding)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.ticker_4.sizePolicy().hasHeightForWidth())
self.ticker_4.setSizePolicy(sizePolicy)
font = QtGui.QFont()
font.setPointSize(28)
self.ticker_4.setFont(font)
self.ticker_4.setAlignment(QtCore.Qt.AlignCenter)
self.ticker_4.setObjectName(_fromUtf8("ticker_4"))
self.qty_4 = QtGui.QLineEdit(self.frame_4)
self.qty_4.setGeometry(QtCore.QRect(20, 50, 100, 31))
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 170, 127))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 255, 191))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Light, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 212, 159))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Midlight, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 85, 63))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Dark, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 113, 84))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Mid, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.BrightText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 127))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 170, 127))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Shadow, brush)
brush = QtGui.QBrush(QtGui.QColor(127, 212, 191))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.AlternateBase, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 220))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ToolTipBase, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ToolTipText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 170, 127))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 255, 191))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Light, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 212, 159))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Midlight, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 85, 63))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Dark, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 113, 84))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Mid, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.BrightText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 127))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 170, 127))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Shadow, brush)
brush = QtGui.QBrush(QtGui.QColor(127, 212, 191))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.AlternateBase, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 220))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ToolTipBase, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ToolTipText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 85, 63))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 170, 127))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 255, 191))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Light, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 212, 159))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Midlight, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 85, 63))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Dark, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 113, 84))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Mid, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 85, 63))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.BrightText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 85, 63))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 170, 127))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 170, 127))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Shadow, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 170, 127))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.AlternateBase, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 220))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ToolTipBase, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ToolTipText, brush)
self.qty_4.setPalette(palette)
self.qty_4.setObjectName(_fromUtf8("qty_4"))
self.SAll_4 = QtGui.QPushButton(self.frame_4)
self.SAll_4.setEnabled(False)
self.SAll_4.setGeometry(QtCore.QRect(20, 210, 41, 32))
self.SAll_4.setObjectName(_fromUtf8("SAll_4"))
self.SHalf_4 = QtGui.QPushButton(self.frame_4)
self.SHalf_4.setEnabled(False)
self.SHalf_4.setGeometry(QtCore.QRect(20, 170, 41, 32))
self.SHalf_4.setObjectName(_fromUtf8("SHalf_4"))
self.T_4 = QtGui.QLineEdit(self.frame_4)
self.T_4.setGeometry(QtCore.QRect(20, 400, 100, 31))
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(170, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(170, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(73, 143, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Base, brush)
self.T_4.setPalette(palette)
self.T_4.setObjectName(_fromUtf8("T_4"))
self.B2K_4 = QtGui.QPushButton(self.frame_4)
self.B2K_4.setEnabled(False)
self.B2K_4.setGeometry(QtCore.QRect(20, 90, 41, 32))
self.B2K_4.setObjectName(_fromUtf8("B2K_4"))
self.multiplier_4 = QtGui.QLineEdit(self.frame_4)
self.multiplier_4.setGeometry(QtCore.QRect(20, 250, 100, 31))
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(170, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(170, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(73, 143, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Base, brush)
self.multiplier_4.setPalette(palette)
self.multiplier_4.setAutoFillBackground(False)
self.multiplier_4.setObjectName(_fromUtf8("multiplier_4"))
self.SH2x_4 = QtGui.QPushButton(self.frame_4)
self.SH2x_4.setEnabled(False)
self.SH2x_4.setGeometry(QtCore.QRect(80, 90, 41, 32))
self.SH2x_4.setStyleSheet(_fromUtf8("background-color: rgb(80,40,40)"))
self.SH2x_4.setObjectName(_fromUtf8("SH2x_4"))
self.SH1x_4 = QtGui.QPushButton(self.frame_4)
self.SH1x_4.setEnabled(False)
self.SH1x_4.setGeometry(QtCore.QRect(80, 130, 41, 32))
self.SH1x_4.setStyleSheet(_fromUtf8("background-color: rgb(80,40,40)"))
self.SH1x_4.setObjectName(_fromUtf8("SH1x_4"))
self.BCh_4 = QtGui.QPushButton(self.frame_4)
self.BCh_4.setEnabled(False)
self.BCh_4.setGeometry(QtCore.QRect(80, 170, 41, 32))
self.BCh_4.setStyleSheet(_fromUtf8("background-color: rgb(80,40,40)"))
self.BCh_4.setObjectName(_fromUtf8("BCh_4"))
self.BCa_4 = QtGui.QPushButton(self.frame_4)
self.BCa_4.setEnabled(False)
self.BCa_4.setGeometry(QtCore.QRect(80, 210, 41, 32))
self.BCa_4.setStyleSheet(_fromUtf8("background-color: rgb(80,40,40)"))
self.BCa_4.setObjectName(_fromUtf8("BCa_4"))
self.line1 = QtGui.QFrame(self.frame_4)
self.line1.setGeometry(QtCore.QRect(10, 330, 118, 3))
self.line1.setFrameShape(QtGui.QFrame.HLine)
self.line1.setFrameShadow(QtGui.QFrame.Sunken)
self.line1.setObjectName(_fromUtf8("line1"))
self.stop_4 = QtGui.QLineEdit(self.frame_4)
self.stop_4.setGeometry(QtCore.QRect(20, 340, 101, 28))
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(254, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(254, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(73, 143, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Base, brush)
self.stop_4.setPalette(palette)
self.stop_4.setObjectName(_fromUtf8("stop_4"))
self.order_4 = QtGui.QLineEdit(self.frame_4)
self.order_4.setGeometry(QtCore.QRect(20, 370, 101, 28))
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(113, 114, 127))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(113, 114, 127))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(73, 143, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Base, brush)
self.order_4.setPalette(palette)
self.order_4.setObjectName(_fromUtf8("order_4"))
self.Cx_4 = QtGui.QPushButton(self.frame_4)
self.Cx_4.setEnabled(False)
self.Cx_4.setGeometry(QtCore.QRect(20, 290, 41, 32))
self.Cx_4.setObjectName(_fromUtf8("Cx_4"))
self.Chg_4 = QtGui.QPushButton(self.frame_4)
self.Chg_4.setEnabled(False)
self.Chg_4.setGeometry(QtCore.QRect(80, 290, 41, 32))
self.Chg_4.setStyleSheet(_fromUtf8("background-color: rgb(80,40,40)"))
self.Chg_4.setObjectName(_fromUtf8("Chg_4"))
self.B1K_4.raise_()
self.ticker_4.raise_()
self.qty_4.raise_()
self.SAll_4.raise_()
self.SHalf_4.raise_()
self.T_4.raise_()
self.B2K_4.raise_()
self.multiplier_4.raise_()
self.SH2x_4.raise_()
self.SH1x_4.raise_()
self.BCh_4.raise_()
self.BCa_4.raise_()
self.line.raise_()
self.stop_4.raise_()
self.order_4.raise_()
self.frame_2.raise_()
self.Cx_4.raise_()
self.Chg_4.raise_()
self.horizontalLayout.addWidget(self.frame_4)
self.frame_5 = QtGui.QFrame(self.horizontalLayoutWidget)
self.frame_5.setFrameShape(QtGui.QFrame.StyledPanel)
self.frame_5.setFrameShadow(QtGui.QFrame.Raised)
self.frame_5.setObjectName(_fromUtf8("frame_5"))
self.B1K_5 = QtGui.QPushButton(self.frame_5)
self.B1K_5.setEnabled(False)
self.B1K_5.setGeometry(QtCore.QRect(20, 130, 41, 32))
self.B1K_5.setObjectName(_fromUtf8("B1K_5"))
self.ticker_5 = QtGui.QLabel(self.frame_5)
self.ticker_5.setGeometry(QtCore.QRect(10, 0, 121, 41))
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Expanding)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.ticker_5.sizePolicy().hasHeightForWidth())
self.ticker_5.setSizePolicy(sizePolicy)
font = QtGui.QFont()
font.setPointSize(28)
self.ticker_5.setFont(font)
self.ticker_5.setAlignment(QtCore.Qt.AlignCenter)
self.ticker_5.setObjectName(_fromUtf8("ticker_5"))
self.qty_5 = QtGui.QLineEdit(self.frame_5)
self.qty_5.setGeometry(QtCore.QRect(20, 50, 100, 31))
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 170, 127))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 255, 191))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Light, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 212, 159))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Midlight, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 85, 63))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Dark, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 113, 84))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Mid, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.BrightText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 127))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 170, 127))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Shadow, brush)
brush = QtGui.QBrush(QtGui.QColor(127, 212, 191))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.AlternateBase, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 220))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ToolTipBase, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.ToolTipText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 170, 127))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 255, 191))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Light, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 212, 159))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Midlight, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 85, 63))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Dark, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 113, 84))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Mid, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.BrightText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 127))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 170, 127))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Shadow, brush)
brush = QtGui.QBrush(QtGui.QColor(127, 212, 191))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.AlternateBase, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 220))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ToolTipBase, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.ToolTipText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 85, 63))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.WindowText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 170, 127))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Button, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 255, 191))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Light, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 212, 159))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Midlight, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 85, 63))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Dark, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 113, 84))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Mid, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 85, 63))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Text, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.BrightText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 85, 63))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ButtonText, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 170, 127))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 170, 127))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Window, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Shadow, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 170, 127))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.AlternateBase, brush)
brush = QtGui.QBrush(QtGui.QColor(255, 255, 220))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ToolTipBase, brush)
brush = QtGui.QBrush(QtGui.QColor(0, 0, 0))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.ToolTipText, brush)
self.qty_5.setPalette(palette)
self.qty_5.setObjectName(_fromUtf8("qty_5"))
self.SAll_5 = QtGui.QPushButton(self.frame_5)
self.SAll_5.setEnabled(False)
self.SAll_5.setGeometry(QtCore.QRect(20, 210, 41, 32))
self.SAll_5.setObjectName(_fromUtf8("SAll_5"))
self.SHalf_5 = QtGui.QPushButton(self.frame_5)
self.SHalf_5.setEnabled(False)
self.SHalf_5.setGeometry(QtCore.QRect(20, 170, 41, 32))
self.SHalf_5.setObjectName(_fromUtf8("SHalf_5"))
self.T_5 = QtGui.QLineEdit(self.frame_5)
self.T_5.setGeometry(QtCore.QRect(20, 400, 100, 31))
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(170, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(170, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(73, 143, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Base, brush)
self.T_5.setPalette(palette)
self.T_5.setObjectName(_fromUtf8("T_5"))
self.B2K_5 = QtGui.QPushButton(self.frame_5)
self.B2K_5.setEnabled(False)
self.B2K_5.setGeometry(QtCore.QRect(20, 90, 41, 32))
self.B2K_5.setObjectName(_fromUtf8("B2K_5"))
self.multiplier_5 = QtGui.QLineEdit(self.frame_5)
self.multiplier_5.setGeometry(QtCore.QRect(20, 250, 100, 31))
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(170, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(170, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(73, 143, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Base, brush)
self.multiplier_5.setPalette(palette)
self.multiplier_5.setAutoFillBackground(False)
self.multiplier_5.setObjectName(_fromUtf8("multiplier_5"))
self.SH2x_5 = QtGui.QPushButton(self.frame_5)
self.SH2x_5.setEnabled(False)
self.SH2x_5.setGeometry(QtCore.QRect(80, 90, 41, 32))
self.SH2x_5.setStyleSheet(_fromUtf8("background-color: rgb(80,40,40)"))
self.SH2x_5.setObjectName(_fromUtf8("SH2x_5"))
self.SH1x_5 = QtGui.QPushButton(self.frame_5)
self.SH1x_5.setEnabled(False)
self.SH1x_5.setGeometry(QtCore.QRect(80, 130, 41, 32))
self.SH1x_5.setStyleSheet(_fromUtf8("background-color: rgb(80,40,40)"))
self.SH1x_5.setObjectName(_fromUtf8("SH1x_5"))
self.BCh_5 = QtGui.QPushButton(self.frame_5)
self.BCh_5.setEnabled(False)
self.BCh_5.setGeometry(QtCore.QRect(80, 170, 41, 32))
self.BCh_5.setStyleSheet(_fromUtf8("background-color: rgb(80,40,40)"))
self.BCh_5.setObjectName(_fromUtf8("BCh_5"))
self.BCa_5 = QtGui.QPushButton(self.frame_5)
self.BCa_5.setEnabled(False)
self.BCa_5.setGeometry(QtCore.QRect(80, 210, 41, 32))
self.BCa_5.setStyleSheet(_fromUtf8("background-color: rgb(80,40,40)"))
self.BCa_5.setObjectName(_fromUtf8("BCa_5"))
self.line2 = QtGui.QFrame(self.frame_5)
self.line2.setGeometry(QtCore.QRect(10, 330, 118, 3))
self.line2.setFrameShape(QtGui.QFrame.HLine)
self.line2.setFrameShadow(QtGui.QFrame.Sunken)
self.line2.setObjectName(_fromUtf8("line2"))
self.stop_5 = QtGui.QLineEdit(self.frame_5)
self.stop_5.setGeometry(QtCore.QRect(20, 340, 101, 28))
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(254, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(254, 255, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(73, 143, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Base, brush)
self.stop_5.setPalette(palette)
self.stop_5.setObjectName(_fromUtf8("stop_5"))
self.order_5 = QtGui.QLineEdit(self.frame_5)
self.order_5.setGeometry(QtCore.QRect(20, 370, 101, 28))
palette = QtGui.QPalette()
brush = QtGui.QBrush(QtGui.QColor(113, 114, 127))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Active, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(113, 114, 127))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Inactive, QtGui.QPalette.Base, brush)
brush = QtGui.QBrush(QtGui.QColor(73, 143, 255))
brush.setStyle(QtCore.Qt.SolidPattern)
palette.setBrush(QtGui.QPalette.Disabled, QtGui.QPalette.Base, brush)
self.order_5.setPalette(palette)
self.order_5.setObjectName(_fromUtf8("order_5"))
self.Cx_5 = QtGui.QPushButton(self.frame_5)
self.Cx_5.setEnabled(False)
self.Cx_5.setGeometry(QtCore.QRect(20, 290, 41, 32))
self.Cx_5.setObjectName(_fromUtf8("Cx_5"))
self.Chg_5 = QtGui.QPushButton(self.frame_5)
self.Chg_5.setEnabled(False)
self.Chg_5.setGeometry(QtCore.QRect(80, 290, 41, 32))
self.Chg_5.setStyleSheet(_fromUtf8("background-color: rgb(80,40,40)"))
self.Chg_5.setObjectName(_fromUtf8("Chg_5"))
self.B1K_5.raise_()
self.ticker_5.raise_()
self.qty_5.raise_()
self.SAll_5.raise_()
self.SHalf_5.raise_()
self.T_5.raise_()
self.B2K_5.raise_()
self.multiplier_5.raise_()
self.SH2x_5.raise_()
self.SH1x_5.raise_()
self.BCh_5.raise_()
self.BCa_5.raise_()
self.line.raise_()
self.stop_5.raise_()
self.order_5.raise_()
self.frame_2.raise_()
self.Cx_5.raise_()
self.Chg_5.raise_()
self.horizontalLayout.addWidget(self.frame_5)
self.label = QtGui.QLabel(self.centralwidget)
self.label.setGeometry(QtCore.QRect(10, 70, 31, 18))
self.label.setObjectName(_fromUtf8("label"))
self.Arm = QtGui.QCheckBox(self.centralwidget)
self.Arm.setGeometry(QtCore.QRect(10, 10, 21, 23))
self.Arm.setAutoFillBackground(True)
self.Arm.setObjectName(_fromUtf8("Arm"))
self.label_5 = QtGui.QLabel(self.centralwidget)
self.label_5.setGeometry(QtCore.QRect(10, 270, 41, 18))
self.label_5.setObjectName(_fromUtf8("label_5"))
self.label_4 = QtGui.QLabel(self.centralwidget)
self.label_4.setGeometry(QtCore.QRect(10, 430, 41, 18))
self.label_4.setObjectName(_fromUtf8("label_4"))
self.label_6 = QtGui.QLabel(self.centralwidget)
self.label_6.setGeometry(QtCore.QRect(10, 360, 41, 18))
self.label_6.setObjectName(_fromUtf8("label_6"))
MainWindow.setCentralWidget(self.centralwidget)
self.statusBar = QtGui.QStatusBar(MainWindow)
self.statusBar.setObjectName(_fromUtf8("statusBar"))
MainWindow.setStatusBar(self.statusBar)
self.retranslateUi(MainWindow)
QtCore.QMetaObject.connectSlotsByName(MainWindow)
    def retranslateUi(self, MainWindow):
        """Install all user-visible strings on the widgets (pyuic output).

        NOTE(review): this method looks machine-generated from a Qt
        Designer .ui file -- prefer editing the .ui source and
        regenerating instead of hand-editing these strings.
        """
        MainWindow.setWindowTitle(_translate("MainWindow", "Etrade-HKey", None))
        # Frame 1: order-entry buttons and fields.
        self.B1K_1.setText(_translate("MainWindow", "B-1x", None))
        self.ticker_1.setText(_translate("MainWindow", "***", None))
        self.qty_1.setToolTip(_translate("MainWindow", "Quantity", None))
        self.SAll_1.setText(_translate("MainWindow", "S-a", None))
        self.SHalf_1.setText(_translate("MainWindow", "S-h", None))
        self.T_1.setToolTip(_translate("MainWindow", "Ticker", None))
        self.B2K_1.setText(_translate("MainWindow", "B-2x", None))
        self.multiplier_1.setToolTip(_translate("MainWindow", "Ticker", None))
        self.SH2x_1.setText(_translate("MainWindow", "SH2x", None))
        self.SH1x_1.setText(_translate("MainWindow", "SH1x", None))
        self.BCh_1.setText(_translate("MainWindow", "BC-h", None))
        self.BCa_1.setText(_translate("MainWindow", "BC-a", None))
        self.Cx_1.setText(_translate("MainWindow", "Cx", None))
        self.Chg_1.setText(_translate("MainWindow", "Chg", None))
        # Frame 2.
        self.ticker_2.setText(_translate("MainWindow", "***", None))
        self.B2K_2.setText(_translate("MainWindow", "B-2x", None))
        self.B1K_2.setText(_translate("MainWindow", "B-1x", None))
        self.SHalf_2.setText(_translate("MainWindow", "S-h", None))
        self.SAll_2.setText(_translate("MainWindow", "S-a", None))
        self.multiplier_2.setToolTip(_translate("MainWindow", "Ticker", None))
        self.SH2x_2.setText(_translate("MainWindow", "SH2x", None))
        self.SH1x_2.setText(_translate("MainWindow", "SH1x", None))
        self.BCh_2.setText(_translate("MainWindow", "BC-h", None))
        self.BCa_2.setText(_translate("MainWindow", "BC-a", None))
        self.Cx_2.setText(_translate("MainWindow", "Cx", None))
        self.Chg_2.setText(_translate("MainWindow", "Chg", None))
        # Frame 3.
        self.ticker_3.setText(_translate("MainWindow", "***", None))
        self.B2K_3.setText(_translate("MainWindow", "B-2x", None))
        self.B1K_3.setText(_translate("MainWindow", "B-1x", None))
        self.SHalf_3.setText(_translate("MainWindow", "S-h", None))
        self.SAll_3.setText(_translate("MainWindow", "S-a", None))
        self.multiplier_3.setToolTip(_translate("MainWindow", "Ticker", None))
        self.SH2x_3.setText(_translate("MainWindow", "SH2x", None))
        self.SH1x_3.setText(_translate("MainWindow", "SH1x", None))
        self.BCh_3.setText(_translate("MainWindow", "BC-h", None))
        self.BCa_3.setText(_translate("MainWindow", "BC-a", None))
        self.Cx_3.setText(_translate("MainWindow", "Cx", None))
        self.Chg_3.setText(_translate("MainWindow", "Chg", None))
        # Frame 4.
        self.B1K_4.setText(_translate("MainWindow", "B-1x", None))
        self.ticker_4.setText(_translate("MainWindow", "***", None))
        self.qty_4.setToolTip(_translate("MainWindow", "Quantity", None))
        self.SAll_4.setText(_translate("MainWindow", "S-a", None))
        self.SHalf_4.setText(_translate("MainWindow", "S-h", None))
        self.T_4.setToolTip(_translate("MainWindow", "Ticker", None))
        self.B2K_4.setText(_translate("MainWindow", "B-2x", None))
        self.multiplier_4.setToolTip(_translate("MainWindow", "Ticker", None))
        self.SH2x_4.setText(_translate("MainWindow", "SH2x", None))
        self.SH1x_4.setText(_translate("MainWindow", "SH1x", None))
        self.BCh_4.setText(_translate("MainWindow", "BC-h", None))
        self.BCa_4.setText(_translate("MainWindow", "BC-a", None))
        self.Cx_4.setText(_translate("MainWindow", "Cx", None))
        self.Chg_4.setText(_translate("MainWindow", "Chg", None))
        # Frame 5.
        self.B1K_5.setText(_translate("MainWindow", "B-1x", None))
        self.ticker_5.setText(_translate("MainWindow", "***", None))
        self.qty_5.setToolTip(_translate("MainWindow", "Quantity", None))
        self.SAll_5.setText(_translate("MainWindow", "S-a", None))
        self.SHalf_5.setText(_translate("MainWindow", "S-h", None))
        self.T_5.setToolTip(_translate("MainWindow", "Ticker", None))
        self.B2K_5.setText(_translate("MainWindow", "B-2x", None))
        self.multiplier_5.setToolTip(_translate("MainWindow", "Ticker", None))
        self.SH2x_5.setText(_translate("MainWindow", "SH2x", None))
        self.SH1x_5.setText(_translate("MainWindow", "SH1x", None))
        self.BCh_5.setText(_translate("MainWindow", "BC-h", None))
        self.BCa_5.setText(_translate("MainWindow", "BC-a", None))
        self.Cx_5.setText(_translate("MainWindow", "Cx", None))
        self.Chg_5.setText(_translate("MainWindow", "Chg", None))
        # Standalone labels and the arming checkbox.
        self.label.setText(_translate("MainWindow", "Qty", None))
        self.Arm.setText(_translate("MainWindow", "CheckBox", None))
        self.label_5.setText(_translate("MainWindow", "Xfact", None))
        self.label_4.setText(_translate("MainWindow", "Sym", None))
        self.label_6.setText(_translate("MainWindow", "$top", None))
| StarcoderdataPython |
5160343 | # -----------------------------------------------
# aplotter.py - ascii art function plotter
# Copyright (c) 2006, <NAME>
# All rights reserved.
#
# Redistribution and use in source and binary forms,
# with or without modification, are permitted provided
# that the following conditions are met:
#
# * Redistributions of source code must retain the
# above copyright notice, this list of conditions
# and the following disclaimer.
# * Redistributions in binary form must reproduce the
# above copyright notice, this list of conditions
# and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the <ORGANIZATION> nor the names of
# its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# -----------------------------------------------
import math
__all__ = ["Plotter", "plot"]
__version__ = "0.0.1"
EPSILON = 0.000001
def transposed(mat):
    """Return the transpose of *mat*: element [i][j] becomes [j][i]."""
    column_count = len(mat[0])
    return [[row[i] for row in mat] for i in range(column_count)]
def y_reversed(mat):
    """Return a copy of *mat* in which every row is reversed."""
    return [row[::-1] for row in mat]
def sign(x):
    """Return 1 for positive *x*, -1 for negative and 0 for zero."""
    # Boolean arithmetic collapses the three cases into one expression.
    return (x > 0) - (x < 0)
class Plotter(object):
    """Render sequences of (x, y) samples as an ASCII-art picture.

    The canvas is an ``x_size`` x ``y_size`` character grid.  The public
    entry points are :meth:`plot_single` (y values only, x is the index)
    and :meth:`plot_double` (explicit x and y sequences); both return the
    finished picture as a single newline-joined string.
    """

    class PlotData(object):
        """Geometry shared by the drawing helpers: canvas size, data-space
        bounds, and the data-units-per-cell step sizes."""

        def __init__(self, x_size, y_size, min_x, max_x, min_y, max_y, x_mod, y_mod):
            self.x_size = x_size
            self.y_size = y_size
            self.min_x = min_x
            self.max_x = max_x
            self.min_y = min_y
            self.max_y = max_y
            # Margins (in data units) that were added around the raw data.
            self.x_mod = x_mod
            self.y_mod = y_mod
            # Size of one character cell in data units, plus inverses.
            self.x_step = float(max_x - min_x) / float(self.x_size)
            self.y_step = float(max_y - min_y) / float(self.y_size)
            self.inv_x_step = 1 / self.x_step
            self.inv_y_step = 1 / self.y_step
            # Aspect-ratio correction applied when classifying slopes.
            self.ratio = self.y_step / self.x_step

        def __repr__(self):
            s = "size: %s, bl: %s, tr: %s, step: %s" % (
                (self.x_size, self.y_size),
                (self.min_x, self.min_y),
                (self.max_x, self.max_y),
                (self.x_step, self.y_step),
            )
            return s

    def __init__(self, **kwargs):
        """Options: ``x_size``/``y_size`` (canvas), ``draw_axes``,
        ``plot_labels`` (toggles), ``plot_slope`` (connect points with
        sloped segments), ``newline``, ``dot`` (point glyph), and
        ``x_margin``/``y_margin`` (fractional padding around the data)."""
        self.x_size = kwargs.get("x_size", 80)
        self.y_size = kwargs.get("y_size", 20)
        self.will_draw_axes = kwargs.get("draw_axes", True)
        self.new_line = kwargs.get("newline", "\n")
        self.dot = kwargs.get("dot", "*")
        self.plot_slope = kwargs.get("plot_slope", True)
        self.x_margin = kwargs.get("x_margin", 0.05)
        self.y_margin = kwargs.get("y_margin", 0.1)
        self.will_plot_labels = kwargs.get("plot_labels", True)

    @staticmethod
    def get_symbol_by_slope(slope, default_symbol):
        """Pick the glyph ('|', '/', '-' or '\\\\') whose visual slope best
        matches *slope*; fall back to *default_symbol* otherwise."""
        draw_symbol = default_symbol
        if slope > math.tan(3 * math.pi / 8):
            draw_symbol = "|"
        elif slope > math.tan(math.pi / 8) and slope < math.tan(3 * math.pi / 8):
            draw_symbol = "/"
        elif abs(slope) < math.tan(math.pi / 8):
            draw_symbol = "-"
        elif slope < math.tan(-math.pi / 8) and slope > math.tan(-3 * math.pi / 8):
            draw_symbol = "\\"
        elif slope < math.tan(-3 * math.pi / 8):
            draw_symbol = "|"
        return draw_symbol

    def plot_labels(self, output_buffer, plot_data):
        """Write min/max axis labels and '+' tick marks into the buffer.

        Labels are shortened or suppressed when the canvas is too small
        to fit them.
        """
        if plot_data.y_size < 2:
            return
        margin_factor = 1
        do_plot_x_label = True
        do_plot_y_label = True
        # Choose printf-style formats that fit the available width.
        x_str = "%+g"
        if plot_data.x_size < 16:
            do_plot_x_label = False
        elif plot_data.x_size < 23:
            x_str = "%+.2g"
        y_str = "%+g"
        if plot_data.x_size < 8:
            do_plot_y_label = False
        elif plot_data.x_size < 11:
            y_str = "%+.2g"
        # Label the actual data bounds (margins removed), not canvas bounds.
        act_min_x = plot_data.min_x + plot_data.x_mod * margin_factor
        act_max_x = plot_data.max_x - plot_data.x_mod * margin_factor
        act_min_y = plot_data.min_y + plot_data.y_mod * margin_factor
        act_max_y = plot_data.max_y - plot_data.y_mod * margin_factor
        # Small magnitudes always use the short format for readability.
        if abs(act_min_x) < 1:
            min_x_str = "%+.2g" % act_min_x
        else:
            min_x_str = x_str % act_min_x
        if abs(act_max_x) < 1:
            max_x_str = "%+.2g" % act_max_x
        else:
            max_x_str = x_str % act_max_x
        if abs(act_min_y) < 1:
            min_y_str = "%+.2g" % act_min_y
        else:
            min_y_str = y_str % act_min_y
        if abs(act_max_y) < 1:
            max_y_str = "%+.2g" % act_max_y
        else:
            max_y_str = y_str % act_max_y
        min_x_coord = self.get_coord(act_min_x, plot_data.min_x, plot_data.x_step)
        max_x_coord = self.get_coord(act_max_x, plot_data.min_x, plot_data.x_step)
        min_y_coord = self.get_coord(act_min_y, plot_data.min_y, plot_data.y_step)
        max_y_coord = self.get_coord(act_max_y, plot_data.min_y, plot_data.y_step)
        y_zero_coord = self.get_coord(0, plot_data.min_y, plot_data.y_step)
        # NOTE(review): x_zero_coord is used even when 0 lies outside the
        # x range, which can place labels off the visible axis -- confirm.
        x_zero_coord = self.get_coord(0, plot_data.min_x, plot_data.x_step)
        # Tick marks at the extremes of both axes.
        output_buffer[x_zero_coord][min_y_coord] = "+"
        output_buffer[x_zero_coord][max_y_coord] = "+"
        output_buffer[min_x_coord][y_zero_coord] = "+"
        output_buffer[max_x_coord][y_zero_coord] = "+"
        if do_plot_x_label:
            for i, c in enumerate(min_x_str):
                output_buffer[min_x_coord + i][y_zero_coord - 1] = c
            for i, c in enumerate(max_x_str):
                output_buffer[max_x_coord + i - len(max_x_str)][y_zero_coord - 1] = c
        if do_plot_y_label:
            for i, c in enumerate(max_y_str):
                output_buffer[x_zero_coord + i][max_y_coord] = c
            for i, c in enumerate(min_y_str):
                output_buffer[x_zero_coord + i][min_y_coord] = c

    def plot_data(self, xy_seq, output_buffer, plot_data):
        """Rasterize *xy_seq* (pairs of data coordinates) into the buffer.

        With ``plot_slope`` enabled, consecutive points are connected by
        line segments; otherwise each in-range point becomes ``self.dot``.
        """
        if self.plot_slope:
            xy_seq = list(xy_seq)
            # Segments are drawn left to right.
            xy_seq.sort(key=lambda c: c[0])
            prev_p = xy_seq[0]
            e_xy_seq = enumerate(xy_seq)
            next(e_xy_seq)  # the first point has no predecessor to connect
            for i, (x, y) in e_xy_seq:
                draw_symbol = self.dot
                line_drawn = self.plot_line(prev_p, (x, y), output_buffer, plot_data)
                prev_p = (x, y)
                if not line_drawn:
                    # Degenerate segment: draw one glyph oriented along the
                    # local slope estimated from the neighbouring points.
                    if i > 0 and i < len(xy_seq) - 1:
                        px, py = xy_seq[i - 1]
                        nx, ny = xy_seq[i + 1]
                        if abs(nx - px) > EPSILON:
                            slope = (1.0 / plot_data.ratio) * (ny - py) / (nx - px)
                            draw_symbol = self.get_symbol_by_slope(slope, draw_symbol)
                    if (
                        x < plot_data.min_x
                        or x >= plot_data.max_x
                        or y < plot_data.min_y
                        or y >= plot_data.max_y
                    ):
                        continue
                    x_coord = self.get_coord(x, plot_data.min_x, plot_data.x_step)
                    y_coord = self.get_coord(y, plot_data.min_y, plot_data.y_step)
                    if (
                        x_coord >= 0
                        and x_coord < len(output_buffer)
                        and y_coord >= 0
                        and y_coord < len(output_buffer[0])
                    ):
                        # BUGFIX: test the boolean option will_draw_axes --
                        # self.draw_axes is a bound method, always truthy.
                        if self.will_draw_axes:
                            if (
                                y_coord
                                == self.get_coord(0, plot_data.min_y, plot_data.y_step)
                                and draw_symbol == "-"
                            ):
                                draw_symbol = "="
                        output_buffer[x_coord][y_coord] = draw_symbol
        else:
            for x, y in xy_seq:
                if (
                    x < plot_data.min_x
                    or x >= plot_data.max_x
                    or y < plot_data.min_y
                    or y >= plot_data.max_y
                ):
                    continue
                x_coord = self.get_coord(x, plot_data.min_x, plot_data.x_step)
                y_coord = self.get_coord(y, plot_data.min_y, plot_data.y_step)
                # BUGFIX: accept y_coord == 0 (bottom row); the original
                # used `y_coord > 0` here, inconsistent with the `>= 0`
                # bounds check on the plot_slope branch above.
                if (
                    x_coord >= 0
                    and x_coord < len(output_buffer)
                    and y_coord >= 0
                    and y_coord < len(output_buffer[0])
                ):
                    output_buffer[x_coord][y_coord] = self.dot

    def plot_line(self, start, end, output_buffer, plot_data):
        """Rasterize the segment start->end (data coords), clipping it to
        the visible area first.

        Returns True when the segment collapses to a single cell (the
        caller then draws the point itself); returns False otherwise,
        including after the segment has actually been rasterized.
        """
        start_coord = (
            self.get_coord(start[0], plot_data.min_x, plot_data.x_step),
            self.get_coord(start[1], plot_data.min_y, plot_data.y_step),
        )
        end_coord = (
            self.get_coord(end[0], plot_data.min_x, plot_data.x_step),
            self.get_coord(end[1], plot_data.min_y, plot_data.y_step),
        )
        x0, y0 = start_coord
        x1, y1 = end_coord
        if (x0, y0) == (x1, y1):
            return True
        clipped_line = clip_line(
            start,
            end,
            (plot_data.min_x, plot_data.min_y),
            (plot_data.max_x, plot_data.max_y),
        )
        if clipped_line is not None:
            start, end = clipped_line
        else:
            return False  # segment lies entirely outside the plot area
        # Recompute cell coordinates for the clipped endpoints.
        start_coord = (
            self.get_coord(start[0], plot_data.min_x, plot_data.x_step),
            self.get_coord(start[1], plot_data.min_y, plot_data.y_step),
        )
        end_coord = (
            self.get_coord(end[0], plot_data.min_x, plot_data.x_step),
            self.get_coord(end[1], plot_data.min_y, plot_data.y_step),
        )
        x0, y0 = start_coord
        x1, y1 = end_coord
        if (x0, y0) == (x1, y1):
            return True
        x_zero_coord = self.get_coord(0, plot_data.min_x, plot_data.x_step)
        y_zero_coord = self.get_coord(0, plot_data.min_y, plot_data.y_step)
        if start[0] - end[0] == 0:
            draw_symbol = "|"
        else:
            slope = (1.0 / plot_data.ratio) * (end[1] - start[1]) / (end[0] - start[0])
            draw_symbol = self.get_symbol_by_slope(slope, self.dot)
        try:
            delta = x1 - x0, y1 - y0
            if abs(delta[0]) > abs(delta[1]):
                # Mostly horizontal: step along x, interpolate y.
                s = sign(delta[0])
                slope = float(delta[1]) / delta[0]
                for i in range(0, abs(int(delta[0]))):
                    cur_draw_symbol = draw_symbol
                    x = i * s
                    cur_y = int(y0 + slope * x)
                    # BUGFIX: will_draw_axes is the flag; self.draw_axes is
                    # a bound method and made this condition always true.
                    if self.will_draw_axes and cur_y == y_zero_coord and draw_symbol == "-":
                        cur_draw_symbol = "="
                    output_buffer[x0 + x][cur_y] = cur_draw_symbol
            else:
                # Mostly vertical: step along y, interpolate x.
                s = sign(delta[1])
                slope = float(delta[0]) / delta[1]
                for i in range(0, abs(int(delta[1]))):
                    y = i * s
                    cur_draw_symbol = draw_symbol
                    cur_y = y0 + y
                    if self.will_draw_axes and cur_y == y_zero_coord and draw_symbol == "-":
                        cur_draw_symbol = "="
                    output_buffer[int(x0 + slope * y)][cur_y] = cur_draw_symbol
        except:
            # An out-of-bounds write indicates a clipping/rounding bug;
            # dump the offending geometry before re-raising.
            print(start, end)
            print(start_coord, end_coord)
            print(plot_data)
            raise
        return False

    def plot_single(self, seq, min_x=None, max_x=None, min_y=None, max_y=None):
        """Plot a single sequence of y values against their indices."""
        return self.plot_double(range(len(seq)), seq, min_x, max_x, min_y, max_y)

    def plot_double(self, x_seq, y_seq, min_x=None, max_x=None, min_y=None, max_y=None):
        """Plot paired x/y sequences and return the picture as a string.

        Bounds default to the data's extent plus the configured margins.
        """
        if min_x == None:
            min_x = min(x_seq)
        if max_x == None:
            max_x = max(x_seq)
        if min_y == None:
            min_y = min(y_seq)
        if max_y == None:
            max_y = max(y_seq)
        if max_y == min_y:
            max_y += 1  # avoid a zero-height (degenerate) y range
        # Pad the data bounds so points do not sit on the canvas border.
        x_mod = (max_x - min_x) * self.x_margin
        y_mod = (max_y - min_y) * self.y_margin
        min_x -= x_mod
        max_x += x_mod
        min_y -= y_mod
        max_y += y_mod
        plot_data = self.PlotData(
            self.x_size, self.y_size, min_x, max_x, min_y, max_y, x_mod, y_mod
        )
        # Buffer is indexed [x][y]; rows are produced by transposing later.
        output_buffer = [[" "] * self.y_size for i in range(self.x_size)]
        if self.will_draw_axes:
            self.draw_axes(output_buffer, plot_data)
        self.plot_data(zip(x_seq, y_seq), output_buffer, plot_data)
        if self.will_plot_labels:
            self.plot_labels(output_buffer, plot_data)
        # Flip y (row 0 is the top of the picture) and transpose to rows.
        trans_result = transposed(y_reversed(output_buffer))
        result = self.new_line.join(["".join(row) for row in trans_result])
        return result

    def draw_axes(self, output_buffer, plot_data):
        """Draw the x ('-') and y ('|') axes when 0 is inside the range,
        with '+' at their intersection."""
        draw_x = False
        draw_y = False
        if plot_data.min_x <= 0 and plot_data.max_x > 0:
            draw_y = True
            zero_x = self.get_coord(0, plot_data.min_x, plot_data.x_step)
            for y in range(plot_data.y_size):
                output_buffer[zero_x][y] = "|"
        if plot_data.min_y <= 0 and plot_data.max_y > 0:
            draw_x = True
            zero_y = self.get_coord(0, plot_data.min_y, plot_data.y_step)
            for x in range(plot_data.x_size):
                output_buffer[x][zero_y] = "-"
        if draw_x and draw_y:
            output_buffer[zero_x][zero_y] = "+"

    @staticmethod
    def get_coord(val, min, step):
        """Map the data value *val* to a cell index, given the axis
        minimum and the per-cell step size."""
        result = int((val - min) / step)
        return result
def clip_line(line_pt_1, line_pt_2, rect_bottom_left, rect_top_right):
    """Clip the segment (line_pt_1, line_pt_2) to an axis-aligned rectangle.

    Returns the clipped (start, end) point pair, or None when the segment
    lies entirely outside the rectangle.  Vertical and horizontal segments
    are handled as special cases to avoid the divisions below.
    """
    # Parametric positions along the segment; 0.0 and 1.0 are the endpoints.
    ts = [0.0, 1.0]
    # Vertical segment: clamp the y extent directly.
    if line_pt_1[0] == line_pt_2[0]:
        return (
            (line_pt_1[0], max(min(line_pt_1[1], line_pt_2[1]), rect_bottom_left[1])),
            (line_pt_1[0], min(max(line_pt_1[1], line_pt_2[1]), rect_top_right[1])),
        )
    # Horizontal segment: clamp the x extent directly.
    if line_pt_1[1] == line_pt_2[1]:
        return (
            (max(min(line_pt_1[0], line_pt_2[0]), rect_bottom_left[0]), line_pt_1[1]),
            (min(max(line_pt_1[0], line_pt_2[0]), rect_top_right[0]), line_pt_1[1]),
        )
    # Both endpoints already inside the rectangle: nothing to clip.
    if (
        (rect_bottom_left[0] <= line_pt_1[0] and line_pt_1[0] < rect_top_right[0])
        and (rect_bottom_left[1] <= line_pt_1[1] and line_pt_1[1] < rect_top_right[1])
        and (rect_bottom_left[0] <= line_pt_2[0] and line_pt_2[0] < rect_top_right[0])
        and (rect_bottom_left[1] <= line_pt_2[1] and line_pt_2[1] < rect_top_right[1])
    ):
        return line_pt_1, line_pt_2
    # Parameter values where the extended line crosses each rectangle edge.
    ts.append(float(rect_bottom_left[0] - line_pt_1[0]) / (line_pt_2[0] - line_pt_1[0]))
    ts.append(float(rect_top_right[0] - line_pt_1[0]) / (line_pt_2[0] - line_pt_1[0]))
    ts.append(float(rect_bottom_left[1] - line_pt_1[1]) / (line_pt_2[1] - line_pt_1[1]))
    ts.append(float(rect_top_right[1] - line_pt_1[1]) / (line_pt_2[1] - line_pt_1[1]))
    ts.sort()
    # The middle two of the six sorted parameters bound the visible part.
    # NOTE(review): `ts[2] >= 1` appears twice in this test; the final term
    # was presumably meant to be `ts[3] >= 1` -- confirm before changing,
    # since fixing it would alter clipping behavior.
    if ts[2] < 0 or ts[2] >= 1 or ts[3] < 0 or ts[2] >= 1:
        return None
    # Interpolate the two clipped endpoints: (x at t2, y at t2, x at t3, y at t3).
    result = [
        (pt_1 + t * (pt_2 - pt_1))
        for t in (ts[2], ts[3])
        for (pt_1, pt_2) in zip(line_pt_1, line_pt_2)
    ]
    return (result[0], result[1]), (result[2], result[3])
def plot(*args, **flags):
    """Convenience wrapper: build a Plotter and print one plot.

    One positional argument plots a y sequence against its indices; two
    arguments plot x against y.  The min_x/min_y/max_x/max_y flags bound
    the plot; every other flag configures the Plotter itself.
    """
    limit_names = set(["min_x", "min_y", "max_x", "max_y"])
    limits = {name: flags[name] for name in limit_names & set(flags)}
    settings = {name: flags[name] for name in set(flags) - limit_names}
    if len(args) == 1:
        print(Plotter(**settings).plot_single(args[0], **limits))
    elif len(args) == 2:
        print(Plotter(**settings).plot_double(args[0], args[1], **limits))
    else:
        raise NotImplementedError("can't draw multiple graphs yet")
| StarcoderdataPython |
4900971 | #!/usr/bin/python35
# Demo: read a file twice, rewinding with seek() between the reads.
# BUGFIX: the original opened the file without ever closing it; the
# context manager guarantees the handle is released even on error.
with open('hello.txt') as fp:
    print('fp.read() => \n', fp.read())
    fp.seek(0, 0)  # rewind to the beginning of the file
    print('fp.read(8) => \n', fp.read(8))
| StarcoderdataPython |
6636721 | <filename>translate.py<gh_stars>0
import boto3
def readfile():
    """Prompt the user for a filename and return that file's full contents.

    Raises OSError (e.g. FileNotFoundError) if the file cannot be opened.
    """
    filename = input('enter a filename here: ')
    # BUGFIX: the original opened the file and never closed it; `with`
    # guarantees the handle is released.  (Also renamed the local so it no
    # longer shadows the builtin `file` name.)
    with open(filename, 'r') as handle:
        return handle.read()
# Create an AWS Translate client in us-west-2.  NOTE(review): boto3
# resolves AWS credentials from the environment at call time -- this
# script requires them to be configured.
translate = boto3.client(service_name='translate', region_name='us-west-2', use_ssl=True)
# Translate the interactively chosen file from English ("en") to German ("de").
result = translate.translate_text(Text=readfile(),
                                  SourceLanguageCode="en", TargetLanguageCode="de")
# print('TranslatedText: ' + result.get('TranslatedText'))
# print('SourceLanguageCode: ' + result.get('SourceLanguageCode'))
# print('TargetLanguageCode: ' + result.get('TargetLanguageCode'))
targetlanguage = result.get('TargetLanguageCode') | StarcoderdataPython |
1821788 | <reponame>connorbrinton/gql
"""The primary :mod:`gql` package includes everything you need to
execute GraphQL requests, with the exception of the transports
which are optional:
- the :func:`gql <gql.gql>` method to parse a GraphQL query
- the :class:`Client <gql.Client>` class as the entrypoint to execute requests
and create sessions
"""
from .__version__ import __version__
from .client import Client
from .gql import gql
__all__ = [
"__version__",
"gql",
"Client",
]
| StarcoderdataPython |
6476126 | from sqlalchemy import Column, Integer, Text, ForeignKey
from sqlalchemy.orm import relationship
from app.db.session import Base
class Comment(Base):
    """A user-authored comment on a character, episode, or appearance.

    NOTE(review): three target foreign keys exist (character, episode,
    appearance) and nothing here enforces that exactly one is set --
    presumably the API layer guarantees that; confirm at the call sites.
    """

    __tablename__ = "comment"

    id = Column(Integer, primary_key=True, index=True)
    # Free-form comment body.
    text = Column(Text)
    # Author of the comment.
    user_id = Column(Integer, ForeignKey("user.id"))
    # Possible comment targets (see class docstring).
    character_id = Column(Integer, ForeignKey("character.id"))
    episode_id = Column(Integer, ForeignKey("episode.id"))
    appearance_id = Column(Integer, ForeignKey("character_episode.id"))
    # ORM relationships; each related model exposes a "comments" collection.
    user = relationship("User", back_populates="comments")
    character = relationship("Character", back_populates="comments")
    episode = relationship("Episode", back_populates="comments")
    appearance = relationship("CharacterEpisode", back_populates="comments")
| StarcoderdataPython |
8002337 | import argparse
import errno
import os
import subprocess
from jinja2 import Template
# Ports used when --port is not supplied on the command line.
GODOC_DEFAULT_PORT = 6061
IPYTHON_DEFAULT_PORT = 9999

parser = argparse.ArgumentParser(description='Produce configurable plist files')
parser.add_argument('template', help='The location of the plist template')
parser.add_argument('--port', help='Which port the service should listen on')

current_directory = os.path.dirname(os.path.abspath(__file__))
home = os.environ["HOME"].strip()
args = parser.parse_args()

if args.template == "go":
    port = args.port or GODOC_DEFAULT_PORT
    template = Template(open('templates/godoc.plist.template').read())
    try:
        godoc_binary = subprocess.check_output(["which", "godoc"]).strip().decode('utf-8')
    except subprocess.CalledProcessError as e:
        print("\nCould not find godoc on path! Install godoc to use the godoc server\n")
        raise
    # Ensure the log directory exists; only "already exists" is ignored.
    try:
        os.mkdir(os.path.join(home, "var", "log", "godoc"))
    except OSError as e:
        if e.errno != errno.EEXIST:
            raise
    print(template.render(godoc_binary=godoc_binary, godoc_port=port, home=home))

if args.template == "nginx":
    try:
        # BUGFIX: decode the path -- check_output returns bytes on Python 3,
        # so the template previously rendered b'...'; the godoc branch
        # already decodes.
        nginx_binary = subprocess.check_output(["which", "nginx"]).strip().decode('utf-8')
    except subprocess.CalledProcessError as e:
        # BUGFIX: the message previously referred to godoc (copy/paste error).
        print("\nCould not find nginx on path! Install nginx to use the nginx server\n")
        raise
    template = Template(open('templates/nginx.plist.template').read())
    print(template.render(nginx_binary=nginx_binary,
                          current_directory=current_directory))

if args.template == "ipython":
    port = args.port or IPYTHON_DEFAULT_PORT
    template = Template(open('templates/ipython.plist.template').read())
    print(template.render(home=home, port=port, current_directory=current_directory))

if args.template == "devdocs":
    devdocs_dir = "{cur}/usr/devdocs/devdocs-master".format(cur=current_directory)
    # BUGFIX: decode here as well (same bytes-vs-str issue as nginx above).
    rackup_binary = subprocess.check_output(["which", "ddocs_rackup"],
                                            cwd=devdocs_dir).strip().decode('utf-8')
    template = Template(open('templates/devdocs.plist.template').read())
    print(template.render(current_directory=current_directory,
                          rackup_binary=rackup_binary))
312981 | import ctypes
# C callback prototype: void handler(const char *file, int line,
# const char *function, int err, const char *fmt).
# NOTE(review): this signature matches ALSA's snd_lib_error_handler_t, so
# this is presumably used to silence ALSA's stderr chatter via
# snd_lib_error_set_handler -- confirm at the registration site.
ERROR_HANDLER_FUNC = ctypes.CFUNCTYPE(None, ctypes.c_char_p, ctypes.c_int,
                                      ctypes.c_char_p, ctypes.c_int,
                                      ctypes.c_char_p)


def py_error_handler(filename, line, function, err, fmt):
    """Error callback that deliberately discards every message."""
    pass


# Keep a module-level reference to the wrapper: if it were garbage
# collected while still registered with the C library, invoking the
# callback would crash.
c_error_handler = ERROR_HANDLER_FUNC(py_error_handler)
| StarcoderdataPython |
386665 | # -*- python -*-
# This software was produced by NIST, an agency of the U.S. government,
# and by statute is not subject to copyright in the United States.
# Recipients of this software assume all responsibilities associated
# with its operation, modification and maintenance. However, to
# facilitate maintenance we ask that before distributing modified
# versions of this software, you first contact the authors at
# <EMAIL>.
# A GfxWindow always has a current MouseHandler, which knows what to
# do with mouse events on the canvas. The window's toolboxes can
# install new MouseHandlers. The base class defined here does
# nothing.
## TODO: This is obsolete but still used. Replace all occurences with
## common.IO.mousehandler
class MouseHandler(object):
    """Base mouse handler: accepts the three basic canvas events but does
    nothing with them.  Subclasses override up/down/move to react."""

    # The only event types this handler family understands.
    _EVENT_TYPES = ('up', 'down', 'move')

    def acceptEvent(self, eventtype):
        """Return True iff *eventtype* ('up', 'down' or 'move') can be
        handled."""
        # Membership test replaces the original if/elif chain (idiom).
        return eventtype in self._EVENT_TYPES

    def up(self, x, y, buttons):
        pass

    def down(self, x, y, buttons):
        pass

    def move(self, x, y, buttons):
        pass
nullHandler = MouseHandler() # doesn't do anything
# Hack to tide us over until this file is removed.
from ooflib.common.IO.mousehandler import MouseButtons
| StarcoderdataPython |
5091680 | <reponame>Bob-Yeah/kaolin<gh_stars>1-10
# Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#
# Soft Rasterizer (SoftRas)
#
# Copyright (c) 2017 <NAME>
# Copyright (c) 2018 <NAME>
# Copyright (c) 2019 <NAME>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import torch
import torch.nn.functional as F
def compute_ambient_light(
        face_vertices: torch.Tensor,
        textures: torch.Tensor,
        ambient_intensity: float = 1.,
        ambient_color: torch.Tensor = None):
    r"""Computes ambient lighting for a mesh, given faces and face textures.

    Args:
        face_vertices (torch.Tensor): A tensor containing a list of
            (per-face) vertices of the mesh (shape: `B` :math:`\times`
            `num_faces` :math:`\times 9`). Only the first two dimensions
            (batchsize, number of faces) are actually read here.
        textures (torch.Tensor): Face texture tensor; validated only --
            ambient light does not depend on the texture values.
        ambient_intensity (float): Intensity of ambient light, clipped to
            the range :math:`\left[0, 1\right]`.
        ambient_color (torch.Tensor): Color of the ambient light (R, G, B)
            (shape: :math:`3`). Defaults to white.

    Returns:
        light (torch.Tensor): A light tensor, which can be elementwise
            multiplied with the textures, to obtain the mesh with lighting
            applied (shape: `B` :math:`\times` `num_faces`
            :math:`\times 1 \times 3`).
    """
    if not torch.is_tensor(face_vertices):
        raise TypeError('Expected input face_vertices to be of type '
                        'torch.Tensor. Got {0} instead.'.format(
                            type(face_vertices)))
    if not torch.is_tensor(textures):
        raise TypeError('Expected input textures to be of type '
                        'torch.Tensor. Got {0} instead.'.format(
                            type(textures)))
    if not isinstance(ambient_intensity, float) and not isinstance(
            ambient_intensity, int):
        raise TypeError('Expected input ambient_intensity to be of '
                        'type float. Got {0} instead.'.format(
                            type(ambient_intensity)))
    if ambient_color is None:
        # Default: white light matching the mesh's dtype and device.
        ambient_color = torch.ones(3, dtype=face_vertices.dtype,
                                   device=face_vertices.device)
    if not torch.is_tensor(ambient_color):
        raise TypeError('Expected input ambient_color to be of type '
                        'torch.Tensor. Got {0} instead.'.format(
                            type(ambient_color)))
    if ambient_color.dim() != 1 or ambient_color.shape != (3,):
        raise ValueError('Input ambient_color must have 1 dimension '
                         'and be of shape 3. Got {0} dimensions and shape {1} '
                         'instead.'.format(ambient_color.dim(), ambient_color.shape))
    # Clip ambient_intensity to be in the range [0, 1].
    if ambient_intensity < 0:
        ambient_intensity = 0.
    if ambient_intensity > 1:
        ambient_intensity = 1.
    batchsize = face_vertices.shape[0]
    num_faces = face_vertices.shape[1]
    device = face_vertices.device
    # Broadcast the color over the face dimension: (1, 3) -> (B, F, 3).
    ambient_color = ambient_color[None, :].to(device)
    light = torch.zeros(batchsize, num_faces, 3).to(device)
    # If the intensity of the ambient light is 0, skip the computation.
    # BUGFIX: the early return previously produced shape (B, num_faces, 3),
    # inconsistent with the (B, num_faces, 1, 3) of the normal path.
    if ambient_intensity == 0:
        return light[:, :, None, :]
    # Ambient lighting is constant everywhere, and is given as
    #   I = I_a * K_a
    # where I_a is the intensity of the ambient light and K_a the ambient
    # reflectance of the vertex (R, G, B).
    light += ambient_intensity * ambient_color[:, None, :]
    return light[:, :, None, :]
def apply_ambient_light(
        face_vertices: torch.Tensor,
        textures: torch.Tensor,
        ambient_intensity: float = 1.,
        ambient_color: torch.Tensor = None):
    r"""Computes and applies ambient lighting to a mesh, given faces and
    face textures.

    Args:
        face_vertices (torch.Tensor): Per-face vertices of the mesh
            (shape: `B` :math:`\times` `num_faces` :math:`\times 9`).
        textures (torch.Tensor): Face texture tensor to be lit.
        ambient_intensity (float): Intensity of ambient light, clipped to
            the range :math:`\left[0, 1\right]`.
        ambient_color (torch.Tensor): Color of the ambient light (R, G, B)
            (shape: :math:`3`). Defaults to white.

    Returns:
        textures (torch.Tensor): Updated textures, with ambient lighting
            applied (shape: same as the broadcast of `textures` with the
            computed light tensor).
    """
    # BUGFIX: the default used to be a torch.ones(3) tensor created at
    # import time (mutable default argument); passing None instead defers
    # creation to compute_ambient_light, which also matches the mesh's
    # dtype and device -- consistent with the other compute_*/apply_*
    # functions in this module.
    light = compute_ambient_light(face_vertices, textures, ambient_intensity,
                                  ambient_color)
    return textures * light
def compute_directional_light(
        face_vertices: torch.Tensor,
        textures: torch.Tensor,
        directional_intensity: float = 1.,
        directional_color: torch.Tensor = None,
        direction: torch.Tensor = None):
    r"""Computes directional lighting for a mesh, given faces and face
    textures.

    Args:
        face_vertices (torch.Tensor): A tensor containing a list of
            (per-face) vertices of the mesh. NOTE(review): the indexing
            below (``face_vertices[:, :, 0]`` etc.) and the cross product
            require shape `B` :math:`\times` `num_faces` :math:`\times 3
            \times 3` (3 vertices of 3 coordinates each), not the flat
            :math:`\times 9` layout -- confirm against the callers.
        textures (torch.Tensor): Face texture tensor; validated only.
        directional_intensity (float): Intensity of directional light,
            clipped to the range :math:`\left[0, 1\right]`.
        directional_color (torch.Tensor): Color of the directional light
            (R, G, B) (shape: :math:`3`). Defaults to white.
        direction (torch.Tensor): Direction of light from the light source
            (shape: :math:`3`; default: :math:`\left(0, 1, 0\right)^T`).

    Returns:
        light (torch.Tensor): A light tensor, which can be elementwise
            multiplied with the textures, to obtain the mesh with lighting
            applied (shape: `B` :math:`\times` `num_faces`
            :math:`\times 1 \times 3`).
    """
    if not torch.is_tensor(face_vertices):
        raise TypeError('Expected input face_vertices to be of type '
                        'torch.Tensor. Got {0} instead.'.format(
                            type(face_vertices)))
    if not torch.is_tensor(textures):
        raise TypeError('Expected input textures to be of type '
                        'torch.Tensor. Got {0} instead.'.format(
                            type(textures)))
    if not isinstance(directional_intensity, float) and not isinstance(
            directional_intensity, int):
        raise TypeError('Expected input directional_intensity to be of '
                        'type float. Got {0} instead.'.format(
                            type(directional_intensity)))
    if directional_color is None:
        directional_color = torch.ones(3, dtype=face_vertices.dtype,
                                       device=face_vertices.device)
    if not torch.is_tensor(directional_color):
        raise TypeError('Expected input directional_color to be of type '
                        'torch.Tensor. Got {0} instead.'.format(
                            type(directional_color)))
    if direction is None:
        direction = torch.tensor([0., 1., 0.], dtype=face_vertices.dtype,
                                 device=face_vertices.device)
    if not torch.is_tensor(direction):
        raise TypeError('Expected input direction to be of type '
                        'torch.Tensor. Got {0} instead.'.format(type(direction)))
    if directional_color.dim() != 1 or directional_color.shape != (3,):
        raise ValueError('Input directional_color must have 1 dimension '
                         'and be of shape 3. Got {0} dimensions and shape {1} '
                         'instead.'.format(directional_color.dim(),
                                           directional_color.shape))
    if direction.dim() != 1 or direction.shape != (3,):
        raise ValueError('Input direction must have 1 dimension and be '
                         'of shape 3. Got {0} dimensions and shape {1} '
                         'instead.'.format(direction.dim(), direction.shape))
    batchsize = face_vertices.shape[0]
    num_faces = face_vertices.shape[1]
    device = face_vertices.device
    # Broadcastable (1, 3) views of the light color and direction.
    directional_color = directional_color[None, :].to(device)
    direction = direction[None, :].to(device)
    # Clip directional intensity to be in the range [0, 1].
    if directional_intensity < 0:
        directional_intensity = 0.
    if directional_intensity > 1:
        directional_intensity = 1.
    light = torch.zeros(batchsize, num_faces, 3).to(device)
    # If the intensity of the directional light is 0, skip the computation.
    # BUGFIX: the early return previously produced shape (B, num_faces, 3),
    # inconsistent with the (B, num_faces, 1, 3) of the normal path.
    if directional_intensity == 0:
        return light[:, :, None, :]
    # Compute (unit) face normals from two triangle edges.
    v10 = face_vertices[:, :, 0] - face_vertices[:, :, 1]
    v12 = face_vertices[:, :, 2] - face_vertices[:, :, 1]
    # BUGFIX: pass dim=2 explicitly -- the 3-vectors live in dim 2 here,
    # and torch.cross's deprecated default picks the *first* size-3
    # dimension, which is wrong whenever B or num_faces equals 3.
    normals = F.normalize(torch.cross(v12, v10, dim=2), p=2, dim=2, eps=1e-6)
    # Reshape, to get back the batchsize dimension.
    normals = normals.reshape(batchsize, num_faces, 3)
    # Get direction to 3 dimensions, if not already there.
    if direction.dim() == 2:
        direction = direction[:, None, :]
    # Lambertian term: clamp the normal/direction dot product at zero.
    cos = F.relu(torch.sum(normals * direction, dim=2))
    light += directional_intensity * (directional_color[:, None, :]
                                      * cos[:, :, None])
    return light[:, :, None, :]
def apply_directional_light(
        face_vertices: torch.Tensor,
        textures: torch.Tensor,
        directional_intensity: float = 1.,
        directional_color: torch.Tensor = None,
        direction: torch.Tensor = None):
    r"""Computes and applies directional lighting to a mesh, given faces
    and face textures.

    Args:
        face_vertices (torch.Tensor): A tensor containing a list of (per-face)
            vertices of the mesh (shape: `B` :math:`\times` `num_faces`
            :math:`\times 9`). Here, :math:`B` is the batchsize, `num_faces`
            is the number of faces in the mesh, and since each face is assumed
            to be a triangle, it has 3 vertices, and hence 9 coordinates in
            total.
        textures (torch.Tensor): TODO: Add docstring
        directional_intensity (float): Intensity of directional light (in the
            range :math:`\left[0, 1\right]`). If the values provided are
            outside this range, we clip them so that they fall in range.
        directional_color (torch.Tensor): Color of the directional light
            (R, G, B) (shape: :math:`3`). Defaults to white
            (``torch.ones(3)``).
        direction (torch.Tensor): Direction of light from the light source.
            (default: :math:`\left( 0, 1, 0 \right)^T`)

    Returns:
        light (torch.Tensor): A light tensor, which can be elementwise
            multiplied with the textures, to obtain the mesh with lighting
            applied (shape: `B` :math:`\times` `num_faces` :math:`\times
            1 \times 1 \times 1 \times 3`)
    """
    # Fix: tensor default arguments are created once at import time and
    # shared across every call (a mutable-default bug); build fresh
    # per-call tensors instead. Passing the same default values
    # explicitly still behaves identically.
    if directional_color is None:
        directional_color = torch.ones(3)
    if direction is None:
        direction = torch.FloatTensor([0, 1, 0])
    light = compute_directional_light(face_vertices, textures,
                                      directional_intensity,
                                      directional_color, direction)
    return textures * light
| StarcoderdataPython |
5091191 | <filename>mybbs-backend/mybbsbackend/database/api/user.py
from sqlalchemy.orm import exc
from mybbsbackend import database
from mybbsbackend.database.model import user as model_user
class UserAPI:
    """Thin CRUD wrapper around ``UserModel`` using one session per call.

    Read helpers return the model instance(s) or ``None``; write helpers
    commit on success and roll the session back on failure so the
    connection is not left in a dirty state.
    """

    def get_one_by_username(self, username):
        """Return the first user with *username*, or None when absent."""
        session = database.get_session()
        query = session.query(
            model_user.UserModel).filter_by(username=username)
        # Query.first() returns None instead of raising, so no except needed.
        return query.first()

    def get_one_by_id(self, id):
        """Return the single user with primary key *id*, or None."""
        session = database.get_session()
        query = session.query(model_user.UserModel).filter_by(id=id)
        try:
            # one() raises NoResultFound when the row does not exist.
            return query.one()
        except exc.NoResultFound:
            return None

    def get_all(self):
        """Return all users as a list (possibly empty)."""
        session = database.get_session()
        # Query.all() never raises NoResultFound; it returns [] instead.
        return session.query(model_user.UserModel).all()

    def add_one(self, user):
        """Persist *user* (a UserModel instance); return it, or None on failure."""
        session = database.get_session()
        try:
            session.add(user)
            session.flush()
            session.commit()
            return user
        except Exception:
            # Fix: the original swallowed the error with a bare `pass`,
            # leaving the session dirty. Roll back so the connection
            # remains usable; callers still receive None as before.
            session.rollback()
            return None

    def update_user(self, user):
        """Update the row whose id is ``user['id']`` with the dict *user*.

        Returns the input dict on success.
        """
        session = database.get_session()
        user_id = user.get('id')
        query = session.query(model_user.UserModel).filter_by(id=user_id)
        try:
            # 'user' is a dict object here, not a model instance.
            # update() returns a number (matched rows), not a model instance.
            # In add_one(), 'user' is a model instance.
            query.update(user)
            session.flush()
            session.commit()
            return user
        except exc.NoResultFound:
            # Defensive only: Query.update() does not actually raise
            # NoResultFound; other database errors propagate to the caller.
            session.rollback()
            return None

    def delete_user_by_id(self, id):
        """Delete the user with primary key *id*; a missing id is a no-op."""
        session = database.get_session()
        user = session.query(
            model_user.UserModel).filter_by(id=id).first()
        if user is None:
            # Fix: the original passed None to session.delete(), which
            # raises (and not NoResultFound, so it was never caught).
            return
        session.delete(user)
        session.flush()
        session.commit()
| StarcoderdataPython |
239274 | <reponame>Tonyhe666/iOS-Scripting
#!/usr/bin/python
# -*- coding: UTF-8 -*-
# ------------------------android 的xml语言文件解析------------------------------
from xml.dom import minidom
def get_attrvalue(node, attrname):
    """Return attribute *attrname* of *node*, or '' when node is falsy."""
    if not node:
        return ''
    return node.getAttribute(attrname)
def get_nodevalue(node, index=0):
    """Return the text of *node*'s first child, or '' when it has none.

    *index* is unused; it is kept only for interface compatibility with
    existing callers (the commented-out original used it to pick a child).
    """
    first = node.firstChild
    return first.nodeValue if first else ''
def get_xmlnode(node, name):
    """Return all descendant elements of *node* with tag *name* ([] if node is falsy)."""
    if not node:
        return []
    return node.getElementsByTagName(name)
def xml_to_string(filename='strings.xml'):
    """Parse *filename* and return the document serialized with a UTF-8 declaration."""
    document = minidom.parse(filename)
    return document.toxml('UTF-8')
def get_xml_data(filename='strings.xml'):
    """Parse an Android strings.xml and map each ``name`` attribute to its
    UTF-8 encoded text value."""
    document = minidom.parse(filename)
    root = document.documentElement
    dic = {}
    for entry in get_xmlnode(root, 'string'):
        key = get_attrvalue(entry, 'name')
        # Values are stored UTF-8 encoded, matching the original behaviour.
        dic[key] = get_nodevalue(entry).encode('utf-8', 'ignore')
    return dic
# ------------------------iOS 语言文件解析------------------------------
def findKey(line):
    """Return the UTF-8 encoded text before the first '=', or '' when absent."""
    pos = line.find('=')
    if pos == -1:
        return ''
    # Everything up to (not including) the '=' is the key.
    return line[:pos].encode('utf-8', 'ignore')
def get_Infoplist_data(name='InfoPlist.strings'):
    """Return the list of keys found in an iOS .strings file.

    Fix: the original ended with ``fp.close`` (missing the call
    parentheses), so the file handle was never actually closed; a
    ``with`` block now guarantees closure.
    """
    key_list = []
    with open(name, 'r') as fp:
        for eacheline in fp:
            key = findKey(eacheline)
            if key != '':
                key_list.append(key)
    return key_list
# 生成新的语言文件
def generalInfoplist(android_map, iOS_list):
fp = open('newiOS.strings', 'wb')
for key in iOS_list:
value = android_map.get(key)
if value != None:
str = "%s=\"%s\";\n" % (key, value)
fp.writelines(str)
fp.close
# xml 转infoplist
def xml2infoplist(android_map):
    """Dump every (key, value) pair of *android_map* into ``dic.strings``
    as ``key="value";`` lines.

    Fix: ``fp.close`` was missing its call parentheses, so the file was
    never closed; also opened in text mode (was 'wb' while writing str).
    """
    with open('dic.strings', 'w') as fp:
        for key in android_map:
            fp.write("%s=\"%s\";\n" % (key, android_map[key]))
if __name__ == "__main__":
    # Translate Android strings.xml values into iOS .strings files:
    # dump the full key/value map, then emit only the keys the existing
    # iOS InfoPlist.strings already uses.
    android_map = get_xml_data()
    xml2infoplist(android_map)
    iOS_list = get_Infoplist_data()
    generalInfoplist(android_map, iOS_list)
| StarcoderdataPython |
6537500 | import requests_mock
from teatime import Issue
def assert_mocked_execute(
    target,
    rpc_results,
    plugin,
    context,
    rpc_methods,
    skipped=False,
):
    """Run *plugin* against canned RPC responses and verify the traffic.

    When *skipped* is True the plugin must not perform any request;
    otherwise every canned response in *rpc_results* must have been
    consumed, in order, by a request whose JSON-RPC method matches the
    corresponding entry of *rpc_methods*.
    """
    with requests_mock.Mocker() as mocker:
        mocker.request(
            method=requests_mock.ANY,
            url=requests_mock.ANY,
            response_list=rpc_results,
        )
        plugin.run(context=context)

        if skipped:
            assert mocker.called is False
        else:
            assert mocker.call_count == len(rpc_results)
            for idx in range(len(rpc_results)):
                sent = mocker.request_history[idx].json()
                assert sent["method"] == rpc_methods[idx]
def assert_report_has_issue(report, meta_name, title, description, rpc_raw, severity):
    """Check that *report* carries exactly one issue matching all given fields."""
    assert len(report.issues) == 1
    assert report.meta == {meta_name: True}

    found = report.issues[0]
    # Every issue must carry a non-empty string id.
    assert found.id and isinstance(found.id, str)
    assert found.title == title
    assert found.description == description
    assert found.raw_data == rpc_raw
    assert found.severity == severity
def assert_empty_report(report, meta_name):
    """Check that *report* has no issues and only the scan-marker meta entry."""
    expected_meta = {meta_name: True}
    assert report.issues == []
    assert report.meta == expected_meta
| StarcoderdataPython |
3551131 | import argparse
import os
import sys
import numpy as np
import torch
import yaml
from runners.real_data_runner import train, semisupervised, transfer, compute_representations, plot_transfer, \
plot_representation, compute_mcc
def parse():
    """Build and parse the command-line arguments for the experiment runner.

    Returns:
        argparse.Namespace: parsed flags selecting which experiment
        (train / transfer / semi-supervised / representation) to run
        and with which seeds and subset sizes.
    """
    parser = argparse.ArgumentParser(description='')
    parser.add_argument('--config', type=str, default='mnist.yaml', help='Path to the config file')
    parser.add_argument('--run', type=str, default='run', help='Path for saving running related data.')
    parser.add_argument('--n-sims', type=int, default=0, help='Number of simulations to run')
    parser.add_argument('--seed', type=int, default=1, help='Random seed')
    parser.add_argument('--baseline', action='store_true', help='Run the script for the baseline')
    parser.add_argument('--transfer', action='store_true',
                        help='Run the transfer learning experiments after pretraining')
    parser.add_argument('--semisupervised', action='store_true', help='Run semi-supervised experiments')
    parser.add_argument('--representation', action='store_true',
                        help='Run CCA representation validation across multiple seeds')
    parser.add_argument('--mcc', action='store_true', help='compute MCCs -- '
                                                          'only relevant for representation experiments')
    parser.add_argument('--second-seed', type=int, default=0, help='Second random seed for computing MCC -- '
                                                                   'only relevant for representation experiments')
    parser.add_argument('--subset-size', type=int, default=6000,
                        help='Number of data points per class to consider -- '
                             'only relevant for transfer learning if not run with --all flag')
    parser.add_argument('--all', action='store_true',
                        help='Run transfer learning experiment for many seeds and subset sizes -- '
                             'only relevant for transfer and representation experiments')
    parser.add_argument('--plot', action='store_true',
                        help='Plot selected experiment for the selected dataset')
    parser.add_argument('--conditional', action='store_true', help="whether using the conditional loss for training")
    # These options are for debug only --- will be removed.
    parser.add_argument('-a', action='store_true')
    parser.add_argument('-p', action='store_true')
    parser.add_argument('-z', type=int, default=0)
    args = parser.parse_args()
    return args
def dict2namespace(config):
    """Recursively convert a nested dict into an ``argparse.Namespace``.

    Nested dicts become nested namespaces, so ``cfg['a']['b']`` is
    reachable as ``ns.a.b``; all other values are attached unchanged.
    """
    namespace = argparse.Namespace()
    for key, value in config.items():
        converted = dict2namespace(value) if isinstance(value, dict) else value
        setattr(namespace, key, converted)
    return namespace
def make_and_set_dirs(args, config):
    """call after setting args.doc to set and create necessary folders

    Derives args.dataset from the config, decorates args.doc (and
    args.doc_baseline, when present) with model-flag suffixes, then
    creates the run/log/checkpoint/output directory tree and stores the
    paths back on *args*.
    """
    args.dataset = config.data.dataset.split('_')[0]  # take into account baseline datasets e.g.: mnist_transferBaseline
    if 'doc' in vars(args).keys():
        # Encode the model configuration into the experiment folder name.
        if config.model.positive:
            args.doc += 'p'
        if config.model.augment:
            args.doc += 'a'
        if config.model.final_layer:
            args.doc += str(config.model.feature_size)
        args.doc += config.model.architecture.lower()
    else:
        # args has no attribute doc
        args.doc = config.model.architecture.lower()
    args.doc = os.path.join(args.dataset, args.doc)  # group experiments by dataset
    if 'doc2' in vars(args).keys():
        # add second level doc folder
        args.doc2 = os.path.join(args.doc, args.doc2)
    else:
        # if not defined, set to level 1 doc
        args.doc2 = args.doc
    os.makedirs(args.run, exist_ok=True)
    args.log = os.path.join(args.run, 'logs', args.doc2)
    os.makedirs(args.log, exist_ok=True)
    # Checkpoints are additionally namespaced by the random seed.
    args.checkpoints = os.path.join(args.run, 'checkpoints', args.doc2, "dim10_new_{}".format(args.seed))
    os.makedirs(args.checkpoints, exist_ok=True)
    args.output = os.path.join(args.run, 'output', args.doc)
    os.makedirs(args.output, exist_ok=True)
    if 'doc_baseline' in vars(args).keys():
        # Mirror the same naming scheme for the baseline run.
        if config.model.positive:
            args.doc_baseline += 'p'
        if config.model.augment:
            args.doc_baseline += 'a'
        if config.model.final_layer:
            args.doc_baseline += str(config.model.feature_size)
        args.doc_baseline += config.model.architecture.lower()
        args.doc_baseline = os.path.join(args.dataset, args.doc_baseline)
        args.checkpoints_baseline = os.path.join(args.run, 'checkpoints', args.doc_baseline)
        os.makedirs(args.checkpoints_baseline, exist_ok=True)
        args.output_baseline = os.path.join(args.run, 'output', args.doc_baseline)
        os.makedirs(args.output_baseline, exist_ok=True)
def main():
    """Entry point: dispatch to the experiment selected by the CLI flags.

    The flag combinations (pretraining, transfer, semi-supervised,
    representation/MCC, and their baselines and plots) and the steps
    they correspond to are documented in the inline comment blocks
    below. Each branch reseeds numpy/torch and rebuilds the directory
    layout via make_and_set_dirs before running.
    """
    if torch.cuda.is_available():
        dev = torch.cuda.device_count() - 1
        print("Running on gpu:{}".format(dev))
        # torch.cuda.set_device(dev)
    else:
        print("Running on cpu")
    args = parse()
    # load config
    with open(os.path.join('configs', args.config), 'r') as f:
        print('loading config file: {}'.format(os.path.join('configs', args.config)))
        config_raw = yaml.load(f, Loader=yaml.FullLoader)
    config = dict2namespace(config_raw)
    config.device = torch.device('cuda') if torch.cuda.is_available() else torch.device('cpu')
    # print(config)
    # set random seeds
    np.random.seed(args.seed)
    torch.manual_seed(args.seed)
    # FOR DEBUG ONLY: the short flags override the model config in place.
    if args.a:
        config.model.augment = True
    if args.p:
        config.model.positive = True
    if args.z > 0:
        config.model.final_layer = True
        config.model.feature_size = args.z
    # TRANSFER LEARNING EXPERIMENTS
    # 1- no special flag: pretrain icebeem on 0-7 // --doc should be different between datasets
    # 2- --transfer: train only g on 8-9 // --doc should be the same as in step 1
    # 3- no special flags but with baseline config: train icebeem on 8-9
    # 4- --transfer --baseline: train icebeem on 8-9 (same as above)
    # steps 2, 3 and 4 are for many seeds and many subset sizes: the user can do them manually, or add the flag --all
    # and the script will perform the loop
    # step 3 is only for debug and shouldn't be used in practice
    if not args.transfer and not args.semisupervised and not args.baseline and not args.plot and not args.representation:
        print('Training an ICE-BeeM on {}'.format(config.data.dataset))
        args.doc = 'transfer'
        make_and_set_dirs(args, config)
        train(args, config, args.conditional)
    if args.transfer and not args.baseline and not args.plot:
        if not args.all:
            print(
                'Transfer for {} - subset size: {} - seed: {}'.format(config.data.dataset, args.subset_size, args.seed))
            args.doc = 'transfer'
            make_and_set_dirs(args, config)
            transfer(args, config)
        else:
            # Sweep all subset sizes and seeds; args are copied so each run is isolated.
            new_args = argparse.Namespace(**vars(args))
            for n in [0, 500, 1000, 2000, 3000, 4000, 5000, 6000]:
                for seed in range(args.seed, args.n_sims + args.seed):
                    print('Transfer for {} - subset size: {} - seed: {}'.format(config.data.dataset, n, seed))
                    new_args.subset_size = n
                    new_args.seed = seed
                    # change random seed
                    np.random.seed(seed)
                    torch.manual_seed(seed)
                    new_args.doc = 'transfer'
                    make_and_set_dirs(new_args, config)
                    transfer(new_args, config)
    if config.data.dataset.lower() in ['mnist_transferbaseline', 'cifar10_transferbaseline',
                                       'fashionmnist_transferbaseline', 'cifar100_transferbaseline']:
        # this is just here for debug, shouldn't be run, use --baseline --transfer instead
        if not args.all:
            print('Transfer baseline for {} - subset size: {} - seed: {}'.format(config.data.dataset.split('_')[0],
                                                                                 args.subset_size, args.seed))
            args.doc = 'transferBaseline'
            args.doc2 = 'size{}_seed{}'.format(args.subset_size, args.seed)
            make_and_set_dirs(args, config)
            train(args, config, args.conditional)
        else:
            new_args = argparse.Namespace(**vars(args))
            for n in [0, 500, 1000, 2000, 3000, 4000, 5000, 6000]:
                for seed in range(args.seed, args.n_sims + args.seed):
                    print('Transfer baseline for {} - subset size: {} - seed: {}'.format(
                        config.data.dataset.split('_')[0], n, seed))
                    new_args.subset_size = n
                    new_args.seed = seed
                    # change random seed
                    np.random.seed(seed)
                    torch.manual_seed(seed)
                    new_args.doc = 'transferBaseline'
                    new_args.doc2 = 'size{}_seed{}'.format(n, seed)
                    make_and_set_dirs(new_args, config)
                    train(new_args, config, args.conditional)
    if args.transfer and args.baseline and not args.plot:
        # update args and config: load the matching *_baseline yaml next to the main one.
        new_args = argparse.Namespace(**vars(args))
        new_args.config = os.path.splitext(args.config)[0] + '_baseline' + os.path.splitext(args.config)[1]
        with open(os.path.join('configs', new_args.config), 'r') as f:
            print('loading baseline config file: {}'.format(os.path.join('configs', new_args.config)))
            config_raw = yaml.load(f, Loader=yaml.FullLoader)
        config = dict2namespace(config_raw)
        config.device = torch.device('cuda') if torch.cuda.is_available() else torch.device('cpu')
        if not args.all:
            print(
                'Transfer baseline for {} - subset size: {} - seed: {}'.format(config.data.dataset.split('_')[0],
                                                                               new_args.subset_size, new_args.seed))
            new_args.doc = 'transferBaseline'
            new_args.doc2 = 'size{}_seed{}'.format(args.subset_size, args.seed)
            make_and_set_dirs(new_args, config)
            train(new_args, config, args.conditional)
        else:
            for n in [0, 500, 1000, 2000, 3000, 4000, 5000, 6000]:
                for seed in range(args.seed, args.n_sims + args.seed):
                    print('Transfer baseline for {} - subset size: {} - seed: {}'.format(
                        config.data.dataset.split('_')[0], n, seed))
                    new_args.subset_size = n
                    new_args.seed = seed
                    # change random seed
                    np.random.seed(seed)
                    torch.manual_seed(seed)
                    new_args.doc = 'transferBaseline'
                    new_args.doc2 = 'size{}_seed{}'.format(n, seed)
                    make_and_set_dirs(new_args, config)
                    train(new_args, config, args.conditional)
    # PLOTTING TRANSFER LEARNING
    # 1- just use of the flag --plot and --transfer AND NO other flag (except --config of course)
    if args.plot and not args.baseline and not args.semisupervised and args.transfer and not args.representation:
        print('Plotting transfer experiment for {}'.format(config.data.dataset))
        args.doc = 'transfer'
        args.doc_baseline = 'transferBaseline'
        make_and_set_dirs(args, config)
        plot_transfer(args, config)
    # SEMI-SUPERVISED EXPERIMENTS
    # 1- no special flag: pretrain icebeem on 0-7 // same as 1- above
    # 2- --semisupervised: classify 8-9 using pretrained icebeem // --doc should be the same as from step 1-
    # 3- --baseline: pretrain unconditional ebm on 0-7: IT IS VERY IMPORTANT HERE TO SPECIFY A --doc THAT IS
    #    DIFFERENT FROM WHEN RUN FOR ICEBEEM
    # 4- --semisupervised --baseline: classify 8-9 using unconditional ebm // --doc should be the same as from step 3-
    if args.baseline and not args.semisupervised and not args.transfer and not args.representation and not args.plot:
        print('Training a baseline EBM on {}'.format(config.data.dataset))
        args.doc = 'semisupervisedBaseline'
        make_and_set_dirs(args, config)
        train(args, config, conditional=False)
    if args.semisupervised and not args.baseline and not args.plot:
        print('Computing semi-supervised accuracy for ICE-BeeM on {}'.format(config.data.dataset))
        args.doc = 'transfer'  # first step of semi-supervised learning is the same as first step in transfer learning
        make_and_set_dirs(args, config)
        semisupervised(args, config)
    if args.semisupervised and args.baseline and not args.plot:
        print('Computing semi-supervised accuracy for baseline EBM on {}'.format(config.data.dataset))
        args.doc = 'semisupervisedBaseline'
        make_and_set_dirs(args, config)
        semisupervised(args, config)
    # COMPARE QUALITY OF REPRESENTATIONS ON REAL DATA
    # 1- --representation: trains ICE-BeeM on train dataset, and save learnt rep of test data for
    #    different seeds
    # 2- --representation --baseline: trains unconditional EBM on train dataset, and save learnt rep of test data for
    #    different seeds
    # 3- --representation --mcc: compute pairwaise mccs for ICE-BeeM:
    #    --all: do it for all random seeds used in step 1-
    #    --seed X --second-seed Y: compute mcc between seeds X and Y
    # 4- --representation --mcc --baseline: same as 3- for baseline
    # 5- --representation --plot: plot boxplot of MCCs in and out of sample
    #    --n-sims: only consider n_sims seeds and not all if n_ims < n seeds used in 1-
    if args.representation:
        config.n_labels = 10 if config.data.dataset.lower().split('_')[0] != 'cifar100' else 100
        if not args.mcc and not args.baseline and not args.plot:
            for seed in range(args.seed, args.n_sims + args.seed):
                print('Learning representation for {} - seed: {}'.format(config.data.dataset, seed))
                new_args = argparse.Namespace(**vars(args))
                new_args.seed = seed
                np.random.seed(args.seed)
                torch.manual_seed(args.seed)
                new_args.doc = 'representation'
                new_args.doc2 = 'seed{}'.format(seed)
                make_and_set_dirs(new_args, config)
                compute_representations(new_args, config)
        if args.baseline and not args.mcc and not args.plot:
            for seed in range(args.seed, args.n_sims + args.seed):
                print('Learning baseline representation for {} - seed: {}'.format(config.data.dataset, seed))
                new_args = argparse.Namespace(**vars(args))
                new_args.seed = seed
                np.random.seed(args.seed)
                torch.manual_seed(args.seed)
                new_args.doc = 'representationBaseline'
                new_args.doc2 = 'seed{}'.format(seed)
                make_and_set_dirs(new_args, config)
                compute_representations(new_args, config, conditional=False)
        if args.mcc and not args.baseline and not args.plot:
            if args.all:
                # All unordered seed pairs (seed < second_seed).
                for seed in range(args.seed, args.n_sims + args.seed - 1):
                    for second_seed in range(seed + 1, args.n_sims + args.seed):
                        print('Computing MCCs for {} - seeds: {} and {}'.format(config.data.dataset, seed,
                                                                                second_seed))
                        new_args = argparse.Namespace(**vars(args))
                        new_args.seed = seed
                        new_args.second_seed = second_seed
                        np.random.seed(args.seed)
                        torch.manual_seed(args.seed)
                        new_args.doc = 'representation'
                        make_and_set_dirs(new_args, config)
                        compute_mcc(new_args, config)
            else:
                assert 'second_seed' in vars(args).keys()
                print('Computing MCCs for {} - seeds: {} and {}'.format(config.data.dataset, args.seed,
                                                                        args.second_seed))
                args.doc = 'representation'
                make_and_set_dirs(args, config)
                compute_mcc(args, config)
        if args.mcc and args.baseline and not args.plot:
            if args.all:
                for seed in range(args.seed, args.n_sims + args.seed - 1):
                    for second_seed in range(seed + 1, args.n_sims + args.seed):
                        print('Computing baseline MCCs for {} - seeds: {} and {}'.format(config.data.dataset, seed,
                                                                                         second_seed))
                        new_args = argparse.Namespace(**vars(args))
                        new_args.seed = seed
                        new_args.second_seed = second_seed
                        np.random.seed(args.seed)
                        torch.manual_seed(args.seed)
                        new_args.doc = 'representationBaseline'
                        make_and_set_dirs(new_args, config)
                        compute_mcc(new_args, config)
            else:
                assert 'second_seed' in vars(args).keys()
                print('Computing baseline MCCs for {} - seeds: {} and {}'.format(config.data.dataset, args.seed,
                                                                                 args.second_seed))
                args.doc = 'representationBaseline'
                make_and_set_dirs(args, config)
                compute_mcc(args, config)
        if args.plot:
            print('Plotting representation experiment for {}'.format(config.data.dataset))
            args.doc = 'representation'
            args.doc_baseline = 'representationBaseline'
            make_and_set_dirs(args, config)
            plot_representation(args, config)
if __name__ == '__main__':
    # Propagate main()'s return value as the process exit status.
    sys.exit(main())
| StarcoderdataPython |
246407 | import numpy as np
from numpy import linalg as la
from sklearn import datasets
from .EM_Factor_analysis import factor_analysis
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
# iris data example
def run_demo():
    """Demo: EM factor analysis on the iris dataset.

    Reduces the data to 2 and then 3 latent factors, scatter-plotting
    the latent coordinates coloured by species and plotting the
    log-likelihood trace after each fit.
    """
    print("play with iris data")
    iris = datasets.load_iris()
    features = iris.data
    labels = iris.target

    print("Assume 2 latent variables")
    n_factors = 2
    loadings, latent, ll_trace = factor_analysis(features, n_factors)
    plt.scatter(x=latent[:, 0], y=latent[:, 1], c=labels)
    plt.title("iris dataset reduced to 2 Factors")
    plt.show()
    plt.plot(ll_trace)
    plt.title("log likelihood over iteration")
    plt.show()

    print("Assume 3 latent variables")
    n_factors = 3
    loadings, latent, ll_trace = factor_analysis(features, n_factors)
    figure = plt.figure()
    axes = figure.add_subplot(111, projection='3d')
    axes.scatter(xs=latent[:, 0], ys=latent[:, 1], zs=latent[:, 2], c=labels)
    plt.title("iris dataset reduced to 3 Factors")
    plt.show()
    plt.plot(ll_trace)
    plt.title("log likelihood over iteration")
    plt.show()
4993662 | from __future__ import absolute_import
from __future__ import unicode_literals
from .api import VERBOSE, UserIntervention, IotsaConfig, IotsaWifi, IotsaDevice
| StarcoderdataPython |
9618504 | from math import ceil, floor, trunc
a = 12.3456
b = a
# print(b)

import copy  # fix: 'copy' was never imported, so 'copy.copy' below raised NameError

list = [1, 2, 3, 4, 5]
# Shallow copy via copy.copy: both names point to distinct list objects
# with different memory addresses (ids).
list1 = copy.copy(list)
help(copy.copy)
print("list : ", id(list))
print("list1 : ", id(list1))
1823872 | <reponame>YunofHD/PSpider
# _*_ coding: utf-8 _*_
"""
inst_fetch.py by xianhu
"""
import time
import random
import logging
import requests
from ..utilities import CONFIG_FETCH_MESSAGE, get_dict_buildin # '.'从上层目录的某个文件夹导入
class Fetcher(object):
    """
    class of Fetcher, must include function working()

    working() wraps url_fetch() with retry accounting: an exception from a
    fetch is either marked for repeat (state 0) or given up on (state -1)
    once the repeat budget is exhausted.
    """
    def __init__(self, max_repeat=3, sleep_time=0):
        """
        constructor
        :param max_repeat: default 3, maximum repeat count of a fetching
        :param sleep_time: default 0, sleeping time after a fetching
        """
        self._max_repeat = max_repeat
        self._sleep_time = sleep_time
        return
    def working(self, priority: int, url: str, keys: dict, deep: int, repeat: int, proxies=None) -> (int, object, int):
        """
        working function, must "try, except" and don't change the parameters and returns
        :return fetch_state: can be -1(fetch failed), 0(need repeat), 1(fetch success)
        :return fetch_result: can be any object, for example string, list, None, etc
        :return proxies_state: can be -1(unavaiable), 0(return to queue), 1(avaiable)
        """
        logging.debug("%s start: %s", self.__class__.__name__, CONFIG_FETCH_MESSAGE % (priority, keys, deep, repeat, url))
        # Random politeness delay in [0, sleep_time] seconds before fetching.
        time.sleep(random.randint(0, self._sleep_time))
        try:
            fetch_state, fetch_result, proxies_state = self.url_fetch(priority, url, keys, deep, repeat, proxies=proxies)
        except Exception as excep:  # the fetch raised: decide from the repeat count whether to retry or give up
            if repeat >= self._max_repeat:
                fetch_state, fetch_result, proxies_state = -1, None, -1
                logging.error("%s error: %s, %s", self.__class__.__name__, excep, CONFIG_FETCH_MESSAGE % (priority, get_dict_buildin(keys), deep, repeat, url))
            else:
                fetch_state, fetch_result, proxies_state = 0, None, -1
                logging.debug("%s repeat: %s, %s", self.__class__.__name__, excep, CONFIG_FETCH_MESSAGE % (priority, keys, deep, repeat, url))
        logging.debug("%s end: fetch_state=%s, proxies_state=%s, url=%s", self.__class__.__name__, fetch_state, proxies_state, url)
        return fetch_state, fetch_result, proxies_state
    def url_fetch(self, priority: int, url: str, keys: dict, deep: int, repeat: int, proxies=None) -> (int, object, int):
        """
        fetch the content of a url, you can rewrite this function, parameters and returns refer to self.working()
        """
        response = requests.get(url, params=None, headers={}, data=None, proxies=proxies, timeout=(3.05, 10))
        if response.history:
            # A non-empty history means the request was redirected at least once.
            logging.debug("%s redirect: %s", self.__class__.__name__, CONFIG_FETCH_MESSAGE % (priority, keys, deep, repeat, url))
        result = (response.status_code, response.url, response.text)
        return 1, result, 1
| StarcoderdataPython |
6672681 | <filename>krgram/tl/protocol/system.py
from krgram.tl.base import TLConstructor, TLConstructedType
from krgram.tl.core_types.native import TL_long, TL_int, TL_string
from krgram.tl.core_types.vector import Vector
'''
msgs_ack#62d6b459 msg_ids:Vector<long> = MsgsAck;
'''
class msgs_ack(TLConstructor):
    """TL constructor: msgs_ack#62d6b459 msg_ids:Vector<long> = MsgsAck;"""
    ID = 0x62d6b459
    def get_structure(self):
        # Single field: vector of acknowledged message ids.
        # NOTE(review): single-field constructors return one (name, type)
        # pair while multi-field ones return a tuple of pairs -- confirm
        # the TLConstructor base class handles both shapes.
        return "msg_ids", Vector(lambda x: TL_long())
class MsgsAck(TLConstructedType):
    # Boxed TL type with a single constructor (msgs_ack).
    CONSTRUCTORS_CLASSES = msgs_ack,
'''
bad_msg_notification#a7eff811 bad_msg_id:long bad_msg_seqno:int error_code:int = BadMsgNotification;
bad_server_salt#edab447b bad_msg_id:long bad_msg_seqno:int error_code:int new_server_salt:long = BadMsgNotification;
'''
class bad_msg_notification(TLConstructor):
    """TL constructor: bad_msg_notification#a7eff811 bad_msg_id:long bad_msg_seqno:int error_code:int = BadMsgNotification;"""
    ID = 0xa7eff811
    def get_structure(self):
        return ("bad_msg_id", TL_long()), ("bad_msg_seqno", TL_int()),( "error_code", TL_int())
class bad_server_salt(TLConstructor):
    """TL constructor: bad_server_salt#edab447b bad_msg_id:long bad_msg_seqno:int error_code:int new_server_salt:long = BadMsgNotification;"""
    ID = 0xedab447b
    def get_structure(self):
        return ("bad_msg_id", TL_long()), ("bad_msg_seqno", TL_int()), ("error_code", TL_int()), ("new_server_salt",TL_long())
class BadMsgNotification(TLConstructedType):
    # Boxed TL type with two constructors distinguished by their IDs.
    CONSTRUCTORS_CLASSES = bad_msg_notification, bad_server_salt
'''
msgs_state_req#da69fb52 msg_ids:Vector<long> = MsgsStateReq;
msgs_state_info#04deb57d req_msg_id:long info:string = MsgsStateInfo;
msgs_all_info#8cc0d131 msg_ids:Vector<long> info:string = MsgsAllInfo;
'''
class msgs_state_req(TLConstructor):
    """TL constructor: msgs_state_req#da69fb52 msg_ids:Vector<long> = MsgsStateReq;"""
    ID = 0xda69fb52
    def get_structure(self):
        return "msg_ids", Vector(lambda x: TL_long())
class msgs_state_info(TLConstructor):
    """TL constructor: msgs_state_info#04deb57d req_msg_id:long info:string = MsgsStateInfo;"""
    ID = 0x04deb57d
    def get_structure(self):
        return ("req_msg_id", TL_long()), ("info", TL_string())
class msgs_all_info(TLConstructor):
    """TL constructor: msgs_all_info#8cc0d131 msg_ids:Vector<long> info:string = MsgsAllInfo;"""
    ID = 0x8cc0d131
    def get_structure(self):
        return ("msg_ids", Vector(lambda x: TL_long())), ("info", TL_string())
#############
'''
msg_detailed_info#276d3ec6 msg_id:long answer_msg_id:long bytes:int status:int = MsgDetailedInfo;
msg_new_detailed_info#809db6df answer_msg_id:long bytes:int status:int = MsgDetailedInfo;
msg_resend_req#7d861a08 msg_ids:Vector<long> = MsgResendReq;
//rpc_result#f35c6d01 req_msg_id:long result:Object = RpcResult; // parsed manually
rpc_error#2144ca19 error_code:int error_message:string = RpcError;
rpc_answer_unknown#5e2ad36e = RpcDropAnswer;
rpc_answer_dropped_running#cd78e586 = RpcDropAnswer;
rpc_answer_dropped#a43ad8b7 msg_id:long seq_no:int bytes:int = RpcDropAnswer;
future_salt#0949d9dc valid_since:int valid_until:int salt:long = FutureSalt;
future_salts#ae500895 req_msg_id:long now:int salts:vector<future_salt> = FutureSalts;
pong#347773c5 msg_id:long ping_id:long = Pong;
destroy_session_ok#e22045fc session_id:long = DestroySessionRes;
destroy_session_none#62d350c9 session_id:long = DestroySessionRes;
new_session_created#9ec20908 first_msg_id:long unique_id:long server_salt:long = NewSession;
//message msg_id:long seqno:int bytes:int body:Object = Message; // parsed manually
//msg_container#73f1f8dc messages:vector<message> = MessageContainer; // parsed manually
//msg_copy#e06046b2 orig_message:Message = MessageCopy; // parsed manually, not used - use msg_container
//gzip_packed#3072cfa1 packed_data:string = Object; // parsed manually
http_wait#9299359f max_delay:int wait_after:int max_wait:int = HttpWait;
ipPort ipv4:int port:int = IpPort;
help.configSimple#d997c3c5 date:int expires:int dc_id:int ip_port_list:Vector<ipPort> = help.ConfigSimple;
---functions---
rpc_drop_answer#58e4a740 req_msg_id:long = RpcDropAnswer;
get_future_salts#b921bd04 num:int = FutureSalts;
ping#7abe77ec ping_id:long = Pong;
ping_delay_disconnect#f3427b8c ping_id:long disconnect_delay:int = Pong;
destroy_session#e7512126 session_id:long = DestroySessionRes;
contest.saveDeveloperInfo#9a5f6e95 vk_id:int name:string phone_number:string age:int city:string = Bool;
''' | StarcoderdataPython |
5163199 | <gh_stars>0
# Generated by Django 3.2.11 on 2022-05-06 12:30
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated Django migration: sets Identification.confidence to a
    # nullable CharField restricted to the six review-state choices below.
    dependencies = [
        ('observations', '0014_identification_observations_identification_check_confidence_reasons'),
    ]
    operations = [
        migrations.AlterField(
            model_name='identification',
            name='confidence',
            field=models.CharField(blank=True, choices=[('In_progress', 'In progress'), ('Check', 'Check'), ('Check_in_museum', 'Check in museum'), ('Confirmed', 'Confirmed'), ('Redo', 'Redo'), ('Review', 'Review')], max_length=30, null=True),
        ),
    ]
| StarcoderdataPython |
1873815 | from .gradient_descent_base import L1BaseGradientDescent
from .gradient_descent_base import L2BaseGradientDescent
from .gradient_descent_base import LinfBaseGradientDescent
from ..models.base import Model
from ..criteria import Misclassification, TargetedMisclassification
from .base import T
from typing import Union, Any
class L1FastGradientAttack(L1BaseGradientDescent):
    """Fast Gradient Method (FGM) using the L1 norm.

    A single-step attack: one gradient step of full epsilon size,
    optionally started from a random point inside the epsilon ball.

    Args:
        random_start : Controls whether to randomly start within allowed epsilon ball.
    """

    def __init__(self, *, random_start: bool = False):
        # One full-size step turns the iterative L1 descent into FGM.
        super().__init__(rel_stepsize=1.0, steps=1, random_start=random_start)

    def run(
        self,
        model: Model,
        inputs: T,
        criterion: Union[Misclassification, TargetedMisclassification, T],
        *,
        epsilon: float,
        **kwargs: Any,
    ) -> T:
        # FGM is untargeted: criteria carrying target classes are rejected.
        if hasattr(criterion, "target_classes"):
            raise ValueError("unsupported criterion")
        return super().run(
            model=model, inputs=inputs, criterion=criterion, epsilon=epsilon, **kwargs
        )
class L2FastGradientAttack(L2BaseGradientDescent):
    """Fast Gradient Method (FGM) using the L2 norm.

    A single-step attack: one gradient step of full epsilon size,
    optionally started from a random point inside the epsilon ball.

    Args:
        random_start : Controls whether to randomly start within allowed epsilon ball.
    """

    def __init__(self, *, random_start: bool = False):
        # One full-size step turns the iterative L2 descent into FGM.
        super().__init__(rel_stepsize=1.0, steps=1, random_start=random_start)

    def run(
        self,
        model: Model,
        inputs: T,
        criterion: Union[Misclassification, TargetedMisclassification, T],
        *,
        epsilon: float,
        **kwargs: Any,
    ) -> T:
        # FGM is untargeted: criteria carrying target classes are rejected.
        if hasattr(criterion, "target_classes"):
            raise ValueError("unsupported criterion")
        return super().run(
            model=model, inputs=inputs, criterion=criterion, epsilon=epsilon, **kwargs
        )
class LinfFastGradientAttack(LinfBaseGradientDescent):
    """Fast Gradient Sign Method (FGSM).

    A single-step Linf attack: one sign-gradient step of full epsilon
    size, optionally started from a random point inside the epsilon ball.

    Args:
        random_start : Controls whether to randomly start within allowed epsilon ball.
    """

    def __init__(self, *, random_start: bool = False):
        # One full-size step turns the iterative Linf descent into FGSM.
        super().__init__(rel_stepsize=1.0, steps=1, random_start=random_start)

    def run(
        self,
        model: Model,
        inputs: T,
        criterion: Union[Misclassification, TargetedMisclassification, T],
        *,
        epsilon: float,
        **kwargs: Any,
    ) -> T:
        # FGSM is untargeted: criteria carrying target classes are rejected.
        if hasattr(criterion, "target_classes"):
            raise ValueError("unsupported criterion")
        return super().run(
            model=model, inputs=inputs, criterion=criterion, epsilon=epsilon, **kwargs
        )
| StarcoderdataPython |
12804378 | from collections import defaultdict
from datetime import datetime
from xml.sax.saxutils import escape
from django.core.management.base import BaseCommand, CommandError
from django.db import transaction
from conference import models
from conference.templatetags.conference import fare_blob
class Command(BaseCommand):
    """
    Creates or updates one schedule Event per partner-programme fare,
    placing it on the conference day and departure time encoded in the
    fare's blob data.
    """
    @transaction.atomic
    def handle(self, *args, **options):
        try:
            conference = args[0]
        except IndexError:
            raise CommandError('conference missing')
        # Group partner fares by the calendar date parsed from their blob.
        partner_events = defaultdict(list)
        for f in models.Fare.objects.available(conference=conference).filter(ticket_type='partner'):
            try:
                # NOTE(review): the year is hard-coded to 2011, so fares from
                # other editions would land on the wrong date -- confirm intent.
                date = datetime.strptime(fare_blob(f, 'data').split(',')[0][:-2] + ' 2011', '%B %d %Y').date()
                time = datetime.strptime(fare_blob(f, 'departure'), '%H:%M').time()
            except ValueError:
                # Fares without a parsable date/departure blob are skipped.
                continue
            partner_events[date].append((f, time))
        for sch in models.Schedule.objects.filter(conference=conference):
            events = list(models.Event.objects.filter(schedule=sch))
            for fare, time in partner_events[sch.date]:
                track_id = 'f%s' % fare.id
                # Reuse the existing event carrying this fare's track id, if any.
                for e in events:
                    if track_id in e.get_all_tracks_names():
                        event = e
                        break
                else:
                    event = models.Event(schedule=sch, talk=None)
                event.track = 'partner-program ' + track_id
                event.custom = escape(fare.name)
                event.start_time = time
                # Morning departures run until 13:00, afternoon ones until 19:00.
                if time.hour < 13:
                    d = (13 - time.hour) * 60
                else:
                    d = (19 - time.hour) * 60
                event.duration = d
                event.save()
| StarcoderdataPython |
5042325 | XPM_MOD_VERSION = '1.2.1'
# Mod metadata — presumably consumed by the game's mod loader (confirm).
XPM_MOD_URL = 'https://github.com/juho-p/wot-debugserver'
XPM_MOD_UPDATE_URL = ''
XPM_GAME_VERSIONS = ['0.9.5']
# Master switch: set to False to disable starting the REPL server thread.
run = True
import datetime
import tcprepl
def log(text):
    """Print *text* prefixed with the module tag and the current HH:MM time."""
    # datetime.now().strftime(...) replaces the original unbound-method call
    # datetime.time.strftime(datetime.datetime.now().time(), ...), which only
    # works on Python 2; output is identical. print(...) is valid on both
    # Python 2 and 3 for a single argument.
    ds = datetime.datetime.now().strftime('%H:%M')
    print('replserver %s: %s' % (ds, text))
def run_server():
    """Run the TCP REPL forever, restarting it every time it stops.

    Any exception escaping the loop is logged with a traceback and the
    function returns (the daemon thread then dies quietly).
    """
    log('run server...')
    try:
        while True:
            tcprepl.run_repl()
            log('REPL stopped, restarting...')
    except:
        # Broad bare except is deliberate: this runs on a daemon thread
        # inside the game client, so any crash is reported, not propagated.
        log('* Crashed *')
        import traceback
        traceback.print_exc()
    log('Server stopped!')
# Start the REPL server on a background daemon thread so the game client
# is never blocked; any start-up failure is printed instead of raised.
if run:
    log('starting..')
    try:
        import threading
        thread = threading.Thread(target=run_server, args=())
        thread.setDaemon(True)
        thread.start()
        log('thread started..')
    except:
        import traceback
        traceback.print_exc()
| StarcoderdataPython |
1960240 | <gh_stars>1-10
import os
os.environ["TF_DETERMINISTIC_OPS"] = "1"
import tensorflow as tf
from backbones import ModelFactory
# Fixed seed (together with TF_DETERMINISTIC_OPS above) for reproducibility.
tf.random.set_seed(42)
# Input spectrogram dimensions: frames (time) x features (e.g. mel bins).
NUM_FRAMES = 512
NUM_FEATURES = 128
# Shared factory used to instantiate CNN backbones by name.
MODEL_FACTORY = ModelFactory()
class DeepMetricLearning(tf.keras.Model):
    """Embedding model trained with a metric-learning loss.

    A CNN backbone followed by global average pooling and a dense ReLU
    projection to a 128-d embedding. Custom ``train_step``/``test_step``
    optimize ``metric_loss_fn`` directly.
    """
    def __init__(self, backbone_name="densenet121", **kwargs):
        super().__init__(**kwargs)
        self.backbone = MODEL_FACTORY.get_model_by_name(name=backbone_name)
        self.backbone._name = "backbone_global"
        self.fc = tf.keras.layers.Dense(units=128, activation="relu", name="fc")
        self.pooling = tf.keras.layers.GlobalAveragePooling2D(name="pooling")
        # Whether false-positive examples take part in training; off here,
        # the Classifier subclass uses the same flag.
        self.use_fp = False
    def _build(self):
        # Trigger weight creation with one symbolic forward pass.
        inputs = tf.keras.layers.Input(
            shape=[NUM_FRAMES, NUM_FEATURES, 3], dtype=tf.float32
        )
        self(inputs, training=True)
    def call(self, inputs, training=False):
        features = self.backbone(inputs, training=training)
        features = self.pooling(features)
        features = self.fc(features)
        return features  # [B, 128]
    def compile(
        self,
        optimizer,
        metrics,
        metric_loss_fn,
        classification_loss_fn,
        moving_average_bce,
    ):
        """Attach optimizer/metrics plus the custom loss callables."""
        super().compile(optimizer, metrics)
        self.metric_loss_fn = metric_loss_fn
        self.classification_loss_fn = classification_loss_fn
        self.moving_average_bce = moving_average_bce
    def _apply_gradients(self, total_loss):
        # compute gradient
        # When a mixed-precision LossScaleOptimizer is used, scale the loss
        # before differentiation and unscale the gradients afterwards.
        if isinstance(
            self.optimizer, tf.keras.mixed_precision.experimental.LossScaleOptimizer
        ):
            scaled_loss = self.optimizer.get_scaled_loss(total_loss)
        else:
            scaled_loss = total_loss
        scaled_gradients = tf.gradients(scaled_loss, self.trainable_variables)
        if isinstance(
            self.optimizer, tf.keras.mixed_precision.experimental.LossScaleOptimizer
        ):
            gradients = self.optimizer.get_unscaled_gradients(scaled_gradients)
        else:
            gradients = scaled_gradients
        self.optimizer.apply_gradients(zip(gradients, self.trainable_variables))
    @tf.function
    def train_step(self, data):
        # ``data`` arrives as a 1-tuple holding a dict of named tensors.
        data = data[0]
        x_tp, y_tp = data["x_tp"], data["y_tp"]
        # forward pass
        features = self(x_tp, training=True)
        # calculate metric learning loss
        metric_loss = tf.reduce_mean(self.metric_loss_fn(y_tp, features))
        # apply gradients
        self._apply_gradients(metric_loss)
        # return results
        results = {}
        results.update({"loss": metric_loss})
        return results
    @tf.function
    def test_step(self, data):
        x, y = data
        # forward pass
        features = self(x, training=False)
        # calculate metric learning loss
        metric_loss = tf.reduce_mean(self.metric_loss_fn(y, features))
        # return results
        results = {}
        results.update({"loss": metric_loss})
        return results
class Classifier(DeepMetricLearning):
    """Multi-label classifier head on top of the metric-learning backbone.

    Replaces the embedding projection with dropout -> 512-d ReLU ->
    24-way logits. Training optionally uses false-positive examples
    (``use_fp``) with separate positive/negative BCE terms.
    """
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.dropout = tf.keras.layers.Dropout(rate=0.5)
        self.fc = tf.keras.layers.Dense(
            units=512, activation=tf.nn.relu, name="dense_global_cls"
        )
        # Logits kept in float32 so the loss stays numerically stable
        # even when the rest of the model runs in mixed precision.
        self.logits = tf.keras.layers.Dense(units=24, activation=None, dtype=tf.float32)
        self.use_fp = False
    def call(self, inputs, training=False):
        features = self.backbone(inputs, training=training)
        x = self.pooling(features)
        x = self.dropout(x, training=training)
        x = self.fc(x)
        x = self.dropout(x, training=training)
        x = self.logits(x)
        return x
    def compute_tp_loss(self, y_tp, pred_tp, from_logits=False):
        """BCE term rewarding high probability on true-positive labels."""
        if from_logits:
            pred_tp = tf.nn.sigmoid(pred_tp)
            epsilon_ = tf.convert_to_tensor(tf.keras.backend.epsilon(), pred_tp.dtype)
            pred_tp = tf.clip_by_value(pred_tp, epsilon_, 1.0 - epsilon_)
        bce = y_tp * tf.math.log(pred_tp + tf.keras.backend.epsilon())
        return -1.0 * tf.reduce_mean(bce)
    def compute_fp_loss(self, y_fp, pred_fp, from_logits=False):
        """BCE term penalizing high probability on known false-positive labels."""
        if from_logits:
            pred_fp = tf.nn.sigmoid(pred_fp)
            epsilon_ = tf.convert_to_tensor(tf.keras.backend.epsilon(), pred_fp.dtype)
            pred_fp = tf.clip_by_value(pred_fp, epsilon_, 1.0 - epsilon_)
        bce = y_fp * tf.math.log(1.0 - pred_fp + tf.keras.backend.epsilon())
        return -1.0 * tf.reduce_mean(bce)
    @tf.function
    def train_step(self, data):
        data = data[0]
        x_tp, y_tp = data["x_tp"], data["y_tp"]
        if self.use_fp:
            # Two separate optimization steps: one on true positives...
            logits_tp = self(x_tp, training=True)
            cls_tp_loss = self.compute_tp_loss(y_tp, logits_tp, from_logits=True)
            self._apply_gradients(cls_tp_loss)
            # fp optimize
            x_fp, y_fp = data["x_fp"], data["y_fp"]
            logits_fp = self(x_fp, training=True)
            cls_fp_loss = self.compute_fp_loss(y_fp, logits_fp, from_logits=True)
            self._apply_gradients(cls_fp_loss)
            self.metrics[0].update_state(y_tp, logits_tp)
            # return result
            results = {}
            results.update(
                {
                    "tp_loss": cls_tp_loss,
                    "fp_loss": cls_fp_loss,
                    "lwlrap": self.metrics[0].result(),
                }
            )
            return results
        else:
            # forward step
            logits = self(x_tp, training=True)
            # compute loss and calculate gradients
            cls_loss = self.moving_average_bce(
                y_tp, logits, data["r"], self.optimizer.iterations, data["is_cutmix"][0]
            )
            self._apply_gradients(cls_loss)
            self.metrics[0].update_state(y_tp, logits)
            # return result
            results = {}
            results.update({"loss": cls_loss, "lwlrap": self.metrics[0].result()})
            return results
    @tf.function
    def test_step(self, data):
        x, y = data
        logits = self(x, training=False)
        # compute loss and calculate gradients
        cls_loss = self.classification_loss_fn(y, logits)
        self.metrics[0].update_state(y, logits)
        # return result
        results = {}
        results.update({"loss": cls_loss, "lwlrap": self.metrics[0].result()})
        return results
| StarcoderdataPython |
5023687 | import commands
import time
import rospy
def signal_level():
    """Return the 4th field of the 3rd line of /proc/net/wireless (the
    first interface's signal level) as a string."""
    return commands.getoutput("cat /proc/net/wireless | awk 'NR==3 {print $4}'")
# ROS node: print the wifi signal level ten times a second until shutdown.
if __name__ == '__main__':
    rospy.init_node('wifi_signal_level')
    while not rospy.is_shutdown():
        print signal_level()
        time.sleep(0.1)
| StarcoderdataPython |
3244607 | #!/usr/bin/env python3
# Otro script para mostrar los datos del sensor MH-Z14A
# 2021 Josema - <EMAIL>
# Librerías necesarias
# pip install ephem
# pip3 install scrollphathd
# pip3 install pyserial
# sudo apt install libatlas-base-dev
# sudo apt install python3-smbus
import time
import serial
import scrollphathd
import math
import ephem
from scrollphathd.fonts import font3x5
from datetime import datetime as dt
from datetime import timedelta
# Command frame to read the CO2 concentration (command 0x86)
PETICION = [0xFF, 0x01, 0x86, 0x00, 0x00, 0x00, 0x00, 0x00, 0x79]
# Range 1: 0 to 2000 ppm
RANGO1 = [0xFF, 0x01, 0x99, 0x00, 0x00, 0x00, 0x07, 0xd0, 0x8F]
# Range 2: 0 to 5000 ppm
RANGO2 = [0xFF, 0x01, 0x99, 0x00, 0x00, 0x00, 0x13, 0x88, 0xCB]
# Range 3: 0 to 10000 ppm
RANGO3 = [0xFF, 0x01, 0x99, 0x00, 0x00, 0x00, 0x27, 0x10, 0x2F]
# Zero-point calibration
CALIBRAR = [0xFF, 0x01, 0x87, 0x00, 0x00, 0x00, 0x00, 0x00, 0x78]
# Enable automatic baseline calibration
ACT_AUTO_CALIBRACION = [0xFF, 0x01, 0x79, 0xA0, 0x00, 0x00, 0x00, 0x00, 0xE6]
# Disable automatic baseline calibration
DES_AUTO_CALIBRACION = [0xFF, 0x01, 0x79, 0x00, 0x00, 0x00, 0x00, 0x00, 0x86]
# ppm values mapped to the empty/full ends of the bottom indicator bar
MAXIMO_BARRA = 800
MINIMO_BARRA = 400
# Display brightness; set by ajustar_brillo() before first use
BRILLO = None
# BRILLO_BAJO = 0.1 # at night
# BRILLO_ALTO = 0.3 # during the day
# Configure your longitude, latitude and elevation here.
# NOTE(review): 40.285408 looks like a latitude and -3.788855 like a
# longitude, so the two names appear swapped; the values still land on the
# right pyephem fields in ajustar_brillo() — only the names mislead. Confirm.
LONGITUD = '40.285408'
LATITUD = '-3.788855'
ALTURA = 660
# Serial connection configured per the sensor manufacturer's datasheet
sensor = serial.Serial(
    port = '/dev/serial0',
    baudrate = 9600,
    parity = serial.PARITY_NONE,
    stopbits = serial.STOPBITS_ONE,
    bytesize = serial.EIGHTBITS,
    timeout = 1)
# The display is mounted upside down in the current setup
scrollphathd.rotate(180)
# Esta función imprime el valor en la pantalla
def imprime_scrollphat(dato):
    """Draw the CO2 reading *dato* (ppm) and a fill bar on the Scroll pHAT HD."""
    global BRILLO
    global MAXIMO_BARRA
    global MINIMO_BARRA
    # Always right-align the number, whether it has 3 or 4 digits.
    if dato >= 1000:
        x = 1
    else:
        x = 5
    scrollphathd.clear()
    # Only show the bar during the day, i.e. depending on the brightness value
    # (at night, brightness 0.1, the digits are drawn one row lower).
    if BRILLO == 0.1:
        scrollphathd.write_string(str(dato), x = x, y = 1, font = font3x5, brightness = BRILLO)
    else:
        scrollphathd.write_string(str(dato), x = x, y = 0, font = font3x5, brightness = BRILLO)
    # The next four lines draw a bottom indicator: 0% at 400 ppm and 100% at
    # MAXIMO_BARRA ppm, plus tick marks at columns 0, 8 and 16.
    scrollphathd.fill(BRILLO - 0.1 if BRILLO > 0.1 else BRILLO, 0, 6, int((dato - 400) / ((MAXIMO_BARRA - MINIMO_BARRA) / 17)), 1)
    scrollphathd.fill(BRILLO - 0.1 if BRILLO > 0.1 else BRILLO, 0, 5, 1, 2)
    scrollphathd.fill(BRILLO - 0.1 if BRILLO > 0.1 else BRILLO, 8, 5, 1, 2)
    scrollphathd.fill(BRILLO - 0.1 if BRILLO > 0.1 else BRILLO, 16, 5, 1, 2)
    scrollphathd.show()
# Esta función lee el valor de CO2 y lo devuelve
def obten_co2():
    """Request and return the CO2 concentration (ppm) from the MH-Z14A sensor.

    Raises:
        IOError: if the sensor does not answer with the full 9-byte frame.
    """
    # Send the "read CO2 concentration" command frame.
    sensor.write(bytearray(PETICION))
    respuesta = sensor.read(9)
    # The serial port has a 1 s timeout, so a short (or empty) read is
    # possible; fail with a clear error instead of an obscure IndexError.
    if len(respuesta) != 9:
        raise IOError('incomplete response from CO2 sensor: %d bytes' % len(respuesta))
    # The concentration is encoded in byte 2 (high byte) and byte 3 (low byte).
    return (respuesta[2] << 8) | respuesta[3]
# Esta funcion usa la librería ephem para calcular si es de día en función de los datos de longitud y latitud y ajusta la variable BRILLO
def ajustar_brillo():
    """Set the global BRILLO from the sun's altitude at the configured place.

    Uses pyephem to compute whether it is currently day (BRILLO = 0.3) or
    night (BRILLO = 0.1); with unconfigured coordinates, BRILLO = 0.2.
    """
    global LONGITUD
    global LATITUD
    global ALTURA
    global BRILLO
    # Only do the computation if the user configured LON, LAT and ALT...
    # (note: the globals are non-empty strings here, so this test is
    # always true unless someone sets them to 0 explicitly)
    if LONGITUD != 0 and LATITUD != 0 and ALTURA != 0:
        sol = ephem.Sun()
        observador = ephem.Observer()
        # ↓ Define your coordinates here ↓
        # NOTE(review): LONGITUD feeds .lat and LATITUD feeds .lon — the
        # variable names look swapped but the values match the fields.
        observador.lat, observador.lon, observador.elevation = LONGITUD, LATITUD, ALTURA
        # ↓ Set the time (UTC) here ↓
        observador.date = dt.utcnow()
        sol.compute(observador)
        # altitud_sol = sol.alt
        # print(altitud_sol*180/math.pi)
        # -16.8798870431°
        # Sun altitude in degrees; positive means the sun is above the horizon.
        angulo = (sol.alt * 180 / math.pi)
        if angulo > 0: # daytime
            BRILLO = 0.3
        else: # nighttime
            BRILLO = 0.1
    # ...otherwise fall back to a middle brightness of 0.2
    else:
        BRILLO = 0.2
hora_comprobacion_luz = dt.now()
ajustar_brillo()
# Show the word HEAT on the display during the three-minute warm-up
# (there is no suitable four-letter Spanish word).
scrollphathd.write_string("HEAT", x = 1, y = 1, font = font3x5, brightness = BRILLO)
scrollphathd.show()
# Select the 0 - 2000 ppm range: the lower the range, the better the precision.
sensor.write(bytearray(RANGO1))
# Experience shows the sensor's first reading is bogus; read it and discard it.
obten_co2()
# Wait the three minutes of warm-up specified by the manufacturer,
# showing a countdown (Ctrl+C skips the wait).
print("\nndirCO2.py v1.0 - Josema - 30 de marzo de 2021 - <EMAIL>\n")
print("Esperando al calentamiento del sensor (Control + C para saltar)...")
try:
    for segundos in range(180, 0, -1):
        # NOTE(review): the conditional binds to the whole concatenation, so
        # the final second prints " segundo. " without the number — confirm
        # whether that is intended.
        print(" " + str(segundos) + " segundos. " if segundos > 1 else " segundo. ", end="\r")
        time.sleep(1)
    print("Iniciando...")
except KeyboardInterrupt:
    pass
# sensor.write(bytearray(RANGO1))
# Read again to try to dodge the spurious 3420 value returned at start-up.
obten_co2()
# One more reading to put the first value on the display.
valor_co2_anterior = obten_co2()
imprime_scrollphat(valor_co2_anterior)
# Main loop; runs until Ctrl+C.
try:
    while True:
        # Pause one second on every iteration.
        time.sleep(1)
        valor_co2 = obten_co2()
        # Pick the direction of the counting animation.
        if valor_co2 > valor_co2_anterior:
            direccion_for = 1
        elif valor_co2 < valor_co2_anterior:
            direccion_for = -1
        else:
            imprime_scrollphat(valor_co2)
            continue
        # Animate the count from the previous value to the new one.
        for digito in range(valor_co2_anterior, valor_co2, direccion_for):
            imprime_scrollphat(digito)
            # Pause 300 ms per step only for jumps of 15 or less; bigger
            # jumps run unpaused so the animation is not tedious.
            if abs(valor_co2_anterior - valor_co2) <= 15:
                time.sleep(0.3)
        valor_co2_anterior = valor_co2
        # Once a minute, re-check whether it is day or night.
        if dt.now() >= (hora_comprobacion_luz + timedelta(minutes=1)):
            # print("Sólo entro aquí cada minuto")
            ajustar_brillo()
            hora_comprobacion_luz = dt.now()
except KeyboardInterrupt:
scrollphathd.clear() | StarcoderdataPython |
6681403 | #!/usr/bin/env python2
import struct
import os
import sys
from binascii import hexlify
# Usage: print_ticket_keys.py decTitleKeys.bin — dump title IDs and their
# decrypted title keys as hex, one "titleId: key" pair per line.
if len(sys.argv) < 2:
    print "Usage: print_ticket_keys.py decTitleKeys.bin"
    sys.exit(0)
if not os.path.isfile(sys.argv[1]):
    print "Input file '%s' doesn't exist." % sys.argv[1]
    raise SystemExit(0)
# File layout: 4-byte little-endian entry count, 12 header bytes, then per
# entry 8 bytes (skipped — presumably reserved/padding; format assumption),
# an 8-byte title ID and a 16-byte decrypted title key.
with open(sys.argv[1], 'rb') as fh:
    nEntries = struct.unpack('<I', fh.read(4))[0]
    fh.seek(12, os.SEEK_CUR)
    for i in xrange(nEntries):
        fh.seek(8, os.SEEK_CUR)
        titleId = fh.read(8)
        decryptedTitleKey = fh.read(16)
        print '%s: %s' % (hexlify(titleId), hexlify(decryptedTitleKey))
| StarcoderdataPython |
6500895 | <gh_stars>1-10
# =========================================
# IMPORTS
# --------------------------------------
import rootpath
rootpath.append()
# =========================================
# EXPORTS
# --------------------------------------
from totalrecall.profilers import *
| StarcoderdataPython |
4900612 | <reponame>player1537-forks/spack<filename>var/spack/repos/builtin/packages/nettle/package.py
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Nettle(AutotoolsPackage, GNUMirrorPackage):
    """The Nettle package contains the low-level cryptographic library
    that is designed to fit easily in many contexts."""
    homepage = "https://www.lysator.liu.se/~nisse/nettle/"
    gnu_mirror_path = "nettle/nettle-3.3.tar.gz"
    # Known releases with the sha256 of their GNU-mirror tarballs.
    version('3.4.1', sha256='f941cf1535cd5d1819be5ccae5babef01f6db611f9b5a777bae9c7604b8a92ad')
    version('3.4', sha256='ae7a42df026550b85daca8389b6a60ba6313b0567f374392e54918588a411e94')
    version('3.3', sha256='46942627d5d0ca11720fec18d81fc38f7ef837ea4197c1f630e71ce0d470b11e')
    version('3.2', sha256='ea4283def236413edab5a4cf9cf32adf540c8df1b9b67641cfc2302fca849d97')
    version('2.7.1', sha256='bc71ebd43435537d767799e414fce88e521b7278d48c860651216e1fc6555b40')
    version('2.7', sha256='c294ea133c05382cc2effb1734d49f4abeb1ad8515543a333de49a11422cd4d6')
    # GMP is a runtime dependency; m4 is only needed at build time.
    depends_on('gmp')
    depends_on('m4', type='build')
    def configure_args(self):
        # Nettle needs a C99 compiler; pass the compiler-specific C99 flag.
        return ['CFLAGS={0}'.format(self.compiler.c99_flag)]
| StarcoderdataPython |
9752034 | <filename>2021/solutions/day3.py<gh_stars>0
# Using integer division messed up my solution for part 2 >:(
def read_file(test = True):
    """Return the puzzle input as a list of whitespace-stripped lines.

    When *test* is True the sample input is read, otherwise the real one.
    """
    filename = '../tests/day3.txt' if test else '../input/day3.txt'
    with open(filename) as file:
        return [line.strip() for line in file]
def puzzle1(test = True):
    """Day 3 part 1: print gamma * epsilon.

    gamma's bits are the most common bit per column (ties -> '1'),
    epsilon's are the complement.
    """
    temp = read_file(test)
    # Count of '1' bits per column.
    counts = [0]*len(temp[0])
    for line in temp:
        for i, char in enumerate(line):
            if char == '1':
                counts[i] += 1
    gamma, epsilon = list(), list()
    for num in counts:
        # Use true division here: the original `len(temp) // 2` rounds down,
        # which wrongly treats a minority of '1's as the majority whenever
        # the number of lines is odd (e.g. 3 ones out of 7 lines).
        if num >= len(temp) / 2:
            gamma += ['1']
            epsilon += ['0']
        else:
            epsilon += ['1']
            gamma += ['0']
    gamma = int(''.join(gamma), 2)
    epsilon = int(''.join(epsilon), 2)
    print(gamma*epsilon)
def _bit_criteria_filter(candidates, keep_most_common):
    """Filter *candidates* column by column and return the sole survivor.

    With keep_most_common=True, lines carrying the most common bit in the
    current position are kept (ties -> '1'); with False, the least common
    bit is kept (ties -> '0'). Matches the original duplicated loops.
    """
    pos = 0
    while len(candidates) > 1:
        ones = sum(1 for line in candidates if line[pos] == '1')
        majority = '1' if ones >= len(candidates) / 2 else '0'
        if keep_most_common:
            wanted = majority
        else:
            wanted = '0' if majority == '1' else '1'
        candidates = [x for x in candidates if x[pos] == wanted]
        pos += 1
    return candidates[0]
def puzzle2(test = True):
    """Day 3 part 2: print oxygen generator rating * CO2 scrubber rating.

    The two near-identical filtering loops of the original are collapsed
    into the shared helper above; behavior is unchanged.
    """
    oxygen = int(_bit_criteria_filter(read_file(test), True), 2)
    c02 = int(_bit_criteria_filter(read_file(test), False), 2)
    print(oxygen*c02)
puzzle1(False)
puzzle2(False) | StarcoderdataPython |
9714931 | <gh_stars>100-1000
"""Test SegmentationMetricsByPixels and SegmentationMetricsByInstances classes.
Also act as tests for ClassificationMetrics, since it's identical to
SegmentationMetricsByPixels.
Structurally, file consists of four classes, which respectively check:
- basic assembly process (shapes compatibility, confusion matrix corectness);
- evaluated result shape of SegmemtationMetricsByPixels for all metrics;
- similarly, evaluated result contents;
- so-called "subsampling" functions of SegmentationMetricsByInstances.
Test data is pre-defined, it's shape and contents were chosen for reasons
of balance between visual simplicity and test coverage diversity.
"""
# pylint: disable=import-error, no-name-in-module, invalid-name, protected-access
import numpy as np
import pytest
from batchflow.models.metrics import SegmentationMetricsByPixels, SegmentationMetricsByInstances
# Accuracy is not included because it can't process 'multiclass' parameter
# and therefore is being tested individually.
METRICS_LIST = ['tpr', 'fpr', 'fnr', 'tnr', 'prv', 'ppv', 'fdr', 'for', 'npv', 'plr', 'nlr', 'dor', 'f1s', 'jac']
BATCH_SIZE = 4
IMAGE_SIZE = 2
NUM_CLASSES = 3
# Set targets.
TARGETS = np.array([[[0, 1],
[2, 2]],
[[0, 0],
[1, 1]],
[[0, 1],
[0, 2]],
[[0, 0],
[1, 1]]
])
# Set predictions as 'labels'.
LABELS = np.array([[[0, 1],
[1, 0]],
[[2, 0],
[1, 1]],
[[0, 1],
[2, 1]],
[[0, 0],
[0, 1]]
])
# Onehots are basically like probas, just with all 0 and a single 1.
PROBA = np.eye(NUM_CLASSES)[LABELS]
# Logit function gives ±infs on degenerate case of 0s and 1s but works fine for sigmoid function.
LOGITS = np.where(PROBA > 0.5, np.inf, -np.inf)
"""First param stands for predictions variable, second — for predictions type, third — for axis with class info.
Transposed predictions correspond to 'channels_first' data format."""
PREDICTIONS = [(LABELS, 'labels', None),
(PROBA, 'proba', 3),
(LOGITS, 'logits', 3),
(np.transpose(PROBA, (3, 0, 1, 2)), 'proba', 0),
(np.transpose(LOGITS, (3, 0, 1, 2)), 'logits', 0)]
BAD_PREDICTIONS = [(LABELS[0], 'labels', None), # predictions ndim is less then targets' for labels
(PROBA, 'proba', None), # axis is None for multiclass proba
(LOGITS, 'logits', None)] # axis is None for multiclass logits
class TestAssembly:
    """Check metrics creation process."""
    @pytest.mark.parametrize('SegmentationMetrics', [SegmentationMetricsByPixels, SegmentationMetricsByInstances])
    @pytest.mark.parametrize('predictions, fmt, axis', BAD_PREDICTIONS)
    def test_incompatibility_processing(self, SegmentationMetrics, predictions, fmt, axis):
        """Create metrics class with inconsistent targets and predictions
        (different ndim, no axis when it's required), expecting ValueError.
        Parameters
        ----------
        SegmentationMetrics: SegmentationsMetricsByPixels or
        SegmentationsMetricsByInstances
            Metrics class
        predictions : np.array
            Variable name containing predictions' array of desired format
        fmt : string
            Denotes predictions format
        axis : None or int
            A class axis
        """
        with pytest.raises(ValueError):
            SegmentationMetrics(TARGETS, predictions, fmt, NUM_CLASSES, axis)
    # Pre-computed expected confusion matrices: shape (batch, 3, 3) for the
    # by-pixels metrics and (batch, 2, 2, 2) for the by-instances metrics.
    params = [(SegmentationMetricsByPixels, np.array([[[1, 0, 1],
                                                       [0, 1, 1],
                                                       [0, 0, 0]],
                                                      [[1, 0, 0],
                                                       [0, 2, 0],
                                                       [1, 0, 0]],
                                                      [[1, 0, 0],
                                                       [0, 1, 1],
                                                       [1, 0, 0]],
                                                      [[2, 1, 0],
                                                       [0, 1, 0],
                                                       [0, 0, 0]]]),
              ),
              (SegmentationMetricsByInstances, np.array([[[[0, 0],
                                                           [1, 1]],
                                                          [[0, 1],
                                                           [0, 0]]],
                                                         [[[0, 0],
                                                           [0, 1]],
                                                          [[0, 0],
                                                           [1, 0]]],
                                                         [[[0, 0],
                                                           [0, 1]],
                                                          [[0, 1],
                                                           [1, 0]]],
                                                         [[[0, 0],
                                                           [0, 1]],
                                                          [[0, 0],
                                                           [0, 0]]],
                                                         ]))]
    @pytest.mark.parametrize('SegmentationMetrics, exp_matrix', params)
    @pytest.mark.parametrize('predictions, fmt, axis', PREDICTIONS)
    def test_confusion_matrix(self, SegmentationMetrics, exp_matrix, predictions, fmt, axis):
        """Compare contents of actual confusion matrix with expected ones
        for metrics class assembled with given params.
        Parameters
        ----------
        SegmentationMetrics: SegmentationsMetricsByPixels or
        SegmentationsMetricsByInstances
            Metrics class
        exp_matrix: np.array
            Expected confusion matrix
        predictions : np.array
            Variable name containing predictions' array of desired format
        fmt : string
            Denotes predictions format
        axis : None or int
            A class axis
        """
        metric = SegmentationMetrics(TARGETS, predictions, fmt, NUM_CLASSES, axis)
        res_matrix = metric._confusion_matrix
        assert np.array_equal(res_matrix, exp_matrix)
class TestShape:
    """Check the shape of evaluated metrics return value for various parameters.
    There is a following pattern in both tests:
    0. Each function is preceded by data for it's parametrization.
    1. Parametrizing decorators are applied.
    2. Instance of SegmentationMetricsByPixels is being created.
    3. Metric is being evaluated with given parameters.
    4. It's result's shape is being compared with expected one.
    """
    # First param stands for batch aggregation, second — for multiclass one, third represents expected output shape.
    # Callables take skip_bg, since skipping the background drops one class.
    params = [(None, None, lambda l: (BATCH_SIZE, NUM_CLASSES - l)),
              (None, 'micro', (BATCH_SIZE,)),
              (None, 'macro', (BATCH_SIZE,)),
              ('mean', None, lambda l: (NUM_CLASSES - l,)),
              ('mean', 'micro', None),
              ('mean', 'macro', None)]
    @pytest.mark.parametrize('metric_name', METRICS_LIST)
    @pytest.mark.parametrize('predictions, fmt, axis', PREDICTIONS)
    @pytest.mark.parametrize('batch_agg, multi_agg, exp_shape', params)
    @pytest.mark.parametrize('skip_bg', [False, True])
    def test_shape(self, metric_name, predictions, fmt, axis, batch_agg, multi_agg, exp_shape, skip_bg):
        """Compare expected return value shape with actual return value shape of
        metric evaluation with given params for all metrics from METRICS_LIST.
        Parameters
        ----------
        predictions : np.array
            Variable name containing predictions' array of desired format
        fmt : string
            Denotes predictions format
        axis : None or int
            A class axis
        batch_agg : string
            Cross-batch aggregation type
        multi_agg : string
            Multiclass agregation type
        exp_shape : None or tuple
            Expected return value shape
        skip_bg : False or True
            If background class should be excluded from metrics evaluation
        """
        if callable(exp_shape):
            exp_shape = exp_shape(skip_bg)
        metric = SegmentationMetricsByPixels(targets=TARGETS, predictions=predictions, fmt=fmt,
                                             num_classes=NUM_CLASSES, axis=axis, skip_bg=skip_bg)
        res = metric.evaluate(metrics=metric_name, agg=batch_agg, multiclass=multi_agg)
        # Fully aggregated results are scalars and thus have no shape (None).
        res_shape = res.shape if isinstance(res, np.ndarray) else None
        assert res_shape == exp_shape
    @pytest.mark.parametrize('predictions, fmt, axis', PREDICTIONS)
    @pytest.mark.parametrize('batch_agg, exp_shape', [(None, (BATCH_SIZE,)), ('mean', None)])
    def test_shape_accuracy(self, predictions, fmt, axis, batch_agg, exp_shape):
        """Compare expected return value shape with actual return value shape of
        accuracy metric evaluation with given params.
        Parameters
        ----------
        predictions : np.array
            Variable name containing predictions' array of desired format
        fmt : string
            Denotes predictions format
        axis : None or int
            A class axis
        batch_agg : string
            Cross-batch aggregation type
        exp_shape : None or tuple
            Expected return value shape
        """
        metric = SegmentationMetricsByPixels(TARGETS, predictions, fmt, NUM_CLASSES, axis)
        res = metric.evaluate(metrics='accuracy', agg=batch_agg)
        res_shape = res.shape if isinstance(res, np.ndarray) else None
        assert res_shape == exp_shape
class TestResult:
    """Check evaluated metrics return value for various parameters.
    There is a following pattern in both tests:
    0. Each function is preceded by data for it's parametrization.
    1. Parametrizing decorators are applied.
    2. Instance of SegmentationMetricsByPixels is being created.
    3. Metric is being evaluated with given parameters.
    4. It's result is being compared with expected one.
    """
    # First param stands for batch aggregation type, second — for multiclass one,
    # third represents manually pre-calculated expected output contents for each type of metrics.
    # np.inf entries are the expected results of divisions by zero (e.g. plr/dor
    # with zero false positives/negatives).
    params = [(None, None, {'tpr' : np.array([1.00, 1.00, 0.00, 0.50, 1.00, 1.00, 0.50, 1.00, 0.00, 1.00, 0.50, 1.00]),
                            'fpr' : np.array([0.33, 0.33, 0.00, 0.00, 0.00, 0.25, 0.00, 0.33, 0.33, 0.50, 0.00, 0.00]),
                            'tnr' : np.array([0.66, 0.66, 1.00, 1.00, 1.00, 0.75, 1.00, 0.66, 0.66, 0.50, 1.00, 1.00]),
                            'fnr' : np.array([0.00, 0.00, 1.00, 0.50, 0.00, 0.00, 0.50, 0.00, 1.00, 0.00, 0.50, 0.00]),
                            'prv' : np.array([0.25, 0.25, 0.50, 0.50, 0.50, 0.00, 0.50, 0.25, 0.25, 0.50, 0.50, 0.00]),
                            'ppv' : np.array([0.50, 0.50, 1.00, 1.00, 1.00, 0.00, 1.00, 0.50, 0.00, 0.66, 1.00, 1.00]),
                            'fdr' : np.array([0.50, 0.50, 0.00, 0.00, 0.00, 1.00, 0.00, 0.50, 1.00, 0.33, 0.00, 0.00]),
                            'for' : np.array([0.00, 0.00, 0.50, 0.33, 0.00, 0.00, 0.33, 0.00, 0.33, 0.00, 0.33, 0.00]),
                            'npv' : np.array([1.00, 1.00, 0.50, 0.66, 1.00, 1.00, 0.66, 1.00, 0.66, 1.00, 0.66, 1.00]),
                            'plr' : np.array([3.00, 3.00, 0.00, np.inf, np.inf, 4.00,
                                              np.inf, 3.00, 0.00, 2.00, np.inf, np.inf]),
                            'nlr' : np.array([0.00, 0.00, 1.00, 0.50, 0.00, 0.00, 0.50, 0.00, 1.50, 0.00, 0.50, 0.00]),
                            'dor' : np.array([np.inf, np.inf, 0.00, np.inf, np.inf, np.inf,
                                              np.inf, np.inf, 0, np.inf, np.inf, np.inf]),
                            'f1s' : np.array([0.66, 0.66, 0.00, 0.66, 1.00, 0.00,
                                              0.66, 0.66, 0.00, 0.80, 0.66, np.inf]),
                            'jac' : np.array([0.50, 0.50, 0.00, 0.50, 1.00, 0.00,
                                              0.50, 0.50, 0.00, 0.66, 0.50, np.inf])}),
              (None, 'micro', {'tpr' : np.array([0.50, 0.75, 0.50, 0.75]),
                               'fpr' : np.array([0.25, 0.12, 0.25, 0.12]),
                               'tnr' : np.array([0.75, 0.87, 0.75, 0.88]),
                               'fnr' : np.array([0.50, 0.25, 0.50, 0.25]),
                               'prv' : np.array([0.33, 0.33, 0.33, 0.33]),
                               'ppv' : np.array([0.50, 0.75, 0.50, 0.75]),
                               'fdr' : np.array([0.50, 0.25, 0.50, 0.25]),
                               'for' : np.array([0.25, 0.12, 0.25, 0.12]),
                               'npv' : np.array([0.75, 0.87, 0.75, 0.88]),
                               'plr' : np.array([3.00, 10.00, 2.25, 5.00]),
                               'nlr' : np.array([0.42, 0.18, 0.64, 0.20]),
                               'dor' : np.array([6.00, np.inf, np.inf, np.inf]),
                               'f1s' : np.array([0.50, 0.75, 0.50, 0.75]),
                               'jac' : np.array([0.33, 0.60, 0.33, 0.60])}),
              (None, 'macro', {'tpr' : np.array([0.66, 0.83, 0.5, 0.83]),
                               'fpr' : np.array([0.22, 0.08, 0.22, 0.16]),
                               'tnr' : np.array([0.77, 0.91, 0.78, 0.84]),
                               'fnr' : np.array([0.33, 0.16, 0.50, 0.17]),
                               'prv' : np.array([0.33, 0.33, 0.33, 0.33]),
                               'ppv' : np.array([0.66, 0.66, 0.50, 0.88]),
                               'fdr' : np.array([0.33, 0.33, 0.50, 0.11]),
                               'for' : np.array([0.16, 0.11, 0.22, 0.11]),
                               'npv' : np.array([0.83, 0.88, 0.77, 0.88]),
                               'plr' : np.array([2.00, 4.00, 1.50, 2.00]),
                               'nlr' : np.array([0.33, 0.16, 0.66, 0.16]),
                               'dor' : np.array([0.00, np.inf, 0.00, np.inf]),
                               'f1s' : np.array([0.58, 0.71, 0.50, 0.79]),
                               'jac' : np.array([0.4, 0.55, 0.33, 0.65])}),
              ('mean', None, {'tpr' : np.array([0.75, 0.87, 0.50]),
                              'fpr' : np.array([0.21, 0.16, 0.14]),
                              'tnr' : np.array([0.79, 0.83, 0.85]),
                              'fnr' : np.array([0.25, 0.12, 0.50]),
                              'prv' : np.array([0.43, 0.37, 0.18]),
                              'ppv' : np.array([0.79, 0.75, 0.50]),
                              'fdr' : np.array([0.20, 0.25, 0.50]),
                              'for' : np.array([0.16, 0.08, 0.20]),
                              'npv' : np.array([0.83, 0.91, 0.79]),
                              'plr' : np.array([2.50, 3.00, 1.33]),
                              'nlr' : np.array([0.25, 0.12, 0.62]),
                              'dor' : np.array([np.inf, np.inf, 0.00]),
                              'f1s' : np.array([0.70, 0.74, 0.00]),
                              'jac' : np.array([0.54, 0.625, 0.00])}),
              ('mean', 'micro', {'tpr' : np.array([0.62]),
                                 'fpr' : np.array([0.18]),
                                 'tnr' : np.array([0.81]),
                                 'fnr' : np.array([0.37]),
                                 'prv' : np.array([0.33]),
                                 'ppv' : np.array([0.62]),
                                 'fdr' : np.array([0.37]),
                                 'for' : np.array([0.18]),
                                 'npv' : np.array([0.81]),
                                 'plr' : np.array([5.06]),
                                 'nlr' : np.array([0.36]),
                                 'dor' : np.array([6.00]),
                                 'f1s' : np.array([0.62]),
                                 'jac' : np.array([0.46])}),
              ('mean', 'macro', {'tpr' : np.array([0.70]),
                                 'fpr' : np.array([0.17]),
                                 'tnr' : np.array([0.82]),
                                 'fnr' : np.array([0.29]),
                                 'prv' : np.array([0.33]),
                                 'ppv' : np.array([0.68]),
                                 'fdr' : np.array([0.31]),
                                 'for' : np.array([0.15]),
                                 'npv' : np.array([0.84]),
                                 'plr' : np.array([2.37]),
                                 'nlr' : np.array([0.33]),
                                 'dor' : np.array([0.00]),
                                 'f1s' : np.array([0.64]),
                                 'jac' : np.array([0.48])})
             ]
    @pytest.mark.parametrize('predictions, fmt, axis', PREDICTIONS)
    @pytest.mark.parametrize('batch_agg, multi_agg, exp_dict', params)
    def test_result(self, predictions, fmt, axis, batch_agg, multi_agg, exp_dict):
        """Compare expected evaluated metrics return value with actual one
        with given params for all metrics from METRICS_DICT.
        Parameters
        ----------
        predictions : np.array
            Variable name containing predictions' array of desired format
        fmt : string
            Denotes predictions format
        axis : None or int
            A class axis
        batch_agg : string
            Cross-batch aggregation type
        multi_agg : string
            Multiclass agregation type
        exp_dict : dict
            Keys are metric's aliases and values are expected contents
            of their evaluation results with given aggregation params
        """
        metric = SegmentationMetricsByPixels(TARGETS, predictions, fmt, NUM_CLASSES, axis)
        for metric_name, exp in exp_dict.items():
            res = metric.evaluate(metrics=metric_name, agg=batch_agg, multiclass=multi_agg)
            # Normalize to a flat iterable so scalars and arrays compare alike.
            res = res.reshape(-1) if isinstance(res, np.ndarray) else [res]
            assert np.allclose(res, exp, atol=1e-02, rtol=0), 'failed on metric {}'.format(metric_name)
    @pytest.mark.parametrize('predictions, fmt, axis', PREDICTIONS)
    @pytest.mark.parametrize('batch_agg, exp', [(None, np.array([0.50, 0.75, 0.50, 0.75])), ('mean', np.array([0.62]))])
    def test_result_accuracy(self, predictions, fmt, axis, batch_agg, exp):
        """Compare expected evaluated metrics return value actual one
        with given params for `accuracy` metrics.
        Parameters
        ----------
        predictions : np.array
            Variable name containing predictions' array of desired format
        fmt : string
            Denotes predictions format
        axis : None or int
            A class axis
        batch_agg : string
            Cross-batch aggregation type
        exp : np.array
            Expected `accuracy` evaluation result with given aggregation params
        """
        metric = SegmentationMetricsByPixels(TARGETS, predictions, fmt, NUM_CLASSES, axis)
        res = metric.evaluate(metrics='accuracy', agg=batch_agg)
        res = res.reshape(-1) if isinstance(res, np.ndarray) else [res]
        assert np.allclose(res, exp, atol=1e-02, rtol=0), 'failed on metric {}'.format('accuracy')
class TestSubsampling:
    """Check the correctness of confusion matrix subsampling functions result
    for SegmentationMetricsByInstances class (e.g. true_positive subsample,
    total_population subsample). Test functions here act as an equivalent of
    TestResult functions for SegmentationMetricsByInstances class, since it
    differs from SegmentationMetricsByPixels in redefined subsampling functions
    (and confusion matrix assembly process, which is checked in TestAssembly).
    """
    # Each pair: subsample name -> expected (batch, 2) contents, pre-computed
    # by hand from TARGETS/LABELS.
    params = [('true_positive', np.array([[1, 0],
                                          [1, 0],
                                          [1, 0],
                                          [1, 0]])),
              ('condition_positive', np.array([[1, 1],
                                               [1, 0],
                                               [1, 1],
                                               [1, 0]])),
              ('prediction_positive', np.array([[2, 0],
                                                [1, 1],
                                                [1, 1],
                                                [1, 0]])),
              ('total_population', np.array([[2, 1],
                                             [1, 1],
                                             [1, 2],
                                             [1, 0]]))]
    @pytest.mark.parametrize('subsample_name, exp_subsample', params)
    def test_subsampling(self, subsample_name, exp_subsample):
        """Compare expected subsample with actual one.
        Parameters
        ----------
        subsample_name: string
            Name of confusion matrix subsample
        exp_subsample: np.array
            Expected subsample of confusion matrix
        """
        metric = SegmentationMetricsByInstances(TARGETS, LABELS, 'labels', NUM_CLASSES)
        res_subsample = getattr(metric, subsample_name)()
        assert np.array_equal(res_subsample, exp_subsample)
    def test_subsampling_true_negative(self):
        """Check if subsampling true negative raises ValueError."""
        metric = SegmentationMetricsByInstances(TARGETS, LABELS, 'labels', NUM_CLASSES)
        with pytest.raises(ValueError):
            getattr(metric, 'true_negative')()
| StarcoderdataPython |
3585625 | <filename>schapke/cresi/data_prep/move_single_dir.py
# Generate road-speed training masks for the SpaceNet AOI_5 (Khartoum) data.
# NOTE(review): despite the .py filename this file is a shell invocation of the
# CRESI data_prep/speed_masks.py script, not Python source.
python data_prep/speed_masks.py \
    --geojson_dir /spacenet/dataset/train/AOI_5_Khartoum/geojson_roads_speed \
    --image_dir /spacenet/dataset/train/AOI_5_Khartoum/PS-RGB \
    --output_conversion_csv /wdata/out.csv \
    --output_mask_dir /wdata/train/masks
| StarcoderdataPython |
200910 | <filename>pytglib/api/types/rich_text_strikethrough.py
from ..utils import Object
class RichTextStrikethrough(Object):
    """
    A strikethrough rich text

    Attributes:
        ID (:obj:`str`): ``RichTextStrikethrough``

    Args:
        text (:class:`telegram.api.types.RichText`):
            Text

    Returns:
        RichText

    Raises:
        :class:`telegram.Error`
    """

    ID = "richTextStrikethrough"

    def __init__(self, text, **kwargs):
        # Wrapped rich-text content to be rendered with a strikethrough.
        self.text = text  # RichText

    @staticmethod
    def read(q: dict, *args) -> "RichTextStrikethrough":
        # Deserialize the nested rich text and wrap it in a new instance.
        return RichTextStrikethrough(Object.read(q.get('text')))
| StarcoderdataPython |
6653401 | <gh_stars>0
# Altere o script para que o usuário informe:
# - nome
# - montante inicial
# - tempo de investimento
# Então imprima uma mensagem contendo o nome e o resultado para os dois investimentos.
# Lembrete:
# - Poupança: rendimento de 0,469% ao mês
# - CDI: retorno de 105% do CDI, que rende 7,5% ao ano
def compound_interest(amount, interest_rate, time):
    """Return the future value of *amount* compounded at *interest_rate*
    per period over *time* periods."""
    growth_factor = (1 + interest_rate) ** time
    return amount * growth_factor
def calculate_return_poupanca(amount, time_year):
    """Poupanca savings account: 0.469% compound interest per month,
    applied over ``time_year`` years (12 periods per year)."""
    monthly_rate = 0.469 / 100
    months = time_year * 12
    return compound_interest(amount, monthly_rate, months)
def calculate_return_xp(amount, time_year):
    """XP investment: yields 105% of the CDI rate (7.5% per year),
    compounded yearly."""
    cdi_fraction = 105 / 100
    cdi_yearly_rate = 7.5 / 100
    return compound_interest(amount, cdi_fraction * cdi_yearly_rate, time_year)
def ask_name():
    """Prompt the user for their name and return it as a string."""
    return input('Qual é o seu nome? ')
def ask_amount():
    """Prompt for the initial amount to invest and return it as a float.

    Raises ValueError if the input is not a valid number.
    """
    return float(input('Quanto deseja investir? R$'))
def ask_time_year():
    """Prompt for the investment duration in whole years and return an int.

    Raises ValueError if the input is not a valid integer.
    """
    return int(input('Em quantos _anos_ deseja resgatar seu dinheiro? '))
def print_result(name, amount, time_year, roi_poupanca, roi_xp):
    """Print a personalised comparison of both investment returns.

    Emits a header line followed by one line per investment, with the
    return rounded to two decimal places.
    """
    print(f'{name}, o retorno de investir R${amount} durante {time_year} anos é:')
    print(f'- Poupança: R${roi_poupanca:.2f}')
    print(f'- XP: R${roi_xp:.2f}')
# Script entry point: collect user input, compute both investment
# returns and print the comparison.
name = ask_name()
amount = ask_amount()
time_year = ask_time_year()

roi_poupanca = calculate_return_poupanca(amount, time_year)
roi_xp = calculate_return_xp(amount, time_year)

print_result(name, amount, time_year, roi_poupanca, roi_xp)
| StarcoderdataPython |
3554441 | <gh_stars>10-100
import pydda
import pyart
import numpy as np
from netCDF4 import Dataset
from scipy.interpolate import interp1d
from datetime import datetime
def test_add_era_interim_field():
    """Check that make_constraint_from_era_interim adds a U field whose
    horizontal-mean vertical profile matches ERA-Interim U winds interpolated
    onto the radar grid heights (within 0.5 m/s)."""
    Grid0 = pyart.io.read_grid(pydda.tests.EXAMPLE_RADAR0)
    Grid0 = pydda.constraints.make_constraint_from_era_interim(
        Grid0, pydda.tests.sample_files.ERA_PATH,
        vel_field='corrected_velocity')
    grid_time = datetime.strptime(Grid0.time["units"],
                                  "seconds since %Y-%m-%dT%H:%M:%SZ")
    era_dataset = Dataset(pydda.tests.sample_files.ERA_PATH)
    z = era_dataset.variables["z"][:]
    u = era_dataset.variables["u"][:]
    lat = era_dataset.variables["latitude"][:]
    lon = era_dataset.variables["longitude"][:]
    base_time = datetime.strptime(era_dataset.variables["time"].units,
                                  "hours since %Y-%m-%d %H:%M:%S.%f")
    # NOTE(review): base_time - grid_time is a single timedelta, so argmin over
    # it is always 0; this looks like it was meant to scan the file's time
    # coordinate array for the closest step — confirm against the ERA sample.
    time_step = np.argmin(np.abs(base_time - grid_time))
    # Restrict ERA data to the lat/lon box covered by the radar grid.
    lat_inds = np.where(np.logical_and(
        lat >= Grid0.point_latitude["data"].min(),
        lat <= Grid0.point_latitude["data"].max()))
    lon_inds = np.where(np.logical_and(
        lon >= Grid0.point_longitude["data"].min(),
        lon <= Grid0.point_longitude["data"].max()))
    z = z[time_step, :, lat_inds[0], lon_inds[0]]
    u = u[time_step, :, lat_inds[0], lon_inds[0]]
    # Drop masked/invalid values and levels above 25 km before interpolating.
    nonans = np.logical_and(z < 25000., np.isfinite(u))
    z = z[nonans].flatten()
    u = u[nonans].flatten()

    # Interpolate era data onto u as a function of z
    u_interp = interp1d(z, u, kind='nearest')

    u_new_gridded = u_interp(
        np.asarray(Grid0.point_z["data"]+Grid0.radar_altitude["data"]))
    # Compare horizontal means level-by-level rather than point values.
    u_vertical = np.mean(u_new_gridded, axis=1).mean(axis=1)
    u_grid = np.mean(Grid0.fields["U_erainterim"]["data"], axis=1).mean(axis=1)
    np.testing.assert_allclose(u_grid, u_vertical, atol=0.5)
def test_era_initialization():
    """Check that the ERA-Interim wind initialization equals the U/V/W
    constraint fields added to the grid by make_constraint_from_era_interim."""
    Grid0 = pyart.io.read_grid(pydda.tests.EXAMPLE_RADAR0)
    Grid0 = pydda.constraints.make_constraint_from_era_interim(
        Grid0, pydda.tests.sample_files.ERA_PATH,
        vel_field='corrected_velocity')
    u_init, v_init, w_init = pydda.initialization.make_initialization_from_era_interim(
        Grid0, pydda.tests.sample_files.ERA_PATH,
        vel_field='corrected_velocity')
    # Both code paths read the same ERA file, so the arrays should agree
    # to within numerical tolerance.
    np.testing.assert_allclose(
        u_init, Grid0.fields["U_erainterim"]["data"], atol=1e-2)
    np.testing.assert_allclose(
        v_init, Grid0.fields["V_erainterim"]["data"], atol=1e-2)
    np.testing.assert_allclose(
        w_init, Grid0.fields["W_erainterim"]["data"], atol=1e-2)
| StarcoderdataPython |
1972886 | <reponame>mpc24-dataanalytics/web-scraping-challenge
#!/usr/bin/env python
# coding: utf-8
# In[18]:
# Dependencies
from bs4 import BeautifulSoup
from splinter import Browser
import pandas as pd
import time
from selenium import webdriver
import re
def scrape_info():
    """Scrape several Mars-related sites and return the results as a dict.

    Returns
    -------
    dict with keys:
        news_title / news_para : latest NASA Mars news headline and teaser
        featured_image_url     : full URL of the JPL featured space image
        mars_weather           : text of the latest Mars weather tweet
        facts                  : HTML table of Mars facts
        hemispheres            : list of {"title", "img_url"} dicts, one per
                                 Mars hemisphere

    Notes
    -----
    Requires a local Chrome/chromedriver installation and network access.
    """
    mars = {}
    # Reuse a single browser for every section; the original opened a new
    # Chrome instance per section and never closed any of them.
    browser = Browser('chrome')
    try:
        # --- NASA Mars News: latest headline and teaser paragraph ---
        browser.visit('https://mars.nasa.gov/news/')
        soup = BeautifulSoup(browser.html, 'html.parser')
        # Index 1: index 0 is the page-level title, not an article headline.
        news_title = soup.find_all('div', class_='content_title')
        mars["news_title"] = news_title[1].text
        mars["news_para"] = soup.find('div', class_='article_teaser_body').text

        # --- JPL Mars Space Images: featured image ---
        browser.visit('https://www.jpl.nasa.gov/spaceimages/?search=&category=Mars')
        time.sleep(2)  # let the page render before clicking
        browser.find_by_id('full_image').click()
        time.sleep(2)
        browser.find_link_by_partial_text('more info').click()
        soup = BeautifulSoup(browser.html, 'html.parser')
        link = soup.select_one('figure.lede a img').get('src')
        mars["featured_image_url"] = 'https://www.jpl.nasa.gov' + link

        # --- Mars Weather: latest weather report tweet ---
        browser.visit('https://twitter.com/marswxreport?lang=en')
        soup = BeautifulSoup(browser.html, 'html.parser')
        # Weather tweets mention the Martian day, e.g. "InSight sol 123 ...".
        text = re.compile(r'sol')
        mars["mars_weather"] = soup.find('span', text=text).text

        # --- Mars Facts: first table on the page, rendered back to HTML ---
        table = pd.read_html('https://space-facts.com/mars/')
        df = table[0]
        df.columns = ["Description", "Value"]
        mars['facts'] = df.to_html()

        # --- Mars Hemispheres: title and full-resolution image per hemisphere ---
        browser.visit('https://astrogeology.usgs.gov/search/results'
                      '?q=hemisphere+enhanced&k1=target&v1=Mars')
        soup = BeautifulSoup(browser.html, "html.parser")
        hemisphere_urls = []
        products = soup.find('div', class_='result-list')
        hemispheres = products.find_all('div', {'class': 'item'})
        for hemisphere in hemispheres:
            title = hemisphere.find("h3").text.replace("Enhanced", "")
            end_link = hemisphere.find("a")["href"]
            # Follow the detail page to find the downloadable image link.
            browser.visit("https://astrogeology.usgs.gov/" + end_link)
            detail_soup = BeautifulSoup(browser.html, "html.parser")
            downloads = detail_soup.find("div", class_="downloads")
            hemisphere_urls.append(
                {"title": title, "img_url": downloads.find("a")["href"]})
        # BUG FIX: the hemisphere data was previously scraped and then
        # discarded — it was never stored in the returned dict.
        mars["hemispheres"] = hemisphere_urls
    finally:
        browser.quit()
    return mars
if __name__ == "__main__":
# If running as script, print scraped data
print(scrape_info())
| StarcoderdataPython |
4829391 | <gh_stars>1-10
# -*- coding: utf-8 -*-
"""
This module handles desktop notifications and supports multiple backends, depending on
the platform.
"""
# system imports
import platform
from threading import RLock
import logging
import asyncio
from pathlib import Path
from typing import (
Type,
Union,
Optional,
Callable,
Coroutine,
List,
Any,
TypeVar,
Sequence,
)
# external imports
from packaging.version import Version
# local imports
from .base import (
Urgency,
Button,
ReplyField,
Notification,
DesktopNotifierBase,
PYTHON_ICON_PATH,
)
__all__ = [
"Notification",
"Button",
"ReplyField",
"Urgency",
"DesktopNotifier",
]
logger = logging.getLogger(__name__)
T = TypeVar("T")
default_event_loop_policy = asyncio.DefaultEventLoopPolicy()
def get_implementation() -> Type[DesktopNotifierBase]:
    """
    Return the backend class depending on the platform and version.

    Backends are imported lazily inside each branch so that only the module
    for the current platform is loaded.

    :returns: A desktop notification backend suitable for the current platform.
    """

    if platform.system() == "Darwin":

        from .macos_support import is_bundle, is_signed_bundle, macos_version

        # UNUserNotificationCenter exists from macOS 10.14; the legacy
        # NSUserNotificationCenter was removed in macOS 12.0.
        has_unusernotificationcenter = macos_version >= Version("10.14")
        has_nsusernotificationcenter = macos_version < Version("12.0")
        is_signed = is_signed_bundle()

        if has_unusernotificationcenter and is_signed:
            # Use modern UNUserNotificationCenter.
            from .macos import CocoaNotificationCenter

            return CocoaNotificationCenter

        elif has_nsusernotificationcenter and is_bundle():

            if has_unusernotificationcenter and not is_signed:
                logger.warning(
                    "Running outside of a signed Framework or bundle: "
                    "falling back to NSUserNotificationCenter"
                )
            else:
                logger.warning(
                    "Running on macOS 10.13 or earlier: "
                    "falling back to NSUserNotificationCenter"
                )

            # Use deprecated NSUserNotificationCenter.
            from .macos_legacy import CocoaNotificationCenterLegacy

            return CocoaNotificationCenterLegacy

        else:
            # Use dummy backend.
            logger.warning(
                "Notification Center can only be used "
                "from a signed Framework or app bundle"
            )
            from .dummy import DummyNotificationCenter

            return DummyNotificationCenter

    elif platform.system() == "Linux":
        from .dbus import DBusDesktopNotifier

        return DBusDesktopNotifier

    elif platform.system() == "Windows" and Version(platform.version()) >= Version(
        "10.0.10240"
    ):
        # Windows 10 build 10240 is the first release with WinRT toast support.
        from .winrt import WinRTDesktopNotifier

        return WinRTDesktopNotifier

    else:
        from .dummy import DummyNotificationCenter

        return DummyNotificationCenter
class DesktopNotifier:
    """Cross-platform desktop notification emitter

    Uses different backends depending on the platform version and available services.
    All implementations will dispatch notifications without an event loop but will
    require a running event loop to execute callbacks when the end user interacts with
    a notification. On Linux, an asyncio event loop is required. On macOS, a CFRunLoop
    *in the main thread* is required. Packages such as :mod:`rubicon.objc` can be used
    to integrate asyncio with a CFRunLoop.

    :param app_name: Name to identify the application in the notification center. On
        Linux, this should correspond to the application name in a desktop entry. On
        macOS, this argument is ignored and the app is identified by the bundle ID of
        the sending program (e.g., Python).
    :param app_icon: Default icon to use for notifications. This should be either a URI
        string, a :class:`pathlib.Path` path, or a name in a freedesktop.org-compliant
        icon theme. If None, the icon of the calling application will be used if it
        can be determined. On macOS, this argument is ignored and the app icon is
        identified by the bundle ID of the sending program (e.g., Python).
    :param notification_limit: Maximum number of notifications to keep in the system's
        notification center. This may be ignored by some implementations.
    """

    def __init__(
        self,
        app_name: str = "Python",
        app_icon: Union[Path, str, None] = PYTHON_ICON_PATH,
        notification_limit: Optional[int] = None,
    ) -> None:
        impl_cls = get_implementation()

        if isinstance(app_icon, Path):
            app_icon = app_icon.as_uri()

        # Lock serialises authorisation handshake and send/clear calls.
        self._lock = RLock()
        self._impl = impl_cls(app_name, app_icon, notification_limit)
        self._did_request_authorisation = False

        # Use our own event loop for the sync API so that we don't interfere with any
        # other asyncio event loops / threads, etc.
        self._loop = default_event_loop_policy.new_event_loop()

    def _run_coro_sync(self, coro: Coroutine[None, None, T]) -> T:
        """
        Runs the given coroutine and returns the result synchronously. This is used as
        a wrapper to conveniently convert the async API calls to synchronous ones.
        """
        if self._loop.is_running():
            # Loop is already driven by another thread: schedule there and block.
            future = asyncio.run_coroutine_threadsafe(coro, self._loop)
            res = future.result()
        else:
            res = self._loop.run_until_complete(coro)

        return res

    @property
    def app_name(self) -> str:
        """The application name"""
        return self._impl.app_name

    @app_name.setter
    def app_name(self, value: str) -> None:
        """Setter: app_name"""
        self._impl.app_name = value

    @property
    def app_icon(self) -> Optional[str]:
        """The application icon: a URI for a local file or an icon name."""
        return self._impl.app_icon

    @app_icon.setter
    def app_icon(self, value: Union[Path, str, None]) -> None:
        """Setter: app_icon"""
        if isinstance(value, Path):
            value = value.as_uri()
        self._impl.app_icon = value

    async def request_authorisation(self) -> bool:
        """
        Requests authorisation to send user notifications. This will be automatically
        called for you when sending a notification for the first time but it may be
        useful to call manually to request authorisation in advance.

        On some platforms such as macOS and iOS, a prompt will be shown to the user
        when this method is called for the first time. This method does nothing on
        platforms where user authorisation is not required.

        :returns: Whether authorisation has been granted.
        """
        with self._lock:
            self._did_request_authorisation = True
            return await self._impl.request_authorisation()

    async def has_authorisation(self) -> bool:
        """Returns whether we have authorisation to send notifications."""
        return await self._impl.has_authorisation()

    async def send(
        self,
        title: str,
        message: str,
        urgency: Urgency = Urgency.Normal,
        icon: Union[Path, str, None] = None,
        buttons: Sequence[Button] = (),
        reply_field: Optional[ReplyField] = None,
        on_clicked: Optional[Callable[[], Any]] = None,
        on_dismissed: Optional[Callable[[], Any]] = None,
        attachment: Union[Path, str, None] = None,
        sound: bool = False,
        thread: Optional[str] = None,
    ) -> Notification:
        """
        Sends a desktop notification.

        Some arguments may be ignored, depending on the backend.

        This method will always return a :class:`base.Notification` instance and will
        not raise an exception when scheduling the notification fails. If the
        notification was scheduled successfully, its ``identifier`` will be set to the
        platform's native notification identifier. Otherwise, the ``identifier`` will
        be ``None``.

        Note that even a successfully scheduled notification may not be displayed to
        the user, depending on their notification center settings (for instance if "do
        not disturb" is enabled on macOS).

        :param title: Notification title.
        :param message: Notification message.
        :param urgency: Notification level: low, normal or critical. This may be
            interpreted differently by some implementations, for instance causing the
            notification to remain visible for longer, or may be ignored.
        :param icon: URI string, :class:`pathlib.Path` or icon name to use for the
            notification, typically the app icon. This will replace the icon specified
            by :attr:`app_icon`. Will be ignored on macOS.
        :param buttons: A list of buttons with callbacks for the notification.
        :param reply_field: An optional reply field to show with the notification. Can
            be used for instance in chat apps.
        :param on_clicked: Callback to call when the notification is clicked. The
            callback will be called without any arguments. This is ignored by some
            implementations.
        :param on_dismissed: Callback to call when the notification is dismissed. The
            callback will be called without any arguments. This is ignored by some
            implementations.
        :param attachment: URI string or :class:`pathlib.Path` for an attachment to the
            notification such as an image, movie, or audio file. A preview of this
            attachment may be displayed together with the notification. Different
            platforms and Linux notification servers support different types of
            attachments. Please consult the platform support section of the
            documentation.
        :param sound: Whether to play a sound when the notification is shown. The
            platform's default sound will be used, where available.
        :param thread: An identifier to group related notifications together. This is
            ignored on Linux.

        :returns: The scheduled notification instance.
        """
        # Fall back to the app-wide icon; normalise Path arguments to URIs.
        if not icon:
            icon = self.app_icon
        elif isinstance(icon, Path):
            icon = icon.as_uri()

        if isinstance(attachment, Path):
            attachment = attachment.as_uri()

        notification = Notification(
            title,
            message,
            urgency,
            icon,
            buttons,
            reply_field,
            on_clicked,
            on_dismissed,
            attachment,
            sound,
            thread,
        )

        with self._lock:
            # Lazily request authorisation on the first send.
            if not self._did_request_authorisation:
                await self.request_authorisation()

            await self._impl.send(notification)

        return notification

    def send_sync(
        self,
        title: str,
        message: str,
        urgency: Urgency = Urgency.Normal,
        icon: Union[Path, str, None] = None,
        buttons: Sequence[Button] = (),
        reply_field: Optional[ReplyField] = None,
        on_clicked: Optional[Callable[[], Any]] = None,
        on_dismissed: Optional[Callable[[], Any]] = None,
        attachment: Union[Path, str, None] = None,
        sound: bool = False,
        thread: Optional[str] = None,
    ) -> Notification:
        """
        Synchronous call of :meth:`send`, for use without an asyncio event loop.

        :returns: The scheduled notification instance.
        """
        coro = self.send(
            title,
            message,
            urgency,
            icon,
            buttons,
            reply_field,
            on_clicked,
            on_dismissed,
            attachment,
            sound,
            thread,
        )
        return self._run_coro_sync(coro)

    @property
    def current_notifications(self) -> List[Notification]:
        """A list of all currently displayed notifications for this app"""
        return self._impl.current_notifications

    async def clear(self, notification: Notification) -> None:
        """
        Removes the given notification from the notification center.

        :param notification: Notification to clear.
        """
        with self._lock:
            await self._impl.clear(notification)

    async def clear_all(self) -> None:
        """
        Removes all currently displayed notifications for this app from the
        notification center.
        """
        with self._lock:
            await self._impl.clear_all()
| StarcoderdataPython |
9708746 | <reponame>Nelson-iitp/mdlog
#-----------------------------------------------------------------------------------------------------
# mdlog/md.py
#-----------------------------------------------------------------------------------------------------
import os.path, sys
from os import makedirs
#-----------------------------------------------------------------------------------------------------
class LOG:
    """ Markdown Logging
    ~> Log your outputs and results directly to mark-down format.
    ~> Generate on-the-fly reports, ready for presentation.
    """

    def __init__(self, log_dir, log_file, uri_title_quote=False):
        """
        log_dir         : directory to create new log file at
        log_file        : name of new log file, *AUTO ADDS '.md' EXTENSION*
        uri_title_quote : if True, uses "double-quotes" for title in links else uses (round-brackets)

        Note: By default, the escmode is False, does not escape any md-special chars
        """
        self.log_dir = log_dir
        self.log_file = log_file + ( '' if log_file.lower().endswith('.md') else '.md')
        self.log_path = os.path.join(self.log_dir, self.log_file)
        self.uri_title_start, self.uri_title_end = (('"', '"') if uri_title_quote else ('(', ')'))
        self.iomode = ''  # empty iomode means the file handle is closed
        self.escmode(False)  # manually set the escape mode after init if it's required

    # ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~
    """ Logger File Handles """
    # ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~

    def open(self, mode='w'):
        """ open log file handle ('w' truncates, 'a' appends); creates log_dir if needed """
        assert (mode=='a' or mode=='w')
        self.iomode = mode
        makedirs(self.log_dir, exist_ok=True)
        self.f = open(self.log_path, mode)

    def close(self):
        """ close log file handle """
        self.f.close()
        self.iomode=''
        del self.f

    def loc(self, file):
        """ returns relative loaction of a file wrt log file
            - this is useful to linking local files in markdown
            - 'uri' function will auto-convert to relative path if its 'loc' arg is True
        """
        return os.path.relpath( file , self.log_dir )

    def info(self, p=print):
        """ short info about file handle """
        p('[Logging ~ Mode:[{}] @File:[{}] @Path:[{}]'.format(self.iomode, self.log_file, self.log_path))

    # ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~
    """ IO and Escaping """
    # ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~

    def escmode(self, esc):
        """ enable or disable escaping, sets the 'set of escape charaters' in self.esc
            - if 'esc' evaluates to False, then escaping is disabled
            - otherwise, it should be a tuple, all chars in the 'esc' tuple are escaped """
        self.esc = esc
        self.do_esc = True if self.esc else False
        # self.write is re-bound so hot write paths skip the escape check entirely
        self.write = self.write_do_esc if self.do_esc else self.write_no_esc

    def escape(self, msg):
        """ escapes all instances of chars in self.esc tuple """
        m = str(msg)
        for esc in self.esc:
            m = m.replace(esc, '\\' + esc)
        return m

    def escaper(self, escT):
        """ returns a context-manager for temporary escaping special chars - see the ESCAPER class """
        return ESCAPER((self, escT))

    def write_no_esc(self, *msg):
        """ write msg without escaping """
        for m in msg:
            self.f.write(str(m))

    def write_do_esc(self, *msg):
        """ write msg with escaping - escapes all chars that are currently in self.esc tuple """
        emsg = map(self.escape, msg)
        for m in emsg:
            self.f.write(m)

    # ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~
    """ Markdown Elements """
    # ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~ ~
    # NOTE: use self.write(*msg) to put chars to file directly

    class DUAL_RDR:
        """ implements a fake-handle for dual output - mainly implements write method """
        def __init__(self, parent) -> None:
            self.parent = parent
        def write(self, *args):
            # Mirror every write to the log file and to the saved stdout.
            self.parent.f.write(*args)
            self.parent.xf.write(*args)  # temporary 'xf' which is sys.stdout

    # std-output-redirect
    def rdr_(self, as_code=True, dual=False):
        """ redirects std-out(console output) to log file
        Args:
            as_code:    if True, opens a code block before and after redirecting
            dual:       if True, prints the std-output on console as well
        """
        self.rdr_as_code = as_code
        self.c_() if self.rdr_as_code else None
        self.xf = sys.stdout  # keep the real stdout so _rdr can restore it
        sys.stdout = (LOG.DUAL_RDR(self) if dual else self.f)

    def _rdr(self):
        """ stop redirecting from std-output """
        sys.stdout = self.xf
        self._c(False) if self.rdr_as_code else None
        del self.rdr_as_code

    # NewLines
    def nl(self):
        """ [new-line] put newline to file """
        self.f.write('\n')
    def nl2(self):
        """ [new-line-2] put 2 newlines to file """
        self.f.write('\n\n')

    # Strings/Chars (Lines and Paragraphs)
    def ln(self, *msg, sep=' ', term=False):
        """ [line] put all items in input to file - seperated by sep, if term=True, ends the block """
        self._ln(*msg, sep=sep) if term else self.ln_(*msg, sep=sep)

    def ln_(self, *msg, sep=' '):
        """ [line] put all items in input to file - seperated by sep, dont end the block """
        for m in msg:
            self.write(m)
            self.f.write(sep)

    def _ln(self, *msg, sep=' '):
        """ [line] put all items in input to file - seperated by sep, and end the block """
        self.ln_(*msg, sep=sep)
        # NOTE(review): rewinds one char to drop the trailing separator —
        # assumes sep is a single character; longer seps leave residue. Confirm.
        self.f.seek( self.f.tell() - 1 )
        self.f.write('\n\n')

    # Heading
    HEADINGS = [' ', '# ', '## ', '### ', '#### ', '##### ', '###### ']
    def h(self, n, msg):
        """ insert heading of size 'n' : <h1> to <h6> """
        self.f.write(self.HEADINGS[n])
        self.write(msg)
        self.f.write('\n\n')

    # Horizontal Rule
    HR = '___'
    def hr(self):
        """ insert horizontal rule : <hr> """
        self.f.write(self.HR + '\n\n')

    # Block (Text and Quotes)
    PARAS, QUOTES = '', '>'
    def b_(self, quote=False):
        """ open text/quote block """
        self.f.write(self.QUOTES if quote else self.PARAS)
    def _b(self):
        """ close text/quote block """
        self.f.write('\n\n')
    #--------------------------------------------------------------------------
    def b(self, *msg, quote=False):
        """ write text/quote block """
        self.f.write(self.QUOTES if quote else self.PARAS)
        self.write(*msg)
        self.f.write('\n\n')

    # Code Block
    CODES = '```'
    def c_(self):
        """ open code block """
        self.f.write(self.CODES + '\n')
    def _c(self, put_nl=True):
        """ close code block, if put_nl is true, inserts a newline before closing block """
        self.f.write('\n') if put_nl else None
        self.f.write(self.CODES + '\n\n')
    #--------------------------------------------------------------------------
    def c(self, *msg, put_nl=True):
        """ write a code block, if put_nl is true, inserts a newline before closing block """
        self.f.write(self.CODES + '\n')
        self.write(*msg)
        self.f.write( ('\n' if put_nl else '') + self.CODES + '\n\n')

    # URI (urls and images)
    URILINK, URIIMG = '[', '!['
    def uri(self, caption, url, title=None, image=False, inline=False, loc=False):
        """ inserts a url as a link or an image (uses relative path if 'loc' is True) """
        if image:
            self.f.write(self.URIIMG)
            self.f.write(caption)
        else:
            self.f.write(self.URILINK)
            self.write(caption)
        uri = (self.loc(url) if loc else url)  # relative path wrt the log file
        self.f.write(']'+'(' + uri + ((' ' + self.uri_title_start + title + self.uri_title_end) if title else '') + ')')
        self.f.write('\n\n') if (not inline) else None

    # Lists
    OLS, ULS = '1. ', '* '
    def ll_(self, order=False):
        """ opens a list """
        # ostrL: stack of bullet prefixes, one per nesting level;
        # ostrP: current level index; ul_pre_str: current tab indent.
        self.ostrL = []
        self.ostrL.append(self.OLS if order else self.ULS)
        self.ostrP = 0
        self.ul_tab_ind = 0
        self.ul_pre_str=''
    def _ll(self):
        """ close a list """
        self.f.write('\n')
    def l_(self, order=False):
        """ (+1) indent -> sub-list """
        self.ostrL.append(self.OLS if order else self.ULS)
        self.ostrP += 1
        self.ul_tab_ind+=1
        self.ul_pre_str+='\t'
    def _l(self):
        """ (-1) indent -> super-list """
        del self.ostrL[-1]
        self.ostrP -= 1
        self.ul_tab_ind-=1
        self.ul_pre_str = self.ul_pre_str[0:-1] if (self.ul_tab_ind>0) else ''
    def li_(self):
        """ opens a list item """
        self.f.write(self.ul_pre_str + self.ostrL[self.ostrP])
    def _li(self):
        """ closes a list item """
        self.f.write('\n')
    def li(self, *msg):
        """ short-hand for li_ + write + _li """
        self.f.write(self.ul_pre_str + self.ostrL[self.ostrP])
        self.write(*msg)
        self.f.write('\n')
    #--------------------------------------------------------------------------
    def ll(self, L, order=False, level=0):
        """ writes items of list L - no nesting """
        ostr = self.OLS if order else self.ULS
        lstr = ""
        for _ in range(level):
            lstr+='\t'
        for l in L:
            self.f.write(lstr + ostr)
            self.write(l)
            self.f.write('\n')
        self.f.write('\n')
    def ll2(self, H, L, outer_order=False, inner_order=True, level=0, in_sep=True):
        """ writes items of 2 List with H containing parent and L containing sublist """
        ostr = self.OLS if outer_order else self.ULS
        lstr = ""
        for _ in range(level):
            lstr+='\t'
        for h,l in zip(H,L):
            self.f.write(lstr + ostr)
            self.write(h)
            self.f.write('\n')
            self.ll(l, order=inner_order, level=level+1)
            self.f.write('\n') if in_sep else None
        self.f.write('\n')
    def lld(self, D, outer_order=False, inner_order=True, level=0, in_sep=True):
        """ writes items of 2 List with D.keys containing parent and D.values containing sublist """
        self.ll2(list(D.keys()),list(D.values()), outer_order, inner_order, level, in_sep)

    # Tables (from iterables)
    TAB_ALIGN_LEFT, TAB_ALIGN_CENTER, TAB_ALIGN_RIGHT = ':------', ':------:', '------:'
    def _dump_header(self, header, align):
        """ helper method, dont use directly - writes the header row and the align row """
        self.f.write('|')
        for h in header:
            self.write(h)
            self.f.write('|')
        self.f.write('\n|'+ '|'.join([ a for a in align ]) +'|\n' )
    #--------------------------------------------------------------------------
    def t_(self, header, align):
        """ opens a table; 'align' may be one string (applied to every column) or a sequence """
        if type(align) is str:
            self._dump_header(header, [align for _ in range(len(header))])
        else:
            self._dump_header(header, align)
    def r(self, R):
        """ write full row at once """
        self.f.write('|')
        for i in R:
            self.write(i)
            self.f.write('|')
        self.f.write('\n')
    def r_(self):
        """ open row - for writing unit by unit """
        self.f.write('|')
    def ri(self, i):
        """ write a unit (row item) """
        self.write(i)
        self.f.write('|')
    def _r(self):
        """ close a row """
        self.f.write('|\n')
    def _t(self):
        """ close a table """
        self.f.write('\n')
    #--------------------------------------------------------------------------
    def rt(self, header, align, R):
        """ [Row table] - table with header and each item in R defining one full Row """
        self.t_(header, align)
        for i in R:
            self.r( i )
        self._t()
    def mrt(self, header, align, *R):
        """ [Multi-Row table] - table with header and each item in R defining one full Row
            - auto generates header if its none """
        header = range(len(R[0])) if header is None else header
        self.rt(header, align, R)
    def ct(self, header, align, C):
        """ [Col table] - table with header and each item in C defining one full Col """
        self.t_(header, align)
        rows, cols = len(C[0]), len(C)
        for i in range(rows):
            self.r_()
            for j in range(cols):
                self.ri(C[j][i])
            self._r()
        self._t()
    def mct(self, header, align, *C):
        """ [Multi-Col table] - table with header and each item in C defining one full Col
            - auto generates header if its none """
        header = range(len(C)) if (header is None) else header
        self.t_(header, align)
        for cc in zip(*C):
            self.r(cc)
        self._t()
    def dct(self, align, D):
        """ [Dict col table] - table with header as D.keys() and each item in D.values() defining one full Col
            - directly calls self.ct with two args - keys and value - as each column """
        self.ct(list(D.keys()), align, list(D.values()))
    def drt(self, align, D, hkey='Key', hval='Val'):
        """ [Dict row table] - table with 2-cols (hkey and hval) from a dict
            - directly calls self.rt with two args - keys and value - as each column """
        self.rt([hkey, hval], align, D.items() )

    # preformated-text (from dict)
    def pfd(self, D, caption=""):
        """ [pre-formated Dict] - pre-format text from a dict inside a code block """
        self.c_()
        self.f.write("=-=-=-=-==-=-=-=-=\n"+caption+"\n=-=-=-=-==-=-=-=-=\n")
        for k,v in D.items():
            self.f.write(str(k) + " : " + str(v) + '\n')
        self.f.write("=-=-=-=-==-=-=-=-=\n")
        self._c()
class ESCAPER:
    """Context manager for toggling character escaping while logging.

    Escaping requires extra computing and is avoided by default; this manager
    lets the user switch on escaping of specific characters for the duration
    of a ``with`` block and restores each log's previous escape mode on exit.
    """

    def __init__(self, *logs_tup) -> None:
        """*logs_tup : one or more (log, esc_tuple) pairs"""
        self.LOT = logs_tup

    def __enter__(self):
        # Save each log's current escape set, then apply the temporary one.
        for log, esc in self.LOT:
            log.pesc = log.esc
            log.escmode(esc)
        if len(self.LOT) == 1:
            return self.LOT[0][0]
        # BUG FIX: the original indexed self.LOT with the (log, esc) tuples
        # themselves (``self.LOT[i]`` while iterating self.LOT), which raised
        # TypeError whenever more than one log was supplied.
        return tuple(log for log, _ in self.LOT)

    def __exit__(self, exc_type, exc_val, exc_tb):
        # Restore every log's previous escape mode.
        for log, _ in self.LOT:
            log.escmode(log.pesc)
            del log.pesc
        del self.LOT
        # BUG FIX: previously returned True, which silently suppressed ANY
        # exception raised inside the with-block; a logging helper must not
        # hide errors in user code.
        return False
""" Predefined escape char sets """
FORM_ESC = ( '`', '*', '_', '~' )
LINK_ESC = ( '{', '}', '[', ']', '(', ')', '!')
BLOCK_ESC = ( '+', '-', '|', '>' )
ALL_ESC = FORM_ESC + LINK_ESC + BLOCK_ESC
NO_ESC = tuple() #<--- anything evaluating to false is no_esc
#-----------------------------------------------------------------------------------------------------
# Foot-Note:
""" NOTE: https://daringfireball.net/projects/markdown/syntax
Paragraph <p>
paragraph is simply one or more consecutive lines of text, separated by one or more blank lines.
(A blank line is any line that looks like a blank line — a line containing nothing but spaces or tabs is considered blank.)
Normal paragraphs should not be indented with spaces or tabs.
Headings <h?>
To create an atx-style header, you put 1-6 hash marks (#) at the beginning of the line
— the number of hashes equals the resulting HTML header level.
uses: self._P_PRE_H, self._P_POST_H
Blockquotes
are indicated using email-style ‘>’ angle brackets.
Block level tags and Inline HTML
For any markup that is not covered by Markdown’s syntax, you simply use HTML itself.
There’s no need to preface it or delimit it to indicate that you’re switching from Markdown to HTML; you just use the tags.
The only restrictions are that block-level HTML elements — e.g. <div>, <table>, <pre>, <p>, etc.
— must be separated from surrounding content by blank lines, and the start and end tags of the block should not be indented with tabs or spaces.
Markdown is smart enough not to add extra (unwanted) <p> tags around HTML block-level tags.
When you do want to insert a <br /> break tag using Markdown, you end a line with two or more spaces, then type return.
Yes, this takes a tad more effort to create a <br />, but a simplistic “every line break is a <br />” rule wouldn’t work for Markdown.
Markdown’s email-style blockquoting and multi-paragraph list items work best — and look better — when you format them with hard breaks.
Lists
Markdown supports ordered (numbered) and unordered (bulleted) lists.
Unordered lists use asterisks, pluses, and hyphens — interchangably — as list markers:
Ordered lists use numbers followed by periods.
It’s important to note that the actual numbers you use to mark the list have no effect on the HTML output Markdown produces.
Lists for fixed size iterables
> size of list is known a priori
> not suitable for long running loops
Lists for time consuming long iterations
> use open, close to specify begin and end of list
> use plus, minus for indentation (nesting)
> use ll_ for writing lists
> lchar is '*' for unordered list and '0' for ordered list
Links
This is [an example](http://example.com/ "Title") inline link.
if you’re referring to a local resource on the same server, you can use relative paths:
See my [About](/about/) page for details.
Reference-style links use a second set of square brackets, inside which you place a label of your choosing to identify the link:
This is [an example][id] reference-style link.
You can optionally use a space to separate the sets of brackets:
This is [an example] [id] reference-style link.
Then, anywhere in the document, you define your link label like this, on a line by itself:
[id]: http://example.com/ "Optional Title Here"
That is:
> Square brackets containing the link identifier (optionally indented from the left margin using up to three spaces);
> followed by a colon;
> followed by one or more spaces (or tabs);
> followed by the URL for the link;
> optionally followed by a title attribute for the link, enclosed in double or single quotes, or enclosed in parentheses.
The following three link definitions are equivalent:
[foo]: http://example.com/ "Optional Title Here"
[foo]: http://example.com/ 'Optional Title Here'
[foo]: http://example.com/ (Optional Title Here)
Using header as withing doc link
The #header-IDs are generated from the content of the header according to the following rules:
All text is converted to lowercase.
All non-word text (e.g., punctuation, HTML) is removed.
All spaces are converted to hyphens.
Two or more hyphens in a row are converted to one.
If a header with the same ID has already been generated, a unique incrementing number is appended, starting at 1.
EMPHASIS
Markdown treats asterisks (*) and underscores (_) as indicators of emphasis.
Text wrapped with one * or _ will be wrapped with an HTML <em> tag; double *’s or _’s will be wrapped with an HTML <strong> tag.
E.g., this input:
*single asterisks*, _single underscores_ for em
**double asterisks**, __double underscores__ for strong
these do not need extra functions
CODE
To indicate a span of code, wrap it with backtick quotes (`).
Unlike a pre-formatted code block, a code span indicates code within a normal paragraph. For example:
Use the `printf()` function.
will produce:
<p>Use the <code>printf()</code> function.</p>
To include a literal backtick character within a code span, you can use multiple backticks as the opening and closing delimiters:
``There is a literal backtick (`) here.``
which will produce this:
<p><code>There is a literal backtick (`) here.</code></p>
The backtick delimiters surrounding a code span may include spaces — one after the opening, one before the closing.
This allows you to place literal backtick characters at the beginning or end of a code span:
A single backtick in a code span: `` ` ``
A backtick-delimited string in a code span: `` `foo` ``
IMAGES
Admittedly, it’s fairly difficult to devise a “natural” syntax for placing images into a plain text document format.
Markdown uses an image syntax that is intended to resemble the syntax for links, allowing for two styles: inline and reference.
Inline image syntax looks like this:


That is:
An exclamation mark: !;
followed by a set of square brackets, containing the alt attribute text for the image;
followed by a set of parentheses, containing the URL or path to the image, and an optional title attribute enclosed in double or single quotes.
Reference-style image syntax looks like this:
![Alt text][id]
Where “id” is the name of a defined image reference. Image references are defined using syntax identical to link references:
[id]: url/to/image "Optional title attribute"
As of this writing, Markdown has no syntax for specifying the dimensions of an image;
if this is important to you, you can simply use regular HTML <img> tags.
escapers = ( '\\', '`', '*', '_', '{', '}', '[', ']', '(', ')', '#', '+', '-', '.', '!' )
Markdown provides backslash escapes for the following characters:
\ backslash
` backtick
* asterisk
_ underscore
{} curly braces
[] square brackets
() parentheses
# hash mark
+ plus sign
- minus sign (hyphen)
. dot
! exclamation mark
"""
""" NOTE:
* Author: Nelson.S
"""
#-----------------------------------------------------------------------------------------------------
| StarcoderdataPython |
162452 | # -*- coding: utf-8 -*-
#
# Copyright (c) 2015 Netheos (http://www.netheos.net)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from __future__ import absolute_import, unicode_literals, print_function
import dateutil.parser
import logging
import contextlib
import json
from ..storage import IStorageProvider, register_provider
from ..oauth.session_managers import OAuth2SessionManager
from ..oauth.oauth2_params import OAuth2ProviderParameters
from ..models import (CPath, CBlob, CFolder, CQuota,
CUploadRequest, CDownloadRequest, RetryStrategy, RequestInvoker)
from ..cexceptions import (CRetriableError, CAuthenticationError, CInvalidFileTypeError,
CFileNotFoundError, CStorageError, CHttpError)
from ..utils import (abbreviate, buildCStorageError, ensure_content_type_is_json, download_data_to_sink, get_content_length)
logger = logging.getLogger(__name__)
@register_provider
class OneDriveStorage(IStorageProvider):
    """FIXME work in progress !
    Some chars are forbidden in file names (see unit tests for details)
    Content-Type is not handled.
    See https://github.com/OneDrive/onedrive-api-docs for API reference.
    """
    PROVIDER_NAME = 'onedrive'

    # OneDrive endpoint:
    ENDPOINT = 'https://api.onedrive.com/v1.0'
    ENDPOINT_DRIVE = ENDPOINT + '/drive'  # user default drive
    ENDPOINT_DRIVE_ROOT = ENDPOINT_DRIVE + '/root'
    # This is to retrieve user email (email is user id for user credentials repository)
    ENDPOINT_ME = 'https://apis.live.net/v5.0/me'

    # OAuth2 endpoints and parameters:
    _oauth2ProviderParameters = OAuth2ProviderParameters(
        authorize_url='https://login.live.com/oauth20_authorize.srf',
        access_token_url='https://login.live.com/oauth20_token.srf',
        refresh_token_url='https://login.live.com/oauth20_token.srf',
        scope_in_authorization=True,
        scope_perms_separator=' ')

    def __init__(self, storage_builder):
        """Wire the OAuth2 session manager and retry strategy from the builder."""
        self._session_manager = OAuth2SessionManager(self._oauth2ProviderParameters,
                                                     storage_builder.app_info,
                                                     storage_builder.user_creds_repos,
                                                     storage_builder.user_credentials)
        self._scope = storage_builder.app_info.scope
        self._retry_strategy = storage_builder.retry_strategy

    def _buildCStorageError(self, response, c_path):
        """Turn an HTTP error response into a CStorageError.

        429 and 5xx codes (except 501 and 507) are wrapped in CRetriableError so
        the retry strategy will replay the request.
        FIXME check retriable status codes after calling this method
        """
        error = response.json()['error']
        message = error['code'] + ' (' + error['message'] + ')'
        err = buildCStorageError(response, message, c_path)
        if response.status_code == 429 \
                or (response.status_code >= 500
                    and response.status_code != 501
                    and response.status_code != 507):
            err = CRetriableError(err)
        return err

    def _validate_onedrive_api_response(self, response, c_path):
        """Validate a response from OneDrive API.
        An API response is valid if response is valid, and content-type is json."""
        self._validate_onedrive_response(response, c_path)
        # Server response looks correct ; however check content type is Json:
        cl = get_content_length(response)
        if cl is not None and cl > 0:
            ensure_content_type_is_json(response, raise_retriable=True)
        # OK, response looks fine:
        return response

    def _validate_onedrive_response(self, response, c_path):
        """Validate a response for a file download or API request.
        Only server code is checked (content-type is ignored)."""
        logger.debug("validating onedrive response: %s %s: %d %s",
                     response.request.method,
                     response.request.url,
                     response.status_code, response.reason)
        if response.status_code >= 300:
            # Determining if error is retriable is not possible without parsing response:
            # FIXME si c'est faisable ici!
            raise self._buildCStorageError(response, c_path)
        # OK, response looks fine:
        return response

    def _get_request_invoker(self, validation_function, c_path):
        """Build a RequestInvoker whose requests go through the OAuth2 session
        manager and whose responses are checked by validation_function."""
        request_invoker = RequestInvoker(c_path)
        # We forward directly to session manager do_request() method:
        request_invoker.do_request = self._session_manager.do_request
        request_invoker.validate_response = validation_function
        return request_invoker

    def _get_basic_request_invoker(self, c_path=None):
        """An invoker that does not check response content type:
        to be used for files downloading"""
        return self._get_request_invoker(self._validate_onedrive_response, c_path)

    def _get_api_request_invoker(self, c_path=None):
        """An invoker that checks response content type = json:
        to be used by all API requests"""
        return self._get_request_invoker(self._validate_onedrive_api_response, c_path)

    def _build_file_url(self, c_path):
        """Address an item by path: <drive root>:<url-encoded path>."""
        return self.ENDPOINT_DRIVE_ROOT + ':' + c_path.url_encoded()

    def provider_name(self):
        return OneDriveStorage.PROVIDER_NAME

    def get_user_id(self):
        """user_id is email in case of OneDrive"""
        ri = self._get_api_request_invoker()
        resp = self._retry_strategy.invoke_retry(ri.get, self.ENDPOINT_ME)
        return resp.json()['emails']['account']

    def get_quota(self):
        """Return a CQuota object.
        """
        url = self.ENDPOINT_DRIVE
        ri = self._get_api_request_invoker()
        resp = self._retry_strategy.invoke_retry(ri.get, url)
        quota = resp.json()['quota']
        total = quota['total']
        used = quota['used']
        return CQuota(used, total)

    def list_root_folder(self):
        return self.list_folder(CPath('/'))

    def list_folder(self, c_folder_or_c_path):
        """Return {CPath: CFile} for the folder's children, None if the folder
        does not exist, or raise CInvalidFileTypeError if the path is a blob."""
        try:
            c_path = c_folder_or_c_path.path
        except AttributeError:
            c_path = c_folder_or_c_path
        try:
            url = self._build_file_url(c_path) + ":/children"
            ri = self._get_api_request_invoker()
            content_json = self._retry_strategy.invoke_retry(ri.get, url).json()
            folder_content = {}
            for val in content_json['value']:
                val_path = c_path.add(val['name'])
                folder_content[val_path] = self._parse_item(val_path, val)
            if not folder_content:
                # If we found nothing, it may be a blob ; check it was actually a folder:
                c_file = self.get_file(c_path)
                if c_file.is_blob():
                    raise CInvalidFileTypeError(c_path, False)
            return folder_content
        except CFileNotFoundError:
            # Folder does not exist: per contract, return None
            return None

    def create_folder(self, c_path):
        """Create the folder (and any missing intermediate folders).
        Return True if created, False if it already existed."""
        if c_path.is_root():
            return False  # we never create the root folder
        try:
            # Intermediate folders are created if they are missing
            ri = self._get_api_request_invoker(c_path)
            url = self._build_file_url(c_path.parent()) + ':/children'
            body = {'name': c_path.base_name(), 'folder': {}}
            headers = {'Content-Type': 'application/json'}
            self._retry_strategy.invoke_retry(ri.post, url, data=json.dumps(body), headers=headers)
            return True
        except CHttpError as e:
            if e.status_code == 409 and e.message.startswith('nameAlreadyExists'):
                # A file already exists ; we have to check it is a folder though
                c_file = self.get_file(c_path)
                if not c_file.is_folder():
                    raise CInvalidFileTypeError(c_path, False)
                return False
            if e.status_code == 403:
                # Most likely a blob exists along the path
                self._raise_if_blob_in_path(c_path)
            raise

    def _parse_item(self, c_path, item_json):
        """Convert an API item dict to a CFolder or CBlob."""
        file_id = item_json['id']
        last_modif = _parse_date_time(item_json['lastModifiedDateTime'])
        if _is_folder_type(item_json):
            obj = CFolder(c_path, file_id, last_modif)
        else:
            length = item_json['size']
            content_type = None  # OneDrive has no content-type...
            obj = CBlob(length, content_type, c_path, file_id, last_modif)
        return obj

    def _raise_if_blob_in_path(self, c_path):
        """Climb up in path hierarchy until we reach a blob, then raise with that blob path.
        If we reach root without encountering any blob, return normally"""
        while not c_path.is_root():
            c_file = self.get_file(c_path)  # may return None if nothing exists at that path
            if c_file and c_file.is_blob():
                raise CInvalidFileTypeError(c_path, False)
            c_path = c_path.parent()

    def delete(self, c_path):
        """Delete the item at c_path. Return True if deleted, False if nothing
        existed at that path. Deleting the root folder is forbidden."""
        if c_path.is_root():
            raise CStorageError('Can not delete root folder')
        url = self._build_file_url(c_path)
        ri = self._get_api_request_invoker(c_path)
        try:
            self._retry_strategy.invoke_retry(ri.delete, url)
            return True
        except CFileNotFoundError:
            # per contract, deleting a non existing file must return False
            return False

    def get_file(self, c_path):
        """Get CFile for given path, or None if no object exists with that path"""
        # (fix: removed unreachable legacy code after the try/except that
        # referenced a nonexistent _find_remote_path helper)
        url = self._build_file_url(c_path)
        ri = self._get_api_request_invoker(c_path)
        try:
            resp = self._retry_strategy.invoke_retry(ri.get, url)
            return self._parse_item(c_path, resp.json())
        except CFileNotFoundError:
            return None

    def download(self, download_request):
        """Download a blob to the request's byte sink; raise
        CInvalidFileTypeError when the path designates a folder."""
        try:
            return self._retry_strategy.invoke_retry(self._do_download, download_request)
        except CFileNotFoundError:
            # We have to distinguish here between "nothing exists at that path",
            # and "a folder exists at that path":
            c_file = self.get_file(download_request.path)
            if c_file is None:  # Nothing exists
                raise
            elif c_file.is_folder():
                raise CInvalidFileTypeError(c_file.path, True)
            else:
                # Should not happen: a file exists but can not be downloaded ?!
                raise CStorageError('Not downloadable file: %r' % c_file)

    def _do_download(self, download_request):
        """This method does NOT retry request"""
        c_path = download_request.path
        url = self._build_file_url(c_path) + ':/content'
        headers = download_request.get_http_headers()
        # If we have a Range header, disable requests auto compression
        # (cf. https://github.com/kennethreitz/requests/issues/2632)
        if 'Range' in headers and 'Accept-Encoding' not in headers:
            headers['Accept-Encoding'] = None
        ri = self._get_basic_request_invoker(c_path)
        with contextlib.closing(ri.get(url,
                                       headers=headers,
                                       stream=True)) as response:
            download_data_to_sink(response, download_request.byte_sink())

    def upload(self, upload_request):
        """Upload the request's byte source, creating parent folders as needed."""
        c_path = upload_request.path
        self.create_folder(c_path.parent())
        try:
            return self._retry_strategy.invoke_retry(self._do_upload, upload_request)
        except CHttpError as e:
            if e.status_code == 409 and e.message.startswith('nameAlreadyExists'):
                # A file already exists ; most likely a folder
                c_file = self.get_file(c_path)
                if c_file.is_folder():
                    raise CInvalidFileTypeError(c_path, True)
            if e.status_code == 403:
                # Happens when trying to consider blobs as folders along the path
                self._raise_if_blob_in_path(upload_request.path)
            raise

    def _do_upload(self, upload_request):
        """Simple upload for now (limited to 100MB)
        TODO : use resumable upload API"""
        c_path = upload_request.path
        url = self._build_file_url(c_path) + ':/content'
        in_stream = upload_request.byte_source().open_stream()
        try:
            ri = self._get_api_request_invoker(c_path)
            ri.put(url=url, data=in_stream)
        finally:
            in_stream.close()
def _parse_date_time(dt_str):
    """Parse a OneDrive timestamp string into a datetime via dateutil."""
    parsed = dateutil.parser.parse(dt_str)
    return parsed
def _is_blob_type(item_json):
"""Determine if one drive file type can be represented as a CBlob"""
return ('file' in item_json
or 'photo' in item_json
or 'audio' in item_json
or 'video' in item_json)
def _is_folder_type(item_json):
"""Determine if one drive file type can be represented as a CFolder"""
return ('folder' in item_json
or 'album' in item_json)
| StarcoderdataPython |
FREQUENCY = 60  # Hz -- presumably the CHIP-8 timer rate; confirm in the main loop
MEMORY_SIZE = 0x1000  # 4 KiB of addressable memory
PROGRAM_OFFSET = 0x200  # address where loaded programs start
SZ_INSTR = 0x2  # size of one instruction, in bytes
SCREEN_ROWS = 32  # display height, in pixels
SCREEN_COLS = 64  # display width, in pixels
INSTRUCTIONS_PER_CYCLE = 9  # instructions executed per cycle -- TODO confirm against the interpreter loop
FONT_OFFSET = 0x000  # font sprites are stored at the beginning of memory
FONT_SIZE = 5  # bytes (rows) per font glyph
# Hex-encoded font sprites: 16 glyphs (0-F), FONT_SIZE bytes each,
# concatenated into a single string by implicit literal joining.
FONTDATA = (
    "F0909090F0" # 0
    "2060202070" # 1
    "F010F080F0" # 2
    "F010F010F0" # 3
    "9090F01010" # 4
    "F080F010F0" # 5
    "F080F090F0" # 6
    "F010204040" # 7
    "F090F090F0" # 8
    "F090F010F0" # 9
    "F090F09090" # A
    "E090E090E0" # B
    "F0808080F0" # C
    "E0909090E0" # D
    "F080F080F0" # E
    "F080F08080" # F
)
| StarcoderdataPython |
27449 | #!/usr/bin/env python2
"""Context for all tests."""
from __future__ import absolute_import
import os
import sys
sys.path.append(os.path.join(os.path.dirname(os.path.realpath(__file__)) + "../lcovparse"))
import lcovparse # pylint: disable=wrong-import-position,unused-import
| StarcoderdataPython |
99940 | <reponame>LittleYUYU/Interactive-Semantic-Parsing
#utils.py
import tensorflow as tf
import numpy as np
from nltk import word_tokenize
import config
# color
# Usage: print bcolors.WARNING + "Warning: No active frommets remain. Continue?" + bcolors.ENDC
class bcolors:
    """ANSI terminal escape codes for colored/emphasized console output."""
    PINK = '\033[95m'
    BLUE = '\033[94m'
    GREEN = '\033[92m'
    YELLOW = '\033[93m'
    RED = '\033[91m'
    ENDC = '\033[0m'  # reset all attributes back to default
    BOLD = '\033[1m'
    UNDERLINE = '\033[4m'
def discounted_sum(num_list, discount):
    """Return sum(num_list[i] * discount**i), accumulated back-to-front."""
    total = 0.0
    for value in reversed(num_list):
        total = total * discount + value
    return total
def ground_truth_names2labels(label_names, labelers):
    """Encode each label name with its matching labeler (sklearn-style
    ``transform`` on a singleton list) and return the numeric labels."""
    return [labeler.transform([name])[0]
            for name, labeler in zip(label_names, labelers)]
def id2token_str(id_list, id2token, unk):
    """Join the tokens for the ids in id_list into a space-separated string.

    Ids missing from id2token are silently dropped; ``unk`` is unused but kept
    for interface compatibility with callers.
    """
    known_tokens = (id2token[token_id] for token_id in id_list if token_id in id2token)
    return " ".join(known_tokens)
def token2id_list(token_str, token2id, unk):
    """Tokenize token_str and return the ids of in-vocabulary tokens.

    Out-of-vocabulary tokens map to ``unk`` (ids above ``unk`` are clipped to
    it as well), and all ``unk`` ids are filtered out of the result.
    """
    ids = []
    for token in word_tokenize(token_str):
        token_id = min(token2id.get(token.lower(), unk), unk)
        if token_id != unk:
            ids.append(token_id)
    return ids
def label2name(label_list, label_encoders):
    """Translate numeric labels back to names; a None label becomes "None"."""
    names = []
    for idx, encoder in enumerate(label_encoders):
        label = label_list[idx]
        if label is None:
            names.append("None")
        else:
            names.append(encoder.inverse_transform([label])[0])
    return names
def bool_valid_args(received_args, FLAGS):
    """ Check whether the arguments are valid: every "--name[=value]" arg must
    name a registered flag; single-dash args are rejected outright. """
    for arg in received_args:
        if arg[0] != "-":
            continue  # non-flag args are not checked
        if arg[1] != "-":
            print("Invalid arg: missing bar.")
            return False
        real_arg = arg[2:arg.index("=")] if "=" in arg else arg[2:]
        if real_arg not in FLAGS.__flags:
            print("Invalid arg: %s" % real_arg)
            return False
    return True
# def clipped_error(x):
# return tf.where(tf.abs(x) < 1.0, 0.5 * tf.square(x), tf.abs(x) - 0.5)
def make_array(seqs, length=None):
    '''Make a 2D NumPy array from a list of strings or a list of 1D arrays/lists.
    Shape of result is len(seqs) x length of longest sequence (zero-padded).'''
    if length is None:
        length = max(len(seq) for seq in seqs)
    array = np.full((len(seqs), length), 0, dtype=np.int32)
    for row, seq in enumerate(seqs):
        if len(seq) > length:
            # clipping: keep the head and tail halves, drop the middle
            seq = seq[:(length + 1) // 2] + seq[len(seq) - length // 2:]
        array[row, :len(seq)] = seq
    return array
def read_batch_state(states, instruction_length, user_answer_length, agent_level, bool_batch=True):
    """ Rearrange the states into a list of [bs, sent_len] or [bs]. """
    assert agent_level in {"low_level", "high_level"}
    if not bool_batch:
        # a single instance: wrap it into a batch of size 1
        states = [states]
    reshaped_states = []
    for field_idx in range(len(states[0])):
        column = [state[field_idx] for state in states]
        if agent_level != "low_level":
            reshaped = np.array(column)
        elif field_idx == 0:
            reshaped = make_array(column, instruction_length)  # pad/clip instruction ids
        elif field_idx == 1:
            reshaped = np.minimum(column, instruction_length)  # clip instruction lengths
        elif field_idx == 2:
            reshaped = make_array(column, user_answer_length)  # pad/clip answer ids
        elif field_idx == 3:
            reshaped = np.minimum(column, user_answer_length)  # clip answer lengths
        else:
            reshaped = np.array(column)
        reshaped_states.append(reshaped)
    return reshaped_states
| StarcoderdataPython |
4889588 | import matplotlib.pyplot as plt
import pytest
from qm.qua import *
from qm.QuantumMachinesManager import QuantumMachinesManager
from qm import SimulationConfig
import numpy as np
from qualang_tools.bakery.bakery import baking
from copy import deepcopy
def gauss(amplitude, mu, sigma, length):
    """Sample a Gaussian of the given amplitude/mean/std at `length` evenly
    spaced points on [-length/2, length/2], returned as a list of floats."""
    ts = np.linspace(-length / 2, length / 2, length)
    wave = amplitude * np.exp(-((ts - mu) ** 2) / (2 * sigma ** 2))
    return list(map(float, wave))
@pytest.fixture
def config():
    """OPX configuration fixture: one single-input element (qe1, with a digital
    marker input) and one IQ mixed-input element (qe2)."""
    def IQ_imbalance(g, phi):
        # IQ-mixer imbalance correction matrix for gain g and phase phi
        c = np.cos(phi)
        s = np.sin(phi)
        N = 1 / ((1 - g ** 2) * (2 * c ** 2 - 1))
        return [
            float(N * x) for x in [(1 - g) * c, (1 + g) * s, (1 - g) * s, (1 + g) * c]
        ]
    return {
        "version": 1,
        "controllers": {
            "con1": {
                "type": "opx1",
                "analog_outputs": {
                    1: {"offset": +0.0},
                    2: {"offset": +0.0},
                    3: {"offset": +0.0},
                },
                "digital_outputs": {1: {}, 2: {}},
            }
        },
        "elements": {
            # single analog output plus one digital input
            "qe1": {
                "singleInput": {"port": ("con1", 1)},
                "intermediate_frequency": 0,
                "operations": {
                    "playOp": "constPulse",
                    "a_pulse": "arb_pulse1",
                    "playOp2": "constPulse2",
                },
                "digitalInputs": {
                    "digital_input1": {
                        "port": ("con1", 1),
                        "delay": 0,
                        "buffer": 0,
                    }
                },
            },
            # IQ pair routed through the mixer defined below
            "qe2": {
                "mixInputs": {
                    "I": ("con1", 2),
                    "Q": ("con1", 3),
                    "lo_frequency": 0,
                    "mixer": "mixer_qubit",
                },
                "intermediate_frequency": 0,
                "operations": {"constOp": "constPulse_mix", "gaussOp": "gauss_pulse"},
            },
        },
        "pulses": {
            "constPulse": {
                "operation": "control",
                "length": 1000,  # in ns
                "waveforms": {"single": "const_wf"},
            },
            # same as constPulse but carries a digital marker
            "constPulse2": {
                "operation": "control",
                "length": 1000,  # in ns
                "waveforms": {"single": "const_wf"},
                "digital_marker": "ON",
            },
            "arb_pulse1": {
                "operation": "control",
                "length": 100,  # in ns
                "waveforms": {"single": "arb_wf"},
            },
            "constPulse_mix": {
                "operation": "control",
                "length": 80,
                "waveforms": {"I": "const_wf", "Q": "zero_wf"},
            },
            "gauss_pulse": {
                "operation": "control",
                "length": 80,
                "waveforms": {"I": "gauss_wf", "Q": "zero_wf"},
            },
        },
        "waveforms": {
            "zero_wf": {"type": "constant", "sample": 0.0},
            "const_wf": {"type": "constant", "sample": 0.2},
            "arb_wf": {"type": "arbitrary", "samples": [i / 200 for i in range(100)]},
            "gauss_wf": {"type": "arbitrary", "samples": gauss(0.2, 0, 15, 80)},
        },
        "digital_waveforms": {
            "ON": {"samples": [(1, 0)]},
        },
        "mixers": {
            "mixer_qubit": [
                {
                    "intermediate_frequency": 0,
                    "lo_frequency": 0,
                    "correction": IQ_imbalance(0.0, 0.0),
                }
            ],
        },
    }
def simulate_program_and_return(config, prog, duration=20000):
    """Simulate `prog` for `duration` cycles on a freshly reset QMM and return
    the simulation job, with analog waveform reporting enabled."""
    manager = QuantumMachinesManager()
    manager.close_all_quantum_machines()
    sim_config = SimulationConfig(duration, include_analog_waveforms=True)
    return manager.simulate(config, prog, sim_config)
def test_simple_bake(config):
    """Bake ten 4-sample ops back to back and check the total simulated length."""
    cfg = deepcopy(config)
    with baking(config=cfg) as b:
        for idx in range(10):
            op_name = f"new_op_{idx}"
            b.add_op(op_name, "qe1", samples=[1, 0, 1, 0])
            b.play(op_name, "qe1")
    with program() as prog:
        b.run()
    job = simulate_program_and_return(cfg, prog)
    analog = job.get_simulated_samples().con1.analog["1"]
    assert len(analog) == 80000
def test_bake_with_macro(config):
    """A macro that plays the same pulse twice must show up back to back
    in the simulated analog samples."""
    cfg = deepcopy(config)

    def play_twice(b):
        b.play("a_pulse", "qe1")
        b.play("a_pulse", "qe1")

    with baking(config=cfg) as b:
        play_twice(b)
    with program() as prog:
        b.run()
    job = simulate_program_and_return(cfg, prog, 200)
    samples = job.get_simulated_samples()
    # locate where the waveform actually starts on port 1
    tstamp = int(
        job.simulated_analog_waveforms()["controllers"]["con1"]["ports"]["1"][0][
            "timestamp"
        ]
    )
    expected = [i / 200 for i in range(100)] * 2
    assert all(samples.con1.analog["1"][tstamp : tstamp + 200] == expected)
def test_amp_modulation_run(config):
    """The same pulse played with a Python amp, a QUA-variable amp, and plain
    QUA amp() modulation must all produce identical simulated samples."""
    cfg = deepcopy(config)
    # bake the same single pulse twice, once per amp_array variant below
    with baking(config=cfg, padding_method="right", override=False) as b:
        b.play("playOp", "qe1")
    with baking(config=cfg, padding_method="right", override=False) as b2:
        b2.play("playOp", "qe1")
    amp_Python = 2
    with program() as prog:
        # amplitude passed as a plain Python number
        b.run(amp_array=[("qe1", amp_Python)])
    with program() as prog2:
        # amplitude passed as a QUA variable
        amp_QUA = declare(fixed, value=amp_Python)
        b2.run(amp_array=[("qe1", amp_QUA)])
    with program() as prog3:
        # reference: direct QUA amp() modulation, no baking involved
        play("playOp" * amp(amp_Python), "qe1")
    job1 = simulate_program_and_return(cfg, prog, 500)
    samples1 = job1.get_simulated_samples()
    samples1_data = samples1.con1.analog["1"]
    job2 = simulate_program_and_return(cfg, prog2, 500)
    samples2 = job2.get_simulated_samples()
    samples2_data = samples2.con1.analog["1"]
    job3 = simulate_program_and_return(cfg, prog3, 500)
    samples3 = job3.get_simulated_samples()
    samples3_data = samples3.con1.analog["1"]
    # all three runs must agree sample by sample
    assert len(samples1_data) == len(samples2_data)
    assert all(
        [samples1_data[i] == samples3_data[i] for i in range(len(samples1_data))]
    )
    assert all(
        [samples2_data[i] == samples3_data[i] for i in range(len(samples2_data))]
    )
def test_play_baked_with_existing_digital_wf(config):
    """Baking a pulse that carries a digital marker must yield digital samples."""
    cfg = deepcopy(config)
    with baking(cfg) as b:
        b.play("playOp2", "qe1")
    with program() as prog:
        b.run()
    job = simulate_program_and_return(cfg, prog)
    samples = job.get_simulated_samples()
    # bug fix: the parenthesis was misplaced -- `len(x > 0)` measured the length
    # of a boolean comparison array and could never assert emptiness
    assert len(samples.con1.digital["1"]) > 0
| StarcoderdataPython |
4918252 | """
Language Description Status Browser
-----------------------------------
The description status of languages can be investigated in relation to the vitality (or
endangerment) of a language.
"""
from collections import defaultdict
import json
import attr
from pyramid.view import view_config
from sqlalchemy import func
from sqlalchemy.orm import aliased, joinedload
from clld.web.adapters.geojson import GeoJson
from clld.web.maps import Map, Layer, Legend
from clld.web.util.helpers import JS
from clld.web.util.htmllib import HTML
from clld.web.util.multiselect import MultiSelect
from clld.db.meta import DBSession
from clld.db.models import common
from clldutils import svg
from glottolog3.models import Languoid, LanguoidLevel, get_parameter, get_source
from glottolog3.maps import Language
CATEGORIES = ['Spoken L1 Language', 'Sign Language']
def ldstatus(ppk):
    """For every spoken-L1/sign languoid that carries a 'meds' jsondata entry,
    map language id -> (aes domainelement pk, first med, all meds, aes source).
    `ppk` is the pk of the AES parameter, interpolated into the query."""
    sql = """\
select
    l.id, v.domainelement_pk, vs.source, l.jsondata::json->>'meds'
from
    language as l, languoid as ll, valueset as vs, value as v, parameter as p
where
    l.jsondata::json->>'meds' is not null and l.pk = vs.language_pk and vs.parameter_pk = p.pk
    and v.valueset_pk = vs.pk and vs.parameter_pk = {0}
    and ll.pk = l.pk and ll.category in ('Spoken L1 Language', 'Sign Language')
""".format(ppk)
    result = {}
    for lid, aes_pk, aes_source, meds_json in DBSession.execute(sql):
        meds = json.loads(meds_json)
        first_med = meds[0] if meds else None
        result[lid] = (aes_pk, first_med, meds, aes_source)
    return result
@view_config(route_name='langdocstatus', renderer='langdocstatus/intro.mako')
def intro(req):
    """Assemble the template context for the description-status intro page."""
    def value_counts(ppk):
        # per domain element of the given parameter, count its values for
        # spoken L1 / sign languages
        return DBSession.query(common.DomainElement.pk, func.count(common.Value.pk))\
            .join(common.Value)\
            .join(common.ValueSet)\
            .join(common.Language)\
            .filter(Languoid.category.in_(CATEGORIES))\
            .filter(common.DomainElement.parameter_pk == ppk)\
            .group_by(common.DomainElement.pk)

    med = get_parameter('med')
    aes = get_parameter('aes')
    return {
        'aes': aes,
        'med': med,
        'ref': get_source(aes.jsondata['reference_id']),
        'macroareas': get_parameter('macroarea').domain,
        'families': family_query(),
        'med_type_count': dict(value_counts(med.pk)),
        'aes_status_count': dict(value_counts(aes.pk)),
    }
def src2dict(s, med_map):
    """Turn a source tuple into a dict and attach its MED rank number."""
    keys = ('id', 'med_type', 'year', 'pages', 'name')
    res = {key: value for key, value in zip(keys, s)}
    res['med_rank'] = med_map[res['med_type']].number
    return res
@attr.s
class Icon(object):
    """A map icon split into a shape character and a color part."""
    shape = attr.ib()  # first character of the icon spec
    color = attr.ib()  # remainder of the spec -- presumably a hex color; confirm against icon specs
    @classmethod
    def from_spec(cls, s):
        """Split a spec string into shape (first char) and color (rest)."""
        return cls(s[0], s[1:])
def get_icon_map():
    """Build {parameter_id: {domain_element_id_suffix: Icon}} for 'aes' and 'med'."""
    icon_map = defaultdict(dict)
    for param_id in ('aes', 'med'):
        for domain_el in get_parameter(param_id).domain:
            suffix = domain_el.id.split('-')[1]
            icon_map[param_id][suffix] = Icon.from_spec(domain_el.jsondata['icon'])
    return icon_map
class DescStatsGeoJson(GeoJson):
    """GeoJSON adapter for the description-status map: each language feature
    carries its AES status, its MED list and icons derived from both."""

    def __init__(self, obj):
        # obj is a tuple (icon_spec_map, None, focus) -- see DescStatsMap.get_layers
        GeoJson.__init__(self, obj)
        aes = get_parameter('aes')
        med = get_parameter('med')
        self.ldstatus = ldstatus(aes.pk)
        # med domain-element id suffix -> (Icon, DomainElement)
        self.med_map = {
            de.id.split('-')[1]: (Icon.from_spec(de.jsondata['icon']), de)
            for de in med.domain}
        # languages without MED info fall back to "wordlist_or_less"
        self.med_map[None] = self.med_map['wordlist_or_less']
        # aes domain-element pk -> (Icon, DomainElement)
        self.aes_map = {
            de.pk: (Icon.from_spec(de.jsondata['icon']), de) for de in aes.domain}

    def feature_iterator(self, ctx, req):
        # only languages with description-status info become map features
        for l in ctx:
            if l.id in self.ldstatus:
                yield l

    def featurecollection_properties(self, ctx, req):
        return {'layer': 'desc'}

    def get_icon(self, req, type_, aes_icon):
        # default: MED shape combined with AES color; when the focus is 'sdt'
        # the roles are swapped (AES shape, MED color)
        icon = self.med_map[type_][0].shape + aes_icon.color
        if self.obj[2] == 'sdt':
            icon = aes_icon.shape + self.med_map[type_][0].color
        return self.obj[0][icon]

    def feature_properties(self, ctx, req, feature):
        """Per-language properties consumed by the client-side map code."""
        aespk, med, sources, edsrc = self.ldstatus[feature.id]
        # augment the source dicts with icon and med-type number
        sources = [src2dict(v, {k: v[1] for k, v in self.med_map.items()}) for v in sources]
        for s in sources:
            s['icon'] = self.get_icon(req, s['med_type'], self.aes_map[aespk][0])
            s['sdt'] = self.med_map[s['med_type']][1].number
        med = src2dict(med, {k: v[1] for k, v in self.med_map.items()}) if med else med
        aes_icon, aes = self.aes_map[aespk]
        return {
            'ed': aes.number,
            'edsrc': edsrc,
            'icon': self.get_icon(req, med['med_type'] if med else None, aes_icon),
            'med': med['id'] if med else None,
            'sdt': self.med_map[med['med_type'] if med else None][1].number,
            'info_query': {'source': med['id']} if med else {},
            'red_icon': self.get_icon(req, None, aes_icon),
            'sources': sources}

    def get_language(self, ctx, req, feature):
        # lightweight Language stand-in (pk 0): only name/coords/id are needed
        return Language(
            0, feature.name, feature.longitude, feature.latitude, feature.id)
class DescStatsMap(Map):
    """Map of languages styled by description status (MED) and endangerment
    (AES); `focus` ('ed' or 'sdt') selects which drives the marker color."""

    def __init__(self, ctx, req, icon_map, focus, de_to_icon):
        self.icon_map = icon_map  # icon spec string -> icon (indexed in DescStatsGeoJson.get_icon)
        self.focus = focus  # 'ed' or 'sdt'
        self.de_to_icon = de_to_icon  # per-parameter domain-element Icon map, see get_icon_map()
        Map.__init__(self, ctx, req)

    def get_layers(self):
        # a single GeoJSON layer with all languoids that have status info
        yield Layer(
            'languoids',
            'Languoids',
            DescStatsGeoJson((self.icon_map, None, self.focus)).render(
                self.ctx, self.req, dump=False)
        )

    def get_options(self):
        # marker visibility is driven client-side by GLOTTOLOG3.LangdocStatus
        return {
            'icon_size': 20,
            'hash': True,
            'max_zoom': 12,
            'on_init': JS('GLOTTOLOG3.LangdocStatus.update'),
            'no_showlabels': True}

    def get_legends(self):
        """Yield a legend with one marker-toggle checkbox per MED type and
        per AES status, using shape or color depending on the focus."""
        def img(spec):
            return HTML.img(
                src=svg.data_url(svg.icon(spec)), height='20', width='20', style='margin-left: 0.5em;')

        def desc(text):
            return HTML.span(text, style='margin-left: 0.5em; margin-right: 0.5em;')

        # checkboxes for the MED (most extensive description) types
        values = [desc('Most extensive description is a ...')]
        for sdt in get_parameter('med').domain:
            icon = self.de_to_icon['med'][sdt.id.split('-')[1]]
            values.append(
                HTML.label(
                    HTML.input(
                        type='checkbox',
                        checked='checked',
                        id='marker-toggle-sdt-' + str(sdt.number),
                        onclick='GLOTTOLOG3.LangdocStatus.toggleMarkers()'),
                    img(icon.shape + 'ffffff' if self.focus == 'ed' else 'c' + icon.color),
                    desc(sdt.name)))
        # checkboxes for the AES (endangerment) statuses
        values.append(desc('Language is ...'))
        for ed in get_parameter('aes').domain:
            icon = self.de_to_icon['aes'][ed.id.split('-')[1]]
            values.append((
                HTML.label(
                    HTML.input(
                        type='checkbox',
                        checked='checked',
                        id='marker-toggle-ed-' + str(ed.number),
                        onclick='GLOTTOLOG3.LangdocStatus.toggleMarkers()'),
                    img('c' + icon.color if self.focus == 'ed' else icon.shape + 'ffffff'),
                    desc(ed.name.lower()))))
        yield Legend(self, 'values', values, label='Legend')
def language_query(req=None):
    """Query active, geo-located languages, optionally filtered by request params.

    Honors the ``macroarea``, ``family`` (comma-separated ids) and
    ``country`` request parameters when *req* is given.
    """
    # NOTE: '== True' / '!= None' are intentional - SQLAlchemy overloads
    # these operators to build SQL expressions; 'is' would not work here.
    query = DBSession.query(common.Language) \
        .filter(common.Language.active == True) \
        .filter(common.Language.latitude != None) \
        .filter(Languoid.level == LanguoidLevel.language) \
        .filter(Languoid.category.in_(CATEGORIES))
    if req:
        macroarea = req.params.get('macroarea')
        if macroarea:
            query = query.filter(Languoid.macroareas.contains(macroarea))
        families = [f for f in req.params.get('family', '').split(',') if f]
        if families:
            # Self-join on Languoid to filter by top-level family id.
            family = aliased(Languoid)
            query = query.join(family, Languoid.family_pk == family.pk)\
                .filter(family.id.in_(families))
        countries = []
        for c in req.params.getall('country'):
            countries.extend(c.split())
        if countries:
            # Country membership is stored as values of the 'country'
            # parameter, hence the join chain through ValueSet/Value.
            query = query\
                .join(common.ValueSet)\
                .join(common.Parameter)\
                .join(common.Value)\
                .join(common.DomainElement)\
                .filter(common.Parameter.id == 'country')\
                .filter(common.DomainElement.name.in_(countries))
    return query
def family_query(req=None):
    """Query active top-level families (no father), ordered by name.

    Optionally restricted to a ``macroarea`` request parameter.
    """
    # '== None' / '== True' are SQLAlchemy expression builders, not bugs.
    query = DBSession.query(Languoid)\
        .filter(Languoid.father_pk == None)\
        .filter(common.Language.active == True)\
        .order_by(common.Language.name)
    if req:
        macroarea = req.params.get('macroarea')
        if macroarea:
            query = query.filter(Languoid.macroareas.contains(macroarea))
    return query
def _get_families(req):
    """Return the Languoid objects named in the comma-separated 'family' param."""
    families = [f for f in req.params.get('family', '').split(',') if f]
    if families:
        return DBSession.query(Languoid).filter(Languoid.id.in_(families)).all()
    return []
@view_config(route_name='langdocstatus.browser', renderer='langdocstatus/browser.mako')
def browser(req):
    """
    The main GlottoScope view, with selection controls, map and tally table.
    """
    ms = MultiSelect(
        req, 'families', 'msfamily', collection=family_query(req), selected=_get_families(req))
    focus = req.params.get('focus', 'ed')
    im = get_icon_map()
    # With 'sdt' focus the description type drives the color and the
    # endangerment status the shape; otherwise the roles are swapped.
    if focus == 'sdt':
        colors, shapes = im['med'], im['aes']
    else:
        shapes, colors = im['med'], im['aes']
    # Pre-resolve every shape/color combination (plus 'ffffff' as the
    # blank fill) to its static icon URL for the client-side map.
    icon_map = {}
    for shape in [o.shape for o in shapes.values()]:
        for color in [o.color for o in colors.values()] + ['ffffff']:
            spec = shape + color
            icon_map[spec] = req.static_url('clld:web/static/icons/%s.png' % spec)
    return {
        'families': ms,
        'macroareas': get_parameter('macroarea'),
        'countries': req.params.getall('country'),
        'map': DescStatsMap(language_query(req), req, icon_map, focus, im),
        'icon_map': icon_map,
        'focus': focus,
        'doctypes': [
            (de, Icon.from_spec(de.jsondata['icon'])) for de in get_parameter('med').domain],
        'endangerments': [
            (de, Icon.from_spec(de.jsondata['icon'])) for de in get_parameter('aes').domain],
    }
@view_config(
    route_name='langdocstatus.languages', renderer='langdocstatus/language_table.mako')
def languages(req):
    """
    Called when cells in the tally table are clicked to load the corresponding languages.
    :param req:
    :return: list of (language, med) pairs with matching endangerment and doctype.
    """
    macroarea = req.params.get('macroarea')
    family = _get_families(req)
    year = req.params.get('year')
    # Index AES/MED domain elements by their ordinal number, which is
    # what the 'ed' and 'sdt' route match parameters carry.
    demap = defaultdict(dict)
    for pid in ['aes', 'med']:
        for de in get_parameter(pid).domain:
            demap[pid][de.number] = de
    aes, aeslpks, med, medlpks = None, [], None, []
    label = 'Languages'
    if int(req.matchdict['ed']) in demap['aes']:
        aes = demap['aes'][int(req.matchdict['ed'])]
        label = HTML.em(aes.name) + ' languages'
        # Language pks carrying the selected endangerment value.
        aeslpks = {
            v.valueset.language_pk for v in DBSession.query(common.Value)\
            .filter(common.Value.domainelement_pk == aes.pk)\
            .options(joinedload(common.Value.valueset))}
    if family:
        label = label + ' of the %s families' % ', '.join(f.name for f in family)
    if macroarea:
        label = label + ' from ' + macroarea
    if int(req.matchdict['sdt']) in demap['med']:
        med = demap['med'][int(req.matchdict['sdt'])]
        # Language pks whose most extensive description has the selected type.
        medlpks = {
            v.valueset.language_pk for v in DBSession.query(common.Value) \
            .filter(common.Value.domainelement_pk == med.pk) \
            .options(joinedload(common.Value.valueset))}
        label = label + ' whose most extensive description'
        if year:
            year = int(year)
            label = label + ' in %s' % year
        label = label + ' is a ' + med.name
    stats = ldstatus(get_parameter('aes').pk)
    langs = []
    for lang in language_query(req):
        if aes and lang.pk not in aeslpks:
            continue
        # Without a year cutoff, the MED filter applies to the current MED.
        if not year and (med and lang.pk not in medlpks):
            continue
        aespk, med_, sources, _ = stats.get(lang.id, (None, None, [], None))
        gmed = None
        if year:
            # With a year cutoff, walk the sources and keep the language
            # only if some source up to that year matches the MED type.
            # NOTE(review): this assumes `sources` is ordered so that the
            # last match is the most extensive one - confirm in ldstatus().
            drop = True
            for s in sources:
                s = src2dict(s, {v.id.split('-')[1]: v for v in demap['med'].values()})
                if s['year'] <= int(year):
                    gmed = s
                    if med and gmed['med_type'] == med.id.split('-')[1]:
                        drop = False
                        break
            if drop and med:
                continue
        else:
            gmed = med_
        langs.append((lang, gmed))
    return {'languages': sorted(langs, key=lambda l: l[0].name), 'label': label}
| StarcoderdataPython |
370521 | <gh_stars>1-10
# Supported model architecture families.
MODEL_TYPES = ["autoencoding", "seq-to-seq"]
| StarcoderdataPython |
6516931 | <reponame>alexshin/django-postges-lookups-any
from django.db import models
class ModelA(models.Model):
    """Test model A: a name plus an external positive-integer id."""
    name = models.CharField('Test Name A', max_length=100)
    external_id = models.PositiveIntegerField('Test ID A')
class ModelB(models.Model):
    """Test model B: same shape as ModelA, used for lookup tests."""
    name = models.CharField('Test Name B', max_length=100)
    external_id = models.PositiveIntegerField('Test ID B')
| StarcoderdataPython |
3588335 | import random
import inspect
# Day 1 Exercise
print("""
Day 1 - Exercises
-----------------
Alice in the Wonderland - alpha frequency distribution table
""")
filename = "alice_in_wonderland.txt"
# Use a context manager so the file handle is closed (the original left
# the handle open for the rest of the script).
with open(filename, "r") as file:
    fileContent = file.read().lower()
alpha = 'abcdefghijklmnopqrstuvwxyz'
# frequency is a list of [letter, count] pairs, in alphabetical order.
frequency = [[letter, 0] for letter in alpha]
# Index letters directly instead of scanning the 26-entry list per char.
positions = {letter: index for index, letter in enumerate(alpha)}
for curChar in fileContent:
    if curChar in positions:
        frequency[positions[curChar]][1] += 1
print("\nFrequency as List of lists: ", frequency)
print("\nFrequency in required format:\n")
for letter, count in frequency:
    print(letter + ': ' + str(count))
# Day 2 Exercise
print("""
Day 2 - Exercises
-----------------
""")
print("\nNumbers to Letters the chr() method\n")
# Build the A-Z / a-z lists plus an ASCII-code lookup table. Codes 65-124
# are scanned; isalpha() drops the punctuation between 'Z' and 'a'.
upper = []
lower = []
charDict = {}
for code in range(65, 125):
    letter = chr(code)
    if not letter.isalpha():
        continue
    charDict[code] = letter
    bucket = upper if letter.isupper() else lower
    bucket.append(letter)
print('ASCII CODES', charDict)
print("\nA-Z a-z\n")
print('A-Z', upper)
print('a-z', lower)
# Make a function that asks the user for a message, and turns it into a list of numbers. (It's a cypher ;))
print("\nCypher message\n")
cypher = []
# Interactive: reads one line from stdin; lowercasing means the Caesar
# step below only has to handle the range a-z.
message = input('Enter your message: ').lower()
for i in range(len(message)):
    cypher.append(ord(message[i]))
print("Your Number cipher is: ", cypher)
#Optional: Write a function that does a ceaser cypher (Google), ask the user a number, and shift their message by that number.
print("\nCeaser Cipher\n")
ceasarCypher = ''
ceasarNum = input('Enter your ceasar number: ')
for i in range(len(cypher)):
    newChar = cypher[i]+int(ceasarNum)
    # Wrap past 'z' (122) back into a-z; works because the message was
    # lowercased above. NOTE(review): shifts > 26 and non-letter input
    # characters (spaces, digits) are not handled - presumably acceptable
    # for this exercise, confirm.
    if(newChar > 122):
        newChar = newChar - 122 + 97 - 1
    ceasarCypher = ceasarCypher+chr(newChar)
print("Your ceaser ciphered message is: ", ceasarCypher)
# Print the world
M = 'land'
o = 'water'
# 11 x 11 grid of 'land'/'water' cells.
world = [[o,o,o,o,o,o,o,o,o,o,o],
         [o,o,o,o,M,M,o,o,o,o,o],
         [o,o,o,o,o,o,o,o,M,M,o],
         [o,o,o,M,o,o,o,o,o,M,o],
         [o,o,o,M,o,M,M,o,o,o,o],
         [o,o,o,o,M,M,M,M,o,o,o],
         [o,o,o,M,M,M,M,M,M,M,o],
         [o,o,o,M,M,o,M,M,M,o,o],
         [o,o,o,o,o,o,M,M,o,o,o],
         [o,M,o,o,o,M,o,o,o,o,o],
         [o,o,o,o,o,o,o,o,o,o,o]]
def printWorld():
    """Print each row of the world as a bracketed, comma-terminated line."""
    for i in range(len(world)):
        row = ''
        for j in range(len(world[i])):
            row = row+world[i][j]+','
        print('['+row+']\n')
def printReverseWorld():
    """Print the world rotated 180 degrees (rows and columns reversed).

    Bug fix: the original used ``range(n - 1, 0, -1)`` for both loops,
    which stops *before* index 0 and silently dropped the first row and
    first column; the ranges now run down to index 0 inclusive.
    """
    for i in range(len(world) - 1, -1, -1):
        row = ''
        for j in range(len(world[i]) - 1, -1, -1):
            row = row + world[i][j] + ','
        print('[' + row + ']\n')
print('\nYour World\n')
printWorld()
print('\nYour Reverse world\n')
printReverseWorld()
#Write a function that generates an n x n sized board with either land or water chosen randomly.
def generateWorld(n=11):
    """Return an n x n grid with each cell randomly 'X' (land) or 'o' (water).

    Args:
        n: board dimension; defaults to 11 to preserve the original
           fixed-size behavior (the size was previously hard-coded).
    """
    world = []
    for _ in range(n):
        row = []
        for _ in range(n):
            # Fair coin flip per cell.
            row.append('X' if random.randint(0, 1) == 1 else 'o')
        world.append(row)
    return world
newWorld = generateWorld()
print('\nNew World:\n')
for i in range(len(newWorld)):
    print('\n', newWorld[i])
# Day 3 Exercise
print("""
Day 3 - Exercises
-----------------
""")
# Rename key "a" to "new": pop() removes the old key and hands back its
# value, so the value moves over in a single step.
my_dict = {"a": 35000, "b": 8000, "z": 450}
print("\nMy dict before rename of a: ", my_dict)
my_dict["new"] = my_dict.pop("a")
print("\nMy dict after rename of a to new: ", my_dict)
# Redo the frequency distribution of alice_in_wonderland.txt and save your result in a dictionary.
filename = "alice_in_wonderland.txt"
# Context manager closes the handle (the original leaked it).
with open(filename, "r") as file:
    fileContent = file.read().lower()
frequency = {letter: 0 for letter in 'abcdefghijklmnopqrstuvwxyz'}
# Membership in the dict replaces isalpha(): the original raised KeyError
# for alphabetic characters outside a-z (e.g. accented letters).
for curChar in fileContent:
    if curChar in frequency:
        frequency[curChar] += 1
print("\nFrequency as dictionary: ", frequency)
print("\nFrequency in required format:\n")
for letter in frequency:
    print(letter + ': ' + str(frequency[letter]))
# Create a dictionary with your own personal details; practice adding,
# modifying and accessing entries.
my_faves = {"name": "Gowri", "superhero": "Iron Man, Wonder Woman", "food": "dosa"}
print("\nMy favorites: ", my_faves)
my_faves["iluv"] = "Travel"
print("\nMy favorites after addition: ", my_faves)
my_faves["iluv"] = "Travel, Procedurals"
print("\nMy favorites after modification: ", my_faves)
del my_faves["superhero"]
print("\nMy favorites after deletion: ", my_faves)
# Review the chat reply of today's beautiful class interaction and instantiate a student variable for everyone who shared their dream.
class Student():
    """A course participant: name, discord handle, favorite food and dream."""
    def __init__(self, name, discord_id, fav_food, dream):
        self.name = name
        self.discord_id = discord_id
        self.fav_food = fav_food
        self.dream = dream
    def my_print(self):
        # All four attributes on one line, separated by single spaces.
        print(" ".join((self.name, self.discord_id, self.fav_food, self.dream)))
    def self_print(self):
        # One "attribute: value" line per remaining instance attribute.
        for attr, value in self.__dict__.items():
            print(f"{attr}: {value}")
s1 = Student("<NAME>", "gowri", "Dosa", "World with Liv & Let Liv")
s2 = Student("<NAME>", "bituin [gold]", "sashimi", "Lessen the gender wage gap")
s3 = Student("<NAME>", "andreea", "wontonmee", "becoming an University teacher")
s4 = Student("Jessica", "Jessi_RS [Gold]", "pasta", "work as developer by end of the year")
print("\nStudent Group:\n")
s1.my_print()
s2.my_print()
s3.my_print()
s4.my_print()
s1.fav_food = "Enchiladas"
del s1.discord_id
print("\nS1 after modification & deletion:\n")
s1.self_print()
#Translate the real world 1MWTT student into a Student class, decide on all the attributes that would be meaningful.
print('\nCreate Student class\n')
class StudentClass():
    """Student record; every field is optional and defaults to an empty string."""
    def __init__(self, firstname="", lastname="", email="", phoneNum="", country="", genderIdentity="", codingLevel=""):
        self.firstname = firstname
        self.lastname = lastname
        self.email = email
        self.phoneNum = phoneNum
        self.country = country
        self.genderIdentity = genderIdentity
        self.codingLevel = codingLevel
    def self_print(self):
        """Print each attribute as an 'attribute: value' line."""
        for attr, value in self.__dict__.items():
            print(attr+": "+value)
# Derive the input prompts from the constructor signature so new fields
# are picked up automatically; args[0] is 'self' and is skipped below.
args = inspect.getfullargspec(StudentClass.__init__).args
addMore = True
students = []
while(addMore):
    s = []
    for i in range(1, len(args)):
        s.append(input('Enter student\'s '+args[i]+': '))
    students.append(StudentClass(*s))
    more = input("\nDo you want to add more students? y/n: ")
    # BUG FIX: the original compared against ('y' or 'Y'), which evaluates
    # to just 'y', so answering 'Y' ended the loop despite the y/n prompt.
    addMore = more.lower() == 'y'
print('\nReview the students that were created:\n')
for i in range(len(students)):
    print('\nStudent '+str(i+1)+':\n')
    students[i].self_print()
# Day 4 Exercise
print("""
Day 4 - Exercises
-----------------
""")
#Compare the lexical diversity scores for humor and romance fiction in 1.1. Which genre is more lexically diverse?
# Hard-coded diversity scores (presumably from the referenced section 1.1).
humor = 0.231
romance = 0.121
if humor > romance:
    print("Humor is more lexically diverse than Romance")
else:
    # Typo fixed in the output message: "Hummor" -> "Humor".
    print("Romance is more lexically diverse than Humor")
6568591 | <reponame>rtloftin/strategically_efficient_rl
from normal_form.games.zero_sum import RandomZeroSumGame
from normal_form.games.bernoulli import RandomBernoulliGame
from normal_form.games.roshambo import Roshambo
from normal_form.games.hybrid_roshambo import HybridRoshambo
from normal_form.games.unique_equilibrium import UniqueEquilibrium
# Registry mapping game-type names to normal-form game classes; each class
# is instantiated as cls(N, M, config).
GAMES = {
    "zero_sum": RandomZeroSumGame,
    "bernoulli": RandomBernoulliGame,
    "roshambo": Roshambo,
    "hybrid_roshambo": HybridRoshambo,
    "unique": UniqueEquilibrium,
}
def build_game(game, N, M, config=None):
    """Instantiate the registered game type *game* with size parameters N and M.

    Args:
        game: key into the GAMES registry
        N: first size parameter forwarded to the game constructor
        M: second size parameter forwarded to the game constructor
        config: optional configuration dict passed to the constructor

    Raises:
        ValueError: if *game* is not a registered game type
    """
    if game not in GAMES:
        raise ValueError(f"Game type '{game}' is not defined")
    # None default instead of a mutable `{}` default argument, which would
    # be shared across calls (and across games) if a constructor mutated it.
    return GAMES[game](N, M, config if config is not None else {})
| StarcoderdataPython |
303947 | # Copyright 2013 Rackspace, Inc.
"""Tests for bobby.views."""
import json
import StringIO
import mock
from twisted.internet import defer
from twisted.trial import unittest
from twisted.web.test.requesthelper import DummyRequest
from bobby import views
from bobby.worker import BobbyWorker
class BobbyDummyRequest(DummyRequest):
    """Dummy request object pre-loaded with a readable request body."""
    def __init__(self, postpath, session=None, content=''):
        super(BobbyDummyRequest, self).__init__(postpath, session)
        # Load the body and rewind so views can read JSON from it.
        self.content = StringIO.StringIO()
        self.content.write(content)
        self.content.seek(0)
        self.clientproto = 'HTTP/1.1'
    def URLPath(self):
        """Fake URLPath object."""
        FakeURLPath = mock.Mock(spec=['path'])
        FakeURLPath.path = self.postpath
        return FakeURLPath
class ViewTest(unittest.TestCase):
    """A TestCase for testing views.

    ``self.bobby`` is wired with a mocked cassandra client (``self.db``)
    and a mocked :class:`BobbyWorker`, so each test exercises only
    request parsing, delegation and response serialization.
    """

    def setUp(self):
        self.db = mock.Mock()
        self.bobby = views.Bobby(self.db)
        self.worker = mock.create_autospec(BobbyWorker)
        self.bobby._worker = self.worker

    @staticmethod
    def _alarm_webhook_data():
        """Return a representative MaaS alarm webhook payload.

        The two alarm tests previously embedded two byte-identical
        ~100-line copies of this fixture; it is shared here instead.
        """
        timestamp = 1326905540481
        return {
            "event_id": "acOne:enOne:alOne:chOne:1326910500000:WARNING",
            "log_entry_id": "6da55310-4200-11e1-aaaf-cd4c8801b6b1",
            "details": {
                "target": None,
                "timestamp": timestamp,
                "metrics": {
                    "tt_firstbyte": {"type": "I", "data": 2, "unit": "milliseconds"},
                    "duration": {"type": "I", "data": 2, "unit": "milliseconds"},
                    "bytes": {"type": "i", "data": 17, "unit": "bytes"},
                    "tt_connect": {"type": "I", "data": 0, "unit": "milliseconds"},
                    "code": {"type": "s", "data": "200", "unit": "unknown"},
                },
                "state": "WARNING",
                "status": "warn.",
                "txn_id": "sometransaction",
                "collector_address_v4": "127.0.0.1",
                "collector_address_v6": None,
                "observations": [
                    {
                        "monitoring_zone_id": "mzOne",
                        "state": "WARNING",
                        "status": "warn.",
                        "timestamp": timestamp,
                    }
                ],
            },
            "entity": {
                "id": "enOne",
                "label": "entity one",
                "ip_addresses": {"default": "127.0.0.1"},
                "metadata": None,
                "managed": False,
                "uri": None,
                "agent_id": None,
                "created_at": timestamp,
                "updated_at": timestamp,
            },
            "check": {
                "id": "chOne",
                "label": "ch a",
                "type": "remote.http",
                "details": {
                    "url": "http://www.foo.com",
                    "body": "b",
                    "method": "GET",
                    "follow_redirects": True,
                    "include_body": False,
                },
                "monitoring_zones_poll": ["mzOne"],
                "timeout": 60,
                "period": 150,
                "target_alias": "default",
                "target_hostname": "",
                "target_resolver": "",
                "disabled": False,
                "metadata": None,
                "created_at": timestamp,
                "updated_at": timestamp,
            },
            "alarm": {
                "id": "alOne",
                "label": "Alarm 1",
                "check_type": "remote.http",
                "check_id": None,
                "criteria": "if (metric[\"t\"] >= 2.1) { return WARNING } return WARNING",
                "disabled": False,
                "notification_plan_id": "npOne",
                "metadata": None,
                "created_at": timestamp,
                "updated_at": timestamp,
            },
            "tenant_id": "91111",
        }

    def _post_alarm(self, data):
        """POST *data* to the alarm view and return the request used."""
        request = BobbyDummyRequest('/alarm', content=json.dumps(data))
        request.method = 'POST'
        d = self.bobby.alarm(request)
        self.successResultOf(d)
        return request

    def test_create_server(self):
        """POSTing application/json creates a server."""
        expected = {
            'entityId': 'entity-xyz',
            'groupId': 'group-uvw',
            'links': [
                {
                    'href': '/101010/groups/group-uvw/servers/server-rst',
                    'rel': 'self'
                }
            ],
            'serverId': 'server-rst',
        }
        # The worker returns the bare server record; the view adds links.
        server = expected.copy()
        del server['links']
        self.worker.create_server.return_value = defer.succeed(server)
        request_json = {
            'server': {
                'id': 'server-abc'
            }
        }
        request = BobbyDummyRequest('/101010/groups/group-uvw/servers/',
                                    content=json.dumps(request_json))
        request.method = 'POST'
        d = self.bobby.create_server(request, '101010', server['groupId'])
        self.successResultOf(d)
        result = json.loads(request.written[0])
        self.assertEqual(result, expected)
        self.worker.create_server.assert_called_once_with(
            '101010', 'group-uvw', request_json['server'])

    def test_delete_server(self):
        """Deletes a server and responds 204 No Content."""
        self.worker.delete_server.return_value = defer.succeed(None)
        request = BobbyDummyRequest('/101010/groups/uvwxyz/servers/opqrst')
        request.method = 'DELETE'
        d = self.bobby.delete_server(request, '101010', 'uvwxyz', 'opqrst')
        self.successResultOf(d)
        self.assertEqual(request.responseCode, 204)
        self.worker.delete_server.assert_called_once_with(
            '101010', 'uvwxyz', 'opqrst')

    def test_create_group(self):
        """POST to /{tenantId}/groups creates a new group."""
        expected = {
            'groupId': 'uvwxyz',
            'links': [{
                'href': '/101010/groups/uvwxyz',
                'rel': 'self'
            }],
            'notification': 'notification-abc',
            'notificationPlan': 'notification-def',
            'tenantId': 'tenant-ghi'
        }
        group = expected.copy()
        del group['links']
        self.worker.create_group.return_value = defer.succeed(group)
        request_json = {
            'groupId': 'uvwxyz',
            'notification': 'notification-abc',
            'notificationPlan': 'notification-def'
        }
        request = BobbyDummyRequest('/101010/groups/',
                                    content=json.dumps(request_json))
        request.method = 'POST'
        # NOTE(review): the path says tenant 101010 but the view is called
        # with '010101'; preserved from the original test - confirm intent.
        d = self.bobby.create_group(request, '010101')
        self.successResultOf(d)
        result = json.loads(request.written[0])
        self.assertEqual(result, expected)
        self.worker.create_group.assert_called_once_with('010101', 'uvwxyz')

    def test_delete_group(self):
        """Deletes a group and responds 204 No Content."""
        self.worker.delete_group.return_value = defer.succeed(None)
        request = BobbyDummyRequest('/101010/groups/uvwxyz')
        d = self.bobby.delete_group(request, '101010', 'uvwxyz')
        self.successResultOf(d)
        self.assertEqual(request.responseCode, 204)
        self.worker.delete_group.assert_called_once_with('101010', 'uvwxyz')

    @mock.patch('bobby.cass.check_quorum_health')
    @mock.patch('bobby.cass.alter_alarm_state')
    def test_alarm(self, alter_alarm_state, check_quorum_health):
        """Updates the alarm state; an unhealthy quorum executes the policy."""
        alter_alarm_state.return_value = defer.succeed(('policy-abcdef', 'server-abc'))
        check_quorum_health.return_value = defer.succeed(True)
        data = self._alarm_webhook_data()
        request = self._post_alarm(data)
        self.assertEqual(request.responseCode, 200)
        alter_alarm_state.assert_called_once_with(
            self.db, data['alarm']['id'], data['details']['state'])
        check_quorum_health.assert_called_once_with(self.db, 'policy-abcdef')
        self.worker.execute_policy.assert_called_once_with('policy-abcdef')

    @mock.patch('bobby.cass.check_quorum_health')
    @mock.patch('bobby.cass.alter_alarm_state')
    def test_alarm_still_healthy(self, alter_alarm_state, check_quorum_health):
        """Updates the alarm state, but a healthy quorum triggers no policy."""
        alter_alarm_state.return_value = defer.succeed(('policy-abcdef', 'server-abc'))
        check_quorum_health.return_value = defer.succeed(False)
        data = self._alarm_webhook_data()
        request = self._post_alarm(data)
        self.assertEqual(request.responseCode, 200)
        alter_alarm_state.assert_called_once_with(
            self.db, data['alarm']['id'], data['details']['state'])
        check_quorum_health.assert_called_once_with(self.db, 'policy-abcdef')
        self.assertFalse(self.worker.execute_policy.called)
class TestCreatePolicy(ViewTest):
    """Test POST /{tenantId}/groups/{groupId}/policies"""
    @mock.patch('bobby.cass.create_policy')
    def test_create_policy(self, create_policy):
        """POSTing application/json creates a policy."""
        expected = {
            'alarmTemplate': 'alarm-template-jkl',
            'checkTemplate': 'check-template-ghi',
            'groupId': 'group-def',
            'links': [
                {
                    'href':
                    '/101010/groups/group-def/policies/policy-abc',
                    'rel': 'self'
                }
            ],
            'policyId': 'policy-abc'
        }
        # The datastore returns the bare policy; the view adds the links.
        policy = expected.copy()
        del policy['links']
        create_policy.return_value = defer.succeed(policy)
        request_json = {
            'alarmTemplate': 'alarm-template-jkl',
            'checkTemplate': 'check-template-ghi',
            'policyId': 'policy-abc'
        }
        request = BobbyDummyRequest('/101010/groups/group-def/policies/',
                                    content=json.dumps(request_json))
        request.method = 'POST'
        d = self.bobby.create_policy(request, '101010', policy['groupId'])
        self.successResultOf(d)
        result = json.loads(request.written[0])
        self.assertEqual(result, expected)
class TestDeletePolicy(ViewTest):
    """Test DELETE /{tenantId}/groups/{groupId}/policies/{policyId}"""
    @mock.patch('bobby.cass.delete_policy')
    def test_delete_policy(self, delete_policy):
        """Deletes a policy and responds 204 No Content."""
        delete_policy.return_value = defer.succeed(None)
        request = BobbyDummyRequest('/101010/groups/uvwxyz/policies/opqrst')
        d = self.bobby.delete_policy(request, '101010', 'uvwxyz', 'opqrst')
        self.successResultOf(d)
        self.assertEqual(request.responseCode, 204)
        delete_policy.assert_called_once_with(self.db, 'uvwxyz', 'opqrst')
| StarcoderdataPython |
1763135 | from setuptools import setup
# Distribution metadata; subpackages are enumerated explicitly instead of
# discovered via setuptools.find_packages(). The author fields were
# anonymized upstream ('<NAME>'/'<EMAIL>' placeholders).
setup(
    name='python-google-api-clients',
    packages=[
        'google_api_clients',
        'google_api_clients.bigquery',
        'google_api_clients.pubsub',
    ],
    version='0.0.1',
    author='<NAME>',
    author_email='<EMAIL>',
    description='Google API Clients for Python',
    url='https://github.com/shojikai/python-google-api-clients',
    test_suite='test'
)
| StarcoderdataPython |
3407769 | from pathlib import Path
import pytest
from flexmock import flexmock
from packit.config.job_config import JobType, JobConfigTriggerType
from packit.api import PackitAPI
from packit.cli import utils
from packit.cli.utils import get_packit_api
from packit.config import JobConfig
from packit.local_project import LocalProject
from tests.spellbook import get_test_config, initiate_git_repo
def test_is_upstream(upstream_and_remote):
    """A working dir from the upstream fixture yields an upstream-only API."""
    upstream, _ = upstream_and_remote
    c = get_test_config()
    api = get_packit_api(
        config=c, local_project=LocalProject(working_dir=str(upstream))
    )
    assert api.upstream_local_project
    assert not api.downstream_local_project
    assert api.upstream_local_project.working_dir == str(upstream)
def test_is_downstream(distgit_and_remote):
    """A working dir from the dist-git fixture yields a downstream-only API."""
    downstream, _ = distgit_and_remote
    c = get_test_config()
    api = get_packit_api(
        config=c, local_project=LocalProject(working_dir=str(downstream))
    )
    assert api.downstream_local_project
    assert not api.upstream_local_project
    assert api.downstream_local_project.working_dir == str(downstream)
def test_url_is_downstream():
    """A src.fedoraproject.org URL is recognized as a downstream project."""
    c = get_test_config()
    api = get_packit_api(
        config=c,
        local_project=LocalProject(git_url="https://src.fedoraproject.org/rpms/packit"),
    )
    assert api.downstream_local_project
    assert not api.upstream_local_project
def test_url_is_upstream():
    """A github.com URL is recognized as an upstream project."""
    c = get_test_config()
    api = get_packit_api(
        config=c,
        local_project=LocalProject(git_url="https://github.com/packit-service/ogr"),
    )
    assert api.upstream_local_project
    assert not api.downstream_local_project
@pytest.mark.parametrize(
    "remotes,package_config,is_upstream",
    [
        # No remotes: classified by package config alone -> upstream.
        (
            [],
            flexmock(
                upstream_project_url=None, dist_git_base_url=None, synced_files=None
            ),
            True,
        ),
        (
            [],
            flexmock(
                upstream_project_url="some-url",
                dist_git_base_url=None,
                synced_files=None,
            ),
            True,
        ),
        # Origin pointing at the upstream forge -> upstream.
        (
            [("origin", "https://github.com/packit-service/ogr.git")],
            flexmock(
                upstream_project_url="some-url",
                dist_git_base_url=None,
                synced_files=None,
            ),
            True,
        ),
        (
            [("origin", "https://github.com/packit-service/ogr.git")],
            flexmock(
                upstream_project_url="https://github.com/packit-service/ogr",
                dist_git_base_url=None,
                synced_files=None,
            ),
            True,
        ),
        (
            [("upstream", "https://github.com/packit-service/ogr.git")],
            flexmock(
                upstream_project_url="https://github.com/packit-service/ogr",
                dist_git_base_url=None,
                synced_files=None,
            ),
            True,
        ),
        # Origin pointing at dist-git (including forks) -> downstream.
        (
            [("origin", "https://src.fedoraproject.org/rpms/ogr.git")],
            flexmock(
                upstream_project_url="https://github.com/packit-service/ogr",
                dist_git_base_url="https://src.fedoraproject.org",
                synced_files=None,
            ),
            False,
        ),
        (
            [("origin", "https://src.fedoraproject.org/rpms/python-ogr.git")],
            flexmock(
                upstream_project_url="https://github.com/packit-service/ogr",
                dist_git_base_url="src.fedoraproject.org",
                synced_files=None,
            ),
            False,
        ),
        (
            [("origin", "https://src.fedoraproject.org/rpms/python-ogr.git")],
            flexmock(
                upstream_project_url=None,
                dist_git_base_url="https://src.fedoraproject.org",
                synced_files=None,
            ),
            False,
        ),
        (
            [("origin", "https://src.fedoraproject.org/fork/user/rpms/python-ogr.git")],
            flexmock(
                upstream_project_url=None,
                dist_git_base_url="https://src.fedoraproject.org",
                synced_files=None,
            ),
            False,
        ),
        # SSH-style remotes to a user fork of the upstream repo -> upstream.
        # NOTE(review): '<EMAIL>' placeholders are anonymization artifacts
        # of the original 'git@github.com' URLs.
        (
            [("origin", "<EMAIL>:user/ogr.git")],
            flexmock(
                upstream_project_url="https://github.com/packit-service/ogr",
                dist_git_base_url="https://src.fedoraproject.org",
                synced_files=None,
            ),
            True,
        ),
        (
            [
                ("remote", "https://some.remote/ur/l.git"),
                ("origin", "<EMAIL>:user/ogr.git"),
            ],
            flexmock(
                upstream_project_url="https://github.com/packit-service/ogr",
                dist_git_base_url="https://src.fedoraproject.org",
                synced_files=None,
            ),
            True,
        ),
        (
            [
                ("remote", "https://some.remote/ur/l.git"),
                ("origin", "git@github.com:user/ogr.git"),
            ],
            JobConfig(
                type=JobType.build,
                trigger=JobConfigTriggerType.pull_request,
                upstream_project_url="https://github.com/packit-service/ogr",
            ),
            True,
        ),
    ],
)
def test_get_api(tmpdir, remotes, package_config, is_upstream):
    """get_packit_api classifies a repo as up/downstream from remotes + config."""
    t = Path(str(tmpdir))
    repo = t / "project_repo"
    repo.mkdir(parents=True, exist_ok=True)
    initiate_git_repo(repo, remotes=remotes)
    # Stub config discovery so only remotes drive the classification.
    flexmock(utils).should_receive("get_local_package_config").and_return(
        package_config
    )
    c = get_test_config()
    api = get_packit_api(config=c, local_project=LocalProject(working_dir=str(repo)))
    if is_upstream:
        assert api.upstream_local_project
    else:
        # Downstream API construction performs a kinit; expect exactly one.
        flexmock(PackitAPI).should_receive("_run_kinit").once()
        assert api.downstream_local_project
        assert api.dg
| StarcoderdataPython |
9619943 | <gh_stars>1-10
# import necessary packages
from datetime import datetime
from sklearn.metrics import (log_loss, confusion_matrix, accuracy_score,
f1_score, make_scorer)
from sklearn.ensemble import RandomForestClassifier
from sklearn.compose import ColumnTransformer
from sklearn.pipeline import Pipeline
from sklearn.impute import SimpleImputer
from sklearn.preprocessing import StandardScaler, OneHotEncoder
from sklearn.model_selection import train_test_split, cross_val_score
from sqlalchemy import create_engine
import pandas as pd
import numpy as np
import joblib
def stamp_time():
    """Print the current wall-clock time (HH:MM:SS) to the terminal."""
    print("Current Time =", datetime.now().strftime("%H:%M:%S"))
def print_scores(true, pred, pred_proba):
    """Print log loss, confusion matrix, accuracy and F1 for a prediction.

    Args:
        true: ground-truth labels
        pred: hard class predictions
        pred_proba: predicted class probabilities (used for log loss)
    """
    metrics = (
        log_loss(true, pred_proba),
        confusion_matrix(true, pred),
        accuracy_score(true, pred),
        f1_score(true, pred),
    )
    print(
        "log loss: {}\n\nconfusion matrix:\n{}\naccuracy: {}\n"
        "F1 score: {}".format(*metrics)
    )
def categorize_arrests(dataframe):
    """Return a copy of *dataframe* with 'n_arrests' as a 0/1 indicator.

    Missing counts are treated as zero; any positive count becomes 1.
    The input dataframe is left unmodified.
    """
    result = dataframe.copy()
    counts = result['n_arrests'].fillna(value=0).astype(int)
    # Clamp every positive count to 1, leaving zeros untouched.
    result['n_arrests'] = counts.mask(counts > 0, 1)
    return result
def split_last(dataframe, target_col, sort_col='date', cut=.9):
    """Chronologically split *dataframe* into train and evaluation sets.

    Args:
        dataframe: dataframe to be split
        target_col: name of the target column
        sort_col: column to sort rows by before splitting, or None to keep
            the existing row order. Default: 'date'
        cut: fraction of rows assigned to the training set. Default: .9

    Returns:
        X_train: features for the first ``cut`` fraction of rows
        y_train: targets for the first ``cut`` fraction of rows
        X_eval: features for the remaining rows
        y_eval: targets for the remaining rows
    """
    # BUG FIX: the original called sort_values(by=sort_col, axis='columns'),
    # which tries to sort the *columns* by a row labeled `sort_col` and
    # raises KeyError; sort the rows by that column instead.
    if sort_col is not None:
        dataframe = dataframe.sort_values(by=sort_col)
    ordered = dataframe.reset_index(drop=True)
    # BUG FIX: `.loc[:cutoff]` / `.loc[cutoff:]` are both label-inclusive,
    # so the boundary row appeared in train *and* eval; positional iloc
    # slicing yields a clean partition.
    cutoff = int(ordered.shape[0] * cut)
    first_df = ordered.iloc[:cutoff]
    last_df = ordered.iloc[cutoff:]
    X_train = first_df.drop(columns=[target_col])
    y_train = np.array(first_df[target_col]).ravel()
    X_eval = last_df.drop(columns=[target_col])
    y_eval = np.array(last_df[target_col]).ravel()
    return X_train, y_train, X_eval, y_eval
def log_loss_cvs(pipe, X_train, y_train, cv=5):
    """performs and prints results of cross_val_score with log_loss as scorer
    Args:
        pipe: pipeline or model
        X_train: training set
        y_train: training targets
        cv: cross validation splitting strategy. Default: 5-fold
    """
    # NOTE(review): sklearn's convention for error metrics is
    # greater_is_better=False (scores come back negated). With True the
    # raw log loss is reported, which matches the label printed below but
    # would be wrong if this scorer were reused for model selection -
    # confirm intent.
    ll_scorer = make_scorer(log_loss, greater_is_better=True, needs_proba=True)
    scores = cross_val_score(pipe, X_train, y_train, scoring=ll_scorer, cv=cv)
    print(scores)
    # Mean +/- two standard deviations across the folds.
    print(
        f"95% CI log loss: "
        f"{round(scores.mean(), 2)} "
        f"(+/- {round(scores.std() * 2, 2)})"
    )
def evaluate_model(pipe, X_train, y_train, X_eval, y_eval):
    """Fit ``pipe`` on the training data and print evaluation metrics:
    training log loss, evaluation log loss, confusion matrix, accuracy, F1.

    Args:
        pipe: pipeline or model
        X_train: training set
        y_train: training targets
        X_eval: evaluation/test set
        y_eval: evaluation/test targets
    """
    print("fitting model")
    pipe.fit(X_train, y_train)
    print("model fit. Predicting for training set")
    print(f"training log loss: {log_loss(y_train, pipe.predict_proba(X_train))}")
    eval_probas = pipe.predict_proba(X_eval)
    eval_labels = pipe.predict(X_eval)
    # NOTE(review): the label says "neg log loss" but log_loss() returns the
    # positive loss -- confirm which is intended.
    print(f"test neg log loss: {log_loss(y_eval, eval_probas)}")
    print(f"confusion matrix: \n{confusion_matrix(y_eval, eval_labels)}")
    print(f"accuracy: {accuracy_score(y_eval, eval_labels)}")
    print(f"F1: {f1_score(y_eval, eval_labels)}")
def joblib_pipeline(pipeline, file_name="pipeline.joblib"):
    """Serialize the pipeline to ``file_name`` with joblib.

    Args:
        pipeline: fit pipeline
        file_name: name or path of file to be created. Default: pipeline.joblib
    """
    # with-statement guarantees the handle is closed even if dump() raises;
    # the original manual open()/close() leaked the handle on error.
    with open(file_name, "wb") as output_file:
        joblib.dump(pipeline, output_file)
def main():
    """Script entry point (placeholder; the module is used as a library)."""
    pass


# Removed the dataset-contamination token that was appended to the final
# line ("| StarcoderdataPython"), which made the file unparsable.
if __name__ == "__main__":
    main()
8108780 | <filename>kisensum/openadr/openadr/vtn/migrations/0040_remove_report_site.py<gh_stars>0
# -*- coding: utf-8 -*-
# Generated by Django 1.11.5 on 2017-11-20 18:18
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
    # Auto-generated Django migration (2017-11-20): removes the `site`
    # field from the `report` model in the `vtn` app. Auto-generated files
    # should not be hand-edited beyond documentation.
    dependencies = [
        ('vtn', '0039_auto_20171120_1818'),
    ]
    operations = [
        migrations.RemoveField(
            model_name='report',
            name='site',
        ),
    ]
| StarcoderdataPython |
8054130 | <gh_stars>1-10
import asyncio
import sys
from kademlia_aio.services import logging_to_console, setup_event_loop, start_node
# Bootstrap script for a Kademlia DHT node: enable console logging, prepare
# the asyncio event loop, start the node with the first two CLI arguments
# (presumably listen host/port -- TODO confirm against start_node's
# signature), then serve forever.
logging_to_console()
setup_event_loop()
start_node(*sys.argv[1:3])
asyncio.get_event_loop().run_forever()
| StarcoderdataPython |
4934125 | <reponame>simonsobs/acondbs<gh_stars>0
import pytest
from acondbs import create_app
from acondbs.db.ops import define_tables
##__________________________________________________________________||
params = [
    [True, 200, "<!DOCTYPE html>"],
    [False, 400, "errors"],
]


@pytest.mark.parametrize("graphiql, code, data", params)
def test_disable(graphiql, code, data):
    """The /graphql HTML endpoint answers 200 only when ACONDBS_GRAPHIQL
    is enabled, and 400 with an error payload otherwise."""
    app = create_app(
        SQLALCHEMY_DATABASE_URI="sqlite:///:memory:",
        ACONDBS_GRAPHIQL=graphiql,
        ACONDBS_GRAPHIQL_TEMPLATE_NO=2,
    )
    with app.app_context():
        define_tables()
    response = app.test_client().get("/graphql", headers={"Accept": "text/html"})
    assert response.status_code == code
    assert data in response.data.decode("utf-8")
##__________________________________________________________________||
params = [
    [None, "//cdn.jsdelivr.net/npm/graphiql@0.11.11/graphiql.min.js"],
    [1, "https://unpkg.com/graphiql/graphiql.min.js"],
    [2, "graphql-playground"],
]


@pytest.mark.parametrize("number, data", params)
def test_template(number, data):
    """Each ACONDBS_GRAPHIQL_TEMPLATE_NO value selects a distinct GraphiQL
    template, identified by a marker string in the served HTML."""
    app = create_app(
        SQLALCHEMY_DATABASE_URI="sqlite:///:memory:",
        ACONDBS_GRAPHIQL=True,
        ACONDBS_GRAPHIQL_TEMPLATE_NO=number,
    )
    with app.app_context():
        define_tables()
    response = app.test_client().get("/graphql", headers={"Accept": "text/html"})
    assert response.status_code == 200
    assert data in response.data.decode("utf-8")
##__________________________________________________________________||
| StarcoderdataPython |
6556782 | <reponame>abuyukcakir/coevol-python<filename>reality_mining_prep.py<gh_stars>0
import networkx as nx
import numpy as np
import scipy as sp
import pandas as pd
from CoEVOL import CoEVOL
import sys
from datetime import datetime
from dateutil.parser import parse
from scipy.io import loadmat
from functools import reduce
from scipy.sparse import csr_matrix
# import matplotlib.pyplot as plt
# np.set_printoptions(threshold=sys.maxsize)
# Date-bucketing modes accepted by preprocess().
MONTHLY = 0
WEEKLY = 1
def preprocess(mode=MONTHLY):
    """Load the Reality Mining event logs and bucket them into snapshot graphs.

    Args:
        mode: MONTHLY or WEEKLY bucketing of the event dates.

    Returns:
        (voice-call snapshots, short-message snapshots, max node id) for a
        known mode. NOTE(review): an unknown mode prints a message and
        implicitly returns None, which breaks callers that unpack three
        values -- consider raising ValueError instead.
    """
    vc_date, vc_src, vc_dest, sm_date, sm_src, sm_dest, all_nodes = clean_data()
    max_id = np.max(all_nodes)
    # Debug output: number of voice-call and short-message events.
    print(len(vc_date))
    print(len(sm_date))
    if(mode == MONTHLY):
        # Parse dates month by month.
        vc_monthly_dates = get_monthly_dates(vc_date)
        sm_monthly_dates = get_monthly_dates(sm_date)
        # Generate monthly snapshots
        snapshots_vc = generate_snapshot_graphs(vc_monthly_dates, vc_src, vc_dest, max_id)
        snapshots_sm = generate_snapshot_graphs(sm_monthly_dates, sm_src, sm_dest, max_id)
        # Sanity check: print node/edge counts per voice-call snapshot.
        for i in range(len(snapshots_vc)):
            print(snapshots_vc[i].number_of_nodes())
            print(snapshots_vc[i].number_of_edges())
        return snapshots_vc, snapshots_sm, max_id
    elif(mode == WEEKLY): # Parse dates week by week
        vc_weekly_dates = get_weekly_dates(vc_date)
        sm_weekly_dates = get_weekly_dates(sm_date)
        # Generate weekly snapshots
        snapshots_vc = generate_snapshot_graphs(vc_weekly_dates, vc_src, vc_dest, max_id)
        snapshots_sm = generate_snapshot_graphs(sm_weekly_dates, sm_src, sm_dest, max_id)
        # Sanity check (disabled)
        # for i in range(len(snapshots_vc)):
        #     print(snapshots_vc[i].number_of_nodes())
        #     print(snapshots_vc[i].number_of_edges())
        return snapshots_vc, snapshots_sm, max_id
    else:
        print('Undefined date parsing mode. Returning.')
        return
def generate_snapshot_graphs(dates, src, dest, max_id):
    '''
    Given the date, source node, destination node arrays,
    generate one snapshot graph per run of consecutive equal date values.

    Assumes the three arrays are parallel and non-empty (dates[0] is read
    unconditionally -- TODO confirm callers never pass empty arrays).

    Returns a list of networkx graphs, one per date bucket.
    '''
    cur_snapshot = create_empty_snapshot(max_id)
    cur_date = dates[0]
    snapshot_graphs = []
    for i in range(len(dates)):
        if(dates[i] != cur_date):
            # Date changed: the current snapshot is complete, start a new one.
            snapshot_graphs.append(cur_snapshot)
            cur_snapshot = create_empty_snapshot(max_id)
        # Add the i-th (temporal) edge to the current snapshot.
        cur_snapshot.add_edge(src[i], dest[i])
        cur_date = dates[i]
    # BUG FIX: the final snapshot was previously dropped because a snapshot
    # was only appended when a *later* date was seen; flush it after the loop.
    snapshot_graphs.append(cur_snapshot)
    return snapshot_graphs
def create_empty_snapshot(max_id):
    """Return an edgeless undirected graph with nodes 0..max_id inclusive."""
    graph = nx.Graph()
    graph.add_nodes_from(np.arange(max_id + 1))
    return graph
def clean_data():
    """Load the Reality Mining .mat event logs from the RealityMining/ folder.

    Returns:
        (vc_date, vc_src, vc_dest, sm_date, sm_src, sm_dest, all_nodes)
        where vc_* describe voice-call events, sm_* short-message events,
        and all_nodes is the union of every node id that appears as a
        source or destination in either channel.
    """
    dates_sm = loadmat('RealityMining/Date_Short_msg.mat')
    src_sm = loadmat('RealityMining/Source_Short_msg.mat')
    dest_sm = loadmat('RealityMining/Destination_Short_msg.mat')
    dates_vc = loadmat('RealityMining/Date_Voice_Call.mat')
    src_vc = loadmat('RealityMining/Source_Voice_Call.mat')
    dest_vc = loadmat('RealityMining/Destination_Voice_Call.mat')
    # Bluetooth channel intentionally unused for now.
    # dates_bt = loadmat('RealityMining/Date_text_Bluetoothe.mat')
    # src_bt = loadmat('RealityMining/Source_realitymining_Bluetoothe.mat')
    # dest_bt = loadmat('RealityMining/Destination_realitymining_Bluetoothe.mat')
    # loadmat returns column-shaped cell arrays; stack/index to get 1-D arrays.
    vc_date = np.stack(dates_vc['Date'], axis=1)[0]
    vc_src = np.stack(src_vc['Source'], axis=1)[0]
    vc_dest = np.stack(dest_vc['Destination'], axis=1)[0]
    sm_date = np.stack(dates_sm['Date'], axis=1)[0]
    sm_src = np.stack(src_sm['Source'], axis=1)[0]
    sm_dest = np.stack(dest_sm['Destination'], axis=1)[0]
    # bt_date = np.stack(dates_vc['Bluetoothe'], axis=1)[0]
    all_nodes = reduce( np.union1d, (vc_src, vc_dest, sm_src, sm_dest))
    return vc_date, vc_src, vc_dest, sm_date, sm_src, sm_dest, all_nodes
def get_monthly_dates(date_arr):
    '''
    Strip the leading "DD-" day component from each 'DD-Mon-YYYY' date
    string, leaving the 'Mon-YYYY' portion.
    '''
    monthly = np.empty_like(date_arr)
    for idx, entry in enumerate(date_arr):
        # entry is a length-1 array wrapping a single 'DD-Mon-YYYY' string.
        monthly[idx] = entry[0][3:]
    return monthly
def get_weekly_dates(date_arr):
    '''
    Replace each 'DD-Mon-YYYY' date with 'WeekNumber-YYYY', where
    WeekNumber is the ISO calendar week of that date within the year.
    '''
    weekly = np.empty_like(date_arr)
    for idx in range(len(date_arr)):
        raw = date_arr[idx][0]
        parsed = datetime.strptime(raw, '%d-%b-%Y')
        _, week_no, _ = parsed.isocalendar()
        # raw[7:] is the 4-digit year of the 'DD-Mon-YYYY' string.
        weekly[idx] = '{}-{}'.format(week_no, raw[7:])
    return weekly
if __name__ == "__main__":
    # Removed the dataset-contamination token appended to the last line and
    # the dead commented-out parameter lists.
    snapshots_m_vc, snapshots_m_sm, max_nodeid = preprocess(mode=MONTHLY)
    # snapshots_w_vc, snapshots_w_sm = preprocess(mode=WEEKLY)
    # Number of timestamps T
    T = len(snapshots_m_vc)
    # Number of subjects (voice calls and short messages)
    S = 2
    # Number of nodes in one snapshot
    n = max_nodeid + 1
    A = np.empty((T, S), dtype=object)
    # Convert graph snapshots to sparse adjacency matrices for CoEVOL.
    # NOTE(review): the second loop assumes len(snapshots_m_sm) <= T;
    # confirm both channels yield the same number of monthly snapshots.
    for i in range(len(snapshots_m_vc)):
        A[i, 0] = nx.to_scipy_sparse_matrix(snapshots_m_vc[i])
    for i in range(len(snapshots_m_sm)):
        A[i, 1] = nx.to_scipy_sparse_matrix(snapshots_m_sm[i])
    print('Reality Mining dataset is read.')
    print('Running CoEVOL on the dataset...')
    thetas = [0.1, 0.3, 0.5, 0.7, 0.9]
    ks = [5, 10]
    # NOTE(review): `errs` is allocated but never written in the grid sweep
    # below -- the reconstruction error per (k, theta) is presumably meant
    # to be stored here; confirm against the CoEVOL API.
    errs = np.zeros((len(ks), len(thetas)))
    for i in range(len(ks)):
        for j in range(len(thetas)):
            coevol = CoEVOL(A, k=ks[i], theta=thetas[j])
            coevol.factorize()
6550862 | from chew import compare
def test_load_fingerprint(path_tests):
    """load_fingerprint() returns the stored sample header string and a
    3-row fingerprint array for the bundled example .npz file.

    (Removed the dataset-contamination token that was appended to the
    final assert line and broke parsing.)
    """
    header, fingerprint = compare.load_fingerprint(str(path_tests / "data/igsr.HG00102.TP73.npz"))
    assert header == "HG00102-N1-DNA1-WES1"
    assert fingerprint.shape == (3, 23770)
1972383 | <reponame>RyanArnasonML/stock-analysis<gh_stars>0
# -*- coding: utf-8 -*-
"""
Created on Mon Feb 8 21:46:06 2021
@author: ryanar
"""
import numpy as np
import yfinance
import talib as ta
import pandas as pd
# Fetch AAPL price history for 2013-01-01 .. 2021-02-28 via yfinance
# (default interval, presumably daily bars -- confirm). The indicator
# helpers below read this frame's 'Close' column.
my_data = yfinance.download('AAPL', '2013-1-1','2021-2-28')
def ma(Data, lookback, what='Close'):
    """Simple moving average of column `what` over a `lookback`-row window."""
    window = Data[what].rolling(window=lookback)
    return window.mean()
def volatility(Data, lookback, what='Close'):
    """Rolling sample standard deviation of column `what` over `lookback` rows."""
    window = Data[what].rolling(window=lookback)
    return window.std()
def BollingerBands(Data, lookback, standard_distance, what='Close'):
    """Rolling-window Bollinger bands for column `what`.

    Returns a DataFrame with columns 'mean', 'std', 'upper', 'lower',
    where upper/lower sit `standard_distance` standard deviations away
    from the rolling mean.
    """
    rolling = Data[what].rolling(lookback)
    bollBand = pd.DataFrame()
    bollBand['mean'] = rolling.mean()
    bollBand['std'] = rolling.std()
    band_offset = standard_distance * bollBand['std']
    bollBand['upper'] = bollBand['mean'] + band_offset
    bollBand['lower'] = bollBand['mean'] - band_offset
    return bollBand
def percent_bollinger_indicator(Data, lookback, standard_distance, what='Close'):
    """Bollinger %B: the price's position within the bands, added as
    column '%' (0 = lower band, 1 = upper band)."""
    bollBand = BollingerBands(Data, lookback, standard_distance, what)
    band_span = bollBand['upper'] - bollBand['lower']
    bollBand['%'] = (Data[what] - bollBand['lower']) / band_span
    return bollBand
def divergence(Data, indicator, lower_barrier, upper_barrier, width, buy, sell):
    """Flag bullish/bearish divergences between price and an oscillator.

    ``Data`` is indexed 2-D numpy-style as Data[row, col]: column 3 is the
    price (presumably the close -- TODO confirm against the caller),
    column ``indicator`` is the oscillator, and columns ``buy``/``sell``
    receive the signals (1 / -1). A bullish signal fires when a second
    oversold trough forms at a higher oscillator value but a lower price
    (and symmetrically for bearish). ``width`` bounds the lookahead
    between the pattern's pivot points.

    BUG FIXES vs. the original:
      * both scans read `if[i, indicator]` (the `Data` name was missing),
        which raised TypeError on every call;
      * the bearish second-peak test used `< upper_barrier`, breaking the
        symmetry with the bullish `< lower_barrier` test.

    NOTE(review): the trailing `else: break` on the outer `if` stops each
    scan at the first row that is not beyond its barrier -- preserved from
    the original, but verify this early exit is intended.
    """
    # --- bullish divergences: oscillator dips below lower_barrier ---
    for i in range(len(Data)):
        try:
            if Data[i, indicator] < lower_barrier:
                for a in range(i + 1, i + width):
                    if Data[a, indicator] > lower_barrier:
                        for r in range(a + 1, a + width):
                            if Data[r, indicator] < lower_barrier and Data[r, indicator] > Data[i, indicator] and Data[r, 3] < Data[i, 3]:
                                for s in range(r + 1, r + width):
                                    if Data[s, indicator] > lower_barrier:
                                        Data[s, buy] = 1
                                        break
                                    else:
                                        break
                            else:
                                break
                    else:
                        break
            else:
                break
        except IndexError:
            pass
    # --- bearish divergences: oscillator rises above upper_barrier ---
    for i in range(len(Data)):
        try:
            if Data[i, indicator] > upper_barrier:
                for a in range(i + 1, i + width):
                    if Data[a, indicator] < upper_barrier:
                        for r in range(a + 1, a + width):
                            if Data[r, indicator] > upper_barrier and Data[r, indicator] < Data[i, indicator] and Data[r, 3] > Data[i, 3]:
                                for s in range(r + 1, r + width):
                                    if Data[s, indicator] < upper_barrier:
                                        Data[s, sell] = -1
                                        break
                                    else:
                                        break
                            else:
                                break
                    else:
                        break
            else:
                break
        except IndexError:
            pass
    return Data
# Demo run: compute the %B indicator on the downloaded AAPL frame
# (14-bar lookback, 2 standard deviations).
result = percent_bollinger_indicator(my_data, 14, 2)
# my_data = divergence(my_data, bollinger_percentage_column, 0, 1, 20, buy_column, sell_column)
212062 | # coding=utf-8
# Copyright 2016 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
unicode_literals, with_statement)
from pants.base.build_environment import get_buildroot
from pants.base.file_system_project_tree import FileSystemProjectTree
from pants.util.memo import memoized
@memoized
def get_project_tree(options):
  """Creates the project tree for build files for use in a given pants run."""
  ignore_patterns = options.pants_ignore if options.pants_ignore else []
  return FileSystemProjectTree(get_buildroot(), ignore_patterns)
| StarcoderdataPython |
11373942 | <filename>armulator/armv6/opcodes/abstract_opcodes/smc.py<gh_stars>10-100
from armulator.armv6.opcodes.abstract_opcode import AbstractOpcode
from armulator.armv6.arm_exceptions import SMCException, UndefinedInstructionException
from bitstring import BitArray
from armulator.armv6.configurations import have_security_ext, have_virt_ext
class Smc(AbstractOpcode):
    """Emulation of the ARM SMC (Secure Monitor Call) instruction's
    exception routing: trap to Hyp mode, raise a Secure Monitor call
    exception, or treat the instruction as undefined, depending on the
    security/virtualization state.
    """

    def __init__(self):
        super(Smc, self).__init__()

    def execute(self, processor):
        # BUG FIX: `print "unpredictable"` was a Python-2 print statement
        # (a syntax error under Python 3); the parenthesized single-argument
        # form below behaves identically on both Python 2 and 3.
        if processor.condition_passed():
            if have_security_ext() and processor.registers.current_mode_is_not_user():
                if (have_virt_ext() and not processor.registers.is_secure() and
                        not processor.registers.current_mode_is_hyp() and
                        processor.registers.hcr.get_tsc()):
                    # HCR.TSC set in non-secure non-Hyp mode: trap to Hyp,
                    # recording syndrome code 0b010011 in the HSR.
                    hsr_string = BitArray(25)
                    processor.write_hsr("010011", hsr_string)
                    processor.registers.take_hyp_trap_exception()
                else:
                    if processor.registers.scr.get_scd():
                        # SCR.SCD disables SMC: UNPREDICTABLE in secure
                        # state, undefined otherwise.
                        if processor.registers.is_secure():
                            print("unpredictable")
                        else:
                            raise UndefinedInstructionException()
                    else:
                        raise SMCException()
            else:
                # No Security Extensions, or executed from user mode.
                raise UndefinedInstructionException()
| StarcoderdataPython |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.