text stringlengths 38 1.54M |
|---|
"""Exceptions module."""
class NotASuite(Exception):
    """Raised when a value that should be a card suite is not one."""
    pass
class NumberOutsideOfRange(Exception):
    """Raised when a number falls outside the allowed range."""
    pass
class NotADeck(Exception):
    """Raised when an invalid deck type is requested."""
    pass
class DeckLimitExceeded(Exception):
    """Raised when the deck limit is exceeded."""
    pass
|
from datetime import timedelta
from django.db import models
from django.db.models import F, ExpressionWrapper, OuterRef, Max, Subquery
from django.utils import timezone
from duckevents.models import FeedEntry
from background_task import background
def _clone_feed_entry(entry):
    """
    "Clones" <entry> by removing the reference to the given entry's primary key
    so a new DB entity is created when saved instead of updated.

    Note: Doing it this way for simplicity's sake as we don't currently have any
    nested objects we need to clone as well. `schedule` will still point to the
    existing schedule, which is what we want.
    """
    entry.pk = None
    # Next occurrence: previous date advanced by the schedule's interval in days.
    entry.date += timedelta(days=entry.schedule.days)
    entry.created = timezone.now()
    return entry
@background(schedule=5)
def insert_scheduled_events():
    """
    Creates new FeedEntry events for existing events that are overdue for a new
    event based on their schedule.

    1. Find the latest event per schedule (max date).
    2. For each such event whose next due date (date + schedule.days) has
       already passed, insert a copy with the date advanced by schedule.days.
    """
    current_date = timezone.now()
    # Find FeedEntries that are overdue for a new event based on their schedule
    qs = (
        FeedEntry.objects
        .filter(
            date=Subquery(
                # Latest event (max date) per schedule
                FeedEntry.objects
                .filter(schedule=OuterRef('schedule'))
                .values('schedule')
                .annotate(last_event_date=Max('date'))
                .values('last_event_date')[:1]
            )
        )
        # Calculate the date the next event should happen
        .annotate(next_date=ExpressionWrapper(
            F('date') + timedelta(days=1) * F('schedule__days'),
            output_field=models.DateTimeField(),
        ))
        # Only select entries that are overdue: their next event date has
        # already passed. (Bug fix: the previous `next_date__gte` selected
        # entries that were NOT yet due.)
        .filter(next_date__lte=current_date)
    )
    # Make in-memory copies of the overdue events
    new_entries = list(map(_clone_feed_entry, qs))
    if new_entries:
        # Bug fix: persist the clones. Passing `qs` here re-evaluated the
        # queryset and never saved the cloned entries.
        FeedEntry.objects.bulk_create(new_entries)
|
from nltk import pos_tag, ne_chunk
from nltk.tree import Tree
from utils import tokenize
def nltk_ner(text):
    """Extract organization entities from *text*.

    Returns a list of (sentence_index, organization_name) tuples, one per
    organization found by NLTK's named-entity chunker.
    """
    results = []
    for sent_idx, sent_tokens in enumerate(tokenize(text)):
        results.extend((sent_idx, org) for org in extract_orgs(sent_tokens))
    return results
def extract_orgs(tokenized_sent):
    """Return ORGANIZATION entity strings found in one tokenized sentence.

    POS-tags the tokens, runs NLTK's NE chunker, and joins each
    ORGANIZATION subtree's tokens back into a single space-separated string.
    """
    tagged = pos_tag(tokenized_sent)
    return [
        " ".join(token for token, _tag in subtree)
        for subtree in ne_chunk(tagged)
        if isinstance(subtree, Tree) and subtree.label() == "ORGANIZATION"
    ]
|
#!/usr/bin/env python
# encoding: utf-8
'''
icgc_mutation_zscores.py
Created by Joan Smith
on 2017-6-18
Calculate zscores for each ICGC mutation file
Copyright (c) 2018. All rights reserved.
'''
import argparse
import sys
import os
import glob
import pandas as pd
import numpy as np
sys.path.append('../common/')
import utilities as util
import analysis
# Consequence types treated as (potentially) function-altering mutations;
# rows with any other consequence type are dropped from the analysis.
INCLUDED_MUTATIONS = ['disruptive_inframe_deletion',
                      'disruptive_inframe_insertion',
                      'frameshift_variant',
                      'inframe_deletion',
                      'missense_variant',
                      'splice_acceptor_variant',
                      'splice_donor_variant',
                      'stop_gained',
                      'stop_lost']
# A gene is analyzed only if mutated in at least this fraction of patients.
MUTATION_PERCENT = .02
def get_options():
    """Parse command-line options.

    Returns (input_directory, clinical_directory, output_directory,
    annotation_file); output_directory defaults to the current directory.
    """
    parser = argparse.ArgumentParser(description='Get permutation and thread counts')
    for flag, dest in (('-i', 'input_directory'),
                       ('-c', 'clinical_directory'),
                       ('-a', 'annotation_file')):
        parser.add_argument(flag, action='store', dest=dest)
    parser.add_argument('-o', action='store', dest='output_directory', default='.')
    opts = parser.parse_args()
    return (opts.input_directory, opts.clinical_directory,
            opts.output_directory, opts.annotation_file)
def get_icgc_cancer_type(f):
    """Return the cancer-type field of a dot-separated ICGC file name.

    The cancer type is the third dot-separated field, e.g.
    'simple_somatic_mutation.open.BRCA-US.tsv' -> 'BRCA-US'.
    """
    pieces = f.split('.')
    return pieces[2]
def prep_annotations(mutation_ensgs, hgnc):
# get the ensgs we care about
unique_ensgs = mutation_ensgs.to_frame().set_index('gene_affected').index.unique()
unique_ensgs = pd.Series([True], name='mutation_ensgs', index=unique_ensgs)
# only hold annotations for genes in our mutation file
annotations = hgnc.join(unique_ensgs, how='right')
# some ensgs don't have symbols, for these, carry through the ensg
annotations = annotations.reset_index()
annotations['Symbol'] = annotations['Symbol'].fillna(annotations['index'])
annotations = annotations.set_index('index')
annotations = annotations['Symbol']
return annotations
def prep_data(mutation_file, clinical_file, hgnc):
    """Load mutation + clinical data and reduce both to shared patients.

    Returns (mutation df indexed by (gene symbol, donor id), clinical df of
    patients that were also sequenced, number of such patients).
    NOTE: this file is Python 2 (print statements).
    """
    clinical = pd.read_csv(clinical_file, index_col=0, low_memory=False)
    relevant_clinical = clinical[[u'Time', u'Censor']].astype(float)
    mutation_data = pd.read_csv(mutation_file, sep='\t')
    # Keep only (potentially) function-altering mutation types.
    mutation_data = mutation_data[mutation_data[u'consequence_type'].isin(INCLUDED_MUTATIONS)]
    number_patients_in_mutation_data = mutation_data[u'icgc_donor_id'].unique().size
    print 'Number of total sequenced patients: ', number_patients_in_mutation_data
    # Reduce mutation data to patients that also have clinical data
    df = mutation_data.join(relevant_clinical, on='icgc_donor_id', how='inner')
    # Map ensg ids onto (quoted) HGNC symbols.
    annotations = prep_annotations(df['gene_affected'], hgnc)
    df['gene_affected'] = df['gene_affected'].map(annotations)
    df.set_index([u'gene_affected', u'icgc_donor_id'], inplace=True)
    # symmetrically filter clinical data down to patients that were also sequenced
    unique_patients = df.index.get_level_values('icgc_donor_id').unique()
    unique_patients_df = pd.DataFrame(unique_patients, index=unique_patients)
    clinical_data_with_sequenced_patients = relevant_clinical.join(unique_patients_df, how='inner')
    num_patients = clinical_data_with_sequenced_patients.shape[0]
    print 'Number of patients with sequence and clinical data: ', num_patients
    return df, clinical_data_with_sequenced_patients, num_patients
def calculate_zscores_for_file(mutation_file, clinical_file, outdir, hgnc):
    """Run a Cox regression per sufficiently-mutated gene; write zscores to CSV.

    A gene qualifies when it is mutated in at least MUTATION_PERCENT of the
    patients that have both sequence and clinical data. One summary CSV is
    written per cancer type, plus a per-gene data CSV for each analyzed gene.
    """
    df, clinical_data_with_sequenced_patients, num_patients = prep_data(mutation_file,
        clinical_file, hgnc)
    cancer_type = get_icgc_cancer_type(mutation_file)
    print cancer_type
    formatstring = '{0}, {1}, {2}, {3}, {4}\n'
    outfile = os.path.join(outdir, cancer_type + '_mutation_percent_'+ str(MUTATION_PERCENT) + '.icgc_zscores.out.csv')
    with open(outfile, 'w') as out:
        out.write('gene,zscore,pvalue,num mutations,num patients\n')
        #for every gene, collect the clinical data with the mutation data.
        patients_with_gene = df.groupby(level=u'gene_affected')
        for gene, gene_df in patients_with_gene:
            mutated_patient_list = gene_df.index.get_level_values('icgc_donor_id').unique()
            num_mutations = len(mutated_patient_list)
            if num_mutations >= MUTATION_PERCENT * num_patients:
                # take the patients with mutations and without, and build an analysis dataframe with time and censor.
                analysis_data = pd.DataFrame(
                    {'mutated': np.ones(num_mutations)}, index=mutated_patient_list)
                analysis_data = analysis_data.join(clinical_data_with_sequenced_patients, how='right')
                # Patients without the mutation get mutated = 0.
                analysis_data['mutated'].fillna(0, inplace=True)
                #Do analysis!
                print 'Doing analysis for %s: mutated %d of %d' % (gene, num_mutations, num_patients)
                time = analysis_data['Time']
                censor = analysis_data['Censor']
                split = analysis_data['mutated']
                cox_dict = analysis.do_cox(time, censor, split)
                if cox_dict['n'] != len(analysis_data['Time']):
                    print 'ERROR'
                # Prefix symbols with a quote so spreadsheet software doesn't
                # reinterpret gene names (e.g. MARCH1) as dates.
                if gene[0] != '\'':
                    gene = '\'' + gene
                out.write(formatstring.format(gene, cox_dict['z'], cox_dict['p'], num_mutations,cox_dict['n']))
                analysis_data.to_csv(os.path.join(outdir, gene[1:] + '_data.csv'),
                    columns=['Time', 'Censor', 'mutated'], index_label='patient')
def main():
    """Compute zscores for every ICGC mutation file in the input directory."""
    indir, clinical_dir, outdir, hgnc_file = get_options()
    files = os.listdir(indir)
    files = util.remove_extraneous_files(files)
    # Build the Ensembl-ID -> Symbol annotation table once for all files.
    hgnc = pd.read_csv(hgnc_file, low_memory=False)
    hgnc = hgnc[['Approved Symbol', 'Ensembl ID(supplied by Ensembl)']]
    hgnc.columns = ['Symbol', 'Ensembl ID']
    hgnc.set_index('Ensembl ID', inplace=True)
    # Quote symbols so spreadsheet software doesn't mangle gene names.
    hgnc['Symbol'] = '\'' + hgnc['Symbol']
    for f in files:
        cancer_type = get_icgc_cancer_type(f)
        print cancer_type
        clinical_file = os.path.join(clinical_dir, cancer_type + '.csv')
        cancer_type_outdir = os.path.join(outdir, cancer_type)
        if not os.path.isdir(cancer_type_outdir):
            os.makedirs(cancer_type_outdir)
        calculate_zscores_for_file(os.path.join(indir, f), clinical_file, cancer_type_outdir, hgnc)
if __name__ == "__main__":
main()
|
from edgetpu.basic import edgetpu_utils

# Report the EdgeTPU runtime version and the device paths that are visible.
version = edgetpu_utils.GetRuntimeVersion()
print(version)
all_edgetpu_paths = edgetpu_utils.ListEdgeTpuPaths(edgetpu_utils.EDGE_TPU_STATE_NONE)
print('Available EdgeTPU Device(s):')
# Bug fix: join with newlines so each device path prints on its own line;
# ''.join ran all paths together into one unreadable string.
print('\n'.join(all_edgetpu_paths))
|
import os
import socket
from cStringIO import StringIO
from fabric.api import run, env, task, sudo, put, parallel, local, runs_once, hide
# Deployment targets: the four IPD hypervisor hosts, accessed as user 'ipd'.
env.hosts = [
    'ipd1.tic.hefr.ch',
    'ipd2.tic.hefr.ch',
    'ipd3.tic.hefr.ch',
    'ipd4.tic.hefr.ch',
]
env.user = 'ipd'
@task
def ping():
    """Sanity-check connectivity and DNS resolution on every host."""
    run('hostname')
    run('hostname -f')
    run('dig $(hostname -f)')
    run('cat /etc/resolv.conf')
@task
def crossping():
    """From each host, ping every other host by FQDN and by short name."""
    for host in env.hosts:
        for target in (host, host.split('.', 1)[0]):
            run('ping -c 1 {}'.format(target))
@task
@parallel
def restartlibvirt():
    """Restart the libvirt daemon on all hosts (in parallel)."""
    sudo('service libvirt-bin restart')
@task
@parallel
def addkey(key):
    """
    Append the public key file *key* to ~/.ssh/authorized_keys on every host.

    You may need to set the password through the environment if this is the
    first time you add a key.
    """
    run('mkdir -p ~/.ssh')
    run('chmod 0700 ~/.ssh')
    # Upload to a temporary file, append it, then clean up.
    put(key, '~/key.tmp')
    run('cat ~/key.tmp >> ~/.ssh/authorized_keys')
    run('chmod 0600 ~/.ssh/authorized_keys')
    run('rm -f ~/key.tmp')
@task
@runs_once
def gencacert(path='workdir/pki'):
    """Generate a self-signed CA key and certificate under *path* (locally).

    Uses GnuTLS certtool; the certificate fields come from a template file.
    """
    keyfile = os.path.join(path, 'ca.key.pem')
    infofile = os.path.join(path, 'ca.info')
    certfile = os.path.join(path, 'ca.crt.pem')
    # Bug fix: create the output directory *before* writing the template
    # file; previously open() failed when workdir/pki did not exist yet.
    local('mkdir -p {}'.format(path))
    with open(infofile, 'w') as fh:
        fh.write('cn = Integrated Projects Deployer\n'
                 'ca\n'
                 'cert_signing_key\n')
    local('certtool --generate-privkey > {}'.format(keyfile))
    with hide('output'):
        local((
            'certtool '
            '--generate-self-signed '
            '--load-privkey {} '
            '--template {} '
            '--outfile {}'
        ).format(keyfile, infofile, certfile))
@task
@parallel
def instcacert(path='workdir/pki/ca.crt.pem'):
    """Install the locally generated CA certificate on every host."""
    remote_path = '/etc/pki/libvirt/ca.pem'
    sudo('mkdir -p /etc/pki/libvirt')
    put(path, remote_path, use_sudo=True)
    sudo('chown root:root {}'.format(remote_path))
@task
@parallel
def gencerts(path='workdir/pki'):
    """Generate (locally) a per-host server key/certificate signed by the CA.

    Runs once per host (env.host) so each host gets its own key pair;
    gencacert must have run first so the CA key and certificate exist.
    """
    keyfile = os.path.join(path, '{}.key.pem'.format(env.host))
    infofile = os.path.join(path, '{}.info'.format(env.host))
    certfile = os.path.join(path, '{}.crt.pem'.format(env.host))
    cacert = 'workdir/pki/ca.crt.pem'
    cakey = 'workdir/pki/ca.key.pem'
    local('mkdir -p {}'.format(path))
    local('certtool --generate-privkey > {}'.format(keyfile))
    # certtool reads the certificate fields from a template file.
    with open(infofile, 'w') as fh:
        fh.write((
            'organization = Integrated Projects Deployer\n'
            'cn = {}\n'
            'tls_www_server\n'
            'encryption_key\n'
            'signing_key\n'
        ).format(env.host))
    with hide('output'):
        local((
            'certtool '
            '--generate-certificate '
            '--load-privkey {} '
            '--load-ca-certificate {} '
            '--load-ca-privkey {} '
            '--template {} '
            '--outfile {}'
        ).format(keyfile, cacert, cakey, infofile, certfile))
@task
@parallel
def instcerts(path='workdir/pki'):
    """Install each host's server key and certificate (from gencerts)."""
    keyfile = os.path.join(path, '{}.key.pem'.format(env.host))
    certfile = os.path.join(path, '{}.crt.pem'.format(env.host))
    sudo('mkdir -p /etc/pki/libvirt/private')
    put(keyfile, '/etc/pki/libvirt/private/serverkey.pem', use_sudo=True)
    put(certfile, '/etc/pki/libvirt/servercert.pem', use_sudo=True)
    sudo('chown root:root '
         '/etc/pki/libvirt/private/serverkey.pem '
         '/etc/pki/libvirt/servercert.pem')
@task
@runs_once
def genclientcert(path='workdir/pki'):
    """Generate (locally, once) a TLS client key/certificate signed by the CA."""
    keyfile = os.path.join(path, 'client.key.pem')
    infofile = os.path.join(path, 'client.info')
    certfile = os.path.join(path, 'client.crt.pem')
    cacert = 'workdir/pki/ca.crt.pem'
    cakey = 'workdir/pki/ca.key.pem'
    local('mkdir -p {}'.format(path))
    local('certtool --generate-privkey > {}'.format(keyfile))
    # certtool reads the certificate fields from a template file.
    with open(infofile, 'w') as fh:
        fh.write(
            'country = CH\n'
            'state = Fribourg\n'
            'locality = Fribourg\n'
            'organization = Integrated Project Deployer\n'
            'cn = ipd_client1\n'
            'tls_www_client\n'
            'encryption_key\n'
            'signing_key\n'
        )
    with hide('output'):
        local((
            'certtool '
            '--generate-certificate '
            '--load-privkey {} '
            '--load-ca-certificate {} '
            '--load-ca-privkey {} '
            '--template {} '
            '--outfile {}'
        ).format(keyfile, cacert, cakey, infofile, certfile))
@task
def setuppki():
    """Full PKI setup: CA, per-host server certs, client cert, local trust."""
    gencacert()
    gencerts()
    instcacert()
    instcerts()
    genclientcert()
    localpki()
@task
@runs_once
def localpki():
    """Install the generated CA certificate into the local trust store."""
    local('sudo mkdir -p /etc/pki/CA/')
    local('sudo cp workdir/pki/ca.crt.pem /etc/pki/CA/cacert.pem')
@task
@parallel
def conflibvirt():
    """Push libvirt/qemu configuration to every host and restart libvirt."""
    put('workdir/libvirtd.conf', '/etc/libvirt/libvirtd.conf', use_sudo=True)
    put('workdir/qemu.conf', '/etc/libvirt/qemu.conf', use_sudo=True)
    sudo('chown root:root /etc/libvirt/*.conf')
    restartlibvirt()
@task
@parallel
def bootstrap():
    """Initial host setup: network config, rc.local, and the base ISO image."""
    ip = socket.gethostbyname(env.host)
    # Setup network interface: render the template with this host's IP and
    # upload it from an in-memory file object.
    with open('workdir/config/network.conf') as fh:
        conf = fh.read()
    conf = StringIO(conf.format(ip_address=ip))
    put(conf, '/etc/network/interfaces', use_sudo=True)
    sudo('chown root:root /etc/network/interfaces')
    sudo('service networking restart')
    # Setup rc.local (same templating scheme).
    with open('workdir/config/rc.local') as fh:
        conf = fh.read()
    conf = StringIO(conf.format(ip_address=ip))
    # NOTE: 0755 is a Python 2 octal literal; this fabfile targets Python 2
    # (see also the cStringIO import at the top of the file).
    put(conf, '/etc/rc.local', use_sudo=True, mode=0755)
    sudo('chown root:root /etc/rc.local')
    # Get ubuntu base image
    sudo('mkdir -p /var/lib/ipd/images /var/lib/ipd/cd')
    sudo('wget -O /var/lib/ipd/cd/ubuntu.iso \'http://www.ubuntu.com/start-download?distro=server&bits=64&release=latest\'')
    # Correct apparmor config for libvirt
    # https://help.ubuntu.com/community/AppArmor
    # https://bugs.launchpad.net/ubuntu/+source/libvirt/+bug/1204616
    # Install base disk images
    # - windows
    # - ubuntu
@task
@parallel
def reboot():
    """Reboot every host immediately."""
    sudo('shutdown -r now ')
@task
@runs_once
def redis():
    """Start the IPD redis container locally.

    Bug fix: `--volumes-from` takes two dashes; the single-dash form was
    parsed by docker as a bundle of short flags and the command failed.
    """
    local('docker run -d -p 6379:6379 --volumes-from ipd-redis-data harbor.wsfcomp.com/redis')
@task
@parallel
def installipd():
    """Install the IPD package, at the locally built version, on every host."""
    version = local('python setup.py --version', capture=True)
    name = local('python setup.py --name', capture=True)
    # Build dependencies required to pip-install the package from source.
    sudo('apt-get install -y python-pip python-dev libxml2-dev libxslt-dev zlib1g-dev')
    sudo('pip install {}=={}'.format(name, version))
|
# Build a mood vector for each month by averaging the emotion vectors of all
# face URLs collected for that month.
import pickle, time
from buildEmotionDatabase import getEmotionsVector

# Bug fix: open the pickle files with context managers so the handles are
# closed (they were previously passed to pickle.load/dump and leaked).
with open("faceUrls.pkl", "rb") as url_file:
    UrlDict = pickle.load(url_file)

moodDict = {}
count = 0
for month in UrlDict.keys():
    print(month)
    moodArray = [0, 0, 0, 0, 0, 0, 0, 0]  # one accumulator per emotion
    numUrls = len(UrlDict[month])
    for url in UrlDict[month]:
        count += 1
        # The emotion API allows at most 20 calls/minute; pause periodically.
        if count % 19 == 0:
            print("PAUSE!!!!!!!!!!!!")
            time.sleep(60)
        urlEmos = getEmotionsVector(url)
        if len(urlEmos) == 8:
            for i in range(0, 8):
                moodArray[i] += urlEmos[i]
        else:
            # Unusable response: exclude this URL from the month's average.
            numUrls -= 1
    if numUrls != 0:
        for i in range(0, 8):
            moodArray[i] /= numUrls
    moodDict[month] = moodArray
print(moodDict)
with open("monthMoods.pkl", "wb") as out_file:
    pickle.dump(moodDict, out_file)
|
# 1052. Grumpy Bookstore Owner
class Solution:
    def maxSatisfied(self, C: List[int], G: List[int], X: int) -> int:
        """Maximum satisfied customers given one X-minute calm window.

        C[i] is the customer count in minute i; G[i] is 1 when the owner is
        grumpy then (those customers are normally lost). Sliding window:
        start from the window covering the first X minutes, then slide it
        right one minute at a time, tracking the best total.
        """
        n = len(C)
        # Total with the calm window over minutes [0, X): everyone in the
        # window plus customers after it served in non-grumpy minutes.
        best = current = sum(C[:X]) + sum(C[i] for i in range(X, n) if G[i] == 0)
        for right in range(X, n):
            # Minute `right` enters the window; minute `right - X` leaves it.
            if G[right] == 1:
                current += C[right]
            if G[right - X] == 1:
                current -= C[right - X]
            best = max(best, current)
        return best
# Read two strings; print their concatenation, or a notice if they match.
str1 = input("Enter a string: ")
str2 = input("Enter another: ")
# Idiom fix: use != rather than `not a == b`.
if str1 != str2:
    print(str1 + str2)
else:
    print("Two strings are identical.")
|
"""Support for GitHub Actions."""
from typing import cast, Any, Dict
import time
import jwt
from gidgethub.abc import GitHubAPI
def get_jwt(*, app_id: str, private_key: str) -> str:
    """Construct the JWT (JSON Web Token) used for GitHub App authentication."""
    now = int(time.time())
    claims = {
        "iat": now,            # issued at: now
        "exp": now + 10 * 60,  # expires in ten minutes
        "iss": app_id,         # issuer: the GitHub App id
    }
    return jwt.encode(claims, private_key, algorithm="RS256")
async def get_installation_access_token(
    gh: GitHubAPI, *, installation_id: str, app_id: str, private_key: str
) -> Dict[str, Any]:
    """Obtain a GitHub App's installation access token.

    Returns a dictionary containing the access token and its expiration,
    for example: {"token": "v1.1f699f1069f60xxx",
                  "expires_at": "2016-07-11T22:14:10Z"}.
    (https://docs.github.com/en/free-pro-team@latest/rest/reference/apps#create-an-installation-access-token-for-an-app)
    """
    # Authenticate as the app itself to mint the installation token.
    app_jwt = get_jwt(app_id=app_id, private_key=private_key)
    response = await gh.post(
        f"/app/installations/{installation_id}/access_tokens",
        data=b"",
        jwt=app_jwt,
    )
    return cast(Dict[str, Any], response)
|
def find_matching_strings(words):
    """Tally case-insensitive, distinct-spelling, and exact word counts.

    Returns [counts_by_lower, distinct_spellings, counts_by_exact]:
      * counts_by_lower: total occurrences keyed by lowercased word
      * distinct_spellings: number of distinct spellings seen per
        lowercased word (e.g. 'word'/'Word'/'WoRd' -> 3)
      * counts_by_exact: occurrences keyed by the exact spelling

    Fixes: the parameter was renamed from `list` (it shadowed the builtin),
    and a dead `map(lambda x: x.lower, ...)` line — missing call parentheses
    and its result never used — was removed.
    """
    counts_by_lower = {}
    counts_by_exact = {}
    distinct_spellings = {}
    for word in words:
        lower = word.lower()
        if lower not in counts_by_lower:
            # First sighting of this word in any spelling.
            counts_by_lower[lower] = 1
            counts_by_exact[word] = 1
            distinct_spellings[lower] = 1
        else:
            if word not in counts_by_exact:
                # First time we see this exact spelling of a known word.
                counts_by_exact[word] = 1
                distinct_spellings[lower] = distinct_spellings[lower] + 1
            else:
                counts_by_exact[word] = counts_by_exact[word] + 1
            counts_by_lower[lower] = counts_by_lower[lower] + 1
    return [counts_by_lower, distinct_spellings, counts_by_exact]
words = "word Word word WoRd apple"
words_list = words.split()
# Improvement: call the tally function once and unpack, instead of invoking
# it three times for the same input and discarding two results each time.
count, occurrences, total_words = find_matching_strings(words_list)
for key in count.keys():
    percent = (int(count[key]) / len(words_list)) * 100
    # Collect every exact spelling that maps to this lowercased key.
    words_occurences_list = set()
    for true_key in total_words:
        if true_key.lower() == key:
            words_occurences_list.add(true_key)
    print(key + " " + str(percent) + "% " + str(count[key]) + " total occurrences, " + \
          str(occurrences[key]) + " representations " + str(words_occurences_list))
|
#!/usr/bin/env python
from __future__ import print_function, division
import numpy as np
import h5py
import matplotlib.pyplot as plt
import matplotlib.ticker as ticker
import astropy.units as u
from astropy.coordinates import SkyCoord
from glob import glob
import os
def load_data(fnames):
    """Load and concatenate the 'stellar_phot_spec_ast' table from each file."""
    tables = []
    for fname in fnames:
        print('Loading {:s} ...'.format(fname))
        with h5py.File(fname, 'r') as f:
            tables.append(f['stellar_phot_spec_ast'][:])
    return np.hstack(tables)
def find_ddpayne_repeats(d):
    """Print groups of rows sharing identical (gal_l, gal_b) coordinates.

    Assumes repeat observations of one object carry exactly equal
    coordinates, so grouping by coordinate finds the repeats.
    """
    # Sort by longitude, then latitude
    idx_sort = np.lexsort((d['gal_l'], d['gal_b']))
    # Apply sort
    d_s = d[idx_sort]
    # Find indices where lon or lat change
    idx_split = (np.diff(d_s['gal_l']) > 0.) | (np.diff(d_s['gal_b']) > 0)
    # Boundaries of each coordinate group, including the two ends.
    idx_split = np.hstack([0, np.where(idx_split)[0] + 1, len(d)])
    # Go through each unique object
    for i0,i1 in zip(idx_split[:-1],idx_split[1:]):
        if i1 > i0+1:
            # More than one row in this group: these are the repeats.
            print(d_s[i0:i1])
            print('')
    # Find unique longitudes
    #lon, idx, n = np.unique(d['gal_l'], return_index=True, return_counts=True)
def match_catalogs(d1, d2, max_sep=0.1*u.arcsec):
    """Sky-match d2 against d1 within max_sep.

    Returns (indices into d1 for each matched d2 row, boolean mask over d2
    marking the matches, separations of the kept matches).
    """
    cat1 = SkyCoord(d1['gal_l']*u.deg, d1['gal_b']*u.deg, frame='galactic')
    cat2 = SkyCoord(d2['gal_l']*u.deg, d2['gal_b']*u.deg, frame='galactic')
    nearest, sep, _ = cat2.match_to_catalog_sky(cat1)
    close_enough = (sep < max_sep)
    return nearest[close_enough], close_enough, sep[close_enough]
def match_3_catalogs(d1, d2, d3, max_sep=0.1*u.arcsec):
    """Cross-match three catalogs on the sky.

    Returns (idx1, idx2, idx3): parallel index arrays into d1/d2/d3 covering
    every pairwise or triple match, where -1 marks a catalog that has no
    counterpart for that row.
    """
    c1 = SkyCoord(d1['gal_l']*u.deg, d1['gal_b']*u.deg, frame='galactic')
    c2 = SkyCoord(d2['gal_l']*u.deg, d2['gal_b']*u.deg, frame='galactic')
    c3 = SkyCoord(d3['gal_l']*u.deg, d3['gal_b']*u.deg, frame='galactic')
    def match_pair(x,y):
        # Nearest-neighbour match of y against x within max_sep; returns
        # (matched coords from x, (indices into x, indices into y)).
        idx_x, sep, _ = y.match_to_catalog_sky(x)
        idx = (sep < max_sep)
        idx_xy = (idx_x[idx], np.where(idx)[0])
        xy = x[idx_xy[0]]
        return xy, idx_xy
    def filter_arrs(a_list, idx_remove):
        # Drop positions idx_remove from every array in a_list.
        idx_keep = np.ones(a_list[0].size, dtype='bool')
        idx_keep[idx_remove] = 0
        return [a[idx_keep] for a in a_list]
    # Match catalogs pairwise
    c12, idx_12 = match_pair(c1, c2)
    c23, idx_23 = match_pair(c2, c3)
    c13, idx_13 = match_pair(c1, c3)
    print(f'{len(idx_12[0])} 1&2 matches.')
    print(f'{len(idx_13[0])} 1&3 matches.')
    print(f'{len(idx_23[0])} 2&3 matches.')
    # Match 12 & 13: objects present in all three catalogs.
    c123, idx_12_13 = match_pair(c12, c13)
    print(idx_12_13)
    idx_123 = (
        idx_12[0][idx_12_13[0]], # indices in d1 of 123 matches
        idx_12[1][idx_12_13[0]], # indices in d2 of 123 matches
        idx_13[1][idx_12_13[1]] # indices in d3 of 123 matches
    )
    print(f'{len(idx_123[0])} 1&2&3 matches.')
    # Filter the triple matches out of 13 so they are not double counted.
    idx_13 = filter_arrs(idx_13, idx_12_13[1])
    #idx_keep = np.ones(idx_13.size, dtype='bool')
    #idx_keep[idx_12_13[1]] = 0
    #idx_13 = idx_13[idx_keep]
    # Match 12 & 23
    c123, idx_12_23 = match_pair(c12, c23)
    # Filter the triple matches out of 12 and 23 as well.
    idx_12 = filter_arrs(idx_12, idx_12_23[0])
    idx_23 = filter_arrs(idx_23, idx_12_23[1])
    # Concatenate 12, 13, 23, 123; -1 marks "no counterpart in this catalog".
    idx1 = np.hstack([
        idx_12[0],
        idx_13[0],
        np.full_like(idx_23[0], -1),
        idx_123[0]
    ])
    idx2 = np.hstack([
        idx_12[1],
        np.full_like(idx_13[0], -1),
        idx_23[0],
        idx_123[1]
    ])
    idx3 = np.hstack([
        np.full_like(idx_12[0], -1),
        idx_13[1],
        idx_23[1],
        idx_123[2]
    ])
    return idx1, idx2, idx3
def main():
    """Cross-match GALAH / DDPayne (LAMOST) / APOGEE, cache the matches to
    HDF5, and plot residual and chi histograms of the shared stellar labels."""
    ext = 'pdf'
    fig_dir = 'plots'
    d_galah = load_data(glob('data/galah_data_*to*.h5'))
    d_apogee = load_data(glob('data/dr16_data_*to*.h5'))
    d_ddpayne = load_data(glob('data/ddpayne_data_*to*.h5'))
    # Match GALAH, DDPayne and APOGEE catalogs
    idx_galah, idx_ddpayne, idx_apogee = match_3_catalogs(
        d_galah, d_ddpayne, d_apogee
    )
    d_galah = d_galah[idx_galah]
    d_ddpayne = d_ddpayne[idx_ddpayne]
    d_apogee = d_apogee[idx_apogee]
    # Index -1 means "no counterpart in this survey" for that matched row.
    mask_galah = (idx_galah != -1)
    mask_ddpayne = (idx_ddpayne != -1)
    mask_apogee = (idx_apogee != -1)
    print('{:d} matches.'.format(len(d_galah)))
    # Save matches
    fname = 'data/crossmatches_galah_apogee_ddpayne.h5'
    dset_kw = dict(chunks=True, compression='gzip', compression_opts=3)
    with h5py.File(fname, 'w') as f:
        f.create_dataset('/galah', data=d_galah, **dset_kw)
        f.create_dataset('/ddpayne', data=d_ddpayne, **dset_kw)
        f.create_dataset('/apogee', data=d_apogee, **dset_kw)
        f.create_dataset('/mask_galah', data=mask_galah.astype('u1'), **dset_kw)
        f.create_dataset('/mask_ddpayne', data=mask_ddpayne.astype('u1'), **dset_kw)
        f.create_dataset('/mask_apogee', data=mask_apogee.astype('u1'), **dset_kw)
    #return 0
    # Load matches (round-trips through the cache file written just above)
    print('Loading matches ...')
    fname = 'data/crossmatches_galah_apogee_ddpayne.h5'
    with h5py.File(fname, 'r') as f:
        d_galah = f['/galah'][:]
        d_ddpayne = f['/ddpayne'][:]
        d_apogee = f['/apogee'][:]
        mask_galah = (f['/mask_galah'][:].astype('bool'))
        mask_ddpayne = (f['/mask_ddpayne'][:].astype('bool'))
        mask_apogee = (f['/mask_apogee'][:].astype('bool'))
    print(f'{np.count_nonzero(mask_galah)} GALAH sources.')
    print(f'{np.count_nonzero(mask_ddpayne)} LAMOST sources.')
    print(f'{np.count_nonzero(mask_apogee)} APOGEE sources.')
    # Extract labels from each survey
    params = {
        'teff': {
            'galah': d_galah['teff'],
            'apogee': d_apogee['sdss_aspcap_param'][:,0],
            'lamost': d_ddpayne['ddpayne_teff']
        },
        'logg': {
            'galah': d_galah['logg'],
            'apogee': d_apogee['sdss_aspcap_param'][:,1],
            'lamost': d_ddpayne['ddpayne_logg']
        },
        'feh': {
            'galah': d_galah['feh'],
            'apogee': d_apogee['sdss_aspcap_param'][:,3],
            'lamost': d_ddpayne['ddpayne_feh']
        }
    }
    param_errs = {
        'teff': {
            'galah': d_galah['teff_err'],
            'apogee': d_apogee['sdss_aspcap_teff_err'],
            'lamost': d_ddpayne['ddpayne_teff_err']
        },
        'logg': {
            'galah': d_galah['logg_err'],
            'apogee': d_apogee['sdss_aspcap_logg_err'],
            'lamost': d_ddpayne['ddpayne_logg_err']
        },
        'feh': {
            'galah': d_galah['feh_err'],
            'apogee': d_apogee['sdss_aspcap_m_h_err'],
            'lamost': d_ddpayne['ddpayne_feh_err']
        }
    }
    survey_masks = {
        'galah': mask_galah,
        'apogee': mask_apogee,
        'lamost': mask_ddpayne
    }
    # Residuals in each label: histogram plotting ranges per parameter.
    param_dlims = {
        'teff': (-500., 500.),
        'logg': (-0.5, 0.5),
        'feh': (-0.5, 0.5)
    }
    # Labels
    param_names = ('teff', 'logg', 'feh')
    param_labels = {
        'teff': r'T_{\mathrm{eff}}',
        'logg': r'\log \left( g \right)',
        'feh': r'\left[ \mathrm{Fe} / \mathrm{H} \right]'
    }
    survey_labels = {
        'galah': r'\mathrm{GALAH}',
        'apogee': r'\mathrm{APOGEE}',
        'lamost': r'\mathrm{LAMOST}'
    }
    # Choose one survey to anchor the (teff, logg, feh) scale
    surveys = ('apogee', 'galah', 'lamost')
    # Plot histogram of residuals: one figure per parameter, one row per
    # survey pair (residuals on the left, chi on the right).
    print('Plotting histograms of residuals ...')
    for name in param_names:
        fig,ax_list = plt.subplots(3,2, figsize=(12,13.5))
        fig.suptitle(rf'$\Delta {param_labels[name]}$')
        #for ax,comp in zip(ax_list, comparisons):
        for (ax1,ax2),(k1,k2) in zip(ax_list,((0,2),(1,2),(0,1))):
            comp, anchor = surveys[k1], surveys[k2]
            dval = params[name][comp] - params[name][anchor]
            # Combined variance of the two surveys' reported errors.
            var = (param_errs[name][comp])**2 + (param_errs[name][anchor])**2
            idx = (
                np.isfinite(dval) &
                survey_masks[comp] &
                survey_masks[anchor]
            )
            print(f'{comp} & {anchor}: {np.count_nonzero(idx)} matches.')
            dval = dval[idx]
            var = var[idx]
            # Iteratively estimate the mean offset with 3-sigma clipping.
            idx_chi = np.ones(dval.size, dtype='bool')
            for i in range(3):
                # Inverse-variance weighted mean of the residuals.
                delta_est = (
                    np.mean(dval[idx_chi] / var[idx_chi])
                    / np.mean(1./var[idx_chi])
                )
                chi = (dval - delta_est) / np.sqrt(var)
                idx_chi = (np.abs(chi) < 3.)
                print(f' -> {1.-np.count_nonzero(idx_chi)/idx_chi.size:.3f} rejected.')
            print(f'Delta = {delta_est:.5f}')
            # Left panel: residual histogram with median and offset markers.
            ax1.hist(dval, range=param_dlims[name], bins=100, alpha=0.7)
            ax1.axvline(np.median(dval), c='g', lw=2.0, alpha=0.7)
            ax1.axvline(np.median(delta_est), c='orange', lw=2.0, alpha=0.7)
            ax1.grid('on', alpha=0.25)
            ax1.xaxis.set_major_locator(ticker.AutoLocator())
            ax1.xaxis.set_minor_locator(ticker.AutoMinorLocator())
            txt = (
                r'$'
                f'{survey_labels[comp]} - {survey_labels[anchor]}'
                f' = {delta_est:.5f}'
                r'$'
            )
            ax1.text(
                0.03, 0.95,
                txt,
                ha='left', va='top',
                transform=ax1.transAxes
            )
            # Right panel: chi histogram with a unit-Gaussian overlay.
            _,x_edges,_ = ax2.hist(chi, range=(-5.,5.), bins=100, alpha=0.7)
            dx = x_edges[1] - x_edges[0]
            x_gauss = np.linspace(-5., 5., 1000)
            y_gauss = dx * chi.size * np.exp(-0.5*x_gauss**2) / np.sqrt(2.*np.pi)
            ax2.plot(x_gauss, y_gauss, c='g', alpha=0.5)
            chi2_mean = np.mean(chi[np.abs(chi)<5.]**2)
            txt = (
                r'$'
                r'\langle \chi^2 \rangle = '
                f'{chi2_mean:.3f}'
                r'$'
            )
            ax2.text(
                0.03, 0.95, txt,
                ha='left', va='top',
                transform=ax2.transAxes
            )
            ax2.grid('on', alpha=0.25)
            ax2.xaxis.set_major_locator(ticker.AutoLocator())
            ax2.xaxis.set_minor_locator(ticker.AutoMinorLocator())
        # Only the bottom row keeps its x tick labels.
        for ax in ax_list.flat[:-2]:
            ax.set_xticklabels([])
        for ax in ax_list.flat:
            ax.set_yticklabels([])
        ax_list[2,0].set_xlabel(rf'$\Delta {param_labels[name]}$')
        ax_list[2,1].set_xlabel(rf'$\chi \left( {param_labels[name]} \right)$')
        fig.subplots_adjust(
            top=0.94, bottom=0.06,
            left=0.05, right=0.95,
            hspace=0.05, wspace=0.05
        )
        fig.savefig(os.path.join(fig_dir, f'resid_hist_{name}.{ext}'), dpi=150)
        plt.close(fig)
    return 0
if __name__ == '__main__':
main()
|
# -*- coding:utf-8 -*-
import random
"""
t={
"F1":(1,f1)#数量,函数
}"""
def random_func(random_dict):
    """Pick one entry with probability proportional to its weight.

    random_dict maps name -> (weight, func); returns the chosen func.
    """
    pool = []
    for name in random_dict:
        # Repeat each name `weight` times so random.choice is weighted.
        pool.extend([name] * random_dict[name][0])
    chosen = random.choice(pool)
    return random_dict[chosen][1]
#
class RandomFunc():
    """Weighted random selector over a {name: (weight, func)} table."""
    def __init__(self,random_dict):
        self.random_dict = random_dict
        self.focus = None  # name of the most recently drawn entry
        self.assemble = []
        for name in random_dict:
            # Repeat each name `weight` times so choice is weighted.
            self.assemble.extend([name] * random_dict[name][0])
    def get_random_func(self):
        """Draw a weighted-random entry; remember it in self.focus."""
        self.focus = random.choice(self.assemble)
        return self.random_dict[self.focus][1]
    def get_focus_func(self,focus):
        """Return the function registered under *focus*."""
        return self.random_dict[focus][1]
# def f1():
# print("1")
# def f2():
# print("2")
# t={
# "F1":(1,f1),#数量,函数
# "F2":(9,f2)
# }
# func=RandomFunc(t).get_random_func
# for i in range(10):
# f,name=func()
# f() |
# Sample data for the quickselect demo below (10 elements; the 10th smallest,
# i.e. the maximum, is 11).
a = [1, 4, 7, 3, 11, 9, 2, 6, 4, 7]
def find_element(a, p, q, k):
    """Return the k-th smallest element of a[p..q] (1-based k) via quickselect.

    Partitions around a[q] (Lomuto scheme) and recurses into the half that
    contains the k-th smallest element. The list is partially sorted in
    place. Debug prints from the original implementation are retained.
    """
    element = None
    j = p
    pivot = a[q]
    # Lomuto partition: move everything smaller than the pivot before j.
    for i in range(p, q+1):
        if a[i] < pivot:
            a[i], a[j] = a[j], a[i]
            j += 1
    # Place the pivot at its final sorted position j.
    a[j], a[q] = a[q], a[j]
    print(a)
    print('j = %s, k = %s' % (j, k-1))
    if j == (k-1):
        print('find element')
        print(a)
    if j > (k-1):
        element = find_element(a, p, j-1, k)
    elif j < (k-1):
        element = find_element(a, j+1, q, k)
    else:
        element = a[j]
    # Bug fix: return unconditionally. The original `if element: return element`
    # returned None whenever the answer was falsy (e.g. the element 0).
    return element
def test():
    """Trivial smoke helper retained from development; always returns 1."""
    return 1
def main():
    """Demo: select the 10th smallest element of the module-level list `a`."""
    ret = find_element(a, 0, len(a)-1, 10)
    # `a` has been partially sorted in place by the quickselect above.
    print(a)
    # ret = test()
    print('value = ', ret)
if __name__ == '__main__':
main() |
# Advent of code day 5
# Open the puzzle input and strip trailing newlines. Fix: use a context
# manager so the file handle is closed (it was previously left open).
with open('input_5.txt') as advent_input:
    advent_data = [x.rstrip() for x in advent_input.readlines()]
# Create class seat
class Seat:
    """A boarding-pass seat decoded from a 10-character specification.

    The first 7 letters (F/B) binary-partition rows 0-127 (F = front half,
    B = back half); the last 3 letters (L/R) partition columns 0-7.
    """
    def __init__(self, code):
        # Fix: parameter renamed from `str`, which shadowed the builtin.
        start_row = 0
        end_row = 127
        start_column = 0
        end_column = 7
        # Halve the candidate interval for every letter.
        for letter in code:
            if letter == 'B':
                start_row = int(round(start_row + ((end_row - start_row) / 2)))
            elif letter == 'F':
                end_row = int(end_row - ((end_row - start_row) / 2))
            elif letter == 'R':
                start_column = int(round(start_column + ((end_column - start_column) / 2)))
            elif letter == 'L':
                end_column = int(end_column - ((end_column - start_column) / 2))
        # Decide the row from the final row letter. (The interval is two wide
        # at the last step and banker's rounding can leave start_row stale,
        # so the explicit choice below is what makes the result correct.)
        if code[6] == 'F':
            row = start_row
        elif code[6] == 'B':
            row = end_row
        else:
            print('INVALID')
        # Decide the column the same way from the final column letter.
        if code[9] == 'R':
            column = end_column
        elif code[9] == 'L':
            column = start_column
        else:
            print('INVALID')
        self.row = row
        self.column = column

    def id(self):
        """Seat ID as defined by the puzzle: row * 8 + column."""
        return self.row * 8 + self.column
# Create the list with seat IDs
seating_list = [Seat(line).id() for line in advent_data]
# Part one: the highest seat ID.
seating_list.sort()
print(seating_list[-1])
# Part two: our seat is the missing ID whose two neighbors both exist.
# Fixes: scan the full ID range (the old `range(len(seating_list))` missed
# IDs >= the list length) and use a set for O(1) membership tests.
seat_ids = set(seating_list)
for candidate in range(min(seat_ids), max(seat_ids) + 1):
    if candidate not in seat_ids and candidate - 1 in seat_ids and candidate + 1 in seat_ids:
        print(candidate)
|
import os
import os.path
import sys
def rename_files(indir, outdir):
    """Resize every image in *indir* to 800x600, writing sequentially
    numbered copies ('000000_1.jpg', '000001_1.jpg', ...) into *outdir*.

    Relies on ImageMagick's `convert` being on PATH. NOTE(review): paths are
    interpolated unquoted into a shell command, so file names containing
    spaces or shell metacharacters will break the command (or worse).
    """
    files = os.listdir(indir)
    print(len(files))
    for index, oldname in enumerate(files):
        newname = '{0:06}_1.jpg'.format(index)
        # Idiom fix: build paths with os.path.join instead of '+' and '/'.
        oldpath = os.path.join(indir, oldname)
        newpath = os.path.join(outdir, newname)
        cmd = 'convert {0} -resize 800x600 {1}'.format(oldpath, newpath)
        print(cmd)
        os.system(cmd)
if __name__ == '__main__':
    if len(sys.argv) < 3:
        # Bug fix: exit after printing usage; previously execution fell
        # through and crashed with an IndexError on sys.argv[1].
        print('python3 rename.py indir outdir')
        sys.exit(1)
    indir = sys.argv[1]
    outdir = sys.argv[2]
    rename_files(indir, outdir)
|
import socket
import sys
import threading
import time
import errno
from queue import Queue

# Fixed-width prefix carrying each outgoing message's length.
HEADER_LENGTH=10
# NOTE(review): all_connections, all_address, queue and several imports above
# appear unused in this client — possibly copied from the matching server.
all_connections = []
all_address = []
HOST = "localhost"
PORT = 5054
queue = Queue()

# Identify ourselves to the server; the "-helper" suffix presumably lets the
# server tell helper clients apart — confirm against the server code.
username=input("Username:")
username=username+"-helper"
client_socket=socket.socket()
client_socket.connect((HOST,PORT))
client_socket.setblocking(True)
# Send the username prefixed with its length, left-aligned in HEADER_LENGTH.
name=username.encode('utf-8')
name_header= f"{len(name):<{HEADER_LENGTH}}".encode('utf-8')
client_socket.send(name_header+name)
print("Enter list to list available connections:")
# Simple request/response loop: send a command, print the server's reply.
connection=True
while connection:
    cmd = input()
    if cmd == 'done':
        # Tell the server we are leaving, then stop the loop.
        client_socket.send(str.encode(cmd))
        connection=False
        print("The service has been disconnected.\n")
        break
    if len(str.encode(cmd)) > 0:
        client_socket.send(str.encode(cmd))
        response = str(client_socket.recv(20480), "utf-8")
        print(response, end="")
|
# -*- coding: utf-8 -*-
from odoo import models, fields, api
from odoo.exceptions import UserError
from datetime import datetime
import logging
class ControlExt(models.Model):
    """Extends account.move with the I.C.B.P. amount taken from its lines."""
    _inherit = 'account.move'

    # Credit of the move line named 'ICBP', or 0.00 when there is none.
    icbp = fields.Float('I.C.B.P.', digits=(12,2), compute="_get_icbp")

    def _get_icbp(self):
        # Bug fix: compute methods receive a recordset, so iterate it.
        # Assigning through `self` directly raises a singleton error when
        # several moves are computed in one batch, and every record must be
        # assigned a value.
        for move in self:
            move.icbp = 0.00
            for line in move.line_ids:
                if line.name == 'ICBP':
                    # As in the original: the last matching line wins.
                    move.icbp = line.credit
|
'''
BOJ 1644: Sums of Consecutive Primes

Count the number of ways to write n as a sum of consecutive primes.
Two-pointer sweep over the primes up to n; the single-prime case {n}
is counted separately at the end.
'''
import sys
from collections import deque
input=sys.stdin.readline
n=int(input())
prime=deque()
# Sieve of Eratosthenes over [0, n+1], seeded with "odd numbers are prime".
p=[i%2 for i in range(n+2)]
p[1]=0; p[2]=1
for i in range(3, n+1, 2):
    if p[i]==1:
        for j in range(i+i, n+1, i):
            p[j]=0
for i in range(2, n+1):
    if p[i]: prime.append(i)
# Two pointers l..r over the prime list; s is the sum of the current window.
l=0; r=0; s=2; ans=0
while r<len(prime)-1 and l<=r:
    if s<n:
        r+=1
        s+=prime[r]
    else:
        s-=prime[l]
        l+=1
    if s==n:
        ans+=1
def isPrime(n):
    # Trial division; used once below for the single-prime window {n},
    # which the sweep above never reaches (it stops at the last prime).
    if n==2: return True
    if n==1 or n%2==0: return False
    for i in range(3, int(n**0.5)+1, 2):
        if n%i==0: return False
    return True
if isPrime(n): ans+=1
print(ans)
# The open() builtin:
#   first argument  = the file name
#   second argument = the mode:
#     r  = read   - read an existing file
#     w  = write  - write to a file; existing contents are overwritten
#     a  = append - add data to the end of the file
#     r+ = read and write
user = open("user.txt", "a")
# Append text on a new line at the bottom of the file.
user.write("\nlink - link")
# writable() reports whether the file can be written (True = yes);
# this depends on the mode the file was opened with.
print(user.writable())
# Close the file once it is no longer needed.
user.close()
|
## https://docs.djangoproject.com/en/1.11/topics/db/managers/
## https://docs.djangoproject.com/en/dev/howto/custom-management-commands/#howto-custom-management-commands
## https://medium.com/@bencleary/django-scheduled-tasks-queues-part-1-62d6b6dc24f8
## https://medium.com/@bencleary/django-scheduled-tasks-queues-part-2-fc1fb810b81d
## https://medium.com/@kevin.michael.horan/scheduling-tasks-in-django-with-the-advanced-python-scheduler-663f17e868e6
## https://django-background-tasks.readthedocs.io/en/latest/
# VtsKernelLinuxKselftest#timers_set-timer-lat_32bit
import pdb
import datetime
import json
import logging
import os
import re
import yaml
import datetime
import subprocess
from django.core.management.base import BaseCommand, CommandError
from django.utils.timesince import timesince
from lkft.models import KernelChange, CiBuild, ReportBuild
from lcr import qa_report
from lcr.settings import QA_REPORT, QA_REPORT_DEFAULT, BUILD_WITH_JOBS_NUMBER
from lkft.views import get_test_result_number_for_build, get_lkft_build_status, get_classified_jobs
from lkft.views import extract
from lkft.views import get_result_file_path
from lkft.views import download_attachments_save_result
from lkft.views import get_build_metadata
from lkft.views import get_build_kernel_version, parse_kernel_version_string
from lkft.lkft_config import get_version_from_pname, get_kver_with_pname_env
from lkft.reportconfig import get_all_report_kernels, get_all_report_projects
logger = logging.getLogger(__name__)

# Module-level clients and configuration shared by the helper functions and
# the management command below.
qa_report_def = QA_REPORT[QA_REPORT_DEFAULT]
qa_report_api = qa_report.QAReportApi(qa_report_def.get('domain'), qa_report_def.get('token'))
jenkins_api = qa_report.JenkinsApi('ci.linaro.org', None)
# kernel -> list of combo names, and combo name -> project info dict
rawkernels = get_all_report_kernels()
projectids = get_all_report_projects()
def do_boilerplate(output):
    """Write the legend explaining the failure-classification letters to *output*."""
    legend = (
        "\n\nFailure Key:\n"
        "--------------------\n"
        "I == Investigation\nB == Bug#, link to bugzilla\nF == Flakey\nU == Unexpected Pass\n\n"
    )
    output.write(legend)
# a flake entry
# name, state, bugzilla
def process_flakey_file(path_flakefile):
    """Parse a flakey-test list file into per-kernel-version buckets.

    File format (one entry per line, '#' starts a comment line):

        [I|F|B|E] <testname> <kernel list|ALL> <board list|ALL> <android list|ALL>

    Returns a list of dicts, one per supported kernel version, shaped
    ``{'version': <float>, 'flakelist': [entry, ...]}`` where each entry is
    ``{'name': ..., 'state': 'I'/'F'/'B'/'E'/' ', 'board': [...], 'androidrel': [...]}``.
    """
    Dict44 = {'version': 4.4, 'flakelist': []}
    Dict49 = {'version': 4.9, 'flakelist': []}
    Dict414 = {'version': 4.14, 'flakelist': []}
    Dict419 = {'version': 4.19, 'flakelist': []}
    Dict54 = {'version': 5.4, 'flakelist': []}
    Dict510 = {'version': 5.10, 'flakelist': []}
    flakeDicts = [Dict44, Dict49, Dict414, Dict419, Dict54, Dict510]
    # BUGFIX: the dot was unescaped ('[0-9]+.[0-9]+'), so strings like '4x14'
    # also matched as a kernel version.
    kernelsmatch = re.compile(r'[0-9]+\.[0-9]+')
    androidmatch = re.compile(r'ANDROID[0-9]+|AOSP')
    # BUGFIX: with 'HiKey' listed before 'HiKey960', findall() reported
    # 'HiKey' for HiKey960 entries; the longest alternative must come first.
    hardwarematch = re.compile(r'HiKey960|HiKey|db845')
    allmatch = re.compile(r'ALL')
    # context manager so the file is closed even if parsing raises
    with open(path_flakefile, "r") as f_flakefile:
        Lines = f_flakefile.readlines()
    for Line in Lines:
        newstate = ' '
        if not Line or Line[0] == '#':
            continue
        if Line[0] in ('I', 'F', 'B', 'E'):
            newstate = Line[0]
            Line = Line[2:]
        m = Line.find(' ')
        # BUGFIX: 'if m:' treated -1 (no separator found) as true; require a
        # real testname/attributes separator.
        if m <= 0:
            continue
        testname = Line[0:m]
        Line = Line[m:].lstrip(' ')
        testentry = {'name': testname, 'state': newstate, 'board': [], 'androidrel': []}
        # --- kernel field ---
        if Line[0:3] == 'ALL':
            # entry applies to every supported kernel version
            Line = Line[4:]
            for Dict in flakeDicts:
                Dict['flakelist'].append(testentry)
        else:
            # BUGFIX: the original anchored kernelsmatch.match() on the leading
            # space (so it never matched and kernel-specific lines were silently
            # dropped) and then iterated a re.Match object, which would raise
            # TypeError; take the kernel field and findall() it instead.
            fields = Line.split(' ', 1)
            kernels = kernelsmatch.findall(fields[0])
            if not kernels:
                continue
            Line = fields[1] if len(fields) > 1 else ''
            for kernel in kernels:
                for Dict in flakeDicts:
                    # BUGFIX: compare numerically -- the dict versions are floats,
                    # so the original string == float comparison was never true.
                    if float(kernel) == Dict['version']:
                        Dict['flakelist'].append(testentry)
        if not Line:
            continue
        # --- board field ---
        if Line[0:3] == 'ALL':
            Line = Line[4:]
            testentry['board'].append("HiKey")
            testentry['board'].append("HiKey960")
            testentry['board'].append("db845")
        else:
            h = hardwarematch.findall(Line)
            if h:
                for board in h:
                    testentry['board'].append(board)
            else:
                continue
        # --- android release field ---
        a = allmatch.search(Line)
        if a:
            # NOTE(review): classifyTest() compares against these literal names,
            # while explicit entries use the ANDROIDnn/AOSP spelling -- confirm
            # which convention the flake files actually use.
            testentry['androidrel'].append('Android8')   # O
            testentry['androidrel'].append('Android9')   # P
            testentry['androidrel'].append('Android10')  # Q
            testentry['androidrel'].append('Android11')  # R
            testentry['androidrel'].append('Android12')  # S
            testentry['androidrel'].append('AOSP')
        else:
            a = androidmatch.findall(Line)
            if a:
                for android in a:
                    testentry['androidrel'].append(android)
            else:
                continue
    return flakeDicts
# take the data dictionaries, the testcase name and ideally the list of failures
# and determine how to classify a test case. This might be a little slow espeically
# once linked into bugzilla
def classifyTest(flakeDicts, testcasename, hardware, kernel, android):
    """Classify a failing test case against the known-flake list.

    Looks up the bucket for *kernel* and, for the first flake entry whose
    name matches *testcasename*, returns its state letter when both the
    board and the android release match; otherwise returns 'I'
    (Investigation). Unknown tests also classify as 'I'.
    """
    # Select the bucket for this kernel; like the original, if no version
    # matches we fall through with the last bucket selected.
    for selected in flakeDicts:
        if selected['version'] == kernel:
            break
    for entry in selected['flakelist']:
        if entry['name'] != testcasename:
            continue
        # first name match decides the classification
        board_ok = hardware in entry['board']
        android_ok = android in entry['androidrel']
        return entry['state'] if board_ok and android_ok else 'I'
    return 'I'
def find_best_two_runs(builds, project_name, project, exact_ver1="", exact_ver2="", reverse_build_order=False, no_check_kernel_version=False):
    """Pick up to two "good" builds of a project for regression comparison.

    Walks the builds sorted by kernel version (newest first) and collects
    builds whose jobs all finished successfully, honouring the optional
    exact version filters.  On success, goodruns[0] is the prior build and
    goodruns[1] the current one (subject to reverse_build_order).  Each
    selected build dict is annotated in place with 'jobs', 'numbers' and
    'failures_list'.
    """
    goodruns = []
    # 0 = still looking for the first build, 1 = looking for the second,
    # 2 = both found
    bailaftertwo = 0
    # NOTE(review): unused variable, kept for byte-compatibility
    number_of_build_with_jobs = 0
    baseExactVersionDict=None
    nextVersionDict=None
    if len(exact_ver1) > 0 and exact_ver1 !='No':
        baseExactVersionDict = parse_kernel_version_string(exact_ver1)
    sorted_builds = sorted(builds, key=get_build_kernel_version, reverse=True)
    for build in sorted_builds:
        if bailaftertwo == 2:
            break
        elif bailaftertwo == 0 :
            baseVersionDict = parse_kernel_version_string(build['version'])
            if baseExactVersionDict is not None \
                and not baseVersionDict['versionString'].startswith(exact_ver1):
                logger.info('Skip the check as it is not the specified version for %s %s', project_name, build['version'])
                continue
            # print "baseset"
        elif bailaftertwo == 1 :
            nextVersionDict = parse_kernel_version_string(build['version'])
            # NOTE(review): with the default exact_ver2="" this first test is
            # always False (startswith("") is True), so the version-dedup
            # branch below is the effective filter.
            if exact_ver2 is not None \
                and not nextVersionDict['versionString'].startswith(exact_ver2):
                # for case that second build version specified, but not this build
                logger.info('Skip the check as it is not the specified version for %s %s', project_name, build['version'])
                continue
            elif not no_check_kernel_version and nextVersionDict['Extra'] == baseVersionDict['Extra']:
                # for case that need to check kernel version, and all major, minro, extra version are the same
                # then needs to check if the rc version is the same too
                nextRc = nextVersionDict.get('rc')
                baseRc = baseVersionDict.get('rc')
                if (nextRc is None and baseRc is None) \
                    or (nextRc is not None and baseRc is not None and nextRc == baseRc):
                    # for case that neither build is rc version or both have the same rc version
                    logger.info('Skip the check as it has the same version for %s %s', project_name, build['version'])
                    continue
                else:
                    # for case that the rc version are different, like
                    # 1. one build is rc version, but another is not rc version
                    # 2. both are rc versions, but are different rc versions
                    pass
            else:
                # for cases that
                # 1. the second build version not specified, or this build has the same version like the specified second build version
                # 2. no need to check the kernel version, or the kernel version is different, either extra version or the rc version
                pass
        logger.info("Checking for %s, %s", project_name, build.get('version'))
        build['created_at'] = qa_report_api.get_aware_datetime_from_str(build.get('created_at'))
        jobs = qa_report_api.get_jobs_for_build(build.get("id"))
        # only consider the jobs classified as "final" (e.g. resubmissions resolved)
        jobs_to_be_checked = get_classified_jobs(jobs=jobs).get('final_jobs')
        build_status = get_lkft_build_status(build, jobs_to_be_checked)
        #build_status = get_lkft_build_status(build, jobs)
        jobs=jobs_to_be_checked
        if build_status['has_unsubmitted']:
            logger.info('Skip the check as the build has unsubmitted jobs: %s %s', project_name, build['version'])
            continue
        elif build_status['is_inprogress']:
            logger.info('Skip the check as the build is still inprogress: %s %s', project_name, build['version'])
            continue
        build['jobs'] = jobs
        if not jobs:
            continue
        download_attachments_save_result(jobs=jobs)
        # failures collected per module -> per test case by extract()
        failures = {}
        #pdb.set_trace()
        total_jobs_finished_number = 0
        build['numbers'] = qa_report.TestNumbers()
        for job in jobs:
            jobstatus = job['job_status']
            jobfailure = job['failure']
            # for some failed cases, numbers are not set for the job
            job_numbers = job.get('numbers', None)
            if jobstatus == 'Complete' and jobfailure is None and \
                job_numbers is not None and job_numbers.get('finished_successfully'):
                total_jobs_finished_number = total_jobs_finished_number + 1
            result_file_path = get_result_file_path(job=job)
            if not result_file_path or not os.path.exists(result_file_path):
                continue
            # now tally then move onto the next job
            kernel_version = get_kver_with_pname_env(prj_name=project_name, env=job.get('environment'))
            platform = job.get('environment').split('_')[0]
            metadata = {
                'job_id': job.get('job_id'),
                'qa_job_id': qa_report_api.get_qa_job_id_with_url(job_url=job.get('url')),
                'result_url': job.get('attachment_url'),
                'lava_nick': job.get('lava_config').get('nick'),
                'kernel_version': kernel_version,
                'platform': platform,
            }
            extract(result_file_path, failed_testcases_all=failures, metadata=metadata)
            # this line overrides the numbers set within the function of download_attachments_save_result
            test_numbers = qa_report.TestNumbers()
            test_numbers.addWithHash(job['numbers'])
            job['numbers'] = test_numbers
            build['numbers'].addWithTestNumbers(test_numbers)
        # now let's see what we have. Do we have a complete yet?
        print("Total Finished Jobs Number / Total Jobs Number: %d / %d" % (total_jobs_finished_number, len(jobs)))
        # when the finished successfully jobs number is the same as the number of all jobs
        # it means all the jobs are finished successfully, the build is OK to be used for comparisonfin
        if len(jobs) == total_jobs_finished_number and build['numbers'].modules_total > 0:
            #pdb.set_trace()
            if nextVersionDict is not None:
                # add for the second build
                if exact_ver2 is not None:
                    if reverse_build_order:
                        # find regression in exact_ver2 compare to exact_ver1
                        goodruns.append(build)
                    else:
                        # find regression in exact_ver1 compare to exact_ver2
                        goodruns.insert(0, build)
                elif int(nextVersionDict['Extra']) > int(baseVersionDict['Extra']):
                    # for the case the second build is newer than the first build
                    # first build is goodruns[0], second build is goodruns[1]
                    # normally, the builds are sorted with the newest kernel version as the first build.
                    goodruns.append(build)
                else:
                    # for the case the second build is older than or equal to the first build
                    goodruns.insert(0, build)
            else:
                # add for the first build
                goodruns.append(build)
            bailaftertwo += 1
            logger.info("found one valid build bailaftertwo=%s %s, %s", bailaftertwo, project_name, build.get('version'))
        elif bailaftertwo == 0 and exact_ver1 is not None and baseVersionDict is not None and baseVersionDict.get('versionString') == exact_ver1:
            # found the first build with exact_ver1, but that build does not have all jobs finished successfully
            # stop the loop for builds to find anymore
            bailaftertwo += 1
            goodruns.append(build)
            logger.info("The build specified with --exact-version-1 is not a valid build: %s, %s", project_name, build.get('version'))
            return goodruns
        elif bailaftertwo == 1 and exact_ver2 is not None and nextVersionDict is not None and nextVersionDict.get('versionString') == exact_ver2:
            # found the second build with exact_ver2, but that build does not have all jobs finished successfully
            # stop the loop for builds to find anymore
            bailaftertwo += 1
            logger.info("The build specified with --exact-version-2 is not a valid build: %s, %s", project_name, build.get('version'))
            return goodruns
        else:
            # for case that no completed build found, continute to check the next build
            logger.info("Not one valid will continue: %s, %s", project_name, build.get('version'))
            continue
        #pdb.set_trace()
        # flatten the per-module failures into a list, attaching a combined
        # stacktrace per test case (both ABIs shown when they differ)
        failures_list = []
        for module_name in sorted(failures.keys()):
            failures_in_module = failures.get(module_name)
            for test_name in sorted(failures_in_module.keys()):
                failure = failures_in_module.get(test_name)
                abi_stacktrace = failure.get('abi_stacktrace')
                abis = sorted(abi_stacktrace.keys())
                stacktrace_msg = ''
                if (len(abis) == 2) and (abi_stacktrace.get(abis[0]) != abi_stacktrace.get(abis[1])):
                    for abi in abis:
                        stacktrace_msg = '%s\n\n%s:\n%s' % (stacktrace_msg, abi, abi_stacktrace.get(abi))
                else:
                    stacktrace_msg = abi_stacktrace.get(abis[0])
                failure['abis'] = abis
                failure['stacktrace'] = stacktrace_msg.strip()
                failure['module_name'] = module_name
                failures_list.append(failure)
        #pdb.set_trace()
        # NOTE(review): android_version is unused; kept for byte-compatibility
        android_version = get_version_from_pname(pname=project.get('name'))
        build['failures_list'] = failures_list
    return goodruns
# Try to find the regressions in goodruns[1]
# compared to the result in goodruns[0]
def find_regressions(goodruns):
    """Return the failures of goodruns[1] that goodruns[0] did not report.

    goodruns[0] is the prior run, goodruns[1] the current run; a failure
    present only in the current run is a regression.
    """
    prior_names = {failure['test_name'] for failure in goodruns[0]['failures_list']}
    return [failure for failure in goodruns[1]['failures_list']
            if failure['test_name'] not in prior_names]
def find_antiregressions(goodruns):
    """Return the failures of goodruns[0] that no longer appear in goodruns[1].

    These are "anti-regressions": tests that failed in the prior run
    (goodruns[0]) but pass in the current run (goodruns[1]).
    """
    current_names = {failure['test_name'] for failure in goodruns[1]['failures_list']}
    return [failure for failure in goodruns[0]['failures_list']
            if failure['test_name'] not in current_names]
""" Example project_info dict
{'project_id': 210,
'hardware': 'hikey',
'OS' : 'Android10',
'branch' : 'Android-4.19-q',},
"""
def print_androidresultheader(output, project_info, run, priorrun):
    """Write the per-project header comparing the current and prior runs.

    Prints the OS/hardware line with both build versions, then -- when the
    build metadata provides them -- the GSI/vendor fingerprints and the
    CTS/VTS suite versions, marking whether each value matches between the
    two runs ('==' vs '!=').
    """
    output.write(" " + project_info['OS'] + "/" + project_info['hardware'] + " - " )
    output.write("Current:" + run['version'] + " Prior:" + priorrun['version']+"\n")
    build_metadata = get_build_metadata(build_metadata_url=run.get('metadata'))
    prior_build_metadata = get_build_metadata(build_metadata_url=priorrun.get('metadata'))
    def get_last_of_metadata(metadata):
        # metadata values may be a plain string or a list (one value per job);
        # the last list element is taken as the effective value
        if metadata is None:
            return None
        if type(metadata) is str:
            return metadata
        if type(metadata) is list:
            return metadata[-1]
    if build_metadata.get('gsi_fingerprint', None):
        output.write(" " + "GSI Fingerprint:" + " - " )
        if get_last_of_metadata(build_metadata.get('gsi_fingerprint')) == get_last_of_metadata(prior_build_metadata.get('gsi_fingerprint', 'UNKNOWN')):
            output.write("Current:" + get_last_of_metadata(build_metadata.get('gsi_fingerprint')) + " == Prior:" + get_last_of_metadata(prior_build_metadata.get('gsi_fingerprint', 'UNKNOWN')) + "\n")
        else:
            output.write("Current:" + get_last_of_metadata(build_metadata.get('gsi_fingerprint')) + " != Prior:" + get_last_of_metadata(prior_build_metadata.get('gsi_fingerprint', 'UNKNOWN')) + "\n")
    if build_metadata.get('vendor_fingerprint', None):
        output.write(" " + "Vendor Fingerprint:" + " - " )
        if get_last_of_metadata(build_metadata.get('vendor_fingerprint')) == get_last_of_metadata(prior_build_metadata.get('vendor_fingerprint', 'UNKNOWN')):
            output.write("Current:" + get_last_of_metadata(build_metadata.get('vendor_fingerprint')) + " == Prior:" + get_last_of_metadata(prior_build_metadata.get('vendor_fingerprint', 'UNKNOWN')) + "\n")
        else:
            output.write("Current:" + get_last_of_metadata(build_metadata.get('vendor_fingerprint')) + " != Prior:" + get_last_of_metadata(prior_build_metadata.get('vendor_fingerprint', 'UNKNOWN')) + "\n")
    if build_metadata.get('cts_version', None):
        output.write(" " + "CTS Version:" + " - " )
        if get_last_of_metadata(build_metadata.get('cts_version', 'UNKNOWN')) == get_last_of_metadata(prior_build_metadata.get('cts_version', 'UNKNOWN')):
            output.write("Current:" + get_last_of_metadata(build_metadata.get('cts_version', 'UNKNOWN')) + " == Prior:" + get_last_of_metadata(prior_build_metadata.get('cts_version', 'UNKNOWN')) + "\n")
        else:
            output.write("Current:" + get_last_of_metadata(build_metadata.get('cts_version', 'UNKNOWN')) + " != Prior:" + get_last_of_metadata(prior_build_metadata.get('cts_version', 'UNKNOWN')) + "\n")
    # presubmit jobs may not run VTS at all, so the key can be absent
    if build_metadata.get('vts_version', None):
        output.write(" " + "VTS Version:" + " - " )
        if get_last_of_metadata(build_metadata.get('vts_version', 'UNKNOWN')) == get_last_of_metadata(prior_build_metadata.get('vts_version', 'UNKNOWN')):
            output.write("Current:" + get_last_of_metadata(build_metadata.get('vts_version', 'UNKNOWN')) + " == Prior:" + get_last_of_metadata(prior_build_metadata.get('vts_version', 'UNKNOWN')) + "\n")
        else:
            output.write("Current:" + get_last_of_metadata(build_metadata.get('vts_version', 'UNKNOWN')) + " != Prior:" + get_last_of_metadata(prior_build_metadata.get('vts_version', 'UNKNOWN')) + "\n")
def add_unique_kernel(unique_kernels, kernel_version, combo, unique_kernel_info):
    """Record *combo* under *kernel_version*, preserving first-seen kernel order.

    unique_kernels is the ordered list of kernel versions seen so far;
    unique_kernel_info maps each kernel version to its list of combos.
    Both are mutated in place.
    """
    if kernel_version in unique_kernels:
        unique_kernel_info[kernel_version].append(combo)
    else:
        unique_kernels.append(kernel_version)
        unique_kernel_info[kernel_version] = [combo]
def report_results(output, run, regressions, combo, priorrun, flakes, antiregressions, trim_number=0):
    """Write one project's section of the report to *output*.

    Emits the branch header, the run-vs-prior header, the pass/fail
    summary counts, the classified regression lists (test failures and new
    assumption failures, trimmed to *trim_number* entries when requested),
    and the job URLs of both runs.
    """
    #pdb.set_trace()
    numbers = run['numbers']
    # combo -> project info comes from the module-level projectids mapping
    project_info = projectids[combo]
    output.write(project_info['branch'] + "\n")
    print_androidresultheader(output, project_info, run, priorrun)
    #pdb.set_trace()
    if numbers.modules_total == 0:
        logger.info("Skip printing the regressions information as this build might not have any cts vts jobs run")
        return
    output.write(" " + str(len(antiregressions)) + " Prior Failures now pass\n")
    output.write(" " + str(len(regressions)) + " Regressions of ")
    output.write(str(numbers.number_failed) + " Failures, ")
    output.write(str(numbers.number_passed) + " Passed, ")
    if numbers.number_ignored > 0 :
        output.write(str(numbers.number_ignored) + " Ignored, ")
    if numbers.number_assumption_failure > 0 :
        output.write(str(numbers.number_assumption_failure) + " Assumption Failures, ")
    output.write(str(numbers.number_total) + " Total\n" )
    output.write(" " + "Modules Run: " + str(numbers.modules_done) + " Module Total: " + str(numbers.modules_total) + "\n")
    # split the regressions into plain test failures and assumption failures
    regressions_failure = [ failure for failure in regressions if failure.get('result') == 'fail' ]
    regressions_assumption = [ failure for failure in regressions if failure.get('result') == 'ASSUMPTION_FAILURE' ]
    def print_regressions(output, target_regressions, flakes, project_info, trim_number=0):
        # Classify and print each regression; when trim_number is set and the
        # list is longer, print only the head and tail halves with a marker
        # line in between.
        if 'baseOS' in project_info:
            OS = project_info['baseOS']
        else:
            OS = project_info['OS']
        if trim_number > 0 and len(target_regressions) > trim_number:
            print_number=int(trim_number/2)
            for regression in target_regressions[:print_number]:
                # pdb.set_trace()
                testtype = classifyTest(flakes, regression['test_name'], project_info['hardware'], project_info['kern'], OS)
                output.write(" " + testtype + " " + regression['module_name'] +"." + regression['test_name'] + "\n")
            output.write(" [...%d lines removed...]\n" % (len(target_regressions) - print_number * 2))
            for regression in target_regressions[-print_number:]:
                # pdb.set_trace()
                testtype = classifyTest(flakes, regression['test_name'], project_info['hardware'], project_info['kern'], OS)
                output.write(" " + testtype + " " + regression['module_name'] +"." + regression['test_name'] + "\n")
        else:
            for regression in target_regressions:
                # pdb.set_trace()
                testtype = classifyTest(flakes, regression['test_name'], project_info['hardware'], project_info['kern'], OS)
                output.write(" " + testtype + " " + regression['module_name'] +"." + regression['test_name'] + "\n")
    if len(regressions_failure) > 0:
        output.write(" " + str(len(regressions_failure)) + " Test Regressions:\n")
        print_regressions(output, regressions_failure, flakes, project_info, trim_number=trim_number)
    if len(regressions_assumption) > 0:
        output.write(" " + str(len(regressions_assumption)) + " New Assumption Failures:\n")
        print_regressions(output, regressions_assumption, flakes, project_info, trim_number=trim_number)
    if len(regressions) > 0:
        output.write(" " + "Current jobs\n")
        for job in run['jobs']:
            output.write(" " + "%s %s\n" % (job.get('external_url'), job.get('name')))
        output.write(" " + "Prior jobs\n")
        for job in priorrun['jobs']:
            output.write(" " + "%s %s\n" % (job.get('external_url'), job.get('name')))
    output.write("\n")
def report_kernels_in_report(path_outputfile, unique_kernels, unique_kernel_info, work_total_numbers):
    """Write the report header: kernels/combos covered plus the grand totals.

    Creates (or truncates) *path_outputfile* and writes one line per kernel
    version listing its combos, followed by the aggregated regression and
    pass/fail counts from *work_total_numbers*.
    """
    with open(path_outputfile, "w") as report:
        report.write("\n\nKernel/OS Combo(s) in this report:\n")
        for kernel_version in unique_kernels:
            combos = unique_kernel_info[kernel_version]
            report.write(" " + kernel_version + " - " + ", ".join(combos) + "\n")
        report.write("\n")
        report.write("%d Prior Failures now pass\n" % work_total_numbers.number_antiregressions)
        report.write("%d Regressions of %d Failures, %d Passed, %d Ignored, %d Assumption Failures, %d Total\n" % (
            work_total_numbers.number_regressions,
            work_total_numbers.number_failed,
            work_total_numbers.number_passed,
            work_total_numbers.number_ignored,
            work_total_numbers.number_assumption_failure,
            work_total_numbers.number_total))
class Command(BaseCommand):
    """Management command producing the Android Common Kernel regression report."""
    help = 'returns Android Common Kernel Regression Report for specific kernels'

    def add_arguments(self, parser):
        # positional arguments
        parser.add_argument('kernel', type=str, help='Kernel version')
        parser.add_argument('outputfile', type=str, help='Output File')
        parser.add_argument('flake', type=str, help='flakey file')
        # optional behaviour switches
        parser.add_argument("--no-check-kernel-version",
                            help="Specify if the kernel version for the build should be checked.",
                            dest="no_check_kernel_version",
                            action='store_true',
                            required=False)
        parser.add_argument("--exact-version-1",
                            help="Specify the exact kernel version for the first build",
                            dest="exact_version_1",
                            default="",
                            required=False)
        parser.add_argument("--exact-version-2",
                            help="Specify the exact kernel version for the second build",
                            dest="exact_version_2",
                            default="",
                            required=False)
        parser.add_argument("--trim-number",
                            help="Specify how many test cases to be printed",
                            dest="trim_number",
                            type=int,
                            default=0,
                            required=False)
        parser.add_argument("--reverse-build-order",
                            help="When both --exact-version-1 and --exact-version-2 specified,\
                            normally will try to find the regressions in --exact-version-1 against --exact-version-2,\
                            but with this option, it will try to find the regressions in --exact-version-2\
                            agains the build of --exact-version-1",
                            dest="reverse_build_order",
                            action='store_true',
                            required=False)
        parser.add_argument("--qareport-project",
                            help="Specify the QA Report project to check",
                            dest="qareport_project",
                            default=None,
                            required=False)

    def handle(self, *args, **options):
        """Build the report: header in outputfile, details in the .scribble file,
        problem projects in the .errorprojects file."""
        kernel = options['kernel']
        path_outputfile = options['outputfile']
        # detailed per-project output and error summary go to side files
        scribblefile = path_outputfile + str(".scribble")
        f_errorprojects = path_outputfile + str(".errorprojects")
        path_flakefile = options['flake']
        no_check_kernel_version = options.get('no_check_kernel_version')
        opt_exact_ver1 = options.get('exact_version_1')
        opt_exact_ver2 = options.get('exact_version_2')
        trim_number = options.get('trim_number')
        reverse_build_order = options.get('reverse_build_order')
        qareport_project = options.get('qareport_project')
        # map kernel to all available kernel, board, OS combos that match
        work = []
        unique_kernel_info = { }
        unique_kernels=[]
        if qareport_project:
            work = [ qareport_project ]
        else:
            work = rawkernels.get(kernel)
            if work is None:
                print("The specified kernel is not supported yet:", kernel)
                print("The supported kernels are:", ' '.join(rawkernels.keys()))
                return
        flakes = process_flakey_file(path_flakefile)
        output = open(scribblefile, "w")
        output_errorprojects = open(f_errorprojects, "w")
        do_boilerplate(output)
        work_total_numbers = qa_report.TestNumbers()
        trimmed_lines = 0
        for combo in work:
            project_info = projectids[combo]
            project_id = project_info.get('project_id', None)
            # a combo is addressed either by numeric project id or by group+slug
            if project_id is not None:
                logger.info("Try to get project %s with project_id %s", combo, project_id)
                project = qa_report_api.get_project(project_id)
            else:
                project_group = project_info.get('group', None)
                project_slug = project_info.get('slug', None)
                project_fullname = qa_report_api.get_project_full_name_with_group_and_slug(project_group, project_slug)
                logger.info("Try to get project %s with project_fullname %s", combo, project_fullname)
                project = qa_report_api.get_project_with_name(project_fullname)
            if project is None:
                print("\nNOTE: project for " + combo + " was not found, please check and try again\n")
                output_errorprojects.write("\nNOTE: project " + combo+ " was not found, please check and try again\n\n")
                continue
            project_id = project.get('id')
            builds = qa_report_api.get_all_builds(project_id)
            project_name = project.get('name')
            goodruns = find_best_two_runs(builds, project_name, project,
                exact_ver1=opt_exact_ver1, exact_ver2=opt_exact_ver2, reverse_build_order=reverse_build_order,
                no_check_kernel_version=no_check_kernel_version)
            if len(goodruns) < 2 :
                # not enough comparable builds: record why in the error file
                print("\nNOTE: project " + project_name+ " did not have 2 good runs\n")
                if opt_exact_ver1 is not None:
                    output_errorprojects.write("NOTE: project " + project_name + " did not have results for %s\n" % (opt_exact_ver1))
                else:
                    output_errorprojects.write("NOTE: project " + project_name + " did not have results for %s\n" % (kernel))
                if len(goodruns) == 1:
                    # assuming that it's the latest build a invalid build
                    # and that caused only one goodruns returned.
                    # if the first latest build is a valid build,
                    # then the second build should be alwasy there
                    run = goodruns[0]
                    output_errorprojects.write(project_info['branch'] + "\n")
                    output_errorprojects.write(" " + project_info['OS'] + "/" + project_info['hardware'] + " - " + "Current:" + run['version'] + "\n")
                    output_errorprojects.write(" Current jobs\n")
                    for job in run['jobs']:
                        output_errorprojects.write(" " + "%s %s %s\n" % (job.get('external_url'), job.get('name'), job.get("job_status")))
                        if job.get('failure') and job.get('failure').get('error_msg'):
                            output_errorprojects.write(" " + "%s\n" % (job.get('failure').get('error_msg')))
                    output_errorprojects.write(" Want to resubmit the failed jobs for a try? https://android.linaro.org/lkft/jobs/?build_id=%s&fetch_latest=true\n" % run.get('id'))
                elif len(goodruns) == 0 and opt_exact_ver1 is not None:
                    output_errorprojects.write(project_info['branch'] + "\n")
                    output_errorprojects.write(" " + project_info['OS'] + "/" + project_info['hardware'] + " - build for kernel version " + opt_exact_ver1 + " is not found or still in progress!"+ "\n")
                    output_errorprojects.write(" Builds list: https://android.linaro.org/lkft/builds/?project_id=%s&fetch_latest=true\n" % project_id)
                elif len(goodruns) == 0:
                    output_errorprojects.write(project_info['branch'] + "\n")
                    output_errorprojects.write(" " + project_info['OS'] + "/" + project_info['hardware'] + " - no build available for " + kernel + "!\n")
                output_errorprojects.write("\n")
            else:
                # two good runs: compute regressions and write the project section
                add_unique_kernel(unique_kernels, goodruns[1]['version'], combo, unique_kernel_info)
                regressions = find_regressions(goodruns)
                antiregressions = find_antiregressions(goodruns)
                goodruns[1].get('numbers').number_regressions = len(regressions)
                goodruns[1].get('numbers').number_antiregressions = len(antiregressions)
                work_total_numbers.addWithTestNumbers(goodruns[1].get('numbers'))
                report_results(output, goodruns[1], regressions, combo, goodruns[0], flakes, antiregressions, trim_number=trim_number)
                if trim_number > 0 and len(regressions) > trim_number:
                    # mirror print_regressions()'s trimming to count removed lines
                    print_number = int(trim_number/2)
                    trimmed_lines = trimmed_lines + (len(regressions) - print_number * 2)
        if trimmed_lines > 0:
            output_errorprojects.write("\nNOTE: %d lines were removed from this report.\n" % trimmed_lines)
        report_kernels_in_report(path_outputfile, unique_kernels, unique_kernel_info, work_total_numbers)
        output.close()
        output_errorprojects.close()
        # NOTE(review): the concatenation command is only printed, never
        # executed -- confirm whether appending the scribble file to the
        # output file is meant to be a manual step
        bashCommand = "cat "+ str(scribblefile) +str(" >> ") + path_outputfile
        print(bashCommand)
        #process = subprocess.run(['cat', scribblefile, str('>>'+options['outputfile']) ], stdout=subprocess.PIPE)
        """
        except:
            raise CommandError('Kernel "%s" does not exist' % kernel)
        """
|
#!/usr/bin/env python3
"""
Convert a LBANN model to an ONNX model.
Run "./lbann2onnx.py --help" for more details.
"""
import argparse
import re
import onnx
import os
import lbann.onnx.l2o
def parseInputShape(s):
    """Parse a "NAME=N1,...,ND" string into ``(NAME, [N1, ..., ND])``."""
    name, dims = re.search(r"^([^=]+)=([0-9,]+)$", s).groups()
    shape = [int(d) for d in dims.split(",")]
    return (name, shape)
if __name__ == "__main__":
    # CLI: lbann2onnx.py <lbann prototext> <onnx output> [NAME=N1,...,ND ...]
    parser = argparse.ArgumentParser(description="Convert a LBANN model to an ONNX model",
                                     epilog="Usage: lbann2onnx.py model_alexnet.prototext output.onnx image=3,224,224 label=1000")
    parser.add_argument("lbann_path", type=str,
                        help="Path to a LBANN model in .prototext")
    parser.add_argument("onnx_path", type=str,
                        help="Path to an ONNX model")
    parser.add_argument("input_shape", type=str, nargs="*",
                        help="Shape(s) of input tensor(s) *without* the mini-batch size in the \"NAME=N1,...,ND\" format.")
    parser.add_argument("--add-value-info", dest="add_value_info", action="store_const",
                        const=True, default=False,
                        help="Embed value_info in the generated ONNX model")
    args = parser.parse_args()
    lbannPath = args.lbann_path
    onnxPath = args.onnx_path
    # {tensor name: [dim, ...]} parsed from the positional NAME=N1,...,ND arguments
    inputShapes = dict(map(parseInputShape, args.input_shape))
    addValueInfo = args.add_value_info
    # convert the LBANN prototext and save the resulting ONNX model
    model, miniBatchSize = lbann.onnx.l2o.parseLbannModelPB(os.path.expanduser(lbannPath),
                                                            inputShapes,
                                                            addValueInfo=addValueInfo)
    onnx.save(model, os.path.expanduser(onnxPath))
|
from DateTime import DateTime
import transaction

# Zope/Plone maintenance script: run inside a zope shell where `app` is the
# application root. Restores real effective dates on published objects whose
# effective date is the bogus 1000/01/01 placeholder.
portal = app.recensio
broken = portal.portal_catalog.search(
    {
        "review_state": "published",
        # placeholder dates are at/before 1000/01/02
        "effective": {"query": DateTime("1000/01/02 1:00:00 GMT+0"), "range": "max"},
    }
)
for i, brain in enumerate(broken):
    obj = brain.getObject()
    effective_date = None
    # use the latest 'publish' action in the workflow history as the date
    for history in obj.workflow_history["simple_publication_workflow"]:
        if history["action"] == "publish":
            publication_date = history["time"]
            # BUGFIX: the original compared DateTime > None on the first
            # match, which raises TypeError on Python 3; seed with the
            # first publish date instead.
            if effective_date is None or publication_date > effective_date:
                effective_date = publication_date
    if effective_date is None:
        # no publish action recorded -- nothing trustworthy to restore
        continue
    if str(obj.effective()) == "1000/01/01":
        # Just to be extra sure we don't clobber existing dates
        obj.setEffectiveDate(effective_date)
        obj.reindexObject()
        print("Fixed effective_date for %s" % obj.absolute_url())
    # BUGFIX: 'if i % 20:' committed on 19 of every 20 iterations; commit
    # once per batch of 20 as intended.
    if i % 20 == 0:
        transaction.commit()
transaction.commit()
|
import re
import itertools
def isPrime(n):
    """Return 0 when n is prime, otherwise n's smallest prime factor.

    Uses 6k+/-1 trial division: after handling 2 and 3, candidate divisors
    alternate steps of 2 and 4 (5, 7, 11, 13, ...).
    """
    for small in (2, 3):
        if n == small:
            return 0
        if n % small == 0:
            return small
    divisor = 5
    step = 2
    while divisor * divisor <= n:
        if n % divisor == 0:
            return divisor
        divisor += step
        step = 6 - step
    return 0
def genStrings(n):
    """Return all 2**n binary strings of length n, in lexicographic order."""
    strings = []
    for bits in itertools.product("01", repeat=n):
        strings.append("".join(bits))
    return strings
t = input()
print("Case #"+t+":")
string = input()
string = string.strip().split(" ")
n = int(string[0])
j = int(string[1])
# all 2**n bit strings of length n, in lexicographic order
tempBitString = genStrings(n)
# NOTE(review): the scan range 32769..65535 indexes bit strings of length 16,
# so this implicitly assumes n == 16 -- confirm against the expected input
for s in range(32769, 65536):
    # only consider odd indices, i.e. bit strings ending in '1'
    if not (s&1):
        continue
    string = tempBitString[s]
    bitString = []
    # the string qualifies only if its interpretation in every base 2..10 is
    # composite; isPrime() returns a nontrivial divisor (the proof) or 0
    for x in range(2, 11):
        temp = isPrime(int(str(string), x))
        if temp:
            bitString.append(temp)
        else:
            # prime in this base: not a valid candidate
            break
    if len(bitString)!=9:
        continue
    else:
        # found one: print the string followed by its nine divisors
        j = j-1
        print(string, end=" ")
        for b in bitString:
            print(b, end=" ")
        print("")
    if not j:
        break
# -*- coding: utf-8 -*-
"""
Created on Mon Apr 02 23:06:02 2018
@author: Lucas
"""
import random
def AM(x1, x2):
    """Return the element-wise arithmetic mean of two equal-length sequences."""
    return [(x1[i] + x2[i]) / 2.0 for i in range(len(x1))]
class Monomial:
    """A monomial c * x1**a1 * ... * xn**an with integer exponents."""

    # exponent bounds used when exponents are drawn at random
    MIN_POWER = -5
    MAX_POWER = 5

    def __init__(self, n, a=None, c=1):
        # n: number of variables; a: exponent list (random when omitted);
        # c: coefficient
        self.n = n
        self.c = c
        if a:
            self.a = a
        else:
            self.a = [random.randint(Monomial.MIN_POWER, Monomial.MAX_POWER)
                      for _ in range(n)]

    def __call__(self, x):
        """Evaluate the monomial at the point x (a sequence of length >= n)."""
        value = self.c
        for i in range(self.n):
            value *= x[i] ** self.a[i]
        return value

    def __repr__(self):
        # coefficient followed by the exponent list, e.g. "5[2, 3]"
        return "{}{}".format(self.c, self.a)
class Posynomial:
    """A sum of monomials (each a callable of one point)."""

    def __init__(self, monomials):
        self.monomials = monomials

    def __call__(self, x):
        """Evaluate the posynomial at x: the sum of all monomial values."""
        return sum(m(x) for m in self.monomials)

    def __repr__(self):
        # monomial reprs separated by "+ " (matches the historical format)
        return "+ ".join(str(m) for m in self.monomials)
# demo: a 4-variable posynomial with random exponents
# (Python 2 print-statement syntax)
p = Posynomial([Monomial(4, c=4), Monomial(4, c=2), Monomial(4)])
print p
print p([2, 3, 4, 5])
from django.db import models
from datetime import datetime
class Roll(models.Model):
    """A material roll tracked from intake through consumption."""
    producer = models.CharField(max_length=20)      # manufacturer name
    number = models.IntegerField()                  # roll serial number
    batch = models.IntegerField(null=True)          # production batch; may be unknown
    size = models.CharField(max_length=20)
    coating = models.CharField(max_length=20)
    hardness = models.CharField(max_length=20)
    net = models.IntegerField()                     # net weight (units not specified here)
    gross = models.IntegerField()                   # gross weight (units not specified here)
    remainder = models.IntegerField()               # amount still left on the roll
    defective = models.BooleanField(default=False)  # flagged when the roll is unusable
class Plan (models.Model):
    """A dated production plan entry for a specific roll and format."""
    date = models.DateField()
    # Deleting a roll deletes its plan entries.
    roll = models.ForeignKey(Roll, on_delete=models.CASCADE)
    format = models.CharField(max_length=10)
class Pack (models.Model):
    """A numbered pack produced from a roll, with its weights and timestamp."""
    number = models.IntegerField()
    # Deleting a roll deletes the packs cut from it.
    roll = models.ForeignKey(Roll, on_delete=models.CASCADE)
    size = models.CharField(max_length=30)
    date = models.DateTimeField()
    net = models.IntegerField()
    gross = models.IntegerField()
|
from collections import Counter
def error_corrected_message(signal_text):
    """Recover the message by taking, for each column across all
    transmitted lines, the most frequent character."""
    lines = signal_text.splitlines()
    chosen = []
    for column in zip(*lines):
        chosen.append(Counter(column).most_common()[0][0])
    return ''.join(chosen)
def hidden_message(signal_text):
    """Recover the hidden message by taking, for each column across all
    transmitted lines, the least frequent character."""
    lines = signal_text.splitlines()
    chosen = []
    for column in zip(*lines):
        chosen.append(Counter(column).most_common()[-1][0])
    return ''.join(chosen)
|
import os
import uuid
from dotenv import load_dotenv
from flask import Flask, flash, redirect, url_for, send_from_directory, render_template
from flask_bootstrap import Bootstrap
from flask_wtf import FlaskForm
from flask_wtf.file import FileRequired, FileAllowed
from werkzeug.utils import secure_filename
from wtforms import FileField
from converter import process_supply
load_dotenv()
# Flask application setup. Configuration comes from environment variables
# (loaded above via load_dotenv) with development-friendly fallbacks.
app = Flask(__name__)
app.secret_key = os.environ.get('SECRET_KEY', 'dev')
bootstrap = Bootstrap(app)
# Directories for uploaded sources, generated results, export data, and the
# spreadsheet template used by the converter.
PATH_UPLOAD = os.environ.get('PATH_UPLOAD', 'uploads')
PATH_RESULT = os.environ.get('PATH_RESULT', 'results')
PATH_EXPORT = os.environ.get('PATH_EXPORT', '.')
TEMPLATE_FILE = os.environ.get('TEMPLATE_FILE', 'sku-body-template.xlsx')
class FileUploadForm(FlaskForm):
    """Upload form with a single required file field restricted to .xlsx/.docx."""
    upload = FileField(validators=[FileRequired(), FileAllowed(upload_set=['xlsx', 'docx'])])
@app.route('/', methods=['GET', 'POST'])
def upload_file():
    """Render the upload page; on a valid POST, save the file under a unique
    name, run the converter, flash any errors, and redirect to the result.

    Returns either the rendered index template or a redirect to
    ``download_result`` when conversion produced a result file.
    """
    form = FileUploadForm()
    params = {
        'template_name_or_list': 'index.html',
        'title': 'Конвертер остатков автозаказа',
        'form': form,
    }
    if form.validate_on_submit():
        f = form.upload.data
        # Prefix with a UUID so same-named uploads cannot collide.
        filename = f'{uuid.uuid4()}_{secure_filename(f.filename)}'
        filepath = os.path.join(PATH_UPLOAD, filename)
        f.save(filepath)
        # Fix: removed a dead `errors = list()` that was immediately
        # overwritten by the unpacking below.
        result, errors = process_supply(filepath, PATH_EXPORT, PATH_RESULT, TEMPLATE_FILE)
        if errors:
            flash('\n'.join(errors))
        if result:
            return redirect(url_for('download_result', filename=result))
    return render_template(**params)
@app.route('/results/<filename>')
def download_result(filename):
    """Serve a previously generated result file from the results directory."""
    results_dir = PATH_RESULT
    return send_from_directory(results_dir, filename)
|
from alpaca import Alpaca
from utils import load_test, split_df, TimeSeriesResampler,confusion_matrix
import time
from sklearn.model_selection import train_test_split
from sklearn.utils import shuffle
from sklearn.pipeline import Pipeline
import numpy as np
import pandas as pd
if __name__ == '__main__':
    # Evaluation script: compares Alpaca voting schemes on a held-out split,
    # recording error rates and the misclassified time-series curves.
    X, y = load_test()
    # Length of timeseries for resampler and cnn
    sz = 230
    # Number of channels for cnn
    num_channels = X.shape[-1]
    # Number of classes for cnn
    num_classes = np.unique(y).shape[0]
    classes = np.array(["0", "1", "2", "3", "4", "?"])
    repetitions = 1
    results = []
    # One row per misclassified sample: 230 timesteps x 3 channels flattened,
    # plus y_pred, y_true, y_pred_bin, y_true_bin and the voting scheme name.
    outliers = np.empty((0, 230*3+5))
    for r in range(repetitions):
        print("Repetition #",r)
        X, y = shuffle(X, y, random_state=r)
        # Turn y to numpy array
        y = np.array(y)
        # Split into train and test sets
        X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.3, stratify=y, random_state=r)
        for votingstr in ["democratic", "veto", "stacked_svc", "stacked_dtc"]:
            # Map the scheme name onto Alpaca's (voting, stacked) settings.
            # NOTE(review): `meta`, `num_channels`, `num_classes` and
            # `classes` are computed but never used below — confirm leftovers.
            if votingstr == 'stacked_svc':
                meta = 'svc'
            elif votingstr == 'stacked_dtc':
                meta = 'dtc'
            if votingstr == 'stacked_svc' or votingstr == 'stacked_dtc':
                voting = 'stacked'
                stacked = True
            else:
                stacked = False
                voting = votingstr
                meta = None
            # Build pipeline from resampler and estimator
            resampler = TimeSeriesResampler(sz=sz)
            alpaca = Pipeline([('resampler', resampler),
                               ('classifier', Alpaca())])
            alpaca.fit(X_train, y_train, classifier__stacked=stacked, classifier__n_clusters=100)
            y_pred_bin, y_pred = alpaca.predict(X_test, voting=voting)
            # Plot confusion matrix (Binary): collapse all fault classes to 1.
            y_test_bin = np.copy(y_test)
            y_test_bin[y_test_bin > 0] = 1
            tn, fp, fn, tp = confusion_matrix(y_test_bin, y_pred_bin).ravel()
            # Append overall error
            results.append([votingstr, r, 'err', (fp+fn)/(tn+fp+fn+tp)])
            # Append false negative rate
            results.append([votingstr, r, 'fnr', fn/(fn+tp)])
            # Append false positive rate
            results.append([votingstr, r, 'fpr', fp/(fp+tn)])
            # Save misclassified samples (with y_pred,y_pred_bin, y_true, and voting scheme)
            idx = np.where(y_test_bin != y_pred_bin)
            # Flattened curves
            curves = X_test[idx].transpose(0, 2, 1).reshape(X_test[idx].shape[0],-1)
            vote_type = np.array([votingstr for i in range(idx[0].shape[0])]).reshape((-1,1))
            wrong = np.hstack([curves, y_pred[idx].reshape((-1,1)),y_test[idx].reshape((-1,1)),
                               y_pred_bin[idx].reshape((-1,1)),y_test_bin[idx].reshape((-1,1)), vote_type])
            outliers = np.vstack((outliers,wrong))
    # Persist the misclassified curves and the per-run metrics
    # (Windows-style relative paths).
    df = pd.DataFrame(outliers)
    df.to_csv("..\\results\\OutliersVotingTest.csv")
    df = pd.DataFrame(results, columns=['voting', 'test', 'metric', 'value'])
    df.to_csv("..\\results\\VotingTest.csv")
|
# coding: utf-8
# In[2]:
import torch
import os
import numpy as np
# Pin execution to GPU 0 and make CUDA errors surface synchronously
# (easier to trace back to the offending kernel while debugging).
os.environ["CUDA_VISIBLE_DEVICES"]= "0"
os.environ['CUDA_LAUNCH_BLOCKING'] = "1"
import torch.backends.cudnn as cudnn
# Let cuDNN benchmark and pick the fastest algorithms for fixed input sizes.
cudnn.benchmark = True
import modelio
import feature_handler
import masked_softmax
# In[ ]:
def prepare_training_data_vocab(training_data):
    """Collect the distinct LU, frame and frame-element labels that occur in
    `training_data`.

    Each token `t` is an indexable record where t[12] is the lexical unit
    ('_' when absent), t[13] the frame ('_' when absent) and t[14] a BIO
    frame-element tag ('O' when absent, else e.g. 'B-Agent').

    Returns (lu_vocab, frame_vocab, fe_vocab) as de-duplicated lists.
    """
    lu_vocab_in_train, frame_vocab_in_train, fe_vocab_in_train = [], [], []
    for tokens in training_data:
        for t in tokens:
            lu = t[12]
            if lu != '_':
                lu_vocab_in_train.append(lu)
            frame = t[13]
            if frame != '_':
                frame_vocab_in_train.append(frame)
            bio_fe = t[14]
            if bio_fe != 'O':
                fe = bio_fe.split('-')[1]
                # Bug fix: the FE label was computed but never collected,
                # so fe_vocab_in_train was always returned empty.
                fe_vocab_in_train.append(fe)
    lu_vocab_in_train = list(set(lu_vocab_in_train))
    frame_vocab_in_train = list(set(frame_vocab_in_train))
    fe_vocab_in_train = list(set(fe_vocab_in_train))
    return lu_vocab_in_train, frame_vocab_in_train, fe_vocab_in_train
# In[ ]:
class live(object):
    """Evaluation harness: scores a trained frame/argument model on test data
    using the vocabulary-to-index maps built at training time."""

    def __init__(self, language, word_to_ix, pos_to_ix, dp_to_ix, josa_to_ix, frame_to_ix, lu_to_ix, fe_to_ix, josa_onlyPOS, usingGPU):
        # Vocabulary maps and runtime flags used by every evaluation run.
        self.usingGPU = usingGPU
        self.josa_onlyPOS = josa_onlyPOS
        self.language = language
        self.word_to_ix = word_to_ix
        self.pos_to_ix = pos_to_ix
        self.dp_to_ix = dp_to_ix
        self.josa_to_ix = josa_to_ix
        self.frame_to_ix = frame_to_ix
        self.lu_to_ix = lu_to_ix
        self.fe_to_ix = fe_to_ix

    # NOTE(review): placeholder; missing `self`, so calling it on an
    # instance would raise TypeError.
    def frame_evaluation():
        pass

    def argument_evaluation(self, model, training_data, test_data):
        """Evaluate frame-element (argument) labelling on `test_data`.

        Only sentences whose LU and frame were seen in `training_data` are
        scored. Returns (precision, recall, f1, accuracy), each rounded or
        derived from the span-level counters below.
        """
        feature_extractor = feature_handler.extractor(self.language, self.josa_onlyPOS, self.usingGPU)
        prepare = modelio.prepare(self.usingGPU)
        tensor2tag = modelio.tensor2tag(self.frame_to_ix, self.fe_to_ix, self.usingGPU)
        lu_vocab_in_train, frame_vocab_in_train, fe_vocab_in_train = prepare_training_data_vocab(training_data)
        acc, total = 0,0
        tp,fn,tn,fp, found = 0,0,0,0,0
        for tokens in test_data:
            sentence, pos, dp, lu, frame = prepare.prepare_sentence(tokens)
            # Skip sentences whose LU or frame was never seen in training.
            if lu in lu_vocab_in_train:
                if frame in frame_vocab_in_train:
                    sentence, pos, dp, lu, frame = prepare.prepare_sentence(tokens)
                    target_position = feature_extractor.get_targetpositions(tokens)
                    sentence_in = prepare.prepare_sequence(sentence, self.word_to_ix)
                    pos_in = prepare.prepare_sequence(pos, self.pos_to_ix)
                    dp_in = prepare.prepare_sequence(dp, self.dp_to_ix)
                    # frame_in = prepare.prepare_ix(frame, self.frame_to_ix)
                    # lu_in = prepare.prepare_ix(lu, self.lu_to_ix)
                    positions = feature_extractor.get_argpositions(tokens)
                    # Build one gold record per argument span, bundling the
                    # tensors the model needs for that span.
                    gold_spans = []
                    for arg_position in positions:
                        arg = arg_position[2]
                        arg_in = prepare.prepare_ix(arg, self.fe_to_ix)
                        josa = feature_extractor.get_josa(tokens, arg_position)
                        last_dp = feature_extractor.get_last_dp(tokens, arg_position)
                        josa_in = prepare.prepare_ix(josa, self.josa_to_ix)
                        last_dp_in = prepare.prepare_ix(last_dp, self.dp_to_ix)
                        # if arg_position[2] != 'O':
                        gold_span = {}
                        arg_span = {}
                        arg_span['begin'] = arg_position[0]
                        arg_span['end'] = arg_position[1]
                        gold_span['arg'] = arg
                        gold_span['span'] = arg_span
                        gold_span['arg_in'] = arg_in
                        gold_span['josa_in'] = josa_in
                        gold_span['last_dp_in'] = last_dp_in
                        gold_spans.append(gold_span)
                    for gold_arg in gold_spans:
                        # Reset gradients and LSTM hidden states before each
                        # span prediction.
                        model.zero_grad()
                        model.hidden_lstm_tok = model.init_hidden_lstm_tok()
                        model.hidden_lstm_tgt = model.init_hidden_lstm_tgt()
                        model.hidden_lstm_arg = model.init_hidden_lstm_arg()
                        arg_span = gold_arg['span']
                        arg_in = gold_arg['arg_in']
                        josa_in = gold_arg['josa_in']
                        last_dp_in = gold_arg['last_dp_in']
                        tag_scores = model(sentence_in, pos_in, dp_in, josa_in, last_dp_in, arg_in, target_position, arg_span, lu, frame, sentence)
                        gold = gold_arg['arg']
                        score, pred = tensor2tag.get_fe_by_tensor(tag_scores)
                        # print(gold, pred)
                        # Span-level counters: tp/fp over predicted labels,
                        # fn/found over gold labels, acc/total over found ones.
                        if pred != 'O':
                            if pred == gold:
                                tp += 1
                            else:
                                fp += 1
                        if gold != 'O':
                            if pred == 'O':
                                fn += 1
                            else:
                                found += 1
                                if pred == gold:
                                    acc += 1
                                    total += 1
                                else:
                                    total += 1
            # break
        # Precision over predicted spans; recall over gold spans the model
        # assigned any (non-'O') label to.
        if tp == 0:
            precision = 0
        else:
            precision = tp / (tp+fp)
        if found == 0:
            recall = 0
        else:
            recall = found / (found+fn)
        if precision == 0 or recall == 0:
            f1 = 0
        else:
            f1 = 2 * (precision*recall) / (precision+recall)
        precision = round(precision, 4)
        recall = round(recall, 4)
        f1 = round(f1, 4)
        if acc != 0:
            accuracy = acc / total
        else:
            accuracy = 0
        return precision, recall, f1, accuracy
|
from selenium import webdriver
from scrap import scrap
import settings
if __name__ == "__main__":
    # Configure the Chrome web driver (options come from settings).
    driver = webdriver.Chrome(r"chromedriver.exe", options=settings.chrome_options)
    # Log in to Amazon
    scrap.log_in_amazon(driver)
    # Navigate to the order history page
    scrap.go_to_order_history(driver)
    #
|
class emp:
    """Simple employee record holding a name and a salary."""
    # Class-level defaults; instances override both in __init__.
    sal=10
    name="anonymous"
    def __init__(self,name,sal): # constructor
        self.name=name
        self.sal=sal
    def disp(self):
        # Print the employee's name and salary.
        print(self.name," salary:",self.sal)
# Demo: create employees, mutate one, and inspect class metadata.
emp1=emp("yopp",123)
emp1.name="yoss"
emp1.disp()
emp2=emp("kaka",34)
emp2.disp()
print(hasattr(emp1,"sal")) #check the attribute
print("doc:",emp.__doc__)
print(emp.__name__)#class name
print(emp.__module__)#module name
del emp1 #delete the name binding
# Bug fix: the original called emp1.disp() after `del emp1`, which raises
# NameError because the name no longer exists.
|
# -*- coding: utf-8 -*-
from main import DOORPI
# Register this module with DoorPi and obtain a module-specific logger.
logger = DOORPI.register_module(__name__, return_new_logger = True)
import time
import datetime
from resources.event_handler.classes import SingleAction
class TimeTickDestroyAction(SingleAction): pass
class TimeTicker:
    """Polls the clock and fires DoorPi 'OnTime*' events whenever a second,
    minute, hour, day, month or year boundary has been crossed since the
    previous tick."""

    # Timestamp of the previous tick, and the last sub-second slot that
    # produced an OnTimeTick event.
    last_time_tick = 0
    last_realtime_event = 0

    def start(self):
        """Register every time-based event plus the shutdown hook.

        Returns self so callers can chain on construction.
        """
        # register timebased_events
        DOORPI.events.register_event('OnTimeTick', __name__)
        DOORPI.events.register_event('OnTimeSecond', __name__)
        DOORPI.events.register_event('OnTimeSecondEvenNumber', __name__)
        DOORPI.events.register_event('OnTimeSecondUnevenNumber', __name__)
        DOORPI.events.register_event('OnTimeMinute', __name__)
        DOORPI.events.register_event('OnTimeMinuteEvenNumber', __name__)
        DOORPI.events.register_event('OnTimeMinuteUnevenNumber', __name__)
        for minute in DOORPI.CONST.MINUTE_RANGE:
            DOORPI.events.register_event('OnTimeMinute%s'%minute, __name__)
        DOORPI.events.register_event('OnTimeMinuteEvery5', __name__)
        DOORPI.events.register_event('OnTimeHour', __name__)
        DOORPI.events.register_event('OnTimeHourEvenNumber', __name__)
        DOORPI.events.register_event('OnTimeHourUnevenNumber', __name__)
        for hour in DOORPI.CONST.HOUR_RANGE:
            DOORPI.events.register_event('OnTimeHour%s'%hour, __name__)
        DOORPI.events.register_event('OnTimeDay', __name__)
        DOORPI.events.register_event('OnTimeDayEvenNumber', __name__)
        DOORPI.events.register_event('OnTimeDayUnevenNumber', __name__)
        # NOTE(review): the Week events are registered here but never fired
        # in do_tick_tack — confirm whether that is intentional.
        DOORPI.events.register_event('OnTimeWeek', __name__)
        DOORPI.events.register_event('OnTimeWeekEvenNumber', __name__)
        DOORPI.events.register_event('OnTimeWeekUnevenNumber', __name__)
        DOORPI.events.register_event('OnTimeMonth', __name__)
        DOORPI.events.register_event('OnTimeMonthEvenNumber', __name__)
        DOORPI.events.register_event('OnTimeMonthUnevenNumber', __name__)
        DOORPI.events.register_event('OnTimeYear', __name__)
        DOORPI.events.register_event('OnTimeYearEvenNumber', __name__)
        DOORPI.events.register_event('OnTimeYearUnevenNumber', __name__)
        DOORPI.events.register_action('OnShutdown', TimeTickDestroyAction(self.stop))
        return self

    def stop(self):
        """Unregister everything this module registered."""
        DOORPI.events.unregister_source(__name__, True)

    def do_tick_tack(self, time_for_this_tick = 0.2):
        """Fire events for each calendar boundary crossed since the last
        tick, then sleep out the remainder of the tick interval.

        Bug fixes vs. the original:
        * integer comparisons used `is`/`is not` (identity, not equality,
          which is undefined outside CPython's small-int cache);
        * the sub-second slot used true division, producing a float so the
          `% 2 == 0` check could never behave as intended on Python 3;
        * the sleep time added the elapsed time instead of subtracting it,
          making each tick sleep longer than `time_for_this_tick`.
        """
        timestamp_now = time.time()
        timestamp_past = self.last_time_tick
        datetime_now = datetime.datetime.fromtimestamp(timestamp_now)
        datetime_past = datetime.datetime.fromtimestamp(timestamp_past)
        if datetime_now.year != datetime_past.year:
            DOORPI.events('OnTimeYear', __name__)
            if datetime_now.year % 2 == 0: DOORPI.events('OnTimeYearEvenNumber', __name__)
            else: DOORPI.events('OnTimeYearUnevenNumber', __name__)
        if datetime_now.month != datetime_past.month:
            DOORPI.events('OnTimeMonth', __name__)
            if datetime_now.month % 2 == 0: DOORPI.events('OnTimeMonthEvenNumber', __name__)
            else: DOORPI.events('OnTimeMonthUnevenNumber', __name__)
        if datetime_now.day != datetime_past.day:
            DOORPI.events('OnTimeDay', __name__)
            if datetime_now.day % 2 == 0: DOORPI.events('OnTimeDayEvenNumber', __name__)
            else: DOORPI.events('OnTimeDayUnevenNumber', __name__)
        if datetime_now.hour != datetime_past.hour:
            DOORPI.events('OnTimeHour', __name__)
            if datetime_now.hour % 2 == 0: DOORPI.events('OnTimeHourEvenNumber', __name__)
            else: DOORPI.events('OnTimeHourUnevenNumber', __name__)
            for hour in DOORPI.CONST.HOUR_RANGE:
                if hour == datetime_now.hour: DOORPI.events('OnTimeHour%s'%hour, __name__)
        if datetime_now.minute != datetime_past.minute:
            DOORPI.events('OnTimeMinute', __name__)
            if datetime_now.minute % 2 == 0: DOORPI.events('OnTimeMinuteEvenNumber', __name__)
            else: DOORPI.events('OnTimeMinuteUnevenNumber', __name__)
            for minute in DOORPI.CONST.MINUTE_RANGE:
                if minute == datetime_now.minute: DOORPI.events('OnTimeMinute%s'%minute, __name__)
            if datetime_now.minute % 5 == 0: DOORPI.events('OnTimeMinuteEvery5', __name__)
        if datetime_now.second != datetime_past.second:
            DOORPI.events('OnTimeSecond', __name__)
            if datetime_now.second % 2 == 0: DOORPI.events('OnTimeSecondEvenNumber', __name__)
            else: DOORPI.events('OnTimeSecondUnevenNumber', __name__)
        # Integer tenth-of-a-second slot (0..9); fire OnTimeTick at most once
        # per even slot.
        microsecond = datetime_now.microsecond // 100000
        if (microsecond % 2 == 0 or microsecond == 0) and microsecond != self.last_realtime_event:
            self.last_realtime_event = microsecond
            DOORPI.events('OnTimeTick', __name__)
        self.last_time_tick = timestamp_now
        # Sleep out whatever is left of this tick's time budget.
        sleep_time = time_for_this_tick - (time.time() - timestamp_now)
        if sleep_time > 0: time.sleep(sleep_time)
        return True
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Tue Sep 24 00:22:57 2019
@author: kevin
"""
import cv2
import numpy as np
# Bug fix: "import PyQt5 import QtCore, QtGui, QtWidgets" was a SyntaxError;
# the from-import form is the valid spelling. QThread and pyqtSignal are used
# below but were never imported, so they are brought in explicitly as well.
from PyQt5 import QtCore, QtGui, QtWidgets
from PyQt5.QtCore import QThread, pyqtSignal
from Epc660 import *
class ImageThread(QThread):
    """Worker thread that grabs one DCS image from the camera and emits it.

    Bug fixes vs. the original: the base list included the undefined name
    `camera`, QThread.__init__ was given the camera object it does not
    accept, and run() took a `camera` parameter even though Qt invokes
    run() with no arguments.
    """
    # Emitted with the captured image once run() completes.
    signal = pyqtSignal('PyQt_PyObject')

    def __init__(self, camera):
        QThread.__init__(self)
        self.camera = camera

    def run(self):
        dcs_img = self.camera.take_image('dcs')
        self.signal.emit(dcs_img)
## Why didn't this version work??? (It removes items from `words` while iterating over the same list, which skips elements.)
import re
class Solution:
    """First attempt at LeetCode 819 "Most Common Word", fixed."""

    def mostCommonWord(self, paragraph, banned):
        """Return the most frequent word in `paragraph` (case-insensitive)
        that is not in `banned`. Assumes at least one such word exists.
        """
        # Fix: `collections` was used below but never imported.
        import collections
        banned=set(banned)
        words = re.findall(r'\w+', paragraph.lower())
        print(words)
        # Bug fix: the original removed items from `words` while iterating
        # the same list, which skips elements (e.g. consecutive banned
        # words); build a filtered list instead.
        words = [w for w in words if w not in banned]
        dic = collections.Counter(words).most_common(1)
        return dic[0][0]
## This version works.
import re
class Solution:
    """Working solution for LeetCode 819 "Most Common Word"."""

    def mostCommonWord(self, paragraph, banned):
        """Return the most frequent non-banned word in `paragraph`,
        case-insensitively. Assumes at least one such word exists.
        """
        # Fix: `collections` was used below but never imported.
        import collections
        banned=set(banned)
        words = re.findall(r'\w+', paragraph.lower())
        return collections.Counter(w for w in words if w not in banned).most_common(1)[0][0]
|
# -*- coding: utf-8 -*-
import base64
from datetime import date, datetime
from dateutil.relativedelta import relativedelta
from odoo import api, fields, models, _
from odoo.addons.hr_payroll.models.browsable_object import BrowsableObject, InputLine, WorkedDays, Payslips
from odoo.exceptions import UserError, ValidationError
from odoo.tools import float_round, date_utils
from odoo.tools.misc import format_date
from odoo.tools.safe_eval import safe_eval
from odoo.tools import float_compare, float_is_zero
class HrPayslipRun(models.Model):
    """Payslip batch override: validating a batch posts each payslip, then
    merges all the per-payslip journal entries into one account move for
    the whole batch."""
    _inherit = 'hr.payslip.run'

    # def create_draft_entry_from_api(self,payslip):
    #     ret_db=self.env['db.credential'].search([],limit=1)
    #     if not ret_db:
    #         return None
    #     url = ret_db.server_url
    #     db = ret_db.db_name
    #     username = ret_db.db_user
    #     password = ret_db.db_password
    #
    #     common = xmlrpc_client.ServerProxy('{}/xmlrpc/2/common'.format(url)
    #     , verbose=False, use_datetime=True, context=ssl._create_unverified_context())
    #     uid = common.authenticate(db, username, password, {})
    #     models = xmlrpc_client.ServerProxy('{}/xmlrpc/2/object'.format(url), allow_none=True
    #     , verbose=False, use_datetime=True, context=ssl._create_unverified_context())
    #
    #     models.execute_kw(db, uid, password,
    #     'hr.payslip', 'call_action_payslip_done',
    #     [payslip.id])
    # ##
    def action_validate(self):
        """
        in action_validate
        stop code and loop on payslips call custom action_payslip_done
        and get move linked for each payslip then get lines inside it
        loop to inc lines
        remove the moves
        create new one
        """
        for rec in self:
            batch_move_details = {}
            counter = 1
            move_lines = []
            for payslip in rec.slip_ids.filtered(lambda slip: slip.state != 'cancel'):
                # call payslip.action_payslip_done() from the api as looping on payslips
                # of batch and call create draft entry not create a move
                # but all payslips in batch has one move
                payslip.custom_action_payslip_done()
                move = payslip.move_id
                # The merged move's header fields come from the first
                # payslip's move.
                if counter == 1:
                    batch_move_details['ref'] = move.ref
                    # batch_move_details['review'] = move.review
                    batch_move_details['date'] = str(move.date)
                    batch_move_details['journal_id'] = move.journal_id.id
                    batch_move_details['company_id'] = move.company_id.id
                    batch_move_details['invoice_user_id'] = move.invoice_user_id.id
                    batch_move_details['team_id'] = move.team_id.id
                    batch_move_details['auto_post'] = move.auto_post
                # Copy every line of the payslip's move into move_lines.
                for move_line in move.line_ids:
                    print('analytic_account_id',move_line.analytic_account_id.id)
                    move_lines.append(
                        {
                            'account_id': move_line.account_id.id,
                            'analytic_account_id': move_line.analytic_account_id.id,
                            'partner_id': move_line.partner_id.id,
                            'name': move_line.name,
                            'debit': move_line.debit,
                            'credit': move_line.credit,
                            # 'purchase_price':move_line.purchase_price,
                        })
                counter += 1
                # Drop the per-payslip move; its lines now live in move_lines.
                move.unlink()
            # create new move as to be for the whole batch
            # with the all move lines of payslips
            # merge move lines #run if need merging with analytic account and account id
            # merged_move_lines=[]
            # for line in move_lines:
            #     merged=False
            #     for merged_move in merged_move_lines:
            #         if line['account_id'] == merged_move['account_id'] and line['analytic_account_id'] == merged_move['analytic_account_id']:
            #             if merged_move['debit']!=0 and line['debit']!=0:
            #                 merged_move['debit']+=line['debit']
            #                 merged = True
            #                 break
            #             if merged_move['credit'] != 0 and line['credit']!=0:
            #                 merged_move['credit']+=line['credit']
            #                 merged=True
            #                 break
            #     if merged:
            #         continue
            #
            #     else:
            #         merged_move_lines.append(line)
            # create move for batch
            batch_move_details['line_ids'] = [(0, 0, line_info) for line_info in
                                              move_lines]  # merged_move_lines if merged
            move_for_batch = self.env['account.move'].create(batch_move_details)
            # Re-link every posted payslip to the merged batch move.
            for payslip in rec.slip_ids.filtered(lambda slip: slip.state == 'done'):
                payslip.move_id = move_for_batch.id
            rec.action_close()
class HrPayslip(models.Model):
    """Payslip override: a custom posting flow (used by the batch merge in
    hr.payslip.run) that replicates hr_payroll / hr_payroll_account posting,
    plus analytic-account and partner fixes on the generated move lines."""
    _inherit = 'hr.payslip'

    # @api.model
    # def call_action_payslip_done(self,payslip_id):
    #     payslip=self.env['hr.payslip'].search([('id','=',payslip_id)])
    #     if payslip:
    #         payslip.custom_action_payslip_done()
    #     return True
    def action_payslip_done_in_hr_payroll_account(self):
        ##
        """
        Generate the accounting entries related to the selected payslips
        A move is created for each journal and for each month.
        """
        precision = self.env['decimal.precision'].precision_get('Payroll')
        # Add payslip without run
        payslips_to_post = self  # .filtered(lambda slip: not slip.payslip_run_id)
        # Adding pay slips from a batch and deleting pay slips with a batch that is not ready for validation.
        payslip_runs = (self - payslips_to_post).mapped('payslip_run_id')
        for run in payslip_runs:
            if run._are_payslips_ready():
                payslips_to_post |= run.slip_ids
        # A payslip need to have a done state and not an accounting move.
        payslips_to_post = payslips_to_post.filtered(lambda slip: slip.state == 'done' and not slip.move_id)
        # Check that a journal exists on all the structures
        if any(not payslip.struct_id for payslip in payslips_to_post):
            raise ValidationError(_('One of the contract for these payslips has no structure type.'))
        if any(not structure.journal_id for structure in payslips_to_post.mapped('struct_id')):
            raise ValidationError(_('One of the payroll structures has no account journal defined on it.'))
        # Map all payslips by structure journal and pay slips month.
        # {'journal_id': {'month': [slip_ids]}}
        slip_mapped_data = {
            slip.struct_id.journal_id.id: {fields.Date().end_of(slip.date_to, 'month'): self.env['hr.payslip']} for slip
            in payslips_to_post}
        for slip in payslips_to_post:
            slip_mapped_data[slip.struct_id.journal_id.id][fields.Date().end_of(slip.date_to, 'month')] |= slip
        for journal_id in slip_mapped_data:  # For each journal_id.
            for slip_date in slip_mapped_data[journal_id]:  # For each month.
                line_ids = []
                debit_sum = 0.0
                credit_sum = 0.0
                date = slip_date
                move_dict = {
                    'narration': '',
                    'ref': date.strftime('%B %Y'),
                    'journal_id': journal_id,
                    'date': date,
                }
                for slip in slip_mapped_data[journal_id][slip_date]:
                    # Bug fix: `+` binds tighter than `or`, so the original
                    # expression dropped the employee name whenever
                    # slip.number was set; parenthesize both fallbacks.
                    move_dict['narration'] += (slip.number or '') + ' - ' + (slip.employee_id.name or '')
                    move_dict['narration'] += '\n'
                    for line in slip.line_ids.filtered(lambda line: line.category_id):
                        amount = -line.total if slip.credit_note else line.total
                        if line.code == 'NET':  # Check if the line is the 'Net Salary'.
                            for tmp_line in slip.line_ids.filtered(lambda line: line.category_id):
                                if tmp_line.salary_rule_id.not_computed_in_net:  # Check if the rule must be computed in the 'Net Salary' or not.
                                    if amount > 0:
                                        amount -= abs(tmp_line.total)
                                    elif amount < 0:
                                        amount += abs(tmp_line.total)
                        if float_is_zero(amount, precision_digits=precision):
                            continue
                        debit_account_id = line.salary_rule_id.account_debit.id
                        credit_account_id = line.salary_rule_id.account_credit.id
                        if debit_account_id:  # If the rule has a debit account.
                            debit = amount if amount > 0.0 else 0.0
                            credit = -amount if amount < 0.0 else 0.0
                            # Reuse an existing compatible line (same name,
                            # account and analytic account) when possible.
                            existing_debit_lines = (
                                line_id for line_id in line_ids if
                                line_id['name'] == line.name
                                and line_id['account_id'] == debit_account_id
                                and line_id['analytic_account_id'] == (
                                    line.salary_rule_id.analytic_account_id.id or slip.contract_id.analytic_account_id.id)
                                and ((line_id['debit'] > 0 and credit <= 0) or (line_id['credit'] > 0 and debit <= 0)))
                            debit_line = next(existing_debit_lines, False)
                            if not debit_line:
                                debit_line = {
                                    'name': line.name,
                                    'partner_id': slip.employee_id.address_id.id,
                                    'account_id': debit_account_id,
                                    'journal_id': slip.struct_id.journal_id.id,
                                    'date': date,
                                    'debit': debit,
                                    'credit': credit,
                                    'analytic_account_id': line.salary_rule_id.analytic_account_id.id or slip.contract_id.analytic_account_id.id,
                                }
                                line_ids.append(debit_line)
                            else:
                                debit_line['debit'] += debit
                                debit_line['credit'] += credit
                        if credit_account_id:  # If the rule has a credit account.
                            debit = -amount if amount < 0.0 else 0.0
                            credit = amount if amount > 0.0 else 0.0
                            existing_credit_line = (
                                line_id for line_id in line_ids if
                                line_id['name'] == line.name
                                and line_id['account_id'] == credit_account_id
                                and line_id['analytic_account_id'] == (
                                    line.salary_rule_id.analytic_account_id.id or slip.contract_id.analytic_account_id.id)
                                and ((line_id['debit'] > 0 and credit <= 0) or (line_id['credit'] > 0 and debit <= 0))
                            )
                            credit_line = next(existing_credit_line, False)
                            if not credit_line:
                                credit_line = {
                                    'name': line.name,
                                    'partner_id': slip.employee_id.address_id.id,
                                    'account_id': credit_account_id,
                                    'journal_id': slip.struct_id.journal_id.id,
                                    'date': date,
                                    'debit': debit,
                                    'credit': credit,
                                    'analytic_account_id': line.salary_rule_id.analytic_account_id.id or slip.contract_id.analytic_account_id.id,
                                }
                                line_ids.append(credit_line)
                            else:
                                credit_line['debit'] += debit
                                credit_line['credit'] += credit
                for line_id in line_ids:  # Get the debit and credit sum.
                    debit_sum += line_id['debit']
                    credit_sum += line_id['credit']
                # The code below is called if there is an error in the balance between credit and debit sum.
                if float_compare(credit_sum, debit_sum, precision_digits=precision) == -1:
                    acc_id = slip.journal_id.default_credit_account_id.id
                    if not acc_id:
                        raise UserError(
                            _('The Expense Journal "%s" has not properly configured the Credit Account!') % (
                                slip.journal_id.name))
                    existing_adjustment_line = (
                        line_id for line_id in line_ids if line_id['name'] == _('Adjustment Entry')
                    )
                    adjust_credit = next(existing_adjustment_line, False)
                    if not adjust_credit:
                        adjust_credit = {
                            'name': _('Adjustment Entry'),
                            'partner_id': False,
                            'account_id': acc_id,
                            'journal_id': slip.journal_id.id,
                            'date': date,
                            'debit': 0.0,
                            'credit': debit_sum - credit_sum,
                        }
                        line_ids.append(adjust_credit)
                    else:
                        adjust_credit['credit'] = debit_sum - credit_sum
                elif float_compare(debit_sum, credit_sum, precision_digits=precision) == -1:
                    acc_id = slip.journal_id.default_debit_account_id.id
                    if not acc_id:
                        raise UserError(_('The Expense Journal "%s" has not properly configured the Debit Account!') % (
                            slip.journal_id.name))
                    existing_adjustment_line = (
                        line_id for line_id in line_ids if line_id['name'] == _('Adjustment Entry')
                    )
                    adjust_debit = next(existing_adjustment_line, False)
                    if not adjust_debit:
                        adjust_debit = {
                            'name': _('Adjustment Entry'),
                            'partner_id': False,
                            'account_id': acc_id,
                            'journal_id': slip.journal_id.id,
                            'date': date,
                            'debit': credit_sum - debit_sum,
                            'credit': 0.0,
                        }
                        line_ids.append(adjust_debit)
                    else:
                        adjust_debit['debit'] = credit_sum - debit_sum
                # Add accounting lines in the move
                move_dict['line_ids'] = [(0, 0, line_vals) for line_vals in line_ids]
                move = self.env['account.move'].create(move_dict)
                for slip in slip_mapped_data[journal_id][slip_date]:
                    slip.write({'move_id': move.id, 'date': date})
        ##

    def action_payslip_done_in_hr_payroll(self):
        ##
        """Replicate hr_payroll's state transition: mark slips done, close
        their batches, and optionally render/attach the payslip PDF."""
        if any(slip.state == 'cancel' for slip in self):
            raise ValidationError(_("You can't validate a cancelled payslip."))
        self.write({'state': 'done'})
        self.mapped('payslip_run_id').action_close()
        if self.env.context.get('payslip_generate_pdf'):
            for payslip in self:
                if not payslip.struct_id or not payslip.struct_id.report_id:
                    report = self.env.ref('hr_payroll.action_report_payslip', False)
                else:
                    report = payslip.struct_id.report_id
                pdf_content, content_type = report.render_qweb_pdf(payslip.id)
                if payslip.struct_id.report_id.print_report_name:
                    pdf_name = safe_eval(payslip.struct_id.report_id.print_report_name, {'object': payslip})
                else:
                    pdf_name = _("Payslip")
                self.env['ir.attachment'].create({
                    'name': pdf_name,
                    'type': 'binary',
                    # Bug fix: base64.encodestring was deprecated and removed
                    # in Python 3.9; encodebytes is the drop-in replacement.
                    'datas': base64.encodebytes(pdf_content),
                    'res_model': payslip._name,
                    'res_id': payslip.id
                })
        ##

    # to be called from the payslip batch when press crate draft entry
    # to create moves for payslips,then merge them in one move
    def custom_action_payslip_done(self):
        """Post a payslip without going through the super() chain: run the
        hr_payroll state transition, then the hr_payroll_account posting."""
        # make it as the functions named action_payslip_done in hr_payroll and hr_payrol_account
        res = self.action_payslip_done_in_hr_payroll()
        # action_payslip_done in hr_payroll
        ##action_payslip_done in hr_payroll_account
        self.action_payslip_done_in_hr_payroll_account()
        ##chanage the analytic account to the one in contract
        ##
        # for slip in self:
        #     # slip.write({'move_id': move.id, 'date': date})
        #     if slip.employee_id.contract_id and slip.employee_id.contract_id.analytic_account_id:
        #         analytic_account_in_contract = slip.employee_id.contract_id.analytic_account_id.id
        #         # change analytic account in move lines
        #         if slip.move_id:
        #             for rec in slip.move_id.line_ids:
        #                 rec.write({
        #                     # 'analytic_account_id': analytic_account_in_contract,
        #                     'partner_id': slip.employee_id.address_id.id,
        #                 })
        ##
        ##
        return res

    # for create draft entry from a payslip,
    # it's set analtic account of contratc on the journal entry
    def action_payslip_done(self):
        """
        Generate the accounting entries related to the selected payslips
        A move is created for each journal and for each month.

        After the standard posting, stamp the employee's address partner and
        the contract's analytic account onto the generated move lines.
        """
        res = super(HrPayslip, self).action_payslip_done()
        ##
        for slip in self:
            # slip.write({'move_id': move.id, 'date': date})
            if slip.employee_id.contract_id and slip.employee_id.contract_id.analytic_account_id:
                analytic_account_in_contract = slip.employee_id.contract_id.analytic_account_id.id
                # change analytic account in move lines
                if slip.move_id:
                    slip.move_id.ref = slip.number
                    for rec in slip.move_id.line_ids:
                        rec.write({
                            # 'analytic_account_id':analytic_account_in_contract,
                            'partner_id': slip.employee_id.address_id.id,
                        })
        for slip in self:
            # slip.write({'move_id': move.id, 'date': date})
            if slip.employee_id.contract_id and slip.employee_id.contract_id.analytic_account_id:
                analytic_account_in_contract = slip.employee_id.contract_id.analytic_account_id.id
                # change analytic account in move lines
                if slip.move_id:
                    for rec in slip.move_id.line_ids:
                        rec.write({
                            'analytic_account_id':analytic_account_in_contract,
                            # 'partner_id': slip.employee_id.address_id.id,
                        })
                    analytic_account_in_contract = slip.employee_id.contract_id
        ##
        return res
|
import os
from .default import *
# Local development settings; everything else is inherited from .default.
SITE_URL = 'http://127.0.0.1:8000'
# Spotify OAuth credentials come from the environment (None when unset).
SPOTIFY_CLIENT_ID = os.getenv('SPOTIFY_CLIENT_ID')
SPOTIFY_CLIENT_SECRET = os.getenv('SPOTIFY_CLIENT_SECRET')
SPOTIFY_REDIRECT_URL = '{}/spotify_authorize_callback/'.format(SITE_URL)
|
import ast
import logging
from State import *
from Property import *
from SymbolicVariable import *
from Z3Solver import *
from BinarySolver import *
class Tree:
"""The Computation Tree, which walks down the Python Abstract Syntax Tree (AST)
Parameters
----------
code : string
A string of python to be symbolically executed
Attributes
----------
ast: ast.Module
The full AST, whose nodes are then referenced by symbolic states
root : State
The root state, all future states are an r or l-node
state : State
The active symbolic state
state_queue : List[State]
The queue of inactive, but satisfiable symbolic states
z3 : Z3Solver
The handler class for Z3 SMT solving
"""
    def __init__(self, code: str):
        """Parse `code`, seed the root symbolic state, and start the walk."""
        # Root symbolic state
        self.root_state = State()
        # The Abstract Symbolic Tree
        self.root_ast = list(ast.iter_child_nodes(ast.parse(code)))
        # If string is a function definition
        if isinstance(self.root_ast[0], ast.FunctionDef):
            # Initiate "empty" symbolic variables
            for arg in self.root_ast[0].args.args:
                self.root_state.createSymbolicVariable(arg.arg)
            # Walk the function's body rather than the def node itself.
            self.root_ast = self.root_ast[0].body
        self.z3 = Z3Solver()
        self.walk(self.root_state, self.root_ast)
    def walk(self, state, tree):
        """Walking down the AST at depth 1

        Parameters
        ----------
        state : State
            The symbolic state statements are evaluated against; unsat
            states are abandoned immediately.
        tree : Any
            The AST being walked down (a list of statement nodes)
        """
        if state.sat == False:
            return
        for i in range(len(tree)):
            node = tree[i]
            t = type(node)
            logging.debug("Evaluating AST node of type %s" % t)
            # Is a variable assignment (e.g. x = y)
            if t is ast.Assign:
                # Can have multiple variables left of the operator (e.g. x, y, z = 2)
                for target in node.targets:
                    self.assign(state, node.value, target.id)
            # Is an augmented variable assignment (e.g. x += y)
            elif t is ast.AugAssign:
                self.augassign(state, node)
            # Is an IF statement symbolic state gets forked
            # Interrupt execution
            elif t is ast.If:
                self.conditional(state, node, i)
                # Terminate the original state
                state.active = False
                # Continue from i+1: re-passing index i would re-evaluate
                # the conditional indefinitely.
                self.walk(state.left, tree[i+1:])
                self.walk(state.right, tree[i+1:])
                # Interrupt execution
                return
            # Is everything else
            else:
                logging.debug("Ignored AST node")
def assign(self, state: State, value: Any, name: str):
"""Handle assignment operations
Parameters
----------
value : Any
The value will be the initial state of the symbolic variable
e.g. name = y, value = x => y__0 == x__0
name : str
The name of the concrete variable
"""
# Assigning a variable's value to another variable
if isinstance(value, ast.Name):
v = state.getActiveVariable(value.id)
state.createSymbolicVariable(name, v)
# Assigning a constant value to the variable (simplest)
elif isinstance(value, ast.Constant):
state.createSymbolicVariable(name, value.value)
# Go recursive if there is a binary operation
elif isinstance(value, ast.BinOp):
# Translate binary operation to properties, or simplified constant
solver = BinarySolver(state)
p = solver.solve(value.left, value.op, value.right)
if isinstance(p, ast.Constant):
state.createSymbolicVariable(name, p.value)
else:
state.createSymbolicVariable(name, p)
else:
logging.error("The type %s of the assignment value for variable is unrecognized" % type(node.value))
def augassign(self, state: State, node: ast.AugAssign):
    """Handle an augmented assignment operation (e.g. x += y).

    Parameters
    ----------
    state : State
        Symbolic state the updated variable version is created in.
    node : ast.AugAssign
        The AST AugAssign node containing the variable and its state change
    """
    solver = BinarySolver(state)
    # The l-node will never be a Constant, therefore neither will the return
    p = solver.solve(node.target, node.op, node.value)
    state.createSymbolicVariable(node.target.id, p)
def conditional(self, state: State, node: ast.If, index: int):
    """Handle a conditional operation by forking the symbolic state.

    Every conditional creates two symbolic states (true and false branch).
    Each fork is SMT-solved; unsatisfiable forks are marked dead and not
    walked further.

    Parameters
    ----------
    state : State
        The state being forked.
    node : ast.If
        The relevant ast.If node
    index : int
        Position of the conditional within the current statement block.
    """
    # Create new logical properties for relevant variables in relevant state
    test = node.test
    # Generate Conditional Properties (e.g. x__17 >= y__0)
    p_true = state.generateConditionalProperties(test.left, test.ops, test.comparators)
    p_false = copy.copy(p_true)
    p_false.is_true = False
    logging.debug("Forking %s with conditional %s" % (state.name, str(p_true)))
    # Register new conditional symbolic state for TRUE
    true_state = self.forkState(state, p_true)
    state.right = true_state
    self.solve(true_state)
    # Register new conditional symbolic state for FALSE
    false_state = self.forkState(state, p_false)
    state.left = false_state
    self.solve(false_state)
    # Walk recursively down nested IF code
    if true_state.sat:
        self.walk(true_state, node.body)
    # BUG FIX: `hasattr(node, 'orelse')` is always True on an ast.If (the
    # attribute is an empty list when there is no else clause), so the guard
    # never guarded anything. Test the list itself instead.
    if false_state.sat and node.orelse:
        # Walk recursively down nested ELSE code
        self.walk(false_state, node.orelse)
def forkState(self, state: State, properties: list):
    """Deep-copy *state*, detach its children, rename it, and attach the
    given conditional property. Returns the new fork."""
    forked = copy.deepcopy(state)
    forked.right = None
    forked.left = None
    forked.setStateName()
    forked.properties.append(properties)
    return forked
def solve(self, state: State):
    """Run the Z3 backend over the state's properties and record the
    satisfiability verdict on the state itself."""
    logging.debug("Solving %s using Z3" % (state.name))
    verdict = self.z3.solve(state.properties)
    state.sat = verdict
    logging.debug("%s solved using Z3 with state %r" % (state.name, state.sat))
|
import argparse
import tensorflow as tf
from tensorflow.keras.callbacks import Callback
import ray
import ray.train as train
from ray.data import Dataset
from ray.data.dataset_pipeline import DatasetPipeline
from ray.train import Trainer
class TrainReportCallback(Callback):
    """Keras callback that forwards per-epoch metrics to Ray Train."""

    def on_epoch_end(self, epoch, logs=None):
        # Keras may invoke this hook with logs=None; `**None` would raise
        # TypeError, so substitute an empty dict.
        train.report(**(logs or {}))
def get_dataset_pipeline(a=5, b=10, size=1000) -> DatasetPipeline:
    """Build an infinitely repeating, per-window-shuffled pipeline of
    y = a*x + b samples with x evenly spaced in [0, 1)."""
    xs = (i / size for i in range(size))
    base_dataset = ray.data.from_items([{"x": x, "y": a * x + b} for x in xs])
    return base_dataset.repeat().random_shuffle_each_window()
def prepare_dataset_shard(dataset_shard: tf.data.Dataset):
    """Turn off TensorFlow autosharding on an already-sharded dataset and
    return the adjusted dataset."""
    opts = tf.data.Options()
    opts.experimental_distribute.auto_shard_policy = (
        tf.data.experimental.AutoShardPolicy.OFF)
    return dataset_shard.with_options(opts)
def build_and_compile_model(config):
    """Create and compile a small 1-in/1-out dense regression model.

    `config["lr"]` sets the SGD learning rate (default 1e-3).
    """
    layers = [
        tf.keras.layers.InputLayer(input_shape=(1, )),
        tf.keras.layers.Dense(10),
        tf.keras.layers.Dense(1),
    ]
    model = tf.keras.Sequential(layers)
    optimizer = tf.keras.optimizers.SGD(learning_rate=config.get("lr", 1e-3))
    model.compile(
        optimizer=optimizer,
        loss=tf.keras.losses.mean_squared_error,
        metrics=[tf.keras.metrics.mean_squared_error])
    return model
def train_func(config):
    """Per-worker training loop executed by Ray Train.

    config keys: batch_size (default 64), epochs (default 3), lr.
    Returns the list of per-epoch Keras history dicts.
    """
    batch_size = config.get("batch_size", 64)
    epochs = config.get("epochs", 3)
    strategy = tf.distribute.MultiWorkerMirroredStrategy()
    with strategy.scope():
        # Model building/compiling need to be within `strategy.scope()`.
        multi_worker_model = build_and_compile_model(config)
    # This worker's shard of the DatasetPipeline passed to trainer.run().
    dataset_pipeline = train.get_dataset_shard()
    dataset_iterator = dataset_pipeline.iter_datasets()
    results = []
    for _ in range(epochs):
        # One pipeline window per epoch, converted to a tf.data.Dataset of
        # batched (features, label) pairs.
        dataset = next(dataset_iterator)
        tf_dataset = prepare_dataset_shard(
            dataset.to_tf(
                label_column="y",
                output_signature=(tf.TensorSpec(
                    shape=(None, 1), dtype=tf.float32),
                    tf.TensorSpec(
                        shape=(None), dtype=tf.float32)),
                batch_size=batch_size))
        history = multi_worker_model.fit(
            tf_dataset, callbacks=[TrainReportCallback()])
        results.append(history.history)
    return results
def train_tensorflow_linear(num_workers=2, use_gpu=False):
    """Run distributed linear-regression training on Ray and return the
    per-worker results list."""
    dataset_pipeline = get_dataset_pipeline()
    trainer = Trainer(
        backend="tensorflow", num_workers=num_workers, use_gpu=use_gpu)
    trainer.start()
    results = trainer.run(
        train_func=train_func,
        dataset=dataset_pipeline,
        config={
            "lr": 1e-3,
            "batch_size": 32,
            "epochs": 4
        })
    trainer.shutdown()
    # results holds one entry per worker; show worker 0's histories.
    print(f"Results: {results[0]}")
    return results
if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    parser.add_argument(
        "--address",
        required=False,
        type=str,
        help="the address to use for Ray")
    parser.add_argument(
        "--num-workers",
        "-n",
        type=int,
        default=2,
        help="Sets number of workers for training.")
    parser.add_argument(
        "--use-gpu",
        action="store_true",
        default=False,
        help="Enables GPU training")
    parser.add_argument(
        "--smoke-test",
        action="store_true",
        default=False,
        help="Finish quickly for testing.")
    args, _ = parser.parse_known_args()
    if args.smoke_test:
        # Local smoke test: size the Ray cluster to exactly what is needed.
        # 1 for datasets
        num_cpus = args.num_workers + 1
        num_gpus = args.num_workers if args.use_gpu else 0
        ray.init(num_cpus=num_cpus, num_gpus=num_gpus)
    else:
        # Connect to (or start) a cluster at --address.
        ray.init(address=args.address)
    train_tensorflow_linear(num_workers=args.num_workers, use_gpu=args.use_gpu)
|
import sys
def day_name(num):
    """Return the English weekday name for *num* (0=Sunday .. 6=Saturday).

    Returns None for any value outside 0..6. A dict lookup replaces the
    original seven-branch elif chain; equality semantics are preserved.
    """
    return {0: "Sunday", 1: "Monday", 2: "Tuesday", 3: "Wednesday",
            4: "Thursday", 5: "Friday", 6: "Saturday"}.get(num)
def day_num(num):
    """Return the day number (0=Sunday .. 6=Saturday) for a weekday name.

    Returns None for unknown names. The original docstring ("Get day from
    int") was wrong — this maps name -> int. The parameter keeps its
    historical name ``num`` for backward compatibility with keyword callers,
    even though it holds the day *name*.
    """
    return {"Sunday": 0, "Monday": 1, "Tuesday": 2, "Wednesday": 3,
            "Thursday": 4, "Friday": 5, "Saturday": 6}.get(num)
def test(did_pass):
""" Print the result of a test. """
linenum = sys._getframe(1).f_lineno # Get the caller's line number.
if did_pass:
msg = "Test at line {0} ok.".format(linenum)
else:
msg = ("Test at line {0} FAILED.".format(linenum))
print(msg)
# Self-checks (executed at import time); the last one exercises the
# unknown-name fallback which returns None.
test(day_num("Friday") == 5)
test(day_num("Sunday") == 0)
test(day_num(day_name(3)) == 3)
test(day_name(day_num("Thursday")) == "Thursday")
test(day_num("Halloween") == None)
# encoding: utf-8
import time ,os,time,datetime,random
# BUG FIX: the original called random.seed(time), seeding the PRNG from the
# hash of the *module object* `time` rather than the clock — almost certainly
# not what was intended. Seed from the current timestamp instead.
random.seed(time.time())
# index select max(aid) from idbase;
index = 1
def Shuffer_1(start, end, fielname):
    """Write a random permutation of the integers start..end (inclusive) to
    *fielname*, one "<index><TAB><value>" line per entry.

    The module-level ``index`` counter keeps growing across calls so the
    generated ids stay unique over multiple invocations.
    (Parameter name ``fielname`` is a historical typo, kept for keyword
    compatibility.)
    """
    global index
    ids = list(range(start, end + 1))  # list(range(...)) replaces the comprehension
    random.shuffle(ids)
    with open(fielname, 'w') as f:
        for value in ids:
            index += 1
            # write() instead of writelines(): the latter was being handed a
            # single str (iterated char-by-char) rather than a list of lines.
            f.write("%d\t%d\n" % (index, value))
# Output file for the generated id permutation.
filename = "/opt/CardServer/randid.txt"
#Shuffer_1(10000000, 30000000, filename)
# Generates 1,000,001 shuffled ids; note this runs at import time.
Shuffer_1(10000000, 11000000, filename)
|
import requests
from requests_oauthlib import OAuth1
import json
from urllib.parse import urlparse
import pprint
# SECURITY NOTE(review): live API credentials are hard-coded below; they
# should be rotated and loaded from the environment or a config file.
params = {
    'app_key':'2mM1ISxurDZiulWBJdqa9WDcO',
    'app_secret':'YtGkV6HuPukSI8OZHsHXLaOQzRPfvm4uwuRZYdsh5pRru79f9e',
    'oauth_token':'334499616-T6vgrPbGZEc8yWPF3PZlQ9qNWg3cqbHWRwMulZxJ',
    'oauth_token_secret':'yrKNHO2auoi3KtNBnUTGBGwQnYgX7rkBahOXpmg1s9s7u'
}
auth = OAuth1(
    params['app_key'],
    params['app_secret'],
    params['oauth_token'],
    params['oauth_token_secret']
)
#q = urlparse('Nike')
url_rest = "https://api.twitter.com/1.1/search/tweets.json"
payload = {
    'q': 'Nike OR #Nike OR #nike',  # May be @user_search or #hash_search also
    'lang' : 'en',  # Based on ISO 639-1 two-letter code
    'result_type': 'mixed',
    'count': '100',  # Number of tweets to return per page, up to a max of 100
    # NOTE(review): the Twitter API expects 'until' as YYYY-MM-DD; '20161231'
    # is likely ignored — confirm against the search API docs.
    'until': '20161231'
}
results = requests.get(url_rest, auth=auth, params=payload)
# BUG FIX: `pprint` was imported as a module (`import pprint`); calling the
# module object raised TypeError. Call the function inside it.
pprint.pprint(results.json())
# BUG FIX: iterating the response dict directly yields only its top-level
# keys ('statuses', 'search_metadata') — the tweets live under 'statuses'.
# Also open the output file once instead of once per tweet.
with open("C:/Users/SrInaG/Documents/Python Scripts/twitter_nike_data", "a+") as loadfile:
    for tweet in results.json().get("statuses", []):
        loadfile.write(json.dumps(tweet))
        loadfile.write("\n")
|
"""
app
"""
from flask import Flask, make_response, request
from flask_cors import CORS
from json import loads, dumps
from os import remove
from database_controller import DatabaseController
# Flask application with permissive CORS so any front-end origin can call
# the JSON endpoints below.
app = Flask(__name__)
CORS(app, resources={"/*": {"origins": "*"}})
def reset_database():
    """
    Recreate ./database.sql from scratch with the default bills/cars tables.
    """
    # BUG FIX: on a fresh install there is no previous database file and the
    # unconditional remove() raised FileNotFoundError.
    try:
        remove("./database.sql")
    except FileNotFoundError:
        pass
    db = DatabaseController("database.sql")
    db.create_table("bills", "speed INT, bills INT")
    db.create_table("cars", "model TEXT, speed INT")
    db.create_table("records", "name TEXT, speed INT, bills INT")
    # basic bills (over speed limit : bills you have to pay)
    bills = [(6, 0), (10, 20), (15, 40), (19, 60), (25, 90), (30, 140), (35, 200), (40, 280), (45, 360), (50, 440),
             (55, 540), (60, 650)]
    # basic cars (model : max speed)
    cars = [("felicia", 80), ("cybertruck", 150), ("ferrari", 250), ("citroen", 120), ("bugatti", 250)]
    for bill in bills:
        db.add_line("bills", "speed, bills", f"{str(bill[0])}, {str(bill[1])}")
    for car in cars:
        db.add_line("cars", "model, speed", f"\"{str(car[0])}\", {str(car[1])}")
@app.route("/")
def mapping_():
    """
    mapping for home page
    :return: an empty 200 response
    """
    response = make_response("", 200)
    return response
@app.route("/get_bills", methods=["POST"])
def mapping_get_bills_for_speed():
    """
    mapping for getting bills

    Expects JSON {"speed": int}. The fine is the first bills row whose
    over-limit threshold covers (speed - 50).
    """
    data = loads(request.get_data().decode())
    # NOTE(review): a legitimate speed of 0 is falsy and silently becomes 50.
    speed = data.get("speed") if data.get("speed") else 50
    db = DatabaseController("database.sql")
    bills = None
    try:
        # First matching row for threshold >= overspeed; assumes rows are
        # ordered by speed — TODO confirm DatabaseController.get_line ordering.
        bills = db.get_line("bills", "speed>=" + str(int(speed) - 50))[0][1]
    except IndexError:
        # No matching bills row (speed far above the table's range).
        pass
    # At or under the 50 km/h limit: no fine.
    if int(speed) <= 50:
        return make_response(
            dumps({
                "speed_over_limit": 0,
                "bills": 0
            }),
            200
        )
    # Over the limit but beyond the bills table: flag for licence removal.
    if bills is None:
        return make_response(
            dumps({
                "speed_over_limit": 0,
                "bills": 0,
                "removeDI": True
            }),
            200
        )
    return make_response(
        dumps({
            "speed_over_limit": int(speed) - 50,
            "bills": int(bills)
        }),
        200
    )
@app.route("/get_model_speed", methods=["POST"])
def mapping_get_model_speed():
    """
    mapping for getting cars max speed

    Expects JSON {"model": str}; responds 400 when the model is unknown.
    """
    data = loads(request.get_data().decode())
    model = data.get("model")
    db = DatabaseController("database.sql")
    speed = None
    try:
        # NOTE(review): `model` is interpolated straight into the WHERE
        # clause — SQL injection risk unless DatabaseController escapes it.
        speed = db.get_line("cars", f"model=\"{model}\"")[0][1]
    except IndexError:
        # No row for this model name.
        pass
    if speed is None:
        return make_response(
            dumps({
                "state": "incorrect data"
            }),
            400
        )
    return make_response(
        dumps({
            "model": model,
            "speed": speed
        }),
        200
    )
@app.route("/add_record", methods=["POST"])
def mapping_add_record():
    """
    mapping for adding records to database

    Expects JSON {"name": str, "speed": int, "bills": {"bills": int}}.
    """
    # TODO: this is basically free sql injection - fix
    # you know what? let them try
    data = loads(request.get_data().decode())
    name = data.get("name")
    speed = data.get("speed")
    # BUG FIX: a missing "bills" object made .get("bills").get(...) raise
    # AttributeError (HTTP 500) instead of a clean 400.
    bills = (data.get("bills") or {}).get("bills")
    # BUG FIX: compare against None — 0 is a perfectly valid value for both
    # speed and bills (/get_bills itself returns bills=0 under the limit),
    # but the old truthiness check rejected such records.
    if not name or speed is None or bills is None:
        return make_response(
            dumps({
                "state": "missing data"
            }),
            400
        )
    db = DatabaseController("database.sql")
    db.add_line("records", "name, speed, bills", "\"" + name + "\", " + str(speed) + ", " + str(bills))
    return make_response(
        dumps({
            "state": "done"
        }),
        200
    )
if __name__ == '__main__':
    # Optional destructive reset before serving (drops and reseeds database.sql).
    if input("Do you want to reset database? [y/n] ") == "y":
        reset_database()
    # Listen on all interfaces (default Flask port).
    app.run(host="0.0.0.0")
|
# -*- coding: utf-8 -*-
# @Time : 2019/3/25 15:54
# @Author : 昨夜
# @Email : 903165495@qq.com
from sqlalchemy import Column, Integer, String, orm

from app.models.base import Base
class DurationPage(Base):
    """ORM model for one page (thumbnail) of a "duration" document asset."""
    __tablename__ = 'duration_page'
    id = Column(Integer, primary_key=True, comment='id')
    # Owning asset id (column comment is Chinese for "name of the material");
    # indexed for lookups by duration.
    duration_id = Column(Integer,comment='资料的名称',index=True)
    # Page number within the asset (comment: "material page number").
    page= Column(Integer,comment='资料页码')
    # Thumbnail image URL for the page (comment: "page thumbnail").
    url=Column(String(255),comment='页面缩略图')
    # NOTE(review): mapper-level "order_by" was removed in modern SQLAlchemy
    # (1.1 deprecation) — confirm the pinned version or order in queries.
    __mapper_args__ = {"order_by": page}
    # NOTE(review): `orm` is not imported in this module's header (only
    # Column/String/Integer are) — this decorator raises NameError at import
    # time unless `from sqlalchemy import orm` is added.
    @orm.reconstructor
    def __init__(self):
        # Whitelist of fields exposed by the serializer.
        self.fields = ['id', 'duration_id', 'page', 'url']
    # @staticmethod
    # def add(duration_id,page,url):
|
# A heterogeneous tuple: int, str, list, negative int.
tupla = (100, "Hola", [1, 2, 3], -50)
# Print each element on its own line.
for elemento in tupla:
    print(elemento)
# Tuple helper functions: len() on an element, index() for positions.
print("La cantidad de datos que tiene esta posicion (Solo si son listas) en la tupla es :",len(tupla[1]))
print("El índice del valor 100 es :", tupla.index(100))
print("El índice del valor 'Hola' es :", tupla.index("Hola"))
print("454")
|
import random
def rollDice():
    """Return a uniformly random integer between 1 and 100 inclusive."""
    return random.randint(1, 100)
# Now, just to test our dice, let's roll the dice 100 times.
# Rewritten with a for-loop and print(...) — the original `print result` is a
# Python 2 print statement (SyntaxError under Python 3), while print(result)
# behaves identically under both 2 and 3.
for _ in range(100):
    result = rollDice()
    print(result)
|
from django.urls import path
from . import views
# Favorites app routes: list all, add by asset id, remove by asset id +
# favorites item id.
urlpatterns = [
    path('', views.favorites, name='favorites'),
    path('add_favorites/<int:asset_id>/', views.add_favorites, name='add_favorites'),
    path('remove_favorites/<int:asset_id>/<int:favorites_item_id>/', views.remove_favorites, name='remove_favorites'),
]
|
"""
This module contains the view for a user login/session creation
"""
from flask_restful import reqparse, Resource
from api import db
from api.models.account import Account
from flask import current_app
from passlib.hash import pbkdf2_sha256
import datetime
import jwt
class Login(Resource):
    """Authenticate an account number + pin and issue a short-lived JWT."""

    def post(self):
        # parse request json
        parser = reqparse.RequestParser()
        parser.add_argument("account_number", location="json", required=True)
        parser.add_argument("pin", location="json", required=True)
        request_json = parser.parse_args()
        # Only fetch the columns we need for verification.
        account = db.session.query(Account.id,
                                   Account.pin).filter_by(
            account_number=request_json["account_number"]).first()
        if account is None:
            current_app.logger.info(
                "Invalid account number: %s", request_json["account_number"])
            return {"message": "Invalid account number."}, 401
        try:
            pin_is_valid = pbkdf2_sha256.verify(request_json["pin"],
                                                account.pin)
            if not pin_is_valid:
                current_app.logger.debug("Pin is invalid.")
                return {"message": "Pin is invalid."}, 401
            token = jwt.encode(
                {
                    "account_id": account.id,
                    "iat": datetime.datetime.utcnow(),
                    "exp": datetime.datetime.utcnow() +
                    datetime.timedelta(
                        minutes=current_app.config["JWT_TOKEN_EXPIRY"])
                }, current_app.config["SECRET_KEY"], algorithm='HS256')
            # BUG FIX: PyJWT 1.x returns bytes from encode(), PyJWT 2.x
            # returns str — calling .decode() on a str raises AttributeError.
            # Normalize so the JSON response works with either version.
            if isinstance(token, bytes):
                token = token.decode("utf-8")
            return {"token": token}
        except ValueError as error:
            # passlib raises ValueError when the stored pin isn't a valid hash.
            if "not a valid pbkdf2_sha256 hash" in str(error):
                current_app.logger.error("Invalid hash stored as pin.")
            current_app.logger.error(error)
            return {"message": "Something went wrong."}, 500
|
"""Expected test results"""
# JSON-encoded expected output for test case 1. Trailing backslashes keep
# the literal on one logical line; the "Asian" category header was the only
# line (of ~150) missing its continuation backslash, leaving a stray newline
# inside the literal — fixed for consistency with every other line.
TEST_1 = """[\
{\
"category": "Local Eats", \
"venues": [{"name": "Dagu Rice Noodle (Winnipeg)", "address": "102-1855 Pembina \
Hwy, Winnipeg, MB R3T 2G6"}, {"name": "Smitty's Family Restaurant (St. James)", "address": \
"1017 St James St, Winnipeg, MB R3H 0K6"}, {"name": "Food Trip Kitchen", "address": "1045 St \
James Street, Winnipeg, Manitoba R3H 0K6"}, {"name": "Vi-Ann Restaurant", "address": "1020 \
Notre Dame Ave, Winnipeg, Manitoba R3E0N5"}, {"name": "Burrito Splendido (1046 henderson-)", \
"address": "1046 Henderson Highway, Winnipeg, NAMER R2K 2M5"}, {"name": "Panda Tea (McPhillips \
St)", "address": "1041 Mcphillips St, Winnipeg, MB R2X 2K6"}, {"name": "Smitty's Family \
Restaurant (Henderson)", "address": "#9 1919 Henderson Highway, Winnipeg, MB R2G 1P4"}] \
}, \
{ \
"category": "American", \
"venues": [{"name": "Smitty's Family Restaurant (St. James)", "address": "1017 St \
James St, Winnipeg, MB R3H 0K6"}, {"name": "McDonald's (1001 Empress Street)", "address": "1001 \
Empress Street, Winnipeg, MB R3R 3P8"}, {"name": "Smitty's Family Restaurant (Henderson)", \
"address": "#9 1919 Henderson Highway, Winnipeg, MB R2G 1P4"}, {"name": "Burger King #7961 (100 \
21st Street North)", "address": "100 21st Street North, Moorhead, MN 56560"}] \
}, \
{ \
"category": "Asian", \
"venues": [{"name": "Dagu Rice Noodle (Winnipeg)", "address": "102-1855 Pembina \
Hwy, Winnipeg, MB R3T 2G6"}, {"name": "Vi-Ann Restaurant", "address": "1020 Notre Dame Ave, \
Winnipeg, Manitoba R3E0N5"}, {"name": "Panda Tea (McPhillips St)", "address": "1041 Mcphillips \
St, Winnipeg, MB R2X 2K6"}] \
}, \
{ \
"category": "Fast Food", \
"venues": [{"name": "McDonald's (1001 Empress Street)", "address": "1001 Empress \
Street, Winnipeg, MB R3R 3P8"}, {"name": "Little Caesars (Henderson Hwy)", "address": "1050 \
Henderson Hwy, Winnipeg, MB R3K 2M5"}, {"name": "Burger King #7961 (100 21st Street North)", \
"address": "100 21st Street North, Moorhead, MN 56560"}] \
}, \
{ \
"category": "Breakfast and Brunch", \
"venues": [{"name": "Smitty's Family Restaurant (St. James)", "address": "1017 St \
James St, Winnipeg, MB R3H 0K6"}, {"name": "Smitty's Family Restaurant (Henderson)", "address": \
"#9 1919 Henderson Highway, Winnipeg, MB R2G 1P4"}] \
}, \
{ \
"category": "Burgers", \
"venues": [{"name": "McDonald's (1001 Empress Street)", "address": "1001 Empress \
Street, Winnipeg, MB R3R 3P8"}, {"name": "Smitty's Family Restaurant (Henderson)", "address": \
"#9 1919 Henderson Highway, Winnipeg, MB R2G 1P4"}] \
}, \
{ \
"category": "Chicken", \
"venues": [{"name": "Food Trip Kitchen", "address": "1045 St James Street, \
Winnipeg, Manitoba R3H 0K6"}, {"name": "Smitty's Family Restaurant (Henderson)", "address": "#9 \
1919 Henderson Highway, Winnipeg, MB R2G 1P4"}] \
}, \
{ \
"category": "Sandwiches", \
"venues": [{"name": "Smitty's Family Restaurant (St. James)", "address": "1017 St \
James St, Winnipeg, MB R3H 0K6"}, {"name": "Smitty's Family Restaurant (Henderson)", "address": \
"#9 1919 Henderson Highway, Winnipeg, MB R2G 1P4"}] \
}, \
{ \
"category": "Allergy Friendly", \
"venues": [{"name": "Dagu Rice Noodle (Winnipeg)", "address": "102-1855 Pembina \
Hwy, Winnipeg, MB R3T 2G6"}] \
}, \
{ \
"category": "Asian Fusion", \
"venues": [{"name": "Dagu Rice Noodle (Winnipeg)", "address": "102-1855 Pembina \
Hwy, Winnipeg, MB R3T 2G6"}] \
}, \
{ \
"category": "burger", \
"venues": [{"name": "Burger King #7961 (100 21st Street North)", "address": "100 \
21st Street North, Moorhead, MN 56560"}] \
}, \
{ \
"category": "Chinese", \
"venues": [{"name": "Dagu Rice Noodle (Winnipeg)", "address": "102-1855 Pembina \
Hwy, Winnipeg, MB R3T 2G6"}] \
}, \
{ \
"category": "Desserts", \
"venues": [{"name": "Panda Tea (McPhillips St)", "address": "1041 Mcphillips St, \
Winnipeg, MB R2X 2K6"}] \
}, \
{ \
"category": "Family Meals", \
"venues": [{"name": "Burger King #7961 (100 21st Street North)", "address": "100 \
21st Street North, Moorhead, MN 56560"}] \
}, \
{ \
"category": "Filipino", \
"venues": [{"name": "Food Trip Kitchen", "address": "1045 St James Street, \
Winnipeg, Manitoba R3H 0K6"}] \
}, \
{ \
"category": "Fish and Chips", \
"venues": [{"name": "Smitty's Family Restaurant (Henderson)", "address": "#9 1919 \
Henderson Highway, Winnipeg, MB R2G 1P4"}] \
}, \
{ \
"category": "Healthy", \
"venues": [{"name": "Smitty's Family Restaurant (Henderson)", "address": "#9 1919 \
Henderson Highway, Winnipeg, MB R2G 1P4"}] \
}, \
{ \
"category": "Juice and Smoothies", \
"venues": [{"name": "Panda Tea (McPhillips St)", "address": "1041 Mcphillips St, \
Winnipeg, MB R2X 2K6"}] \
}, \
{ \
"category": "Latin American", \
"venues": [{"name": "Burrito Splendido (1046 henderson-)", "address": "1046 \
Henderson Highway, Winnipeg, NAMER R2K 2M5"}] \
}, \
{ \
"category": "New Mexican", \
"venues": [{"name": "Burrito Splendido (1046 henderson-)", "address": "1046 \
Henderson Highway, Winnipeg, NAMER R2K 2M5"}] \
}, \
{ \
"category": "Noodles", \
"venues": [{"name": "Vi-Ann Restaurant", "address": "1020 Notre Dame Ave, Winnipeg, \
Manitoba R3E0N5"}] \
}, \
{ \
"category": "Pizza", \
"venues": [{"name": "Little Caesars (Henderson Hwy)", "address": "1050 Henderson \
Hwy, Winnipeg, MB R3K 2M5"}] \
}, \
{ \
"category": "Salads", \
"venues": [{"name": "Smitty's Family Restaurant (Henderson)", "address": "#9 1919 \
Henderson Highway, Winnipeg, MB R2G 1P4"}] \
}, \
{ \
"category": "Seafood", \
"venues": [{"name": "Food Trip Kitchen", "address": "1045 St James Street, \
Winnipeg, Manitoba R3H 0K6"}] \
}, \
{ \
"category": "South East Asian", \
"venues": [{"name": "Food Trip Kitchen", "address": "1045 St James Street, \
Winnipeg, Manitoba R3H 0K6"}] \
}, \
{ \
"category": "Thai", \
"venues": [{"name": "Vi-Ann Restaurant", "address": "1020 Notre Dame Ave, Winnipeg, \
Manitoba R3E0N5"}] \
} \
]"""
# JSON-encoded expected output for test case 2 (smaller venue set).
# NOTE(review): some continuation lines break mid-JSON without a trailing
# space (e.g. right after "address":) — harmless to json.loads but brittle
# for raw string comparison; confirm how the tests compare this value.
TEST_2 = """[\
{\
"category": "Local Eats",\
"venues": [{"name": "Dagu Rice Noodle (Winnipeg)", "address": "102-1855 Pembina \
Hwy, Winnipeg, MB R3T 2G6"}, {"name": "Smitty's Family Restaurant (St. James)", "address":\
"1017 St James St, Winnipeg, MB R3H 0K6"}, {"name": "Food Trip Kitchen", "address": "1045 St \
James Street, Winnipeg, Manitoba R3H 0K6"}]\
},\
{\
"category": "Allergy Friendly",\
"venues": [{"name": "Dagu Rice Noodle (Winnipeg)", "address": "102-1855 Pembina Hwy, \
Winnipeg, MB R3T 2G6"}]\
},\
{\
"category": "American",\
"venues": [{"name": "Smitty's Family Restaurant (St. James)", "address": "1017 St \
James St, Winnipeg, MB R3H 0K6"}]\
},\
{\
"category": "Asian",\
"venues": [{"name": "Dagu Rice Noodle (Winnipeg)", "address": "102-1855 Pembina \
Hwy, Winnipeg, MB R3T 2G6"}]\
},\
{\
"category": "Asian Fusion",\
"venues": [{"name": "Dagu Rice Noodle (Winnipeg)", "address": "102-1855 Pembina \
Hwy, Winnipeg, MB R3T 2G6"}]\
},\
{\
"category": "Breakfast and Brunch",\
"venues": [{"name": "Smitty's Family Restaurant (St. James)", "address": "1017 St \
James St, Winnipeg, MB R3H 0K6"}]\
},\
{\
"category": "Chicken",\
"venues": [{"name": "Food Trip Kitchen", "address": "1045 St James Street, \
Winnipeg, Manitoba R3H 0K6"}]\
},\
{\
"category": "Chinese",\
"venues": [{"name": "Dagu Rice Noodle (Winnipeg)", "address": "102-1855 Pembina \
Hwy, Winnipeg, MB R3T 2G6"}]\
},\
{\
"category": "Filipino",\
"venues": [{"name": "Food Trip Kitchen", "address": "1045 St James Street, \
Winnipeg, Manitoba R3H 0K6"}]\
},\
{\
"category": "Sandwiches",\
"venues": [{"name": "Smitty's Family Restaurant (St. James)", "address": "1017 St \
James St, Winnipeg, MB R3H 0K6"}]\
},\
{\
"category": "Seafood", \
"venues": [{"name": "Food Trip Kitchen", "address": "1045 St James Street, \
Winnipeg, Manitoba R3H 0K6"}]\
},\
{\
"category": "South East Asian", \
"venues": [{"name": "Food Trip Kitchen", "address": "1045 St James Street, \
Winnipeg, Manitoba R3H 0K6"}]\
}\
]"""
# Brief intro to Selenium: drive a real browser (Firefox) from Python.
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
# Special keys can be sent using Keys
# step1: create object of webdriver
driver = webdriver.Firefox()
print(driver)
# step2: get the web
driver.get("http://www.python.org")
print(driver.title)
assert "Python" in driver.title
# assert is used for debugging: it raises AssertionError when the condition
# is false, otherwise execution continues.
# find_element_by_name locates an HTML element by its name attribute.
# NOTE(review): find_element_by_name was removed in Selenium 4 — confirm the
# pinned version (4.x uses driver.find_element(By.NAME, "q")).
# other locating strategies: https://selenium-python.readthedocs.io/locating-elements.html#locating-elements
elem = driver.find_element_by_name("q")  # the search input on python.org
# elem.clear()
elem.send_keys("pycon")  # type "pycon" into the search box
elem.send_keys(Keys.RETURN)  # press Enter (special keys come from Keys)
assert "No results found." not in driver.page_source  # the search returned hits
driver.close()
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
import os
import sys
import logging
import requests
import datetime
from datetime import timedelta
from jira import JIRA
import base64
import smtplib
from email.mime.text import MIMEText
# Python 2 only: force the default string encoding to UTF-8 so implicit
# str/unicode conversions of JIRA fields don't raise UnicodeDecodeError.
reload(sys)
sys.setdefaultencoding("utf8")
###################### tools #####################
# pip install requests
# pip install requests[socks]
# pip install JIRA
###################### tools #####################
# Date/label format patterns used throughout the report.
dateFormat='%Y-%m-%d'
labelDateFormat='%Y%m%d'
currentMonthFormat='%Y%m'
# Snapshot "now" once so every derived value is consistent for the run.
now = datetime.datetime.now()
today = now.strftime(dateFormat)
label = now.strftime(labelDateFormat)
currentMonth= now.strftime(currentMonthFormat)
######################## Configuration [Start] ###############################
# All runtime configuration comes from environment variables so the script
# can run unmodified across environments (cron, CI, dev shells).
output_dir = os.environ.get('OUTPUT_DIR')
### START config Email Sender
email_username=os.environ.get('EMAIL_USER_NAME')
email_password=os.environ.get('EMAIL_PWD')
email_from= os.environ.get('EMAIL_FROM')
email_subject='%s CMC Tickets Tracking' % today
# config the email address here (comma-separated lists in the env vars)
email_to_tech_leads=os.environ.get('EMAIL_TO_TECH_LEADS').split(',')
email_ccs=os.environ.get('EMAIL_CCS').split(',')
### END config Email Sender
#### JIRA Account Config [Start]######
oz_jira_username=os.environ.get('OZ_JIRA_USERNAME')
oz_jira_password =os.environ.get('OZ_JIRA_PASSWORD')
cmc_jira_username=os.environ.get('CMC_JIRA_USERNAME')
cmc_jira_password=os.environ.get('CMC_JIRA_PASSWORD')
#### JIRA Account Config [End]########
qa_manager_full_names=os.environ.get('OZ_QA_MANAGER_NAME')
use_proxy=os.environ.get('USE_PROXY')
proxy_ip = os.environ.get('PROXY_SERVER')
### START config jira username & password
oz_jira_server = 'http://192.168.168.21:8091'
cmc_jira_server = 'https://jira.katabat.com'
### END config jira username & password
# %s placeholders are filled with a ticket key to build browse URLs.
cmc_jira_url_prefix = 'https://jira.katabat.com/browse/%s'
oz_jira_url_prefix = 'http://192.168.168.21:8091/browse/%s'
# JQL finding fixed/closed CMC tickets carrying the given date label.
oz_fixed_tickets_query='project = "CMC JIRA Tickets" AND labels = %s AND status in (Resolved, Closed, "Passed QA", "In QA", Done) ORDER BY status DESC'
# Optional HTTP(S) proxy, enabled via USE_PROXY=YES/TRUE.
proxies = None
if use_proxy and str(use_proxy).upper() in ('YES', 'TRUE'):
    proxies = {
        'http': str(proxy_ip).strip(),
        'https': str(proxy_ip).strip()
    }
# Module-level logger writing INFO+ records to stderr.
log = logging.getLogger('JIRA Checker')
log.setLevel(logging.INFO)
fmt = logging.Formatter('%(levelname)s:%(name)s:%(message)s')
h = logging.StreamHandler()
h.setFormatter(fmt)
log.addHandler(h)
# Placeholder shown when a field has no value.
na_text='N/A'
# Per-month audit file collecting rejected-ticket statistics.
rejected_ticket_file_name = '%s-CMC-Tickets-Audit.txt' % currentMonth
# De-duplicated recipient set for the report email.
mail_to_set = set(email_to_tech_leads)
# Maps localized (Chinese) OZ JIRA status names to their English equivalents:
# resolved / open / in progress / closed / reopened.
oz_jira_status_map = {
    '已解决': 'Resolved',
    '开始': 'Open',
    '进行中': 'In Progress',
    '关闭': 'Closed',
    '重新打开': 'Reopened'
}
# HTML badge per normalized status key. Keys MUST be lower-case: the lookup
# in get_wraped_ticket_status() lower-cases the status before indexing.
jira_ticket_status_styles = {
    'resolved': '<span style="color:green;">Resolved</span>',
    'closed': '<span style="color:green;">Closed</span>',
    'passed-qa': '<span style="color:green;">Passed QA</span>',
    'done': '<span style="color:green;">Done</span>',
    'qa-complete': '<span style="color:green;">QA Complete</span>',
    # BUG FIX: key was 'In-QA' (capitalized), which could never match the
    # lower-cased lookup key, so the "In QA" style was never applied.
    'in-qa': '<span style="color:blue;">In QA</span>',
    'in-progress-dev': '<span style="color:blue;">In Progress Dev</span>',
    'in-progress-qa': '<span style="color:blue;">In Progress QA</span>',
    'rejected-qa': '<span style="color:red;"><b>Rejected QA</b></span>',
    # BUG FIX: the value ended with a stray '<' producing broken HTML.
    'reopen': '<span style="color:red;"><b>ReOpen</b></span>'
}
# Status names aggregated into the statistics section of the report.
statistics_jira_status={
    'rejected': 'Rejected'
}
# str.format / %-style HTML fragments used to assemble the report email.
# email_content_tpl is %-formatted with five values:
# (fixed_count, label, items_html, missing_qa_html, rejected_stats_html).
html_templates = {
    'item_content_tpl': '<li><span><span class="ozinfo">{ozTicketInfo}</span> <span class="cmcinfo">[{cmcTicketInfo}]</span> - <span class="summary">{ticketSummary}</span> </span>{extraInfo}</li>',
    # NOTE(review): the closing ')' after {qaReporter} has no opening paren.
    'oz_ticket_info_tpl': "<a href='{hyperLink}'>{hyperText}</a>, {ticketStatus}, {author}, Reporter: {qaReporter})",
    'cmc_ticket_info_tpl': "<a href='{hyperLink}'>{hyperText}</a>, {ticketStatus}",
    'author_info_tpl': "<span style='color:green'>{author}</span>",
    'qa_report_info_tpl': "<span style='color:purple'>{qaReporter}</span>",
    'extra_info_tpl': "{extraInfo}",
    'not_assign_qa_reporter_tpl': "<div class='quote'><span>{qaManager} there are <b>{count}</b> ticket(s) without QA Reporter: </span> <div>{tickets}</div></div>",
    'rejected_tickets_statistics_tpl': "<div class='quote'>{ticketStatus} (<b>{count}</b>): <div>{tickets}</div></div>",
    'statistics_file_content_tpl': "{ticketStatus}|{date}|{label}: {tickets}\n",
    'email_content_tpl': """\
<html>
<head>
<meta charset="UTF-8">
<style type="text/css">
body{margin: 20px; font-size: 14px;font-family: Cambria, "Hoefler Text", Utopia, "Liberation Serif", "Nimbus Roman No9 L Regular", Times, "Times New Roman", serif;}
ol>li{line-height: 25px;}
.extra-field-cls{font-size: 9px; line-height: 20px; border-bottom: 1px dotted #999 }
.extra-field-cls > div { border-bottom: 1px dotted #999}
.ozinfo{font-size:12px;}
.cmcinfo{font-size: 14px;}
.summary{font-size:12px;color:gray;}
/*.not-assign-qa-reporter > div {font-size:12px; padding: 5px 0 0 20px;}*/
.quote {
background: #f9f9f9;
border-left: 3px solid #ccc;
margin: 1.5em 10px;
padding: .5em 10px;
/*quotes: '\\201C''\\201D''\\2018''\\2019';*/
-webkit-border-top-left-radius: 10px;
-webkit-border-top-right-radius: 3px;
-webkit-border-bottom-right-radius: 6px;
-webkit-border-bottom-left-radius: 10px;
-moz-border-radius-topleft: 10px;
-moz-border-radius-topright: 3px;
-moz-border-radius-bottomright: 6px;
-moz-border-radius-bottomleft: 10px;
}
.quote:before {
color: #ccc;
content: '\\201C';
font-size: 4em;
line-height: .1em;
margin-right: .25em;
vertical-align: -.4em;
}
.quote:after{content:'';/*content: '\\201D';*/}
.quote p {display: inline; }
</style>
</head>
<body>
<p>
<div class='header-info'>
<p>
<div>This email just notice us CMC tickets status, developer, QA Reporter and etc which OZ handling</div>
</p>
<p>
<div> There are <b>%s</b> CMC ticket(s) were <b>Fixed</b> on our side within <b>%s</b> label.
</p>
</div>
</p>
<p>
<div>
<ol>
%s
</ol>
</div>
</p>
<p>
<div for='not-assign-qa-reporter'>
%s
</div>
</p>
<p>
<div for='reject-ticket-statisics'>
%s
</div>
</p>
<div>
<hr/>
<div style="font-size: 10px; color:gray;">
<div>This email template generated by OZ Ticket Tracking Robot</div>
<div>OZ Ticket Tracking job will trigger at 7:40 am on Mon - Fri</div>
</div>
</div>
</body>
</html>
"""
}
# Per-situation reminder snippets rendered into the report via str.format.
extra_info_texts = {
    'cmc_status_not_change_to_dev_completed': "<div style='color:orange'>@{author}, why don't you change {cmcTicket} to <b>DEV COMPLETE</b> on CMC side? </div>",
    'cmc_reject_qa': "<div style='color:red'>@{author}, CMC QA was <b>Rejected</b> {cmcTicket}, please take a look at this ticket.</div>",
    'notice_qa_reporter_update_ticket_status_tpl':"<div style='color:orange'>@{qaReporter}, please change the {ticket} to refer status. CMC has changed {cmcTicket} to <b>{cmcTicketStatus}</b></div>"
}
# Accumulators filled by get_extra_info() while scanning tickets: HTML links
# for the email, and plain ticket-number pairs for the audit file.
rejected_ticket_by_cmc_qa=[]
rejected_ticket_by_cmc_qa_only_ticket_numbers=[]
# Connect to jira server with username and password
def connect_jira(jira_server, jira_user, jira_password, use_proxy=False):
    """Open a JIRA client session; return the JIRA object, or None on failure.

    ::param jira_server   base URL of the JIRA instance
    ::param jira_user     basic-auth username
    ::param jira_password basic-auth password
    ::param use_proxy     route through the module-level `proxies` when True
    """
    log.info("Connecting: %s with author: %s", jira_server,jira_user)
    try:
        jira_option = {'server': jira_server}
        # BUG FIX: basic_auth must be a (user, password) *tuple*; the original
        # passed a set literal, which is unordered and rejected by the client.
        if use_proxy:
            log.info("Visit %s using Proxy: %s", jira_server, proxies)
            jira = JIRA(jira_option, basic_auth=(jira_user, jira_password), proxies=proxies)
        else:
            jira = JIRA(jira_option, basic_auth=(jira_user, jira_password))
        log.info("Connected server: %s successfully!", jira_server)
        return jira
    except Exception as e:
        # `except Exception, e` and `e.message` are Python-2-only; `as e`
        # works on 2.6+ and 3.x, and str(e) always yields the message.
        log.error("Failed connect JIRA server (%s): %s", jira_server, e)
        return None
def get_work_path(workDir):
    """Resolve *workDir* to a usable directory path, creating it if absent.

    None/empty/whitespace-only input falls back to the current directory.
    """
    use_default = not workDir or workDir.isspace()
    target = os.path.realpath(".") if use_default else os.path.expanduser(workDir)
    if not os.path.exists(target):
        os.makedirs(target)
    return target
def get_last_work_day_label():
    """
    Get the label of the previous working day.

    Returns yesterday's label on Tue-Fri; on Monday returns the previous
    Friday's label (Sat and Sun are rest days).
    NOTE(review): the original docstring claimed "else return today's label",
    but the code always subtracts at least one day; the initial
    `will_find_label = label` assignment below is dead code.
    """
    will_find_label = label
    day_offset = 1
    weekday = datetime.datetime.now().isoweekday()
    if weekday == 1:
        # monday, usually Sat and Sun were rest days
        day_offset = 3
    will_find_label = (now - timedelta(days=day_offset)).strftime(labelDateFormat)
    return will_find_label
def generate_oz_ticket_hyperlink(oz_ticket_number):
    """Return an HTML anchor linking to the OZ JIRA browse page for the ticket."""
    target = oz_jira_url_prefix % oz_ticket_number
    return '<a href="%s">%s</a>' % (target, oz_ticket_number)
def generate_cmc_ticket_hyperlink(cmc_ticket_number):
    """Return an HTML anchor linking to the CMC JIRA browse page for the ticket."""
    target = cmc_jira_url_prefix % cmc_ticket_number
    return '<a href="%s">%s</a>' % (target, cmc_ticket_number)
def get_cmc_ticket_number(oz_ticket):
    """
    Find the CMC ticket number from an OZ Ticket.

    There are two ways to find it:
    # 1: the first word of the OZ ticket summary
    # 2: the OZ ticket 'customfield_10227' field (CMC JIRA Link)
    If "CMC JIRA Link" has a value it takes priority; otherwise fall back to
    the summary's first word.
    ::param oz_ticket the ticket object
    """
    cmc_ticket_num = None
    if oz_ticket:
        summary = oz_ticket.fields.summary
        cmc_jira_link_url = oz_ticket.fields.customfield_10227 or ''
        # First whitespace-separated token of the summary.
        cmc_ticket_no_in_summary = str(summary).strip().split(" ")[0]
        # Strip the browse-URL prefix to leave just the ticket key.
        cmc_ticket_no_in_link_url = str(cmc_jira_link_url).strip().replace(cmc_jira_url_prefix % '', '')
        if cmc_ticket_no_in_link_url:
            cmc_ticket_num = cmc_ticket_no_in_link_url
        else:
            cmc_ticket_num = cmc_ticket_no_in_summary
    return cmc_ticket_num.strip()
def get_wraped_ticket_status(ticket_status):
    """Map a JIRA status onto its styled HTML form, when a style exists.

    The status is normalised (parentheses dropped, spaces -> dashes,
    lower-cased) before looking it up in the module-level
    jira_ticket_status_styles dict; unknown statuses pass through unchanged.
    """
    p_ticket_status = str(ticket_status).replace('(', '').replace(')', '').replace(' ', '-').strip().lower()
    # dict.has_key() was removed in Python 3; `in` works in both 2 and 3.
    return jira_ticket_status_styles[p_ticket_status] if p_ticket_status in jira_ticket_status_styles else ticket_status
def get_extra_info(oz_ticket_status, cmc_ticket_status, author, qa_reporter, oz_ticket_number, cmc_ticket_number):
    """Build the extra notice text for one report row from the OZ/CMC status pair.

    Side effects: rejected (non-INC/PROD) tickets are appended to the
    module-level lists rejected_ticket_by_cmc_qa and
    rejected_ticket_by_cmc_qa_only_ticket_numbers.
    """
    extra_text = ''
    # Normalise the CMC status for comparison: drop parentheses, spaces -> dashes.
    p_cmc_ticket_status = str(cmc_ticket_status).replace('(', '').replace(')', '').replace(' ', '-').strip().upper()
    if oz_ticket_status.upper() in ('RESOLVED', 'CLOSED', 'PASSED QA', 'DONE') and p_cmc_ticket_status in ('OPEN-DEV', 'IN-PROGRESS-DEV'):
        # OZ side is finished but the CMC ticket is still marked as in development.
        extra_text = extra_info_texts['cmc_status_not_change_to_dev_completed'].format(author=author, cmcTicket=cmc_ticket_number)
    elif "REJECT" in p_cmc_ticket_status and 'INC' not in cmc_ticket_number and 'PROD' not in cmc_ticket_number:
        # Append rejected cmc ticket to array, ignoring INC and PROD tickets.
        reject_arr_item_text= '%s[%s]' % (generate_cmc_ticket_hyperlink(cmc_ticket_number),
                                          generate_oz_ticket_hyperlink(oz_ticket_number))
        rejected_ticket_by_cmc_qa.append(reject_arr_item_text)
        rejected_ticket_by_cmc_qa_only_ticket_numbers.append('%s[%s]' % (cmc_ticket_number, oz_ticket_number))
        # get the extra text
        extra_text = extra_info_texts['cmc_reject_qa'].format(author=author, cmcTicket=cmc_ticket_number)
    elif ("PASSED" in p_cmc_ticket_status or p_cmc_ticket_status in ('CLOSED', 'RESOLVED', 'QA-COMPLETE')) and oz_ticket_status.upper() not in ('CLOSED', 'PASSED QA', 'DONE'):
        # CMC QA has passed the ticket but the OZ side was not moved along;
        # nudge the QA reporter to update it.
        extra_text = extra_info_texts['notice_qa_reporter_update_ticket_status_tpl'].format(qaReporter=qa_reporter,
                                                                       ticket=generate_oz_ticket_hyperlink(oz_ticket_number),
                                                                       cmcTicket=generate_cmc_ticket_hyperlink(cmc_ticket_number),
                                                                       cmcTicketStatus=cmc_ticket_status)
    return extra_text
def generate_item_content(oz_ticket, cmc_ticket):
    """Render one HTML report row for an OZ/CMC ticket pair using the
    module-level html_templates."""
    oz_ticket_key = oz_ticket.key
    oz_ticket_summary = oz_ticket.fields.summary
    oz_ticket_status_name=oz_ticket.fields.status.name.encode('utf-8').strip()
    # Map the raw OZ status to a display name.
    # NOTE(review): dict.has_key() is Python 2 only; use `in` when porting to 3.
    oz_ticket_status = oz_jira_status_map[oz_ticket_status_name] if oz_jira_status_map.has_key(oz_ticket_status_name) else oz_ticket_status_name
    oz_ticket_developer_full_name = oz_ticket.fields.assignee
    oz_ticket_qaReporter_full_name = oz_ticket.fields.customfield_10320 or na_text
    wraped_oz_ticket_status = get_wraped_ticket_status(oz_ticket_status)
    cmc_ticket_key=cmc_ticket.key
    cmc_ticket_status=cmc_ticket.fields.status.name
    wraped_cmc_ticket_status = get_wraped_ticket_status(cmc_ticket_status)
    author = html_templates['author_info_tpl'].format(author=oz_ticket_developer_full_name)
    qa_reporter = html_templates['qa_report_info_tpl'].format(qaReporter=oz_ticket_qaReporter_full_name)
    cmc_ticket_text = html_templates['cmc_ticket_info_tpl'].format(hyperLink=cmc_jira_url_prefix % cmc_ticket_key,
                                                         hyperText=cmc_ticket_key,
                                                         ticketStatus=wraped_cmc_ticket_status)
    oz_ticket_text = html_templates['oz_ticket_info_tpl'].format(hyperLink=oz_jira_url_prefix % oz_ticket_key,
                                                       hyperText=oz_ticket_key,
                                                       ticketStatus=wraped_oz_ticket_status,
                                                       author=author,
                                                       qaReporter=qa_reporter)
    # Extra notice (rejected / not-progressed tickets) for this row.
    extra_info_text = html_templates['extra_info_tpl'].format(extraInfo=get_extra_info(oz_ticket_status,
                                                                             cmc_ticket_status,
                                                                             author,
                                                                             qa_reporter,
                                                                             oz_ticket_key,
                                                                             cmc_ticket_key))
    item_html_text = html_templates['item_content_tpl'].format(ozTicketInfo=oz_ticket_text,
                                                     cmcTicketInfo=cmc_ticket_text,
                                                     ticketSummary=oz_ticket_summary,
                                                     extraInfo=extra_info_text)
    return item_html_text
def get_html_content(totalCount, label, item_html_text, none_qa_reporter_html_text, tickets_status_statistics_html_text):
    """Assemble the final email HTML from the per-section fragments."""
    sections = (totalCount, label, item_html_text,
                none_qa_reporter_html_text,
                tickets_status_statistics_html_text)
    return html_templates['email_content_tpl'] % sections
def check_tickets_main(search_label):
    """
    Check OZ-side CMC tickets whose status is Resolved/Closed against their
    CMC-side status. If an OZ ticket is Resolved but the CMC side shows
    Rejected, ReOpen, or another status, an email is sent to let the
    developer know.

    customfield_10320: QA Reporter

    :param search_label: label to query for; falls back to the last work
        day's label when falsy.
    """
    not_assign_qa_reporter_tickets = []
    all_items_html_text = ''
    none_qa_reporter_html_text=''
    tickets_status_statistics_html_text=''
    # Connect to both JIRA instances using module-level credentials.
    oz_jira_ins = connect_jira(oz_jira_server, oz_jira_username, oz_jira_password)
    cmc_jira_ins = connect_jira(cmc_jira_server, cmc_jira_username, cmc_jira_password, True)
    search_label = search_label if search_label else get_last_work_day_label()
    query_jql = oz_fixed_tickets_query % search_label
    oz_side_fixed_cmc_tickets = oz_jira_ins.search_issues(query_jql)
    total_tickets_count = oz_side_fixed_cmc_tickets.__len__()
    log.info("There are %s cmc tickets on %s were [Fixed]", total_tickets_count, search_label)
    idx = 0
    for oz_ticket in oz_side_fixed_cmc_tickets:
        idx = idx + 1
        log.info("Processing %s/%s.....", idx, total_tickets_count)
        oz_ticket_key = oz_ticket.key
        cmc_ticket_key = get_cmc_ticket_number(oz_ticket)
        cmc_ticket = cmc_jira_ins.issue(str(cmc_ticket_key))
        oz_developer_email_addr = oz_ticket.fields.assignee.emailAddress
        oz_qa_email_addr = oz_ticket.fields.customfield_10320.emailAddress if oz_ticket.fields.customfield_10320 else None
        # generate the item text
        all_items_html_text = all_items_html_text + generate_item_content(oz_ticket, cmc_ticket)
        # which developer and qa reporter will receive this email
        if oz_developer_email_addr:
            mail_to_set.add(oz_developer_email_addr)
        if oz_qa_email_addr:
            mail_to_set.add(oz_qa_email_addr)
        else:
            # append none assign the qa reporter to array.
            not_assign_qa_reporter_tickets.append(generate_oz_ticket_hyperlink(oz_ticket_key))
    # generate warning to the qa manager about tickets with no qa reporter assigned.
    if not_assign_qa_reporter_tickets.__len__() > 0:
        qa_manager_names = html_templates['qa_report_info_tpl'].format(qaReporter='@'+qa_manager_full_names)
        none_qa_reporter_html_text = html_templates['not_assign_qa_reporter_tpl'].format(qaManager=qa_manager_names,
                                                               count=not_assign_qa_reporter_tickets.__len__(),
                                                               tickets=', '.join(not_assign_qa_reporter_tickets))
    # Statistics section for tickets rejected by CMC QA (collected as a side
    # effect of generate_item_content -> get_extra_info).
    if rejected_ticket_by_cmc_qa.__len__() > 0:
        tickets_status_statistics_html_text = html_templates['rejected_tickets_statistics_tpl'].format(ticketStatus=jira_ticket_status_styles['rejected-qa'],
                                                            count=rejected_ticket_by_cmc_qa.__len__(),
                                                            tickets=', '.join(rejected_ticket_by_cmc_qa))
        try:
            file_content = html_templates['statistics_file_content_tpl'].format(ticketStatus=statistics_jira_status['rejected'],
                                                                                date=today,
                                                                                label=search_label,
                                                                                tickets = ', '.join(rejected_ticket_by_cmc_qa_only_ticket_numbers))
            write_to_file(rejected_ticket_file_name, file_content)
        except:
            # NOTE(review): bare except also swallows KeyboardInterrupt/SystemExit.
            log.error('Write the rejected content error.')
    # Generate the html content (email content)
    html_content = get_html_content(total_tickets_count, search_label, all_items_html_text,
                                    none_qa_reporter_html_text, tickets_status_statistics_html_text)
    # write to file
    # filename = os.path.join(get_work_path(output_dir), 'cmc_ticket_tracker_%s.html' % today)
    # fout = open(filename, 'wr')
    # fout.write(html_content)
    # fout.flush()
    # print filename
    # send email
    send_email(html_content)
def write_to_file(filename, content, write_mode='a'):
    """Write (append by default) `content` to `filename` inside the work dir."""
    target = os.path.join(get_work_path(output_dir), filename)
    with open(target, write_mode) as out:
        out.write(content)
def send_email(html_content):
    """Send the generated HTML report via Gmail SMTP.

    Recipients come from the module-level mail_to_set / email_ccs; the
    password is stored base64-encoded in the module-level email_password.
    Failures are logged, never raised.
    """
    try:
        if not html_content:
            # Covers None and '' alike (previous triple check was redundant).
            log.warn('No email content to send, ignore send email')
            return
        log.info("Ready send email.....")
        email_tos = list(mail_to_set)
        de_email_pwd = base64.decodestring(email_password)
        mail_content = MIMEText(html_content, 'html')
        mail_content['Subject'] = email_subject
        mail_content['From'] = email_from
        mail_content['To'] = ','.join(email_tos)
        mail_content['Cc'] = ','.join(email_ccs)
        server = smtplib.SMTP('smtp.gmail.com:587')
        server.ehlo()
        server.starttls()
        server.login(email_username, de_email_pwd)
        server.sendmail(email_from, email_tos, mail_content.as_string())
        server.quit()
        log.info('Send email successfully')
    except Exception:
        # Was a bare `except:`, which also swallowed SystemExit and
        # KeyboardInterrupt; narrowed to Exception.
        log.error("Send Email Error")
if __name__ == '__main__':
    # Optional first CLI argument: the label to check; when absent,
    # check_tickets_main derives the last work day's label itself.
    cli_args = sys.argv[1:]
    search_label = cli_args[0] if cli_args else None
    check_tickets_main(search_label)
    exit()
#!/usr/bin/python
import pymongo
import StockLib
from datetime import datetime, timedelta
# Connect to the (hard-coded) MongoDB instance and select the `stock` database.
client = pymongo.MongoClient('mongodb://mongodb_host:27017/')
db = client['stock']
stockLib = StockLib.StockLib()
# Date window for the scan described in the SQL notes below.
startDate = '2014-01-01'
endDate = '2020-01-01'
'''
Each day, get top earnings estimates increased top 20 industries, check its stocks' performance
SET @date = '2019-11-25';
SELECT s.sector, REPLACE(s.industry, '&', '&') AS industry, count(*) as count,
SUM(d.marketCap) AS marketCap,
SUM(d.marketCap/d.ps) AS revenue,
SUM(d.eps*d.outstandingShares) AS earnings,
SUM(d.marketCap)/SUM(d.eps*d.outstandingShares) AS PE,
SUM(IF(d.forwardPE>0, d.marketCap/d.forwardPE, d.eps*d.outstandingShares)) AS forwardEarnings,
SUM(d.marketCap)/SUM(IF(d.forwardPE>0, d.marketCap/d.forwardPE, d.eps*d.outstandingShares)) AS ForwardPE,
SUM(IF(d.forwardPE>0, d.marketCap/d.forwardPE, d.eps*d.outstandingShares)) - SUM(d.outstandingShares*d.eps) AS earningsDiff,
(SUM(IF(d.forwardPE>0, d.marketCap/d.forwardPE, d.eps*d.outstandingShares)) - SUM(d.outstandingShares*d.eps))*100/SUM(d.marketCap/d.ps) AS MarginImprovement,
AVG(d.epsQQ),
AVG(d.epsTY),
AVG(d.epsNY),
AVG(d.epsNext5Y)
FROM finvizDailySummary s
JOIN finvizDailyDetails d ON (d.symbol = s.symbol AND d.date = s.date)
WHERE s.date = @date
AND d.symbol not in ('GOOG', 'BRK-B', 'RDS-B', 'PBR-A')
AND s.industry <> 'Exchange Traded Fund'
AND s.marketCap > 10
GROUP BY 1, 2
HAVING MarginImprovement > 10 and count>=3
ORDER BY MarginImprovement DESC
LIMIT 20;
'''
|
import sys
import cv2 as cv
from PyQt5 import uic, QtWidgets, QtCore
from PyQt5.QtWidgets import QLabel, QApplication, QMainWindow, QFileDialog, QAction, QMessageBox
from PyQt5.QtGui import QPixmap, QImage, QIcon
from ip import ImageProcessing as IP
class Ui_MainWindow(QtWidgets.QMainWindow):
    """Main window for the image-processing demo.

    Loads the layout from ``interface.ui``, wires menu actions and parameter
    widgets to the processing routines in ``ip.ImageProcessing``, and shows
    the original image next to the processed result.
    """
    def __init__(self):
        # Result tuple of QFileDialog.getOpenFileName; "" until an image is opened.
        self.filename = ""
        super(Ui_MainWindow, self).__init__()
        uic.loadUi("interface.ui", self)
        self.setWindowTitle("Image Processing")
        self.show()
        self.Init()
        self.ip = ""            # ImageProcessing backend, created in open_image
        self.toolbar = False    # toolbar is created lazily on first *_click
        self.img_exist = False  # True once an image has been opened
        self.x_current = 0
        self.y_current = 0
        self.setWindowIcon(QIcon("icon/mainicon.png"))
        self.curImg = False     # last processed image shown, used by save_img
        QMessageBox.about(self, "Alert", "To enable all functions, select image first: File -> Open image")
    def Init(self):
        """Connect menu actions and parameter widgets to their handlers."""
        # wire up the menu action events
        self.act_open_img.triggered.connect(self.open_image)
        self.act_gray.triggered.connect(self.gray_click)
        self.act_freq.triggered.connect(self.freq_click)
        self.act_edge.triggered.connect(self.edge_click)
        self.act_noise.triggered.connect(self.noise_click)
        self.act_cartoon.triggered.connect(self.cartoon_click)
        self.act_save.triggered.connect(self.save_img)
        # mapping widgets to functions
        # geometric transforms
        self.rotation_inp.valueChanged.connect(self.rotation)
        self.scaling_inp.valueChanged.connect(self.scaling)
        self.tranX_inp.valueChanged.connect(self.getTranX)
        self.tranY_inp.valueChanged.connect(self.getTranY)
        self.shear_inp.valueChanged.connect(self.shearing)
        # gray-level transforms
        self.log_inp.valueChanged.connect(self.log)
        self.gamma_inp.valueChanged.connect(self.gamma)
        # spatial-domain filters
        self.median_inp.valueChanged.connect(self.median)
        self.bila_inp.valueChanged.connect(self.bilateral)
        self.gaussian_inp.valueChanged.connect(self.gaussian)
        self.blur_inp.valueChanged.connect(self.blur)
        # median threshold
        self.med_thresh_inp.valueChanged.connect(self.med_thresh)
    def save_img(self):
        """Ask for confirmation, then save the current processed image as
        <basename>_processed.<ext> in the app directory."""
        img = self.curImg
        s = self.filename[0]
        print(s)
        photoName = ""
        # Walk back to the last '/' to split off the base name; assumes a
        # 3-character extension -- TODO confirm behaviour for ".jpeg"/".tif".
        for i in range(len(s) - 1, 0, -1):
            if s[i] == '/':
                photoName = s[i + 1:len(s) - 4]
                photoName = photoName + "_processed" + s[len(s) - 4:]
                break
        q = QMessageBox.question(self, "Confirmation", "Save current image?", QMessageBox.Yes | QMessageBox.No,
                                 QMessageBox.No)
        if (q == QMessageBox.Yes):
            cv.imwrite(photoName, img)
            QMessageBox.about(self, "Alert", "Saved successfully in this app directory")
    def open_image(self):
        """Prompt for an image file, show it in both panes, create the
        processing backend, and unlock the parameter groups."""
        self.filename = QFileDialog.getOpenFileName(self, "Choose Image", "",
                                                    "Images File(*.jpg; *.jpeg; *.png, *.tif);;Python Files (*.py)")
        if(self.filename[0] != ''):
            width = self.org_label.width()
            height = self.org_label.height()
            self.org_label.setPixmap(QPixmap(self.filename[0]).scaled(width, height))
            self.pro_label.setPixmap(QPixmap(self.filename[0]).scaled(width, height))
            width = self.org_label.width()
            height = self.org_label.height()
            # Processing backend sized to the preview label.
            self.ip = IP(self.filename[0], width, height)
            self.img_exist = True
            self.geo_group_enable()
            self.spatial_group_enable()
            self.gray_group_enable()
    def geo_group_enable(self):
        """Enable the geometric-transform group once an image is loaded."""
        if (self.img_exist == True):
            self.geo_group.setEnabled(True)
        else:
            QMessageBox.about(self, "Alert", "To enable all functions, select image first: File -> Open image")
    def spatial_group_enable(self):
        """Enable the spatial-filter group once an image is loaded."""
        if (self.img_exist == True):
            self.spatial_group.setEnabled(True)
        else:
            QMessageBox.about(self, "Alert", "To enable all functions, select image first: File -> Open image")
    def gray_group_enable(self):
        """Enable the gray-level group once an image is loaded."""
        if (self.img_exist == True):
            self.gray_group.setEnabled(True)
        else:
            QMessageBox.about(self, "Alert", "To enable all functions, select image first: File -> Open image")
    def bind_to_label(self, changed_img):
        """Show *changed_img* in the processed pane and remember it for saving.

        changed_img is an array with .shape/.strides (OpenCV-style); the
        QImage format is picked from its channel count.
        """
        qformat = QImage.Format_Indexed8
        self.curImg=changed_img
        if len(changed_img.shape) == 3:
            if (changed_img.shape[2]) == 4:
                qformat = QImage.Format_RGBA8888
            else:
                qformat = QImage.Format_RGB888
        img = QImage(changed_img, changed_img.shape[1], changed_img.shape[0], changed_img.strides[0], qformat)
        # BGR > RGB (OpenCV channel order -> Qt)
        img = img.rgbSwapped()
        self.pro_label.setPixmap(QPixmap.fromImage(img))
        self.pro_label.setAlignment(QtCore.Qt.AlignHCenter | QtCore.Qt.AlignVCenter)
    # gray-level operations (toolbar actions)
    def negative(self):
        """Show the negative of the image."""
        res = self.ip.negative()
        self.bind_to_label(res)
    def histogram(self):
        """Show the histogram-equalized image."""
        res = self.ip.histogram()
        self.bind_to_label(res)
    def adapt_histogram(self):
        """Show the adaptive-histogram result."""
        res = self.ip.adapt_histogram()
        self.bind_to_label(res)
    def getTranX(self, x):
        """Slider handler: update X translation and re-apply."""
        self.tranX_val.setText(str(x))
        self.translation()
    def getTranY(self, y):
        """Slider handler: update Y translation and re-apply."""
        self.tranY_val.setText(str(y))
        self.translation()
    def translation(self):
        """Apply translation using the current X/Y label values."""
        x = int(self.tranX_val.text())
        y = int(self.tranY_val.text())
        res = self.ip.translation(x, y)
        self.bind_to_label(res)
    def scaling(self, size):
        """Slider handler: scale the image to `size` percent."""
        res = self.ip.scaling(size)
        self.scaling_val.setText(str(size) + "%")
        self.bind_to_label(res)
    def rotation(self, angle):
        """Slider handler: rotate the image by `angle` degrees."""
        self.rotation_val.setText(str(angle)+"°")
        res = self.ip.rotation(angle)
        self.bind_to_label(res)
    def shearing(self, m):
        """Slider handler: shear the image by factor `m`."""
        self.shear_val.setText(str(m))
        res = self.ip.shearing(m)
        self.bind_to_label(res)
    # gray-level point transforms
    def log(self, m):
        """Slider handler: log transform with constant `m`."""
        self.log_val.setText(str(m))
        res = self.ip.log(m)
        self.bind_to_label(res)
    def gamma(self, m):
        """Slider handler: gamma transform; slider value is gamma * 10."""
        self.gamma_val.setText(str("{0:.1f}".format(round(m*0.1, 1))))
        res = self.ip.gamma(m*0.1)
        self.bind_to_label(res)
    # spatial-domain filters; sliders map n -> odd kernel size 2n-1
    def blur(self, n):
        """Slider handler: box blur with kernel size 2n-1."""
        self.blur_val.setText(str(n * 2 - 1))
        size = int(self.blur_val.text())
        res = self.ip.blur(size)
        self.bind_to_label(res)
    def gaussian(self, n):
        """Slider handler: Gaussian blur with kernel size 2n-1."""
        self.gaussian_val.setText(str(n * 2 - 1))
        size = int(self.gaussian_val.text())
        res = self.ip.gaussian(size)
        self.bind_to_label(res)
    def median(self, n):
        """Slider handler: median filter with kernel size 2n-1."""
        self.median_val.setText(str(n * 2 - 1))
        size = int(self.median_val.text())
        res = self.ip.median(size)
        self.bind_to_label(res)
    def med_thresh(self, thresh):
        """Slider handler: median threshold at `thresh`."""
        self.med_thresh_val.setText(str(thresh))
        # NOTE(review): `size` is computed but unused; `thresh` is passed directly.
        size = int(self.med_thresh_val.text())
        res = self.ip.med_thresh(thresh)
        self.bind_to_label(res)
    def bilateral(self, sigma):
        """Slider handler: bilateral filter with the given sigma."""
        self.bila_val.setText(str(sigma))
        # NOTE(review): `size` is computed but unused; `sigma` is passed directly.
        size = int(self.bila_val.text())
        res = self.ip.bilateral(sigma)
        self.bind_to_label(res)
    # noise generators
    def gauss_noise(self):
        """Show the image with Gaussian noise added."""
        res=self.ip.gauss_noise_img()
        self.bind_to_label(res)
    def erlang_noise(self):
        """Show the image with Erlang noise added."""
        res=self.ip.erlang_noise_img()
        self.bind_to_label(res)
    def rayleigh_noise(self):
        """Show the image with Rayleigh noise added."""
        res=self.ip.rayleigh_noise_img()
        self.bind_to_label(res)
    def uniform_noise(self):
        """Show the image with uniform noise added."""
        res=self.ip.uniform_noise_img()
        self.bind_to_label(res)
    # cartoon / frequency-domain effects
    def cartoon(self):
        """Show the cartoonified image."""
        res=self.ip.cartoon()
        self.bind_to_label(res)
    def gauss_hp(self):
        """Show the Gaussian high-pass filtered image."""
        res=self.ip.gaussian_hp()
        self.bind_to_label(res)
    # edge detectors
    def sobelx(self):
        """Show the Sobel X-gradient image."""
        res = self.ip.sobelX()
        self.bind_to_label(res)
    def sobely(self):
        """Show the Sobel Y-gradient image."""
        res = self.ip.sobelY()
        self.bind_to_label(res)
    def laplacian(self):
        """Show the Laplacian edge image."""
        res = self.ip.laplacian()
        self.bind_to_label(res)
    def canny(self):
        """Show the Canny edge image."""
        res=self.ip.canny()
        self.bind_to_label(res)
    # menu handlers: each rebuilds the toolbar with the actions for its category
    def gray_click(self):
        """Populate the toolbar with the gray-level actions."""
        if (self.img_exist == True):
            if (self.toolbar == False):
                # create the toolbar on first use
                self.toolbar = self.addToolBar('Toolbar')
            else:
                self.toolbar.clear()
            act_negative = QAction(QIcon(), 'Negative', self)
            act_negative.triggered.connect(self.negative)
            act_histogram = QAction(QIcon(), 'Histogram', self)
            act_histogram.triggered.connect(self.histogram)
            act_adapt_histogram = QAction(QIcon(), 'Adaptive Histogram', self)
            act_adapt_histogram.triggered.connect(self.adapt_histogram)
            self.toolbar.clear()
            self.toolbar.addAction(act_negative)
            self.toolbar.addAction(act_histogram)
            self.toolbar.addAction(act_adapt_histogram)
        else:
            QMessageBox.about(self, "Alert", "To enable all functions, select image first: File -> Open image")
    def freq_click(self):
        """Populate the toolbar with the frequency-domain actions."""
        if (self.img_exist == True):
            if (self.toolbar == False):
                self.toolbar = self.addToolBar('Toolbar')
            else:
                self.toolbar.clear()
            act_gauss_hp = QAction('High-Pass Filter', self)
            act_gauss_hp.triggered.connect(self.gauss_hp)
            self.toolbar.addAction(act_gauss_hp)
        else:
            QMessageBox.about(self, "Alert", "To enable all functions, select image first: File -> Open image")
    def edge_click(self):
        """Populate the toolbar with the edge-detection actions."""
        if (self.img_exist == True):
            if (self.toolbar == False):
                self.toolbar = self.addToolBar('Toolbar')
            else:
                self.toolbar.clear()
            act_sobelX = QAction(QIcon(), 'SobelX', self)
            act_sobelX.triggered.connect(self.sobelx)
            self.toolbar.addAction(act_sobelX)
            act_sobelY = QAction(QIcon(), 'SobelY', self)
            act_sobelY.triggered.connect(self.sobely)
            self.toolbar.addAction(act_sobelY)
            act_laplacian = QAction(QIcon(), 'Lapcian', self)
            act_laplacian.triggered.connect(self.laplacian)
            self.toolbar.addAction(act_laplacian)
            act_canny = QAction(QIcon(), 'Canny', self)
            act_canny.triggered.connect(self.canny)
            self.toolbar.addAction(act_canny)
        else:
            QMessageBox.about(self, "Alert", "To enable all functions, select image first: File -> Open image")
    def cartoon_click(self):
        """Populate the toolbar with the cartoon action."""
        if (self.img_exist == True):
            if (self.toolbar == False):
                self.toolbar = self.addToolBar('Toolbar')
            else:
                self.toolbar.clear()
            act_cartoon = QAction(QIcon(), 'Cartoon', self)
            act_cartoon.triggered.connect(self.cartoon)
            self.toolbar.addAction(act_cartoon)
        else:
            QMessageBox.about(self, "Alert", "To enable all functions, select image first: File -> Open image")
    def noise_click(self):
        """Populate the toolbar with the noise-generation actions."""
        if (self.img_exist == True):
            if (self.toolbar == False):
                self.toolbar = self.addToolBar('Toolbar')
            else:
                self.toolbar.clear()
            act_gauss_noise = QAction(QIcon(), 'Gauss Noise', self)
            act_gauss_noise.triggered.connect(self.gauss_noise)
            self.toolbar.addAction(act_gauss_noise)
            act_erlang_noise = QAction(QIcon(), 'Erlang Noise', self)
            act_erlang_noise.triggered.connect(self.erlang_noise)
            self.toolbar.addAction(act_erlang_noise)
            act_rayleigh_noise = QAction(QIcon(), 'Rayleigh Noise', self)
            act_rayleigh_noise.triggered.connect(self.rayleigh_noise)
            self.toolbar.addAction(act_rayleigh_noise)
            act_uniform_noise = QAction(QIcon(), 'Uniform Noise', self)
            act_uniform_noise.triggered.connect(self.uniform_noise)
            self.toolbar.addAction(act_uniform_noise)
        else:
            QMessageBox.about(self, "Alert", "To enable all functions, select image first: File -> Open image")
if __name__ == "__main__":
a = QtWidgets.QApplication(sys.argv)
win = Ui_MainWindow()
sys.exit(a.exec_())
|
# Aa Aa - Aa a Aa Aa a AaN
N
'''N
Aa a a a a a a a aN
'''N
N
# Aa Aa a Aa Aa aN
a a a aN
a a('a.a', a='a-0') a a_a:N
a_a = a(a(a_a))N
a_a_a = a_a[0]N
a_a = a_a[0:]N
N
a('Aa a:')N
a(a_a_a)N
a('Aa a a')N
a(a_a[0])N
N
# Aa a Aa, AaAa, AaAa a Aa a a a a aN
# Aa a a a a a a a (Aa, a, Aa, '')N
# Aa a a a a a a a a a aN
# Aa a a a a a a aN
# Aa a a a a a a a a a a a aN
N
a a_a_a(a_a_a_a):N
# a a a a'a a a a aN
a a_a_a_a != "":N
# a a a a a a aN
# a a aN
a_a_a_a = a(a_a_a_a.a("(", "").a(")", ""))N
N
a a_a_a_aN
N
N
a a_a_a(a):N
a_a = ["(",")","a","A",".","a","'", " "]N
N
a a a a_a:N
a = a.a(a,"")N
N
a '-' a a:N
a0, a0 = (a(a) a a a a.a('-'))N
a a((a0 + a0) / 0)N
N
a a(a)N
N
N
# Aa aN
a a a a_a:N
a = a[0].a('(', '').a(')', '').a()N
a = a[0]N
a = a[0]N
a = a[0].a('(', '').a(')', '').a()N
a = a_a_a(a[0])N
N
a[0] = a_a_a(a)N
a[0] = a_a_a(a)N
a[0] = aN
N
a a == '':N
a[0] = "Aa Aa"N
N
a:N
a[0] = aN
N
a a == '':N
a[0] = "Aa Aa/Aa"N
N
a:N
a[0] = aN
N
a()N
a('a a a. Aa a')N
a(a_a[:0]) |
# Definition for singly-linked list.
class ListNode:
    """A node of a singly linked list."""
    def __init__(self, x):
        self.val = x      # payload
        self.next = None  # successor, or None at the tail


# O(n) time, O(1) extra space: reverse the back half in place, then compare.
class Solution:
    def isPalindrome(self, head: ListNode) -> bool:
        """Return True if the list reads the same forwards and backwards.

        NOTE: the list is modified in place (the back half is reversed and
        detached) and is not restored afterwards.
        """
        # Count the nodes.
        length = 0
        node = head
        while node:
            length += 1
            node = node.next
        if length < 2:
            return True
        half = length // 2
        # Two-pointer walk: `probe` runs `half` nodes ahead so that when it
        # falls off the end, `pivot` sits `half` nodes before the tail.
        dummy = ListNode(0)
        dummy.next = head
        pivot = probe = dummy
        for _ in range(half):
            probe = probe.next
        while probe:
            pivot, probe = pivot.next, probe.next
        # Detach the nodes after `pivot` and reverse them back onto it, so the
        # chain starting at the old tail covers the last `half` values.
        rest = pivot.next
        pivot.next = None
        prev = pivot
        while rest:
            following = rest.next
            rest.next = prev
            prev = rest
            rest = following
        # Compare the reversed back half with the front half.
        front = head
        while prev:
            if prev.val != front.val:
                return False
            prev, front = prev.next, front.next
        return True
|
import numpy as np
import torch.nn as nn
from web_processor import WebProcessor
from rel_predictor import RelPredictor
from link_evaluation import LinkEvaluation
from link_evaluation import CNN
class Web:
    """Facade bundling page-level and outlink-level relevance scoring for a query."""

    def __init__(self, query, W2V):
        self.processor = WebProcessor(query, model=W2V)
        self.page_eval = RelPredictor(query, self.processor)
        self.outlink_eval = LinkEvaluation(query, self.processor)

    def page_target_topics(self, link):
        """Relevance score of a single page, clamped into [0, 1]."""
        raw = self.page_eval.get_relevance(link)
        return np.clip(raw, 0, 1)

    def outlink_target_topics(self, relevant_urls):
        """Outlink scores with every entry clamped into [0, 1] in place."""
        scores = self.outlink_eval.get_link_score(relevant_urls)
        for key in scores:
            scores[key] = np.clip(scores[key], 0, 1)
        return scores
|
# Turn
from AI import*
from player_class import*
from AI_foreign_affairs import*
from copy import deepcopy
from Scenarios.historical.Scenario import*
from Scenarios.BalanceScenario.Scenario import*
def AI_turn(players, player, market, relations, provinces):
    """Run one full AI turn for *player*: upkeep, economy, priority-specific
    build order, diplomacy, and end-of-turn bookkeeping.

    Skips players with no provinces and human players. Mutates player,
    market, and relations in place.
    """
    if len(player.provinces.keys()) < 1:
        return
    if type(player) == Human:
        return
    #if player.name == "England":
    #    player.general_priority = ""
    print("___________________________________________________________________")
    print("It is now %s's turn \n" % (player.name))
    print("General priority: %s" % player.general_priority)
    print("___________________________________________________________________")
    # --- per-turn resets and upkeep ---
    player.has_obliterated = ""
    for k in player.goods_produced.keys():
        player.goods_produced[k] = 0
    if player.reputation < 0.6:
        player.reputation += player.developments["culture"] * 0.15
    # clamp military counts at zero
    for k in player.military.keys():
        if player.military[k] < 0:
            player.military[k] = 0
    player.AP += 1
    print("AP = %s" % (player.AP))
    # --- economic assessment and needs fulfilment ---
    player.calculate_access_to_goods(market)
    player.calculate_resource_base()
    player.update_priorities(market)
    player.calculate_resource_production()
    player.calculate_resource_need()
    player.calculate_resource_forecast()
    player.fulfill_needs(market, relations, players)
    #player.view_AI_inventory()
    #POP Increase
    player.ai_increase_pop(market, relations, players)
    player.ai_increase_pop(market, relations, players)
    player.assign_priorities_to_provs()
    player.AI_reset_POP()
    player.AI_assign_POP()
    player.use_culture(players)
    player.choose_technology(market)
    AI_values(player)
    # --- priority-specific build order (same actions, different ordering) ---
    if player.general_priority == "expansion":
        player.early_game_expansion(market, relations, players)
        player.decide_build_navy(market, relations, players)
        player.develop_industry(market, relations, players)
        player.AIbuild_army(market, relations, players)
        player.ai_increase_middle_class(market, relations, players)
        player.decide_build_navy(market, relations, players)
        player.ai_decide_factory_productions(market, relations, players)
        player.ai_increase_middle_class(market, relations, players)
        player.develop_industry(market, relations, players)
    elif player.general_priority == "army":
        player.early_game_army(market, relations, players)
        player.AIbuild_army(market, relations, players)
        player.ai_decide_factory_productions(market, relations, players)
        player.ai_increase_middle_class(market, relations, players)
        player.develop_industry(market, relations, players)
        player.decide_build_navy(market, relations, players)
        player.ai_increase_middle_class(market, relations, players)
        player.develop_industry(market, relations, players)
    elif player.general_priority == "industrialize":
        player.early_game_development(market, relations, players)
        player.develop_industry(market, relations, players)
        player.ai_decide_factory_productions(market, relations, players)
        player.ai_increase_middle_class(market, relations, players)
        player.decide_build_navy(market, relations, players)
        player.AIbuild_army(market, relations, players)
        player.develop_industry(market, relations, players)
        player.ai_increase_middle_class(market, relations, players)
    elif player.general_priority == "production":
        player.ai_decide_factory_productions(market, relations, players)
        player.develop_industry(market, relations, players)
        player.ai_increase_middle_class(market, relations, players)
        player.AIbuild_army(market, relations, players)
        player.decide_build_navy(market, relations, players)
        player.develop_industry(market, relations, players)
        player.ai_increase_middle_class(market, relations, players)
    elif player.general_priority == "development":
        player.ai_increase_middle_class(market, relations, players)
        player.develop_industry(market, relations, players)
        player.ai_increase_middle_class(market, relations, players)
        player.ai_decide_factory_productions(market, relations, players)
        player.AIbuild_army(market, relations, players)
        player.decide_build_navy(market, relations, players)
        player.develop_industry(market, relations, players)
    else:
        # no / unknown priority: balanced default ordering
        player.early_game(market, relations, players)
        player.ai_increase_middle_class(market, relations, players)
        player.develop_industry(market, relations, players)
        player.ai_decide_factory_productions(market, relations, players)
        player.AIbuild_army(market, relations, players)
        player.decide_build_navy(market, relations, players)
        player.develop_industry(market, relations, players)
        player.ai_increase_middle_class(market, relations, players)
        player.decide_build_navy(market, relations, players)
    player.use_chemicals(market, relations, players)
    #attack_target(player, players, relations, provinces, market)
    # --- foreign affairs: target selection, war, diplomacy ---
    if "England" in players.keys() or "Germany" in players.keys() or "France" in players.keys():
        decide_target(player, players, market, provinces, relations)
    else:
        decide_rival_target(player, players, market, provinces, relations)
    worsen_relations(player, players, relations, provinces, market)
    gain_cb(player, players, relations)
    attack_target(player, players, relations, provinces, market)
    ai_bribe(player, players, relations)
    ai_decide_ally_target(player, players, provinces)
    # Spend remaining diplomatic actions (capped at 12 iterations).
    count = 0
    while player.diplo_action >= 2 and count < 12:
        if player.rival_target == [] or len(player.CB) < 2:
            worsen_relations(player, players, relations, provinces, market)
        if len(player.embargo) > 0:
            # randomly pick one embargoed nation and improve relations with it
            improve = sample(player.embargo, 1)
            improve = improve[0]
            relata = frozenset({player.name, improve})
            improve = players[improve]
            player.diplo_action -=1
            if relata in relations.keys():
                # smaller nations are cheaper to befriend (+0.001 avoids /0)
                relations[relata].relationship += min(1, 5/(improve.POP + 0.001))
                player.reputation += 0.02
                print("Improves relations with %s by %s" % (improve.name, min(1, 5/(improve.POP + 0.001))))
        damage_relations(player, players, relations)
        ai_improve_relations(player, players, relations)
        ai_destablize(player, players, relations)
        count += 1
    ai_embargo(player, players, relations)
    ai_lift_embargo(player, players, relations)
    #if player.AP >= 1:
    #    player.AI_set_objective(turn, market)
    #    player.attempt_objective(market)
    # --- end-of-turn bookkeeping ---
    player.AI_sell_surplus(market, players)
    player.check_obsolete()
    player.check_stability(market, relations, players)
    player.use_spice_stability()
    #player.supply_factories_with_material(market)
    player.spend_excess_cash(market, players, relations)
    player.turn(market)
|
import operator
import pathlib
from dataclasses import dataclass
from typing import Callable, Dict
def main():
    """Entry point: parse the puzzle input and print the accumulator value
    at the moment the program first loops."""
    raw = pathlib.Path('puzzle_input.txt').read_text()
    program = instruction_parser(raw)
    print(process(program))
@dataclass
class Operation:
    """One parsed instruction: an opcode, its integer argument, and the
    sign resolved to an arithmetic callable.

    Both `arg` and `op` arrive from the parser as raw strings ('3', '+')
    and are normalised in __post_init__.
    """

    instruction: str
    arg: int
    # Bug fix: was annotated `op: operator` (the module object, not a type).
    op: Callable[[int, int], int]

    def __post_init__(self):
        self.arg = int(self.arg)
        if self.op == '-':
            self.op = operator.sub
        elif self.op == '+':
            self.op = operator.add
def instruction_parser(raw_instructions) -> Dict[int, Operation]:
    """Parse the raw puzzle text into a step-indexed table of Operations.

    Blank lines are skipped; an argument token such as '+3' is split into
    its sign character (op) and magnitude (arg).
    """
    parsed = {}
    meaningful_lines = (line for line in raw_instructions.split('\n') if line.strip())
    for index, line in enumerate(meaningful_lines):
        mnemonic, token = line.split(" ")
        parsed[index] = Operation(
            instruction=mnemonic,
            arg=token[1:],
            op=token[:1]
        )
    return parsed
def process(instructions, accumulator=0):
    """Execute *instructions* (step -> Operation) until an instruction is
    about to run a second time (infinite loop) or execution moves past the
    last instruction; return the accumulator at that point.

    Generalization: the original raised KeyError on normal termination;
    now the accumulator is returned in that case too.
    """
    seen = set()
    step = 0
    while step in instructions:
        if step in seen:
            # About to re-execute an instruction: loop detected.
            return accumulator
        seen.add(step)
        op = instructions[step]
        if op.instruction == 'nop':
            step += 1
        elif op.instruction == 'acc':
            accumulator = op.op(accumulator, op.arg)
            step += 1
        elif op.instruction == 'jmp':
            step = op.op(step, op.arg)
        # Unknown opcodes leave `step` unchanged; the repeat check above then
        # returns on the next pass (mirrors the original behaviour).
    # Normal termination: stepped past the final instruction.
    return accumulator
# Run only when executed as a script, not when imported.
if __name__ == '__main__':
    main()
|
from turtle import Turtle, Screen

# Shared drawing turtle and its screen.
tim = Turtle()
screen = Screen()
# Heading accumulator in degrees, used by the turn key handlers.
angle = 0
def move_forwards():
    """Key handler ('w'): advance the turtle 10 units."""
    tim.fd(10)
def move_backwards():
    """Key handler ('s'): step the turtle back 10 units."""
    tim.bk(10)
def turn_anticlockwise():
    """Key handler ('a'): rotate the turtle 5 degrees counter-clockwise.

    Bug fix: the heading was previously tracked in the module-level `angle`
    variable, which went stale after reset_canvas() set the turtle's real
    heading back to 0, making the next turn snap to the old angle. Derive
    the new heading from the turtle itself instead.
    """
    tim.setheading(tim.heading() + 5)


def turn_clockwise():
    """Key handler ('d'): rotate the turtle 5 degrees clockwise.

    Same fix as turn_anticlockwise: use the turtle's live heading rather
    than the stale module-level `angle` accumulator.
    """
    tim.setheading(tim.heading() - 5)
def reset_canvas():
    """Key handler ('c'): clear the drawing and re-home the turtle.

    NOTE(review): tim.reset() returns the heading to 0, but the module-level
    `angle` accumulator used by the turn handlers is left untouched, so the
    next turn key snaps back to the stale heading.
    """
    tim.reset()
# Listen for key presses and map WASD + 'c' to the handlers above.
screen.listen()
screen.onkey(key="w", fun=move_forwards)
screen.onkey(key="s", fun=move_backwards)
screen.onkey(key="a", fun=turn_anticlockwise)
screen.onkey(key="d", fun=turn_clockwise)
screen.onkey(key="c", fun=reset_canvas)
# Keep the window open until it is clicked.
screen.exitonclick()
def find_uniq(arr):
    """Return the single value in *arr* that differs from all the others.

    Assumes *arr* has at least three (hashable) elements, exactly one of
    which is unique while every other element equals the rest. Returns
    None when no element occurs exactly once.
    """
    # Local import keeps this helper self-contained.
    from collections import Counter
    # Idiom: replace the hand-rolled first-three comparison + set scan with a
    # single O(n) frequency count; the unique element occurs exactly once.
    for value, count in Counter(arr).items():
        if count == 1:
            return value
    return None
# O(m+n) time where m and n are the number of characters in the letter and magazine respectively
# O(L) space where L is the number of distinct letters appearing in the letter
def is_letter_constructible_from_magazine(letter_text, magazine_text):
    """True iff the magazine's characters cover the letter's (with multiplicity)."""
    letter_counts = collections.Counter(letter_text)
    magazine_counts = collections.Counter(magazine_text)
    # Counter subtraction drops non-positive entries, so an empty result means
    # the magazine supplies at least as many of every character as needed.
    return not (letter_counts - magazine_counts)
|
from flask import Flask
from flask_sqlalchemy import SQLAlchemy
from flask_migrate import Migrate
from flask_bcrypt import Bcrypt
from flask_login import LoginManager
import uuid
from werkzeug.security import generate_password_hash, check_password_hash
import jwt
import datetime
from functools import wraps
from flask_script import Manager
from flask_migrate import Migrate
import os
# Serve the built client bundle as the app's static files.
app = Flask(__name__, static_folder='../client/build', static_url_path='/')
# Prefer the deployment database URL when present; the "://" -> "ql://"
# rewrite presumably turns a legacy "postgres://" URL into the
# "postgresql://" scheme SQLAlchemy expects -- TODO confirm.
if os.environ.get('DATABASE_URL'):
    app.config['SQLALCHEMY_DATABASE_URI'] = os.environ.get('DATABASE_URL').replace("://", "ql://", 1)
else:
    # Local development fallback.
    app.config['SQLALCHEMY_DATABASE_URI'] = 'postgresql://postgres:n&6e-oca@localhost/flask'
# NOTE(review): the secret key (and the local DB password above) are
# hard-coded in source; move them to environment variables.
SECRET_KEY = app.config['SECRET_KEY'] = '5bec7e1b45fb18a457ea033f'
db = SQLAlchemy(app)
migrate = Migrate(app, db)
bcrypt = Bcrypt(app)
login_manager = LoginManager(app)
login_manager.login_view = "login_page"
login_manager.login_message_category = "info"
# Imported at the bottom so routes/api can import `app`/`db` without a cycle.
from app import routes
from app import api
from app.api import bp as api_bp
app.register_blueprint(api_bp, url_prefix='/api')
|
# -*- coding: utf-8 -*-
#
# Copyright 2020 Nitrokey Developers
#
# Licensed under the Apache License, Version 2.0, <LICENSE-APACHE or
# http://apache.org/licenses/LICENSE-2.0> or the MIT license <LICENSE-MIT or
# http://opensource.org/licenses/MIT>, at your option. This file may not be
# copied, modified, or distributed except according to those terms.
import logging
import platform
import string
import subprocess
import time
from shutil import which
from typing import Optional
import click
import usb1
from intelhex import IntelHex
from tqdm import tqdm
from pynitrokey.cli.exceptions import CliException
from pynitrokey.helpers import (
AskUser,
check_pynitrokey_version,
confirm,
local_critical,
local_print,
prompt,
)
from pynitrokey.libnk import DeviceNotFound, NitrokeyStorage, RetCode
def connect_nkstorage():
    """Connect to the first Nitrokey Storage device.

    Raises CliException (without support hint) when no device is attached.
    """
    try:
        device = NitrokeyStorage()
        device.connect()
    except DeviceNotFound:
        raise CliException("No Nitrokey Storage device found", support_hint=False)
    return device
logger = logging.getLogger(__name__)
# Top-level click group; all storage subcommands are attached to it via
# storage.add_command(...) at the bottom of this module.
@click.group()
def storage():
    """Interact with Nitrokey Storage devices, see subcommands."""
    pass
def process_runner(c: str, args: Optional[dict] = None) -> str:
    """Wrapper for running command and returning output, both logged.

    Substitution of $KEY placeholders from <args> is done per argv token, so a
    value containing spaces remains a single argument (see note at call site).

    Raises subprocess.CalledProcessError on a non-zero exit (after logging).
    """
    cmd = c.split()
    # Only bother templating when some placeholder actually occurs in the command.
    if args and any(f"${key}" in c for key in args.keys()):
        for i, _ in enumerate(cmd):
            template = string.Template(cmd[i])
            cmd[i] = template.substitute(args)
    logger.debug(f"Running {c}")
    local_print(f'* Running \t"{c}"')
    try:
        # stderr folded into stdout so failures carry the full tool output.
        output = subprocess.check_output(cmd, stderr=subprocess.STDOUT).decode()
    except subprocess.CalledProcessError as e:
        logger.error(f'Output for "{c}": {e.output}')
        local_print(f'\tOutput for "{c}": "{e.output.strip().decode()}"')
        raise
    logger.debug(f'Output for "{c}": {output}')
    return output
class DfuTool:
    """Thin wrapper around the external `dfu-programmer` CLI tool."""

    name = "dfu-programmer"
    @classmethod
    def is_available(cls):
        """Check whether `name` is on PATH and marked as executable."""
        return which(cls.name) is not None
    @classmethod
    def get_version(cls) -> str:
        """Return the stripped output of `dfu-programmer --version`."""
        c = f"{cls.name} --version"
        output = process_runner(c).strip()
        return output
    @classmethod
    def check_version(cls) -> bool:
        """Parse the version string and compare against the minimum (0, 6, 1).

        Assumes the version number is the second whitespace-separated token of
        the tool's version output — TODO confirm across dfu-programmer releases.
        """
        # todo choose and use specialized package for version strings management, e.g:
        # from packaging import version
        ver_string = cls.get_version()
        ver = ver_string.split()[1]
        ver_found = (*map(int, ver.split(".")),)
        ver_required = (0, 6, 1)
        local_print(f"Tool found: {ver_string}")
        return ver_found >= ver_required
    @classmethod
    def self_check(cls) -> bool:
        """Abort if the tool is missing; otherwise print its version and return True.

        NOTE(review): the boolean result of check_version() is printed but ignored,
        so an old tool version does not abort the update.
        """
        if not cls.is_available():
            local_print(
                f"{cls.name} is not available. Please install it or use another tool for update."
            )
            raise click.Abort()
        local_print("")
        cls.check_version()
        local_print("")
        return True
class ConnectedDevices:
    """Simple holder for the per-mode presence flags produced by is_connected()."""

    application_mode: int
    update_mode: int

    def __init__(self, application_mode, update_mode):
        # Plain data container; no behavior beyond storing both counts.
        self.application_mode, self.update_mode = application_mode, update_mode
class UsbId:
    """A USB vendor/product id pair."""

    vid: int
    pid: int

    def __init__(self, vid, pid):
        # Stored verbatim; used only for USB enumeration lookups.
        self.vid, self.pid = vid, pid
def is_connected() -> ConnectedDevices:
    """Probe USB for Nitrokey Storage devices in either mode.

    Returns a ConnectedDevices holding 1/0 presence flags (not true counts):
    1 if at least one device with the given VID/PID is enumerated, else 0.
    """
    devs = {}
    usb_id = {
        # Bootloader (update mode) VID/PID vs. regular application-mode VID/PID.
        "update_mode": UsbId(0x03EB, 0x2FF1),
        "application_mode": UsbId(0x20A0, 0x4109),
    }
    with usb1.USBContext() as context:
        for k, v in usb_id.items():
            res = context.getByVendorIDAndProductID(vendor_id=v.vid, product_id=v.pid)
            devs[k] = 1 if res else 0
    return ConnectedDevices(
        application_mode=devs["application_mode"], update_mode=devs["update_mode"]
    )
@click.command()
@click.argument("firmware", type=click.Path(exists=True, readable=True))
@click.option(
    "--experimental",
    default=False,
    is_flag=True,
    help="Allow to execute experimental features",
)
def update(firmware: str, experimental):
    """experimental: run assisted update through dfu-programmer tool"""
    check_pynitrokey_version()
    # Guard: Linux-only, and the user must explicitly opt in.
    if platform.system() != "Linux" or not experimental:
        local_print(
            "This feature is Linux only and experimental, which means it was not tested thoroughly.\n"
            "Please pass --experimental switch to force running it anyway."
        )
        raise click.Abort()
    # NOTE(review): assert is stripped under python -O; consider an explicit check.
    assert firmware.endswith(".hex")
    DfuTool.self_check()
    # The three dfu-programmer steps: erase, flash (preserving bootloader memory), start app.
    commands = """
    dfu-programmer at32uc3a3256s erase
    dfu-programmer at32uc3a3256s flash --suppress-bootloader-mem $FIRMWARE
    dfu-programmer at32uc3a3256s start
    """
    local_print(
        "Note: During the execution update program will try to connect to the device. "
        "Check your udev rules in case of connection issues."
    )
    local_print(f"Using firmware path: {firmware}")
    # note: this is just for presentation - actual argument replacement is done in process_runner
    # the string form cannot be used, as it could contain space which would break dfu-programmer's call
    args = {"FIRMWARE": firmware}
    local_print(
        f"Commands to be executed: {string.Template(commands).substitute(args)}"
    )
    if not confirm("Do you want to perform the firmware update now?"):
        logger.info("Update cancelled by user")
        raise click.Abort()
    # Make sure a device is in (or switched to) update/bootloader mode before flashing.
    check_for_update_mode()
    commands_clean = commands.strip().split("\n")
    for c in commands_clean:
        c = c.strip()
        if not c:
            continue
        try:
            output = process_runner(c, args)
            if output:
                local_print(output)
        except subprocess.CalledProcessError as e:
            linux = "linux" in platform.platform().lower()
            local_critical(
                e, "Note: make sure you have the udev rules installed." if linux else ""
            )
    local_print("")
    local_print("Finished!")
    # Poll up to ~10s for the device to re-enumerate back in application mode.
    for _ in tqdm(range(10), leave=False):
        if is_connected().application_mode != 0:
            break
        time.sleep(1)
    # Show the freshly flashed device by reusing the `list` subcommand's callback.
    list_cmd = storage.commands["list"]
    assert list_cmd.callback
    list_cmd.callback()
def check_for_update_mode():
    """Ensure a Nitrokey Storage in update (bootloader) mode is reachable.

    If a device is seen only in application mode, trigger the `enable-update`
    subcommand (which prompts for the firmware password) and poll up to ~10s
    for the device to re-enumerate in update mode.
    """
    connected = is_connected()
    # NOTE(review): assert is stripped under python -O.
    assert (
        connected.application_mode + connected.update_mode > 0
    ), "No connected Nitrokey Storage devices found"
    if connected.application_mode and not connected.update_mode:
        # execute bootloader
        storage.commands["enable-update"].callback()
        for _ in tqdm(range(10), leave=False):
            if is_connected().update_mode != 0:
                break
            time.sleep(1)
        # extra settle time after the device re-enumerates
        time.sleep(1)
    else:
        local_print(
            "Nitrokey Storage in update mode found in the USB list (not connected yet)"
        )
@click.command()
def list():
    """List connected devices"""
    # NOTE(review): shadows the builtin `list` at module level; the name is the
    # click subcommand name, so it must stay.
    local_print(":: 'Nitrokey Storage' keys:")
    devices = NitrokeyStorage.list_devices()
    for dct in devices:
        local_print(f" - {dct}")
    # With exactly one device attached we can connect unambiguously and show details.
    if len(devices) == 1:
        nks = NitrokeyStorage()
        nks.connect()
        local_print(f"Found libnitrokey version: {nks.library_version()}")
        local_print(f"Firmware version: {nks.fw_version}")
        local_print(f"Admin PIN retries: {nks.admin_pin_retries}")
        local_print(f"User PIN retries: {nks.user_pin_retries}")
@click.command()
def enable_update():
    """Enable firmware update for NK Storage device

    If the Firmware Password is not in the environment variable NITROPY_FIRMWARE_PASSWORD, it will be prompted from stdin
    """
    password = AskUser(
        "Firmware Password", envvar="NITROPY_FIRMWARE_PASSWORD", hide_input=True
    ).ask()
    local_print("Enabling firmware update mode")
    nks = connect_nkstorage()
    # 0 is the device's success return code here.
    if nks.enable_firmware_update(password) == 0:
        local_print("setting firmware update mode - success!")
    else:
        local_critical(
            "Enabling firmware update has failed. Check your firmware password."
        )
@click.command()
def change_firmware_password():
    """
    Change the firmware update password.

    The user is prompted for the old and the new firmware update password. Per
    default, the firmware update password is 12345678.
    """
    nk = connect_nkstorage()
    old_password = prompt(
        "Old firmware update password", default="12345678", hide_input=True
    )
    new_password = prompt(
        "New firmware update password", hide_input=True, confirmation_prompt=True
    )
    ret = nk.change_firmware_password(old_password, new_password)
    # Map known device return codes to user-facing errors.
    if ret.ok:
        local_print("Successfully updated the firmware password")
    elif ret == RetCode.WRONG_PASSWORD:
        local_critical("Wrong firmware update password", support_hint=False)
    elif ret == RetCode.TooLongStringException:
        local_critical(
            "The new firmware update password is too long", support_hint=False
        )
    else:
        local_critical(f"Failed to update the firmware password ({ret.name})")
@click.command()
def open_encrypted():
    """Unlock the encrypted volume

    If the User PIN is not in the environment variable NITROPY_USER_PIN, it will be prompted from stdin
    """
    password = AskUser("User PIN", envvar="NITROPY_USER_PIN", hide_input=True).ask()
    nks = connect_nkstorage()
    ret = nks.unlock_encrypted_volume(password)
    if not ret.ok:
        # Distinguish a bad PIN from unexpected device errors.
        if ret == RetCode.WRONG_PASSWORD:
            raise CliException("Wrong user PIN", support_hint=False)
        else:
            raise CliException(
                "Unexpected error unlocking the encrypted volume {}".format(str(ret))
            )
@click.command()
def close_encrypted():
    """Lock the encrypted volume"""
    result = connect_nkstorage().lock_encrypted_volume()
    # Any non-ok return code from the device is surfaced as a CLI error.
    if not result.ok:
        raise CliException("Error closing the encrypted volume: {}".format(str(result)))
@click.command()
def open_hidden():
    """Unlock a hidden volume

    If the hidden volume passphrase is not in the environment variable NITROPY_HIDDEN_PASSPHRASE, it will be prompted from stdin
    """
    password = AskUser(
        "Hidden volume passphrase", envvar="NITROPY_HIDDEN_PASSPHRASE", hide_input=True
    ).ask()
    nks = connect_nkstorage()
    ret = nks.unlock_hidden_volume(password)
    if not ret.ok:
        # Distinguish a bad passphrase from unexpected device errors.
        if ret == RetCode.WRONG_PASSWORD:
            raise CliException("Wrong hidden volume passphrase", support_hint=False)
        else:
            raise CliException(
                "Unexpected error unlocking the hidden volume: {}".format(str(ret))
            )
@click.command()
def close_hidden():
    """Lock the hidden volumes"""
    result = connect_nkstorage().lock_hidden_volume()
    # Any non-ok return code from the device is surfaced as a CLI error.
    if not result.ok:
        raise CliException("Error closing the hidden volume: {}".format(str(result)))
@click.command()
@click.argument(
    "slot",
    type=int,
)
@click.argument(
    "begin",
    type=int,
)
@click.argument("end", type=int)
def create_hidden(slot, begin, end):
    """Create a hidden volume

    SLOT is the slot used for the hidden volume (1-4)\n
    BEGIN is where the volume begins expressed in percent of total available storage (0-99)\n
    END is where the volume ends expressed in percent of total available storage (1-100)\n
    If the hidden volume passphrase is not in the environment variable NITROPY_HIDDEN_PASSPHRASE, it will be prompted from stdin
    """
    # Validate all arguments before prompting for the passphrase.
    # (Each branch raises, so independent `if`s replace the former elif chain;
    # `slot not in` replaces the non-idiomatic `not slot in`.)
    if slot not in (1, 2, 3, 4):
        raise CliException("Error: Slot must be between 1 and 4", support_hint=False)
    if begin < 0 or begin > 99:
        raise CliException("Error: Begin must be between 0 and 99", support_hint=False)
    if end < 1 or end > 100:
        raise CliException("Error: End must be between 1 and 100", support_hint=False)
    if begin >= end:
        # Message fixed to use the actual argument names (BEGIN/END).
        raise CliException(
            "Error: END must be strictly greater than BEGIN", support_hint=False
        )
    password = AskUser(
        "Hidden volume passphrase", envvar="NITROPY_HIDDEN_PASSPHRASE", hide_input=True
    ).ask()
    nks = connect_nkstorage()
    # The device API is 0-based while the CLI slot argument is 1-based.
    ret = nks.create_hidden_volume(slot - 1, begin, end, password)
    if not ret.ok:
        raise CliException("Error creating the hidden volume: {}".format(str(ret)))
class MemoryConstants:
    """Flash memory layout constants for the Nitrokey Storage (AT32UC3A3256S).

    Hex images are based at HEX_OFFSET while raw .bin dumps start at 0;
    callers subtract the offset to get flat addresses.
    """

    HEX_OFFSET = 0x80000000
    # Application firmware begins after the bootloader region.
    APPLICATION_DATA_START = 0x2000
    USER_DATA_START = 495 * 512  # 0x3DE00
    # user data end, last page is for the bootloader data (as per dfu-programmer manual)
    USER_DATA_END = 511 * 512  # 0x3FE00
def input_format(x: str):
    """Guess the IntelHex load format from a file name: 'hex' for *hex files, else 'bin'."""
    if x.endswith("hex"):
        return "hex"
    return "bin"
def empty_check_user_data(ih: IntelHex):
    """Return True when the user-data region of <ih> contains no data.

    Bytes equal to 0xFF (erased flash) or 0x00 count as empty.
    """
    return all(
        ih[addr] in (0xFF, 0x00)
        for addr in range(MemoryConstants.USER_DATA_START, MemoryConstants.USER_DATA_END)
    )
@click.command()
@click.argument("firmware", type=click.Path(exists=True))
def check(firmware: str):
    """Check if provided binary image contains user data in the proper region

    Use it on downloaded full image with `--force` flag, as in: \n
    $ dfu-programmer at32uc3a3256s read --bin --force > dump.bin
    """
    current_firmware_read = IntelHex()
    current_firmware_read.loadfile(firmware, format=input_format(firmware))
    # An all-empty user-data region means the dump was taken without user data.
    if empty_check_user_data(current_firmware_read):
        raise click.ClickException(
            f"{firmware}: Provided dumped binary image does not contain user data"
        )
    click.echo(f"{firmware}: User data seem to be present")
@click.command()
@click.argument("fw1_path", type=click.Path(exists=True))
@click.argument("fw2_path", type=click.Path(exists=True))
@click.argument("region", type=click.Choice(["application", "user"]))
@click.option("--max-diff", type=int, default=10)
def compare(fw1_path: str, fw2_path: str, region: str, max_diff: int):
    """Compare two binary images"""
    fw1 = IntelHex()
    fw1.loadfile(fw1_path, format=input_format(fw1_path))
    fw2 = IntelHex()
    fw2.loadfile(fw2_path, format=input_format(fw2_path))
    # Hex images are based at HEX_OFFSET, raw .bin dumps at 0; record a per-image
    # base offset so both can be addressed with the same flat address.
    offset = {}
    for f in [fw1, fw2]:
        offset[f] = 0
        if f.minaddr() >= MemoryConstants.HEX_OFFSET:
            offset[f] = MemoryConstants.HEX_OFFSET
    if fw1.minaddr() != fw2.minaddr():
        click.echo(
            f"Warning: different offsets found - this could make the operation fail: {hex(fw1.minaddr())} {hex(fw2.minaddr())}"
        )
    diff_count = 0
    non_empty_count = 0
    # Select the address window for the requested region.
    if region == "application":
        data_start = MemoryConstants.APPLICATION_DATA_START
        data_stop = MemoryConstants.USER_DATA_START
    elif region == "user":
        data_start = MemoryConstants.USER_DATA_START
        data_stop = MemoryConstants.USER_DATA_END
    else:
        raise click.ClickException(f"Wrong type")
    def geti(f, i):
        # Read byte i of image f, compensating for its base offset.
        return f[i + offset[f]]
    click.echo(f"Checking binary images in range {hex(data_start)}:{hex(data_stop)}")
    for i in range(data_start, data_stop):
        fw1_i = geti(fw1, i)
        fw2_i = geti(fw2, i)
        # 0xFF (erased) and 0x00 are both "empty" and considered equal to each other.
        data_equal = fw1_i == fw2_i or fw1_i in [0xFF, 0x00] and fw2_i in [0xFF, 0x00]
        diff_count += not data_equal
        non_empty_count += fw1_i not in [0xFF, 0x00]
        if not data_equal:
            click.echo(
                f"Binaries differ at {hex(i)} (page {i // 512}): {hex(fw1_i)} {hex(fw2_i)}"
            )
        # Bail out early once too many differences were reported.
        if diff_count > max_diff:
            raise click.ClickException(f"Maximum diff count reached")
    if diff_count > 0:
        raise click.ClickException(f"Binaries differ")
    if non_empty_count == 0:
        raise click.ClickException(f"Binaries contain no data")
    click.echo(f"Non-empty bytes count: {non_empty_count}")
    click.echo("Binary images are identical")
@click.command()
@click.argument("dumped_firmware", type=click.Path(exists=True))
@click.argument("new_firmware_file", type=click.Path(exists=True))
@click.argument("output", type=click.File("w"))
@click.option("--overlap", type=click.Choice(["error", "ignore"]), default="error")
def merge(
    dumped_firmware: str,
    new_firmware_file: str,
    output: click.File,
    overlap: str,
):
    """Simple tool to merge user data into the new firmware binary"""
    # Output is written with write_hex_file, so the extension must match.
    if not output.name.endswith("hex"):
        raise click.ClickException("Provided output path has to end in .hex")
    current_firmware_read = IntelHex()
    current_firmware_read.loadfile(
        dumped_firmware, format=input_format(dumped_firmware)
    )
    # Refuse to merge from a dump that has no user data to preserve.
    if empty_check_user_data(current_firmware_read):
        raise click.ClickException(
            "Provided dumped binary image does not contain user data"
        )
    new_firmware = IntelHex()
    new_firmware.loadfile(new_firmware_file, format=input_format(new_firmware_file))
    # Copy only the user-data slice of the dump into the new firmware image;
    # `overlap` controls what happens if both images define the same address.
    new_firmware.merge(
        current_firmware_read[
            MemoryConstants.USER_DATA_START : MemoryConstants.USER_DATA_END
        ],
        overlap=overlap,
    )
    new_firmware.write_hex_file(output)
    click.echo(f'Done. Results written to "{output.name}".')
# Sub-group for offline manipulation of dumped firmware images (merge/check/compare).
@click.group()
def user_data():
    """experimental: commands to check and manipulate user data in the downloaded binary images"""
    pass
# Wire all subcommands into their click groups.
user_data.add_command(merge)
user_data.add_command(check)
user_data.add_command(compare)
storage.add_command(list)
storage.add_command(enable_update)
storage.add_command(change_firmware_password)
storage.add_command(open_encrypted)
storage.add_command(close_encrypted)
storage.add_command(open_hidden)
storage.add_command(close_hidden)
storage.add_command(create_hidden)
storage.add_command(update)
storage.add_command(user_data)
|
from collections import deque
import collections
# Definition for a binary tree node.
# class TreeNode:
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution:
    def findClosestLeaf(self, root: TreeNode, k: int) -> int:
        """Return the value of the leaf closest (fewest edges) to the node valued k.

        Converts the tree into an undirected graph, then BFS from the node(s)
        with value k; the first leaf dequeued is the nearest one.
        """
        # Undirected adjacency list; note the root also gets an edge to None
        # (its "ghost" parent), which matters for the leaf test below.
        graph = collections.defaultdict(list)
        # We use a depth-first search to record in our graph each edge travelled from parent to node.
        def dfs(node, par=None):
            if node:
                graph[node].append(par)
                graph[par].append(node)
                dfs(node.left, node)
                dfs(node.right, node)
        dfs(root)
        # After, we use a breadth-first search on nodes that started with a value of k, so that we are visiting nodes in order of their distance to k. When the node is a leaf (it has one outgoing edge, where the root has a "ghost" edge to null), it must be the answer.
        queue = collections.deque(node for node in graph
                                  if node and node.val == k)
        seen = set(queue)
        while queue:
            node = queue.popleft()
            if node:
                # One edge only => leaf (its parent edge); BFS order guarantees
                # this is the closest leaf to k.
                if len(graph[node]) <= 1:
                    return node.val
                for nei in graph[node]:
                    if nei not in seen:
                        seen.add(nei)
                        queue.append(nei)
        # stack = deque([root])
        # while stack:
        #     node = stack.pop()
        #     print("node",node)
        #     if node:
        #         if node.val == k:
        #             if node.left:
        #                 return node.left.val
        #             elif node.right:
        #                 return node.right.val
        #         if node.left:
        #             stack.append(node.left)
        #         if node.right:
        #             stack.append(node.val)
        # return k
|
# 给定一个由整数组成的非空数组所表示的非负整数,在该数的基础上加一。
#
# 最高位数字存放在数组的首位, 数组中每个元素只存储一个数字。
#
# 你可以假设除了整数 0 之外,这个整数不会以零开头。
#
# 示例 1:
#
# 输入: [1,2,3]
# 输出: [1,2,4]
# 解释: 输入数组表示数字 123。
# 示例 2:
#
# 输入: [4,3,2,1]
# 输出: [4,3,2,2]
# 解释: 输入数组表示数字 4321。
class Solution(object):
    def plusOne(self, digits):
        """Add one to a number represented as a list of decimal digits.

        The list is mutated in place and returned (most significant digit first).

        :type digits: List[int]
        :rtype: List[int] (None for an empty input, matching the original contract)
        """
        n = len(digits)
        if n == 0:
            return None
        # Propagate the carry from the least significant digit.
        for i in range(n - 1, -1, -1):
            if digits[i] == 9:
                digits[i] = 0  # carry continues into the next digit
            else:
                digits[i] += 1
                return digits
        # Every digit was 9 (e.g. [9, 9] -> [1, 0, 0]); grow the number by one digit.
        # Bug fix: the original did `return a` here, referencing an undefined
        # global, so any all-9s input crashed with NameError.
        digits.insert(0, 1)
        return digits
# Quick manual check: 123 + 1 -> [1, 2, 4]
s = Solution()
digits = [1,2,3]
a = s.plusOne(digits)
print(a)
|
from django.db import models
from AdmSchedule.models import Schedule
class Config(models.Model):
    """Gym configuration row (unmanaged: table is created/owned outside Django)."""

    id = models.AutoField(db_column='ID', primary_key=True)  # Field name made lowercase.
    effectivedate = models.DateField(db_column='EffectiveDate')  # Field name made lowercase.
    capacitypercentage = models.DecimalField(db_column='CapacityPercentage', max_digits=5, decimal_places=4)  # Field name made lowercase.
    timeperday = models.JSONField(db_column='TimePerDay')  # Field name made lowercase.
    class Meta:
        managed = False
        db_table = 'Config'
    def get_absolute_url(self):
        """URL for this record, keyed by primary key."""
        return f'/{self.id}/'
    def __str__(self):
        return f'{self.id} - {self.effectivedate}'
class Gym(models.Model):
    """A gym location; each gym points at one Config row (unmanaged table)."""

    id = models.AutoField(db_column='ID', primary_key=True)  # Field name made lowercase.
    name = models.CharField(db_column='Name', max_length=45)  # Field name made lowercase.
    tuitioncost = models.DecimalField(db_column='TuitionCost', max_digits=15, decimal_places=2)  # Field name made lowercase.
    config = models.ForeignKey(Config, models.DO_NOTHING, db_column='Config_ID')  # Field name made lowercase.
    class Meta:
        managed = False
        db_table = 'Gym'
    def get_absolute_url(self):
        """URL for this record, keyed by primary key."""
        return f'/{self.id}/'
    def __str__(self):
        return f'{self.id} - {self.name}'
class Room(models.Model):
    """A room inside a Gym, with capacity and an associated Schedule (unmanaged table)."""

    id = models.AutoField(db_column='ID', primary_key=True)  # Field name made lowercase.
    name = models.CharField(db_column='Name', max_length=45)  # Field name made lowercase.
    capacity = models.IntegerField(db_column='Capacity')  # Field name made lowercase.
    gym = models.ForeignKey(Gym, models.DO_NOTHING, db_column='Gym_ID')  # Field name made lowercase.
    schedule = models.ForeignKey(Schedule, models.DO_NOTHING, db_column='Schedule_ID')  # Field name made lowercase.
    class Meta:
        managed = False
        db_table = 'Room'
    def get_absolute_url(self):
        """URL for this record, keyed by primary key."""
        return f'/{self.id}/'
    def __str__(self):
        return f'{self.id} - {self.name}'
import pprint
import sys
import os
# idea 引入自定义模块编译不通过,有红色的下划线,但是运行正常
# 解决办法:右键工程->open module settings->sdks->classpath->添加引入模块所在的路径
import searchTool
pprint.pprint(sys.path)  # print the module search path
# pprint.pprint(os.environ) # 打印系统环境变量
# pprint.pprint(searchTool.find_files("E:\\java\\ideaWorkspace\\pythonTest\\tmp","简书"))
# dict.update() demo: merge dect2 and dect3 into dect1.
dect = {}
dect1 = {"a":1,"b":2,"c":3}
dect2 = {"d":4,"e":5,"f":6}
dect3 = {"g":7,"h":8,"i":9}
dect1.update(dect2)
dect1.update(dect3)
print(dect1);
dect_keys = dect1.keys()
for key in dect_keys:
    print("%s=%s"%(key,dect1.get(key)))
a = ""
b = "1"
# NOTE(review): str.join returns a new string and the result is discarded here;
# strings are immutable, so `a` remains "" — presumably intentional in this demo.
a.join(b)
print("a=%s"%a)
print(30*50)
|
import sys
import cv2 as cv
import numpy as np
from matplotlib import pyplot as plt
import scipy as sp
from scipy import ndimage
class BasicImageProcessing():
    """Classic image-processing operations (OpenCV / NumPy / SciPy) on one image.

    The image is read and resized once in __init__; most methods return a new
    processed array and leave self.img untouched. The "chapter" markers group
    methods by course chapter (comments translated from Vietnamese).
    """
    def __init__(self, _filename, _width, _height):
        self.filename = _filename
        self.width = _width
        self.height = _height
        self.img = cv.imread(self.filename)
        self.img = cv.resize(self.img, (_width, _height))
        # NOTE(review): this is a reference to the same array as self.img, not a copy.
        self.img_original = self.img
        #self.img = cv.cvtColor(self.img, cv.COLOR_BGR2RGB)
    # chapter 3
    def original(self):
        """Reload the image from disk at the configured size and return it."""
        self.img=cv.imread(self.filename)
        self.img = cv.resize(self.img, (self.width, self.height))
        return self.img
    def negative(self):
        """Return the bitwise-inverted (negative) image."""
        res = ~self.img
        return res
    def histogram(self):
        """Return the image with its luma channel histogram-equalized (via YUV)."""
        img_yuv = cv.cvtColor(self.img_original, cv.COLOR_RGB2YUV)
        img_yuv[:, :, 0] = cv.equalizeHist(img_yuv[:, :, 0])
        res = cv.cvtColor(img_yuv, cv.COLOR_YUV2RGB)
        return res
    def log(self, thresh):
        """Log-transform the image, then binarize it at <thresh>.

        NOTE(review): np.log of zero-valued pixels yields -inf before the
        uint8 cast — confirm inputs have no zero pixels.
        """
        res_1 = np.uint8(np.log(self.img))
        res_2 = cv.threshold(res_1, thresh, 255, cv.THRESH_BINARY)[1]
        return res_2
    # def gamma(self, m):
    #     gamma = [m]
    #     for i in range(len(gamma)):
    #         self.img = 1 * (self.img + 1) ** gamma[i]
    def gamma(self, gamma):
        """Apply gamma correction via a 256-entry lookup table."""
        # build a lookup table mapping the pixel values [0, 255] to
        # their adjusted gamma values\
        invGamma = 1.0 / gamma
        table = np.array([((i / 255.0) ** invGamma) * 255
                          for i in np.arange(0, 256)]).astype("uint8")
        # apply gamma correction using the lookup table
        return cv.LUT(self.img, table)
    # chapter 2
    def scaling(self, size):
        """Resize by <size> percent with bicubic interpolation."""
        res = cv.resize(self.img, None, fx = size*0.01, fy = size*0.01, interpolation = cv.INTER_CUBIC)
        return res
    def translation(self, x, y):
        """Shift the image by (x, y) pixels with an affine warp."""
        rows, cols, steps = self.img.shape
        M = np.float32([[1, 0, x], [0, 1, y]])
        res = cv.warpAffine(self.img, M, (cols, rows))
        return res
    def rotation(self, angle):
        """Rotate around the image center; 360 - angle makes the slider clockwise."""
        rows, cols, steps = self.img.shape
        M = cv.getRotationMatrix2D((cols / 2, rows / 2), 360 - angle, 1)
        res = cv.warpAffine(self.img, M, (cols, rows))
        return res
    def affine(self, m):
        """Affine-warp using three fixed point pairs; <m> moves one source point."""
        rows, cols, ch = self.img.shape
        pts1 = np.float32([[50, m], [200, 50], [50, 200]])
        pts2 = np.float32([[10, 100], [200, 50], [100, 250]])
        M = cv.getAffineTransform(pts1, pts2)
        res = cv.warpAffine(self.img, M, (cols, rows))
        return res
    # chapter 3
    def avg(self, m):
        """Box (mean) blur with an m x m kernel."""
        res=cv.blur(self.img,(m,m))
        return res
    def gaussian(self, m):
        """Gaussian blur with an m x m kernel (m must be odd for OpenCV)."""
        res = cv.GaussianBlur(self.img,(m, m),0)
        return res
    def median(self, size):
        """Median blur with the given (odd) aperture size."""
        res = cv.medianBlur(self.img, size)
        return res
    def unMark(self):
        """Unsharp-mask sharpening: subtract a Gaussian blur from the image."""
        tmp=self.img
        #image=self.img
        gaussian= cv.GaussianBlur(tmp, (9, 9), 10.0)
        res = cv.addWeighted(tmp, 1.5, gaussian, -0.5, 0.5,tmp)
        #res = cv.addWeighted(self.img, 1.5, gaussian_3, -0.5, 0, self.img)
        return res
    def laplacian(self):
        """Sharpen by subtracting a 4-neighbour Laplacian response."""
        image=self.img
        sharpeningKernel = np.zeros((3, 3), np.float32)
        sharpeningKernel[0, 1] = 1.0
        sharpeningKernel[1, 0] = 1.0
        sharpeningKernel[1, 1] = -4.0
        sharpeningKernel[1, 2] = 1.0
        sharpeningKernel[2, 1] = 1.0
        imgfloat = image.astype(np.float32) / 255
        imgLaplacian = cv.filter2D(imgfloat, cv.CV_32F, sharpeningKernel)
        res = imgfloat - imgLaplacian
        # clamp to [0, 1] before converting back to uint8
        res[res < 0.0] = 0.0
        res[res > 1.0] = 1.0
        res = (res * 255).astype(np.uint8)
        return res
    def compositeLaplacian(self):
        """Sharpen with a single composite Laplacian kernel (center weight 5)."""
        image = self.img
        sharpeningKernel = np.zeros((3, 3), np.float32)
        sharpeningKernel[0, 1] = -1.0
        sharpeningKernel[1, 0] = -1.0
        sharpeningKernel[1, 1] = 5.0
        sharpeningKernel[1, 2] = -1.0
        sharpeningKernel[2, 1] = -1.0
        imgfloat = image.astype(np.float32) / 255
        imgLaplacian = cv.filter2D(imgfloat, cv.CV_32F, sharpeningKernel)
        res = imgLaplacian
        # clamp to [0, 1] before converting back to uint8
        res[res < 0.0] = 0.0
        res[res > 1.0] = 1.0
        res = (res * 255).astype(np.uint8)
        return res
    def highBoost(self, A):
        """High-boost filtering: all-(-1) kernel with center weight A, added back."""
        image = self.img
        sharpeningKernel = np.zeros((3, 3), np.float32)
        sharpeningKernel[0, 0] = -1.0
        sharpeningKernel[0, 1] = -1.0
        sharpeningKernel[0, 2] = -1.0
        sharpeningKernel[1, 0] = -1.0
        sharpeningKernel[1, 1] = A
        sharpeningKernel[1, 2] = -1.0
        sharpeningKernel[2, 0] = -1.0
        sharpeningKernel[2, 1] = -1.0
        sharpeningKernel[2, 2] = -1.0
        imgfloat = image.astype(np.float32) / 255
        imgHighBoost = cv.filter2D(imgfloat, cv.CV_32F, sharpeningKernel)
        res=imgfloat+imgHighBoost
        # clamp to [0, 1] before converting back to uint8
        res[res < 0.0] = 0.0
        res[res > 1.0] = 1.0
        res = (res * 255).astype(np.uint8)
        return res
    # chapter 5
    def fourier(self):
        """Show the log magnitude spectrum of the 2D FFT.

        NOTE(review): unlike the other methods this displays via cv.imshow and
        returns None (the return is commented out).
        """
        img_gray = cv.cvtColor(self.img, cv.COLOR_RGB2GRAY)
        f = np.fft.fft2(img_gray)
        fshift = np.fft.fftshift(f)
        print(1)
        magnitude_spectrum = 20 * np.log(np.abs(fshift))
        print(2)
        #magnitude_spectrum = magnitude_spectrum.astype(np.uint8)
        print(3)
        cv.imshow("x", magnitude_spectrum)
        #return magnitude_spectrum
    def highPassGaussian(self):
        """High-pass filter: image minus its Gaussian low-pass, inverted."""
        data = np.array(self.img, dtype=float)
        lowpass = ndimage.gaussian_filter(data, 3)
        gauss_highpass = data - lowpass
        gauss_highpass = np.uint8(gauss_highpass)
        gauss_highpass = ~gauss_highpass
        return gauss_highpass
    # chapter 8
    def Canny(self):
        """Canny edge detection on the grayscale image (thresholds 100/200)."""
        image = cv.cvtColor(self.img_original, cv.COLOR_BGR2GRAY)
        res = cv.Canny(image, 100, 200)
        return res
    # chapter 7
    def morphology(self, m):
        """Dilate m times when m > 0, otherwise erode |m| times (different kernels)."""
        if m > 0:
            kernel = np.ones((2, 6), np.uint8)
            res = cv.dilate(self.img, kernel, iterations=m)
        else:
            kernel = np.ones((4, 7), np.uint8)
            res = cv.erode(self.img, kernel, iterations=m*-1)
        return res
    def open(self):
        """Morphological opening (erode then dilate) with a 9x9 ellipse."""
        kernel = cv.getStructuringElement(cv.MORPH_ELLIPSE, (9, 9))
        res = cv.morphologyEx(self.img, cv.MORPH_OPEN, kernel)
        return res
    def close(self):
        """Morphological closing (dilate then erode) with a 9x9 ellipse."""
        kernel = cv.getStructuringElement(cv.MORPH_ELLIPSE, (9, 9))
        res = cv.morphologyEx(self.img, cv.MORPH_CLOSE, kernel)
        return res
    # NOTE: marked as broken by the original author ("Lỗi" = bug)
    def hitmis(self):
        """Hit-or-miss transform with a plus-shaped kernel (known-broken, see note)."""
        kernel = np.array(([0, 1, 0], [1, -1, 1], [0, 1, 0]))
        res = cv.morphologyEx(self.img, cv.MORPH_HITMISS, kernel)
        return res
    def gradient(self):
        """Morphological gradient (dilation minus erosion) with a 5x5 kernel."""
        kernel = np.ones((5, 5), np.uint8)
        res = cv.morphologyEx(self.img, cv.MORPH_GRADIENT, kernel)
        return res
    def morboundary(self):
        """Boundary extraction: image minus its erosion."""
        se = np.ones((3, 3), np.uint8)
        e1 = self.img - cv.erode(self.img, se, iterations=1)
        res = e1
        return res
    def convex(self):
        """Draw convex hulls of thresholded contours on a black canvas.

        NOTE(review): the 3-value cv.findContours unpacking is the OpenCV 3 API;
        OpenCV 4 returns two values — confirm the installed version.
        """
        tmp = self.img
        image = cv.cvtColor(tmp, cv.COLOR_BGR2GRAY)
        blur = cv.blur(image, (3, 3))
        ret, thresh = cv.threshold(blur, 50, 255, cv.THRESH_BINARY)
        im2, contours, hierarchy = cv.findContours(thresh, cv.RETR_TREE, cv.CHAIN_APPROX_SIMPLE)
        hull = []
        # calculate points for each contour
        for i in range(len(contours)):
            # creating convex hull object for each contour
            hull.append(cv.convexHull(contours[i], False))
        # create an empty black image
        res = np.zeros((thresh.shape[0], thresh.shape[1], 3), np.uint8)
        # draw contours and hull points
        for i in range(len(contours)):
            color_contours = (0, 255, 0) # green - color for contours
            color = (255, 0, 0) # blue - color for convex hull
            # draw ith contour
            # cv2.drawContours( self.image, contours, i, color_contours, 1, 8, hierarchy)
            # draw ith convex hull object
            cv.drawContours(res, hull, i, color, 1, 8)
        return res
    # chapter 8
    def sobelX(self):
        """Sobel derivative along x (ksize=7)."""
        sobelImgX = cv.Sobel(self.img, cv.CV_8U, 1, 0, ksize=7)
        return sobelImgX
    def sobelY(self):
        """Sobel derivative along y (ksize=5)."""
        sobelImgY = cv.Sobel(self.img, cv.CV_8U, 0, 1, ksize=5)
        return sobelImgY
    def lapcian(self):
        """Laplacian edge response (name kept as-is; callers use `lapcian`)."""
        lapcian = cv.Laplacian(self.img, cv.CV_8U)
        return lapcian
|
#!/usr/bin/env python3
import sys,os
# Resolve paths relative to this file; profile data lives in the working directory.
Code_path = os.path.dirname(os.path.abspath(__file__))
Project_path = os.path.dirname(Code_path)
serverside = os.path.join(Code_path, "server")
datafile = os.path.join(os.getcwd(), "profile.dat")
#-------------------------------------------------------------------------------------------
# create app
import flask
# sys.frozen/_MEIPASS is the frozen-bundle convention (presumably PyInstaller);
# templates/static are unpacked next to the bundle at runtime.
if getattr(sys, 'frozen', False):
    template_folder = os.path.join(sys._MEIPASS,'templates')
    static_folder = os.path.join(sys._MEIPASS,'static')
    app = flask.Flask(__name__, template_folder = template_folder,static_folder = static_folder)
else:
    app = flask.Flask(__name__)
#-------------------------------------------------------------------------------------------
# NOTE(review): a fresh random key per process invalidates all sessions on restart.
app.config['SECRET_KEY'] = os.urandom(24)
#----------------------------------------------------------------------
# encryption API
from rudeencrypt import Encryption as PyDictFileEncy
#----------------------------------------------------------------------
app.config['ENCRYPTION_CLASS'] = PyDictFileEncy
app.config['PROFILE_DATA_FILE'] = datafile
app.config['DATA_CONTAINER'] = {}
#-------------------------------------------------------------------------------------------
# time js
# from flask_moment import Moment
# from static_moment.flask_moment import Moment
# moment = Moment(app)
#-------------------------------------------------------------------------------------------
# markdown support
import flaskext.markdown
flaskext.markdown.Markdown(app)
from server.permission import permission
from server.FUM import FUM
app.config['fun_FUM'] = FUM
#-------------------------------------------------------------------------------------------
# render all the pages
from Pages import *
#--------------------------------------------------------------------------------------------
# Run
if __name__ == '__main__':
    host = '0.0.0.0'
    port = 4999
    print('''
    ╔═════════════════════════════════════════════════════
    ║ profile={}
    ║┌────────────────────────────┐
    ║ http://localhost:{}
    ║└────────────────────────────┘
    ╚═════════════════════════════════════════════════════
    '''.format(datafile,port))
    # NOTE(review): debug=True enables the Werkzeug debugger; not for production.
    app.run(host=host, port=port, debug= True )
    # linux
    # webbrowser.open(url='http://0.0.0.0:4999', new=1)
|
'''
Prepare images for display/processing. We want to scale everything down so we
dont take up a ton of space on the disk. It is not necessary to run this module unless
the raw images are changed.
'''
import cv2
import os
from glob import glob
def process(filename):
    """Downscale one raw image to 64x48 and write it as a quality-70 JPEG into ./images."""
    source = cv2.imread(filename)
    thumbnail = cv2.resize(source, (64, 48))
    destination = './images/' + os.path.basename(filename)
    cv2.imwrite(destination, thumbnail, [cv2.IMWRITE_JPEG_QUALITY, 70])
# Batch-convert every raw JPEG, with an in-place progress counter.
filenames = glob('./raw-images/*.JPEG')
for i, filename in enumerate(filenames):
    process(filename)
    print(f'Processed {i+1}/{len(filenames)}', end='\r')  # \r keeps progress on one line
print('\nDone!')
"""
Tests for adapters.py
"""
import json
from django.contrib.auth.models import User
from django.test import TestCase
from main.adapters import adapt_model_to_frontend
from main.models import Chromosome
from main.models import Project
from main.models import ReferenceGenome
from main.models import Variant
from main.models import VariantAlternate
class TestAdapters(TestCase):
    """Tests for adapt_model_to_frontend (main.adapters)."""

    def setUp(self):
        """Override.

        Creates a user, a project owned by that user, a reference genome in the
        project, and one chromosome for the genome.
        """
        TEST_USERNAME = 'testuser'
        TEST_PASSWORD = 'password'
        TEST_EMAIL = 'test@example.com'
        # NOTE(review): user.get_profile() below was removed in Django 1.7 —
        # presumably this codebase pins an older Django; confirm.
        user = User.objects.create_user(TEST_USERNAME, password=TEST_PASSWORD,
                email=TEST_EMAIL)
        TEST_PROJECT_NAME = 'recoli'
        test_project = Project.objects.create(
                title=TEST_PROJECT_NAME,
                owner=user.get_profile())
        REF_GENOME_1_LABEL = 'mg1655'
        self.ref_genome_1 = ReferenceGenome.objects.create(
                label=REF_GENOME_1_LABEL, project=test_project)
        self.chromosome = Chromosome.objects.create(
            reference_genome=self.ref_genome_1,
            label='Chromosome',
            num_bases=9001)

    def test_adapters__one_level(self):
        """Test adapting to a single level.

        Verifies the JSON payload structure: field_config, a one-element
        obj_list containing every declared field, plus an 'href' link.
        """
        fe_ref_genomes = json.loads(adapt_model_to_frontend(
                ReferenceGenome, {'id': self.ref_genome_1.id}))
        self.assertTrue('field_config' in fe_ref_genomes)
        self.assertTrue('obj_list' in fe_ref_genomes)
        self.assertEqual(1, len(fe_ref_genomes['obj_list']))
        ref_genome_1_fe = fe_ref_genomes['obj_list'][0]
        for field in ReferenceGenome.get_field_order():
            self.assertTrue(field['field'] in ref_genome_1_fe)
        self.assertTrue('href' in ref_genome_1_fe)
|
import sqlite3
from employee import Employee
conn = sqlite3.connect(':memory:')
c = conn.cursor()
c.execute("""CREATE TABLE employees(
first text,
last text,
pay integer
)""")
def insert_emp(emp):
    """Insert <emp> (object with .first/.last/.pay) inside a transaction."""
    # `with conn` commits on success / rolls back on exception.
    with conn:
        c.execute("INSERT INTO employees VALUES(:first, :last, :pay)", {'first': emp.first, 'last': emp.last, 'pay': emp.pay})
def get_emps_by_name(lastname):
    """Return all employee rows (first, last, pay tuples) with the given last name."""
    c.execute("SELECT * FROM employees WHERE last=:last", {'last': lastname})
    return c.fetchall()
def update_emps(emp, pay):
    """Set <pay> for every row matching emp's first and last name (transactional)."""
    with conn:
        c.execute("UPDATE employees SET pay=:pay WHERE first=:first AND last=:last", {'first':emp.first, 'last':emp.last, 'pay': pay})
def remove_emp(emp):
    """Delete every row matching emp's first and last name (transactional)."""
    with conn:
        c.execute("DELETE FROM employees WHERE first=:first AND last=:last", {'first': emp.first, 'last': emp.last})
# Demo: insert two employees, query, update one, remove the other, re-query.
emp_1 = Employee('Darshan', 'Thapa', 90000)
emp_2 = Employee('Dipika', 'Thapa', 80000)
insert_emp(emp_1)
insert_emp(emp_2)
emps = get_emps_by_name('Thapa')
print(emps)
update_emps(emp_2, 85000)
remove_emp(emp_1)
emps = get_emps_by_name('Thapa')
print(emps)
conn.close()
|
# -*- coding: utf-8 -*-
import sys
import biblioteca
def menu():
    """Interactive measurement-conversion menu (Portuguese UI).

    Loops by calling itself recursively at the end; option '0' exits the
    process via sys.exit(). NOTE(review): several labels do not match the
    helper called (e.g. "pes para jardas" calls biblioteca.pesMetros) — confirm
    against biblioteca. The distance is prompted even when option 0 is chosen.
    """
    print ("Conversor de Medidas")
    print (" ")
    print ("Digite a opção desejada: ")
    print (" ")
    print ("1 Conversão de pes para jardas\n")
    print ("2 Conversão de polegadas para centimetros\n")
    print ("3 Conversão de jardas para metros\n")
    print ("4 Conversão de milhas para kms\n")
    print ("0 para sair")
    print (" ")
    c = input("Digite a opção desejada: ")
    d = float(input("Digite a distância: "))
    if c == '1':
        print("Conversão de pes para jardas : ", biblioteca.pesMetros(d))
    elif c == '2':
        print("Conversão de polegadas para centimetros: ", biblioteca.polegadasCentimetro(d))
    elif c == '3':
        print("Conversor de jardas para metros ", biblioteca.jardasMetros(d))
    elif c =='4':
        print("Converso de milhas para kms: ", biblioteca.milhasQuilometros(d))
    elif c == '0':
        sys.exit()
    else:
        print("Digite a opção certa!")
    # Unconditional recursion keeps the menu running until '0' is entered.
    menu()
# Start the menu loop. NOTE(review): menu() already re-invokes itself, so the
# second call here only runs if the first recursion chain ever returns.
menu()
menu()
from certification_script.tests import base
from certification_script.fuel_rest_api import with_timeout
class OSTFTests(base.BaseTests):
    """Driver for Fuel OSTF test runs over the REST API (Python 2 code)."""

    def run_test(self, test_name):
        # Launch a single OSTF testset for the cluster under test.
        data = {'testset': test_name,
                'tests': [],
                'metadata': {'cluster_id': self.cluster_id}}
        return self.conn.post('ostf/testruns', [data])

    def run_tests(self, tests):
        # Generator: start each testset, block until it finishes, then yield
        # its final testrun record.
        for test_name in tests:
            print test_name
            run_id = self.run_test(test_name)[0]['id']

            # NOTE(review): nested helper re-created per iteration; it takes
            # `self`/`run_id` explicitly because with_timeout invokes it as a
            # plain function, not as a bound method.
            def check_ready(self, run_id):
                status = self.conn.get('/ostf/testruns/{}'.format(run_id))
                return status['status'] == 'finished'

            wt = with_timeout(self.timeout, "run test " + test_name)
            wt(check_ready)(self, run_id)
            yield self.conn.get('/ostf/testruns/{}'.format(run_id))

    def get_available_tests(self):
        # Ids of the testsets available for this cluster.
        testsets = self.conn.get('/ostf/testsets/{}'.format(self.cluster_id))
        return [testset['id'] for testset in testsets]
import json
class GetDelivery:
    """Plain value object for a delivery record.

    Attribute names deliberately mirror the constructor parameters so that
    ``from_json`` can forward a decoded JSON object straight in as kwargs.
    """

    def __init__(self, deliveryID, merchantOrderID, quote, sender, recipient,
                 cashOnDelivery, schedule, status, courier, timeline,
                 trackingURL, advanceInfo):
        """Store every field verbatim on the instance."""
        fields = dict(
            deliveryID=deliveryID, merchantOrderID=merchantOrderID,
            quote=quote, sender=sender, recipient=recipient,
            cashOnDelivery=cashOnDelivery, schedule=schedule, status=status,
            courier=courier, timeline=timeline, trackingURL=trackingURL,
            advanceInfo=advanceInfo,
        )
        for name, value in fields.items():
            setattr(self, name, value)

    @classmethod
    def from_json(cls, json_str):
        """Build a GetDelivery from a JSON object string whose keys match the constructor."""
        return cls(**json.loads(json_str))
# -*- coding: utf-8 -*-
import os
import sys
from os import path
from xmldiff import main
sys.path.append(path.join(path.dirname(__file__), '..', 'ccelib'))
from ccelib.v1_00 import leiauteCCe as cce
def test_in_out_leiauteCCe():
    """Round-trip every sample CC-e XML file through the parser/exporter and
    assert the output is identical to the input (empty xmldiff).

    Fix: the local variable was named ``path``, shadowing the module-level
    ``from os import path`` import; renamed to ``sample_dir``.
    """
    sample_dir = 'tests/cce/v1_00/leiauteCCe'
    for filename in os.listdir(sample_dir):
        inputfile = '%s/%s' % (sample_dir, filename,)
        obj = cce.parse(inputfile)
        outputfile = 'tests/output.xml'
        with open(outputfile, 'w') as f:
            obj.export(f, level=0, name_='evento',
                       namespacedef_='xmlns="http://www.portalfiscal.inf.br/nfe"')
        diff = main.diff_files(inputfile, outputfile)
        print(diff)
        assert len(diff) == 0
def test_init_all():
    """Smoke-test: every class exported by the module can be instantiated bare."""
    for module in (cce,):
        for class_name in module.__all__:
            klass = getattr(module, class_name)
            if issubclass(klass, module.GeneratedsSuper):
                klass()
|
# -*- coding: utf-8 -*-
import logging
import numpy as np
from network import NetworkFunction
from hobotrl.tf_dependent.distribution import NNDistribution, DiscreteDistribution, NormalDistribution
from core import Policy
class GreedyPolicy(Policy):
    """Always picks the action with the highest Q-value."""

    def __init__(self, q_function):
        """
        :param q_function:
        :type q_function: NetworkFunction
        """
        super(GreedyPolicy, self).__init__()
        self.q_function = q_function
        # Action count is inferred from the Q head's last output dimension.
        self._num_actions = q_function.output().op.shape.as_list()[-1]

    def act(self, state, **kwargs):
        """Return argmax_a Q(state, a)."""
        batched = np.asarray(state)[np.newaxis, :]
        return np.argmax(self.q_function(batched)[0])
class EpsilonGreedyPolicy(Policy):
    """Random action with probability epsilon; greedy w.r.t. Q otherwise."""

    def __init__(self, q_function, epsilon, num_actions):
        """
        :param q_function:
        :type q_function: NetworkFunction
        :param epsilon:
        :param num_actions:
        """
        super(EpsilonGreedyPolicy, self).__init__()
        self.q_function = q_function
        self._epsilon = epsilon
        self._num_actions = num_actions

    def act(self, state, exploration=True, **kwargs):
        """Epsilon-greedy action; pass exploration=False for pure greedy."""
        if exploration and np.random.rand() < self._epsilon:
            return np.random.randint(self._num_actions)
        q_values = self.q_function(np.expand_dims(np.asarray(state), axis=0))[0]
        return np.argmax(q_values)
class WrapEpsilonGreedy(Policy):
    """Wraps another policy with epsilon-greedy exploration.

    With probability epsilon a random action is drawn: uniform in (-1, 1)
    per dimension for continuous spaces, a random integer otherwise.
    """

    def __init__(self, policy, epsilon, num_actions, is_continuous):
        super(WrapEpsilonGreedy, self).__init__()
        self._policy = policy
        self._epsilon = epsilon
        self._num_actions = num_actions
        self._is_continuous = is_continuous

    def act(self, state, exploration=True, **kwargs):
        explore = exploration and np.random.rand() < self._epsilon
        if not explore:
            return self._policy.act(state, **kwargs)
        if self._is_continuous:
            # uniform in (-1, 1) for each action dimension
            return (np.random.rand(self._num_actions) - 0.5) * 2
        return np.random.randint(self._num_actions)
class OUNoise(object):
    """Ornstein-Uhlenbeck noise process (mean-reverting random walk)."""

    def __init__(self, shape, mu, theta, sigma):
        """
        :param shape:
        :param mu: mean of noise
        :param theta: 1 - momentum of noise
        :param sigma: scale of noise
        """
        self._shape = shape
        self._mu = mu
        self._theta = theta
        self._sigma = sigma
        # state starts at the mean
        self._x = np.ones(self._shape) * self._mu

    def tick(self):
        """Advance the process one step; returns the (shared) state array."""
        drift = self._theta * (self._mu - self._x)
        shock = self._sigma * np.random.randn(*self._shape)
        self._x += drift + shock
        return self._x
class OUNoise2(object):
    """OU-style noise parameterized by momentum; starts from a random state."""

    def __init__(self, shape, mu, momentum, scale):
        """
        :param shape:
        :param mu: mean of noise
        :param momentum: momentum of noise
        :param scale: scale of noise
        """
        self._shape = shape
        self._mu = mu
        self._scale = scale
        self._momentum = momentum
        # initial state: mean plus one full-scale random shock
        self._x = np.ones(self._shape) * self._mu + self._scale * np.random.randn(*self._shape)

    def tick(self):
        """Advance one step and return the new state."""
        carry = self._momentum * (self._x - self._mu)
        innovation = self._scale * (1.0 - self._momentum) * np.random.randn(*self._shape)
        self._x = self._mu + carry + innovation
        return self._x
class OUExplorationPolicy(Policy):
    """Deterministic action policy plus Ornstein-Uhlenbeck exploration noise.

    Fix: the base-class initializer was never invoked; every sibling policy
    in this module calls ``super().__init__()``, so this class now does too.
    """

    def __init__(self, action_function, mu, theta, sigma, noise_type=OUNoise):
        """
        :param action_function:
        :type action_function: NetworkFunction
        :param mu: noise mean
        :param theta: noise mean-reversion rate
        :param sigma: noise scale
        :param noise_type: class used to create the noise process (OUNoise / OUNoise2)
        """
        super(OUExplorationPolicy, self).__init__()
        self._action_function = action_function
        # action dimensionality from the network's last output axis
        self._action_shape = [action_function.output().op.shape.as_list()[-1]]
        self._ou_noise = noise_type(self._action_shape, mu, theta, sigma)

    @staticmethod
    def action_add(action, noise):
        """
        regularize action + noise into (-1, 1)

        The noise is squashed by tanh and scaled by the remaining margin to
        the saturation bound in the noise's direction, so the sum cannot
        leave the valid action range.
        :param action:
        :param noise:
        :return:
        """
        epsilon = 1e-8  # guards against division by zero when the margin vanishes
        margin = np.abs(np.sign(noise) - action)
        return action + margin * np.tanh(noise / (margin + epsilon))

    def act(self, state, **kwargs):
        """Network action for *state*, perturbed by the noise process."""
        action = self._action_function(np.asarray(state)[np.newaxis, :])[0]
        noise = self._ou_noise.tick()
        action0 = self.action_add(action, noise)
        # logging.warning("action: %s + %s -> %s", action, noise, action0)
        return action0
class StochasticPolicy(Policy):
    """
    returns action according to probability distribution.
    """

    def __init__(self, distribution):
        """
        :param distribution:
        :type distribution NNDistribution
        """
        super(StochasticPolicy, self).__init__()
        self._distribution = distribution

    def act(self, state, **kwargs):
        """Sample one action from the distribution conditioned on *state*."""
        batched_state = np.asarray(state)[np.newaxis, :]
        return self._distribution.sample_run(batched_state)[0]
class GreedyStochasticPolicy(Policy):
    """
    returns action with the most probability in prob. distribution.
    """

    def __init__(self, distribution):
        """
        :param distribution:
        :type distribution NNDistribution
        """
        super(GreedyStochasticPolicy, self).__init__()
        self._distribution = distribution
        # A Normal distribution implies a continuous action space: the mode
        # is the mean.  Otherwise take the argmax of the discrete pmf.
        self._is_continuous = isinstance(distribution, NormalDistribution)

    def act(self, state, **kwargs):
        if self._is_continuous:
            return self._distribution.mean_run([state])[0]
        probabilities = self._distribution.dist_run([state])[0]
        return np.argmax(probabilities)
class KeepEpsilonPolicy(Policy):
    """Epsilon-greedy policy that keeps (repeats) each chosen action for n steps.

    Fix: the original forwarded (q_function, epsilon, num_actions) to
    ``Policy.__init__`` even though every other policy in this module calls
    the base initializer with no arguments — those extra arguments would
    raise a TypeError.  ``n_distribution`` is kept in the signature for
    interface compatibility but is currently unused.
    """

    def __init__(self, n, n_distribution, q_function, epsilon, num_actions):
        super(KeepEpsilonPolicy, self).__init__()
        self._q_function, self._epsilon, self._num_actions = q_function, epsilon, num_actions
        self._n = n
        # _countdown: remaining steps the last action is still repeated for
        self._last_action, self._countdown = None, 0

    def act(self, state, exploration=True, **kwargs):
        """
        if exploration=False, do not keep previous action. True otherwise.
        :param state:
        :param exploration:
        :param kwargs:
        :return:
        """
        if not exploration:
            # pure greedy, no action keeping
            q_values = self._q_function(np.asarray(state)[np.newaxis, :])[0]
            return np.argmax(q_values)
        if self._countdown > 0:
            self._countdown -= 1
        else:
            if np.random.rand() < self._epsilon:
                # explore: random action
                self._last_action = np.random.randint(self._num_actions)
            else:
                q_values = self._q_function(np.asarray(state)[np.newaxis, :])[0]
                self._last_action = np.argmax(q_values)
            self._countdown = self._n - 1
        return self._last_action

    def set_n(self, n):
        """Change how many steps each chosen action is kept for."""
        self._n = n
|
import copy
import sys
import matplotlib.pyplot as plt
import numpy as np
import re
from glob import glob
import cv2
np.set_printoptions(threshold=sys.maxsize)
def read_pgm(filename, byteorder='>'):
    """Read a raw (P5) PGM file into a 2-D numpy array.

    Fix: the header regex used ``\\s``/``\\d`` escapes inside non-raw byte
    literals, which Python flags as invalid escape sequences (a
    DeprecationWarning today, an error in future versions).  The pattern
    bytes are unchanged; the literals are now raw ``rb"..."``.

    :param filename: path to the PGM file
    :param byteorder: '>' or '<'; used only when maxval >= 256 (16-bit samples)
    :raises ValueError: if the file is not a raw P5 PGM
    """
    with open(filename, 'rb') as f:
        buffer = f.read()
    try:
        header, width, height, maxval = re.search(
            rb"(^P5\s(?:\s*#.*[\r\n])*"
            rb"(\d+)\s(?:\s*#.*[\r\n])*"
            rb"(\d+)\s(?:\s*#.*[\r\n])*"
            rb"(\d+)\s(?:\s*#.*[\r\n]\s)*)", buffer).groups()
    except AttributeError:
        raise ValueError("Not a raw PGM file: '%s'" % filename)
    # 8-bit data is unsigned bytes; larger maxval means 16-bit samples in
    # the requested byte order.  Pixel data starts right after the header.
    return np.frombuffer(buffer,
                         dtype='u1' if int(maxval) < 256 else byteorder + 'u2',
                         count=int(width) * int(height),
                         offset=len(header)
                         ).reshape((int(height), int(width)))
def print_original_image(index):
    """Display the *index*-th PGM image found under the working tree; return it."""
    pgm_files = glob('**/*.pgm', recursive=True)
    img = read_pgm(pgm_files[index])
    plt.imshow(img, cmap='bone')
    plt.show()
    return img
def print_binary_image(index):
    """Binarize (cutoff 15) and display the *index*-th PGM image; return the mask."""
    pgm_files = glob('**/*.pgm', recursive=True)
    img = read_pgm(pgm_files[index])
    ret, thresh = cv2.threshold(img, 15, 255, cv2.THRESH_BINARY)
    plt.imshow(thresh, 'gray')
    plt.show()
    return thresh
def print_processed_image(processed_thresh, img):
    """Black out every pixel of *img* where *processed_thresh* is 0, then display.

    Improvements: works for any image shape (the original hard-coded a
    1024x1024 double loop) and applies the mask with one vectorized numpy
    operation instead of ~1M Python iterations.  Behavior for 1024x1024
    inputs is unchanged; the caller's image is never mutated.
    """
    mask = np.asarray(processed_thresh) == 0
    processed_img = np.array(img, copy=True)  # replaces the old deepcopy
    processed_img[mask] = 0
    plt.imshow(processed_img, cmap='bone')
    plt.show()
|
# some colors definitions
# Each color is an (R, G, B) tuple, 0-255 per channel (the bare commas make
# these tuples without parentheses).
black = 0, 0, 0
yellow = 255, 255, 0
green = 0, 255, 0
blue = 0, 0, 150  # deliberately dimmer than full 255
red = 255, 0, 0
dimred = 125, 0, 0
white = 255, 255, 255
gray = 125, 125, 125
|
# ---
# jupyter:
# jupytext:
# formats: ipynb,py:light
# text_representation:
# extension: .py
# format_name: light
# format_version: '1.3'
# jupytext_version: 1.0.1
# kernelspec:
# display_name: Python 3
# language: python
# name: python3
# ---
# +
import pandas as pd

# Training data includes the target column `label`; test data does not.
train = pd.read_csv('./input/train.csv')
test = pd.read_csv('./input/test.csv')
# -
print(train.shape)
train.head()  # display-only; a no-op when run as a plain script
print(test.shape)
test.head()
train_temp = train.copy()
# Features = everything except the label; target = the label column.
Xtrain = train_temp.drop(columns=['label'], axis=1)
ytrain = train.label
from sklearn.ensemble import RandomForestRegressor
from sklearn.pipeline import make_pipeline
from sklearn.impute import SimpleImputer
# my_pipeline = make_pipeline(SimpleImputer(), RandomForestRegressor())
from sklearn.model_selection import cross_val_score
# scores = cross_val_score(my_pipeline, Xtrain, ytrain, cv=5,
# scoring='neg_mean_absolute_error')
# print(scores)
# +
# print('Mean Absolute Error %2f' %(-1 * scores.mean()))
# +
#Evaluation Metrics
from sklearn.metrics import mean_squared_error, make_scorer
def rmse(predict, actual):
    """Root-mean-squared error between *predict* and *actual*.

    Fix: the original body ignored both parameters and always compared the
    globals ``ytrain`` and ``y_pred``, so the scorer produced a value
    unrelated to the fold being evaluated.  (RMSE is symmetric, so the
    argument order passed by make_scorer does not matter.)
    """
    return mean_squared_error(actual, predict) ** 0.5
rmse_score = make_scorer(rmse)  # scorer wrapper so rmse works with cross_val_score

def score(model):
    """Mean 5-fold cross-validated RMSE of *model* on the global Xtrain/ytrain."""
    score = cross_val_score(model, Xtrain, ytrain, cv=5,
                            scoring=rmse_score).mean()
    return score

# model name -> [meanCV, mse, mae, rmse, r2]
scores = {}
# -
from sklearn.metrics import mean_absolute_error, r2_score
# #### Model training & evaluation
#
# Refactor: the eight per-model cells below previously repeated the same
# ~13 lines of metric printing/recording.  That boilerplate now lives in
# _evaluate(); printed output and the contents of `scores` are unchanged.
# (The throwaway globals meanCV/mse/mae/rmse/r2/y_pred no longer leak into
# module scope; nothing later in the file reads them.)

def _evaluate(title, key, model):
    """Print the CV score and training-set error metrics for a fitted *model*
    and record [meanCV, mse, mae, rmse, r2] in `scores` under *key*."""
    y_pred = model.predict(Xtrain)
    print('')
    print('####### %s #######' % title)
    meanCV = score(model)
    print('Mean CV Score : %.4f' % meanCV)
    mse = mean_squared_error(ytrain, y_pred)
    mae = mean_absolute_error(ytrain, y_pred)
    rmse = mean_squared_error(ytrain, y_pred) ** 0.5
    r2 = r2_score(ytrain, y_pred)
    scores.update({key: [meanCV, mse, mae, rmse, r2]})
    print('')
    print('MSE(RSS) : %0.4f ' % mse)
    print('MAE : %0.4f ' % mae)
    print('RMSE : %0.4f ' % rmse)
    print('R2 : %0.4f ' % r2)

# #### Simple Linear Regression
from sklearn.linear_model import LinearRegression

lr_model = LinearRegression(n_jobs=-1)
lr_model.fit(Xtrain, ytrain)
_evaluate('Linear Regression', 'OLS', lr_model)

# #### Lasso Regression
from sklearn.preprocessing import RobustScaler
from sklearn.linear_model import Lasso

model_lasso = Lasso(random_state=42, alpha=0.00035)
lr_lasso = make_pipeline(RobustScaler(), model_lasso)
lr_lasso.fit(Xtrain, ytrain)
_evaluate('Lasso Regression', 'Lasso', lr_lasso)

# #### Ridge Regression
from sklearn.linear_model import Ridge

lr_ridge = make_pipeline(RobustScaler(),
                         Ridge(random_state=42, alpha=0.002))
lr_ridge.fit(Xtrain, ytrain)
_evaluate('Ridge Regression', 'Ridge', lr_ridge)

# #### ElasticNet Regression
from sklearn.linear_model import ElasticNet

lr_elasticnet = make_pipeline(RobustScaler(),
                              ElasticNet(alpha=0.02, l1_ratio=0.7, random_state=42))
lr_elasticnet.fit(Xtrain, ytrain)
_evaluate('ElasticNet Regression', 'ElasticNet', lr_elasticnet)

# #### KNN Regression
from sklearn.neighbors import KNeighborsRegressor

knn = make_pipeline(RobustScaler(), KNeighborsRegressor())
knn.fit(Xtrain, ytrain)
_evaluate('KNN Regression', 'KNN', knn)

# #### GradientBoosting Regression
from sklearn.ensemble import GradientBoostingRegressor

model_GBoost = GradientBoostingRegressor(n_estimators=3000,
                                         learning_rate=0.05,
                                         max_depth=4,
                                         max_features='sqrt',
                                         min_samples_leaf=15,
                                         min_samples_split=10,
                                         loss='huber',
                                         random_state=42)
model_GBoost.fit(Xtrain, ytrain)
_evaluate('GradientBoosting Regression', 'GradientBoosting', model_GBoost)

# #### RandomForest Regressor
from sklearn.ensemble import RandomForestRegressor

forest_reg = RandomForestRegressor(random_state=42)
forest_reg.fit(Xtrain, ytrain)
_evaluate('RandomForest Regression', 'RandomForest', forest_reg)

# #### Grid Search for finding best params for RandomForest
from sklearn.model_selection import GridSearchCV

param_grid = [
    {'n_estimators': [70, 100], 'max_features': [150]},
    {'bootstrap': [True], 'n_estimators': [70, 100],
     'max_features': [150]},
]
forest_reg = RandomForestRegressor(random_state=42)
# train across 5 folds for every parameter combination
grid_search = GridSearchCV(forest_reg, param_grid, cv=5,
                           scoring='neg_mean_squared_error',
                           return_train_score=True)
grid_search.fit(Xtrain, ytrain)
_evaluate('GridSearch RF Regression', 'GridSearchRF', grid_search)
# -
grid_search.best_estimator_  # display-only; a no-op when run as a script
# +
# Flatten scores {model: [meanCV, mse, mae, rmse, r2]} into a DataFrame.
scores_list = []
for k, v in scores.items():
    temp_lst = []
    temp_lst.append(k)
    temp_lst.extend(v)
    scores_list.append(temp_lst)
scores_df = pd.DataFrame(scores_list,
                         columns=['Model', 'CV_Mean_Score',
                                  'MSE(RSS)', 'MAE', 'RMSE',
                                  'R2Squared'])
scores_df.sort_values(['CV_Mean_Score'])
# -
# NOTE(review): `sns` (seaborn) is never imported in this file — this line
# raises NameError as written; add `import seaborn as sns` at the top.
_ = sns.scatterplot(x='Model', y='CV_Mean_Score',
                    data=scores_df, style='Model')
# +
Lasso_Predictions = lr_lasso.predict(test)
GBoost_Predictions = model_GBoost.predict(test)
KNN_Predictions = knn.predict(test)
GridSearch_Predictions = grid_search.best_estimator_.predict(test)
# +
# NOTE(review): training data was read from './input/' above but submissions
# use '../input/' — confirm which path is correct.
submission = pd.read_csv('../input/sample_submission.csv')
submission['Label'] = Lasso_Predictions
submission.to_csv('../input/Lasso.csv', index=False)
submission['Label'] = GBoost_Predictions
submission.to_csv('../input/GBoost.csv', index=False)
submission['Label'] = KNN_Predictions
submission.to_csv('../input/KNN.csv', index=False)
submission['Label'] = GridSearch_Predictions
submission.to_csv('../input/GidSearch.csv', index=False)
|
# Generated by Django 3.1.2 on 2020-10-03 23:47
import django.contrib.gis.db.models.fields
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated by Django 3.1.2 — avoid hand-editing once applied.

    initial = True

    dependencies = [
        ('api', '0001_initial'),
    ]

    operations = [
        migrations.CreateModel(
            name='AirNowForecastSource',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=100)),
            ],
        ),
        # PointField columns require a GeoDjango spatial backend (srid 4326 = WGS84).
        migrations.CreateModel(
            name='AirNowReportingArea',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=45)),
                ('state_code', models.CharField(max_length=2)),
                ('location', django.contrib.gis.db.models.fields.PointField(srid=4326)),
            ],
        ),
        migrations.CreateModel(
            name='AirNowReportingAreaZipCode',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('city', models.CharField(db_index=True, max_length=100)),
                ('state', models.CharField(max_length=2)),
                ('zipcode', models.IntegerField(db_index=True)),
                ('location', django.contrib.gis.db.models.fields.PointField(srid=4326)),
            ],
        ),
        migrations.CreateModel(
            name='AirNowObservation',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('issued_date', models.DateField()),
                ('valid_date', models.DateField()),
                ('valid_time', models.TimeField(null=True)),
                ('record_sequence', models.SmallIntegerField()),
                ('parameter_name', models.CharField(max_length=10)),
                ('aqi_value', models.SmallIntegerField(null=True)),
                ('aqi_category', models.CharField(max_length=40)),
                ('primary_pollutant', models.BooleanField()),
                ('type', models.CharField(choices=[('F', 'Forecast'), ('Y', 'Yesterday'), ('O', 'Hourly Observation')], max_length=1)),
                ('discussion', models.CharField(max_length=500)),
                ('reporting_area', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='api.airnowreportingarea')),
                ('source', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='api.airnowforecastsource')),
            ],
        ),
    ]
|
from helpers import swap_in_array, timeit
def insert_sort(unsorted):
    '''Sorts a given list inplace using Insertion Sort Algo.

    Algorithm: the array is conceptually split into a sorted Left part and
    an unsorted Right part.  Each pass picks the first element E of Right,
    shifts every larger element of Left one slot to the right, and drops E
    into the gap.  Initially Left holds just one element, which is trivially
    sorted.

    Improvement over the previous version: the inner loop now shifts
    elements instead of swapping them (one write per step instead of the
    two a swap needs), and no longer depends on the swap_in_array helper.
    Element comparisons and final ordering are identical.

    This is an O(n**2) algorithm, decent for arrays of length ~30.
    Best Case: sorted array, O(n).
    Worst Case: reverse sorted array, O(n**2).
    '''
    for i in range(1, len(unsorted)):
        element = unsorted[i]
        j = i - 1
        # Shift larger elements right until element's slot is found.
        while j >= 0 and unsorted[j] > element:
            unsorted[j + 1] = unsorted[j]
            j -= 1
        unsorted[j + 1] = element
@timeit
def best_case():
    # sorted array — insertion sort's O(n) best case (Python 2: xrange / print stmt)
    a = list(xrange(30))
    insert_sort(a)
    print a

@timeit
def worst_case():
    # reverse sorted array — the O(n**2) worst case
    a = list(xrange(30))
    a.reverse()
    insert_sort(a)
    print a

if __name__ == '__main__':
    best_case()
    worst_case()
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Date   : 2018-07-12 21:07:54
# @Author : flying
import package_test

# Importing a package really just executes the package's __init__.py.
# >>> from package
# To call a submodule's function like below, the package's __init__.py must
# expose (import) that submodule.
package_test.test1.test()
|
import solidBoundaryPoint
#x, y, z are the solid body points.
#x1, y1, z1 are the FFD points
def computeTUVData(SolidBoundaryPointArray, FFDPointArray, FFDXMax, FFDXMin, FFDYMax, FFDYMin, FFDZMax, FFDZMin):
    """Set normalized (t, u, v) coordinates on every solid-boundary point.

    Each coordinate is the point's position normalized into [0, 1] across
    the FFD bounding box.  FFDPointArray is accepted for interface
    compatibility but is not used here.
    """
    x_span = FFDXMax - FFDXMin
    y_span = FFDYMax - FFDYMin
    z_span = FFDZMax - FFDZMin
    for point in SolidBoundaryPointArray:
        point.setT((point.getX() - FFDXMin) / x_span)
        point.setU((point.getY() - FFDYMin) / y_span)
        point.setV((point.getZ() - FFDZMin) / z_span)
|
import random
import numpy as np
import matplotlib.pyplot as plt
def number_of_population():
    """Number of individuals generated per population."""
    return 100
def gems():
    """Flat (1-D) map indices where the treasures are hidden."""
    return [11, 16, 27, 29, 39]
def load_map():
    """Return (start_position, map_size) for the treasure map."""
    map_size = 7
    start_position = 45
    return start_position, map_size
def incrementation(array, position):
    """Increment the gene at *position* in place, wrapping 256 back to 0.

    NOTE(review): the wrap point is 256 rather than 255, so genes span 257
    values here; decrementation mirrors the same convention — confirm it is
    intended.
    :param array: one individual
    :param position: index of the gene to modify
    :return: the (mutated) individual
    """
    array[position] = 0 if array[position] == 256 else array[position] + 1
    return array
def decrementation(array, position):
    """Decrement the gene at *position* in place, wrapping 0 up to 256.

    NOTE(review): wraps to 256 (not 255), mirroring incrementation's
    257-value convention — confirm it is intended.
    :param array: one individual
    :param position: index of the gene to modify
    :return: the (mutated) individual
    """
    array[position] = 256 if array[position] == 0 else array[position] - 1
    return array
def jump(array, position):
    """Return the jump target encoded in the low 6 bits of the current gene.

    :param array: one individual
    :param position: index of the current gene
    :return: (new gene position, the unchanged individual)
    """
    target = array[position] & 63  # only the low 6 bits address the genome
    return target, array
def output(array, position, move_array, actual_position, size_of_map):
    """Decode a movement from the low 2 bits of a gene and apply it.

    :param array: one individual
    :param position: index of the gene holding the move
    :param move_array: accumulated map positions visited so far
    :param actual_position: current flat map position
    :param size_of_map: map dimension
    :return: (new position, updated move_array)
    """
    direction = array[position] & 3  # last 2 bits select the direction
    if direction == 0:
        actual_position += size_of_map   # one row down
    elif direction == 1:
        actual_position -= size_of_map   # one row up
    elif direction == 2:
        actual_position -= 1             # one column left
    else:
        actual_position += 1             # direction == 3: one column right
    move_array.append(actual_position)
    return actual_position, move_array
def generate():
    """Generate one individual: an array of 64 genes in 0..255.

    Fix: ``np.random.randint(0, 255)`` excludes its upper bound, so the
    original could never produce gene value 255 even though the docstring
    and the 8-bit wrap handling elsewhere assume the full 0..255 range.
    The exclusive high is now 256.
    """
    return np.random.randint(0, 256, 64)
def convert(position, size_of_map):
    """Convert a flat 1-D index into (row, col) on a size_of_map-wide map."""
    return divmod(position, size_of_map)
def check(old_position, new_position, move_array, size_of_map):
    """Return True when the move just recorded was illegal.

    Illegal means: the last recorded position is off the map, or the step
    changed both row and column (a wrap-around/diagonal move).
    :param old_position: position before the move
    :param new_position: position after the move
    :param move_array: recorded map positions (last entry is the new move)
    :param size_of_map: map dimension
    """
    prev_row, prev_col = convert(old_position, size_of_map)
    next_row, next_col = convert(new_position, size_of_map)
    last_step = move_array[-1]
    off_map = last_step < 0 or last_step > size_of_map * size_of_map - 1
    diagonal = prev_row != next_row and prev_col != next_col
    return off_map or diagonal
def move(start_position, array, size_of_map):
    """
    Execute the individual's gene program as a small virtual machine.

    :param start_position: map position the walk starts from
    :param array: the individual (genome)
    :param size_of_map: map dimension
    :return: the resulting list of visited map positions
    """
    actual_position = start_position
    position = 0                     # instruction pointer into the genome
    move_array = []
    f_value = array[position] >> 6   # top 2 bits select the instruction
    moves = 0
    gem = gems()
    """
    at most 500 iterations, after which this genome is terminated
    """
    while moves != 500:
        """
        clamp the gene if it exceeds 8 bits
        NOTE(review): `%= 255` also maps 255 -> 0; `%= 256` was likely meant
        """
        if array[position] > 255:
            array[position] %= 255
        """
        the low 6 bits are the operand (target gene / move selector)
        """
        new_position = array[position] & 63
        if f_value == 0:
            array = incrementation(array, new_position)
        elif f_value == 1:
            array = decrementation(array, new_position)
        elif f_value == 3:
            """
            remember the old position so leaving the map can be detected
            """
            old_position = actual_position
            actual_position, move_array = output(array, new_position, move_array, actual_position, size_of_map)
            """
            if the walk left the map, pop the bad move and halt the machine
            """
            if check(old_position, actual_position, move_array, size_of_map):
                move_array.pop()
                break
            count = 0
            """
            count the treasures collected so far; stop once all are found
            """
            for i in gem:
                if i in move_array:
                    count += 1
            if count == gem.__len__():
                break
        if f_value == 2:
            position, array = jump(array, position)
            """
            a jump already moved the instruction pointer
            """
        else:
            position += 1
        if position > 63:
            position = 0
        f_value = array[position] >> 6
        moves += 1  # one more iteration executed
    return move_array
def first_population(number):
    """Generate the initial population of *number* random individuals."""
    return [generate() for _ in range(number)]
def fitness(arrays):
    """Score one individual.

    Fitness = 1 + number of treasures collected, minus a small penalty when
    the walked path is longer than the whole map.  When the maximum fitness
    is reached (all treasures, acceptable path), the path is printed.
    :param arrays: the individual to evaluate
    :return: the individual's fitness
    """
    start_position, map_size = load_map()
    treasures = gems()
    # simulate on a copy so the caller's genome is not mutated
    path = move(start_position, arrays.copy(), map_size)
    fitnes = 1
    for treasure in treasures:
        if treasure in path:
            fitnes += 1
    # penalize unnecessarily long walks
    if path.__len__() > map_size * map_size:
        fitnes -= (path.__len__() * map_size * 0.001)
    if fitnes == treasures.__len__() + 1:
        print("cesta je: ")
        for step in path:
            print("(", step // map_size, step % map_size, ")")
            if step in treasures:
                print("zobral som poklad cislo ", step)
    return fitnes
def generate_position(old_generation, total_sum):
    """Roulette-wheel selection: pick an individual index with probability
    proportional to its fitness.

    :param old_generation: generation as [fitness, genome] pairs
    :param total_sum: sum of all fitness values
    :return: index of the selected individual
    """
    threshold = random.randint(0, int(total_sum))
    running = 0
    for index, member in enumerate(old_generation):
        running += member[0]
        if running >= threshold:
            return index
def roulete(old_generation, count):
    """
    Roulette-wheel crossover: breed *count* children from the old generation.

    :param old_generation: previous generation as [fitness, genome] pairs
    :param count: how many offspring to produce
    :return: the new generation (genomes only) and the fitness total
    """
    old_generation = sorted(old_generation.copy(), key=lambda x: x[0], reverse=True)
    total_sum = 0
    new_generation = []
    pom_array = []
    for i in old_generation:
        total_sum += i[0]
    for i in range(count):
        """
        pick 2 random parents from the generation
        """
        list_1 = generate_position(old_generation, total_sum)
        list_2 = generate_position(old_generation, total_sum)
        for j in range(64):
            """
            inherit each gene randomly from either the father or the mother
            """
            probability = random.randint(0, 1)
            if probability == 0:
                pom_array.append(old_generation[list_1][1][j])
            else:
                pom_array.append(old_generation[list_2][1][j])
        new_generation.append(pom_array)
        pom_array = []
    return new_generation, total_sum
def top_population_first(new_generation, counter):
    """Return copies of the *counter* best-rated individuals, fitness included.

    :param new_generation: individuals as [fitness, genome] pairs
    :param counter: how many of the best to return
    """
    ranked = sorted(new_generation, key=lambda member: member[0], reverse=True)
    return [ranked[i].copy() for i in range(counter)]
def top_population(new_generation, counter):
    """Return copies of the genomes of the *counter* best-rated individuals.

    :param new_generation: individuals as [fitness, genome] pairs
    :param counter: how many of the best to return
    """
    ranked = sorted(new_generation, key=lambda member: member[0], reverse=True)
    return [ranked[i][1].copy() for i in range(counter)]
def fill_with_random(new_generation, mutation):
    """
    Mutate individuals with probability *mutation* (in percent).

    :param new_generation: the new generation of individuals
    :param mutation: mutation probability in percent
    :return: the (possibly) mutated individuals and the current mutation rate
    """
    pom_array = []
    new_population = []
    for i in new_generation:
        probability = random.randint(0, 100)
        """
        decide whether this individual gets mutated
        """
        if probability < mutation:
            random_number = random.randint(0, 256)
            for m in range(64):
                """
                randomly decide whether this gene keeps its value or mutates
                """
                k = random.randint(0, 1)
                if k == 0:
                    new_number = i[m] + random_number
                    """
                    keep the value within 8 bits
                    """
                    if new_number > 255:
                        new_number %= 256
                else:
                    new_number = i[m]
                pom_array.append(new_number)
            """
            add the mutated individual to the new population
            """
            new_population.append(pom_array)
            pom_array = []
        else:
            new_population.append(i)
    return new_population, mutation
def roulete_test():
    """
    Main GA loop: crossover and mutation of individuals until the maximum
    fitness (all treasures found with an acceptable path) is reached.

    :return: data for plotting (generation numbers, mean fitness per generation)
    """
    population = 1
    graph_sum = []
    graph_population = []
    top_members = []
    mutation = 20  # mutation probability in percent
    number = number_of_population()
    input_arrays = first_population(number)
    rated_generation = []
    """
    generate and rate the first population
    """
    for i in range(number):
        rated_generation.append([fitness(input_arrays[i]), input_arrays[i]])
    sorted_array = sorted(rated_generation.copy(), key=lambda x: x[0], reverse=True)
    best_fitness = sorted_array[0][0]
    gem = gems().__len__()
    """
    when fitness equals the treasure count (+1), an acceptable solution was found
    """
    while best_fitness != gem + 1:
        population += 1
        new_generation = []
        help_array, total_sum = roulete(rated_generation, number - 5)
        """
        combine the helper functions above to build the next population
        """
        for i in help_array:
            new_generation.append(i)
        help_array, mutation = fill_with_random(new_generation, mutation)
        new_generation = []
        for i in help_array:
            new_generation.append(i)
        help_array = top_population_first(rated_generation, 5)  # elitism: keep the top 5
        for i in help_array:
            top_members.append(i.copy())
        top_members = sorted(top_members, key=lambda x: x[0], reverse=True)
        help_array = top_population(top_members, 5)
        for i in help_array:
            new_generation.append(i)
        rated_generation = []
        for i in range(number):
            rated_generation.append([fitness(new_generation[i]), new_generation[i]])
        rated_generation = sorted(rated_generation, key=lambda x: x[0], reverse=True)
        best_fitness = rated_generation[0][0]
        print("max fitness ", best_fitness, "priemerna fitness ", total_sum / number, "mutacia ", mutation, "populacia",
              population)
        if best_fitness == gem + 1:
            break
        graph_sum.append(total_sum / number)
        graph_population.append(population)
        """
        adapt the mutation rate so it stays neither too high nor too low
        """
        if population > 2 and mutation < 55:
            if abs(graph_sum[-2] - graph_sum[-1]) > 0.20:
                mutation = 20
            else:
                mutation += 1
        elif population > 2 and mutation == 0:
            mutation += 1
    print("ciel dosiahnuty na ", population, "tu generaciu")
    return graph_population, graph_sum
def draw_graph():
    """
    Plot the GA run: average fitness (y axis) against generation number (x axis).

    Runs the genetic algorithm via roulete_test() and renders the collected
    per-generation average-fitness series with matplotlib.

    :return: None (the plot window is shown as a side effect)
    """
    # roulete_test() returns (generation numbers, average fitness per generation).
    generation, total_sum = roulete_test()
    plt.plot(generation, total_sum)
    plt.show()
draw_graph()
|
"""
The VORONOI program only accepts meshes in a select few formats.
See [1] for a list of supported filetypes.
This script is intended to generate an AVS-UCD mesh for use in VORONOI,
from a collection of nodes and connectivity.
[1]: http://lagrit.lanl.gov/docs/commands/READ.html
"""
import argparse
import numpy as np
import sys
def read_nodes(infile, delimiter=' '):
    """Load node coordinates from *infile* as a float64 array.

    :param infile: path to a text file with one ``x y z`` row per node
    :param delimiter: column separator (defaults to a single space)
    :return: numpy array of dtype double, one row per node
    """
    coords = np.loadtxt(infile, delimiter=delimiter, dtype=np.double)
    return coords
def read_elements(infile, delimiter=' '):
    """Load element connectivity (node indices) from *infile* as integers.

    :param infile: path to a text file with one ``i j k [...]`` row per element
    :param delimiter: column separator (defaults to a single space)
    :return: numpy integer array, one row per element

    Fix: ``np.int`` was deprecated in NumPy 1.20 and removed in 1.24; the
    builtin ``int`` is the documented replacement and yields the same
    platform-default integer dtype.
    """
    return np.loadtxt(infile, delimiter=delimiter, dtype=int)
def read_attributes(infile, delimiter=' '):
    """Load per-node or per-cell attribute values from *infile*.

    Attributes are written to the AVS file as reals (see write_avs), so they
    are parsed as float64 here: one row per node/cell, one column per
    attribute.

    Fix: the original body was an unimplemented ``pass`` stub (it returned
    None), while the script's __main__ section expects attribute arrays.

    :param infile: path to a delimited text file of reals, or None
    :param delimiter: column separator (defaults to a single space)
    :return: numpy array of dtype double, or None when *infile* is None
    """
    if infile is None:
        return None
    return np.loadtxt(infile, delimiter=delimiter, dtype=np.double)
def write_avs(nodes, outfile, elements=None, ttype=None, natts=None, catts=None):
    """Write an AVS-UCD mesh file readable by LaGriT / VORONOI.

    :param nodes: array-like of (x, y, z) node coordinates
    :param outfile: path of the AVS file to create
    :param elements: optional array-like of 1-based node indices per element
    :param ttype: element type ('tri', 'tet', 'quad', 'hex'); inferred from
        the connectivity width (3 -> 'tri', else 'tet') when omitted
    :param natts: optional per-node attribute array (rows = nodes)
    :param catts: optional per-cell attribute array (rows = elements)
    :raises AssertionError: on an empty node list or unknown element type

    Fixes relative to the original version: the body referenced an undefined
    object ``mo`` (and the free names ``natts``/``catts``) instead of its own
    arguments, and the cell-attribute name loop called ``len(count)`` on an
    int. Node and cell attributes are now explicit, optional parameters
    (backward-compatible defaults of None).
    """
    if ttype is None:
        # Infer element type from connectivity width when possible.
        ttype = 'tri' if elements is not None and len(elements[0]) == 3 else 'tet'
    else:
        assert ttype in ['tri', 'tet', 'quad', 'hex'], 'Unknown element type'
    count_nodes = len(nodes)
    count_elems = len(elements) if elements is not None else 0
    count_natts = len(natts[0]) if natts is not None else 0
    count_catts = len(catts[0]) if catts is not None else 0
    assert count_nodes > 0, 'nodes array cannot be empty'
    with open(outfile, 'w') as f:
        # Header: node count, element count, node-att count, cell-att count,
        # model-att count (always 0 here).
        f.write("{:11d}{:11d}{:11d}{:11d}{:11d}\n".format(count_nodes, count_elems, count_natts, count_catts, 0))
        # Nodes: 1-based id followed by x, y, z.
        for i in range(0, count_nodes):
            f.write("{:03d} {:010E} {:010E} {:010E}\n".format(i + 1, nodes[i][0], nodes[i][1], nodes[i][2]))
        # Elements: id, material (always 1), type keyword, connectivity.
        if count_elems != 0:
            if ttype == 'tet':
                for i in range(0, count_elems):
                    f.write("{:03d} 1 tet {} {} {} {}\n".format(i + 1, elements[i][0], elements[i][1], elements[i][2], elements[i][3]))
            elif ttype == 'tri':
                for i in range(0, count_elems):
                    f.write("{:03d} 1 tri {} {} {}\n".format(i + 1, elements[i][0], elements[i][1], elements[i][2]))
        # Node attributes (all declared as reals; data types may need fixing).
        if count_natts != 0:
            f.write("{:05d}".format(count_natts) + " 1" * count_natts + "\n")
            for i in range(0, count_natts):
                f.write("imt{}, {}\n".format(i + 1, "real"))
            for i in range(0, len(natts)):
                formatted_atts = ' '.join(["{0:010E}".format(natts[i][j]) for j in range(0, count_natts)])
                f.write('{0:010d} '.format(i + 1) + formatted_atts + "\n")
        # Cell attributes (all declared as reals; data types may need fixing).
        if count_catts != 0:
            f.write("{:05d}".format(count_catts) + " 1" * count_catts + "\n")
            for i in range(0, count_catts):
                f.write("isn{}, {}\n".format(i + 1, "real"))
            for i in range(0, len(catts)):
                formatted_atts = ' '.join(["{0:010E}".format(catts[i][j]) for j in range(0, count_catts)])
                f.write('{0:010d} '.format(i + 1) + formatted_atts + "\n")
if (__name__ == "__main__"):
    # Command-line driver: parse arguments, load inputs, write the AVS mesh.
    applet_description = '''
This script converts a file of node (x,y,z) values and element (i,j,k,...) indices into
an AVS-UCD mesh.
Node input file should be in the form:
x0 y0 z0
x1 y1 z1
...
xN yN zN
Element input file should be in the form:
i1 j1 k1 ...
i2 j2 k2 ...
...
iN jN kN ...
A delimiter between entries may be specified with the -d argument. Defaults to space (` `).
It is recommended that the element file reference the node list with 1-based indexing; that is,
the element input file should reference the node `x0 y0 z0` as `1`.
If you use zero- or N-based indexing, use the --index flag to indicate this.
This script will automatically assume that an element file with 3 integers on a line refers to
a triangle element, and 4 integers refers to tetrahedrons. If you wish to manually specify what the
element type is (i.e., quad) then use --type ['tet','tri','quad','hex']. Note that only one element
type per file is allowed - mixing of element types in a single mesh is not supported in Voronoi.
If you only have a list of nodes, this script will still write out a file - but no element connectivity
will be defined. You can import the mesh into LaGriT and triangulate it, or use the SciPy Delaunay function.
If you wish to view the created mesh, it is recommended that you use ParaView: https://www.paraview.org
'''
    parser = argparse.ArgumentParser(description=applet_description, formatter_class=argparse.RawTextHelpFormatter)
    parser.add_argument('-o', '--outfile', help='Filepath to save mesh to (defaults to \'mesh.avs\')', type=str, default='mesh.avs')
    parser.add_argument('-n', '--nodes', help='Filepath to node input file', type=str)
    parser.add_argument('-e', '--elements', help='Filepath to elements input file', type=str)
    parser.add_argument('-t', '--type', help='Type of elements (tri,tet,quad,...)', type=str)
    parser.add_argument('-d', '--delimiter', help='Delimiter between entries in node and elements files. Defaults to space.', type=str, default=' ')
    parser.add_argument('-na', '--nodeatts', help='Filepath to node attributes', type=str)
    parser.add_argument('-ca', '--cellatts', help='Filepath to cell attributes', type=str)
    parser.add_argument('-i', '--index', help='First node reference integer in elements file', type=int, default=1)
    args = parser.parse_args()
    # A node file is mandatory; everything else is optional.
    if args.nodes is None:
        parser.print_help()
        print("\nERROR: Node input file is required")
        sys.exit()
    if args.elements is None:
        print("WARNING: No element file provided. Writing only nodes...")
    # Fixes: the original referenced a bare (undefined) `delimiter` name and
    # called a nonexistent read_node_attributes() helper; both now route
    # through args.delimiter and read_attributes().
    nodes = read_nodes(args.nodes, delimiter=args.delimiter)
    elements = read_elements(args.elements, delimiter=args.delimiter) if args.elements is not None else None
    natts = read_attributes(args.nodeatts, delimiter=args.delimiter) if args.nodeatts is not None else None
    catts = read_attributes(args.cellatts, delimiter=args.delimiter) if args.cellatts is not None else None
    write_avs(nodes, args.outfile, elements=elements, ttype=args.type)
    print('Mesh successfully written to \'%s\'' % args.outfile)
|
import datetime
from datetime import date, timedelta
import pandas
import json
import os
import way2sms
import sys
sys.path.append(os.environ.get('TRADING_SOFTWARE')+'/src/1_input_marketdatafetcher/dataparsers/')
import google_history_data_parser as ghdp
import nse_option_data_parser as nodp
###############################################################################################################
# Trading-session time windows, derived from today's wall-clock date.
today_time = datetime.datetime.now()
today_daystart_time = today_time.replace(hour=0, minute=0, second=0, microsecond=0)
# Market hours (presumably NSE, given the nse_option_data_parser import):
# open 09:15, intraday square-off 14:55, interday close 15:25.
market_start = today_time.replace(hour=9, minute=15, second=0, microsecond=0)
market_intraday_end = today_time.replace(hour=14, minute=55, second=0, microsecond=0)
market_interday_end = today_time.replace(hour=15, minute=25, second=0, microsecond=0)
print("Market Opens at - ", market_start)
print("Market Intraday Closes at - ", market_intraday_end)
print("Market Interday closes at - ", market_interday_end)
# SMS alerting via way2sms; actual sending is gated by enable_sms (off here).
phone_number = '8447240641'
q=way2sms.sms(phone_number,'freesms')
enable_sms = False
# When True the streaming loop runs even outside market hours (testing aid).
bypass_trading_window = True
# Instruments monitored by the rule checks below.
symbols = ["NIFTY","BHEL","BANKBARODA"]
###############################################################################################################
def load365DayWiseData(symbols):
    """Load the daily (365-day) history CSVs for *symbols* into one frame.

    Reads ``New_nifty50_<symbol>.csv`` from the current working directory for
    every requested symbol and concatenates the frames row-wise.

    Fix: ``print filename`` is Python-2-only syntax; the single-argument
    ``print(filename)`` call prints identically on Python 2 and 3.

    :param symbols: iterable of instrument names
    :return: one concatenated pandas DataFrame
    """
    df = []
    for symbol in symbols:
        filename = "New_nifty50_" + symbol + ".csv"
        print(filename)
        frames = pandas.read_csv(filename)
        df.append(frames)
    result = pandas.concat(df)
    return result
def load90DayMinuteWiseData(symbols):
    """Load the minute-wise history CSVs for *symbols* into one frame.

    Reads ``Today_Yesterday_nifty50_<symbol>.csv`` from the current working
    directory for every requested symbol and concatenates them row-wise.

    Fix: ``print filename`` is Python-2-only syntax; the single-argument
    ``print(filename)`` call prints identically on Python 2 and 3.

    :param symbols: iterable of instrument names
    :return: one concatenated pandas DataFrame
    """
    df = []
    for symbol in symbols:
        filename = "Today_Yesterday_nifty50_" + symbol + ".csv"
        print(filename)
        frames = pandas.read_csv(filename)
        df.append(frames)
    result = pandas.concat(df)
    return result
# Load historical data; paths are relative to the script's place in the
# source tree (2_datadump/datadump holds daily and minutewise dumps).
os.chdir("../../2_datadump/datadump/daily/")
daywisedata = load365DayWiseData(symbols)
os.chdir("../minutewise/")
minutewisedata = load90DayMinuteWiseData(symbols)
os.chdir("../../../")
###############################################################################################################
# Option strikes to trade today; the flag below makes options_check_rule1
# announce them exactly once per run.
call_option_chosen = 10000
put_option_chosen = 7000
starting_strikeprice_chosen = True
def options_check_rule1(options_current_data):
    """Options rule 1: announce the chosen CALL/PUT strikes once per run.

    On the first invocation only (guarded by the module-level flag
    ``starting_strikeprice_chosen``) the configured strike prices are
    printed. Always returns an empty alert string — the rule produces no
    trading signal yet.

    :param options_current_data: current option-chain snapshot (unused apart
        from the commented-out exploratory lines)
    :return: '' (no alert)
    """
    #If Today's current_low falls below yesterday's low
    # & I know Today High Low would be atleast 1%
    # & with yesterday low - today-low correlation of .99 and 20 percent chance of diff being less than 0.5%
    # & I know even If Today's High is already higher than yesterday's low - I would cover 1% tommorow
    global starting_strikeprice_chosen
    if starting_strikeprice_chosen :
        # First cycle: flip the flag so this message appears only once.
        starting_strikeprice_chosen = False
        #call_option_chosen = options_current_data.head(1)['CALL_LTP']
        print("Today OPTIONS Trade -Start- CALL_StrikePrice:{0} PUT_StrikePrice:{1}".format(call_option_chosen,put_option_chosen))
        #print(options_current_data[options_current_data['Strike Price'] == 9800][['Strike Price','PUT_LTP']])
        #print(options_current_data[options_current_data['Strike Price'] == 10300][['Strike Price','CALL_LTP']])
    msg = ''
    return msg
def check_rule1(today_minute_data, symbol):
    """Rule 1: emit a buy alert when today's latest low undercuts yesterday's.

    Compares the most recent minute bar's low against the last daily low for
    *symbol* (taken from the module-level ``daywisedata`` frame). If the
    current low is more than 0.5% below yesterday's low, a "BuyNow" alert
    fragment is returned; otherwise an empty string.

    :param today_minute_data: DataFrame of today's minute bars (needs 'low')
    :param symbol: instrument name used to filter ``daywisedata``
    :return: alert fragment or ''
    """
    #If Today's current_low falls below yesterday's low
    # & I know Today High Low would be atleast 1%
    # & with yesterday low - today-low correlation of .99 and 20 percent chance of diff being less than 0.5%
    # & today's high(today's predicted low + 1%) would come after today's low in chosen class
    # & I know even If Today's High is already higher than yesterday's low - I would cover 1% tommorow
    #yesterdate_date =
    msg = ''
    # The last daily bar for this symbol plays the role of "yesterday".
    yesterday_date = daywisedata[daywisedata.symbol == symbol].iloc[-1]['date']
    yesterday_low = float(daywisedata[daywisedata.symbol == symbol].iloc[-1]['low'])
    current_low = float(today_minute_data.iloc[-1]['low'])
    print(" Rule1 ",yesterday_date, yesterday_low, current_low)
    # Trigger when the live low breaks 0.5% below yesterday's low.
    if yesterday_low*0.995 > current_low:
        #print(yesterday_date, yesterday_low, current_low)
        msg = msg+' Rule1 - BuyNow -'+symbol+","
    return msg
def check_rule2(today_minute_data, symbol):
    """Placeholder rule: log the latest minute bar and flag nothing.

    :param today_minute_data: DataFrame of today's minute-wise bars
    :param symbol: instrument name (currently unused)
    :return: empty alert string (rule not implemented yet)
    """
    # Print the most recent bar for visibility while the rule is stubbed out.
    print(today_minute_data.tail(1))
    return ''
###############################################################################################################
def performStreamingOperation(time):
    """One polling cycle: fetch option chain and minute bars, run all rules.

    Builds an aggregate alert message from the option rule and the two
    per-symbol rules, prints it, and optionally forwards it via SMS.

    :param time: timestamp of this cycle (used only in the log line)
    """
    print("**********************************************************************")
    msg = ''
    ############################################################################
    # Fresh option-chain snapshot from the NSE parser module.
    options_current_data = nodp.NseOptionsData()
    msg = msg+options_check_rule1(options_current_data)
    ############################################################################
    for symbol in symbols :
        print "Get Data for "+symbol+" - for - "+datetime.datetime.strftime(time,'%d-%m-%Y-%H-%M')
        # 60-second intraday bars, 1 day of history (per the parser's API —
        # TODO confirm argument meaning against google_history_data_parser).
        inputjson = ghdp.GoogleIntradayQuote(symbol,60,1)
        # The parser's CSV output is re-parsed into a DataFrame of OHLCV bars.
        x = [i.split(',') for i in inputjson.to_csv().split()]
        today_minute_data = pandas.DataFrame.from_records(x,columns=['symbol','date','time','open','high','low','close','volume'])
        msg = msg+check_rule1(today_minute_data, symbol)
        msg = msg+check_rule2(today_minute_data, symbol)
    ############################################################################
    print(msg)
    if enable_sms:
        q.send( phone_number, msg )
def getTradingMarketMinute(time):
    """Return the whole number of minutes elapsed since today's midnight.

    :param time: a datetime on the current trading day
    :return: int minute-of-day (0 at midnight, 570 at 09:30, ...)
    """
    elapsed = time - today_daystart_time
    # total_seconds() already folds days, seconds and microseconds together.
    return int(elapsed.total_seconds() / 60)
def startStreamingApp():
    """Main polling loop: wait for market hours, then run one streaming
    operation per elapsed trading minute until the interday close.

    Sends optional SMS notifications at market open, intraday close and the
    final (interday) close, then logs out of the SMS session.
    """
    current_time = datetime.datetime.now()
    last_minute_handled = -1
    counter = 0
    trading_started = False
    intraday_ended = False
    interday_ended = False
    while(True) :
        # NOTE(review): `&` is the bitwise operator and binds tighter than
        # `not`, so `not trading_started & (not interday_ended)` parses as
        # `not (trading_started & (not interday_ended))` — confirm intent.
        while ((current_time >= market_start) & (current_time <= market_interday_end) or bypass_trading_window):
            if current_time >= market_start:
                if not trading_started & (not interday_ended):
                    trading_started = True
                    print "Market Opened for trading"
                    if enable_sms:
                        q.send( phone_number, 'Market Opened for trading' )
            if current_time >= market_intraday_end:
                if trading_started & (not intraday_ended):
                    intraday_ended = True
                    print "Market Closing for Intradday trading"
                    if enable_sms:
                        q.send( phone_number, 'Market Closing for Intradday trading' )
            if current_time >= market_interday_end:
                if trading_started & (not interday_ended):
                    interday_ended = True
                    intraday_ended = True
                    trading_started = True
                    print "Market Closing for Interday trading"
                    if enable_sms:
                        q.send( phone_number, 'Market Closing for Interday trading' )
                    break
            # Run at most one streaming operation per trading minute.
            if counter > last_minute_handled:
                try:
                    performStreamingOperation(current_time)
                    last_minute_handled = getTradingMarketMinute(current_time)
                except ValueError:
                    print("Oops! That was no valid number. Try again...")
            #reset time
            current_time = datetime.datetime.now()
            counter = getTradingMarketMinute(current_time)
        if current_time > market_interday_end :
            break
    q.logout()
###############################################################################################################
# Script entry point: start the market-monitoring loop.
if __name__ == '__main__':
    startStreamingApp()
###############################################################################################################
|
# Read the target weight N (sugar delivered in 3 kg and 5 kg bags).
n = int(input())
ans = 0 ;
# Strategy (translated from the Korean notes): below the LCM 15, the
# weights reachable with 3 kg bags alone are 3, 6, 9, 12. From 15 up,
# if N is divisible by 5 the quotient N//5 is optimal; otherwise, if
# (N % 5) is divisible by 3, mix 5 kg and 3 kg bags; otherwise greedily
# take one 5 kg bag and recurse (solution(n-5)) — once the remainder
# reaches one of 3/6/9/12, add remainder//3 (3 kg bags) to the count of
# recursive calls (5 kg bags). Anything else is unreachable: answer -1.
three = [3,6,9,12]
def solution(n):
    """Accumulate into the global ``ans`` the minimum number of 3 kg / 5 kg
    bags needed for weight *n*, or set ``ans`` to -1 when impossible.

    Works by mutating the module-level ``ans`` counter and recursing with
    ``n - 5`` when one 5 kg bag is taken greedily; the final result is read
    from ``ans`` by the caller, not returned.
    """
    global ans
    global three
    if n in three :
        # Remainder reachable with 3 kg bags only.
        ans +=n //3
    elif n < 5 :
        # NOTE(review): only n == 3 truly needs one bag here; other n < 5
        # values divisible by 3 (i.e. 0) also take this branch — confirm.
        if n % 3 !=0 :
            ans = -1
        else :
            ans += 1
    else :
        if (n % 5)!=0:
            if (n % 5) % 3 ==0:
                # Mix: as many 5 kg bags as possible, rest in 3 kg bags.
                ans += n //5 + (n % 5) //3
            else :
                if(n-5>0):
                    # Greedily take one 5 kg bag and retry with the rest.
                    ans+=1
                    solution(n-5)
        else :
            # Exactly divisible by 5: all 5 kg bags.
            ans += n //5
# Kick off the computation for the requested weight and report the result
# (minimum number of bags, or -1 if N cannot be composed of 3s and 5s).
solution(n)
print(ans)
from purchase import purchase,discountAmount,createInvoice
from readfiles import readInventory
from updateinventory import updateStock
import datetime
print("Hello!!! This is an electronic store.We sell different kinds of mobile phones,laptops and Harddisks.Please Proceed if you wish to buy.")
def main():
    """Interactive checkout loop for the electronics store.

    Repeatedly asks the user whether to make a purchase, collects the
    purchased items, then prints an invoice with the store's 10% discount
    applied.

    Fix: the original bare ``except:`` also swallowed SystemExit and
    KeyboardInterrupt; it now catches only ``Exception``.
    """
    person_name = input("Enter your full name")
    inventory = readInventory()
    purchases = []
    ans = True
    while ans == True:
        handling_1 = True
        while handling_1 == True:
            try:
                ans = input("would you like to make a purchase?(y/n)")
                if ans == "y":
                    purchased_item = purchase(inventory)
                    if (purchased_item):
                        purchases.append(purchased_item)
                    ans = True
                elif ans == "n":
                    # Leave both loops: the inner via the flag, the outer
                    # because ans is no longer truthy-True.
                    ans = False
                    handling_1 = False
                else:
                    handling_1 = True
                    print("Please enter y or n")
            except Exception:
                # Bad interactive input from purchase(); re-prompt the user.
                print("Please enter correct values.")
                handling_1 = True
    print("We give 10% discount in our product.Discount amount is subtracted in your bills.Enjoy shopping...")
    discount_check = True
    createInvoice(person_name, purchases, discount_check)
    print("Thank you for visiting our store..")
main()
|
import numpy as np
    def print_reproducibility_check(self, reproducible_success, reproducible_result, cochrain_critical_value):
        """Print the Cochran reproducibility-test report (output in Russian).

        :param reproducible_success: True if a critical value was found in the table
        :param reproducible_result: True if the experiment is reproducible
        :param cochrain_critical_value: tabulated Cochran critical value
        """
        print('--------------------------------------------------------------')
        # Per-point response means and variances over the parallel experiments.
        mean, var = self.points_mean_var()
        # Degrees of freedom: (replicates - 1) numerator, design points denominator.
        df_numerator = self.count_of_parallel_experiments - 1
        df_denominator = self.count_of_points
        # Observed Cochran statistic computed from the per-point variances.
        prac_cochrain_val = self.cochran_value(var)
        print('Средние значения: ', np.round(mean, 3))
        print('Дисперсия: ', np.round(var, 3))
        print('\nОценка воспроизводимости критерием Кохрена')
        print('Наблюдаемое значение критерия Кохрена:', np.round(prac_cochrain_val, 3))
        print('Число степеней свободы числ. и знам. :', df_numerator, df_denominator)
        if reproducible_success:
            print('Критическое значение критерия Кохрена:', np.round(cochrain_critical_value, 3))
            print('Эксперимент{} воспроизводим'.format('' if reproducible_result else ' не'))
        else:
            print('Не удалось найти значение в таблице')
        print('Дисперсия ошибки (воспроизводимости) эксперимента:', round(self.reproducibility_var,4))
    def print_object_model(self, student_test_success, student_crit):
        """Print the model-parameter report with Student's t significance test
        (output in Russian).

        :param student_test_success: True if a critical value was found in the table
        :param student_crit: tabulated Student critical value
        """
        # Degrees of freedom: points * (replicates - 1).
        df_student = self.count_of_points * (self.count_of_parallel_experiments - 1)
        prac_student_values = self.student_values()
        print('--------------------------------------------------------------')
        print('Параметры модели: ', np.round(self.prac_b_coef, 3))
        print('Дисперсия параметра модели: ', np.round(self.var_params_of_model,3))
        print('\nПроверка значимости оценок параметров критерием Стьдента')
        print('Наблюдаемые значения:', np.round(prac_student_values, 3))
        print('Число степеней свободы:', df_student)
        print('Критическое значение:', round(student_crit, 3))
        if student_test_success:
            # significant_coef is a boolean mask over the model coefficients.
            if np.logical_not(self.significant_coef).any():
                print('Есть незначимые коэффициенты.')
                print('Значимые коэффициенты:', np.round(self.significant_b_coef,3))
            else:
                print('Все коэффициенты значимы')
        else:
            print('Не удалось найти значение в таблице')
    def print_adequacy_check(self, fisher_test_success, fisher_test_result, fisher_crit):
        """Print the model-adequacy report based on Fisher's F test
        (output in Russian).

        :param fisher_test_success: True if a critical value was found in the table
        :param fisher_test_result: True if the model is adequate
        :param fisher_crit: tabulated Fisher critical value
        """
        prac_fisher_value = self.fisher_value()
        # Numerator df: points minus significant coefficients, adjusted when
        # an additional experiment was conducted.
        df_numerator = self.count_of_points - self.significant_coef.sum()\
            + self.additional_experiment_conducted
        df_denominator = self.count_of_points * (self.count_of_parallel_experiments - 1)
        print('--------------------------------------------------------------')
        print('Дисперсия адекватности:', round(self.adequacy_var,3))
        print('\nПроверка адекватности критерием Фишера')
        print('Отклик модели', np.round(self.model_response, 3))
        print('Наблюдаемое значения:', round(prac_fisher_value, 3))
        print('Число степеней свободы числ. и знам. :', df_numerator, df_denominator)
        print('Критическое значение:', round(fisher_crit,3))
        if fisher_test_success:
            print('Модель{} адекватна'.format('' if fisher_test_result else ' не'))
from bson.code import Code
from database import db
def run(args):
    """Map-reduce over the ``crosswords`` collection to count words by length.

    The JavaScript mapper scans each puzzle grid, measures every across and
    down word (a run of >= 2 open cells starting after a block/void or the
    grid edge) and emits a per-length histogram; the reducer sums the
    histograms. Results land in the ``wordcount`` output collection and are
    printed sorted by word length.

    :param args: unused CLI arguments (kept for the command dispatcher)
    """
    mapper = Code("""
        function() {
            var cells = {}
            this.grid.cells.forEach(function(cell) {
                cells[cell.x + "-" + cell.y] = {
                    "x": cell.x,
                    "y": cell.y,
                    "block": cell.block,
                    "void": cell.void,
                };
            });
            var width = parseInt(this.width);
            var height = parseInt(this.height);
            function iterate(x, y, dx, dy) {
                var cx = x;
                var cy = y;
                var length = 0;
                for (var d = 0; true; d++) {
                    var next = (cx + "-" + cy);
                    if (!(next in cells)) {
                        break;
                    }
                    if (cells[next].block || cells[next].void) {
                        break;
                    }
                    length += 1;
                    cx += dx;
                    cy += dy;
                }
                return length;
            }
            function across_word_length(x, y) {
                var prev = ((x - 1) + "-" + y);
                if (!(prev in cells) || cells[prev].block || cells[prev].void) {
                    var length = iterate(x, y, 1, 0);
                    if (length >= 2) {
                        return length;
                    }
                }
                return -1;
            }
            function down_word_length(x, y) {
                var prev = (x + "-" + (y - 1));
                if (!(prev in cells) || cells[prev].block || cells[prev].void) {
                    var length = iterate(x, y, 0, 1);
                    if (length >= 2) {
                        return length;
                    }
                }
                return -1;
            }
            var counts = {};
            for (var y = 0; y < height; y++) {
                for (var x = 0; x < width; x++) {
                    var a_length = across_word_length(x, y);
                    var d_length = down_word_length(x, y);
                    if (a_length > 1) {
                        if (a_length in counts) {
                            counts[a_length] += 1;
                        } else {
                            counts[a_length] = 1;
                        }
                    }
                    if (d_length > 1) {
                        if (d_length in counts) {
                            counts[d_length] += 1;
                        } else {
                            counts[d_length] = 1;
                        }
                    }
                }
            }
            emit("counts", counts);
        }
    """)
    reducer = Code("""
        function(key, values) {
            var counts = {};
            values.forEach(function(value) {
                for (var c in value) {
                    if (c in counts) {
                        counts[c] += value[c];
                    } else {
                        counts[c] = value[c];
                    }
                }
            });
            return { "counts": counts };
        }
    """)
    # Materialize the map-reduce output into the "wordcount" collection.
    result = db.crosswords.map_reduce(mapper, reducer, "wordcount")
    counts = None
    for d in result.find():
        if d['_id'] == "counts":
            counts = d['value']['counts']
    if counts is None:
        print "The total number of words by length could not be determined."
        return
    print "Total number of words (ordered by length)"
    # Map-reduce keys come back as strings; convert to ints for sorting.
    n_counts = {}
    for key, value in counts.iteritems():
        n_counts[int(key)] = int(value)
    for length in sorted(n_counts.keys()):
        print str(length) + ": " + str(n_counts[length])
|
from lib2d import res, quadtree, vec, context, bbox
from lib2d.utils import *
import euclid, physicsbody
import pygame, itertools
class PlatformerMixin(object):
    """
    Mixin that maps 3D bboxes onto the 2D collision plane used by
    platformer (side-view) games: rects are built from bbox indices
    1, 2 (position) and 4, 5 (extent) — the zy plane per PhysicsGroup's
    coordinate convention.
    """
    # accessing the bbox by index much faster than accessing by attribute
    def toRect(self, bbox):
        # Collision rect from (bbox[1], bbox[2]) with size (bbox[4], bbox[5]).
        return pygame.Rect((bbox[1], bbox[2], bbox[4], bbox[5]))
    def rectToBody(self, rect):
        # Inverse mapping: lift a 2D rect back into a static 3D body with
        # unit depth on the unused axis.
        newbbox = (0, rect.x, rect.y, 1, rect.width, rect.height)
        return physicsbody.Body3(newbbox, (0,0,0), (0,0,0), 0)
class AdventureMixin(object):
    """
    Mixin that maps 3D bboxes onto the 2D collision plane used by
    adventure (top-down) games: rects are built from bbox indices
    0, 1 (position) and 3, 4 (extent) — the xy plane per PhysicsGroup's
    coordinate convention.
    """
    # accessing the bbox by index much faster than accessing by attribute
    def toRect(self, bbox):
        # Collision rect from (bbox[0], bbox[1]) with size (bbox[3], bbox[4]).
        return pygame.Rect((bbox[0], bbox[1], bbox[3], bbox[4]))
    def rectToBody(self, rect):
        # Inverse mapping: lift a 2D rect back into a static 3D body with
        # zero height on the unused axis.
        newbbox = (rect.x, rect.y, 0, rect.width, rect.height, 0)
        return physicsbody.Body3(newbbox, (0,0,0), (0,0,0), 0)
class PhysicsGroup(context.Context):
    """
    object manages a list of physics bodies and moves them around to simulate
    simple physics. Currently, only gravity and simple movement is implemented
    without friction or collision handling.
    static bodies are simply called 'geometry' and handled slightly different
    from dynamic bodies.
    the dimensions of your objects are important! internally, collision
    detection against static bodies is handled by pygame rects, which cannot
    handle floats. this means that the smallest body in the game must be at
    least a 1x1x1 meter cube.
    For speed, only 2 axes are checked for collisions:
    using the adventure mixin, this will be the xy plane
    using the platformer mixin, this will be the zy plane
    the bboxes passed to geometry will be translated into the correct type
    a word on the coordinate system:
    coordinates are 'right handed'
    x axis moves toward viewer
    y axis move left right
    z axis is height
    """
    def __init__(self, scaling, timestep, gravity, bodies, geometry, precision=2):
        # Gravity acts on the z (height) axis only.
        self.scaling = scaling
        self.gravity = euclid.Vector3(0,0,gravity)
        self.bodies = bodies
        self.precision = precision
        # Bodies in `sleeping` are skipped by update() until woken.
        self.sleeping = []
        self.staticBodies = []
        [ self.scaleBody(b, scaling) for b in self.bodies ]
        # Static geometry is wrapped in bodies, scaled, and indexed in a
        # quadtree of 2D rects for fast collision queries.
        rects = []
        for bbox in geometry:
            body = physicsbody.Body3(bbox, (0,0,0), (0,0,0), 0)
            self.scaleBody(body, scaling)
            self.staticBodies.append(body)
            rects.append(self.toRect(body.bbox))
        self.geometry = quadtree.FastQuadTree(rects)
        self.setTimestep(timestep)
    def __iter__(self):
        # Iterate dynamic bodies first, then the static geometry bodies.
        return itertools.chain(self.bodies, self.staticBodies)
    def update(self, time):
        """Integrate one step: apply gravity, move each awake body per axis,
        bounce/stop on collisions, apply ground friction, and put bodies that
        have come to rest on the ground to sleep."""
        for body in (b for b in self.bodies if b not in self.sleeping):
            body.acc += self.gravity_delta
            body.vel += body.acc * self.timestep
            x, y, z = body.vel
            # Each axis is moved and resolved independently; on a blocked
            # move the velocity is either damped-reversed (bounce) or zeroed.
            if not x==0:
                if not self.moveBody(body, (x, 0, 0)):
                    if abs(body.vel.x) > .2:
                        body.acc.x = 0.0
                        body.vel.x = -body.vel.x * .2
                    else:
                        body.acc.x = 0.0
                        body.vel.x = 0.0
            if not y==0:
                if not self.moveBody(body, (0, y, 0)):
                    if abs(body.vel.y) > .2:
                        body.acc.y = 0.0
                        body.vel.y = -body.vel.y * .2
                    else:
                        body.acc.y = 0.0
                        body.vel.y = 0.0
            if z > 0:
                if not self.moveBody(body, (0, 0, z)):
                    if abs(body.vel.z) > 2.5:
                        body.acc.z = 0.0
                        body.vel.z = -body.vel.z * .2
                    else:
                        body.acc.z = 0.0
                        body.vel.z = 0.0
            elif z < 0:
                # Falling: clipping is handled inside moveBody/testCollision.
                self.moveBody(body, (0, 0, z))
            if body.bbox.z == 0:
                # On the ground: apply timestep-scaled friction.
                body.vel.x = body.vel.x * self.ground_friction
                body.vel.y = body.vel.y * self.ground_friction
            # A grounded body with (rounded) zero velocity goes to sleep.
            if (round(body.vel.x, 4) ==
                round(body.vel.y, 4) ==
                round(body.vel.z, 1) == 0.0) and body.bbox.z == 0:
                self.sleeping.append(body)
    def scaleBody(self, body, scale):
        body.bbox.scale(scale, scale, scale)
    def dynamicBodies(self):
        # Only the dynamic bodies, excluding static geometry.
        return iter(self.bodies)
    def wakeBody(self, body):
        # NOTE(review): bare except silently ignores any error, not just a
        # missing body (ValueError) — confirm this is intentional.
        try:
            self.sleeping.remove(body)
        except:
            pass
    def setTimestep(self, time):
        # Pre-compute per-step gravity and friction factors for update().
        self.timestep = time
        self.gravity_delta = self.gravity * time
        self.ground_friction = pow(.0001, self.timestep)
    def moveBody(self, body, (x, y, z), clip=True):
        """Attempt to move *body* by (x, y, z); returns False and reverts the
        move when blocked. Dynamic-vs-dynamic contacts are resolved by
        recursively pushing the other body."""
        body.bbox.move(x, y, z)
        # test for collision with level geometry
        if self.testCollision(body.bbox):
            if body.bbox[2] < -10:
                # Clamp deep falls below the floor instead of reverting z.
                body.bbox[2] = -10.0
                body.bbox.move(-x, -y, 0)
            else:
                body.bbox.move(-x, -y, -z)
            return False
        else:
            # test for collision with another object
            # must do a spatial hash or oct tree or something here [later]
            bbox = body.bbox
            for other in (b for b in self.bodies if b is not body):
                if bbox.collidebbox(other.bbox):
                    # Try to push the other body along; if it cannot move,
                    # revert our own move too.
                    if self.moveBody(other, (x, y, z)):
                        return True
                    else:
                        body.bbox.move(-x, -y, -z)
                        return False
            return True
    def testCollisionOther(self, body, bbox=None):
        # True if *bbox* (default: the body's own) overlaps any other body.
        if bbox is None:
            bbox = body.bbox
        for other in (b for b in self.bodies if b is not body):
            if bbox.collidebbox(other.bbox):
                return True
        return False
    def testCollision(self, bbox):
        # for adventure games
        # Anything below ground level collides; otherwise query the quadtree.
        if bbox[2] < 0:
            return True
        return bool(self.geometry.hit(self.toRect(bbox)))
class PlatformerPhysicsGroup(PhysicsGroup, PlatformerMixin):
    """Physics group for side-view games: collisions use the zy plane."""
    pass
class AdventurePhysicsGroup(PhysicsGroup, AdventureMixin):
    """Physics group for top-down games: collisions use the xy plane."""
    pass
|
from state import *
from accel import *
from indicators import *
from search import *
from bluepy.sensortag import *
import bluepy.btle as btle
import time, os
# Number of TI SensorTags the module pairs with (left, center, right).
NUM_TAGS = 3
# Data is stored on a USB stick under /media/ when present, with a local
# fallback directory on the Pi.
USB_PREFIX = '/media/'
LOCAL_PREFIX = '/home/pi/local-data/'
# Base wait between main-loop iterations (button polling / blink cadence).
DEFAULT_SPIN_TIME = 1 #sec
if __name__ == '__main__':
    # Patient-posture monitor: pair three SensorTags, locate them on the
    # patient, then loop reading accelerometers and updating the state
    # controller. Two button presses drive setup: connect, then calibrate.
    # Turn off these debugging messages
    btle.Debugging = False
    # Find the USB stick if available
    # NOTE(review): .next() is Python-2 iterator syntax; also only
    # IndexError is caught here (an empty /media/ raising StopIteration
    # would escape) — confirm.
    try:
        usb = os.walk(USB_PREFIX).next()[1][0]
        storage_prefix = USB_PREFIX + usb + '/pup-data/'
        print "USB stick found."
    except IndexError:
        print "No USB stick found."
        storage_prefix = None
    # Create the state controller
    state = ModuleStateController(LOCAL_PREFIX, storage_prefix)
    # Connection loop variables
    connected = False
    calibrated = False
    blinkToggle = False
    # Begin operating loop
    print "Waiting for button press to find tags."
    while True:
        # Check if the user has pushed the sync button.
        # If so and we have not connected yet, do the connection
        if checkButton() and not connected and not calibrated:
            print "Connecting tags..."
            clearAll()
            connected = False
            # Find the sensortags
            [addr1, addr2, addr3] = findSensorTags()
            # Connect the TI sensor tags
            tag1 = SensorTag(addr1)
            tag2 = SensorTag(addr2)
            tag3 = SensorTag(addr3)
            # Turn on the tag accelerometers
            tags = [tag1, tag2, tag3]
            for tag in tags:
                tag.accelerometer.enable()
            connected = True
            okayStatus()
            print "Tags connected."
            print "Waiting for button press to determine tag locations."
            print "Please apply tags to patient."
        # Now that we've found the tags, figure out which is on what part
        # Wait for user input to ensure tags are on patient
        if checkButton() and connected and not calibrated:
            print "Locating tags..."
            clearAll()
            [leftTag, centerTag, rightTag] = orderTags(tag1, tag2, tag3)
            calibrated = True
            setOkay()
            print "Tags located."
            print "Beginning monitoring loop:"
        # Read accel data if connected and locations identified
        if calibrated:
            try:
                leftAccl = leftTag.accelerometer.read()
                rightAccl = rightTag.accelerometer.read()
                centerAccl = centerTag.accelerometer.read()
                # Calculate posture state
                angle = calculateAngle(leftAccl, rightAccl)
                sleeping = calculateSleeping(centerAccl)
                print 'Body Angle', angle
                print 'Sleeping?', sleeping
                # Pass to state controller that will handle the rest
                state.updateState(angle, sleeping)
                # Wait to take the next reading
                # (SEC_PER_READING comes from a star import — presumably
                # the sampling period in seconds.)
                time.sleep(SEC_PER_READING - DEFAULT_SPIN_TIME)
            # In case the sensor tags disconnect/fail
            except btle.BTLEException:
                clearAll()
                errorStatus()
                triggerAlarm()
                connected = False
                calibrated = False
        # Flash this light to let user know they need to push the button
        if not calibrated:
            if blinkToggle:
                setOkay()
            else:
                clearOkay()
            blinkToggle = not blinkToggle
        # Default wait between each loop (checking for button or reading)
        time.sleep(DEFAULT_SPIN_TIME)
|
# Generated by Django 3.0.2 on 2021-06-06 21:11
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated migration (Django 3.0): makes the
    ``questionPaperAttended.quest`` foreign key optional (blank/null) with
    PROTECT on delete of the referenced ``assesment.questionPaper``."""

    dependencies = [
        ('assesment', '0001_initial'),
        ('user', '0005_userprofile_status'),
    ]

    operations = [
        migrations.AlterField(
            model_name='questionpaperattended',
            name='quest',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.PROTECT, to='assesment.questionPaper'),
        ),
    ]
|
import abc
class HttpRequest:
    """Duck-typed "interface": subclasses are expected to override request().

    Unlike an abc.ABC, nothing stops instantiation — the failure only
    surfaces when the unimplemented method is actually called.
    """

    def request(self):
        """Simulated abstract method; calling it raises NotImplementedError."""
        raise NotImplementedError
class HttpGet(HttpRequest):
    # Inherits request() unchanged, so instantiation succeeds and only a
    # call to request() raises NotImplementedError.
    ...
class File(abc.ABC):
    """Formal interface: parse_content() is a true abstractmethod."""

    @abc.abstractmethod
    def parse_content(self):
        # Abstract methods may still have a body; implementers can reuse it
        # via super().parse_content().
        return 'parsed content'
class PDFFile(File):
    # PDFFile does not override the abstract parse_content(), so the class
    # stays abstract: defining it is fine, but instantiating it raises
    # TypeError at runtime.
    ...
if __name__ == '__main__':
    # req_get = HttpGet()
    # req_get.request()
    # Demonstrates the abc behavior: PDFFile never implements the abstract
    # parse_content(), so this instantiation raises TypeError before the
    # print line is reached.
    pdf_file = PDFFile()
    print(pdf_file.parse_content())
import telnetlib
import time
import re
import random
import traceback
import tenacity
from baremetal.common import exceptions
import logging
from baremetal.common import locking as sw_lock, exceptions, utils, jsonobject, http
from baremetal.conductor import models
from oslo_config import cfg
from tooz import coordination
from baremetal.conductor import models
logger = logging.getLogger(__name__)
CONF = cfg.CONF
class CiscoSwitch(models.ModelBase):
    def __init__(self, username, password, host, port=23, timeout=10):
        """Create a telnet-managed Cisco switch handle.

        :param username: switch login name
        :param password: switch login password
        :param host: switch management IP/hostname
        :param port: telnet port (default 23)
        :param timeout: telnet connect timeout in seconds

        When ``sw_coordination.backend_url`` is configured, a tooz
        coordinator (e.g. zookeeper) provides a bounded pool of per-switch
        locks so concurrent configuration sessions are limited to
        ``max_connections``.
        """
        super(CiscoSwitch, self).__init__()
        self.host = host
        self.username = username
        self.password = password
        self.port = port
        self.timeout = timeout
        # Internal knobs for telnet connection retry behavior.
        self.sw_internal_cfg = {
            "sw_telnet_connect_timeout": 60,
            "sw_telnet_connect_interval": 10,
            "sw_max_connections": CONF.sw_coordination.max_connections
        }
        self.locker = None
        self.session_id = None
        if CONF.sw_coordination.backend_url:
            # Member id "switch-<host>" groups all sessions for this switch.
            self.locker = coordination.get_coordinator(
                CONF.sw_coordination.backend_url,
                ('switch-' + self.host).encode('ascii'))
            self.locker.start()
            self.session_id = hex(self.locker._coord.client_id[0])
            logger.debug("zookeeper client connection[session_id:%s] opened." % self.session_id)
        self.lock_kwargs = {
            'locks_pool_size': int(self.sw_internal_cfg['sw_max_connections']),
            'locks_prefix': self.host,
            'timeout': CONF.sw_coordination.acquire_lock_timeout}
    def __enter__(self):
        """Context-manager entry; the switch handle itself is the resource."""
        return self
    def __exit__(self, exc_type, exc_val, exc_tb):
        """Context-manager exit: stop the coordinator session if one was
        started (exceptions are not suppressed)."""
        if self.locker:
            self.locker.stop()
            logger.debug("zookeeper client connection[session_id:%s] closed." % self.session_id)
    def _get_connection(self):
        """
        This function hides the complexities of gracefully handling retrying
        failed connection attempts.

        :return: an open telnetlib.Telnet connection to the switch
        :raises exceptions.SwitchConnectionError: when all attempts fail
        """
        # EOFError/IndexError are the transient telnet failures worth retrying.
        retry_exc_types = (EOFError, IndexError)
        # Use tenacity to handle retrying.
        @tenacity.retry(
            # Log a message after each failed attempt.
            after=tenacity.after_log(logger, logging.DEBUG),
            # Reraise exceptions if our final attempt fails.
            reraise=True,
            # Retry on TELNET connection errors.
            retry=tenacity.retry_if_exception_type(retry_exc_types),
            # Stop after the configured timeout.
            stop=tenacity.stop_after_delay(
                int(self.sw_internal_cfg['sw_telnet_connect_timeout'])),
            # Wait for the configured interval between attempts.
            wait=tenacity.wait_fixed(
                int(self.sw_internal_cfg['sw_telnet_connect_interval'])),
        )
        def _create_connection():
            return telnetlib.Telnet(self.host, self.port, self.timeout)
        # First, create a connection.
        try:
            net_connect = _create_connection()
        except tenacity.RetryError as e:
            logger.error("Reached maximum telnet connection attempts, not retrying")
            raise exceptions.SwitchConnectionError(ip=self.host, error=e)
        except Exception as e:
            logger.error("Unexpected exception during telnet connection")
            logger.error(traceback.format_exc())
            raise exceptions.SwitchConnectionError(ip=self.host, error=e)
        # Return the established connection to the caller.
        return net_connect
def _execute(self, command):
logger.debug("command:%s" % command)
net_connect = None
result = ""
try:
with sw_lock.PoolLock(self.locker, **self.lock_kwargs):
net_connect = self._get_connection()
net_connect.read_until("login:")
net_connect.write((self.username + '\n').encode('utf-8'))
net_connect.read_until('Password:')
net_connect.write((self.password + '\n').encode('utf-8'))
for i in command:
net_connect.write((i + '\n').encode('utf-8'))
error_msg = net_connect.read_until("Invalid", timeout=2)
logger.debug("execute command :%s" % error_msg)
if "Invalid" in error_msg and "Policy-map is being referenced" in error_msg:
result = net_connect.read_very_eager()
logger.debug("execute command failed.error msg:%s" % result)
raise exceptions.ConfigSwitchError(command=command, error=result)
result = self.save_configuration(net_connect)
count = 0
while count < 60:
result += net_connect.read_very_eager()
if 'Copy complete.' in result:
logger.debug("config switch end..")
break
else:
count += 1
time.sleep(1)
finally:
logger.debug("session close.")
net_connect.close()
return result
def save_configuration(self, net_connect):
retry_kwargs = {'wait': tenacity.wait_random(min=2, max=6),
'reraise': False,
'stop': tenacity.stop_after_delay(30)}
@tenacity.retry(**retry_kwargs)
def _save():
try:
net_connect.write(("copy running-config startup-config" + '\n').encode('utf-8'))
error_msg = net_connect.read_until("Invalid", timeout=2)
logger.debug("execute command :%s" % error_msg)
if "Configuration update aborted" in error_msg:
result = net_connect.read_very_eager()
logger.debug("execute command failed.error msg:%s" % result)
raise exceptions.ConfigSwitchError(command="copy running-config startup-config", error=result)
except Exception:
raise
return error_msg
return _save()
def _execute_relative(self, command):
logger.debug("command:%s" % command)
net_connect = None
result = ""
try:
with sw_lock.PoolLock(self.locker, **self.lock_kwargs):
net_connect = self._get_connection()
net_connect.read_until("login:")
net_connect.write((self.username + '\n').encode('utf-8'))
net_connect.read_until('Password:')
net_connect.write((self.password + '\n').encode('utf-8'))
for i in command:
net_connect.write((i + '\n').encode('utf-8'))
result = net_connect.read_until("Invalid", timeout=2)
logger.debug("execute command :%s" % result)
finally:
logger.debug("session close.")
if "Invalid" in result:
result = net_connect.read_very_eager()
logger.debug("execute command failed.error msg:%s" % result)
net_connect.close()
raise exceptions.ConfigSwitchError(command=command, error=result)
else:
net_connect.close()
return result
def gen_vlan_string(self, vlans):
vlan_string = ""
for vlan in vlans:
if "-" in vlan:
vlan = vlan.replace("-", " to ")
vlan_string += str(vlan) + ","
return vlan_string[:-1]
def set_vlan(self, ports):
unset_vlan_cmd = self._unset_vlan(ports)
set_vlan_cmd = []
for port in ports:
vlan_string = self.gen_vlan_string(port.vlan_id)
if port.set_link_type == "trunk":
set_vlan_cmd += ["interface " + port.port_name,
"switchport",
"switchport mode trunk",
"switchport trunk allowed vlan %s" % vlan_string,
"exit"]
else:
set_vlan_cmd += ["interface " + port.port_name,
"switchport",
"switchport mode access",
"switchport access vlan %s" % vlan_string,
"exit"]
commands = unset_vlan_cmd + set_vlan_cmd + ["exit"]
logger.debug("set vlan command:%s" % commands)
return self._execute(commands)
def unset_vlan(self, ports):
cmds = self._unset_vlan(ports)
commands = cmds + ['exit']
logger.debug("unset vlan command:%s" % commands)
return self._execute(commands)
def _unset_vlan(self, ports):
commands = ["configure terminal"]
for port in ports:
if port.current_link_type == "trunk":
commands += ["interface " + port.port_name,
"switchport",
"no switchport trunk allowed vlan",
"no switchport mode",
'no switchport', 'exit']
else:
commands += ["interface " + port.port_name, "switchport", 'no switchport access vlan',
"no switchport mode", 'no switchport' ,'exit']
logger.debug("unset vlan command:%s" % commands)
return commands
def open_port(self, ports):
open_cmd = ["configure terminal"]
for port in ports:
open_cmd += ["interface " + port, "no shutdown", "exit"]
commands = open_cmd + ["exit"]
logger.debug("open ports command:%s" % commands)
return self._execute(commands)
def shutdown_port(self, ports):
shutdown_cmd = ["configure terminal"]
for port in ports:
shutdown_cmd += ["interface " + port, "shutdown", "exit"]
commands = shutdown_cmd + ["exit"]
logger.debug("close ports command:%s" % commands)
return self._execute(commands)
def create_limit_template(self, templates):
create_command = ["configure terminal"]
for template in templates:
cir = int(template.bandwidth * 1024)
qos_cmd = ["policy-map %s" % template.name, "class class-default",
"police cir %s kbps conform transmit violate drop" % cir,
"exit", "exit"]
create_command += qos_cmd
commands = create_command + ['exit']
logger.debug("create template command:%s" % commands)
return self._execute(commands)
def delete_limit_template(self, templates):
delete_command = ["configure terminal"]
for template in templates:
undo_cmd = 'no policy-map ' + template
delete_command += [undo_cmd]
commands = delete_command + ['exit']
logger.debug("delete template command:%s" % commands)
return self._execute(commands)
def set_limit(self, limit_infos):
inbound_cmd = ["configure terminal"]
outbound_cmd = []
for info in limit_infos:
template_name = info.template_name
inbound_cmd += ["interface " + info.inbound_port,
"service-policy input %s" % template_name, "exit"]
for port in info.outbound_ports:
#cir = int(info.bandwidth) * 1024
#cbs = min(524288, cir * 2)
cmd1 = "service-policy output %s" % template_name
outbound_cmd += ["interface " + port, cmd1, "exit"]
commands = inbound_cmd + outbound_cmd + ['exit']
logger.debug("set limit command:%s" % commands)
return self._execute(commands)
    def unset_limit(self, limit_infos):
        """Detach rate-limit policy-maps from the ports and commit.

        NOTE(review): this reads info.outbound_port (singular) while
        set_limit() iterates info.outbound_ports (plural). Confirm whether
        unset should also handle multiple outbound ports.
        """
        inbound_cmd = ["configure terminal"]
        outbound_cmd = []
        for info in limit_infos:
            template_name = info.template_name
            inbound_cmd += ["interface " + info.inbound_port, "no service-policy input %s" % template_name, "exit"]
            outbound_cmd += ["interface " + info.outbound_port, "no service-policy output %s" % template_name, "exit"]
        commands = inbound_cmd + outbound_cmd + ["exit"]
        logger.debug("unset limit command:%s" % commands)
        return self._execute(commands)
def init_dhclient_config(self, switch, clean_cmd_set=[]):
set_vlan_cmd = []
if len(switch.vlan_ids) != 1:
raise exceptions.ConfigInternalVlanError()
for port in switch.ports:
set_vlan_cmd += ["interface " + port,
"switchport",
"switchport mode access",
"switchport access vlan %s" % switch.vlan_ids[0],
"exit"]
init_dhclient_cmds = set_vlan_cmd + ['exit']
logger.debug("init dhclient ports command:%s" % init_dhclient_cmds)
return self._execute(clean_cmd_set + init_dhclient_cmds)
def init_all_config(self, switch, template_name, is_dhclient):
clean_cmd_set = self._clean_all_config(switch)
if is_dhclient:
return self.init_dhclient_config(switch, clean_cmd_set)
all_ports_cmd = []
# 1. create limit template
bandwidth = int(template_name.split('-')[-1])
cir = int(bandwidth * 1024)
create_template_cmd = ["policy-map %s" % template_name, "class class-default",
"police cir %s kbps conform transmit violate drop" % cir,
"exit", "exit"]
vlan_string = self.gen_vlan_string(switch.vlan_ids)
# 2. set vlan
for port in switch.ports:
set_vlan_cmd = []
set_vlan_cmd += ["interface " + port,
"switchport",
"switchport mode trunk",
"switchport trunk allowed vlan %s" % vlan_string
]
# 3. set limit
inbound_cmd = ["service-policy input %s" % template_name]
#cir = int(bandwidth) * 1024
#cbs = min(524288, cir * 2)
#outbound_cmd = ["qos lr cir %s kbps cbs %s kbytes outbound" % (cir, cbs)]
outbound_cmd = ["service-policy output %s" % template_name]
open_port_cmd = ["no shutdown", "exit"]
port_per_cmd = set_vlan_cmd + inbound_cmd + outbound_cmd + open_port_cmd
all_ports_cmd += port_per_cmd
init_cmd_set = create_template_cmd + all_ports_cmd + ['exit']
logger.debug("init config commands:%s" % init_cmd_set)
return self._execute(clean_cmd_set + init_cmd_set)
def _clean_all_config(self, switch, template_name=None):
all_ports_cmd = ["configure terminal"]
delete_limit_template = []
unset_limit_cmd = []
for port in switch.ports:
# 1. unset vlan
unset_vlan_cmd = ["interface " + port,
"switchport",
"no switchport access vlan",
"no switchport trunk allowed vlan",
"no switchport mode"
]
# 2. unset limit
if template_name:
unset_limit_cmd = ["no service-policy input %s" % template_name, "no service-policy output %s" % template_name]
# 3. unset shutdown
unset_shutdown_cmd = ["no shutdown", "no switchport", "exit"]
port_per_cmd = unset_vlan_cmd + unset_limit_cmd + unset_shutdown_cmd
all_ports_cmd += port_per_cmd
# 3. delete limit template
if template_name:
delete_limit_template = ["no policy-map %s" % template_name]
commands = all_ports_cmd + delete_limit_template
logger.debug("clean config commands:%s" % commands)
return commands
def clean_all_config(self, switch, template_name=None):
clean_cmd_set = self._clean_all_config(switch, template_name) + ['exit']
return self._execute(clean_cmd_set)
def get_relations(self, special_vlan=None, special_mac=[]):
relations = []
pattern = re.compile(r'\S+')
if len(special_mac) > 0:
for item in special_mac:
datas = self._execute_relative(["show mac address-table address %s | grep Eth" % item])
for line in datas.split("\n")[24:-1]:
# if line[0] == "*":
data = pattern.findall(line)
mac = ":".join(i[0:2] + ":" + i[2:4] for i in data[2].split("."))
relations.append({"mac": mac, "port": data[-1]})
if special_vlan:
datas = self._execute_relative(["show mac address-table vlan %s | grep Eth" % special_vlan])
for line in datas.split("\n")[24:-1]:
# if line[0] == "*":
data = pattern.findall(line)
mac = ":".join(i[0:2] + ":" + i[2:4] for i in data[2].split("."))
relations.append({"mac": mac, "port": data[-1]})
return relations
class SwitchPlugin(object):
    """HTTP-facing handlers: each parses a JSON request body, drives a
    CiscoSwitch session for the requested operation, and returns a JSON
    response. Any client failure is re-raised as SwitchTaskError."""

    @utils.replyerror
    def set_vlan(self, req):
        """Apply VLAN configuration to the ports described in the request."""
        body = jsonobject.loads(req[http.REQUEST_BODY])
        rsp = models.SetSwitchResponse()
        with CiscoSwitch(body.username, body.password, body.host) as client:
            try:
                result = client.set_vlan(body.ports)
            except Exception as ex:
                raise exceptions.SwitchTaskError(error=str(ex))
            # 'Copy complete.' means the switch saved its configuration.
            if "Copy complete." in result:
                for port in body.ports:
                    logger.debug("set vlan %s for port %s successfully."
                                 % (port.vlan_id, port.port_name))
        return jsonobject.dumps(rsp)

    @utils.replyerror
    def unset_vlan(self, req):
        """Remove VLAN configuration from the ports in the request."""
        body = jsonobject.loads(req[http.REQUEST_BODY])
        rsp = models.SetSwitchResponse()
        with CiscoSwitch(body.username, body.password, body.host) as client:
            try:
                result = client.unset_vlan(body.ports)
            except Exception as ex:
                raise exceptions.SwitchTaskError(error=str(ex))
            if "Copy complete." in result:
                for port in body.ports:
                    # NOTE(review): logs 'Eth-Trunk' although this plugin
                    # drives a Cisco switch — confirm the label is intended.
                    logger.debug("unset vlan for port %s successfully."
                                 % ("Eth-Trunk %s" % port))
        return jsonobject.dumps(rsp)

    @utils.replyerror
    def open_port(self, req):
        """Administratively enable the ports in the request."""
        body = jsonobject.loads(req[http.REQUEST_BODY])
        rsp = models.AgentResponse()
        with CiscoSwitch(body.username, body.password, body.host) as client:
            try:
                result = client.open_port(body.ports)
            except Exception as ex:
                raise exceptions.SwitchTaskError(error=str(ex))
            if "Copy complete." in result:
                for port in body.ports:
                    logger.debug("open port %s successfully." % port)
        return jsonobject.dumps(rsp)

    @utils.replyerror
    def close_port(self, req):
        """Administratively shut down the ports in the request."""
        body = jsonobject.loads(req[http.REQUEST_BODY])
        rsp = models.AgentResponse()
        with CiscoSwitch(body.username, body.password, body.host) as client:
            try:
                result = client.shutdown_port(body.ports)
            except Exception as ex:
                raise exceptions.SwitchTaskError(error=str(ex))
            if "Copy complete." in result:
                for port in body.ports:
                    logger.debug("close port %s successfully." % port)
        return jsonobject.dumps(rsp)

    @utils.replyerror
    def set_limit(self, req):
        """Attach rate-limit policies per the request's limit_infos."""
        body = jsonobject.loads(req[http.REQUEST_BODY])
        rsp = models.SetSwitchResponse()
        with CiscoSwitch(body.username, body.password, body.host) as client:
            try:
                result = client.set_limit(body.limit_infos)
            except Exception as ex:
                raise exceptions.SwitchTaskError(error=str(ex))
            if "Copy complete." in result:
                for info in body.limit_infos:
                    logger.debug("set limit for port %s successfully." % info.inbound_port)
        return jsonobject.dumps(rsp)

    @utils.replyerror
    def unset_limit(self, req):
        """Detach rate-limit policies per the request's limit_infos."""
        body = jsonobject.loads(req[http.REQUEST_BODY])
        rsp = models.SetSwitchResponse()
        with CiscoSwitch(body.username, body.password, body.host) as client:
            try:
                result = client.unset_limit(body.limit_infos)
            except Exception as ex:
                raise exceptions.SwitchTaskError(error=str(ex))
            if "Copy complete." in result:
                for info in body.limit_infos:
                    logger.debug("unset limit for port %s successfully." % info.inbound_port)
        return jsonobject.dumps(rsp)

    @utils.replyerror
    def create_limit_template(self, req):
        """Create the policing templates named in the request."""
        body = jsonobject.loads(req[http.REQUEST_BODY])
        rsp = models.SetSwitchResponse()
        with CiscoSwitch(body.username, body.password, body.host) as client:
            try:
                result = client.create_limit_template(body.templates)
            except Exception as ex:
                raise exceptions.SwitchTaskError(error=str(ex))
            if "Copy complete." in result:
                for template in body.templates:
                    logger.debug("create limit template %s successfully."
                                 % template.name)
        return jsonobject.dumps(rsp)

    @utils.replyerror
    def delete_limit_template(self, req):
        """Delete the policing templates named in the request."""
        body = jsonobject.loads(req[http.REQUEST_BODY])
        rsp = models.SetSwitchResponse()
        with CiscoSwitch(body.username, body.password, body.host) as client:
            try:
                result = client.delete_limit_template(body.templates)
            except Exception as ex:
                raise exceptions.SwitchTaskError(error=str(ex))
            if "Copy complete." in result:
                for template in body.templates:
                    logger.debug("delete limit template %s successfully."
                                 % template)
        return jsonobject.dumps(rsp)

    @utils.replyerror
    def init_all_config(self, req):
        """Wipe and re-provision every switch listed in the request."""
        body = jsonobject.loads(req[http.REQUEST_BODY])
        rsp = models.AgentResponse()
        for switch in body.switches:
            with CiscoSwitch(switch.username, switch.password, switch.host) as client:
                try:
                    # Random jitter spreads concurrent sessions apart.
                    time.sleep(random.randint(1, 3))
                    result = client.init_all_config(switch, body.template_name, body.is_dhclient)
                except Exception as ex:
                    raise exceptions.SwitchTaskError(error=str(ex))
                if "Copy complete." in result:
                    logger.debug("init switch %s port %s config successfully." %
                                 (switch.host, switch.ports))
                else:
                    logger.error("init switch %s port %s config result: %s." %
                                 (switch.host, switch.ports, result))
        return jsonobject.dumps(rsp)

    @utils.replyerror
    def clean_all_config(self, req):
        """Strip all managed configuration from every switch in the request."""
        body = jsonobject.loads(req[http.REQUEST_BODY])
        rsp = models.AgentResponse()
        for switch in body.switches:
            with CiscoSwitch(switch.username, switch.password, switch.host) as client:
                try:
                    # Random jitter spreads concurrent sessions apart.
                    time.sleep(random.randint(1, 3))
                    result = client.clean_all_config(switch, body.template_name)
                except Exception as ex:
                    raise exceptions.SwitchTaskError(error=str(ex))
                if "Copy complete." in result:
                    logger.debug("clean switch %s port %s config successfully." %
                                 (switch.host, switch.ports))
        return jsonobject.dumps(rsp)

    @utils.replyerror
    def get_relations(self, req):
        """Return MAC-to-port relations for the requested VLAN and/or MACs."""
        body = jsonobject.loads(req[http.REQUEST_BODY])
        rsp = models.GetSwitchRelationsResp()
        relations = []
        with CiscoSwitch(body.username, body.password, body.host) as client:
            vlan = int(body.vlan) if body.vlan else None
            macs = body.macs if body.macs else []
            try:
                relations = client.get_relations(special_vlan=vlan, special_mac=macs)
            except Exception as ex:
                raise exceptions.SwitchTaskError(error=str(ex))
        rsp.relations = relations
        return jsonobject.dumps(rsp)
|
#Instituto Tecnologico de Costa Rica
#Escuela de Ing. en Computacion
#Tarea 2 - Requerimientos de Software
#Autores: * Jeison Esquivel Samudio (2013018688)
# * David Valverde Garro (2016034774)
# Comentarios: Adjunto esta una version mejorada del diagrama de actividad
# que incorpora las ideas de los diagramas de ambos integrantes.
#SISTEMA FACTURA ASODEC
from domain.Funciones import *
# -- Data --
idFactura = 0  # running invoice id
factura = Factura(idFactura)
# Fake in-memory "database" of inventory items.
inventario = inicializarBDFalsa_inv()
# NOTE(review): `global` is a no-op at module level, and `inventario` is
# initialized a second time right below — looks like leftover duplication;
# confirm and remove one of the two initializations.
global inventario
inventario=inicializarBDFalsa_inv()
global clientes
clientes = inicializarBDFalsa_cl()
# Discount rate applied during payment.
descuento = 0.4
global facturas
facturas=[]
# Main loop: build, total, pay and apply each invoice until the user answers "n".
while True:
    factura.cabecera = crearCabecera()
    factura = crearDetalle(factura, inventario)
    calcularTotalFactura(factura)
    iniciarPago(clientes, factura, descuento)
    actualizarInventario(factura, inventario)
    print(factura.cabecera)
    print(factura.getDetalleFactura())
    continuar = input("¿Desea crear otra factura (s/n)?\n")
    if continuar == "n":
        break
    idFactura += 1
    factura = Factura(idFactura)
#!/usr/bin/env ambari-python-wrap
"""
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
class BigInsights42StackAdvisor(BigInsights41StackAdvisor):
    """Stack advisor overrides for the BigInsights 4.2 stack: adjusts the 4.1
    recommendations for YARN, Kafka, Solr, Hive and Ambari Metrics."""

    def getServiceConfigurationRecommenderDict(self):
        """Extend the parent's service -> recommender mapping with 4.2 overrides."""
        parentRecommendConfDict = super(BigInsights42StackAdvisor, self).getServiceConfigurationRecommenderDict()
        childRecommendConfDict = {
            "YARN": self.recommendYARNConfigurations,
            "KAFKA": self.recommendKAFKAConfigurations,
            "SOLR": self.recommendSolrConfigurations,
            "HIVE": self.recommendHIVEConfigurations,
            "AMBARI_METRICS": self.recommendAmsConfigurations
        }
        parentRecommendConfDict.update(childRecommendConfDict)
        return parentRecommendConfDict

    def recommendYARNConfigurations(self, configurations, clusterData, services, hosts):
        """Raise the scheduler allocation maxima when the host-derived
        NodeManager memory exceeds the static yarn-site value."""
        super(BigInsights42StackAdvisor, self).recommendYARNConfigurations(configurations, clusterData, services, hosts)
        # NOTE(review): putYarnProperty is assigned but never used here.
        putYarnProperty = self.putProperty(configurations, "yarn-site", services)
        # Property Attributes
        putYarnPropertyAttribute = self.putPropertyAttribute(configurations, "yarn-site")
        nodeManagerHost = self.getHostWithComponent("YARN", "NODEMANAGER", services, hosts)
        if (nodeManagerHost is not None):
            if ("yarn-site" in services["configurations"]):
                if ("yarn.nodemanager.resource.memory-mb" in services["configurations"]["yarn-site"]["properties"]):
                    # Compare the values derived from the hosts vs the yarn-site.xml static values; if the derived values are greater than the static,
                    # override the static values with the derived to prevent the warnings that will appear in Ambari.
                    yarnPropertiesNmMemMb = int(configurations["yarn-site"]["properties"]["yarn.nodemanager.resource.memory-mb"])
                    yarnConfigurationsNmMemMb = int(services["configurations"]["yarn-site"]["properties"]["yarn.nodemanager.resource.memory-mb"])
                    if yarnPropertiesNmMemMb > yarnConfigurationsNmMemMb:
                        putYarnPropertyAttribute('yarn.scheduler.maximum-allocation-mb', 'maximum', configurations["yarn-site"]["properties"]["yarn.nodemanager.resource.memory-mb"])
                        putYarnPropertyAttribute('yarn.scheduler.minimum-allocation-mb', 'maximum', configurations["yarn-site"]["properties"]["yarn.nodemanager.resource.memory-mb"])

    def recommendKAFKAConfigurations(self, configurations, clusterData, services, hosts):
        """Drop the legacy 'port' property and force SASL_PLAINTEXT when
        security is enabled."""
        super(BigInsights42StackAdvisor, self).recommendKAFKAConfigurations(configurations, clusterData, services, hosts)
        putKafkaBrokerAttributes = self.putPropertyAttribute(configurations, "kafka-broker")
        putKafkaBrokerAttributes('port','delete','true')
        security_enabled = self.isSecurityEnabled(services)
        putKafkaBrokerProperty = self.putProperty(configurations, "kafka-broker", services)
        if (security_enabled):
            putKafkaBrokerProperty("security.inter.broker.protocol", "SASL_PLAINTEXT")

    def recommendSolrConfigurations(self, configurations, clusterData, services, hosts):
        """Remove the obsolete solr_lib_dir property from solr-env."""
        # NOTE(review): putSolrEnvProperty is assigned but never used here.
        putSolrEnvProperty = self.putProperty(configurations, "solr-env", services)
        putSolrAttributes = self.putPropertyAttribute(configurations, "solr-env")
        putSolrAttributes('solr_lib_dir','delete','true')

    def recommendHIVEConfigurations(self, configurations, clusterData, services, hosts):
        """Pin the bytes-per-reducer threshold for Hive."""
        super(BigInsights42StackAdvisor, self).recommendHIVEConfigurations(configurations, clusterData, services, hosts)
        putHiveSiteProperty = self.putProperty(configurations, "hive-site", services)
        putHiveSiteProperty("hive.exec.reducers.bytes.per.reducer", "256000000")

    def recommendAmsConfigurations(self, configurations, clusterData, services, hosts):
        """Size AMS collector and HBase heaps based on cluster size and the
        embedded vs distributed operation mode."""
        super(BigInsights42StackAdvisor, self).recommendAmsConfigurations(configurations, clusterData, services, hosts)
        putAmsEnvProperty = self.putProperty(configurations, "ams-env", services)
        putHbaseEnvProperty = self.putProperty(configurations, "ams-hbase-env", services)
        operatingMode = "embedded"
        if "ams-site" in services["configurations"]:
            if "timeline.metrics.service.operation.mode" in services["configurations"]["ams-site"]["properties"]:
                operatingMode = services["configurations"]["ams-site"]["properties"]["timeline.metrics.service.operation.mode"]
        hostsCount = 0
        if hosts and "items" in hosts:
            hostsCount = len(hosts["items"])
        collector_heapsize, hbase_heapsize, total_sinks_count = self.getAmsMemoryRecommendation(services, hosts)
        # Enforce a 2 GB floor on the collector heap.
        collector_heapsize = max(collector_heapsize, 2048)
        putAmsEnvProperty("metrics_collector_heapsize", collector_heapsize)
        # Distributed mode heap size
        if operatingMode == "distributed":
            hbase_heapsize = max(hbase_heapsize, 2048)
            putHbaseEnvProperty("hbase_master_heapsize", "2048")
            putHbaseEnvProperty("hbase_master_xmn_size", "409") #20% of 2048 heap size
            putHbaseEnvProperty("hbase_regionserver_heapsize", hbase_heapsize)
            # round_to_n is defined elsewhere in the advisor framework.
            putHbaseEnvProperty("regionserver_xmn_size", round_to_n(0.15*hbase_heapsize,64))
        else:
            # Embedded mode heap size : master + regionserver
            hbase_rs_heapsize = 768
            if hostsCount >= 6:
                hbase_heapsize = max(hbase_heapsize, 2048)
            putHbaseEnvProperty("hbase_regionserver_heapsize", hbase_rs_heapsize)
            putHbaseEnvProperty("hbase_master_heapsize", hbase_heapsize)
            putHbaseEnvProperty("hbase_master_xmn_size", round_to_n(0.15*(hbase_heapsize+hbase_rs_heapsize),64))
|
import os
import requests
import discord
from dotenv import load_dotenv
from discord.ext import commands
def filename(url, iden):
    """Build a filesystem-safe cache name: <url stripped of '.', '/', ':'>
    followed by <iden>."""
    return ''.join(c for c in url if c not in './:') + iden
# Load the persisted 'user!site1,site2,...' records into a dict keyed by
# the Discord user id.
usersitelist = {}
with open("sites.txt") as sites_file:
    for raw in sites_file:
        raw = raw.strip()
        # Split on the first '!' (a line without one maps the whole line
        # to [''], matching the original char-by-char parser).
        name, _, rest = raw.partition('!')
        usersitelist[name] = rest.split(',')

load_dotenv()
token = os.getenv('DISCORD_TOKEN')
bot = commands.Bot(command_prefix='!')
@bot.event
async def on_ready():
    # Fired once the bot has connected to Discord and is ready for events.
    print(f'{bot.user.name} has connected to Discord!')
@bot.command(name='ping')
async def ping(ctx):
    """Liveness check: reply with a greeting."""
    await ctx.send('hello!')
@bot.command(name='rmvsite')
async def rmvsite(ctx, url):
    """Stop tracking <url> for the invoking user.

    Deletes the cached page snapshot, removes the URL from the user's list
    and rewrites sites.txt.
    """
    found = False
    user = str(ctx.author.id)
    if user in usersitelist:
        for site in usersitelist[user]:
            # Accept the stored form with or without a trailing slash.
            if site == url or site + '/' == url:
                found = True
                sitetodel = site
        if found:
            # BUG FIX: the snapshot name was built with the undefined name
            # `iden` (NameError at runtime); it must use the user id, the
            # same key addsite() used when creating the file.
            os.remove(filename(url, user) + '.txt')
            usersitelist[user].remove(sitetodel)
            # Rewrite the whole persistence file in 'user!a,b,c' format.
            with open("sites.txt", 'w') as file:
                filetext = ''
                for person in usersitelist:
                    personline = person + '!'
                    for website in usersitelist[person]:
                        personline += website + ','
                    personline = personline[:len(personline)-1]
                    filetext += personline + '\n'
                file.write(filetext)
            response = 'site deleted!'
        else:
            response = 'site not found in list :('
    else:
        response = 'you dont have any sites yet, silly!'
    await ctx.send(response)
@bot.command(name='addsite')
async def addsite(ctx, url):
    """Start tracking <url> for the invoking user and persist the list."""
    user = str(ctx.author.id)
    already = False
    if user in usersitelist:
        if url in usersitelist[user]:
            message = "you're already tracking this one!"
            already = True
        else:
            usersitelist[user].append(url)
    else:
        usersitelist[user] = [url]
    if not already:
        # Rewrite the whole persistence file in 'user!a,b,c' format.
        with open("sites.txt", 'w') as fh:
            filetext = ''
            for person in usersitelist:
                if usersitelist[person]:
                    row = person + '!' + ','.join(usersitelist[person])
                else:
                    # Matches the original trailing-character strip: an empty
                    # list leaves just the bare user id.
                    row = person
                filetext += row + '\n'
            fh.write(filetext)
        # Create an empty snapshot file for the new site.
        open(filename(url, user) + '.txt', 'a').close()
        message = 'site added to list!'
    await ctx.send(message)
@bot.command(name='siteupdate')
@commands.cooldown(1, 300, commands.BucketType.user)
async def siteupdate(ctx):
    # Re-fetch every tracked site, diff it against the cached snapshot,
    # report which ones changed, and refresh the snapshots. The cooldown
    # decorator rate-limits this to one run per user per 5 minutes.
    response = ''
    user = str(ctx.author.id)
    if user in usersitelist and usersitelist[user] != []:
        for site in usersitelist[user]:
            with open(filename(site,user) + ".txt") as file:
                oldsite = file.read()
            newsite = requests.get(site).text
            diff = False
            if len(oldsite) != len(newsite):
                diff = True
            else:
                for i in range(len(oldsite)):
                    if oldsite[i] != newsite[i]:
                        # NOTE(review): only flags a change when the NEW
                        # character is non-whitespace, so a non-space char
                        # replaced by whitespace is ignored — confirm this
                        # whitespace-tolerant diff is intended.
                        if newsite[i] not in "\n\r\t ":
                            diff = True
            if diff:
                response += site + " updated!"
            else:
                response += 'No updates to ' + site
            # Refresh the snapshot regardless of whether it changed.
            with open(filename(site,user) + ".txt", 'w') as file:
                file.write(newsite)
    else:
        response = 'You dont have any sites you are tracking yet!'
    await ctx.send(response)
@siteupdate.error
async def siteupdate_error(ctx, error):
    """Translate the cooldown error into a friendly chat message."""
    if not isinstance(error, commands.CommandOnCooldown):
        return
    await ctx.send('Try again in a bit, you can only update once every five minutes')
# Start the Discord event loop; blocks until the process exits.
bot.run(token)
|
#!/usr/bin/env python
import sys
import rospy
import rospkg
import cv2 as cv
import numpy as np
import constants as CONST
from sensor_msgs.msg import CompressedImage
# Shared mutable state for the HSV range-tuning tool; the ROS image callback
# and the OpenCV mouse callback update these.
img = None            # latest downscaled BGR frame from the camera topic
hsv = None            # HSV conversion of `img`
wait = False          # when True, image_callback drops incoming frames (pause)
click = False         # set by the mouse callback on left click
number = None
is_mask = False
sub_sampling = 0.5    # frame downscale factor applied on receipt
camera_position = None
image_width, image_height = None, None
screen_width, screen_height = None, None
pixel = {}
pixel['x'], pixel['y'] = -1, -1   # last left-clicked pixel coordinates
class window:
    """Manages the per-color preview windows of the HSV tuning tool: window
    placement, undo/redo stacks of (lower, upper) HSV ranges, and persistence
    of the chosen ranges to ROS parameters and a YAML file."""
    # NOTE(review): `global` has no effect at class scope; module globals are
    # visible from methods regardless. Kept as-is.
    global screen_width, screen_height, hsv, number

    def __init__(self):
        # Edge length (px) of each preview window, plus the tiling cursor for
        # placing successive windows down the screen.
        self.size = 250
        self.x = screen_width / 3 + 10  # NOTE: Python-2-era integer division
        self.y = 20
        # Per-window histories of HSV bounds; *_tmp hold the redo stacks.
        self.lower = {}
        self.upper = {}
        self.lower_tmp = {}
        self.upper_tmp = {}
        self.select = {}
        self.path = rospkg.RosPack().get_path('object_detection')

    def create(self, window_name):
        """Create one resizable preview window per name and seed its range
        history from the saved ROS parameters."""
        for name in window_name:
            cv.namedWindow(name, flags=cv.WINDOW_NORMAL)
            cv.moveWindow(name, self.x + self.x / 5, self.y + self.y / 5)
            cv.resizeWindow(name, self.size, self.size)
            self.update_position()
            self.create_range(name)

    def update_position(self):
        """Advance the tiling cursor; wrap to a new column at the screen edge."""
        self.y += self.size
        if self.y + self.size >= screen_height:
            self.x += self.size
            self.y = 20

    def create_range(self, name):
        """Initialize the undo/redo stacks for <name> from saved parameters."""
        lower_param, upper_param = self.get_param(name)
        self.lower[name] = [lower_param]
        self.upper[name] = [upper_param]
        self.lower_tmp[name] = []
        self.upper_tmp[name] = []
        self.select[name] = False

    def push_range(self, name, lower, upper):
        """Push a new (lower, upper) HSV pair onto <name>'s history."""
        self.lower[name].append(lower)
        self.upper[name].append(upper)

    def get_range(self, name):
        """Return the most recent (lower, upper) pair for <name>."""
        return self.lower[name][-1], self.upper[name][-1]

    def undo_range(self, name):
        """Move the latest range onto the redo stack and restore the trackbars.

        NOTE(review): the guard is len > 0, so the seed range can be popped
        too, after which self.lower[name][-1] raises IndexError — confirm
        whether the guard should be len > 1.
        """
        if len(self.lower[name]) > 0:
            self.lower_tmp[name].append(self.lower[name][-1])
            self.upper_tmp[name].append(self.upper[name][-1])
            self.lower[name].pop()
            self.upper[name].pop()
            set_trackbar(self.lower[name][-1], self.upper[name][-1])
            print_result('UNDO')
        else:
            print_result('Cannot Undo')

    def redo_range(self, name):
        """Re-apply the most recently undone range, if any."""
        if len(self.lower_tmp[name]) > 0:
            self.lower[name].append(self.lower_tmp[name][-1])
            self.upper[name].append(self.upper_tmp[name][-1])
            self.lower_tmp[name].pop()
            self.upper_tmp[name].pop()
            set_trackbar(self.lower[name][-1], self.upper[name][-1])
            print_result('REDO')
        else:
            print_result('Cannot Redo')

    def reset_range(self, name):
        """Push the inverted 'empty' range (lower above upper) so the next
        clicks re-seed it; note this appends rather than clearing history."""
        self.lower[name].append([179, 255, 255])
        self.upper[name].append([0, 0, 0])
        set_trackbar(self.lower[name][-1], self.upper[name][-1])
        print_result('RESET')

    def show_image(self, window_name):
        """Render each window's current in-range mask of the live HSV frame."""
        for name in window_name:
            result = cv.inRange(hsv, np.array(self.lower[name][-1], np.uint8),
                                np.array(self.upper[name][-1], np.uint8))
            cv.imshow(name, result)

    def range_str2list(self, str):
        # 'h,s,v' -> [h, s, v]; NOTE(review): parameter shadows builtin `str`.
        str = str.split(',')
        return [int(str[0]), int(str[1]), int(str[2])]

    def range_list2str(self, list):
        # [h, s, v] -> 'h,s,v'; NOTE(review): parameter shadows builtin `list`.
        seq = (str(list[0]), str(list[1]), str(list[2]))
        ch_join = ','
        return ch_join.join(seq)

    def get_param(self, name):
        """Read <name>'s saved lower/upper HSV bounds from the ROS parameter
        server, defaulting to the inverted 'empty' range."""
        self.param_lower = rospy.get_param(
            'object_detection/object_color_range/' + name +"/lower", '179,255,255')
        self.param_upper = rospy.get_param(
            'object_detection/object_color_range/' + name +"/upper", '0,0,0')
        self.param_lower = self.range_str2list(self.param_lower)
        self.param_upper = self.range_str2list(self.param_upper)
        return self.param_lower, self.param_upper

    def save(self):
        """Write every color's current range (except 'mask') to the ROS
        parameter server and to the package's YAML params file."""
        for name in self.lower:
            if(name == 'mask'):
                continue
            rospy.set_param(
                'object_detection/object_color_range/' + name +"/lower", self.range_list2str(self.lower[name][-1]))
            rospy.set_param(
                'object_detection/object_color_range/' + name +"/upper", self.range_list2str(self.upper[name][-1]))
        f = open(self.path + '/params/object_color_range.yaml', 'w')
        x = self.genyaml()
        f.write(x)
        f.close()
        print_result('save')

    def genyaml(self):
        """Serialize all ranges (except 'mask') into the YAML file body."""
        tmp = "object_color_range:\n"
        for name in self.lower:
            if(name == 'mask'):
                continue
            tmp += "  " + name + ": " + "\n" + "    " + "upper: '" + self.range_list2str(self.upper[name][-1]) + "'\n" +\
                "    " + "lower: '" + self.range_list2str(self.lower[name][-1]) + "'\n\n"
        return tmp
def nothing(what):
    """No-op callback required by cv.createTrackbar."""
    return None
def image_callback(msg):
    """ROS subscriber callback: decode the compressed frame, downscale it and
    refresh the global BGR and HSV images.

    :param msg: sensor_msgs/CompressedImage whose .data holds encoded bytes.
    """
    global img, wait, hsv, image_width, image_height, sub_sampling
    if wait:
        # Paused via the UI: drop frames so the display stays frozen.
        return
    # np.frombuffer replaces the deprecated np.fromstring for raw byte input.
    arr = np.frombuffer(msg.data, np.uint8)
    img_data = cv.resize(cv.imdecode(arr, 1), (0, 0),
                         fx=sub_sampling, fy=sub_sampling)
    img = img_data
    hsv = cv.cvtColor(img, cv.COLOR_BGR2HSV)
def draw_circle(event, x, y, flags, param):
    """OpenCV mouse callback: remember the left-clicked pixel for picking."""
    global pixel, click
    if event != cv.EVENT_LBUTTONDOWN:
        return
    click = True
    pixel['x'], pixel['y'] = x, y
def has_color(window_name, k):
    """Map a pressed key to the window whose name starts with that letter.

    'm' is reserved (mask window) and never matches. Returns (name, True)
    on a match, otherwise (None, False).
    """
    for candidate in window_name:
        if k == ord(candidate[0]) and k != ord('m'):
            return candidate, True
    return None, False
def set_trackbar(lower, upper):
    """Push HSV bounds ([h, s, v] each) into the 'image' window trackbars."""
    for bar, value in zip(('Hmin', 'Smin', 'Vmin'), lower):
        cv.setTrackbarPos(bar, 'image', value)
    for bar, value in zip(('Hmax', 'Smax', 'Vmax'), upper):
        cv.setTrackbarPos(bar, 'image', value)
def get_trackbar():
    """Read the current HSV bounds from the 'image' window trackbars.

    Returns (lower, upper) as two [h, s, v] lists.
    """
    lower = [cv.getTrackbarPos(bar, 'image') for bar in ('Hmin', 'Smin', 'Vmin')]
    upper = [cv.getTrackbarPos(bar, 'image') for bar in ('Hmax', 'Smax', 'Vmax')]
    return lower, upper
def compare_range(l, u, l1, u1):
    """Return True when the two (lower, upper) range pairs differ."""
    return (l, u) != (l1, u1)
def print_result(msg):
    """Print <msg> framed by arrow decorations.

    BUG FIX: the original was `print('<------------ ') + msg + (' ------------>')`,
    which printed only the prefix and then raised TypeError (None + str).
    """
    print('<------------ ' + msg + ' ------------>')
def select_color():
    """Main interactive loop: pick per-object HSV color ranges from the live image.

    Keys:
      p         - pause/resume the incoming image stream
      <letter>  - first letter of a mission name: commit the working mask
                  range to that object, or load its stored range for editing
      z / x / c - undo / redo / reset the working 'mask' range history
      s / q     - save all ranges / quit
    Left-clicking a pixel while editing the mask widens the range to
    include that pixel's HSV value.
    """
    global pixel, img, wait, hsv, click, is_mask, image_height, image_width, screen_height, screen_width
    # 'mask' is the working window; the rest come from the mission list.
    window_name = ['mask'] + CONST.MISSION_LIST
    cv.namedWindow('image_bgr', flags=cv.WINDOW_NORMAL)
    cv.moveWindow('image_bgr', 400, 400)
    # NOTE(review): `screen_width / 3` is integer division only on Python 2
    # (this script uses raw_input, so that matches); on Python 3 these become
    # floats and cv.resizeWindow would reject them — confirm before porting.
    cv.resizeWindow('image_bgr', (screen_width / 3) +
                    30, (screen_height / 3) + 30)
    cv.namedWindow('image', flags=cv.WINDOW_NORMAL)
    cv.moveWindow('image', 20, 20)
    cv.resizeWindow('image', (screen_width / 3), screen_height)
    # Six trackbars for the HSV lower/upper bounds (OpenCV hue is 0..179).
    cv.createTrackbar('Hmin', 'image', 0, 179, nothing)
    cv.createTrackbar('Smin', 'image', 0, 255, nothing)
    cv.createTrackbar('Vmin', 'image', 0, 255, nothing)
    cv.createTrackbar('Hmax', 'image', 0, 179, nothing)
    cv.createTrackbar('Smax', 'image', 0, 255, nothing)
    cv.createTrackbar('Vmax', 'image', 0, 255, nothing)
    # Mode indicator trackbar; values 0/1/2 are set below when switching
    # between mask editing and a selected color.
    cv.createTrackbar('m <-> c', 'image', 0, 2, nothing)
    cv.createTrackbar('shoot_x', 'image', 0, image_width, nothing)
    cv.createTrackbar('shoot_y', 'image', 0, image_height, nothing)
    # Deliberately inverted initialization (lower at max, upper at min) so
    # the first clicked pixels seed the min/max accumulation in the loop.
    set_trackbar([179, 255, 255], [0, 0, 0])
    cv.setTrackbarPos('m <-> c', 'image', 1)
    cv.setTrackbarPos('shoot_x', 'image', int(image_width / 2))
    cv.setTrackbarPos('shoot_y', 'image', int(image_height / 2))
    cv.setMouseCallback('image', draw_circle)
    w = window()
    w.create(window_name)
    # Block until the subscriber has delivered the first frame.
    while(img is None):
        rospy.sleep(0.01)
    while not rospy.is_shutdown():
        key = cv.waitKey(1) & 0xff
        # 'p' toggles pausing of the image stream (ignored during a click).
        if key == ord('p') and wait == False and not click:
            wait = True
        elif key == ord('p') and wait == True and not click:
            wait = False
        name, status = has_color(window_name, key)
        lower, upper = w.get_range('mask')
        lower_bar, upper_bar = get_trackbar()
        if click and is_mask:
            # Grow the mask range so it covers the clicked pixel's HSV value.
            h, s, v = hsv[pixel['y'], pixel['x']]
            [hl, sl, vl], [hu, su, vu] = w.get_range('mask')
            lower_current = [min(h, hl), min(s, sl), min(v, vl)]
            upper_current = [max(h, hu), max(s, su), max(v, vu)]
            w.push_range('mask', lower_current, upper_current)
            set_trackbar(lower_current, upper_current)
        elif compare_range(lower, upper, lower_bar, upper_bar):
            # Trackbars were moved by hand: record the new range.
            w.push_range('mask', lower_bar, upper_bar)
        elif status:
            # Mission key: commit the mask range to that object when it is
            # selected, otherwise load its stored range back into the mask.
            if w.select[name]:
                lower_current, upper_current = w.get_range('mask')
                w.push_range(name, lower_current, upper_current)
                cv.setTrackbarPos('m <-> c', 'image', 2)
                is_mask = False
            else:
                lower_current, upper_current = w.get_param(name)
                w.push_range('mask', lower_current, upper_current)
                set_trackbar(lower_current, upper_current)
                cv.setTrackbarPos('m <-> c', 'image', 0)
                is_mask = True
            w.select[name] = not w.select[name]
        elif is_mask:
            # Mask-edit shortcuts: undo / redo / reset the range history.
            if key == ord('z'):
                w.undo_range('mask')
            elif key == ord('x'):
                w.redo_range('mask')
            elif key == ord('c'):
                w.reset_range('mask')
        # NOTE(review): with the original indentation lost, 's'/'q' are
        # reconstructed here as part of the outer elif chain; that means they
        # are shadowed while is_mask is True — confirm against the original.
        elif key == ord('s'):
            w.save()
        elif key == ord('q'):
            break
        x = cv.getTrackbarPos('shoot_x', 'image')
        y = cv.getTrackbarPos('shoot_y', 'image')
        w.show_image(window_name)
        # Crosshair marking the shoot target on the HSV preview.
        cv.circle(hsv, (int(x), int(y)), 5, (100, 255, 255), -1)
        cv.imshow('image', hsv)
        cv.imshow('image_bgr', img)
        # Clear one-shot flags so each event is handled exactly once.
        click = False
        status = False
    cv.destroyAllWindows()
if __name__ == '__main__':
    # Entry point: start the ROS node, subscribe to the camera topic, and
    # run the interactive HSV range picker.
    rospy.init_node('object_color_range', anonymous=True)
    #############################################################################################
    # Topic delivering the compressed image stream; overridable via rosparam.
    seg_topic_default = "/semantic_segmentation/compressed"
    seg_topic = rospy.get_param("/object_detection/segmentation_topic", seg_topic_default)
    #############################################################################################
    # raw_input: this script targets Python 2 (consistent with the integer
    # division used for window sizing elsewhere in the file).
    k = str(raw_input("Press y or Y to continue, Any key to exit: "))
    if not k.lower() == 'y':
        exit(0)
    #############################################################################################
    print_result('INITIAL VARIABLE')
    # app = QApplication(sys.argv)
    # screen_resolution = app.desktop().screenGeometry()
    # Hard-coded fallback sizes in place of the Qt screen query above.
    screen_width, screen_height = 1024, 768
    image_width, image_height = 484, 304
    #############################################################################################
    print_result('SUBSCRIBER')
    rospy.Subscriber(seg_topic, CompressedImage, image_callback)
    print_result('SELECT COLOR')
    select_color()
# NOTE(review): non-code residue (dataset-viewer page text) accidentally
# appended after the script was removed; nothing below this line executes.