blob_id stringlengths 40 40 | directory_id stringlengths 40 40 | path stringlengths 2 616 | content_id stringlengths 40 40 | detected_licenses listlengths 0 69 | license_type stringclasses 2
values | repo_name stringlengths 5 118 | snapshot_id stringlengths 40 40 | revision_id stringlengths 40 40 | branch_name stringlengths 4 63 | visit_date timestamp[us] | revision_date timestamp[us] | committer_date timestamp[us] | github_id int64 2.91k 686M โ | star_events_count int64 0 209k | fork_events_count int64 0 110k | gha_license_id stringclasses 23
values | gha_event_created_at timestamp[us] | gha_created_at timestamp[us] | gha_language stringclasses 220
values | src_encoding stringclasses 30
values | language stringclasses 1
value | is_vendor bool 2
classes | is_generated bool 2
classes | length_bytes int64 2 10.3M | extension stringclasses 257
values | content stringlengths 2 10.3M | authors listlengths 1 1 | author_id stringlengths 0 212 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
37321511f55b483428e71701554e9e17bf1df771 | 98c6ea9c884152e8340605a706efefbea6170be5 | /examples/data/Assignment_7/hrnali002/question1.py | bddd72a0b19d90ef62d339aa08a5e015b73c2dc2 | [] | no_license | MrHamdulay/csc3-capstone | 479d659e1dcd28040e83ebd9e3374d0ccc0c6817 | 6f0fa0fa1555ceb1b0fb33f25e9694e68b6a53d2 | refs/heads/master | 2021-03-12T21:55:57.781339 | 2014-09-22T02:22:22 | 2014-09-22T02:22:22 | 22,372,174 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 593 | py | """A program to print a list with not duplicated words
Alison Hoernle
HRNALI002
27 April 2014"""
# get input and convert to a list
# NOTE(review): the name "list" shadows the builtin list type for the
# remainder of this script.
list = []
strings = input("Enter strings (end with DONE):\n")
while strings != "DONE":
    list.append(strings)
    strings = input()
print()
print("Unique list:")
# create an empty string and then go through list. Add each word to empty string and if in string already then don't print that word again
# NOTE(review): "string in counted_words" is a SUBSTRING test on the
# accumulated text, not membership in a set of printed words, so e.g.
# "ab" is suppressed once any printed word contains "ab".
counted_words = ''
for string in list:
    if string in counted_words:
        continue
    else:
        print(string)
counted_words += string | [
"jarr2000@gmail.com"
] | jarr2000@gmail.com |
89183a2d8db1679d97f14a455576df1e17c8699d | a93caa7552963b075e72bb9b6beb08aab94bac27 | /code/plots.py | 353f651e62182392288e94882edcf51ad8e349d6 | [] | no_license | anmalara/DeepWWTagger | ae2e7773f230bf7e74136a44dc09f51b066dbf35 | 1d3e1745a6b6b6a64bb83ae7f86dd53208a720a6 | refs/heads/master | 2020-03-11T03:07:19.989454 | 2019-02-19T09:36:44 | 2019-02-19T09:36:44 | 129,737,942 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,247 | py | import sys
import os
import os.path
import glob
import time
import numpy as np
from ROOT import TFile, TCanvas, TLegend, TH1F, TH2F, TColor, TAxis
from ROOT import kWhite, kBlack, kGray, kRed, kGreen, kBlue, kYellow, kMagenta, kCyan, kOrange, kSpring, kTeal, kAzure, kViolet, kPink
from ROOT import kNone
from root_numpy import *
from math import pi as PI
from sklearn.preprocessing import MaxAbsScaler
from variables import *
sys.path.append("/nfs/dust/cms/user/amalara/WorkingArea/UHH2_94/CMSSW_9_4_1/src/UHH2/PersonalCode/")
from tdrstyle_all import *
colors = [kBlack, kRed+1, kBlue-4, kGreen-2, kOrange, kMagenta, kViolet-3, kCyan, kSpring, kTeal, kYellow+1, kPink+10, kAzure+7, kAzure+1, kRed+3, kGray]
def get_binInfo(branch="Pt", isScale=True):
    """Look up histogram binning/axis settings for *branch*.

    Returns the 7-tuple (N_bins, bin_min, bin_max, min, max, isLogx,
    isLogy).  When *isScale* is true the branch name is ignored and the
    unit-interval binning used for rescaled inputs is returned.
    """
    if isScale:
        return 100, 0, 1, 0, 1, 0, 0
    # Ordered, case-insensitive substring lookup: the FIRST matching
    # needle wins, so more specific tags ("softdrop") must come before the
    # generic ones they co-occur with ("jetmass").
    settings_table = (
        ("jetpt",           (300, 0, 3000, 0.00001, 1., 0, 1)),
        ("jeteta",          (100, -PI, PI, 0.00001, 0.05, 0, 0)),
        ("jetphi",          (100, -PI, PI, 0.00001, 0.03, 0, 0)),
        ("jetenergy",       (500, 0, 5000, 0.00001, 0.35, 0, 1)),
        ("jetbtag",         (100, 0, 1, 0.00001, 0.07, 0, 0)),
        ("softdrop",        (100, 0, 500, 0.00001, 0.3, 0, 0)),
        ("jetmass",         (100, 0, 500, 0.00001, 0.14, 0, 0)),
        ("jettau1",         (100, 0, 1, 0.00001, 0.1, 0, 0)),
        ("jettau2",         (100, 0, 1, 0.00001, 0.15, 0, 0)),
        ("jettau3",         (100, 0, 1, 0.00001, 0.2, 0, 0)),
        ("jettau4",         (100, 0, 1, 0.00001, 0.25, 0, 0)),
        ("isb",             (100, 0, 1, 0.00001, 0.25, 0, 0)),
        ("candenergy",      (150, 0, 1500, 0.00001, 1, 0, 1)),
        ("candpx",          (100, -1500, 1500, 0.00001, 1.0, 0, 1)),
        ("candpy",          (100, -1500, 1500, 0.00001, 1.0, 0, 1)),
        ("candpz",          (100, -2000, 2000, 0.00001, 1.0, 0, 1)),
        ("candpt",          (150, 0, 1500, 0.00001, 1., 0, 1)),
        ("candeta",         (100, -2*PI, 2*PI, 0., 0.1, 0, 0)),
        ("candphi",         (50, -PI, PI, 0., 0.06, 0, 0)),
        ("candpdgid",       (500, -250, 250, 0.01, 1, 0, 1)),
        ("candmass",        (200, -1, 1, 0.00001, 1., 0, 1)),
        ("canddxy",         (100, -20, 20, 0.00001, 10, 0, 1)),
        ("canddz",          (100, -0.4, 0.4, 0.000001, 10, 0, 1)),
        ("candpuppiweight", (100, 0, 1, 0.00001, 50, 0, 1)),
    )
    lowered = branch.lower()
    for needle, settings in settings_table:
        if needle in lowered:
            return settings
    # Fallback for branches with no dedicated binning.
    return 100, 0, 1000, 0.00001, 1., 0, 0
    # return N_bins, bin_min, bin_max, min, max, isLogx, isLogy
@timeit
def plotJetVariables(arrays=[], array_names=["Higgs"], output_path="./", branch_names=["jetPt", "jetEta"], isCand=False):
    # Draw one normalized comparison histogram per branch, overlaying all
    # input arrays, and save it as .pdf/.png/.root under output_path.
    #   arrays:       list of numpy arrays; the branch index is axis 1 and,
    #                 when isCand is True, axis 2 indexes candidates.
    #   array_names:  legend label per entry of `arrays`.
    #   branch_names: one name per column; drives binning via get_binInfo.
    # NOTE(review): mutable default arguments are shared between calls -
    # safe here only because the function never mutates them.
    # NOTE: this file mixes Python 2 print statements (below).
    for index, branch in enumerate(branch_names):
        print branch
        # get_binInfo's own comment documents positions 4/5 as (min, max);
        # the unpack below names them max_/min_ - presumably intentional,
        # verify against get_binInfo.
        N_bins, bin_min, bin_max, max_, min_, isLogx, isLogy = get_binInfo(branch)
        c = tdrCanvas(branch, bin_min, bin_max, max_, min_, branch, "A.U.", square=kRectangular, iPeriod=0, iPos=11, extraText_="Simulation")
        c.SetLogx(isLogx)
        c.SetLogy(isLogy)
        leg = tdrLeg(0.55, 0.5, 0.9, 0.9, textSize=0.025)
        tdrHeader(leg, branch)
        histos = []
        for index_array, array in enumerate(arrays):
            h = TH1F( branch+array_names[index_array], branch+array_names[index_array], N_bins, bin_min, bin_max)
            if isCand:
                # Fill every per-candidate slice into the same histogram.
                for i in range(array.shape[2]):
                    fill_hist(h, array[:,index,i])
            else:
                fill_hist(h, array[:,index])
            h.SetLineWidth(3)
            # Normalize to unit area so shapes are comparable.
            if h.Integral()>0:
                h.Scale(1./h.Integral())
            tdrDraw(h, "hist", mcolor=colors[index_array+1], lcolor=colors[index_array+1], fstyle=0, fcolor=colors[index_array+1])
            leg.AddEntry(h, array_names[index_array] + ", Entries: "+str(round(float(h.GetEntries())/1000000,3))+" M","l")
            histos.append(h)
        c.Print(output_path+branch+".pdf")
        c.Print(output_path+branch+".png")
        c.Print(output_path+branch+".root")
@timeit
def runOverInputs(arrays,array_names, branch_names, isCand):
    # Produce comparison plots for one set of input arrays:
    #   - "all_scale/": every array, rescaled per-feature to [-1, 1];
    #   - "all/", per-background and per-radius folders: raw arrays
    #     (those plotJetVariables calls are currently commented out).
    # Relies on module-level globals: out_path, common_path, bkgs, radii
    # (presumably provided by `from variables import *` - verify).
    output_path = out_path+common_path+"all_scale/"
    if not os.path.isdir(output_path):
        os.makedirs(output_path)
    arrays_ = []
    for array in arrays:
        # MaxAbsScaler scales each feature into [-1, 1] without shifting.
        array_ = MaxAbsScaler().fit_transform(array)
        arrays_.append(array_)
    plotJetVariables(arrays_, array_names, output_path, branch_names, isCand)
    output_path = out_path+common_path+"all/"
    if not os.path.isdir(output_path):
        os.makedirs(output_path)
    # plotJetVariables(arrays, array_names, output_path, branch_names, isCand)
    for bkg in bkgs:
        # Subset of arrays whose name contains this background tag.
        temp_array_names = [array_names[index] for index, test in enumerate(array_names) if bkg in test]
        temp_arrays = [arrays[index] for index, test in enumerate(array_names) if bkg in test]
        # print temp_array_names
        output_path = out_path+common_path+bkg+"/"
        if not os.path.isdir(output_path):
            os.makedirs(output_path)
        # plotJetVariables(temp_arrays, temp_array_names, output_path, branch_names, isCand)
    for radius in radii:
        # Same selection, keyed on the jet-radius tag in the name.
        temp_array_names = [array_names[index] for index, test in enumerate(array_names) if radius in test]
        temp_arrays = [arrays[index] for index, test in enumerate(array_names) if radius in test]
        # print temp_array_names
        output_path = out_path+common_path+radius+"/"
        if not os.path.isdir(output_path):
            os.makedirs(output_path)
        # plotJetVariables(temp_arrays, temp_array_names, output_path, branch_names, isCand)
def resetError(arrays):
    """Overwrite every sequence-valued cell of each 2-D array with 100000.

    Iterates the first two axes of every array in *arrays*; any cell that
    supports len() (lists, strings, ...) is replaced in place by 100000,
    while scalar cells are left untouched.
    """
    for grid in arrays:
        n_rows, n_cols = grid.shape[0], grid.shape[1]
        for row in range(n_rows):
            for col in range(n_cols):
                try:
                    # Probe for sequence-ness; scalars raise TypeError and
                    # are skipped by the handler below.
                    len(grid[row, col])
                    grid[row, col] = 100000
                except Exception:
                    pass
@timeit
def addFiles(path, info, bkg):
    """Concatenate the per-index .npy chunks of one sample into one array.

    Loads ``<path><info>_<bkg>_<i>.npy`` for every chunk index listed in
    the global ``files_dictionary[bkg][0]``, silently skipping missing
    files, and stops early once more than 1000 rows have been collected.

    Args:
        path: directory holding the .npy chunks (with trailing slash).
        info: branch-group prefix of the file names (e.g. "jet", "cand").
        bkg:  sample key used both in the file names and in
              ``files_dictionary``.

    Returns:
        A numpy array with all loaded chunks stacked along axis 0.
        Raises IndexError if no chunk file was found at all.
    """
    chunks = []
    total_rows = 0
    for i in range(files_dictionary[bkg][0]):
        file_name = path + info + "_" + bkg + "_" + str(i) + ".npy"
        if not os.path.isfile(file_name):
            continue
        chunk = np.load(file_name)
        chunks.append(chunk)
        total_rows += len(chunk)
        # if total_rows > 1000000:
        if total_rows > 1000:
            break
    # Concatenate once at the end: calling np.concatenate inside the loop
    # (as before) re-copies the accumulated data on every file, which is
    # quadratic in the total size.
    if len(chunks) == 1:
        return chunks[0]
    return np.concatenate(chunks)
########################
#                      #
#     Main Program     #
#                      #
########################

# for info in ["jet", "cand", "gen_jet", "gen_cand"]:
# for info in ["cand", "gen_jet", "gen_cand"]:

# Pass 1: load the raw per-sample .npy blocks for every branch group
# (skipping event/extra groups); the actual plotting call is currently
# commented out.  NOTE: Python 2 print statements appear below.
for info in branch_names_dict:
    if "Event" in info or "Extra" in info:
        continue
    arrays = []
    array_names = []
    for bkg in bkgs:
        for radius in radii:
            path = out_path+"input_varariables/NTuples_Tagger/"+bkg+"_"+radius+"/"
            array_name = bkg+radius
            array_names.append(array_name)
            # print bkg, radius
            array = addFiles(path, info, bkg)
            # print array.shape
            arrays.append(array)
            del array
    isCand = False
    if "Cand" in info:
        isCand = True
    branch_names = branch_names_dict[info]
    # if not isCand:
    #     resetError(arrays)
    common_path = "./plot/"
    # print info
    # print branch_names
    # runOverInputs(arrays, array_names, branch_names, isCand)

# Pass 2: plot the preprocessed "Sequential" arrays for the selected
# pt windows, one runOverInputs call per branch group and pt bin.
# for info in ["", "gen_"]:
for info in branch_names_dict:
    if "Event" in info or "Extra" in info or "Cand" in info or "Sub" in info or "Gen" in info:
        continue
    #for pt in ["300_500", "500_10000"]:
    for pt in ["300_500"]:
        arrays = []
        array_names = []
        for bkg in bkgs:
            for radius in radii:
                for path in glob.glob(out_path+"input_varariables/NTuples_Tagger/Sequential/Sequential_"+info+"_"+bkg+"_"+radius+"*"+pt+"*"):
                    if not os.path.isfile(path):
                        continue
                    array_name = bkg+radius+"_pt_"+pt
                    print array_name
                    try:
                        array = np.load(path)
                        # print path
                        # print array.shape
                        arrays.append(array)
                        array_names.append(array_name)
                    except:
                        continue
        isCand = False
        branch_names = branch_names_dict[info]
        # resetError(arrays)
        common_path = "./plot/Sequential/pt_"+pt+"/"
        print info
        # print branch_names
        runOverInputs(arrays, array_names, branch_names, isCand)
| [
"andrea.malara@cern.ch"
] | andrea.malara@cern.ch |
f3a2ad5c32de8876caeae5f5f9095fdd0ef824c5 | 400c569b19d003d0b9d1b31bc1b698ae510cbc46 | /Celestial classification/models.py | d4b60dffc8e997aebb887787f6bf21975ed96fb3 | [] | no_license | as950118/dacon | 05a203ab36375a69549ac39ba3b02a90431c860a | a1489a55a7a53a755d6cf50081522bd7c1c48b4f | refs/heads/master | 2021-02-13T20:06:38.169482 | 2020-03-03T19:51:51 | 2020-03-03T19:51:51 | 244,727,899 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,054 | py | import pandas as pd
from sklearn.model_selection import train_test_split
from catboost import CatBoostClassifier
from lightgbm import LGBMClassifier
from xgboost import XGBClassifier
from data_processing import DataProcessing

# Fixed seed so the boosted-tree fits are reproducible.
random_seed = 0
train_data_path = "./data/train.csv"
test_data_path = "./data/test.csv"
sample_submission_data_path = "./data/sample_submission.csv"

# Load the CSVs and build the train/validation split plus the test matrix.
data_processing = DataProcessing(train_data_path, test_data_path, sample_submission_data_path)
train_data, test_data, sample_submission_data = data_processing.load_file()
x_train, x_valid, y_train, y_valid = data_processing.set_data(train_data, test_data)

'''
# catboost
cat_clf = CatBoostClassifier(iterations = 20000, random_state = random_seed, task_type="GPU")
cat_clf.fit(x_train, y_train, eval_set = [(x_train, y_train), (x_valid, y_valid)])
cat_pred = cat_clf.predict_proba(test_data)
submission = pd.DataFrame(data=cat_pred, columns=sample_submission_data.columns, index=sample_submission_data.index)
submission.to_csv('./results/cat_boost2.csv', index=True)
'''

# lgbm: fit, predict class probabilities on the test set, and write them
# as a submission CSV shaped like the sample submission.
# (`pd` is imported at the top of this file.)
#lgbm_clf = LGBMClassifier(n_estimators = 1000, n_jobs=-1, random_state = random_seed, device = 'gpu')
lgbm_clf = LGBMClassifier(n_estimators = 1000, n_jobs=-1, random_state = random_seed)
lgbm_clf.fit(x_train, y_train, eval_set = [(x_train, y_train), (x_valid, y_valid)])
lgbm_pred = lgbm_clf.predict_proba(test_data)
submission = pd.DataFrame(data=lgbm_pred, columns=sample_submission_data.columns, index=sample_submission_data.index)
submission.to_csv('./results/light_gbm2.csv', index=True)

# xgboost: same flow as the LightGBM block above.
#xgb_clf = XGBClassifier(n_estimators = 1000, n_jobs=-1, random_state=random_seed, tree_method='gpu_exact')
xgb_clf = XGBClassifier(n_estimators = 1000, n_jobs=-1, random_state=random_seed)
xgb_clf.fit(x_train, y_train, eval_set = [(x_train, y_train), (x_valid, y_valid)])
xgb_pred = xgb_clf.predict_proba(test_data)
submission = pd.DataFrame(data=xgb_pred, columns=sample_submission_data.columns, index=sample_submission_data.index)
submission.to_csv('./results/xg_boost2.csv', index=True)
"na_qa@icloud.com"
] | na_qa@icloud.com |
fa2be5e5a08ffaa6b639bc4c0c9eee8944b46147 | c12947b326618c1c000e240d9515aa79ef127d5d | /migrations/versions/6decd80da190_added_equipment_table_to_the_database.py | d6dfaa538895073361eac32e2edf225747dcaa83 | [] | no_license | allanderek/angular-velocity | 3135559b18587477774375c1c2b0a4a93a85ed60 | cdc2d3c0b647e0ed41015356dd058870589a41a8 | refs/heads/master | 2016-08-12T06:18:42.330608 | 2016-01-23T16:10:21 | 2016-01-23T16:10:21 | 49,957,645 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 969 | py | """Added equipment table to the database.
Revision ID: 6decd80da190
Revises: 69a3f32dd441
Create Date: 2016-01-23 15:03:36.991513
"""
# revision identifiers, used by Alembic.
revision = '6decd80da190'
down_revision = '69a3f32dd441'
from alembic import op
import sqlalchemy as sa
def upgrade():
    """Apply the migration: create the ``equipment`` table.

    ``owner`` is a foreign key into ``user.id``.
    """
    ### commands auto generated by Alembic - please adjust! ###
    op.create_table('equipment',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('owner', sa.Integer(), nullable=True),
    sa.Column('name', sa.String(length=2400), nullable=True),
    sa.Column('description', sa.String(length=2400), nullable=True),
    sa.Column('requires_human', sa.Boolean(), nullable=True),
    sa.ForeignKeyConstraint(['owner'], ['user.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    ### end Alembic commands ###
def downgrade():
    """Revert the migration: drop the ``equipment`` table."""
    ### commands auto generated by Alembic - please adjust! ###
    op.drop_table('equipment')
    ### end Alembic commands ###
| [
"allan.clark@gmail.com"
] | allan.clark@gmail.com |
7dc2b3f8fd6f281401a3c4676506bc5b3d23b623 | ca5a3cd03a1951db3ba496fbb4b62dbbfcc198dc | /obtain_government_data/mergeFiles.py | d0a17d6da91be3f6dc63d3c49ef12e625f7e9073 | [] | no_license | KaartGroup/localModelScripts | e9ae5c0f2fabee531a04c220106416350e9ff341 | 89acc3c6968c7aa3ede1da70751f13dd2063af5f | refs/heads/master | 2022-12-11T18:30:16.218586 | 2021-05-06T18:24:00 | 2021-05-06T18:32:47 | 242,195,519 | 0 | 1 | null | 2022-12-08T07:44:50 | 2020-02-21T17:33:43 | Python | UTF-8 | Python | false | false | 1,307 | py | #!/usr/bin/env python3
import json
import os
import glob
import sys
def merge_json(files, save_file):
    """Merge several JSON files into one and write the result to *save_file*.

    Keys whose value is a list in both the accumulated result and the
    incoming file are concatenated; any other key is overwritten by the
    last file that defines it.  Files that fail to parse are reported and
    skipped.  Nothing is written when *files* is empty.
    """
    if not files:
        return
    merged = {}
    for path in files:
        with open(path) as fd:
            try:
                data = json.load(fd)
            except json.JSONDecodeError:
                print("Bad json file: {}".format(path))
                continue
        for key, value in data.items():
            if key in merged and isinstance(value, list) and isinstance(merged[key], list):
                # Both sides are lists: concatenate instead of clobbering.
                merged[key].extend(value)
            else:
                merged[key] = value
    with open(save_file, 'w') as save:
        print(save_file)
        json.dump(merged, save, indent=" ")
def main(directory):
    """Merge the .json and .geojson files found in *directory* into two
    sibling files named after the directory itself."""
    base = os.path.normpath(directory)
    for ext in ('.json', '.geojson'):
        matches = sorted(glob.glob(os.path.join(directory, '*' + ext)))
        merge_json(matches, base + ext)
if __name__ == "__main__":
    # Merge each directory given on the command line; with no arguments
    # only the usage hint is printed (the loop below is then empty).
    if (len(sys.argv) == 1):
        print("We need a directory or set of directories")
    for directory in sys.argv[1:]:
        main(directory)
| [
"taylor.smock@kaart.com"
] | taylor.smock@kaart.com |
446f69a026cc2ab7a8d1473354a3452e64b019c0 | e148f3cd0f96b7ab189a6de8a59b756ec980228a | /proxy.py | 392f7f0edbb089b0bd405264c1cd22d3fe7b70b3 | [] | no_license | innovationb1ue/govinfo | 439844552bac769f707855c9247109fad276e127 | 501a232ef21e82721c8d9020a28e4c8a34eeea5f | refs/heads/master | 2022-03-12T13:02:21.084077 | 2019-10-23T14:42:50 | 2019-10-23T14:42:50 | 215,671,788 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,154 | py | import requests
import random
import time
import multiprocessing
# Toggle for the Redis-backed proxy pool used below.
USE_REDIS = True
if USE_REDIS:
    import redis as red
    # Connect to the default local Redis and wipe the currently selected
    # database at import time - previously stored proxies are discarded.
    redis = red.Redis(decode_responses=True)
    redis.flushdb()
class Proxy_Pool:
    """Keeps a pool of validated HTTP proxies in a Redis list
    ("proxy_Pool") and fetches URLs through them, rotating to a fresh
    proxy on failure."""
    def __init__(self, proxy_url:str,test_url:str,failwords:list=None, worker=4):
        # proxy_url: vendor API returning candidate proxies (CRLF-separated text).
        # test_url:  URL used to validate each candidate proxy.
        # failwords: substrings whose presence in a response marks it failed.
        # worker:    number of background processes feeding the pool.
        self.proxy_url = proxy_url
        self.test_url = test_url
        self.failwords = failwords
        if not self.failwords:
            self.failwords = []
        self.s = requests.Session()
        self.Headers= {
            'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/77.0.3865.90 Safari/537.36'}
        self.proxy = None
        self.start_proxy_service(worker)
    @staticmethod
    def save_Exception_info(e:Exception):
        # Append the exception text to a local log file.
        with open('./Exception.txt', 'a') as f:
            f.write(str(e))
            f.write('\n')
    # get url with proxy (http only)
    def get(self,url,headers=None,renew=False,timeout=2):
        """Fetch *url* through the current proxy.

        Pops a fresh proxy from Redis when none is held yet or when
        *renew* is true, blocking until the workers have produced one.
        Returns the Response on success, recurses with a new proxy on
        transient failures, and returns 0 on request/decode errors.
        """
        if not self.proxy:
            # Block until a background worker has pushed a proxy.
            while redis.llen("proxy_Pool") == 0:
                time.sleep(random.random())
            self.proxy = redis.lpop("proxy_Pool")
        if renew:
            while redis.llen("proxy_Pool") == 0:
                time.sleep(random.random())
            self.proxy = redis.lpop("proxy_Pool")
        try:
            resp = self.s.get(url,headers=headers,timeout=timeout,proxies={'http':self.proxy})
        except requests.RequestException as e:
            self.save_Exception_info(e)
            return 0
        except Exception as e:
            self.save_Exception_info(e)
            return self.get(url,headers,renew=True, timeout=timeout)
        try:
            content = resp.content.decode('utf-8')
        except UnicodeEncodeError as e:
            # NOTE(review): decoding raises UnicodeDecodeError, not
            # UnicodeEncodeError - this branch likely never matches and
            # the generic handler below catches instead.
            self.save_Exception_info(e)
            return 0
        except Exception as e:
            self.save_Exception_info(e)
            return 0
        # check status ---->
        if resp.status_code != 200:
            print('Error status code', resp.status_code)
            # NOTE(review): this retry drops the caller's headers and
            # timeout; recursion is also unbounded if proxies keep failing.
            return self.get(url,renew=True)
        for word in self.failwords:
            if word in content:
                # NOTE(review): same headers/timeout loss as above.
                return self.get(url,renew=True)
        # check end here ------
        return resp
    # start button of proxy service
    def start_proxy_service(self, worker):
        # Spawn `worker` background processes that each run proxy_process
        # forever, refilling the Redis pool.
        if not worker:
            raise ValueError('proxy worker not specified, expect int, got', type(worker) , worker)
        p = multiprocessing.Pool(worker)
        for _ in range(worker):
            print('start')
            p.apply_async(self.proxy_process)
        p.close()
    def proxy_process(self):
        # Worker loop: fetch one validated proxy at a time and push it
        # onto the shared Redis list, pausing a second between proxies.
        while True:
            proxy = self.get_proxy(self.proxy_url,
                                   self.test_url,
                                   ['Bad gate']
                                   )
            redis.lpush("proxy_Pool",proxy)
            print('Add 1 proxy')
            time.sleep(1)
    # get a valid proxy
    def get_proxy(self,proxy_url:str, test_url:str, failwords=None):
        """Fetch a candidate list from the vendor and return the first
        proxy that passes the test_url check; after 7 individual failures
        a fresh candidate list is requested (recursively)."""
        if not failwords:
            failwords = []
        proxy_count = 0
        while True:
            proxy_list = self.s.get(proxy_url,headers=self.Headers).content.decode('utf-8').split('\r\n') # windows form
            random.shuffle(proxy_list) # optional
            for proxy in proxy_list:
                # test validity
                try:
                    response = self.s.get(test_url,headers=self.Headers,proxies={'http':proxy},timeout=3)
                except Exception as e:
                    proxy_count += 1
                    continue
                # check status
                if response.status_code != 200:
                    proxy_count += 1
                    continue
                # decode contents
                try:
                    content = response.content.decode('utf-8')
                except Exception as e:
                    self.save_Exception_info(e)
                    proxy_count += 1
                    continue
                # check key word
                for word in failwords:
                    if word in content:
                        # NOTE(review): this `continue` only advances the
                        # inner failwords loop; a proxy whose response
                        # contains a failword is still returned below.
                        proxy_count += 1
                        continue
                # refresh to try a new proxy list
                if proxy_count >= 7:
                    # NOTE(review): recursion here grows the stack on every
                    # refresh; the surrounding while-loop would suffice.
                    return self.get_proxy(proxy_url,test_url,failwords)
                return proxy
class MyRequests:
    """Thin wrapper around requests.Session with optional retry-on-failure
    GETs and a local exception log."""

    def __init__(self):
        self.s = requests.Session()
        self.Headers = {'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/67.0.3396.99 Safari/537.36'}

    @staticmethod
    def save_Exception_info(e):
        # Append the exception text (one line) to a local log file.
        with open('./Exception.txt', 'a') as log:
            log.write(str(e))
            log.write('\n')

    def get(self, url, timeout=5, retry=False, retryMax=0, retryCount=0):
        """GET *url*; return the Response on HTTP 200, False otherwise.

        When *retry* is true the request is re-issued after a network
        error or a non-200 status until retryCount reaches retryMax.
        """
        while True:
            try:
                resp = self.s.get(url, timeout=timeout, headers=self.Headers)
            except Exception as exc:
                print(exc)
                self.save_Exception_info(exc)
                if retry and retryCount < retryMax:
                    retryCount += 1
                    continue
                return False
            if resp.status_code == 200:
                return resp
            # Unexpected status: dump the body, then retry or give up.
            print(resp.content.decode('utf-8'))
            if retry and retryCount < retryMax:
                retryCount += 1
                continue
            return False
if __name__ == '__main__':
    # Demo: build a pool from the (hard-coded, keyed) proxy vendor API and
    # validate candidates against the ccgp.gov.cn search page.
    e = Proxy_Pool('http://dev.energy67.top/api/?apikey=90c68bee2d04747b727310c1a810d9272a43cde8&num=15&type=text&line=win&proxy_type=putong&sort=rand&model=post&protocol=http&address=&kill_address=&port=&kill_port=&today=false&abroad=1&isp=&anonymity=2',
                   'http://search.ccgp.gov.cn/bxsearch?searchtype=1&page_index=1&start_time=&end_time=&timeType=2&searchparam=&searchchannel=0&dbselect=bidx&kw=&bidSort=0&pinMu=0&bidType=0&buyerName=&projectId=&displayZone=&zoneId=&agentName=',
                   ['Bad gate'])
| [
"517262600@qq.com"
] | 517262600@qq.com |
a3f76984e69b7bfbe74621b895652b299a42164e | fcb06b09a805dbb983b1f7acdde97d41cf1da2e9 | /igamingplatform/spins/apps.py | cb4041a710f6304b158ab53690a52d02af4b137d | [] | no_license | BartoszBereza/i-gaming-platform | a6a1e1148b22799cd5afaa1050aac970625cfece | b39658eb2c395755df09a43ab3b03b6e2739c1fe | refs/heads/master | 2020-03-27T10:46:23.836289 | 2018-09-06T12:49:37 | 2018-09-06T12:49:37 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 85 | py | from django.apps import AppConfig
class SpinsConfig(AppConfig):
    """Django app configuration for the ``spins`` application."""
    name = 'spins'
| [
"bbplen@gmail.com"
] | bbplen@gmail.com |
833f34bfee5778cae876b543fd5e519ac588c965 | db371807e5f23a739369c1d86677b6f888a2c724 | /dev/parse_time_domain/parse_functions.py | 81f2592e21ef207c765866a525889a8efa9e1c6e | [] | no_license | ivyu1265/rocketsat12 | d1cc2b15fcfb68ff081ecd6f0c40f7b7e0b9933d | 4171848fd6acec64fc570b211924ddfef556fb52 | refs/heads/master | 2018-10-21T06:12:24.853508 | 2018-07-26T19:06:36 | 2018-07-26T19:06:36 | 122,880,264 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,225 | py | import subprocess as sp
import re
import pandas as pd
import struct
import numpy as np
class Header_Info:
    """Parse the textual output of ``gr_read_file_metadata -D`` for a GNU
    Radio capture into a list of per-header dicts (``self.headers``)."""

    def __init__(self, file_name):
        self.file_name = file_name
        self.read_file_data()
        self.define_headers_list()

    def read_file_data(self):
        """Run gr_read_file_metadata on the file and keep its stdout."""
        command = 'gr_read_file_metadata -D {}'.format(self.file_name)
        completed = sp.run(command, shell=True, stdout=sp.PIPE)
        self.file_data = completed.stdout.decode()

    def define_headers_list(self):
        """Split the tool output into per-header chunks and parse each."""
        self.headers = []
        self.file_output_header_list = self.file_data.split('\n\n\n\n')
        for header_chunk in self.file_output_header_list:
            if header_chunk:
                self.define_header_dictionary(header_chunk)

    def define_header_dictionary(self, file_output_header):
        """Parse one header chunk; headers with no payload are dropped."""
        info = {
            'header_number': self.get_header_number(file_output_header),
            'sample_rate_sps': self.get_sample_rate(file_output_header),
            'data_type_size_bytes': self.get_data_type_size(file_output_header),
            'data_type': self.get_data_type(file_output_header),
            'data_length_bytes': self.get_data_length(file_output_header),
            'seconds_since_start': self.get_seconds_since_start(file_output_header),
        }
        if info['data_length_bytes'] != 0:
            self.headers.append(info)

    def get_header_number(self, output_str):
        """Parse 'HEADER <n>' and return n as an int."""
        found = re.search('HEADER \d*', output_str).group()
        return int(found.rsplit(' ', 1)[1])

    def get_sample_rate(self, output_str):
        """Parse 'Sample Rate: <sps>' and return it as a float."""
        found = re.search('Sample Rate: \d*.\d*', output_str).group()
        return float(found.rsplit(':', 1)[1])

    def get_data_type_size(self, output_str):
        """Parse 'Item size: <bytes>' and return it as an int."""
        found = re.search('Item size: \d*', output_str).group()
        return int(found.rsplit(':', 1)[1])

    def get_data_type(self, output_str):
        """Parse 'Data Type: <name>' and return the bare type name."""
        found = re.search('Data Type: \w*', output_str).group()
        return found.rsplit(':', 1)[1].strip()

    def get_data_length(self, output_str):
        """Parse 'Size of Data: <n> bytes' and return n as an int."""
        found = re.search('Size of Data: \d* bytes', output_str).group()
        return int(found.rsplit(' ', 2)[1])

    def get_seconds_since_start(self, output_str):
        """Parse 'Seconds: <t>' and return t as a float."""
        found = re.search('Seconds: \d*.\d*', output_str).group()
        return float(found.rsplit(':', 1)[1])
class Time_Data:
    """Decode the payload bytes matching each GNU Radio metadata header
    and collect outlier samples into ``self.signal_df``."""
    def __init__(self, file_name, header_info):
        # file_name:   detached-payload data file written by GNU Radio.
        # header_info: a parsed Header_Info for the matching header file.
        self.file_name = file_name
        self.header_info = header_info
        self.signal_df = pd.DataFrame()
        self.load_file_data()
        self.populate_df()
    def load_file_data(self):
        # Slurp the whole payload file into memory as raw bytes.
        with open(self.file_name, 'rb') as f:
            self.file_data = f.read()
    def populate_df(self):
        # Walk the headers in order, decoding each one's byte range.
        self.file_byte_index = 0
        self.define_struct_format()
        for header in self.header_info.headers:
            self.load_header_chunk_only_outliers(header)
            # self.load_header_chunk(header)
    def define_struct_format(self):
        # Making the assumption here that all files are same type
        # It would be super weird if they weren't
        # Add a check if feeling paranoid, I guess?
        sample_header = self.header_info.headers[0]
        # If working with more data types, add more options here
        if sample_header['data_type'] == 'float':
            format_character = 'f'
        else:
            # NOTE(review): missing `raise` - this bare Exception(...) is a
            # no-op, and format_character is then unbound below.
            Exception('Unknown data type')
        datapoints_in_file = int(sample_header['data_length_bytes'] / sample_header['data_type_size_bytes'])
        self.struct_format = '{}{}'.format(datapoints_in_file, format_character)
    def load_header_chunk_only_outliers(self, header):
        # Time vector
        # NOTE(review): `samples` is a float under Python 3 true division;
        # np.linspace expects an integer num - confirm the intended
        # Python/numpy versions.
        samples = header['data_length_bytes'] / header['data_type_size_bytes']
        time = np.linspace(0, samples / header['sample_rate_sps'], samples) + header['seconds_since_start']
        # Want the outliers from signal strengths
        # Also want a sampling of other points that are the same as the mean
        strength_chunk = self.unpack_from_bytes(header)
        signal_mu = strength_chunk.mean()
        signal_sigma = 5 * strength_chunk.std()
        # Keep samples more than 5 sigma from the mean...
        outlier_indices = (strength_chunk > (signal_mu + signal_sigma)) | (strength_chunk < (signal_mu - signal_sigma))
        # ...plus every 1000th sample, forced to the mean, as a noise floor.
        reduction = 1000
        # NOTE(review): np.bool is deprecated/removed in newer numpy;
        # plain `bool` is the replacement.
        random_indices = np.zeros(strength_chunk.shape, dtype=np.bool)
        random_indices[::reduction] = True
        strength_chunk[random_indices] = signal_mu
        indices_to_keep = outlier_indices | random_indices
        strength = strength_chunk[indices_to_keep]
        header_number = np.ones(strength.shape) * header['header_number']
        time = time[indices_to_keep]
        chunk_df = pd.DataFrame({
            'strength' : strength,
            'header_number' : header_number,
            'noise_floor' : random_indices[indices_to_keep],
        }, index=time)
        print(header_number[0])
        # NOTE(review): DataFrame.append was removed in pandas 2.x;
        # pd.concat is the replacement.
        self.signal_df = self.signal_df.append(chunk_df)
    def unpack_from_bytes(self, header):
        # Slice this header's byte range out of the file and advance the
        # running cursor, then unpack it with the precomputed struct format.
        chunk_start_index = self.file_byte_index
        chunk_end_index = chunk_start_index + header['data_length_bytes']
        self.file_byte_index = chunk_end_index
        bytes_chunk = self.file_data[chunk_start_index:chunk_end_index]
        numbers_chunk = struct.unpack(self.struct_format, bytes_chunk)
        return np.array(numbers_chunk)
"dawson.beatty@colorado.edu"
] | dawson.beatty@colorado.edu |
3994ec01676f94e3b0ed9d34c4e51522f1548082 | 6b3ec47ee410a7d2ed2102cc5bcfa13c7a6342e2 | /bin/easy_install-3.6 | 5d6f8c4e10d68c760d508456eeaaa31b7e59754b | [] | no_license | makkar-nishant123/Refermeframework | fddb912304bdb4ffe3e169fda2d60b4171d8b6c1 | a152f42f6ab63c037bf3f117aa5be1ceb3a1d178 | refs/heads/master | 2020-05-15T23:29:18.684101 | 2019-04-28T17:31:22 | 2019-04-28T17:31:22 | 182,555,118 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 460 | 6 | #!/Users/nishantmakkar/PycharmProjects/RefermeFramework/bin/python
# EASY-INSTALL-ENTRY-SCRIPT: 'setuptools==40.8.0','console_scripts','easy_install-3.6'
# Auto-generated setuptools console-script shim (see the
# EASY-INSTALL-ENTRY-SCRIPT marker above); not meant for hand-editing.
__requires__ = 'setuptools==40.8.0'
import re
import sys
from pkg_resources import load_entry_point

if __name__ == '__main__':
    # Strip the "-script.py(w)" / ".exe" wrapper suffix from argv[0], then
    # dispatch to the registered 'easy_install-3.6' entry point.
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(
        load_entry_point('setuptools==40.8.0', 'console_scripts', 'easy_install-3.6')()
    )
| [
"makkar.nishant123@gmail.com"
] | makkar.nishant123@gmail.com |
f529c2813ffd27be60a2c246cf2853fcf650896f | 78912badbaa634d84a93ac03872f18b3f14092a0 | /photosorter-readbuckets.py | 21e4410b93a348af18e57021e9ae46609456fa81 | [] | no_license | mperry8889/photosorter | fc556054ce2af1a50c91c585c80eb6d65ff23f4f | d20c7a51a6e0e7aef4e4eb9260a344d54c52e539 | refs/heads/master | 2021-05-29T06:55:32.482767 | 2011-05-08T17:04:59 | 2011-05-08T17:04:59 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 442 | py | #!/usr/bin/env python
from photosorter import PhotoSorter
from photosorter import Bucket
from photosorter import Photo
if __name__ == "__main__":
    # Dump every photo of every bucket, one line per photo:
    # "<state> <year> <filename> <rotation> <flip_horizontal>".
    # NOTE: Python 2 print-statement syntax below.
    p = PhotoSorter()
    for bucket in p.buckets:
        for state in ["during", "after", "before", "unknown", "unsorted"]:
            for photo in getattr(bucket, state):
                print "%s %s %s %s %s" % (state, bucket.year, photo.filename, photo.rotation, photo.flip_horizontal)
| [
"none@none"
] | none@none |
1154ef51601a72adda015a0c0c6295a88ba79108 | cad7eb211a9254a263e5d924f9d3d05859e42f8f | /app/utils/view.py | 30f20e2823a77bbcb050c3965093723311996e0d | [] | no_license | Arthur264/music-new.chat | d0568b6096119c863bf95f58159839d9844a1f30 | 0cf4fabf385954c060dec7408213a911a49358e7 | refs/heads/master | 2020-04-23T17:29:26.766098 | 2019-03-12T21:27:43 | 2019-03-12T21:27:43 | 171,333,921 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,024 | py | from cassandra.cqlengine.query import LWTException
from sanic.views import HTTPMethodView
from app.http import error_response, json_response
from app.utils.request import check_uuid
class ModelBaseView(HTTPMethodView):
    """Generic Sanic HTTP view over a Cassandra cqlengine model.

    Subclasses set ``model`` to the cqlengine model class to serve.
    """
    # cqlengine model served by this view; set by subclasses.
    model = None
    @staticmethod
    async def _make_request(data, many=False):
        # Serialize query results to JSON: empty -> {}, many -> list of
        # dicts, list -> first element's dict, otherwise a single dict.
        if not data:
            return await json_response({})
        if many:
            return await json_response([item.to_dict() for item in data])
        if isinstance(data, list):
            return await json_response(data[0].to_dict())
        return await json_response(data.to_dict())
    @check_uuid
    async def get(self, request):
        # GET ?id=<uuid> returns one object; without id, the whole
        # collection.  Unknown ids yield a 404.
        param_id = request.raw_args.get('id')
        if not param_id:
            instances = self.model.objects().all()
            return await self._make_request(instances, many=True)
        instance = self.model.objects(id=param_id)
        if not instance:
            model_name = self.model.__name__.replace('Model', '')
            return await error_response(msg=f'{model_name} not found',
                                        status=404)
        return await self._make_request(instance)
    async def post(self, request):
        # Create a new row; the LWT "IF NOT EXISTS" create raises
        # LWTException on duplicates, mapped to a 400.
        try:
            data = self.prepare_data(request)
            instance = self.model.if_not_exists().create(**data)
            return await self._make_request(instance)
        except LWTException:
            return await error_response(msg=f'Instance already exist.',
                                        status=400)
    @staticmethod
    def prepare_data(request):
        # Hook for subclasses to massage the request payload before create.
        return request.json
    @check_uuid
    async def delete(self, request):
        # NOTE(review): this handler only fetches and returns the instance;
        # it never calls .delete(), so nothing is actually removed.
        param_id = request.raw_args.get('id')
        instance = self.model.objects(id=param_id)
        if not instance:
            model_name = self.model.__name__.replace('Model', '')
            return await error_response(msg=f'{model_name} not found',
                                        status=404)
        return await self._make_request(instance)
| [
"Artyr2643@gmail.com"
] | Artyr2643@gmail.com |
254224591b0953224bf8db8c78c02177f1786852 | 3b4719eb840e4a60bdb5bfd3e694dfb3e08593bb | /generate_preds.py | 8bfa41746e69eb8a280d2bbe0e9382602be0510d | [] | no_license | qzhsjz/TSVM-on-Python | 60408d339c344a57dfe2b4cd47f6bd673058f70e | 9b900453bcc065b86a7f9c27c670737fed992943 | refs/heads/master | 2021-01-11T19:01:49.370519 | 2017-01-18T02:54:30 | 2017-01-18T02:54:30 | 79,295,940 | 0 | 0 | null | 2017-01-18T02:40:44 | 2017-01-18T02:40:44 | null | UTF-8 | Python | false | false | 793 | py | from svm_hps import *
from svm_ovo import *
from kernel_gen_pol import *
from kernel_gen_lin import *
from F1 import *
from pandas import *
import numpy as np
x2 = np.array(read_csv("../data/titanic/test_samples.csv"))
y2 = np.array(read_csv("../data/titanic/test_answers.csv"))
x1 = np.array(read_csv("../data/titanic/train_samples.csv"))
y1 = np.array(read_csv("../data/titanic/train_answers.csv"))
n = np.array([])
acc, prm, t = svm_hps(x1,y1,n,x2,y2,svm_ovo,kernel_gen_pol, np.array([[0.1,100.],[0.,0.],[0.,10.],[0.,5.]]),3,5)
nnm = kernel_gen_pol(prm[2:])
f, SX, SY, SA, t = svm_ovo(x1, y1, n, prm[0], prm[1], nnm)
preds = map(f, x2)
y2 = pd.DataFrame(y2)
preds = pd.DataFrame(preds)
y2.to_csv("test_answers.csv", index = False)
preds.to_csv("pred_answers.csv", index = False)
| [
"esengie@esengie-K53SV.(none)"
] | esengie@esengie-K53SV.(none) |
da8775e18d3b0e6f3cfa5b7ce00126f7f11d9688 | b819632a899cc4919c4efb097b87009a9d07d209 | /testbed_nodel11_vm_container.py | a54514a0093d7fb87304a63cdeb2ee24793ed008 | [] | no_license | NuthanChandra/ctools | bb2570786d9b1a584c5b08800f48b02ed8664480 | bcb967c53375104e32b32c8f0d2b3ca25ed69e49 | refs/heads/master | 2022-11-28T04:25:30.092129 | 2020-04-14T12:38:27 | 2020-04-14T12:38:27 | 255,604,269 | 1 | 1 | null | 2020-07-23T16:29:45 | 2020-04-14T12:34:11 | Python | UTF-8 | Python | false | false | 4,050 | py | from fabric.api import env
import os
host1 = 'root@10.204.216.115'
host2 = 'root@10.204.216.116'
host3 = 'root@10.204.216.117'
host4 = 'root@10.204.216.118'
host5 = 'root@10.204.216.119'
host6 = 'root@10.204.216.125'
kvm_nodel12 = '10.204.216.114'
ext_routers = [('hooper','10.204.217.240')]
router_asn = 64512
public_vn_rtgt = 2225
public_vn_subnet = '10.204.221.160/28'
host_build = 'stack@10.204.216.49'
{env_roledefs}
#env.roledefs = {
# 'all': [host1,host2,host3,host4,host5,host6],
# 'cfgm': [host1, host2],
# 'webui': [host1],
# 'openstack': [host1],
# 'control': [host2, host3],
# 'collector': [host1],
# 'database': [host1, host2, host3],
# 'compute': [host4, host5, host6],
# 'build': [host_build]
#}
env.physical_routers={
'hooper' : { 'vendor': 'juniper',
'model' : 'mx',
'asn' : '64512',
'name' : 'hooper',
'ssh_username' : 'root',
'ssh_password' : 'c0ntrail123',
'mgmt_ip' : '10.204.217.240',
}
}
env.hostnames = {
'all': ['nodel12-vm1', 'nodel12-vm2', 'nodel12-vm3', 'nodel12-vm4', 'nodel12-vm5', 'nodel12-vm6']
}
env.openstack_admin_password = 'contrail123'
env.password = 'c0ntrail123'
env.passwords = {
host1: 'c0ntrail123',
host2: 'c0ntrail123',
host3: 'c0ntrail123',
host4: 'c0ntrail123',
host5: 'c0ntrail123',
host6: 'c0ntrail123',
host_build: 'stack@123',
}
reimage_param = 'ubuntu-14.04.5'
vm_node_details = {
'default': {
'image_dest' : '/mnt/disk1/images/',
'ram' : '32768',
'server': kvm_nodel12,
'vcpus' : '4',
'disk_format' : 'qcow2',
'image_source' : 'http://10.204.217.158/images/node_vm_images/%s-256G.img.gz' % (reimage_param),
},
host1 : {
'name' : 'nodel12-vm1',
'network' : [{'bridge' : 'br1', 'mac':'52:53:59:01:00:01'}
],
},
host2 : { 'name' : 'nodel12-vm2',
'network' : [{'bridge' : 'br1', 'mac':'52:53:59:01:00:02'}
]
},
host3 : { 'name' : 'nodel12-vm3',
'network' : [{'bridge' : 'br1', 'mac':'52:53:59:01:00:03'}
]
},
host4 : { 'name' : 'nodel12-vm4',
'network' : [{'bridge' : 'br1', 'mac':'52:53:59:01:00:04'}
]
},
host5 : { 'name' : 'nodel12-vm5',
'network' : [{'bridge' : 'br1', 'mac':'52:53:59:01:00:05'}
]
},
host6 : { 'name' : 'nodel12-vm6',
'network' : [{'bridge' : 'br1', 'mac':'52:53:59:01:00:06'}
]
}
}
env.keystone = {'admin_password': 'c0ntrail123'}
env.openstack = {'manage_amqp': "true"}
minimum_diskGB=32
env.kernel_upgrade=False
env.rsyslog_params = {'port':19876, 'proto':'tcp', 'collector':'dynamic', 'status':'enable'}
env.test_repo_dir='/home/stack/multi_interface_parallel/centos65/icehouse/contrail-test'
env.mail_from='contrail-build@juniper.net'
env.mail_to='dl-contrail-sw@juniper.net'
multi_tenancy=True
env.interface_rename = True
env.enable_lbaas = True
enable_ceilometer = True
ceilometer_polling_interval = 60
env.encap_priority = "'VXLAN','MPLSoUDP','MPLSoGRE'"
env.log_scenario='Multi-Node Nodel12 Contrainer Sanity[mgmt, ctrl=data]'
env.ntp_server = '10.204.217.158'
| [
"nuthanc@juniper.net"
] | nuthanc@juniper.net |
93f7e9fbb43b6cfe911188440c10510bd94cd5be | 10933f33099b423c5971d12993de07cc6f7d0f07 | /python_scripts/oci_3layer.py | aaba84aa9465707fc5e2577b74818c16c6aa4614 | [] | no_license | Joako360/Voice-Identification | ed521d3fe41c6d862ab72e4585b1600742295847 | 744cb2276097c2839e7bd5f5db9f461d44e48b25 | refs/heads/master | 2023-03-15T15:10:30.219968 | 2019-04-29T02:56:09 | 2019-04-29T02:56:09 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,389 | py | import os
import sys
import time
import librosa
import tflearn
import pydub
import wave
import pickle
import speech_data
import segment_data
import tensorflow as tf
import librosa.display
import numpy as np
# load constants - training directory, testing directory
training = '/home/cc/Data/train/'
testing = '/home/cc/Data/test/'
# calculate the mfcc matrices for training from the segmented data
X = []
Y = []
speakers = speech_data.get_speakers(training)
for f in os.listdir(training):
Y.append(speech_data.one_hot_from_item(speech_data.speaker(f), speakers))
y, sr = librosa.load(training + f)
mfcc = np.asarray(librosa.feature.mfcc(y=y, sr=sr, n_mfcc=20))
X.append(mfcc)
# input size for fully connected layers
layer_size = int(sys.argv[1])
dropout = float(sys.argv[2])
# define the network and the model for training
tflearn.init_graph(num_cores=8, gpu_memory_fraction=0.5)
# for just mfcc
net = tflearn.input_data(shape=[None, 20, 87])
net = tflearn.fully_connected(net, layer_size)
net = tflearn.fully_connected(net, layer_size)
net = tflearn.fully_connected(net, layer_size)
net = tflearn.dropout(net, dropout)
net = tflearn.fully_connected(net, len(speakers), activation='softmax')
net = tflearn.regression(net, optimizer='adam', loss='categorical_crossentropy')
# now train the model!
t0 = time.time()
model = tflearn.DNN(net)
model.fit(X, Y, n_epoch=100, show_metric=True, snapshot_step=1000, validation_set=0.05)
t1 = time.time()
# test the trained model using the testing directory
# calculate the mfcc matrices for testing from the segmented data
Xtest = []
Ytest = []
speakers = speech_data.get_speakers(testing)
for f in os.listdir(testing):
Ytest.append(speech_data.one_hot_from_item(speech_data.speaker(f), speakers))
y, sr = librosa.load(testing + f)
Xtest.append(librosa.feature.mfcc(y=y, sr=sr, n_mfcc=20))
# now test model over the test segments
result = model.predict(Xtest)
c = 0
for f,r in zip(os.listdir(testing), result):
res = speech_data.one_hot_to_item(r, speakers)
if res in f:
c = c + 1
acc = float(c) / float(len(Xtest))
# now output to a text file for comparison
l = ['Layer Size : ' + str(layer_size), 'Dropout: ' + str(dropout), 'Test Acc: ' + str(acc), 'Train time: ' + str(t1 - t0)]
with open('oci_3layer_stats.txt', 'a') as myfile:
[myfile.write(a + ' , ') for a in l]
myfile.write('\n')
| [
"drew.boles88@gmail.com"
] | drew.boles88@gmail.com |
f05d5623522500c5c226e1312c11a49f091677f5 | 422c3d0ff2f02b8393d82d89ffdfe4d08e817d36 | /myapp/admin/views.py | 9f58e6c802b2201e7cfc964b996a744602a86cf7 | [] | no_license | juanda/blue | dd132857589d16464cbcaa6a8aacada99d5f2d43 | 2291959b5a2fc395f9c3c8d4a0af12ce5eb725af | refs/heads/master | 2021-01-12T00:05:15.774907 | 2017-01-11T18:56:21 | 2017-01-11T18:56:21 | 78,669,455 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 130 | py | from flask import render_template
from . import admin
@admin.route('/admin')
def admin():
return render_template('admin.html')
| [
"juanda@yuido.com"
] | juanda@yuido.com |
315b999cddf33d0a70d199ad6d25cc2503896eb6 | d271993bf1579e835995e0d4c2427dbe1abddde3 | /gim/migrations/0012_product.py | 78667b6c7c26ef349504b185af5ad7ddc41b428e | [] | no_license | Mesus/Uvis | 5392507083abf02a6a98c59c20bb82a82072acbf | 9dd54dea852a61a60d1ac54611566c03da6f4dfd | refs/heads/master | 2021-01-22T19:59:16.065873 | 2017-03-17T03:19:32 | 2017-03-17T03:24:40 | 85,266,398 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 573 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.10.3 on 2016-12-01 03:16
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('gim', '0011_area'),
]
operations = [
migrations.CreateModel(
name='product',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=150, null=True)),
],
),
]
| [
"304073257@qq.com"
] | 304073257@qq.com |
8f377dbae4bdfac6f266dec47f88176f4f0e1eca | b50f07920a48df36c5303e6bbd35ff1eafbece16 | /jms/expression.py | 0668bf0f294aef112c6ee929ab72cafc5af0faa2 | [] | no_license | principia12/project_re | ed21cd369412d440ae794fd7ff422400988be5e3 | d165026e08cd1efd27ed9a0147aaf790f9374916 | refs/heads/master | 2020-08-27T19:39:08.872522 | 2019-11-07T09:31:04 | 2019-11-07T09:31:04 | 217,472,878 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,657 | py |
from abc import ABC, abstractmethod
from .common import ConsumeFailException, is_valid_char, is_whitespace, is_word_char, is_numeric
from .tokenizer import TokenType
class Expr(ABC):
@abstractmethod
def consume(self, text, idx):
pass
@classmethod
def from_token(cls, token):
if token.token_type == TokenType.CHAR:
return Term(token.value)
elif token.token_type == TokenType.ANCHOR_CHAR:
return AnchorTerm(token.value)
elif token.token_type in [TokenType.CLASS_CHAR, TokenType.WILDCARD_CHAR]:
return ClassTerm(token.value)
else:
raise ValueError()
@classmethod
def with_and(cls, exprs):
return AndExpr(exprs)
@classmethod
def with_or(cls, exprs):
return OrExpr(exprs)
@staticmethod
def get_char(text, idx):
if idx >= len(text):
raise ConsumeFailException()
return text[idx]
class EmptyTerm(Expr):
def consume(self, text, idx):
return idx
class Term(Expr):
def __init__(self, c):
self.c = c
def consume(self, text, idx):
c = self.get_char(text, idx)
if c == self.c:
return idx + 1
else:
raise ConsumeFailException()
class AnchorTerm(Expr):
check_funcs = {
'^': lambda text, idx: idx == 0,
'$': lambda text, idx: idx == len(text)
}
def __init__(self, c):
self.check_func = self.check_funcs[c]
def consume(self, text, idx):
if self.check_func(text, idx):
return idx
else:
raise ConsumeFailException()
class ClassTerm(Expr):
check_funcs = {
'.': is_valid_char,
'd': is_numeric,
'w': is_word_char,
's': is_whitespace,
}
def __init__(self, c: str):
self.positive = c == '.' or c.islower()
self.check_func = self.check_funcs[c.lower()]
def consume(self, text, idx):
c = self.get_char(text, idx)
if self.check_func(c) == self.positive:
return idx + 1
else:
raise ConsumeFailException()
class AndExpr(Expr):
def __init__(self, exprs):
self.exprs = exprs
def consume(self, text, idx):
for expr in self.exprs:
idx = expr.consume(text, idx)
return idx
class OrExpr(Expr):
def __init__(self, exprs):
self.exprs = exprs
def consume(self, text, idx):
for expr in self.exprs:
try:
return expr.consume(text, idx)
except ConsumeFailException:
pass
raise ConsumeFailException()
| [
"jms7446@gmail.com"
] | jms7446@gmail.com |
100838865d1fd32a0d5a82570553b60d6f2b6b9e | 4734604d2a835ae6e9050626d27e933a4e58aeb7 | /Exercise/kaggle/HousingPricePredicting/HousePricePredicting_NumAndCat.py | 1bea9b1337b54ad7ae5fde54facf59486cf786a8 | [] | no_license | iamjunwei/TensorflowLearning | a9a44a4db590a33d39d1c14eeecb4ba86d89cdc7 | 586cf6af5ac31c5dca9824351f734a150f3afa5c | refs/heads/master | 2020-04-05T17:52:09.289319 | 2019-01-22T15:31:53 | 2019-01-22T15:31:53 | 157,079,220 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,314 | py | import tensorflow as tf
import pandas as pd
from sklearn.ensemble import IsolationForest
from sklearn.preprocessing import MinMaxScaler
from sklearn.model_selection import train_test_split
import numpy as np
import itertools
import matplotlib
import matplotlib.pyplot as plt
# Deep Neural Network for continuous and categorical features
train = pd.read_csv("./train.csv")
train.drop("Id", axis=1, inplace=True)
train_numerical = train.select_dtypes(exclude=["object"])
train_numerical.fillna(0, inplace=True)
train_categoric = train.select_dtypes(include=["object"])
train_categoric.fillna("NONE", inplace=True)
train = train_numerical.merge(train_categoric, left_index=True, right_index=True)
test = pd.read_csv("./test.csv")
ID = test.Id
test.drop("Id", axis=1, inplace=True)
test_numerical = test.select_dtypes(exclude=["object"])
test_numerical.fillna(0, inplace=True)
test_categoric = test.select_dtypes(include=["object"])
test_categoric.fillna("NONE", inplace=True)
test = test_numerical.merge(test_categoric, left_index=True, right_index=True)
clf = IsolationForest(max_samples=100, random_state=42)
clf.fit(train_numerical)
y_noano = clf.predict(train_numerical)
y_noano = pd.DataFrame(y_noano, columns=["Top"])
train_numerical = train_numerical.iloc[y_noano[y_noano["Top"] == 1].index.values]
train_numerical.reset_index(drop=True, inplace=True)
train_categoric = train_categoric.iloc[y_noano[y_noano["Top"] == 1].index.values]
train_categoric.reset_index(drop=True, inplace=True)
train = train.iloc[y_noano[y_noano["Top"] == 1].index.values]
train.reset_index(drop=True, inplace=True)
col_train_num = list(train_numerical.columns)
col_train_num_bis = list(train_numerical.columns)
col_train_cat = list(train_categoric.columns)
col_train_num_bis.remove("SalePrice")
mat_train = np.matrix(train_numerical)
mat_test = np.matrix(test_numerical)
mat_new = np.matrix(train_numerical.drop("SalePrice", axis=1))
mat_y = np.matrix(train.SalePrice)
prepro_y = MinMaxScaler()
prepro_y.fit(mat_y.reshape(1314, 1))
prepro = MinMaxScaler()
prepro.fit(mat_train)
prepro_test = MinMaxScaler()
prepro_test.fit(mat_new)
train_num_scale = pd.DataFrame(prepro.transform(mat_train), columns=col_train_num)
test_num_scale = pd.DataFrame(prepro_test.transform(mat_test), columns=col_train_num_bis)
train[col_train_num] = pd.DataFrame(prepro.transform(mat_train), columns=col_train_num)
test[col_train_num_bis] = pd.DataFrame(prepro_test.transform(mat_test), columns=col_train_num_bis)
# numerical and categorical features -> engineered features
COLUMNS = col_train_num
FEATURES = col_train_num_bis
LABEL = "SalePrice"
FEATURES_CAT = col_train_cat
engineered_features = []
for continuous_feature in FEATURES:
engineered_features.append(tf.contrib.layers.real_valued_column(continuous_feature))
for categorical_feature in FEATURES_CAT:
sparse_column = tf.contrib.layers.sparse_column_with_hash_bucket(categorical_feature, hash_bucket_size=1000)
engineered_features.append(tf.contrib.layers.embedding_column(sparse_id_column=sparse_column,
dimension=16,
combiner="sum"))
training_set = train[FEATURES + FEATURES_CAT]
prediction_set = train.SalePrice
x_train, x_test, y_train, y_test = train_test_split(training_set, prediction_set, test_size=0.33, random_state=42)
y_train = pd.DataFrame(y_train, columns=[LABEL])
training_set = pd.DataFrame(x_train, columns=FEATURES + FEATURES_CAT)\
.merge(y_train, left_index=True, right_index=True)
print(FEATURES + FEATURES_CAT)
training_sub = training_set[FEATURES + FEATURES_CAT]
testing_sub = test[FEATURES + FEATURES_CAT]
y_test = pd.DataFrame(y_test, columns=[LABEL])
testing_set = pd.DataFrame(x_test, columns=FEATURES + FEATURES_CAT)\
.merge(y_test, left_index=True, right_index=True)
training_set[FEATURES_CAT] = training_set[FEATURES_CAT].applymap(str)
testing_set[FEATURES_CAT] = testing_set[FEATURES_CAT].applymap(str)
def input_fn_new(data_set, training=True):
continuous_cols = {k: tf.constant(data_set[k].values) for k in FEATURES}
categorical_cols = {k: tf.SparseTensor(indices=[[i, 0] for i in range(data_set[k].size)],
values=data_set[k].values,
dense_shape=[data_set[k].size, 1]) for k in FEATURES_CAT}
feature_cols = dict(list(continuous_cols.items()) + list(categorical_cols.items()))
if training == True:
label = tf.constant(data_set[LABEL].values)
return feature_cols, label
return feature_cols
regressor = tf.contrib.learn.DNNRegressor(feature_columns=engineered_features,
activation_fn=tf.nn.relu,
hidden_units=[200, 100, 50, 25, 12])
categorical_cols = {k: tf.SparseTensor(indices=[[i, 0] for i in range(training_set[k].size)],
values=training_set[k].values,
dense_shape=[training_set[k].size, 1]) for k in FEATURES_CAT}
regressor.fit(input_fn=lambda: input_fn_new(training_set), steps=2000)
ev = regressor.evaluate(input_fn=lambda: input_fn_new(testing_set, training=True), steps=1)
loss_score = ev["loss"]
print("Final Loss on the testing set: {0:f}".format(loss_score))
y = regressor.predict(input_fn=lambda: input_fn_new(testing_set))
predictions = list(itertools.islice(y, testing_set.shape[0]))
predictions = pd.DataFrame(prepro_y.inverse_transform(np.array(predictions).reshape(434, 1)),
columns=["Prediction"])
reality = pd.DataFrame(prepro.inverse_transform(testing_set[COLUMNS]), columns=[COLUMNS]).SalePrice
matplotlib.rc('xtick', labelsize=12)
matplotlib.rc('ytick', labelsize=12)
fig, ax = plt.subplots(figsize=(10, 8))
plt.style.use('ggplot')
plt.plot(predictions.values, reality.values, 'ro')
plt.xlabel('Predictions', fontsize=12)
plt.ylabel('Reality', fontsize=12)
plt.title('Predictions x Reality on dataset Test', fontsize=12)
ax.plot([reality.min(), reality.max()], [reality.min(), reality.max()], 'k--', lw=4)
plt.show()
y_predict = regressor.predict(input_fn=lambda: input_fn_new(testing_sub, training=False))
def to_submit(pred_y, name_out):
y_predict = list(itertools.islice(pred_y, test.shape[0]))
y_predict = pd.DataFrame(prepro_y.inverse_transform(np.array(y_predict).reshape(test.shape[0], 1)),
columns=["Prediction"])
y_predict = y_predict.join(ID)
y_predict.to_csv(name_out + ".csv", index=False)
to_submit(y_predict, "submission_continuous_and_category")
# Shallow Network
regressor = tf.contrib.learn.DNNRegressor(feature_columns=engineered_features,
activation_fn=tf.nn.relu,
hidden_units=[1000])
regressor.fit(input_fn=lambda: input_fn_new(training_set), steps=2000)
ev = regressor.evaluate(input_fn=lambda: input_fn_new(training_set, training=True), steps=1)
loss_score_shallow = ev["loss"]
print("Final Loss on the testing set: {0:f}".format(loss_score_shallow))
y_predict = regressor.predict(input_fn=lambda: input_fn_new(testing_sub, training=False))
to_submit(y_predict, "submission_continuous_and_category_shallow")
| [
"xiajunwei0713@163.com"
] | xiajunwei0713@163.com |
b3dfa3fff3e16c7710431debde8cfa47233c5fea | 652f467ebefadaa2da28d36d1da54e5be993ff70 | /tests/integration_tests/scripts/choose_strand.py | 3a21ae45c2c4494fad3a9d89f5fb7b44557570c6 | [
"MIT"
] | permissive | malonge/RagTag | e64d807797e6f8a916b9cc4af5748b1ab0895e4c | df751bfeacbf0d9529eb2a0c82b10684407db59e | refs/heads/master | 2023-04-12T08:16:27.410868 | 2022-12-27T22:32:03 | 2022-12-27T22:32:03 | 242,898,323 | 354 | 46 | MIT | 2022-11-30T20:36:23 | 2020-02-25T03:13:56 | Python | UTF-8 | Python | false | false | 341 | py | #!/usr/bin/env python
import sys
import pysam
from ragtag_utilities.utilities import reverse_complement
# Only one argument: FASTA file
x = pysam.FastaFile(sys.argv[1])
for i in x.references:
print(">" + i)
s1 = x.fetch(i)
s2 = reverse_complement(s1)
if s1 < s2:
print(s1)
else:
print(s2)
x.close() | [
"malonge11@gmail.com"
] | malonge11@gmail.com |
e3e20c4815f94f2e31e28474bdc0058ee5c7bdbb | 69f25bc9f53fd6b93a2aafb4d6973b38c7a56371 | /homework/migrations/0003_product_price.py | ca166231f377341f9c96df8d3a009e6abcfcbab0 | [] | no_license | sahara66/Dz_3-master | 2ceff65165d03c8264fe124a76c92b3111535a6e | 738a651eb660b9fc19592912037b56521de31f85 | refs/heads/master | 2023-06-16T05:35:38.920825 | 2021-07-10T09:01:55 | 2021-07-10T09:01:55 | 384,655,291 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 384 | py | # Generated by Django 3.2.2 on 2021-05-08 15:52
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('homework', '0002_auto_20210508_1401'),
]
operations = [
migrations.AddField(
model_name='product',
name='price',
field=models.IntegerField(null=True),
),
]
| [
"fasterkombast@gmail.com"
] | fasterkombast@gmail.com |
7b50501068693c67817ab9351f21fd24bab7380a | 5e4f98d3808b98c980b902f4ce6dc3864fefc365 | /ipython_config.py | b62195b5c7096e72a48c344532d21e5bdd32ccf0 | [] | no_license | tom-doerr/dotfiles | bdd608bd147beafc78b99d1530f4211a1751c069 | 9603e64c4dcfb18626c3051abc660c1c213db817 | refs/heads/master | 2023-08-08T13:31:14.489282 | 2023-07-28T14:26:47 | 2023-07-28T14:26:47 | 117,894,597 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 22,406 | py | # Configuration file for ipython.
#------------------------------------------------------------------------------
# InteractiveShellApp(Configurable) configuration
#------------------------------------------------------------------------------
## A Mixin for applications that start InteractiveShell instances.
#
# Provides configurables for loading extensions and executing files as part of
# configuring a Shell environment.
#
# The following methods should be called by the :meth:`initialize` method of the
# subclass:
#
# - :meth:`init_path`
# - :meth:`init_shell` (to be implemented by the subclass)
# - :meth:`init_gui_pylab`
# - :meth:`init_extensions`
# - :meth:`init_code`
## Execute the given command string.
#c.InteractiveShellApp.code_to_run = ''
## Run the file referenced by the PYTHONSTARTUP environment variable at IPython
# startup.
#c.InteractiveShellApp.exec_PYTHONSTARTUP = True
## List of files to run at IPython startup.
#c.InteractiveShellApp.exec_files = []
## lines of code to run at IPython startup.
#c.InteractiveShellApp.exec_lines = []
## A list of dotted module names of IPython extensions to load.
#c.InteractiveShellApp.extensions = []
## dotted module name of an IPython extension to load.
#c.InteractiveShellApp.extra_extension = ''
## A file to be run
#c.InteractiveShellApp.file_to_run = ''
## Enable GUI event loop integration with any of ('glut', 'gtk', 'gtk2', 'gtk3',
# 'osx', 'pyglet', 'qt', 'qt4', 'qt5', 'tk', 'wx', 'gtk2', 'qt4').
#c.InteractiveShellApp.gui = None
## Should variables loaded at startup (by startup files, exec_lines, etc.) be
# hidden from tools like %who?
#c.InteractiveShellApp.hide_initial_ns = True
## Configure matplotlib for interactive use with the default matplotlib backend.
#c.InteractiveShellApp.matplotlib = None
## Run the module as a script.
#c.InteractiveShellApp.module_to_run = ''
## Pre-load matplotlib and numpy for interactive use, selecting a particular
# matplotlib backend and loop integration.
#c.InteractiveShellApp.pylab = None
## If true, IPython will populate the user namespace with numpy, pylab, etc. and
# an ``import *`` is done from numpy and pylab, when using pylab mode.
#
# When False, pylab mode should not import any names into the user namespace.
#c.InteractiveShellApp.pylab_import_all = True
## Reraise exceptions encountered loading IPython extensions?
#c.InteractiveShellApp.reraise_ipython_extension_failures = False
#------------------------------------------------------------------------------
# Application(SingletonConfigurable) configuration
#------------------------------------------------------------------------------
## This is an application.
## The date format used by logging formatters for %(asctime)s
#c.Application.log_datefmt = '%Y-%m-%d %H:%M:%S'
## The Logging format template
#c.Application.log_format = '[%(name)s]%(highlevel)s %(message)s'
## Set the log level by value or name.
#c.Application.log_level = 30
#------------------------------------------------------------------------------
# BaseIPythonApplication(Application) configuration
#------------------------------------------------------------------------------
## IPython: an enhanced interactive Python shell.
## Whether to create profile dir if it doesn't exist
#c.BaseIPythonApplication.auto_create = False
## Whether to install the default config files into the profile dir. If a new
# profile is being created, and IPython contains config files for that profile,
# then they will be staged into the new directory. Otherwise, default config
# files will be automatically generated.
#c.BaseIPythonApplication.copy_config_files = False
## Path to an extra config file to load.
#
# If specified, load this config file in addition to any other IPython config.
#c.BaseIPythonApplication.extra_config_file = ''
## The name of the IPython directory. This directory is used for logging
# configuration (through profiles), history storage, etc. The default is usually
# $HOME/.ipython. This option can also be specified through the environment
# variable IPYTHONDIR.
#c.BaseIPythonApplication.ipython_dir = ''
## Whether to overwrite existing config files when copying
#c.BaseIPythonApplication.overwrite = False
## The IPython profile to use.
#c.BaseIPythonApplication.profile = 'default'
## Create a massive crash report when IPython encounters what may be an internal
# error. The default is to append a short message to the usual traceback
#c.BaseIPythonApplication.verbose_crash = False
#------------------------------------------------------------------------------
# TerminalIPythonApp(BaseIPythonApplication,InteractiveShellApp) configuration
#------------------------------------------------------------------------------
## Whether to display a banner upon starting IPython.
#c.TerminalIPythonApp.display_banner = True
## If a command or file is given via the command-line, e.g. 'ipython foo.py',
# start an interactive shell after executing the file or command.
#c.TerminalIPythonApp.force_interact = False
## Start IPython quickly by skipping the loading of config files.
#c.TerminalIPythonApp.quick = False
#------------------------------------------------------------------------------
# InteractiveShell(SingletonConfigurable) configuration
#------------------------------------------------------------------------------
## An enhanced, interactive shell for Python.
## 'all', 'last', 'last_expr' or 'none', specifying which nodes should be run
# interactively (displaying output from expressions).
#c.InteractiveShell.ast_node_interactivity = 'last_expr'
## A list of ast.NodeTransformer subclass instances, which will be applied to
# user input before code is run.
#c.InteractiveShell.ast_transformers = []
## Make IPython automatically call any callable object even if you didn't type
# explicit parentheses. For example, 'str 43' becomes 'str(43)' automatically.
# The value can be '0' to disable the feature, '1' for 'smart' autocall, where
# it is not applied if there are no more arguments on the line, and '2' for
# 'full' autocall, where all callable objects are automatically called (even if
# no arguments are present).
#c.InteractiveShell.autocall = 0
## Autoindent IPython code entered interactively.
#c.InteractiveShell.autoindent = True
## Enable magic commands to be called without the leading %.
#c.InteractiveShell.automagic = True
## The part of the banner to be printed before the profile
#c.InteractiveShell.banner1 = 'Python 3.6.1 |Anaconda 4.4.0 (64-bit)| (default, May 11 2017, 13:09:58) \nType "copyright", "credits" or "license" for more information.\n\nIPython 5.3.0 -- An enhanced Interactive Python.\n? -> Introduction and overview of IPython\'s features.\n%quickref -> Quick reference.\nhelp -> Python\'s own help system.\nobject? -> Details about \'object\', use \'object??\' for extra details.\n'
## The part of the banner to be printed after the profile
#c.InteractiveShell.banner2 = ''
## Set the size of the output cache. The default is 1000, you can change it
# permanently in your config file. Setting it to 0 completely disables the
# caching system, and the minimum value accepted is 20 (if you provide a value
# less than 20, it is reset to 0 and a warning is issued). This limit is
# defined because otherwise you'll spend more time re-flushing a too small cache
# than working
#c.InteractiveShell.cache_size = 1000
## Use colors for displaying information about objects. Because this information
# is passed through a pager (like 'less'), and some pagers get confused with
# color codes, this capability can be turned off.
#c.InteractiveShell.color_info = True
## Set the color scheme (NoColor, Neutral, Linux, or LightBG).
#c.InteractiveShell.colors = 'Neutral'
##
#c.InteractiveShell.debug = False
## **Deprecated**
#
# Will be removed in IPython 6.0
#
# Enable deep (recursive) reloading by default. IPython can use the deep_reload
# module which reloads changes in modules recursively (it replaces the reload()
# function, so you don't need to change anything to use it). `deep_reload`
# forces a full reload of modules whose code may have changed, which the default
# reload() function does not. When deep_reload is off, IPython will use the
# normal reload(), but deep_reload will still be available as dreload().
#c.InteractiveShell.deep_reload = False
## Don't call post-execute functions that have failed in the past.
#c.InteractiveShell.disable_failing_post_execute = False
## If True, anything that would be passed to the pager will be displayed as
# regular output instead.
#c.InteractiveShell.display_page = False
## (Provisional API) enables html representation in mime bundles sent to pagers.
#c.InteractiveShell.enable_html_pager = False
## Total length of command history
#c.InteractiveShell.history_length = 10000
## The number of saved history entries to be loaded into the history buffer at
# startup.
#c.InteractiveShell.history_load_length = 1000
##
#c.InteractiveShell.ipython_dir = ''
## Start logging to the given file in append mode. Use `logfile` to specify a log
# file to **overwrite** logs to.
#c.InteractiveShell.logappend = ''
## The name of the logfile to use.
#c.InteractiveShell.logfile = ''
## Start logging to the default log file in overwrite mode. Use `logappend` to
# specify a log file to **append** logs to.
#c.InteractiveShell.logstart = False
##
#c.InteractiveShell.object_info_string_level = 0
## Automatically call the pdb debugger after every exception.
#c.InteractiveShell.pdb = False
## Deprecated since IPython 4.0 and ignored since 5.0, set
# TerminalInteractiveShell.prompts object directly.
#c.InteractiveShell.prompt_in1 = 'In [\\#]: '
## Deprecated since IPython 4.0 and ignored since 5.0, set
# TerminalInteractiveShell.prompts object directly.
#c.InteractiveShell.prompt_in2 = ' .\\D.: '
## Deprecated since IPython 4.0 and ignored since 5.0, set
# TerminalInteractiveShell.prompts object directly.
#c.InteractiveShell.prompt_out = 'Out[\\#]: '
## Deprecated since IPython 4.0 and ignored since 5.0, set
# TerminalInteractiveShell.prompts object directly.
#c.InteractiveShell.prompts_pad_left = True
##
#c.InteractiveShell.quiet = False
##
#c.InteractiveShell.separate_in = '\n'
##
#c.InteractiveShell.separate_out = ''
##
#c.InteractiveShell.separate_out2 = ''
## Show rewritten input, e.g. for autocall.
#c.InteractiveShell.show_rewritten_input = True
## Enables rich html representation of docstrings. (This requires the docrepr
# module).
#c.InteractiveShell.sphinxify_docstring = False
##
#c.InteractiveShell.wildcards_case_sensitive = True
##
#c.InteractiveShell.xmode = 'Context'
#------------------------------------------------------------------------------
# TerminalInteractiveShell(InteractiveShell) configuration
#------------------------------------------------------------------------------
## Set to confirm when you try to exit IPython with an EOF (Control-D in Unix,
# Control-Z/Enter in Windows). By typing 'exit' or 'quit', you can force a
# direct exit without any confirmation.
#c.TerminalInteractiveShell.confirm_exit = True
## Options for displaying tab completions, 'column', 'multicolumn', and
# 'readlinelike'. These options are for `prompt_toolkit`, see `prompt_toolkit`
# documentation for more information.
#c.TerminalInteractiveShell.display_completions = 'multicolumn'
## Shortcut style to use at the prompt. 'vi' or 'emacs'.
#c.TerminalInteractiveShell.editing_mode = 'emacs'
## Set the editor used by IPython (default to $EDITOR/vi/notepad).
#c.TerminalInteractiveShell.editor = 'vi'
## Enable vi (v) or Emacs (C-X C-E) shortcuts to open an external editor. This is
# in addition to the F2 binding, which is always enabled.
#c.TerminalInteractiveShell.extra_open_editor_shortcuts = False
## Highlight matching brackets.
#c.TerminalInteractiveShell.highlight_matching_brackets = True
## The name or class of a Pygments style to use for syntax
# highlighting:
# default, emacs, friendly, colorful, autumn, murphy, manni, monokai, perldoc, pastie, borland, trac, native, fruity, bw, vim, vs, tango, rrt, xcode, igor, paraiso-light, paraiso-dark, lovelace, algol, algol_nu, arduino, rainbow_dash, abap
#c.TerminalInteractiveShell.highlighting_style = traitlets.Undefined
## Override highlighting format for specific tokens
#c.TerminalInteractiveShell.highlighting_style_overrides = {}
## Enable mouse support in the prompt
#c.TerminalInteractiveShell.mouse_support = False
## Class used to generate Prompt token for prompt_toolkit
#c.TerminalInteractiveShell.prompts_class = 'IPython.terminal.prompts.Prompts'
## Use `raw_input` for the REPL, without completion, multiline input, and prompt
# colors.
#
# Useful when controlling IPython as a subprocess, and piping STDIN/OUT/ERR.
# Known usage are: IPython own testing machinery, and emacs inferior-shell
# integration through elpy.
#
# This mode default to `True` if the `IPY_TEST_SIMPLE_PROMPT` environment
# variable is set, or the current terminal is not a tty.
#c.TerminalInteractiveShell.simple_prompt = False
## Number of line at the bottom of the screen to reserve for the completion menu
#c.TerminalInteractiveShell.space_for_menu = 6
## Automatically set the terminal title
#c.TerminalInteractiveShell.term_title = True
## Use 24bit colors instead of 256 colors in prompt highlighting. If your
# terminal supports true color, the following command should print 'TRUECOLOR'
# in orange: printf "\x1b[38;2;255;100;0mTRUECOLOR\x1b[0m\n"
#c.TerminalInteractiveShell.true_color = False
#------------------------------------------------------------------------------
# HistoryAccessor(HistoryAccessorBase) configuration
#------------------------------------------------------------------------------
## Access the history database without adding to it.
#
# This is intended for use by standalone history tools. IPython shells use
# HistoryManager, below, which is a subclass of this.
## Options for configuring the SQLite connection
#
# These options are passed as keyword args to sqlite3.connect when establishing
# database conenctions.
#c.HistoryAccessor.connection_options = {}
## enable the SQLite history
#
# set enabled=False to disable the SQLite history, in which case there will be
# no stored history, no SQLite connection, and no background saving thread.
# This may be necessary in some threaded environments where IPython is embedded.
#c.HistoryAccessor.enabled = True
## Path to file to use for SQLite history database.
#
# By default, IPython will put the history database in the IPython profile
# directory. If you would rather share one history among profiles, you can set
# this value in each, so that they are consistent.
#
# Due to an issue with fcntl, SQLite is known to misbehave on some NFS mounts.
# If you see IPython hanging, try setting this to something on a local disk,
# e.g::
#
# ipython --HistoryManager.hist_file=/tmp/ipython_hist.sqlite
#
# you can also use the specific value `:memory:` (including the colon at both
# end but not the back ticks), to avoid creating an history file.
#c.HistoryAccessor.hist_file = ''
#------------------------------------------------------------------------------
# HistoryManager(HistoryAccessor) configuration
#------------------------------------------------------------------------------
## A class to organize all history-related functionality in one place.
## Write to database every x commands (higher values save disk access & power).
# Values of 1 or less effectively disable caching.
#c.HistoryManager.db_cache_size = 0
## Should the history database include output? (default: no)
#c.HistoryManager.db_log_output = False
#------------------------------------------------------------------------------
# ProfileDir(LoggingConfigurable) configuration
#------------------------------------------------------------------------------
## An object to manage the profile directory and its resources.
#
# The profile directory is used by all IPython applications, to manage
# configuration, logging and security.
#
# This object knows how to find, create and manage these directories. This
# should be used by any code that wants to handle profiles.
## Set the profile location directly. This overrides the logic used by the
# `profile` option.
#c.ProfileDir.location = ''
#------------------------------------------------------------------------------
# BaseFormatter(Configurable) configuration
#------------------------------------------------------------------------------
## A base formatter class that is configurable.
#
# This formatter should usually be used as the base class of all formatters. It
# is a traited :class:`Configurable` class and includes an extensible API for
# users to determine how their objects are formatted. The following logic is
# used to find a function to format an given object.
#
# 1. The object is introspected to see if it has a method with the name
# :attr:`print_method`. If is does, that object is passed to that method
# for formatting.
# 2. If no print method is found, three internal dictionaries are consulted
# to find print method: :attr:`singleton_printers`, :attr:`type_printers`
# and :attr:`deferred_printers`.
#
# Users should use these dictionaries to register functions that will be used to
# compute the format data for their objects (if those objects don't have the
# special print methods). The easiest way of using these dictionaries is through
# the :meth:`for_type` and :meth:`for_type_by_name` methods.
#
# If no function/callable is found to compute the format data, ``None`` is
# returned and this format type is not used.
##
#c.BaseFormatter.deferred_printers = {}
##
#c.BaseFormatter.enabled = True
##
#c.BaseFormatter.singleton_printers = {}
##
#c.BaseFormatter.type_printers = {}
#------------------------------------------------------------------------------
# PlainTextFormatter(BaseFormatter) configuration
#------------------------------------------------------------------------------
## The default pretty-printer.
#
# This uses :mod:`IPython.lib.pretty` to compute the format data of the object.
# If the object cannot be pretty printed, :func:`repr` is used. See the
# documentation of :mod:`IPython.lib.pretty` for details on how to write pretty
# printers. Here is a simple example::
#
# def dtype_pprinter(obj, p, cycle):
# if cycle:
# return p.text('dtype(...)')
# if hasattr(obj, 'fields'):
# if obj.fields is None:
# p.text(repr(obj))
# else:
# p.begin_group(7, 'dtype([')
# for i, field in enumerate(obj.descr):
# if i > 0:
# p.text(',')
# p.breakable()
# p.pretty(field)
# p.end_group(7, '])')
##
#c.PlainTextFormatter.float_precision = ''
## Truncate large collections (lists, dicts, tuples, sets) to this size.
#
# Set to 0 to disable truncation.
#c.PlainTextFormatter.max_seq_length = 1000
##
#c.PlainTextFormatter.max_width = 79
##
#c.PlainTextFormatter.newline = '\n'
##
#c.PlainTextFormatter.pprint = True
##
#c.PlainTextFormatter.verbose = False
#------------------------------------------------------------------------------
# Completer(Configurable) configuration
#------------------------------------------------------------------------------
## Activate greedy completion PENDING DEPRECTION. this is now mostly taken care
# of with Jedi.
#
# This will enable completion on elements of lists, results of function calls,
# etc., but can be unsafe because the code is actually evaluated on TAB.
#c.Completer.greedy = False
#------------------------------------------------------------------------------
# IPCompleter(Completer) configuration
#------------------------------------------------------------------------------
## Extension of the completer class with IPython-specific features
## DEPRECATED as of version 5.0.
#
# Instruct the completer to use __all__ for the completion
#
# Specifically, when completing on ``object.<tab>``.
#
# When True: only those names in obj.__all__ will be included.
#
# When False [default]: the __all__ attribute is ignored
#c.IPCompleter.limit_to__all__ = False
## Whether to merge completion results into a single list
#
# If False, only the completion results from the first non-empty completer will
# be returned.
#c.IPCompleter.merge_completions = True
## Instruct the completer to omit private method names
#
# Specifically, when completing on ``object.<tab>``.
#
# When 2 [default]: all names that start with '_' will be excluded.
#
# When 1: all 'magic' names (``__foo__``) will be excluded.
#
# When 0: nothing will be excluded.
#c.IPCompleter.omit__names = 2
#------------------------------------------------------------------------------
# ScriptMagics(Magics) configuration
#------------------------------------------------------------------------------
## Magics for talking to scripts
#
# This defines a base `%%script` cell magic for running a cell with a program in
# a subprocess, and registers a few top-level magics that call %%script with
# common interpreters.
## Extra script cell magics to define
#
# This generates simple wrappers of `%%script foo` as `%%foo`.
#
# If you want to add script magics that aren't on your path, specify them in
# script_paths
#c.ScriptMagics.script_magics = []
## Dict mapping short 'ruby' names to full paths, such as '/opt/secret/bin/ruby'
#
# Only necessary for items in script_magics where the default path will not find
# the right interpreter.
#c.ScriptMagics.script_paths = {}
#------------------------------------------------------------------------------
# StoreMagics(Magics) configuration
#------------------------------------------------------------------------------
## Lightweight persistence for python variables.
#
# Provides the %store magic.
## If True, any %store-d variables will be automatically restored when IPython
# starts.
#c.StoreMagics.autorestore = False
c.TerminalInteractiveShell.editing_mode = 'vi'
| [
"tom.doerr@tum.de"
] | tom.doerr@tum.de |
75d3392dc40e06676c640968578a29a6e4230e6b | 1e139784a36ce2a26dafaac0bb795b168ca91776 | /electron_project/abstract/migrations/0003_delete_workeraccount.py | bda3728b90ddb267ad2ad6addfa863d7ca628b2e | [] | no_license | TestAccount2077/Mas-Electronics | a9f4431be7ea740b99616cb4ce4acf9bba46096f | 6bb887805900affdcd905deb33b341892bebd41f | refs/heads/master | 2020-03-28T15:11:57.044686 | 2019-01-26T16:01:55 | 2019-01-26T16:01:55 | 148,566,318 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 363 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.2 on 2018-10-20 04:57
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('abstract', '0002_workeraccount'),
]
operations = [
migrations.DeleteModel(
name='WorkerAccount',
),
]
| [
"maselectronics594@gmail.com"
] | maselectronics594@gmail.com |
6a6293a6a797e6bfa61d9f42a97405b209674de1 | 87b4f4074c3eb18ed4b83e698237205637a249b0 | /Examples/IPv4 Address/add_static_ip4_address_example/add_static_ip4_address_example_page.py | 4b4d0419e59cdc6d7b922628f12e4549dee45d3a | [
"Apache-2.0"
] | permissive | glennmcallister/gateway-workflows | 1b18d5c3a4bb8d0be6cf343f184144b5136458d0 | d9daa0ba4efa9715ed40ef7e54b2b98fba4bb63e | refs/heads/master | 2021-03-24T13:24:39.607721 | 2017-10-25T20:30:22 | 2017-10-25T20:33:02 | 109,297,506 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,211 | py | # Copyright 2017 BlueCat Networks. All rights reserved.
# Various Flask framework items.
import os
import sys
import importlib
from flask import url_for, redirect, render_template, flash, g, request
from bluecat import route, util
from bluecat.api_exception import APIException
import config.default_config as config
from main_app import app
from .add_static_ip4_address_example_form import GenericFormTemplate
# Import the common; this type of import is requried due to a space in the name
ip4_example_common = importlib.import_module("bluecat_portal.workflows.Examples.IPv4 Address.ip4_example_common")
def module_path():
encoding = sys.getfilesystemencoding()
return os.path.dirname(os.path.abspath(unicode(__file__, encoding)))
# The workflow name must be the first part of any endpoints defined in this file.
# If you break this rule, you will trip up on other people's endpoint names and
# chaos will ensue.
@route(app, '/add_static_ip4_address_example/add_static_ip4_address_example_endpoint')
@util.workflow_permission_required('add_static_ip4_address_example_page')
@util.exception_catcher
def add_static_ip4_address_example_add_static_ip4_address_example_page():
form = GenericFormTemplate()
# Remove this line if your workflow does not need to select a configuration
form.configuration.choices = util.get_configurations(default_val=True)
return render_template(
'add_static_ip4_address_example_page.html',
form=form,
text=util.get_text(module_path(), config.language),
options=g.user.get_options()
)
@route(app, '/add_static_ip4_address_example/form', methods=['POST'])
@util.workflow_permission_required('add_static_ip4_address_example_page')
@util.exception_catcher
def add_static_ip4_address_example_add_static_ip4_address_example_page_form():
form = GenericFormTemplate()
# Remove this line if your workflow does not need to select a configuration
form.configuration.choices = util.get_configurations(default_val=True)
if form.validate_on_submit():
try:
# Retrieve form attributes
configuration = g.user.get_api().get_entity_by_id(form.configuration.data)
selected_view = request.form.get('view', '')
selected_hostname = request.form.get('hostname', '')
hostinfo = ''
if selected_view != '' and selected_hostname != '':
view = configuration.get_view(selected_view)
hostinfo = util.safe_str(selected_hostname) + '.' + util.safe_str(request.form.get('zone', '')) + ',' + util.safe_str(view.get_id()) + ',' + 'true' + ',' + 'false'
properties = 'name=' + form.description.data
# Assign ip4 object
ip4_object = configuration.assign_ip4_address(request.form.get('ip4_address', ''), form.mac_address.data, hostinfo, 'MAKE_STATIC', properties)
# Put form processing code here
g.user.logger.info('Success - Static IP4 Address ' + ip4_object.get_property('address') + ' Added with Object ID: ' + util.safe_str(ip4_object.get_id()))
flash('Success - Static IP4 Address ' + ip4_object.get_property('address') + ' Added with Object ID: ' + util.safe_str(ip4_object.get_id()), 'succeed')
return redirect(url_for('add_static_ip4_address_exampleadd_static_ip4_address_example_add_static_ip4_address_example_page'))
except Exception as e:
flash(util.safe_str(e))
# Log error and render workflow page
g.user.logger.warning('%s' % util.safe_str(e), msg_type=g.user.logger.EXCEPTION)
return render_template('add_static_ip4_address_example_page.html',
form=form,
text=util.get_text(module_path(), config.language),
options=g.user.get_options())
else:
g.user.logger.info('Form data was not valid.')
return render_template('add_static_ip4_address_example_page.html',
form=form,
text=util.get_text(module_path(), config.language),
options=g.user.get_options())
| [
"vfarafontov@bluecatnetworks.com"
] | vfarafontov@bluecatnetworks.com |
7dc113ee481c418f95b7e7967637e33cc63663f3 | a41735b5092b1f8576e21ca6c7b93b57ebae58b2 | /processing_tennis_matches.py | 68b33567ba963e5426117bd7ca9dc92c84511859 | [] | no_license | ajkrish95/cs229-tennis-prediction | 300f378f8e8aa17c71e5a248f2afbb032cc146ca | 6609486defcb2af19e3f31b5d42aeedf5ee76ceb | refs/heads/master | 2020-11-24T05:26:10.972751 | 2019-12-14T07:29:27 | 2019-12-14T07:29:27 | 227,983,585 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 38,163 | py | import csv
import matplotlib.pyplot as plt
import numpy as np
import json
from sklearn.linear_model import LogisticRegression
from sklearn import svm
from sklearn.ensemble import RandomForestClassifier
def getdays(date1):
year1 = int(date1[0] + date1[1] + date1[2] + date1[3])
month1 = int(date1[4] + date1[5])
date1 = int(date1[6] + date1[7])
answer = 365*year1 + 30*month1 + date1
return answer
def getmonth(date1):
month1 = int(date1[4] + date1[5])
return month1
# Reorders all csv files to fixed column ordering and loads to a ndarray
def create_add_year_matches(input_path, output_path):
with open(input_path, 'r') as infile, open(output_path, 'w') as outfile:
# Output dict needs a list for new column ordering
fieldnames = ['tourney_id', #0
'tourney_name', #1
'surface', #2
'draw_size', #3
'tourney_level', #4
'tourney_date', #5
'match_num', #6
'winner_id', #7
'winner_seed', #8
'winner_entry', #9
'winner_name', #10
'winner_hand', #11
'winner_ht', #12
'winner_ioc', #13
'winner_age', #14
'winner_rank', #15
'winner_rank_points', #16
'loser_id', #17
'loser_seed', #18
'loser_entry', #19
'loser_name', #20
'loser_hand', #21
'loser_ht', #22
'loser_ioc', #23
'loser_age', #24
'loser_rank', #25
'loser_rank_points', #26
'score', #27
'best_of', #28
'round', #29
'minutes', #30
'w_ace', #31
'w_df', #32
'w_svpt', #33
'w_1stIn', #34
'w_1stWon', #35
'w_2ndWon', #36
'w_SvGms', #37
'w_bpSaved', #38
'w_bpFaced', #39
'l_ace', #40
'l_df', #41
'l_svpt', #42
'l_1stIn', #43
'l_1stWon', #44
'l_2ndWon', #45
'l_SvGms', #46
'l_bpSaved', #47
'l_bpFaced'] #48
writer = csv.DictWriter(outfile, fieldnames=fieldnames)
# reorder the header first
writer.writeheader()
for row in csv.DictReader(infile):
# writes the reordered rows to the new file
writer.writerow(row)
inputs = np.loadtxt(output_path, dtype=np.dtype(str), delimiter=',', skiprows=1)
return inputs
with open('atp_matches_1997.csv', 'r') as infile, open('reordered_atp_matches_1997.csv', 'w') as outfile:
# output dict needs a list for new column ordering
fieldnames = ['tourney_id', 'tourney_name', 'surface', 'draw_size', 'tourney_level', 'tourney_date', 'match_num', 'winner_id', 'winner_seed', 'winner_entry', 'winner_name', 'winner_hand', 'winner_ht', 'winner_ioc', 'winner_age', 'winner_rank', 'winner_rank_points' ,'loser_id', 'loser_seed' ,'loser_entry', 'loser_name', 'loser_hand', 'loser_ht' , 'loser_ioc', 'loser_age', 'loser_rank', 'loser_rank_points', 'score', 'best_of' ,'round', 'minutes', 'w_ace', 'w_df', 'w_svpt', 'w_1stIn', 'w_1stWon', 'w_2ndWon', 'w_SvGms', 'w_bpSaved', 'w_bpFaced', 'l_ace', 'l_df', 'l_svpt', 'l_1stIn', 'l_1stWon', 'l_2ndWon', 'l_SvGms', 'l_bpSaved', 'l_bpFaced']
writer = csv.DictWriter(outfile, fieldnames=fieldnames)
# reorder the header first
writer.writeheader()
for row in csv.DictReader(infile):
# writes the reordered rows to the new file
writer.writerow(row)
inputs = np.loadtxt("reordered_atp_matches_1997.csv", dtype=np.dtype(str), delimiter=',', skiprows=1)
for i in range(1998, 2019):
input_path = 'atp_matches_' + str(i) + '.csv'
output_path = 'reordered_atp_matches' + str(i) + '.csv'
inputs_temp = create_add_year_matches(input_path, output_path)
inputs = np.concatenate((inputs, inputs_temp))
for i in range(2010, 2018):
input_path = 'atp_matches_qual_chall_' + str(i) + '.csv'
output_path = 'reordered_atp_matches_qual_chall_matches' + str(i) + '.csv'
inputs_temp = create_add_year_matches(input_path, output_path)
inputs = np.concatenate((inputs, inputs_temp))
for i in range(2019, 2020):
input_path = 'atp_matches_' + str(i) + '.csv'
output_path = 'reordered_atp_matches' + str(i) + '.csv'
inputs_temp = create_add_year_matches(input_path, output_path)
inputs = np.concatenate((inputs, inputs_temp))
print(inputs.shape)
print(inputs[1])
num_rows, num_cols = inputs.shape
print(num_rows, num_cols)
print(inputs[0])
# Sanity checking tourney_id
ct = 0
for i in range(num_rows):
if(inputs[i][0] == ""):
ct = ct + 1
print(ct)
#Sanity checking tourney_name
ct = 0
for i in range(num_rows):
if(inputs[i][1] == ""):
ct = ct + 1
print(ct)
# Sanity checking surface and creating dict of surfaces
ct = 0
tc = 0
none_surfaces_rows = list()
for i in range(num_rows):
if(inputs[i][2] == ""):
ct = ct + 1
none_surfaces_rows.append(i)
if(inputs[i][2] == "None"):
tc = tc + 1
none_surfaces_rows.append(i)
inputs = np.delete(inputs, none_surfaces_rows, 0)
print(ct)
print(tc)
ct = 0
surfaces = dict()
size = 0
num_rows, num_cols = inputs.shape
for i in range(num_rows):
if(inputs[i][2] == ""):
ct = ct + 1
else:
if inputs[i][2] not in surfaces:
surfaces[inputs[i][2]] = size
size = size + 1
print(ct)
print(surfaces)
# Sanity checking draw_size
ct = 0
tc = 0
draw_sizes = dict()
size = 0
for i in range(num_rows):
if(inputs[i][3] == ""):
ct = ct + 1
else:
if inputs[i][3] not in draw_sizes:
draw_sizes[inputs[i][3]] = size
size = size + 1
print(draw_sizes)
print(ct)
# Drop tourey_name column
#inputs = np.delete(inputs, 1, axis=1)
# Drop draw_size column
#inputs = np.delete(inputs, 3, axis=1)
# Drop tourey_level column
#inputs = np.delete(inputs, 4, axis=1)
# Drop match_num column
#inputs = np.delete(inputs, 6, axis=1)
# Drop winner_name column
#inputs = np.delete(inputs, 10, axis=1)
# Drop loser_name column
#inputs = np.delete(inputs, 20, axis=1)
# Drop winner_ioc column - country of winner
#inputs = np.delete(inputs, 13, axis=1)
# Drop loser_ioc column - country of loser
#inputs = np.delete(inputs, 23, axis=1)
# Drop score
#inputs = np.delete(inputs, 27, axis=1)
# Sanity checking and sanitizing month from time of match
ct = 0
for i in range(num_rows):
if(inputs[i][5] == ""):
ct = ct + 1
print(ct)
# Sanity checking winner_id
ct = 0
federer_win = 0
for i in range(num_rows):
if(inputs[i][7] == ""):
ct = ct + 1
if(inputs[i][17] == ""):
ct = ct + 1
if(inputs[i][7] == "103819"):
federer_win = federer_win + 1
print(ct) # It comes out to be 0\
print(federer_win)
# Sanitizing winner_seed
ct = 0
federer_win = 0
for i in range(num_rows):
if(inputs[i][8] == ""):
ct = ct + 1
inputs[i][8] = '40'
if(inputs[i][18] == ""):
ct = ct + 1
inputs[i][18] = '40'
if(not inputs[i][8].isdigit()):
ct = ct + 1
inputs[i][8] = '40'
if(not inputs[i][18].isdigit()):
ct = ct + 1
inputs[i][18] = '40'
print(ct)
# Sanitizing winner_entry
ct = 0
federer_win = 0
entries = dict()
entry_type = 0
for i in range(num_rows):
if inputs[i][9].lower() not in entries:
entries[inputs[i][9].lower()] = entry_type
entry_type = entry_type + 1
if inputs[i][19].lower() not in entries:
entries[inputs[i][19].lower()] = entry_type
entry_type = entry_type + 1
inputs[i][9] = entries[inputs[i][9].lower()]
inputs[i][19] = entries[inputs[i][19].lower()]
print(entries)
# Sanitizing winner_hand - can only be left or right handed
for i in range(num_rows):
if inputs[i][11].lower() == 'l':
inputs[i][11] = '0'
else:
inputs[i][11] = '1'
if inputs[i][21].lower() == 'l':
inputs[i][21] = '0'
else:
inputs[i][21] = '1'
# Sanitizing winner_ht
# Making all winners without a height as height = -1
ct = 0
for i in range(num_rows):
if inputs[i][12].lower() == "":
ct = ct + 1
inputs[i][12] = '-1'
if inputs[i][22].lower() == "":
ct = ct + 1
inputs[i][22] = '-1'
# Too many players without a height - so just going to take difference in height as feature
# and make it 0 when input doesn't have it
print(ct)
# Sanitizing winner_age
ct = 0
total = 0.0
total_matches = 0
for i in range(num_rows):
if inputs[i][14].lower() == "":
ct = ct + 1
inputs[i][14] = "26.08"
else:
total = total + float(inputs[i][14])
total_matches = total_matches + 1
if inputs[i][24].lower() == "":
ct = ct + 1
inputs[i][24] = "26.08"
else:
total = total + float(inputs[i][24])
total_matches = total_matches + 1
# Seems like 26 is the average age of the winner
print(total/total_matches)
# Seems like 18 winners overall don't have an age - default to 26
print(ct)
# Sanitizing winner_rank and loser_rank - if no rank then replacing with 2000 which refers to a very high rank which
# represents the last rank
ct = 0
for i in range(num_rows):
if(inputs[i][15] == ""):
inputs[i][15] = 2000
rank_1 = 2000
else:
rank_1 = int(inputs[i][15])
if(inputs[i][25] == ""):
inputs[i][25] = 2000
rank_2 = 2000
else:
rank_2 = int(inputs[i][25])
if(rank_1 < rank_2):
ct = ct + 1
print(ct*100/num_rows)
benchmark_higher_rankings = ct*100/num_rows
# Sanitizing winner_rank points - if rankings points is empty replacing with 0
ct = 0
for i in range(num_rows):
if(inputs[i][16] == ""):
ct = ct + 1
inputs[i][16] = 0
if(inputs[i][26] == ""):
ct = ct + 1
inputs[i][26] = 0
print(ct)
# Sanity checking score
ct1 = 0
ct2 = 0
walkover_matches = list()
for i in range(num_rows):
if(inputs[i][27] == ""):
ct1 = ct1 + 1
walkover_matches.append(i)
if(inputs[i][27].lower() == "w/o"):
ct2 = ct2 + 1
walkover_matches.append(i)
print(ct1)
print(ct2)
inputs = np.delete(inputs, walkover_matches, 0)
num_rows, num_cols = inputs.shape
# Sanity checking best_of
ct = 0
for i in range(num_rows):
if(inputs[i][28] == ""):
ct = ct + 1
print(ct)
# Sanity checking and converting rounds to numbers
ct = 0
rounds = {'Q1' : -3, 'Q2' : -2, 'Q3': -3, 'R128': 0, 'RR': 0, 'BR': 0, 'R64': 1, 'R32': 2, 'R16': 3, 'QF': 4, 'SF': 5, 'F': 6}
type_rounds = 0
for i in range(num_rows):
if(inputs[i][29] == ""):
ct = ct + 1
else:
inputs[i][29] = rounds[inputs[i][29].upper()]
print(num_rows)
print(rounds)
print(ct)
# Sanity checking minutes
ct = 0
no_minutes = list()
for i in range(num_rows):
if(inputs[i][30] == ""):
ct = ct + 1
no_minutes.append(i)
# Lots of rows without minutes
print(ct)
inputs = np.delete(inputs, no_minutes, 0)
num_rows, num_cols = inputs.shape
# Sanity checking all numerical features of the match - double faults, aces, first serve, etc.
#and deleting all rows that have any missing
ct = 0
missing_values = list()
for i in range(num_rows):
for j in range(31, 49):
if(inputs[i][j] == ""):
ct = ct + 1
missing_values.append(i)
print(ct)
inputs = np.delete(inputs, missing_values, 0)
num_rows, num_cols = inputs.shape
print(num_rows) #63770 rows in total
career_stats_winner = np.zeros((num_rows, 49))
career_stats_loser = np.zeros((num_rows, 49))
career_stats_winner_total = np.zeros((num_rows, 49))
career_stats_loser_total = np.zeros((num_rows, 49))
x = 0
player_id_stats_overall_sum = [dict() for x in range(num_cols)]
player_id_stats_overall_count = [dict() for x in range(num_cols)]
player_name = dict()
delete_list = []
count_2019 = 0
for i in range(num_rows):
if inputs[i][5][0] == '2' and inputs[i][5][1] == '0' and inputs[i][5][2] == '1' and inputs[i][5][3] == '9' and getmonth(inputs[i][5]) > 6:
count_2019+=1
# Delete davis cup matches in prediction - they are generally extremely hard to predict
# If Davis Cup matches needs to be predicted the most likely best way to do is to have a different model for davis cup
for i in range(num_rows - count_2019, num_rows):
if "davis" in inputs[i][1].lower():
delete_list.append(i)
inputs = np.delete(inputs, delete_list, 0)
num_rows, num_cols = inputs.shape
count_2019 = 0
for i in range(num_rows):
if inputs[i][5][0] == '2' and inputs[i][5][1] == '0' and inputs[i][5][2] == '1' and inputs[i][5][3] == '9' and getmonth(inputs[i][5]) > 6:
count_2019+=1
X_inputs = np.zeros((2*(num_rows - count_2019), 51))
Y_inputs = np.zeros(2*(num_rows - count_2019))
X_prediction = np.zeros((2*count_2019, 51))
Y_prediction = np.zeros(2*count_2019)
print(count_2019)
matches_won_lost = dict()
head_to_head = dict()
head_to_head_surface = dict()
matches_won_lost_surface = dict()
rank_count = dict()
rank_total = dict()
rankings_points_total = dict()
form = dict()
form_surface = dict()
tournament_form_win = dict()
tournament_form_count = dict()
common_head_to_head = dict()
total_no_head_to_head = 0
for i in range(num_rows):
player_id_winner = inputs[i][7]
player_id_loser = inputs[i][17]
# Start of Tournament level form
if (player_id_winner, inputs[i][0][5:]) not in tournament_form_win:
tournament_form_win[(player_id_winner, inputs[i][0][5:])] = 0
tournament_form_count[(player_id_winner, inputs[i][0][5:])] = 0
if (player_id_loser, inputs[i][0][5:]) not in tournament_form_win:
tournament_form_win[(player_id_loser, inputs[i][0][5:])] = 0
tournament_form_count[(player_id_loser, inputs[i][0][5:])] = 0
tournament_form_win[(player_id_winner, inputs[i][0][5:])] += 1
tournament_form_win[(player_id_loser, inputs[i][0][5:])] += 0
tournament_form_count[(player_id_winner, inputs[i][0][5:])] += 1
tournament_form_count[(player_id_loser, inputs[i][0][5:])] += 1
if i < num_rows - count_2019:
X_inputs[2*i][44] = tournament_form_win[(player_id_winner, inputs[i][0][5:])] - 1 - tournament_form_win[(player_id_loser, inputs[i][0][5:])]
X_inputs[2*i][45] = tournament_form_count[(player_id_winner, inputs[i][0][5:])] - tournament_form_count[(player_id_loser, inputs[i][0][5:])]
temp11 = 1
temp12 = 1
if tournament_form_count[(player_id_winner, inputs[i][0][5:])] != 1:
temp11 = tournament_form_count[(player_id_winner, inputs[i][0][5:])]
if tournament_form_count[(player_id_loser, inputs[i][0][5:])] != 1:
temp12 = tournament_form_count[(player_id_loser, inputs[i][0][5:])]
X_inputs[2*i][46] = ((tournament_form_win[(player_id_winner, inputs[i][0][5:])] - 1)*100/temp11) - ((tournament_form_win[(player_id_loser, inputs[i][0][5:])])*100/temp12)
X_inputs[2*i+1][44] = -X_inputs[2*i][44]
X_inputs[2*i+1][45] = -X_inputs[2*i][45]
X_inputs[2*i+1][46] = -X_inputs[2*i][46]
else:
x1 = int(2*i - 2*int(num_rows) + 2*int(count_2019))
X_prediction[x1][44] = tournament_form_win[(player_id_winner, inputs[i][0][5:])] - 1 - tournament_form_win[(player_id_loser, inputs[i][0][5:])]
X_prediction[x1][45] = tournament_form_count[(player_id_winner, inputs[i][0][5:])] - tournament_form_count[(player_id_loser, inputs[i][0][5:])]
temp11 = 1
temp12 = 1
if tournament_form_count[(player_id_winner, inputs[i][0][5:])] != 1:
temp11 = tournament_form_count[(player_id_winner, inputs[i][0][5:])]
if tournament_form_count[(player_id_loser, inputs[i][0][5:])] != 1:
temp12 = tournament_form_count[(player_id_loser, inputs[i][0][5:])]
X_prediction[x1][46] = ((tournament_form_win[(player_id_winner, inputs[i][0][5:])] - 1)*100/temp11) - ((tournament_form_win[(player_id_loser, inputs[i][0][5:])])*100/temp12)
X_prediction[x1+1][44] = -X_prediction[x1][44]
X_prediction[x1+1][45] = -X_prediction[x1][45]
X_prediction[x1+1][46] = -X_prediction[x1][46]
# End of Tournament level form
# Start of overall form
if player_id_winner not in form:
form[player_id_winner] = []
form[player_id_winner].append((1, inputs[i][5]))
if player_id_loser not in form:
form[player_id_loser] = []
form[player_id_loser].append((0, inputs[i][5]))
total_winner_5 = -1
total_winner_10 = -1
total_winner_15 = -1
total_winner_25 = -1
total_loser_5 = -1
total_loser_10 = -1
total_loser_15 = -1
total_loser_25 = -1
winner_win_5 = 0
winner_win_10 = 0
winner_win_15 = 0
winner_win_25 = 0
loser_win_5 = 0
loser_win_10 = 0
loser_win_15 = 0
loser_win_25 = 0
for (a1, a2) in reversed(form[player_id_winner]):
if total_winner_5 == -1:
total_winner_5 = 0
else:
if total_winner_5 < 5:
winner_win_5 = winner_win_5 + a1
total_winner_5 += 1
for (a1, a2) in reversed(form[player_id_winner]):
if total_winner_10 == -1:
total_winner_10 = 0
else:
if total_winner_10 < 10:
winner_win_10 = winner_win_10 + a1
total_winner_10 += 1
for (a1, a2) in reversed(form[player_id_winner]):
if total_winner_15 == -1:
total_winner_15 = 0
else:
if total_winner_15 < 15:
winner_win_15 = winner_win_15 + a1
total_winner_15 += 1
for (a1, a2) in reversed(form[player_id_winner]):
if total_winner_25 == -1:
total_winner_25 = 0
else:
if total_winner_25 < 25:
winner_win_25 = winner_win_25 + a1
total_winner_25 += 1
for (a1, a2) in reversed(form[player_id_loser]):
if total_loser_5 == -1:
total_loser_5 = 0
else:
if total_loser_5 < 5:
loser_win_5 = loser_win_5 + a1
total_loser_5 += 1
for (a1, a2) in reversed(form[player_id_loser]):
if total_loser_10 == -1:
total_loser_10 = 0
else:
if total_loser_10 < 10:
loser_win_10 = loser_win_10 + a1
total_loser_10 += 1
for (a1, a2) in reversed(form[player_id_loser]):
if total_loser_15 == -1:
total_loser_15 = 0
else:
if total_loser_15 < 15:
loser_win_15 = loser_win_15 + a1
total_loser_15 += 1
for (a1, a2) in reversed(form[player_id_loser]):
if total_loser_25 == -1:
total_loser_25 = 0
else:
if total_loser_25 < 25:
loser_win_25 = loser_win_25 + a1
total_loser_25 += 1
if i < num_rows - count_2019:
X_inputs[2*i][36] = winner_win_5 - loser_win_5
X_inputs[2*i][37] = winner_win_10 - loser_win_10
X_inputs[2*i][38] = winner_win_15 - loser_win_15
X_inputs[2*i][39] = winner_win_25 - loser_win_25
X_inputs[2*i+1][36] = loser_win_5 - winner_win_5
X_inputs[2*i+1][37] = loser_win_10 - winner_win_10
X_inputs[2*i+1][38] = loser_win_15 - winner_win_15
X_inputs[2*i+1][39] = loser_win_25 - winner_win_25
else:
x1 = int(2*i - 2*int(num_rows) + 2*int(count_2019))
X_prediction[x1][36] = winner_win_5 - loser_win_5
X_prediction[x1][37] = winner_win_10 - loser_win_10
X_prediction[x1][38] = winner_win_15 - loser_win_15
X_prediction[x1][39] = winner_win_25 - loser_win_25
X_prediction[x1+1][36] = loser_win_5 - winner_win_5
X_prediction[x1+1][37] = loser_win_10 - winner_win_10
X_prediction[x1+1][38] = loser_win_15 - winner_win_15
X_prediction[x1+1][39] = loser_win_25 - winner_win_25
# End of overall form
# Start of last 1 month form overall
total_winner_1 = -1
total_loser_1 = -1
winner_win_1 = 0
loser_win_1 = 0
for (a1, a2) in reversed(form[player_id_winner]):
if total_winner_1 == -1:
total_winner_1 = 0
else:
if getdays(inputs[i][5]) - getdays(a2) < 30 and getdays(inputs[i][5]) - getdays(a2) >= 0:
winner_win_1 = winner_win_1 + a1
total_winner_1 += 1
#print(getdays(inputs[i][5]) - getdays(a2), a2, inputs[i][5], i, 1, total_winner_1)
else:
break
for (a1, a2) in reversed(form[player_id_loser]):
if total_loser_1 == -1:
total_loser_1 = 0
else:
if getdays(inputs[i][5]) - getdays(a2) < 30 and getdays(inputs[i][5]) - getdays(a2) >= 0:
loser_win_1 = loser_win_1 + a1
total_loser_1 += 1
#print(getdays(inputs[i][5]) - getdays(a2), a2, inputs[i][5], i, 5, total_loser_1)
else:
break
if i < num_rows - count_2019:
X_inputs[2*i][47] = winner_win_1 - loser_win_1
X_inputs[2*i+1][47] = loser_win_1 - winner_win_1
X_inputs[2*i][48] = total_winner_1 - total_loser_1
X_inputs[2*i+1][48] = total_loser_1 - total_winner_1
else:
x1 = int(2*i - 2*int(num_rows) + 2*int(count_2019))
X_prediction[x1][47] = winner_win_1 - loser_win_1
X_prediction[x1+1][47] = loser_win_1 - winner_win_1
X_prediction[x1][48] = total_winner_1 - total_loser_1
X_prediction[x1+1][48] = total_loser_1 - total_winner_1
# End of last 1 month form overall
# I tried adding 3, 6 and 12 month forms as well - it didn't seem to help - in fact made the prediction worse
# Start of surface level form
if player_id_winner not in form_surface:
form_surface[(player_id_winner, inputs[i][2])] = []
form_surface[(player_id_winner, inputs[i][2])].append((1, inputs[i][5]))
if player_id_loser not in form_surface:
form_surface[(player_id_loser, inputs[i][2])] = []
form_surface[(player_id_loser, inputs[i][2])].append((0, inputs[i][5]))
total_winner_5 = -1
total_winner_10 = -1
total_winner_15 = -1
total_winner_25 = -1
total_loser_5 = -1
total_loser_10 = -1
total_loser_15 = -1
total_loser_25 = -1
winner_win_5 = 0
winner_win_10 = 0
winner_win_15 = 0
winner_win_25 = 0
loser_win_5 = 0
loser_win_10 = 0
loser_win_15 = 0
loser_win_25 = 0
for (a1, a2) in reversed(form_surface[(player_id_winner, inputs[i][2])]):
if total_winner_5 == -1:
total_winner_5 = 0
else:
if total_winner_5 < 5:
winner_win_5 = winner_win_5 + a1
total_winner_5 += 1
for (a1, a2) in reversed(form_surface[(player_id_winner, inputs[i][2])]):
if total_winner_10 == -1:
total_winner_10 = 0
else:
if total_winner_10 < 10:
winner_win_10 = winner_win_10 + a1
total_winner_10 += 1
for (a1, a2) in reversed(form_surface[(player_id_winner, inputs[i][2])]):
if total_winner_15 == -1:
total_winner_15 = 0
else:
if total_winner_15 < 15:
winner_win_15 = winner_win_15 + a1
total_winner_15 += 1
for (a1, a2) in reversed(form_surface[(player_id_winner, inputs[i][2])]):
if total_winner_25 == -1:
total_winner_25 = 0
else:
if total_winner_25 < 25:
winner_win_25 = winner_win_25 + a1
total_winner_25 += 1
for (a1, a2) in reversed(form_surface[(player_id_loser, inputs[i][2])]):
if total_loser_5 == -1:
total_loser_5 = 0
else:
if total_loser_5 < 5:
loser_win_5 = loser_win_5 + a1
total_loser_5 += 1
for (a1, a2) in reversed(form_surface[(player_id_loser, inputs[i][2])]):
if total_loser_10 == -1:
total_loser_10 = 0
else:
if total_loser_10 < 10:
loser_win_10 = loser_win_10 + a1
total_loser_10 += 1
for (a1, a2) in reversed(form_surface[(player_id_loser, inputs[i][2])]):
if total_loser_15 == -1:
total_loser_15 = 0
else:
if total_loser_15 < 15:
loser_win_15 = loser_win_15 + a1
total_loser_15 += 1
for (a1, a2) in reversed(form_surface[(player_id_loser, inputs[i][2])]):
if total_loser_25 == -1:
total_loser_25 = 0
else:
if total_loser_25 < 25:
loser_win_25 = loser_win_25 + a1
total_loser_25 += 1
if i < num_rows - count_2019:
X_inputs[2*i][40] = winner_win_5 - loser_win_5
X_inputs[2*i][41] = winner_win_10 - loser_win_10
X_inputs[2*i][42] = winner_win_15 - loser_win_15
X_inputs[2*i][43] = winner_win_25 - loser_win_25
X_inputs[2*i+1][40] = loser_win_5 - winner_win_5
X_inputs[2*i+1][41] = loser_win_10 - winner_win_10
X_inputs[2*i+1][42] = loser_win_15 - winner_win_15
X_inputs[2*i+1][43] = loser_win_25 - winner_win_25
else:
x1 = int(2*i - 2*int(num_rows) + 2*int(count_2019))
X_prediction[x1][40] = winner_win_5 - loser_win_5
X_prediction[x1][41] = winner_win_10 - loser_win_10
X_prediction[x1][42] = winner_win_15 - loser_win_15
X_prediction[x1][43] = winner_win_25 - loser_win_25
X_prediction[x1+1][40] = loser_win_5 - winner_win_5
X_prediction[x1+1][41] = loser_win_10 - winner_win_10
X_prediction[x1+1][42] = loser_win_15 - winner_win_15
X_prediction[x1+1][43] = loser_win_25 - winner_win_25
# End of surface level form
# Start of Overall win loss
p1, p2 = (0, 0)
b1, b2 = (0, 0)
if player_id_winner not in matches_won_lost:
matches_won_lost[player_id_winner] = (1, 0)
(p1, p2) = (0, 0)
else:
(a1, a2) = matches_won_lost[player_id_winner]
matches_won_lost[player_id_winner] = (a1 + 1, a2)
(p1, p2) = (a1, a2)
if player_id_loser not in matches_won_lost:
matches_won_lost[player_id_loser] = (0, 1)
(b1, b2) = (0, 0)
else:
(a1, a2) = matches_won_lost[player_id_loser]
matches_won_lost[player_id_loser] = (a1, a2 + 1)
(b1, b2) = (a1, a2)
if((p1 + p2) != 0):
temp1 = (p1*100/(p1+p2))
else:
temp1 = 0
if((b1 + b2) != 0):
temp2 = (b1*100/(b1+b2))
else:
temp2 = 0
if i < num_rows - count_2019:
X_inputs[2*i][25] = p1 - b1
X_inputs[2*i+1][25] = b1 - p1
X_inputs[2*i][26] = temp1 - temp2
X_inputs[2*i+1][26] = temp2 - temp1
else:
x1 = int(2*i - 2*int(num_rows) + 2*int(count_2019))
X_prediction[x1][25] = p1 - b1
X_prediction[x1+1][25] = b1 - p1
X_prediction[x1][26] = temp1 - temp2
X_prediction[x1+1][26] = temp2 - temp1
# End of Overall win loss
# Start of section for win loss based on surface
p1, p2 = (0, 0)
b1, b2 = (0, 0)
if (player_id_winner, inputs[i][2]) not in matches_won_lost_surface:
matches_won_lost_surface[(player_id_winner, inputs[i][2])] = (1, 0)
(p1, p2) = (0, 0)
else:
(a1, a2) = matches_won_lost_surface[(player_id_winner, inputs[i][2])]
matches_won_lost_surface[(player_id_winner, inputs[i][2])] = (a1 + 1, a2)
(p1, p2) = (a1, a2)
if (player_id_loser, inputs[i][2]) not in matches_won_lost_surface:
matches_won_lost_surface[(player_id_loser, inputs[i][2])] = (0, 1)
(b1, b2) = (0, 0)
else:
(a1, a2) = matches_won_lost_surface[(player_id_loser, inputs[i][2])]
matches_won_lost_surface[(player_id_loser, inputs[i][2])] = (a1, a2 + 1)
(b1, b2) = (a1, a2)
if((p1 + p2) != 0):
temp1 = (p1*100/(p1+p2))
else:
temp1 = 0
if((b1 + b2) != 0):
temp2 = (b1*100/(b1+b2))
else:
temp2 = 0
if i < num_rows - count_2019:
X_inputs[2*i][33] = p1 - b1
X_inputs[2*i+1][33] = b1 - p1
X_inputs[2*i][34] = temp1 - temp2
X_inputs[2*i+1][34] = temp2 - temp1
else:
x1 = int(2*i - 2*int(num_rows) + 2*int(count_2019))
X_prediction[x1][33] = p1 - b1
X_prediction[x1+1][33] = b1 - p1
X_prediction[x1][34] = temp1 - temp2
X_prediction[x1+1][34] = temp2 - temp1
# End of section for win loss based on surface
# Start of Overall Head to Head
if (player_id_winner, player_id_loser) not in head_to_head:
head_to_head[(player_id_winner, player_id_loser)] = (1, 0)
head_to_head[(player_id_loser, player_id_winner)] = (0, 1)
if i < num_rows - count_2019:
X_inputs[2*i][15] = 1
X_inputs[2*i+1][15] = -1
else:
x1 = int(2*i - 2*int(num_rows) + 2*int(count_2019))
total_no_head_to_head = total_no_head_to_head + 1
print(inputs[i][10], inputs[i][20])
X_prediction[x1][15] = 1
X_prediction[x1+1][15] = -1
else:
(a1, a2) = head_to_head[(player_id_winner, player_id_loser)]
if i < num_rows - count_2019:
X_inputs[2*i][15] = a1 - a2
X_inputs[2*i+1][15] = a2 - a1
else:
x1 = int(2*i - 2*int(num_rows) + 2*int(count_2019))
X_prediction[x1][15] = a1 - a2
X_prediction[x1+1][15] = a2 - a1
head_to_head[(player_id_winner, player_id_loser)] = (a1 + 1, a2)
head_to_head[(player_id_loser, player_id_winner)] = (a2, a1 + 1)
# End of Overall Head to Head
# Start of Common Opponent Head to Head
if player_id_winner not in common_head_to_head:
common_head_to_head[player_id_winner] = {player_id_loser: (0, 0)}
if player_id_loser not in common_head_to_head:
common_head_to_head[player_id_loser] = {player_id_winner: (0, 0)}
if player_id_loser not in common_head_to_head[player_id_winner]:
common_head_to_head[player_id_winner][player_id_loser] = (0, 0)
if player_id_winner not in common_head_to_head[player_id_loser]:
common_head_to_head[player_id_loser][player_id_winner] = (0, 0)
(x11, y11) = common_head_to_head[player_id_winner][player_id_loser]
(x22, y22) = common_head_to_head[player_id_loser][player_id_winner]
common_head_to_head[player_id_winner][player_id_loser] = (x11+1, y11)
common_head_to_head[player_id_loser][player_id_winner] = (x22, y22+1)
new_head_to_head_winner = (0, 0)
new_head_to_head_loser = (0, 0)
for player_id in common_head_to_head[player_id_winner]:
if player_id in common_head_to_head[player_id_loser]:
(temp1, temp2) = common_head_to_head[player_id_winner][player_id]
t1, t2 = new_head_to_head_winner
new_head_to_head_winner = t1 + temp1, t2 + temp2
for player_id in common_head_to_head[player_id_loser]:
if player_id in common_head_to_head[player_id_winner]:
(temp1, temp2) = common_head_to_head[player_id_loser][player_id]
t1, t2 = new_head_to_head_loser
new_head_to_head_loser = t1 + temp1, t2 + temp2
temp1, temp2 = new_head_to_head_winner
temp3, temp4 = new_head_to_head_loser
temp5 = 0
temp6 = 0
if(temp1 + temp2) != 0:
temp5 = ((temp1*100)/(temp1+temp2))
if(temp3 + temp4) != 0:
temp6 = ((temp3*100)/(temp3+temp4))
if i < num_rows - count_2019:
X_inputs[2*i][49] = temp1 - temp3
X_inputs[2*i][49] = temp5 - temp6
X_inputs[2*i+1][50] = temp3 - temp1
X_inputs[2*i+1][50] = temp6 - temp5
else:
x1 = int(2*i - 2*int(num_rows) + 2*int(count_2019))
X_prediction[x1][49] = temp1 - temp3
X_prediction[x1][49] = temp5 - temp6
X_prediction[x1+1][50] = temp3 - temp1
X_prediction[x1+1][50] = temp6 - temp5
# End of Common Opponent Head to Head
# Start of surface level head to head
if (player_id_winner, player_id_loser, inputs[i][2]) not in head_to_head_surface:
if i < num_rows - count_2019:
X_inputs[2*i][35] = 0
X_inputs[2*i+1][35] = 0
else:
x1 = int(2*i - 2*int(num_rows) + 2*int(count_2019))
X_prediction[x1][35] = 0
X_prediction[x1+1][35] = 0
head_to_head_surface[(player_id_winner, player_id_loser, inputs[i][2])] = (1, 0)
head_to_head_surface[(player_id_loser, player_id_winner, inputs[i][2])] = (0, 1)
else:
(a1, a2) = head_to_head_surface[(player_id_winner, player_id_loser, inputs[i][2])]
if i < num_rows - count_2019:
X_inputs[2*i][35] = a1 - a2
X_inputs[2*i+1][35] = a2 - a1
else:
x1 = int(2*i - 2*int(num_rows) + 2*int(count_2019))
X_prediction[x1][35] = a1 - a2
X_prediction[x1+1][35] = a2 - a1
head_to_head_surface[(player_id_winner, player_id_loser, inputs[i][2])] = (a1 + 1, a2)
head_to_head_surface[(player_id_loser, player_id_winner, inputs[i][2])] = (a2, a1 + 1)
# End of surface level head to head
for j in range(31, 40):
if player_id_winner not in player_id_stats_overall_count[j-31]:
career_stats_winner[i][j] = 0
career_stats_winner_total[i][j] = 0
player_id_stats_overall_count[j-31][player_id_winner] = 1
player_id_stats_overall_sum[j-31][player_id_winner] = int(inputs[i][j])
player_name[player_id_winner] = inputs[i][10]
else:
career_stats_winner[i][j] = player_id_stats_overall_sum[j-31][player_id_winner]/player_id_stats_overall_count[j-31][player_id_winner]
career_stats_winner_total[i][j] = player_id_stats_overall_sum[j-31][player_id_winner]
player_id_stats_overall_count[j-31][player_id_winner] = player_id_stats_overall_count[j-31][player_id_winner] + 1
player_id_stats_overall_sum[j-31][player_id_winner] = player_id_stats_overall_sum[j-31][player_id_winner] + int(inputs[i][j])
player_name[player_id_winner] = inputs[i][10]
for j in range(40, 49):
if player_id_loser not in player_id_stats_overall_count[j-40]:
career_stats_loser[i][j] = 0
career_stats_loser_total[i][j] = 0
player_id_stats_overall_count[j-40][player_id_loser] = 1
player_id_stats_overall_sum[j-40][player_id_loser] = int(inputs[i][j])
player_name[player_id_loser] = inputs[i][20]
else:
career_stats_loser[i][j] = player_id_stats_overall_sum[j-40][player_id_loser]/player_id_stats_overall_count[j-40][player_id_loser]
career_stats_loser_total[i][j] = player_id_stats_overall_sum[j-40][player_id_loser]
player_id_stats_overall_count[j-40][player_id_loser] = player_id_stats_overall_count[j-40][player_id_loser] + 1
player_id_stats_overall_sum[j-40][player_id_loser] = player_id_stats_overall_sum[j-40][player_id_loser] + int(inputs[i][j])
player_name[player_id_loser] = inputs[i][20]
if i < num_rows - count_2019:
X_inputs[2*i][j-40] = career_stats_winner[i][j-9] - career_stats_loser[i][j]
X_inputs[2*i+1][j-40] = career_stats_loser[i][j] - career_stats_winner[i][j-9]
X_inputs[2*i][j-24] = career_stats_winner_total[i][j-9] - career_stats_loser_total[i][j]
X_inputs[2*i+1][j-24] = career_stats_loser_total[i][j] - career_stats_winner_total[i][j-9]
else:
x1 = int(2*i - 2*int(num_rows) + 2*int(count_2019))
X_prediction[x1][j-40] = career_stats_winner[i][j-9] - career_stats_loser[i][j]
X_prediction[x1+1][j-40] = career_stats_loser[i][j] - career_stats_winner[i][j-9]
X_prediction[x1][j-24] = career_stats_winner_total[i][j-9] - career_stats_loser_total[i][j]
X_prediction[x1+1][j-24] = career_stats_loser_total[i][j] - career_stats_winner_total[i][j-9]
#15, 16, 25, 26
if player_id_winner not in rank_count:
rank_count[player_id_winner] = 1
rank_total[player_id_winner] = inputs[i][15]
rankings_points_total[player_id_winner] = inputs[i][16]
else:
rank_count[player_id_winner] += 1
rank_total[player_id_winner] += inputs[i][15]
rankings_points_total[player_id_winner] += inputs[i][16]
if player_id_loser not in rank_count:
rank_count[player_id_loser] = 1
rank_total[player_id_loser] = inputs[i][25]
rankings_points_total[player_id_loser] = inputs[i][26]
else:
rank_count[player_id_loser] += 1
rank_total[player_id_loser] += inputs[i][25]
rankings_points_total[player_id_loser] += inputs[i][26]
# 26, 25, 24, 22, 21, 18 - 16, 15, 14, 12, 11, 8
k = 9
for j in range(18, 27):
if j != 19 and j != 20 and j != 23:
if i < num_rows - count_2019:
X_inputs[2*i][k] = float(inputs[i][j-10])
X_inputs[2*i][k+18] = float(inputs[i][j])
X_inputs[2*i+1][k] = float(inputs[i][j])
X_inputs[2*i+1][k+18] = float(inputs[i][j-10])
if j == 22 and (int(inputs[i][j-10]) == -1 or int(inputs[i][j]) == -1):
X_inputs[2*i][k] = 0
X_inputs[2*i][k+18] = 0
X_inputs[2*i+1][k] = 0
X_inputs[2*i+1][k+18] = 0
k += 1
else:
x1 = int(2*i - 2*int(num_rows) + 2*int(count_2019))
X_prediction[x1][k] = float(inputs[i][j-10])
X_prediction[x1][k+18] = float(inputs[i][j])
X_prediction[x1+1][k] = float(inputs[i][j])
X_prediction[x1+1][k+18] = float(inputs[i][j-10])
if j == 22 and (int(inputs[i][j-10]) == -1 or int(inputs[i][j]) == -1):
X_prediction[x1][k] = 0
X_prediction[x1][k+18] = 0
X_prediction[x1+1][k] = 0
X_prediction[x1+1][k+18] = 0
k += 1
if i < num_rows - count_2019:
Y_inputs[2*i] = 1
Y_inputs[2*i+1] = 0
else:
x1 = 2*i - 2*int(num_rows) + 2*int(count_2019)
Y_prediction[x1] = 1
Y_prediction[x1+1] = 0
# Train a one-vs-rest logistic regression on the pre-2019 feature rows and
# score the held-out 2019 rows.  Rows come in mirrored pairs: row 2*i is the
# match seen from player 1's perspective (label 1) and row 2*i+1 the same
# match from the opponent's perspective (label 0).
clf = LogisticRegression(multi_class='ovr', random_state=0, solver='liblinear', penalty='l2').fit(X_inputs, Y_inputs)
training_prediction = clf.predict_proba(X_prediction)
np.savetxt("training_data.csv", X_inputs, delimiter=",")
total = count_2019
right = 0
print(training_prediction.shape)
for i in range(count_2019):
    # (a, b): P(class 0), P(class 1) for the first-perspective row;
    # (c, d): the same probabilities for the mirrored row.
    (a, b) = training_prediction[2*i]
    (c, d) = training_prediction[2*i+1]
    if a < b and c > d:
        # Both mirrored rows agree on a win.
        predicted = 1
    else:
        if a > b and c < d:
            # Both mirrored rows agree on a loss.
            predicted = 0
        else:
            # The two rows disagree; break the tie by comparing the combined
            # win evidence (b + c) against the combined loss evidence (a + d).
            if a + d < b + c:
                predicted = 1
            else:
                predicted = 0
    if Y_prediction[2*i] == predicted:
        right = right + 1
# Held-out (2019) match-level accuracy.
print(total)
print(right)
print(right*100/total)
# Training-set accuracy, counting each mirrored row separately.
training_prediction_1 = clf.predict(X_inputs)
total = 2*(num_rows - count_2019)
right = 0
for i in range(2*(num_rows - count_2019)):
    if Y_inputs[i] == training_prediction_1[i]:
        right = right + 1
print(total)
print(right)
print(right*100/total)
# Number of held-out matches that had no prior head-to-head history.
print(total_no_head_to_head)
# it was 59.5% with 9 features which were just stats,
# with ranking, seed, ranking points, basically values known pre-match which describe the player - improved to 62.5%
#62.654205607476634 - l1 penalty
# lbfgs - l2 - 62.57943925233645
#clf = svm.SVC(kernel='linear')
#clf.fit(X_inputs, Y_inputs)
#svm.SVC(kernel='linear')
#svm.SVC(kernel='rbf')
#svm.SVC(kernel=โsigmoidโ)
#svm.SVC(kernel=โpoly')
#logistic regression - penaltystr, โl1โ, โl2โ, โelasticnetโ or โnoneโ, optional (default=โl2โ)
#predictions_svm_svc = clf.predict(X_prediction)
#right = 0
#for i in range(2*count_2019):
# if Y_prediction[i] == predictions_svm_svc[i]:
# right = right + 1
#print(total)
#print(right)
#print(right*100/total)
# Sanity testing whether the total number of aces of Federer calculated the real aces - it matches!!
# Stat slot 0 is the ace counter; print the accumulated sum and the number of
# matches it was accumulated over, for every player whose name contains "Federer".
for player_id in player_id_stats_overall_count[0]:
    if "Federer" in player_name[player_id]:
        print(player_name[player_id], player_id_stats_overall_sum[0][player_id], player_id_stats_overall_count[0][player_id])
| [
"noreply@github.com"
] | ajkrish95.noreply@github.com |
c82c8b8c6b31aa7a2fbea0a59b5f32fd34dcd6e1 | d9f4400d47e0ce914be636698365e26f836b766c | /apps/screen/urls.py | 71d2c6af754bfffc99bcf77e2a85e73445a03918 | [] | no_license | otonelunico/prevex | deffc3cfd82354b20e61ac636b2b7fb4dd48d360 | e32efb317a05031a5e0c454d3343748ea7ff534e | refs/heads/master | 2021-01-22T08:39:26.389747 | 2017-06-24T20:52:27 | 2017-06-24T20:52:27 | 92,628,288 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 481 | py | from django.conf.urls import url, include
from apps.screen.views import index, Settings, Prevent_, Video_
from django.contrib.auth.decorators import login_required
# URL routes for the screen app; every view except the index requires login.
urlpatterns = [
    url(r'^$', index, name='index'),
    # Settings dashboard (login required).
    url(r'^settings/$', login_required(Settings), name="settings"),
    # Prevention view: funct is a word action, type and id are numeric selectors.
    url(r'^prevent/(?P<funct>\w+)/(?P<type>\d+)/(?P<id>\d+)$', login_required(Prevent_), name="prevent"),
    # Video view: funct word action plus a numeric type.
    url(r'^video/(?P<funct>\w+)/(?P<type>\d+)$', login_required(Video_), name="video"),
]
"ocubillosj@gmail.com"
] | ocubillosj@gmail.com |
c13a51b1f3b36682ca85ce3ef6df16001152ec90 | a2a0a6db5781626035e3657f7be8034579aad3d0 | /app.py | 0fe9f88555fff88aa16e02d1ded9aba9257744c3 | [] | no_license | lpreimesberger/otr | 07c8d2d8ed0c5dc9fcbcc6c58abe869e1d8df3ea | 0024ffaa866c1c89f2ce7becb870501c63881f98 | refs/heads/master | 2023-02-09T11:14:08.783890 | 2020-12-23T20:57:08 | 2020-12-23T20:57:08 | 296,576,483 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,153 | py | """
burning man 2021 project
numbers from: https://freemusicarchive.org/music/The_Conet_Project
"""
#!/usr/bin/env python
import sys
import os
import random
from time import sleep
import psutil
from pydub import AudioSegment
from pydub.playback import play
from threading import Lock
from flask import Flask, render_template, session, request, \
copy_current_request_context, send_from_directory
from flask_socketio import SocketIO, emit, join_room, leave_room, \
close_room, rooms, disconnect
SOUND_DIRECTORY = "./static/numbers"
# Set this variable to "threading", "eventlet" or "gevent" to test the
# different async modes, or leave it set to None for the application to choose
# the best option based on installed packages.
async_mode = None
app = Flask(__name__)
app.config['SECRET_KEY'] = 'secret!'
socketio = SocketIO(app, async_mode=async_mode)
thread = None
thread_lock = Lock()
def background_thread():
    """Endlessly pick a random numbers-station clip, announce it to every
    client in the /test namespace, then play it on the local speakers."""
    count = 0
    while True:
        socketio.sleep(2)
        count += 1
        # Pick any file from the sound library at random.
        play_next = random.choice(os.listdir(SOUND_DIRECTORY))
        print("BACKGROUND FIRE")
        # No room/sid argument -> broadcast to all clients in the namespace.
        socketio.emit('my_response',
                      {'data': play_next, 'count': count, "playing": play_next},
                      namespace='/test')
        # NOTE(review): play() blocks this background task for the clip's whole
        # duration, and from_wav assumes every file in SOUND_DIRECTORY is a WAV
        # -- confirm both are intended.
        sound = AudioSegment.from_wav(SOUND_DIRECTORY + "/" + play_next)
        play(sound)
        socketio.sleep(5)
@app.route('/otr/<the_file>')
def otr(the_file):
    """Serve a single file from the static directory under the /otr/ prefix."""
    static_dir = 'static'
    return send_from_directory(static_dir, the_file)
@app.route('/')
def index():
    """Render the main page, passing the active socketio async mode to the template."""
    mode = socketio.async_mode
    return render_template('index.html', async_mode=mode)
@socketio.on('my_event', namespace='/test')
def test_message(message):
    """Echo a client event back to the sender, with a per-session counter."""
    count = session.get('receive_count', 0) + 1
    session['receive_count'] = count
    emit('my_response',
         {'data': message['data'], 'count': count})
@socketio.on('my_broadcast_event', namespace='/test')
def test_broadcast_message(message):
    """Re-broadcast a client event to every connected client in the namespace."""
    count = session.get('receive_count', 0) + 1
    session['receive_count'] = count
    emit('my_response',
         {'data': message['data'], 'count': count},
         broadcast=True)
@socketio.on('join', namespace='/test')
def join(message):
    """Add the client to the requested room and confirm its current room list."""
    join_room(message['room'])
    count = session.get('receive_count', 0) + 1
    session['receive_count'] = count
    room_list = ', '.join(rooms())
    emit('my_response',
         {'data': 'In rooms: ' + room_list, 'count': count})
@socketio.on('leave', namespace='/test')
def leave(message):
    """Remove the client from the requested room and confirm its room list."""
    leave_room(message['room'])
    count = session.get('receive_count', 0) + 1
    session['receive_count'] = count
    room_list = ', '.join(rooms())
    emit('my_response',
         {'data': 'In rooms: ' + room_list, 'count': count})
@socketio.on('close_room', namespace='/test')
def close(message):
    """Warn a room that it is closing, then close it for all of its clients."""
    count = session.get('receive_count', 0) + 1
    session['receive_count'] = count
    target = message['room']
    emit('my_response',
         {'data': 'Room ' + target + ' is closing.', 'count': count},
         room=target)
    close_room(target)
@socketio.on('my_room_event', namespace='/test')
def send_room_message(message):
    """Relay a client event to everyone in the room named in the payload."""
    count = session.get('receive_count', 0) + 1
    session['receive_count'] = count
    emit('my_response',
         {'data': message['data'], 'count': count},
         room=message['room'])
@socketio.on('disconnect_request', namespace='/test')
def disconnect_request():
    """Acknowledge the client's request, then disconnect once it is delivered."""
    @copy_current_request_context
    def can_disconnect():
        # Runs later as the emit ack callback, still bound to this request
        # context thanks to the decorator above.
        disconnect()
    session['receive_count'] = session.get('receive_count', 0) + 1
    # for this emit we use a callback function
    # when the callback function is invoked we know that the message has been
    # received and it is safe to disconnect
    emit('my_response',
         {'data': 'Disconnected!', 'count': session['receive_count']},
         callback=can_disconnect)
@socketio.on('my_ping', namespace='/test')
def ping_pong():
    """Latency probe: answer the client's ping immediately with a pong."""
    emit('my_pong')
@socketio.on('connect', namespace='/test')
def test_connect():
    """On client connect, lazily start the single background player task."""
    global thread
    with thread_lock:
        # Lock so concurrent connects spawn at most one background task.
        if thread is None:
            thread = socketio.start_background_task(background_thread)
    emit('my_response', {'data': 'Connected', 'count': 0})
@socketio.on('disconnect', namespace='/test')
def test_disconnect():
    """Log which session id dropped its socket connection."""
    print('Client disconnected', request.sid)
if __name__ == '__main__':
    # Single-instance guard: refuse to start when another interpreter is
    # already running this script.  The original matched the literal name
    # 'python.exe', which only ever matches on Windows, so the guard was a
    # no-op on Linux/macOS; matching 'python' (case-insensitive) covers
    # python, python3 and python.exe alike.
    procs = []
    for p in psutil.process_iter():
        try:
            if 'python' in p.name().lower() and __file__ in p.cmdline():
                procs.append(p)
        except (psutil.NoSuchProcess, psutil.AccessDenied, psutil.ZombieProcess):
            # Processes can vanish or deny access mid-iteration; skip them.
            continue
    if len(procs) > 1:
        print('Process is already running...')
        sys.exit(1)
    socketio.run(app, debug=True)
"""
import threading
import atexit
import time
import os
import psutil
import random
from flask import Flask, render_template, copy_current_request_context, jsonify, send_from_directory, session
from flask_socketio import SocketIO, send, emit, join_room, leave_room, close_room, disconnect, rooms
import requests
import gevent
import geventwebsocket
SOUND_DIRECTORY = "./static/numbers"
app = Flask(__name__)
app.config['SECRET_KEY'] = 'secret!'
socketio = SocketIO(app, async_mode="gevent")
# variables that are accessible from anywhere
commonDataStruct = {"file": "", "numbers": False}
# lock to control access to variable
dataLock = threading.Lock()
# thread handler
yourThread = threading.Thread()
def interrupt():
global yourThread
yourThread.cancel()
@socketio.on_error() # Handles the default namespace
def error_handler(e):
print("SOCKET ERROR")
print(e)
pass
@socketio.on('json', namespace="/mq")
def handle_json(json):
print("SOCKET IN")
print('received json: ' + str(json))
@socketio.on('my_event', namespace='/test')
def test_message(message):
session['receive_count'] = session.get('receive_count', 0) + 1
emit('my_response',
{'data': message['data'], 'count': session['receive_count']})
@socketio.on('my_broadcast_event', namespace='/test')
def test_broadcast_message(message):
session['receive_count'] = session.get('receive_count', 0) + 1
emit('my_response',
{'data': message['data'], 'count': session['receive_count']},
broadcast=True)
@socketio.on('join', namespace='/test')
def join(message):
join_room(message['room'])
session['receive_count'] = session.get('receive_count', 0) + 1
emit('my_response',
{'data': 'In rooms: ' + ', '.join(rooms()),
'count': session['receive_count']})
@socketio.on('leave', namespace='/test')
def leave(message):
leave_room(message['room'])
session['receive_count'] = session.get('receive_count', 0) + 1
emit('my_response',
{'data': 'In rooms: ' + ', '.join(rooms()),
'count': session['receive_count']})
@socketio.on('close_room', namespace='/test')
def close(message):
session['receive_count'] = session.get('receive_count', 0) + 1
emit('my_response', {'data': 'Room ' + message['room'] + ' is closing.',
'count': session['receive_count']},
room=message['room'])
close_room(message['room'])
@socketio.on('my_room_event', namespace='/test')
def send_room_message(message):
session['receive_count'] = session.get('receive_count', 0) + 1
emit('my_response',
{'data': message['data'], 'count': session['receive_count']},
room=message['room'])
@socketio.on('disconnect_request', namespace='/test')
def disconnect_request():
@copy_current_request_context
def can_disconnect():
disconnect()
session['receive_count'] = session.get('receive_count', 0) + 1
# for this emit we use a callback function
# when the callback function is invoked we know that the message has been
# received and it is safe to disconnect
emit('my_response',
{'data': 'Disconnected!', 'count': session['receive_count']},
callback=can_disconnect)
@socketio.on('my_ping', namespace='/test')
def ping_pong():
emit('my_pong')
@socketio.on('message', namespace="/mq")
def handle_text(json):
print("SOCKET IN")
print('received text: ' + str(json))
@app.route('/')
def hello_world():
return render_template('index.html', async_mode=socketio.async_mode)
@app.route('/otr/<the_file>')
def otr(the_file):
return send_from_directory('static', the_file)
@app.route('/emit/<name>')
def ws_emit(name):
print(name)
try:
send({"file": name, "numbers": True}, namespace="/mq")
print("Message sent!")
except AttributeError:
pass
return jsonify({"result": "ok"})
def player():
# wait until web starts
print("START PLAYER")
time.sleep(5)
while True:
print("boink")
play_next = random.choice(os.listdir(SOUND_DIRECTORY))
requests.get("http://127.0.0.1:5000/emit/{}".format(play_next))
print("Playing -> {}", play_next)
time.sleep(500)
procs = [p for p in psutil.process_iter() if 'python.exe' in p.name() and __file__ in p.cmdline()]
if len(procs) > 1:
print('Process is already running...')
sys.exit(1)
print("Launching background threads")
print("player...")
yourThread = threading.Thread(target=player)
yourThread.start()
atexit.register(interrupt)
print("web server launching!")
socketio.run(app)
"""
| [
"meathead123"
] | meathead123 |
59dc987bd295de691feb48bd7cd6fb3b0022b320 | 7ce00c738fea22b200e8c0b5f14110da678f3560 | /MayLongchallenge3.py | eb830d191c73038b09f517abe1efa92e75940827 | [] | no_license | Yogesh-001/Python | c0c929100b169b17d178436a3a43b64518e5d576 | 6cb25790c4f0de3d21389377ee978830a3febd56 | refs/heads/main | 2023-05-07T01:55:05.120602 | 2021-06-02T14:44:41 | 2021-06-02T14:44:41 | 354,456,700 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 120 | py | # cook your dish here
# For each test case, read n and print 2^(n-1) modulo 1e9+7.
cases = int(input())
for _ in range(cases):
    mod = int(1e9 + 7)
    n = int(input())
    print(pow(2, n - 1, mod))
| [
"noreply@github.com"
] | Yogesh-001.noreply@github.com |
dd5e2a1ea852ce0926d23d2517ce7a6499aa5d2c | 75bd816c06203f9ae8b988b1f51778b199fbe629 | /backend/app/db/__init__.py | 47daae9c5a072f088859a5a05e80b73162277462 | [] | no_license | mcptr/bbone-js-py | ce209e377976707d1e0661fda5d5ceb6452dd8a1 | ee07dce6907c645fbdd843daa80604d7228778b1 | refs/heads/master | 2020-03-27T00:11:34.310231 | 2018-08-21T18:17:30 | 2018-08-21T18:17:30 | 145,602,495 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 19 | py | from . db import *
| [
"dev@metaceptron.com"
] | dev@metaceptron.com |
d91f30dbf517bb4df1471f50de15a38476683448 | 41cf0c7473ffb214633f793087f765d3208b5eab | /naslib/predictors/trees/__init__.py | 3542b946613e40dd5c3cf7e1a0ee6a447dcaa350 | [
"Apache-2.0"
] | permissive | sailor921/NASLib | 7ca38dcbe6ef87ee388e57c8a8b54f3b9d75189c | a91a4714e08aca2507f1cec15e125e8c405bcaaa | refs/heads/master | 2023-03-31T04:33:10.910230 | 2021-04-05T04:33:35 | 2021-04-05T04:33:35 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 170 | py | from .base_tree_class import BaseTree
from .ngb import NGBoost
from .xgb import XGBoost
from .gbdt import GBDTPredictor
from .random_forest import RandomForestPredictor
| [
"zelaa@cs.uni-freiburg.de"
] | zelaa@cs.uni-freiburg.de |
734a353a9b4a5f50f3a72adeae60c79376b0e30d | e82245a9e623ef3e2b4b9c02f0fd932c608c4484 | /firecode.io/08-find_the_transpose_of_a_square_matrix.py | 3d2ba5c22dcb19e7aba1339638498c7d1921455a | [] | no_license | Zylophone/Programming-for-Sport | 33e8161028cfddce3b7a1243eb092070107342e3 | 193d6184f939303d8661f68d6fd06bdec95df351 | refs/heads/master | 2020-06-16T23:11:44.719286 | 2017-05-21T17:10:46 | 2017-05-21T17:10:46 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 450 | py | '''
Args:
- matrix (list of lists)
- a square matrix
Modifies:
- arg into its transpose in-place
Returns:
- nothing (None)
Complexity:
- O(n^2) time
- O(1) extra space, in-place
'''
def transpose_matrix(matrix):
    """Transpose a square matrix in place.

    Swaps each element above the diagonal with its mirror below it,
    so the argument is modified and nothing useful is returned.
    None input is passed through (returns None).
    O(n^2) time, O(1) extra space.
    """
    if matrix is None:
        return None
    size = len(matrix)
    for row in range(size):
        for col in range(row + 1, size):
            matrix[row][col], matrix[col][row] = matrix[col][row], matrix[row][col]
"jfv33@cornell.edu"
] | jfv33@cornell.edu |
9aa0ccb8b6f0b1229c280b27cf975f65ac1d9503 | 55ca1d0e6fbd6e1b024eb59068e3d6ae2c466882 | /dice.py | cc51e4ec8e0004b48d7abddf1e59ab0e7493b1ff | [] | no_license | coding-world/led-matrix | 899c2c126a5c4305fa25dc81bb8249fed23d9175 | ef47bc0b44330a9b0b7ef8f8757bdd674afee13f | refs/heads/master | 2021-01-09T20:07:47.894084 | 2016-08-13T17:10:34 | 2016-08-13T17:10:34 | 65,627,897 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 486 | py | import RPi.GPIO as gpio
import time
import max7219.led as led
from random import randint
from max7219.font import proportional, SINCLAIR_FONT, TINY_FONT, CP437_FONT
gpio.setmode(gpio.BCM)  # use Broadcom (BCM) pin numbering
# Push button ("Taster") on BCM pin 14, with the internal pull-up enabled.
taster = 14
tasterStatus = 0  # NOTE(review): never read after this assignment
gpio.setup(taster,gpio.IN,pull_up_down=gpio.PUD_UP)
matrix = led.matrix()
# Button callback: show a fresh die roll (1-6) on the LED matrix.
def neue_zahl(channel):
    matrix.letter(0, ord(str(randint(1,6))))
# Fire neue_zahl on every rising edge of the button pin.
gpio.add_event_detect(taster, gpio.RISING, callback=neue_zahl)
matrix.letter(0, ord("?"))  # idle display until the first button press
# Keep the process alive so the GPIO edge callbacks keep firing.
while True:
    time.sleep(0.1)
"samuel@jugend-programmiert.com"
] | samuel@jugend-programmiert.com |
74557e0c1abec5628914160e2920d0efd85752c9 | 57b0ca183f325a06da5aef60b18e7ba03657e623 | /alpha.py | a060dbef2004cbd5c8f4ea43a6a0eb553aecff3d | [] | no_license | vickyrr24/guvi | 5baa287195c8ce9e1259091f54f1885e3ec2ad83 | 3fa306d901a76071acb48da7db724ba1725c63af | refs/heads/master | 2020-05-28T02:24:05.829944 | 2019-08-01T09:38:24 | 2019-08-01T09:38:24 | 188,852,485 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 119 | py | ch = input()
# Classify the character read above as an ASCII letter or not.
lowercase = 'a' <= ch <= 'z'
uppercase = 'A' <= ch <= 'Z'
if lowercase or uppercase:
    print("Alphabet")
else:
    print("No")
| [
"51054175+vickyrr24@users.noreply.github.com"
] | 51054175+vickyrr24@users.noreply.github.com |
484a63b88f96b2ae3a944ccd12b5db00e8411892 | 89fcda1a024b2b341c2995891fcefc0aa3196c11 | /util.py | 2513f97b737a64896bbe418f4ea52038f446ef37 | [] | no_license | guruzoa/DBPI-BlindSR | e05d018dc0369b2414c5745e6f4ae69c8215a2ed | 5c7702cfd34018ea93576755afe1b0bbc67e7fd6 | refs/heads/master | 2022-12-08T09:43:33.168318 | 2020-08-31T10:41:46 | 2020-08-31T10:41:46 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,710 | py | import torch
import numpy as np
from PIL import Image
from scipy.signal import convolve2d
def move2cpu(d):
    """Detach a tensor from its graph and return it as a float32 numpy array on the CPU."""
    detached = d.detach()
    return detached.cpu().float().numpy()
def tensor2im(im_t):
    """Bring a [-1,1] (1,C,H,W) tensor back to an HWC uint8 image in [0,255]."""
    chw = move2cpu(im_t).squeeze(0)
    hwc = np.transpose(chw, (1, 2, 0))
    scaled = np.round((hwc + 1) / 2.0 * 255.0)
    return np.clip(scaled, 0, 255).astype(np.uint8)
def im2tensor(im_np):
    """Convert an HWC image to a [-1,1] float tensor of shape (1,C,H,W) on the GPU.

    uint8 input is first rescaled to [0,1]; float input is assumed to be
    in [0,1] already.
    """
    scaled = im_np / 255.0 if im_np.dtype == 'uint8' else im_np
    chw = np.transpose(scaled, (2, 0, 1)) * 2.0 - 1.0
    return torch.FloatTensor(chw).unsqueeze(0).cuda()
def read_image(path):
    """Loads an image.

    Reads the file at *path* and returns it as an RGB uint8 numpy array
    of shape (H, W, 3).
    """
    # Use a context manager so the underlying file handle is closed promptly.
    # Image.open() opens the file lazily and otherwise keeps it open until
    # garbage collection, which leaks file descriptors in long loops.
    with Image.open(path) as raw:
        im = raw.convert('RGB')
    return np.array(im, dtype=np.uint8)
def rgb2gray(im):
    """Collapse an RGB image to gray-scale; non-3-D input is passed through unchanged."""
    if im.ndim != 3:
        return im
    weights = [0.299, 0.587, 0.114]
    return np.dot(im, weights)
def swap_axis(im):
"""Swap axis of a tensor from a 3 channel tensor to a batch of 3-single channel and vise-versa"""
return im.transpose(0, 1) if type(im) == torch.Tensor else np.moveaxis(im, 0, 1)
def create_gradient_map(im, window=5, percent=.97):
"""Create a gradient map of the image blurred with a rect of size window and clips extreme values"""
# Calculate gradients
gx, gy = np.gradient(rgb2gray(im))
# Calculate gradient magnitude
gmag, gx, gy = np.sqrt(gx ** 2 + gy ** 2), np.abs(gx), np.abs(gy)
# Pad edges to avoid artifacts in the edge of the image
gx_pad, gy_pad, gmag = pad_edges(gx, int(window)), pad_edges(gy, int(window)), pad_edges(gmag, int(window))
lm_x, lm_y, lm_gmag = clip_extreme(gx_pad, percent), clip_extreme(gy_pad, percent), clip_extreme(gmag, percent)
# Sum both gradient maps
grads_comb = lm_x / lm_x.sum() + lm_y / lm_y.sum() + gmag / gmag.sum()
# Blur the gradients and normalize to original values
loss_map = convolve2d(grads_comb, np.ones(shape=(window, window)), 'same') / (window ** 2)
# Normalizing: sum of map = numel
return loss_map / np.mean(loss_map)
def create_probability_map(loss_map, crop):
"""Create a vector of probabilities corresponding to the loss map"""
# Blur the gradients to get the sum of gradients in the crop
blurred = convolve2d(loss_map, np.ones([crop // 2, crop // 2]), 'same') / ((crop // 2) ** 2)
# Zero pad s.t. probabilities are NNZ only in valid crop centers
prob_map = pad_edges(blurred, crop // 2)
# Normalize to sum to 1
prob_vec = prob_map.flatten() / prob_map.sum() if prob_map.sum() != 0 else np.ones_like(prob_map.flatten()) / prob_map.flatten().shape[0]
return prob_vec
def pad_edges(im, edge):
"""Replace image boundaries with 0 without changing the size"""
zero_padded = np.zeros_like(im)
zero_padded[edge:-edge, edge:-edge] = im[edge:-edge, edge:-edge]
return zero_padded
def clip_extreme(im, percent):
"""Zeroize values below the a threshold and clip all those above"""
# Sort the image
im_sorted = np.sort(im.flatten())
# Choose a pivot index that holds the min value to be clipped
pivot = int(percent * len(im_sorted))
v_min = im_sorted[pivot]
# max value will be the next value in the sorted array. if it is equal to the min, a threshold will be added
v_max = im_sorted[pivot + 1] if im_sorted[pivot + 1] > v_min else v_min + 10e-6
# Clip an zeroize all the lower values
return np.clip(im, v_min, v_max) - v_min
def nn_interpolation(im, sf):
"""Nearest neighbour interpolation"""
pil_im = Image.fromarray(im)
return np.array(pil_im.resize((im.shape[1] * sf, im.shape[0] * sf), Image.NEAREST), dtype=im.dtype)
| [
"noreply@github.com"
] | guruzoa.noreply@github.com |
81c239cc97dac7e912fc4ac2e31f9fd9697588f7 | 5dd82b92cef1ff19d5b5a42b4d0388b7456535b0 | /zajecia02/del01c.py | 6a2d457b786505037a666a8dbb7729c8b89bc5ee | [] | no_license | grzeborz/codeme_pyth_adv | 0bd350daf56baf228c6639913ce964b290cee5be | 98808d179d6dec8e11ed04d172fd12810469a0ae | refs/heads/master | 2023-02-06T13:06:12.915439 | 2020-03-02T21:15:36 | 2020-03-02T21:15:36 | 238,560,637 | 0 | 0 | null | 2023-02-02T05:14:21 | 2020-02-05T22:23:54 | Python | UTF-8 | Python | false | false | 214 | py | class Klass:
def __init__(self):
print('Nowy obiekt:', self)
def __del__(self):
print('Usuniฤto obiekt:', self)
if __name__ == '__main__':
k1 = Klass()
print('Koniec programu')
| [
"grzegorz.szyperek@gmail.com"
] | grzegorz.szyperek@gmail.com |
5db1fbf5131e9fcb3b1160d38c497df02b701c2d | 12a5b72982291ac7c074210afc2c9dfe2c389709 | /online_judges/Codeforces/113/A/code.py | 6a269bbf442e5a4f164b88db14eb1cdb942cc845 | [] | no_license | krantirk/Algorithms-and-code-for-competitive-programming. | 9b8c214758024daa246a1203e8f863fc76cfe847 | dcf29bf976024a9d1873eadc192ed59d25db968d | refs/heads/master | 2020-09-22T08:35:19.352751 | 2019-05-21T11:56:39 | 2019-05-21T11:56:39 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 270 | py | s = ["lios","liala","etr","etra","initis","inites"]
input_string = raw_input().split()
answer = True
for e in input_string:
flag = False
for k in s:
if e.endswith(k):
flag = True
if not flag:
answer = False
break
if (answer): print "YES"
else: print "NO"
| [
"mariannelinharesm@gmail.com"
] | mariannelinharesm@gmail.com |
b6d6600a7f6b283bd1bc4668a6c9dba2a2c17779 | 20a8c6b3a8a2e21a2af58cb13383959352b74976 | /models/resnet/resnet18.py | 0635a71a6dbf1677a77c74550826bc455261b5d7 | [] | no_license | ririverce/neural-network-pipeline | 5f4a028f371c3820847ad75fa6b93397fdb3d0ba | 13d58c2f4370e239e6781da538c208564a147333 | refs/heads/master | 2022-10-06T02:53:07.500727 | 2020-06-01T01:25:07 | 2020-06-01T01:25:07 | 235,989,100 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,904 | py | import torch
import torch.nn.functional as F
from models.resnet.resnet_components import ResidualBlock
class ResNet18(torch.nn.Module):
def __init__(self, input_channels, num_classes):
super(ResNet18, self).__init__()
self.input_channels = input_channels
if type(num_classes) is list:
self.num_classes = num_classes
else:
self.num_classes = [num_classes]
self.conv1 = torch.nn.Conv2d(self.input_channels, 64,
kernel_size=7, stride=2, padding=3)
self.bn1 = torch.nn.BatchNorm2d(64)
self.pool1 = torch.nn.MaxPool2d(kernel_size=3, stride=2, padding=1)
self.block2_1 = ResidualBlock(64, 64)
self.block2_2 = ResidualBlock(64, 64)
self.block3_1 = ResidualBlock(64, 128, stride=2)
self.block3_2 = ResidualBlock(128, 128)
self.block4_1 = ResidualBlock(128, 256, stride=2)
self.block4_2 = ResidualBlock(256, 256)
self.block5_1 = ResidualBlock(256, 512, stride=2)
self.block5_2 = ResidualBlock(512, 512)
self.block5_pool = torch.nn.AdaptiveAvgPool2d((1, 1))
self.classifier = torch.nn.Linear(512, sum(self.num_classes))
def forward(self, x):
h = x
h = F.relu(self.bn1(self.conv1(h)))
h = self.pool1(h)
h = self.block2_1(h)
h = self.block2_2(h)
h = self.block3_1(h)
h = self.block3_2(h)
h = self.block4_1(h)
h = self.block4_2(h)
h = self.block5_1(h)
h = self.block5_2(h)
h = self.block5_pool(h)
h = h.view(h.size(0), -1)
h = self.classifier(h)
if len(self.num_classes) > 1:
y = []
c_start = 0
for c in self.num_classes:
y.append(h[:, c_start:c_start+c])
c_start += c
else:
y = h
return y | [
"ririverce@gmail.com"
] | ririverce@gmail.com |
89c9f9e46665a2454c9859f9dcb6c2109c28180a | 9f571823bbbd3dbd1bbc75b0918c45b0a4d1f2f9 | /loss_functions_keras.py | 02c8615efc6989c03aefad1b157b86d9846682d9 | [] | no_license | woooo95/useful-codes | 61bfada92156eeb62ebdc9172d13e1658ff454f0 | dc328fb3d929d22afe80264068cd65523ba9febc | refs/heads/main | 2023-04-07T09:56:18.872038 | 2021-04-13T04:55:03 | 2021-04-13T04:55:03 | 357,427,448 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,584 | py | import numpy
import keras
import keras.backend as K
#DiceLoss
def DiceLoss(targets, inputs, smooth=1e-6):
#flatten label and prediction tensors
inputs = K.flatten(inputs)
targets = K.flatten(targets)
intersection = K.sum(K.dot(targets, inputs))
dice = (2*intersection + smooth) / (K.sum(targets) + K.sum(inputs) + smooth)
return 1 - dice
#BCE-DiceLoss
def DiceBCELoss(targets, inputs, smooth=1e-6):
#flatten label and prediction tensors
inputs = K.flatten(inputs)
targets = K.flatten(targets)
BCE = binary_crossentropy(targets, inputs)
intersection = K.sum(K.dot(targets, inputs))
dice_loss = 1 - (2*intersection + smooth) / (K.sum(targets) + K.sum(inputs) + smooth)
Dice_BCE = BCE + dice_loss
return Dice_BCE
#Jaccard/Intersection over Union (IoU) Loss
def IoULoss(targets, inputs, smooth=1e-6):
#flatten label and prediction tensors
inputs = K.flatten(inputs)
targets = K.flatten(targets)
intersection = K.sum(K.dot(targets, inputs))
total = K.sum(targets) + K.sum(inputs)
union = total - intersection
IoU = (intersection + smooth) / (union + smooth)
return 1 - IoU
#Focal Loss
Focal_ALPHA = 0.8
Focal_GAMMA = 2
def FocalLoss(targets, inputs, alpha=Focal_ALPHA, gamma=Focal_GAMMA):
inputs = K.flatten(inputs)
targets = K.flatten(targets)
BCE = K.binary_crossentropy(targets, inputs)
BCE_EXP = K.exp(-BCE)
focal_loss = K.mean(alpha * K.pow((1-BCE_EXP), gamma) * BCE)
return focal_loss
#Tversky Loss
Tversky_ALPHA = 0.5
Tversky_BETA = 0.5
def TverskyLoss(targets, inputs, alpha=Tversky_ALPHA, beta=Tversky_BETA, smooth=1e-6):
#flatten label and prediction tensors
inputs = K.flatten(inputs)
targets = K.flatten(targets)
#True Positives, False Positives & False Negatives
TP = K.sum((inputs * targets))
FP = K.sum(((1-targets) * inputs))
FN = K.sum((targets * (1-inputs)))
Tversky = (TP + smooth) / (TP + alpha*FP + beta*FN + smooth)
return 1 - Tversky
#Focal Tversky Loss
FT_ALPHA = 0.5
FT_BETA = 0.5
FT_GAMMA = 1
def FocalTverskyLoss(targets, inputs, alpha=FT_ALPHA, beta=FT_BETA, gamma=FT_GAMMA, smooth=1e-6):
#flatten label and prediction tensors
inputs = K.flatten(inputs)
targets = K.flatten(targets)
#True Positives, False Positives & False Negatives
TP = K.sum((inputs * targets))
FP = K.sum(((1-targets) * inputs))
FN = K.sum((targets * (1-inputs)))
Tversky = (TP + smooth) / (TP + alpha*FP + beta*FN + smooth)
FocalTversky = K.pow((1 - Tversky), gamma)
return FocalTversky
#Combo Loss
ce_w = 0.5 #beta
ce_d_w = 0.5 #alpha
e = K.epsilon()
smooth = 1
'''
ce_w values smaller than 0.5 penalize false positives more while values larger than 0.5 penalize false negatives more
ce_d_w is level of contribution of the cross-entropy loss in the total loss.
'''
def Combo_loss(y_true, y_pred):
y_true_f = K.flatten(y_true)
y_pred_f = K.flatten(y_pred)
intersection = K.sum(y_true_f * y_pred_f)
d = (2. * intersection + smooth) / (K.sum(y_true_f) + K.sum(y_pred_f) + smooth)
y_pred_f = K.clip(y_pred_f, e, 1.0 - e)
out = - (ce_w * y_true_f * K.log(y_pred_f)) + ((1 - ce_w) * (1.0 - y_true_f) * K.log(1.0 - y_pred_f))
weighted_ce = K.mean(out, axis=-1)
combo = (ce_d_w * weighted_ce) - ((1 - ce_d_w) * d)
return combo
| [
"kswoo3030@korea.ac.kr"
] | kswoo3030@korea.ac.kr |
bcc1bcb89bfa3c79f11eb519c8602ae48751272e | 69ac330e946e6be0ea58e10b4647cf9771cc28cc | /comments/migrations/0001_initial.py | c21e325499116ca51dc26a9278846a59de3ed60b | [] | no_license | dcbaker1992/YouTube_backend | 83a4f2f41b34ce89e57eab8dacbd395041417b89 | 0f532adc283ebb314838fb9e35787b95246fa823 | refs/heads/main | 2023-06-04T01:58:32.430652 | 2021-06-29T17:22:29 | 2021-06-29T17:22:29 | 379,698,343 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,135 | py | # Generated by Django 3.1.8 on 2021-06-23 19:51
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Comment',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('video_id', models.CharField(max_length=50)),
('comment_text', models.CharField(max_length=250)),
('like', models.IntegerField(default=0)),
('dislike', models.IntegerField(default=0)),
],
),
migrations.CreateModel(
name='Reply',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('reply_text', models.CharField(max_length=250)),
('comment', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='comments.comment')),
],
),
]
| [
"schmidt1519@gmail.com"
] | schmidt1519@gmail.com |
5be18446763910fd04a7f72bed150df5e238ae6e | 337d3a3cc8d8df90848456227f35c2422ab2df0c | /cmar.py | e38658a66dddc9abf2d12552a935fae78c0bd31e | [] | no_license | rb640/CmarReports | 005da6e5369b9385c3a4716a2be662ffbf9d80b4 | 68e61bb389d78ded62f19580770b008e1f005bcb | refs/heads/master | 2021-01-21T11:40:32.752129 | 2015-07-24T03:47:19 | 2015-07-24T03:47:19 | 17,615,415 | 1 | 0 | null | 2014-04-17T01:22:39 | 2014-03-11T02:09:19 | Python | UTF-8 | Python | false | false | 3,490 | py | # -*- coding: utf-8 -*-
"""
Flaskr
~~~~~~
A microblog example application written as Flask tutorial with
Flask and sqlite3.
:copyright: (c) 2014 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
import os
from rpttree import get_logs_list
from sqlite3 import dbapi2 as sqlite3
from flask import Flask, request, session, g, redirect, url_for, abort, \
render_template, flash, Response
# create our little application :)
app = Flask(__name__)
static = 'templates'
# Load default config and override config from an environment variable
app.config.update(dict(
DATABASE=os.path.join(app.root_path, 'flaskr.db'),
DEBUG=True,
SECRET_KEY='development key',
USERNAME='admin',
PASSWORD='default'
))
app.config.from_envvar('FLASKR_SETTINGS', silent=True)
def connect_db():
"""Connects to the specific database."""
rv = sqlite3.connect(app.config['DATABASE'])
rv.row_factory = sqlite3.Row
return rv
def init_db():
"""Creates the database tables."""
with app.app_context():
db = get_db()
with app.open_resource('schema.sql', mode='r') as f:
db.cursor().executescript(f.read())
db.commit()
def get_db():
"""Opens a new database connection if there is none yet for the
current application context.
"""
if not hasattr(g, 'sqlite_db'):
g.sqlite_db = connect_db()
return g.sqlite_db
@app.teardown_appcontext
def close_db(error):
"""Closes the database again at the end of the request."""
if hasattr(g, 'sqlite_db'):
g.sqlite_db.close()
@app.route('/')
def show_entries():
db = get_db()
cur = db.execute('select title, text from entries order by id desc')
entries = cur.fetchall()
return render_template('show_entries.html', entries=entries)
@app.route('/add', methods=['POST'])
def add_entry():
if not session.get('logged_in'):
abort(401)
db = get_db()
db.execute('insert into entries (title, text) values (?, ?)',
[request.form['title'], request.form['text']])
db.commit()
flash('New entry was successfully posted')
return redirect(url_for('show_entries'))
@app.route('/login', methods=['GET', 'POST'])
def login():
error = None
if request.method == 'POST':
if request.form['username'] != app.config['USERNAME']:
error = 'Invalid username'
elif request.form['password'] != app.config['PASSWORD']:
error = 'Invalid password'
else:
session['logged_in'] = True
flash('You were logged in')
return redirect(url_for('show_entries'))
return render_template('login.html', error=error)
@app.route('/logout')
def logout():
session.pop('logged_in', None)
flash('You were logged out')
return redirect(url_for('show_entries'))
@app.route('/g')
def goog():
return redirect("http://www.google.com", code=302)
@app.route('/reports/')
def reports():
reports = get_logs_list()
return render_template('report_layout.html',reports=reports)
# @app.route('/reports/')
# def reports():
# reports = get_logs_list()
# return render_template('css_tree/treeview.html',reports=reports)
@app.route('/stream')
def streamed_response():
def generate():
yield 'Hello '
yield request.args['name']
yield '!'
return Response(stream_with_context(generate()))
if __name__ == '__main__':
init_db()
app.run()
| [
"junk@ronbarnard.com"
] | junk@ronbarnard.com |
e150154f909cece055f4e8c8dc55d8fe129130e0 | 931e18f0102882b917f85788ad9094bc2b5ec625 | /Python/Django/dojo_ninjas/main/apps/dojo_ninjas/migrations/0001_initial.py | 0557ce657151b24c302a5e6cf7392b52cdbb0ff4 | [] | no_license | jaechoi15/CodingDojoAssignments | 5008591e36989332493197ed3b5ca3525877ecdd | ce44710722cb6f17c888b2f29d8243fb8862ce86 | refs/heads/master | 2021-05-05T15:33:10.022211 | 2018-06-10T03:39:43 | 2018-06-10T03:39:43 | 108,343,247 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 669 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.11 on 2018-03-16 14:44
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Dojo',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=255)),
('city', models.CharField(max_length=255)),
('state', models.CharField(max_length=2)),
],
),
]
| [
"jchoi625@gmail.com"
] | jchoi625@gmail.com |
653c6ee77e55fe39bf26522d6e3e04161daa0ce3 | 022104aa2456429356bdd26c701a2949381a83cf | /install/lib/python2.7/dist-packages/robotnik_msgs/msg/_SetElevatorFeedback.py | fe9731c70b42e53a2afd11197435c3aea3f8e08d | [] | no_license | nachocz/campero_ws | 204f313d5fbdb81d1f7cc568341a1170ddd2b4cf | f2b09f96165166c0e867e3f5f3dcd092dbac1c1b | refs/heads/master | 2023-02-02T03:25:56.603172 | 2020-12-11T11:28:42 | 2020-12-11T11:28:42 | 320,539,897 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,506 | py | # This Python file uses the following encoding: utf-8
"""autogenerated by genpy from robotnik_msgs/SetElevatorFeedback.msg. Do not edit."""
import codecs
import sys
python3 = True if sys.hexversion > 0x03000000 else False
import genpy
import struct
import robotnik_msgs.msg
class SetElevatorFeedback(genpy.Message):
_md5sum = "47e3f709643220443260a9d8c1f901ea"
_type = "robotnik_msgs/SetElevatorFeedback"
_has_header = False # flag to mark the presence of a Header object
_full_text = """# ====== DO NOT MODIFY! AUTOGENERATED FROM AN ACTION DEFINITION ======
robotnik_msgs/ElevatorStatus status
================================================================================
MSG: robotnik_msgs/ElevatorStatus
# state
string RAISING=raising
string LOWERING=lowering
string IDLE=idle
string ERROR_G_IO=error_getting_io
string ERROR_S_IO=error_setting_io
string ERROR_TIMEOUT=error_timeout_in_action
# position
string UP=up
string DOWN=down
string UNKNOWN=unknown
# IDLE, RAISING, LOWERING
string state
# UP, DOWN, UNKNOWN
string position
float32 height
"""
__slots__ = ['status']
_slot_types = ['robotnik_msgs/ElevatorStatus']
def __init__(self, *args, **kwds):
"""
Constructor. Any message fields that are implicitly/explicitly
set to None will be assigned a default value. The recommend
use is keyword arguments as this is more robust to future message
changes. You cannot mix in-order arguments and keyword arguments.
The available fields are:
status
:param args: complete set of field values, in .msg order
:param kwds: use keyword arguments corresponding to message field names
to set specific fields.
"""
if args or kwds:
super(SetElevatorFeedback, self).__init__(*args, **kwds)
# message fields cannot be None, assign default values for those that are
if self.status is None:
self.status = robotnik_msgs.msg.ElevatorStatus()
else:
self.status = robotnik_msgs.msg.ElevatorStatus()
def _get_types(self):
"""
internal API method
"""
return self._slot_types
def serialize(self, buff):
"""
serialize message into buffer
:param buff: buffer, ``StringIO``
"""
try:
_x = self.status.state
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.Struct('<I%ss'%length).pack(length, _x))
_x = self.status.position
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.Struct('<I%ss'%length).pack(length, _x))
_x = self.status.height
buff.write(_get_struct_f().pack(_x))
except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))
def deserialize(self, str):
"""
unpack serialized message in str into this message instance
:param str: byte array of serialized message, ``str``
"""
codecs.lookup_error("rosmsg").msg_type = self._type
try:
if self.status is None:
self.status = robotnik_msgs.msg.ElevatorStatus()
end = 0
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.status.state = str[start:end].decode('utf-8', 'rosmsg')
else:
self.status.state = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.status.position = str[start:end].decode('utf-8', 'rosmsg')
else:
self.status.position = str[start:end]
start = end
end += 4
(self.status.height,) = _get_struct_f().unpack(str[start:end])
return self
except struct.error as e:
raise genpy.DeserializationError(e) # most likely buffer underfill
def serialize_numpy(self, buff, numpy):
"""
serialize message with numpy array types into buffer
:param buff: buffer, ``StringIO``
:param numpy: numpy python module
"""
try:
_x = self.status.state
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.Struct('<I%ss'%length).pack(length, _x))
_x = self.status.position
length = len(_x)
if python3 or type(_x) == unicode:
_x = _x.encode('utf-8')
length = len(_x)
buff.write(struct.Struct('<I%ss'%length).pack(length, _x))
_x = self.status.height
buff.write(_get_struct_f().pack(_x))
except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))
def deserialize_numpy(self, str, numpy):
"""
unpack serialized message in str into this message instance using numpy for array types
:param str: byte array of serialized message, ``str``
:param numpy: numpy python module
"""
codecs.lookup_error("rosmsg").msg_type = self._type
try:
if self.status is None:
self.status = robotnik_msgs.msg.ElevatorStatus()
end = 0
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.status.state = str[start:end].decode('utf-8', 'rosmsg')
else:
self.status.state = str[start:end]
start = end
end += 4
(length,) = _struct_I.unpack(str[start:end])
start = end
end += length
if python3:
self.status.position = str[start:end].decode('utf-8', 'rosmsg')
else:
self.status.position = str[start:end]
start = end
end += 4
(self.status.height,) = _get_struct_f().unpack(str[start:end])
return self
except struct.error as e:
raise genpy.DeserializationError(e) # most likely buffer underfill
_struct_I = genpy.struct_I
def _get_struct_I():
global _struct_I
return _struct_I
_struct_f = None
def _get_struct_f():
global _struct_f
if _struct_f is None:
_struct_f = struct.Struct("<f")
return _struct_f
| [
"nachocz@gmail.com"
] | nachocz@gmail.com |
5b53a36bee2036572206bbee9f1c7c467f6c0d83 | bc1b6df82b2d5897549399363f95ef4d05c9ac54 | /18B09784-04-06_a.py | 0ff99eda559d47b568b19743e8a98cafa162df08 | [] | no_license | Tananan-Jee/exp_ict_34 | 45945a11341c9a1b859693456d61f7aa21df979c | f582bd48c9c4532a41da7026b6e5c0046d2fa5ec | refs/heads/master | 2021-05-19T05:57:01.350274 | 2020-03-31T09:27:16 | 2020-03-31T09:27:16 | 251,556,749 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,336 | py | # -*- coding: utf-8 -*-
#ใใญใฐใฉใใณใฐ่จ่ช๏ผPython version 3.8.0
#ใณใณใใคใซๆนๆณ๏ผpython3 18B09784-04-06_a.py
#ๅฎ่กๆนๆณ๏ผใฟใผใใใซไธใง python3 18B09784-04-06_a.py ใๅฎ่ก
import math
import numpy as np
def print_array(array):
for i in range(len(array)):
print(''.join(map(str,array[i])))
def add(array1,array2):
array=''
x=''
for i in range(1,len(array1)-1):
if array1[i] != "[" or array1[i] != "]":
array = array + str((int(array1[i])+int(array2[i]))%2)
x= "[" + array + "]"
return x
def mul(array1,array2,f,len_f,deg):
#normal multiply
array=np.zeros(2*deg-1)
for i in range(1,len(array1)-1):
for j in range(1,len(array2)-1):
array[i+j-2]=(array[i+j-2]+(int(array2[j])*int(array1[i])))%2
#find remainder
array=array[::-1] #get result of array1*array2
f=f[::-1] #reverse f(string)
keep = array[:] #copy array to keep
for i in range(len(array)-len(f)+1):
if keep[i]== 1 or keep[i]== '1':
#normal add
a=''
for i in range(len(array)):
if i>=len(f):
a = a + str(int(keep[i])%2)
else:
a = a + str((int(keep[i])+int(f[i]))%2)
keep=a[:]
f=np.insert(f,0,0)
x=''
for j in range(-1,-deg-1,-1):
x=x+str(int(keep[j]))
y ="["+x+"]"
return y
def div(array1,array2,f,len_f,deg,all_ele):
if array2=='[000]':
return "error"
else:
for i in all_ele:
if mul(array2,i,f,len_f,deg)==array1:
return i
break
print("INPUT: RS256_encode_example.txt")
with open("RS256_encode_example.txt") as file:
lines = file.read().splitlines()
data=[]
for i in range(len(lines)):
for j in range(len(lines[i])):
if lines[i][j]=="=":
data.append(lines[i][j+1:])
break
n=int(data[1])
k=int(data[2])
print("n=" + str(n))
print("k=" + str(k))
#get value f
ele_f=data[0]
f=[] #make list of string ['1', '1', '0', '1']
len_f=0
for i in range(len(ele_f)):
if ele_f[i] == '0' or ele_f[i] == '1':
f.append(ele_f[i])
len_f+=1 #count element in f
f_int=[] #make array [1 1 0 1]
for i in range(len(ele_f)):
if ele_f[i] == '0' or ele_f[i] == '1':
f_int.append(int(ele_f[i]))
f_int = np.asarray(f_int) #change list to array
m=len_f-1 #length of each alpha
#make array of all alpha
all_alpha=[]
alpha='[01000000]'
for i in range(n):
if i==0:
x='[10000000]'
else:
x=mul(x,alpha,f,len_f,m)
all_alpha.append(x)
#get value u
a=[]
for z in range(5,6):
i = 0
while i != k*(m+2):
a.append(lines[z][i])
i+=1
u=[]
ele_u=a[:]
ele_u_in=[]
j=0
s=0
while s != k: #k element in one row
x=''
for i in range(j,m+j+2):
x=x+ele_u[i]
ele_u_in.append(x)
s+=1
j=m+j+2
u.append(ele_u_in)
U = np.array(u)
print("u=")
print_array(U)
#find c
c=[]
for j in range(n): #length c
ans='[00000000]'
for i in range(k):
if i==0:
keep = '[10000000]'
else:
keep = mul(keep,all_alpha[j],f,len_f,m)
keep_u = mul(keep,U[0][i],f,len_f,m)
ans = add(keep_u,ans)
c.append(ans)
C = np.array([c])
print("c=")
print_array(C)
| [
"noreply@github.com"
] | Tananan-Jee.noreply@github.com |
13df03b871310d1372d8dcd906428643b3748f60 | 482de18083520953c95120a9c421a8a08c0e5aee | /lib/common/wdr/task.py | 629f7b73d8fc481bf0d37058e655e9e5522115f2 | [
"Apache-2.0"
] | permissive | WDR/WDR | 9cbb7ef28e922db83849663fcc5cb9a8c1523b52 | 5e2576cbff2675f7338e31abd673cd761eeb802b | refs/heads/master | 2021-07-08T23:41:39.598603 | 2021-04-01T20:24:40 | 2021-04-01T20:24:40 | 4,683,359 | 32 | 28 | NOASSERTION | 2021-04-01T20:13:17 | 2012-06-16T08:46:16 | Python | UTF-8 | Python | false | false | 1,436 | py | import logging
import re
import wdr
(
AdminApp, AdminConfig, AdminControl, AdminTask, Help
) = wdr.WsadminObjects().getObjects()
logger = logging.getLogger('wdr.task')
_listPattern = re.compile(r'\[(.*)\]')
_itemPattern = re.compile(
r'(?<=\[)'
r'(?P<key>\S+)'
r'\s+'
r'(?:'
+ (
r''
+ r'\[(?P<valueQuoted>[^\]]+)\]'
+ r'|'
+ r'(?P<valueNotQuoted>[^ \[\]]+)'
) +
r')'
)
def adminTaskAsDict(adminTaskList):
result = {}
for (key, valueQuoted, valueNotQuoted) in _itemPattern.findall(
adminTaskList
):
result[key] = valueQuoted or valueNotQuoted
return result
def adminTaskAsDictList(adminTaskListOfLists):
result = []
for l in adminTaskListOfLists.splitlines():
listMatcher = _listPattern.match(l)
if listMatcher:
result.append(adminTaskAsDict(listMatcher.group(1)))
return result
def adminTaskAsListOfLists(adminTaskList):
result = []
for (key, valueQuoted, valueNotQuoted) in _itemPattern.findall(
adminTaskList
):
result.append([key, valueQuoted or valueNotQuoted])
return result
def adminTaskAsListOfListsList(adminTaskListOfLists):
result = []
for l in adminTaskListOfLists.splitlines():
listMatcher = _listPattern.match(l)
if listMatcher:
result.append(adminTaskAsListOfLists(listMatcher.group(1)))
return result
| [
"mplonka@gmail.com"
] | mplonka@gmail.com |
65d840aff58fbb8be0d0880fe19a3af7eccfdc7a | 9e57a2776dc5b91d29eb3c5370934c3ef5cb1e74 | /sudukoSOLVER/newBoard.py | 608dd885df36355a48643300ad96b79cd16350a2 | [] | no_license | steffanp98/Freelance | 38e42d7ade9051d215e417d170bf410df45a41b4 | bf49597a2329d781ca681f1976f19c2578cde515 | refs/heads/main | 2023-07-13T00:47:37.336942 | 2021-08-05T23:29:48 | 2021-08-05T23:29:48 | 393,140,478 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,263 | py | class board(object):
def __init__(self,board_file):
self.board = self.__create_board(board_file)
def __create_board(self,board_file):
#create the matrix data struct for the board
board = []
#iterate over each line of the matrix
for line in board_file:
line = line.strip()
#error check the board matches with the suduko rules
#9 lines in total 9x9 grid / 3x3 boxes
#line should not be shorter/longer than 9 char
#char should only be int
if len(line) != 9:
board = []
print("Error with the amount of characters generated by the board")
#creating a list for the board
board.append([])
#iterate over the char
for c in line:
#raise an error if char != int
if not c.isdigit():
print("valid characters in suduko are 1-9")
board[-1].append(int(c))
# error checking for number of lines on board
if len(board) != 9:
print('Error with the amount of lines on the board')
return board
#class sudukoGame(object):
print(board) | [
"steffan.phillips@hotmail.com"
] | steffan.phillips@hotmail.com |
2ffa0b789fd7bedb02f8cc8683ee87eb0cdbb113 | 1bf9f6b0ef85b6ccad8cb029703f89039f74cedc | /src/spring/azext_spring/vendored_sdks/appplatform/v2022_01_01_preview/aio/operations/_build_service_agent_pool_operations.py | 3f303bb5a4713c0abb643d1787ec91d545c4e585 | [
"MIT",
"LicenseRef-scancode-generic-cla"
] | permissive | VSChina/azure-cli-extensions | a1f4bf2ea4dc1b507618617e299263ad45213add | 10b7bfef62cb080c74b1d59aadc4286bd9406841 | refs/heads/master | 2022-11-14T03:40:26.009692 | 2022-11-09T01:09:53 | 2022-11-09T01:09:53 | 199,810,654 | 4 | 2 | MIT | 2020-07-13T05:51:27 | 2019-07-31T08:10:50 | Python | UTF-8 | Python | false | false | 22,671 | py | # pylint: disable=too-many-lines
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload
from urllib.parse import parse_qs, urljoin, urlparse
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import (
ClientAuthenticationError,
HttpResponseError,
ResourceExistsError,
ResourceNotFoundError,
map_error,
)
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.core.tracing.decorator_async import distributed_trace_async
from azure.core.utils import case_insensitive_dict
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models as _models
from ..._vendor import _convert_request
from ...operations._build_service_agent_pool_operations import (
build_get_request,
build_list_request,
build_update_put_request,
)
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class BuildServiceAgentPoolOperations:
"""
.. warning::
**DO NOT** instantiate this class directly.
Instead, you should access the following operations through
:class:`~azure.mgmt.appplatform.v2022_01_01_preview.aio.AppPlatformManagementClient`'s
:attr:`build_service_agent_pool` attribute.
"""
models = _models
def __init__(self, *args, **kwargs) -> None:
input_args = list(args)
self._client = input_args.pop(0) if input_args else kwargs.pop("client")
self._config = input_args.pop(0) if input_args else kwargs.pop("config")
self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer")
self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer")
@distributed_trace
def list(
self, resource_group_name: str, service_name: str, build_service_name: str, **kwargs: Any
) -> AsyncIterable["_models.BuildServiceAgentPoolResource"]:
"""List build service agent pool.
:param resource_group_name: The name of the resource group that contains the resource. You can
obtain this value from the Azure Resource Manager API or the portal. Required.
:type resource_group_name: str
:param service_name: The name of the Service resource. Required.
:type service_name: str
:param build_service_name: The name of the build service resource. Required.
:type build_service_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either BuildServiceAgentPoolResource or the result of
cls(response)
:rtype:
~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.appplatform.v2022_01_01_preview.models.BuildServiceAgentPoolResource]
:raises ~azure.core.exceptions.HttpResponseError:
"""
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-01-01-preview")) # type: str
cls = kwargs.pop("cls", None) # type: ClsType[_models.BuildServiceAgentPoolResourceCollection]
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}) or {})
def prepare_request(next_link=None):
if not next_link:
request = build_list_request(
resource_group_name=resource_group_name,
service_name=service_name,
build_service_name=build_service_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.list.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
else:
# make call to next link with the client's api-version
_parsed_next_link = urlparse(next_link)
_next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query))
_next_request_params["api-version"] = self._config.api_version
request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
request.method = "GET"
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize("BuildServiceAgentPoolResourceCollection", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(get_next, extract_data)
list.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/buildServices/{buildServiceName}/agentPools"} # type: ignore
@distributed_trace_async
async def get(
self, resource_group_name: str, service_name: str, build_service_name: str, agent_pool_name: str, **kwargs: Any
) -> _models.BuildServiceAgentPoolResource:
"""Get build service agent pool.
:param resource_group_name: The name of the resource group that contains the resource. You can
obtain this value from the Azure Resource Manager API or the portal. Required.
:type resource_group_name: str
:param service_name: The name of the Service resource. Required.
:type service_name: str
:param build_service_name: The name of the build service resource. Required.
:type build_service_name: str
:param agent_pool_name: The name of the build service agent pool resource. Required.
:type agent_pool_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: BuildServiceAgentPoolResource or the result of cls(response)
:rtype: ~azure.mgmt.appplatform.v2022_01_01_preview.models.BuildServiceAgentPoolResource
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-01-01-preview")) # type: str
cls = kwargs.pop("cls", None) # type: ClsType[_models.BuildServiceAgentPoolResource]
request = build_get_request(
resource_group_name=resource_group_name,
service_name=service_name,
build_service_name=build_service_name,
agent_pool_name=agent_pool_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.get.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize("BuildServiceAgentPoolResource", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/buildServices/{buildServiceName}/agentPools/{agentPoolName}"} # type: ignore
async def _update_put_initial(
self,
resource_group_name: str,
service_name: str,
build_service_name: str,
agent_pool_name: str,
agent_pool_resource: Union[_models.BuildServiceAgentPoolResource, IO],
**kwargs: Any
) -> _models.BuildServiceAgentPoolResource:
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-01-01-preview")) # type: str
content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
cls = kwargs.pop("cls", None) # type: ClsType[_models.BuildServiceAgentPoolResource]
content_type = content_type or "application/json"
_json = None
_content = None
if isinstance(agent_pool_resource, (IO, bytes)):
_content = agent_pool_resource
else:
_json = self._serialize.body(agent_pool_resource, "BuildServiceAgentPoolResource")
request = build_update_put_request(
resource_group_name=resource_group_name,
service_name=service_name,
build_service_name=build_service_name,
agent_pool_name=agent_pool_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
content_type=content_type,
json=_json,
content=_content,
template_url=self._update_put_initial.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize("BuildServiceAgentPoolResource", pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize("BuildServiceAgentPoolResource", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_update_put_initial.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/buildServices/{buildServiceName}/agentPools/{agentPoolName}"} # type: ignore
@overload
async def begin_update_put(
self,
resource_group_name: str,
service_name: str,
build_service_name: str,
agent_pool_name: str,
agent_pool_resource: _models.BuildServiceAgentPoolResource,
*,
content_type: str = "application/json",
**kwargs: Any
) -> AsyncLROPoller[_models.BuildServiceAgentPoolResource]:
"""Create or update build service agent pool.
:param resource_group_name: The name of the resource group that contains the resource. You can
obtain this value from the Azure Resource Manager API or the portal. Required.
:type resource_group_name: str
:param service_name: The name of the Service resource. Required.
:type service_name: str
:param build_service_name: The name of the build service resource. Required.
:type build_service_name: str
:param agent_pool_name: The name of the build service agent pool resource. Required.
:type agent_pool_name: str
:param agent_pool_resource: Parameters for the update operation. Required.
:type agent_pool_resource:
~azure.mgmt.appplatform.v2022_01_01_preview.models.BuildServiceAgentPoolResource
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either BuildServiceAgentPoolResource or the
result of cls(response)
:rtype:
~azure.core.polling.AsyncLROPoller[~azure.mgmt.appplatform.v2022_01_01_preview.models.BuildServiceAgentPoolResource]
:raises ~azure.core.exceptions.HttpResponseError:
"""
@overload
async def begin_update_put(
self,
resource_group_name: str,
service_name: str,
build_service_name: str,
agent_pool_name: str,
agent_pool_resource: IO,
*,
content_type: str = "application/json",
**kwargs: Any
) -> AsyncLROPoller[_models.BuildServiceAgentPoolResource]:
"""Create or update build service agent pool.
:param resource_group_name: The name of the resource group that contains the resource. You can
obtain this value from the Azure Resource Manager API or the portal. Required.
:type resource_group_name: str
:param service_name: The name of the Service resource. Required.
:type service_name: str
:param build_service_name: The name of the build service resource. Required.
:type build_service_name: str
:param agent_pool_name: The name of the build service agent pool resource. Required.
:type agent_pool_name: str
:param agent_pool_resource: Parameters for the update operation. Required.
:type agent_pool_resource: IO
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either BuildServiceAgentPoolResource or the
result of cls(response)
:rtype:
~azure.core.polling.AsyncLROPoller[~azure.mgmt.appplatform.v2022_01_01_preview.models.BuildServiceAgentPoolResource]
:raises ~azure.core.exceptions.HttpResponseError:
"""
@distributed_trace_async
async def begin_update_put(
self,
resource_group_name: str,
service_name: str,
build_service_name: str,
agent_pool_name: str,
agent_pool_resource: Union[_models.BuildServiceAgentPoolResource, IO],
**kwargs: Any
) -> AsyncLROPoller[_models.BuildServiceAgentPoolResource]:
"""Create or update build service agent pool.
:param resource_group_name: The name of the resource group that contains the resource. You can
obtain this value from the Azure Resource Manager API or the portal. Required.
:type resource_group_name: str
:param service_name: The name of the Service resource. Required.
:type service_name: str
:param build_service_name: The name of the build service resource. Required.
:type build_service_name: str
:param agent_pool_name: The name of the build service agent pool resource. Required.
:type agent_pool_name: str
:param agent_pool_resource: Parameters for the update operation. Is either a model type or a IO
type. Required.
:type agent_pool_resource:
~azure.mgmt.appplatform.v2022_01_01_preview.models.BuildServiceAgentPoolResource or IO
:keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
Default value is None.
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either BuildServiceAgentPoolResource or the
result of cls(response)
:rtype:
~azure.core.polling.AsyncLROPoller[~azure.mgmt.appplatform.v2022_01_01_preview.models.BuildServiceAgentPoolResource]
:raises ~azure.core.exceptions.HttpResponseError:
"""
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-01-01-preview")) # type: str
content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None)) # type: Optional[str]
cls = kwargs.pop("cls", None) # type: ClsType[_models.BuildServiceAgentPoolResource]
polling = kwargs.pop("polling", True) # type: Union[bool, AsyncPollingMethod]
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token = kwargs.pop("continuation_token", None) # type: Optional[str]
if cont_token is None:
raw_result = await self._update_put_initial( # type: ignore
resource_group_name=resource_group_name,
service_name=service_name,
build_service_name=build_service_name,
agent_pool_name=agent_pool_name,
agent_pool_resource=agent_pool_resource,
api_version=api_version,
content_type=content_type,
cls=lambda x, y, z: x,
headers=_headers,
params=_params,
**kwargs
)
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize("BuildServiceAgentPoolResource", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True:
polling_method = cast(
AsyncPollingMethod,
AsyncARMPolling(lro_delay, lro_options={"final-state-via": "azure-async-operation"}, **kwargs),
) # type: AsyncPollingMethod
elif polling is False:
polling_method = cast(AsyncPollingMethod, AsyncNoPolling())
else:
polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_update_put.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.AppPlatform/Spring/{serviceName}/buildServices/{buildServiceName}/agentPools/{agentPoolName}"} # type: ignore
| [
"noreply@github.com"
] | VSChina.noreply@github.com |
fe127ffd20fb046fece7f6a04ab6c4c45564fef9 | 8b9e8ea570070db4b9b7e75c1b818d4518aff81a | /main.py | fdf8710a5bf752e72b0b345efef59359146afbd2 | [] | no_license | cygong/lstm_simple_sinus | eb6db1449946c567b044b4a890a555a03f1ceac3 | 2e9cb32317acf8afe00663b2ed9b35f1f4107542 | refs/heads/main | 2023-03-27T09:21:05.473240 | 2021-03-29T03:26:11 | 2021-03-29T03:26:11 | 352,504,474 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,458 | py | from model import *
from generate_data import *
from torch.utils.data import DataLoader
import matplotlib.pyplot as plt
def main():
# generate_data
dataset_train,dataset_test = generate_data()
#neural network
model = neural_network()
train(model,dataset_train)
test(model,dataset_test)
def train(model,dataset_train):
#dataloader
train_loader = DataLoader(dataset_train,shuffle=True,batch_size=256)
# optimizer , loss
criterion = torch.nn.MSELoss()
optimizer = torch.optim.Adam(model.parameters(),lr=0.0001)
epochs = 1500
#training loop
for i in range(epochs):
for j,data in enumerate(train_loader):
x = data[:][0] # batch * time (256 * 10)
x = x.view(-1,10,1) # batch * time * input_size (256 * 10 * 1)
y_pred = model(x) # batch * output_size (256 * 1)
y_pred = y_pred.view(-1) # batch (256)
loss = criterion(y_pred,data[:][1])
loss.backward()
optimizer.step()
if i%100 == 0:
print(i,"th iteration : ",loss)
def test(model,dataset_test):
#test set actual vs predicted
test_pred = model(dataset_test[:][0].view(-1,10,1)).view(-1)
plt.figure()
plt.plot(test_pred.detach().numpy(),label='predicted')
plt.plot(dataset_test[:][1].view(-1),label='original')
plt.legend()
plt.show()
if __name__ == "__main__":
main() | [
"noreply@github.com"
] | cygong.noreply@github.com |
93bf069315275af00c1b6b2ba64ba2d59d2877ca | 201e9473ee35b00b7fffe9e59724b3c98c986add | /utils.py | aa001cbad72eec8289ec16376c3850c627f6cb63 | [] | no_license | Harshp1802/pos_negation | 810e1bb1ca11c033efb2ea2eaa00475ea9c0a699 | b74f58e531c9591f59193674e86e211b9b15bac2 | refs/heads/master | 2023-06-27T01:20:31.366975 | 2021-06-19T17:34:12 | 2021-06-19T17:34:12 | 371,942,316 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,748 | py | from sklearn.metrics import f1_score
import torch
# def create_vocab(training_sentences, training_POS):
# words, tags = set([]), set([])
# for s in training_sentences:
# for w in s:
# words.add(w.lower())
# for ts in training_POS:
# for t in ts:
# tags.add(t)
# word2index = {w: i + 2 for i, w in enumerate(list(words))}
# word2index['-PAD-'] = 0 # The special value used for padding
# word2index['-OOV-'] = 1 # The special value used for OOVs
# tag2index = {t: i + 1 for i, t in enumerate(list(tags))}
# tag2index['-PAD-'] = 0 # The special value used to padding
# return words, tags, word2index, tag2index
# def convert2index(sentences,word2index,POS,tag2index):
# sentences_X = []
# tags_y = []
# for s in sentences:
# s_int = []
# for w in s:
# try:
# s_int.append(word2index[w.lower()])
# except KeyError:
# s_int.append(word2index['-OOV-'])
# sentences_X.append(s_int)
# for s in POS:
# tags_y.append([tag2index[t] for t in s])
# return sentences_X, tags_y
def f1_scope(y_true, y_pred, level = 'scope'): #This is for gold cue annotation scope, thus the precision is always 1.
if level == 'token':
print(f1_score([i for i in j for j in y_true], [i for i in j for j in y_pred]))
elif level == 'scope':
tp = 0
fn = 0
fp = 0
for y_t, y_p in zip(y_true, y_pred):
if y_t == y_p:
tp+=1
else:
fn+=1
prec = 1
rec = tp/(tp+fn)
print(f"Precision: {prec}")
print(f"Recall: {rec}")
print(f"F1 Score: {2*prec*rec/(prec+rec)}")
def categorical_accuracy(preds, y, tag_pad_idx,listed=False):
"""
Returns accuracy per batch, i.e. if you get 8/10 right, this returns 0.8, NOT 8
"""
if(not listed):
max_preds = preds.argmax(dim = 1, keepdim = True) # get the index of the max probability
else:
max_preds = preds
non_pad_elements = (y != tag_pad_idx).nonzero()
correct = max_preds[non_pad_elements].squeeze(1).eq(y[non_pad_elements])
return correct.sum() / torch.FloatTensor([y[non_pad_elements].shape[0]]).to(torch.device('cuda'))
def epoch_time(start_time, end_time):
elapsed_time = end_time - start_time
elapsed_mins = int(elapsed_time / 60)
elapsed_secs = int(elapsed_time - (elapsed_mins * 60))
return elapsed_mins, elapsed_secs
def f1(preds, y, tag_pad_idx, cls,listed=False):
if(not listed):
max_preds = preds.argmax(dim = 1, keepdim = True) # get the index of the max probability
else:
max_preds = preds
non_pad_elements = (y != tag_pad_idx).nonzero()
# correct = max_preds[non_pad_elements].squeeze(1).eq(y[non_pad_elements])
y_hat = max_preds[non_pad_elements].squeeze(1)
y_real = y[non_pad_elements]
counter =dict(zip(* torch.unique(y_hat,return_counts=True)))
for k,v in list(counter.items()):
counter[k.item()]=v.item()
# counter = counter.to(torch.device('cuda'))
try:
if(counter[cls] != 0):
P = len(y_real[(y_real == y_hat) & (y_real == cls) & (y_hat == cls)])/counter[cls]
except:
P = 0.001
print("P",P)
pass
counter = dict(zip(*torch.unique(y_real,return_counts=True)))
for k,v in list(counter.items()):
counter[k.item()]=v.item()
# counter = counter.to(torch.device('cuda'))
try:
if(counter[cls] != 0):
R = len(y_real[(y_real == y_hat) & (y_real == cls) & (y_hat == cls)])/counter[cls]
except:
R = 0.001
print("R",R)
pass
return 2*P*R/(P+R)
| [
"45335740+Harshp1802@users.noreply.github.com"
] | 45335740+Harshp1802@users.noreply.github.com |
aa5e3ad78d8d478ac37adf19c49d550439526cd0 | 9f177297a490a3b2604bda027eac87deccb22ade | /convert_html.py | 1dbc9e39c4657d4ad6e8d32f3cfbfc71b3cdf869 | [] | no_license | lin826/Janet | a27cdd5043f18b53911fdec4a3d6fe1613c0b5d1 | cbb59cdeddb57bad44d57060dfd4bd402f0fd9df | refs/heads/master | 2021-12-14T16:45:40.745307 | 2021-12-13T05:00:06 | 2021-12-13T05:00:06 | 88,283,857 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,037 | py | INPUT_TXT = 'data/doc_origin.docx.txt'
OUTPUT_HTML = 'test.html'
str_rating = "<div class='ui huge star rating'></div>"
str_solved = "<button class='ui primary button'>Solved</button>"
full_page = ""
def write_content():
global full_page
with open(INPUT_TXT,'r') as file:
c = file.readline()
while(c=='\n'):
c = file.readline()
full_page += "<h2 id='title' align='center'>"+c+"</h2>\n"
full_page += "<div class='half mCustomScrollbar' data-mcs-theme='inset-2-dark'>\n\t<p id='content'>"
while(c):
c = file.readline()
c.replace('\n','<br>')
full_page += c+'<br>'
full_page += "</p>\n</div>"
full_page += "</div>\n\t</td>\n\t<td>\n\t<div id='history'>"
return 0
with open('data/index_prefix.html','r') as file:
c = file.read()
full_page += c
write_content()
with open('data/index_postfix.html','r') as file:
c = file.read()
full_page += c
with open(OUTPUT_HTML,'w') as file:
file.write(full_page)
| [
"liniju826@gmail.com"
] | liniju826@gmail.com |
0271411a63fff75c6ccceb030d69175bd3075563 | cbf9f600374d7510988632d7dba145c8ff0cd1f0 | /abc/207/a.py | ab70569c2994dfed4061f58d6ae22f8216cea250 | [] | no_license | sakakazu2468/AtCoder_py | d0945d03ad562474e40e413abcec39ded61e6855 | 34bdf39ee9647e7aee17e48c928ce5288a1bfaa5 | refs/heads/master | 2022-04-27T18:32:28.825004 | 2022-04-21T07:27:00 | 2022-04-21T07:27:00 | 225,844,364 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 62 | py | a, b, c = map(int, input().split())
print(max(a+b, b+c, c+a))
| [
"sakakazu2468@icloud.com"
] | sakakazu2468@icloud.com |
f4daf147759bbfcfded6daaf2aec856edc7bed2d | 1d9706595c9d2a18c7de072804f8e566eef219fd | /utils/base_utils.py | 6aef92b19c7a6b3ae354e538d74f1f067725f6a2 | [] | no_license | bimo12138/tornado_demo | 506e19337b918d7644b218303d2c5a51efe629ea | bc5fe2f44e5cf1a78a1b6faf1161e1410c948dd3 | refs/heads/master | 2022-11-15T08:44:12.000690 | 2020-07-18T05:15:30 | 2020-07-18T05:15:30 | 280,369,359 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,659 | py | """
@author: 13716
@date-time: 2019/7/1-19:17
@ide: PyCharm
@name: base_utils.py
"""
import hashlib
import uuid
from model.models import Students, Teachers
import time
import base64
import time
import hmac
def hashed(text):
return hashlib.md5(text.encode()).hexdigest()
class GetUuid(object):
@classmethod
def get_time_uuid(cls):
return uuid.uuid1().hex
@classmethod
def get_name_uuid(cls, text):
return uuid.uuid3(uuid.NAMESPACE_DNS, text).hex
@classmethod
def get_name_uuid_5(cls, text):
return uuid.uuid5(uuid.NAMESPACE_DNS, text).hex
@classmethod
def get_random_uuid(cls):
return uuid.uuid4().hex
class Token(object):
# ๅ็ฑป 0 ๅญฆ็ 1 ่ๅธ
@classmethod
def get_token(cls, classify, no):
cls.key = b"rsmrtxcsnmzj.zyxczpc.hwjz"
header = {
"alg": "HS256"
}
b_header = base64.b64encode(str(header).encode("utf-8"))
if classify == 0:
payload = {
"iss": "็ฌๅขจ",
"iat": Students.get_last_time(no)
}
b_pay_load = base64.b64encode(str(payload).encode("utf-8"))
code = hmac.new(cls.key, b_header + b"." + b_pay_load, digestmod="MD5")
return code.hexdigest()
elif classify == 1:
payload = {
"iss": "็ฌๅขจ",
"iat": Teachers.get_last_time(no)
}
b_pay_load = base64.b64encode(str(payload).encode("utf-8"))
code = hmac.new(cls.key, b_header + b"." + b_pay_load, digestmod="MD5")
return code.hexdigest()
@classmethod
def check_token(cls, token, classify, no):
cls.key = b"rsmrtxcsnmzj.zyxczpc.hwjz"
header = {
"alg": "HS256"
}
b_header = base64.b64encode(str(header).encode("utf-8"))
if classify == 0:
payload = {
"iss": "็ฌๅขจ",
"iat": Students.get_last_time(no)
}
b_pay_load = base64.b64encode(str(payload).encode("utf-8"))
code = hmac.new(cls.key, b_header + b"." + b_pay_load, digestmod="MD5")
return code.hexdigest() == token
elif classify == 1:
payload = {
"iss": "็ฌๅขจ",
"iat": Teachers.get_last_time(no)
}
b_pay_load = base64.b64encode(str(payload).encode("utf-8"))
code = hmac.new(cls.key, b_header + b"." + b_pay_load, digestmod="MD5")
return code.hexdigest() == token
def authenticate(username, password):
if username and password:
password_data = Students.get_password(username)
if password_data and password_data == hashed(password):
return True
else:
return False
def teacher_authenticate(username, password):
if username and password:
if hashed(password) == Teachers.get_password(username):
return True
else:
return False
class Result(object):
@classmethod
def not_this_message(cls, message=""):
message_json = {
"code": 400,
"message": message
}
return message_json
@classmethod
def success(cls, message):
message_json = {
"code": 200,
"message": message
}
return message_json
@classmethod
def already_register(cls, message):
message_json = {
"code": 416,
"message": message
}
return message_json
@classmethod
def params_error(cls, message):
message_json = {
"code": 403,
"message": message
}
return message_json
def time_load(raw_time):
week_day, mouth, day, year, de_time, zone, desc = raw_time.split(" ")
hour, minute, second = de_time.split(":")
return " ".join([year, mouth, day, hour, minute, second, week_day])
class TimeProcess(object):
"""
ๅญๅ
ฅๆฐๆฎๅบไฝฟ็จ ๆถ้ดๆณ
่ฏปๅไนๅ ่ฎพ็ฝฎไธไธชๅปๅบฆๆจกๅผ๏ผไฝๆฏ่่ฏ้ช่ฏไพ็ถไฝฟ็จๆถ้ดๆณ
่ฎฐไฝๆถ้ดๆณ่ฆ่ฟ่กๅๆด
"Tue Jul 02 2019 08:00:00 GMT+0800 (ไธญๅฝๆ ๅๆถ้ด)"
"""
@classmethod
def save_to_table(cls, raw_string):
trans_string = time_load(raw_string)
decode_time = "%Y %b %d %H %M %S %a"
timestamp = int(time.mktime(time.strptime(trans_string, decode_time)))
return timestamp
@classmethod
def to_load(cls, timestamp):
return str(time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(int(timestamp))))
| [
"1371639183@qq.com"
] | 1371639183@qq.com |
c43e8a88c499ba3fea713b72bb54b40182ee466a | 44ee0d902a9cd7d321e4ed7c0962ef9ffb978e10 | /tile_editor.py | a930432d2c4ff789009cb1a9b9e3a338b0b081a1 | [] | no_license | aashishthy/Tile_editor | f085238fdb2e63bbf5d83d686185af99faf6627c | d177c1fecdeb8055f4a607236f738de2a3af29db | refs/heads/master | 2020-03-06T21:04:55.846952 | 2018-03-28T02:08:52 | 2018-03-28T02:08:52 | 127,069,664 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 15,911 | py | """
Following are the modules used for creating the tile editor
"""
import pygame
import PIL.Image
from tkinter.filedialog import askopenfilename
from tkinter.filedialog import asksaveasfilename
from tkinter import messagebox
from tkinter import *
import os
import glob
from shutil import copy
from te_constants import *
"""
This dictionary stores the mapping of the tiles that can be used to create the map
"""
tile_dict = {}
"""
This dictionary stores the mapping of all the tiles textures with their corresponding textures
"""
tile_map = {}
"""
This dictionary holds all the tile data that are used in this map. This will be written into the props file
"""
tile_map_prop = {}
"""
Initialize all pygame modules
"""
pygame.init()
"""
Set the screen size to width * height
"""
screen = pygame.display.set_mode((width, height))
"""
This is the font file that is used for the menu and strength button labels
"""
font = pygame.font.Font("PressStart2P.ttf", 8)
"""
This is the font file that is used for the header files for the menu and strength and the usage text
"""
menu_font = pygame.font.Font("PressStart2P.ttf", 16)
def show_grid():
    """
    Draw the editable map region as a grid of cells.

    Clears the map area to black, then paints one (block_size - 1)-pixel
    square per cell so a one-pixel black gap separates neighbouring cells.
    """
    pygame.draw.rect(screen, black,
                     pygame.Rect(0, 0, width, total_blocks_y * block_size))
    for row in range(total_blocks_y):
        for col in range(total_blocks_x):
            cell = pygame.Rect(col * block_size, row * block_size,
                               block_size - 1, block_size - 1)
            pygame.draw.rect(screen, greyish_white, cell)
def load_tile_set():
    """
    Blit every PNG found under Tiles/ into the palette strip and register it.

    Fills three module-level lookup tables:
      * tile_dict      -- "x:y" palette position -> scaled tile Surface
      * tile_map       -- scaled tile Surface -> 1-based tile index
      * tile_map_prop  -- 1-based tile index -> source file path
    """
    global tile_dict, current_tile, tile_map, tile_map_prop
    x, y = 0, tile_location
    # Blank the palette strip before drawing the tiles onto it.
    pygame.draw.rect(screen, black, pygame.Rect(x, y - 5, width, (block_size * 8)))
    next_index = 1
    for path in glob.glob('Tiles/*.png'):
        tile = pygame.transform.scale(pygame.image.load(path),
                                      (block_size, block_size))
        # Wrap to the next palette row once the current one is full.
        if x + block_size > width:
            x = 0
            y += block_size
        screen.blit(tile, (x, y))
        tile_map[tile] = next_index
        tile_map_prop[next_index] = path
        tile_dict[str(x) + ':' + str(y)] = tile
        next_index += 1
        x += block_size
    pygame.display.flip()
def text_surface(message_text, text_font, color):
    """
    Render *message_text* with *text_font* in *color* (antialiased).

    Returns a (surface, rect) pair, where rect is the rendered surface's
    bounding rectangle.
    """
    rendered = text_font.render(message_text, True, color)
    return rendered, rendered.get_rect()
def display_menu_text():
    """
    Paint the static labels: the usage help line plus the two menu headers.
    """
    labels = (
        (usage_text, 450, usage_text_location),
        (menu_text[0], 170, button_text_location),
        (menu_text[1], 790, button_text_location),
    )
    for label, x, y in labels:
        display_text(label, x, y, menu_font, white)
def display_text(message_text, x, y, text_font, color):
    """
    Render *message_text* with *text_font*/*color*, centre it at (x, y),
    blit it onto the screen and refresh the display.
    """
    surface, rect = text_surface(message_text, text_font, color)
    rect.center = (x, y)
    screen.blit(surface, rect)
    pygame.display.update()
def load_buttons():
    """
    Redraw the full control strip below the map: the usage/menu header
    text, the menu buttons and the strength buttons, then refresh the
    display.
    """
    display_menu_text()
    create_menu_buttons()
    create_strength_buttons()
    pygame.display.update()
def create_strength_buttons():
    """Draw the three strength buttons right-to-left from the window's right
    edge, highlighting the currently selected strength in dark green."""
    x = width - (block_size * 2) - 10
    for slot in range(3):
        caption = strength_button_text[slot]
        fill = dark_green if slot == current_strength else white
        pygame.draw.rect(screen, fill, (x, button_location, block_size * 2, block_size))
        # Each word of the caption goes on its own line, 10px apart.
        for line_no, word in enumerate(caption.split(" ")):
            display_text(word, x + block_size, button_location + (block_size / 2) + line_no * 10, font, black)
        x -= (block_size * 2) + 10
def create_menu_buttons():
    """Draw the five white menu buttons left-to-right along the button row."""
    x = 0
    for slot in range(5):
        caption = menu_button_text[slot]
        pygame.draw.rect(screen, white, (x, button_location, block_size * 2, block_size))
        # Each word of the caption goes on its own line, 10px apart.
        for line_no, word in enumerate(caption.split(" ")):
            display_text(word, x + block_size, button_location + (block_size / 3) + line_no * 10, font, black)
        x += (block_size * 2) + 10
def in_tile_menu(mouse_y):
    """Return True when *mouse_y* falls inside the tile palette strip."""
    return tile_location - 10 < mouse_y < button_text_location
def in_button_menu(mouse_y):
    """Return True when *mouse_y* falls inside the button row."""
    return mouse_y >= button_location
def in_map_area(mouse_y):
    """Return True when *mouse_y* falls inside the editable map area."""
    return mouse_y < usage_text_location - 10
def menu_buttons_clicked(mouse_x):
    """Run the handler for whichever menu button lies under *mouse_x*.

    At most one handler fires (the first whose hit-test matches).
    """
    handlers = (
        (button1, open_fd),
        (button2, save_map),
        (button3, load_map),
        (button4, reset_map),
        (button5, exit_tile_editor),
    )
    for hit_test, action in handlers:
        if hit_test(mouse_x):
            action()
            break
def exit_tile_editor():
    """Ask the user to confirm, and shut pygame down if they agree."""
    confirmed = pop_up_msg("Exit Map ?")
    if confirmed:
        pygame.quit()
def reset_map():
    """Ask the user to confirm, and wipe the current map if they agree."""
    confirmed = pop_up_msg("Create new Map ?")
    if confirmed:
        clear_map()
def pop_up_msg(message):
    """Show a topmost yes/no dialog titled *message*.

    Returns True iff the user clicked "Yes".
    """
    dialog_root = Tk()
    dialog_root.withdraw()
    dialog_root.wm_attributes('-topmost', 1)
    choice = messagebox.askquestion(message, "Are you sure you want to - " + message)
    dialog_root.focus()
    dialog_root.destroy()
    return choice == 'yes'
def clear_map():
    """Reset every cell of the map to the empty '0:0' marker and redraw the
    background grid."""
    global map_array
    map_array = [['0:0' for _ in range(total_blocks_x)] for _ in range(total_blocks_y)]
    show_grid()
    pygame.display.update()
def button_clicked(mouse_x):
    """
    This function calls functions which detect which buttons were clicked.

    Both groups are probed: the menu buttons (left side) and the strength
    buttons (right side); each group does its own hit-testing on mouse_x.
    """
    menu_buttons_clicked(mouse_x)
    strength_buttons_clicked(mouse_x)
def strength_buttons_clicked(mouse_x):
    """Set current_strength to 0, 1 or 2 when a strength button is hit,
    then redraw all buttons so the highlight follows the selection."""
    global current_strength
    bands = (
        ((width - (block_size * 2) - 10), (width - 10), 0),
        ((width - (block_size * 4)) - 20, (width - block_size * 2) - 20, 1),
        ((width - (block_size * 6)) - 30, (width - (block_size * 4)) - 30, 2),
    )
    for low, high, strength in bands:
        if low <= mouse_x <= high:
            current_strength = strength
            break
    load_buttons()
def load_map():
    """
    Load a map from user-selected .gmap and .gmap.props files.

    Two file dialogs are shown (map file first, then properties file); the
    function silently returns if either dialog is cancelled.  The parsed grid
    and the tile-index -> image-path mapping are handed to load_textures()
    for rendering.
    """
    extracted_map = []
    extracted_tile_dict = {}
    root = Tk()
    root.withdraw()
    root.wm_attributes('-topmost', 1)
    file_types = [("Map File", "*.gmap")]
    root.map_file = askopenfilename(filetypes=file_types)
    map_file = root.map_file
    if map_file == '':
        root.destroy()
        return
    file_types = [("Map Props File", "*.props")]
    root.props_file = askopenfilename(filetypes=file_types)
    props_file = root.props_file
    if props_file == '':
        root.destroy()
        return
    # Each map line is "<idx>:<strength> <idx>:<strength> ... \n"; splitting
    # on a single space leaves the trailing "\n" token, which [:-1] drops.
    with open(map_file, "r") as fd:
        for line in fd:
            extracted_map.append(line.split(" ")[:-1])
    # Each properties line is "<tile_index>=<image path>".
    with open(props_file, "r") as fd_prop:
        for line in fd_prop:
            tile_index, tile_path = line.split("=")
            extracted_tile_dict[tile_index] = tile_path
    root.destroy()
    load_textures(extracted_map, extracted_tile_dict)
def load_textures(e_map_array, e_tile_dict):
    """
    This function loads all the textures that were present in the .gmap and .gmap.props file.

    e_map_array:  grid of "<tile_index>:<strength>" strings (replaces map_array)
    e_tile_dict:  tile index (string) -> image path, possibly newline-terminated
    """
    global map_array
    map_array = e_map_array
    texture_dict = {}
    for key in e_tile_dict.keys():
        # Paths read from the props file keep their trailing newline; strip it.
        pic = pygame.image.load(e_tile_dict[key].replace('\n', ''))
        pic = pygame.transform.scale(pic, (block_size, block_size))
        texture_dict[key] = pic
    for i in range(total_blocks_y):
        for j in range(total_blocks_x):
            index, strength = e_map_array[i][j].split(":")
            # Index 0 marks an empty cell: leave the background grid visible.
            if int(index) == 0:
                continue
            else:
                screen.blit(texture_dict[index], (j*block_size, i*block_size))
    pygame.display.update()
def save_map():
    """
    Ask for a destination name and write the current map to <name>.gmap plus
    a companion <name>.gmap.props file.  Does nothing if the dialog is
    cancelled.
    """
    root = Tk()
    root.withdraw()
    root.wm_attributes('-topmost', 1)
    f = asksaveasfilename(confirmoverwrite=False, filetype=[("Map File", "*.gmap")])
    # Bug fix: asksaveasfilename returns '' (an empty string, not None) when
    # the dialog is closed with "cancel", so the old `if f is None` check
    # never fired and a bare ".gmap" file was written.  Test for any falsy
    # value instead.
    if not f:
        root.destroy()
        return
    f = f + '.gmap'
    write_map_to_file(f)
    write_map_properties_to_file(f)
    root.destroy()
def write_map_to_file(filename):
    """
    This function writes the map_array to a file in the "<tile_index>:<strength>" format.

    One map row per line, cells separated by single spaces (each line ends
    with a trailing space before the newline, which load_map() relies on).
    """
    # `with` guarantees the handle is closed even if a write fails.
    with open(filename, "w+") as fd:
        for i in range(total_blocks_y):
            for j in range(total_blocks_x):
                fd.write(map_array[i][j] + " ")
            fd.write("\n")
    os.chmod(filename, 0o777)
def write_map_properties_to_file(filename):
    """
    This function writes the tile_index and the corresponding path to the image
    file as a .gmap.props file in a "<tile_index>=<path>" format.

    Only tiles actually placed on the map are written, and only the image's
    base name is stored so saved maps stay portable across machines.
    """
    filename = filename + '.props'
    # Collect the set of tile indices that appear on the map.
    img = {}
    for i in range(total_blocks_y):
        for j in range(total_blocks_x):
            tile_index = map_array[i][j].split(":")[0]
            if int(tile_index) != 0:
                img[int(tile_index)] = tile_map_prop[int(tile_index)]
    # `with` guarantees the handle is closed even if a write fails.
    with open(filename, "w+") as fd:
        for index, path in img.items():
            head, tail = os.path.split(path)
            fd.write(str(index) + "=" + tail + '\n')
    os.chmod(filename, 0o777)
def open_fd():
    """
    This function opens the file explorer so that a new tile can be imported.

    The chosen PNG is copied into Tiles/ and the palette is reloaded so the
    new tile appears immediately.  Cancelling the dialog is a no-op.
    """
    root = Tk()
    root.withdraw()
    root.wm_attributes('-topmost', 1)
    file_types = [("PNG", "*.png")]
    root.filename = askopenfilename(filetypes=file_types)
    filename = root.filename
    if filename == '':
        root.destroy()
        return
    # NOTE(review): `copy` is presumably shutil.copy imported at the top of
    # the file (outside this view) — verify.
    copy(filename, "Tiles/")
    load_tile_set()
    root.destroy()
def button1(mouse_x):
    """Return True when *mouse_x* lies over the first menu button."""
    return mouse_x <= block_size * 2
def button2(mouse_x):
    """Return True when *mouse_x* lies over the second menu button."""
    left = (block_size * 2) + 10
    return left <= mouse_x <= left + (block_size * 2)
def button3(mouse_x):
    """Return True when *mouse_x* lies over the third menu button."""
    left = (block_size * 4) + 20
    return left <= mouse_x <= left + (block_size * 2)
def button4(mouse_x):
    """Return True when *mouse_x* lies over the fourth menu button."""
    left = (block_size * 6) + 30
    return left <= mouse_x <= left + (block_size * 2)
def button5(mouse_x):
    """Return True when *mouse_x* lies over the fifth menu button."""
    left = (block_size * 8) + 40
    return left <= mouse_x <= left + (block_size * 2)
def highlight_selection():
    """
    This function is used to highlight the tile that is currently selected.

    Draws a 3px green outline around the palette slot at (present_x,
    present_y); the +10 offset compensates for the palette's vertical inset.
    """
    global present_x, present_y
    pygame.draw.rect(screen, green, pygame.Rect(present_x, present_y + 10, block_size, block_size), 3)
    pygame.display.update()
def left_mouse_clicked(mouse_x, mouse_y):
    """
    This function handles what happens when the left mouse button is clicked. If the mouse is over the tile section, it will
    select the tile, if it is over the buttons, it will call a corresponding function, and if it is over the map area, it
    places the currently selected tile at that spot.
    """
    global present_x, present_y, tile_dict, current_tile, current_strength
    # Snap the click position to the top-left corner of its grid cell.
    present_x = mouse_x - (mouse_x % block_size)
    present_y = mouse_y - (mouse_y % block_size)
    array_index_x = int(present_x/block_size)
    array_index_y = int(present_y/block_size)
    if in_tile_menu(mouse_y) and not in_button_menu(mouse_y):
        # Palette slots are keyed by "x:y"; `offset` (module-level, defined
        # outside this view) presumably realigns the snapped y to the slot
        # key used in load_tile_set() — TODO confirm.
        index = str(present_x)+":"+str(present_y + offset)
        if index in tile_dict:
            current_tile = tile_dict[index]
            load_tile_set()
            highlight_selection()
    if in_map_area(mouse_y):
        # Paint the selected tile and record "<tile_index>:<strength>".
        pygame.draw.rect(screen, black, (present_x, present_y, block_size, block_size))
        screen.blit(current_tile, (present_x, present_y))
        tile_details = tile_map[current_tile]
        map_array[array_index_y][array_index_x] = str(tile_details) + ':' + str(current_strength)
        pygame.display.update()
    if in_button_menu(mouse_y):
        button_clicked(mouse_x)
def right_mouse_clicked(mouse_x, mouse_y):
    """Erase the tile under the cursor, restoring the empty-cell grid look.

    Only acts inside the map area; clicks elsewhere are ignored.
    """
    if not in_map_area(mouse_y):
        return
    global present_x, present_y
    # Snap the click position to the top-left corner of its grid cell.
    present_x = mouse_x - (mouse_x % block_size)
    present_y = mouse_y - (mouse_y % block_size)
    pygame.draw.rect(screen, black, (present_x, present_y, block_size, block_size))
    pygame.draw.rect(screen, greyish_white, (present_x, present_y, block_size - 1, block_size - 1))
    col = int(present_x / block_size)
    row = int(present_y / block_size)
    map_array[row][col] = '0:0'
    pygame.display.update()
"""
Functions being called here do the following in order
1. Shows the map as a grid,
2. Loads all the tiles into the tile editor
3. sets the current tile editor to the first tile
4. Loads all the buttons
5. runs the loop until the window is closed
"""
show_grid()
load_tile_set()
current_tile = tile_dict[first_tile]
load_buttons()
running = True
while running:
mouse_X, mouse_Y = pygame.mouse.get_pos()
for event in pygame.event.get():
if event.type == pygame.QUIT:
exit_tile_editor()
running = False
if event.type == pygame.MOUSEBUTTONDOWN and event.button == RIGHT:
right_mouse_clicked(mouse_X, mouse_Y)
if event.type == pygame.MOUSEBUTTONDOWN and event.button == LEFT:
left_mouse_clicked(mouse_X, mouse_Y)
pygame.quit()
| [
"aashish.thyagarajan@gmail.com"
] | aashish.thyagarajan@gmail.com |
6293b1c33585bb63fb13582f05d372a5eb3c69d1 | 84137a5667299b375dc1a3fc70ac339a70559e1f | /simulation.py | 40356f9c6c0eb348b28468cfe1d62686984c2983 | [] | no_license | Sabotaz/hypersonic | 3c150b411243acf0f5a718449de65a905ee8a31a | 906fa55d75246667b704ca7bb5e3e5088f38ecfa | refs/heads/master | 2021-01-24T08:46:40.932841 | 2017-06-06T14:32:29 | 2017-06-06T14:32:29 | 93,395,784 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,259 | py |
import config
import functools
import utils
import random
def simulate_turn(game):
    """Advance *game* by one turn.

    Every player's action is chosen first, then all actions are applied in
    player order.  Returns (game, the action chosen for config.MY_ID).
    """
    game.next_turn()
    chosen = [choose_action(game, pid) for pid in range(config.NB_JOUEURS)]
    my_action = next((act for act in chosen if act.pid == config.MY_ID), None)
    for act in chosen:
        game.apply_action(act)
    return game, my_action
def choose_action(game, pid):
    """Pick a random legal move (and possibly a bomb drop) for player *pid*.

    The player may stay in place or step to any accessible orthogonal
    neighbour inside the config.largeur x config.hauteur grid.
    """
    player = game.get_player(pid)
    px = player[0]
    py = player[1]
    # Staying put is always allowed.
    allowed = [(px,py)]
    if px != 0:
        if game.is_accessible(px-1,py):
            allowed.append((px-1,py))
    if py != 0:
        if game.is_accessible(px,py-1):
            allowed.append((px,py-1))
    if px != config.largeur - 1:
        if game.is_accessible(px+1,py):
            allowed.append((px+1,py))
    if py != config.hauteur - 1:
        if game.is_accessible(px,py+1):
            allowed.append((px,py+1))
    x,y = random.choice(allowed)
    # NOTE(review): `Action` is not imported in this module (only `config`,
    # `functools`, `utils`, `random` are) — presumably it is meant to come
    # from `utils`; verify, otherwise this raises NameError at runtime.
    action = Action(pid, x, y)
    if player[3] > 0: #nb_bombs_restantes
        # With bombs remaining, drop one with 50% probability.
        action.bomb = random.random() > 0.5
    return action
def simulate(game):
    """Play config.PROFONDEUR random turns on a copy of *game*.

    Returns (first action taken for MY_ID, resulting game state); the input
    game is never mutated because the simulation runs on a clone.
    """
    simulation = game.clone()
    simulation, first_action = simulate_turn(simulation)
    for _ in range(config.PROFONDEUR - 1):
        simulation, _ = simulate_turn(simulation)
    return first_action, simulation
| [
"sablier@zendikar.fr"
] | sablier@zendikar.fr |
ae2d9e4f8d5f960e48a22b646f462eb9b28643a7 | 709d9a545802b9ab36d0969dcd066e23ee6afd50 | /bounds.py | 3c7c3df93eb493d2a1ae9249b9dce6e68be54bf6 | [] | no_license | yang0110/graph_signal_processing | 65d1cafd7967b1949c140c51818680c592631300 | 9639e4da9c1f7ff31465b757e18d5d2397cfd4d2 | refs/heads/master | 2020-04-04T19:39:17.905570 | 2019-01-17T18:47:24 | 2019-01-17T18:47:24 | 156,214,993 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 4,862 | py | import numpy as np
import matplotlib.pyplot as plt
from sklearn.metrics.pairwise import rbf_kernel
from sklearn.preprocessing import Normalizer
from scipy.sparse import csgraph
import scipy
import os
# NOTE(review): relative chdir — only works when launched from the home
# directory that contains Documents/research/code/; verify.
os.chdir('Documents/research/code/')
import datetime
import networkx as nx
from bandit_models import LinUCB, Graph_ridge
from utils import create_networkx_graph
from sklearn import datasets
# Output directory for the generated figures.
path='../results/Bound/'
# Fix the RNG so the experiment is reproducible.
np.random.seed(2019)
def lambda_(noise_level, d, user_num, dimension, item_num):
    """Theoretical regularisation weight:
    8*sqrt(noise)*sqrt(d)*sqrt(user_num+dimension) / (user_num*item_num).
    """
    numerator = 8 * np.sqrt(noise_level) * np.sqrt(d) * np.sqrt(user_num + dimension)
    return numerator / (user_num * item_num)
def ridge_bound_fro(lam, rank, I_user_fro, I_min, k):
    """Frobenius-norm ridge bound: lam*(sqrt(rank)+2*I_user_fro)/(k+lam*I_min)."""
    numerator = lam * (np.sqrt(rank) + 2 * I_user_fro)
    return numerator / (k + lam * I_min)
def ridge_bound_infty(lam, rank, I_user_infty, I_min, k):
    """Infinity-norm ridge bound: lam*sqrt(rank)*(1+2*I_user_infty)/(k+lam*I_min)."""
    scale = lam * np.sqrt(rank)
    return scale * (1 + 2 * I_user_infty) / (k + lam * I_min)
def graph_ridge_bound_fro(lam, rank, lap_user_fro, lap_min, k):
    """Frobenius-norm graph-ridge bound: lam*(sqrt(rank)+2*lap_user_fro)/(k+lam*lap_min)."""
    numerator = lam * (np.sqrt(rank) + 2 * lap_user_fro)
    return numerator / (k + lam * lap_min)
def graph_ridge_bound_infty(lam, rank, lap_user_infty, lap_min, k):
    """Infinity-norm graph-ridge bound: lam*sqrt(rank)*(1+2*lap_user_infty)/(k+lam*lap_min)."""
    scale = lam * np.sqrt(rank)
    return scale * (1 + 2 * lap_user_infty) / (k + lam * lap_min)
# Experiment setup: synthetic item/user features, graph Laplacian of the
# user similarity graph, and the spectral quantities the bounds depend on.
user_num=50
dimension=10
item_num=2000
noise_level=0.1
d=2
item_f=np.random.normal(size=(item_num, dimension))
item_f=Normalizer().fit_transform(item_f)
Sigma=np.cov(item_f.T)
u,s,v=np.linalg.svd(Sigma)
sigma_min=np.min(s)
k=sigma_min/18
# The first user_f draw is immediately overwritten by the clustered blobs.
user_f=np.random.normal(size=(user_num, dimension))
user_f, _=datasets.make_blobs(n_samples=user_num, n_features=dimension, centers=5, cluster_std=0.1, shuffle=False, random_state=2019)
user_f=Normalizer().fit_transform(user_f)
rank=np.linalg.matrix_rank(user_f)
# RBF similarity between users; thresholding at thrs=0 keeps every edge.
ori_adj=rbf_kernel(user_f)
min_adj=np.min(ori_adj)
max_adj=np.max(ori_adj)
# thrs_list is computed but unused in this run.
thrs_list=np.round(np.linspace((min_adj+max_adj)/2, max_adj, 5), decimals=4)
adj=ori_adj.copy()
thrs=0
adj[adj<=thrs]=0
lap=csgraph.laplacian(adj, normed=False)
lap_evalues, lap_vectors=np.linalg.eig(lap)
lap_evalues=np.sort(lap_evalues)
lap_min=np.min(lap_evalues)
lap_2=lap_evalues[1]
lap_user_fro=np.linalg.norm(np.dot(lap, user_f), 'fro')
lap_user_infty=np.linalg.norm(np.dot(lap, user_f), np.inf)
evalues_matrix=np.diag(lap_evalues)
lam_user_fro=np.linalg.norm(np.dot(evalues_matrix, user_f), 'fro')
# Identity baseline (plain ridge): same quantities with L replaced by I.
I=np.identity(user_num)
I_ev, I_evc=np.linalg.eig(I)
I_ev=np.sort(I_ev)
I_min=np.min(I_ev)
I_2=I_ev[1]
I_user_fro=np.linalg.norm(np.dot(I, user_f), 'fro')
I_user_infty=np.linalg.norm(np.dot(I, user_f), np.inf)
ridge_array=np.zeros(item_num)
graph_ridge_array=np.zeros(item_num)
graph_ridge_simple_array=np.zeros(item_num)
lam_list=np.zeros(item_num)
# Evaluate the three bounds while the sample size grows from 1 to item_num.
for i in range(item_num):
    lam=lambda_(noise_level, d, user_num, dimension, i+1)
    lam2=lam
    lam_list[i]=lam
    ridge_array[i]=ridge_bound_fro(lam, rank, I_user_fro, I_2, k)
    graph_ridge_array[i]=graph_ridge_bound_fro(lam2, rank, lap_user_fro, lap_min, k)
    graph_ridge_simple_array[i]=graph_ridge_bound_fro(lam2, rank, lam_user_fro, lap_min, k)
# Plot all three theoretical bounds against the sample size and save.
plt.figure()
plt.plot(ridge_array, label='ridge')
plt.plot(graph_ridge_array, label='graph ridge')
plt.plot(graph_ridge_simple_array,label='graph ridge simple')
plt.xlabel('sample size', fontsize=12)
plt.ylabel('theoretical bound', fontsize=12)
plt.title('same lambda', fontsize=12)
plt.legend(loc=0,fontsize=12)
plt.savefig(path+'lap_1_lap_lam_same_ridge_lam_theoretical_bound_ridge_gr_grs'+'.png', dpi=200)
plt.show()
# Diagnostic plots: the ridge bound alone, the lambda schedule, the scaled
# schedule, and the Laplacian spectrum (shown interactively, not saved).
plt.figure()
plt.plot(ridge_array, label='ridge')
plt.legend(loc=0,fontsize=12)
plt.show()
plt.figure()
plt.plot(lam_list, label='lam')
plt.legend(loc=0, fontsize=12)
plt.show()
plt.figure()
plt.plot(lam_list*lap_2, label='lam*lap_min')
plt.legend(loc=0, fontsize=12)
plt.show()
plt.figure()
plt.plot(lap_evalues, label='lap_evalues')
plt.legend(loc=0, fontsize=12)
plt.show()
# Sweep the blob cluster_std and track ||L theta||_F (and its eigenvalue
# variant) against a fixed reference user matrix ori_user_f.
cluster_std_list=np.arange(0.001, 10, 0.1)
ori_user_f, _=datasets.make_blobs(n_samples=user_num, n_features=dimension, centers=5, cluster_std=1, shuffle=False, random_state=2019)
fro_list=np.zeros(len(cluster_std_list))
lam_list=np.zeros(len(cluster_std_list))
for i, cluster_std in enumerate(cluster_std_list):
    user_f, _=datasets.make_blobs(n_samples=user_num, n_features=dimension, centers=5, cluster_std=cluster_std, shuffle=False, random_state=2019)
    user_f=Normalizer().fit_transform(user_f)
    adj=rbf_kernel(user_f)
    lap=csgraph.laplacian(adj, normed=False)
    lap_evalues, lap_evec=np.linalg.eig(lap)
    Lambda=np.diag(lap_evalues)
    lap_user_fro=np.linalg.norm(np.dot(lap, ori_user_f))
    lam_user_fro=np.linalg.norm(np.dot(Lambda, ori_user_f))
    fro_list[i]=lap_user_fro
    lam_list[i]=lam_user_fro
# plt.plot(cluster_std_list, lam_list, label='Lambda')
plt.plot(cluster_std_list, fro_list, label='Lap')
plt.legend(loc=0, fontsize=12)
plt.title('cluster_std=1', fontsize=12)
plt.xlabel('cluster_std', fontsize=12)
plt.ylabel('||L theta||_F', fontsize=12)
plt.savefig(path+'dot_product_of_lap_and_user_f'+'.png', dpi=200)
plt.show()
| [
"noreply@github.com"
] | yang0110.noreply@github.com |
737afb20142aa4df6664eaa1655c26902c8de00b | 81d220b8e45ad167e283ffda2feefd2bfb7fb1a4 | /manage.py | 3bbd89d416b4caa1e11790a9cd19b787f3cc6229 | [] | no_license | danieljcs/pira_truths | c57333f9b116f0f387ee0cc45e633fca13a8b5b3 | f9d9c51335a5d01ed5f0bafad5a614c14049d60b | refs/heads/main | 2023-01-13T16:32:32.814092 | 2020-11-18T12:53:21 | 2020-11-18T12:53:21 | 313,933,295 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 667 | py | #!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys
def main():
"""Run administrative tasks."""
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'pira_truths.settings')
try:
from django.core.management import execute_from_command_line
except ImportError as exc:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
) from exc
execute_from_command_line(sys.argv)
if __name__ == '__main__':
main()
| [
"50522425+danieljcs@users.noreply.github.com"
] | 50522425+danieljcs@users.noreply.github.com |
bef36379784dfd5d27c3748d08a7c9263568ac69 | 8b3047774c7cb24d3cec4cd38321558863ccec4d | /training-set/train_frame.py | 693177ae4fb6653134f4ae6d9ad443880063a85a | [] | no_license | aman-jakkani/636-Final-Model | e1b47979f102d37ae9f2996861ee1861fbf049de | fa003571e0a2df0d58e0ba554b19263ce17026e4 | refs/heads/master | 2022-06-06T03:16:58.675226 | 2020-05-01T21:16:02 | 2020-05-01T21:16:02 | 260,517,772 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 817 | py | #framer program
#
# all imports
import cv2
import math
# Running counter used to name the extracted frames across all videos.
frame_count = 0
videoFiles = ['gettinginto_1.mp4', 'gettinginto_2.mp4', 'gettingout_1.mp4', 'gettingout_2.mp4', 'gettinginto_3.mp4', 'neither_1.mp4', 'gettingout_3.mp4',
    'gettingout_4.mp4', 'gettinginto_4.mp4', 'gettinginto_5.mp4']
# For every video, save roughly two frames per second as frame<N>.jpg.
for each in videoFiles:
    cap = cv2.VideoCapture(each)
    frameRate = cap.get(cv2.CAP_PROP_FPS)
    #print(frameRate)
    while(cap.isOpened()):
        frameId = cap.get(1) #current frame number
        ret, frame = cap.read()
        if (ret != True):
            break
        # NOTE(review): if a video reports fps < 2 this modulus is zero and
        # raises ZeroDivisionError; frameId is also a float — verify inputs.
        if (frameId % math.floor(math.floor(frameRate)/2) == 0): #getting two frames per second of video
            filename ="frame%d.jpg" % frame_count;frame_count+=1
            cv2.imwrite(filename, frame)
    cap.release()
print("Done!") | [
"aj280598@gmail.com"
] | aj280598@gmail.com |
a67d6079a5eec64bc07497534737ee8ef949dd51 | 3ab1f37b4372d0796c85ef24343dd8c03accb6ef | /CoinBase/ConnectFour.py | aa65bc2e3204e93c5a8b26877efbaf25d28eb2c3 | [] | no_license | Blossomyyh/leetcode | 2be6a99534801fc59fe9551317ca49c3704b1c3d | 38615779eb43d147587467e11dc22761ac0726cb | refs/heads/master | 2023-01-22T16:56:26.624677 | 2020-11-20T13:47:43 | 2020-11-20T13:47:43 | 266,845,278 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 3,135 | py | """
Connect 4
use get column and line and diagonals to find wins
4 ->wins
https://codereview.stackexchange.com/questions/225840/a-simple-connect-4-game-in-python
https://github.com/KeithGalli/Connect4-Python/blob/master/connect4.py
Better solution:
focus on the current move's row and col! to check wins
"""
# Player identifiers used to mark board cells (0 means empty).
TEAM1 = 1
TEAM2 = 2
class connect4:
    """Connect-four board: cells hold 0 (empty) or the team number that moved."""

    def __init__(self, row=6, col=7):
        self.row = row
        self.col = col
        # row x col grid of 0 / team numbers, indexed board[row][col].
        self.board = [[0] * self.col for _ in range(self.row)]
        self.rows = []  # kept for backward compatibility; not used internally
        # Total number of cells == maximum number of moves in a game.
        self.count = self.row * self.col

    def returnboard(self):
        """Print the board, one row per line."""
        for r in range(self.row):
            print(self.board[r])
        return

    def checkwins(self, team):
        """Return True if *team* has four in a row in any direction.

        Scans horizontally, vertically and along both diagonals: O(4*N*M).
        """
        # horizontal runs
        for r in range(self.row):
            for c in range(self.col - 3):
                if all(self.board[r][c + k] == team for k in range(4)):
                    return True
        # vertical runs
        for r in range(self.row - 3):
            for c in range(self.col):
                if all(self.board[r + k][c] == team for k in range(4)):
                    return True
        # down-right diagonals
        for r in range(self.row - 3):
            for c in range(self.col - 3):
                if all(self.board[r + k][c + k] == team for k in range(4)):
                    return True
        # up-right (anti) diagonals
        for r in range(3, self.row):
            for c in range(self.col - 3):
                if all(self.board[r - k][c + k] == team for k in range(4)):
                    return True
        return False

    def checkcolumn(self, col):
        """Return True while column *col* still has at least one empty cell."""
        return any(self.board[r][col] == 0 for r in range(self.row))

    def checkend(self, rounds):
        """Return True when *rounds* exceeds the number of cells on the board.

        Bug fix: the end-of-game message used to print unconditionally on
        every call (this method runs once per loop iteration); it is now
        printed only when the game is actually over.
        """
        ended = rounds > self.count
        if ended:
            print("The end of the game! ")
        return ended

    def makemove(self, team, col):
        """Drop a *team* piece into *col* (caller guarantees the column is
        playable), print the move and board, and return True iff it wins."""
        i = self.row - 1
        # Fall from the bottom row upwards to the first empty cell.
        while self.board[i][col] != 0:
            i -= 1
        self.board[i][col] = team
        print(str(team) + " move at col: " + str(col))
        self.returnboard()
        if self.checkwins(team):
            print("Team " + str(team) + " WIN !")
            return True
        return False
import random
if __name__ == "__main__":
    # Self-play demo: two teams alternate random legal moves until one wins
    # or the board fills up.
    game = connect4()
    game.returnboard()
    rounds = 1
    win = False
    while not win and not game.checkend(rounds):
        # Alternate teams: odd rounds -> team 2, even rounds -> team 1.
        team = rounds % 2 + 1
        # generate a random number 0-6
        colidx = random.randrange(7)
        # Re-roll until a non-full column is picked.
        while not game.checkcolumn(colidx):
            colidx = random.randrange(7)
        win = game.makemove(team, colidx)
        rounds += 1
    game.returnboard()
| [
"blossomyyh@163.com"
] | blossomyyh@163.com |
5f2abcf4f52ffd5536b61da41e9a77fe6fcd0b3a | 2560d2678456d6d2efb889fb4ddde6aedb772a6a | /601IntroToEEAndCSI/solutions/python_tutorial/section7/712worksheet.py | b2a259220f6fac92c926ae6f82eebf4a6250b9c4 | [] | no_license | GolfyMcG/MIT_Classes | 2b53896fc8fd9ebc593a56f0a614adb994734fde | 367e75f7638a377b8035ad1b6e064892b3a276cf | refs/heads/master | 2021-01-01T15:29:42.719979 | 2014-07-22T04:43:26 | 2014-07-22T04:43:26 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 742 | py | import sys
import os.path
# Make the course's shared modules/ directory importable relative to this file.
sys.path.append(os.path.join(os.path.dirname(__file__), '../modules/'))
import checkers
# Question counter, incremented before each checked answer below.
n = 0
def emptyAlist():
    """Return a fresh, empty association list."""
    return list()
def addEntry(al, k, v):
    """Append the [key, value] pair [k, v] to association list *al* in place."""
    al += [[k, v]]
def lookup(al, k):
    """Return the first [key, value] pair in *al* whose key equals *k*.

    Returns None explicitly when the key is absent (the old commented-out
    generator variant has been removed).
    """
    for pair in al:
        if pair[0] == k:
            return pair
    return None
# Build a two-entry association list and verify each worksheet answer with
# the course's checker (question number carried in n).
d = emptyAlist()
addEntry(d, 'key1', 'value1')
addEntry(d, 'key2', 'value2')
n += 1
question = d
ansvalue = [['key1', 'value1'], ['key2', 'value2']]
checkers.check_answer(question, ansvalue, n)
n += 1
question = lookup(d, 'key1')
ansvalue = ['key1', 'value1']
checkers.check_answer(question, ansvalue, n)
n += 1
question = lookup(d, 'key2')
ansvalue = ['key2', 'value2']
checkers.check_answer(question, ansvalue, n)
| [
"avilla0429@gmail.com"
] | avilla0429@gmail.com |
f57152994f9ef694ce26e8b9cde3dd48fc02ec45 | 48df8729bc22f7f5560e2a19504e9d949818e8c9 | /urlform/forms.py | 68fbb69e2d08a672581f040146bdcd243260f0b4 | [] | no_license | njdevil/shorten-url | 27bb3e3b0dcde9a29ae32c51a284b92d770b53b5 | c22a0972c80f31a04a774e7046621f0669c0e785 | refs/heads/master | 2021-01-01T19:35:12.280228 | 2013-11-07T08:03:08 | 2013-11-07T08:03:08 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 182 | py | from django import forms
from ****.urlform.models import ShortLinks
class ShortLinksForm(forms.ModelForm):
    # ModelForm for creating ShortLinks records; only the long URL is
    # user-supplied — every other field is filled in by the application.
    class Meta:
        model = ShortLinks
        fields = ('long_url',)
| [
"root@mpsclient.info"
] | root@mpsclient.info |
c0573c684911278d9ca87fad422d50b5649709c8 | 095f35858f26e1b1f456f5462512b1b42c6419da | /venv/Part2/XML_1.py | ff3178520e3011bec73c0af64ca96de4dd2d2d96 | [] | no_license | rheehot/DataBaseWithPython | 9aa456880beea8195b8691854f2cf23f145f142d | 1261223a4b64ae8925ec0bc4cfbb7fc5138a302f | refs/heads/master | 2023-01-07T21:06:09.750691 | 2020-11-15T13:44:29 | 2020-11-15T13:44:29 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,151 | py | # ๋ฉ๋ชจ๋ฆฌ์์ XML์ XDM ํธ๋ฆฌ ์ฒ๋ฆฌํ๊ธฐ
import xml.etree.ElementTree as ET
# Part 1: load a Python object into memory and convert it to an XML (XDM tree) string.
newDict = {
'PLAYER' : [
{'PLAYER_ID': '2007001', 'PLAYER_NAME': '์ ๋ณ์ง', 'TEAM_ID': 'K03', 'E_PLAYER_NAME': 'JOENG, BYUNGJI', 'NICKNAME': None, 'JOIN_YYYY': '2011', 'POSITION': 'GK', 'BACK_NO': 1, 'NATION':None, 'BIRTH_DATE': '1980-08-04', 'SOLAR': '1', 'HEIGHT': 184, 'WEIGHT': 77},
{'PLAYER_ID': '2007020', 'PLAYER_NAME': '์๋๋ช
', 'TEAM_ID': 'K01', 'E_PLAYER_NAME': 'SEO, DONGMYUNG', 'NICKNAME': None, 'JOIN_YYYY': '2012', 'POSITION': 'GK', 'BACK_NO': 1, 'NATION':None, 'BIRTH_DATE': '1984-03-05', 'SOLAR': '1', 'HEIGHT': 196, 'WEIGHT': 94},
{'PLAYER_ID': '2007045', 'PLAYER_NAME': '๊น์ด์ฌ', 'TEAM_ID': 'K02', 'E_PLAYER_NAME': 'KIM, WOONJAE', 'NICKNAME': None, 'JOIN_YYYY': '2014', 'POSITION': 'GK', 'BACK_NO': 1, 'NATION':None, 'BIRTH_DATE': '1990-08-22', 'SOLAR': '1', 'HEIGHT': 188, 'WEIGHT': 79}
]
}
# Build the XDM tree: <Table name=...> with one <Row> element per record,
# record fields stored as string attributes.
tableName = list(newDict.keys())[0] # PLAYER
tableRows = list(newDict.values())[0]
rootElement = ET.Element('Table')
rootElement.attrib['name'] = tableName
for row in tableRows:
    rowElement = ET.Element('Row')
    rootElement.append(rowElement)
    # rowElement = ET.SubElement(rootElement, 'Row') would be equivalent to the two lines above
    for columnName in list(row.keys()):
        if row[columnName] == None:
            rowElement.attrib[columnName] = ''
        else:
            if type(row[columnName]) == int:
                rowElement.attrib[columnName] = str(row[columnName])
            else:
                rowElement.attrib[columnName] = row[columnName]
# Dump the XDM tree to the console.
ET.dump(rootElement)
print()
#####################################################################################
# XML ์คํธ๋ง์ XDM ํธ๋ฆฌ๋ก ๋ฉ๋ชจ๋ฆฌ์ ๋ก๋ฉํ์ฌ, ํ์ด์ ๊ฐ์ฒด๋ก ๋ณํ
xmlString = '''
<Table name="PLAYER">
<Row PLAYER_ID="2007001" PLAYER_NAME="์ ๋ณ์ง" TEAM_ID="K03" E_PLAYER_NAME="JOENG, BYUNGJI" NICKNAME="" JOIN_YYYY="2011" POSITION="GK" BACK_NO="1" NATION="" BIRTH_DATE="1980-08-04" SOLAR="1" HEIGHT="184" WEIGHT="77" />
<Row PLAYER_ID="2007020" PLAYER_NAME="์๋๋ช
" TEAM_ID="K01" E_PLAYER_NAME="SEO, DONGMYUNG" NICKNAME="" JOIN_YYYY="2012" POSITION="GK" BACK_NO="1" NATION="" BIRTH_DATE="1984-03-05" SOLAR="1" HEIGHT="196" WEIGHT="94" />
<Row PLAYER_ID="2007045" PLAYER_NAME="๊น์ด์ฌ" TEAM_ID="K02" E_PLAYER_NAME="KIM, WOONJAE" NICKNAME="" JOIN_YYYY="2014" POSITION="GK" BACK_NO="1" NATION="" BIRTH_DATE="1990-08-22" SOLAR="1" HEIGHT="188" WEIGHT="79" />
</Table>
'''
# Load the XML string into memory as an XDM tree.
rootElement = ET.fromstring(xmlString)
# Convert the XDM tree back into Python objects (one attribute dict per row).
players = []
for childElement in rootElement:
    print(childElement.tag, childElement.attrib)
    players.append(childElement.attrib)
print()
print(players)
print()
newDict = {}
newDict[list(rootElement.attrib.values())[0]] = players
print((newDict)) | [
"9h0jun1115@gmail.com"
] | 9h0jun1115@gmail.com |
339700e4ddf899c0336dd7012c4c6385c8eb3cbb | 9716a77ef1d0ba5ef9a61be04f6229494744d5d5 | /chapter06 ์ ๋ ฌ/์์์ ์๋๋ก.py | 158d19ce05bdd45aaedf7a1c03c28402fb6a8ac5 | [] | no_license | korea-space-codingmonster/Algorithm_Study | 98b00c81839cf8ac8365d3982c25650a21226ce9 | 8c92857e458994a2d1d77dc3ea0d4b645b8b6a4b | refs/heads/main | 2023-06-03T20:00:52.915447 | 2021-06-20T05:51:47 | 2021-06-20T05:51:47 | 329,354,196 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 712 | py | # ํ๋์ ์ฃผ์ด์๋ ๋ค์ํ ์๊ฐ ์กด์ฌํ๋ค. ์ด๋ฌํ ์์ ํฌ๊ธฐ์ ์๊ด์์ด ๋์ด๋์ด ์๋ค.
# Sort N numbers in descending order.
# Input : first line N (1 < N <= 500); then N lines, each a natural number
#         between 1 and 100000.
# Output: the numbers in descending order, separated by single spaces.
# Example: input 3 / 15 / 27 / 12  ->  output "27 15 12"
count = int(input())
values = [int(input()) for _ in range(count)]
for value in sorted(values, reverse=True):
    print(value, end=' ')
| [
"replituser@example.com"
] | replituser@example.com |
88702a2b262efec4b9998b739edd2a203fb96bf5 | 3930ffcf78a3a80b53fc47317cddce79f416d1d8 | /bsconsulting.py | c580eee8d1ed1bc6211f8767a3f0df8715c0be62 | [] | no_license | r3ap3rpy/bogusconsultation | 4986328b3f6d77ec0499fb9eb58dcee559672dfb | 5a44f5ba1f2af40a6de17e8e23e0d2df9beb3cba | refs/heads/master | 2022-11-24T09:09:54.001725 | 2020-07-28T15:19:53 | 2020-07-28T15:19:53 | 283,228,947 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,229 | py | from selenium import webdriver
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.common.keys import Keys
from queue import Queue
import os
import itertools
import argparse
import requests
import threading
# Windows-only: the bundled geckodriver.exe is referenced later.
if not os.sys.platform == "win32":
    raise SystemExit("This script can only run on windows!")
parser = argparse.ArgumentParser(description='Auto filler for the bullshi consultation our government spends billions to brainwash people.')
parser.add_argument('-participants','--participants' ,type = int, help='The number of people filling the form.', required=True)
parser.add_argument('-threads','--threads', type = int, help = "The number of worker threads.", required = False, default=8)
args = parser.parse_args()
# Pull the US Census 1990 first-name frequency lists (one name per line,
# name is the first whitespace-separated column).
male = requests.get('https://www2.census.gov/topics/genealogy/1990surnames/dist.male.first?#')
fmale = requests.get('https://www2.census.gov/topics/genealogy/1990surnames/dist.female.first?#')
male = [_.split(' ')[0] for _ in male.text.split('\n') if _]
fmale = [_.split(' ')[0] for _ in fmale.text.split('\n') if _]
if args.participants < 1 or args.participants > 5_000_000:
    raise SystemExit("Must be between 1 and 5_000_000!")
JobQueue = Queue()
print(f"Running on {args.threads} thread(s)!")
# Every (male, female) name pair is one synthetic participant; truncate the
# cartesian product to the requested participant count.
prod = list(itertools.product(male, fmale))
prod = prod[:args.participants]
print(f"Number of participiants: {len(prod)}")
class BSinfo(threading.Thread):
    """Worker thread: takes (surname, forename) pairs off *jobqueue* and fills
    in the consultation form once per pair using a fresh Firefox session.

    NOTE(review): run() loops forever on jobqueue.get() — the thread never
    terminates on its own; presumably the process exits via queue.join() plus
    daemon threads in the (unseen) remainder of the file — verify.
    """
    def __init__(self, jobqueue):
        threading.Thread.__init__(self)
        self.jobqueue = jobqueue
        # XPaths for the personal-data fields on the first page.
        self.vnev = '//*[@id="vezeteknev"]'
        self.knev = '//*[@id="keresztnev"]'
        self.email = '//*[@id="email_cim"]'
        self.kor = '//*[@id="eletkor"]'
        self.hunvagyok = '/html/body/div[3]/main/section/div/div/div[2]/form/div/div[1]/div[3]/div[1]/label'
        self.lua = '/html/body/div[3]/main/section/div/div/div[2]/form/div/div[1]/div[3]/div[2]/label'
        self.onwards = '/html/body/div[3]/main/section/div/div/div[2]/form/div/div[1]/div[4]/button'
        # XPaths of the chosen answer for each of the 13 questions.
        self.first = '/html/body/div[3]/main/section/form/div[2]/div/div/div[2]/ul/li[8]/label'
        self.second = '/html/body/div[3]/main/section/form/div[3]/div/div/div[2]/ul/li[2]/label'
        self.third = '/html/body/div[3]/main/section/form/div[4]/div/div/div[2]/ul/li[2]/label'
        self.fourth = '/html/body/div[3]/main/section/form/div[5]/div/div/div[2]/ul/li[2]/label'
        self.fifth = '/html/body/div[3]/main/section/form/div[6]/div/div/div[2]/ul/li[2]/label'
        self.sixth = '/html/body/div[3]/main/section/form/div[7]/div/div/div[2]/ul/li[2]/label'
        self.seventh = '/html/body/div[3]/main/section/form/div[8]/div/div/div[2]/ul/li[2]/label'
        self.eigth = '/html/body/div[3]/main/section/form/div[9]/div/div/div[2]/ul/li[2]/label'
        self.ninth = '/html/body/div[3]/main/section/form/div[10]/div/div/div[2]/ul/li[2]/label'
        self.tenth = '/html/body/div[3]/main/section/form/div[11]/div/div/div[2]/ul/li[2]/label'
        self.eleventh = '/html/body/div[3]/main/section/form/div[12]/div/div/div[2]/ul/li[2]/label'
        self.twelveth = '/html/body/div[3]/main/section/form/div[13]/div/div/div[2]/ul/li[2]/label'
        self.thirteenth = '/html/body/div[3]/main/section/form/div[14]/div/div/div[2]/ul/li[2]/label'
        self.sendin = '/html/body/div[3]/main/section/form/div[15]/div[1]/button'
        self.agree = '/html/body/div[3]/main/section/form/div[16]/div/div/div/div[3]/div[2]/button[2]/strong'
        self.answers = (self.first,self.second,self.third,self.fourth,self.fifth,self.sixth,self.seventh,self.eigth,self.ninth,self.tenth,self.eleventh,self.twelveth,self.thirteenth)
    def run(self):
        # One full form submission per queued participant; a new browser is
        # started and closed for every job.
        while True:
            CurrentParticipiant = self.jobqueue.get()
            print(CurrentParticipiant)
            self.foxy = webdriver.Firefox(executable_path=os.path.sep.join(['source','geckodriver.exe']))
            self.foxy.get("https://nemzetikonzultacio.kormany.hu/")
            # Fill in surname, forename, a synthetic e-mail and a fixed age.
            self.vnevElement = WebDriverWait(self.foxy, 10).until(lambda driver: driver.find_element_by_xpath(self.vnev))
            self.vnevElement.send_keys(CurrentParticipiant[0])
            self.knevElement = WebDriverWait(self.foxy, 10).until(lambda driver: driver.find_element_by_xpath(self.knev))
            self.knevElement.send_keys(CurrentParticipiant[1])
            self.emailElement = WebDriverWait(self.foxy, 10).until(lambda driver: driver.find_element_by_xpath(self.email))
            self.emailElement.send_keys(f'{CurrentParticipiant[0]}@{CurrentParticipiant[1]}.hu')
            self.korElement = WebDriverWait(self.foxy, 10).until(lambda driver: driver.find_element_by_xpath(self.kor))
            self.korElement.send_keys('99')
            # Tick the two mandatory checkboxes and continue to the questions.
            self.hunvagyokElement = WebDriverWait(self.foxy, 10).until(lambda driver: driver.find_element_by_xpath(self.hunvagyok))
            self.hunvagyokElement.click()
            self.luaElement = WebDriverWait(self.foxy, 10).until(lambda driver: driver.find_element_by_xpath(self.lua))
            self.luaElement.click()
            self.onwardsElement = WebDriverWait(self.foxy, 10).until(lambda driver: driver.find_element_by_xpath(self.onwards))
            self.onwardsElement.click()
            # Scroll each answer into view before clicking it.
            # NOTE(review): find_element_by_xpath was removed in Selenium 4;
            # this code requires Selenium 3 — verify the pinned version.
            for answer in self.answers:
                self.target = self.foxy.find_element_by_xpath(answer)
                self.foxy.execute_script('arguments[0].scrollIntoView(true);', self.target)
                self.element = WebDriverWait(self.foxy, 10).until(lambda driver: driver.find_element_by_xpath(answer))
                self.element.click()
            # Submit the form and confirm the final dialog.
            self.sendinElement = WebDriverWait(self.foxy, 10).until(lambda driver: driver.find_element_by_xpath(self.sendin))
            self.sendinElement.click()
            self.agreeElement = WebDriverWait(self.foxy, 10).until(lambda driver: driver.find_element_by_xpath(self.agree))
            self.agreeElement.click()
            self.foxy.close()
            self.jobqueue.task_done()
for ppl in prod:
JobQueue.put(ppl)
for i in range(args.threads):
t = BSinfo(JobQueue)
t.setDaemon(True)
t.start()
JobQueue.join()
raise SystemExit()
| [
"dszabo@itron.com"
] | dszabo@itron.com |
a0414e6d33697862f9e5277510df817eb85fe440 | 69ff402d57d7d9605430aeeef30383ca83e7bfb1 | /neurone_with_lib/exo2.py | 3e54e90e797fec628d750fc40d80f75f55aab18a | [
"Apache-2.0"
] | permissive | yann-nshare/AI_project | 5ee29f13b0472b8abb179e27338623c6c0d11e44 | b3af8fa0bb0bd561c730f871dfdb3b8165f546de | refs/heads/master | 2021-01-09T03:43:49.637587 | 2020-02-21T22:04:38 | 2020-02-21T22:04:38 | 242,234,776 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,237 | py | # import standard PyTorch modules
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
from torch.utils.tensorboard import SummaryWriter # TensorBoard support
import torchvision
import torchvision.transforms as transforms
from torch.autogra import Variable
from torch.utils.data import DataLoader
class Net(nn.Module):
def __init__(self):
super(Net, self).__init__()
self.fc1 = nn.Linear(28 * 28, 200)
self.fc2 = nn.Linear(200, 200)
self.fc3 = nn.Linear(200, 10)
def forward(self, x):
x = F.relu(self.fc1(x))
x = F.relu(self.fc2(x))
x = self.fc3(x)
return F.log_softmax(x)
trans = transforms.Compose([transforms.ToTensor(), transforms.Normalize((0.1307,), (0.3081))])
train_dataset = torchvision.datasets.MNIST(root="./data", train=True, transform=trans, download=True)
test_dataset = torchvision.datasets.MNIST(root="./data", train=True, transform=trans)
train_loader = DataLoader(dataset = train_dataset, batch_size=100, shuffle=True)
test_loader = DataLoader(dataset = test_dataset, batch_size=100, shuffle=False)
net = Net()
print(net)
optimizer = optim.SGD(net.parameters(), lr=0.5, momentum=0.9)
criterion = nn.NLLLoss()
epochs = 100
for epoch in range(epochs):
for batch_idx, (data, target) in enumerate(train_loader):
data, target = Variable(data), Variable(target)
# resize data from (batch_size, 1, 28, 28) to (batch_size, 28*28)
data = data.view(-1, 28*28)
optimizer.zero_grad()
net_out = net(data)
loss = criterion(net_out, target)
loss.backward()
optimizer.step()
if batch_idx % 10000 == 0:
print('Train Epoch: {} [{}/{} ({:.0f}%)]\tLoss: {:.6f}'.format(
epoch, batch_idx * len(data), len(train_loader.dataset),
100. * batch_idx / len(train_loader), loss.item[0]))
test_loss = 0
correct = 0
for data, target in test_loader:
data, target = Variable(data, volatile=True), Variable(target)
data = data.view(-1, 28 * 28)
net_out = net(data)
#sum up batch loss
test_loss += criterion(net_out, target).item()
pred = net_out.data.max(1)[1]
correct += | [
"yann.nshare@epitech.eu"
] | yann.nshare@epitech.eu |
56337b1337e5899c09e974e19288f2cdc899dc73 | eb94bccbcc8d2244843dde59d201850870ef29ca | /datahub_lib/swagger_client/models/role_assignment_request.py | d57b16e3d72f647de2aff3f2b4762c479931ea5c | [] | no_license | sunasing/noaa_docker | d7e2000d1cfc91123d74f0c95f8efe2c7eda8c12 | 7d9e05686463a6cd8d39313af9496a06bdf00367 | refs/heads/master | 2020-12-27T14:44:18.967963 | 2020-02-03T06:50:52 | 2020-02-03T06:50:52 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 11,051 | py | # coding: utf-8
"""
Azure FarmBeats API
<p> <p>Azure FarmBeats helps you build digital agricultural solutions in Azure. By providing a standardized schema to query agricultural data from various sources, Azure FarmBeats provides you: <ul > <li style=\"margin: 7px;\">Ability to acquire, aggregate, process and store agricultural data.</li> <li style=\"margin: 7px;\">Capability to fuse data between data sources and generate insights.</li> <li style=\"margin: 7px;\">Schematized access and query capabilities on ingested data.</li> </ul> </p> <h><b>REST Operation Groups</b></h> <p><b>Farm:</b></p> <p>Farm corresponds to a physical location of interest within the system. Each Farm has a Farm name and a unique farm id.</p> <p><b>Device:</b></p> <p>Device corresponds to a physical device present in the farm. Each device has a unique device id. Device is typically provisioned to a farm with a farm id.</p> <p><b>DeviceModel:</b></p> <p>DeviceModel corresponds to the meta-data of the device such as the Manufacturer, Type of the device either Gateway or Node.</p> <p><b>Sensor:</b></p> <p>Sensor corresponds to a physical sensor that records values. A sensor is typically connected to a device with a device id.</p> </p> <p><b>SensorModel:</b></p> <p>SensorModel corresponds to the meta-data of the sensor such as the Manufacturer, Type of the sensor either Analog or Digital, Sensor Measure such as Ambient Temperature, Pressure etc.</p> <p><b>Telemetry:</b></p> <p>Telemetry provides the ability to read telemetry messages for a particular sensor & time range.</p> <p><b>Job:</b></p> <p>Job corresponds to any workflow of activities which are executed in the system to get a desired output. Each job is associated with a job id and job type.</p> <p><b>JobType:</b></p> <p>JobType corresponds to different job types supported by the system. This includes system defined & user-defined job types.</p> <p><b>ExtendedType:</b></p> <p>ExtendedType corresponds to the list of system & user-defined types in the system. 
This helps setup a new Sensor or Scene or Scenefile type in the system.</p> <p><b>Partner:</b></p> <p>Partner corresponds to the sensor/weather/imagery integration partner.</p> <p><b>Scene:</b></p> <p>Scene corresponds to any generated output in the context of a Farm. Each Scene has a scene id, scene source, scene type and farm id associated with it. Each scene id can have multiple scene files associated with it.</p> <p><b>SceneFile:</b></p> <p>SceneFile corresponds to all files which are generated for single scene. A single scene id can have multiple SceneFile ids associated with it.</p> <p><b>Rule:</b></p> <p>Rule corresponds to a condition for farm-related data to trigger an alert. Each rule will be in the context of a farm's data.</p> <p><b>Alert:</b></p> <p>Alert corresponds to a notification which gets generated when a rule condition is met. Each alert will be in the context of a rule.</p> <p><b>RoleDefinition:</b></p> <p>RoleDefinition defines allowed and disallowed actions for a role.</p> <p><b>RoleAssignment:</b></p> <p>RoleAssignment corresponds to the assignment of a role to a user or a service principal.</p> </p> # noqa: E501
OpenAPI spec version: v1
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
class RoleAssignmentRequest(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'role_definition_id': 'str',
'object_id': 'str',
'object_id_type': 'str',
'tenant_id': 'str'
}
attribute_map = {
'role_definition_id': 'roleDefinitionId',
'object_id': 'objectId',
'object_id_type': 'objectIdType',
'tenant_id': 'tenantId'
}
def __init__(self, role_definition_id=None, object_id=None, object_id_type=None, tenant_id=None): # noqa: E501
"""RoleAssignmentRequest - a model defined in Swagger""" # noqa: E501
self._role_definition_id = None
self._object_id = None
self._object_id_type = None
self._tenant_id = None
self.discriminator = None
self.role_definition_id = role_definition_id
self.object_id = object_id
self.object_id_type = object_id_type
self.tenant_id = tenant_id
@property
def role_definition_id(self):
"""Gets the role_definition_id of this RoleAssignmentRequest. # noqa: E501
Gets or sets roleDefinitionId of the role assignment. # noqa: E501
:return: The role_definition_id of this RoleAssignmentRequest. # noqa: E501
:rtype: str
"""
return self._role_definition_id
@role_definition_id.setter
def role_definition_id(self, role_definition_id):
"""Sets the role_definition_id of this RoleAssignmentRequest.
Gets or sets roleDefinitionId of the role assignment. # noqa: E501
:param role_definition_id: The role_definition_id of this RoleAssignmentRequest. # noqa: E501
:type: str
"""
if role_definition_id is None:
raise ValueError("Invalid value for `role_definition_id`, must not be `None`") # noqa: E501
if role_definition_id is not None and len(role_definition_id) > 200:
raise ValueError("Invalid value for `role_definition_id`, length must be less than or equal to `200`") # noqa: E501
if role_definition_id is not None and len(role_definition_id) < 3:
raise ValueError("Invalid value for `role_definition_id`, length must be greater than or equal to `3`") # noqa: E501
self._role_definition_id = role_definition_id
@property
def object_id(self):
"""Gets the object_id of this RoleAssignmentRequest. # noqa: E501
Gets or sets objectId of the role assignment. # noqa: E501
:return: The object_id of this RoleAssignmentRequest. # noqa: E501
:rtype: str
"""
return self._object_id
@object_id.setter
def object_id(self, object_id):
"""Sets the object_id of this RoleAssignmentRequest.
Gets or sets objectId of the role assignment. # noqa: E501
:param object_id: The object_id of this RoleAssignmentRequest. # noqa: E501
:type: str
"""
if object_id is None:
raise ValueError("Invalid value for `object_id`, must not be `None`") # noqa: E501
if object_id is not None and len(object_id) > 200:
raise ValueError("Invalid value for `object_id`, length must be less than or equal to `200`") # noqa: E501
if object_id is not None and len(object_id) < 3:
raise ValueError("Invalid value for `object_id`, length must be greater than or equal to `3`") # noqa: E501
self._object_id = object_id
@property
def object_id_type(self):
"""Gets the object_id_type of this RoleAssignmentRequest. # noqa: E501
Gets or sets objectIdType of the role assignment. # noqa: E501
:return: The object_id_type of this RoleAssignmentRequest. # noqa: E501
:rtype: str
"""
return self._object_id_type
@object_id_type.setter
def object_id_type(self, object_id_type):
"""Sets the object_id_type of this RoleAssignmentRequest.
Gets or sets objectIdType of the role assignment. # noqa: E501
:param object_id_type: The object_id_type of this RoleAssignmentRequest. # noqa: E501
:type: str
"""
if object_id_type is None:
raise ValueError("Invalid value for `object_id_type`, must not be `None`") # noqa: E501
allowed_values = ["UserId", "ServicePrincipalId"] # noqa: E501
if object_id_type not in allowed_values:
raise ValueError(
"Invalid value for `object_id_type` ({0}), must be one of {1}" # noqa: E501
.format(object_id_type, allowed_values)
)
self._object_id_type = object_id_type
@property
def tenant_id(self):
"""Gets the tenant_id of this RoleAssignmentRequest. # noqa: E501
Gets or sets tenantId of the role assignment. # noqa: E501
:return: The tenant_id of this RoleAssignmentRequest. # noqa: E501
:rtype: str
"""
return self._tenant_id
@tenant_id.setter
def tenant_id(self, tenant_id):
"""Sets the tenant_id of this RoleAssignmentRequest.
Gets or sets tenantId of the role assignment. # noqa: E501
:param tenant_id: The tenant_id of this RoleAssignmentRequest. # noqa: E501
:type: str
"""
if tenant_id is None:
raise ValueError("Invalid value for `tenant_id`, must not be `None`") # noqa: E501
if tenant_id is not None and len(tenant_id) > 200:
raise ValueError("Invalid value for `tenant_id`, length must be less than or equal to `200`") # noqa: E501
if tenant_id is not None and len(tenant_id) < 3:
raise ValueError("Invalid value for `tenant_id`, length must be greater than or equal to `3`") # noqa: E501
self._tenant_id = tenant_id
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(RoleAssignmentRequest, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, RoleAssignmentRequest):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
| [
"abmallic@microsoft.com"
] | abmallic@microsoft.com |
677a797647835a828b4b94df32fc98c68f9440dc | f1c19ef6da5367209045cb4f39727251f7ebdf15 | /constants/card.py | 4863843ae4309e89dfd52c0bd21163357bf41e2a | [] | no_license | mw4n6/autobots_go_fish | 3b7a73763042ab3fba46e6458d88e87182ecb33d | 75e3b30882f9411c8c9e986553f8c005101caed0 | refs/heads/master | 2022-12-15T15:42:36.374044 | 2020-09-12T00:17:30 | 2020-09-12T00:17:30 | 292,422,860 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 148 | py | SUITS = [
'Clubs',
'Spades',
'Hearts',
'Diamonds'];
RANKS = [
'A',
'2',
'3',
'4',
'5',
'6',
'7',
'8',
'9',
'10',
'J',
'Q',
'K'
];
| [
"michael.wang08@gmail.com"
] | michael.wang08@gmail.com |
3e5a6e3f8242994e087853773022b3c4f8b1d9b8 | 7d2ae7cd571a0ea99c98e0c10abfbc04c439bccf | /Bank_system/urls.py | 17dd850bd34febcc67f7d9b3c99baaa575d47edb | [] | no_license | dhruvk112/dhruvk112.github.io | 587ca5e9167078532c0cd48298ad9acf114a7d74 | 2d5c401cdba06f5ba39c809e37199a7c98279ce0 | refs/heads/main | 2022-12-25T07:33:45.048943 | 2020-10-10T17:23:47 | 2020-10-10T17:23:47 | 302,153,452 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 908 | py | """Bank_system URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.1/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path
from Users import views
from django.urls import include
urlpatterns = [
path('',views.index,name='index'),
path('admin/', admin.site.urls),
path('users/',include('Users.urls',namespace='users')),
]
| [
"dhruvkandpal13@gmail.com"
] | dhruvkandpal13@gmail.com |
0eb8c05f44ce6192a839496e20dd39bbaf464182 | 971e0efcc68b8f7cfb1040c38008426f7bcf9d2e | /tests/artificial/transf_RelativeDifference/trend_Lag1Trend/cycle_5/ar_/test_artificial_1024_RelativeDifference_Lag1Trend_5__100.py | f07285216a3cc7ff2114694287b967c65119eace | [
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | antoinecarme/pyaf | a105d172c2e7544f8d580d75f28b751351dd83b6 | b12db77cb3fa9292e774b2b33db8ce732647c35e | refs/heads/master | 2023-09-01T09:30:59.967219 | 2023-07-28T20:15:53 | 2023-07-28T20:15:53 | 70,790,978 | 457 | 77 | BSD-3-Clause | 2023-03-08T21:45:40 | 2016-10-13T09:30:30 | Python | UTF-8 | Python | false | false | 275 | py | import pyaf.Bench.TS_datasets as tsds
import tests.artificial.process_artificial_dataset as art
art.process_dataset(N = 1024 , FREQ = 'D', seed = 0, trendtype = "Lag1Trend", cycle_length = 5, transform = "RelativeDifference", sigma = 0.0, exog_count = 100, ar_order = 0); | [
"antoine.carme@laposte.net"
] | antoine.carme@laposte.net |
4523e952a55597f9698342a2cd3e2d65f8a2c12a | 4b581371a8429b8f9f6f2ee4c6905b624679102d | /fall/18_db-nmcrnch/db_builder.py | fceba6c26cf8dfbdbe36d78e6bdca261837cd73f | [] | no_license | jjshawshaw/SoftDev_Work | 9474ac2c37c4695c17d671e0abfa48b0547b4ae8 | 2d997f44a7688e8b2ae42c093d21904bc9578a91 | refs/heads/master | 2020-07-23T12:12:04.199127 | 2020-04-22T03:50:12 | 2020-04-22T03:50:12 | 207,552,505 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,329 | py | #Justin Shaw
#SoftDev1 pd1
#K17 -- No Trouble
#2019-10-10
import sqlite3 #enable control of an sqlite database
import csv #facilitate CSV I/O
DB_FILE="discobandit.db"
db = sqlite3.connect(DB_FILE) #open if file exists, otherwise create
c = db.cursor() #facilitate db ops
#==========================================================
## creates student table if the table does not exist
c.execute("CREATE TABLE IF NOT EXISTS students(name TEXT, age INTEGER, id INTEGER);")
with open("students.csv") as csvFile: #read in student.csv
reader = csv.DictReader(csvFile) #create DictReader
for row in reader:
c.execute(f"INSERT INTO students VALUES (\"{row['name']}\", {row['age']}, {row['id']});")
#insert each row into the student table
## creates courses table if the table does not exist
c.execute("CREATE TABLE IF NOT EXISTS courses(code TEXT, mark INTEGER, id INTEGER);")
with open("courses.csv") as csvFile: #read in courses.csv
reader = csv.DictReader(csvFile) #create DictReader
for row in reader:
c.execute(f"INSERT INTO courses VALUES (\"{row['code']}\", {row['mark']}, {row['id']});")
#insert each row into the courses table
#==========================================================
db.commit() #save changes
db.close() #close database
| [
"jshaw00@stuy.edu"
] | jshaw00@stuy.edu |
cf5db8547d4d1246066b14aa9ec4eeec1d1d642b | ae5268cec244d19a10094a0c1cbe6df64c21978e | /kvadratickรก rovnica.py | 59a05fc1d93a3c82d92041fc74229ddc6e68fb75 | [] | no_license | Zochova/kvadraticka-rovnica-NikolasCibula | 65def02bbcf5fbb9ee0edb67781131b6eff99eea | fe3b1abd8503d94596e89e3606d853ca41c83b68 | refs/heads/main | 2023-08-23T07:13:23.301418 | 2021-09-28T07:31:36 | 2021-09-28T07:31:36 | 411,171,376 | 0 | 0 | null | 2021-09-28T07:00:50 | 2021-09-28T07:00:45 | null | UTF-8 | Python | false | false | 264 | py | a=int(input('Zadajte ฤรญslo pre a='))
b=int(input('Zadajte ฤรญslo pre b='))
c=int(input('Zadajte ฤรญslo pre c='))
#d=> diskriminant
d=b*b-4*a*c
if 0 > d:
print('Rovnica mรก 0 rieลกenรญ')
else:
x1=((-b)+sqrt(d)/(2*a))
x2=((-b)-sqrt(d)/(2*a)) | [
"noreply@github.com"
] | Zochova.noreply@github.com |
24b6342cfd9d6f470842e7b811d8251cdbf6b932 | c85a6d674679780ee510b5c8c3dbcbdecc859f64 | /test/test_alert_config.py | b935b925e2b7e72d3f4f6959ec9d5a61a7aa6c8d | [] | no_license | cbrowet-axway/APIM_sdk | d4f4a124e86a7b2e65d0ef07b54c68e95de68337 | 4f82df67ebe3dd6eae645bab8f86e72c0347ee24 | refs/heads/master | 2020-05-25T13:22:35.802350 | 2020-04-16T09:25:21 | 2020-04-16T09:25:21 | 187,820,389 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 944 | py | # coding: utf-8
"""
API Manager API v1.3
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) # noqa: E501
OpenAPI spec version: 1.3.0
Contact: support@axway.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import swagger_client
from swagger_client.models.alert_config import AlertConfig # noqa: E501
from swagger_client.rest import ApiException
class TestAlertConfig(unittest.TestCase):
"""AlertConfig unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def testAlertConfig(self):
"""Test AlertConfig"""
# FIXME: construct object with mandatory attributes with example values
# model = swagger_client.models.alert_config.AlertConfig() # noqa: E501
pass
if __name__ == '__main__':
unittest.main()
| [
"cbro@semperpax.com"
] | cbro@semperpax.com |
bde0ecd51b6b0fbaa4ff999ec4a52d29de106ae1 | 7a3d8ad6ff5cf0862392a8d62dfba3285891ae62 | /Scrapy/data_scraping/data_scraping/pipelines.py | 036e591ef20e230f4146c05ff767a64557503698 | [
"MIT"
] | permissive | jonatascs/labdata-tcc | de112d9620e013294721ae9267f95e0df35f4bc1 | bb15d988f41ae15754ae4c76bc0c438dc4c7a2f9 | refs/heads/master | 2022-12-14T18:16:14.770493 | 2020-09-21T13:13:54 | 2020-09-21T13:13:54 | 295,526,211 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 294 | py | # -*- coding: utf-8 -*-
# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://docs.scrapy.org/en/latest/topics/item-pipeline.html
class DataScrapingPipeline(object):
def process_item(self, item, spider):
return item
| [
"noreply@github.com"
] | jonatascs.noreply@github.com |
d96ed2ce95e6d3184151b1539a6f3a0eb664c89b | 75d8667735782cd1d0eb4877e52c89da5cd92dde | /nova/api/openstack/compute/floating_ips_bulk.py | 3107887da5317d24b2fbdb3186c0eec49b39a49a | [
"Apache-2.0"
] | permissive | bopopescu/nova-token | ffecfd3ec561936b7d9d7e691bc57383cde05436 | ec98f69dea7b3e2b9013b27fd55a2c1a1ac6bfb2 | refs/heads/master | 2022-11-22T09:53:31.073483 | 2016-05-14T02:47:01 | 2016-05-15T22:02:55 | 282,105,621 | 0 | 0 | Apache-2.0 | 2020-07-24T02:42:19 | 2020-07-24T02:42:18 | null | UTF-8 | Python | false | false | 13,677 | py | begin_unit
comment|'# Copyright 2012 IBM Corp.'
nl|'\n'
comment|'#'
nl|'\n'
comment|'# Licensed under the Apache License, Version 2.0 (the "License"); you may'
nl|'\n'
comment|'# not use this file except in compliance with the License. You may obtain'
nl|'\n'
comment|'# a copy of the License at'
nl|'\n'
comment|'#'
nl|'\n'
comment|'# http://www.apache.org/licenses/LICENSE-2.0'
nl|'\n'
comment|'#'
nl|'\n'
comment|'# Unless required by applicable law or agreed to in writing, software'
nl|'\n'
comment|'# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT'
nl|'\n'
comment|'# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the'
nl|'\n'
comment|'# License for the specific language governing permissions and limitations'
nl|'\n'
comment|'# under the License.'
nl|'\n'
nl|'\n'
name|'import'
name|'netaddr'
newline|'\n'
name|'import'
name|'six'
newline|'\n'
name|'import'
name|'webob'
op|'.'
name|'exc'
newline|'\n'
nl|'\n'
name|'from'
name|'nova'
op|'.'
name|'api'
op|'.'
name|'openstack'
op|'.'
name|'compute'
op|'.'
name|'schemas'
name|'import'
name|'floating_ips_bulk'
newline|'\n'
name|'from'
name|'nova'
op|'.'
name|'api'
op|'.'
name|'openstack'
name|'import'
name|'extensions'
newline|'\n'
name|'from'
name|'nova'
op|'.'
name|'api'
op|'.'
name|'openstack'
name|'import'
name|'wsgi'
newline|'\n'
name|'from'
name|'nova'
op|'.'
name|'api'
name|'import'
name|'validation'
newline|'\n'
name|'import'
name|'nova'
op|'.'
name|'conf'
newline|'\n'
name|'from'
name|'nova'
name|'import'
name|'exception'
newline|'\n'
name|'from'
name|'nova'
op|'.'
name|'i18n'
name|'import'
name|'_'
newline|'\n'
name|'from'
name|'nova'
name|'import'
name|'objects'
newline|'\n'
nl|'\n'
DECL|variable|CONF
name|'CONF'
op|'='
name|'nova'
op|'.'
name|'conf'
op|'.'
name|'CONF'
newline|'\n'
name|'CONF'
op|'.'
name|'import_opt'
op|'('
string|"'default_floating_pool'"
op|','
string|"'nova.network.floating_ips'"
op|')'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|variable|ALIAS
name|'ALIAS'
op|'='
string|"'os-floating-ips-bulk'"
newline|'\n'
DECL|variable|authorize
name|'authorize'
op|'='
name|'extensions'
op|'.'
name|'os_compute_authorizer'
op|'('
name|'ALIAS'
op|')'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|class|FloatingIPBulkController
name|'class'
name|'FloatingIPBulkController'
op|'('
name|'wsgi'
op|'.'
name|'Controller'
op|')'
op|':'
newline|'\n'
nl|'\n'
indent|' '
op|'@'
name|'extensions'
op|'.'
name|'expected_errors'
op|'('
number|'404'
op|')'
newline|'\n'
DECL|member|index
name|'def'
name|'index'
op|'('
name|'self'
op|','
name|'req'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Return a list of all floating IPs."""'
newline|'\n'
name|'context'
op|'='
name|'req'
op|'.'
name|'environ'
op|'['
string|"'nova.context'"
op|']'
newline|'\n'
name|'authorize'
op|'('
name|'context'
op|')'
newline|'\n'
nl|'\n'
name|'return'
name|'self'
op|'.'
name|'_get_floating_ip_info'
op|'('
name|'context'
op|')'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'extensions'
op|'.'
name|'expected_errors'
op|'('
number|'404'
op|')'
newline|'\n'
DECL|member|show
name|'def'
name|'show'
op|'('
name|'self'
op|','
name|'req'
op|','
name|'id'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Return a list of all floating IPs for a given host."""'
newline|'\n'
name|'context'
op|'='
name|'req'
op|'.'
name|'environ'
op|'['
string|"'nova.context'"
op|']'
newline|'\n'
name|'authorize'
op|'('
name|'context'
op|')'
newline|'\n'
nl|'\n'
name|'return'
name|'self'
op|'.'
name|'_get_floating_ip_info'
op|'('
name|'context'
op|','
name|'id'
op|')'
newline|'\n'
nl|'\n'
DECL|member|_get_floating_ip_info
dedent|''
name|'def'
name|'_get_floating_ip_info'
op|'('
name|'self'
op|','
name|'context'
op|','
name|'host'
op|'='
name|'None'
op|')'
op|':'
newline|'\n'
indent|' '
name|'floating_ip_info'
op|'='
op|'{'
string|'"floating_ip_info"'
op|':'
op|'['
op|']'
op|'}'
newline|'\n'
nl|'\n'
name|'if'
name|'host'
name|'is'
name|'None'
op|':'
newline|'\n'
indent|' '
name|'try'
op|':'
newline|'\n'
indent|' '
name|'floating_ips'
op|'='
name|'objects'
op|'.'
name|'FloatingIPList'
op|'.'
name|'get_all'
op|'('
name|'context'
op|')'
newline|'\n'
dedent|''
name|'except'
name|'exception'
op|'.'
name|'NoFloatingIpsDefined'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'floating_ip_info'
newline|'\n'
dedent|''
dedent|''
name|'else'
op|':'
newline|'\n'
indent|' '
name|'try'
op|':'
newline|'\n'
indent|' '
name|'floating_ips'
op|'='
name|'objects'
op|'.'
name|'FloatingIPList'
op|'.'
name|'get_by_host'
op|'('
name|'context'
op|','
nl|'\n'
name|'host'
op|')'
newline|'\n'
dedent|''
name|'except'
name|'exception'
op|'.'
name|'FloatingIpNotFoundForHost'
name|'as'
name|'e'
op|':'
newline|'\n'
indent|' '
name|'raise'
name|'webob'
op|'.'
name|'exc'
op|'.'
name|'HTTPNotFound'
op|'('
name|'explanation'
op|'='
name|'e'
op|'.'
name|'format_message'
op|'('
op|')'
op|')'
newline|'\n'
nl|'\n'
dedent|''
dedent|''
name|'for'
name|'floating_ip'
name|'in'
name|'floating_ips'
op|':'
newline|'\n'
indent|' '
name|'instance_uuid'
op|'='
name|'None'
newline|'\n'
name|'fixed_ip'
op|'='
name|'None'
newline|'\n'
name|'if'
name|'floating_ip'
op|'.'
name|'fixed_ip'
op|':'
newline|'\n'
indent|' '
name|'instance_uuid'
op|'='
name|'floating_ip'
op|'.'
name|'fixed_ip'
op|'.'
name|'instance_uuid'
newline|'\n'
name|'fixed_ip'
op|'='
name|'str'
op|'('
name|'floating_ip'
op|'.'
name|'fixed_ip'
op|'.'
name|'address'
op|')'
newline|'\n'
nl|'\n'
dedent|''
name|'result'
op|'='
op|'{'
string|"'address'"
op|':'
name|'str'
op|'('
name|'floating_ip'
op|'.'
name|'address'
op|')'
op|','
nl|'\n'
string|"'pool'"
op|':'
name|'floating_ip'
op|'.'
name|'pool'
op|','
nl|'\n'
string|"'interface'"
op|':'
name|'floating_ip'
op|'.'
name|'interface'
op|','
nl|'\n'
string|"'project_id'"
op|':'
name|'floating_ip'
op|'.'
name|'project_id'
op|','
nl|'\n'
string|"'instance_uuid'"
op|':'
name|'instance_uuid'
op|','
nl|'\n'
string|"'fixed_ip'"
op|':'
name|'fixed_ip'
op|'}'
newline|'\n'
name|'floating_ip_info'
op|'['
string|"'floating_ip_info'"
op|']'
op|'.'
name|'append'
op|'('
name|'result'
op|')'
newline|'\n'
nl|'\n'
dedent|''
name|'return'
name|'floating_ip_info'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'extensions'
op|'.'
name|'expected_errors'
op|'('
op|'('
number|'400'
op|','
number|'409'
op|')'
op|')'
newline|'\n'
op|'@'
name|'validation'
op|'.'
name|'schema'
op|'('
name|'floating_ips_bulk'
op|'.'
name|'create'
op|')'
newline|'\n'
DECL|member|create
name|'def'
name|'create'
op|'('
name|'self'
op|','
name|'req'
op|','
name|'body'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Bulk create floating IPs."""'
newline|'\n'
name|'context'
op|'='
name|'req'
op|'.'
name|'environ'
op|'['
string|"'nova.context'"
op|']'
newline|'\n'
name|'authorize'
op|'('
name|'context'
op|')'
newline|'\n'
nl|'\n'
name|'params'
op|'='
name|'body'
op|'['
string|"'floating_ips_bulk_create'"
op|']'
newline|'\n'
name|'ip_range'
op|'='
name|'params'
op|'['
string|"'ip_range'"
op|']'
newline|'\n'
nl|'\n'
name|'pool'
op|'='
name|'params'
op|'.'
name|'get'
op|'('
string|"'pool'"
op|','
name|'CONF'
op|'.'
name|'default_floating_pool'
op|')'
newline|'\n'
name|'interface'
op|'='
name|'params'
op|'.'
name|'get'
op|'('
string|"'interface'"
op|','
name|'CONF'
op|'.'
name|'public_interface'
op|')'
newline|'\n'
nl|'\n'
name|'try'
op|':'
newline|'\n'
indent|' '
name|'ips'
op|'='
op|'['
name|'objects'
op|'.'
name|'FloatingIPList'
op|'.'
name|'make_ip_info'
op|'('
name|'addr'
op|','
name|'pool'
op|','
name|'interface'
op|')'
nl|'\n'
name|'for'
name|'addr'
name|'in'
name|'self'
op|'.'
name|'_address_to_hosts'
op|'('
name|'ip_range'
op|')'
op|']'
newline|'\n'
dedent|''
name|'except'
name|'exception'
op|'.'
name|'InvalidInput'
name|'as'
name|'exc'
op|':'
newline|'\n'
indent|' '
name|'raise'
name|'webob'
op|'.'
name|'exc'
op|'.'
name|'HTTPBadRequest'
op|'('
name|'explanation'
op|'='
name|'exc'
op|'.'
name|'format_message'
op|'('
op|')'
op|')'
newline|'\n'
nl|'\n'
dedent|''
name|'try'
op|':'
newline|'\n'
indent|' '
name|'objects'
op|'.'
name|'FloatingIPList'
op|'.'
name|'create'
op|'('
name|'context'
op|','
name|'ips'
op|')'
newline|'\n'
dedent|''
name|'except'
name|'exception'
op|'.'
name|'FloatingIpExists'
name|'as'
name|'exc'
op|':'
newline|'\n'
indent|' '
name|'raise'
name|'webob'
op|'.'
name|'exc'
op|'.'
name|'HTTPConflict'
op|'('
name|'explanation'
op|'='
name|'exc'
op|'.'
name|'format_message'
op|'('
op|')'
op|')'
newline|'\n'
nl|'\n'
dedent|''
name|'return'
op|'{'
string|'"floating_ips_bulk_create"'
op|':'
op|'{'
string|'"ip_range"'
op|':'
name|'ip_range'
op|','
nl|'\n'
string|'"pool"'
op|':'
name|'pool'
op|','
nl|'\n'
string|'"interface"'
op|':'
name|'interface'
op|'}'
op|'}'
newline|'\n'
nl|'\n'
dedent|''
op|'@'
name|'extensions'
op|'.'
name|'expected_errors'
op|'('
op|'('
number|'400'
op|','
number|'404'
op|')'
op|')'
newline|'\n'
op|'@'
name|'validation'
op|'.'
name|'schema'
op|'('
name|'floating_ips_bulk'
op|'.'
name|'delete'
op|')'
newline|'\n'
DECL|member|update
name|'def'
name|'update'
op|'('
name|'self'
op|','
name|'req'
op|','
name|'id'
op|','
name|'body'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Bulk delete floating IPs."""'
newline|'\n'
name|'context'
op|'='
name|'req'
op|'.'
name|'environ'
op|'['
string|"'nova.context'"
op|']'
newline|'\n'
name|'authorize'
op|'('
name|'context'
op|')'
newline|'\n'
nl|'\n'
name|'if'
name|'id'
op|'!='
string|'"delete"'
op|':'
newline|'\n'
indent|' '
name|'msg'
op|'='
name|'_'
op|'('
string|'"Unknown action"'
op|')'
newline|'\n'
name|'raise'
name|'webob'
op|'.'
name|'exc'
op|'.'
name|'HTTPNotFound'
op|'('
name|'explanation'
op|'='
name|'msg'
op|')'
newline|'\n'
dedent|''
name|'ip_range'
op|'='
name|'body'
op|'['
string|"'ip_range'"
op|']'
newline|'\n'
name|'try'
op|':'
newline|'\n'
indent|' '
name|'ips'
op|'='
op|'('
name|'objects'
op|'.'
name|'FloatingIPList'
op|'.'
name|'make_ip_info'
op|'('
name|'address'
op|','
name|'None'
op|','
name|'None'
op|')'
nl|'\n'
name|'for'
name|'address'
name|'in'
name|'self'
op|'.'
name|'_address_to_hosts'
op|'('
name|'ip_range'
op|')'
op|')'
newline|'\n'
dedent|''
name|'except'
name|'exception'
op|'.'
name|'InvalidInput'
name|'as'
name|'exc'
op|':'
newline|'\n'
indent|' '
name|'raise'
name|'webob'
op|'.'
name|'exc'
op|'.'
name|'HTTPBadRequest'
op|'('
name|'explanation'
op|'='
name|'exc'
op|'.'
name|'format_message'
op|'('
op|')'
op|')'
newline|'\n'
dedent|''
name|'objects'
op|'.'
name|'FloatingIPList'
op|'.'
name|'destroy'
op|'('
name|'context'
op|','
name|'ips'
op|')'
newline|'\n'
nl|'\n'
name|'return'
op|'{'
string|'"floating_ips_bulk_delete"'
op|':'
name|'ip_range'
op|'}'
newline|'\n'
nl|'\n'
DECL|member|_address_to_hosts
dedent|''
name|'def'
name|'_address_to_hosts'
op|'('
name|'self'
op|','
name|'addresses'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Iterate over hosts within an address range.\n\n If an explicit range specifier is missing, the parameter is\n interpreted as a specific individual address.\n """'
newline|'\n'
name|'try'
op|':'
newline|'\n'
indent|' '
name|'return'
op|'['
name|'netaddr'
op|'.'
name|'IPAddress'
op|'('
name|'addresses'
op|')'
op|']'
newline|'\n'
dedent|''
name|'except'
name|'ValueError'
op|':'
newline|'\n'
indent|' '
name|'net'
op|'='
name|'netaddr'
op|'.'
name|'IPNetwork'
op|'('
name|'addresses'
op|')'
newline|'\n'
name|'if'
name|'net'
op|'.'
name|'size'
op|'<'
number|'4'
op|':'
newline|'\n'
indent|' '
name|'reason'
op|'='
name|'_'
op|'('
string|'"/%s should be specified as single address(es) "'
nl|'\n'
string|'"not in cidr format"'
op|')'
op|'%'
name|'net'
op|'.'
name|'prefixlen'
newline|'\n'
name|'raise'
name|'exception'
op|'.'
name|'InvalidInput'
op|'('
name|'reason'
op|'='
name|'reason'
op|')'
newline|'\n'
dedent|''
name|'else'
op|':'
newline|'\n'
indent|' '
name|'return'
name|'net'
op|'.'
name|'iter_hosts'
op|'('
op|')'
newline|'\n'
dedent|''
dedent|''
name|'except'
name|'netaddr'
op|'.'
name|'AddrFormatError'
name|'as'
name|'exc'
op|':'
newline|'\n'
indent|' '
name|'raise'
name|'exception'
op|'.'
name|'InvalidInput'
op|'('
name|'reason'
op|'='
name|'six'
op|'.'
name|'text_type'
op|'('
name|'exc'
op|')'
op|')'
newline|'\n'
nl|'\n'
nl|'\n'
DECL|class|FloatingIpsBulk
dedent|''
dedent|''
dedent|''
name|'class'
name|'FloatingIpsBulk'
op|'('
name|'extensions'
op|'.'
name|'V21APIExtensionBase'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""Bulk handling of Floating IPs."""'
newline|'\n'
nl|'\n'
DECL|variable|name
name|'name'
op|'='
string|'"FloatingIpsBulk"'
newline|'\n'
DECL|variable|alias
name|'alias'
op|'='
name|'ALIAS'
newline|'\n'
DECL|variable|version
name|'version'
op|'='
number|'1'
newline|'\n'
nl|'\n'
DECL|member|get_resources
name|'def'
name|'get_resources'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
name|'resource'
op|'='
op|'['
name|'extensions'
op|'.'
name|'ResourceExtension'
op|'('
name|'ALIAS'
op|','
nl|'\n'
name|'FloatingIPBulkController'
op|'('
op|')'
op|')'
op|']'
newline|'\n'
name|'return'
name|'resource'
newline|'\n'
nl|'\n'
DECL|member|get_controller_extensions
dedent|''
name|'def'
name|'get_controller_extensions'
op|'('
name|'self'
op|')'
op|':'
newline|'\n'
indent|' '
string|'"""It\'s an abstract function V21APIExtensionBase and the extension\n will not be loaded without it.\n """'
newline|'\n'
name|'return'
op|'['
op|']'
newline|'\n'
dedent|''
dedent|''
endmarker|''
end_unit
| [
"dmg@uvic.ca"
] | dmg@uvic.ca |
0d512e3aeaa270f706df8a5e3db7f4ca01e7a2fa | 64be1c4b09ef228d73a50523674c66cda10e366e | /oop/inheritance_example2.py | 7116a421adb841baf465aa48c330b21166a40be2 | [] | no_license | nkirkpatrick/google-python-crash-course | b18191bd36b687073424ca28d82aad2e7df4f25e | c57f06ffd9759084e823261322b6c0b993a87197 | refs/heads/master | 2022-11-13T20:54:52.522996 | 2020-07-06T00:54:32 | 2020-07-06T00:54:32 | 264,575,266 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 360 | py | class Animal:
    # Default vocalization; subclasses override this class attribute.
    sound = ""

    def __init__(self, name):
        """Create an animal with the given display name."""
        self.name = name

    def speak(self):
        """Print a greeting wrapped in this animal's characteristic sound."""
        print("{sound} I'm {name}! {sound}".format(name=self.name, sound=self.sound))
class Piglet(Animal):
    """Animal subclass that oinks."""
    sound = "Oink!"

# Demo: prints "Oink! I'm Hamlet! Oink!".
hamlet = Piglet("Hamlet")
hamlet.speak()

class Cow(Animal):
    """Animal subclass that moos."""
    sound = "Mooooo"

# Demo: prints "Mooooo I'm Milky White! Mooooo".
milky = Cow("Milky White")
milky.speak()
| [
"norbertkirkpatrick@macbook-pro.lan"
] | norbertkirkpatrick@macbook-pro.lan |
26a2f122cb7b9221175cb1153bca90f9d6bd1f86 | 363fe3dafdbedbb8346b0c38e58f483fec165d13 | /dataFiles/SinglePulse.py | e517cb460451b3a01a58d744a6dc7e93b9ab6efc | [] | no_license | rossjjennings/REU-pulsar | 53598d23f2f2e2dbfd8025cec8bee53124ec8673 | 4ad8c8fd1cc6e093af18c49eb0ecca9ad1f3ae57 | refs/heads/master | 2023-06-06T21:30:14.369858 | 2021-07-08T00:04:50 | 2021-07-08T00:04:50 | 276,169,029 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 6,422 | py | '''
Michael Lam
Last updated: 12/31/2013
Define interpulse alignment as putting the peak value at len/4. Interpulse will be roughly at 3*len/4
Need to handle what to do if no opw for offpulse noise, etc.
Figure out way to add/average SPs.
'''
from __future__ import division, print_function
from matplotlib.pyplot import *
import numpy as np
import utilities as u
import scipy.optimize as optimize
import sys
#sys.path.append('/home/dizzy4/mlam/source/jimcode')
import waveforms
get_toa = waveforms.get_toa3
#import ffttoa
#get_toa = ffttoa.get_toa
#ACF=lambda p: np.correlate(p,p,"full") #no longer used
class SinglePulse:
    """Container for one pulsar single-pulse profile.

    Holds the profile samples plus optional main-pulse (mpw), interpulse
    (ipw) and off-pulse (opw) bin windows, and provides alignment,
    baseline, width, ACF, noise and template-fitting helpers.  Relies on
    the project modules `utilities` (as u) and `waveforms`/`get_toa`.
    """

    def __init__(self,data,mpw=None,ipw=None,opw=None,prepare=False,align=None,period=None):
        # Profile samples as a numpy array.
        self.data=np.array(data)
        if mpw is not None:
            self.mpw = np.array(mpw)
        else:
            self.mpw = None
        if ipw is not None:
            self.ipw = np.array(ipw)
        else:
            self.ipw = None
        #Define off pulse
        self.nbins = len(data)
        bins=np.arange(self.nbins)
        if opw is None:
            # Derive the off-pulse window as the complement of whichever
            # pulse windows were supplied.
            if self.mpw is None and self.ipw is None:
                self.opw=None #do not define any windows
            elif self.ipw is None:
                self.opw=bins[np.logical_not(np.in1d(bins,mpw))]
            elif self.mpw is None:
                self.opw=bins[np.logical_not(np.in1d(bins,ipw))]
            else:
                self.opw=bins[np.logical_not(np.logical_or(np.in1d(bins,mpw),np.in1d(bins,ipw)))]
        else:
            self.opw=np.array(opw)
        # With no windows at all, treat every bin as main pulse.
        if self.mpw is None and self.ipw is None and self.opw is None:
            self.mpw=np.arange(self.nbins)
        if align:
            # Circularly shift the profile by `align` bins.
            if align!=0:
                self.data = np.roll(self.data,align)
                #prepare=True #? #keep this for 1937?
            #self.shiftit(align,save=True)
        if prepare: #change this for jitter (prepare set to False here)
            self.interpulse_align()
            #self.normalize() #do not do this
        self.period = period
        # Flag all-zero / all-NaN profiles as null (e.g. dropped pulses).
        self.null = False
        if np.all(self.data==0) or np.all(np.isnan(self.data)):
            self.null = True

    def interpulse_align(self):
        """Center the peak, then roll it to bin nbins/4 (interpulse near 3*nbins/4)."""
        self.data = np.roll(u.center_max(self.data),-len(self.data)//4)

    def center_align(self):
        """Roll the profile so its maximum sits at the center bin."""
        self.data = u.center_max(self.data)

    def normalize(self):
        """Normalize the profile using the off-pulse mean as the floor."""
        minimum=np.mean(self.getOffpulse())
        #print minimum
        self.data=u.normalize(self.data,minimum=minimum)

    def getFWHM(self,simple=False,timeunits=True):
        """Return the full width at half maximum, in time units if period is set."""
        #remove baseline? what if no offpulse window?
        dbin = u.FWHM(self.data,notcentered=True)#,window=800)
        factor=1
        if timeunits and self.period is not None:
            factor = self.period/self.nbins
        return factor*dbin

    def getWeff(self,fourier=False,sumonly=False,timeunits=True):
        """Return the effective width W_eff = P / sqrt(N * sum((U[i+1]-U[i])^2)).

        Requires timeunits and a known period; otherwise returns None.
        """
        if not timeunits or self.period is None:
            return None
        P=self.period
        N=self.nbins
        U=u.normalize(self.data,simple=True) #remove baseline?
        tot=np.sum(np.power(U[1:]-U[:-1],2))
        if sumonly:
            return tot
        self.weff=P/np.sqrt(N*tot)
        return self.weff

    def remove_baseline(self,save=True):
        """Subtract the off-pulse mean; no-op when no off-pulse window exists."""
        if self.opw is None:
            #print "No Offpulse" #do this?
            return
        opmean = np.mean(self.getOffpulse())
        if save:
            self.data = self.data - opmean
            return self.data
        return self.data - opmean

    def getMainpulse(self):
        """Return the samples inside the main-pulse window (or None)."""
        if self.mpw is None:
            return None
        return self.data[self.mpw]

    def getInterpulse(self):
        """Return the samples inside the interpulse window (or None)."""
        if self.ipw is None:
            return None
        return self.data[self.ipw]

    def getOffpulse(self):
        """Return the samples inside the off-pulse window (or None)."""
        if self.opw is None:
            return None
        return self.data[self.opw]

    def getAllpulse(self):
        """Return (mainpulse, interpulse, offpulse) sample arrays."""
        return self.getMainpulse(),self.getInterpulse(),self.getOffpulse()

    def getMainpulseACF(self):
        """Autocorrelation of the main-pulse window via u.acf."""
        mp=self.getMainpulse()
        return u.acf(mp,var=False,norm_by_tau=True)

    def getInterpulseACF(self):
        """Autocorrelation of the interpulse window, or None without one."""
        if self.ipw is None:
            return None
        ip=self.getInterpulse()
        return u.acf(ip,var=False,norm_by_tau=True)

    def getOffpulseACF(self):
        """Autocorrelation of the off-pulse window, or None without one."""
        if self.opw is None:
            return None
        op=self.getOffpulse()
        return u.acf(op,var=False,norm_by_tau=True)

    def getAllACF(self):
        """Return (main, inter, off) pulse ACFs."""
        return self.getMainpulseACF(),self.getInterpulseACF(),self.getOffpulseACF()

    def getOffpulseNoise(self,full=False):
        """Return off-pulse std dev (or (mean, std) when full=True), None without window."""
        if self.opw is None:
            return None
        op=self.getOffpulse()
        if full:
            return np.mean(op),np.std(op)
        return np.std(op)

    def getOffpulseZCT(self):
        """Zero-crossing test of the off-pulse samples via u.zct."""
        return u.zct(self.getOffpulse(),full=True,meansub=True)

    def fitPulse(self,template,fixedphase=False,rms_baseline=None):
        """
        Fit `template` to this pulse with get_toa.

        Returns taucff, tauhat, bhat, sigma_Tau,sigma_b, snr, rho
        (or just S/N when fixedphase=True; None for null pulses or on failure).
        """
        if self.null:
            return None
        if rms_baseline is None:
            self.remove_baseline()
        if fixedphase: #just return S/N
            # Least-squares amplitude-only fit of template to data.
            p0 = [np.max(self.data)]
            p1,cov,infodict,mesg,ier = optimize.leastsq(lambda p,x,y: np.abs(p[0])*x - y,p0[:],args=(np.asarray(template,np.float64),np.asarray(self.data,np.float64)),full_output=True) #conversion to np.float64 fixes bug with Jacobian inversion
            noise = self.getOffpulseNoise()
            return np.abs(p1[0])/noise#,np.sqrt(cov[0][0])/noise
        if self.opw is None:
            # No off-pulse noise estimate: use the supplied rms, else unity.
            if rms_baseline is not None:
                try:
                    return get_toa(template,self.data,rms_baseline)
                except:
                    # Debug dump before aborting; plot/show come from matplotlib's
                    # star import at module level.
                    print(self.data)
                    plot(self.data)
                    show()
                    raise SystemExit
            return get_toa(template,self.data,1)
        try: #problem?
            return get_toa(template,self.data,self.getOffpulseNoise())#,nlagsfit=1001)
        except:
            return None

    #define this so a positive shift is forward
    def shiftit(self,shift,save=False):
        """Sub-bin shift of the profile; positive `shift` moves it forward."""
        x = waveforms.shiftit(self.data,-1*shift)
        if save:
            self.data = x
        return x

    def getPeriod(self):
        """Return the pulse period (may be None)."""
        return self.period

    def getNBins(self):
        """Return the number of profile bins."""
        return len(self.data)
| [
"rossjjennings@gmail.com"
] | rossjjennings@gmail.com |
f12c7e78f6cc76322a20f97f04b8731c60d73ac0 | 5474905a26e356fe2742e62567718173b81b616d | /templates/python.flask/{{cookiecutter.project_safe_name}}/test/test_demo.py | 52e109dcd4a6b06c2716df9480549aeac5797cf5 | [
"MIT"
] | permissive | by46/recipe | 16dd24a8a83f2a00beab84c5b6522c0bff073233 | 203abd2141a536b66b4e57d073169a49395be1f0 | refs/heads/master | 2020-04-13T22:41:27.865516 | 2016-09-09T10:09:20 | 2016-09-09T10:09:20 | 65,368,302 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 746 | py | import unittest
import app
from app import create_app
class HelloWorldTestCase(unittest.TestCase):
    """Smoke tests for the Flask app built with the 'test' configuration."""

    def setUp(self):
        # Fresh test client per test; create_app comes from the project package.
        self.client = create_app('test').test_client()

    def test_hello_world(self):
        """Index page should contain the expected book title."""
        response = self.client.get('/{{cookiecutter.project_slug}}', follow_redirects=True)
        # NOTE(review): assumes response.data is str (Python 2 era Flask);
        # under Python 3 it is bytes and this membership test would need b'...'.
        self.assertTrue('The Art of Computer Programming' in response.data)

    def test_version(self):
        """Version endpoint should echo the package's __version__."""
        response = self.client.get('/{{cookiecutter.project_slug}}/version', follow_redirects=True)
        self.assertTrue(app.__version__ in response.data)

    def test_faq(self):
        """FAQ page should return exactly the sentinel body."""
        response = self.client.get('/{{cookiecutter.project_slug}}/faq.htm')
        self.assertEqual('<!--Newegg-->', response.data)
| [
"ycs_ctbu_2010@126.com"
] | ycs_ctbu_2010@126.com |
cc2a1c27b48d6d2b129e924104a057d843445b3a | 31de01be3f4bb6b94aff79e098ccb15f7e939597 | /WOL PXE Project/WOL-PXE/WOL-BF/impacket-0.9.12/build/lib/impacket/cdp.py | 5958d6e4471be6e1211a20b36ca2e1d41d7048a4 | [
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0",
"Apache-1.1",
"BSD-2-Clause"
] | permissive | gregesposito/EspoScripts | 0c3e540767a2459bc4b625c00ae3034b6ad8ed27 | 22cb11423d5168367a9aa078cab4dd5c68a858c2 | refs/heads/master | 2021-01-25T08:48:07.949199 | 2015-07-06T17:05:55 | 2015-07-06T17:05:55 | 38,629,342 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 13,594 | py | # Copyright (c) 2003-2012 CORE Security Technologies
#
# This software is provided under under a slightly modified version
# of the Apache Software License. See the accompanying LICENSE file
# for more information.
#
# $Id: cdp.py 529 2012-04-29 21:39:46Z bethus@gmail.com $
#
# Description:
# Cisco Discovery Protocol packet codecs.
#
# Author:
# Martin Candurra
# martincad at corest.com
from ImpactPacket import ProtocolLayer, PacketBuffer, Header
from struct import unpack
import socket
IP_ADDRESS_LENGTH = 4
class CDPTypes:
    """Numeric TLV type codes carried in CDP packets."""

    DeviceID_Type = 1
    Address_Type = 2
    PortID_Type = 3
    Capabilities_Type = 4
    SoftVersion_Type = 5
    Platform_Type = 6
    IPPrefix_Type = 7
    ProtocolHello_Type = 8
    MTU_Type = 17
    SystemName_Type = 20
    SystemObjectId_Type = 21
    SnmpLocation = 23
class CDP(Header):
    """Top-level CDP packet: 4-byte header followed by a list of TLV elements."""

    # SNAP protocol id for CDP and Cisco's OUI.
    Type = 0x2000
    OUI = 0x00000c

    def __init__(self, aBuffer = None):
        Header.__init__(self, 8)
        if aBuffer:
            self.load_header(aBuffer)
            self._elements = self._getElements(aBuffer)

    def _getElements(self, aBuffer):
        """Split the payload after the fixed header into CDPElement objects."""
        # Remove version (1 byte), TTL (1 byte), and checksum (2 bytes)
        buff = aBuffer[4:]
        l = []
        finish = False
        while buff:
            elem = CDPElementFactory.create(buff)
            data = elem.get_data()
            l.append( elem )
            # Advance past this element; a zero TLV length here would loop forever.
            buff = buff[ elem.get_length() : ]
        return l

    def get_header_size(self):
        return 8

    def get_version(self):
        return self.get_byte(0)

    def get_ttl(self):
        return self.get_byte(1)

    def get_checksum(self):
        return self.get_word(2)

    def get_type(self):
        return self.get_word(4)

    def get_lenght(self):
        # NOTE(review): misspelling of "length" kept for caller compatibility.
        return self.get_word(6)

    def getElements(self):
        """Return the parsed TLV list (only set when a buffer was supplied)."""
        return self._elements

    def __str__(self):
        knowcode = 0
        tmp_str = 'CDP Details:\n'
        for element in self._elements:
            tmp_str += "** Type:" + str(element.get_type()) + " " + str(element) + "\n"
        return tmp_str
def get_byte(buffer, offset):
    """Read one unsigned byte from buffer at the given offset."""
    chunk = buffer[offset:offset + 1]
    (value,) = unpack("!B", chunk)
    return value
def get_word(buffer, offset):
    """Read one big-endian signed 16-bit integer from buffer at offset."""
    chunk = buffer[offset:offset + 2]
    (value,) = unpack("!h", chunk)
    return value
def get_long(buffer, offset):
    """Read one big-endian unsigned 32-bit integer from buffer at offset."""
    chunk = buffer[offset:offset + 4]
    (value,) = unpack("!I", chunk)
    return value
def get_bytes(buffer, offset, bytes):
    """Return the `bytes`-long slice of buffer starting at offset."""
    end = offset + bytes
    return buffer[offset:end]
def mac_to_string(mac_bytes):
    """Format a 6-byte MAC address as colon-separated lowercase hex."""
    octets = unpack('!BBBBBB', mac_bytes)
    return ':'.join('%02x' % octet for octet in octets)
class CDPElement(Header):
    """Base class for a single CDP TLV element.

    Wraps one TLV (2-byte type, 2-byte length, payload) and provides the
    accessors shared by all concrete element classes.
    """

    def __init__(self, aBuffer = None):
        Header.__init__(self, 8)
        if aBuffer:
            # Only consume this element's bytes; the caller advances the buffer.
            self._length = CDPElement.Get_length(aBuffer)
            self.load_header( aBuffer[:self._length] )

    @classmethod
    def Get_length(cls, aBuffer):
        """Return the TLV length field (big-endian word at offset 2)."""
        return unpack('!h', aBuffer[2:4])[0]

    def get_header_size(self):
        """Return the total size of this element in bytes.

        Bug fix: the original body evaluated self._length without
        returning it, so this method always returned None.
        """
        return self._length

    def get_length(self):
        """Return the TLV length field parsed from the stored header."""
        return self.get_word(2)

    def get_data(self):
        """Return the element payload (everything after the 4-byte TLV header)."""
        return self.get_bytes().tostring()[4:self.get_length()]

    def get_ip_address(self, offset = 0, ip = None):
        """Render 4 raw bytes (taken at `offset` unless `ip` is given) as dotted-quad."""
        if not ip:
            ip = self.get_bytes().tostring()[offset : offset + IP_ADDRESS_LENGTH]
        return socket.inet_ntoa( ip )
class CDPDevice(CDPElement):
    """Device-ID TLV (type 1)."""
    Type = 1

    def get_type(self):
        return CDPDevice.Type

    def get_device_id(self):
        """Return the device identifier string from the TLV payload."""
        return CDPElement.get_data(self)

    def __str__(self):
        return "Device:" + self.get_device_id()
class Address(CDPElement):
    """Address TLV (type 2): an address count followed by packed entries."""
    Type = 2

    def __init__(self, aBuffer = None):
        CDPElement.__init__(self, aBuffer)
        if aBuffer:
            # Skip the 4-byte TLV header plus the 4-byte address count.
            data = self.get_bytes().tostring()[8:]
            self._generateAddressDetails(data)

    def _generateAddressDetails(self, buff):
        """Parse consecutive AddressDetails records out of the payload."""
        self.address_details = []
        while buff:
            address = AddressDetails.create(buff)
            self.address_details.append( address )
            buff = buff[address.get_total_length():]

    def get_type(self):
        return Address.Type

    def get_number(self):
        """Return the advertised address count (long at offset 4)."""
        return self.get_long(4)

    def get_address_details(self):
        """Return the list of parsed AddressDetails entries."""
        return self.address_details

    def __str__(self):
        tmp_str = "Addresses:"
        for address_detail in self.address_details:
            tmp_str += "\n" + str(address_detail)
        return tmp_str
class AddressDetails():
    """One address entry inside an Address TLV.

    Layout implied by the accessors: protocol type (offset 0, 1 byte),
    protocol length (1), protocol id (2), address length (word at 3),
    then the address bytes from offset 5.
    """

    # Protocol id this code treats as IPv4 (rendered via inet_ntoa).
    PROTOCOL_IP = 0xcc

    @classmethod
    def create(cls, buff):
        """Build an AddressDetails from the start of buff."""
        a = AddressDetails(buff)
        return a

    def __init__(self, aBuffer = None):
        if aBuffer:
            # Fixed 5-byte prefix plus the variable-length address.
            addr_length = unpack("!h", aBuffer[3:5])[0]
            self.total_length = addr_length + 5
            self.buffer = aBuffer[:self.total_length]

    def get_total_length(self):
        return self.total_length

    def get_protocol_type(self):
        return self.buffer[0:1]

    def get_protocol_length(self):
        return get_byte( self.buffer, 1)

    def get_protocol(self):
        return get_byte( self.buffer, 2)

    def get_address_length(self):
        return get_word( self.buffer, 3)

    def get_address(self):
        """Return dotted-quad text for IP entries, the raw bytes otherwise."""
        address = get_bytes( self.buffer, 5, self.get_address_length() )
        if self.get_protocol()==AddressDetails.PROTOCOL_IP:
            return socket.inet_ntoa(address)
        else:
            # Python 2 print statement; non-IP addresses are returned raw.
            print "Address not IP"
            return address

    def is_protocol_IP(self):
        """True when this entry's protocol id matches PROTOCOL_IP."""
        return self.get_protocol()==AddressDetails.PROTOCOL_IP

    def __str__(self):
        return "Protocol Type:%r Protocol:%r Address Length:%r Address:%s" % (self.get_protocol_type(), self.get_protocol(), self.get_address_length(), self.get_address())
class Port(CDPElement):
    """Port-ID TLV (type 3)."""
    Type = 3

    def get_type(self):
        return Port.Type

    def get_port(self):
        """Return the port identifier string from the TLV payload."""
        return CDPElement.get_data(self)

    def __str__(self):
        return "Port:" + self.get_port()
class Capabilities(CDPElement):
    """Capabilities TLV (type 4): a 32-bit flag word describing the device.

    The flag word is decoded once into private boolean attributes, which
    the is_*() accessors expose.
    """
    Type = 4

    def __init__(self, aBuffer = None):
        CDPElement.__init__(self, aBuffer)
        self._capabilities_processed = False
        self._router = False
        self._transparent_bridge = False
        self._source_route_bridge = False
        self._switch = False
        self._host = False
        self._igmp_capable = False
        self._repeater = False
        self._init_capabilities()

    def get_type(self):
        return Capabilities.Type

    def get_capabilities(self):
        """Return the raw 4-byte capabilities payload."""
        return CDPElement.get_data(self)

    def _init_capabilities(self):
        """Decode the big-endian 32-bit flag word into the boolean fields."""
        if self._capabilities_processed:
            return
        capabilities = unpack("!L", self.get_capabilities())[0]
        self._router = (capabilities & 0x1) > 0
        self._transparent_bridge = (capabilities & 0x02) > 0
        self._source_route_bridge = (capabilities & 0x04) > 0
        self._switch = (capabilities & 0x08) > 0
        self._host = (capabilities & 0x10) > 0
        self._igmp_capable = (capabilities & 0x20) > 0
        self._repeater = (capabilities & 0x40) > 0

    def is_router(self):
        return self._router

    def is_transparent_bridge(self):
        return self._transparent_bridge

    def is_source_route_bridge(self):
        return self._source_route_bridge

    def is_switch(self):
        return self._switch

    def is_host(self):
        # Bug fix: previously `return self.is_host` returned the bound
        # method itself (always truthy) instead of the decoded flag.
        return self._host

    def is_igmp_capable(self):
        return self._igmp_capable

    def is_repeater(self):
        return self._repeater

    def __str__(self):
        return "Capabilities:" + self.get_capabilities()
class SoftVersion(CDPElement):
    """Software-version TLV (type 5)."""
    Type = 5

    def get_type(self):
        return SoftVersion.Type

    def get_version(self):
        """Return the free-form version text from the TLV payload."""
        return CDPElement.get_data(self)

    def __str__(self):
        return "Version:" + self.get_version()


class Platform(CDPElement):
    """Platform TLV (type 6)."""
    Type = 6

    def get_type(self):
        return Platform.Type

    def get_platform(self):
        """Return the platform string from the TLV payload."""
        return CDPElement.get_data(self)

    def __str__(self):
        return "Platform:%r" % self.get_platform()


class IpPrefix(CDPElement):
    """IP-prefix TLV (type 7): four address bytes plus a prefix length."""
    Type = 7

    def get_type(self):
        # NOTE(review): stray space before .Type is original; valid Python.
        return IpPrefix .Type

    def get_ip_prefix(self):
        # Address bytes start right after the 4-byte TLV header.
        return CDPElement.get_ip_address(self, 4)

    def get_bits(self):
        """Return the prefix length byte that follows the address."""
        return self.get_byte(8)

    def __str__(self):
        return "IP Prefix/Gateway: %r/%d" % (self.get_ip_prefix(), self.get_bits())
class ProtocolHello(CDPElement):
    """Protocol-Hello TLV (type 8); accessors index fixed offsets in the raw buffer."""
    Type = 8

    def get_type(self):
        return ProtocolHello.Type

    def get_master_ip(self):
        """Dotted-quad of the 4 bytes at offset 9."""
        return self.get_ip_address(9)

    def get_version(self):
        return self.get_byte(17)

    def get_sub_version(self):
        return self.get_byte(18)

    def get_status(self):
        return self.get_byte(19)

    def get_cluster_command_mac(self):
        """Raw 6-byte MAC at offset 20."""
        return self.get_bytes().tostring()[20:20+6]

    def get_switch_mac(self):
        """Raw 6-byte MAC at offset 28."""
        return self.get_bytes().tostring()[28:28+6]

    def get_management_vlan(self):
        return self.get_word(36)

    def __str__(self):
        # NOTE(review): "ProcolHello" typo is part of the emitted text; left as-is.
        return "\n\n\nProcolHello: Master IP:%s version:%r subversion:%r status:%r Switch's Mac:%r Management VLAN:%r" \
            % (self.get_master_ip(), self.get_version(), self.get_sub_version(), self.get_status(), mac_to_string(self.get_switch_mac()), self.get_management_vlan())
class VTPManagementDomain(CDPElement):
    """VTP-management-domain TLV (type 9)."""
    Type = 9

    def get_type(self):
        return VTPManagementDomain.Type

    def get_domain(self):
        return CDPElement.get_data(self)


class Duplex(CDPElement):
    """Duplex TLV (type 0xb)."""
    Type = 0xb

    def get_type(self):
        return Duplex.Type

    def get_duplex(self):
        return CDPElement.get_data(self)

    def is_full_duplex(self):
        # NOTE(review): get_duplex() returns the raw payload string, so this
        # comparison with the integer 0x1 looks like it can never be true --
        # probably meant to compare against '\x01'. Confirm before changing.
        return self.get_duplex()==0x1


class VLAN(CDPElement):
    """Native-VLAN TLV (type 0xa)."""
    Type = 0xa

    def get_type(self):
        return VLAN.Type

    def get_vlan_number(self):
        return CDPElement.get_data(self)


class TrustBitmap(CDPElement):
    """Trust-bitmap TLV (type 0x12)."""
    Type = 0x12

    def get_type(self):
        return TrustBitmap.Type

    def get_trust_bitmap(self):
        return self.get_data()

    def __str__(self):
        return "TrustBitmap Trust Bitmap:%r" % self.get_trust_bitmap()


class UntrustedPortCoS(CDPElement):
    """Untrusted-port-CoS TLV (type 0x13)."""
    Type = 0x13

    def get_type(self):
        return UntrustedPortCoS.Type

    def get_port_CoS(self):
        return self.get_data()

    def __str__(self):
        return "UntrustedPortCoS port CoS %r" % self.get_port_CoS()


class ManagementAddresses(Address):
    """Management-address TLV (type 0x16); payload parses like Address."""
    Type = 0x16

    def get_type(self):
        return ManagementAddresses.Type


class MTU(CDPElement):
    """MTU TLV (type 0x11); raw payload only, no dedicated accessor."""
    Type = 0x11

    def get_type(self):
        return MTU.Type


class SystemName(CDPElement):
    """System-name TLV (type 0x14)."""
    Type = 0x14

    def get_type(self):
        return SystemName.Type


class SystemObjectId(CDPElement):
    """System-object-id TLV (type 0x15)."""
    Type = 0x15

    def get_type(self):
        return SystemObjectId.Type


class SnmpLocation(CDPElement):
    """SNMP-location TLV (type 0x17)."""
    Type = 0x17

    def get_type(self):
        return SnmpLocation.Type


class DummyCdpElement(CDPElement):
    """Fallback element for TLV types the factory does not recognize."""
    Type = 0x99

    def get_type(self):
        return DummyCdpElement.Type
class CDPElementFactory():
    """Maps TLV type codes to element classes and instantiates them."""

    elementTypeMap = {
        CDPDevice.Type : CDPDevice,
        Port.Type : Port,
        Capabilities.Type : Capabilities,
        Address.Type : Address,
        SoftVersion.Type : SoftVersion,
        Platform.Type : Platform,
        IpPrefix.Type : IpPrefix,
        ProtocolHello.Type : ProtocolHello,
        VTPManagementDomain.Type : VTPManagementDomain,
        VLAN.Type : VLAN,
        Duplex.Type : Duplex,
        TrustBitmap.Type : TrustBitmap,
        UntrustedPortCoS.Type : UntrustedPortCoS,
        ManagementAddresses.Type : ManagementAddresses,
        MTU.Type : MTU,
        SystemName.Type : SystemName,
        SystemObjectId.Type : SystemObjectId,
        SnmpLocation.Type : SnmpLocation
    }

    @classmethod
    def create(cls, aBuffer):
        """Return the element object for the TLV at the start of aBuffer.

        Unknown type codes fall back to DummyCdpElement.
        """
        # print "CDPElementFactory.create aBuffer:", repr(aBuffer)
        # print "CDPElementFactory.create sub_type:", repr(aBuffer[0:2])
        _type = unpack("!h", aBuffer[0:2])[0]
        # print "CDPElementFactory.create _type:", _type
        try:
            class_type = cls.elementTypeMap[_type]
        except KeyError:
            class_type = DummyCdpElement
            #raise Exception("CDP Element type %s not implemented" % _type)
        return class_type( aBuffer )
| [
"gregesposito@mac.com"
] | gregesposito@mac.com |
4dca51173e60182e6e60683a2886289259f51398 | 4f07d4f41e4692b89a81b88ec0b56cd45dfb3121 | /week 2/day 3/A/A.py | 2c01ab0c2c4ef08938141a37c1d2141f18eb8f37 | [] | no_license | Beisenbek/PP2Summer2020 | 594f1fe8bda7fbdaec1beeb880dad1fb949f469a | 3b9b947601aaee4efb9424c0c6fbf1d6a665cfc1 | refs/heads/master | 2022-11-11T21:06:55.330860 | 2020-07-09T14:12:42 | 2020-07-09T14:12:42 | 271,017,160 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 762 | py | def numberOfDigits(n):
    # Count digits by repeatedly stripping the last one; 0 counts as 1 digit.
    res = 0
    while(True):
        res = res + 1
        n = n // 10
        if n == 0: break
    # NOTE(review): loops forever for negative n (floor division by 10 never
    # reaches 0); callers in this script only pass non-negative values.
    return res
def firstDigit(n, k):
    """Return the leading digit of n, given that n has k digits."""
    return n // 10 ** (k - 1)
# Read an integer and decide whether it is a palindrome by comparing and
# stripping the first and last digits each iteration.
n = int(input())
k = numberOfDigits(n)
res = True
leadingZero = False
for i in range(0, k // 2):
    a = numberOfDigits(n)
    expectedLength = a - 2
    # Last digit, then strip it.
    r = n % 10
    n = n // 10
    l = 0
    if leadingZero == False:
        # First digit, then strip it.
        l = firstDigit(n, a - 1)
        n = n - l * pow(10, a - 2)
    else:
        # A previous strip exposed a leading zero: the expected first digit
        # is 0 (l stays 0) and only the trailing digit was removed this round.
        leadingZero = False
        expectedLength = a - 1
    if l != r:
        res = False
        break
    # If the number got shorter than expected, a leading zero was dropped.
    actualLength = numberOfDigits(n)
    if actualLength != expectedLength:
        leadingZero = True
if res : print("YES")
else : print("NO")
"bbaisakov@dar.kz"
] | bbaisakov@dar.kz |
0af2519c0df633db3f643a4c658518190f0a2164 | f8e65ae20ca2f7c7f143eaf4e6e6061ddf3b4807 | /leapsec_test.py | e39e4798cfb376329bd1dff5aab5c1b6a94dbeea | [] | no_license | Gerenjie/Centaur-Model | af59e6810b9b8132ecb060f1dffe28bad2b15dc7 | 465d5dab5c9233229e68cf31e861c665c0bd4553 | refs/heads/master | 2020-07-22T10:48:50.467503 | 2019-09-12T00:37:52 | 2019-09-12T00:37:52 | 207,173,259 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 328 | py | from obs80 import leapsec
import novas.compat as novas
def test_leap():
    """Check the TAI-UTC offset on either side of the 1972-07-01 leap second.

    Expected values (10 s before, 11 s after) are the example from
    leap-seconds.list.
    """
    l = leapsec.LeapSeconds()
    # One second before the boundary: 1972-06-30 23:59:59 UTC.
    jd9 = novas.julian_date(1972, 6, 30, 23 + (3599. / 3600))
    assert l.getLeapSeconds(jd9) == 10
    # Immediately after the boundary: 1972-07-01 00:00:00 UTC.
    jd0 = novas.julian_date(1972, 7, 1, 0)
    assert l.getLeapSeconds(jd0) == 11
| [
"noreply@github.com"
] | Gerenjie.noreply@github.com |
a6d5ec1887858dde34ae03c2bd2f11d3e85e9e80 | 07657cee40a89c8a12fb10bcbeca89ae381aedbb | /apps/experiment/adminx.py | 15bbe23353b9423e9debbe15d3e6e6aab7284f9a | [] | no_license | my-master-yang/xiong | aad07a5d9953e2dc57662678ae0117fa3721b701 | 38e928c8857ae07427d87f5dc854809d68717324 | refs/heads/master | 2018-10-29T23:11:15.312313 | 2018-08-13T01:29:41 | 2018-08-13T01:29:41 | 113,152,387 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 799 | py | # _*_ encoding:utf-8 _*_
__author__ = "kusole"
__date__ = "17-12-7 ไธๅ1:39"
import xadmin
from .models import LabCategory, Lab
class LabCategoryAdmin(object):
    """Xadmin options for LabCategory: list columns, search and filter fields."""
    list_display = ['name', 'desc', 'add_time']
    search_fields = ['name', 'desc']
    list_filter = ['name', 'desc', 'add_time']
class LabAdmin(object):
    """Xadmin options for Lab: list columns, search and filter fields."""
    list_display = [
        'id', 'labcategory', 'name', 'desc', 'detail', 'degree', 'learn_times', 'students', 'image', 'add_time'
    ]
    search_fields = ['labcategory', 'name', 'desc', 'detail', 'degree', 'learn_times', 'students', 'image']
    # Note: filters traverse the FK via 'labcategory__name'.
    list_filter = [
        'labcategory__name', 'name', 'desc', 'detail', 'degree', 'learn_times', 'students', 'image', 'add_time'
    ]

# Register both model admins with the global xadmin site.
xadmin.site.register(LabCategory, LabCategoryAdmin)
xadmin.site.register(Lab, LabAdmin)
| [
"3079362259@qq.com"
] | 3079362259@qq.com |
192c834c943d4e4dfafb959d91354a48dc2b1af8 | 63df3647761f94d0937f5d14f02b916eb2208eb4 | /blogapp/migrations/0001_initial.py | ddba767d5506a6f31428c05822c1a0eb15d39aaa | [] | no_license | alidabour/DokkanzBlog | 6a05098b5cbdb63381b595c6315e9ab28cba514f | d9220a183a173a117fb1975200bcf011dbbc2999 | refs/heads/master | 2021-05-06T06:23:12.572283 | 2017-12-11T22:28:41 | 2017-12-11T22:28:41 | 113,913,191 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 857 | py | # Generated by Django 2.0 on 2017-12-10 10:54
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Initial schema for blogapp: creates the Post model."""

    # First migration for this app.
    initial = True

    # Post.owner references the user model, so that app must migrate first.
    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='Post',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('subject', models.CharField(max_length=200)),
                ('content', models.TextField()),
                ('created', models.DateTimeField(auto_now_add=True)),
                # Deleting the owning user cascades to their posts.
                ('owner', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
        ),
    ]
| [
"alidabour0@gmail.com"
] | alidabour0@gmail.com |
9106c5d1a7b95165084dd263e4a7421c8030d12e | e95eeb123f3772da8d2dc7677e7afdc1287f1276 | /bot.py | a1f32743e20491dec9bd1bd4b5ce00cf5bfb409e | [
"MIT"
] | permissive | jayrav13/presidency | 3cf880bf51d211f8fb21d5c4bc564f22c2a8ae4f | f18721d5df9af161cc01f503b6657d9b06fea0e9 | refs/heads/master | 2022-09-29T22:53:40.867506 | 2017-03-03T18:57:04 | 2017-03-03T18:57:04 | 72,818,604 | 15 | 2 | MIT | 2022-09-16T17:46:42 | 2016-11-04T05:56:54 | Python | UTF-8 | Python | false | false | 5,501 | py | """
Imports
"""
from presidency.models import *
from lxml import html
import requests
import json
import datetime
from twython import Twython
import os
import time
import sys
"""
Set UTF-8 for everything.
"""
# Python 2-only hack: reload(sys) restores setdefaultencoding so all implicit
# str/unicode conversions use UTF-8.
reload(sys)
sys.setdefaultencoding("utf-8")
# Establish Base URL.
base_url = os.environ.get('WHITE_HOUSE_URL') + ""
# Establish all pages to scrape: briefing-room path -> human-readable category.
pages = {
	"/briefing-room/speeches-and-remarks": "Speeches and Remarks",
	"/briefing-room/press-briefings": "Press Briefings",
	"/briefing-room/statements-and-releases": "Statements and Releases",
	"/briefing-room/presidential-actions/executive-orders": "Executive Orders",
	"/briefing-room/presidential-actions/presidential-memoranda": "Presidential Memoranda",
	"/briefing-room/presidential-actions/proclamations": "Proclamations",
	"/briefing-room/presidential-actions/related-omb-material": "Related OMB Material",
	# "/briefing-room/pending-legislation": "Pending Legislation",
	# "/briefing-room/signed-legislation": "Signed Legislation",
	# "/briefing-room/vetoed-legislation": "Vetoed Legislation",
	"/briefing-room/statements-administration-policy": "Statements of Administration Policy"
}
# Scrape each page: walk every category, page through its listing, shorten
# each document URL and persist new rows (duplicates fail the insert).
for key, value in pages.iteritems():
	print("Scanning " + value)
	# Make request and transform into tree.
	page_url = base_url + key
	response = requests.get(page_url)
	tree = html.document_fromstring(response.text)
	# Deterimine number of total pages.
	pagecount = int(tree.xpath('//li[@class="pager-current"]')[0].text_content().split(' of ')[1]) if len(tree.xpath('//li[@class="pager-current"]')) > 0 else 1
	# Keep iterating through pages until you reach a page that has been fully scraped. Then stop.
	for i in range(0, pagecount):
		# Use ?page= parameter to scrape, starting with page 0.
		response = requests.get(page_url)
		print("PAGE URL: " + page_url)
		tree = html.document_fromstring(response.text)
		# Build the resulting dictionary objects for each document on that page.
		objects = [{
			"document_date": x.xpath('div[contains(@class, "views-field-created")]')[0].text_content().strip() if len(x.xpath('div[contains(@class, "views-field-created")]')) > 0 else x.xpath('div')[0].text_content().split(' on ')[1],
			"title": x.xpath('div[contains(@class, "views-field-title")]')[0].text_content().strip(),
			"uri": x.xpath('div[contains(@class, "views-field-title")]')[0].xpath('h3')[0].xpath('a')[0].attrib['href'].strip(),
			"category_slug": key,
			"category_name": value,
			"full_url": os.environ.get('WHITE_HOUSE_URL') + x.xpath('div[contains(@class, "views-field-title")]')[0].xpath('h3')[0].xpath('a')[0].attrib['href'].strip()
		} for x in tree.xpath('//div[contains(@class, "views-row")]')]
		# Add url's to object via the Google URL shortener.
		# NOTE(review): this inner loop reuses the name `i`, shadowing the
		# page counter of the enclosing loop.
		for i in range(0, len(objects)):
			url = requests.post('https://www.googleapis.com/urlshortener/v1/url?key=' + os.environ.get('GOOGLE_URL_SHORTENER_API_KEY'), json={"longUrl": os.environ.get('WHITE_HOUSE_URL') + objects[i]['uri']})
			if url.status_code == 200:
				objects[i]['short_url'] = url.json()['id']
			else:
				# NOTE(review): self-assignment of a key that has not been set
				# yet -- this raises KeyError when the shortener call fails.
				objects[i]['short_url'] = objects[i]['short_url']
		# Create database objects for all of these.
		records = [WhiteHouse(x['title'], x['uri'], x['category_slug'], x['category_name'], x['document_date'], x['full_url'], x['short_url']) for x in objects]
		# Track number of records successfully added. Those not added will be duplicates.
		record_counter = 0
		# Iterate through records.
		for x in records:
			# Attempt to persist.
			try:
				db.session.add(x)
				db.session.commit()
				record_counter = record_counter + 1
				print("Added " + x.title + " successfully.")
			# Fallback,
			except Exception as e:
				# Flush old commit that did not persist.
				db.session.rollback()
				# Try to save an error message.
				"""
				try:
					db.session.add(Error(str(e)))
					db.session.commit()
				except:
					db.session.rollback()
				"""
				print("Failed to add " + x.title + " successfully: " + str(e))
		# If 0 records were added to the database, everything henceforth is old in this topic.
		# Break, go to next slug.
		pager = tree.xpath('//li[contains(@class, "pager-next")]')
		try:
			print(pager[0].xpath('a')[0].attrib['href'])
			page_url = base_url + pager[0].xpath('a')[0].attrib['href']
		except:
			pass
# Retrieve all untweeted documents, oldest first (document_date ascending).
documents = WhiteHouse.query.filter_by(is_tweeted=False).order_by(WhiteHouse.document_date.asc())
print("New documents detected: %d" % (documents.count()))
# Set up Twitter bot with credentials from the environment.
twitter = Twython(
	os.environ.get('TWITTER_CONSUMER_KEY'),
	os.environ.get('TWITTER_CONSUMER_SECRET'),
	os.environ.get('TWITTER_ACCESS_TOKEN'),
	os.environ.get('TWITTER_ACCESS_TOKEN_SECRET')
)
# Go through all relevant documents and tweet them out.  Real tweeting only
# happens when TWEET_ENV == "TRUE"; otherwise the tweet text is just stored.
for document in documents:
	try:
		# Title truncated to 113 chars so the short URL fits in the tweet.
		tweet = document.title[0 : 113] + ("..." if len(document.title) > 113 else "") + " " + document.short_url
		if os.environ.get('TWEET_ENV') == "TRUE":
			try:
				twitter.update_status( status=(tweet) )
				document.is_tweeted = True
			except Exception as e:
				"""
				db.session.add(Error(str(e)))
				db.session.commit()
				"""
				continue
		document.tweet = tweet
		print("Tweeted: " + document.tweet)
		db.session.add(document)
		db.session.commit()
	except Exception as e:
		"""
		try:
			db.session.add(Error(str(e)))
			db.session.commit()
		except:
			db.session.rollback()
		"""
		pass
	# Time Delay between tweets to respect rate limits.
	if os.environ.get('TWEET_ENV') == "TRUE":
		time.sleep(10)
| [
"jayrav13@gmail.com"
] | jayrav13@gmail.com |
7c3c653ac29e14cec2d578ecf6c2370af6effa9a | 05e796c29226573944b01c927a5469ccef60d9f8 | /proj/local_settings.py | d8020b7a76f040819500c0ca6f8e78076a8371b7 | [
"MIT"
] | permissive | jbnerd/Anc_Portal | 4cb2065a27d263d1866c523b41da394368d0f085 | 92f3a6c57f76da1bdd0f511a3d8edfb61cfcc4ce | refs/heads/master | 2021-01-21T23:28:42.304389 | 2017-08-05T11:48:12 | 2017-08-05T11:48:12 | 95,243,478 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,060 | py | """
Django settings for proj project.
Generated by 'django-admin startproject' using Django 1.9.2.
For more information on this file, see
https://docs.djangoproject.com/en/1.9/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.9/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.9/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '_e)m3rr)f&=(k6oicrmy2x5xno9o6javch=@^j%20@n=@d!ahs'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'social.apps.django_app.default',
'anc',
]
MIDDLEWARE_CLASSES = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'proj.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'proj.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.9/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql',
'NAME': 'ANC',
'USER': 'anc_operator',
'PASSWORD': 'ancportal@1234',
'HOST': 'localhost',
'PORT': '',
}
}
'''DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql',
'NAME': 'anc',
'USER': 'codingclub',
'PASSWORD': 'abc123!',
'HOST': 'mysql3.gear.host',
'PORT': '',
}
}'''
# Password validation
# https://docs.djangoproject.com/en/1.9/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.9/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.9/howto/static-files/
STATIC_URL = '/static/'
LOGIN_URL = '/inform/'
SOCIAL_AUTH_GOOGLE_OAUTH2_KEY= '1029866612334-it6397v5cd3kjv5d1fu9kajb6f9ajud1'
SOCIAL_AUTH_GOOGLE_OAUTH2_SECRET = 'G_0i9RN-YeqzBqN4NEsqopiv'
SOCIAL_AUTH_GOOGLE_OAUTH2_WHITELISTED_DOMAINS = ['pilani.bits-pilani.ac.in']
SOCIAL_AUTH_RAISE_EXCEPTIONS = False
SOCIAL_AUTH_LOGIN_ERROR_URL="/inform/"
AUTHENTICATION_BACKENDS = (
'social.backends.google.GoogleOAuth2',
'django.contrib.auth.backends.ModelBackend')
SOCIAL_AUTH_PIPELINE = (
# Get the information we can about the user and return it in a simple
# format to create the user instance later. On some cases the details are
# already part of the auth response from the provider, but sometimes this
# could hit a provider API.
'social.pipeline.social_auth.social_details',
# Get the social uid from whichever service we're authing thru. The uid is
# the unique identifier of the given user in the provider.
'social.pipeline.social_auth.social_uid',
# Verifies that the current auth process is valid within the current
# project, this is where emails and domains whitelists are applied (if
# defined).
'social.pipeline.social_auth.auth_allowed',
# Checks if the current social-account is already associated in the site.
'social.pipeline.social_auth.social_user',
# Make up a username for this person, appends a random string at the end if
# there's any collision.
'social.pipeline.user.get_username',
# Send a validation email to the user to verify its email address.
# Disabled by default.
# 'social.pipeline.mail.mail_validation',
# Associates the current social details with another user account with
# a similar email address. Disabled by default.
# 'social.pipeline.social_auth.associate_by_email',
# Create a user account if we haven't found one yet.
'social.pipeline.user.create_user',
# Create the record that associates the social account with the user.
'social.pipeline.social_auth.associate_user',
# Populate the extra_data field in the social record with the values
# specified by settings (and the default ones like access_token, etc).
'social.pipeline.social_auth.load_extra_data',
# Update the user record with any changed info from the auth service.
'social.pipeline.user.user_details',
)
#SEND Email
EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'
EMAIL_HOST = 'smtp.gmail.com'
EMAIL_PORT = 587
EMAIL_HOST_USER = 'abhivjoshi.aj@gmail.com'
EMAIL_HOST_PASSWORD = '#'
EMAIL_USE_TLS = True
| [
"abhivjoshi.aj@gmail.com"
] | abhivjoshi.aj@gmail.com |
b1897521c7b612921d88df7a303a832036796e83 | 4bd555bc662b8182a2e7644976bfdb00ed5e1ebe | /PythonistaAppTemplate/PythonistaKit.framework/pylib_ext/scene.py | 14aa3e0938459fe4581caf119b859a026e19008b | [] | no_license | fhelmli/homeNOWG2 | a103df1ef97194dec9501dbda87ec1f7c111fb4a | e794fd87b296544542fd9dc7ac94c981c6312419 | refs/heads/master | 2020-04-04T13:40:20.417769 | 2019-01-30T21:41:04 | 2019-01-30T21:41:04 | 155,970,686 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,230 | py | #import pythonista
#coding: utf-8
from _scene2 import *
import _scene2
from scene_drawing import *
import math
from numbers import Number
from io import BytesIO
import ui
DEFAULT_ORIENTATION = 0
PORTRAIT = 1
LANDSCAPE = 2
BLEND_NORMAL = 0
BLEND_ADD = 1
BLEND_MULTIPLY = 2
from ui import get_screen_size
def run(scene_to_run, orientation=0, frame_interval=1, anti_alias=False, show_fps=False, multi_touch=True):
sv = SceneView()
if orientation == PORTRAIT:
ui_orientations = ['portrait']
elif orientation == LANDSCAPE:
ui_orientations = ['landscape']
else:
ui_orientations = None
sv.anti_alias = anti_alias
sv.frame_interval = frame_interval
sv.multi_touch_enabled = multi_touch
sv.shows_fps = show_fps
sv.scene = scene_to_run
sv.present(orientations=ui_orientations)
def gravity():
g = _scene2.gravity()
return Vector3(g[0], g[1], g[2])
class Touch (object):
def __init__(self, x, y, prev_x, prev_y, touch_id):
self.touch_id = touch_id
self.location = Point(x, y)
self.prev_location = Point(prev_x, prev_y)
self.layer = None
def __eq__(self, other_touch):
if not isinstance(other_touch, Touch):
return False
elif other_touch.touch_id == self.touch_id:
return True
return False
def __hash__(self):
return self.touch_id.__hash__()
class Scene (SceneNode):
def __init__(self, *args, **kwargs):
SceneNode.__init__(self, *args, **kwargs)
self.t = 0.0
self.dt = 0.0
self.root_layer = None
self.touches = {}
self.delayed_invocations = []
w, h = ui.get_screen_size()
self.size = Size(w, h)
self.bounds = Rect(0, 0, w, h)
self.presented_scene = None
self.presenting_scene = None
self.setup_finished = False
def setup(self):
pass
def update(self):
pass
def did_evaluate_actions(self):
pass
def draw(self):
pass
def did_change_size(self):
pass
def stop(self):
pass
def pause(self):
pass
def resume(self):
pass
def touch_began(self, touch):
pass
def touch_moved(self, touch):
pass
def touch_ended(self, touch):
pass
def present_modal_scene(self, other_scene):
if self.presented_scene:
self.dismiss_modal_scene()
other_scene._setup_scene(*self.size)
other_scene._set_size(*self.size)
self.presented_scene = other_scene
other_scene.presenting_scene = self
other_scene.z_position = max(n.z_position for n in self.children) + 1
self.add_child(other_scene)
def dismiss_modal_scene(self):
if self.presented_scene:
self.presented_scene.presenting_scene = None
self.presented_scene.remove_from_parent()
self.presented_scene = None
elif self.presenting_scene:
self.presenting_scene.dismiss_modal_scene()
def add_layer(self, layer):
if self.root_layer is None:
s = self.size
self.root_layer = Layer(Rect(0, 0, s[0], s[1]))
self.root_layer.add_layer(layer)
def delay(self, dt, func):
invocation = { 't': self.t + dt, 'f': func }
self.delayed_invocations.append(invocation)
def _setup_scene(self, width, height):
if hasattr(self, 'setup_finished') and self.setup_finished:
return
self.size = Size(width, height)
self.bounds = Rect(0, 0, width, height)
# Note: Some legacy code relies on not having to call super in __init__, so these are initialized again here...
self.t = 0.0
self.dt = 0.0
self.root_layer = None
self.touches = {}
self.delayed_invocations = []
self.presented_scene = None
self.presenting_scene = None
self.setup()
self.setup_finished = True
def _set_size(self, width, height):
if self.size.w != width or self.size.h != height:
self.size = Size(width, height)
self.bounds = Rect(0, 0, width, height)
self.crop_rect = self.bounds
self.did_change_size()
if self.presented_scene:
self.presented_scene._set_size(width, height)
def should_rotate(self, orientation):
return False
def _process_delayed_invocations(self):
fired_invocations = None
for invocation in self.delayed_invocations:
if invocation['t'] <= self.t:
invocation['f']()
if fired_invocations is None:
fired_invocations = []
fired_invocations.append(invocation)
if fired_invocations is not None:
for invocation in fired_invocations:
self.delayed_invocations.remove(invocation)
def _draw(self, dt):
paused = self.paused
if not paused:
self.dt = dt
self.t += dt
self._process_delayed_invocations()
self.draw()
if not paused:
self.update()
self._update(dt)
if not paused:
self.did_evaluate_actions()
self._render()
if self.presented_scene:
self.presented_scene._draw(dt)
def _stop(self):
self.stop()
def _touch_began(self, x, y, touch_id):
if self.presented_scene:
self.presented_scene._touch_began(x, y, touch_id)
return
touch = Touch(x, y, x, y, touch_id)
if self.root_layer is not None:
hit_layer = self.root_layer._hit_test(Point(x, y))
touch.layer = hit_layer
if hit_layer is not None:
if hasattr(hit_layer, 'touch_began') and callable(hit_layer.touch_began):
hit_layer.touch_began(touch)
self.touches[touch_id] = touch
self.touch_began(touch)
def _touch_moved(self, x, y, prev_x, prev_y, touch_id):
if self.presented_scene:
self.presented_scene._touch_moved(x, y, prev_x, prev_y, touch_id)
return
touch = Touch(x, y, prev_x, prev_y, touch_id)
old_touch = self.touches.get(touch_id, None)
if old_touch is not None:
touch.layer = old_touch.layer
if touch.layer is not None:
if hasattr(touch.layer, 'touch_moved') and callable(touch.layer.touch_moved):
touch.layer.touch_moved(touch)
self.touches[touch_id] = touch
self.touch_moved(touch)
def _touch_ended(self, x, y, touch_id):
if self.presented_scene:
self.presented_scene._touch_ended(x, y, touch_id)
return
touch = Touch(x, y, x, y, touch_id)
old_touch = self.touches.get(touch_id, None)
if old_touch is not None:
del self.touches[touch_id]
touch.layer = old_touch.layer
if touch.layer is not None:
if hasattr(touch.layer, 'touch_ended') and callable(touch.layer.touch_ended):
touch.layer.touch_ended(touch)
self.touch_ended(touch)
class LabelNode (SpriteNode):
def __init__(self, text='', font=('Helvetica', 20), *args, **kwargs):
SpriteNode.__init__(self, *args, **kwargs)
self._suspend_updates = True
self._rendered_text = None
self.text = text
self.font = font
self._suspend_updates = False
self.update_texture()
def __setattr__(self, name, value):
SpriteNode.__setattr__(self, name, value)
if name == 'font':
try:
if len(value) != 2:
raise TypeError('Expected a sequence of font name and size')
if not isinstance(value[0], basestring):
raise TypeError('Font name must be a string')
if not isinstance(value[1], Number):
raise TypeError('Font size must be a number')
except TypeError:
raise TypeError('Expected a sequence of font name and size')
if name == 'font' or (name == 'text' and value != self._rendered_text):
self.update_texture()
def update_texture(self):
if self._suspend_updates:
return
w, h = ui.measure_string(self.text, font=self.font)
with ui.ImageContext(max(w, 1), max(h, 1)) as ctx:
ui.draw_string(self.text, (0, 0, w, h), self.font, color='white')
img = ctx.get_image()
self.texture = Texture(img)
self._rendered_text = self.text
class ShapeNode (SpriteNode):
def __init__(self, path=None, fill_color='white', stroke_color='clear', shadow=None, *args, **kwargs):
SpriteNode.__init__(self, *args, **kwargs)
self._suspend_updates = True
self.path = path
self.line_width = path.line_width
self.fill_color = fill_color
self.stroke_color = stroke_color
self.shadow = shadow
self._suspend_updates = False
self.update_texture()
def __setattr__(self, name, value):
SpriteNode.__setattr__(self, name, value)
if name == 'line_width':
self.path.line_width = value
self.update_texture()
if name in ('path', 'fill_color', 'stroke_color', 'shadow'):
self.update_texture()
def update_texture(self):
if self._suspend_updates or not self.path:
return
if self.shadow:
shadow_color = self.shadow[0]
shadow_offset_x = self.shadow[1]
shadow_offset_y = self.shadow[2]
shadow_radius = self.shadow[3]
else:
shadow_offset_x = 0
shadow_offset_y = 0
shadow_radius = 0
shadow_left = shadow_radius - shadow_offset_x
shadow_right = shadow_radius + shadow_offset_x
shadow_top = shadow_radius - shadow_offset_y
shadow_bottom = shadow_radius + shadow_offset_y
lw = self.path.line_width
path_bounds = self.path.bounds
w = max(1, math.ceil(path_bounds.w + abs(shadow_left) + abs(shadow_right)) + lw)
h = max(1, math.ceil(path_bounds.h + abs(shadow_top) + abs(shadow_bottom)) + lw)
with ui.ImageContext(w, h) as ctx:
ui.concat_ctm(ui.Transform.translation(lw/2 + max(0, shadow_left) - path_bounds.x, lw/2 + max(0, shadow_top) - path_bounds.y))
ui.set_color(self.fill_color)
with ui.GState():
if self.shadow:
ui.set_shadow(shadow_color, shadow_offset_x, shadow_offset_y, shadow_radius)
self.path.fill()
if self.path.line_width > 0:
ui.set_color(self.stroke_color)
self.path.stroke()
img = ctx.get_image()
self.texture = Texture(img)
| [
"tberk@gmx.at"
] | tberk@gmx.at |
8b04b0680c919fab45b483ef6ab1551ab145d51b | 6a983aa7f216cbeec15aefeaa2ef8731771f2e7d | /gcsite/urls.py | 3e3ef4b067d5ffb036f2ca564f8faeda5a83e4ae | [] | no_license | srlee056/gcproject | c970ae0fc645eeeaad10b63ca5e223dc32abe611 | 82ebcdf5fb16b336d2652ba57512b24f775d8ef2 | refs/heads/master | 2022-11-25T06:39:09.708299 | 2020-07-31T23:41:57 | 2020-07-31T23:41:57 | 281,684,913 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 843 | py | """gcsite URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.0/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path, include
urlpatterns = [
path('admin/', admin.site.urls),
path('', include('myapp.urls')),
path('guild/', include('parsed_data.urls')),
]
| [
"imsolem1226@gmail.com"
] | imsolem1226@gmail.com |
c2c60a9263d26d5897f8406ba267856e1bd4bdf0 | 2410646752e83818dd5d751d653c09232d6fc705 | /rdsexport/functions/rds_export_to_s3.py | 8b026094c19aa494917f5e8a2ed751c5b839220b | [] | no_license | ArchTaqi/IaS-Code | 6ce5b551b154a9eda6c40e79ac31b2e35b8cd50d | dda4271e52510abc1c8c1172ab63ccf4608cc17b | refs/heads/master | 2023-06-02T18:01:28.252518 | 2021-06-16T07:35:59 | 2021-06-16T07:35:59 | 377,379,355 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,345 | py | #!/usr/bin/env python3
"""
This utility helps to copy the monthly aws rds snapshot to the S3 bucket.
This is to maintain the backup of rds snapshot in s3 for DR needs.
"""
import os
import sys
import json
import boto3
import logging
from datetime import date, datetime, timezone, timedelta
from botocore.client import ClientError
REGION = os.getenv("REGION")
logger = logging.getLogger()
logger.setLevel(logging.INFO)
s3 = boto3.client('s3', region_name=REGION)
rds_client = boto3.client('rds', region_name=REGION)
def _create_bucket(bucket_name):
try:
s3.head_bucket(Bucket=bucket_name)
return True
except ClientError:
s3.create_bucket(Bucket=bucket_name, CreateBucketConfiguration={'LocationConstraint': REGION})
s3.put_bucket_encryption(Bucket=bucket_name,
ServerSideEncryptionConfiguration={
'Rules': [
{
'ApplyServerSideEncryptionByDefault': {
'SSEAlgorithm': 'AES256'
}
},
]
})
return True
def _get_most_current_snapshot(db_identifier, today_date):
"""
finding most current snapshot
return: (string) DBSnapshotInstance
"""
snapshots = rds_client.describe_db_snapshots(SnapshotType='automated')['DBSnapshots']
if db_identifier and not 'None':
snapshots = filter(lambda x: db_identifier in x.get('DBInstanceIdentifier'), snapshots)
for snapshot in snapshots:
if snapshot['SnapshotCreateTime'].date() == today_date:
return snapshot
def instantiate_s3_export(rds_snapshots, s3_bucket, IamRoleArn, KmsKeyId, today):
"""
Function to invoke start_export_task using
recent most system snapshot Return: Response
"""
year = today.strftime("%Y")
month = today.strftime("%m")
get_latest_snapshot_name,get_latest_snapshot_time = rds_snapshots['DBSnapshotIdentifier'], rds_snapshots['SnapshotCreateTime']
return rds_client.start_export_task(
ExportTaskIdentifier='MWP-snapshot-monthly-%s' % today.strftime("%b%Y"),
SourceArn=rds_snapshots['DBSnapshotArn'],
S3BucketName=s3_bucket,
S3Prefix='{year}/{month}'.format(year=year, month=month),
IamRoleArn=IamRoleArn,
KmsKeyId=KmsKeyId,
# ExportOnly=[
# 'string',
# ]
)
def jsonDateTimeConverter(o):
"""To avoid TypeError: datetime.datetime(...) is not JSON serializable"""
if isinstance(o, datetime):
return o.__str__()
def lambda_handler(event, context):
logger.info('start:export_snapshot')
db_identifier = os.getenv("DB_IDENTIFIER")
s3_bucket = os.getenv("S3_BUCKET")
IamRoleArn = os.environ.get('IAM_ROLE_ARN')
KmsKeyId = os.environ.get('KMS_KEY_ID')
last_day_of_prev_month = date.today().replace(day=1) - timedelta(days=1)
logger.info('Last day of prev month: {last_day_of_prev_month}'.format(last_day_of_prev_month=last_day_of_prev_month))
snapshot = _get_most_current_snapshot(db_identifier, last_day_of_prev_month)
if _create_bucket(s3_bucket):
response = instantiate_s3_export(snapshot, s3_bucket, IamRoleArn, KmsKeyId, last_day_of_prev_month)
logger.info(json.dumps(response, default=jsonDateTimeConverter))
logger.info('end:export_snapshots')
| [
"taqi.arch@gmail.com"
] | taqi.arch@gmail.com |
7e57a13ccb33e59dd349b8957eb2347ccf682b14 | 8a87d58c47bb3eb19184f7096766ca286cd3e586 | /ff.py | 4fac3277f7eb96d2bb6a3fdca64e040896423a5f | [] | no_license | jesusble/evening | 9d8dc6e1bbfa28e65cc9b8bc584a2cf1f48166a6 | a3c3f1843185e1976397cd5ec97a9395b7ae7148 | refs/heads/master | 2020-06-17T04:51:41.479661 | 2019-07-10T12:46:23 | 2019-07-10T12:46:23 | 195,802,693 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 83 | py | sb=int(input())
for i in range(1,sb+1):
if(sb%i==0):
print(i, end=' ')
| [
"noreply@github.com"
] | jesusble.noreply@github.com |
a2300d6a94ca2cefd91d8d13d10b57d752bcefa4 | 1ade02a8e0c6d7e442c9d9041f15518d22da3923 | /w2/d5/sqlite_db/schema.py | 99c0917574b62199a3263ba8d784e3cfc122ffc9 | [] | no_license | fodisi/ByteAcademy-Bootcamp | 7980b80636a36db6da3e0fc0e529fbc6b8e097e0 | d53e3f4864f6cba1b85e806c29b01c48e3c2e81d | refs/heads/master | 2020-03-19T12:55:31.489638 | 2018-07-25T16:19:19 | 2018-07-25T16:19:19 | 136,550,128 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 458 | py | #!/usr/bin/env python3
import sqlite3
# create a connection to the database
connection = sqlite3.connect("securities_master.db", check_same_thread=False)
# create a cursor object to represent the "gaze" of the database management system
cursor = connection.cursor()
cursor.execute(
"""CREATE TABLE rippleUSD(
pk INTEGER PRIMARY KEY AUTOINCREMENT,
unix_time FLOAT,
last_price FLOAT,
trade_volume FLOAT
);"""
)
cursor.close()
connection.close()
| [
"fodisi@users.noreply.github.com"
] | fodisi@users.noreply.github.com |
3c3091a21cf5d12371f8d133ebbc1bd9dfd495c3 | b4d720c4d9de5f464fe7aa3bd2fb9155dba2f4df | /omni_anomaly/eval_methods.py | c4f95d2bc86f9e9c2b087560dbe3536e51e38ba6 | [] | no_license | real-lhj/CTF_code | 7d367eafd70d40f7e369b8139c578e9ce7f263f2 | 2ccb3a528745e4b42ffb7eea57208a4d0273ec74 | refs/heads/main | 2023-03-19T03:44:30.393132 | 2021-01-12T13:22:25 | 2021-01-12T13:22:25 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,996 | py | # -*- coding: utf-8 -*-
import numpy as np
from omni_anomaly.spot import SPOT
def calc_point2point(predict, actual):
"""
calculate f1 score by predict and actual.
Args:
predict (np.ndarray): the predict label
actual (np.ndarray): np.ndarray
"""
TP = np.sum(predict * actual)
TN = np.sum((1 - predict) * (1 - actual))
FP = np.sum(predict * (1 - actual))
FN = np.sum((1 - predict) * actual)
precision = TP / (TP + FP + 0.00001)
recall = TP / (TP + FN + 0.00001)
f1 = 2 * precision * recall / (precision + recall + 0.00001)
return f1, precision, recall, TP, TN, FP, FN
def adjust_predicts(score, label,
threshold=None,
pred=None,
calc_latency=False):
"""
Calculate adjusted predict labels using given `score`, `threshold` (or given `pred`) and `label`.
Args:
score (np.ndarray): The anomaly score
label (np.ndarray): The ground-truth label
threshold (float): The threshold of anomaly score.
A point is labeled as "anomaly" if its score is lower than the threshold.
pred (np.ndarray or None): if not None, adjust `pred` and ignore `score` and `threshold`,
calc_latency (bool):
Returns:
np.ndarray: predict labels
"""
if len(score) != len(label):
raise ValueError("score and label must have the same length")
score = np.asarray(score)
label = np.asarray(label)
latency = 0
if pred is None:
predict = score < threshold
else:
predict = pred
actual = label > 0.1
anomaly_state = False
anomaly_count = 0
for i in range(len(score)):
if actual[i] and predict[i] and not anomaly_state:
anomaly_state = True
anomaly_count += 1
for j in range(i, 0, -1):
if not actual[j]:
break
else:
if not predict[j]:
predict[j] = True
latency += 1
elif not actual[i]:
anomaly_state = False
if anomaly_state:
predict[i] = True
if calc_latency:
return predict, latency / (anomaly_count + 1e-4)
else:
return predict
def calc_seq(score, label, threshold, calc_latency=False):
"""
Calculate f1 score for a score sequence
"""
if calc_latency:
predict, latency = adjust_predicts(score, label, threshold, calc_latency=calc_latency)
t = list(calc_point2point(predict, label))
t.append(latency)
return t
else:
predict = adjust_predicts(score, label, threshold, calc_latency=calc_latency)
return calc_point2point(predict, label)
def bf_search(score, label, start, end=None, step_num=1, display_freq=1, verbose=True):
"""
Find the best-f1 score by searching best `threshold` in [`start`, `end`).
Returns:
list: list for results
float: the `threshold` for best-f1
"""
if step_num is None or end is None:
end = start
step_num = 1
search_step, search_range, search_lower_bound = step_num, end - start, start
if verbose:
print("search range: ", search_lower_bound, search_lower_bound + search_range)
threshold = search_lower_bound
m = (-1., -1., -1.)
m_t = 0.0
for i in range(search_step):
threshold += search_range / float(search_step)
target = calc_seq(score, label, threshold, calc_latency=True)
if target[0] > m[0]:
m_t = threshold
m = target
if verbose and i % display_freq == 0:
print("cur thr: ", threshold, target, m, m_t)
print(m, m_t)
return m, m_t
def pot_eval(init_score, score, label, q=1e-3, level=0.02, threshold=-300):
"""
Run POT method on given score.
Args:
init_score (np.ndarray): The data to get init threshold.
For `OmniAnomaly`, it should be the anomaly score of train set.
score (np.ndarray): The data to run POT method.
For `OmniAnomaly`, it should be the anomaly score of test set.
label:
q (float): Detection level (risk)
level (float): Probability associated with the initial threshold t
Returns:
dict: pot result dict
"""
s = SPOT(q) # SPOT object
s.fit(init_score, score) # data import
s.initialize(level=level, min_extrema=True) # initialization step
ret = s.run(dynamic=False) # run
print(len(ret['alarms']))
print(len(ret['thresholds']))
pot_th = -np.mean(ret['thresholds'])
pot_th = max(threshold, pot_th)
pred, p_latency = adjust_predicts(score, label, pot_th, calc_latency=True)
p_t = calc_point2point(pred, label)
print('POT result: ', p_t, pot_th, p_latency)
return p_t, pot_th, pred, {
'pot-f1': p_t[0],
'pot-precision': p_t[1],
'pot-recall': p_t[2],
'pot-TP': p_t[3],
'pot-TN': p_t[4],
'pot-FP': p_t[5],
'pot-FN': p_t[6],
'pot-threshold': pot_th,
'pot-latency': p_latency
}
def pot_eval_online(init_score, score, q=1e-3, level=0.02):
"""
Run POT method on given score.
Args:
init_score (np.ndarray): The data to get init threshold.
For `OmniAnomaly`, it should be the anomaly score of train set.
score (np.ndarray): The data to run POT method.
For `OmniAnomaly`, it should be the anomaly score of test set.
q (float): Detection level (risk)
level (float): Probability associated with the initial threshold t
Returns:
dict: pot result dict
"""
s = SPOT(q) # SPOT object
s.fit(init_score, score) # data import
s.initialize(level=level, min_extrema=True) # initialization step
ret = s.run(dynamic=False) # run
print(len(ret['alarms']))
print(len(ret['thresholds']))
pot_th = -np.mean(ret['thresholds'])
return pot_th
| [
"420822631@qq.com"
] | 420822631@qq.com |
f80068e4c813e20b94f963bc6e02662511c57101 | b7204fa9006e8328d06dab9e66229b4a07c0c85a | /Project 1/uninformed.py | 9ae3c66e4f2c42c37cb9bb3e641ec83e6d15d742 | [] | no_license | iamchristoph/AI | ec72c2e998532980e86ebc9d10de287c918ef856 | 5afa7626330a8be28765eff1ba5d553a72a3058d | refs/heads/master | 2020-05-18T18:02:40.742861 | 2015-12-09T19:01:23 | 2015-12-09T19:01:23 | 41,823,281 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 15,546 | py | import time
import sys
from collections import deque
# -------------------------------------------------
# Project by Christopher Terry and Travis Smith
# -------------------------------------------------
# Instructions on use:
# python uninformed.py <optional file name>
#
# If a file name is not provided, the following will be used instead:
test = (0, 1, 3, 5, 7, 8, 6, 4, 2)
# _13
# 578
# 642
# True will print out all of the moves taken, while False will not show them. False is very useful for just seeing the program's report
printMoves = True
# Set to a different number to try a different limit for Depth Limited Search
depthLimitedCap = 21
# If you want
maximumMovesToShow = 31
# Shouldn't need to alter, but provided as a config option none the less
solution = (1, 2, 3, 4, 5, 6, 7, 8, 0) # Option to re-arrange the way the solution is formatted
# 1 2 3
# 4 5 6
# 7 8 _
# -------------------------------------------------
# End of Config Options
# -------------------------------------------------
# Read a filename, if any, provided on the command line
if len(sys.argv) > 1:
filename = sys.argv[1]
text = ""
with open(filename) as f:
for line in f:
for character in line:
if character in ('12345678_'):
text += character
text = text.replace("_", "0")
listOfInts = []
for character in text:
listOfInts.append(int(character))
testReplacement = tuple(listOfInts)
test = testReplacement
class Puzzle :
#grids = {}
# list address
# 0 1 2
# 3 4 5
# 6 7 8
def getOffspring(self) :
# Returns the positions (relative to the blank's position in the list) of pieces that can be swapped with the blank
blank = self.blank
if blank is 0 : # The blank can move right or down
return 1, 3
elif blank is 1 : # The blank can move left, right, or down
return -1, 1, 3
elif blank is 2 : # The blank can move left or down
return -1, 3
elif blank is 3 : # The blank can move up, right, or down
return -3, 1, 3
elif blank is 4 : # The blank can move up, left, right, or down
return -3, -1, 1, 3
elif blank is 5 : # The blank can move up, left, or down
return -3, -1, 3
elif blank is 6 : # The blank can move up or right
return -3, 1
elif blank is 7 : # The blank can move up, left, or right
return -3, -1, 1
elif blank is 8 : # The blank can move up or left
return -3, -1
def __init__(self, puzz, dep) :
self.state = puzz
self.depth = dep
self.blank = puzz.index(0)
self.kids = Puzzle.getOffspring(self)
#Puzzle.grids
#clean up
def cleanUp() :
if Puzzle.grids :
del Puzzle.grids
Puzzle.grids = {}
# Swap the piece at loc + d with the blank. loc = blank's current location
def move(loc, puzzle, d) :
moved = puzzle[loc + d]
chi = [x for x in puzzle]
chi[loc + d] = 0
chi[loc] = moved
child = (chi[0], chi[1], chi[2],chi[3], chi[4], chi[5], chi[6], chi[7], chi[8])
return child
# Get all the children of a possible puzzle state
def children(parent) :
space = parent.blank
puzzle = parent.state
depth = parent.depth + 1
kiddies = []
for child in parent.kids : # Parent.kids is a list of directions (ints) that are possible to have as kids, not pre-established puzzle states
kid = Puzzle(move(space, puzzle, child), depth) # Create a new puzzle for each kid
if kid.state in Puzzle.grids :
if Puzzle.grids[kid.state].depth > depth :
Puzzle.grids[kid.state] = parent
kiddies.append(kid)
else :
Puzzle.grids[kid.state] = parent
kiddies.append(kid)
return kiddies
# Print the puzzle
def display(p, max) :
if p.depth is 0 or max is 0:
pass
else :
display(Puzzle.grids[p.state], max - 1)
print "moves =", p.depth
print p.state[0:3]
print p.state[3:6]
print p.state[6:]
# Get the number of moves taken to reach the goal
def getMoves(p, max) :
if p.depth is 0 or max is 0:
return 1
else :
return getMoves(Puzzle.grids[p.state], max - 1) + 1
# Are we there yet?
def is_goal(state) :
if solution in Puzzle.grids :
return True
return False
# Breadth first search
def breadth(start) :
queue = deque()
state = Puzzle(start, 0)
i = 0
while i < 50000 :
i += 1
if is_goal(state) :
return (True, i)
else :
kids = children(state)
for kid in kids :
queue.append(kid)
if not queue :
return (False, i)
state = queue.popleft()
return (False, i)
# Depth first search
def depth(start) :
stack = deque()
state = Puzzle(start, 0)
i = 0
while i < 50000 :
i += 1
if is_goal(state) :
#print "nodes searched =", i
return (True,i)
else :
kids = children(state)
for kid in kids :
stack.append(kid)
if not stack :
return (False, i)
state = stack.pop()
return (False, i)
# Depth limited search
def depth_limited(start, limit) :
stack = deque()
state = Puzzle(start, 0)
i = 0
if limit == -1:
limit = 500000
while i < 500000 :
i += 1
#print state.depth,
if is_goal(state) :
return (True, i) # Found the goal, this many nodes searched
else :
kids = children(state)
for kid in kids :
if kid.depth < limit : # Only look at a kid if it has less than 40 depth?
# This was taking 17566 nodes to find the solution, without it it takes 12740 nodes
stack.append(kid)
if not stack :
return (False, i) # Did not find the goal, this many nodes searched
state = stack.pop()
return (False, limit)
# Iterative Deepening search
def iterative(start, limit):
if limit == -1:
limit = 500000
for i in range(1, limit+1):
returned = depth_limited(start, i)
success, nodes = returned
if success:
return (success, nodes)
#clean up
del Puzzle.grids
Puzzle.grids = {}
return (False, limit)
# Bi-Directional
def bidirectional(start, end):
    """Simultaneous BFS from `start` and from `end`.

    Stops when some state has been generated by both searches, then stitches
    the two parent maps into ``Puzzle.grids`` so the move sequence can be
    displayed. Returns a (found, nodes_examined) pair.
    """
    # Both sides to a BFS, stop when one element is in both BFS's
    startGrid = {}
    endGrid = {}
    def get_children(parent, grid):
        # Expand `parent`, recording child-state -> parent in `grid`.
        space = parent.blank
        puzzle = parent.state
        depth = parent.depth + 1
        kiddies = []
        for child in parent.kids :
            kid = Puzzle(move(space, puzzle, child), depth) # Create a new puzzle for each kid
            if kid.state in grid :
                if grid[kid.state].depth > depth :
                    # NOTE(review): indentation was lost in this copy; the
                    # re-add below is assumed to apply only on a shallower
                    # rediscovery — confirm against the original file.
                    grid[kid.state] = parent
                    kiddies.append(kid)
            else :
                grid[kid.state] = parent
                kiddies.append(kid)
        return (kiddies, grid)
    def goalFound():
        # A state present in both parent maps means the frontiers have met.
        for key in startGrid.keys():
            if key in endGrid:
                return True
        return False
    def constructPuzzleGrid():
        # Merge both parent maps into Puzzle.grids, re-rooting the end-side
        # chain so depths continue past the meeting point.
        for key in startGrid.keys():
            if key in endGrid:
                # This is where the two grids meet
                # Add the "tree" for the startGrid to Puzzle Grid
                startTree = key
                while not startGrid[startTree].depth == 0:
                    Puzzle.grids[startTree] = startGrid[startTree]
                    startTree = startGrid[startTree].state
                Puzzle.grids[startTree] = startGrid[startTree]
                depthOfStart = startGrid[key].depth
                depthOfEnd = endGrid[key].depth
                endTree = key
                while not endTree == solution:
                    # Needs to convert the endTree's child state -> parent puzzle into parent state -> child puzzle
                    Puzzle.grids[endGrid[endTree].state] = Puzzle(endTree, depthOfStart + (depthOfEnd - endGrid[endTree].depth) +1)
                    endTree = endGrid[endTree].state
                return
    startQueue = deque()
    startState = Puzzle(start, 0)
    endQueue = deque()
    endState = Puzzle(end, 0)
    i = 0
    while i < 50000 :
        i += 1
        # Check if the goal is found
        if goalFound() :
            # Construct the Puzzle Grid
            constructPuzzleGrid()
            return (True, i)
        # Iterate on the start
        else :
            kids, startGrid = get_children(startState, startGrid)
            for kid in kids :
                startQueue.append(kid)
            if not startQueue :
                return (False, i)
            startState = startQueue.popleft()
        # Check if the goal is found
        if goalFound() :
            constructPuzzleGrid()
            return (True, i)
        # Iterate on the end
        else :
            kids, endGrid = get_children(endState, endGrid)
            for kid in kids :
                endQueue.append(kid)
            if not endQueue :
                return (False, i)
            endState = endQueue.popleft()
    return (False, i)
# Informed Searces ----------------------------------------------------------
# ---------------------------------------------------------------------------
# Heuristics
from math import ceil
def manhattan(grid):
    """Sum of Manhattan distances of every tile from its goal square.

    The grid is a flat sequence of the ints 0-8 (0 = blank); the goal puts
    tile n at index n-1 with the blank last, so the blank is treated as 9.

    Args:
        grid: flat 3x3 puzzle state (permutation of 0-8).

    Returns:
        int: total Manhattan distance (0 for the solved grid).
    """
    value = 0
    for num in grid:
        i = grid.index(num) + 1          # 1-based position of this tile
        if num == 0:                     # was `is 0`: identity check on an int
            num = 9                      # blank belongs in the last square
        # Ceil division written as negated floor division. The original
        # mixed `/` and `//`, which only agree under Python 2.
        upDnMoves = abs(-(-num // 3) - (-(-i // 3)))   # row distance
        x = i % 3
        x = x if x else 3                # current column, 1..3
        y = num % 3
        y = y if y else 3                # goal column, 1..3
        lrMoves = abs(y - x)             # column distance
        value += upDnMoves + lrMoves
    return value
def customHeuristic1(grid):
    """Squared-Manhattan heuristic: sum of (row + col distance)**2 per tile.

    Tiles far from home are penalised quadratically; the blank (0) costs
    nothing since it has to move anyway to shuffle the other tiles.

    Args:
        grid: flat 3x3 puzzle state (permutation of 0-8).

    Returns:
        int: heuristic value (0 for the solved grid).
    """
    value = 0
    for i in range(len(grid)):
        piece = grid[i]
        # We don't care where the blank tile is, since it's needed to move other pieces
        if (i + 1) == piece or piece == 0:
            value += 0  # the piece is in the right place
        else:
            goalIndex = piece - 1
            goalIndex = goalIndex if goalIndex >= 0 else 8
            # Row/column deltas. Use floor division so the arithmetic stays
            # integral on Python 3 (the original used true division `/`).
            x = abs((goalIndex // 3) - (i // 3))
            y = abs((goalIndex % 3) - (i % 3))
            distance = x + y
            value += distance ** 2
    return value
# A* search
def aStar(start, heuristic, useD):
    """Best-first search ordered by `heuristic` (+ node depth when `useD`).

    With `useD` falsy this is greedy best-first search; with it truthy the
    priority is h + g, i.e. A*. Returns a (found, nodes_examined) pair.
    """
    def score(node, use_depth):
        # Attach the priority to the node so the sort key can read it back.
        node.hValue = heuristic(node.state) + (node.depth if use_depth else 0)
    open_list = []
    node = Puzzle(start, 0)
    score(node, useD)
    examined = 0
    while examined < 50000:
        examined += 1
        if is_goal(node):
            return (True, examined)
        for kid in children(node):
            score(kid, useD)
            open_list.append(kid)
        # Keep the frontier ordered by priority, lowest first.
        open_list.sort(key=lambda n: n.hValue)
        if not open_list:
            return (False, examined)
        node = open_list.pop(0)
    return (False, examined)
# ---------------------------------------------------------------------------
# Driver: run every uninformed search on the same start state and report
# wall-clock time, nodes searched and solution length (Python 2 prints).
originalPuzzle = test
Puzzle.grids = {}
print "Uninformed Searches"
# Start Breadth First Search
start = time.time()
success, nodes = breadth(originalPuzzle)
if success:
    end = time.time()
    print "BFS solution found!\n- Solution found in %s seconds\n- %s nodes searched\n- The solution takes %s moves"%(end - start, nodes, getMoves(Puzzle.grids[solution], maximumMovesToShow))
    if printMoves:
        display(Puzzle.grids[solution], maximumMovesToShow)
else :
    end = time.time()
    print "BFS failed to find solution. Ran for %s seconds and searched %s nodes"%(end - start, nodes)
#clean up
del Puzzle.grids
Puzzle.grids = {}
# Start Depth First Search
# NOTE(review): "DFS" actually calls depth_limited with a 100000 cap rather
# than the depth() function above — confirm this is intentional.
start = time.time()
success, nodes = depth_limited(originalPuzzle, 100000)
if success:
    end = time.time()
    print "DFS solution found!\n- Solution found in %s seconds\n- %s nodes searched\n- The solution takes %s moves"%(end - start, nodes, Puzzle.grids[solution].depth + 1)
    if printMoves:
        display(Puzzle.grids[solution], maximumMovesToShow)
else :
    end = time.time()
    print "DFS failed to find solution. Ran for %s seconds and searched %s nodes"%(end - start, nodes)
#clean up
del Puzzle.grids
Puzzle.grids = {}
# Start Depth Limited Search
start = time.time()
success, nodes = depth_limited(originalPuzzle, depthLimitedCap)
end = time.time()
if success:
    print "Depth Limited solution found!\n- Solution found in %s seconds\n- %s nodes searched\n- The solution takes %s moves"%(end - start, nodes, getMoves(Puzzle.grids[solution], maximumMovesToShow))
    if printMoves:
        display(Puzzle.grids[solution], maximumMovesToShow)
else :
    print "Depth Limited failed to find solution. Ran for %s seconds and searched %s nodes"%(end - start, nodes)
#clean up
del Puzzle.grids
Puzzle.grids = {}
# Start Iterative Deepening Search
start = time.time()
success, nodes = iterative(originalPuzzle, -1)
end = time.time()
if success:
    print "Iterative Deepening solution found!\n- Solution found in %s seconds\n- %s nodes searched\n- The solution takes %s moves"%(end - start, nodes, getMoves(Puzzle.grids[solution], maximumMovesToShow))
    if printMoves:
        display(Puzzle.grids[solution], maximumMovesToShow)
else :
    print "Iterative Deepening failed to find solution. Ran for %s seconds and searched %s nodes"%(end - start, nodes)
#clean up
del Puzzle.grids
Puzzle.grids = {}
# Bi-Directional Search
start = time.time()
success, nodes = bidirectional(originalPuzzle, solution)
end = time.time()
if success:
    print "Bi-Directional solution found!\n- Solution found in %s seconds\n- %s nodes searched\n- The solution takes %s moves"%(end - start, nodes, getMoves(Puzzle.grids[solution], maximumMovesToShow))
    if printMoves:
        display(Puzzle.grids[solution], maximumMovesToShow)
else :
    print "Bi-Directional failed to find solution. Ran for %s seconds and searched %s nodes"%(end - start, nodes)
#clean up
del Puzzle.grids
Puzzle.grids = {}
# ---------------------------------------------------------------------------
# Tests
# Driver: run greedy and A* variants of aStar() with both heuristics and
# report time, nodes searched and solution length (Python 2 prints).
print "Informed Searches"
originalPuzzle = test
Puzzle.grids = {}
# Greedy Manhattan
start = time.time()
success, nodes = aStar(originalPuzzle, manhattan, 0)
end = time.time()
if success :
    print "Greedy Manhattan solution found!\n- Solution found in %s seconds\n- %s nodes searched\n- The solution takes %s moves"%(end - start, nodes, Puzzle.grids[solution].depth + 1)
    if printMoves :
        display(Puzzle.grids[solution], maximumMovesToShow)
else :
    print "Greedy Manhattan failed to find solution. Ran for %s seconds and searched %s nodes"%(end - start, nodes)
del Puzzle.grids
Puzzle.grids = {}
# Greedy Custom Heuristic
start = time.time()
success, nodes = aStar(originalPuzzle, customHeuristic1, 0)
end = time.time()
if success :
    print "Greedy Custom Heuristic solution found!\n- Solution found in %s seconds\n- %s nodes searched\n- The solution takes %s moves"%(end - start, nodes, Puzzle.grids[solution].depth + 1)
    if printMoves :
        display(Puzzle.grids[solution], maximumMovesToShow)
else :
    print "Greedy Custom Heuristic failed to find solution. Ran for %s seconds and searched %s nodes"%(end - start, nodes)
del Puzzle.grids
Puzzle.grids = {}
# A* Manhattan (useD=1 adds node depth to the heuristic)
start = time.time()
success, nodes = aStar(originalPuzzle, manhattan, 1)
end = time.time()
if success :
    print "A* Manhattan solution found!\n- Solution found in %s seconds\n- %s nodes searched\n- The solution takes %s moves"%(end - start, nodes, Puzzle.grids[solution].depth + 1)
    if printMoves :
        display(Puzzle.grids[solution], maximumMovesToShow)
else :
    print "A* Manhattan failed to find solution. Ran for %s seconds and searched %s nodes"%(end - start, nodes)
del Puzzle.grids
Puzzle.grids = {}
# A* Custom Heuristic
start = time.time()
success, nodes = aStar(originalPuzzle, customHeuristic1, 1)
end = time.time()
if success :
    print "A* Custom Heuristic solution found!\n- Solution found in %s seconds\n- %s nodes searched\n- The solution takes %s moves"%(end - start, nodes, Puzzle.grids[solution].depth + 1)
    if printMoves :
        display(Puzzle.grids[solution], maximumMovesToShow)
else :
    print "A* Custom Heuristic failed to find solution. Ran for %s seconds and searched %s nodes"%(end - start, nodes)
| [
"iamchristoph@yahoo.com"
] | iamchristoph@yahoo.com |
55b708e6c7de4d53c9d8f7c3c0a01fa15e1c2cf9 | cbeb1d7060dbc281c11c6b029a1d156e1ec7ebfd | /yugoslavia/locations.py | 37f896b9e1d335e65c273a0e6078610e6cfad4ce | [] | no_license | paulkirkwood/py.parcoursdb | 28ceceaf4f44e03d9911892a9a916447cd7c7477 | df2745064e4c66dc0c2d522fc0381bf13a8e7859 | refs/heads/master | 2020-09-14T04:58:10.940799 | 2020-02-24T21:04:31 | 2020-02-24T21:04:31 | 223,024,314 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 126 | py | from country import Yugoslavia
from location import Location
def ljubljana():
return Location("Ljubljana", Yugoslavia())
| [
"paul@paulandsue.plus.com"
] | paul@paulandsue.plus.com |
606c6daa39403e1f7813670974620cd5c5c62c6f | 9c8b45b2b2be2e4c7063675965fa25538114e660 | /namseoul/urls.py | 85b13d914746388b20e042876187df50d8b64b07 | [] | no_license | gloweean/namseoul | 1a8f8b85b7ff4213c078b8e3cca409dfadfac5f4 | 9acc0c3c0e12f61d5ad399c32364bff2d11cbcfb | refs/heads/master | 2020-04-07T01:49:23.669077 | 2018-12-22T05:44:37 | 2018-12-22T05:44:37 | 157,953,505 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,509 | py | """namseoul URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.0/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path, include
from message import views
from rest_framework import routers
from rest_framework.authtoken import views as AuthView
from member.views import UserSignUpView, UserRetrieveUpdateDestroyView, UserLogoutView
# When using a ViewSet, a router must be registered for it.
router = routers.DefaultRouter()
router.register(r'message', views.MessageViewSet)
urlpatterns = [
    path('admin/', admin.site.urls),
    path('', include(router.urls)),
    path('signup', UserSignUpView.as_view()),
    path('user_info', UserRetrieveUpdateDestroyView.as_view()),
    # Returns an auth token; subsequent requests must send it in the request
    # header as "Authorization: Token 9944b09199c62bcf9418ad846dd0e4bbdfc6ee4b".
    path('login', AuthView.obtain_auth_token),
    path('logout', UserLogoutView.as_view()),
]
| [
"gaius827@gmail.com"
] | gaius827@gmail.com |
ea9f6e7a47afd34b16da8ec54171afda6c3c36d7 | 1a47c5af54e1dc8b71d2dd4267e0883bab81ba1d | /genetic_algo.py | 66e407e50b056b38d0a8fe986e019aea10cc0c2c | [] | no_license | gauravchopracg/genetic_algorithm | 5690d421a151bc5a7351357391d5a74f9df99837 | 3ed6511d968362b8df1ca0f7e1843d262b60a1f7 | refs/heads/master | 2020-06-19T17:33:03.559285 | 2019-07-14T06:55:42 | 2019-07-14T06:55:42 | 196,803,640 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,057 | py | from tpot import TPOTClassifier
from sklearn.model_selection import train_test_split
import pandas as pd
import numpy as np
# Load the data.
telescope = pd.read_csv('MAGIC Gamma Telescope Data.csv')
# Clean the data: shuffle the rows so class labels are not grouped together.
telescope_shuffle = telescope.iloc[np.random.permutation(len(telescope))]
tele = telescope_shuffle.reset_index(drop=True)
# Store the 2 classes as integers (gamma -> 0, hadron -> 1).
tele['Class'] = tele['Class'].map({'g': 0, 'h': 1})
tele_class = tele['Class'].values
# Split training and validation data (stratified 75/25 on the class label).
training_indices, validation_indices = train_test_split(
    tele.index, stratify=tele_class, train_size=0.75, test_size=0.25)
# Let Genetic Programming find the best ML model and hyperparameters.
tpot = TPOTClassifier(generations=5, verbosity=2)
# Fixed: the label selector used the misspelled name `training_indicss`,
# which raised NameError at runtime.
tpot.fit(tele.drop('Class', axis=1).loc[training_indices].values,
         tele.loc[training_indices, 'Class'].values)
# Score the accuracy on the held-out rows.
tpot.score(tele.drop('Class', axis=1).loc[validation_indices].values,
           tele.loc[validation_indices, 'Class'].values)
# Export the generated pipeline code.
tpot.export('pipeline.py')
"gauravchopracg@gmail.com"
] | gauravchopracg@gmail.com |
f19a4bb0dc2e392197331c9ddc9a58eef5dc0f86 | 30043f42552c9d74e9c81d396f7303ea9bb9946b | /src/commands/__init__.py | c2342847b4d20229993a5209974d970e2ef9cbc6 | [
"MIT"
] | permissive | SkylarKelty/pyirc | ad483bb30f6c661529060e539ec75e8318dd2d71 | 7fae4435671d8281e3dcf4c5341ef85a79031ca9 | refs/heads/master | 2021-01-19T06:12:29.720773 | 2014-10-04T10:21:07 | 2014-10-04T10:21:07 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 97 | py | from src.commands import hello
from src.commands import noaction
from src.commands import remind
| [
"skylarkelty@gmail.com"
] | skylarkelty@gmail.com |
9baf993431c633cca33d8c2904b04c63250aa8a1 | 57d958cdd7eda2e0d6f010e0b0e1578217d616cc | /onlineCalcProj/client/generate_token.py | ea72287157237a7e7bcc8af60eadebbd03cb2262 | [] | no_license | mohdhallal/OnlineCalculatorApiDjango | 61ed2388ddab5e2b92e23c774db8cd531ecb4245 | 5480d1373ce3fb437abccc2202e8a053abfacfec | refs/heads/master | 2020-07-06T10:41:26.944777 | 2019-08-16T09:40:05 | 2019-08-16T09:40:05 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 192 | py | import requests
r = requests.post('http://127.0.0.1:7070/api/token/',
data={"username": "user", "password": "megasoft123"})
print(r.text)
print(r.status_code)
print(r.json()) | [
"martin.mamdouh2014@gmail.com"
] | martin.mamdouh2014@gmail.com |
487668fe2ef1bf1b80979377f315ef43e5eef143 | 4fb624627e9a0e198d1658981fd059c18f7df35a | /test.py | a49cc1ae97cd225cc411d471d8fad5b5a8dd6863 | [] | no_license | bemorepower/Event-knowledge-map | be4475278d233d72122fc92cab8b374e2ec5093c | fcdc9f20bd919e1f83774b36ce80aff624f7210c | refs/heads/master | 2023-02-11T03:37:01.730011 | 2021-01-12T06:37:47 | 2021-01-12T06:37:47 | 328,854,602 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,959 | py | # -*- coding: utf-8 -*-
"""
ๆๆฌไธญไบๅฎไธๅ
็ปๆฝๅ
python *.py input.txt output.txt begin_line end_line
"""
# Set your own model path
MODELDIR = "/data/ltp/ltp-models/3.3.0/ltp_data"
import sys
import os
from pyltp import Segmentor, Postagger, Parser, NamedEntityRecognizer
# Load every LTP model up front (segmentation, POS tagging, dependency
# parsing, named-entity recognition). NOTE(review): the bare `print` lines
# followed by a string look like Python 2 `print "..."` statements split by
# a copy/encoding error — confirm against the original file.
print
"ๆญฃๅจๅ ่ฝฝLTPๆจกๅ... ..."
segmentor = Segmentor()
segmentor.load(os.path.join(MODELDIR, "cws.model"))
postagger = Postagger()
postagger.load(os.path.join(MODELDIR, "pos.model"))
parser = Parser()
parser.load(os.path.join(MODELDIR, "parser.model"))
recognizer = NamedEntityRecognizer()
recognizer.load(os.path.join(MODELDIR, "ner.model"))
# labeller = SementicRoleLabeller()
# labeller.load(os.path.join(MODELDIR, "srl/"))
print
"ๅ ่ฝฝๆจกๅๅฎๆฏใ"
# Command-line defaults: input file, output file, first and last line
# (0 means "to end of file").
in_file_name = "input.txt"
out_file_name = "output.txt"
begin_line = 1
end_line = 0
if len(sys.argv) > 1:
    in_file_name = sys.argv[1]
if len(sys.argv) > 2:
    out_file_name = sys.argv[2]
if len(sys.argv) > 3:
    begin_line = int(sys.argv[3])
if len(sys.argv) > 4:
    end_line = int(sys.argv[4])
def extraction_start(in_file_name, out_file_name, begin_line, end_line):
    """Drive fact-triple extraction over a line range of the input file.

    Args:
        in_file_name: input text file, one sentence per line.
        out_file_name: output file that extracted triples are appended to.
        begin_line: 1-based first line to process.
        end_line: last line to process (0 means "to end of file").
    """
    in_file = open(in_file_name, 'r')
    out_file = open(out_file_name, 'a')
    line_index = 1
    sentence_number = 0
    text_line = in_file.readline()
    while text_line:
        if line_index < begin_line:
            text_line = in_file.readline()
            line_index += 1
            continue
        if end_line != 0 and line_index > end_line:
            break
        sentence = text_line.strip()
        # Skip blank lines and overly long sentences (parser cost blows up).
        if sentence == "" or len(sentence) > 1000:
            text_line = in_file.readline()
            line_index += 1
            continue
        try:
            fact_triple_extract(sentence, out_file)
            out_file.flush()
        except:
            # NOTE(review): bare except silently drops any sentence the
            # parser chokes on — deliberate best-effort, but worth logging.
            pass
        sentence_number += 1
        if sentence_number % 50 == 0:
            print
            "%d done" % (sentence_number)
        text_line = in_file.readline()
        line_index += 1
    in_file.close()
    out_file.close()
def fact_triple_extract(sentence, out_file):
    """Extract fact triples (entity, relation, entity) from one sentence.

    Runs LTP segmentation, POS tagging, NER and dependency parsing, then
    emits triples for several dependency patterns (SVO, attributive
    verb-object, prepositional complement, named-entity attributes).
    NOTE(review): several output string literals below contain embedded
    line breaks from a copy/encoding error; preserved byte-for-byte.

    Args:
        sentence: the sentence to process.
        out_file: open file object the tab-separated triples are written to.
    """
    # print sentence
    words = segmentor.segment(sentence)
    # print "\t".join(words)
    postags = postagger.postag(words)
    netags = recognizer.recognize(words, postags)
    arcs = parser.parse(words, postags)
    # print "\t".join("%d:%s" % (arc.head, arc.relation) for arc in arcs)
    child_dict_list = build_parse_child_dict(words, postags, arcs)
    for index in range(len(postags)):
        # Extract fact triples centred on a verb.
        if postags[index] == 'v':
            child_dict = child_dict_list[index]
            # Subject-verb-object pattern.
            if child_dict.has_key('SBV') and child_dict.has_key('VOB'):
                e1 = complete_e(words, postags, child_dict_list, child_dict['SBV'][0])
                r = words[index]
                e2 = complete_e(words, postags, child_dict_list, child_dict['VOB'][0])
                out_file.write("ไธป่ฏญ่ฐ่ฏญๅฎพ่ฏญๅ
ณ็ณป\t(%s, %s, %s)\n" % (e1, r, e2))
                out_file.flush()
            # Postposed attributive with a verb-object relation.
            if arcs[index].relation == 'ATT':
                if child_dict.has_key('VOB'):
                    e1 = complete_e(words, postags, child_dict_list, arcs[index].head - 1)
                    r = words[index]
                    e2 = complete_e(words, postags, child_dict_list, child_dict['VOB'][0])
                    temp_string = r + e2
                    if temp_string == e1[:len(temp_string)]:
                        e1 = e1[len(temp_string):]
                    if temp_string not in e1:
                        out_file.write("ๅฎ่ฏญๅ็ฝฎๅจๅฎพๅ
ณ็ณป\t(%s, %s, %s)\n" % (e1, r, e2))
                        out_file.flush()
            # Subject-verb-complement pattern with a prepositional object.
            if child_dict.has_key('SBV') and child_dict.has_key('CMP'):
                # e1 = words[child_dict['SBV'][0]]
                e1 = complete_e(words, postags, child_dict_list, child_dict['SBV'][0])
                cmp_index = child_dict['CMP'][0]
                r = words[index] + words[cmp_index]
                if child_dict_list[cmp_index].has_key('POB'):
                    e2 = complete_e(words, postags, child_dict_list, child_dict_list[cmp_index]['POB'][0])
                    out_file.write("ไปๅฎพๅ
ณ็ณปไธป่ฐๅจ่กฅ\t(%s, %s, %s)\n" % (e1, r, e2))
                    out_file.flush()
        # Try extracting triples anchored on a named entity.
        if netags[index][0] == 'S' or netags[index][0] == 'B':
            ni = index
            if netags[ni][0] == 'B':
                while netags[ni][0] != 'E':
                    ni += 1
                e1 = ''.join(words[index:ni + 1])
            else:
                e1 = words[ni]
            if arcs[ni].relation == 'ATT' and postags[arcs[ni].head - 1] == 'n' and netags[arcs[ni].head - 1] == 'O':
                r = complete_e(words, postags, child_dict_list, arcs[ni].head - 1)
                if e1 in r:
                    r = r[(r.index(e1) + len(e1)):]
                if arcs[arcs[ni].head - 1].relation == 'ATT' and netags[arcs[arcs[ni].head - 1].head - 1] != 'O':
                    e2 = complete_e(words, postags, child_dict_list, arcs[arcs[ni].head - 1].head - 1)
                    mi = arcs[arcs[ni].head - 1].head - 1
                    li = mi
                    if netags[mi][0] == 'B':
                        while netags[mi][0] != 'E':
                            mi += 1
                        e = ''.join(words[li + 1:mi + 1])
                        e2 += e
                    if r in e2:
                        e2 = e2[(e2.index(r) + len(r)):]
                    if r + e2 in sentence:
                        out_file.write("ไบบๅ//ๅฐๅ//ๆบๆ\t(%s, %s, %s)\n" % (e1, r, e2))
                        out_file.flush()
def build_parse_child_dict(words, postags, arcs):
    """For each token, map dependency relation -> list of child indices.

    Args:
        words: segmented word list.
        postags: part-of-speech tag list (parallel to words).
        arcs: dependency arcs from the LTP parser (arc.head is 1-based).

    Returns:
        list of dicts, one per word; each maps a relation label (e.g.
        'SBV', 'VOB', 'ATT') to the 0-based indices of children bearing it.
    """
    child_dict_list = []
    for index in range(len(words)):
        child_dict = dict()
        for arc_index in range(len(arcs)):
            if arcs[arc_index].head == index + 1:
                if child_dict.has_key(arcs[arc_index].relation):  # Python 2 dict API
                    child_dict[arcs[arc_index].relation].append(arc_index)
                else:
                    child_dict[arcs[arc_index].relation] = []
                    child_dict[arcs[arc_index].relation].append(arc_index)
        # if child_dict.has_key('SBV'):
        #     print words[index],child_dict['SBV']
        child_dict_list.append(child_dict)
    return child_dict_list
def complete_e(words, postags, child_dict_list, word_index):
    """Expand a head word into a fuller entity phrase.

    Recursively prepends attributive (ATT) modifiers and, for verbs, pulls
    in the subject as a prefix and the object as a postfix, so a bare token
    becomes the complete phrase it heads.
    """
    child_dict = child_dict_list[word_index]
    prefix = ''
    if child_dict.has_key('ATT'):
        for i in range(len(child_dict['ATT'])):
            prefix += complete_e(words, postags, child_dict_list, child_dict['ATT'][i])
    postfix = ''
    if postags[word_index] == 'v':
        if child_dict.has_key('VOB'):
            postfix += complete_e(words, postags, child_dict_list, child_dict['VOB'][0])
        if child_dict.has_key('SBV'):
            prefix = complete_e(words, postags, child_dict_list, child_dict['SBV'][0]) + prefix
    return prefix + words[word_index] + postfix
if __name__ == "__main__":
    # Run extraction with the CLI-derived file names and line range.
    # extraction_start(in_file_name, out_file_name, begin_line, end_line)
    extraction_start(in_file_name, out_file_name, begin_line, end_line)
"2460712447@qq.com"
] | 2460712447@qq.com |
6bd121269c85094184fe489d0e9a2a61f4760cff | d7ec4aad024745725efb9587c7f9f56020311ee5 | /env/bin/jupyter-migrate | e320c1cb9573b4fc31a7ea685fd7ce3a96569a1e | [] | no_license | PlumedSerpent/PPE-Object-Detection | e3c382c5a099d76d76f3b36ea21634d8706178ff | 035d105ee10439f441626704a27f2cd14e34242a | refs/heads/master | 2023-03-23T22:32:03.644884 | 2021-03-22T19:38:30 | 2021-03-22T19:38:30 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 257 | #!/home/ec2-user/PPE-Object-Detection/env/bin/python3
# -*- coding: utf-8 -*-
import re
import sys
from jupyter_core.migrate import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(main())
| [
"ec2-user@ip-172-31-67-74.ec2.internal"
] | ec2-user@ip-172-31-67-74.ec2.internal | |
169249ed1830a96d7cd1e82c22572fba0f053b0e | 16ca6a7afcd4f62b055d6ba1c9841b59c1b15082 | /configs/atss/baseline/atss_r50_fpn_1x_coco_zsd_sfl.py | 485e3d0bccc2d30d4c52af4cb07c5d36c0b5f029 | [
"Apache-2.0"
] | permissive | mengqiDyangge/HierKD | 787365f22eb4b25cb64bb9e255f7824be06a9541 | 18b2307eb7a98e45b6e6d3d30e2a72e13da86c7b | refs/heads/main | 2023-05-23T09:04:31.348978 | 2022-08-25T14:59:41 | 2022-08-25T14:59:41 | 470,613,658 | 34 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,063 | py | _base_ = [
'../../_base_/models/atss_r50_fpn_zsd.py',
'../../_base_/datasets/coco_zero_shot_detection.py',
'../../_base_/schedules/schedule_1x_zsd.py',
'../../_base_/default_runtime.py'
]
# log
log_config = dict(
interval=50,
hooks=[
dict(type='TextLoggerHook'),
dict(type='TensorboardLoggerHook')
])
# optimizer
optimizer = dict(type='SGD', lr=0.01, momentum=0.9, weight_decay=0.0001)
optimizer_config = dict(_delete_=True, grad_clip=dict(max_norm=10, norm_type=2))
lr_config = dict(
policy='step',
warmup='linear',
warmup_iters=500,
warmup_ratio=0.001,
step=[8, 11])
# distill setting
model = dict(
bbox_head=dict(
type='ATSSZSDSFLHead',
use_loss_cls = True,
test_with_clip_ve = False,
dist_featuremap = False,
dist_instance = False,
loss_cls=dict(
type='SoftMaxFocalLoss',
gamma=2.0,
alpha=0.25,
loss_weight=1.0),
)
)
data = dict(
samples_per_gpu=4,
workers_per_gpu=4,
)
| [
"flouzhaozli@gmail.com"
] | flouzhaozli@gmail.com |
28a57877699840d447b57131eafedbf97d4ffd13 | 9e15ada895e90d033bc3b65c2666065bddd62605 | /08/8.1.repr_test.py | 06248a91f25680a03388cc1f6d0487d858914dcf | [] | no_license | zhyErick/fengkuang_python | b0f0c78273420fd862691799bfd7e4f1b6eadf80 | 6d50ad3b7d4ae05d06379c2dc87d91081964ec6d | refs/heads/master | 2021-02-14T08:23:26.616211 | 2020-05-06T13:08:07 | 2020-05-06T13:08:07 | 244,788,500 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 340 | py | class Apple:
# ๅฎ็ฐๆ้ ๅจ
def __init__(self, color, weight):
self.color = color
self.weight = weight
# ้ๅ__repr__()ๆนๆณ,็จไบๅฎ็ฐAppleๅฏน่ฑก็่ชๆๆ่ฟฐ
def __repr__(self):
return "Apple[color=" + self.color + ", weight=" + str(self.weight) + "]"
a = Apple("็บข่ฒ", 5.68)
# print() falls back to Apple.__repr__ since no __str__ is defined.
print(a)
"xingzhishangyang@163.com"
] | xingzhishangyang@163.com |
a5148d91637002d9a241730d570829467d4d050f | cbb3e1df6e3abdd7ac66087aff8b74e43dae278d | /cgi_scripts/play.py | ce5630e1896c66bf2c7bf0cccf172721b91130b4 | [] | no_license | ElasticBottle/primer_checker | 695d2c24fb8bcb2c59c7f2568c7149305f4b480b | 8b225b5f404b01a78fdb1b5a62d7bd41ba3fea86 | refs/heads/master | 2023-04-11T20:49:39.924106 | 2021-04-17T03:29:43 | 2021-04-17T03:29:43 | 293,309,689 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,125 | py | import json
import pandas as pd
base = "D:/Datasets/GISAID_Update_Analysis/blast/"
result = "C:/Users/Winston/Documents/Code/intern_and_work/Astar/primer_checker/cgi_scripts/output"
data = {}
with open(f"{result}/test.csv") as f:
data["US-CDC-2"] = pd.read_csv(
f,
dtype={
"virus_name": "string",
"accession_id": "string",
"date": "string",
"country_name": "string",
"ISO_A3": "string",
"orig_name": "string",
"match_diag": "string",
"misses3": "int",
"misses": "int",
"match_pct": "float",
"type": "string",
"virus_match_idx": "string",
"query_match_idx": "string",
},
).to_dict("records")
database_counts = []
with open(f"{base}database_count.json", "r") as f:
database_counts.append(json.load(f))
with open(f"{base}database_count_daily.json", "r") as f:
database_counts.append(json.load(f))
to_dump = [data, database_counts]
with open(f"{result}/final.json", "w") as f:
json.dump(to_dump, f, separators=(",", ":"))
| [
"winstonyeo99@yahoo.com"
] | winstonyeo99@yahoo.com |
30d563f13f6423cd18ccd165a856cab9a1290c3a | 95e2375d75ffab2a524a8f4c7dd689b5b54569ed | /2018/TreeNode/104. Maximum Depth of Binary Tree.py | 9377453eb22809cac4e8fd9f850963a2e3070fdb | [] | no_license | wufans/EverydayAlgorithms | c0bd561e6bd1aaa930bdf698e50d06e337e9dcf1 | e85277c83b904db365d828e51d89b7398e399a8a | refs/heads/master | 2020-03-23T03:04:35.827120 | 2019-08-08T03:17:17 | 2019-08-08T03:17:17 | 141,008,260 | 4 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,476 | py | # -*- coding: utf-8 -*-
"""
Created on Thu Aug 30 10:56:44 2018
@author: wufan
# =============================================================================
# Given a binary tree, find its maximum depth.
#
# The maximum depth is the number of nodes along the longest path from the root node down to the farthest leaf node.
#
# Note: A leaf is a node with no children.
#
# Example:
#
# Given binary tree [3,9,20,null,null,15,7],
#
# 3
# / \
# 9 20
# / \
# 15 7
# return its depth = 3.
# =============================================================================
"""
# Definition for a binary tree node.
# class TreeNode:
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution:
    def maxDepth(self, root):
        """Return the number of nodes on the longest root-to-leaf path.

        An empty tree has depth 0; a leaf has depth 1.

        :type root: TreeNode
        :rtype: int
        """
        if root is None:
            return 0
        # Depth of a node is one more than the deeper of its subtrees;
        # recursing into a missing child simply contributes 0.
        return 1 + max(self.maxDepth(root.left), self.maxDepth(root.right))
class TreeNode:
    """Minimal binary-tree node: a value plus optional left/right children."""
    def __init__(self, x):
        # New nodes start as leaves; children are attached by the caller.
        self.val = x
        self.left = None
        self.right = None
# =============================================================================
#One line solution
# return 1 + max(map(self.maxDepth, (root.left, root.right))) if root else 0
# =============================================================================
# fastest solution
# =============================================================================
# if root is None:
# return 0
# queue = []
# queue.append(root)
# depth = 1
# a = len(queue)
# while len(queue) > 0:
# if a == 0:
# a = len(queue)
# depth += 1
# curr = queue.pop(0)
# a -= 1
# if curr.left is not None:
# queue.append(curr.left)
# if curr.right is not None:
# queue.append(curr.right)
# return depth
# =============================================================================
# Smoke test: a root with two leaf children has depth 2.
a = TreeNode(3)
a.left = TreeNode(1)
a.right = TreeNode(1)
print(Solution().maxDepth(a))
#b = TreeNode(3)
#print(Solution().maxDepth(b))
| [
"noreply@github.com"
] | wufans.noreply@github.com |
387f78efedf54707074b3d54e433ca863301716b | 9c9701f79c8eeaa05f684442d2d03f7de4bba1f1 | /Korpora/korpus_namuwiki.py | 2e75dd8d2a48c3a78b50a187af42870c394678d6 | [
"CC-BY-4.0"
] | permissive | hank110/Korpora | e54708fe2d7910df4e6ec5cff1cf1ca0696636bf | b0e014f5c8c4ba71aba335285d0be48cbb802a0d | refs/heads/master | 2023-01-10T04:24:14.386097 | 2020-09-21T03:42:25 | 2020-09-21T03:42:25 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,007 | py | import os
from .korpora import Korpus, SentencePairKorpusData
from .utils import fetch, default_korpora_path, load_wikitext
NAMUWIKI_FETCH_INFORMATION = [
{
'url': 'https://github.com/lovit/namuwikitext/releases/download/v0.1/namuwikitext_20200302.v0.1.train.zip',
'destination': 'namuwikitext/namuwikitext_20200302.train.zip',
'method': 'download & unzip'
},
{
'url': 'https://github.com/lovit/namuwikitext/releases/download/v0.1/namuwikitext_20200302.v0.1.test.zip',
'destination': 'namuwikitext/namuwikitext_20200302.test.zip',
'method': 'download & unzip'
},
{
'url': 'https://github.com/lovit/namuwikitext/releases/download/v0.1/namuwikitext_20200302.v0.1.dev.zip',
'destination': 'namuwikitext/namuwikitext_20200302.dev.zip',
'method': 'download & unzip'
}
]
description = """ Author : Hyunjoong Kim lovit@github
Repository : https://github.com/lovit/namuwikitext
References :
๋๋ฌด์ํค์ ๋คํ ๋ฐ์ดํฐ๋ฅผ ๋ฐํ์ ์ ์ํ wikitext ํ์์ ํ
์คํธ ํ์ผ์
๋๋ค.
ํ์ต ๋ฐ ํ๊ฐ๋ฅผ ์ํ์ฌ ์ํคํ์ด์ง ๋ณ๋ก train (99%), dev (0.5%), test (0.5%) ๋ก ๋๋์ด์ ธ์์ต๋๋ค.
"""
license = " CC BY-NC-SA 2.0 KR which Namuwiki dump dataset is licensed"
class NamuwikiTextKorpusData(SentencePairKorpusData):
    """Sentence-pair view of namuwikitext: article bodies paired with titles.

    Args:
        description (str) : data description
        texts (list of str) : namuwiki contents including '\n'
        pairs (list of str) : title
    """
    def __init__(self, description, texts, pairs):
        # No extra state; just forwards to the generic sentence-pair data.
        super().__init__(description, texts, pairs)
class NamuwikiTextKorpus(Korpus):
    """Namuwikitext corpus: fetches the zips and exposes train/dev/test splits."""
    def __init__(self, root_dir=None, force_download=False):
        super().__init__(description, license)
        if root_dir is None:
            root_dir = default_korpora_path
        fetch_namuwikitext(root_dir, force_download)
        for information in NAMUWIKI_FETCH_INFORMATION:
            destination = information['destination']
            # Strip the trailing '.zip' to get the unzipped text file path.
            local_path = os.path.join(os.path.abspath(root_dir), destination[:-4])
            if 'train' in destination:
                # The train split is huge, so ask before loading it into memory.
                response = input(
                    'NamuwikiText.train text file is large (5.3G).'
                    'If you want to load text in your memory, please insert `yes`').lower()
                if (len(response) == 1 and response == 'y') or (response == 'yes'):
                    texts, titles = self.load(local_path)
                    self.train = NamuwikiTextKorpusData(description, texts, titles)
                else:
                    dirname = os.path.abspath(f'{root_dir}/namuwikitext')
                    self.train = f'Namuwikitext corpus is downloaded. Open local directory {dirname}'
                # NOTE(review): indentation was lost in this copy; the
                # print+continue are assumed to run for both train branches
                # so dev/test handling is skipped — confirm against upstream.
                print('Continue to load `dev` and `test`')
                continue
            texts, titles = self.load(local_path)
            if 'dev' in destination:
                self.dev = NamuwikiTextKorpusData(description, texts, titles)
            elif 'test' in destination:
                self.test = NamuwikiTextKorpusData(description, texts, titles)
            else:
                raise ValueError(f'Check local files')
    def load(self, path):
        """Read a wikitext file and return (bodies, titles) tuples."""
        def split_title_text(wikitext):
            # First line is the title; body starts two lines down.
            lines = wikitext.split('\n')
            title = lines[0]
            text = '\n'.join([line.strip() for line in lines[2:] if line.strip()])
            return title, text
        wikitexts = load_wikitext(path)
        wikitexts = [split_title_text(wikitext) for wikitext in wikitexts]
        titles, texts = zip(*wikitexts)
        # swap position
        return texts, titles
def fetch_namuwikitext(root_dir, force_download):
    """Download (and unzip) every namuwikitext split under `root_dir`."""
    root = os.path.abspath(root_dir)
    for info in NAMUWIKI_FETCH_INFORMATION:
        # Destination paths in the table are relative to the Korpora root.
        target = os.path.join(root, info['destination'])
        fetch(info['url'], target, 'namuwikitext', force_download, info['method'])
| [
"soy.lovit@gmail.com"
] | soy.lovit@gmail.com |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.