index int64 0 1,000k | blob_id stringlengths 40 40 | code stringlengths 7 10.4M |
|---|---|---|
11,700 | 32b5b5b89c1f9bf6097e589a3b89c2202241dc28 | '''
LISTS IN PYTHON
'''
# 1. To Create Lists in Python
Numbers = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
Names = ['Alex', 'Bob', 'Nancy']
## List can have elements with different data type
list_1 = [1997, 1998, 'Alex', 'Bob', True, False, 3.45, 5.6]
## List-of-List
list_2 = [[1, 2, 3], [4, 5, 6], [7, 8, 9]]
# 2. To Print (display) elements in the list
## (i). using print statement
print (Numbers)
print (Names)
## (ii). using for loop
for num in Numbers:
    print (num)
print ()
for name in Names:
    print (name)
# 3. Methods available in Lists
'''
.append(value) - appends element to end of the list
.count('x') - counts the number of occurrences of 'x' in the list
.index('x') - returns the index of 'x' in the list
.insert('y','x') - inserts 'x' at location 'y'
.pop() - returns last element then removes it from the list
.remove('x') - finds and removes first 'x' from list
.reverse() - reverses the elements in the list
.sort() - sorts the list alphabetically in ascending order, or numerical in ascending order
'''
Numbers.append(11)
Numbers.append(12)
## Now Numbers is [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12]
# 4. List Slicing
## Lists have a very nice property i.e slicing
## To slice a list we use list_name[start:end:step]
## start -> Starting Point (Inclusive), end -> Ending Point (Exclusive) & step -> Number of Steps to take
print (Numbers[::2]) # Starting till Ending with a step size of 2
## Output: [1, 3, 5, 7, 9, 11]
print (Numbers[::-1]) # Print whole list in reverse order
## Output: [12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1]
## To Access elements from the end we use -1, -2, -3 ...
print (Numbers[-3:])
## Output: [10, 11, 12]
print (Numbers[-1:-4:-1])
## Output: [12, 11, 10]
# 4. Deleting elements from the list
## Delete the element at index 2 (the *third* value -- list indices start at 0)
del Numbers[2]
## Delete Whole List contents (Numbers itself still exists, as an empty list)
del Numbers[:] |
11,701 | c6f65c594b417f8f6671680c62551d8d4936c9b4 | #/usr/bin/env python
import hashlib
import socket
import struct
import json
import time
import sys
import os
# check if ip is within CIDR
def addressInNetwork(ip, net):
    """Return True when dotted-quad address *ip* falls inside CIDR block *net*.

    *net* is a string like '192.168.1.0/24'.
    """
    def _to_int(quad):
        # Pack the four decimal octets into one 32-bit integer.
        total = 0
        for octet in quad.split('.'):
            total = (total << 8) | int(octet)
        return total

    base, prefix_len = net.split('/')
    prefix_mask = (0xffffffff << (32 - int(prefix_len))) & 0xffffffff
    return (_to_int(ip) & prefix_mask) == (_to_int(base) & prefix_mask)
def newgip(gip, ipcidr):
    """Step the third octet of *gip* down by one and retry the CIDR lookup.

    Exits the whole process once the third octet has reached zero
    (nothing left to walk down to).
    """
    octets = gip.split(".")
    third = int(octets[2])
    if third <= 0:
        sys.exit()
    gip = str(octets[0]) + "." + str(octets[1]) + "." + str(third - 1) + ".0"
    rlookup(gip, ipcidr)
def rlookup(gip, ipcidr):
    """Look up *gip* (an x.y.z.0 prefix) in /root/iplist/* via grep and
    collect the matching lines into *ipcidr*.

    When nothing matches, falls back to newgip(), which decrements the
    third octet and retries.  Exits the process once that octet is zero.
    """
    # BUG FIX: the octet is a string; compare it numerically.  The original
    # 'str <= 0' was always False under Python 2 and a TypeError under 3,
    # so this guard never fired.
    if int(str(gip).split(".")[2]) <= 0:
        #print "not found"
        sys.exit()
    rli = os.popen('grep -E "^'+gip+'" \/root\/iplist\/*').readlines()
    try:
        if rli == []:
            # no file line starts with this prefix: walk down one octet
            newgip(gip, ipcidr)
        else:
            ipcidr.append(rli)
            # NOTE(review): rli is a *list*, so sorted(set(ipcidr)) raises
            # TypeError (unhashable) which the except below swallows -- the
            # dedup/return path never actually runs.  Kept as-is to preserve
            # the caller-visible in-place append behaviour; confirm intent.
            ipcidr = sorted(set(ipcidr))
            return ipcidr
    except Exception as tfail:
        #print tfail
        pass
# check hash value of fail2ban log
def md5(fname):
    """Return the hex MD5 digest of the file at *fname*, streamed in 4 KiB chunks."""
    digest = hashlib.md5()
    with open(fname, "rb") as handle:
        while True:
            block = handle.read(4096)
            if not block:
                break
            digest.update(block)
    return digest.hexdigest()
# MD5 of the current fail2ban log; used to skip reprocessing an unchanged file.
f2bl = md5('/var/log/fail2ban.log')
#read log
# Persistent JSON state (per-octet counters, last MD5, collected IPSET).
jlogfile = "/root/Jbusefile"
try:
    with open(jlogfile) as jlf:
        a = json.load(jlf)
except Exception as e:
    # Missing/corrupt state file: start from an empty state dict.
    a = {}
    pass
# ipset list -- addresses newly banned during this run
ipset = []
# read system log from fail2ban: extract "<ip> <timestamp>" from Ban lines
tl = os.popen('grep " " /var/log/fail2ban.log* | cut -d":" -f2- | grep -viE "Unban|INFO" | grep " Ban " | sed -r "s/(((.*)(.*)),(.*) Ban (.*))/\\6 \\2/g"')
# NOTE(review): bare expression below is a no-op, likely a debugging leftover.
tl
tlr = tl.readlines()
# check MD5 sums to process file or not
try:
    if a['MD5'] == f2bl:
        print "\n\t[INFO] Logged MD5sum of: '/var/log/fail2ban.log' - " + str(f2bl)
        print "\t[INFO] System MD5sum of: '/var/log/fail2ban.log' - " + str(os.popen('md5sum /var/log/fail2ban.log').read()).split()[0]
        print "\t[INFO] Nothing has changed in the fail2ban.log"
        print "\t\t[INFO] Quitting ...\n"
        sys.exit(0)
    #pass
except Exception as fail2log:
    # First run (no 'MD5' key yet): initialise the stored state.
    a['MD5'] = 0
    a['IPSET'] = []
    pass
if a['MD5'] != f2bl:
    print "\n\t[INFO] fail2ban.log has changed in the fail2ban.log\n\t\t[INFO] Starting processing ..."
# Remember the current log hash for the next run.
a['MD5'] = f2bl
if 'FailAuth' not in a.keys():
    a['FailAuth'] = []
# process file
# Process each "<ip> <timestamp>" line extracted from the fail2ban log.
# State lives in nested dicts keyed by IP octets (a[t][u][v][w]) so abuse is
# counted per /8, /16, /24 and per single address, each with its own
# abuser/permaban counters and sighting log.
# NOTE(review): indentation was lost in this source; the nesting below is a
# reconstruction -- verify against a pristine copy.
for x in tlr:
    #print x, "x"
    # parse sections IP and Date
    xr = str(x).split(" ")[0]
    xd = str(x).split(" ")[1::]
    xd = " ".join(xd)
    xd = str(xd).strip()
    #print xr
    a['FailAuth'].append(str(xd).strip() + ", " + str(xr))
    # split and strip ip address
    t = str(xr).split(".")[0]
    u = str(xr).split(".")[1]
    v = str(xr).split(".")[2]
    w = str(xr).split(".")[3].strip()
    #check Keys in JSON
    if t not in a.keys():
        # First sighting of this /8: create counters, rule flags, sighting log.
        a[t] = {}
        a[t]["tcount"] = 1
        a[t]['rules'] = {}
        a[t]['log'] = {}
        a[t]['rules']['abuser'] = 0
        a[t]['rules']['permaban'] = 0
        a[t]['log']['firstseen'] = xd
        a[t]['log']['lastseen'] = xd
        a[t]['log']['everytime'] = []
        a[t]['log']['everytime'].append(xd)
    else:
        a[t]["tcount"] = a[t]["tcount"] + 1
        a[t]['log']['lastseen'] = xd
        if xd in a[t]['log']['everytime']:
            # NOTE(review): 'break' aborts the ENTIRE log scan on the first
            # already-seen timestamp -- 'continue' was probably intended.
            break
        else:
            a[t]['log']['everytime'].append(xd)
        if a[t]['rules']["permaban"] == 1:
            # Already permabanned at this level: nothing more to count.
            pass
        else:
            if a[t]["tcount"] != 160:
                a[t]['rules']["abuser"] = a[t]['rules']["abuser"] + 1
                if a[t]['rules']["abuser"] == 30:
                    print "\t[INFO] T-Rule Abuser Identified: " + str(xr)
                    a[t]['rules']["permaban"] = a[t]['rules']["permaban"] + 1
                    if a[t]['rules']["permaban"] == 20:
                        # NOTE(review): permaban is bumped at most once here,
                        # so this /8 threshold (20) looks unreachable.
                        xrs = str(xr).split(".")
                        xrs1 = xrs[0]
                        a['IPSET'].append(str(xrs1)+".0.0.0/8")
                        print "\t\t[WARNING] T-Rule Permaban Action taken in iptables"
                        ipset.append(xr)
    if u not in a[t].keys():
        # First sighting of this /16 under t.
        a[t][u] = {}
        a[t][u]["ucount"] = 1
        a[t][u]['rules'] = {}
        a[t][u]['rules']['abuser'] = 0
        a[t][u]['rules']['permaban'] = 0
        a[t][u]['log'] = {}
        a[t][u]['log']['firstseen'] = xd
        a[t][u]['log']['lastseen'] = xd
        a[t][u]['log']['everytime'] = []
        a[t][u]['log']['everytime'].append(xd)
    else:
        a[t][u]["ucount"] = a[t][u]["ucount"] + 1
        a[t][u]['log']['lastseen'] = xd
        if xd in a[t][u]['log']['everytime']:
            # NOTE(review): same 'break'-vs-'continue' concern as above.
            break
        else:
            a[t][u]['log']['everytime'].append(xd)
        if a[t][u]['rules']["permaban"] == 1:
            pass
        else:
            if a[t][u]["ucount"] != 60:
                a[t][u]['rules']["abuser"] = a[t][u]['rules']["abuser"] + 1
                if a[t][u]['rules']["abuser"] == 20:
                    print "\t[INFO] U-Rule Abuser Identified: " + str(xr)
                    a[t][u]['rules']["permaban"] = a[t][u]['rules']["permaban"] + 1
                    if a[t][u]['rules']["permaban"] == 20:
                        # NOTE(review): /16 threshold (20) also looks unreachable.
                        xrs = str(xr).split(".")
                        xrs1 = xrs[0]
                        xrs2 = xrs[1]
                        a['IPSET'].append(str(xrs1)+"."+str(xrs2)+".0.0/16")
                        print "\t\t[WARNING] U-Rule Permaban Action taken in iptables"
                        ipset.append(xr)
    if v not in a[t][u].keys():
        # First sighting of this /24 under t.u.
        a[t][u][v] = {}
        a[t][u][v]["vcount"] = 1
        a[t][u][v]['rules'] = {}
        a[t][u][v]['rules']['abuser'] = 0
        a[t][u][v]['rules']['permaban'] = 0
        a[t][u][v]['log'] = {}
        a[t][u][v]['log']['firstseen'] = xd
        a[t][u][v]['log']['lastseen'] = xd
        a[t][u][v]['log']['everytime'] = []
        a[t][u][v]['log']['everytime'].append(xd)
    else:
        a[t][u][v]["vcount"] = a[t][u][v]["vcount"] + 1
        a[t][u][v]['log']['lastseen'] = xd
        if xd in a[t][u][v]['log']['everytime']:
            break
        else:
            a[t][u][v]['log']['everytime'].append(xd)
        if a[t][u][v]['rules']["permaban"] == 1:
            pass
        else:
            if a[t][u][v]["vcount"] != 30:
                a[t][u][v]['rules']["abuser"] = a[t][u][v]['rules']["abuser"] + 1
                if a[t][u][v]['rules']["abuser"] == 15:
                    print "\t[INFO] V-Rule Abuser Identified: " + str(xr)
                    a[t][u][v]['rules']["permaban"] = a[t][u][v]['rules']["permaban"] + 1
                    if a[t][u][v]['rules']["permaban"] == 10:
                        # NOTE(review): /24 threshold (10) also looks unreachable.
                        xrs = str(xr).split(".")
                        xrs1 = xrs[0]
                        xrs2 = xrs[1]
                        xrs3 = xrs[2]
                        a['IPSET'].append(str(xrs1)+"."+str(xrs2)+"."+str(xrs3)+".0/24")
                        print "\t\t[WARNING] V-Rule Permaban Action taken in iptables"
                        ipset.append(xr)
    if w not in a[t][u][v].keys():
        # First sighting of this exact address.
        a[t][u][v][w] = {}
        a[t][u][v][w]["wcount"] = 1
        a[t][u][v][w]['rules'] = {}
        a[t][u][v][w]['rules']['abuser'] = 0
        a[t][u][v][w]['rules']['permaban'] = 0
        a[t][u][v][w]['log'] = {}
        a[t][u][v][w]['log']['firstseen'] = xd
        a[t][u][v][w]['log']['lastseen'] = xd
        a[t][u][v][w]['log']['everytime'] = []
        a[t][u][v][w]['log']['everytime'].append(xd)
    else:
        a[t][u][v][w]["wcount"] = a[t][u][v][w]["wcount"] + 1
        a[t][u][v][w]['log']['lastseen'] = xd
        if xd in a[t][u][v][w]['log']['everytime']:
            break
        else:
            a[t][u][v][w]['log']['everytime'].append(xd)
        if a[t][u][v][w]['rules']["permaban"] == 1:
            pass
        else:
            if a[t][u][v][w]["wcount"] != 6:
                a[t][u][v][w]['rules']["abuser"] = a[t][u][v][w]['rules']["abuser"] + 1
                if a[t][u][v][w]['rules']["abuser"] == 3:
                    print "\t[INFO] W-Rule Abuser Identified: " + str(xr)
                    a[t][u][v][w]['rules']["permaban"] = a[t][u][v][w]['rules']["permaban"] + 1
                    if a[t][u][v][w]['rules']["permaban"] == 1:
                        # Single-address ban: this threshold IS reachable.
                        ipset.append(xr)
                        #a['IPSET'].append(xr)
                        print "\t\t[WARNING] W-Rule Permaban Action taken in iptables"
                        gi = str(xr).split(".")
                        gi1 = gi[0]
                        gi2 = gi[1]
                        gi3 = gi[2]
                        gi4 = "0"
                        # Prefix form of the address; presumably intended for
                        # the disabled rlookup() call below -- confirm.
                        gip = str(gi1) + "." + str(gi2) + "." + str(gi3) + "." + str(gi4)
                        #print gip, "gip"
                        try:
                            #gg = rlookup(gip, ipset)
                            #print ipset, "ipset"
                            a['IPSET'].append(str(xr))
                            #print a['IPSET']
                        except Exception as rlf:
                            #print rlf, "rlf"
                            pass
#print ipset
'''
try:
ipcidr = []
with open('/root/ipset_list', 'r')as ipsetl:
ipsetll = ipsetl.readlines()
for ipsx in ipsetll:
sipsx = str(ipsx).split(".")
gip = str(sipsx[0]) + "." + str(sipsx[1]) + "." + str(sipsx[2]) + ".0"
aaa = os.popen('grep -E "^'+gip+'" \/root\/iplist\/*').readlines()
rlookup(gip, ipcidr)
print str("\t[WARNING] PermaBanned User Information: " + str(xr) + "\n\t\t[!] " + str(ipcidr[0][0]))
except Exception as E:
#print E, "big E"
pass
'''
# dump JSON file -- persist the updated state (counters + MD5) for the next run
with open(jlogfile, 'w') as outfile:
    json.dump(a, outfile, sort_keys = True, indent = 4,ensure_ascii=False)
# write ipset list -- append the addresses banned during this run
with open("/root/ipset_list","a") as ipsetlist:
    for ipsetw in ipset:
        ipsetlist.write(str(ipsetw)+"\n")
print "\t[INFO] Finished processing Fail2ban Logs\n\t\t[INFO] Quitting ..."
|
11,702 | 78617876fed15979032ab7e18c24b045e347c027 | from django.conf.urls import url
from task.views import index,registerNotification,success
# Route table for the task app: landing page, notification-registration
# endpoint, and the post-registration success page.
urlpatterns=[
    url("^$",index,name='index'),
    url("^register$",registerNotification,name='registerNotification'),
    url("^success$",success,name='success'),
]
import task.jobs |
11,703 | c15060b2fe273d896d2ad29e1558491430cf7aa0 | #!/usr/bin/env python3
from datetime import datetime, timedelta
class Image:
    """Wraps the image-metadata block of an API response."""

    def __init__(self, json):
        """
        :param json: dict carrying the "height", "width" and "src" keys
        """
        # All three fields are required; copy them straight off the payload.
        self.height, self.width, self.src = (
            json["height"],
            json["width"],
            json["src"],
        )
class Show:
    """Wraps the show-metadata block of a DMAX API response."""

    # Keys copied verbatim only when present in the payload.
    _OPTIONAL = ("description", "episodeCount", "seasonNumbers")

    def __init__(self, json):
        """
        :param json: dict with show data; "id", "alternateId" and "name"
                     are required, everything else is optional.
        """
        self.id = json["id"]
        self.alternateId = json["alternateId"]
        self.name = json["name"]
        for key in self._OPTIONAL:
            if key in json:
                setattr(self, key, json[key])
        if "image" in json:
            self.image = Image(json["image"])
class Episode:
    """Defines information about an episode."""

    def __init__(self, json):
        """
        Initializes the Episode class with information for an episode

        :param json: dict. Parsed episode JSON; only "id" and "alternateId"
                     are required, every other field is optional.
        """
        self.id = json["id"]
        self.alternateId = json["alternateId"]
        if "airDate" in json:
            self.airDate = datetime.strptime(json["airDate"], '%Y-%m-%dT%H:%M:%SZ')
        if "name" in json:
            self.name = json["name"]
        if "title" in json:
            self.title = json["title"]
        if "description" in json:
            self.description = json["description"]
        if "episode" in json:
            self.episode = json["episode"]
        # episodeNumber/seasonNumber are always assigned (None when absent)
        # so downstream code can rely on the attribute existing.
        if "episodeNumber" in json:
            self.episodeNumber = json["episodeNumber"]
        else:
            self.episodeNumber = None
        if "season" in json:
            self.season = json["season"]
        if "seasonNumber" in json:
            self.seasonNumber = json["seasonNumber"]
        else:
            self.seasonNumber = None
        if "publishStart" in json:
            self.publishStart = datetime.strptime(json["publishStart"], '%Y-%m-%dT%H:%M:%SZ')
        if "publishEnd" in json:
            self.publishEnd = datetime.strptime(json["publishEnd"], '%Y-%m-%dT%H:%M:%SZ')
        if "videoDuration" in json:
            self.videoDuration = timedelta(milliseconds=json["videoDuration"])
        if "isFreePlayable" in json:
            self.isFreePlayable = json["isFreePlayable"]
        if "isPlayable" in json:
            self.isPlayable = json["isPlayable"]
        if "isNew" in json:
            self.isNew = json["isNew"]
        if "image" in json:
            self.image = Image(json["image"])

    def __repr__(self):
        # BUG FIX: episodeNumber is *always* set (possibly None), so the old
        # hasattr() check never yielded "?"; and self.name may not exist at
        # all, which made repr() raise AttributeError.
        return "Episode {0}: {1}".format(
            self.episodeNumber if self.episodeNumber is not None else "?",
            getattr(self, "name", "?")
        )
class Season:
    """Groups the episodes belonging to one season of a show."""

    def __init__(self, number, json):
        """
        :param number: Int. Season number.
        :param json: list of per-episode JSON dicts.
        """
        self.number = number
        # Wrap every raw episode dict in an Episode object.
        self.episodes = [Episode(entry) for entry in json]

    def __repr__(self):
        return "Season {0}".format(self.number)
class DMAX:
    """Top-level wrapper tying a Show to its Seasons and standalone specials."""

    def __init__(self, json):
        """
        :param json: dict. Must carry both a "show" and a "videos" section.
        :raises Exception: when either required section is missing.
        """
        missing = [key for key in ("show", "videos") if key not in json]
        if missing:
            raise Exception("Invalid JSON.")
        self.show = Show(json["show"])
        # One Season per advertised season number; numbers without an
        # episode list in the payload are silently skipped.
        self.seasons = []
        for number in self.show.seasonNumbers:
            try:
                season_data = json["videos"]["episode"][str(number)]
            except KeyError:
                continue
            self.seasons.append(Season(number, season_data))
        # Standalone videos (specials) live outside the season structure.
        self.specials = []
        if "standalone" in json["videos"]:
            self.specials = [Episode(item) for item in json["videos"]["standalone"]]
|
11,704 | 8098a606d0fc2e0a0051a53e1e5462b91b64a9d8 | #!/usr/bin/python
# -*- coding: utf-8 -*-
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.support.wait import WebDriverWait
import pymysql
db = pymysql.connect(host='localhost', port=3306, user='root', passwd='', db='bankbyjiajia', charset="utf8",
use_unicode=True)
def insert(data):
    """Insert one account-detail row into tb_chinapnr_account_detail.

    :param data: 12-element sequence mapping positionally to create_time,
                 serial_number, usr_id, user_name, acct_type,
                 debit_or_credit_mark, tran_amount, free_amount, acct_amount,
                 in_usr_id, buss_type, des_note.  The three amount fields
                 arrive with thousands separators, which are stripped.
    """
    # Grab a cursor for this statement (uses the module-level connection).
    cursor = db.cursor()
    # SECURITY FIX: use driver-side parameter binding instead of Python
    # %-interpolation, which both broke on embedded quotes in scraped text
    # and allowed SQL injection.
    sql = ("INSERT INTO tb_chinapnr_account_detail(create_time, "
           "serial_number, usr_id, user_name, acct_type, debit_or_credit_mark, "
           "tran_amount, free_amount, acct_amount, in_usr_id, buss_type, des_note) "
           "VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s, %s)")
    params = (data[0], data[1], data[2], data[3], data[4], data[5],
              data[6].replace(',', ''), data[7].replace(',', ''),
              data[8].replace(',', ''), data[9], data[10], data[11])
    cursor.execute(sql, params)
    # Commit per row so earlier rows survive if a later page crashes the scrape.
    db.commit()
usrids = ['6000060269448244', '6000060269448119', '6000060269453923', '6000060269456948', '6000060269455093',
'6000060269455994', '6000060269459071', '6000060269455869', '6000060269456118', '6000060269456261',
'6000060269457616', '6000060269462708', '6000060269461736', '6000060269463618', '6000060269652085',
'6000060269469033', '6000060269480234', '6000060269480225', '6000060269488245', '6000060269554011',
'6000060269572475', '6000060269586521', '6000060269591533', '6000060269593773', '6000060269693585',
'6000060269697796', '6000060269696840', '6000060269699222', '6000060269764954', '6000060269743638',
'6000060269785094', '6000060269786761', '6000060269810654', '6000060269826745', '6000060269918708',
'6000060269935173', '6000060269938241', '6000060269943716', '6000060269968627', '6000060269971043',
'6000060269974255', '6000060269991218', '6000060269996017', '6000060270007753', '6000060269998792',
'6000060270012462', '6000060270016379', '6000060273437908', '6000060270080487', '6000060270084474',
'6000060270088461', '6000060270122389', '6000060270120452', '6000060270130888', '6000060270167777',
'6000060270178319', '6000060270182545', '6000060270187327', '6000060270193123', '6000060270194961',
'6000060270196585', '6000060270197334', '6000060270207724', '6000060270245666', '6000060270250454',
'6000060270290465', '6000060270326597', '6000060270329727', '6000060270457918', '6000060270711545',
'6000060270777172', '6000060270787884', '6000060270839686', '6000060270979347', '6000060271138986',
'6000060271413848', '6000060271429555', '6000060271431891', '6000060271441461', '6000060271607309',
'6000060271765842', '6000060272137958', '6000060272216676', '6000060272235138', '6000060272451778',
'6000060272579695', '6000060272620195', '6000060272734740', '6000060272779728', '6000060272799975',
'6000060272947208', '6000060273017933', '6000060273037378', '6000060273060715', '6000060273155882',
'6000060273210947', '6000060273239329', '6000060273302928', '6000060273307834', '6000060273329213',
'6000060273503970', '6000060273545952', '6000060273554273', '6000060273573797', '6000060273581476',
'6000060273617081', '6000060274035680', '6000060274038213', '6000060274239176', '6000060274334081',
'6000060274348254', '6000060274510041', '6000060274544880', '6000060274532900', '6000060274624847',
'6000060274628594', '6000060274654234', '6000060274684899', '6000060274750102', '6000060274750219',
'6000060274857284', '6000060274897106', '6000060274974772', '6000060281109604', '6000060275043945',
'6000060275068712', '6000060275074965', '6000060275083919', '6000060275148351', '6000060275157082',
'6000060275207206', '6000060275277050', '6000060275367596', '6000060275397705', '6000060275426041',
'6000060275440999', '6000060275465481', '6000060275543272', '6000060275603634', '6000060275839738',
'6000060275852561', '6000060275977669', '6000060276008278', '6000060276077924', '6000060276088912',
'6000060276095165', '6000060276333693', '6000060276564979', '6000060276506649', '6000060276541076',
'6000060276697942', '6000060276699325', '6000060276705498', '6000060276803541', '6000060276890992',
'6000060277250182', '6000060277256024', '6000060277353197', '6000060277439532', '6000060277397532',
'6000060277416897', '6000060277551776', '6000060278336455', '6000060278760851', '6000060280233099',
'6000060280355359', '6000060280459078', '6000060280462643', '6000060280616069', '6000060280638063',
'6000060280807932', '6000060280816352', '6000060280827876', '6000060281093176', '6000060281190999',
'6000060281237413', '6000060281349695', '6000060281474013', '6000060281607816', '6000060281871398',
'6000060281894104', '6000060281940983', '6000060281943249', '6000060281948468', '6000060281951239',
'6000060282077968', '6000060282137644', '6000060282182522', '6000060282311535', '6000060282710505',
'6000060282717438', '6000060282742464', '6000060282989858', '6000060283127582', '6000060283130426',
'6000060283534963', '6000060283674197', '6000060283766962', '6000060283907622', '6000060284026975',
'6000060284270807', '6000060284481606', '6000060284562617', '6000060284596038', '6000060284630143',
'6000060284785850', '6000060284935948', '6000060284839400', '6000060284931568', '6000060285006663',
'6000060285020576', '6000060285032126', '6000060285046175', '6000060285140278', '6000060285234195',
'6000060285338298', '6000060285405642', '6000060285407551', '6000060285416934', '6000060285740076',
'6000060285780870', '6000060285804863', '6000060285887505', '6000060285912666', '6000060285968865',
'6000060285986006', '6000060286048091', '6000060286091596', '6000060286117523', '6000060286187430',
'6000060286197465', '6000060286349444', '6000060286369920', '6000060286403660', '6000060286413551',
'6000060286417968', '6000060286426949', '6000060286533431', '6000060286556059', '6000060286595864',
'6000060286670372', '6000060286823653', '6000060286954351', '6000060286957811', '6000060286982865',
'6000060287011733', '6000060287029653', '6000060287235449', '6000060287231096', '6000060287352204',
'6000060287417868', '6000060287772938', '6000060287798974', '6000060287808918', '6000060287904635',
'6000060287977744', '6000060288128045', '6000060288346997', '6000060288530136', '6000060288602727',
'6000060288963837', '6000060289368328', '6000060289386415', '6000060289482294', '6000060289911927',
'6000060290281302', '6000060290300265', '6000060290550360', '6000060290686839', '6000060291077380',
'6000060291243806', '6000060291340862', '6000060291431540', '6000060291483681', '6000060291553294',
'6000060291706959', '6000060292219430', '6000060292566439', '6000060292741605', '6000060293019252',
'6000060293381519', '6000060293320112', '6000060293384730', '6000060293387871', '6000060293694236',
'6000060293457750', '6000060293507340', '6000060293611888', '6000060293621190', '6000060295043630',
'6000060295245128', '6000060295298124', '6000060295371605', '6000060295427413', '6000060295767894',
'6000060295876437']
# usrids = ['6000060269448244', '6000060269448119', '6000060269652085']
import time
import requests
from lxml import html
# --- Log in to the admin console, then scrape the account-detail report for
# --- every user id in `usrids` and store each row in MySQL via insert().
browser = webdriver.Chrome()
browser.get("https://wealth.cloudpnr.com/p2padmin/")
# BUG FIX: the original rebound the builtin input() to a WebElement and then
# called input("Enter your input: ") for the captcha, raising TypeError.
# Form fields now use their own name so the builtin stays usable.
field = browser.find_element_by_id("login_operator_id")
field.send_keys("mayanbin0302@163.com")
field = browser.find_element_by_id("login_password")
field.send_keys("972506")
# Captcha is typed in manually by the operator.
str1 = input("Enter your input: ")
field = browser.find_element_by_id("captcha")
field.send_keys(str1)
field.send_keys(Keys.ENTER)
# Wait for the logged-in header to confirm the session.
wait = WebDriverWait(browser, 10)
wait.until(EC.presence_of_element_located((By.CLASS_NAME, "header-infos")))
browser.get('https://wealth.cloudpnr.com/p2padmin/report/index/report/id/500005')
wait.until(EC.presence_of_element_located((By.CLASS_NAME, "main-content-title")))
# Report-form controls: query button, date range inputs, customer-id input.
query = browser.find_element_by_xpath('//a[@class="btn btn-primary ajax-get-data"]')
date_start_input = browser.find_element_by_name('date_from')
date_end_input = browser.find_element_by_name('date_to')
cust_input = browser.find_element_by_name('custId')
for usrid in usrids:
    num = 0  # BUG FIX: was the syntax error "num = 0s"
    cust_input.clear()
    cust_input.send_keys(usrid)
    queryDate = [['2018-03-12', '2018-06-10'], ['2018-06-11', '2018-07-31']]
    for dateq in queryDate:
        date_start_input.clear()
        date_start_input.send_keys(dateq[0])
        date_end_input.clear()
        date_end_input.send_keys(dateq[1])
        query.click()
        # Wait until the query button is re-enabled, i.e. results arrived.
        wait.until(EC.presence_of_element_located((By.XPATH, '//a[@class="btn ajax-get-data btn-primary"]')))
        # Number of result rows for this query.
        size = browser.find_element_by_xpath('//p[@class="page"]/span/strong').text
        if int(size) > 0:
            # Total number of result pages.
            total = browser.find_element_by_xpath('//p[@class="page"]/input[@max]').get_attribute('max')
            for i in range(1, int(total) + 1):
                if i != 1:
                    next = browser.find_element_by_xpath('//p[@class="page"]/a[@title="Next"]')
                    next.click()
                    # Wait until pagination shows page i as current.
                    wait.until(EC.presence_of_element_located(
                        (By.XPATH, '//p[@class="page"]/a[@class="current" and @title="' + str(i) + '"]')))
                trs = browser.find_elements_by_xpath('//div[@class="table dis"]/table/tbody/tr')
                for tr in trs:
                    one = list()
                    tds = tr.find_elements_by_xpath('.//td')
                    for j in range(0, len(tds)):
                        if j > 0:
                            # First cell is the row index; skip it.
                            one.append(tds[j].text)
                    insert(one)
                    num += 1
    print(usrid + ':' + str(num))
browser.close()
# Close the database connection.
db.close()
|
11,705 | d4c6a0858e66e54a5a0349ef82367a19bd49a391 | # -*- coding: utf-8 -*-
"""
Created on Tue Apr 30 13:03:47 2019
@author: Yoshi
"""
import matplotlib.pyplot as plt
import numpy as np
import pdb
import analysis_config as cf
import analysis_backend as bk
import dispersions as disp
import lmfit as lmf
from scipy.optimize import curve_fit
def get_max_frequency(arr, plot=False):
    '''
    Calculates strongest frequency within a given field component across
    the simulation space. Returns frequency and power axes and index of
    maximum frequency in axis.
    '''
    n_times = arr.shape[0]
    freqs = np.fft.fftfreq(n_times, d=cf.dt_field)
    freqs = freqs[freqs >= 0]

    # Temporal FFT of the mean-subtracted series at every gridpoint.
    spectra = np.zeros((n_times, cf.NX), dtype='complex128')
    for cell in range(cf.NX):
        series = arr[:, cell]
        spectra[:, cell] = np.fft.fft(series - series.mean())

    # One-sided power, normalised, then summed across the spatial domain.
    half = spectra[:freqs.shape[0], :]
    power = (half * np.conj(half)).real
    power *= 4. / (n_times ** 2)
    power = power.sum(axis=1)

    peak = np.where(power == power.max())[0][0]
    print('Maximum frequency at {}Hz\n'.format(freqs[peak]))

    if plot == True:
        plt.figure()
        plt.plot(freqs, power)
        plt.scatter(freqs[peak], power[peak], c='r')
        plt.title('Frequencies across simulation domain')
        plt.xlabel('Frequency (Hz)')
        plt.ylabel('Power (nT^2 / Hz)')
        plt.legend()
        plt.show()
    return freqs, power, peak
def growing_sine(pars, t, data=None):
    """Exponentially growing sinusoid model for lmfit.

    With *data* omitted the model values are returned; otherwise the
    residual (model - data) is returned for least-squares fitting.
    Expects 'amp', 'freq' (Hz) and 'growth' entries in *pars*.
    """
    p = pars.valuesdict()
    oscillation = np.exp(1j * 2 * np.pi * p['freq'] * t).imag
    model = p['amp'] * oscillation * np.exp(p['growth'] * t)
    return model if data is None else model - data
def get_growth_rates(do_plot=None):
    '''
    Extract the magnetic linear wave growth rate from:
        -- Fitting an exponential to the magnetic energy
        -- Fitting a growing sine wave to the field components at each cell

    The linear regime is calculated as all times before the maximum energy derivative,
    i.e. the growth is assumed exponential until the rate of energy transfer decreases.

    INPUT:
        -- do_plot : 'show', 'save' or None. 'save' also writes the growth
                     rates to growth_rates.txt in the analysis directory.
    '''
    by = cf.get_array('By') * 1e9
    bz = cf.get_array('Bz') * 1e9

    linear_cutoff, gr_rate_energy = fit_magnetic_energy(by, bz, plot=do_plot)
    freqs, power, max_idx = get_max_frequency(by, plot=do_plot)

    growth_rate_kt(by, linear_cutoff, freqs[max_idx])

    by_wamps, by_wfreqs, by_gr_rate = fit_field_component(by, freqs[max_idx], 'By', linear_cutoff, plot=do_plot)
    bz_wamps, bz_wfreqs, bz_gr_rate = fit_field_component(bz, freqs[max_idx], 'Bz', linear_cutoff, plot=do_plot)

    if do_plot == 'save':
        txt_path = cf.anal_dir + 'growth_rates.txt'
        text_file = open(txt_path, 'w')
    else:
        text_file = None  # print() then writes to stdout

    # BUG FIX: the report file was never closed (risking unflushed output);
    # close it whether or not the prints succeed.
    try:
        print('Energy growth rate: {}'.format(gr_rate_energy), file=text_file)
        print('By av. growth rate: {}'.format(by_gr_rate.mean()), file=text_file)
        print('Bz av. growth rate: {}'.format(bz_gr_rate.mean()), file=text_file)
        print('By min growth rate: {}'.format(by_gr_rate.min()), file=text_file)
        print('Bz min growth rate: {}'.format(bz_gr_rate.min()), file=text_file)
        print('By max growth rate: {}'.format(by_gr_rate.max()), file=text_file)
        print('Bz max growth rate: {}'.format(bz_gr_rate.max()), file=text_file)
    finally:
        if text_file is not None:
            text_file.close()
    return
def fit_field_component(arr, fi, component, cut_idx=None, plot=False, plot_cell=64):
    '''
    Calculates and returns parameters for growing sine wave function for each
    gridpoint up to the linear cutoff time.

    arr       -- 2D field array (time x cell), in nT
    fi        -- initial frequency guess (Hz) for the fit
    component -- field-component label ('By'/'Bz') used in titles/filenames
    cut_idx   -- last time index of the linear-growth interval
    plot      -- None, 'show' or 'save'; plotting is done for plot_cell only
    Returns per-cell arrays: (amplitudes, frequencies, growth_rates).
    '''
    print('Fitting field component')
    time_fit = cf.time_seconds_field[:cut_idx]
    gyfreq_hz = cf.gyfreq/(2*np.pi)

    growth_rates = np.zeros(cf.NX)
    frequencies = np.zeros(cf.NX)
    amplitudes = np.zeros(cf.NX)

    # Shared parameter bounds: amplitude within +/-0.5*B0 (in nT), frequency
    # within the gyrofrequency, growth limited to a tenth of it.
    fit_params = lmf.Parameters()
    fit_params.add('amp' , value=1.0 , vary=True, min=-0.5*cf.B0*1e9 , max=0.5*cf.B0*1e9)
    fit_params.add('freq' , value=fi , vary=True, min=-gyfreq_hz , max=gyfreq_hz )
    fit_params.add('growth', value=0.001*gyfreq_hz, vary=True, min=0.0 , max=0.1*gyfreq_hz)

    for cell_num in range(cf.NX):
        data_to_fit = arr[:cut_idx, cell_num]
        fit_output = lmf.minimize(growing_sine, fit_params, args=(time_fit,), kws={'data': data_to_fit},
                                  method='leastsq')
        fit_function = growing_sine(fit_output.params, time_fit)
        fit_dict = fit_output.params.valuesdict()
        growth_rates[cell_num] = fit_dict['growth']
        frequencies[ cell_num] = fit_dict['freq']
        amplitudes[ cell_num] = fit_dict['amp']
        # Diagnostic plot for a single representative cell only.
        if plot != None and cell_num == plot_cell:
            plt.figure()
            plt.plot(time_fit, data_to_fit, label='Magnetic field')
            plt.plot(time_fit, fit_function, label='Fit')
            plt.figtext(0.135, 0.73, r'$f = %.3fHz$' % (frequencies[cell_num] / (2 * np.pi)))
            plt.figtext(0.135, 0.69, r'$\gamma = %.3fs^{-1}$' % (growth_rates[cell_num] / (2 * np.pi)))
            plt.figtext(0.135, 0.65, r'$A_0 = %.3fnT$' % (amplitudes[cell_num] ))
            plt.title('{} cell {}'.format(component, plot_cell))
            plt.xlabel('Time (s)')
            plt.ylabel('Amplitude (nT)')
            plt.legend()
            print(lmf.fit_report(fit_output))
            if plot == 'save':
                save_path = cf.anal_dir + '{}_envfit_{}.png'.format(component, plot_cell)
                plt.savefig(save_path)
                plt.close('all')
            elif plot == 'show':
                plt.show()
            else:
                pass
    return amplitudes, frequencies, growth_rates
def residual_exp(pars, t, data=None):
    """Pure exponential model amp*exp(growth*t) for lmfit.

    Returns model values when *data* is None, residuals otherwise.
    """
    p = pars.valuesdict()
    model = p['amp'] * np.exp(p['growth'] * t)
    if data is None:
        return model
    return model - data
def fit_magnetic_energy(by, bz, plot=False):
    '''
    Calculates an exponential growth rate based on transverse magnetic field
    energy.

    by, bz -- transverse field components in nT (time x cell)
    plot   -- None, 'show' or 'save'
    Returns (linear_cutoff, growth): the time index bounding the assumed
    linear-growth phase (taken at the maximum of dU/dt) and the fitted
    exponential growth rate.
    '''
    mu0 = (4e-7) * np.pi  # Magnetic Permeability of Free Space (SI units)
    print('Fitting magnetic energy')
    # Total transverse field magnitude, converted back from nT to T.
    bt = np.sqrt(by ** 2 + bz ** 2) * 1e-9
    U_B = 0.5 * np.square(bt).sum(axis=1) * cf.NX * cf.dx / mu0
    dU = bk.get_derivative(U_B)
    # Linear regime assumed to end where the energy-transfer rate peaks.
    linear_cutoff = np.where(dU == dU.max())[0][0]
    time_fit = cf.time_seconds_field[:linear_cutoff]
    fit_params = lmf.Parameters()
    fit_params.add('amp' , value=1.0 , min=None , max=None)
    fit_params.add('growth', value=0.001*cf.gyfreq, min=0.0 , max=None)
    fit_output = lmf.minimize(residual_exp, fit_params, args=(time_fit,), kws={'data': U_B[:linear_cutoff]},
                              method='leastsq')
    fit_function = residual_exp(fit_output.params, time_fit)
    fit_dict = fit_output.params.valuesdict()
    if plot != None:
        plt.ioff()
        plt.figure()
        plt.plot(cf.time_seconds_field[:linear_cutoff], U_B[:linear_cutoff], color='green', marker='o', label='Energy')
        plt.plot(cf.time_seconds_field[:linear_cutoff], fit_function, color='b', label='Exp. fit')
        plt.figtext(0.135, 0.725, r'$\gamma = %.3fs^{-1}$' % (fit_dict['growth'] / (2 * np.pi)))
        plt.title('Transverse magnetic field energy')
        plt.xlabel('Time (s)')
        plt.ylabel('Energy (J)')
        plt.legend()
        if plot == 'save':
            save_path = cf.anal_dir + 'magnetic_energy_expfit.png'
            plt.savefig(save_path)
            plt.close('all')
        elif plot == 'show':
            plt.show()
        else:
            pass
    return linear_cutoff, fit_dict['growth']
def exponential_sine(t, amp, freq, growth, phase):
    """Sinusoid with an exponentially growing envelope (for curve_fit / lmfit.Model)."""
    envelope = np.exp(growth * t)
    return amp * np.sin(2 * np.pi * freq * t + phase) * envelope
def growth_rate_kt(arr, cut_idx, fi, saveas='kt_growth'):
    """Fit a growing sinusoid to individual spatial Fourier modes of *arr*
    over the linear interval [0, cut_idx) and plot data vs. both fits.

    arr     -- field array (time x cell)
    cut_idx -- last time index of the linear-growth interval
    fi      -- frequency guess (Hz) for the fits
    saveas  -- unused in the current body; kept for interface compatibility
    """
    plt.ioff()
    time_fit = cf.time_seconds_field[:cut_idx]
    # Non-negative spatial wavenumbers of the simulation grid.
    k = np.fft.fftfreq(cf.NX, cf.dx)
    k = k[k>=0]
    # Take spatial FFT at each time
    mode_matrix = np.zeros(arr.shape, dtype='complex128')
    for ii in range(arr.shape[0]):
        mode_matrix[ii, :] = np.fft.fft(arr[ii, :] - arr[ii, :].mean())
    # Cut off imaginary bits -- keep the one-sided spectrum, doubled.
    mode_matrix = 2*mode_matrix[:, :k.shape[0]]
    gmodel = lmf.Model(exponential_sine, nan_policy='propagate')
    gmodel.set_param_hint('amp', value=1.0, min=0.0, max=abs(mode_matrix).max())
    gmodel.set_param_hint('freq', value=fi, min=-2*fi, max=2*fi)
    gmodel.set_param_hint('growth', value=0.05, min=0.0, max=0.5*fi)
    gmodel.set_param_hint('phase', value=0.0, vary=False)
    # Only mode 1 for now; widen to range(1, k.shape[0]) for all modes.
    for mode_num in [1]:
        data_to_fit = mode_matrix[:cut_idx, mode_num].real
        result = gmodel.fit(data_to_fit, t=time_fit, method='leastsq')
        plt.plot(time_fit, data_to_fit, 'ko', label='data')
        plt.plot(time_fit, result.best_fit, 'r-', label='lmfit')
        # Cross-check the lmfit result with an unconstrained scipy fit.
        popt, pcov = curve_fit(exponential_sine, time_fit, data_to_fit, maxfev=1000000000)
        plt.plot(time_fit, exponential_sine(time_fit, *popt), label='curve_fit')
        plt.legend()
        print(popt)
    plt.show()
    return
def get_linear_growth(plot=False):
    '''
    Calculates an exponential growth rate based on transverse magnetic field
    energy.

    NOTE(review): the fit itself is currently disabled (commented out below);
    as written this function only plots By against time and returns None.
    The `plot` argument is unused while the fit is disabled.
    '''
    import pdb
    # NOTE(review): elsewhere in this module cf.get_array() returns a
    # (time, array) tuple -- here the result is used directly; confirm
    # which interface this code targets before re-enabling the fit.
    by = cf.get_array('By') * 1e9
    bz = cf.get_array('Bz') * 1e9
    mu0 = (4e-7) * np.pi # Magnetic Permeability of Free Space (SI units)
    print('Fitting magnetic energy')
    bt = np.sqrt(by ** 2 + bz ** 2)
    # Single-point transverse field energy proxy (spatial sum disabled).
    U_B = np.square(bt[:, 0])#.sum(axis=1)# * cf.NX * cf.dx / mu0 * 0.5
    #dU = bk.get_derivative(U_B)
    #linear_cutoff = np.where(dU == dU.max())[0][0]
    #time_fit = cf.time_seconds_field[:linear_cutoff]
    plt.plot(cf.time_radperiods_field, by, marker='o')
    plt.xlim(0, 200)
    # =============================================================================
    #     fit_params = lmf.Parameters()
    #     fit_params.add('amp' , value=1.0 , min=None , max=None)
    #     fit_params.add('growth', value=0.001*cf.gyfreq, min=0.0 , max=None)
    #
    #     fit_output = lmf.minimize(residual_exp, fit_params, args=(time_fit,), kws={'data': U_B[:linear_cutoff]},
    #                             method='leastsq')
    #     fit_function = residual_exp(fit_output.params, time_fit)
    #
    #     fit_dict = fit_output.params.valuesdict()
    # =============================================================================
    # =============================================================================
    #     if plot == True:
    #         plt.ioff()
    #         plt.figure()
    #         plt.plot(cf.time_seconds_field[:linear_cutoff], U_B[:linear_cutoff], color='green', marker='o', label='Energy')
    #         plt.plot(cf.time_seconds_field[:linear_cutoff], fit_function, color='b', label='Exp. fit')
    #         plt.figtext(0.135, 0.725, r'$\gamma = %.3fs^{-1}$' % (fit_dict['growth'] / (2 * np.pi)))
    #         plt.title('Transverse magnetic field energy')
    #         plt.xlabel('Time (s)')
    #         plt.ylabel('Energy (J)')
    #         plt.legend()
    #
    # # =============================================================================
    # #         plt.figure()
    # #         plt.plot(time_seconds[:linear_cutoff], dU[:linear_cutoff])
    # # =============================================================================
    #
    #         if plot == 'save':
    #             save_path = cf.anal_dir + 'magnetic_energy_expfit.png'
    #             plt.savefig(save_path)
    #             plt.close('all')
    #         elif plot == 'show':
    #             plt.show()
    #         else:
    #             pass
    # =============================================================================
    return
def straight_line_fit(save=True, normalize_output=True, normalize_time=False,
                      normfit_min=0.2, normfit_max=0.6, plot_growth=True,
                      plot_LT=True, growth_only=True, klim=None, glim=0.25):
    '''
    Estimate the linear growth rate of the transverse magnetic field energy
    by fitting a straight line to log(U_B) over the linear-growth phase, and
    optionally compare against cold/warm/hot linear dispersion theory.

    Parameters
    ----------
    save : bool
        Save figures to cf.anal_dir (figures are closed afterwards).
    normalize_output : bool
        Currently unused; kept for interface compatibility.
    normalize_time : bool
        Rescale the time axis by the H+ gyrofrequency.
    normfit_min, normfit_max : float
        Fractions of the time-to-peak-energy index bounding the fit window.
    plot_growth : bool
        Perform and annotate the straight-line fit; otherwise gamma is NaN.
    plot_LT : bool
        Also compute/plot linear-theory growth rates for comparison.
    growth_only, klim, glim :
        Axis-control options for the linear-theory figure.
    '''
    if normalize_time == True:
        tfac = cf.gyfreq
        tlab = '$t \Omega_H$'
    else:
        tfac = 1.0
        tlab = 'Time (s)'
    print('Calculating growth rate...')
    ftime, by = cf.get_array('By')
    ftime, bz = cf.get_array('Bz')
    bt = np.sqrt(by ** 2 + bz ** 2)
    btn = np.square(bt[:, :]).sum(axis=1) / cf.B_eq ** 2
    mu0 = (4e-7) * np.pi
    U_B = 0.5 / mu0 * np.square(bt[:, :]).sum(axis=1) * cf.dx
    if plot_growth == True:
        max_idx = np.argmax(U_B)
        st = int(normfit_min * max_idx)
        en = int(normfit_max * max_idx)
        # Energy grows exponentially in the linear phase, so fit a straight
        # line to its natural log over the [st, en) window.
        linear_xvals = ftime[st:en]
        linear_yvals = np.log(U_B[st:en])
        # BUG FIX: the previous revision called Polynomial().fit() (invalid:
        # Polynomial requires coefficients and .fit is a classmethod), left a
        # pdb.set_trace() in place, and then read `gradient`/`y_intercept`
        # which were never assigned. Restore the np.polyfit line.
        gradient, y_intercept = np.polyfit(linear_xvals, linear_yvals, 1)
        # Energy ~ B^2, so the field growth rate is half the energy slope;
        # normalize to the cyclotron frequency.
        gamma = 0.5*gradient
        normalized_gr = gamma / cf.gyfreq
        # Create line data to plot what we fitted
        linear_yfit = gradient * linear_xvals * tfac + y_intercept # y-values on log scale
        log_yfit = np.exp(linear_yfit) # Convert to linear values to use with semilogy()
    else:
        gamma = np.nan
        normalized_gr = np.nan
    fontsize = 10
    lpad = 20
    # Plot showing magnetic field and energy with growth rate line superposed
    plt.ioff()
    fig, ax = plt.subplots(nrows=2, figsize=(6.0, 4.0), sharex=True)
    ofs = 5
    ax[0].set_title('Growth Rate :: $\gamma / \Omega_H$ = %.4f' % normalized_gr, fontsize=fontsize)
    ax[0].plot(ftime*tfac, btn)
    ax[0].set_ylabel(r'$\frac{B^2}{B_0^2}$', rotation=0, labelpad=lpad, fontsize=fontsize)
    ax[0].set_xlim(0, tfac*ftime[-1])
    ax[0].set_ylim(btn[ofs], None)
    # Plot energy log scale
    # NOTE(review): this axis plots log10(btn) while its y-limits and the
    # fit/marker overlays below use U_B and semilogy -- mixed conventions
    # inherited from straight_line_fit_old; verify intended scale.
    ax[1].plot(ftime*tfac, np.log10(btn))
    ax[1].set_xlabel(tlab, fontsize=18)
    ax[1].set_ylabel(r'$\log_{10} \left(\frac{B^2}{B_0^2}\right)$', rotation=0, labelpad=lpad, fontsize=fontsize)
    ax[1].set_xlim(0, tfac*ftime[-1])
    ax[1].set_ylim(U_B[ofs], None)
    if plot_growth == True:
        # Mark peak energy (x) and the fit-window endpoints (o)
        ax[1].scatter(tfac*ftime[max_idx], U_B[max_idx], c='r', s=20, marker='x')
        ax[1].scatter(tfac*ftime[st] , U_B[st], c='r', s=20, marker='o')
        ax[1].scatter(tfac*ftime[en] , U_B[en], c='r', s=20, marker='o')
        ax[1].semilogy(linear_xvals, log_yfit, c='r', ls='--', lw=2.0)
    if save == True: fig.savefig( cf.anal_dir + 'growth_rate_energy.png')
    if plot_LT == True:
        # Calculate linear growth rates from simulation to compare
        # This could go into calculating gamma(k) later
        dk = 1. / (cf.NX * cf.dx)
        k = np.arange(0, 1. / (2*cf.dx), dk) * 2*np.pi
        k_vals, CPDR_solns, WPDR_solns, HPDR_solns = disp.get_linear_dispersion_from_sim(k, zero_cold=False)
        k_vals *= 3e8 / cf.wpi
        CPDR_solns *= 2*np.pi / cf.gyfreq
        WPDR_solns *= 2*np.pi / cf.gyfreq
        HPDR_solns *= 2*np.pi / cf.gyfreq
        # Comparison of growth rate to linear theory
        clr = ['r', 'g', 'b']
        fig0, axes = plt.subplots(3, figsize=(15, 10), sharex=True)
        axes[0].set_title('Theoretical growth rates (Dashed: Simulation GR)')
        for ii in range(CPDR_solns.shape[1]):
            # BUG FIX: plot one species column per trace; the previous
            # revision plotted the whole 2D array on every iteration
            # (cf. the indexing in straight_line_fit_old).
            axes[0].plot(k_vals, CPDR_solns[:, ii].imag, label='Cold', c=clr[ii])
            axes[1].plot(k_vals, WPDR_solns[:, ii].imag, label='Warm', c=clr[ii])
            axes[2].plot(k_vals, HPDR_solns[:, ii].imag, label='Hot', c=clr[ii])
        for ax in axes:
            if np.isnan(normalized_gr) == False:
                ax.axhline(normalized_gr, ls='--', c='k', alpha=0.5)
            ax.set_xlim(0, klim)
            ax.legend()
            if growth_only == True:
                ax.set_ylim(0, None)
                if ax.get_ylim()[1] > glim:
                    ax.set_ylim(0, glim)
            ax.set_ylabel('$\gamma$', rotation=0)
        axes[-1].set_xlabel('$kc/\omega_{pi}$')
        if save == True: fig0.savefig(cf.anal_dir + 'growth_rate_energy_LT.png')
    plt.close('all')
    return
def straight_line_fit_old(save=True, normalize_output=True, normalize_time=False,
                          normfit_min=0.2, normfit_max=0.6, plot_growth=True,
                          plot_LT=True, growth_only=True, klim=None, glim=0.25):
    '''
    Legacy variant of straight_line_fit(), kept for reference.

    Fits a straight line to log(U_B) between normfit_min/normfit_max fractions
    of the time-to-peak-energy, converts the slope to a field growth rate
    (gamma = slope/2, energy ~ B^2), and optionally overlays linear-theory
    growth rates.

    NOTE(review): if save is True but plot_LT is False, `fig0` is undefined
    and the save branch raises NameError -- latent bug, left as-is here.
    '''
    if normalize_time == True:
        tfac = cf.gyfreq
        tlab = '$t \Omega_H$'
    else:
        tfac = 1.0
        tlab = 'Time (s)'
    print('Calculating growth rate...')
    ftime, by = cf.get_array('By')
    ftime, bz = cf.get_array('Bz')
    bt = np.sqrt(by ** 2 + bz ** 2)
    btn = np.square(bt[:, :]).sum(axis=1) / cf.B_eq ** 2
    mu0 = (4e-7) * np.pi
    U_B = 0.5 / mu0 * np.square(bt[:, :]).sum(axis=1) * cf.dx
    if plot_growth == True:
        max_idx = np.argmax(U_B)
        st = int(normfit_min * max_idx)
        en = int(normfit_max * max_idx)
        # Data to fit straight line to
        linear_xvals = ftime[st:en]
        linear_yvals = np.log(U_B[st:en])
        gradient, y_intercept = np.polyfit(linear_xvals, linear_yvals, 1)
        # Calculate growth rate and normalize H to cyclotron frequency
        gamma = 0.5*gradient
        normalized_gr = gamma / cf.gyfreq
        # Create line data to plot what we fitted
        linear_yfit = gradient * linear_xvals * tfac + y_intercept # Returns y-values on log sscale
        log_yfit = np.exp(linear_yfit) # Convert to linear values to use with semilogy()
    else:
        gamma = np.nan
        normalized_gr = np.nan
    # Plot showing magnetic field and energy with growth rate line superposed
    plt.ioff()
    fig, ax = plt.subplots(nrows=2, figsize=(15, 10), sharex=True)
    ofs = 5
    ax[0].set_title('Growth Rate :: $\gamma / \Omega_H$ = %.4f' % normalized_gr, fontsize=20)
    ax[0].plot(ftime*tfac, btn)
    ax[0].set_ylabel(r'$\frac{B^2}{B_0^2}$', rotation=0, labelpad=30, fontsize=18)
    ax[0].set_xlim(0, tfac*ftime[-1])
    ax[0].set_ylim(btn[ofs], None)
    # Plot energy log scale
    # NOTE(review): y-limits come from U_B while the curve is btn -- the
    # limits are on a different scale than the plotted data; verify.
    ax[1].semilogy(ftime*tfac, btn)
    ax[1].set_xlabel(tlab, fontsize=18)
    ax[1].set_ylabel(r'$\log_{10} \left(\frac{B^2}{B_0^2}\right)$', rotation=0, labelpad=30, fontsize=18)
    ax[1].set_xlim(0, tfac*ftime[-1])
    ax[1].set_ylim(U_B[ofs], None)
    if plot_growth == True:
        # Mark growth rate indicators
        ax[1].scatter(tfac*ftime[max_idx], U_B[max_idx], c='r', s=20, marker='x')
        ax[1].scatter(tfac*ftime[st] , U_B[st], c='r', s=20, marker='o')
        ax[1].scatter(tfac*ftime[en] , U_B[en], c='r', s=20, marker='o')
        ax[1].semilogy(linear_xvals, log_yfit, c='r', ls='--', lw=2.0)
    if plot_LT == True:
        # Calculate linear growth rates from simulation to compare
        # This could go into calculating gamma(k) later
        dk = 1. / (cf.NX * cf.dx)
        k = np.arange(0, 1. / (2*cf.dx), dk) * 2*np.pi
        k_vals, CPDR_solns, WPDR_solns, HPDR_solns = disp.get_linear_dispersion_from_sim(k, zero_cold=False)
        k_vals *= 3e8 / cf.wpi
        CPDR_solns *= 2*np.pi / cf.gyfreq
        WPDR_solns *= 2*np.pi / cf.gyfreq
        HPDR_solns *= 2*np.pi / cf.gyfreq
        # Comparison of growth rate to linear theory
        clr = ['r', 'g', 'b']
        fig0, axes = plt.subplots(3, figsize=(15, 10), sharex=True)
        axes[0].set_title('Theoretical growth rates (Dashed: Simulation GR)')
        for ii in range(CPDR_solns.shape[1]):
            axes[0].plot(k_vals, CPDR_solns[:, ii].imag, c=clr[ii], label='Cold')
            axes[1].plot(k_vals, WPDR_solns[:, ii].imag, c=clr[ii], label='Warm')
            axes[2].plot(k_vals, HPDR_solns[:, ii].imag, c=clr[ii], label='Hot')
        for ax in axes:
            if np.isnan(normalized_gr) == False:
                ax.axhline(normalized_gr, ls='--', c='k', alpha=0.5)
            ax.set_xlim(0, klim)
            ax.legend()
            if growth_only == True:
                ax.set_ylim(0, None)
                if ax.get_ylim()[1] > glim:
                    ax.set_ylim(0, glim)
            ax.set_ylabel('$\gamma$', rotation=0)
        axes[-1].set_xlabel('$kc/\omega_{pi}$')
    if save == True:
        fig.savefig( cf.anal_dir + 'growth_rate_energy.png')
        fig0.savefig(cf.anal_dir + 'growth_rate_energy_LT.png')
        plt.close('all')
    else:
        plt.show()
    return
def SWSP_timeseries(nx, tmax=None, save=True, log=False, normalize=False, LT_overlay=False):
    '''
    Single wave, single point timeseries. Splits magnetic field into forwards
    and backwards waves, and tracks their evolution at a single gridpoint.

    nx         : grid index of the point to plot
    tmax       : x-axis limit in seconds (defaults to the last field time)
    save       : save figure to cf.anal_dir instead of showing it
    log        : use semilog y-axes with a small floor instead of linear axes
    normalize  : express amplitudes in units of B_eq instead of nT
    LT_overlay : compute linear-theory growth rates (overlay itself is
                 currently commented out)

    NOTE(review): B_fwd/B_bwd are scaled in place below -- if bk.get_FB_waves
    returns cached arrays, repeated calls would compound the scaling; verify.
    To do: Are st, en legit for parabolic code?
    '''
    ftime, B_fwd, B_bwd, B_raw = bk.get_FB_waves(overwrite=False, field='B')
    if tmax is None:
        tmax = ftime[-1]
    if normalize == True:
        B_fwd /= cf.B_eq
        B_bwd /= cf.B_eq
        ylbl = '/$B_0$'
    else:
        B_fwd *= 1e9
        B_bwd *= 1e9
        ylbl = '\nnT'
    B_max = max(np.abs(B_fwd).max(), np.abs(B_bwd).max())
    if LT_overlay == True:
        B_seed = 1e-3
        dk = 1. / (cf.NX * cf.dx)
        k = np.arange(0, 1. / (2*cf.dx), dk) * 2*np.pi
        k_vals, CPDR_solns, WPDR_solns, HPDR_solns = disp.get_linear_dispersion_from_sim(k, zero_cold=False)
        k_vals *= 3e8 / cf.wpi
        # Growth rates in angular units
        CPDR_solns *= 2*np.pi
        WPDR_solns *= 2*np.pi
        HPDR_solns *= 2*np.pi
        max_gr = HPDR_solns.imag.max()
    # =============================================================================
    #     linear_line = B_seed * np.exp(max_gr*ftime)
    #     plot_line = 10**linear_line
    # =============================================================================
    plt.ioff()
    fig, axes = plt.subplots(2, sharex=True, figsize=(15, 10))
    axes[0].set_title('Backwards/Forwards Wave Fields at x = {}km'.format(cf.B_nodes[nx]*1e-3))
    if log == False:
        axes[0].plot(ftime, np.abs(B_fwd[:, nx]))
        axes[1].plot(ftime, np.abs(B_bwd[:, nx]))
        axes[0].set_ylim(-B_max, B_max)
        axes[1].set_ylim(-B_max, B_max)
    else:
        axes[0].semilogy(ftime, np.abs(B_fwd[:, nx]))
        axes[1].semilogy(ftime, np.abs(B_bwd[:, nx]))
        # Floor the log axes at roughly the noise level (1e-4 nT equivalent).
        if normalize == True:
            axes[0].set_ylim(1e-4/(cf.B_eq*1e9), None)
            axes[1].set_ylim(1e-4/(cf.B_eq*1e9), None)
        else:
            axes[0].set_ylim(1e-4, None)
            axes[1].set_ylim(1e-4, None)
    axes[0].set_ylabel('$B_{fwd}$%s' % ylbl, rotation=0, labelpad=20)
    axes[1].set_ylabel('$B_{bwd}$%s' % ylbl, rotation=0, labelpad=20)
    axes[1].set_xlabel('Time (s)', rotation=0)
    for ax in axes:
        ax.set_xlim(0, tmax)
    if save == True:
        fig.savefig(cf.anal_dir + 'SWSP_{:04}.jpg'.format(nx))
        plt.close('all')
    else:
        plt.show()
    return
def test_seed_sizes():
    """Compare time-to-saturation of a growing wave for several seed amplitudes.

    Each seed grows exponentially at a fixed rate until it first exceeds the
    saturation amplitude, after which it is clamped there. Results are shown
    on linear and log axes against time in gyroperiod units.
    """
    proton_mass = 1.673e-27
    proton_charge = 1.602e-19
    B0 = 243e-9
    sat = 0.58                      # Saturation amplitude as (Bw/B0)**2
    Bwsat = np.sqrt(sat)*B0
    #ne = 177e6
    pcyc = proton_charge * B0 / proton_mass     # Proton cyclotron frequency
    wcinv = 1. / pcyc                           # Inverse gyrofrequency (time unit)
    tarr = np.arange(0.0, 1800*wcinv, 0.05*wcinv)
    # Wave parameters
    B_seed = [1e-10, 1e-15, 1e-20, 1e-25, 1e-30]
    freq = 0.35*pcyc
    grate = 0.035*pcyc
    nwaves = len(B_seed)
    wavearr = np.zeros((nwaves, tarr.shape[0]))
    # Grow each wave; clamp at Bwsat from the first crossing onwards.
    for ii, seed in enumerate(B_seed):
        for jj, t in enumerate(tarr):
            soln = np.abs(seed * np.exp(-1j*freq*t) * np.exp(grate*t))
            if soln > Bwsat:
                wavearr[ii, jj:] = Bwsat
                break
            wavearr[ii, jj] = soln
    tarr /= wcinv       # Back to gyroperiod units for plotting
    plt.ioff()
    fig, axes = plt.subplots(2, sharex=True)
    axes[0].set_title('Time to Saturation for different seeds')
    for ii in range(nwaves):
        lbl = 'Seed: {:.1E} nT'.format(B_seed[ii]*1e9)
        axes[0].plot(tarr, wavearr[ii]*1e9, label=lbl)
        axes[1].semilogy(tarr, wavearr[ii]*1e9, label=lbl)
    axes[1].set_xlabel('Time ($t\Omega_H$)')
    for ax in axes:
        ax.set_ylabel('Amplitude (nT)')
        ax.set_xlim(0, tarr[-1])
        ax.legend()
    plt.show()
    return
if __name__ == '__main__':
    # Ad-hoc entry point: run the seed-size saturation comparison plot.
    test_seed_sizes()
11,706 | 44fda58daa020ec3a77612c00432b63187ae08e1 | f_lado = list(map(float, input().split()))
# Classify a triangle from three side lengths (beecrowd/URI 1045 style).
# Sort descending so f_lado[0] holds the longest side.
f_lado.sort(reverse=True)
# Triangle inequality: the longest side must be strictly shorter than the
# sum of the other two, otherwise the sides do not form a triangle.
if f_lado[0] >= f_lado[1] + f_lado[2]:
    print("NAO FORMA TRIANGULO")
else:
    # Angle classification via the converse of the Pythagorean theorem.
    if f_lado[0]**2 == f_lado[1]**2 + f_lado[2]**2:
        print("TRIANGULO RETANGULO")
    if f_lado[0]**2 > f_lado[1]**2 + f_lado[2]**2:
        print("TRIANGULO OBTUSANGULO")
    if f_lado[0]**2 < f_lado[1]**2 + f_lado[2]**2:
        print("TRIANGULO ACUTANGULO")
    # Side classification. BUG FIX: the original isosceles test repeated
    # f_lado[0] == f_lado[1]; the third pair is f_lado[0] vs f_lado[2].
    if f_lado[0] == f_lado[1] and f_lado[1] == f_lado[2]:
        print("TRIANGULO EQUILATERO")
    elif f_lado[0] == f_lado[1] or f_lado[1] == f_lado[2] or f_lado[0] == f_lado[2]:
        print("TRIANGULO ISOSCELES")
11,707 | 32b1427edaf87dfc00905f2518d87336e0371470 | from flask import render_template, json, request
from core.controllers.cipher import Cipher
class CipherRoute:
    """Flask view handlers for the Caesar-cipher pages."""

    def __init__(self):
        # Legend shown on the landing page (rendered as HTML).
        self.main_page_legend = (
            'Cesar cipher site. Please choose rotation and case: '
            'encrypt or decrypt <br>'
            'After that enter text in the left field'
        )

    def main_page(self):
        """Render the landing page with the usage legend."""
        return render_template("index.html", legend=self.main_page_legend)

    def cipher(self):
        """Run the cipher on posted form data and return a JSON result."""
        form = request.form
        engine = Cipher(rot_index=form['rot-index'],
                        mode=form['mode'],
                        rough_text=form['rough-text'])
        return json.dumps(engine.get_result())
|
11,708 | 8e775d2913fb036a82abb57a936d4c2853d590d6 | import random
def gen_random_A(length, majNum):
    """Return a list of `length` random ints drawn uniformly from [0, majNum]."""
    return [random.randint(0, majNum) for _ in range(length)]
def partition_even_odd(a):
    """In-place: move even values to the front of `a` and odd values to the back.

    Two-pointer inward sweep; the relative order within each parity class is
    not preserved. Returns None (the list is modified in place).
    """
    lo, hi = 0, len(a) - 1
    while lo <= hi:
        # Odd stuck at the front and even stuck at the back: swap them inward.
        if a[lo] % 2 == 1 and a[hi] % 2 == 0:
            a[hi], a[lo] = a[lo], a[hi]
            lo += 1
            hi -= 1
        # Skip evens already in place at the front.
        while a[lo] % 2 == 0 and a[hi] % 2 == 0:
            lo += 1
            if lo >= hi:
                return
        # Skip odds already in place at the back.
        while a[lo] % 2 == 1 and a[hi] % 2 == 1:
            hi -= 1
            if lo >= hi:
                return
        # Both pointers sit on correctly-placed values: advance both inward.
        while a[lo] % 2 == 0 and a[hi] % 2 == 1:
            lo += 1
            hi -= 1
            if lo >= hi:
                return
    return
if __name__ == '__main__':
    # Smoke test: partition one million random ints drawn from [0, 10^10].
    A = gen_random_A(int(10e5), int(10e9))
    partition_even_odd(A)
|
11,709 | b41c688f050a120caf8a95021165e36bbee55b14 | # coding: utf8
from __future__ import unicode_literals
# Source: https://github.com/stopwords-iso/stopwords-et
# Estonian stop words, one token per line; whitespace-split into a set.
STOP_WORDS = set(
    """
aga
ei
et
ja
jah
kas
kui
kõik
ma
me
mida
midagi
mind
minu
mis
mu
mul
mulle
nad
nii
oled
olen
oli
oma
on
pole
sa
seda
see
selle
siin
siis
ta
te
ära
""".split()
)
|
11,710 | a947246be7fbb10392014db0f395cacc1301c526 |
from __future__ import print_function
import httplib2
import os
import json
from apiclient import errors
from apiclient import discovery
from oauth2client import client
from oauth2client import tools
from oauth2client.file import Storage
import MySQLdb
import re
# Database connection used by main() to persist parsed messages.
# NOTE(review): credentials are hard-coded -- move to env vars/config.
conn = MySQLdb.connect(user='root', password='luyuan', database='lyxiong', charset='utf8')
cursor = conn.cursor()
# CLI flags for oauth2client's tools.run_flow; None when argparse is absent.
try:
    import argparse
    flags = argparse.ArgumentParser(parents=[tools.argparser]).parse_args()
except ImportError:
    flags = None
def emoji_filter():
    """Return a compiled regex matching common emoji codepoint ranges.

    Used to strip emoji from Gmail message snippets before storing them.
    (Original comment: 正则过滤表情 -- "regex to filter emoji".)
    """
    # NOTE: SCOPES / CLIENT_SECRET_FILE / APPLICATION_NAME were previously
    # defined here but never used -- they belong with the OAuth flow in
    # get_credentials() and have been removed as dead locals.
    emoji_pattern = re.compile("["
                               "\U0001F600-\U0001F64F"  # emoticons
                               "\U0001F300-\U0001F5FF"  # symbols & pictographs
                               "\U0001F680-\U0001F6FF"  # transport & map symbols
                               "\U0001F1E0-\U0001F1FF"  # flags (iOS)
                               "]+", flags=re.UNICODE)
    return emoji_pattern
def get_credentials():
    """Load stored Gmail OAuth credentials from ~/.credentials.

    Returns whatever Storage.get() yields -- possibly None/invalid, since
    the interactive refresh flow below is commented out and stored
    credentials are assumed valid.
    """
    home_dir = os.path.expanduser('~')
    credential_dir = os.path.join(home_dir, '.credentials')
    if not os.path.exists(credential_dir):
        os.makedirs(credential_dir)
    credential_path = os.path.join(credential_dir,
                                   'gmail-python-quickstart.json')
    store = Storage(credential_path)
    credentials = store.get()
    # if not credentials or credentials.invalid:
    #     flow = client.flow_from_clientsecrets(CLIENT_SECRET_FILE, SCOPES)
    #     flow.user_agent = APPLICATION_NAME
    #     if flags:
    #         credentials = tools.run_flow(flow, store, flags)
    #     else: # Needed only for compatibility with Python 2.6
    #         credentials = tools.run(flow, store)
    #     print('Storing credentials to ' + credential_path)
    return credentials
def ListThreadsWithLabels(service, user_id, label_ids=None):
    """List all Gmail threads for `user_id` carrying all of `label_ids`.

    Follows nextPageToken pagination until exhausted. Returns a list of
    thread resource dicts, or None if the API call raised HttpError
    (the error is printed, matching the original behaviour).
    """
    # Avoid the mutable-default-argument pitfall; [] means "no label filter".
    if label_ids is None:
        label_ids = []
    try:
        response = service.users().threads().list(userId=user_id, labelIds=label_ids).execute()
        threads = []
        if 'threads' in response:
            threads.extend(response['threads'])
        while 'nextPageToken' in response:
            page_token = response['nextPageToken']
            response = service.users().threads().list(userId=user_id, labelIds=label_ids,
                                                      pageToken=page_token).execute()
            threads.extend(response['threads'])
        return threads
    except errors.HttpError as error:
        print('An error occurred: %s' % error)
def ListMessagesWithLabels(service, user_id, label_ids=None):
    """List all Gmail messages for `user_id` carrying all of `label_ids`.

    Follows nextPageToken pagination until exhausted. Returns a list of
    message resource dicts, or None if the API call raised HttpError
    (the error is printed, matching the original behaviour).
    """
    # Avoid the mutable-default-argument pitfall; [] means "no label filter".
    if label_ids is None:
        label_ids = []
    try:
        response = service.users().messages().list(userId=user_id,
                                                   labelIds=label_ids).execute()
        messages = []
        if 'messages' in response:
            messages.extend(response['messages'])
        while 'nextPageToken' in response:
            page_token = response['nextPageToken']
            response = service.users().messages().list(userId=user_id,
                                                       labelIds=label_ids,
                                                       pageToken=page_token).execute()
            messages.extend(response['messages'])
        return messages
    except errors.HttpError as error:
        print('An error occurred: %s' % error)
def GetMessage(service, user_id, msg_id):
    """Fetch a single Gmail message resource by id; prints and returns None on HttpError."""
    try:
        return service.users().messages().get(userId=user_id, id=msg_id).execute()
    except errors.HttpError as error:
        print('An error occurred: %s' % error)
def GetService():
    """Build an authorized Gmail API v1 service client from stored credentials."""
    credentials = get_credentials()
    http = credentials.authorize(httplib2.Http())
    service = discovery.build('gmail', 'v1', http=http)
    return service
def GetMsgId():
    """Map each 'INBOX' thread id to the list of its message ids.

    Returns {thread_id: [msg_id, ...]}; threads with no listed messages map
    to an empty list (same as the original behaviour).
    """
    service = GetService()
    threads = ListThreadsWithLabels(service, 'me', ['INBOX'])
    messages = ListMessagesWithLabels(service, 'me', ['INBOX'])
    # Pre-seed every known thread so empty threads still appear, then bucket
    # messages in a single pass. (Replaces the original O(threads*messages)
    # nested scan and the local variable that shadowed builtin `dict`.)
    thread_msgs = {item['id']: [] for item in threads}
    for msg in messages:
        tid = msg['threadId']
        if tid in thread_msgs:
            thread_msgs[tid].append(msg['id'])
    return thread_msgs
def GetPurposedMessage():
    """Fetch every INBOX message and distill the fields we persist.

    Returns a list of dicts with keys: msg_id, threadId, content
    (emoji-stripped snippet), timestamps (ms since epoch, as int),
    receiver and sender.
    """
    service = GetService()
    thread_msgs = GetMsgId()   # renamed from `dict`, which shadowed the builtin
    message_list = []
    emoji_pattern = emoji_filter()
    for thread_id in thread_msgs:
        for msg_id in thread_msgs[thread_id]:
            message = GetMessage(service, 'me', msg_id)
            message_list.append({
                'msg_id': message['id'],
                'threadId': message['threadId'],
                'content': emoji_pattern.sub('', message['snippet']),
                'timestamps': int(message['internalDate']),
                # NOTE(review): assumes headers[0]/headers[1] are the To/From
                # headers in that order -- fragile; match headers by name.
                'receiver': message['payload']['headers'][0]['value'],
                'sender': message['payload']['headers'][1]['value'],
            })
    return message_list
def main():
    """Fetch all INBOX messages, dump them to info.txt as JSON and insert
    them into the email_info MySQL table (duplicates ignored)."""
    list_message = GetPurposedMessage()
    # Connect to the database and write each message in a loop.
    with open('info.txt', 'w') as json_file:
        # Write each distilled message as JSON and insert it into MySQL;
        # INSERT IGNORE skips rows that violate the unique key.
        for item in list_message:
            json_file.write(json.dumps(item, ensure_ascii=False,indent = 6))
            cursor.execute('insert ignore into email_info(thread_id,msg_id,receiver,sender,content,timestamps) values(%s,%s,%s,%s,%s,%s)',
                           [item['threadId'],item['msg_id'],item['receiver'],item['sender'],item['content'],item['timestamps']])
    conn.commit()
    print('写入完成')  # "write complete"
if __name__ == '__main__':
    main()
11,711 | 7ac34b5670b3974e45d78a570fb9d17b4b04a194 | from django.contrib.auth import REDIRECT_FIELD_NAME
from django.contrib.auth.decorators import user_passes_test
from apps.users.models import BaseUser
def student_required(view_func=None, redirect_field_name=REDIRECT_FIELD_NAME, login_url=None):
    """
    Decorator for views that checks that the user is student, redirecting to the login page if necessary.
    """
    decorator = user_passes_test(
        BaseUser.is_student,
        login_url=login_url,
        redirect_field_name=redirect_field_name,
    )
    # Support both @student_required and @student_required(...) usage.
    return decorator(view_func) if view_func else decorator
def employee_required(view_func=None, redirect_field_name=REDIRECT_FIELD_NAME, login_url=None):
    """
    Decorator for views that checks that the user is employee, redirecting to the login page if necessary.
    """
    decorator = user_passes_test(
        BaseUser.is_employee,
        login_url=login_url,
        redirect_field_name=redirect_field_name,
    )
    # Support both @employee_required and @employee_required(...) usage.
    return decorator(view_func) if view_func else decorator
def external_contractor_forbidden(view_func=None, redirect_field_name=REDIRECT_FIELD_NAME, login_url=None):
    """
    Check whether the logged user is either a student or an actual employee
    (i.e. not external contractor).
    Redirect to the login page if that's not the case.
    """
    decorator = user_passes_test(
        lambda u: not BaseUser.is_external_contractor(u),
        login_url=login_url,
        redirect_field_name=redirect_field_name)
    # Supports both bare-decorator and decorator-factory usage.
    if view_func:
        return decorator(view_func)
    return decorator
|
11,712 | e6e334b6eede49980cf1f44b2bfb10e470e8207b | """ For installing package.
"""
import re
from setuptools import setup
# Descriptions: short one from a literal, long one from the README.
SHORT_DESCRIPTION = "Useful tools for machine learning mastery."
with open('README.md') as f:
    LONG_DESCRIPTION = f.read()
# version
# Parse __version__ = 'X.Y.Z' out of the package __init__ so the version
# lives in exactly one place; next() raises StopIteration if the pattern
# is missing, failing the build loudly.
with open('./src/__init__.py') as f:
    VERSION = next(
        re.finditer(
            r'\n__version__ *= *[\'\"]([0-9\.]+)[\'\"]',
            f.read(),
        )
    ).groups()[0]
setup(
    name='machine_learning_mastery_src',
    version=VERSION,
    author='Dylan Gregersen',
    author_email='an.email0101@gmail.com',
    url='https://github.com/earthastronaut/mlmastery',
    # license='MIT',
    description=SHORT_DESCRIPTION,
    long_description=LONG_DESCRIPTION,
    python_requires='>=3.6',
    # install_requires=[],
)
11,713 | 4f6e0591971772eaae54841076d34b1d895632f7 | import json
from django.http import HttpResponseRedirect
from django.template.response import TemplateResponse
from django.contrib.auth.models import User
from api.models import Category, Account, Topic
from legislation import sunlightapi as sun
from utils.custom_decorators import beta_blocker, login_required
def base_view(request):
    """Landing page for anonymous users; feed for authenticated beta users."""
    if not request.user.is_authenticated():
        return TemplateResponse(request, 'static_templates/landing.html', {})
    a = Account.objects.get(user=request.user)
    # Authenticated users without beta access are bounced to the beta page.
    if not a.beta_access:
        return HttpResponseRedirect('/beta')
    return TemplateResponse(request, 'feed.html', {})
@login_required
@beta_blocker
def user_profile(request, username=None):
    """Render a user's profile page; defaults to the logged-in user.

    Unknown usernames redirect to /404. The template context bundles the
    user's friends, pending friend requests, an account summary and bill
    data from the Sunlight API, serialized as one JSON string.
    """
    if not username:
        user = request.user
    else:
        try:
            user = User.objects.get(username=username)
        except User.DoesNotExist:
            return HttpResponseRedirect('/404')
    a = Account.objects.get(user=user)
    friend_data_dictionary = Account.objects.friends(a)
    result = dict(friends=friend_data_dictionary['friends'],
                  requests=friend_data_dictionary['requests'],
                  profile=Account.objects.summarize(a),
                  bills=sun.get_bill_information(a))
    return TemplateResponse(request, 'account.html', {'result': json.dumps(result)})
@login_required
@beta_blocker
def create_group(request):
    """Render the new-group creation page."""
    return TemplateResponse(request, 'newgroup.html', {})
@login_required
@beta_blocker
def dbview(request):
    """Debug view: dump all categories as JSON into the dbview template."""
    result = [{'id': c.id, 'name': c.name} for c in Category.objects.all()]
    return TemplateResponse(request, 'dbview.html', {'result': json.dumps(result)})
@login_required
@beta_blocker
def add_civi(request):
    """Render the add-civi form, preloading category and topic choices."""
    categories = [{'id': c.id, 'name': c.name} for c in Category.objects.all()]
    topics = [{'id': c.id, 'topic': c.topic} for c in Topic.objects.all()]
    return TemplateResponse(request, 'add_civi.html', {'categories': json.dumps(categories), 'topics': json.dumps(topics)})
def login_view(request):
    """Login page; already-authenticated users go straight to the feed."""
    if request.user.is_authenticated():
        return HttpResponseRedirect('/')
    return TemplateResponse(request, 'login.html', {})
# Simple views that just render a static template each.
def beta_view(request):
    """Shown to users who lack beta access."""
    return TemplateResponse(request, 'beta_blocker.html', {})
def declaration(request):
    return TemplateResponse(request, 'declaration.html', {})
def landing_view(request):
    return TemplateResponse(request, 'static_templates/landing.html', {})
def how_it_works_view(request):
    return TemplateResponse(request, 'static_templates/how_it_works.html', {})
def about_view(request):
    return TemplateResponse(request, 'static_templates/about.html', {})
def support_us_view(request):
    return TemplateResponse(request, 'static_templates/support_us.html', {})
def does_not_exist(request):
    return TemplateResponse(request, '404.html', {})
|
11,714 | 09eda8181e1dfab8a3997de254723d0b13b2bdc8 | #!/usr/bin/python
# -*- coding: utf-8 -*-
import logging
import cPickle as pickle
import psycopg2
import numpy as np
np.random.seed(19870712) # for reproducibility
# Base path for the i2b2 corpus and the PostgreSQL connection used below.
# NOTE(review): credentials are hard-coded -- move to env vars/config.
path = "/home/terence/pycharm_use/IPIR_De_identification/1_data/"
get_conn = psycopg2.connect(dbname='IPIR_De_identification',user='postgres', host='localhost', password='postgres')
import glob, os
import re
import xml.etree.ElementTree as ET
get_conn.autocommit = True
get_cur = get_conn.cursor()
# Create storage tables (dropping any previous versions first).
get_cur.execute("DROP TABLE IF EXISTS Record_text;"
                "DROP TABLE IF EXISTS Record_PHI;"
                "DROP TABLE IF EXISTS sentence_text;")
# BUG FIX: the implicit string concatenation was missing commas between
# several column definitions (e.g. " train integer" " clean_content text"
# produced the invalid column spec "train integer clean_content text"),
# so these CREATE TABLE statements failed. Commas added.
get_cur.execute("Create table Record_text ("
                " row_id serial primary key, "
                " subject_id integer,"
                " order_id integer,"
                " content text,"
                " labels text,"
                " train integer,"
                " clean_content text);"
                "Create table Record_PHI ("
                " row_id serial primary key,"
                " subject_id integer,"
                " order_id integer,"
                " id integer,"
                " type text,"
                " text text,"
                " text_start integer,"
                " text_end integer,"
                " comment text,"
                " train integer,"
                " clean_text text,"
                " clean_position integer,"
                " sentence_id integer,"
                " sentence_position integer);"
                "Create table sentence_text ("
                " row_id serial primary key,"
                " subject_id integer,"
                " order_id integer,"
                " sentence_id integer,"
                " sentence text,"
                " labels text,"
                " train integer);")
# Get all xml, path and file name.
# subject_len stores cumulative counts so we can later tell which directory
# (and hence which train/test split) each file index belongs to.
os.chdir(path +"i2b2/training-PHI-Gold-Set1/")
subject = glob.glob("*.xml")
subject_len = [len(subject)]
os.chdir(path +"i2b2/training-PHI-Gold-Set2/")
subject += glob.glob("*.xml")
subject_len.append(len(subject))
os.chdir(path +"i2b2/testing-PHI-Gold-fixed/")
subject += glob.glob("*.xml")
subject_len.append(len(subject))
# Storage data
for i in range(0, subject_len[2]):
    # Get xml file; names look like "<subject>-<order>.xml"
    (subject_id, order_id) = re.findall(r"[\w']+", subject[i])[0:2]
    if i<subject_len[0]:
        tree = ET.parse(path + "i2b2/training-PHI-Gold-Set1/" + subject[i])
        train = '1'
    elif i<subject_len[1]:
        tree = ET.parse(path + "i2b2/training-PHI-Gold-Set2/" + subject[i])
        train = '1'
    else:
        tree = ET.parse(path + "i2b2/testing-PHI-Gold-fixed/" + subject[i])
        train = '0'
    # Parsing xml file
    root = tree.getroot()
    # deal content part (root[0] is the record text node)
    # NOTE(review): SQL is built by string concatenation with only quote
    # doubling -- acceptable for this trusted local corpus, but
    # parameterized queries would be safer and simpler.
    get_cur.execute("Insert into Record_text (subject_id, order_id, content, train) "
                    "values "
                    "("+subject_id+","+order_id+",'"+root[0].text.replace("'", "''")+"', "+train+");")
    # deal phi part: one row per annotated PHI element under root[1]
    for j in range(0,len(root[1])):
        get_cur.execute("Insert into Record_PHI (subject_id, order_id, id, type, text, text_start, text_end, comment, train) "
                        "values "
                        "(" + subject_id + "," + order_id + ", "
                        "" + root[1][j].attrib['id'].replace("P", "") + ", "
                        "'" + root[1][j].attrib['TYPE'] + "', "
                        "'" + root[1][j].attrib['text'].replace("'", "''") + "', "
                        "'" + root[1][j].attrib['start'] + "', "
                        "'" + root[1][j].attrib['end'] + "', "
                        "'" + root[1][j].attrib['comment'].replace("'", "''") + "', " + train + ");")
print "end"
11,715 | c5390c7d612685d454f157ca7dc7e636bd7fa3d9 | from mpl_toolkits import mplot3d
from matplotlib.ticker import MaxNLocator
import matplotlib.pyplot as plt
import matplotlib.colors
import matplotlib.animation as animation
import numpy as np
import pandas as pd
import sys
from tools.my_setup import *
# Animation frame interval in milliseconds (alternatives left for tuning).
frame_delay_ms=10
# frame_delay_ms=25
# frame_delay_ms=50
#
# When True, only the 3D scatter is drawn (the 2D side graphs are skipped).
turn_off_graphs=True
# turn_off_graphs=False
hillcolor='darkolivegreen'
# NOTE(review): immediately overwritten -- the value above is dead.
hillcolor='black'
# `particles` / `unique_particles` come from tools.my_setup -- presumably a
# per-particle, per-timestep DataFrame; TODO confirm columns.
particles = particles.loc[particles['identifier'].isin(unique_particles)]
unique_particles = particles.identifier.unique()
num_particles = len(list(particles.identifier.unique()))
# print(len(num_particles))
# sys.exit()
# Colour scale over particle temperature (padded slightly at the low end).
norm = matplotlib.colors.Normalize(vmin=particles['temperature'].min()-10,
                                   vmax=particles['temperature'].max())
# ---- Set up colors ----
# cmap = plt.cm.Greens
cmap = plt.cm.Blues
cmap_c = cmap(norm(particles.temperature.values))
fig = plt.figure(figsize=plt.figaspect(0.5))
# 3D axis fills the figure unless the 2D side graphs are enabled.
if not turn_off_graphs:
    ax = fig.add_subplot(1,2,1,projection='3d')
else:
    ax = fig.add_subplot(1,1,1,projection='3d')
def set_limits():
    """Reset the 3D axis limits to time / particle-count / temperature ranges."""
    ax.set_xlim(0,particles.timestep.max()); # time
    ax.set_ylim(1,num_particles);
    ax.set_zlim(1,particles.temperature.max())
    # ax.set_xlim(1,nx); ax.set_ylim(1,ny); ax.set_zlim(1,nz)
    # ax.set_xlim(1,nx); ax.set_ylim(1,ny); ax.set_zlim(1,particles['z_meters'].max())
set_limits()
# Integer-only tick marks on all three axes.
ax.xaxis.set_major_locator(MaxNLocator(integer=True))
ax.yaxis.set_major_locator(MaxNLocator(integer=True))
ax.zaxis.set_major_locator(MaxNLocator(integer=True))
ax.set_xlabel("timesteps")
ax.set_ylabel("particles")
ax.set_zlabel("temperature (K)")
# ax.set_xticklabels([])
# ax.set_yticklabels([])
# Appears to re-label particle identifiers as 0..num_particles-1 ordered by
# temperature at t=0: first negate existing small ids to avoid collisions,
# then map each id to its rank in the sorted t=0 snapshot.
# NOTE(review): id 0 is unchanged by negation (-0 == 0) -- confirm intended.
for i in range(0,num_particles):
    if any(particles.identifier == i):
        change_i = particles.iloc[i].identifier
        particles.loc[particles.identifier == change_i, 'identifier'] = -i
q = particles[particles.timestep == 0]
q = q.sort_values(by=['temperature'])
for i in range(0,num_particles):
    change_i = q.iloc[i].identifier
    particles.replace({'identifier' : change_i}, i, inplace=True)
if not turn_off_graphs:
    # --- set up second graph ---
    second_graph_title = 'temperature'
    second_graph_entity = second_graph_title.replace(' ', '_')
    second_graph = fig.add_subplot(2,2,2)
    second_graph_max = particles[second_graph_entity].max()
    # Anchor the y-range at 0 unless the data go negative; pad by 10%.
    if (particles[second_graph_entity].min() < 0):
        second_graph_min = particles[second_graph_entity].min()
    else:
        second_graph_min = 0
    second_graph_max *= 1.1
    second_graph_min *= 1.1
    # --- set up third graph ---
    # choose type (the last assignment wins; earlier ones are alternates)
    third_graph_title = "density"
    third_graph_title = "water vapor"
    third_graph_title = "potential temperature"
    # third_graph_title = "mixing ratio"
    third_graph_entity = third_graph_title.replace(' ', '_')
    third_graph = fig.add_subplot(2,2,4)
    third_graph_max = particles[third_graph_entity].max()
    if (particles[third_graph_entity].min() < 0):
        third_graph_min = particles[third_graph_entity].min()
    else:
        third_graph_min = 0
    third_graph_max *= 1.1
    third_graph_min *= 1.1
# --- set graph limits ---
def set_graph_lim():
    """Apply titles and axis limits to the 2D side graphs (no-op when disabled)."""
    # BUG FIX: a stray bare `return` on the first line made the whole body
    # (including its own turn_off_graphs guard) unreachable, so the limits
    # were never applied even with the side graphs enabled. If that was a
    # deliberate disable, prefer toggling turn_off_graphs instead.
    if turn_off_graphs:
        return
    second_graph.set_title(second_graph_title)
    second_graph.set_xlim(0,num_t)
    second_graph.set_ylim(second_graph_min,second_graph_max)
    third_graph.set_title(third_graph_title, y=-0.3)
    third_graph.set_xlim(0,num_t)
    third_graph.set_ylim(third_graph_min,third_graph_max)
# print(particles)
# print(particles['z_meters'].max())
# sys.exit()
def set_old(p):
    """Scatter dataframe *p* on the 3-D axis and return the artist.

    The artist is returned so the caller can later remove it from the
    axes between animation frames.
    """
    return ax.scatter3D(
        p.timestep,
        p.identifier,
        p.temperature,
        marker='o',
        edgecolor='black',
        color=cmap_c[0],
    )
oparticles = particles[['timestep','x','y','z_meters', 'identifier', 'image']]
particles = particles[['timestep','identifier','temperature']]
# --- function that gets run every animation timestep ---
blue_cmap = plt.cm.Blues
discrete_cmap = plt.get_cmap('tab20b')
def updateFig(*args):
    """Animation callback: scatter the particles of the current timestep.

    Mutates the module-level counters ``t`` (timestep), ``time`` (image
    index) and ``old`` (last scatter artist); clears and re-limits the
    axes once ``t`` reaches ``num_t``.
    """
    global t, old, time
    ax.set_title("Particle Movement t="+str(t), y=1.05)
    if (t == num_t):
        # end of run: reset axes and advance to the next image index
        # NOTE(review): set_limits is defined elsewhere in this file.
        ax.cla()
        set_limits()
        # plot_image_lines(time)
        time += 1
        t = 0
    else:
        # old.remove()
        pass
    # rows belonging to the current timestep only
    p = particles[ (particles.timestep == t) ]
    # p.water_vapor = np.ma.masked_where(p.water_vapor == 0.0, p.water_vapor)
    # p.water
    p_color = "black"
    # print(type(p.water_vapor))
    # if (p.water_vapor > 0.0):
    #     p_color = "blue"
    old = set_old(p)
    # c=p.cloud_water, cmap=blue_cmap, vmin=0.00000)
    # c=p.water_vapor, cmap=blue_cmap, vmin=0.0)
    # color=cmap_c[t]) edgecolor='black')
    t += 1
    return old
# --- animation ----
# - setup graphs
# plot_image_lines(time)
set_graph_lim()
gif = True
gif = False
repeat = False if gif else True
# num_t = 4
# ax.view_init(90, 0)
t = 0
# debug
# p = particles[ (particles.timestep == 0) ]
# old = set_old(p)
# ani = animation.FuncAnimation(fig, updateFig, interval=frame_delay_ms,
# frames=num_t-1,repeat=repeat)
# ax.scatter()
ax.scatter(particles.timestep, particles.identifier, particles.temperature,
cmap=discrete_cmap, c=particles.identifier)
# marker='.', edgecolor='black')
# color=cmap_c[0])
i = 0
# print(len(ax.get_yticklabels()))
# ax.get_xaxis().set_visible(False)
# ax.get_yaxis().set_visible(False)
# sys.exit()
if (gif):
print("Gif!")
# ani.save('test.gif', writer=animation.PillowWriter, fps=None,dpi=20) # fps was 5
# ani.save('test.gif', writer=animation.ImageMagickWriter, fps=None) # fps was 5
else:
plt.tight_layout()
plt.show()
print("Fin!")
|
11,716 | b9acea13ba6bbf15d39c5711be3ebbe56598222b | from cs50 import get_string
# Get user input and greet the user.
s = get_string("What is your name? ")
# f-string instead of string concatenation (same output).
print(f"Hello, {s}")
11,717 | d0ce40be3ddeece19c9dfc2262327a13afb1072f | from django.shortcuts import render,redirect
from pharmacy.models import Medicine,Order
from django.contrib import messages
from patient.models import Bill
from .models import PlasmaProfile,DonorRequest
def searchMedicine(request):
    """List every medicine for a logged-in donor."""
    if request.user.type != "Donor":
        # Mark any queued flash messages as consumed before adding ours.
        pending = messages.get_messages(request)
        pending.used = True
        messages.info(request, 'Not Allowed. Please Re-Login')
        return redirect('/')
    medicines = Medicine.objects.all()
    context = {'obj': medicines, 'len': len(medicines)}
    return render(request, "Donor/searchMedicine.html", context)
def filterMedicine(request):
    """Filter medicines by optional max cost, latest delivery and name prefix.

    Empty (or missing) GET parameters are ignored.  With no filters at
    all, every medicine is returned -- the original nested branches left
    ``obj`` as ``None`` in that case and crashed on ``len(None)``; a
    missing parameter (``None``) was also passed straight into the query.
    """
    if request.user.type != "Donor":
        storage = messages.get_messages(request)
        storage.used = True
        messages.info(request, 'Not Allowed. Please Re-Login')
        return redirect('/')
    cost = request.GET.get('cost')
    delivery = request.GET.get('expected_delivery')
    name = request.GET.get('name')
    # Build the filter incrementally instead of 2^3 nested branches.
    filters = {}
    if cost:
        filters['cost__lte'] = cost
    if delivery:
        filters['expected_delivery__lte'] = delivery
    if name:
        filters['name__startswith'] = name
    obj = Medicine.objects.filter(**filters)
    return render(request, "Donor/searchMedicine.html", {'obj': obj, 'len': len(obj)})
def placeOrder(request, mid):
    """Create an Order (and its Bill) for medicine *mid* on behalf of the donor.

    The order starts in "Pending" status; a bill for the medicine's full
    cost is attached to it.
    """
    if request.user.type != "Donor":
        storage = messages.get_messages(request)
        storage.used = True
        messages.info(request, 'Not Allowed. Please Re-Login')
        return redirect('/')
    medicine_obj = Medicine.objects.get(id=mid)
    description = "Purchased "+medicine_obj.name+" from "+medicine_obj.user.first_name
    bill = Bill.objects.create(user=request.user, amount=medicine_obj.cost,
                               description=description)
    # Creating the Order is the side effect; the instance itself is not
    # needed afterwards (the original bound it to an unused local).
    Order.objects.create(medicine=medicine_obj, ordered_by=request.user,
                         ordered_to=medicine_obj.user, status="Pending",
                         billing=bill)
    storage = messages.get_messages(request)
    storage.used = True
    messages.info(request, 'Order Placed Successfully')
    return redirect('/dashboard')
def yourOrder(request):
    """Show the donor's orders grouped by status, with per-group counts."""
    if request.user.type != "Donor":
        storage = messages.get_messages(request)
        storage.used = True
        messages.info(request, 'Not Allowed. Please Re-Login')
        return redirect('/')
    by_status = {
        status: Order.objects.filter(ordered_by=request.user, status=status)
        for status in ("Pending", "Accepted", "Rejected", "Shipped", "Cancelled")
    }
    context = {
        'pending': by_status["Pending"], 'accepted': by_status["Accepted"],
        'rejected': by_status["Rejected"], 'shipped': by_status["Shipped"],
        'cancelled': by_status["Cancelled"],
        'pl': len(by_status["Pending"]), 'al': len(by_status["Accepted"]),
        'rl': len(by_status["Rejected"]), 'sl': len(by_status["Shipped"]),
        'cl': len(by_status["Cancelled"]),
    }
    return render(request, "Donor/yourOrder.html", context)
def cancelOrder(request, oid):
    """Cancel order *oid*: keep a 25% cancellation fee and mark the order."""
    if request.user.type != "Donor":
        storage = messages.get_messages(request)
        storage.used = True
        messages.info(request, 'Not Allowed. Please Re-Login')
        return redirect('/')
    order_obj = Order.objects.get(id=oid)
    order_obj.status = "Cancelled"
    # Reduce the bill to 25% of the original amount as the fee.
    bill_obj = Bill.objects.get(id=order_obj.billing.id)
    bill_obj.amount = int(0.25 * bill_obj.amount)
    bill_obj.description += "-Cancelled"
    bill_obj.save()
    order_obj.save()
    storage = messages.get_messages(request)
    storage.used = True
    messages.info(request, 'Order Cancelled Successfully')
    return redirect('/dashboard')
def addDonorProfile(request):
    """Create (or update) the donor's plasma profile.

    GET renders the form (pre-filled when a profile exists); POST either
    creates a new PlasmaProfile or updates the existing one.
    """
    if request.user.type!="Donor":
        storage = messages.get_messages(request)
        storage.used = True
        messages.info(request,'Not Allowed. Please Re-Login')
        return redirect('/')
    if request.method=="POST":
        days_covid_negative = request.POST["days_covid_negative"]
        plasma_last_donated = request.POST["plasma_last_donated"]
        blood_group = request.POST["blood_group"]
        obj = PlasmaProfile.objects.filter(user=request.user)
        # NOTE(review): "photo" is read unconditionally, so a POST without
        # an upload raises MultiValueDictKeyError -- confirm the form
        # always requires a file.
        myfiles = request.FILES
        photo = myfiles["photo"]
        if len(obj)==0:
            # No profile yet: create one including the photo.
            PlasmaProfile.objects.create(user=request.user,days_covid_negative=days_covid_negative,plasma_last_donated=plasma_last_donated,blood_group=blood_group,photo=photo)
            storage = messages.get_messages(request)
            storage.used = True
            messages.info(request,'Profile Created Successfully')
            return redirect('/dashboard')
        else:
            # NOTE(review): on this update path the uploaded photo is read
            # but never assigned to the existing profile -- confirm whether
            # photo updates are intended.
            obj[0].days_covid_negative = days_covid_negative
            obj[0].plasma_last_donated = plasma_last_donated
            obj[0].blood_group = blood_group
            obj[0].save()
            storage = messages.get_messages(request)
            storage.used = True
            messages.info(request,'Profile Updated Successfully')
            return redirect('/dashboard')
    profile = PlasmaProfile.objects.filter(user=request.user)
    if len(profile)==0:
        return render(request,"Donor/addDonorProfile.html")
    else:
        return render(request,"Donor/addDonorProfile.html",{'profile':profile[0]})
def viewRequest(request):
    """Show plasma-donation requests addressed to this donor, by status."""
    if request.user.type != "Donor":
        storage = messages.get_messages(request)
        storage.used = True
        messages.info(request, 'Not Allowed. Please Re-Login')
        return redirect('/')
    grouped = {
        status: DonorRequest.objects.filter(donor=request.user, status=status)
        for status in ("Pending", "Accepted", "Rejected")
    }
    return render(request, "Donor/viewRequest.html", {
        'pending': grouped["Pending"],
        'accepted': grouped["Accepted"],
        'rejected': grouped["Rejected"],
        'pl': len(grouped["Pending"]),
        'al': len(grouped["Accepted"]),
        'rl': len(grouped["Rejected"]),
    })
def acceptRequest(request, rid):
    """Mark donor-request *rid* as accepted and return to the dashboard."""
    if request.user.type != "Donor":
        storage = messages.get_messages(request)
        storage.used = True
        messages.info(request, 'Not Allowed. Please Re-Login')
        return redirect('/')
    donor_request = DonorRequest.objects.get(id=rid)
    donor_request.status = "Accepted"
    donor_request.save()
    storage = messages.get_messages(request)
    storage.used = True
    messages.info(request, 'Request Accepted')
    return redirect('/dashboard')
def rejectRequest(request, rid):
    """Mark donor-request *rid* as rejected and return to the dashboard."""
    if request.user.type != "Donor":
        storage = messages.get_messages(request)
        storage.used = True
        messages.info(request, 'Not Allowed. Please Re-Login')
        return redirect('/')
    donor_request = DonorRequest.objects.get(id=rid)
    donor_request.status = "Rejected"
    donor_request.save()
    storage = messages.get_messages(request)
    storage.used = True
    messages.info(request, 'Request Rejected')
    return redirect('/dashboard')
|
11,718 | 99234952493f97dc3caa1c6cf9232e1e92e13b3d | from os.path import join
from os import getcwd
from settings import APP_DIR
def app_path(*appends):
    # Join *appends* onto the configured application root (settings.APP_DIR).
    return join(APP_DIR, *appends)
def brick_path(*appends):
    # Path inside the application's "bricks" subdirectory.
    return app_path("bricks", *appends)
11,719 | e25c824282b2c856c97d42e946655429df9a8977 | import sys
assert sys.version_info >= (3, 5) # make sure we have Python 3.5+
from pyspark.sql import SparkSession, functions, types
spark = SparkSession.builder.appName('colour prediction').getOrCreate()
spark.sparkContext.setLogLevel('WARN')
assert spark.version >= '2.4' # make sure we have Spark 2.4+
from pyspark.ml import Pipeline
from pyspark.ml.feature import VectorAssembler, SQLTransformer
from pyspark.ml.regression import *
from pyspark.ml.evaluation import RegressionEvaluator
tmax_schema = types.StructType([
types.StructField('station', types.StringType()),
types.StructField('date', types.DateType()),
types.StructField('latitude', types.FloatType()),
types.StructField('longitude', types.FloatType()),
types.StructField('elevation', types.FloatType()),
types.StructField('tmax', types.FloatType()),
])
def main(inputs, output):
    """Train a GBT regressor predicting tmax from location, day-of-year and
    the previous day's tmax; save the fitted pipeline to *output*.

    *inputs* is a CSV path readable with ``tmax_schema``.
    """
    # read data
    data = spark.read.csv(inputs, schema=tmax_schema)
    # prepare train and validation set
    train, validation = data.randomSplit([0.75, 0.25])
    train = train.cache()
    validation = validation.cache()
    # transform date to day-of-year and self-join to attach yesterday's
    # tmax for the same station as an extra feature
    stm_yesterday_tmax = 'SELECT today.station as station, \
                            dayofyear(today.date) as dayofyear, \
                            today.latitude as latitude, \
                            today.longitude as longitude, \
                            today.elevation as elevation, \
                            today.tmax as tmax, \
                            yesterday.tmax as yesterday_tmax \
                        FROM __THIS__ as today \
                        INNER JOIN __THIS__ as yesterday \
                            ON date_sub(today.date, 1) = yesterday.date AND today.station = yesterday.station'
    transformer = SQLTransformer(statement=stm_yesterday_tmax)
    # input columns
    assembler = VectorAssembler(inputCols=['dayofyear', 'latitude', 'longitude', 'elevation', 'yesterday_tmax'], outputCol='features')
    # output column
    regressor = GBTRegressor(featuresCol='features', labelCol='tmax', maxIter=20, maxDepth=10)
    # pipeline
    pipeline = Pipeline(stages=[transformer, assembler, regressor])
    # train model
    model = pipeline.fit(train)
    # make predictions
    predictions = model.transform(validation)
    predictions.show()
    # evaluate model on the held-out 25%
    r2_evaluator = RegressionEvaluator(predictionCol='prediction', labelCol='tmax', metricName='r2')
    r2 = r2_evaluator.evaluate(predictions)
    rmse_evaluator = RegressionEvaluator(predictionCol='prediction', labelCol='tmax', metricName='rmse')
    rmse = rmse_evaluator.evaluate(predictions)
    # save model
    model.write().overwrite().save(output)
    # print score
    print("r2: %f" % (r2))
    print("rmse: %f" % (rmse))
if __name__ == '__main__':
inputs = sys.argv[1]
output = sys.argv[2]
main(inputs, output)
|
11,720 | e02e1fcca972d114d56ca1daefd017f33af0d88c | from flask_restful import Resource
from flask import Flask, request, send_from_directory
import os
class GetBundles(Resource):
    """Serve a bundle file, preferring a client-specific copy.

    Looks for ``bundles/<clientId>/<file>`` first, then falls back to
    ``bundles/<file>``; responds 404 when neither exists.
    """

    def get(self):
        file_name = request.args.get('file')
        client_id = request.args.get('clientId')
        # os.path.join keeps this portable: the original hard-coded "\\"
        # separators, which only resolve on Windows.
        here = os.path.realpath(os.path.dirname(__file__))
        base_dir_client = os.path.join(here, "bundles", client_id or "")
        base_dir_shared = os.path.join(here, "bundles")
        # send_from_directory also guards against path traversal outside
        # the base directory.
        if os.path.isfile(os.path.join(base_dir_client, file_name)):
            return send_from_directory(base_dir_client, file_name, as_attachment=True)
        if os.path.isfile(os.path.join(base_dir_shared, file_name)):
            return send_from_directory(base_dir_shared, file_name, as_attachment=True)
        return "FileNotFound", 404
|
11,721 | 665650a7b0ecdc6ef26df2b49e34ada7fe662f02 | # Django
from django.urls import path
# Views
from .views import ContactView
urlpatterns = [
path(
route="message/send/",
view=ContactView.as_view(),
name="send_message",
),
] |
11,722 | e1f1452b910495792af98d2a131f5c6e6547b5b0 | from django.shortcuts import render
# Create your views here.
def default_map(request):
    """Render the default map page."""
    return render(request, 'default.html', {})


def default_map2(request):
    """Render the alternate map page."""
    return render(request, 'mapa2.html', {})


def search(request):
    """Render the search map page."""
    # Removed leftover debug print of the whole request object.
    return render(request, 'mapa3.html', {})
11,723 | 718d5303ce13ffec5a6d6cd1fab149149fbb37e1 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
]
operations = [
migrations.CreateModel(
name='Author',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('firstname', models.CharField(max_length=40)),
('lastname', models.CharField(max_length=40)),
('affiliation', models.CharField(max_length=200)),
],
),
migrations.CreateModel(
name='Publication',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('pubtype', models.CharField(max_length=20, choices=[(b'Journal Article', b'article'), (b'White Paper', b'whitepaper'), (b'Other', b'other')])),
('doi', models.CharField(max_length=200, blank=True)),
('uri', models.URLField(max_length=500, blank=True)),
('title', models.CharField(max_length=500)),
('abstract', models.TextField(blank=True)),
('synopsis', models.TextField(blank=True)),
('volume', models.IntegerField(blank=True)),
('issue', models.IntegerField(blank=True)),
('startpage', models.IntegerField(blank=True)),
('endpage', models.IntegerField(blank=True)),
('date', models.DateField(null=True, blank=True)),
('contents', models.FileField(upload_to=b'', blank=True)),
],
),
migrations.CreateModel(
name='PublicationAuthors',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('order', models.IntegerField()),
('author', models.ForeignKey(to='library.Author')),
('publication', models.ForeignKey(to='library.Publication')),
],
),
migrations.CreateModel(
name='Source',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('sourcetype', models.CharField(blank=True, max_length=20, choices=[(b'Academic Journal', b'journal'), (b'Non-governmental Org.', b'NGO'), (b'Development Agency', b'agency'), (b'Institute', b'institute')])),
('name', models.CharField(max_length=200)),
('url', models.URLField()),
],
),
migrations.AddField(
model_name='publication',
name='authors',
field=models.ManyToManyField(to='library.Author', through='library.PublicationAuthors'),
),
migrations.AddField(
model_name='publication',
name='source',
field=models.ForeignKey(blank=True, to='library.Source', null=True),
),
]
|
11,724 | 21719097ea011687485f991a8726851d21eeb7b1 | from typing import List
class Solution:
    def fourSum(self, nums: List[int], target: int) -> List[List[int]]:
        """Return all unique quadruplets in *nums* summing to *target*.

        Sort first, fix the two smallest indices, then close the
        remaining pair with two pointers.  Duplicates are skipped at
        every level so each quadruplet appears exactly once.
        O(n^3) time, O(1) extra space (sorts *nums* in place).
        """
        if not nums:
            return []
        nums.sort()
        n = len(nums)
        quads: List[List[int]] = []
        for i in range(n - 3):
            if i > 0 and nums[i] == nums[i - 1]:
                continue  # skip duplicate first elements
            for j in range(i + 1, n - 2):
                if j > i + 1 and nums[j] == nums[j - 1]:
                    continue  # skip duplicate second elements
                want = target - nums[i] - nums[j]
                lo, hi = j + 1, n - 1
                while lo < hi:
                    pair = nums[lo] + nums[hi]
                    if pair == want:
                        quads.append([nums[i], nums[j], nums[lo], nums[hi]])
                        while lo < hi and nums[lo] == nums[lo + 1]:
                            lo += 1
                        lo += 1
                        while lo < hi and nums[hi] == nums[hi - 1]:
                            hi -= 1
                        hi -= 1
                    elif pair < want:
                        lo += 1
                    else:
                        hi -= 1
        return quads
if __name__ == '__main__':
    # Smoke test: expects the unique quadruplets of the list summing to 0.
    test = Solution()
    inputList = [-3,-2,-1,0,0,1,2,3]
    target = 0
    result = test.fourSum(inputList, target)
    print(result)
11,725 | 612e48421094f47183a638281187c9514f847827 | import sys
import numpy as np
import matplotlib.pyplot as plt
import cleverhans
from cleverhans.attacks_tf import fgsm
from cleverhans.utils_tf import batch_eval
from models import ResidualBlockProperties, ParsevalResNet, ResNet
from data_utils import Cifar10Loader, Dataset
import visualization
from visualization import compose, Viewer
import dirs
from training import train
import standard_resnets
dimargs = sys.argv[1:]
if len(dimargs) not in [0, 2]:
print("usage: train-wrn.py [<Zagoruyko-depth> <widening-factor>]")
zaggydepth, k = (28, 10) if len(dimargs) == 0 else map(int, dimargs)
print("Loading and preparing data...")
ds_test = Cifar10Loader.load_test()
print(Cifar10Loader.std)
print("Initializing model...")
top = 70
epss = [int(i**2+0.5) / 100 for i in np.arange(0, int(top**0.5+1)+0.5, 0.5)]
print(epss)
accuracieses = []
for parseval in [False, True]:
aggregation = 'convex' if parseval else 'sum'
resnet_ctor = ParsevalResNet if parseval else ResNet
from standard_resnets import get_wrn
model = standard_resnets.get_wrn(
zaggydepth,
k,
ds_test.image_shape,
ds_test.class_count,
aggregation=aggregation,
resnet_ctor=resnet_ctor)
saved_path = dirs.SAVED_MODELS
if parseval:
saved_path += '/wrn-28-10-p-t--2018-01-24-21-18/ResNet' # Parseval
else:
saved_path += '/wrn-28-10-t--2018-01-23-19-13/ResNet' # vanilla
model.load_state(saved_path)
cost, ev = model.test(ds_test)
accuracies = [ev['accuracy']]
for eps in epss[1:]:
print("Creating adversarial examples...")
clip_max = (
255 - np.max(Cifar10Loader.mean)) / np.max(Cifar10Loader.std)
n_fgsm = fgsm(
model.nodes.input,
model.nodes.probs,
eps=eps,
clip_min=-clip_max,
clip_max=clip_max)
images_adv, = batch_eval(
model._sess, [model.nodes.input], [n_fgsm],
[ds_test.images[:model.batch_size*64]],
args={'batch_size': model.batch_size},
feed={model._is_training: False})
adv_ds_test = Dataset(images_adv, ds_test.labels, ds_test.class_count)
cost, ev = model.test(adv_ds_test)
accuracies.append(ev['accuracy'])
accuracieses.append(accuracies)
print(accuracies)
def plot(epss, curves, names):
    """Plot accuracy-vs-epsilon curves, one line per named model."""
    plt.figure()
    plt.rcParams["mathtext.fontset"] = "cm"
    #plt.yticks(np.arange(0, 1, 0.05))
    axes = plt.gca()
    axes.grid(color='0.9', linestyle='-', linewidth=1)
    # accuracy is in [0, 1]; x-axis covers the sampled epsilon range
    axes.set_ylim([0, 1])
    axes.set_xlim([0, top/100])
    for c, n in zip(curves, names):
        plt.plot(epss, c, label=n, linewidth=2)
    # Raw string: "\e" in a normal literal is an invalid escape sequence
    # (DeprecationWarning on modern Python); the rendered label is unchanged.
    plt.xlabel(r"$\epsilon$")
    plt.ylabel("točnost")
    plt.legend()
    plt.show()
plot(epss, accuracieses, ["WRN-28-10-Parseval", "WRN-28-10"]) |
11,726 | e52d157ce7386ad77e044412aae682353ad4b695 | # client for the remote controlled bot
|
11,727 | 3375c68a2a729deac32011f2c8a256e77ed712fc | import pygame, sys
from intro import intro
from utils import *
from sprites import *
from pygame.locals import *
from random import randint
intro()
DISPLAYSURF = pygame.display.set_mode((800, 593))
pygame.display.set_caption("Kill the Baby!")
background = load_image("KTBbackground2.png")
BASIN = pygame.Rect((20, 391), (250, 180))
TOP_RIGHT = pygame.Rect((680, 20), (100, 100))
CENTER_RIGHT = pygame.Rect((680, 220), (100, 100))
BOTTOM_RIGHT = pygame.Rect((680, 420), (100, 100))
TOP_CENTER = pygame.Rect((560, 20), (100, 100))
CENTER = pygame.Rect((560, 220), (100, 100))
BOTTOM_CENTER = pygame.Rect((560, 420), (100, 100))
# Constants
BASIN_ITEM = 0
GARLIC_ITEM = 1
STAKE_ITEM = 2
FISH_ITEM = 3
SILVER_ITEM = 4
SWATTER_ITEM = 5
RAZOR_ITEM = 6
BASE_TYPE = 0
WERE_TYPE = 1
VAMP_TYPE = 2
TENGU_TYPE = 3
LARS_TYPE = 4
FULL_TIME = 6
# Vars
gameOver = False
currentBabySprite = BASEBABY
currentBabyType = None
message = ""
time = FULL_TIME
pygame.time.set_timer(USEREVENT+1, 1000)
# Decide if next baby is normal, or a monster
def getNextBabyType():
    """Randomly pick the next baby's type.

    Odds per 10 rolls: 2 werewolf, 2 vampire, 2 tengu, 1 Lars, 3 normal.
    """
    roll = randint(0, 9)
    if roll in (0, 1):
        return WERE_TYPE
    if roll in (3, 4):
        return VAMP_TYPE
    if roll in (5, 6):
        return TENGU_TYPE
    if roll == 2:
        return LARS_TYPE
    return BASE_TYPE
def getSprite(currentBabyType, time):
    """Return the sprite for the current baby type at countdown *time*.

    time == -1 means game over: the full monster form is shown (a random
    one for a normal baby).  Otherwise the monster is revealed gradually
    as the countdown approaches 0.
    """
    if time == -1:
        # game-over reveal
        if currentBabyType == LARS_TYPE:
            return LARS
        if currentBabyType == WERE_TYPE:
            return WEREBABY_FULL
        if currentBabyType == VAMP_TYPE:
            return VAMPBABY_FULL
        if currentBabyType == TENGU_TYPE:
            return TENGUBABY_FULL
        if currentBabyType == BASE_TYPE:
            # normal baby killed: show a random monster form
            prob = randint(0, 2)
            if prob == 0:
                return WEREBABY_FULL
            if prob == 1:
                return VAMPBABY_FULL
            if prob == 2:
                return TENGUBABY_FULL
    if currentBabyType == LARS_TYPE:
        # Lars always looks like Lars
        return LARS
    elif currentBabyType == WERE_TYPE:
        if time > 4:
            return BASEBABY
        elif time > 2:
            return BABY_FANGS
        elif time > 0:
            return WEREBABY_PART
        elif time == 0:
            return WEREBABY_FULL
    elif currentBabyType == VAMP_TYPE:
        if time > 4:
            return BASEBABY
        elif time > 2:
            return BABY_FANGS
        elif time > 0:
            return VAMPBABY_PART
        elif time == 0:
            return VAMPBABY_FULL
    elif currentBabyType == TENGU_TYPE:
        # tengu shows no fangs stage, only part/full reveal
        if time > 4:
            return BASEBABY
        elif time > 2:
            return BASEBABY
        elif time > 0:
            return TENGUBABY_PART
        elif time == 0:
            return TENGUBABY_FULL
    else:
        return BASEBABY
def clickHandler(babyType, item, time, currMessage, gameOver):
    """Apply *item* to the current baby and return (message, new_time).

    A returned time of FULL_TIME means the threat was resolved and a new
    baby spawns; -1 ends the game; any other value keeps the countdown.
    """
    if gameOver:
        # NOTE(review): returns the global currentBabyType in the time
        # slot, as the original did; unreachable in practice because the
        # main loop guards clicks with `not gameOver` -- confirm intent.
        return currMessage, currentBabyType
    if item == BASIN_ITEM:
        if babyType == BASE_TYPE:
            return "You've saved this young one!", FULL_TIME
        return "You've given this beast our Goddess's protection! You monster!", -1
    if item == GARLIC_ITEM:
        if babyType == VAMP_TYPE:
            return "It's a vampire!", time
        return "The baby sniffs the garlic, wholly apathetic.", time
    if item == STAKE_ITEM:
        if babyType == VAMP_TYPE:
            return "You've staked the vampire's heart!", FULL_TIME
        return "It wasn't a vampire and you staked it :(", time
    if item == FISH_ITEM:
        if babyType == TENGU_TYPE:
            return "It's a tengu! It hates the fish so much it flees.", FULL_TIME
        return "The baby does not appreciate the smell of fish.", time
    if item == SWATTER_ITEM:
        # Bug fix: the original returned only for monster types, so
        # swatting a normal baby fell through to the "Error" game-over
        # return at the bottom.
        return "You hit the baby with a fly swatter. It cries.", time
    if item == SILVER_ITEM:
        if babyType == WERE_TYPE:
            return "A silver bullet! You shoot the werewolf.", FULL_TIME
        if babyType != BASE_TYPE:
            return "You've shot it, but it isn't a werewolf", time
        return "You've killed that baby ;-;", -1
    if item == RAZOR_ITEM:
        if babyType == LARS_TYPE:
            return "You shaved its beard, the source of its power!", FULL_TIME
        if babyType == BASE_TYPE:
            return "You attack the baby with the razor. What is wrong with you?", -1
        return "The baby sees your razor but shows no fear.", time
    return "Error", -1
try:
mixer.music.load("data/music/babykiller01_Purity.ogg")
mixer.music.play(-1)
except:
pass
# Game loop
while True:
currentBabySprite = getSprite(currentBabyType, time)
for event in pygame.event.get():
if event.type == QUIT:
pygame.quit()
sys.exit()
# event.button means left-mouse
elif event.type == pygame.MOUSEBUTTONDOWN and event.button == 1 and not gameOver:
pos = pygame.mouse.get_pos()
if BASIN.collidepoint(pos):
message, time = clickHandler(currentBabyType, BASIN_ITEM, time, message, gameOver)
if time == FULL_TIME:
currentBabyType = None
# HERE
elif TOP_RIGHT.collidepoint(pos):
message, time = clickHandler(currentBabyType, GARLIC_ITEM, time, message, gameOver)
if currentBabyType == VAMP_TYPE:
currentBabySprite = VAMPBABY_PART
elif CENTER_RIGHT.collidepoint(pos):
message, time = clickHandler(currentBabyType, STAKE_ITEM, time, message, gameOver)
if time == FULL_TIME:
currentBabyType = None
elif BOTTOM_RIGHT.collidepoint(pos):
message, time = clickHandler(currentBabyType, RAZOR_ITEM, time, message, gameOver)
if time == FULL_TIME:
currentBabyType = None
elif TOP_CENTER.collidepoint(pos):
message, time = clickHandler(currentBabyType, FISH_ITEM, time, message, gameOver)
if time == FULL_TIME:
currentBabyType = None
elif CENTER.collidepoint(pos):
message, time = clickHandler(currentBabyType, SILVER_ITEM, time, message, gameOver)
if time == FULL_TIME:
currentBabyType = None
elif BOTTOM_CENTER.collidepoint(pos):
message, time = clickHandler(currentBabyType, SWATTER_ITEM, time, message, gameOver)
if time == FULL_TIME:
currentBabyType = None
elif event.type == pygame.KEYDOWN and gameOver:
message = "RESTART"
gameOver = False
time = FULL_TIME
elif event.type == USEREVENT+1:
if time >= 0:
time -= 1
if currentBabyType == None:
currentBabyType = getNextBabyType()
pygame.display.update()
DISPLAYSURF.blit(background, (0, 0))
DISPLAYSURF.blit(currentBabySprite, (300, 300))
if time == 0:
message = "The beasts claim this one!"
time = -1
if time == -1:
gameOver = True
if pygame.font:
font = pygame.font.Font(None, 36)
text = font.render(message, 1, (255, 0, 0))
textpos = text.get_rect(centerx=DISPLAYSURF.get_width()/2)
DISPLAYSURF.blit(text, textpos)
if time > -1:
text = font.render("Time: " + str(time), 1, (255, 0, 0))
textpos = text.get_rect(centerx=50)
DISPLAYSURF.blit(text, textpos)
background.blit(FISH, TOP_CENTER)
background.blit(GARLIC, TOP_RIGHT)
background.blit(BULLET, CENTER)
background.blit(STAKE, CENTER_RIGHT)
background.blit(SWAT, BOTTOM_CENTER)
background.blit(RAZOR, BOTTOM_RIGHT)
|
11,728 | 3ae662881f3ffdea5c79bdccf1fe7b51eb2ad7b7 | from tensorflow.keras.models import load_model
import threading
import numpy as np
import cv2 as cv
import pickle
import time
import os
CAMERA_PORT = 0
IMG_SIZE = 48
MODEL_DIR = './models/26-08-2020_22-33-45'
DATA_DIR = 'data'
CASCADE_CLASSIFIER_PATH = 'res/haarcascade_frontalface_default.xml'
ROI_BOX_COLOR = (255, 0, 0)
EMOTION_FONT_COLOR = (0, 255, 0)
FPS_FONT_COLOR = (0, 0, 255)
FONT = cv.FONT_HERSHEY_SIMPLEX
# this is a seperate thread that will handle capturing frames from the webcam
# the idea here is to implement double buffering so the main pipeline doesn't
# need to waste time waiting around for frames
class CaptureThread(threading.Thread):
    """Webcam reader using double buffering.

    ``A`` is the back buffer being written; ``B`` is the stable front
    buffer that consumers copy from while holding ``buffer_lock``.
    Set ``terminated = True`` to stop the thread and release the camera.
    """
    def __init__(self, port):
        super(CaptureThread, self).__init__()
        self.terminated = False
        self.buffer_lock = threading.Lock()
        self.cap = cv.VideoCapture(port)
        self.A = None
        self.B = None
        # thread starts itself on construction
        self.start()
    def run(self):
        while not self.terminated:
            ret, frame = self.cap.read()
            if ret:
                # write the data
                self.A = frame.copy()
                # swap the identifiers
                with self.buffer_lock:
                    self.A, self.B = self.B, self.A
            # brief pause to keep the thread stable
            time.sleep(0.001)
        self.cap.release()
# load up the model, along with the categories
model = load_model(MODEL_DIR)
categories = pickle.load(open(os.path.join(DATA_DIR, 'categories.pickle'), 'rb'))
# create the window that will display the frames
WINDOW_NAME = 'Emotion Analyzer'
cv.namedWindow(WINDOW_NAME)
# initialize the cascade classifier for detecting faces
face_cascade = cv.CascadeClassifier(CASCADE_CLASSIFIER_PATH)
# initialize the capture thread
cap = CaptureThread(CAMERA_PORT)
start = time.time()
# wait until the first frame is captured
while cap.B is None:
time.sleep(0.001)
while True:
# grab the most recent frame from the capture thread
with cap.buffer_lock:
img = cap.B.copy()
# only proceed if the frame was succeccfully retrieved
gray = cv.cvtColor(img, cv.COLOR_BGR2GRAY)
# look for faces
faces = face_cascade.detectMultiScale(gray, 1.3, 5)
for (x, y, w, h) in faces:
# draw the roi for the face
upperLeft = (x, y)
bottomRight = (x + w, y + h)
img = cv.rectangle(img, upperLeft, bottomRight, ROI_BOX_COLOR, 2)
# extract and prepare the roi for predictions
roi_gray = gray[y:y + h, x:x + w]
roi_gray = cv.resize(roi_gray, (IMG_SIZE, IMG_SIZE))
data = np.array([roi_gray]).reshape(-1, IMG_SIZE, IMG_SIZE, 1)
# get the prediction, draw it on the frame
emotion = categories[model.predict_classes(data)[0]]
cv.putText(img, emotion, (x, y - 10), FONT, 1, EMOTION_FONT_COLOR, 2)
end = time.time()
# write the current fps
processingTimeMs = (end - start) * 1000
fps = 1000 / processingTimeMs
cv.putText(img, str(round(fps, 2)), (0, 25), FONT, 1, FPS_FONT_COLOR, 3)
cv.imshow(WINDOW_NAME, img)
start = time.time()
key = cv.waitKey(1)
if key == ord("q"):
cap.terminated = True
cap.join()
break |
11,729 | b99e9dea430f0a0efca4d36d81279cfa88a9c4bf | from django.contrib import admin
from django.urls import path
from .views import vendas, novaVenda, listVendas, updateVenda
urlpatterns = [
path('', vendas, name='vendas'),
path('nova-venda/', novaVenda, name='nova_venda'),
path('lista-vendas/', listVendas, name='all_vendas'),
path('update/<int:id>', updateVenda, name='update_venda'),
]
|
11,730 | 90518a75795708830fca0192f1e3b3c07c8f447d |
# coding: utf-8

# Fetch GOOGL daily data from Yahoo and append a string 'dates' column.

# In[4]:

import pandas_datareader.data as web
import datetime

start = datetime.datetime(2013, 1, 1)
end = datetime.datetime(2016, 1, 27)
df = web.DataReader("GOOGL", 'yahoo', start, end)

# ISO date (first 10 chars of the timestamp string) for each row.
dates = [str(ts)[0:10] for ts in df.index]
df['dates'] = dates

# print() works on both Python 2 and 3; the original bare
# "print df.head()" statements are a SyntaxError on Python 3.
print(df.head())
print(df.tail())

# In[8]:

web.DataReader("1171.HK", 'yahoo', start, end)
|
11,731 | de8b33c46ea3aabc58586f09326df14036d725dd | #! /usr/bin/python
from math import sqrt
def prime(n):
    """Return True when *n* is prime (trial division up to sqrt(n))."""
    if n < 2:
        return False
    # n is prime iff no integer in [2, sqrt(n)] divides it.
    return all(n % i != 0 for i in range(2, int(sqrt(n)) + 1))
# Search quadratic n^2 + a*n + b (odd a, b in [-999, 999]) for the pair
# producing the most primes over n = 0..999.
# NOTE(review): this counts ALL prime values of the quadratic, not the
# longest *consecutive* run from n = 0 (what Project Euler 27 asks for)
# -- confirm which metric is intended.
aa = 0
bb = 0
cc = 0
for a in range(-999, 1000, 2):
    for b in range(-999, 1000, 2):
        c = 0
        for n in range(0, 1000):
            if prime(n * n + a * n + b):
                c = c + 1
        if c > cc:
            aa = a
            bb = b
            cc = c
# print() is valid on both Python 2 and 3; the original bare "print"
# statements were Python-2 only.  The leftover "print a" debug line
# (it only showed the final loop index) was removed.
print(aa, bb, cc)
|
11,732 | e9e69a0db6c549be2dcdb167c58b61fa592bc036 | from typing import *
class Solution:
    def canPartition(self, nums: List[int]) -> bool:
        """Return True if *nums* splits into two subsets of equal sum.

        Top-down subset-sum for target sum(nums) // 2, memoised on
        (index, remaining).  An odd total can never split evenly.
        """
        total = sum(nums)
        if total % 2:
            return False
        memo = {}

        def reachable(idx: int, remaining: int) -> bool:
            # Can some subset of nums[idx:] sum to exactly *remaining*?
            if remaining == 0:
                return True
            if idx >= len(nums):
                return False
            key = (idx, remaining)
            if key not in memo:
                take = nums[idx] <= remaining and reachable(idx + 1, remaining - nums[idx])
                memo[key] = take or reachable(idx + 1, remaining)
            return memo[key]

        return reachable(0, total // 2)
|
11,733 | d913e1a77c32da657f037a657439afc116fcdd08 | from django.apps import AppConfig
class AbstractbaseclassesConfig(AppConfig):
    """Django app configuration for the ``abstractbaseclasses`` app."""
    name = 'abstractbaseclasses'
|
11,734 | 36dcb5a8997fdd2b9c58a744929258e1aed9a50e | import json
import urllib.request
tuling_key='8c918d2e025d4dc1ada1313094dc8e9d'
api_url = "http://openapi.tuling123.com/openapi/api/v2"
def get_message(message, userid):
    """Send *message* to the Tuling chatbot API and return its text reply.

    *userid* identifies the conversation on the Tuling side.  Replies
    with result code 4003 are prefixed with "4003:".
    """
    req = {
        "perception":
        {
            "inputText":
            {
                "text": message
            },
            "selfInfo":
            {
                "location":
                {
                    "city": "嘉兴",
                    "province": "浙江",
                    "street": "昌盛南路1001号"
                }
            }
        },
        "userInfo":
        {
            "apiKey": tuling_key,
            "userId": userid
        }
    }
    payload = json.dumps(req).encode('utf8')
    http_post = urllib.request.Request(
        api_url, data=payload,
        headers={'content-type': 'application/json'})
    # Timeout so a stalled API call cannot hang the caller forever; the
    # context manager guarantees the connection is closed (the original
    # never closed the response and had no timeout).
    with urllib.request.urlopen(http_post, timeout=10) as response:
        response_dic = json.loads(response.read().decode('utf8'))
    results_code = response_dic['intent']['code']
    print(results_code)
    if results_code == 4003:
        results_text = "4003:%s" % response_dic['results'][0]['values']['text']
    else:
        results_text = response_dic['results'][0]['values']['text']
    return results_text
|
11,735 | ca4b53cd94a4cba91e6d30be3e6c88fed607391e | from django.db import models
class Pet(models.Model):
    """A pet available for adoption/sale."""
    name = models.CharField(max_length=50)
    age = models.IntegerField()
    available = models.BooleanField(default=True)
    image = models.ImageField()
    # DecimalField *requires* max_digits/decimal_places -- the original
    # bare DecimalField() fails Django's system checks (fields.E130/E132).
    # TODO(review): confirm this precision suits real price data.
    price = models.DecimalField(max_digits=10, decimal_places=2)
|
11,736 | b7e07ff745be4fd543bc06ea8c1b0973f4063b74 |
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Initial migration: creates the ``Type`` model, a one-to-one role tag
    (pharmacy / reception / doctor / patient) attached to the auth user.

    NOTE: applied migrations should not be edited by hand.
    """
    initial = True
    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]
    operations = [
        migrations.CreateModel(
            name='Type',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('types', models.CharField(choices=[('phr', 'Pharmacy'), ('rec', 'Reception'), ('doc', 'Doctor'), ('pat', 'Patient')], default='pat', max_length=3)),
                ('user', models.OneToOneField(default='', on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
        ),
    ]
|
11,737 | 0bc5dc46ce4dbd06520d3c081e042afde693ef99 | ############# E17 Decode a web page ############
#updated 12.08
from bs4 import BeautifulSoup
import requests
def decode(url):
    """Fetch *url* and print the text of every <h3> element on the page."""
    # Download the page and hand its HTML to BeautifulSoup's builtin parser.
    page_html = requests.get(url).text
    parsed = BeautifulSoup(page_html, 'html.parser')
    for heading in parsed.find_all("h3"):
        print(heading.text)

decode("https://www.vg.no/")
11,738 | 08d4b3081eb88d3ae61ed322cb8508f691b5f750 | #!/usr/bin/env python
"""Utils module for NSX SDK"""
import requests
import json
import sys
class HTTPClient(object):
"""HTTPClient have the following properties:
Attributes:
base_url: A string representing the base url.
login: A string representing login.
password: A string representing password
session: Session parameters for REST API calls
"""
def __init__(self, hostname, login, password):
self.base_url = "https://" + hostname
self.login = login
self.password = password
self.session = self._initialize_session()
def _initialize_session(self):
"""Initialize HTTP session with a basic configuration to consume
JSON REST API:
- Disable SSL verification
- Set HTTP headers to application/json
- Set authorization header
Returns:
Session: initiazed session
"""
session = requests.Session()
session.auth = (self.login, self.password)
session.verify = False
session.headers.update({'Accept': 'application/json'})
session.headers.update({'Content-type': 'application/json'})
return session
def request(self, method, path, body=None, headers=None):
"""Generic method to consume REST API Webservices
:param method: HTTP method
:param path: API resource path
:param body: HTTP request body
:param headers: Extra headers
:return: return description
:rtype: the return type description
"""
url = self.base_url + path
print "Method: " + method + ", URL: " + url
if body is not None:
print json.dumps(
json.loads(body),
sort_keys=True,
indent=4,
separators=(
',',
': '))
try:
response = self.session.request(
method,
url,
data=body,
headers=headers)
print "Status code: " + str(response.status_code)
return response
except requests.exceptions.HTTPError as exception:
print "HTTPError: " + exception
sys.exit(1)
except requests.exceptions.RequestException as exception:
print exception
sys.exit(1)
|
11,739 | c15d12ed6f4a865a05dcabb01febdfac499fd819 | import json
from datetime import datetime
from django.http import HttpResponse, HttpResponseRedirect, Http404
from rest_framework.views import APIView
from rest_framework.response import Response
from rest_framework import status
from usermgmt.models import Notification, FriendRequest
from usermgmt.serializers import NotificationSerializer
from events.models import EventRequestInvitation, Events, EventsAccess
from groups.models import Groups, GroupsRequestInvitation
class NotificationsView(APIView):
    """List all notifications of the requesting user, newest first."""
    model = Notification
    def get(self, request, *args, **kwargs):
        # Only the authenticated user's own notifications are returned.
        user = request.user
        notifications = Notification.objects.filter(user = user).order_by('-created_on')
        serializer = NotificationSerializer(notifications,many = True)
        return Response(serializer.data)
# URL-conf entry point.
notifications = NotificationsView.as_view()
class NotificationsCountView(APIView):
    """Return the unread-notification counter of the requesting user."""
    model = Notification
    def get(self, request, *args, **kwargs):
        """GET -> {"notifications_count": <int>} with HTTP 200."""
        user = request.user
        # The counter is denormalized onto the user record, avoiding a
        # COUNT(*) query on every poll (see the commented-out query below).
        notifications_count = user.notifications_count
        #notifications_count = Notification.objects.filter(user = user,read=False).count()
        data = {'notifications_count': notifications_count}
        # FIX: the original serialized `data` with json.dumps and immediately
        # json.loads-ed it back before handing it to Response -- a no-op
        # round-trip; DRF's Response accepts the dict directly.
        return Response(data, status=status.HTTP_200_OK)
# URL-conf entry point.
notifications_count = NotificationsCountView.as_view()
class NotificationDetailsView(APIView):
    """Fetch (and mark read) or delete one notification."""
    model = Notification
    def get_object(self, pk):
        # 404 instead of a 500 when the notification does not exist.
        try:
            return Notification.objects.get(pk=pk)
        except Notification.DoesNotExist:
            raise Http404
    def get(self, request, pk, format=None):
        """Return the notification plus its resolved target object; first
        read also marks it read and decrements the user's unread counter."""
        notification = self.get_object(pk)
        nobject = get_notification_object(notification.notification_type,notification.object_id)
        if not notification.read:
            notification.read = True
            notification.save()
            # Keep the denormalized unread counter in sync.
            user = request.user
            user.notifications_count -= 1
            user.save()
        serializer = NotificationSerializer(notification)
        data = {
            'notification': serializer.data,
            'nobject':nobject
        }
        return Response(data)
    def delete(self, request, pk, format=None):
        notification = self.get_object(pk)
        notification.delete()
        return Response(status=status.HTTP_200_OK)
# URL-conf entry point.
notification_details = NotificationDetailsView.as_view()
def get_notification_object(type, object_id):
    """Resolve a notification's target into a small dict for the frontend.

    :param type: notification type code (EURA/EURI/EUGA event invitations,
        UFRS friend requests, GURI/GURA group invitations).
        NOTE: the parameter shadows the builtin ``type``; the name is kept
        for backward compatibility with existing callers.
    :param object_id: primary key of the referenced object
    :return: dict with id/objecturl (and, per type, submiturl/status),
        or False for unknown type codes.
    """
    def _accept_status(obj):
        # Map the Y/N/None `accept` flag to A(ccepted)/R(ejected)/P(ending).
        if obj.accept == 'Y':
            return 'A'
        if obj.accept == 'N':
            return 'R'
        return 'P'

    if type in ('EURA', 'EURI', 'EUGA'):
        notificationobject = EventRequestInvitation.objects.get(id=object_id)
        if type == 'EUGA':
            # Granted-access notice: no action to submit, just a link.
            return {
                'id': str(notificationobject.id),
                'objecturl': notificationobject.get_object_url(),
            }
        # EURA submits a request, EURI accepts an invitation.
        submiturl = (notificationobject.get_submit_url() if type == 'EURA'
                     else notificationobject.get_accept_url())
        return {
            'id': str(notificationobject.id),
            'objecturl': notificationobject.get_object_url(),
            'submiturl': submiturl,
            'status': _accept_status(notificationobject),
        }
    if type == 'UFRS':
        notificationobject = FriendRequest.objects.get(id=object_id)
        return {
            'id': str(notificationobject.id),
            'objecturl': notificationobject.get_object_url(),
            'submiturl': notificationobject.get_submit_url(),
            # FriendRequest carries its own status field.
            'status': notificationobject.status,
        }
    if type in ('GURI', 'GURA'):
        notificationobject = GroupsRequestInvitation.objects.get(id=object_id)
        # GURI accepts an invitation, GURA submits a request.
        submiturl = (notificationobject.get_accept_url() if type == 'GURI'
                     else notificationobject.get_submit_url())
        return {
            'id': str(notificationobject.id),
            'objecturl': notificationobject.get_object_url(),
            'submiturl': submiturl,
            'status': _accept_status(notificationobject),
        }
    # Unknown notification type.
    return False
11,740 | 3f51bd09569a8db0111938b971a91bcd2fbddac1 | # This class is required in terms to apply specific colors to specific word
class SimpleGroupedColorFunc(object):
    """Callable mapping each word to its group's color.

    ``color_to_words`` maps a color to the words that should get it; any
    word outside every group falls back to ``default_color``.
    """
    def __init__(self, color_to_words, default_color):
        # Invert color -> [words] into a flat word -> color lookup table.
        lookup = {}
        for color, words in color_to_words.items():
            for word in words:
                lookup[word] = color
        self.word_to_color = lookup
        self.default_color = default_color

    def __call__(self, word, **kwargs):
        # Extra kwargs (font size, position, ...) are accepted and ignored.
        return self.word_to_color.get(word, self.default_color)
|
11,741 | d06d7aaaceed220c7ea76f56f142e49b97d855d1 | '''
AI Implemented Using A Recurrent Neural Network
'''
from ai.models.base_model import BaseModel
class RecNNet(BaseModel):
    """Chess-AI stub meant to be backed by a recurrent neural network."""
    def __init__(self, is_white=False):
        # Which side this model plays; defaults to black.
        self.is_white = is_white
    def _trained(self):
        # Stub: pretend a trained model is always available.
        return True
    def _train(self):
        # Stub: training is currently a no-op.
        return True
    def _predict_move(self, board):
        # TODO: real inference; for now just trace that it was called.
        print('got here')
|
11,742 | ee28711ba8b18f614ceaabadbc3d29bf8d0fe646 | ######### VBF
# CMS H->invisible signal samples: samples[name] = [DAS dataset path, ntuplizer options].
# Options carry the production cross-section (pb) and the trigger process name.
# NOTE(review): VBF M110 is the only entry without useLHEWeights=True -- confirm intentional.
samples['VBF_HToInvisible_M110'] = ['/VBF_HToInvisible_M110_13TeV_powheg_pythia8/RunIISpring16MiniAODv2-PUSpring16RAWAODSIM_reHLT_80X_mcRun2_asymptotic_v14-v1/MINIAODSIM',['addQCDPDFWeights=True','isSignalSample=True','addGenParticles=True','crossSection=4.434e+00','triggerName=HLT2']]
samples['VBF_HToInvisible_M125'] = ['/VBF_HToInvisible_M125_13TeV_powheg_pythia8/RunIISpring16MiniAODv2-PUSpring16RAWAODSIM_reHLT_80X_mcRun2_asymptotic_v14-v1/MINIAODSIM',['useLHEWeights=True','addQCDPDFWeights=True','isSignalSample=True','addGenParticles=True','crossSection=3.782','triggerName=HLT2']]
samples['VBF_HToInvisible_M150'] = ['/VBF_HToInvisible_M150_13TeV_powheg_pythia8/RunIISpring16MiniAODv2-PUSpring16RAWAODSIM_reHLT_80X_mcRun2_asymptotic_v14-v1/MINIAODSIM',['useLHEWeights=True','addQCDPDFWeights=True','isSignalSample=True','addGenParticles=True','crossSection=3.239e+00','triggerName=HLT2']]
samples['VBF_HToInvisible_M200'] = ['/VBF_HToInvisible_M200_13TeV_powheg_pythia8/RunIISpring16MiniAODv2-PUSpring16RAWAODSIM_reHLT_80X_mcRun2_asymptotic_v14-v1/MINIAODSIM',['useLHEWeights=True','addQCDPDFWeights=True','isSignalSample=True','addGenParticles=True','crossSection=2.282e+00','triggerName=HLT2']]
samples['VBF_HToInvisible_M300'] = ['/VBF_HToInvisible_M300_13TeV_powheg_pythia8/RunIISpring16MiniAODv2-PUSpring16RAWAODSIM_reHLT_80X_mcRun2_asymptotic_v14-v1/MINIAODSIM',['useLHEWeights=True','addQCDPDFWeights=True','isSignalSample=True','addGenParticles=True','crossSection=1.256e+00','triggerName=HLT2']]
samples['VBF_HToInvisible_M400'] = ['/VBF_HToInvisible_M400_13TeV_powheg_pythia8/RunIISpring16MiniAODv2-PUSpring16RAWAODSIM_reHLT_80X_mcRun2_asymptotic_v14-v1/MINIAODSIM',['useLHEWeights=True','addQCDPDFWeights=True','isSignalSample=True','addGenParticles=True','crossSection=7.580e-01','triggerName=HLT2']]
samples['VBF_HToInvisible_M500'] = ['/VBF_HToInvisible_M500_13TeV_powheg_pythia8/RunIISpring16MiniAODv2-PUSpring16RAWAODSIM_reHLT_80X_mcRun2_asymptotic_v14-v1/MINIAODSIM',['useLHEWeights=True','addQCDPDFWeights=True','isSignalSample=True','addGenParticles=True','crossSection=4.872e-01','triggerName=HLT2']]
samples['VBF_HToInvisible_M600'] = ['/VBF_HToInvisible_M600_13TeV_powheg_pythia8/RunIISpring16MiniAODv2-PUSpring16RAWAODSIM_reHLT_80X_mcRun2_asymptotic_v14-v1/MINIAODSIM',['useLHEWeights=True','addQCDPDFWeights=True','isSignalSample=True','addGenParticles=True','crossSection=3.274e-01','triggerName=HLT2']]
samples['VBF_HToInvisible_M800'] = ['/VBF_HToInvisible_M800_13TeV_powheg_pythia8/RunIISpring16MiniAODv2-PUSpring16RAWAODSIM_reHLT_80X_mcRun2_asymptotic_v14-v1/MINIAODSIM',['useLHEWeights=True','addQCDPDFWeights=True','isSignalSample=True','addGenParticles=True','crossSection=1.622e-01','triggerName=HLT2']]
######## GluGlu (gluon fusion production)
samples['GluGlu_HToInvisible_M110'] = ['/GluGlu_HToInvisible_M110_13TeV_powheg_pythia8/RunIISpring16MiniAODv2-PUSpring16RAWAODSIM_reHLT_80X_mcRun2_asymptotic_v14-v1/MINIAODSIM',['useLHEWeights=True','addQCDPDFWeights=True','isSignalSample=True','addGenParticles=True','crossSection=5.790e+01','triggerName=HLT2']]
samples['GluGlu_HToInvisible_M125'] = ['/GluGlu_HToInvisible_M125_13TeV_powheg_pythia8/RunIISpring16MiniAODv2-PUSpring16RAWAODSIM_reHLT_80X_mcRun2_asymptotic_v14-v1/MINIAODSIM',['useLHEWeights=True','addQCDPDFWeights=True','isSignalSample=True','addGenParticles=True','crossSection=4.520e+01','triggerName=HLT2']]
samples['GluGlu_HToInvisible_M150'] = ['/GluGlu_HToInvisible_M150_13TeV_powheg_pythia8/RunIISpring16MiniAODv2-PUSpring16RAWAODSIM_reHLT_80X_mcRun2_asymptotic_v14-v1/MINIAODSIM',['useLHEWeights=True','addQCDPDFWeights=True','isSignalSample=True','addGenParticles=True','crossSection=3.129e+01','triggerName=HLT2']]
samples['GluGlu_HToInvisible_M200'] = ['/GluGlu_HToInvisible_M200_13TeV_powheg_pythia8/RunIISpring16MiniAODv2-PUSpring16RAWAODSIM_reHLT_80X_mcRun2_asymptotic_v14-v1/MINIAODSIM',['useLHEWeights=True','addQCDPDFWeights=True','isSignalSample=True','addGenParticles=True','crossSection=1.694e+01','triggerName=HLT2']]
samples['GluGlu_HToInvisible_M300'] = ['/GluGlu_HToInvisible_M300_13TeV_powheg_pythia8/RunIISpring16MiniAODv2-PUSpring16RAWAODSIM_reHLT_80X_mcRun2_asymptotic_v14-v1/MINIAODSIM',['useLHEWeights=True','addQCDPDFWeights=True','isSignalSample=True','addGenParticles=True','crossSection=6.590e+00','triggerName=HLT2']]
samples['GluGlu_HToInvisible_M400'] = ['/GluGlu_HToInvisible_M400_13TeV_powheg_pythia8/RunIISpring16MiniAODv2-PUSpring16RAWAODSIM_reHLT_80X_mcRun2_asymptotic_v14-v1/MINIAODSIM',['useLHEWeights=True','addQCDPDFWeights=True','isSignalSample=True','addGenParticles=True','crossSection=3.160e+00','triggerName=HLT2']]
samples['GluGlu_HToInvisible_M500'] = ['/GluGlu_HToInvisible_M500_13TeV_powheg_pythia8/RunIISpring16MiniAODv2-PUSpring16RAWAODSIM_reHLT_80X_mcRun2_asymptotic_v14-v1/MINIAODSIM',['useLHEWeights=True','addQCDPDFWeights=True','isSignalSample=True','addGenParticles=True','crossSection=1.709e+00','triggerName=HLT2']]
samples['GluGlu_HToInvisible_M600'] = ['/GluGlu_HToInvisible_M600_13TeV_powheg_pythia8/RunIISpring16MiniAODv2-PUSpring16RAWAODSIM_reHLT_80X_mcRun2_asymptotic_v14-v1/MINIAODSIM',['useLHEWeights=True','addQCDPDFWeights=True','isSignalSample=True','addGenParticles=True','crossSection=1.001e+00','triggerName=HLT2']]
samples['GluGlu_HToInvisible_M800'] = ['/GluGlu_HToInvisible_M800_13TeV_powheg_pythia8/RunIISpring16MiniAODv2-PUSpring16RAWAODSIM_reHLT_80X_mcRun2_asymptotic_v14-v1/MINIAODSIM',['useLHEWeights=True','addQCDPDFWeights=True','isSignalSample=True','addGenParticles=True','crossSection=4.015e-01','triggerName=HLT2']]
######## WH (associated W production, hadronic W; note triggerName=HLT here)
samples['WminusH_HToInvisible_WToQQ_M110'] = ['/WminusH_HToInvisible_WToQQ_M110_13TeV_powheg_pythia8/RunIISpring16MiniAODv2-premix_withHLT_80X_mcRun2_asymptotic_v14-v1/MINIAODSIM',['useLHEWeights=True','addQCDPDFWeights=True','isSignalSample=True','addGenParticles=True','crossSection=8.587e-01','triggerName=HLT']]
samples['WminusH_HToInvisible_WToQQ_M150'] = ['/WminusH_HToInvisible_WToQQ_M150_13TeV_powheg_pythia8/RunIISpring16MiniAODv2-premix_withHLT_80X_mcRun2_asymptotic_v14-v1/MINIAODSIM',['useLHEWeights=True','addQCDPDFWeights=True','isSignalSample=True','addGenParticles=True','crossSection=3.498e-01','triggerName=HLT']]
samples['WminusH_HToInvisible_WToQQ_M200'] = ['/WminusH_HToInvisible_WToQQ_M200_13TeV_powheg_pythia8/RunIISpring16MiniAODv2-premix_withHLT_80X_mcRun2_asymptotic_v14-v1/MINIAODSIM',['useLHEWeights=True','addQCDPDFWeights=True','isSignalSample=True','addGenParticles=True','crossSection=1.124e-01','triggerName=HLT']]
samples['WminusH_HToInvisible_WToQQ_M300'] = ['/WminusH_HToInvisible_WToQQ_M300_13TeV_powheg_pythia8/RunIISpring16MiniAODv2-premix_withHLT_80X_mcRun2_asymptotic_v14-v1/MINIAODSIM',['useLHEWeights=True','addQCDPDFWeights=True','isSignalSample=True','addGenParticles=True','crossSection=2.376E-02','triggerName=HLT']]
samples['WminusH_HToInvisible_WToQQ_M400'] = ['/WminusH_HToInvisible_WToQQ_M400_13TeV_powheg_pythia8/RunIISpring16MiniAODv2-premix_withHLT_80X_mcRun2_asymptotic_v14-v1/MINIAODSIM',['useLHEWeights=True','addQCDPDFWeights=True','isSignalSample=True','addGenParticles=True','crossSection=7.309E-03','triggerName=HLT']]
samples['WminusH_HToInvisible_WToQQ_M500'] = ['/WminusH_HToInvisible_WToQQ_M500_13TeV_powheg_pythia8/RunIISpring16MiniAODv2-premix_withHLT_80X_mcRun2_asymptotic_v14-v1/MINIAODSIM',['useLHEWeights=True','addQCDPDFWeights=True','isSignalSample=True','addGenParticles=True','crossSection=2.796E-03','triggerName=HLT']]
samples['WminusH_HToInvisible_WToQQ_M600'] = ['/WminusH_HToInvisible_WToQQ_M600_13TeV_powheg_pythia8/RunIISpring16MiniAODv2-premix_withHLT_80X_mcRun2_asymptotic_v14-v1/MINIAODSIM',['useLHEWeights=True','addQCDPDFWeights=True','isSignalSample=True','addGenParticles=True','crossSection=1.232E-03','triggerName=HLT']]
samples['WminusH_HToInvisible_WToQQ_M700'] = ['/WminusH_HToInvisible_WToQQ_M700_13TeV_powheg_pythia8/RunIISpring16MiniAODv2-premix_withHLT_80X_mcRun2_asymptotic_v14-v1/MINIAODSIM',['useLHEWeights=True','addQCDPDFWeights=True','isSignalSample=True','addGenParticles=True','crossSection=5.990E-04','triggerName=HLT']]
samples['WminusH_HToInvisible_WToQQ_M800'] = ['/WminusH_HToInvisible_WToQQ_M800_13TeV_powheg_pythia8/RunIISpring16MiniAODv2-premix_withHLT_80X_mcRun2_asymptotic_v14-v1/MINIAODSIM',['useLHEWeights=True','addQCDPDFWeights=True','isSignalSample=True','addGenParticles=True','crossSection=3.133E-04','triggerName=HLT']]
samples['WplusH_HToInvisible_WToQQ_M110'] = ['/WplusH_HToInvisible_WToQQ_M110_13TeV_powheg_pythia8/RunIISpring16MiniAODv2-premix_withHLT_80X_mcRun2_asymptotic_v14-v1/MINIAODSIM',['useLHEWeights=True','addQCDPDFWeights=True','isSignalSample=True','addGenParticles=True','crossSection=1.335E+00','triggerName=HLT']]
samples['WplusH_HToInvisible_WToQQ_M150'] = ['/WplusH_HToInvisible_WToQQ_M150_13TeV_powheg_pythia8/RunIISpring16MiniAODv2-premix_withHLT_80X_mcRun2_asymptotic_v14-v1/MINIAODSIM',['useLHEWeights=True','addQCDPDFWeights=True','isSignalSample=True','addGenParticles=True','crossSection=5.037E-01','triggerName=HLT']]
samples['WplusH_HToInvisible_WToQQ_M200'] = ['/WplusH_HToInvisible_WToQQ_M200_13TeV_powheg_pythia8/RunIISpring16MiniAODv2-premix_withHLT_80X_mcRun2_asymptotic_v14-v1/MINIAODSIM',['useLHEWeights=True','addQCDPDFWeights=True','isSignalSample=True','addGenParticles=True','crossSection=1.899E-01','triggerName=HLT']]
samples['WplusH_HToInvisible_WToQQ_M300'] = ['/WplusH_HToInvisible_WToQQ_M300_13TeV_powheg_pythia8/RunIISpring16MiniAODv2-premix_withHLT_80X_mcRun2_asymptotic_v14-v1/MINIAODSIM',['useLHEWeights=True','addQCDPDFWeights=True','isSignalSample=True','addGenParticles=True','crossSection=4.348E-02','triggerName=HLT']]
samples['WplusH_HToInvisible_WToQQ_M400'] = ['/WplusH_HToInvisible_WToQQ_M400_13TeV_powheg_pythia8/RunIISpring16MiniAODv2-premix_withHLT_80X_mcRun2_asymptotic_v14-v1/MINIAODSIM',['useLHEWeights=True','addQCDPDFWeights=True','isSignalSample=True','addGenParticles=True','crossSection=1.582E-02','triggerName=HLT']]
samples['WplusH_HToInvisible_WToQQ_M500'] = ['/WplusH_HToInvisible_WToQQ_M500_13TeV_powheg_pythia8/RunIISpring16MiniAODv2-premix_withHLT_80X_mcRun2_asymptotic_v14-v1/MINIAODSIM',['useLHEWeights=True','addQCDPDFWeights=True','isSignalSample=True','addGenParticles=True','crossSection=5.825E-03','triggerName=HLT']]
samples['WplusH_HToInvisible_WToQQ_M600'] = ['/WplusH_HToInvisible_WToQQ_M600_13TeV_powheg_pythia8/RunIISpring16MiniAODv2-premix_withHLT_80X_mcRun2_asymptotic_v14-v1/MINIAODSIM',['useLHEWeights=True','addQCDPDFWeights=True','isSignalSample=True','addGenParticles=True','crossSection=2.709E-03','triggerName=HLT']]
samples['WplusH_HToInvisible_WToQQ_M700'] = ['/WplusH_HToInvisible_WToQQ_M700_13TeV_powheg_pythia8/RunIISpring16MiniAODv2-premix_withHLT_80X_mcRun2_asymptotic_v14-v1/MINIAODSIM',['useLHEWeights=True','addQCDPDFWeights=True','isSignalSample=True','addGenParticles=True','crossSection=1.383E-03','triggerName=HLT']]
samples['WplusH_HToInvisible_WToQQ_M800'] = ['/WplusH_HToInvisible_WToQQ_M800_13TeV_powheg_pythia8/RunIISpring16MiniAODv2-premix_withHLT_80X_mcRun2_asymptotic_v14-v1/MINIAODSIM',['useLHEWeights=True','addQCDPDFWeights=True','isSignalSample=True','addGenParticles=True','crossSection=7.545E-04','triggerName=HLT']]
#### ZH (associated Z production, hadronic Z)
samples['ZH_HToInvisible_ZToQQ_M110'] = ['/ZH_HToInvisible_ZToQQ_M110_13TeV_powheg_pythia8/RunIISpring16MiniAODv2-premix_withHLT_80X_mcRun2_asymptotic_v14-v1/MINIAODSIM',['useLHEWeights=True','addQCDPDFWeights=True','isSignalSample=True','addGenParticles=True','crossSection=1.309E+00','triggerName=HLT']]
samples['ZH_HToInvisible_ZToQQ_M150'] = ['/ZH_HToInvisible_ZToQQ_M150_13TeV_powheg_pythia8/RunIISpring16MiniAODv2-premix_withHLT_80X_mcRun2_asymptotic_v14-v1/MINIAODSIM',['useLHEWeights=True','addQCDPDFWeights=True','isSignalSample=True','addGenParticles=True','crossSection=5.279E-01','triggerName=HLT']]
samples['ZH_HToInvisible_ZToQQ_M200'] = ['/ZH_HToInvisible_ZToQQ_M200_13TeV_powheg_pythia8/RunIISpring16MiniAODv2-premix_withHLT_80X_mcRun2_asymptotic_v14-v1/MINIAODSIM',['useLHEWeights=True','addQCDPDFWeights=True','isSignalSample=True','addGenParticles=True','crossSection=2.054E-01','triggerName=HLT']]
samples['ZH_HToInvisible_ZToQQ_M300'] = ['/ZH_HToInvisible_ZToQQ_M300_13TeV_powheg_pythia8/RunIISpring16MiniAODv2-premix_withHLT_80X_mcRun2_asymptotic_v14-v1/MINIAODSIM',['useLHEWeights=True','addQCDPDFWeights=True','isSignalSample=True','addGenParticles=True','crossSection=4.132E-02','triggerName=HLT']]
samples['ZH_HToInvisible_ZToQQ_M400'] = ['/ZH_HToInvisible_ZToQQ_M400_13TeV_powheg_pythia8/RunIISpring16MiniAODv2-premix_withHLT_80X_mcRun2_asymptotic_v14-v1/MINIAODSIM',['useLHEWeights=True','addQCDPDFWeights=True','isSignalSample=True','addGenParticles=True','crossSection=1.273E-02','triggerName=HLT']]
samples['ZH_HToInvisible_ZToQQ_M500'] = ['/ZH_HToInvisible_ZToQQ_M500_13TeV_powheg_pythia8/RunIISpring16MiniAODv2-premix_withHLT_80X_mcRun2_asymptotic_v14-v1/MINIAODSIM',['useLHEWeights=True','addQCDPDFWeights=True','isSignalSample=True','addGenParticles=True','crossSection=5.256E-03','triggerName=HLT']]
samples['ZH_HToInvisible_ZToQQ_M600'] = ['/ZH_HToInvisible_ZToQQ_M600_13TeV_powheg_pythia8/RunIISpring16MiniAODv2-premix_withHLT_80X_mcRun2_asymptotic_v14-v1/MINIAODSIM',['useLHEWeights=True','addQCDPDFWeights=True','isSignalSample=True','addGenParticles=True','crossSection=2.544E-03','triggerName=HLT']]
samples['ZH_HToInvisible_ZToQQ_M800'] = ['/ZH_HToInvisible_ZToQQ_M800_13TeV_powheg_pythia8/RunIISpring16MiniAODv2-premix_withHLT_80X_mcRun2_asymptotic_v14-v1/MINIAODSIM',['useLHEWeights=True','addQCDPDFWeights=True','isSignalSample=True','addGenParticles=True','crossSection=1.364E-03','triggerName=HLT']]
## ggZH (gluon-induced ZH)
samples['ggZH_HToInvisible_ZToQQ_M125'] = ['/ggZH_HToInvisible_ZToQQ_M125_13TeV_powheg_pythia8/RunIISpring16MiniAODv2-premix_withHLT_80X_mcRun2_asymptotic_v14-v1/MINIAODSIM',['useLHEWeights=True','addQCDPDFWeights=True','isSignalSample=True','addGenParticles=True','crossSection=1.227E-01','triggerName=HLT']]
|
11,743 | 9dd1487222680274bc94246e300c4361aee5bbf5 | __author__ = 'Lee'
import Semiring, XSemiring, MatrixRing
def initial_matrix(cnf, words):
    """Build the (n x n) initial parse matrix for CKY-as-matrix-closure.

    Cell (i, j) with i == j + 1 holds the singleton parse set for word j,
    lifted into the X-semiring; every other cell holds the empty set.
    NOTE(review): assumes cnf.term_of maps a surface word to its terminal
    symbol -- confirm against the CNF module.
    """
    X = XSemiring.XSemiringFactory.constructor(cnf)
    n = len(words) + 1
    # Only the sub-diagonal (i = j + 1) carries lexical entries.
    f = lambda i,j: X.lift(set([])) if i != j + 1 else X.lift({(cnf.term_of(words[j]),)})
    return MatrixRing.Matrix.lift([f(i,j) for i in range(n) for j in range(n)])
if __name__ == "__main__":
    # Smoke test: parse a balanced-parenthesis string with a tiny CNF grammar.
    import CNF, math
    Grammar = CNF.Grammar
    grammar = Grammar()
    grammar.add_term('(', '(')
    grammar.add_term(')', ')')
    grammar.add_bin('X', '(', 'A')
    grammar.add_bin('A', 'X', ')')
    grammar.add_bin('A', 'A', 'A')
    grammar.add_bin('A', '(', ')')
    A = initial_matrix(grammar, "((()())())")
    #print A
    P = A
    # The matrix is stored flat, so its dimension is sqrt(len(item)).
    n = int(math.sqrt(len(A.item)))
    # Closure by repeated P := P + P*P; after ~n rounds P holds parses of
    # every span of the input.
    for i in range(int(math.sqrt(len(A.item)))):
        P = P + P*P
    #print P.item[P.idx(0,n-1)]
    # The (0, n-1) cell spans the whole input; print each parse tree found.
    for tree in P.item[P.idx(0,n-1)].item:
        print XSemiring.dot(tree)
|
11,744 | f63e6adc7d0f9c0893b87253c6616be6819dfce4 | n = int(input())
# Index candidate words by length; when lengths collide the later word wins.
m = {len(s): s for s in input().split()}
x = input()
# Scan lengths from len(x) down to 0 and print the first word whose length fits.
for v in range(len(x), -1, -1):
    if v in m:
        print(m[v])
        break
|
11,745 | a2f3115da8e6e0097de73bd31e2813c58e0a9110 | from microkubes.gateway import Registrator, KongGatewayRegistrator
from unittest import mock
import pook
def test_get_url():
    """_get_url joins the admin base with a path and encodes query args."""
    registrator = Registrator(gw_admin_url='http://kong:8001')
    # Plain join.
    assert registrator._get_url('/apis') == 'http://kong:8001/apis'
    # Redundant leading slashes are normalized away.
    assert registrator._get_url('////apis') == 'http://kong:8001/apis'
    # Query parameters are serialized onto the URL.
    assert registrator._get_url('/apis', {'id': '10', 'num': 22.3, 'str': 'other-string'}) == 'http://kong:8001/apis?id=10&num=22.3&str=other-string'
@mock.patch.object(Registrator, 'register_service')
def test_registrator_register(register_service_mock):
    """register() must delegate the service definition to register_service."""
    reg = Registrator(gw_admin_url='http://kong:8000')
    (reg.register(name='service_name', port=1001, host='the-host.lan', paths=['/match', '/match/again']))
    # BUG FIX: the original used `assert mock.called_once_with(...)`, which
    # merely auto-creates a child mock attribute and is therefore always
    # truthy -- the test could never fail.  assert_called_once_with actually
    # verifies the call and its arguments.
    # NOTE(review): payload shape taken from the original assertion -- confirm
    # it matches what Registrator.register really passes to register_service.
    register_service_mock.assert_called_once_with({
        'name': 'service_name',
        'port': 1001,
        'paths': ['/match', '/match/again'],
        'host': 'the-host.lan'
    })
@pook.on
def test_kong_gateway_add_api():
    """Registering an unknown API must POST a new definition to Kong."""
    # Kong reports the API as missing, so the registrator must create it.
    pook.get('http://kong:8001/apis/test-api', reply=404, response_json={'message': 'Not Found'})
    # Expected creation request (paths joined into a comma-separated `uris`
    # string) and Kong's canned 201 answer.
    (pook.post('http://kong:8001/apis/').json({
        'name': 'test-api',
        'uris': '/test,/api/test',
        'upstream_url': 'http://test.services.lan:8080'
    }).reply(201).json({
        'created_at': 1540213698704,
        'strip_uri': False,
        'id': '6af8aa24-b520-471a-bced-942e6cc023b6',
        'name': 'test-api',
        'http_if_terminated': True,
        'https_only': False,
        'upstream_url': 'http://test.services.lan:8080',
        'uris': [
            '/test',
            '/api/test'
        ],
        'preserve_host': False,
        'upstream_connect_timeout': 60000,
        'upstream_read_timeout': 60000,
        'upstream_send_timeout': 60000,
        'retries': 5
    }))
    reg = KongGatewayRegistrator(gw_admin_url='http://kong:8001')
    resp = reg.register(name='test-api', host='test.services.lan', port=8080, paths=['/test', '/api/test'])
    # Registration succeeded and returned the created API definition.
    assert resp is not None
@pook.on
def test_kong_gateway_update_api():
    """Registering an existing API must PATCH its definition in Kong."""
    # Kong already knows the API, so the registrator must update it.
    pook.get('http://kong:8001/apis/test-api', reply=200, response_json={
        'created_at': 1540213698704,
        'strip_uri': False,
        'id': '6af8aa24-b520-471a-bced-942e6cc023b6',
        'name': 'test-api',
        'http_if_terminated': True,
        'https_only': False,
        'upstream_url': 'http://test.services.lan:8080',
        'uris': [
            '/test',
            '/api/test'
        ],
        'preserve_host': False,
        'upstream_connect_timeout': 60000,
        'upstream_read_timeout': 60000,
        'upstream_send_timeout': 60000,
        'retries': 5
    })
    # Expected PATCH with the new paths/upstream and Kong's canned answer.
    (pook.patch('http://kong:8001/apis/test-api').json({
        'name': 'test-api',
        'uris': '/new,/new/test',
        'upstream_url': 'http://new-test.services.lan:8089'
    }).reply(200).json({
        'created_at': 1540213698704,
        'strip_uri': False,
        'id': '6af8aa24-b520-471a-bced-942e6cc023b6',
        'name': 'test-api',
        'http_if_terminated': True,
        'https_only': False,
        'upstream_url': 'http://new-test.services.lan:8089',
        'uris': [
            '/new',
            '/new/test'
        ],
        'preserve_host': False,
        'upstream_connect_timeout': 60000,
        'upstream_read_timeout': 60000,
        'upstream_send_timeout': 60000,
        'retries': 5
    }))
    reg = KongGatewayRegistrator(gw_admin_url='http://kong:8001')
    resp = reg.register(name='test-api', host='new-test.services.lan', port=8089, paths=['/new', '/new/test'])
    # The updated definition is returned and reflects the new values.
    assert resp is not None
    assert resp.get('uris') == ['/new', '/new/test']
    assert resp.get('upstream_url') == 'http://new-test.services.lan:8089'
|
def get_grid(filename):
    """Read a seating-layout file into a 2-D list of single-character cells.

    Only the three meaningful characters are kept -- "#" (occupied seat),
    "." (floor) and "L" (empty seat) -- so newlines and any stray
    characters are dropped, exactly like the original if/elif chain.
    """
    grid = []
    with open(filename) as f:
        for line in f:
            # One list cell per recognised character, in input order.
            grid.append([charac for charac in line if charac in "#.L"])
    return grid
def get_number_of_seats(grid):
    """Iterate the part-1 rules until the layout stabilizes; return the
    number of occupied seats in the fixed point."""
    candidate = create_grid_to_compare(grid)
    while not grids_are_equals(grid, candidate):
        # Advance one generation: the candidate becomes current.
        grid, candidate = candidate, create_grid_to_compare(candidate)
    return count_number_of_seats(candidate)
def get_number_of_seats_v2(grid):
    """Iterate the part-2 (line-of-sight) rules until the layout stabilizes;
    return the number of occupied seats in the fixed point."""
    candidate = create_grid_to_compare_v2(grid)
    while not grids_are_equals(grid, candidate):
        # Advance one generation: the candidate becomes current.
        grid, candidate = candidate, create_grid_to_compare_v2(candidate)
    return count_number_of_seats(candidate)
def grids_are_equals(grid, grid_to_compare):
    """Return True when every cell matches (grids are assumed same-shaped)."""
    for row_a, row_b in zip(grid, grid_to_compare):
        for cell_a, cell_b in zip(row_a, row_b):
            if cell_a != cell_b:
                return False
    return True
def count_number_of_seats(grid):
    """Return the total number of occupied ("#") cells in the grid."""
    return sum(row.count("#") for row in grid)
def create_grid_to_compare(grid):
    """Apply one round of the part-1 seating rules and return the new grid.

    Rules: an occupied seat with >= 4 occupied neighbours empties; an empty
    seat with no occupied neighbours fills; floor (".") never changes.

    Refactor: the eight hand-written neighbour checks (each duplicating the
    bounds logic) are replaced by a loop over offsets, matching the style
    already used by create_grid_to_compare_v2.
    """
    grid_to_compare = []
    for y in range(len(grid)):
        new_line = []
        for x in range(len(grid[y])):
            if grid[y][x] == ".":
                new_line += ["."]
                continue
            # Count occupied seats among the (up to 8) in-bounds neighbours.
            adjacent_numbers = 0
            for y_move in (-1, 0, 1):
                for x_move in (-1, 0, 1):
                    if x_move == 0 and y_move == 0:
                        continue
                    ny, nx = y + y_move, x + x_move
                    if 0 <= ny < len(grid) and 0 <= nx < len(grid[ny]) \
                            and is_occupied_seat(grid[ny][nx]):
                        adjacent_numbers += 1
            if is_occupied_seat(grid[y][x]) and adjacent_numbers >= 4:
                new_line += ["L"]
            elif not is_occupied_seat(grid[y][x]) and adjacent_numbers == 0:
                new_line += ["#"]
            else:
                new_line += [grid[y][x]]
        grid_to_compare += [new_line]
    return grid_to_compare
def is_occupied_seat_v2(grid, x, y, x_move, y_move):
    """Walk from (x, y) one step at a time in direction (x_move, y_move).

    Return True iff the first *seat* met along the ray is occupied ("#");
    walking off any edge or meeting an empty seat ("L") returns False.
    Floor cells (".") are skipped.
    """
    while True:
        x += x_move
        y += y_move
        if x < 0 or y < 0:
            # Walked past the top/left edge (negative indices would wrap).
            return False
        try:
            cell = grid[y][x]
        except IndexError:
            # Walked past the bottom/right edge.
            return False
        if cell == "L":
            return False
        if cell == "#":
            return True
        # Floor: keep walking along the ray.
def print_grid(grid):
    """Render the grid to stdout, one row per line (debug helper).

    Matches the original output exactly: each row ends with a newline and
    print() appends one more, leaving a blank line after the grid.
    """
    print("\n".join("".join(line) for line in grid) + "\n")
def create_grid_to_compare_v2(grid):
    """Apply one round of the part-2 rules and return the resulting grid.

    Part 2 looks at the first seat *visible* in each of the 8 directions
    (rays cast by is_occupied_seat_v2) and uses a tolerance of 5."""
    grid_to_compare = []
    for y in range(len(grid)):
        new_line = []
        for x in range(len(grid[y])):
            if grid[y][x] == ".":
                new_line += ["."]
            else:
                adjacent_numbers = 0
                #checking adjacent
                # Cast the 8 rays (skip the null direction 0,0).
                positions = [-1, 0, 1]
                for x_move in positions:
                    for y_move in positions:
                        if (x_move != 0 or y_move != 0) and is_occupied_seat_v2(grid, x, y, x_move, y_move):
                            adjacent_numbers += 1
                # Occupied seats seeing >= 5 occupied seats empty out; empty
                # seats seeing none become occupied; otherwise unchanged.
                if is_occupied_seat(grid[y][x]) and adjacent_numbers >= 5:
                    new_line += ["L"]
                elif not is_occupied_seat(grid[y][x]) and adjacent_numbers == 0:
                    new_line += ["#"]
                else:
                    new_line += [grid[y][x]]
        grid_to_compare += [new_line]
    return grid_to_compare
def is_occupied_seat(cell):
    """Return True when the cell holds an occupied seat ("#")."""
    return cell == "#"
def main():
    """Solve both parts of the seating puzzle for input.txt and print them."""
    print("---___---")
    grid = get_grid("input.txt")
    print("v1")
    # Part 1: adjacent-seat rules, tolerance 4.
    number_of_seats = get_number_of_seats(grid)
    print(number_of_seats)
    print("v2")
    # Part 2: line-of-sight rules, tolerance 5.
    number_of_seats = get_number_of_seats_v2(grid)
    print(number_of_seats)
if __name__ == "__main__":
    main()
11,747 | bf70c52c6c52d0d9ddc97731ed78b9a8eb536d9c | s = set("Hacker")
# Scratch experiments with set.intersection on various iterable arguments.
# print(s.intersection("Rank"))
# print(s.intersection(set(['R', 'a', 'n', 'k'])))
# NOTE: enumerate yields (index, char) tuples, so intersecting with a set of
# single characters always prints the empty set -- kept as a demonstration.
print(s.intersection(enumerate(['R', 'a', 'n', 'k'])))
# print(s.intersection({"Rank":1}))
# print(s & set("Rank"))
# Commented-out HackerRank solution: count words common to two lists.
# eng = int(input())
# english = input()
# fren = int(input())
# french = input()
# e = set(english.split())
# f = set(french.split())
# total = e.intersection(f)
# print(len(total))
11,748 | 79779f435112061d90409d34fc52337716e08c2f | #!/usr/bin/env python2
from __future__ import division
import signal
from gi.repository import Gtk, GObject
from settings import Settings
from googlemusic import GoogleMusic
from threads import DownloadThread, SearchThread
class Jenna(object):
    """GTK front-end for searching and downloading Google Music tracks."""
    def __init__(self, ):
        # Allow worker threads (search/download) to interact with GTK.
        GObject.threads_init()
        self.settings = Settings()
        self.api = GoogleMusic()
        # Load the Glade UI and wire its signals to the on_* methods below.
        builder = Gtk.Builder()
        builder.add_from_file('ui/main.glade')
        builder.connect_signals(self)
        # Cache widget references used throughout the app.
        self.loading_modal = builder.get_object('loadingModal')
        self.loading_modal_label = builder.get_object('loadingModalLabel')
        self.window = builder.get_object('mainWindow')
        self.notebook = builder.get_object('mainWindowNotebook')
        self.status_bar = builder.get_object('statusBar')
        self.status_bar_context_id = self.status_bar.get_context_id('search')
        self.preferences_dialog = builder.get_object('preferencesDialog')
        self.preferences_username_entry = builder.get_object('preferencesUsernameEntry')
        self.preferences_password_entry = builder.get_object('preferencesPasswordEntry')
        self.preferences_directory_chooser = builder.get_object('preferencesDirectoryChooser')
        self.search_entry = builder.get_object('searchEntry')
        self.track_list_store = builder.get_object('trackListStore')
        self.download_list_store = builder.get_object('downloadListStore')
        self.results_tree_view = builder.get_object('resultsTreeView')
        self.window.show_all()
    def on_preferences_dialog_show(self, dialog):
        # Pre-fill the dialog with the stored settings.
        self.preferences_username_entry.set_text(self.settings.get('username'))
        self.preferences_password_entry.set_text(self.settings.get('password'))
        self.preferences_directory_chooser.set_filename(self.settings.get('download_directory'))
    def on_preferences_ok_clicked(self, button):
        # Persist the edited settings; the password is base64-obfuscated
        # (NOT encryption) and only rewritten when it actually changed.
        username = self.preferences_username_entry.get_text()
        password = self.preferences_password_entry.get_text()
        download_directory = self.preferences_directory_chooser.get_filename()
        self.settings.set('username', username)
        if password != self.settings.get('password'):
            self.settings.set('password', password.encode('base64'))
        self.settings.set('download_directory', download_directory)
        self.preferences_dialog.hide()
    def on_preferences_cancel_clicked(self, button):
        self.preferences_dialog.hide()
    def on_main_show(self, window):
        # check for login details
        # make async
        # Logging in blocks the UI here; see the TODO above about async.
        self.status_bar.push(self.status_bar_context_id, 'Logging into Google Music...')
        try:
            self.api.login(self.settings.get('username'), self.settings.get('password').decode('base64'))
            self.status_bar.push(self.status_bar_context_id, 'Logged in!')
        except Exception, ex:
            # Surface the failure in the status bar instead of crashing.
            self.status_bar.push(self.status_bar_context_id, ex.message)
    def on_main_delete_window(self, *args):
        Gtk.main_quit(*args)
    def on_main_search_toolitem_clicked(self, button):
        # Switch to the search tab (page 0).
        self.notebook.set_current_page(0)
    def on_main_downloads_toolitem_clicked(self, button):
        # Switch to the downloads tab (page 1).
        self.notebook.set_current_page(1)
    def on_main_preferences_toolitem_clicked(self, button):
        self.preferences_dialog.show_all()
    def on_main_search_button_clicked(self, button):
        # Run the song search on a worker thread so the UI stays responsive.
        if self.search_entry.get_text() != '':
            search_thread = SearchThread(self, self.search_entry.get_text(), 'song')
            search_thread.start()
    def on_main_tracklist_activated(self, treeview, path, column):
        # Double-clicked result row: queue it in the downloads list and
        # start a background download immediately.
        store = treeview.get_model()
        treeiter = store.get_iter(path)
        # only append if not already in list
        row = self.download_list_store.append([
            store.get_value(treeiter, 1),
            store.get_value(treeiter, 2),
            store.get_value(treeiter, 3),
            store.get_value(treeiter, 5),
            store.get_value(treeiter, 7),
            store.get_value(treeiter, 8),
            0
        ])
        download_thread = DownloadThread(self, row)
        download_thread.start()
# Let Ctrl-C kill the process instead of being swallowed by the GTK loop.
signal.signal(signal.SIGINT, signal.SIG_DFL)
app = Jenna()
Gtk.main()
|
11,749 | b562028706fb79f75201bccf9b435d9f1a366b26 | from django import forms
from django.contrib.auth.forms import UserCreationForm
from .models import User,UserInfo
class UserForm(UserCreationForm):
    """Registration form backed by the ``UserInfo`` model.

    Extends Django's ``UserCreationForm`` (which supplies the matching
    ``password1``/``password2`` validation) with the extra profile fields.
    """

    class Meta:
        model = UserInfo
        fields = [
            'username', 'first_name', 'last_name', 'age', 'gender',
            'contact', 'email', 'password1', 'password2',
        ]
|
11,750 | 3a2f3ec527bcd4cf7520edd57cdb295552f4e5f1 | # encoding:utf-8
import urllib2
'''
# 判断有没有重定向
response = urllib2.urlopen("http://www.baidu.cn")
print response.geturl() == "http://www.baidu.cn"
'''
class RedirectHandler(urllib2.HTTPRedirectHandler):
    """urllib2 handler (Python 2) that logs HTTP 302 redirects as they happen."""
    def http_error_302(self, req, fp, code, msg, headers): # handle 302 redirect
        # Let the base class follow the redirect, then record the outcome.
        res = urllib2.HTTPRedirectHandler.http_error_302(self, req, fp, code, msg, headers)
        res.status = code # the status code that triggered the redirect
        res.newurl = res.geturl() # URL we ended up at
        print res.newurl,res.status # redirect target, 302
        return res
# Demo: opening baidu.cn triggers a redirect which the handler above prints.
opener = urllib2.build_opener(RedirectHandler)
opener.open("http://www.baidu.cn/")
11,751 | 2134a90751f7b58fee81bb1e5f09134571bfb216 | # coding=utf-8
#
import logging
from security_data.models.security_attribute import SecurityAttribute
from sqlalchemy.orm import sessionmaker
from security_data.utils.error_handling import (SecurityAttributeAlreadyExistError,
SecurityAttributeNotExistError)
class SecurityAttributeServices:
    """CRUD service layer for ``SecurityAttribute`` rows.

    Each public method opens its own SQLAlchemy session against ``self.db``
    and always closes it in ``finally``. Records are keyed by the pair
    (security_id_type, security_id).
    """

    # Columns returned by query(), in output order.
    _QUERY_COLUMNS = (
        "security_id_type", "security_id", "gics_sector", "gics_industry_group",
        "industry_sector", "industry_group", "bics_sector_level_1",
        "bics_industry_group_level_2", "bics_industry_name_level_3",
        "bics_sub_industry_name_level_4", "parent_symbol",
        "parent_symbol_chinese_name", "parent_symbol_industry_group",
        "cast_parent_company_name", "country_of_risk", "country_of_issuance",
        "sfc_region", "s_p_issuer_rating", "moody_s_issuer_rating",
        "fitch_s_issuer_rating", "bond_or_equity_ticker", "s_p_rating",
        "moody_s_rating", "fitch_rating", "payment_rank", "payment_rank_mbs",
        "bond_classification", "local_government_lgfv",
        "first_year_default_probability", "contingent_capital",
        "co_co_bond_trigger", "capit_type_conti_conv_tri_lvl",
        "tier_1_common_equity_ratio", "bail_in_capital_indicator",
        "tlac_mrel_designation", "classif_on_chi_state_owned_enterp",
        "private_placement_indicator", "trading_volume_90_days",
    )

    # Columns converted to float in query() output; everything else is str.
    _FLOAT_COLUMNS = frozenset((
        "first_year_default_probability",
        "tier_1_common_equity_ratio",
        "trading_volume_90_days",
    ))

    def __init__(self, db):
        self.logger = logging.getLogger(__name__)
        self.db = db

    @staticmethod
    def _record_key(security_attribute_info):
        """Human-readable "(type,id)" key used in log/error messages."""
        return "(" + security_attribute_info['security_id_type'] + "," + \
            security_attribute_info['security_id'] + ")"

    def delete_all(self):
        """Delete every SecurityAttribute row. Re-raises on failure."""
        # Create the session BEFORE the try: previously a sessionmaker failure
        # left `session` unbound and the finally clause raised NameError,
        # masking the original error.
        session = sessionmaker(bind=self.db)()
        try:
            session.query(SecurityAttribute).delete()
            session.commit()
        except Exception as e:
            self.logger.error("Failed to delete all records in SecurityAttribute")
            self.logger.error(e)
            raise
        finally:
            session.close()

    def create(self, security_attribute_info):
        """Insert a new record.

        Raises:
            SecurityAttributeAlreadyExistError: a record with the same
                (security_id_type, security_id) already exists.
        """
        session = sessionmaker(bind=self.db)()
        try:
            has_record = session.query(SecurityAttribute).filter_by(
                security_id_type=security_attribute_info['security_id_type'],
                security_id=security_attribute_info['security_id']).first() is not None
            if has_record:
                message = "Record " + self._record_key(security_attribute_info) + " already exists"
                self.logger.warning(message)  # .warn() is deprecated
                raise SecurityAttributeAlreadyExistError(message)
            session.add(SecurityAttribute(**security_attribute_info))
            session.commit()
            self.logger.info("Record " + self._record_key(security_attribute_info) + " added successfully")
        except SecurityAttributeAlreadyExistError:
            #-- avoid SecurityAttributeAlreadyExistError being captured by Exception
            raise
        except Exception as e:
            self.logger.error("Failed to add SecurityAttribute")
            self.logger.error(e)
            raise
        finally:
            session.close()

    def update(self, security_attribute_info):
        """Update an existing record with the supplied field values.

        Raises:
            SecurityAttributeNotExistError: no record matches the key.
        """
        session = sessionmaker(bind=self.db)()
        try:
            security_attribute_to_update = session.query(SecurityAttribute).filter_by(
                security_id_type=security_attribute_info['security_id_type'],
                security_id=security_attribute_info['security_id']).first()
            #-- throw error if the record does not exist
            if security_attribute_to_update is None:
                message = "Record " + self._record_key(security_attribute_info) + " not found"
                self.logger.warning(message)
                raise SecurityAttributeNotExistError(message)
            for key, value in security_attribute_info.items():
                setattr(security_attribute_to_update, key, value)
            session.commit()
            self.logger.info("Record " + self._record_key(security_attribute_info) + " updated successfully")
        except SecurityAttributeNotExistError:
            #-- avoid SecurityAttributeNotExistError being captured by Exception
            raise
        except Exception as e:
            self.logger.error("Failed to update SecurityAttribute")
            self.logger.error(e)
            raise
        finally:
            session.close()

    def query(self, params):
        """Return matching records as a list of dicts.

        Args:
            params: dict with 'security_id_type' and 'security_id'.

        Returns:
            list[dict]: one dict per row, keyed by _QUERY_COLUMNS; float
            columns are converted via float(), everything else via str().
        """
        session = sessionmaker(bind=self.db)()
        try:
            # Build the labelled column list from _QUERY_COLUMNS instead of
            # spelling out ~40 near-identical .label() expressions.
            entities = [getattr(SecurityAttribute, c).label(c) for c in self._QUERY_COLUMNS]
            security_attributes = session.query(*entities) \
                .filter(SecurityAttribute.security_id_type == params['security_id_type'],
                        SecurityAttribute.security_id == params['security_id']) \
                .order_by(SecurityAttribute.created_at)

            def model2dict(row):
                # Keyed off row.keys() so the output mirrors the query labels.
                return {
                    column: (float(getattr(row, column))
                             if column in self._FLOAT_COLUMNS
                             else str(getattr(row, column)))
                    for column in row.keys()
                }

            return [model2dict(t) for t in security_attributes]
        except Exception as e:
            self.logger.error("Error message:")
            self.logger.error(e)
            raise
        finally:
            session.close()
11,752 | 715225571cfe011d4e9a29b5b71e9129e6bf35d3 | #6-8
#6-8: three pet records (name + owner)
dog = {
    'name': 'pugmaw',
    'owner': 'ahri'
}
cat = {
    'name': 'neeko',
    'owner': 'khazix'
}
hamster = {
    'name': 'bard',
    'owner': 'lucian'
}
#6-9: collect the pets in a single list (literal instead of repeated append)
pets = [dog, cat, hamster]
# for pet in pets:
#     print(pet)
#6-10: people mapped to their favorite place
favorite_places = {
    'sam': 'japan',
    'amumu': 'egypt',
    'owen': 'home'
}
# for name, favorite_place in favorite_places.items():
#     print(name + "'s favorite place is : " + favorite_place)
#6-11: nested dict — city name -> info dict
cities = {
    'singapore': {
        'country': 'singapore',
        'population': 500000,
        'fact': 'it is a small country'
    },
    'akihabara': {
        'country': 'japan',
        'population': 100000,
        'fact': 'it is the land of otakus'
    },
    'johor bahru': {
        'country': 'malaysia',
        'population': 200000,
        'fact': 'it has alot of good food'
    }
}
# Print a one-line summary per city.
for city, info in cities.items():
    print(city + " is a city in " + info['country'] +
          " with a population of " + str(info['population']) +
          " and heres one fact : " + info['fact'])
11,753 | d35991d492f5ae4ef412aa1b6292ceb9d33dcd81 | from ._GuidanceStatus import *
|
11,754 | ed9cea65cee6e6294c94f4722ad81726e819a6e5 | # -*- coding: gbk -*-
import os
import os.path
import common
import sys
import info
# this function generater directory for each file and drag the files into their directory
def createFile(dir):
fileList = os.listdir(dir)
for filename in fileList:
dotPos = filename.find(".")
subdir = os.path.join(dir,filename[:dotPos])
fullfname = os.path.join(dir,filename)
if os.path.isdir(fullfname):
continue
newfname = os.path.join(subdir,filename)
if not os.path.exists(subdir):
os.mkdir(subdir)
if os.path.exists(newfname):
os.remove(newfname)
os.rename(fullfname,newfname)
def AutoPackFile():
    """CLI entry point: pack each file in sys.argv[1] into its own folder."""
    dirpath = sys.argv[1]
    dirpath = common.Path(dirpath)  # normalize via project helper
    info.DisplayInfo("开始拖文件")
    createFile(dirpath)
    info.DisplayInfo("拖文件完毕")
if __name__ == "__main__":
    # SafeRunProgram wraps the entry point (project-level error handling).
    common.SafeRunProgram(AutoPackFile)
    # dir = raw_input("请输入文件目录")
    # createFile(dir)
|
11,755 | 2aa31dd8411c016c7ddd8ab4ca46cc89d4372de6 | import pygame
pygame.init()
# Player start position and per-frame movement speed (pixels).
x = 400
y = 300
velocidade = 10
# Load background and player sprites.
fundo = pygame.image.load("fundo teste.png")
pers = pygame.image.load("pers.png")
janela = pygame.display.set_mode((800,600))
pygame.display.set_caption("Criando jogo com Python")
janela_aberta = True
# Main game loop: pump events, move the player with the arrow keys, redraw.
while janela_aberta :
    pygame.time.delay(50)  # crude ~20 FPS frame cap
    for event in pygame.event.get():
        if event.type == pygame.QUIT:
            janela_aberta = False
    comandos = pygame.key.get_pressed()
    if comandos[pygame.K_UP]:
        y -= velocidade
    if comandos[pygame.K_DOWN]:
        y += velocidade
    if comandos[pygame.K_LEFT]:
        x -= velocidade
    if comandos[pygame.K_RIGHT]:
        x += velocidade
    # Redraw background first, then the player, then flip the frame.
    janela.blit (fundo, (0,0))
    janela.blit (pers, (x,y))
    pygame.display.update()
pygame.quit()
11,756 | 45e4003d1b1564851d9f914b66bea578429473bc | #!python
#-*- encoding: utf-8 -*-
'''
A script to test RE for unicode (GBK)
'''
import re
import sys
# Python 2 hack: reload(sys) re-exposes setdefaultencoding so the non-ASCII
# literals below coerce implicitly. Avoid in new code.
reload(sys)
sys.setdefaultencoding('utf-8')
# Sample ppdai.com account-statistics HTML fragment used as regex input.
gbkstring = '''
<h3>拍拍贷统计信息</h3>
<p>历史统计</p>
<p>正常还清:62 次,逾期还清(1-15):0 次,逾期还清(>15):0 次 </p>
<p>
共计借入:<span class="orange">¥354,929</span>,
待还金额:<span class="orange">¥207,453.96</span>,
待收金额: <span class="orange">
¥357,804.52 </span>
</p>
'''
# Captures (in order): paid-off count, 1-15d overdue, >15d overdue,
# total borrowed, amount still owed, amount to collect.
pattern = re.compile('<h3>拍拍贷统计信息</h3>.*?<p>历史统计</p>.*?<p>正常还清:(\d+).*?次,逾期还清\(1-15\):(\d+).*?次,逾期还清\(>15\):(\d+).*次 </p>' +
                     '.*?共计借入:<span class="orange">¥(\S+)</span>.*?待还金额:<span class="orange">¥(\S+)</span>' +
                     '.*?待收金额: <span class="orange">.*?¥(\S+).*?</span>.*?</p>', re.S)
'''
items = re.findall(pattern, gbkstring)
if items != None and len(items) > 0:
    print items
else:
    print "No Match!"
'''
# Loan-detail page sample: mirror ("魔镜") rating plus a borrower-info table.
gbkstring2 = '''
<div class="lendDetailTab w1000center">
<div class="lendDetailTab_tabContent">
<!--魔镜-->
<h3>魔镜等级</h3>
<div class="waprMJ clearfix" style="position: relative;">
<div id="polar" style="height: 310px; width: 500px; float: left;"></div>
<div class="waprMjInfo" style="float: left; width: 300px; margin-left: 40px;">
<h3>什么是魔镜等级?</h3>
<p style="text-align: left;">
魔镜是拍拍贷自主开发的风险评估系统,
其核心是一系列基于大数据的风险模型。<br />
针对每一笔借款,风险模型会给出一个风险评分,以反应对其逾期率的预测。<br />
每一个评分区间会以一个字母评级的形式展示给借入者和借出者。从A到F,风险依次上升。<br />
<a href="http://help.ppdai.com/Home/List/12" target="_blank">了解更多</a>
</p>
<strong class="levelMJ" title="魔镜等级:AAA至F等级依次降低,等级越高逾期率越低。点击等级了解更多。">C</strong>
</div>
</div>
<!--魔镜结束-->
<h3>借款人相关信息</h3>
<p>基本信息:以下信息由借入者提供,拍拍贷未核实。如果您发现信息不实,请点此举报。拍拍贷核实后如果发现严重不符合事实,将扣信用分甚至取消借款资格。</p>
<table class="lendDetailTab_tabContent_table1">
<tr>
<th>借款目的</th>
<th>性别</th>
<th>年龄</th>
<th>婚姻情况</th>
<th>文化程度</th>
<th>住宅状况</th>
<th>是否购车</th>
</tr>
<tr>
<td>其他</td>
<td>男</td>
<td>36</td>
<td>已婚</td>
<td>大专</td>
<td>
有房 </td>
<td>是</td>
</tr>
</table>
'''
# JSON-ish snippet returned by the bid endpoint; only Amount is extracted.
bid_response_html = '''
{"Source":0,"ListingId":8672508,"Title":null,"Date":null,"UrlReferrer":"1","Money":0,"Amount":50,"Reason":null,"ValidateCode":null,"Listing":
'''
# Captures the seven borrower-info <td> cells in table order.
pattern_user_basic_info = re.compile('<div class="lendDetailTab w1000center">.*?<div class="lendDetailTab_tabContent">.*?'
                                     + '<table class="lendDetailTab_tabContent_table1">.*?<tr>.*?</tr>.*?<tr>.*?'
                                     + '<td>(\S*?)</td>.*?<td>(\S+)</td>.*?<td>(\S+?)</td>.*?<td>(\S+?)</td>.*?'
                                     + '<td>(.*?)</td>.*?<td>.*?(\S+).*?</td>.*?<td>(.*?)</td>.*?</tr>.*?</table>', re.S)
bid_response_pattern = re.compile('.*"ListingId":.*?"UrlReferrer":"1","Money":\d+,"Amount":(\d+),"', re.S)
'''
items = re.findall(pattern_user_basic_info, gbkstring2)
if items is not None:
    for item in items:
        print item[0]
m = re.search(pattern_user_basic_info, gbkstring2);
if m is None:
    print "Not Matched!"
else:
    print "%s,%s,%s,%s" % (m.group(1), m.group(2), m.group(3),m.group(4))
    other,gender, age, marriage,education_level, house, car = m.groups()
    print "%s,%s,%s,%s" % (other,gender, age, marriage)
url = 'http://invest.ppdai.com/loan/info?id=8314822'
loanidm = re.match('.*info\?id=(\d+)', url)
if loanidm is not None:
    loanid = int(loanidm.group(1))
    print loanid
actual_mountm = re.search(bid_response_pattern, bid_response_html)
if actual_mountm is None:
    print "Bid Response Pattern is not matched. Check it"
else:
    actual = int(actual_mountm.group(1))
    print "Actual Bid: %d" % (actual)
money = 50
referer = "http://invest.ppdai.com/bid/info?source=2&listingId=%d" % (loanid) + '%20%20%20%20&title=&date=12%20%20%20%20&' + "UrlReferrer=1&money=%d" % (money)
print referer
'''
if __name__ == '__main__':
    # Unicode regex sanity check: extract the university name prefix.
    university = u"西安电子科技大学创新学院"
    m = re.match(u"(\S+大学)\S+学院", university)
    if (m is not None):
        print "Matched: %s" % (m.group(1))
    else:
        print university
    # Loan-history table: outer pattern isolates the table body, inner
    # pattern extracts (listing id, rate %, amount, status, publish date).
    pattern_all_history_loan = re.compile('<p>历史借款</p>.*?<table class="lendDetailTab_tabContent_table1">.*?<tr>.*?</tr>(.*?)</table>', re.S);
    pattern_history_loan = re.compile('<tr>\s+<td>\s+(\d+).*?</td>.*?<td style="text-align: left">\s+<a href=".*?</a>\s+</td>\s+?<td>\s+?(\S+)%\s+?</td>\s+<td>.*?¥(\S+?)\s+</td>\s+<td>\s+(\S+?)\s+</td>\s+<td>\s+?(\S+)\s+?</td>\s+</tr>', re.S)
    #                                  + '<td>.*?(\d+/\d+/\d+).*?</td></tr>', re.S)
    #pattern_history_loan = re.compile('<tr>.*?<td>.*?(\d+).*?</td>.*?<td style="text-align: left">.*?<a href=".*?</a>.*?</td>.*?'
    #                                  + '<td>\s+?(\S+)%\s+?</td>.*?<td>.*?¥(\S+?).*?</td>.*?<td>.*?(\S+?).*?</td>.*?'
    #                                  + '<td>\s+?(\d+/\d+/\d+).*?</td></tr>', re.S)
    # Two-row loan-history sample table used as the pattern's test input.
    loan_history_html = '''
<p>历史借款</p>
<table class="lendDetailTab_tabContent_table1">
<tr>
<th>列表ID</th>
<th>标题</th>
<th>利率</th>
<th>金额</th>
<th>状态</th>
<th>已发布</th>
</tr>
<tr>
<td>
9873318
</td>
<td style="text-align: left">
<a href="/loan/info?id=9873318" target="_blank">pdu2517233537的应收款安全标</a>
</td>
<td>
8.02%
</td>
<td>
¥9,000
</td>
<td>
成功
</td>
<td>
2016/3/19
</td>
</tr>
<tr>
<td>
9870912
</td>
<td style="text-align: left">
<a href="/loan/info?id=9870912" target="_blank">pdu2517233537的应收款安全标</a>
</td>
<td>
7.12%
</td>
<td>
¥6,000
</td>
<td>
已撤回
</td>
<td>
2016/3/19
</td>
</tr>
</table>
'''
    m = re.search(pattern_all_history_loan, loan_history_html)
    if m is not None:
        print "L1 Matched!!!"
        html = m.group(1)
        #print html
        items = re.findall(pattern_history_loan, html)
        for item in items:
            print item[0],item[1], item[2], item[3], item[4];
    else:
        print "NOT Matched!!"
    # Debt ("负债") chart data embedded as a JS literal; extract the numbers.
    fuzai = '''name: '负债曲线',
data: [ 2774.2100,
2544.2900,
5544.2900,
5316.3900,
5082.2500,
13582.2500,
10810.1500,
8500.0000,
7854.2700,
7197.7800,
9997.7800,
]
}
'''
    pattern_history_loandetail_chart = re.compile("name: '负债曲线',\s+data: \[\s+(.*?)\s+\]\s+\}", re.S)
    m2 = re.search(pattern_history_loandetail_chart, fuzai)
    if (m2 is not None):
        m3 = re.findall('(\S+?),\s+', m2.group(1))
        for item in m3:
            print item
    else:
        print "FuZAi - not matched!"
11,757 | 4039b2b981e68c6857b2ea2c3d2ab5763408da6f | import numpy as np
# 3x5 grid of "pass"/"not pass" marks.
essay = np.array([["pass", "not pass", "not pass", "not pass", "pass"],
                  ["not pass", "not pass", "not pass", "not pass", "not pass"],
                  ["pass", "not pass", "pass", "not pass", "not pass"]])
# Vectorized count of "pass" entries. The old nested comprehension iterated
# the ndarray in Python and shadowed the builtin `str` as a loop variable.
u = int(np.count_nonzero(essay == "pass"))
print(u)
11,758 | 0416cd33c0714be554764f5b2f739aa82838e266 | import os
import re
import shutil
from rdflib import RDFS
import s5_enrichment_extractor as extractor
import utils
'''
E2: ENRICH ONTOLOGY WITH PROPERTY RANGES
Input: s5-ontology and freebase-s3-type
Output: e2-rdfs_range (triples to add RANGE to Properties in the ontology, obtained by scanning TYPE for type.property.expected_type)
'''
# Spark job configuration: all paths are local-filesystem URIs.
PROT = 'file://'
ROOTDIR = '/home/freebase/freebase-s5/'
INPUT_DATA = ROOTDIR + 's5-ontology'
INPUT_EXT = '/home/freebase/freebase-s3/freebase-s3-type'
# INPUT_EXT = '/home/freebase/freebase-s3/typetest'
TMPDIR = ROOTDIR + 'e2-rdfs_range-tmp'
OUTPUT = ROOTDIR + 'e2-rdfs_range'
# override this to enable caching Data
USE_CACHE = False
CACHED_DATA_TMP = ROOTDIR + 'cached-ontology-tmp'
CACHED_DATA = ROOTDIR + 'cached-ontology'
# Predicate scanned in TYPE data and the predicate emitted for the ontology.
LOOKUP_PRED = 'type.property.expected_type'
OUTPUT_PRED = RDFS.range
# regex for key filtering (select only properties)
IS_PROPERTY = r"^<fbo:([^>\.]+\.){2}[^>\.]+>$"
# Rewrites external keys from the <f:> namespace into <fbo:>.
EXT_KEY_MAPPING = {
    'pattern': r"^<f:",
    'replace': r"<fbo:"
}
if __name__ == "__main__":
    spark = utils.create_session("FB_S5_E2")
    sc = spark.sparkContext
    # Clear previous outputs so saveAsTextFile/move cannot collide.
    if os.path.exists(OUTPUT):
        os.remove(OUTPUT)
    if os.path.exists(TMPDIR):
        shutil.rmtree(TMPDIR)
    # Prefer the cached ontology RDD when caching is enabled and present.
    if USE_CACHE and os.path.exists(CACHED_DATA):
        data_rdd = utils.load_data(sc, PROT + CACHED_DATA)
    else:
        data_rdd = utils.load_data(sc, PROT + INPUT_DATA)
    ext_rdd = utils.load_data(sc, PROT + INPUT_EXT)
    results = extractor.run(data_rdd, ext_rdd, LOOKUP_PRED, OUTPUT_PRED, USE_CACHE, False, IS_PROPERTY, EXT_KEY_MAPPING) # caching and distinct false
    if USE_CACHE:
        # Persist the (possibly transformed) ontology RDD for later runs.
        data_tocache = results[0]
        data_tocache.repartition(1).saveAsTextFile(CACHED_DATA_TMP)
        shutil.move(f"{CACHED_DATA_TMP}/part-00000", CACHED_DATA)
        shutil.rmtree(CACHED_DATA_TMP)
    # Collapse the output to a single part file and move it into place.
    out = results[1]
    out.repartition(1).saveAsTextFile(TMPDIR)
    shutil.move(f"{TMPDIR}/part-00000", OUTPUT)
    shutil.rmtree(TMPDIR)
|
11,759 | 96fa812a7f3a24672371530d7984a43c5a8914f0 | import logging
from pathlib import Path
from typing import List
from pandas import DataFrame
from tabulate import tabulate
_log = logging.getLogger(__name__)
def fmt_to_table(scenarios: List[dict], tablefmt: str) -> str:
    """Render the scenario dicts as a text table via tabulate.

    Floats use two decimals; strings are centered. *tablefmt* is any
    tabulate format name (e.g. "plain", "fancy_grid").
    """
    return tabulate(scenarios,
                    headers="keys",
                    floatfmt=".2f",
                    stralign="center",
                    tablefmt=tablefmt)
def fmt_to_csv(scenarios: List[dict]) -> str:
    """Render the scenario dicts as CSV text.

    Column order follows the keys of the first scenario; floats are
    formatted with two decimals. The pandas index column is retained,
    matching the previous behaviour.
    """
    columns = list(scenarios[0].keys())
    # Passing `columns` to the DataFrame constructor already fixes the
    # column order; repeating it in to_csv() was redundant.
    return DataFrame(scenarios, columns=columns).to_csv(float_format="%.2f")
class ScenariosOutput:
    """Format a list of scenario dicts and write them to a file or stdout."""

    VALID_FORMATS = {"csv", "plain", "fancy_grid"}

    def __init__(self, scenarios: List[dict], out_dest, out_fmt):
        """Validate the format up front; raises ValueError on an unknown one."""
        if out_fmt not in self.VALID_FORMATS:
            raise ValueError("Unsupported format [{}]".format(out_fmt), self.VALID_FORMATS)
        self.scenarios = scenarios
        self.columns = list(self.scenarios[0].keys())
        self.out_dest = out_dest
        self.out_fmt = out_fmt

    @property
    def text(self):
        """The scenarios rendered in the configured output format."""
        if self.out_fmt == "csv":
            return fmt_to_csv(self.scenarios)
        return fmt_to_table(self.scenarios, self.out_fmt)

    def write(self):
        """Write the rendered text to stdout ("console") or to a file (UTF-8)."""
        if self.out_dest != "console":
            with Path(self.out_dest).open("wb") as fp:
                fp.write(self.text.encode("utf-8"))
        else:
            print(self.text)

    @classmethod
    def write_scenarios(cls, scenarios: List[dict], out_dest, out_fmt):
        """One-shot convenience: build an instance and write immediately."""
        ScenariosOutput(scenarios, out_dest, out_fmt).write()
|
11,760 | 5ce56f3144aeb6bc29ae3ac9ed1a85ccb3b2f921 | """Find nearest (most similar) word
"""
from scipy.spatial.distance import cdist
import numpy
import sys
from load_vectors import WordVectors
def search(wv, word):
if word not in wv.labidxmap:
print word, 'not in vocabulary'
cd = cdist(wv.vecs[wv.labidxmap[word]][numpy.newaxis, :], wv.vecs, 'cosine')
winner = [None, 2]
for i, d in enumerate(cd[0]):
if i==wv.labidxmap[word]:
continue
if d < winner[1]:
winner = [wv.labels[i], d]
return winner[0]
def mainloop(filename):
    """Interactive loop: load vectors from *filename*, then answer queries."""
    maxwords = 100000  # cap vocabulary size to bound memory
    wv = WordVectors(filename, maxwords)
    print 'Loaded'
    while True:
        word = raw_input('Enter word: ')
        winner = search(wv, word)
        if winner:
            print winner
if __name__=='__main__':
    # Usage: script.py <vectors-file>
    mainloop(sys.argv[1])
|
11,761 | 934b91cdb75871e61e86669c10053e12a22d153a | import os
# Print the contents of commands.txt.
# The original leaked the file handle and tested f.mode == 'r', which is
# always true for a file opened with mode "r".
with open("commands.txt", "r") as f:
    contents = f.read()
print(contents)
11,762 | d7102d3d45e1ef46a3b5f3eefa45e16b1bce9a02 | import findSimilarity
def main():
    """Prompt for two strings and print their Jaccard similarity."""
    # text_1 = 'Python is an interpreted high-level programming language for general-purpose programming. Created by Guido van Rossum and first released in 1991, Python has a design philosophy that emphasizes code readability, notably using significant whitespace.'
    # text_2 = 'Python is a general-purpose interpreted, interactive, object-oriented, and high-level programming language. It was created by Guido van Rossum during 1985- 1990. Like Perl, Python source code is also available under the GNU General Public License (GPL). This tutorial gives enough understanding on Python programming language.'
    text_1 = input("Enter first string : ")
    text_2 = input("Enter second string : ")
    # cosineDistance is the alternative metric (commented out below).
    similarity = findSimilarity.jaccardDistance(text_1, text_2)
    # similarity = findSimilarity.cosineDistance(text_1, text_2)
    print("Similarity is",similarity)
main()
11,763 | 458beb59d87fee30a1175b44664dd938f333ccb4 | import copy
import math
import time
# a = get_int()
def get_int():
    """Read one line and return it as an int."""
    return int(input())
# a = get_string()
def get_string():
    """Read one line and return it unchanged."""
    return input()
# a_list = get_int_list()
def get_int_list():
    """Read one space-separated line and return a list of ints."""
    return [int(x) for x in input().split()]
# a_list = get_string_list():
def get_string_list():
    """Read one space-separated line and return a list of tokens."""
    return input().split()
# a, b = get_int_multi()
def get_int_multi():
    """Read one space-separated line and return an int map (for unpacking)."""
    return map(int, input().split())
# a_list = get_string_char_list()
def get_string_char_list():
    """Read one line and return it as a list of single characters."""
    return list(str(input()))
# print("{} {}".format(a, b))
# for num in range(0, a):
# a_list[idx]
# a_list = [0] * a
'''
while (idx < n) and ():
idx += 1
'''
def main():
    """Count how many of the integers 000-999 appear as a three-character
    subsequence of the input digit string, and print that count."""
    _ = get_int()  # string length line; not needed by the scan below
    digits = [int(ch) for ch in get_string_char_list()]
    matches = 0
    for target in range(1000):
        # Digits of the candidate PIN, most significant first.
        wanted = (target // 100, (target // 10) % 10, target % 10)
        found = 0
        # Greedy left-to-right subsequence match is sufficient here.
        for d in digits:
            if found < 3 and d == wanted[found]:
                found += 1
        if found == 3:
            matches += 1
    print(matches)
if __name__ == '__main__':
    # Script entry point.
    main()
|
11,764 | c0aa418b50fbb66e33c2ad109cd461c1d47056c8 | import math
def snt(n):
    """Return True if n is prime ("so nguyen to"), else False.

    Trial division only needs to run up to sqrt(n); the old loop went to
    n//2 and also contained an unreachable ``break`` after ``return``.
    """
    if n < 2:
        return False
    for i in range(2, math.isqrt(n) + 1):
        if n % i == 0:
            return False
    return True
# Read n (unused below) and a line of integers; for each distinct prime in
# the input, print the prime and how many times it occurs, in first-seen order.
n = int(input())
s = input()
a = s.split()
tmp = []
# Keep only the prime values (duplicates retained).
for i in a:
    if(snt(int(i))):
        tmp.append(int(i))
# ok[j] marks index j as already counted under an earlier equal value.
ok = [False for i in range(len(tmp))]
for i in range(len(tmp)):
    if(ok[i] == True):
        continue
    dem = 1  # occurrence count for tmp[i]
    for j in range(i + 1, len(tmp), 1):
        if(tmp[i] == tmp[j]):
            ok[j] = True
            dem += 1
    print(tmp[i], dem)
11,765 | 95f3fb59c7607e0982b283eeabf6d54e62912f41 | from flask import Flask, redirect, render_template, request, escape
from flask_mysqldb import MySQL
app = Flask(__name__)
# MySQL connection settings for the local TODO database.
# NOTE(review): empty root password — fine for local dev only.
app.config['MYSQL_HOST'] = "localhost"
app.config["MYSQL_USER"] = "root"
app.config["MYSQL_PASSWORD"] = ""
app.config["MYSQL_DB"] = "TODO"
# DictCursor makes fetch results dicts keyed by column name.
app.config["MYSQL_CURSORCLASS"] = "DictCursor"
mysql = MySQL(app)
@app.route("/", methods=["GET", "POST"])
def index():
    """List todos (GET) or insert a new one (POST).

    Bug fixes: cursors are now always closed, and an empty table no longer
    makes the view return None (a 500 in Flask) — it renders an empty list.
    """
    if request.method == "POST":
        data = escape(request.form['input-data'])
        cur = mysql.connection.cursor()
        try:
            cur.execute("INSERT INTO todo(content)VALUE(%s)", [data])
            mysql.connection.commit()
            return redirect('/')
        except Exception:
            # Keep the original best-effort behaviour: show the form again
            # with a generic error instead of crashing.
            error = "May be incorrect data"
            return render_template("index.html", error=error)
        finally:
            cur.close()
    cur = mysql.connection.cursor()
    try:
        count = cur.execute("SELECT * FROM todo")
        result = cur.fetchall() if count > 0 else []
    finally:
        cur.close()
    return render_template("index.html", result=result)
@app.route("/delet/<int:id>")
def delete(id):
    """Delete the todo row with the given id, then return to the list.

    Uses a parameterized query (consistent with index's INSERT) instead of
    string formatting; the stray debug print was removed.
    """
    cur = mysql.connection.cursor()
    try:
        cur.execute("DELETE FROM todo WHERE id=%s", [id])
        mysql.connection.commit()
    finally:
        cur.close()
    return redirect('/')
@app.route('/update/<int:id>', methods=["POST","GET"])
def update(id):
    """Show (GET) or apply (POST) an edit form for one todo row.

    Bug fixes: parameterized SELECT instead of string formatting; a missing
    row now redirects home (previously `task` was unbound and the view
    crashed or returned None); the cursor is closed on every path, not
    just after a POST.
    """
    cur = mysql.connection.cursor()
    try:
        found = cur.execute("SELECT * FROM todo WHERE id=%s", [id])
        if found == 0:
            return redirect('/')
        task = cur.fetchone()
        if request.method == "POST":
            task_data = escape(request.form['input-data'])
            cur.execute("UPDATE todo SET content = %s WHERE id=%s", (task_data, id))
            mysql.connection.commit()
            return redirect('/')
        return render_template("update.html", task=task)
    finally:
        cur.close()
if __name__ == "__main__":
    # Development server only; disable debug in production.
    app.run(debug=True)
11,766 | 38f6f3a514a43133174777ef32a7f43af0f620a6 | ###########################
# 6.0002 Problem Set 1a: Space Cows
# Name:
# Collaborators:
# Time:
from ps1_partition import get_partitions
import time
#================================
# Part A: Transporting Space Cows
#================================
# Problem 1
def load_cows(filename):
    """
    Read the contents of the given file. Assumes the file contents contain
    data in the form of comma-separated cow name, weight pairs, and return a
    dictionary containing cow names as keys and corresponding weights as values.

    Parameters:
    filename - the name of the data file as a string

    Returns:
    a dictionary of cow name (string), weight (int) pairs
    """
    # The previous implementation returned a list of [name, weight-string]
    # pairs (seeded with a bogus [str, int] row) although this docstring —
    # and the other functions in this problem set — expect a dict with int
    # weights. It also never closed the file.
    cows = {}
    with open(filename) as f:
        for line in f:
            line = line.strip()
            if not line:  # tolerate a trailing blank line
                continue
            name, weight = line.split(',')
            cows[name] = int(weight)
    return cows
# Problem 2
def greedy_cow_transport(cows,limit=10):
    """
    Uses a greedy heuristic to determine an allocation of cows that attempts to
    minimize the number of spaceship trips needed to transport all the cows. The
    returned allocation of cows may or may not be optimal.
    The greedy heuristic should follow the following method:

    1. As long as the current trip can fit another cow, add the largest cow that will fit
        to the trip
    2. Once the trip is full, begin a new trip to transport the remaining cows

    Does not mutate the given dictionary of cows.

    Parameters:
    cows - a dictionary of name (string), weight (int) pairs
    limit - weight limit of the spaceship (an int)

    Returns:
    A list of lists, with each inner list containing the names of cows
    transported on a particular trip and the overall list containing all the
    trips
    """
    remaining = dict(cows)  # copy so the caller's dict is not mutated
    trips = []
    while remaining:
        trip, load = [], 0
        # Heaviest-first: take every cow that still fits in this trip.
        for name in sorted(remaining, key=remaining.get, reverse=True):
            if load + remaining[name] <= limit:
                trip.append(name)
                load += remaining[name]
        if not trip:
            # Every remaining cow exceeds the limit and can never ship.
            break
        for name in trip:
            del remaining[name]
        trips.append(trip)
    return trips
# Problem 3
def brute_force_cow_transport(cows,limit=10):
    """
    Finds the allocation of cows that minimizes the number of spaceship trips
    via brute force.  The brute force algorithm should follow the following method:

    1. Enumerate all possible ways that the cows can be divided into separate trips
        Use the given get_partitions function in ps1_partition.py to help you!
    2. Select the allocation that minimizes the number of trips without making any trip
        that does not obey the weight limitation

    Does not mutate the given dictionary of cows.

    Parameters:
    cows - a dictionary of name (string), weight (int) pairs
    limit - weight limit of the spaceship (an int)

    Returns:
    A list of lists, with each inner list containing the names of cows
    transported on a particular trip and the overall list containing all the
    trips
    """
    best = None
    for partition in get_partitions(list(cows)):
        # A partition is feasible only if every trip respects the limit.
        if all(sum(cows[name] for name in trip) <= limit for trip in partition):
            if best is None or len(partition) < len(best):
                best = partition
    return best
# Problem 4
def compare_cow_transport_algorithms():
    """
    Using the data from ps1_cow_data.txt and the specified weight limit, run your
    greedy_cow_transport and brute_force_cow_transport functions here. Use the
    default weight limits of 10 for both greedy_cow_transport and
    brute_force_cow_transport.
    
    Print out the number of trips returned by each method, and how long each
    method takes to run in seconds.

    Returns:
    Does not return anything.
    """
    # TODO: Your code here
    # Still unimplemented: depends on the two transport functions above
    # being completed first.
    pass
print(load_cows('ps1_cow_data.txt')) |
11,767 | bdd642672b327375bf25949c03034d638b65be4c | from django.contrib import admin
from .models import Track, Artist, Genre
class TrackAdmin(admin.ModelAdmin):
    """Admin list view for tracks, with derived artist/genre columns."""

    list_display = ['title', 'album', 'track_number', 'artist_names', 'genre_names', ]

    def artist_names(self, obj):
        """Comma-separated names of all artists on this track."""
        return ", ".join(artist.artist_name for artist in obj.artists.all())

    def genre_names(self, obj):
        """Comma-separated names of all genres on this track."""
        return ", ".join(genre.genre_name for genre in obj.genres.all())
class ArtistInLine(admin.StackedInline):
    """Inline editor for the Track<->Artist M2M through table."""
    model = Track.artists.through
class ArtistAdmin(admin.ModelAdmin):
    """Admin view for artists, with their track links editable inline."""
    list_display = ['artist_name']
    inlines = [ArtistInLine,]
class GenreInLine(admin.StackedInline):
    """Inline editor for the Track<->Genre M2M through table."""
    model = Track.genres.through
class GenreAdmin(admin.ModelAdmin):
    """Admin view for genres, with their track links editable inline."""
    # 'genre_name' matches the field accessed elsewhere in this module
    # (TrackAdmin.genre_names uses genre.genre_name) and parallels
    # ArtistAdmin's 'artist_name'; the previous 'name' would trigger
    # an admin.E108 check error if the model has no such field.
    list_display = ['genre_name']
    inlines = [GenreInLine,]
# Register the models with their customized admin classes.
admin.site.register(Track, TrackAdmin)
admin.site.register(Artist, ArtistAdmin)
admin.site.register(Genre, GenreAdmin)
11,768 | 828af5c777c38d3a368d04bd116365ee4a862405 | # There exist two zeroes: +0 (or just 0) and -0.
# Write a function that returns true if the input number is -0 and false otherwise (True and False for Python).
# In JavaScript/TypeScript, the input will be a number. In Python and Java, the input will be a float.
def is_negative_zero(n):
    """Return True iff *n* is IEEE-754 negative zero (-0.0), else False.

    Uses the sign bit via math.copysign rather than comparing str(n),
    and drops the redundant ``True if ... else False`` ternary.
    """
    import math
    return n == 0 and math.copysign(1.0, n) < 0.0
11,769 | 1a18d41e6b235b6ddea3e1d09ca5f6fbe32af7eb | from django.shortcuts import render, redirect
from . import models
from . import forms
from django.contrib.auth.decorators import login_required
# Create your views here.
@login_required(login_url="/userAuth/login/")
def pesanan(request):
    """List the logged-in user's active orders, newest first."""
    pesanan_user = models.Pesanan.objects.filter(user=request.user, aktif=True).order_by('-tgl_pesan')
    return render(request, "pesanan.html", {'semua_pesanan':pesanan_user})
@login_required(login_url="/userAuth/login/")
def bayar(request, id_pesanan):
    """Attach a payment proof to an existing order via FormBuktiBayar.

    GET renders the form bound to the order; a valid POST saves it and
    returns to the order list. (A leftover debug ``print()`` was removed.)
    """
    update_pesanan = models.Pesanan.objects.get(id=id_pesanan)
    form = forms.FormBuktiBayar(request.POST or None, request.FILES or None, instance=update_pesanan)
    context = {
        'form': form,
    }
    if request.method == 'POST':
        if form.is_valid():
            form.save()
            return redirect('novel:pesanan')
    return render(request, 'bayar.html', context)
@login_required(login_url="/userAuth/login/")
def pesan_novel(request, id_novel):
    """Create a new order for the given novel.

    GET renders the order form; a valid POST computes the total price
    (unit price x quantity) and saves the order for the current user.
    """
    form = forms.FormPesan(request.POST or None, request.FILES or None)
    context = {
        'form': form,
    }
    if request.method == 'POST' and form.is_valid():
        # Fetch the novel once instead of re-querying for the price and
        # again for the foreign key.
        novel = models.Novel.objects.get(id=id_novel)
        pesanan_baru = form.save(commit=False)
        pesanan_baru.user = request.user
        # price * quantity also covers quantity == 1, so the old special
        # case for jumlah_pesan == 1 was redundant.
        pesanan_baru.total_bayar = int(novel.harga) * int(request.POST['jumlah_pesan'])
        pesanan_baru.id_novel = novel
        pesanan_baru.save()
        return redirect('novel:pesanan')
    return render(request, 'buat_pesanan.html', context)
@login_required(login_url="/userAuth/login/")
def pesanan_selesai(request, id_pesanan):
    """Mark the order *id_pesanan* as finished (aktif=False), then return to the list."""
    order = models.Pesanan.objects.get(id=id_pesanan)
    order.aktif = False
    order.save()
    return redirect('novel:pesanan')
@login_required(login_url="/userAuth/login/")
def batalkan_pesanan(request, id_pesanan):
    """Cancel (delete) the order *id_pesanan*, then return to the order list."""
    matching = models.Pesanan.objects.filter(id=id_pesanan)
    matching.delete()
    return redirect('novel:pesanan')
11,770 | 6f7719614e149d16c553ad2d374ece78f0fdab8e | __author__ = 'Qiao Jin'
import json
# Build a compact context index: each pmid with a context gets a numeric
# ctx_id, evidence entries are annotated with that id, and both tables are
# written back out as JSON.
pmid2ctxid = {}
# Use context managers so the input handles are closed deterministically
# (the original json.load(open(...)) leaked the file objects).
with open('pmid2ctx.json') as f:
    pmid2ctx = json.load(f)
with open('evidence.json') as f:
    evidence = json.load(f)
indexed_evidence = []
indexed_contexts = []
for entry in evidence:
    pmid = entry['pmid']
    if pmid not in pmid2ctx:
        continue  # no context available for this pmid; skip the entry
    if pmid not in pmid2ctxid:
        # First occurrence of this pmid: assign the next sequential ctx_id.
        pmid2ctxid[pmid] = len(pmid2ctxid)
        indexed_contexts.append({'passage': pmid2ctx[pmid], 'ctx_id': pmid2ctxid[pmid]})
    entry['ctx_id'] = pmid2ctxid[pmid]
    indexed_evidence.append(entry)
with open('indexed_evidence.json', 'w') as f:
    json.dump(indexed_evidence, f, indent=4)
with open('indexed_contexts.json', 'w') as f:
    json.dump(indexed_contexts, f, indent=4)
|
11,771 | 4b0298ab510e7dfe5c8c09988566ebda27a840c8 | # Use the new CASA5 QAC smaller bench data to run the M100 combination
# Takes about 18-20' to run. Fills about 3.3GB but needs quite a bit more scratch space to run.
#
# curl http://admit.astro.umd.edu/~teuben/QAC/qac_bench5.tar.gz | tar zxf -
#
# some figures have been made to adjust the box so we can better compare the Compare and Feather
import os
pdir = 'M100qac'
ms1 = '../M100_aver_12.ms'
ms2 = '../M100_aver_7.ms'
tp1 = '../M100_TP_CO_cube.bl.image'
if False:
pdir = 'M100qac_43'
ms1 = '../data43/M100_aver_12.ms'
ms2 = '../data43/M100_aver_7.ms'
tp1 = '../data43/M100_TP_CO_cube.bl.image'
# some testing, to confirm that M100_combine2 in QAC mode gives the same results as without
# use qac_mom() instead
qac1 = True
# with 1745 as first channel (bl.image)
qac2 = True
# a better box size so all figures are the same .... (see M100_combine1.py how we got this)
box='219,148,612,579'
boxlist = QAC.iarray(box)
zoom={'blc': [boxlist[0],boxlist[1]],'trc': [boxlist[2],boxlist[3]]}
# QAC: start with a clean 'pdir' and do all the work inside
qac_begin(pdir)
qac_project(pdir)
os.chdir(pdir)
# Assert files exist
print("If you do not have the 3 QAC benchmark files, issue")
print("curl http://admit.astro.umd.edu/~teuben/QAC/qac_bench5.tar.gz | tar zxf -")
QAC.assertf(ms1)
QAC.assertf(ms2)
QAC.assertf(tp1)
# sanity
qac_stats(ms1)
qac_stats(ms2)
qac_stats(tp1)
#
listobs(ms1,listfile=ms1 + '.listobs',overwrite=True)
listobs(ms2,listfile=ms2 + '.listobs',overwrite=True)
#
os.system('rm -rf *m_mosaic.png')
au.plotmosaic(ms1,sourceid='0',doplot=True,figfile='12m_mosaic.png')
au.plotmosaic(ms2,sourceid='0',doplot=True,figfile='7m_mosaic.png')
#
plotms(vis=ms1,yaxis='wt',xaxis='uvdist',spw='0:200',coloraxis='spw',plotfile='12m_WT.png',showgui=True,overwrite=True)
plotms(vis=ms2,yaxis='wt',xaxis='uvdist',spw='0~1:200',coloraxis='spw',plotfile='7m_WT.png',showgui=True,overwrite=True)
ms3 = 'M100_combine_CO.ms'
# Concat and scale weights
os.system('rm -rf %s' % ms3)
if True:
concat(vis=[ms1,ms2], concatvis=ms3)
else:
concat(vis=[ms2], concatvis=ms3)
# In CASA
plotms(vis=ms3,yaxis='wt',xaxis='uvdist',spw='0~2:200',coloraxis='spw',plotfile='combine_CO_WT.png',showgui=True,overwrite=True)
plotms(vis=ms3,yaxis='amp',xaxis='uvdist',spw='', avgscan=True,
avgchannel='5000', coloraxis='spw',plotfile='M100_combine_uvdist.png',showgui=True,overwrite=True)
plotms(vis=ms3,yaxis='amp',xaxis='velocity',spw='', avgtime='1e8',avgscan=True,coloraxis='spw',avgchannel='5',
transform=True,freqframe='LSRK',restfreq='115.271201800GHz', plotfile='M100_combine_vel.png',showgui=True,overwrite=True)
### Define clean parameters
vis = ms3
prename = 'M100_combine_CO_cube' # or replace .ms by _cube # 'M100_combine_CO.ms'
imsize = 800
cell = '0.5arcsec'
minpb = 0.2
restfreq = '115.271202GHz' # this value needs to be consistent on how
outframe = 'LSRK'
spw = ''
if qac2:
width = '-5km/s'
start = '1745km/s'
else:
width = '5km/s'
start = '1400km/s'
nchan = 70
robust = 0.5
phasecenter = 'J2000 12h22m54.9 +15d49m15'
### Setup stopping criteria with multiplier for rms.
stop = 3.
### Make initial dirty image to get the rms in the line free channels
os.system('rm -rf '+prename+'_dirty.*')
tclean(vis=vis,
imagename=prename + '_dirty',
gridder='mosaic',
deconvolver='hogbom',
pbmask=minpb,
imsize=imsize,
cell=cell,
spw=spw,
weighting='briggs',
robust=robust,
phasecenter=phasecenter,
specmode='cube',
width=width,
start=start,
nchan=nchan,
restfreq=restfreq,
outframe=outframe,
veltype='radio',
restoringbeam='common',
mask='',
niter=0,
interactive=False)
### Find the peak in the dirty cube.
myimage = prename+'_dirty.image'
bigstat = imstat(imagename=myimage)
peak = bigstat['max'][0]
# print() function form: the rest of this script already uses it (see the
# benchmark-download messages above), and Python-2 print statements are
# syntax errors under CASA 6 / Python 3.
print('peak (Jy/beam) in dirty cube = '+str(peak))
# find the RMS of a line-free channel (should be around 0.011)
if qac2:
    chanstat1 = imstat(imagename=myimage,chans='3')
    chanstat2 = imstat(imagename=myimage,chans='65')
else:
    chanstat1 = imstat(imagename=myimage,chans='4')
    chanstat2 = imstat(imagename=myimage,chans='66')
rms1 = chanstat1['rms'][0]
rms2 = chanstat2['rms'][0]
rms = 0.5*(rms1+rms2)
print('rms (Jy/beam) in a channel = '+str(rms))
sidelobethresh = 2.0
noisethresh = 4.25
minbeamfrac = 0.3
lownoisethresh = 1.5
negativethresh = 0.0
os.system('rm -rf ' + prename + '.*')
tclean(vis=vis,
imagename=prename,
gridder='mosaic',
deconvolver='hogbom',
pbmask=minpb,
imsize=imsize,
cell=cell,
spw=spw,
weighting='briggs',
robust=robust,
phasecenter=phasecenter,
specmode='cube',
width=width,
start=start,
nchan=nchan,
restfreq=restfreq,
outframe=outframe,
veltype='radio',
restoringbeam='common',
mosweight=True,
niter=100000,
usemask='auto-multithresh',
threshold=str(stop*rms)+'Jy/beam',
sidelobethreshold=sidelobethresh,
noisethreshold=noisethresh,
lownoisethreshold=lownoisethresh,
minbeamfrac=minbeamfrac,
growiterations=75,
negativethreshold=negativethresh,
interactive=False,
pbcor=True)
#
# viewer('M100_combine_CO_cube.image',gui=True)
# note this should be referred to with the variable prename
# and the rest of the script as well.
myimage = 'M100_combine_CO_cube.image' # should be: prename + '.image'
# Re-measure the line-free channel rms on the cleaned cube.
if qac2:
    chanstat1 = imstat(imagename=myimage,chans='3')
    chanstat2 = imstat(imagename=myimage,chans='65')
else:
    chanstat1 = imstat(imagename=myimage,chans='4')
    chanstat2 = imstat(imagename=myimage,chans='66')
rms1 = chanstat1['rms'][0]
rms2 = chanstat2['rms'][0]
rms = 0.5*(rms1+rms2)
# print() function form for Python-3/CASA-6 compatibility and consistency
# with the print() calls used elsewhere in this script.
print('rms in a channel = '+str(rms))
chan_rms = [0,8,62,69] # to complement the '9~61'
if qac1:
qac_mom('M100_combine_CO_cube.image', chan_rms, rms=rms)
else:
os.system('rm -rf M100_combine_CO_cube.image.mom0')
immoments(imagename = 'M100_combine_CO_cube.image',
moments = [0],
axis = 'spectral',chans = '9~61',
mask='M100_combine_CO_cube.pb>0.3',
includepix = [rms*2,100.],
outfile = 'M100_combine_CO_cube.image.mom0')
os.system('rm -rf M100_combine_CO_cube.image.mom1')
immoments(imagename = 'M100_combine_CO_cube.image',
moments = [1],
axis = 'spectral',chans = '9~61',
mask='M100_combine_CO_cube.pb>0.3',
includepix = [rms*5.5,100.],
outfile = 'M100_combine_CO_cube.image.mom1')
os.system('rm -rf M100_combine_CO_cube.image.mom*.png')
imview (raster=[{'file': 'M100_combine_CO_cube.image.mom0',
'range': [-0.3,25.],'scaling': -1.3,'colorwedge': True}],
zoom=zoom,
out='M100_combine_CO_cube.image.mom0.png')
imview (raster=[{'file': 'M100_combine_CO_cube.image.mom1',
'range': [1440,1695],'colorwedge': True}],
zoom=zoom,
out='M100_combine_CO_cube.image.mom1.png')
imview (raster=[{'file': 'M100_combine_CO_cube.image.mom2',
'range': [0,50],'colorwedge': True}],
zoom=zoom,
out='M100_combine_CO_cube.image.mom2.png')
os.system('rm -rf M100_combine_CO_cube.pb.1ch')
imsubimage(imagename='M100_combine_CO_cube.pb',
outfile='M100_combine_CO_cube.pb.1ch',
chans='35')
os.system('rm -rf M100_combine_CO_cube.image.mom0.pbcor')
impbcor(imagename='M100_combine_CO_cube.image.mom0',
pbimage='M100_combine_CO_cube.pb.1ch',
outfile='M100_combine_CO_cube.image.mom0.pbcor')
#
imview (raster=[{'file': 'M100_combine_CO_cube.image.mom0',
'range': [-0.3,25.],'scaling': -1.3},
{'file': 'M100_combine_CO_cube.image.mom0.pbcor',
'range': [-0.3,25.],'scaling': -1.3}],
zoom=zoom)
os.system('rm -rf M100_combine_CO_cube.image.mom0.pbcor.png')
imview (raster=[{'file': 'M100_combine_CO_cube.image.mom0.pbcor',
'range': [-0.3,25.],'scaling': -1.3,'colorwedge': True}],
zoom=zoom,
out='M100_combine_CO_cube.image.mom0.pbcor.png')
qac_fits('M100_combine_CO_cube.image')
qac_fits('M100_combine_CO_cube.pb')
qac_fits('M100_combine_CO_cube.image.mom0')
qac_fits('M100_combine_CO_cube.image.mom0.pbcor')
qac_fits('M100_combine_CO_cube.image.mom1')
imhead(tp1, mode='get',hdkey='restfreq')
imhead('M100_combine_CO_cube.image',mode='get',hdkey='restfreq')
imregrid(imagename=tp1,
template='M100_combine_CO_cube.image',
axes=[0, 1],
output='M100_TP_CO_cube.regrid',
overwrite=True)
imsubimage(imagename='M100_TP_CO_cube.regrid',
outfile='M100_TP_CO_cube.regrid.subim',
box=box,
overwrite=True)
imsubimage(imagename='M100_combine_CO_cube.image',
outfile='M100_combine_CO_cube.image.subim',
box=box,
overwrite=True)
imsubimage(imagename='M100_combine_CO_cube.pb',
outfile='M100_combine_CO_cube.pb.subim',
box=box,
overwrite=True)
os.system('rm -rf M100_TP_CO_cube.regrid.subim.depb')
immath(imagename=['M100_TP_CO_cube.regrid.subim',
'M100_combine_CO_cube.pb.subim'],
expr='IM0*IM1',
outfile='M100_TP_CO_cube.regrid.subim.depb')
os.system('rm -rf M100_Feather_CO.image')
feather(imagename='M100_Feather_CO.image',
highres='M100_combine_CO_cube.image.subim',
lowres='M100_TP_CO_cube.regrid.subim.depb')
# Make Moment Maps of the Feathered Images
myimage = 'M100_TP_CO_cube.regrid.subim'
if qac2:
chanstat1 = imstat(imagename=myimage,chans='3')
chanstat2 = imstat(imagename=myimage,chans='65')
else:
chanstat1 = imstat(imagename=myimage,chans='4')
chanstat2 = imstat(imagename=myimage,chans='66')
rms1 = chanstat1['rms'][0]
rms2 = chanstat2['rms'][0]
rms = 0.5*(rms1+rms2)
if qac1:
qac_mom('M100_TP_CO_cube.regrid.subim', chan_rms, rms=rms)
else:
os.system('rm -rf M100_TP_CO_cube.regrid.subim.mom0')
immoments(imagename='M100_TP_CO_cube.regrid.subim',
moments=[0],
axis='spectral',
chans='10~61',
includepix=[rms*2., 50],
outfile='M100_TP_CO_cube.regrid.subim.mom0')
os.system('rm -rf M100_TP_CO_cube.regrid.subim.mom1')
immoments(imagename='M100_TP_CO_cube.regrid.subim',
moments=[1],
axis='spectral',
chans='10~61',
includepix=[rms*5.5, 50],
outfile='M100_TP_CO_cube.regrid.subim.mom1')
os.system('rm -rf M100_TP_CO_cube.regrid.subim.mom*.png')
imview(raster=[{'file': 'M100_TP_CO_cube.regrid.subim.mom0',
'range': [0., 1080.],
'scaling': -1.3,
'colorwedge': True}],
out='M100_TP_CO_cube.regrid.subim.mom0.png')
imview(raster=[{'file': 'M100_TP_CO_cube.regrid.subim.mom1',
'range': [1440, 1695],
'colorwedge': True}],
out='M100_TP_CO_cube.regrid.subim.mom1.png')
myimage = 'M100_Feather_CO.image'
if qac2:
chanstat1 = imstat(imagename=myimage,chans='3')
chanstat2 = imstat(imagename=myimage,chans='65')
else:
chanstat1 = imstat(imagename=myimage,chans='4')
chanstat2 = imstat(imagename=myimage,chans='66')
rms1 = chanstat1['rms'][0]
rms2 = chanstat2['rms'][0]
rms = 0.5*(rms1+rms2)
if qac1:
qac_mom('M100_Feather_CO.image', chan_rms, rms=rms)
else:
os.system('rm -rf M100_Feather_CO.image.mom0')
immoments(imagename='M100_Feather_CO.image',
moments=[0],
axis='spectral',
chans='10~61',
includepix=[rms*2., 50],
outfile='M100_Feather_CO.image.mom0')
os.system('rm -rf M100_Feather_CO.image.mom1')
immoments(imagename='M100_Feather_CO.image',
moments=[1],
axis='spectral',
chans='10~61',
includepix=[rms*5.5, 50],
outfile='M100_Feather_CO.image.mom1')
os.system('rm -rf M100_Feather_CO.image.mom*.png')
imview(raster=[{'file': 'M100_Feather_CO.image.mom0',
'range': [-0.3, 25.],
'scaling': -1.3,
'colorwedge': True}],
out='M100_Feather_CO.image.mom0.png')
imview(raster=[{'file': 'M100_Feather_CO.image.mom1',
'range': [1440, 1695],
'colorwedge': True}],
out='M100_Feather_CO.image.mom1.png')
imview(raster=[{'file': 'M100_Feather_CO.image.mom2',
'range': [0, 50],
'colorwedge': True}],
out='M100_Feather_CO.image.mom2.png')
os.system('rm -rf M100_Feather_CO.image.pbcor')
immath(imagename=['M100_Feather_CO.image',
'M100_combine_CO_cube.pb.subim'],
expr='IM0/IM1',
outfile='M100_Feather_CO.image.pbcor')
os.system('rm -rf M100_combine_CO_cube.pb.1ch.subim')
imsubimage(imagename='M100_combine_CO_cube.pb.subim',
outfile='M100_combine_CO_cube.pb.1ch.subim',
chans='35')
os.system('rm -rf M100_Feather_CO.image.mom0.pbcor')
immath(imagename=['M100_Feather_CO.image.mom0',
'M100_combine_CO_cube.pb.1ch.subim'],
expr='IM0/IM1',
outfile='M100_Feather_CO.image.mom0.pbcor')
os.system('rm -rf M100_Feather_CO.image.mom0.pbcor.png')
imview(raster=[{'file': 'M100_Feather_CO.image.mom0.pbcor',
'range': [-0.3, 25.],
'scaling': -1.3,
'colorwedge': True}],
out='M100_Feather_CO.image.mom0.pbcor.png')
imstat('M100_combine_CO_cube.image.subim')
r1 = '0.0012482416759620851 0.022885155827339781 -0.097563751041889191 0.77106547355651855 881.60255738809212'
r2 = '0.85800511534968704 2.1029488639827396 0.10577000677585602 47.981842041015625 3038.8259741599804'
r3 = '0.0027783475502910714 0.021545069499004026 -0.082641437649726868 0.78546744585037231 2826.1533279353807'
r4 = '0.0029995717493774485 0.024097159582836203 -0.16380690038204193 0.79057258367538452 3051.1840323921738'
r5 = '1.4254396459758483 2.8135560574036953 0.10561800003051758 51.054183959960938 3882.9384027157043'
qac_stats('M100_combine_CO_cube.image', r1)
qac_stats('M100_combine_CO_cube.image.pbcor')
qac_stats('M100_combine_CO_cube.image.mom0', r2)
qac_stats('M100_combine_CO_cube.image', box=box)
qac_stats('M100_combine_CO_cube.image.pbcor', box=box)
qac_stats('M100_combine_CO_cube.image.mom0', box=box)
qac_stats('M100_Feather_CO.image', r3)
qac_stats('M100_Feather_CO.image.pbcor', r4)
qac_stats('M100_Feather_CO.image.mom0.pbcor', r5)
f1=imstat('M100_TP_CO_cube.regrid.subim.depb')['flux']
f2=imstat('M100_Feather_CO.image')['flux']
f3=imstat('M100_Feather_CO.image.pbcor')['flux']
print("%g %g %g" % (f1,f2,f3))
qac_end()
# full:
# 5.4 => 2822.29454956 2822.29259255 3055.66039175
# 5.5 => 2825.9993648 2825.99752571 3054.25147157
# 5.6 => 2826.22910583 2826.22718072 3050.38152141
# benchmark 5km/s
# 5.6 2852.29 2853.72 3084.51
# (2972 +/- 319 Jy km/s from the BIMA SONG; Helfer et al. 2003, Table 4).
|
11,772 | 41f3659261d02c46bb54a81f733ecca45580098c | import gym
import numpy as np
import matplotlib.pyplot as plt
N_ROWS, N_COLS, N_WIN = 3, 3, 3
class TicTacToe(gym.Env):
    """Gym-style m,n,k game (default tic-tac-toe: 3x3 board, 3 in a row).

    Cells hold 1 (crosses), -1 (naughts) or 0 (empty); ``curTurn`` is the
    player whose move is next.  An illegal move ends the episode with
    reward -10.
    """

    def __init__(self, n_rows=N_ROWS, n_cols=N_COLS, n_win=N_WIN, clone=None):
        """Create a fresh board, or deep-copy the state of *clone*."""
        if clone is not None:
            # BUGFIX: `copy` was referenced here without ever being imported,
            # so the clone path raised NameError.  Import it locally.
            import copy
            self.n_rows, self.n_cols, self.n_win = clone.n_rows, clone.n_cols, clone.n_win
            self.board = copy.deepcopy(clone.board)
            self.curTurn = clone.curTurn
            self.emptySpaces = None
            self.boardHash = None
        else:
            self.n_rows = n_rows
            self.n_cols = n_cols
            self.n_win = n_win
            self.reset()

    def getEmptySpaces(self):
        """Return (and cache) the (row, col) pairs of all empty cells."""
        if self.emptySpaces is None:
            res = np.where(self.board == 0)
            self.emptySpaces = np.array([ (i, j) for i,j in zip(res[0], res[1]) ])
        return self.emptySpaces

    def makeMove(self, player, i, j):
        """Place *player*'s mark at (i, j) and invalidate the caches."""
        self.board[i, j] = player
        self.emptySpaces = None
        self.boardHash = None

    def getHash(self):
        """Return (and cache) a string key: one char (cell value + 1) per cell."""
        if self.boardHash is None:
            self.boardHash = ''.join(['%s' % (x+1) for x in self.board.reshape(self.n_rows * self.n_cols)])
        return self.boardHash

    def isTerminal(self):
        """Return curTurn on a win, 0 on a draw, None if the game goes on.

        Called after makeMove() and before the turn flips, so curTurn is
        the player who just moved; also updates self.gameOver.
        """
        # Check whether the game has ended: scan every mark of the player
        # who just moved for a vertical/horizontal/diagonal n_win run.
        cur_marks, cur_p = np.where(self.board == self.curTurn), self.curTurn
        for i,j in zip(cur_marks[0], cur_marks[1]):
            win = False
            if i <= self.n_rows - self.n_win:
                # vertical run starting at (i, j)
                if np.all(self.board[i:i+self.n_win, j] == cur_p):
                    win = True
            if not win:
                if j <= self.n_cols - self.n_win:
                    # horizontal run
                    if np.all(self.board[i,j:j+self.n_win] == cur_p):
                        win = True
            if not win:
                if i <= self.n_rows - self.n_win and j <= self.n_cols - self.n_win:
                    # main-diagonal run
                    if np.all(np.array([ self.board[i+k,j+k] == cur_p for k in range(self.n_win) ])):
                        win = True
            if not win:
                if i <= self.n_rows - self.n_win and j >= self.n_win-1:
                    # anti-diagonal run
                    if np.all(np.array([ self.board[i+k,j-k] == cur_p for k in range(self.n_win) ])):
                        win = True
            if win:
                self.gameOver = True
                return self.curTurn
        # no winner: a full board is a draw (0), otherwise keep playing
        if len(self.getEmptySpaces()) == 0:
            self.gameOver = True
            return 0
        self.gameOver = False
        return None

    def printBoard(self):
        """Pretty-print the board to stdout using x / o marks."""
        for i in range(0, self.n_rows):
            print('----'*(self.n_cols)+'-')
            out = '| '
            for j in range(0, self.n_cols):
                if self.board[i, j] == 1:
                    token = 'x'
                if self.board[i, j] == -1:
                    token = 'o'
                if self.board[i, j] == 0:
                    token = ' '
                out += token + ' | '
            print(out)
        print('----'*(self.n_cols)+'-')

    def getState(self):
        """Return the (board hash, empty cells, player-to-move) triple."""
        return (self.getHash(), self.getEmptySpaces(), self.curTurn)

    def action_from_int(self, action_int):
        """Convert a flat action index into a (row, col) pair."""
        return ( int(action_int / self.n_cols), int(action_int % self.n_cols))

    def int_from_action(self, action):
        """Convert a (row, col) pair into a flat action index."""
        return action[0] * self.n_cols + action[1]

    def step(self, action):
        """Apply *action* = (row, col); return (state, reward, done, info).

        Reward is -10 for an illegal move (episode ends immediately),
        +/-1 when the mover wins, 0 otherwise.
        """
        if self.board[action[0], action[1]] != 0:
            return self.getState(), -10, True, {}
        self.makeMove(self.curTurn, action[0], action[1])
        reward = self.isTerminal()
        self.curTurn = -self.curTurn
        return self.getState(), 0 if reward is None else reward, reward is not None, {}

    def reset(self):
        """Clear the board and caches; crosses (1) move first."""
        self.board = np.zeros((self.n_rows, self.n_cols), dtype=int)
        self.boardHash = None
        self.gameOver = False
        self.emptySpaces = None
        self.curTurn = 1
def plot_board(env, pi, showtext=True, verbose=True, fontq=20, fontx=60):
    '''Draw the board with per-cell value estimates from strategy *pi*.

    Colors each cell by pi.Q[state][action] (when the current state is
    known to pi), optionally annotates the numeric Q-values, and draws
    X / O for occupied cells.  *pi* may be None to draw only the board.
    NOTE(review): *verbose* is accepted but unused here -- confirm intent.
    '''
    fig, ax = plt.subplots(1, 1, figsize=(10, 10))
    # NOTE(review): both meshgrid axes use n_rows; presumably should be
    # (n_cols, n_rows) for non-square boards -- X, Y are unused anyway.
    X, Y = np.meshgrid(np.arange(0, env.n_rows), np.arange(0, env.n_rows))
    Z = np.zeros((env.n_rows, env.n_cols)) + .01
    s, actions = env.getHash(), env.getEmptySpaces()
    if pi is not None and s in pi.Q:
        for i, a in enumerate(actions):
            Z[a[0], a[1]] = pi.Q[s][i]
    ax.set_xticks([])
    ax.set_yticks([])
    surf = ax.imshow(Z, cmap=plt.get_cmap('Accent', 10), vmin=-1, vmax=1)
    if showtext:
        # annotate each empty cell with its Q-value estimate
        for i,a in enumerate(actions):
            if pi is not None and s in pi.Q:
                ax.text( a[1] , a[0] , "%.3f" % pi.Q[s][i], fontsize=fontq, horizontalalignment='center', verticalalignment='center', color="w" )
    # draw the occupied cells (imshow uses (col, row) = (j, i) order)
    for i in range(env.n_rows):
        for j in range(env.n_cols):
            if env.board[i, j] == -1:
                ax.text(j, i, "O", fontsize=fontx, horizontalalignment='center', verticalalignment='center', color="w" )
            if env.board[i, j] == 1:
                ax.text(j, i, "X", fontsize=fontx, horizontalalignment='center', verticalalignment='center', color="w" )
    # cbar = plt.colorbar(surf, ticks=[0, 1])
    ax.grid(False)
    plt.show()
def get_and_print_move(env, pi, s, actions, random=False, verbose=True, fontq=20, fontx=60):
    '''Plot the current board, optionally dump Q-values, and pick a move.

    Returns an index into *actions*: uniformly random when *random* is
    True, otherwise the greedy choice from pi.getActionGreedy.
    '''
    # env.printBoard()
    plot_board(env, pi, fontq=fontq, fontx=fontx)
    if verbose and (pi is not None):
        if s in pi.Q:
            for i,a in enumerate(actions):
                print(i, a, pi.Q[s][i])
        else:
            # (Russian: "the strategy does not know what to do")
            print("Стратегия не знает, что делать...")
    if random:
        return np.random.randint(len(actions))
    else:
        return pi.getActionGreedy(s, len(actions))
def plot_test_game(env, pi1, pi2, random_crosses=False, random_naughts=True, verbose=True, fontq=20, fontx=60):
    '''Play one test game between strategies (or random movers), plotting each step.

    *pi1* plays crosses (turn == 1), *pi2* plays naughts; either can be
    replaced by uniform random moves via the corresponding flag.
    '''
    done = False
    env.reset()
    while not done:
        s, actions = env.getHash(), env.getEmptySpaces()
        if env.curTurn == 1:
            a = get_and_print_move(env, pi1, s, actions, random=random_crosses, verbose=verbose, fontq=fontq, fontx=fontx)
        else:
            a = get_and_print_move(env, pi2, s, actions, random=random_naughts, verbose=verbose, fontq=fontq, fontx=fontx)
        observation, reward, done, info = env.step(actions[a])
        if reward == 1:
            # (Russian: "crosses won")
            print("Крестики выиграли!")
            plot_board(env, None, showtext=False, fontq=fontq, fontx=fontx)
        if reward == -1:
            # (Russian: "naughts won")
            print("Нолики выиграли!")
            plot_board(env, None, showtext=False, fontq=fontq, fontx=fontx)
|
11,773 | 8a7925d46221f9bb702f1a4f5c6d4b3e83c60958 | from django.conf.urls import url
from core import views
urlpatterns = [
    # Anchor each pattern with '^': url() matches with re.search, so an
    # unanchored r'reg/' would also match any URL merely *containing*
    # "reg/" (e.g. /login/reg/), shadowing later patterns.
    url(r'^reg/', views.RegisterView.as_view()),
    url(r'^login/', views.LoginView.as_view()),
    url(r'^logout/', views.LogoutView.as_view()),
]
11,774 | dde910b9dcb2e30e2d07aa11c26711e1716158c6 | from mongoengine import Document, fields, connect
from utils import get_timestamp, now_yyyymmddhhmmss
class ControlTypeEnum:
    """Scope of a control record: a whole queue or a single function."""
    queue = 'queue'
    func = 'func'
class ControlValidEnum:
    """Whether a control record is in effect."""
    invalid = 0  # not in effect
    valid = 1  # in effect
class ControlRmCurrentEnum:
    """Deletion state for queued-but-not-yet-executed tasks."""
    no = 'no'  # do not delete
    read2rm = 'read2rm'  # marked, ready to delete
    already = 'already'  # already deleted
connect(db='yt_test', host='192.168.99.100', port=53015, username='yeteng', password='123456')
class CeleryTaskControl(Document):
    """Celery control record: enable/disable a queue or one task function.

    A record applies between start_time and end_time (0 = open-ended)
    while its status is valid.
    """
    # BUGFIX: mongoengine's keyword is `required`, not `require`; the
    # original `require=True` was not recognized by the field, so
    # required-field validation never ran.
    queue = fields.StringField(verbose_name='任务队列名, eg: inner_quick', required=True)
    control_type = fields.StringField(verbose_name='控制类别', required=True, default=ControlTypeEnum.func)
    func = fields.StringField(verbose_name='方法名: test_1, 当控制类别为func时必填')
    start_time = fields.LongField(verbose_name='开始生效时间', default=get_timestamp)
    end_time = fields.LongField(verbose_name='结束生效时间', default=0)
    rm_current_tasks = fields.StringField(verbose_name='删除现有未执行的任务,当控制类别为queue时有效', default=ControlRmCurrentEnum.no)
    status = fields.IntField(verbose_name='启用状态', required=True, default=ControlValidEnum.valid)
    meta = {'collection': 'hdy_celery_control', 'indexes': ['start_time', 'end_time', 'status']}
class CeleryErrorRecord(Document):
    """Record of a failed celery task execution, keyed by the task id."""
    # BUGFIX: `required=` (not `require=`) is the mongoengine keyword; see
    # CeleryTaskControl for the same fix.
    task_id = fields.StringField(verbose_name='celery任务id', required=True, primary_key=True)
    full_func = fields.StringField(verbose_name='带路径的方法名,eg: tasks.task_1.test_1')
    func_params = fields.DictField(verbose_name='方法参数 {args:(), kwargs: {}}')
    exc = fields.StringField(verbose_name='简短异常说明')
    error_info = fields.StringField(verbose_name='异常堆栈详细信息')
    create_time = fields.LongField(verbose_name='创建时间', default=get_timestamp)
    create_time_human = fields.StringField(verbose_name='创建时间,人看的', default=now_yyyymmddhhmmss)
    meta = {'collection': 'hdy_celery_error', 'indexes': ['full_func', 'create_time', 'create_time_human']}
if __name__ == '__main__':
    # Smoke test: insert one control record (requires the MongoDB
    # connection configured by connect() above).
    a = CeleryTaskControl()
    a.queue = 'inner_quick'
    a.func = 'test_1'
    a.save()
|
11,775 | c1f3c76aef540d469cb269db21da241cc8b5585c | # Create your views here.
from django.shortcuts import render
from forms import Register_Form
from models import Register
from django.utils.translation import ugettext
def home(request):
    """Render the home page with a registration form and all registrants."""
    form2 = Register_Form()
    reg_list = Register.objects.all()
    msg = ugettext('message from views to be translated')
    # NOTE(review): passing locals() exposes every local (including request)
    # to the template; an explicit context dict would be safer -- confirm
    # which names home.html actually uses before changing this.
    return render(request,"home.html",locals())
def contact(request):
    """Render the static contact page."""
    return render(request,"contact.html")
def about_us(request):
    """Render the static about page."""
    return render(request,"about.html")
def support(request):
    """Render the static support page."""
    return render(request,"support.html")
|
11,776 | 4d96e930577ef435205b2308294b3e3d902e5032 | MAXDATA = 1200
globalIDX = 0
radio.set_transmit_power(7)
radio.set_group(88)
input.temperature() #prepare sensor
sensordata = bytearray(MAXDATA) #SENSOR DATA - 8 bit per reading
for i in range(sensordata.length):
sensordata[i]=0
tstamps = bytearray(MAXDATA*4) # TIMESTAMPS - 32 bit per reading
for i in range(tstamps.length):
tstamps[i]=0
def getDataAtPos(pos):
global sensordata
return sensordata[pos]
def getStampAtPos(pos):
global tstamps
tpos = 4*pos
tmstmp = tstamps[tpos]+\
tstamps[tpos+1]*256+\
tstamps[tpos+2]*65536+\
tstamps[tpos+3]*16777216
return tmstmp
def strrepl(s,old,new):
    """Return *s* with each occurrence of the single character *old*
    replaced by *new*.

    Hand-rolled because this runs under MakeCode Static Python, whose
    string API exposes char_at() rather than CPython's str.replace().
    """
    ss = str(s)
    so = str(old)
    sn = str(new)
    snew = ""
    for i in range(len(ss)):
        # char_at is MakeCode Static-Python API (not standard CPython)
        if ss.char_at(i)==so:
            snew=snew+sn
        else:
            snew=snew+ss.char_at(i)
    return snew
def putDataAtPos(pos, tstamp, data):
    """Store one reading at slot *pos*: the *data* byte into sensordata
    and *tstamp* little-endian into the 4 bytes of tstamps at 4*pos.

    (Removed dead trailing code that re-read the just-written values into
    unused locals tmstmp/data.)
    """
    global sensordata, tstamps
    tpos = 4*pos
    tstamps[tpos]=tstamp % 256
    tstamp = tstamp // 256 #chop last byte
    tstamps[tpos+1]= tstamp % 256
    tstamp = tstamp // 256 #chop last byte
    tstamps[tpos+2]= tstamp % 256
    tstamp = tstamp // 256 #chop last byte
    tstamps[tpos+3]= tstamp
    sensordata[pos]=data
# basic.pause(1050)
# putDataAtPos(0, input.running_time()/1000,66)
# dt = getDataAtPos(0)
# st = getStampAtPos(0)
#print("Timestamp="+str(st)+" data="+str(dt))
# basic.pause(1050)
# putDataAtPos(1, input.running_time()/1000,77)
# dt = getDataAtPos(1)
# st = getStampAtPos(1)
# print("Timestamp="+str(st)+" data="+str(dt))
def on_button_pressed_a():
global sensordata, tstamps, globalIDX
print("-------------------")
stmp = getStampAtPos(0)
buff = bytearray(5)
for i in range(globalIDX):
buff[0]=sensordata[i]
buff[1]=tstamps[4*i]
buff[2]=tstamps[4*i+1]
buff[3]=tstamps[4*i+2]
buff[4]=tstamps[4*i+3]
radio.send_buffer(buff)
print(str(stmp)+", "+str(buff[0]))
stmp = getStampAtPos(i)
basic.pause(50)
input.on_button_pressed(Button.A, on_button_pressed_a)
def on_button_pressed_b():
basic.show_number(input.temperature())
input.on_button_pressed(Button.B, on_button_pressed_b)
def on_received_buffer(rBuffer):
dat = Math.map(rBuffer[0],0,255,-100,500)/10
tmstmp = rBuffer[1]+\
rBuffer[2]*256+\
rBuffer[3]*65536+\
rBuffer[4]*16777216
sdat = strrepl(dat,".",",")
print(str(RadioPacketProperty.SERIAL_NUMBER)+':'+str(tmstmp)+"; "+sdat)
radio.on_received_buffer(on_received_buffer)
def getNewData():
global globalIDX
dat = input.temperature()
dat = Math.floor(Math.map(dat*10, -100,500,0,255)) #convert to byte from -10.0 to 50.0 C
tst = input.running_time()/1000
olddat = getDataAtPos(globalIDX)
if olddat != dat: # write new data only if they change
globalIDX += 1 #next empty position
putDataAtPos(globalIDX,tst,dat) #write data
def onSet_interval_interval():
global globalIDX
led.plot(2, 2)
#print(str(globalIDX))
getNewData()
basic.pause(100)
led.unplot(2, 2) #flash a led
control.set_interval(onSet_interval_interval, 60000, control.IntervalMode.INTERVAL)
def onSet_interval_interval2(): #every 15 minutes send the data
on_button_pressed_a()
control.set_interval(onSet_interval_interval2, 60000*15, control.IntervalMode.INTERVAL) #every 15 minutes |
11,777 | 98dfdc63b43c936320c07def19212d9d55a4fcf5 | import os
reader = []  # lines read from the chosen file
scs = []     # sort keys: the 3rd whitespace-separated field of each line
b = []       # lines reordered according to scs
def SortByLow():
    """Append lines of *reader* to *b* ordered by ascending numeric key."""
    scs.sort(key=int)
    for k in scs:
        for i in reader:
            if i.find(k) != -1:
                b.append(i)
def SortByHight():
    """Append lines of *reader* to *b* ordered by descending numeric key.

    (Name kept as-is -- "Hight" is a typo, but renaming would break callers.)
    """
    scs.sort(key=int)
    scs.reverse()
    for k in scs:
        for i in reader:
            if i.find(k) != -1:
                b.append(i)
path = input("Enter Path")
directory = os.listdir(path)
print(directory)
print("Choice file")
filePath = input("Enter the file name with type")
ch = input("Выберите действие 1)Прочитать файл 2) Дополнить файл 3)отсортировать файл")
if ch == "1":
    # Context manager closes the handle (the original leaked it).
    with open(f"{path}{filePath}", 'r', encoding='utf8') as file:
        reader = file.readlines()
    print(reader)
elif ch == '2':
    text = input("Append text: ")
    with open(f"{path}{filePath}", 'a') as file:
        file.write(text)
elif ch == '3':
    sort = input("1)По возростанию 2)По убиванию")
    # Both handles are now closed on exit (the input file was never
    # closed in the original).
    with open(f"{path}{filePath}", 'r', encoding='utf8') as file, \
         open(f"{path}new{filePath}", 'w', encoding='utf8') as file2:
        reader = file.readlines()
        for i in reader:
            scs.append(i.split()[2])
        if sort == '1':
            SortByLow()
            for i in b:
                file2.write(i)
        elif sort == '2':
            SortByHight()
            for i in b:
                file2.write(i)
11,778 | 9ad2761252972bd1a1f7d73a7fbddf2c3f723c79 | import FWCore.ParameterSet.Config as cms
# Step 2D of the slimming test chain: reads the file written by step 1D
# and validates the thinned/slimmed collections it contains.
process = cms.Process("TEST2D")
process.maxEvents.input = 3
process.source = cms.Source("PoolSource",
    fileNames = cms.untracked.vstring('file:testSlimmingTest1D.root')
)
# Checks the ABE thinned collection; its AB parent was dropped upstream.
process.testABE = cms.EDAnalyzer("ThinningTestAnalyzer",
    parentTag = cms.InputTag('thinningThingProducerAB'),
    thinnedTag = cms.InputTag('thinningThingProducerABE'),
    associationTag = cms.InputTag('thinningThingProducerABE'),
    trackTag = cms.InputTag('trackOfThingsProducerE'),
    parentWasDropped = cms.bool(True),
    expectedParentContent = cms.vint32(range(0,11)),
    expectedThinnedContent = cms.vint32(range(6,11)),
    expectedIndexesIntoParent = cms.vuint32(range(6,11)),
    expectedValues = cms.vint32(range(6,11)),
)
# Checks the BD thinned collection (slimmed once on each side).
process.testBD = cms.EDAnalyzer("ThinningTestAnalyzer",
    parentTag = cms.InputTag('thinningThingProducerB'),
    thinnedTag = cms.InputTag('thinningThingProducerBD'),
    associationTag = cms.InputTag('thinningThingProducerBD'),
    trackTag = cms.InputTag('trackOfThingsProducerD'),
    parentWasDropped = cms.bool(True),
    parentSlimmedCount = cms.int32(1),
    thinnedSlimmedCount = cms.int32(1),
    refToParentIsAvailable = cms.bool(False),
    expectedParentContent = cms.vint32(range(0,11)),
    expectedThinnedContent = cms.vint32(range(0,6)),
    expectedIndexesIntoParent = cms.vuint32(range(0,6)),
    expectedValues = cms.vint32(range(0,6)),
)
# Write everything except the ABE collection for the next test step.
process.outD = cms.OutputModule("PoolOutputModule",
    fileName = cms.untracked.string('testSlimmingTest2D.root'),
    outputCommands = cms.untracked.vstring(
        'keep *',
        'drop *_thinningThingProducerABE_*_*',
    )
)
process.p = cms.Path(
    process.testABE
    * process.testBD
)
process.ep = cms.EndPath(
    process.outD
)
11,779 | 8c1ec7187e6d3b972b8a509cc9b76b2195a4776b | import numpy as np
import matplotlib.pyplot as plt
def plot_graph(values, marker, label):
    """Plot the (x, y) pairs in *values* using the given marker and label."""
    xs = [point[0] for point in values]
    ys = [point[1] for point in values]
    plt.plot(xs, ys, marker, label=label)
f = np.loadtxt("sol/error-35.txt")
plot_graph(f, "m-", "35")
f = np.loadtxt("sol/error-131.txt")
plot_graph(f, "r-", "131")
f = np.loadtxt("sol/error-373.txt")
plot_graph(f, "g-", "373")
f = np.loadtxt("sol/error-485.txt")
plot_graph(f, "b-", "485")
plt.legend(loc=3)
plt.grid()
plt.show()
|
11,780 | d8d1a456805db8c885b96980d15e270324dfc1e5 | from django.shortcuts import render
# Create your views here.
from django.http import HttpResponse
def mealSchedulePageView(request):
    """Placeholder view for the meal-schedule page.

    BUGFIX: the body contained an unfinished ``if request.s`` fragment --
    a syntax error that broke the whole module import -- which has been
    removed.
    """
    return HttpResponse('This will be the meal schedule page view')
|
11,781 | 323c17ba1273e915141c478af7fc6fcf33cb21b0 | from django.db import models
# Create your models here.
class GoodItem(models.Model):
    """An item with a display name.

    NOTE(review): the original docstring ("A model of a rock band") was
    boilerplate copied from a tutorial and did not describe this model.
    """
    name = models.CharField(max_length=200)  # display name, bounded length
11,782 | 7430e509f9c43b0dc2deedb6c6c56bb0f64eddd0 | from email.encoders import encode_base64
from email.mime.base import MIMEBase
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from email.utils import formatdate, make_msgid
from flask_ezmail.utils import sanitize_address, sanitize_subject, force_text,\
sanitize_addresses, message_policy, _has_newline, PY3, PY34
import re
import unicodedata
class Attachment(object):
    """Encapsulates file attachment information.

    :versionadded: 0.3.5
    :param filename: filename of attachment
    :param content_type: file mimetype
    :param data: the raw file data
    :param disposition: content-disposition (if any)
    :param headers: extra per-attachment headers
    """

    def __init__(self, filename=None, content_type=None, data=None,
                 disposition=None, headers=None):
        self.filename = filename
        self.content_type = content_type
        self.data = data
        # Fall back to the standard "attachment" disposition / empty header
        # dict when nothing truthy was supplied.
        if not disposition:
            disposition = 'attachment'
        self.disposition = disposition
        if not headers:
            headers = {}
        self.headers = headers
# NOTE(review): leading indentation appears to have been stripped from this
# file by an extraction step — the code below is flask-mail-style and must be
# re-indented before it can run.
class Message(object):
"""Encapsulates an email message.
:param subject: email subject header
:param recipients: list of email addresses
:param body: plain text message
:param html: HTML message
:param alts: A dict or an iterable to go through dict() that contains
multipart alternatives
:param sender: email sender address, or **MAIL_DEFAULT_SENDER** by default
:param cc: CC list
:param bcc: BCC list
:param attachments: list of Attachment instances
:param reply_to: reply-to address
:param date: send date
:param charset: message character set
:param extra_headers: A dictionary of additional headers for the message
:param mail_options: A list of ESMTP options to be used in MAIL FROM
command
:param rcpt_options: A list of ESMTP options to be used in RCPT commands
"""
def __init__(self, subject='',
recipients=None,
body=None,
html=None,
alts=None,
sender=None,
cc=None,
bcc=None,
attachments=None,
reply_to=None,
date=None,
charset=None,
extra_headers=None,
mail_options=None,
rcpt_options=None,
ascii_attachments=False,):
# NOTE(review): no-op self-assignment, presumably left over from a
# refactor that used to derive the default sender here.
sender = sender
if isinstance(sender, tuple):
# Accept ("Display Name", "address@host") tuples for the sender.
sender = "%s <%s>" % sender
self.recipients = recipients or []
self.subject = subject
self.sender = sender
self.reply_to = reply_to
self.cc = cc or []
self.bcc = bcc or []
self.body = body
# Multipart alternatives keyed by MIME subtype (e.g. 'html').
self.alts = dict(alts or {})
# Assigning here routes through the html property setter below.
self.html = html
self.date = date
# A unique Message-ID is generated once at construction time.
self.msgId = make_msgid()
self.charset = charset
self.extra_headers = extra_headers
self.mail_options = mail_options or []
self.rcpt_options = rcpt_options or []
self.attachments = attachments or []
self.ascii_attachments = ascii_attachments
@property
def send_to(self):
# Deduplicated union of To, Bcc and Cc — the full envelope recipient set.
return set(self.recipients) | set(self.bcc or ()) | set(self.cc or ())
@property
def html(self):
# 'html' is just the "html" entry of the alternatives dict.
return self.alts.get('html')
@html.setter
def html(self, value):
if value is None:
self.alts.pop('html', None)
else:
self.alts['html'] = value
def _mimetext(self, text, subtype='plain'):
"""Creates a MIMEText object with the given subtype (default: 'plain')
If the text is unicode, the utf-8 charset is used.
"""
charset = self.charset or 'utf-8'
return MIMEText(text, _subtype=subtype, _charset=charset)
def _message(self):
"""Creates the email"""
ascii_attachments = self.ascii_attachments
encoding = self.charset or 'utf-8'
attachments = self.attachments or []
if len(attachments) == 0 and not self.alts:
# No html content and zero attachments means plain text
msg = self._mimetext(self.body)
elif len(attachments) > 0 and not self.alts:
# No html and at least one attachment means multipart
msg = MIMEMultipart()
msg.attach(self._mimetext(self.body))
else:
# Anything else
msg = MIMEMultipart()
alternative = MIMEMultipart('alternative')
alternative.attach(self._mimetext(self.body, 'plain'))
for mimetype, content in self.alts.items():
alternative.attach(self._mimetext(content, mimetype))
msg.attach(alternative)
if self.subject:
msg['Subject'] = sanitize_subject(
force_text(self.subject), encoding
)
msg['From'] = sanitize_address(self.sender, encoding)
msg['To'] = ', '.join(list(set(
sanitize_addresses(self.recipients, encoding))))
msg['Date'] = formatdate(self.date, localtime=True)
# see RFC 5322 section 3.6.4.
msg['Message-ID'] = self.msgId
# Bcc is deliberately NOT added as a header; bcc addresses only appear
# in the SMTP envelope (see send_to above).
if self.cc:
msg['Cc'] = ', '.join(list(set(sanitize_addresses(
self.cc, encoding))))
if self.reply_to:
msg['Reply-To'] = sanitize_address(self.reply_to, encoding)
if self.extra_headers:
for k, v in self.extra_headers.items():
msg[k] = v
# Collapses whitespace runs in ASCII-forced attachment filenames.
SPACES = re.compile(r'[\s]+', re.UNICODE)
for attachment in attachments:
f = MIMEBase(*attachment.content_type.split('/'))
f.set_payload(attachment.data)
encode_base64(f)
filename = attachment.filename
if filename and ascii_attachments:
# force filename to ascii
filename = unicodedata.normalize('NFKD', filename)
filename = filename.encode('ascii', 'ignore').decode('ascii')
filename = SPACES.sub(u' ', filename).strip()
try:
# Probe: raises UnicodeEncodeError if the name is non-ASCII.
filename and filename.encode('ascii')
except UnicodeEncodeError:
if not PY3:
filename = filename.encode('utf8')
# RFC 2231 extended-parameter triple (charset, language, value).
filename = ('UTF8', '', filename)
f.add_header('Content-Disposition',
attachment.disposition,
filename=filename)
for key, value in attachment.headers.items():
f.add_header(key, value)
msg.attach(f)
# message_policy is a module-level setting defined elsewhere in the file.
if message_policy:
msg.policy = message_policy
return msg
def as_string(self):
return self._message().as_string()
def as_bytes(self):
if PY34:
return self._message().as_bytes()
else: # fallback for old Python (3) versions
return self._message().as_string().encode(self.charset or 'utf-8')
def __str__(self):
return self.as_string()
def __bytes__(self):
return self.as_bytes()
def has_bad_headers(self):
"""Checks for bad headers i.e. newlines in subject, sender or recipients.
RFC5322: Allows multiline CRLF with trailing whitespace (FWS) in header
"""
headers = [self.sender, self.reply_to] + self.recipients
for header in headers:
if _has_newline(header):
return True
if self.subject:
if _has_newline(self.subject):
# Multiline subjects are only legal as CRLF folds whose
# continuation lines start with whitespace (FWS).
for linenum, line in enumerate(self.subject.split('\r\n')):
if not line:
return True
if linenum > 0 and line[0] not in '\t ':
return True
if _has_newline(line):
return True
if len(line.strip()) == 0:
return True
return False
def is_bad_headers(self):
# Deprecated alias kept for backward compatibility.
from warnings import warn
msg = '''is_bad_headers is deprecated, use the new
has_bad_headers method instead.'''
warn(DeprecationWarning(msg), stacklevel=1)
return self.has_bad_headers()
def send(self, connection):
"""Verifies and sends the message."""
connection.send(self)
def add_recipient(self, recipient):
"""Adds another recipient to the message.
:param recipient: email address of recipient.
"""
self.recipients.append(recipient)
def attach(self,
filename=None,
content_type=None,
data=None,
disposition=None,
headers=None):
"""Adds an attachment to the message.
:param filename: filename of attachment
:param content_type: file mimetype
:param data: the raw file data
:param disposition: content-disposition (if any)
"""
self.attachments.append(
Attachment(filename, content_type, data, disposition, headers))
|
11,783 | c199fca0f78124f760aaa2669e706b0d484bae24 | # pylint: disable=not-callable, no-member, invalid-name, line-too-long, wildcard-import, unused-wildcard-import, missing-docstring
import pytest
import torch
from e3nn_little import o3
from e3nn_little.nn import (WeightedTensorProduct,
GroupedWeightedTensorProduct, Identity,
FullyConnectedWeightedTensorProduct)
def test():
    """Smoke-test WeightedTensorProduct with a single 'uvw' instruction."""
    irreps_in1 = o3.Irreps("1e + 2e + 3x3o")
    irreps_in2 = o3.Irreps("1e + 2e + 3x3o")
    irreps_out = o3.Irreps("1e + 2e + 3x3o")

    def with_unit_weight(irreps):
        # (multiplicity, irrep, path weight) triples expected by the product.
        return [(mul, ir, 1.0) for mul, ir in irreps]

    instructions = [
        (1, 1, 1, 'uvw', True, 1.0),
    ]
    tp = WeightedTensorProduct(
        with_unit_weight(irreps_in1),
        with_unit_weight(irreps_in2),
        with_unit_weight(irreps_out),
        instructions,
    )
    lhs = torch.randn(irreps_in1.dim)
    rhs = torch.randn(irreps_in2.dim)
    tp(lhs, rhs)
def test_wtp():
    """Smoke-test the fully connected weighted tensor product wrapper."""
    irreps_in1 = o3.Irreps("1e + 2e + 3x3o")
    irreps_in2 = o3.Irreps("1e + 2e + 3x3o")
    irreps_out = o3.Irreps("1e + 2e + 3x3o")
    module = FullyConnectedWeightedTensorProduct(irreps_in1, irreps_in2, irreps_out)
    print(module)
    module(torch.randn(irreps_in1.dim), torch.randn(irreps_in2.dim))
def test_gwtp():
    """Smoke-test the grouped weighted tensor product wrapper."""
    irreps_in1 = o3.Irreps("1e + 2e + 3x3o")
    irreps_in2 = o3.Irreps("1e + 2e + 3x3o")
    irreps_out = o3.Irreps("1e + 2e + 3x3o")
    module = GroupedWeightedTensorProduct(irreps_in1, irreps_in2, irreps_out)
    print(module)
    module(torch.randn(irreps_in1.dim), torch.randn(irreps_in2.dim))
def test_id():
    """Identity module should accept a tensor of the input irreps' dimension."""
    irreps_in = o3.Irreps("1e + 2e + 3x3o")
    irreps_out = o3.Irreps("1e + 2e + 3x3o")
    identity = Identity(irreps_in, irreps_out)
    print(identity)
    identity(torch.randn(irreps_in.dim))
@pytest.mark.parametrize('sc', [False, True])
def test_variance(sc):
# Checks that the output variance of WeightedTensorProduct stays near 1
# for several irreps/path-weight configurations, with and without the
# specialized-code path. `tol` bounds |log10(mean variance)|.
n = 1000
tol = 1.2
# Case 1: single scalar path.
m = WeightedTensorProduct(
[(12, (0, 1), 1.0)],
[(3, (0, 1), 1.0)],
[(7, (0, 1), 1.0)],
[
(0, 0, 0, 'uvw', True, 1.0)
],
normalization='component',
internal_weights=True,
_specialized_code=sc,
)
x = m(torch.randn(n, 12), torch.randn(n, 3)).var(0)
assert x.mean().log10().abs() < torch.tensor(tol).log10()
# Case 2: two scalar inputs feeding one output, unequal path weights.
m = WeightedTensorProduct(
[(12, (0, 1), 1.0), (79, (0, 1), 1.0)],
[(3, (0, 1), 1.0)],
[(7, (0, 1), 1.0)],
[
(0, 0, 0, 'uvw', True, 1.0),
(1, 0, 0, 'uvw', True, 2.5),
],
normalization='component',
internal_weights=True,
_specialized_code=sc,
)
x = m(torch.randn(n, 12 + 79), torch.randn(n, 3)).var(0)
assert x.mean().log10().abs() < torch.tensor(tol).log10()
# Case 3: mixed l=0 and l=1 inputs, scalar output.
m = WeightedTensorProduct(
[(12, (0, 1), 1.0), (79, (1, 1), 1.0)],
[(3, (0, 1), 1.0), (10, (1, 1), 1.0)],
[(7, (0, 1), 1.0)],
[
(0, 0, 0, 'uvw', True, 1.0),
(1, 1, 0, 'uvw', True, 1.5),
],
normalization='component',
internal_weights=True,
_specialized_code=sc,
)
x = m(torch.randn(n, 12 + 3 * 79), torch.randn(n, 3 + 10 * 3)).var(0)
assert x.mean().log10().abs() < torch.tensor(tol).log10()
# Case 4: l=1 output.
m = WeightedTensorProduct(
[(12, (0, 1), 1.0), (79, (1, 1), 1.0)],
[(3, (1, 1), 1.0), (10, (1, 1), 1.0)],
[(7, (1, 1), 1.0)],
[
(0, 0, 0, 'uvw', True, 1.0),
(1, 1, 0, 'uvw', True, 1.5),
],
normalization='component',
internal_weights=True,
_specialized_code=sc,
)
x = m(torch.randn(n, 12 + 3 * 79), torch.randn(n, 3 * 3 + 10 * 3)).var(0)
assert x.mean().log10().abs() < torch.tensor(tol).log10()
# Case 5: non-unit input/output variances; inputs are rescaled to match
# the declared variances, and the output variance is divided back out.
m = WeightedTensorProduct(
[(12, (0, 1), 1.0), (79, (1, 1), 1.0)],
[(3, (1, 1), 1.0), (10, (2, 1), 3.0)],
[(7, (1, 1), 2.0)],
[
(0, 0, 0, 'uvw', True, 1.0),
(1, 1, 0, 'uvw', True, 1.5),
],
normalization='component',
internal_weights=True,
_specialized_code=sc,
)
y = torch.randn(n, 3 * 3 + 10 * 5)
y[:, 3 * 3:].mul_(3**0.5)
x = m(torch.randn(n, 12 + 3 * 79), y).var(0) / 2
assert x.mean().log10().abs() < torch.tensor(tol).log10()
# Case 6: 'norm' normalization — inputs scaled per-irrep by 1/sqrt(2l+1).
m = WeightedTensorProduct(
[(12, (0, 1), 1.0), (79, (1, 1), 1.0)],
[(3, (1, 1), 1.0), (10, (2, 1), 3.0)],
[(7, (1, 1), 0.5)],
[
(0, 0, 0, 'uvw', True, 1.0),
(1, 1, 0, 'uvw', True, 1.5),
],
normalization='norm',
internal_weights=True,
_specialized_code=sc,
)
x = torch.randn(n, 12 + 3 * 79)
x[:, 12:].div_(3**0.5)
y = torch.randn(n, 3 * 3 + 10 * 5)
y[:, :3 * 3].div_(3**0.5)
y[:, 3 * 3:].div_(5**0.5)
y[:, 3 * 3:].mul_(3**0.5)
x = m(x, y).var(0) / 0.5 * 3
assert x.mean().log10().abs() < torch.tensor(tol).log10()
|
11,784 | 2a86d39169d7e0cc50843eb546ed2c6e754de09f | import json
import time
## dictionary for order ##
# Menu data: item name -> price in dollars. "None" lets a customer skip
# the drink for free.
ordercheck = "yes";
Drinks = {
"Coke" : 4.50,
"Diet coke" : 4.50,
"Sprite" : 4.50,
"Asahi" : 12.75,
"None" : 0.0,
}
Mains = {
"Margherita" : 14.00,
"Napoletana" : 16.50,
"Supreme" : 21.00,
"Meat lovers" : 19.75,
"Hawaiian" : 16.50,
}
# NOTE(review): indentation has been stripped from this file, so the exact
# extent of printMenu() is ambiguous — the "Papa's Pizzaria" banner below may
# be inside the function or at module level; confirm against the original.
def printMenu():
# Prints the drinks and mains sections with prices.
print("")
print("=============== MENU ===============")
# NOTE(review): `count` is incremented but never read — dead local.
count = 1
print("")
print("Drinks")
for course in Drinks:
price = Drinks[course]
print(f" {course} - ${price}")
count += 1
print("")
print("Mains")
for course in Mains:
price = Mains[course]
print(f" {course} - ${price}")
print("")
print("")
print("====================================")
print(" Papa's Pizzaria ")
print("====================================")
print("")
### Defines Menu ###
# Prompt loop: keep asking until the user asks to order.
menu = 'no'
# NOTE(review): `menu != ("i would like to order" in menu)` compares a str to
# a bool, which is always True — the loop only ever exits via `break`.
while menu != ("i would like to order" in menu):
menu = input("Would you like to Order or see the Menu? ").lower().strip()
if ("i would like" in menu) or ("i want to" in menu):
if ("order" in menu):
print("Lets order shall we")
break
elif ("menu" in menu):
printMenu()
# Main ordering loop: take mains, then drinks, accumulating totalcost.
# NOTE(review): indentation was stripped from this file, so branch nesting
# below is reconstructed by the reader; the flags noted are based on the
# visible statement order.
totalcost = 0
checkorder = "yes";
# NOTE(review): totalorder is never used afterwards.
totalorder = "";
ordersmade = 0
while checkorder == "yes":
order = input("What would you like for Mains friend? (Please be kind and enter 1 item in at a time)").lower().strip()
# NOTE(review): this tests for an EMPTY order before the prefix-stripping
# code — presumably it was meant to be `!=` (only parse non-empty input);
# confirm against the original indentation.
if order == (""):
if ("i want " in order):
order = order.split("i want ", 1) [1];
order = order.capitalize();
if ("i would like " in order):
order = order.split("i would like " , 1) [1];
order = order.capitalize();
if order in Mains:
confirm = input(f"You have ordered {order}. Is this correct? ").lower().strip()
if ("yes" in confirm):
cost = Mains[order]
totalcost += cost
ordercheckdrinks = "yes";
print(f"Thank's for ordering {order}, that will cost you ${cost}, and your total comes to ${totalcost}")
# NOTE(review): orderdrinks is assigned but never read.
orderdrinks = order.replace(" ", "@")
print("")
elif ("no" in confirm):
print("Ooops... let's try again")
print("")
ordercheckdrinks ="no";
else:
print(f"I guess that is a NO then")
##Order Drinks####
# NOTE(review): ordercheckdrinks is only assigned on some paths above —
# reaching this line without confirming a main raises NameError.
while ordercheckdrinks == "yes":
order = input("What Drink would you like? The key is one item at a time! ").lower().strip()
if order == "":
print("Ooops... it seems i have miss heard you could you please repeat that?")
print("")
# NOTE(review): checks for "give me " but splits on "give " — the "me "
# part is left in the order string.
if ("give me " in order):
order = order.split("give " , 1) [1];
order = order.capitalize();
if ("i would like " in order):
order = order.split("i would like " , 1) [1];
order = order.capitalize();
if ("i want " in order):
order = order.split("i want " , 1) [1];
order = order.capitalize();
if order in Drinks:
confirm = input(f"You have ordered {order}. Is this correct? ").lower().strip()
if ("yes" in confirm):
cost = Drinks[order]
totalcost += cost
print(f"Thank's for ordering {order}, that will cost you ${cost}, and your total comes to ${totalcost}")
# NOTE(review): ordermain is assigned but never read.
ordermain = order.replace(" ", "@");
ordersmade = int(ordersmade) + 1;
checkorder = input("Would you like to order another meal? ").lower().strip()
if ("yes" in checkorder):
print("")
break
# Minimum of three orders required for home delivery.
elif ordercheck == "no" and int(ordersmade) >= 3:
break
elif ordercheck == "no":
ordersleft = 3 - int(ordersmade);
time.sleep(1)
print("")
print(f"You must have at least three orders for home delivery please make {ordersleft} more orders to go.")
ordercheck = "yes";
print("")
break
else:
print("I guess that is a yes?")
ordercheck = "yes";
# NOTE(review): the three `elif checkorder != "no"` branches below are
# near-duplicates; only the first can ever run in a given if/elif chain.
elif checkorder != "no" :
print("I'm sorry but that is not in the Available. Try saying 'i would like' or 'i want' before you order.")
menucheck = input(f"Would you like to see the menu? ")
if ("yes" in menucheck):
printMenu();
else:
print("Let's try again");
elif checkorder != "no":
print("I'm sorry but that is not in the Available. Try saying 'i would like' or 'i want' before you order.")
print("")
menucheck = input(f"Would you need to see the menu? ")
if ("yes" in menucheck):
printMenu();
else:
print("")
elif checkorder != "no":
print("I'm sorry but that is not in the Available. Try saying 'i would like' or 'i want' before you order.")
print("")
menucheck = input("--> Would you like to see the menu? ")
if ("yes" in menucheck):
printMenu();
else:
print("");
else:
if checkorder == "yes":
print("I'm sorry but that is not in the Menu. Try saying 'i want' before what you want.")
print("")
menucheck = input(f"Would you like to see the menu? ")
if ("yes" in menucheck):
printMenu();
else:
print("");
else:
print("");
print("")
time.sleep(1);
# Receipt printing.
# NOTE(review): `name` is referenced here but only assigned four lines below,
# and `lines`/`customername` are not defined anywhere in this file — this
# block raises NameError as written.
if name == 0:
lines.append(customername.lower())
import random
# Random fake wait-time estimate in minutes.
number = random.randint(5,60)
name = input("What is your name")
print("====================================")
print(" Receipt ")
print("====================================")
print("Store Manager: Papa Giusepe")
print("Served By: Papa's Chat Bot")
print(f"Order name: {name}")
# NOTE(review): "Weight time" is presumably a typo for "Wait time" (runtime
# string left unchanged here).
print(f"Weight time: {number} minutes")
print("")
print("Your order is:")
# NOTE(review): `order` is a string at this point, so `*order` prints one
# CHARACTER per line — a list of ordered items was probably intended.
print(*order, sep = "\n")
print("")
print(f"the total cost comes to AUD${totalcost}")
print("")
print("------------------------------------")
print(f"Grazie, - {name} for ordering with")
print("Papa's Chat Bot")
print("Hope enjoy!!!")
print("====================================")
print(" Papa's Pizzaria ")
print("====================================")
# NOTE(review): this function is never called, imports randint without using
# it, and depends on globals (`lines`, `name`, `order`) that are undefined in
# this file. With indentation stripped it is also unclear whether the file
# write below belongs inside the function or at module level — confirm
# against the original.
def waittime():
from random import randint
# Replace the caller's previous order line with the new one.
for i in range(len(lines)):
if name.lower() in lines [i]:
lines[i] = name.lower() + " " + order
# Persist all orders to disk (overwrites the file each time).
opened = open("orderlists.text", 'w')
for i in lines:
opened.write(i + "\n")
opened.close()
|
11,785 | 2ec5933ddcf112d3a163d5f48e1134375812b964 | from DocInspector.DocStats import DocStats
def collectGeneralStats(stats: DocStats, service):
    """
    Collects a bunch of general stats about the document
    At present this includes the creation date, self link and title
    :param stats: The object to store the stats in
    :param service: The service to use to make api calls
    """
    # Single metadata fetch for the document identified by stats.general.id.
    meta = service.files().get(fileId=stats.general.id).execute()
    # Copy the fields we care about onto the stats object.
    stats.general.name = meta.get('title')
    stats.general.link = meta.get('selfLink')
    stats.general.creationDate = meta.get('createdDate')
    # The timeline is anchored at the document's creation date.
    stats.timeline.setTimelineStart(stats.general.creationDate)
|
11,786 | 49a8c6afc5e5db6c641c0152f2d28fe3e5a52ca6 | import time
class Clock():
    """A simple wall-clock value holding hour, minute and second."""

    def __init__(self, hour, minute, second):
        self.hour = hour
        self.minute = minute
        self.second = second

    def __str__(self):
        # Human-readable form, e.g. "The time is 9:5:30".
        return f'The time is {self.hour}:{self.minute}:{self.second}'
#Main
# Read the current time. FIX: the original rejected 0 for every component
# (`h <= 0` etc.) even though 0 hours/minutes/seconds are valid.
h = int(input("Enter hour: "))
while h > 24 or h < 0:
    h = int(input("Incorrect input! Enter hours again: "))
m = int(input("Enter minute: "))
while m > 59 or m < 0:
    m = int(input("Incorrect input! Enter minutes again: "))
s = int(input("Enter second: "))
while s > 59 or s < 0:
    s = int(input("Incorrect input! Enter seconds again: "))
# FIX: build mytime from the FIRST set of inputs and only then print it —
# the original called print(mytime) before mytime existed (NameError) and
# later built mytime from the alarm's inputs.
mytime = Clock(h, m, s)
print(mytime)
print("\nSet your alarm time.")
h = int(input("Enter hour: "))
while h > 24 or h < 0:
    h = int(input("Incorrect input! Enter hours again: "))
m = int(input("Enter minute: "))
while m > 59 or m < 0:
    m = int(input("Incorrect input! Enter minutes again: "))
s = int(input("Enter second: "))
while s > 59 or s < 0:
    s = int(input("Incorrect input! Enter seconds again: "))
alarm = Clock(h, m, s)
print(alarm)
# FIX: Clock defines no __eq__, so `mytime != alarm` compared object
# identity (always True). Compare the time fields instead.
if (mytime.hour, mytime.minute, mytime.second) != (alarm.hour, alarm.minute, alarm.second):
    print("RING,RING,RING")
print(time.strftime("\nCurrent time: %H:%M:%S"))
|
11,787 | d04c1ab68858dd510a879f47b4b998cdfded3791 | import discord, aiohttp
from discord.ext import commands
from urllib.parse import quote
import asyncio
# NOTE(review): indentation appears stripped from this file; nesting below is
# as reconstructed by the reader.
class StackSearch(commands.Cog):
# Cog that searches Stack Overflow and pages through result links with
# reaction buttons.
def __init__(self, client):
self.client = client
# Base URL of the Stack Exchange advanced-search endpoint; the query
# string is appended per request.
self.url = (
"https://api.stackexchange.com/search/advanced?site=stackoverflow.com&q="
)
async def fetch(self, query):
# A fresh ClientSession per call; returns the parsed JSON payload.
async with aiohttp.ClientSession() as session:
async with session.get(self.url + query) as response:
if response.status == 200:
return await response.json()
else:
# NOTE(review): raises a bare Exception with no message — a
# specific exception type/message would aid debugging.
raise Exception
@commands.command(aliases=("s", "stack", "stksearch"))
@commands.cooldown(1, 10, commands.BucketType.user)
async def stk(self, ctx, *, query):
"""
USAGE:
```py
stk|stack|s <query>
```
######
DESCRIPTION:
Search for your problems on stackoverflow.
######
EXAMPLE:
```py
cg.s python strings
```
"""
result = await self.fetch(quote(query))
length = len(result["items"])
page_no = 0
# NOTE(review): indexes items[0] without checking — an empty result set
# raises IndexError here.
mes = await ctx.send(result["items"][0]["link"])
await mes.add_reaction("<:arrowleft:762542689425555486>")
await mes.add_reaction("<:arrowright:762542086788349964>")
async def display(page_no):
# Replace the message content with the link for the current page.
await mes.edit(
content=f'{result["items"][page_no]["link"]} {page_no+1}/{length}'
)
def check(reaction, user):
# Only accept reactions on our message from the command invoker.
return reaction.message.id == mes.id and user == ctx.author
while True:
try:
reaction, user = await self.client.wait_for(
"reaction_add", check=check, timeout=20
)
emoji = str(reaction.emoji)
if emoji == "<:arrowright:762542086788349964>" and page_no < length - 1:
page_no += 1
await display(page_no)
elif emoji == "<:arrowleft:762542689425555486>" and page_no > 0:
page_no -= 1
await display(page_no)
await reaction.remove(user)
except asyncio.TimeoutError:
# NOTE(review): no break/return after clearing — as written the
# loop continues waiting forever; presumably it was meant to stop
# paging after the timeout.
await mes.clear_reactions()
@stk.error
async def message_back(self, ctx, error):
# Report only cooldown errors back to the user; others are swallowed.
if isinstance(error, discord.ext.commands.errors.CommandOnCooldown):
await ctx.send(str(error))
def setup(client):
    """discord.py extension entry point: register the StackSearch cog."""
    cog = StackSearch(client)
    client.add_cog(cog)
|
11,788 | 9acabdbf36bce3c61af6e1cd4e4eef543c2f40a0 | a=int(input("enter 1st value"))
b=int(input("enter 2nd value"))
c=int(input("enter 3rd value"))
def sum(a, b, c):
    """Return a + b + c, or 0 if any two of the three values are equal.

    NOTE(review): the name shadows the builtin sum(); kept unchanged so the
    call site below keeps working.
    """
    # FIX: the original computed the total first and overwrote it with 0 —
    # and its local `sum` also shadowed the function's own name. Decide
    # first, then compute only what is needed.
    if a == b or b == c or a == c:
        return 0
    return a + b + c
# Report the result (calls the local sum() defined above, not the builtin).
print("the sum is: ",sum(a,b,c))
11,789 | b125be4cc2be6ec5358ab45fabae288fe41509b0 | str1="Partrol"
str2="Car"
print(str1+str2) |
11,790 | ae682fcd8e20cadaffdf2a715b684c710fb61c9d | import tkinter as tk
# Cipher-challenge inputs (French). texte1 is a Caesar-style shift, texte2 a
# monoalphabetic substitution, texte3 a keyed shift; decoded versions appear
# further down the file.
texte1 = "kd oqnbgzhm ehbghdq ztqz tm bncd ozq rtarshstshnm zkogzadshptd: bgzptd kdssqd drs qdlokzbdd ozq tmd ztsqd. tshkhrdq kz eqdptdmbd cdr kdssqdr ontq cdbncdq kd ldrrzfd."
texte2 = "gx qosvlnkd wkvlkxo xiu vscx qno yd fsu cx qniix cx unkggx kdvsddyx xu vsdukxdu g'kdckvx. gxi gxuuoxi cy fsu cx qniix qxofxuuxdu cx cxvngxo gxi gxuuoxi cy fxiinmx sokmkdng fscygs 26. ixygxi gxi gxuuoxi cx n n a isdu vlkwwoxxi."
texte3 = "dceuq e n'ehfp cg p'kyhhep uqfw cgiy citudm c gzudiq ni ezhd px c jhptv ep cggsht. kg hdtymdt xdzei gdx rzyq wir mvzxpw, cifcchdb znwd ccyw wy lkcsht, dp isgd uqfw wy ?"
def decalage(lettre_message, lettre_cle):
    """Shift one character by another: chr of the code-point sum modulo 256."""
    code = (ord(lettre_message) + ord(lettre_cle)) % 256
    return chr(code)
def dec_texte(texte, cle):
# Vigenère-style encryption over the full chr(0..255) alphabet: each
# non-punctuation character is shifted by the next key character;
# separators/digits pass through unchanged.
# NOTE(review): indentation is stripped in this file — whether the
# `t, c = t + 1, c + 1` update sits inside the else branch or after the
# if/else affects how the key advances over separators; confirm against
# the original.
texte_code = ""
t, c = 0, 0
while len(texte_code) < len(texte):
if texte[t] == " " or texte[t] == ":" or texte[t] == "," or texte[t] == "?" or texte[t] == "." or texte[t] == "2" or texte[t] == "6":
texte_code += texte[t]
else:
texte_code += decalage(texte[t], cle[c%len(cle)])
t, c = t + 1, c + 1
# NOTE(review): redundant — c is already taken modulo len(cle) above.
if c == len(cle):
c = 0
return texte_code
def chiffre():
    """GUI callback: encrypt the text field with the key field into `resultat`."""
    resultat.delete(0, tk.END)
    if entree_texte.get() == "" or entree_cle.get() == "":
        label_res.config(text="Il manque quelque chose en entrée :/")
    else:
        # FIX: the original fell through and called dec_texte even when an
        # input was missing; the sibling dechiffre() guards this with an
        # explicit else, so do the same here.
        resultat.insert(0, dec_texte(entree_texte.get(), entree_cle.get()))
def chiffre_deux(texte, clef):
    """Encrypt `texte` with `clef`, show it in the result field, and return it."""
    encode = dec_texte(texte, clef)
    resultat.delete(0, tk.END)
    resultat.insert(0, encode)
    return encode
def dechiffrement(texte_a_decoder, cle):
# Inverse of dec_texte: shifts each character by the 256-complement of
# the key character, so dec_texte followed by dechiffrement round-trips.
# NOTE(review): as with dec_texte, the stripped indentation leaves the
# position of the `t, c` update ambiguous — confirm against the original.
texte_decode = ""
t, c = 0, 0
while len(texte_decode) < len(texte_a_decoder):
if texte_a_decoder[t] == " " or texte_a_decoder[t] == ":" or texte_a_decoder[t] == "," or texte_a_decoder[t] == "?" or texte_a_decoder[t] == "." or texte_a_decoder[t] == "2" or texte_a_decoder[t] == "6":
texte_decode += texte_a_decoder[t]
else:
# chr(256 - k) undoes a forward shift by k (mod 256).
texte_decode += decalage(texte_a_decoder[t], chr(256-ord(cle[c%len(cle)])))
t, c = t + 1, c + 1
# NOTE(review): redundant — c is already taken modulo len(cle) above.
if c == len(cle):
c = 0
return texte_decode
def dechiffre():
    """GUI callback: decrypt the text field with the key field into `resultat`."""
    resultat.delete(0, tk.END)
    texte_saisi = entree_texte.get()
    cle_saisie = entree_cle.get()
    if texte_saisi == "" or cle_saisie == "":
        label_res.config(text = "Il manque quelque chose en entrée :/")
    else:
        resultat.insert(0, dechiffrement(texte_saisi, cle_saisie))
def chiffre_xor(lettre_message, lettre_cle):
    """XOR the code points of two characters and return the resulting character."""
    a = ord(lettre_message)
    b = ord(lettre_cle)
    return chr(a ^ b)
def creer_liste_clef(taille):
    """Return every string of length `taille` over the full 256-character
    (chr(0)..chr(255)) alphabet.

    FIX: the original rebuilt the combination list for every length from 1
    up to `taille` inside an outer loop and discarded all but the last
    result — only the final length is ever returned, so build just that one.
    The element ordering is unchanged (new character appended on the right,
    alphabet-major).
    """
    possibilite_clef = [chr(i) for i in range(256)]
    combos = list(possibilite_clef)
    for _ in range(taille - 1):
        # Same comprehension shape as the original: suffix varies slowest.
        combos = [x + j for j in possibilite_clef for x in combos]
    return combos
def brute_force_cesar(texte_a_trouver):
"""Trouve une clé longue de 1 et une suite de caractères qui
correspondent au texte à trouver. Pas sûr de l'idée."""
# NOTE(review): brute force by enumerating candidate plaintexts AND keys —
# liste_car grows as len(alphabet)**i per length, so this explodes
# combinatorially for anything beyond a couple of characters.
alphabet = "abcdefghijklmnopqrstuvwxyz :,?.0123456789'"
# Tous les caractères possibles / vus dans les textes à décoder
liste_car = []
# Liste vide qui contiendra les combinaisons de caractères possibles
texte_test = ""
# Texte codé à comparé avec le texte initial
l = 0 # Index de liste_car
m = 0 # Index de la clef
t = 1 # Taille clef
clef = creer_liste_clef(t)
for i in range(len(texte_a_trouver)):
# On crée une liste de toutes les combinaisons possibles
a = [j for j in alphabet] # On ajoute notre alphabet à a
for y in range(i):
a = [x + j for j in alphabet for x in a]
# On ajoute chaque caractère à chaque caractère
# (pas sûr de cette phrase -_-)
liste_car = liste_car + a # On ajoute ce qu'on a trouvé à notre liste
while texte_test != texte_a_trouver:
# Tant qu'on code pas pareil que ce qu'on cherche
# NOTE(review): chiffre_deux expects a key STRING but `clef` is the whole
# key list here — presumably clef[m] was intended; confirm.
texte_test = chiffre_deux(str(liste_car[l]), clef)
# On teste l'encodage avec le texte et la clef actuels
l += 1 # On regarde le caractère suivant
if l >= len(liste_car): # Ne pas aller out of range
l = 0
m += 1 # On change la clef
if m == 256:
t += 1
clef = creer_liste_clef(t)
m += -1
# Shows the found key in the GUI and returns its code point.
entree_cle.insert(0, clef[m])
return ord(clef[m])
# Build the tkinter UI: message entry, key entry, encrypt/decrypt buttons
# and a result field, laid out on a 2-column grid.
racine=tk.Tk()
racine.title("Cryptographie")
entree_texte = tk.Entry(racine, width = 50, font = ("helvetica", "20"))
entree_texte.grid(row = 0, column = 0)
entree_cle = tk.Entry(racine, width = 50, font = ("helvetica", "20"))
entree_cle.grid(row = 1, column = 0)
label_texte = tk.Label(racine,font = ("helvetica", "20"), text = "Entrer le message ici.")
label_texte.grid(row = 0, column = 1)
label_cle = tk.Label(racine,font = ("helvetica", "20"), text = "Entrer la clé ici.")
label_cle.grid(row = 1, column = 1)
bouton_coder=tk.Button(racine, text="Chiffrer texte",fg="black", width=15, command=chiffre)
bouton_coder.grid(row=2, column=0)
bouton_decoder=tk.Button(racine,text="Déchiffrer texte",fg="black", width=15,command=dechiffre)
bouton_decoder.grid(row=2, column=1)
resultat=tk.Entry(racine,width = 50, font = ("helvetica", "20"))
resultat.grid(row=3,column=0)
label_res=tk.Label(racine,font = ("helvetica", "20"), text="Résultat ici.")
label_res.grid(row = 3, column=1)
# print("La clef est : chr", brute_force_cesar("kd"))
# The key found was chr 255 -> ÿ (possibly an artifact of the original code?)
texte1_decode = "le prochain fichier aura un code par substitution alphabetique: chaque lettre est remplacee par une autre. utiliser la frequence des lettres pour decoder le message."
# Average letter frequencies (percent) in French, as [frequency, letter].
alphabet_francais = [[7.11, 'a'], [1.14, 'b'], [3.18, 'c'], [3.67, 'd'], [12.10, 'e'], [1.11, 'f'], [1.23, 'g'], [1.11, 'h'], [6.59, 'i'], [0.34, 'j'], [0.29, 'k'], [4.96, 'l'], [2.62, 'm'], [6.39, 'n'], [5.02, 'o'], [2.49, 'p'], [0.65, 'q'], [6.07, 'r'], [6.51, 's'], [5.92, 't'], [4.49, 'u'], [1.11, 'v'], [0.17, 'w'], [0.38, 'x'], [0.46, 'y'], [0.15, 'z']]
def str_convert(liste):
    """Concatenate the elements of `liste` (stringified) into one string."""
    return "".join(str(element) for element in liste)
def trouver_frequence_lettre(lettre, texte):
    """Return the number of occurrences of `lettre` in `texte`.

    (Despite the name this is a raw count, not a frequency.)
    """
    return sum(1 for caractere in texte if caractere == lettre)
def trouver_frequence_texte(texte):
    """Return [[percentage, letter], ...] for 'a'..'z', sorted by
    descending frequency in `texte`.

    Percentages are rounded to 3 decimals and computed against len(texte).
    """
    table = []
    for code in range(97, 123):  # 'a'..'z'
        lettre = chr(code)
        pourcentage = round((trouver_frequence_lettre(lettre, texte) * 100) / len(texte), 3)
        table.append([pourcentage, lettre])
    table.sort(reverse=True)
    return table
def substituer(texte): # Gives a vague idea but inefficient, badly coded
"""Remplace les lettres selon leur fréquence, en se basant sur
la fréquence moyenne d'apparition des lettres dans
l'alphabet français."""
# Maps each letter of `texte` to the French letter of matching frequency
# rank. NOTE(review): indentation is stripped in this file, so the exact
# nesting of the loop below is reconstructed by the reader.
alphabet = "abcdefghijklmnopqrstuvwxyz"
texte_lettre_only = []
for car in texte:
if car in alphabet:
texte_lettre_only.append(car)
nouveau_texte = list(texte)
j = 0
texte frequencies sorted descending; French reference sorted the same way.
alphabet_francais_texte = trouver_frequence_texte(texte_lettre_only)
alphabet_francais.sort(reverse=True)
for lettre in texte_lettre_only:
a = False
i = 0
if nouveau_texte[j] == " " or nouveau_texte[j] == ":" or nouveau_texte[j] == "," or nouveau_texte[j] == "?" or nouveau_texte[j] == "." or nouveau_texte[j] == "2" or nouveau_texte[j] == "6":
j += 1
else:
while a == False:
if lettre == alphabet_francais_texte[i][1]:
# Replace by the French letter at the same frequency rank.
nouveau_texte[j] = alphabet_francais[i][1]
a = True
else:
i += 1
if i == 26:
i = 0
j += 1
texte_str = str_convert(nouveau_texte)
return texte_str
# print(substituer(texte2))
def substituer_lettre(texte, lettre_initiale, lettre_finale):
    """Return `texte` with every `lettre_initiale` replaced by `lettre_finale`."""
    remplace = [lettre_finale if car == lettre_initiale else car for car in texte]
    return str_convert(remplace)
# print(alphabet_francais)
# print(trouver_frequence_texte(texte2))
# print(texte2)
# Substitution alphabet for texte2, indexed by frequency rank.
alphabet_decode = ['z', 'b', 'd', 'n', 'e', 'm', 'l', 'h', 's', 'j', 'i', 'h', 'g', 'a', 'r', 'p', 'p', 'r', 'o', 't', 't', 'c', 'f', 'e', 'u', 'y']
# Obtained by trial and error (by repeatedly testing substituer_lettre)
def decode_substitution(texte, alphabet):
"""Effectue une substitution par rapport à un alphabet donné."""
# Maps each letter via its rank in alphabet_francais to the corresponding
# entry of `alphabet`; separators/digits/apostrophes pass through.
# NOTE(review): this local alphabet_francais is in PLAIN a-z order (not
# sorted by frequency like the module-level list after substituer() ran),
# so rank here means alphabetical position — confirm that is intended.
nouveau_texte = []
alphabet_francais = [[7.11, 'a'], [1.14, 'b'], [3.18, 'c'], [3.67, 'd'], [12.10, 'e'], [1.11, 'f'], [1.23, 'g'], [1.11, 'h'], [6.59, 'i'], [0.34, 'j'], [0.29, 'k'], [4.96, 'l'], [2.62, 'm'], [6.39, 'n'], [5.02, 'o'], [2.49, 'p'], [0.65, 'q'], [6.07, 'r'], [6.51, 's'], [5.92, 't'], [4.49, 'u'], [1.11, 'v'], [0.17, 'w'], [0.38, 'x'], [0.46, 'y'], [0.15, 'z']]
for lettre in texte:
a = False
i = 0
# Unlike dec_texte, the apostrophe is also passed through here.
if lettre == " " or lettre == ":" or lettre == "," or lettre == "?" or lettre == "." or lettre == "2" or lettre == "6" or lettre == "'":
nouveau_texte.append(lettre)
else:
while a == False:
if lettre == alphabet_francais[i][1]:
nouveau_texte.append(alphabet[i])
a = True
else:
i += 1
if i == 26:
i = 0
texte_sub = str_convert(nouveau_texte)
return texte_sub
# Decoded texte2 (hint for the next stage: a password-shifted message mod 26).
texte2_decode = "le prochain fichier est code par un mot de passe de taille inconnu et contient l'indice. les lettres du mot de passe permettent de décaler les lettres du message original modulo 26. seules les lettres de a a z sont chiffrees."
# print(decode_substitution(texte2, alphabet_decode))
def position_lettre(lettre):
    """Return the 0-based position of `lettre` in 'a'..'z', or None if absent."""
    alphabet = "abcdefghijklmnopqrstuvwxyz"
    for indice, caractere in enumerate(alphabet):
        if caractere == lettre:
            return indice
    return None
def decaler_les_lettres(texte, clef):
# Key-shifted decode over a-z; the key index resets at each SPACE.
# NOTE(review): positions are 0..25, so the `< 0` branch below can never
# be taken for letters — the subtraction branch always runs (Python's
# negative indexing wraps the alphabet). Non-letter key characters would
# make position_lettre return None and raise a TypeError.
liste_texte = list(texte)
liste_clef = list(clef)
a = 0
alphabet = "abcdefghijklmnopqrstuvwxyz"
alphabet_liste = list(alphabet)
for i in range(len(liste_texte)):
if liste_texte[i] in alphabet:
if position_lettre(liste_texte[i])+position_lettre(liste_clef[a]) < 0:
liste_texte[i] = alphabet_liste[position_lettre(liste_texte[i])+position_lettre(liste_clef[a])]
else:
liste_texte[i] = alphabet_liste[position_lettre(liste_texte[i])-position_lettre(liste_clef[a])]
a += 1
if a == len(clef):
a = 0
elif liste_texte[i] == " ":
# Variant-specific behaviour: restart the key at every word.
a = 0
else:
a += 1
if a == len(clef):
a = 0
return str_convert(liste_texte)
def decaler_les_lettres_sans_espace(texte, clef):
# Same as decaler_les_lettres, except spaces do NOT advance (or reset)
# the key index — the key only moves over non-space characters.
liste_texte = list(texte)
liste_clef = list(clef)
a = 0
alphabet = "abcdefghijklmnopqrstuvwxyz"
alphabet_liste = list(alphabet)
for i in range(len(liste_texte)):
if liste_texte[i] in alphabet:
# NOTE(review): as in decaler_les_lettres, the `< 0` branch is dead
# for letter inputs (positions are non-negative).
if position_lettre(liste_texte[i])+position_lettre(liste_clef[a]) < 0:
liste_texte[i] = alphabet_liste[position_lettre(liste_texte[i])+position_lettre(liste_clef[a])]
else:
liste_texte[i] = alphabet_liste[position_lettre(liste_texte[i])-position_lettre(liste_clef[a])]
a += 1
if a == len(clef):
a = 0
elif liste_texte[i] == " ":
pass
else:
a += 1
if a == len(clef):
a = 0
return str_convert(liste_texte)
def decaler_les_lettres_en_bourrin(texte, clef):
# This one works (per the author): the key advances ONLY over letters;
# spaces and punctuation are skipped entirely.
liste_texte = list(texte)
liste_clef = list(clef)
a = 0
alphabet = "abcdefghijklmnopqrstuvwxyz"
alphabet_liste = list(alphabet)
for i in range(len(liste_texte)):
if liste_texte[i] in alphabet:
# NOTE(review): the `< 0` branch is dead — letter positions are 0..25.
if position_lettre(liste_texte[i])+position_lettre(liste_clef[a]) < 0:
liste_texte[i] = alphabet_liste[position_lettre(liste_texte[i])+position_lettre(liste_clef[a])]
else:
liste_texte[i] = alphabet_liste[position_lettre(liste_texte[i])-position_lettre(liste_clef[a])]
a += 1
if a == len(clef):
a = 0
return str_convert(liste_texte)
def creer_clef_lettre(taille):
    """Return every lowercase string of length `taille` (a-z alphabet).

    FIX: as in creer_liste_clef, the original rebuilt the combination list
    for every length up to `taille` and discarded all but the final result —
    build only the requested length. Element ordering is unchanged.
    """
    alphabet = "abcdefghijklmnopqrstuvwxyz"
    combos = list(alphabet)
    for _ in range(taille - 1):
        # Same comprehension shape as the original: suffix varies slowest.
        combos = [x + j for j in alphabet for x in combos]
    return combos
# Brute-force texte3 with every 4-letter key (26**4 = 456,976 candidates),
# keeping any decode that contains the expected phrase "bravo a".
# NOTE(review): this runs at import time and is slow; earlier attempts with
# the other two shift variants are kept commented out below.
liste_des_clef = creer_clef_lettre(4)
#for j in range(len(liste_des_clef)):
# coucou = decaler_les_lettres(texte3,liste_des_clef[j])
# if "bravo a" in coucou:
# print(coucou, liste_des_clef[j])
#for j in range(len(liste_des_clef)):
# coucou = decaler_les_lettres_sans_espace(texte3,liste_des_clef[j])
# if "bravo a" in coucou:
# print(coucou, liste_des_clef[j])
for j in range(len(liste_des_clef)):
coucou = decaler_les_lettres_en_bourrin(texte3,liste_des_clef[j])
if "bravo a" in coucou:
print(coucou, liste_des_clef[j])
# For "bravo a" I first tried "grace a" but that did not work, so I tried "bravo a" and it worked
texte3_decode = "bravo a l'aide de l'indice vous avez reussi a casser ce code et a finir ce devoir. le dernier texte est pour les braves, regardez vous dans un miroir, en etes vous un ?"
# Look at the text in a mirror -> reverse each sentence (or verse?)
def retourner_texte(texte):
    """Return `texte` reversed character by character (mirror reading)."""
    return str_convert(list(reversed(texte)))
# Final stage: texte4, given both split per line and as one string; decode
# by mirroring (retourner_texte) then shifting with the key "bravez".
texte4 = ["jeqeqecvnf suozvb jfk muj",
"dfjr fmy rvuqsk ve",
"itajtd mifwz nnrt",
"imtrvp zuh srzmzbqz tepr zn",
"tmsnirt imtrvp nec hw",
"dzpqj tjf pdecpr zl jr",
"ptejnt ekpb iu b",
"iiuyu iy ijz surg rjs ttsn",
"votp ac hw rzpuen jozw",
"rvwdvx jbo nirscyjv fi",
"svmkyw ve iaflss yie te",
"teffvv'u riznxjzvv jfk",
"nelrhtjrk dh sivdvjvve",
"yi cvb à jffrds tdp",
"rvwdv sebr onvnqsy zvp",
"zuhjwiM le wmifo wiezib nec",
"triot qmjvr'c onrwz",
"memfqg srq wdaietsq vk"]
texte4_decode = []
texte4_dune_traite = "jeqeqecvnf suozvb jfk muj dfjr fmy rvuqsk ve itajtd mifwz nnrt imtrvp zuh srzmzbqz tepr zn tmsnirt imtrvp nec hw dzpqj tjf pdecpr zl jr ptejnt ekpb iu b iiuyu iy ijz surg rjs ttsn votp ac hw rzpuen jozw rvwdvx jbo nirscyjv fi svmkyw ve iaflss yie te teffvv'u riznxjzvv jfk nelrhtjrk dh sivdvjvve yi cvb à jffrds tdp rvwdv sebr onvnqsy zvp zuhjwiM le wmifo wiezib nec triot qmjvr'c onrwz memfqg srq wdaietsq vk"
#for i in range(len(texte4)):
# texte4_decode.append(decaler_les_lettres_en_bourrin(retourner_texte(texte4[i]), "bravez"))
# texte4_decode.append("\n")
texte4_decode = decaler_les_lettres_en_bourrin(retourner_texte(texte4_dune_traite), "bravez")
# I tried "brave" and its derivatives for the key (braves, braver, bravons...)
# I first got something like: je voudrais pas crever avant d'avoir connu les chiens noirs du Mexique qui dorment sans rever les singes à cul nu devoreurs de tropiques les araignees d'argent au nid truffe de bulles
# Then I gave up
print(str_convert(texte4_decode))
# Hand control to the tkinter event loop (blocks until the window closes).
racine.mainloop()
|
11,791 | aec63c3ebe3e1f1a93dd88af86c34231642343a0 | # Generated by Django 2.0.1 on 2018-01-03 14:22
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated migration (Django 2.0.1).

    Drops Call.secondaryagent and Call.ticket, links each Call to a single
    Event (one-to-one) instead, attaches Events to Tickets, and sets
    on_delete=DO_NOTHING on Call.primaryagent and Ticket.category.
    """

    dependencies = [
        ('graphqlendpoint', '0015_auto_20180103_1041'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='call',
            name='secondaryagent',
        ),
        migrations.RemoveField(
            model_name='call',
            name='ticket',
        ),
        migrations.AddField(
            model_name='call',
            name='event',
            field=models.OneToOneField(null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='call', to='graphqlendpoint.Event'),
        ),
        migrations.AddField(
            model_name='event',
            name='ticket',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='tickets', to='graphqlendpoint.Ticket'),
        ),
        migrations.AlterField(
            model_name='call',
            name='primaryagent',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='call_for_agent', to='graphqlendpoint.Agent'),
        ),
        migrations.AlterField(
            model_name='ticket',
            name='category',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.DO_NOTHING, related_name='category_tickets', to='graphqlendpoint.Category'),
        ),
    ]
|
11,792 | 6524091fd45dd2fd2d96e979c3c3a55aa7c5e4fd | # -*- coding: utf-8 -*-
# Generated by Django 1.11.6 on 2018-04-11 12:33
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration (Django 1.11.6): create the Channel table
    (string primary key + display name) and an initially bare
    ShortVideoInfo table (auto primary key only)."""

    dependencies = [
        ('cms_test', '0010_auto_20180411_1215'),
    ]

    operations = [
        migrations.CreateModel(
            name='Channel',
            fields=[
                ('channel_id', models.CharField(max_length=80, primary_key=True, serialize=False)),
                ('channel_name', models.CharField(max_length=100)),
            ],
        ),
        migrations.CreateModel(
            name='ShortVideoInfo',
            fields=[
                ('short_video_info_id', models.AutoField(primary_key=True, serialize=False)),
            ],
        ),
    ]
|
11,793 | 4571417fe48b0f3e1dcaa6b884dbc6c08f166bbf | from kivy.app import App
from kivy.uix.label import Label
from kivy.clock import Clock
from kivy.uix.textinput import TextInput
from kivy.uix.boxlayout import BoxLayout
from kivy.uix.button import Button
from kivy.uix.checkbox import CheckBox
from kivy.uix.slider import Slider
import time
import json
import requests
import threading
from Subscriber import Subscriber
from Publisher import Publisher
# Default MQTT broker as [host, port]; overwritten at startup in the
# __main__ block with the value fetched from the local catalog service.
broker = ["test.mosquitto.org", 1883]
class MainApp(App):
    """Kivy GUI client for a smart house.

    Shows the temperature/presence readings received over MQTT and drives
    a fan and a heater, either manually through sliders or through a simple
    autonomous controller bounded by user-entered thresholds.

    Threshold attribute naming: ``presence{T|NT}{M|m}{H|F}`` =
    with/without presence, max/min, heater/fan.  All are ``None`` until
    the user enters a valid number.
    """

    @staticmethod
    def _readout():
        """Read-only black TextInput used to display sensor values."""
        return TextInput(
            multiline=False,
            readonly=True,
            halign="left",
            font_size=20,
            background_color=[0, 0, 0, 1],
            cursor_blink=False,
            cursor_color=[0, 0, 0, 1],
            selection_color=[0, 0, 0, 0],
            foreground_color=[1, 1, 1, 1]
        )

    def _threshold_input(self, callback):
        """Create one numeric threshold TextInput wired to *callback*."""
        box = TextInput(
            multiline=False,
            halign="right",
            font_size=15,
            size_hint=(.35, 1),
            background_color=[1, 1, 1, 1],
            cursor_blink=False,
            cursor_color=[0, 0, 0, 1],
            selection_color=[0, 0, 0, 0],
            foreground_color=[0, 0, 0, 1]
        )
        box.bind(text=callback)
        return box

    @staticmethod
    def _tag_label(text):
        """Small centered caption label used around sliders and inputs."""
        return Label(text=text,
                     font_size='20sp',
                     size_hint=(.5, .5),
                     pos_hint={'center_x': .5, 'center_y': .5})

    def build(self):
        """Build the widget tree and return the root layout."""
        self.autoSystem_on = False
        self.presenceTMH = None
        self.presenceTmH = None
        self.presenceNTMH = None
        self.presenceNTmH = None
        self.presenceTMF = None
        self.presenceTmF = None
        self.presenceNTMF = None
        self.presenceNTmF = None
        self.main_layout = BoxLayout(orientation="vertical")
        self.Title = Label(text='Smart House Controller',
                           font_size='20sp',
                           size_hint=(.5, .5),
                           pos_hint={'center_x': .5, 'center_y': .9})
        self.main_layout.add_widget(self.Title)
        self.temperature_layout = BoxLayout(orientation="horizontal")
        self.Temperature = Label(text='Temperature :',
                                 font_size='20sp',
                                 size_hint=(.5, .5),
                                 pos_hint={'center_x': .9, 'center_y': .5})
        self.Temperature_data = self._readout()
        self.temperature_layout.add_widget(self.Temperature)
        self.temperature_layout.add_widget(self.Temperature_data)
        # NOTE(review): presence_layout is created but never attached; the
        # presence widgets are added to temperature_layout (kept as-is to
        # preserve the existing on-screen layout).
        self.presence_layout = BoxLayout(orientation="horizontal")
        self.Presence = Label(text='Presence :',
                              font_size='20sp',
                              size_hint=(.5, .5),
                              pos_hint={'center_x': .9, 'center_y': .5})
        self.Presence_data = self._readout()
        self.temperature_layout.add_widget(self.Presence)
        self.temperature_layout.add_widget(self.Presence_data)
        self.main_layout.add_widget(self.temperature_layout)
        self.check_layout = BoxLayout(orientation="horizontal")
        self.auto = Label(text='Automatic',
                          font_size='20sp',
                          size_hint=(10, .2),
                          pos_hint={'center_x': -20.0, 'center_y': .5})
        self.checkbox = CheckBox()
        self.checkbox.bind(active=self.on_checkbox_active)
        self.check_layout.add_widget(self.checkbox)
        self.check_layout.add_widget(self.auto)
        self.main_layout.add_widget(self.check_layout)
        self.slider_layout = BoxLayout(orientation="horizontal")
        self.fan = self._tag_label('Fan')
        self.fan_s = Slider(min=0, max=255, value=0)
        self.heat = self._tag_label('Heater')
        self.heat_s = Slider(min=0, max=255, value=0)
        self.slider_layout.add_widget(self.fan)
        self.slider_layout.add_widget(self.fan_s)
        self.slider_layout.add_widget(self.heat)
        self.slider_layout.add_widget(self.heat_s)
        self.main_layout.add_widget(self.slider_layout)
        # Threshold editor, only attached while "Automatic" is checked.
        self.option_layout = BoxLayout(orientation="vertical")
        self.presT_layout = BoxLayout(orientation="horizontal")
        self.NpresT_layout = BoxLayout(orientation="horizontal")
        self.Option_title = Label(text='Automatic control value of temperature',
                                  font_size='20sp',
                                  size_hint=(.5, .8),
                                  pos_hint={'center_x': .5, 'center_y': .5})
        self.presT_title = Label(text='With Presence',
                                 font_size='20sp',
                                 size_hint=(.5, .8),
                                 pos_hint={'center_x': .5, 'center_y': .5})
        self.NpresT_title = Label(text='Without Presence',
                                  font_size='20sp',
                                  size_hint=(.5, .8),
                                  pos_hint={'center_x': .5, 'center_y': .5})
        self.MAX1H = self._tag_label('MAX')
        self.MIN1H = self._tag_label('Heater MIN')
        self.MAX1F = self._tag_label('MAX')
        self.MIN1F = self._tag_label('Fan MIN')
        self.MAX2H = self._tag_label('MAX')
        self.MIN2H = self._tag_label('Heater MIN')
        self.MAX2F = self._tag_label('MAX')
        self.MIN2F = self._tag_label('Fan MIN')
        self.presT_dataMH = self._threshold_input(self.on_textPMH)
        self.presT_datamH = self._threshold_input(self.on_textPmH)
        self.presT_layout.add_widget(self.MIN1H)
        self.presT_layout.add_widget(self.presT_datamH)
        self.presT_layout.add_widget(self.MAX1H)
        self.presT_layout.add_widget(self.presT_dataMH)
        self.presT_dataMF = self._threshold_input(self.on_textPMF)
        self.presT_datamF = self._threshold_input(self.on_textPmF)
        self.presT_layout.add_widget(self.MIN1F)
        self.presT_layout.add_widget(self.presT_datamF)
        self.presT_layout.add_widget(self.MAX1F)
        self.presT_layout.add_widget(self.presT_dataMF)
        self.NpresT_dataMH = self._threshold_input(self.on_textNPMH)
        self.NpresT_datamH = self._threshold_input(self.on_textNPmH)
        self.NpresT_layout.add_widget(self.MIN2H)
        self.NpresT_layout.add_widget(self.NpresT_datamH)
        self.NpresT_layout.add_widget(self.MAX2H)
        self.NpresT_layout.add_widget(self.NpresT_dataMH)
        self.NpresT_dataMF = self._threshold_input(self.on_textNPMF)
        self.NpresT_datamF = self._threshold_input(self.on_textNPmF)
        self.NpresT_layout.add_widget(self.MIN2F)
        self.NpresT_layout.add_widget(self.NpresT_datamF)
        self.NpresT_layout.add_widget(self.MAX2F)
        self.NpresT_layout.add_widget(self.NpresT_dataMF)
        self.option_layout.add_widget(self.Option_title)
        self.option_layout.add_widget(self.presT_title)
        self.option_layout.add_widget(self.presT_layout)
        self.option_layout.add_widget(self.NpresT_title)
        self.option_layout.add_widget(self.NpresT_layout)
        self.button = Button(
            text="launch autonomous system",
            pos_hint={"center_x": 0.5, "center_y": 0.5},
        )
        self.button.bind(on_release=self.on_button_press)
        self.option_layout.add_widget(self.button)
        return self.main_layout

    def _thresholds_ready(self):
        """Return True when all eight threshold fields hold valid numbers."""
        return None not in (
            self.presenceTMH, self.presenceTmH, self.presenceNTMH,
            self.presenceNTmH, self.presenceTMF, self.presenceTmF,
            self.presenceNTMF, self.presenceNTmF)

    def on_button_press(self, instance):
        """Toggle the autonomous controller; flash red if thresholds missing."""
        if self._thresholds_ready():
            if self.autoSystem_on == False:
                self.autoSystem_on = True
                instance.background_color = [0, 1, 0, 1]
                instance.text = "Launched"
            else:
                self.autoSystem_on = False
                instance.background_color = [1, 1, 1, 1]
                instance.text = "launch autonomous system"
        else:
            instance.background_color = [1, 0, 0, 1]

    def _set_threshold(self, instance, value, attr):
        """Parse *value* as a float and store it under *attr*; on invalid
        input clear both the text field and the stored threshold."""
        val, ok = self.isFloat(value)
        if ok:
            setattr(self, attr, val)
        else:
            instance.text = ""
            setattr(self, attr, None)

    def on_textPMH(self, instance, value):
        # Bug fix: the original cleared 'presenceTMh' (typo), leaving a
        # stale presenceTMH value behind after invalid input, so the
        # launch gate could pass with garbage data.
        self._set_threshold(instance, value, 'presenceTMH')

    def on_textPmH(self, instance, value):
        self._set_threshold(instance, value, 'presenceTmH')

    def on_textPMF(self, instance, value):
        self._set_threshold(instance, value, 'presenceTMF')

    def on_textPmF(self, instance, value):
        self._set_threshold(instance, value, 'presenceTmF')

    def on_textNPMH(self, instance, value):
        self._set_threshold(instance, value, 'presenceNTMH')

    def on_textNPmH(self, instance, value):
        self._set_threshold(instance, value, 'presenceNTmH')

    def on_textNPMF(self, instance, value):
        self._set_threshold(instance, value, 'presenceNTMF')

    def on_textNPmF(self, instance, value):
        self._set_threshold(instance, value, 'presenceNTmF')

    def isFloat(self, s):
        """Return (float(s), True) if *s* parses as a float, else (0, False)."""
        try:
            return float(s), True
        except ValueError:
            return 0, False

    def on_checkbox_active(self, checkbox, value):
        """Switch between manual sliders and the automatic threshold editor."""
        if value:
            self.fan_s.disabled = True
            self.heat_s.disabled = True
            self.main_layout.add_widget(self.option_layout)
            # Re-display previously entered thresholds, if complete.
            if self._thresholds_ready():
                self.presT_dataMH.text = str(self.presenceTMH)
                self.presT_datamH.text = str(self.presenceTmH)
                self.NpresT_dataMH.text = str(self.presenceNTMH)
                self.NpresT_datamH.text = str(self.presenceNTmH)
                self.presT_dataMF.text = str(self.presenceTMF)
                self.presT_datamF.text = str(self.presenceTmF)
                self.NpresT_dataMF.text = str(self.presenceNTMF)
                self.NpresT_datamF.text = str(self.presenceNTmF)
        else:
            self.fan_s.disabled = False
            self.heat_s.disabled = False
            self.main_layout.remove_widget(self.option_layout)
            self.autoSystem_on = False

    def myOnMessageReceived(self, paho_mqtt, userdata, msg):
        """MQTT callback: cache and display temperature/presence readings.

        Expects a JSON payload with 'temperature', 'unit' and 'presence'.
        """
        body = msg.payload.decode("utf-8")
        print(body)
        js = json.loads(body)
        self.Temperature_data.text = str(js['temperature']) + " " + js['unit']
        self.temperature = js['temperature']
        self.Presence_data.text = str(js['presence'])
        self.presence = js['presence']

    def automatic_system(self, dt):
        """Periodic Clock callback: publish fan/heater commands over MQTT.

        Manual mode publishes the slider values; autonomous mode derives
        commands linearly from the last temperature reading using the
        with/without-presence thresholds, clamped to [0, 255].
        """
        diz = {
            "Fan": None,
            "Heat": None}
        if self.autoSystem_on == False:
            diz['Fan'] = self.fan_s.value
            diz['Heat'] = self.heat_s.value
        else:
            # Robustness fix: sensor data arrives asynchronously over MQTT
            # and may not exist yet; skip this tick instead of raising
            # AttributeError inside the Clock callback.
            if not hasattr(self, 'temperature') or not hasattr(self, 'presence'):
                return
            if self.presence:
                diz['Fan'] = int(((self.presenceTMF - self.presenceTmF) / 255) * self.temperature)
                diz['Heat'] = int(255 - ((self.presenceTMH - self.presenceTmH) / 255) * self.temperature)
            else:
                diz['Fan'] = int(((self.presenceNTMF - self.presenceNTmF) / 255) * self.temperature)
                diz['Heat'] = int(255 - ((self.presenceNTMH - self.presenceNTmH) / 255) * self.temperature)
        # Clamp to the actuator range (slider values are already in range).
        if diz['Fan'] > 255:
            diz['Fan'] = 255
        if diz['Fan'] < 0:
            diz['Fan'] = 0
        if diz['Heat'] > 255:
            diz['Heat'] = 255
        if diz['Heat'] < 0:
            diz['Heat'] = 0
        test = Publisher("MyPublisher", broker)
        test.start()
        test.myPublish('/tiot/17/house/control', json.dumps(diz))
        test.stop()
def sendData(dt):
    """Register this controller with the local catalog service.

    Scheduled through kivy's Clock, which supplies the elapsed-time
    argument *dt* (unused).
    """
    payload = {
        "ID": "Smart_House",
        "Description": "Control the smart house",
        "endPoint": "/Mqtt/control",
    }
    requests.put('http://localhost:8080/service/', data=json.dumps(payload))
if __name__ == '__main__':
    app = MainApp()
    # Register with the catalog now and re-register every 60 s.
    Clock.schedule_once(sendData)
    Clock.schedule_interval(sendData, 60)
    # Discover the MQTT broker and the house device's topic from the catalog.
    broker = requests.get('http://localhost:8080/broker/')
    broker = broker.json()
    s = 'http://localhost:8080/device/one?ID=house'
    topic = requests.get(s)
    topic = topic.json()
    # Subscribe to the sensor topic; readings update the GUI via the callback.
    sensSub = Subscriber("Temperature", broker, topic['endPoint'], app.myOnMessageReceived)
    sensSub.start()
    # Push actuator commands (manual or autonomous) every 20 s.
    Clock.schedule_interval(app.automatic_system, 20)
    app.run()
|
11,794 | b9c12f4b31361e4fc45febce76da50a5544d442c | """
Objects and Classes - Lab
Check your code: https://judge.softuni.bg/Contests/Practice/Index/1733#0
SUPyF2 Objects/Classes-Lab - 01. Comment
Problem:
Create a class with name "Comment". The __init__ method should accept 3 parameters
• username
• content
• likes (optional, 0 by default)
Use the exact names for your variables
Note: there is no input/output for this problem. Test the class yourself and submit only the class
Example:
Test Code Output
comment = Comment("user1", "I like this book") user1
print(comment.username) I like this book
print(comment.content) 0
print(comment.likes)
"""
class Comment:
    """A user comment carrying an optional like count (0 by default)."""

    def __init__(self, username, content, likes=0):
        self.username, self.content, self.likes = username, content, likes
class Comment_take_two:
    """Same contract as Comment, with type hints on the text fields."""

    def __init__(self, username: str, content: str, likes=0):
        self.username, self.content, self.likes = username, content, likes
|
11,795 | 1ae0f0e4ca02e3ac56376f3e86cb151c7b453874 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
def add_hero(apps, schema_editor):
    """Seed the sentinel "does not have hero" row (steam_id=0)."""
    hero_model = apps.get_model("heroes", "Hero")
    hero_model.objects.get_or_create(steam_id=0)
def remove_hero(apps, schema_editor):
    """Intentionally a no-op: the null hero must always exist, so the
    reverse migration leaves it in place."""
    return None
class Migration(migrations.Migration):
    """Data migration: seed the sentinel hero row via add_hero; the reverse
    operation (remove_hero) deliberately leaves it in place."""

    dependencies = [
        ('heroes', '0002_auto_20150504_0759'),
    ]

    operations = [
        # RunPython(forward_func, reverse_func)
        migrations.RunPython(add_hero, remove_hero),
    ]
|
11,796 | cea1f1566f7f4424970ea0677c74a098be779aca | """Identify place fields"""
import numpy as np
import matplotlib.pyplot as plt
import itertools as it
import random
from scipy.ndimage.filters import gaussian_filter1d
from scipy.signal import argrelextrema
from scipy.optimize import curve_fit
from scipy.stats import mode, percentileofscore
from pycircstat import var
from multiprocessing import Pool
from collections import Counter
def generate_tuning_curve(start_indices, end_indices, response_magnitudes,
                          position_unsynced, behavior_rate, position_synced,
                          imaging_rate, n_position_bins, return_squared,
                          initial_counts):
    """Accumulate transient responses and occupancy per position bin.

    Arguments are for a single ROI on a single cycle.

    start_indices / end_indices : frame indices where transient events
        start/end; position_synced[start] must be a valid bin.
    response_magnitudes : areas under the included transients.
    position_unsynced : animal's position bin per behavior sample
        (excluded samples hold a sentinel value outside 0..n_position_bins-1).
    behavior_rate : period (sec) of the unsynced (behavior) data.
    position_synced : animal's position bin per imaging frame.
    imaging_rate : period (sec) of the image-synced data.
    return_squared : also return the per-bin sum of squared magnitudes.
    initial_counts : Counter of occupancy per bin; copied, then the
        behavior samples falling inside transients are subtracted so bins
        are not counted as "observed" while an event is in progress.

    Returns (values, counts, values_squared_or_None); counts are seconds
    of occupancy per bin.  Asserts that at least one bin was observed.
    """
    initial_counts = initial_counts.copy()
    values = np.zeros(n_position_bins)
    if return_squared:
        values_squared = np.zeros(n_position_bins)
    roi_counter = Counter()
    for event_idx, start, end, mag in zip(
            it.count(), start_indices, end_indices, response_magnitudes):
        # Convert the imaging-frame bounds to behavior-sample indices.
        start_frame_end = int(
            np.round((start + 1) * imaging_rate / behavior_rate))
        end_frame = int(np.round((end + 1) * imaging_rate / behavior_rate))
        # Credit the full magnitude to the bin occupied at event onset.
        current_pos = position_synced[start]
        values[current_pos] += mag
        if return_squared:
            values_squared[current_pos] += mag ** 2
        # Mark the behavior samples spanned by the event for exclusion.
        roi_counter.update(position_unsynced[start_frame_end:end_frame])
    initial_counts.subtract(roi_counter)
    # range() instead of the Python-2-only xrange keeps this runnable on
    # both interpreters (identical result in this comprehension).
    counts = np.array([initial_counts[x] * behavior_rate
                       for x in range(n_position_bins)])
    # Zero total counts would mean no valid observation on the whole belt.
    assert np.sum(counts) != 0
    if return_squared:
        return values, counts, values_squared
    return values, counts, None
def smooth_tuning_curves(tuning_curves, smooth_length=3, nan_norm=True):
    """Circularly Gaussian-smooth tuning curves.

    With nan_norm=True the result is renormalized by the smoothed mask of
    finite bins, so NaN (missing) bins do not drag the average down.
    """
    zero_filled = np.nan_to_num(tuning_curves)
    smoothed = gaussian_filter1d(zero_filled, smooth_length, mode='wrap')
    if not nan_norm:
        return smoothed
    finite_mask = np.isfinite(tuning_curves).astype(float)
    weight = gaussian_filter1d(finite_mask, smooth_length, mode='wrap')
    return smoothed / weight
def transients_to_include(transients, frames_to_include):
    """Filter one cycle's transients by their start frame.

    For each ROI, keep only the transients whose start frame appears in
    *frames_to_include*.  Returns two parallel lists (nROIs long) of
    start-frame lists and end-frame lists.
    """
    start_frames = []
    end_frames = []
    for trans in transients:
        kept = [(s, e) for s, e in zip(trans['start_indices'],
                                       trans['end_indices'])
                if s in frames_to_include]
        start_frames.append([s for s, _ in kept])
        end_frames.append([e for _, e in kept])
    return start_frames, end_frames
def _nantrapz_1d(y, x=None, dx=1.0):
if x is None:
x_vals = np.arange(0, len(y) * dx, step=dx)
else:
x_vals = x
nans = np.isnan(y)
return np.trapz(y[~nans], x=x_vals[~nans])
def calcResponseMagnitudes(imData, starts, ends, im_period):
    """Return per-transient response magnitudes for one cycle.

    imData : nROIs x nFrames imaging data for the cycle.
    starts, ends : per-ROI lists of transient start/end frames.
    im_period : imaging frame period (currently unused, see below).

    NOTE: the area-under-transient integral is intentionally disabled;
    every transient contributes magnitude 1.0 (pure event counting), so
    imData and im_period do not affect the result.
    """
    responses = []
    # zip instead of the Python-2-only itertools.izip: same iteration
    # behavior here, but also runs under Python 3.
    for roi_imData, roi_starts, roi_ends in zip(imData, starts, ends):
        responses.append([])
        for start, end in zip(roi_starts, roi_ends):
            response = 1.
            responses[-1].append(response)
    return responses
def shuffle_transients(true_starts, true_ends, frames_to_include):
    """Randomly re-place transients onto allowed frames.

    Transients are placed longest-first; each keeps its original duration,
    starts on a frame drawn uniformly from *frames_to_include*, and may
    not overlap a previously placed transient.
    """
    durs = np.array(true_ends) - np.array(true_starts)
    ordered_starts = [s for (_, s) in sorted(zip(durs, true_starts))][::-1]
    ordered_ends = [e for (_, e) in sorted(zip(durs, true_ends))][::-1]
    occupied = set([])
    shuffle_starts = []
    shuffle_ends = []
    for start, end in zip(ordered_starts, ordered_ends):
        duration = end - start
        while True:
            candidate = random.sample(frames_to_include, 1)[0]
            span = set(range(candidate, candidate + duration + 1))
            if not span & occupied:
                break
        shuffle_starts.append(candidate)
        shuffle_ends.append(candidate + duration)
        occupied |= span
    return shuffle_starts, shuffle_ends
def calcSpatialResponses(tuning_curve, putative_place_fields):
    """Summed tuning-curve response inside each putative field.

    *putative_place_fields* is a (starts, ends) pair with inclusive
    bounds; a field with start > end wraps around the end of the belt.
    """
    responses = []
    for start, end in zip(*putative_place_fields):
        if start > end:
            # Wrap-around field: tail of the belt plus its head.
            response = (np.sum(tuning_curve[start:]) +
                        np.sum(tuning_curve[:end + 1]))
        else:
            response = np.sum(tuning_curve[start:end + 1])
        responses.append(response)
    return responses
def calc_spatial_information(inputs):
    """Skaggs-style spatial information per ROI.

    inputs : (event_counts, obs_counts, smooth_length) packed in one tuple
        so the function can be dispatched through Pool.imap_unordered.
        smooth_length is carried for signature compatibility but unused.

    Returns a list with one information value per ROI.
    """
    event_counts, obs_counts, smooth_length = inputs
    # Derived from Skaggs et al. with algebraic simplifications:
    #   I = sum_i e_i * log2(e_i / o_i) / E  -  log2(E / O)
    info = []
    # zip instead of the Python-2-only itertools.izip (Py3 compatibility).
    for roi_events, roi_obs in zip(event_counts, obs_counts):
        O_sum = roi_obs.sum()
        # Drop zero-event bins: they contribute nothing and would make
        # log2 blow up.
        idx = np.nonzero(roi_events)[0]
        roi_events = roi_events[idx]
        roi_obs = roi_obs[idx]
        E_sum = roi_events.sum()
        R = roi_events / roi_obs
        i = np.dot(roi_events, np.log2(R)) / E_sum - np.log2(E_sum / O_sum)
        info.append(i)
    return info
def identifyPutativeFields(tuning_curve):
    """Return ([starts], [ends]) of contiguous nonzero intervals.

    The curve is rounded to 3 decimals first; end indices are inclusive
    (tuning_curve[end] > 0).  An interval touching both bin 0 and the
    last bin is merged into a single wrap-around field (start > end).
    Returns [[], []] when the curve is zero everywhere.
    """
    curve = np.around(np.copy(tuning_curve), decimals=3)
    nonzero = curve != 0
    if not nonzero.any():
        return [[], []]
    n = len(curve)
    # Run edges: a run starts where the mask switches on, and ends on the
    # last True bin before it switches off.
    starts = [i for i in range(n)
              if nonzero[i] and (i == 0 or not nonzero[i - 1])]
    ends = [i for i in range(n)
            if nonzero[i] and (i == n - 1 or not nonzero[i + 1])]
    # Merge a run touching bin 0 with one touching the last bin (wrap).
    if len(starts) > 1 and starts[0] == 0 and ends[-1] == n - 1:
        starts.pop(0)
        ends[-1] = ends.pop(0)
    return starts, ends
def define_fields(tuning_curve):
    """Define place-field boundaries from a circular tuning curve.

    For every local maximum, a zero-mean Gaussian is fit to the curve
    between the neighboring minima (or the points where the curve drops
    below 25% of the peak), and bins within +/- 2 sigma of the peak are
    marked as field bins.  Marked bins are merged into intervals, then
    intervals whose area is below half of the largest field's area are
    dropped.

    Returns (starts, ends): parallel lists of inclusive bin indices; a
    field with start > end wraps around the end of the belt.

    NOTE(review): written for Python 2 -- ``len(tuning_curve) / 2`` below
    relies on integer division; under Python 3 it yields a float and the
    slicing would raise.  Confirm interpreter version before reuse.
    """
    def gaussian_func(x, a, c):
        # gaussian with mean zero and zero vertical displacement
        return a * np.exp(-(x ** 2) / (2 * c ** 2))
    local_maxima = argrelextrema(
        np.around(tuning_curve, decimals=5), np.greater, mode='wrap')[0]
    if len(local_maxima) == 0:
        # No strict maxima (e.g. flat-topped peaks): take the first bin of
        # each nonzero plateau as that plateau's representative maximum.
        all_plateaus = argrelextrema(np.array(tuning_curve), np.greater_equal,
                                     mode='wrap')[0]
        non_zero_vals = np.where(np.array(tuning_curve) != 0)[0]
        plateaus = np.intersect1d(all_plateaus, non_zero_vals)
        if len(plateaus) == 0:
            local_maxima = np.array([])
        else:
            first_bins = np.hstack([True, np.diff(plateaus) > 1])
            local_maxima = plateaus[first_bins]
        # Check wraparound case
        if 0 in local_maxima and len(tuning_curve) - 1 in local_maxima:
            local_maxima = local_maxima[1:]
    local_minima = argrelextrema(
        np.around(tuning_curve, decimals=5), np.less_equal, mode='wrap')[0]
    frames = set([])
    # Python 2 integer division (see docstring note).
    mid_point = len(tuning_curve) / 2
    for local_max in local_maxima:
        # Rotate the curve so the current peak sits at mid_point; boundary
        # searches then become simple left/right scans.
        offset = mid_point - local_max
        rolled_tuning = np.roll(tuning_curve, offset)
        rolled_mins = (local_minima + offset) % len(tuning_curve)
        try:
            left_local_min = np.amax(rolled_mins[rolled_mins < mid_point])
        except ValueError:
            # If there are no mins to the left of the midpoint try to find
            # a point that is 25% of the current peak
            try:
                left_boundary = np.amax(np.where(rolled_tuning[:mid_point] <
                                        0.25 * rolled_tuning[mid_point])[0])
            except ValueError:
                # If it never falls down to 25% of the current peak, fit the
                # entire left half
                left_boundary = 0
        else:
            # Check to see if it tuning falls down to 25% of peak before the
            # closest local min
            try:
                left_peak_edge = np.amax(np.where(rolled_tuning[:mid_point] <
                                         0.25 * rolled_tuning[mid_point])[0])
            except ValueError:
                left_boundary = left_local_min
            else:
                left_boundary = np.amax((left_peak_edge, left_local_min))
        # Same thing for the right side
        try:
            right_local_min = np.amin(rolled_mins[rolled_mins > mid_point])
        except ValueError:
            try:
                right_boundary = mid_point + 1 + np.amin(
                    np.where(rolled_tuning[mid_point + 1:] <
                             0.25 * rolled_tuning[mid_point])[0])
            except ValueError:
                right_boundary = len(tuning_curve) - 1
        else:
            try:
                right_peak_edge = mid_point + 1 + np.amin(
                    np.where(rolled_tuning[mid_point + 1:] <
                             0.25 * rolled_tuning[mid_point])[0])
            except ValueError:
                right_boundary = right_local_min
            else:
                right_boundary = np.amin((right_peak_edge, right_local_min))
        # Fit the Gaussian on the rolled coordinates, baseline-subtracted.
        x_data = np.arange(left_boundary, right_boundary + 1) - mid_point
        data_to_fit = rolled_tuning[left_boundary:right_boundary + 1]
        data_to_fit -= np.amin(data_to_fit)
        popt, pcov = curve_fit(
            gaussian_func, x_data, data_to_fit, p0=[.1, 1])
        if np.abs(popt[1]) > len(tuning_curve) / 2.:
            # Fitted width spans the whole belt: mark every bin.
            frames = frames.union(range(0, len(tuning_curve)))
        else:
            # Mark +/- 2 sigma around the (unrolled) peak, wrapping indices.
            pf_start = int(local_max - 2 * np.abs(popt[1]))
            pf_end = int(local_max + 2 * np.abs(popt[1]))
            if pf_start < 0:
                frames = frames.union(range(pf_start + len(tuning_curve),
                                            len(tuning_curve)))
                pf_start = 0
            if pf_end >= len(tuning_curve):
                frames = frames.union(range(0, pf_end - len(tuning_curve) + 1))
                pf_end = len(tuning_curve) - 1
            frames = frames.union(range(pf_start, pf_end + 1))
    # Collapse the marked bins into contiguous (start, end) intervals.
    sorted_frames = sorted(frames)
    gaps = np.where(np.diff(sorted_frames) != 1)[0]
    starts = [sorted_frames[0]]
    ends = []
    for gap in gaps:
        ends.append(sorted_frames[gap])
        starts.append(sorted_frames[gap + 1])
    ends.append(sorted_frames[-1])
    # Correct wraparound case
    # i.e starts = [0, 90], ends = [10, 99]
    if len(starts) > 1 and starts[0] == 0 \
            and ends[-1] == len(tuning_curve) - 1:
        starts[0] = starts.pop()
        ends.pop()
    # filter by area
    areas = []
    for start, end in zip(starts, ends):
        if start <= end:
            area = np.trapz(tuning_curve[start:end + 1])
        else:
            area = np.trapz(tuning_curve[start:]) + \
                np.trapz(tuning_curve[:end + 1])
        areas.append(area)
    maximum = np.amax(areas)
    include = [a >= 0.5 * maximum for a in areas]
    starts = [start for i, start in enumerate(starts) if include[i]]
    ends = [end for i, end in enumerate(ends) if include[i]]
    return starts, ends
def _shuffler(inputs):
    """Compute one bootstrap of shuffled per-bin transient/occupancy counts.

    Takes a single packed *inputs* tuple so it can be dispatched through
    multiprocessing.Pool.imap_unordered.

    Returns
    -------
    shuffle_values, shuffle_counts : ndarray(nROIs, n_position_bins)
        Summed over cycles, with every ROI's transients randomly
        re-placed by shuffle_transients().
    """
    (true_starts, true_ends, transient_responses, position_unsynced,
     behav_period, position_synced, framePeriod, frames_to_include,
     nROIs, n_position_bins, initial_counts) = inputs
    shuffle_values = np.zeros((nROIs, n_position_bins))
    shuffle_counts = np.zeros((nROIs, n_position_bins))
    # zip instead of the Python-2-only itertools.izip (Py3 compatibility).
    for cycle_true_starts, cycle_true_ends, cycle_responses, cycle_pos, \
            cycle_pos_synced, cycle_frames, cycle_counts in zip(
                true_starts, true_ends, transient_responses,
                position_unsynced, position_synced, frames_to_include,
                initial_counts):
        for roi_idx, roi_starts, roi_ends, roi_responses in zip(
                it.count(), cycle_true_starts, cycle_true_ends,
                cycle_responses):
            shuffle_starts, shuffle_ends = shuffle_transients(
                true_starts=roi_starts, true_ends=roi_ends,
                frames_to_include=cycle_frames)
            v, c, _ = generate_tuning_curve(
                start_indices=shuffle_starts, end_indices=shuffle_ends,
                response_magnitudes=roi_responses,
                position_unsynced=cycle_pos, behavior_rate=behav_period,
                position_synced=cycle_pos_synced, imaging_rate=framePeriod,
                n_position_bins=n_position_bins, return_squared=False,
                initial_counts=cycle_counts)
            shuffle_values[roi_idx] += v
            shuffle_counts[roi_idx] += c
    # Shuffled counts may become zero if there's an issue with the behavior
    # sampling rate.  NOTE(review): np.any(...) != 0 only checks that SOME
    # bin has counts; np.all may have been intended -- confirm.
    assert np.any(np.sum(shuffle_counts, axis=0)) != 0
    return shuffle_values, shuffle_counts
def binned_positions(expt, imData, frames_to_include, MAX_N_POSITION_BINS):
    """Calculate the binned treadmill position for each trial/cycle.

    Returns
    -------
    position_unsynced : list of int arrays
        Position bin per behavior sample; samples belonging to excluded
        imaging frames are overwritten with -1.
    position_synced : list of int arrays
        Position bin per imaging frame (mean bin of the frame's samples,
        falling back to the modal bin when the mean bin was not occupied).
    initial_counts : list of Counter
        Occupancy count per bin for each cycle (including the -1 bin).
    """
    nROIs, nFrames, nCycles = imData.shape
    framePeriod = expt.frame_period()
    behav_period = expt.find('trial').behaviorData()['samplingInterval']
    position_unsynced = []  # position bins as a function of behavioral frame
    position_synced = []  # position bins as a function of imaging frame
    initial_counts = []
    for idx, cycle in enumerate(expt.findall('trial')):
        position_unsynced.append((cycle.behaviorData(
            sampling_interval='actual')['treadmillPosition'] *
            MAX_N_POSITION_BINS).astype(int))
        position_synced.append(np.zeros(nFrames, dtype='int'))
        # exclude frames, e.g. when animal is not running
        exclude_frames = list(set(np.arange(nFrames)).difference(
            set(frames_to_include[idx])))
        # range() instead of the Python-2-only xrange (Py3 compatibility).
        for frame in range(nFrames):
            start = int(np.round(frame * framePeriod / behav_period))
            end = int(np.round((frame + 1) * framePeriod / behav_period))
            position_array = position_unsynced[idx][start:end]
            assert np.all(position_array >= 0)
            assert np.all(np.isfinite(position_array))
            # Use the mean bin if it was actually occupied during this
            # frame; otherwise fall back to the modal bin.
            pos = int(np.mean(position_array))
            if pos not in position_array:
                pos_mode, _ = mode(position_array)
                pos = int(pos_mode)
            assert not np.isnan(pos)
            position_synced[idx][frame] = pos
            if frame in exclude_frames:
                position_unsynced[idx][start:end] = -1
        initial_counts.append(Counter(position_unsynced[idx]))
    return position_unsynced, position_synced, initial_counts
def _calc_information(
        MAX_N_POSITION_BINS, true_values, true_counts, bootstrap_values,
        bootstrap_counts, n_bins_list, smooth_lengths, n_processes):
    """Spatial information for true and shuffled data across binnings.

    For each requested coarse binning the MAX_N_POSITION_BINS fine bins
    are rebinned at every possible circular shift, and the maximum
    information over shifts is kept.

    Returns
    -------
    true_information : ndarray(nROIs, len(n_bins_list))
    shuffle_information : ndarray(n_bootstraps, nROIs, len(n_bins_list))
        (axis order after the final rollaxis; the original docstring's
        (ROIs, bootstraps, nbins) described the pre-roll layout.)
    """
    nROIs = len(true_counts)
    n_bootstraps = bootstrap_values.shape[2]
    true_information = np.empty((nROIs, len(n_bins_list)))
    shuffle_information = np.empty((nROIs, n_bootstraps, len(n_bins_list)))
    if n_processes > 1:
        pool = Pool(processes=n_processes)
    for bin_idx, (n_bins, factor_smoothing) in enumerate(zip(
            n_bins_list, smooth_lengths)):
        # Floor division (//): plain '/' yields a float under Python 3,
        # which is invalid as an array shape or arange bound (identical to
        # '/' on Python 2 ints).
        true_information_by_shift = np.empty(
            (nROIs, MAX_N_POSITION_BINS // n_bins))
        for bin_shift in np.arange(MAX_N_POSITION_BINS // n_bins):
            values = np.roll(true_values, shift=bin_shift, axis=1).reshape(
                [nROIs, n_bins, -1]).sum(2)
            counts = np.roll(true_counts, shift=bin_shift, axis=1).reshape(
                [nROIs, n_bins, -1]).sum(2)
            true_information_by_shift[:, bin_shift] = calc_spatial_information(
                (values, counts, factor_smoothing))
        true_information[:, bin_idx] = np.max(
            true_information_by_shift, axis=1)
        shuffle_information_by_shift = np.empty(
            (nROIs, n_bootstraps, MAX_N_POSITION_BINS // n_bins))
        for bin_shift in np.arange(MAX_N_POSITION_BINS // n_bins):
            # Need to round for non-integer values
            assert np.all(np.around(
                np.std(np.sum(bootstrap_values, axis=1), axis=1), 12) == 0)
            shuffle_values = np.rollaxis(np.roll(
                bootstrap_values, shift=bin_shift, axis=1).reshape(
                [nROIs, n_bins, -1, n_bootstraps]).sum(2), 2, 0)
            assert np.all(np.around(
                np.std(np.sum(shuffle_values, axis=2), axis=0), 12) == 0)
            shuffle_counts = np.rollaxis(np.roll(
                bootstrap_counts, shift=bin_shift, axis=1).reshape(
                [nROIs, n_bins, -1, n_bootstraps]).sum(2), 2, 0)
            if n_processes > 1:
                # Floor division keeps chunksize an int under Python 3.
                chunksize = 1 + n_bootstraps // n_processes
                map_generator = pool.imap_unordered(
                    calc_spatial_information, zip(
                        shuffle_values, shuffle_counts,
                        it.repeat(factor_smoothing)),
                    chunksize=chunksize)
            else:
                map_generator = map(
                    calc_spatial_information, zip(
                        shuffle_values, shuffle_counts,
                        it.repeat(factor_smoothing)))
            idx = 0
            for info in map_generator:
                shuffle_information_by_shift[:, idx, bin_shift] = info
                idx += 1
        shuffle_information[:, :, bin_idx] = np.max(
            shuffle_information_by_shift, axis=2)
    if n_processes > 1:
        pool.close()
        pool.join()
    return true_information, np.rollaxis(shuffle_information, 1, 0)
def _calc_variances(
        true_values, true_counts, bootstrap_values, bootstrap_counts):
    """Circular variance of each ROI's tuning curve plus a shuffle p-value.

    Returns a (variances, p_values) pair with one entry per ROI; each
    p-value is the fraction of bootstrap shuffles whose circular variance
    falls at or below the true value.
    """
    n_bins = len(true_values[0])
    # Bin centers expressed as angles around the (circular) track.
    angles = 2 * np.pi * np.arange(0, 1, 1. / n_bins)
    variances = []
    p_values = []
    roi_data = it.izip(
        true_values, true_counts, bootstrap_values, bootstrap_counts)
    for roi_values, roi_counts, shuf_values, shuf_counts in roi_data:
        observed = var(angles, roi_values / roi_counts)
        variances.append(observed)
        shuffled = [
            var(angles, shuf_values[:, s] / shuf_counts[:, s])
            for s in range(shuf_values.shape[1])]
        p_values.append(percentileofscore(shuffled, observed) / 100.)
    return variances, p_values
def _shuffle_bin_counts(
        MAX_N_POSITION_BINS, position_unsynced, position_synced,
        frames_to_include, im_period, behav_period, true_starts,
        true_ends, transient_responses, n_bootstraps, n_processes,
        initial_counts):
    """Create shuffled versions of transient and observation counts per bin"""
    nROIs = len(true_starts[0])
    # Every bootstrap iteration receives the same argument tuple; the
    # shuffling itself happens inside _shuffler.
    shuffler_args = (
        true_starts, true_ends, transient_responses, position_unsynced,
        behav_period, position_synced, im_period, frames_to_include,
        nROIs, MAX_N_POSITION_BINS, initial_counts)
    if n_processes > 1:
        pool = Pool(processes=n_processes)
        chunksize = 1 + n_bootstraps / n_processes
        map_generator = pool.imap_unordered(
            _shuffler, it.repeat(shuffler_args, n_bootstraps),
            chunksize=chunksize)
    else:
        pool = None
        map_generator = map(
            _shuffler, it.repeat(shuffler_args, n_bootstraps))
    bootstrap_values = np.empty((nROIs, MAX_N_POSITION_BINS, n_bootstraps))
    bootstrap_counts = np.empty((nROIs, MAX_N_POSITION_BINS, n_bootstraps))
    for idx, (values, counts) in enumerate(map_generator):
        bootstrap_values[:, :, idx] = values
        bootstrap_counts[:, :, idx] = counts
    if pool is not None:
        pool.close()
        pool.join()
    return bootstrap_values, bootstrap_counts
def find_truth(
        imData, transients, MAX_N_POSITION_BINS, position_unsynced,
        position_synced, frames_to_include, im_period, behav_period,
        initial_counts):
    """Accumulate the un-shuffled (true) per-bin transient statistics.

    Returns
    -------
    true_values : array, (nROIs, nBins)
        Summed transient responses per position bin.
    true_values_squared : array, (nROIs, nBins)
        Summed squared responses per position bin.
    true_counts : array, (nROIs, nBins)
        Number of observations per position bin.
    true_starts, true_ends : list
        Imaging frame indices of transient starts/stops (stops inclusive),
        one entry per cycle.
    transient_responses : list of list of list
        Response magnitudes used as weights. Index order [cycle][roi][event].
    """
    nROIs = imData.shape[0]
    true_values = np.zeros((nROIs, MAX_N_POSITION_BINS))
    true_values_squared = np.zeros((nROIs, MAX_N_POSITION_BINS))
    true_counts = np.zeros((nROIs, MAX_N_POSITION_BINS))
    true_starts = []  # per cycle
    true_ends = []  # per cycle
    transient_responses = []  # per cycle
    cycles = it.izip(
        np.rollaxis(imData, 2, 0), np.rollaxis(transients, 1, 0),
        position_unsynced, frames_to_include, initial_counts)
    for cycle_idx, (cycle_imData, cycle_transients, cycle_pos,
                    cycle_frames, cycle_counts) in enumerate(cycles):
        starts, ends = transients_to_include(cycle_transients, cycle_frames)
        true_starts.append(starts)
        true_ends.append(ends)
        responses = calcResponseMagnitudes(
            imData=cycle_imData, starts=starts, ends=ends,
            im_period=im_period)
        transient_responses.append(responses)
        for roi_idx, (roi_starts, roi_ends, roi_responses) in enumerate(
                zip(starts, ends, responses)):
            v, c, vs = generate_tuning_curve(
                start_indices=roi_starts, end_indices=roi_ends,
                response_magnitudes=roi_responses,
                position_unsynced=cycle_pos, behavior_rate=behav_period,
                position_synced=position_synced[cycle_idx],
                imaging_rate=im_period,
                n_position_bins=MAX_N_POSITION_BINS, return_squared=True,
                initial_counts=cycle_counts)
            true_values[roi_idx] += v
            true_values_squared[roi_idx] += vs
            true_counts[roi_idx] += c
    return (
        true_values,
        true_values_squared,
        true_counts,
        true_starts,
        true_ends,
        transient_responses,
    )
def id_place_fields(expt, intervals='running', n_position_bins=100,
                    dFOverF='from_file', channel='Ch2', label=None,
                    demixed=False, smooth_length=3, n_bootstraps=1000,
                    confidence=95, transient_confidence=95,
                    n_processes=1, debug=False, isolated=False):
    """Identify place fields via shuffle-corrected spatial information.

    For every ROI, the spatial information of the true tuning curve
    (optimized over several bin counts) is compared against a bootstrap
    distribution of shuffled transients; ROIs exceeding the `confidence`
    percentile of the shuffle distribution get place fields defined on
    their smoothed tuning curve.

    Parameters
    ----------
    expt : experiment object
        Must provide imagingData, transientsData/transSubset,
        frame_period, runningIntervals, etc.
    intervals : 'running', 'all', or per-cycle frame lists
        Which imaging frames to analyze.
    n_position_bins : int
        Bin count of the returned tuning curves; must divide the internal
        maximum (100).

    Returns
    -------
    dict
        Tuning curves, per-ROI place-field intervals ('pfs'), spatial
        information, shuffle thresholds, and p-values.
    """
    params = {'intervals': intervals, 'n_position_bins': n_position_bins,
              'dFOverF': dFOverF, 'smooth_length': smooth_length,
              'n_bootstraps': n_bootstraps, 'confidence': confidence,
              'transient_confidence': transient_confidence}
    running_kwargs = {'min_duration': 1.0, 'min_mean_speed': 0,
                      'end_padding': 0, 'stationary_tolerance': 0.5,
                      'min_peak_speed': 5, 'direction': 'forward'}
    # every element of n_bins_list must divide into the first element evenly
    # to allow for fast re-binning during bin-shift calculations
    n_bins_list = [100, 50, 25, 20, 10, 5, 4, 2]
    # same length as n_bins_list
    smooth_lengths = [3, 1, 1, 0, 0, 0, 0, 0]
    assert np.all([n_bins_list[0] % x == 0 for x in n_bins_list[2:]])
    MAX_N_POSITION_BINS = n_bins_list[0]
    assert MAX_N_POSITION_BINS % n_position_bins == 0
    imData = expt.imagingData(
        dFOverF=dFOverF, channel=channel, label=label, demixed=demixed)
    if isolated:
        transients = expt.transSubset('isolated',
            channel=channel, label=label, demixed=demixed, threshold=transient_confidence)
    else:
        transients = expt.transientsData(
            channel=channel, label=label, demixed=demixed, threshold=transient_confidence)
    # imaging framerate
    im_period = expt.frame_period()
    # behavioral data framerate
    behav_period = expt.find('trial').behaviorData()['samplingInterval']
    nROIs, nFrames, nCycles = imData.shape
    if intervals == 'all':
        frames_to_include = [np.arange(nFrames) for x in range(nCycles)]
    elif intervals == 'running':
        running_intervals = expt.runningIntervals(returnBoolList=False,
                                                  **running_kwargs)
        # list of frames, one list element per cycle
        frames_to_include = [np.hstack([np.arange(start, end) for
                                        start, end in cycle]) for
                             cycle in running_intervals]
    else:
        # Assume frames_to_include was passed in directly
        frames_to_include = intervals
    position_unsynced, position_synced, initial_counts = binned_positions(
        expt, imData, frames_to_include, MAX_N_POSITION_BINS)
    true_values, true_values_squared, true_counts, true_starts, true_ends, \
        transient_responses = find_truth(
            imData, transients, MAX_N_POSITION_BINS, position_unsynced,
            position_synced, frames_to_include, im_period, behav_period,
            initial_counts)
    bootstrap_values, bootstrap_counts = _shuffle_bin_counts(
        MAX_N_POSITION_BINS, position_unsynced, position_synced,
        frames_to_include, im_period, behav_period, true_starts,
        true_ends, transient_responses, n_bootstraps, n_processes, initial_counts)
    true_information, shuffle_information = _calc_information(
        MAX_N_POSITION_BINS, true_values, true_counts, bootstrap_values,
        bootstrap_counts, n_bins_list, smooth_lengths, n_processes)
    true_circ_variances, circ_variance_p_vals = _calc_variances(
        true_values, true_counts, bootstrap_values, bootstrap_counts)
    # which num of bins gives max difference in info
    shuffle_means = shuffle_information.mean(axis=0)  # rois x bins
    true_diffed = true_information - shuffle_means  # rois x bins
    shuffle_diffed = shuffle_information - shuffle_means  # bootstraps x rois x bins
    # TODO: adjustment for self contribution to the mean
    # take best bin for true and for each shuffle
    optimal_true_info = true_diffed.max(axis=1)
    optimal_shuffle_info = shuffle_diffed.max(axis=2)
    thresholds = np.percentile(optimal_shuffle_info, confidence, axis=0)
    # Re-bin the MAX_N_POSITION_BINS curves down to n_position_bins.
    true_values = true_values.reshape([nROIs, n_position_bins, -1]).sum(2)
    true_values_squared = true_values_squared.reshape(
        [nROIs, n_position_bins, -1]).sum(2)
    true_counts = true_counts.reshape([nROIs, n_position_bins, -1]).sum(2)
    if smooth_length > 0:
        true_result = smooth_tuning_curves(true_values / true_counts,
                                           smooth_length=smooth_length,
                                           nan_norm=False)
    else:
        true_result = true_values / true_counts
    pfs = []
    information_p_vals = []
    for roi_idx, tuning_curve, response, threshold, shuffle_scores in it.izip(
            it.count(), true_result, optimal_true_info, thresholds,
            optimal_shuffle_info.T):
        information_p_vals.append(1. - percentileofscore(shuffle_scores, response) / 100.)
        if response > threshold:
            starts, ends = define_fields(tuning_curve)
            if len(starts):
                pfs.append(zip(starts, ends))
            else:
                print('Tuned cell w/ no field: roi ' +
                      '{}, response {}, threshold {}'.format(
                          roi_idx, response, threshold))
                # BUG FIX: still record an (empty) entry so that pfs stays
                # index-aligned with ROIs; previously this branch appended
                # nothing, shifting every subsequent ROI's fields.
                pfs.append([])
        else:
            pfs.append([])
    result = {'spatial_tuning': true_values / true_counts,
              'true_values': true_values,
              'true_counts': true_counts,
              'true_circ_variances': np.array(true_circ_variances),
              'circ_variance_p_vals': np.array(circ_variance_p_vals),
              'std': np.sqrt(true_values_squared / true_counts -
                             (true_values / true_counts) ** 2),
              'pfs': pfs,
              'parameters': params,
              'spatial_information': optimal_true_info,
              'thresholds': thresholds,
              'information_p_values': np.array(information_p_vals)}
    if intervals == 'running':
        result['running_kwargs'] = running_kwargs
    if smooth_length > 0:
        result['spatial_tuning_smooth'] = true_result
        result['std_smooth'] = smooth_tuning_curves(
            result['std'], smooth_length=smooth_length, nan_norm=False)
    if debug:
        # Heatmap of all tuning curves with place cells moved to the top,
        # then per-place-cell curves with their fields shaded.
        fig = plt.figure()
        tuning = np.copy(true_result)
        pcs = np.where(pfs)[0]
        for pc in pcs:
            # bring place cells to the top
            tuning = np.vstack((tuning[pc], tuning))
            tuning = np.delete(tuning, pc + 1, 0)
        # add in a blank row separating place cells from the rest
        arranged = np.vstack((tuning[:len(pcs)], [np.nan] * n_position_bins,
                              tuning[len(pcs):]))
        plt.imshow(arranged, vmin=0, vmax=np.percentile(true_result, 95),
                   interpolation='nearest')
        fig, axs = plt.subplots(10, 10, sharex=True, sharey=True)
        roi_names = expt.roiNames(channel=channel, label=label)
        for idx1 in range(10):
            for idx2 in range(10):
                index = 10 * idx1 + idx2
                if index >= len(pcs):
                    continue
                idx = pcs[index]
                axs[idx1, idx2].plot(true_result[idx])
                axs[idx1, idx2].set_title(roi_names[idx])
                for pf in pfs[idx]:
                    color = plt.cm.Set1(np.random.rand(1))
                    start = pf[0]
                    end = pf[1]
                    if start < end:
                        x_range = np.arange(start, end + 1)
                        y_min = np.zeros(len(x_range))
                        y_max = true_result[idx, start:end + 1]
                        axs[idx1, idx2].fill_between(x_range, y_min, y_max,
                                                     color=color)
                    else:
                        # Field wraps around the end of the track: shade the
                        # tail segment and the head segment separately.
                        x_range = np.arange(start, true_result.shape[1])
                        y_min = np.zeros(len(x_range))
                        y_max = true_result[idx, start:]
                        axs[idx1, idx2].fill_between(x_range, y_min, y_max,
                                                     color=color)
                        x_range = np.arange(0, end + 1)
                        y_min = np.zeros(len(x_range))
                        y_max = true_result[idx, :end + 1]
                        axs[idx1, idx2].fill_between(x_range, y_min, y_max,
                                                     color=color)
        plt.xlim((0, n_position_bins))
        plt.ylim((-0.02, np.amax(true_result)))
        plt.show()
    result['true_information'] = optimal_true_info
    result['bootstrap_distributions'] = bootstrap_values
    # NOTE(review): 'thresholds' was already set above; kept for
    # compatibility with any caller relying on this assignment order.
    result['thresholds'] = thresholds
    return result
|
11,797 | 8a0b4a0ccb703163c2bcf255a9c79e4702445158 | """TLE
N = int(input())
sequence = list(input().split())
ans = []
for i in range(N):
a_i = sequence[i]
ans.append(a_i)
ans = ans[::-1]
print(" ".join(ans))
"""
from collections import deque
n = int(input())
sequence = deque(map(int,input().split()))
ans = deque()
if n % 2 == 1:
for i in range(n):
if i % 2 == 0:
ans.appendleft(sequence[0])
else:
ans.append(sequence[0])
sequence.popleft()
#print(ans)
else:
for i in range(n):
if i % 2 == 1:
ans.appendleft(sequence[0])
else:
ans.append(sequence[0])
sequence.popleft()
#print(ans)
print(*ans)
|
11,798 | 60385f21272751711e4beaaa9d106ae13a4d76cd | from django.shortcuts import render
from django.http import HttpResponse,Http404
import datetime
from mytest.forms import Mail
from django.views.decorators.csrf import csrf_exempt
from django.core.mail import send_mail
from django import template
register=template.Library()
def hello(request):
    """Trivial view: respond with a plain "Hello World!" body."""
    return HttpResponse("Hello World!")
def date_time(request):
    """Show the server's current local date and time."""
    return HttpResponse("this time is %s" % datetime.datetime.now())
def hour_del(request, offset):
    """Show the time `offset` hours from now; 404 on a non-integer offset."""
    try:
        hours = int(offset)
    except ValueError:
        # URL captured a non-numeric offset.
        raise Http404()
    future = datetime.datetime.now() + datetime.timedelta(hours=hours)
    return HttpResponse("after %s hours is %s" % (hours, future))
def position(request):
    """Render test_1.txt with the site name in the template context."""
    return render(request, 'test_1.txt', {'site': "重庆邮电大学"})
def current_time(request):
    """Render time.html with the current timestamp."""
    context = {'current_time': datetime.datetime.now()}
    return render(request, 'time.html', context)
@csrf_exempt
def contact(request):
    """Contact-form view: validate the posted form and mail its contents.

    On GET an empty form is rendered; on a valid POST the message is sent
    and the same template is re-rendered with a success flag.  An invalid
    POST falls through and re-renders the bound form with its errors.
    """
    if request.method == 'POST':
        form = Mail(request.POST)
        if form.is_valid():
            data = form.cleaned_data
            # send_mail(subject, message, from_email, recipient_list):
            # from_email must be a single address string — the original
            # passed a one-element list here, which is invalid.
            send_mail(data['subject'], data['message'], 'lst123456@qq.com',
                      [data['email'], ])
            # NOTE(review): the context key 'sucess' is misspelled but must
            # match the template; left unchanged.
            return render(request, 'mail.txt', {'sucess': '发送成功', 'form': form})
    else:
        form = Mail()
    return render(request, 'mail.txt', {'form': form})
def list(request):
    """Render list.html with a hard-coded pair of articles."""
    # NOTE(review): this view name shadows the builtin `list`; kept
    # because the URLconf presumably refers to it by this name.
    articles = [
        {'title': '蚊香', 'body': '可快速消灭蚊虫'},
        {'title': '书', 'body': '人丑就要多读书'},
    ]
    return render(request, 'list.html', {'articles': articles})
@register.simple_tag
def value(v1, v2):
    """Template tag: return the sum (or concatenation) of two values."""
    return v1 + v2
def test(request):
    """Render the static albums-store page."""
    return render(request, 'albums-store.html')
# Create your views here.
|
11,799 | 4d06cf7586652071d4ca21d874b32a0a232a3c3f | '''
@author: shylent
'''
from itertools import count, islice
from random import randint
from tftp.util import CANCELLED, iterlast, timedCaller
from twisted.internet.defer import inlineCallbacks
from twisted.internet.task import Clock
from twisted.trial import unittest
class TimedCaller(unittest.TestCase):
    """Tests for tftp.util.timedCaller."""

    @staticmethod
    def makeTimedCaller(timings, clock=None):
        """Build a timedCaller whose invocations are logged into a list."""
        record = []

        def call():
            record.append("call")

        def last():
            record.append("last")

        if clock is None:
            return timedCaller(timings, call, last), record
        return timedCaller(timings, call, last, clock), record

    def test_raises_ValueError_with_no_timings(self):
        exc = self.assertRaises(ValueError, self.makeTimedCaller, [])
        self.assertEqual("No timings specified.", str(exc))

    @inlineCallbacks
    def test_calls_last_with_one_timing(self):
        d, log = self.makeTimedCaller([0])
        result = yield d
        self.assertIs(None, result)
        self.assertEqual(["last"], log)

    @inlineCallbacks
    def test_calls_both_functions_with_multiple_timings(self):
        d, log = self.makeTimedCaller([0, 0])
        result = yield d
        self.assertIs(None, result)
        self.assertEqual(["call", "last"], log)

    def test_pauses_between_calls(self):
        clock = Clock()
        d, log = self.makeTimedCaller([1, 2, 3], clock=clock)
        self.assertEqual([], log)
        # Each advance should trigger exactly one more invocation, the
        # final one being the "last" call.
        for delay, expected in [(1, ["call"]),
                                (2, ["call", "call"]),
                                (3, ["call", "call", "last"])]:
            clock.advance(delay)
            self.assertEqual(expected, log)
        self.assertIs(None, d.result)  # Finished.

    def test_can_be_cancelled(self):
        clock = Clock()
        d, log = self.makeTimedCaller([1, 2, 3], clock=clock)
        self.assertEqual([], log)
        clock.advance(1)
        self.assertEqual(["call"], log)
        clock.advance(2)
        self.assertEqual(["call", "call"], log)
        d.cancel()
        self.assertIs(CANCELLED, d.result)
        # Advancing the clock must not trigger any further calls.
        clock.advance(3)
        self.assertEqual(["call", "call"], log)
class IterLast(unittest.TestCase):
    """Tests for tftp.util.iterlast."""

    def test_yields_nothing_when_no_input(self):
        self.assertEqual([], list(iterlast([])))

    def test_yields_once_for_input_of_one(self):
        thing = object()
        self.assertEqual([(True, thing)], list(iterlast([thing])))

    def test_yields_once_for_each_input(self):
        things = [object() for _ in range(1, randint(9, 99))]
        expected = [
            (i == len(things) - 1, t) for i, t in enumerate(things)]
        self.assertEqual(expected, list(iterlast(things)))

    def test_works_with_unsized_iterators(self):
        things = [object() for _ in range(1, randint(9, 99))]
        expected = [
            (i == len(things) - 1, t) for i, t in enumerate(things)]
        self.assertEqual(expected, list(iterlast(iter(things))))

    def test_works_with_infinite_iterators(self):
        # ... though is_last will never be True.
        self.assertEqual(
            [(False, 1), (False, 2), (False, 3)],
            list(islice(iterlast(count(1)), 3)))
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.