index int64 0 1,000k | blob_id stringlengths 40 40 | code stringlengths 7 10.4M |
|---|---|---|
997,300 | 49ab4b2fb16cbcace513dca65f2305ad1512bf78 | def common_letters(string_one,string_two):
common=[]
for one in string_one:
if (one in string_two) and not (one in common):
common.append(one)
return common
print(common_letters("manhattan","san francisco"))
|
997,301 | fffcd26acddac85a193151231f8f832e9dfc2138 | #Libreria
# Standard-library base64 helpers (note: base64 is an encoding, not encryption).
import base64
## Base64 "encryption" (encoding)
# Text to encode
texto = "cifrado base 64"
# b64encode works on bytes, so the str must be UTF-8 encoded first
encriptar = base64.b64encode(texto.encode("utf-8"))
textoEncriptado = str(encriptar, "utf-8")
## Base64 decoding
# b64decode accepts the ASCII str produced above
desencriptar = base64.b64decode(textoEncriptado)
textoDesencriptado = str(desencriptar, "utf-8")
# Final output (user-facing strings are behavior and stay in Spanish)
print("El texto encriptado es:", textoEncriptado)
print("El texto desencriptado es:", textoDesencriptado)
|
997,302 | 0d2b3d3e6b4bef6b292da8ce539dce13f33b5a83 | from flask import request
from api_handlers import users
from app_init import app
from util import POST_REQUEST, ParsedRequest, api_response, json_response
# user registration route
# POST request
@app.route("/users/register/", **POST_REQUEST)
@app.route("/register", **POST_REQUEST)
@api_response
def register():
    """Register a new user from the POSTed payload."""
    return users.register(ParsedRequest())


# refresh the JWT Token
# GET request
@app.route("/u/token/refresh/", strict_slashes=False)
@api_response
def refesh_token():  # NOTE(review): typo ("refesh"); renaming would change the Flask endpoint id, so it is left as-is
    """Re-issue an access token for the requesting user."""
    return users.re_authenticate(ParsedRequest())


@app.route("/u/discord/auth/code/", **POST_REQUEST)
@api_response
def setup_discord_auth():
    """Hand the POSTed Discord OAuth code to the users handler."""
    return users.setup_discord(ParsedRequest())


# ===========================================================================
# Users
# Get user info, secure data is removed for unauthenticated
# requests
@app.route("/users/<user>/data/", strict_slashes=False)
@api_response
def user_details(user):
    """Return details for ``user`` (sensitive data stripped when unauthenticated)."""
    return users.get_user_details(ParsedRequest(), user)


# edit user info, only authenticated requests allowed
@app.route("/users/<user>/edit/", **POST_REQUEST)
@api_response
def edit_user(user):
    """Apply the POSTed edits to ``user``'s profile."""
    return users.edit(ParsedRequest(), user)


@app.route("/users/login/", **POST_REQUEST)
@api_response
def user_login():
    """Authenticate a user from the POSTed credentials."""
    return users.login(ParsedRequest())


# user leaderboard
@app.route("/users/all/", strict_slashes=False)
@api_response
def all_users():
    """List all users (used as the leaderboard)."""
    return users.user_list()


@app.route("/users/auth/check/", strict_slashes=False)
@api_response
def check_auth_resp():
    """Report whether the request carries valid authentication."""
    return users.check_auth()


@app.route("/logout/", strict_slashes=False)
@api_response
def log_user_out():
    """Log out by blanking both auth tokens via response headers."""
    return json_response({}, headers={"x-access-token": "", "x-refresh-token": ""})
|
997,303 | c9f92ef5280aaa4a4330513c2a906bfe9f4e4921 | #coding=utf-8
import sys
from twisted.internet.protocol import ServerFactory
from twisted.protocols.basic import LineReceiver
from twisted.python import log
from twisted.internet import reactor,task
from ss import cfgshell
import db
from twisted.cred import checkers,credentials,portal
from zope.interface import Interface
from SscmdAvater import SscmdAvater,SscmdRealm,ISscmdAvaterInterface,_adminuser,_adminpass
from twisted.internet.threads import deferToThread
import mailcheck
import sstime
from datetime import datetime
class CmdProtocol(LineReceiver):
    """Line-based admin command protocol (Python 2 / Twisted).

    A client must first log in with a single "<user> <password>" line;
    every later line is a command dispatched to the logged-in avatar.
    NOTE(review): indentation reconstructed from control flow; verify
    against the original source.
    """

    delimiter = '\n'

    def __init__(self):
        log.msg('init cmdProtocol...')
        # Set by _cbLoginOK after a successful cred login; None = not yet authenticated.
        self._avater=None

    def connectionMade(self):
        # Track clients by peer address and enforce the connection cap.
        self.client_ip = self.transport.getPeer()
        log.msg("Client connection from %s" % self.client_ip)
        if len(self.factory.clients) >= self.factory.clients_max:
            #kick connection timeout
            # Evict a peer idle for more than 30s to make room for this one.
            for time,trans in self.factory.clients.values():
                if (datetime.now()-time).seconds > 30:
                    trans.loseConnection()
                    self.factory.clients[self.client_ip]=(datetime.now(),self.transport)
                    return
            if len(self.factory.clients)>=self.factory.clients_max:
                log.msg("Too many connections. bye !")
                self.client_ip = None
                self.transport.loseConnection()
                return
        self.factory.clients[self.client_ip]=(datetime.now(),self.transport)

    def connectionLost(self, reason):
        log.msg('Lost client connection. Reason: %s' % reason)
        if self.client_ip:
            del self.factory.clients[self.client_ip]

    def lineReceived(self, line):
        log.msg('Cmd received from %s,%s' % (self.client_ip, line))
        if not self._avater:
            # Not authenticated yet: this line must be "<user> <password>".
            avater=line.strip().split(' ')
            if len(avater)!=2:
                self.sendLine('Input user name and password(aplite by apace):');
            else:
                user=avater[0]
                # Numeric accounts below 30000 are offset by 20000 (port aliasing).
                if avater[0].isdigit() and int(avater[0])<30000:
                    user=str(int(avater[0]) + 20000)
                self.login(user,avater[1])
            return;
        #login ok , get a avater
        #self.transport.write('-----------------------\n')
        #process command line
        if self.factory.cfgfile==None or self.factory.dbinfo==None:
            log.msg("Can not get port config file or db file!")
            self.transport.write('Fatal error! Command fail!\n')
            return
        ret,msg=self._avater.processCmd(line,self.factory.dbinfo,self.factory.cfgfile, self.factory)
        #output: ret == 0 -> reply; ret == -1 -> reply (if any) and disconnect
        if ret==0 and msg:
            log.msg(msg)
            self.sendLine(msg)
        elif ret==-1:
            log.msg(msg)
            if msg: self.sendLine(msg)
            self.transport.loseConnection()
            return
        # self.transport.write('=======================\n');

    # Python 2 tuple-parameter syntax: cred login deferred fires with
    # (interface, avatar, logout).
    def _cbLoginOK(self,(interface,avater,logout)):
        log.msg('login ok.')
        self._avater=avater
        self.sendLine('Welcome %s! What can I do for you?' % avater.avaterId)

    def _cbLoginFail(self,fail):
        log.msg('login failed!')
        self.sendLine('Login failed!' )
        self.transport.loseConnection()

    def login(self,user,password):
        # NOTE(review): logs the plaintext password — consider redacting.
        log.msg('Prepare to login! username[%s],password[%s]' % (user,password))
        d=self.factory._portal.login(
            credentials.UsernamePassword(user,password),
            None,
            ISscmdAvaterInterface)
        d.addCallbacks(self._cbLoginOK, self._cbLoginFail)
class MyFactory(ServerFactory):
    """Factory for CmdProtocol: owns the DB/config handles, the cred
    checker/portal, the connected-client table and the periodic
    account-expiry job."""

    protocol = CmdProtocol

    def reloadUser(self):
        # Rebuild the in-memory user/password table from the DB,
        # plus the fixed admin account.
        log.msg('reload user infomation...');
        self.checker.users={};
        self.checker.addUser( _adminuser,_adminpass )
        userinfo={}
        cols,rows=self.dbinfo.find(userinfo);
        for i in range(len(rows)):
            port,passwd=rows[i][:2]
            # The account name is the port number rendered as a string.
            self.checker.addUser(str(port),passwd)

    def getaportal(self):
        """Build the cred portal backed by the (re)loaded user checker."""
        log.msg('Get a portal...' )
        aportal=portal.Portal(SscmdRealm())
        self.reloadUser();
        aportal.registerChecker(self.checker)
        return aportal

    def __init__(self, clients_max=1):
        log.msg('Now init sscmd factory...')
        self.clients_max = clients_max
        self.clients = {}
        self.dbinfo=db.ssdb()
        self.cfgfile=cfgshell.cfgfile()
        self.checker=checkers.InMemoryUsernamePasswordDatabaseDontUse()
        self._portal = self.getaportal()

    def accountcheck(self):
        """Hourly LoopingCall target: only acts during the 10 o'clock hour."""
        #if sstime.now().strftime('%H')='00':
        if sstime.now().strftime('%H')!='10': return #at 10 o'clock evary day
        log.msg('Checking port status and mail to user if port is expired/will expire/testing...')
        #first, stop all expired port,and mail to user
        mailcheck.stopexp(self)
        #then,warn all who will expired in 3 days
        mailcheck.mailwillexp(self)
        #then,mail to testing user expired after 2 days
        mailcheck.mailtest(self)
        #mail to stoped user to buy
        #mailcheck.mailstoped(self)
log.startLogging(sys.stdout)
#log.startLogging(open(r"./sscmd.log",'a'))
myfac=MyFactory(2)
reactor.listenTCP(39125, myfac)
t=task.LoopingCall(myfac.accountcheck)
#check every 1h
t.start(3600)
#t.start(20)
reactor.run()
|
997,304 | d29deb748bcc6ad7a8c4be40410668d2b3014766 | # Template used to print a Trello board. Uses pystache.
board_template = """
<html>
<head>
<meta content="text/html;charset=utf-8" http-equiv="Content-Type">
<meta content="utf-8" http-equiv="encoding">
<link rel="stylesheet" href="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.1/css/bootstrap.min.css">
<link rel="stylesheet" href="https://maxcdn.bootstrapcdn.com/bootstrap/3.3.1/css/bootstrap-theme.min.css">
<style>
.card {
margin: 5px;
border:1px solid black;
width: 48%;
display: inline-block;
vertical-align: top;
} .longcard {
margin: 5px;
border:1px solid black;
width: 98%;
} .list {
clear: both;
margin-top: 10px;
padding-top: 10px;
border-top:3px dotted black;
}
@media print
{
.no-print, .no-print *
{
display: none !important;
}
div{
page-break-inside: avoid;
}
a[href]:after {
content: none !important;
}
}
</style>
</head>
<body>
<h2>Board {{{name}}}</h2>
by {{#members}}{{{fullName}}}, {{/members}}Last activity: {{dateLastActivity}} /
<a href='{{url}}'>{{url}}</a>
{{#activeLists}}
<div class="list">
<h3>{{{name}}}</h3>
{{#activeCards}}
<div class="col-md-6 card">
<h4><a href='{{url}}'>{{{name}}}</a></h4>
<p>
{{{desc}}}
</p>
{{#checklists}}
<ul>
{{#checkItems}}
<li>{{{name}}}</li>
{{/checkItems}}
</ul>
{{/checklists}}
<div class="no-print">
{{#attachments}} | <a href="{{url}}">{{name}}</a>{{/attachments}}
</div>
</div>
{{/activeCards}}
{{#activeLongCards}}
<div class="col-md-6 longcard">
<h4><a href='{{url}}'>{{{name}}}</a></h4>
<p>
{{{desc}}}
</p>
{{#checklists}}
<ul>
{{#checkItems}}
<li>{{{name}}}</li>
{{/checkItems}}
</ul>
{{/checklists}}
<div class="no-print">
{{#attachments}} | <a href="{{url}}">{{name}}</a>{{/attachments}}
</div>
</div>
{{/activeLongCards}}
</div>
{{/activeLists}}
</body>
<html>
"""
import pystache
import cgi
import codecs
class Trello_Board(object):
    """Wraps an exported Trello board JSON, reshaped for pystache rendering.

    (Python 2 code: uses cgi.escape and byte-string encoding.)
    """

    def __init__(self, json_object, big_card_min_words):
        # big_card_min_words: description length above which a card gets
        # a full-width ("long") layout.
        self.board_json = self._transmogrify_trello_board(json_object, big_card_min_words)

    def html_render(self, template):
        """Render this board through *template*; returns UTF-8 bytes."""
        return pystache.render(template, self.board_json).encode('utf-8')

    def get_pdf_attachments(self):
        """Return the URLs of every .pdf attachment on the active short cards."""
        attachments = []
        for lst in self.board_json['activeLists']:
            for card in lst['activeCards']:
                for attach in card['attachments']:
                    if attach['url'].endswith(".pdf"):
                        attachments.append(attach['url'])
        return attachments

    def _formatDates(self, obj, fields):
        # Trim ISO timestamps to the date part ("YYYY-MM-DDT..." -> "YYYY-MM-DD").
        for field in fields:
            obj[field] = obj[field][:obj[field].find('T')]
        return obj

    def _prettyHtml(self, obj, fields):
        # HTML-escape the given fields and turn newlines into <br/>.
        for field in fields:
            htmlified = cgi.escape(obj[field]).encode('ascii', 'xmlcharrefreplace')
            htmlified = htmlified.replace('\n', '<br/>')
            obj[field] = htmlified
        return obj

    def _transmogrify_trello_board(self, board, big_card_min_words):
        """ Transform a plain Trello format (list of stuff) to a hierarchical model
        more suitable for pystache """
        board = self._prettyHtml(board, ['name'])
        board = self._formatDates(board, ['dateLastActivity'])
        # Htmlize the members names
        for o in board['members']:
            o = self._prettyHtml(o, ['fullName'])
        # Create list of active trello-lists
        board['activeLists'] = []
        for lst in board['lists']:
            if lst['closed']: continue
            # Get all the non-closed cards for this list
            activeCards = [ self._formatDates(self._prettyHtml(card, ['name', 'desc']), ['dateLastActivity']) \
                            for card in board['cards'] \
                            if card['idList'] == lst['id'] \
                            and not card['closed'] ]
            # Short cards render in two columns; long cards get a full row.
            lst['activeCards'] = [card for card in activeCards if len(card['desc']) < big_card_min_words]
            lst['activeLongCards'] = [card for card in activeCards if len(card['desc']) >= big_card_min_words]
            # Get all the active checklists (attached to short cards only,
            # mirroring the original behavior).
            for card in lst['activeCards']:
                card['checklists'] = [checklist for checklist in board['checklists']
                                      if checklist["idCard"]==card["id"] ]
                for cl in card['checklists']:
                    for item in cl['checkItems']:
                        item = self._prettyHtml(item, ['name'])
            lst = self._prettyHtml(lst, ['name'])
            board['activeLists'].append(lst)
        return board
import json
import cgi
import codecs
import sys
import tempfile
import pdfkit
import os
import urllib2
def pretty_print(jstuff):
    """Serialize *jstuff* as human-readable, key-sorted JSON."""
    opts = {
        'sort_keys': True,
        'indent': 4,
        'separators': (',', ': '),
    }
    return json.dumps(jstuff, **opts)
def read_json_board(fp, big_card_min_words):
    """Decode *fp* (a UTF-8 byte stream) and build a Trello_Board from it."""
    raw = codecs.getreader('utf-8')(fp).read()
    return Trello_Board(json.loads(raw), big_card_min_words)
def create_pdf_bundle(args, board):
    """Render the board to a temp PDF, optionally download its pdf
    attachments, and join everything into <out_fname>.pdf via ghostscript.

    (Python 2 code: print >> and urllib2.)
    """
    # List of all pdf files to join
    pdfs_to_join = []
    # Print temp pdf board from html, to be used by pdf joiner
    tmp_board_pdf = tempfile.NamedTemporaryFile(delete=False)
    if not args.quiet: print >> sys.stderr, "Generating temporary board pdf in {}...".format(tmp_board_pdf.name)
    pdfkit.from_string(board.html_render(board_template), tmp_board_pdf.name)
    pdfs_to_join.append(tmp_board_pdf)
    # Fetch the board's attachments
    if args.bundle_attachments:
        for url in board.get_pdf_attachments():
            f = tempfile.NamedTemporaryFile(delete=False)
            if not args.quiet: print >> sys.stderr, "Downloading attachment {} into {}...".format(url, f.name)
            f.write( urllib2.urlopen(url).read() )
            pdfs_to_join.append(f)
    # exec
    out_fname = args.out_fname
    if out_fname is None:
        out_fname = sys.argv[0] + '_bundle.pdf'
    else:
        out_fname += '.pdf'
    # Save the fnames and close them, if we hold these files open ghostscript goes haywire
    join_fnames = ' '.join([f.name for f in pdfs_to_join])
    for f in pdfs_to_join:
        f.close()
    if not args.quiet: print >> sys.stderr, "Generating bundle {}:".format(out_fname)
    # NOTE(review): out_fname/join_fnames are interpolated into a shell
    # command; names containing shell metacharacters would be interpreted
    # by the shell. Consider subprocess with an argument list.
    pdf_join_cmd = 'gs -dBATCH -dNOPAUSE -q -sDEVICE=pdfwrite -sOutputFile={} {}'\
        .format(out_fname, join_fnames)
    if not args.quiet: print >> sys.stderr, "Running {}".format(pdf_join_cmd)
    os.system(pdf_join_cmd)
    # Clean up temp files
    for fn in pdfs_to_join: os.remove(fn.name)
def main(args):
    """Read a Trello JSON export from stdin and emit the requested outputs:
    an .html rendering, a prettified debug .json and/or the pdf bundle."""
    board = read_json_board(sys.stdin, args.big_card_min_words)
    if not args.quiet: print >> sys.stderr, "Valid board found!"
    if args.html_output:
        # Default to stdout; -o FILE redirects to FILE.html.
        f = sys.stdout
        if args.out_fname is not None:
            f = open(args.out_fname + '.html', 'w+')
        print >> f, board.html_render(board_template)
    if args.print_debug_json:
        f = sys.stdout
        if args.out_fname is not None:
            f = open(args.out_fname + '.json', 'w+')
        print >> f, pretty_print(board.board_json)
    if args.create_pdf_bundle:
        create_pdf_bundle(args, board)
import argparse
import sys
app_desc = """
Trello board printer: generate a printable version of a Trello board including card descriptions and attachments.
Usage: Open a Trello board, go to the menu of the board and click the "Share, Print and Export" \
option. Click the "Export to JSON" option and download the resulting json file. Call this program \
using the downloaded file as input. For example:
python {0} <trello_board.json -o MyBoard
{0} will then:
1. Create a printable version of the board, including card descriptions.
2. Download all the (non-archived) card attachments which are stored in Amazon.
3. Bundle the printable version of the board with the downloaded attachments into MyBoard.pdf.
You can change the board print template by editing {0}. You can goto \
https://github.com/nicolasbrailo/TrelloPrinter and request a user-friendlier template edition. \
Actually, you can go over there and request any feature you'd like.
"""
parser = argparse.ArgumentParser(description=app_desc.format(sys.argv[0]),
formatter_class=argparse.RawDescriptionHelpFormatter)
parser.add_argument("-o", "--output", dest="out_fname",
default=None, metavar="FILE_NAME",
help="Use FILE_NAME for output (eg: Specify '-o MyBoard --html \
--debug' to generate MyBoard.json and MyBoard.html).")
parser.add_argument("--no-attachments", action="store_false",
dest="bundle_attachments", default=True,
help="Don't download and bundle pdf attachments in final printable document.")
parser.add_argument("--html", action="store_true",
dest="html_output", default=False,
help="Output only an HTML version of the board.")
parser.add_argument("--no-bundle", action="store_false",
dest="create_pdf_bundle", default=True,
help="Don't create a pdf bundle of the specified board.")
parser.add_argument("--debug", action="store_true",
dest="print_debug_json", default=False,
help="Output a prettyfied version of the board's json.")
parser.add_argument("-m", "--min_words", type=int, metavar='N',
dest="big_card_min_words", default=800,
help="Number of words after which a card is considered 'big'. "\
"A single column layout is used for big cards. [800]")
parser.add_argument("-q", "--quiet", action="store_true",
dest="quiet", default=False,
help="Run in quiet mode.")
if __name__ == '__main__':
main(parser.parse_args())
|
997,305 | d8bff60e49d099361e04af3e52846edfe116d5d8 | # 9613
from itertools import combinations
from math import gcd

# BOJ 9613: for each test case "k n1 ... nk", print the sum of
# gcd(a, b) over every unordered pair of the k numbers.
count = int(input())
gcd_list = []
for _ in range(count):
    values = list(map(int, input().split()))[1:]  # first token is the count; drop it
    pair_total = sum(gcd(a, b) for a, b in combinations(values, 2))
    gcd_list.append(pair_total)
for total in gcd_list:
    print(total)
|
997,306 | 9955ced6cb3379840006cd9ca438b7a5bbf8bd5e | import unittest,paramunittest
import warnings
from common.testdata_utils import TestdataUtils
from common.requests_utils import RequestsUtils
case_info = TestdataUtils().get_testcase_data_list() # excel数据
# case_info = TestdataUtils().get_testcase_data_list_by_mysql() #mysql数据
@paramunittest.parametrized(
*case_info
)
class APITest(paramunittest.ParametrizedTestCase):
    """Data-driven API test: paramunittest creates one instance per row
    of ``case_info`` (loaded from Excel or MySQL at module level)."""

    def setUp(self) -> None:
        # Silence unclosed-resource warnings raised by the HTTP client.
        warnings.simplefilter('ignore',ResourceWarning)

    def setParameters(self, case_id, case_info):
        # Called by paramunittest with one parameter tuple per case.
        self.case_id = case_id
        self.case_info = case_info

    def test_api_common_function(self):
        """Run one data-driven case and assert its check result."""
        # Override unittest's method name/doc so reports show the case id
        # and name taken from the test data (keys are Chinese column names
        # in the source spreadsheet and must not be translated).
        self._testMethodName = self.case_info[0].get('测试用例编号')
        self._testMethodDoc = self.case_info[0].get('测试用例名称')
        actual_result = RequestsUtils().request_by_step(self.case_info)
        self.assertTrue(actual_result.get('check_result'),actual_result.get('message'))
if __name__ == '__main__':
unittest.main() |
997,307 | 4805cf482dc5fb4d193a4d624c64efd7d157d660 | prnt 'hello'
|
997,308 | be1b3eee0ac331a43100d71c2574b9e1f9130a04 | from collections import OrderedDict
import sys

# Major Python version, used to pick the right keys() idiom in main().
python_ver = sys.version_info[0]


def main():
    """Demonstrate dict / OrderedDict basics: build, delete, inspect order."""
    # 'a'..'x' mapped to 'A'..'X' (24 lowercase/uppercase pairs).
    table = {chr(ord('a') + i): chr(ord('A') + i) for i in range(24)}
    print(len(table))
    print(table)

    # Remove 'a' exactly once; the second delete attempt is a no-op.
    assert('a' in table)
    table.pop('a', None)
    assert('a' not in table)
    table.pop('a', None)

    # Copy into an OrderedDict (same insertion order).
    ordered_table = OrderedDict(table)
    print(len(ordered_table))
    print(ordered_table)

    # Last inserted key: py2 keys() is a list, py3 needs list().
    if python_ver == 2:
        print(ordered_table.keys()[-1])
    elif python_ver == 3:
        print(list(ordered_table.keys())[-1])


if __name__ == '__main__':
    main()
|
997,309 | 41afde9165bd9425503b2cb7ef61e5616068f80d | '''
리스트의 항목 중 유일한 값으로만 구성된 리스트를 반환하는 함수를 정의하고
이 함수를 이용해 리스트의 중복 항목을 제거하는 프로그램을 작성하십시오.
출력
[1, 2, 3, 4, 3, 2, 1]
[1, 2, 3, 4]
'''
def arr_l(no_arr):
    """Return a list of *no_arr*'s unique items, keeping first-seen order.

    dict.fromkeys preserves insertion order (Python 3.7+), so unlike
    list(set(...)) the result is deterministic for any input.
    """
    return list(dict.fromkeys(no_arr))


# Demo: deduplicate [1, 2, 3, 4, 3, 2, 1] -> [1, 2, 3, 4]
l=[1,2,3,4,3,2,1]
arrange_l=arr_l(l)
print(arrange_l)
|
997,310 | d118e9275143c691d717093c64304d00d53fb224 | #raise (lanza exepciones de forma voluntaria)
# Ask how many times the user has been to jail; register them when <= 1,
# otherwise abort on purpose with a ValueError to demonstrate `raise`.
# (Prompt and messages are user-facing Spanish strings and stay unchanged.)
dato = int(input("ingresa cuantas veces has estado en la carcel :"))
if dato <=1 :
    print("FELICIDADES QUEDASTE REGISTRADO")
else:
    raise ValueError ("EL PROGRAMA CALLO INESPERADAMENTE")
#se coloca cualquier nombre de error reservado y se le le
#puede asignar un mensaje personalizado
#programa que, crear un error para romper el programa en caso
#de que la persona hubiese estado más de 2 veces en la carcel |
997,311 | f4023185c263dc41721b27cba685dc32639eaf34 | import os
import torch
import numpy as np
import pandas as pd
from scipy.misc import imresize
from PIL import Image
from skimage import io, transform
from skimage.color import rgba2rgb
from torch.utils.data import Dataset, DataLoader
from torchvision import transforms, utils
class RetinaDataset(Dataset):
    """Dataset of retina images with plus-disease labels from a CSV.

    The CSV must provide 'imageName' and 'Golden Reading Plus' columns.
    __getitem__ returns None for rows whose image/label cannot be loaded;
    pair it with a collate_fn that filters out None (see prepare()).
    """

    # Label text -> class index (hoisted: was rebuilt on every __getitem__).
    _LABEL_TO_INT = {'No': 0, 'Pre-Plus': 1, 'Plus': 2}

    def __init__(self, csv_file, root_dir, transform=None):
        self.rop_df = pd.read_csv(csv_file)
        self.root_dir = root_dir
        self.transform = transform

    def __len__(self):
        return len(self.rop_df)

    def __getitem__(self, idx):
        sample_df = self.rop_df.iloc[[idx]]
        try:
            img_name = os.path.join(self.root_dir, sample_df['imageName'].iloc[0])
            img_name = '..' + img_name.split('.')[2] + '.png'  # png for segmented, bmp for raw
            image = io.imread(img_name)
            # need this check for the raw images to ensure rgb, not rgba
            # if image.shape[2] > 3:
            #     image = rgba2rgb(image)
            image = Image.fromarray(image)
            plus = self._LABEL_TO_INT[sample_df['Golden Reading Plus'].iloc[0]]
            if self.transform:
                image = self.transform(image)
            sample = {'image': image, 'plus': plus, 'img_name': img_name}
        except Exception:
            # BUGFIX: was a bare `except:` — narrowed so KeyboardInterrupt /
            # SystemExit still propagate. Bad rows yield None and are
            # dropped by the collate_fn.
            sample = None
        return sample
class Preprocess(object):
    """Resize-then-center-crop preprocessing.

    params = {'resize': {'width', 'height', ...}, 'crop': {'width', 'height'}}
    __call__ takes a PIL image and returns a numpy array replicated to
    3 channels (the inputs are single-channel segmented images).
    """

    def __init__(self, params):
        self.params = params

    def __call__(self, image):
        # this is what James did for preprocessing images
        resize, crop = self.params['resize'], self.params['crop']
        # Margin to trim on each side after resizing (int-truncated).
        crop_width = (resize['width'] - crop['width']) / 2
        crop_height = (resize['height'] - crop['height']) / 2
        crop_width = int(crop_width)
        crop_height = int(crop_height)
        image = image.resize((resize['width'], resize['height']), Image.ANTIALIAS)
        image = image.crop((crop_width, crop_height, image.size[0]-crop_width, image.size[1]-crop_height))
        image = np.stack((image,)*3, axis=-1) # if segmented image
        return image
def prepare(action, data_dir, csv_file, save_file=None):
    """Build a DataLoader over RetinaDataset for the given *action*.

    action: 'prepare' pickles the loader to save_file and returns True;
            'train' returns (shuffled loader, dataset size);
            'eval' / 'cluster' return (unshuffled loader, dataset size);
            'predict' raises NotImplementedError.
    """
    params = {'resize': {'width': 404, 'height': 302, 'interp': 'bicubic'},
              'crop': {'width': 300, 'height': 300}}
    batch_size = 32  # TODO: add as arg when running
    # Drop samples that RetinaDataset.__getitem__ turned into None.
    collate_fn = (lambda x: torch.utils.data.dataloader.default_collate(list(filter(lambda y: y is not None, x))))

    def _make_loader(shuffle):
        # All actions shared an identical, triplicated dataset/transform
        # pipeline; only the shuffle flag differs.
        dataset = RetinaDataset(csv_file=csv_file,
                                root_dir=data_dir,
                                transform=transforms.Compose([
                                    Preprocess(params),
                                    transforms.ToTensor(),
                                    transforms.Normalize(mean=[0.485, 0.456, 0.406],
                                                         std=[0.229, 0.224, 0.225])
                                ]))
        loader = DataLoader(dataset, batch_size=batch_size, shuffle=shuffle, collate_fn=collate_fn)
        return loader, len(dataset)

    if action == 'prepare':
        dataloader, _ = _make_loader(shuffle=True)
        if save_file is not None:
            # BUGFIX: pickle was never imported at module level (NameError).
            import pickle
            with open(save_file, "wb") as f:
                pickle.dump(dataloader, f)
        return True
    elif action == 'train':
        return _make_loader(shuffle=True)
    elif action == 'eval' or action == 'cluster':
        return _make_loader(shuffle=False)
    elif action == 'predict':
        # TODO: make dataloader when there are no labels
        raise NotImplementedError
if __name__ == "__main__":
    # BUGFIX: sys is never imported at module level in this file, so
    # sys.argv raised NameError; import it locally here.
    import sys
    action = sys.argv[1]           # prepare, train, eval, predict
    data_dir = sys.argv[2]         # file of train, test data
    csv_file = sys.argv[3]         # csv file of info OR output file name
    dataloader_file = sys.argv[4]  # model path to save or load
    prepare(action, data_dir, csv_file, dataloader_file)
|
997,312 | bb9c443e40439d79b848e12c292b57d947518d07 | #!/usr/bin/env python
'''
Check access to Azure API
'''
__author__ = "Leonid Vasilyev, <vsleonid@gmail.com>"
import json
import sys
import os
import azure.servicemanagement as smgmt
def print_locations_and_services(sms):
    """List every Azure location and the services available there."""
    print "Available locations & services:"
    print "=============================="
    for i, loc in enumerate(sms.list_locations()):
        print("{}.{}:\n   {}".format(
            i + 1,
            loc.display_name,
            ", ".join(loc.available_services)))

def print_available_os_images(sms):
    """Dump the OS image catalogue, sorted by OS then label."""
    print "Available OS images:"
    print "==================="
    def _by_os_and_label(image):
        # Sort key: (os, label).
        return image.os, image.label
    for image in sorted(sms.list_os_images(), key=_by_os_and_label):
        print "{os}: {label} ({size}GB)\n{name}".format(
            os=image.os,
            label=image.label,
            size=image.logical_size_in_gb,
            name=image.name
        )
        print " "

def print_disks_info(sms):
    """Show each disk with its size, source image and what it is attached to."""
    print "Disks info:"
    print "=========="
    for disk in sms.list_disks():
        print "{name}({size}GB):\n{source}\n{attached}".format(
            name=disk.name,
            size=disk.logical_disk_size_in_gb,
            source=disk.source_image_name,
            attached=disk.attached_to.hosted_service_name +
            "/" + disk.attached_to.deployment_name
        )
        print " "

def print_hosted_services(sms):
    """Print each hosted service and its public (non-underscore) properties."""
    print "Hosted Services Info:"
    print "===================="
    for service in sms.list_hosted_services():
        print service.service_name
        for k, v in service.hosted_service_properties.__dict__.iteritems():
            if k.startswith('_'):
                continue
            print "  {}: {}".format(k, v)
def main(config_path):
    """Load {subscription_id, certificate_path} from the JSON config at
    *config_path* and print an account summary (locations, images,
    disks, hosted services)."""
    if not os.path.exists(config_path):
        raise ValueError("'{}' doesn't exists".format(config_path))
    config = {}
    with open(config_path) as f:
        config = json.load(f)
    subscription_id = config['subscription_id']
    certificate_path = config['certificate_path']
    sms = smgmt.ServiceManagementService(subscription_id, certificate_path)
    print "Account summary:"
    print "---------------"
    print_locations_and_services(sms)
    print_available_os_images(sms)
    print_disks_info(sms)
    print_hosted_services(sms)
if __name__ == "__main__":
if len(sys.argv) != 2:
sys.stderr.write(
"Usage: {} <config-file.json>\n".format(sys.argv[0]))
sys.exit(1)
main(sys.argv[1])
|
997,313 | 6851dc5eaa74b5c439cf352c3ab801ea5cb2e8c0 | #!/usr/bin/python
# -*- coding: UTF-8 -*-
import sys
from flask import Flask, url_for, request
from sqlalchemy import text
import cgi

# Collect the CGI query parameters into a plain dict.
arguments = dict()
args = cgi.FieldStorage()
for key in args.keys():
    arguments[key] = args[key].value

from dbhelper import query_database, result_to_json
import cgihelper

# Marks an observation as silene or not.
# sponsor_verify.py?issilene=true/false/0/1&obsid=43
try:
    # SECURITY FIX: the query was built by concatenating raw request
    # parameters into the SQL string (SQL injection). Normalise/validate
    # the inputs and pass them as bound parameters instead.
    _truthy = {'true': 1, '1': 1, 'false': 0, '0': 0}
    issilene = _truthy[arguments["issilene"].lower()]
    obsid = int(arguments["obsid"])
    query_database(
        text('UPDATE observations SET IsSilene=:issilene WHERE ObsID=:obsid')
        .bindparams(issilene=issilene, obsid=obsid))
except Exception:
    # Missing/invalid parameters or DB failure all end here.
    sys.exit("Error occured. Database may be down. Try again later and check your parameters.")
|
997,314 | 8fdac789dbde85ef80535e6cc1a687db1a49a839 | import sys, getopt, os, errno, re, string
# Parse -i <input file> and -o <output file> from the command line.
# (Python 2 script: print statements.)
opts, args = getopt.getopt(sys.argv[1:], "i:o:")
for opt, arg in opts:
    if opt == '-i':
        inFile = arg
    elif opt == '-o':
        outFile = arg

input = open(inFile, "r").readlines()
arr = []
i = 0.0
sum = 0.0
sumOfSquares = 0.0
# The second whitespace-separated column of each line is the sample value;
# accumulate the sum and sum of squares for a variance estimate.
for line in input:
    newLine = re.sub(r' +',' ', line).strip().split(" ")
    new = float(newLine[1])
    sum += new
    sumOfSquares += (new * new)
    i += 1.0

print "Sum"
print sum
print sumOfSquares

# <x>, <x^2> and <x>^2; variance = <x^2> - <x>^2.
avgOfSum = sum / i
avgOfSquares = sumOfSquares / i
sumSquare = avgOfSum * avgOfSum
print avgOfSum
print avgOfSquares
print sumSquare
var = avgOfSquares - sumSquare

#boltzmann constant is
# 1.38065 g Angs^2/(s^2K) (gram ångströms squared per second squared kelvin)
# at 310 K, K_b * T = 428.001 g * Angs^2 / s^2 or 428.001 angs^2 * dyn/cm
# NOTE(review): 428 * <x> / (150 * var) — 150 looks like a fixed model
# parameter; confirm against the source of this formula.
ans = 428 * avgOfSum / ( 150 * var)
print ans |
997,315 | 875740d6e65fc9fbe55b2905d0b148591c9a5c1c | #! /usr/bin/env/ python3
# -*- coding:utf-8-*-
#meetstation > sensor > bh1750 > meet.py
from bin import conf, db
import logging
import threading
import time
import sensor.bh1750.bh1750 as bh1750
class Meet:
    """Take a light reading from a BH1750 sensor in a worker thread and
    store it in the database, retrying up to `pogingen` times."""

    def __init__(self, rowid=0, args=[]):
        # NOTE(review): the mutable default is kept for interface
        # compatibility; args[0] would raise IndexError when omitted.
        self.logObj = logging.getLogger('meetlog')
        self.rowid = rowid
        self.resolutie = args[0]  # BH1750 resolution mode for this station
        self.timeout = int(conf.Conf.get('sensor','timeout'))
        self.pogingen = int(conf.Conf.get('sensor','pogingen'))
        self.logObj.debug('BH1750 initieren voltooid')

    def logMeting(self):
        """Measure with retries; on success write the reading to the DB."""
        self.meting = { 'succes': False,
                        'licht': 0}
        tel = 1
        while True:
            # Run the I2C read in a daemon thread so a hung sensor cannot
            # block the process; join() waits at most `timeout` seconds.
            t = threading.Thread(target = self.meet)
            t.daemon = True
            t.start()
            t.join(self.timeout)
            # BUGFIX: was `if(t.is_alive)` (the bound method is always
            # truthy) followed by the unsupported Thread._stop() and a
            # redundant extra sleep(timeout). A timed-out daemon thread is
            # simply abandoned and the attempt retried.
            if self.meting['succes']:
                # BUGFIX: format was malformed ('poging % geregistreerd').
                self.logObj.debug('BH1750 poging %s geregistreerd' % tel)
                self.registreer()
                break
            tel += 1
            if tel > self.pogingen:
                self.logObj.debug('BH1750 meting gefaald')
                break

    def meet(self):
        """Worker: read the sensor and record success."""
        bh = bh1750.BH1750()
        self.meting['licht'] = bh.meet(self.resolutie)
        self.meting['succes'] = True
        self.logObj.debug('BH1750 meet succesvol voltooid')

    def registreer(self):
        """Insert the reading into the bh1750 table."""
        dbSql = "INSERT INTO bh1750 (rowid,licht) VALUES (?,?)"
        db.Db.curs().execute(dbSql, (self.rowid, self.meting['licht']))
        db.Db.conn().commit()
        self.logObj.debug('BH1750 rij succesvol aan tabel toegevoegd')
|
997,316 | ad1a1ffc94ad4761a0ecf250efbc57dc811646b0 | from typing import List, Tuple, Iterable
from gamestate import GameState
N: int = 3
PuzzleBoard = Tuple[Tuple[int, ...], ...]
Move = Tuple[int, int, str]
State = Tuple[PuzzleBoard, str]
GOAL_STATE: GameState = GameState(board=((0, 1, 2), (3, 4, 5), (6, 7, 8)))
class InvalidPuzzleError(Exception):
    '''
    Error raised for an invalid board.
    The message carries the reason plus a rendering of the offending board.
    '''

    def __init__(self, msg: str, invalid_board: PuzzleBoard) -> None:
        details = "{}\nInvalid board: \n{}".format(msg, display(invalid_board))
        super().__init__(details)
def is_valid(board: PuzzleBoard) -> Tuple[bool, str]:
    '''
    Checks that:
        - The board dimensions are N * N (every row, not just the first).
        - The board contains every value from 1 to N*N - 1.
        - There is an empty space (0).
    Returns (ok, reason); reason is "" when the board is valid.
    '''
    # BUGFIX: only the first row's length used to be checked, so a ragged
    # board could pass validation and crash later during indexing.
    if len(board) != N or any(len(row) != N for row in board):
        return (False, "Invalid board dimensions")
    flattened_board: List[int] = [board[i // N][i % N] for i in range(N * N)]
    if not 0 in flattened_board:
        return (False, "Empty space not found in board.")
    if not all([i in flattened_board for i in range(1, N * N)]):
        return (False,
                "The board doesn't contain all elements from 1 to " + str(N))
    return (True, "")
def generate_random_state() -> GameState:
    '''
    Generate a random valid puzzle state.
    Ret: GameState wrapping an N x N board holding a uniform random
    permutation of 0 .. N*N-1 (0 is the empty space).
    Note: a uniform permutation is NOT guaranteed to be solvable.
    '''
    from random import shuffle
    randomized_board_arr: List[int] = [i for i in range(N * N)]
    shuffle(randomized_board_arr)
    # Chop the flat permutation into N rows of N columns.
    return GameState(
        board=tuple([
            tuple([randomized_board_arr[j] for j in range(i, i + N)])
            for i in range(0, N * N, N)
        ]))
def read_board_from_stdin() -> GameState:
    """Read N rows of space-separated ints from stdin and validate them.

    Raises InvalidPuzzleError when the entered board fails is_valid().
    """
    print("Please enter the elements row by row separated by spaces.")
    print("Use 0 to represent the empty space.")
    board = tuple([tuple([int(x) for x in input().split()]) for i in range(N)])
    res: Tuple[bool, str] = is_valid(board)
    if not res[0]:
        raise InvalidPuzzleError(res[1], board)
    return GameState(board = board)
def _get_empty_space_pos(board: PuzzleBoard) -> Tuple[int, int]:
    """Locate the empty cell (value 0); board must already be validated."""
    zeros = ((r, c) for r in range(N) for c in range(N) if board[r][c] == 0)
    for pos in zeros:
        return pos
    raise RuntimeError(
        "This Shouldn't happen as is_valid() should be called before calling this fn"
    )
def _generate_valid_moves(board: PuzzleBoard) -> Iterable[Move]:
    """Moves that keep the empty space inside the board."""
    possible_moves: List[Move] = [(-1, 0, "Left"), (1, 0, "Right"),
                                  (0, -1, "Up"), (0, 1, "Down")]
    row, col = _get_empty_space_pos(board)

    def _inside(move: Move) -> bool:
        return 0 <= row + move[0] < N and 0 <= col + move[1] < N

    return filter(_inside, possible_moves)
def _transition_get_item(board: PuzzleBoard, move: Move,
                         empty_space_pos: Tuple[int, int], row: int,
                         col: int) -> int:
    """Value of cell (row, col) after sliding the tile at empty+move
    into the empty space; every other cell is unchanged."""
    er, ec = empty_space_pos
    source = (er + move[0], ec + move[1])
    if (row, col) == empty_space_pos:
        # The empty space receives the moved tile's value.
        return board[source[0]][source[1]]
    if (row, col) == source:
        # The moved tile's old cell becomes the empty space.
        return 0
    return board[row][col]
def _transition(board: PuzzleBoard, move: Move,
                empty_space_pos: Tuple[int, int]) -> PuzzleBoard:
    """Board obtained by applying *move* to the empty space."""
    rows = []
    for row in range(N):
        rows.append(tuple(
            _transition_get_item(board, move, empty_space_pos, row, col)
            for col in range(N)))
    return tuple(rows)
def generate_neighbours(state: GameState) -> List[GameState]:
    """All states reachable from *state* in one move.

    Validates the board first (raises InvalidPuzzleError), then applies
    every legal move; children carry cost+1, depth+1 and the move label.
    """
    res = is_valid(state.board)
    if not res[0]:
        raise InvalidPuzzleError(res[1], state.board)
    empty_space_pos: Tuple[int, int] = _get_empty_space_pos(state.board)
    return [
        GameState(
            board=_transition(state.board, move, empty_space_pos),
            cost=state.cost + 1,
            depth=state.depth + 1,
            move=move[2]) for move in _generate_valid_moves(state.board)
    ]
############################ Displaying the Board ############################
def _display_row(board: PuzzleBoard, row: int) -> str:
    """Separator/padding line(s) drawn immediately before *row*."""
    n = len(board)
    if row == 0:
        # Top border plus an empty padding row:
        #  ____________
        # |   |   |   |
        return "____" * n + "_\n" + "|   " * n + "|\n"
    if row == n:
        # Bottom border: |___|___|___|
        return "|___" * n + "|\n"
    # Interior separator plus padding row.
    return "|___" * n + "|\n" + "|   " * n + "|\n"


def _display(board: PuzzleBoard, row: int) -> str:
    """Recursively render rows *row* .. end (helper for display)."""
    if row >= len(board):
        return _display_row(board, row)
    cells = [str(v) if v != 0 else ' ' for v in board[row]]
    line = "| " + " | ".join(cells) + " |\n"
    return _display_row(board, row) + line + _display(board, row + 1)


def display(board: PuzzleBoard) -> str:
    '''
    Display the puzzle board
    Args:
        board: PuzzleBoard
    Ret:
        str: representation of board (0 rendered as a blank cell).
    '''
    return _display(board, 0)
##############################################################################
|
997,317 | 8846023ee26b40b36e0d3c3bd6140e6898579052 | from . import FrameUtils, Lowered, Code
|
997,318 | 354f9a431b6e37ab425ffe686fdafe9e689a06f5 | # implement an algo to determine if a string has all unique characters
def all_unique(input_string):
    """Return True iff no character occurs more than once in *input_string*.

    A set keeps one copy of each character, so the lengths match exactly
    when every character is unique. O(n) time, O(n) space.
    """
    return len(set(input_string)) == len(input_string)
# better version
# return len(set(input_string)) == len(input_string)
def all_unique_without_data(input_string):
    """Return True if all characters are unique, using no extra data structure.

    Recursively compares the first character against the rest of the string;
    O(n^2) time.  Fix: the original base case `len(input_string) == 1`
    missed the empty string, so "" fell into the else branch and
    `input_string[0]` raised IndexError.
    """
    if len(input_string) <= 1:
        # Strings of length 0 or 1 cannot contain duplicates.
        return True
    character = input_string[0]
    for x in input_string[1:]:
        if character == x:
            return False
    return all_unique_without_data(input_string[1:])
|
997,319 | 194db06ce5a196d98c6d746e30df4a7b92f30676 | #predicted
# Floor division rounds toward negative infinity, so -6 // 4 is -2 (not -1).
print(-6 // 4)
# A float operand makes the result a float: 6. // -4 is -2.0.
print(6. // -4)
997,320 | 7354bc0769b4397c123761707cd472c2545add5e | from Compiler.Globals import *
"""
This file holds functions that generate general Brainfuck code
And general functions that are not dependent on other objects
"""
# =================
# Brainfuck code
# =================
def get_readint_code():
    # cell layout relative to the pointer: res, tmp, input, loop
    # tmp is used for multiplication
    """
    Return Brainfuck code that reads an unsigned decimal integer from input.

    Digits are consumed until a newline; on exit the pointer is back on res,
    which holds the parsed value.

    Pseudocode implemented below:
        res = 0
        loop = 1
        while loop
            loop = 0
            input = input()
            if input != newline  # todo add an EOF check as well. run it in several interpreters to look for common ways for "end of number" input
                loop = 1
                res = res * 10 + char_to_digit(input)
    """
    code = "[-]"  # clear res = 0
    code += ">[-]"  # tmp = 0
    code += ">>[-]+"  # loop = 1
    code += "["  # while loop == 1
    code += "[-]"  # loop = 0
    code += "<"  # point to input
    code += ","  # input character
    code += "----------"  # sub 10 (check for newline)
    code += "["  # if input is not newline
    code += ">"  # point to loop
    code += "+"  # loop = 1
    # multiply res by 10 and add the input digit
    code += "<<<"  # point to res
    code += "[>+<-]"  # move res to tmp
    code += ">"  # point to tmp
    code += "[<++++++++++>-]"  # res = tmp * 10, tmp = 0
    code += ">"  # point to input
    code += "-" * (0x30 - 10)  # convert character to digit by subtracting 0x30 from it (10 was already subtracted above)
    code += "[<<+>>-]"  # res += input
    code += "]"  # end if
    code += ">"  # point to loop
    code += "]"  # end while
    code += "<<<"  # point to res
    return code
def get_printint_code():
    """
    Return Brainfuck code that prints the next cell's value as a decimal number.

    Cell layout: return_cell, value_to_print_cell.  On exit the pointer is
    back on the return cell.
    """
    code = ">"  # point to value_to_print cell
    code += ">[-]" * 10 + "<" * 10  # zero some scratch cells used by the print routine
    # ==============================================================================================
    # code to print num (taken from https://esolangs.org/wiki/brainfuck_algorithms#Print_value_of_cell_x_as_number_.288-bit.29)
    code += ">>++++++++++<<[->+>-[>+>>]>[+[-<+>]>+>>]<<<<<<]>>[-]>>>++++++++++<[->-[>+>>]>[+[-"
    code += "<+>]>+>>]<<<<<]>[-]>>[>++++++[-<++++++++>]<.<<+>+>[-]]<[<[->-<]++++++[->++++++++"
    code += "<]>.[-]]<<++++++[-<++++++++>]<.[-]<<[-<+>]<"
    # todo either document this or write one of my own
    # ==============================================================================================
    code += "<"  # point to value_to_return cell
    return code
def get_readchar_code():
    """Return BF code reading one input character into the "return value" cell.

    The pointer does not move, so no repositioning is needed.
    """
    return ","
def get_printchar_code():
    """Return BF code that outputs the parameter cell (one cell right),
    then repoints to the "return value" cell."""
    return ">.<"
def get_set_cell_value_code(new_value, previous_value, zero_next_cell_if_necessary=True):
    """Return BF code setting the current cell to *new_value*, given it holds
    *previous_value*.

    Two candidate encodings are generated and the shorter one is returned:
    the naive one ("+"/"-" repeated |delta| times) and a looped one that
    uses the next cell as a loop counter ("[<a> iterations of <b> adds/subs]
    followed by <c> extra adds/subs").  If *zero_next_cell_if_necessary* is
    False, the next cell is assumed to already be zero.  Afterwards the
    pointer is back on the original cell.
    """
    delta = new_value - previous_value
    sign = "+" if delta > 0 else "-"
    delta = abs(delta)

    naive_code = sign * delta

    def best_factorization(total):
        # a*b + c == total with a + b + c minimal (first minimum wins).
        best = (total, 1, 0)
        for candidate in range(1, total // 2 + 1):
            triple = (candidate, total // candidate, total % candidate)
            if sum(triple) < sum(best):
                best = triple
        return best

    a, b, c = best_factorization(delta)
    pieces = [">"]                       # move to the loop-counter cell
    if zero_next_cell_if_necessary:
        pieces.append("[-]")             # make sure the counter starts at 0
    pieces.append("+" * a)               # counter = a
    pieces.append("[-<" + sign * b + ">]")  # each iteration: b adds/subs on the target
    pieces.append("<")                   # back to the target cell
    pieces.append(sign * c)              # remaining c adds/subs
    looped_code = "".join(pieces)

    return naive_code if len(naive_code) < len(looped_code) else looped_code
def get_move_to_offset_code(offset):
    """Return BF code moving the current cell's value <offset> cells left.

    The destination is zeroed first; afterwards the pointer is back on the
    (now zero) source cell, which becomes the next available cell.
    """
    left, right = "<" * offset, ">" * offset
    return "".join([
        left,                          # go to the destination
        "[-]",                         # zero the destination
        right,                         # back to the source
        "[", left, "+", right, "-]",   # drain source into destination
    ])
def get_copy_to_offset_code(offset):
    """Return BF code copying the current cell <offset> cells to the left.

    The next cell is used as a temporary so the source keeps its value;
    afterwards the pointer is back on the unchanged original cell.
    """
    to_dest = "<" * (offset + 1)   # from the temp cell back to the destination
    to_src = ">" * offset          # from the destination back to the source
    parts = [
        ">[-]",                               # zero the temp (next) cell
        to_dest, "[-]",                       # zero the destination
        to_src,                               # back to the source
        "[>+", to_dest, "+", to_src, "-]",    # add source into temp and destination
        ">[<+>-]<",                           # restore source from temp
    ]
    return "".join(parts)
def get_copy_to_variable_code(ids_map_list, ID_token, current_pointer):
    """Return BF code copying the current cell into the cell of variable *ID_token*.

    Afterwards the pointer is back on the original cell, which is unchanged.
    """
    return get_copy_to_offset_code(
        get_offset_to_variable(ids_map_list, ID_token, current_pointer))
def get_move_to_return_value_cell_code(return_value_cell, current_stack_pointer):
    """Return BF code moving the current cell into the return-value cell.

    The value travels (current_stack_pointer - return_value_cell) cells to
    the left; afterwards the pointer is on the original, now-free cell.
    """
    offset = current_stack_pointer - return_value_cell
    return get_move_to_offset_code(offset)
def get_copy_from_variable_code(ids_map_list, ID_token, current_pointer):
    """
    Return BF code copying the cell of variable *ID_token* to the current cell.

    Uses the next cell as a temporary so the variable keeps its value.
    Afterwards the pointer is on the next available (zeroed) cell.
    """
    offset = get_offset_to_variable(ids_map_list, ID_token, current_pointer)
    code = "[-]"  # res = 0
    code += ">[-]"  # temp (next cell) = 0
    code += "<" * (offset + 1)  # point to destination cell
    code += "[" + ">" * offset + "+>+" + "<" * (offset + 1) + "-]"  # increase res and temp, zero destination
    code += ">" * (offset + 1)  # point to temp
    code += "[" + "<" * (offset + 1) + "+" + ">" * (offset + 1) + "-]"  # copy temp back to destination
    # at this point we point to the next available cell, which is temp, which is now zero
    return code
def get_token_code(ids_map_list, token, current_pointer):
    """
    Return BF code evaluating *token* into the current cell.

    Supports NUM, CHAR, ID, TRUE and FALSE tokens; afterwards the pointer
    is on the next available cell.  Raises NotImplementedError for any
    other token type.
    """
    if token.type == Token.NUM:
        value = get_NUM_token_value(token)
        code = "[-]"  # zero current cell
        code += get_set_cell_value_code(value, 0)  # set current cell to the num value
        code += ">"  # point to the next cell
        return code
    elif token.type == Token.CHAR:
        code = "[-]"  # zero current cell
        code += get_set_cell_value_code(ord(token.data), 0)  # set current cell to the char value
        code += ">"  # point to next cell
        return code
    elif token.type == Token.ID:
        code = get_copy_from_variable_code(ids_map_list, token, current_pointer)
        return code
    elif token.type == Token.TRUE:
        code = "[-]"  # zero current cell
        code += "+"  # current cell = 1
        code += ">"  # point to next cell
        return code
    elif token.type == Token.FALSE:
        code = "[-]"  # zero current cell
        code += ">"  # point to next cell
        return code
    raise NotImplementedError
def get_divmod_code():
    """
    Return BF code computing divmod of the two cells at the pointer.

    Given the pointer on a, with b in the following cell
    (--> a, b, ?, ?, ?, ?), the generated code leaves the cells as
    --> 0, b-a%b, a%b, a/b, 0, 0
    with the pointer back on the first cell (where a used to be).
    If ADD_DIVISION_BY_ZERO_CHECK is set, a prefix checks b for zero,
    prints "Error - Division by zero\\n" and then hangs in an infinite loop.
    """
    ADD_DIVISION_BY_ZERO_CHECK = True

    def get_if_equal_to_0_code(inside_if_code, offset_to_temp_cell):
        """
        given a <inside_if_code>, wraps it with an "if (current_cell == 0) {<inside_if_code>}"
        in the process, it zeros the current cell
        additionally, it uses a temp cell
        the argument <offset_to_temp_cell> is the offset from the current cell to the temp cell
        *** note that the temp cell must be AFTER the cells that the <inside_if_code> touches ***
        <inside_if_code> should assume it starts running when pointing to the current cell
        and it should end its run pointing to the same cell
        """
        # temp cell is initialized to 1, and holds a flag of whether or not we should run <inside_if_code> or not
        # if cell to evaluate is not zero, we set this flag to 0
        code = ">" * offset_to_temp_cell  # point to temp
        code += "[-]+"  # temp = 1
        code += "<" * offset_to_temp_cell  # point to cell to compare to 0
        code += "["  # if it is not zero
        code += ">" * offset_to_temp_cell  # point to temp
        code += "-"  # temp = 0
        code += "<" * offset_to_temp_cell  # point to cell
        code += "[-]"  # zero the cell
        code += "]"  # end if
        code += ">" * offset_to_temp_cell  # point to temp cell
        code += "["  # if it is non zero
        code += "<" * offset_to_temp_cell  # point to cell
        code += inside_if_code  # execute desired code
        # at this point we point to the original cell
        code += ">" * offset_to_temp_cell  # point to temp cell
        code += "-"  # temp = 0
        code += "]"  # end if
        code += "<" * offset_to_temp_cell  # point back to original cell
        return code

    code = ""
    if ADD_DIVISION_BY_ZERO_CHECK:
        # create a prefix code: if (b == 0) {print("Error - Division by zero\n");}
        # copy b to temp cell (via another temp cell) and compare that cell to 0. if its 0, execute error print and go to infinite loop
        code += ">>"  # point to empty cell
        code += "[-]>[-]"  # zero 2 temp cells
        code += "<<"  # point to b
        code += "[>+>+<<-]"  # move b to both cells
        code += ">"  # point to first cell
        code += "[<+>-]"  # move first cell back to b
        code += ">"  # point to second cell
        code_inside_if = "[-]>[-]<>++++++[-<+++++++++++>]<+++.>+++++[-<+++++++++>]<..---.+++.>+++++++++[-<--------->]" \
                         "<-.+++++++++++++.-------------.>++++++[-<++++++>]<.>++++++[-<++++++>]<+.+++++++++++++.-----" \
                         "--------.++++++++++.----------.++++++.-.>++++++[-<------------->]<.>++++++[-<+++++++++++>]<" \
                         ".>+++[-<+++++++>]<++.>++++++++[-<----------->]<-.>+++++++++[-<++++++++++>]<.>+++[-<------->" \
                         "]<.+++++++++++++.---.>++++++++++[-<---------->]<-."  # print("Error - Division by zero\n");
        code_inside_if += "[]"  # infinite loop
        code += get_if_equal_to_0_code(code_inside_if, offset_to_temp_cell=1)
        code += "<<<"  # point to a
        # ======================= end of prefix =======================

    # a, b, w, x, y, z
    code += ">>[-]>[-]>[-]>[-]<<<<<"  # zero w,x,y,z, and point to a
    code += "["  # while a != 0
    code += "-"  # decrease a by 1
    code += ">-"  # decrease b by 1
    code += ">+"  # increase w by 1
    code += "<"  # point to b
    code += "[->>>+>+<<<<]>>>>[-<<<<+>>>>]"  # copy b to y (via z)
    code += "<"  # point to y
    code_inside_if = ""
    code_inside_if += "<+"  # increase x by 1
    code_inside_if += "<"  # point to w
    code_inside_if += "[-<+>]"  # copy w to b (b is already 0) (after this we point to w)
    code_inside_if += ">>"  # point to y
    # get_if_equal_to_0 also zeros y
    # i set offset_to_temp_cell = 1 because it can use z, since it is unused inside the if
    code += get_if_equal_to_0_code(inside_if_code=code_inside_if, offset_to_temp_cell=1)
    code += "<<<<"  # point to a
    code += "]"  # end while
    """
    a, b, w, x, y, z
    w, x, y, z = 0, 0, 0, 0
    while a != 0
        a -= 1
        b -= 1
        w += 1
        if b == 0: (this means that w = original b) (implementation: copy b to y (via z) and compare y to 0, (then zero y))
            x += 1
            b = w
            w = 0
    at the end:
    w = a%b
    x = a/b
    b = b-a%b
    """
    return code
def get_bitwise_code(code_logic):
    """
    Return BF code applying a bitwise binary operation to cells a and b.

    Cell layout: a, b, c, w, x, y, z, bit1, bitcounter, res.
    The operands are decomposed bit by bit (8 iterations); *code_logic* is
    the BF snippet that combines one pair of bits.  It runs on the cells
    y, z and bit1, where y is the per-bit result and z / bit1 hold the two
    operand bits; y starts at zero, and z and bit1 must be zero when
    code_logic finishes.  The final result is moved into a's cell and the
    pointer ends on b (the next available cell).
    """
    # a, b, c, w, x, y, z, bit1, bitcounter, res
    # code_logic uses the cells y, z, and bit1. Where y is res and z and bit1 are the bits.
    # y is zero. z and bit1 should be zero after code_logic.
    code = ">" * 7  # point to bit1
    code += "[-]"  # zero bit1
    code += ">"  # point to bitcounter
    code += ">[-]<"  # zero res
    code += "[-]--------[++++++++"  # while bitcounter != 8:
    code += "<"
    code += "<[-]" * 5  # clear c, w, x, y, z
    code += "++"  # c = 2
    code += "<<"  # point to a
    code += "["  # while a != 0:
    code += "-"  # a -= 1
    code += ">>-"  # c -= 1
    code += "[>+>>+<<<-]>[<+>-]"  # copy c to y (using w)
    code += ">>"  # point to y
    code += ">>+<<"  # bit1 += 1
    code += "-["  # if y != 1:
    code += "<+"  # x += 1
    code += "<<++"  # c += 2 (c was 0)
    code += ">" * 5  # point to bit1
    code += "--"  # bit1 -= 2 (bit1 was 2)
    code += "<<"  # point to y
    code += "+"  # set y to 0
    code += "]"  # end if
    code += "<<<<<"  # point to a
    code += "]"  # end while
    code += ">>>>[<<<<+>>>>-]"  # move x to a (x is a/2)
    code += "<<[-]++"  # c = 2
    code += "<"  # point to b
    code += "["  # while b != 0:
    code += "-"  # b -= 1
    code += ">-"  # c -= 1
    code += "[>+>>+<<<-]>[<+>-]"  # copy c to y (using w)
    code += ">>"  # point to y
    code += ">+<"  # z += 1
    code += "-["  # if y != 1:
    code += ">--<"  # z -= 2 (z was 2)
    code += "<+"  # x += 1
    code += "<<++"  # c += 2 (c was 0)
    code += ">>>"  # point to y
    code += "+"  # set y to 0
    code += "]"
    code += "<<<<"  # point to b
    code += "]"  # end while
    # w is a % 2
    # x is a / 2
    code += ">>>[<<<+>>>-]"  # move x to b
    code += ">>"  # point to z
    code += code_logic  # pointer ends at bit1, z and bit1 should be 0 after code
    code += ">[<+<+>>-]<[>+<-]"  # copy bit to z (using bit1)
    # y = y << z
    code += "<"
    code += "["  # while z != 0:
    code += "<"  # point to y
    code += "[<+>-]"  # copy y to x
    code += "<[>++<-]"  # copy x to y * 2
    code += ">>-"  # z -= 1
    code += "]"
    code += "<"  # point to y
    code += "[>>>>+<<<<-]"  # res += y
    code += ">>>"  # point to bitcounter
    code += "-" * 7  # loop if bitcounter != 7
    code += "]"  # end while
    code += ">[<<<<<<<<<+>>>>>>>>>-]"  # move res to a
    code += "<<<<<<<<"  # point to b
    return code
def get_unary_prefix_op_code(token, offset_to_variable=None):
    """
    Return BF code applying a unary *prefix* operator to the operand at the pointer.

    The result is placed in the operand's cell and the pointer ends on the
    cell right after it (the next available cell).  For ++/--/**/ ///%%,
    *offset_to_variable* locates the variable cell, which is updated in place
    as well (prefix semantics: the expression yields the *new* value).
    Raises NotImplementedError for unsupported operators.
    """
    if token.type == Token.NOT:
        # cell layout: a, temp
        code = ">"  # point to temp
        code += "[-]+"  # temp = 1
        code += "<"  # point to a
        code += "["  # if a is non-zero
        code += ">-"  # temp = 0
        code += "<[-]"  # zero a
        code += "]"  # end if
        code += ">"  # point to temp
        code += "["  # if temp is non-zero
        code += "<+"  # a = 1
        code += ">-"  # temp = 0
        code += "]"  # end if
        return code
    elif token.type == Token.INCREMENT:
        # returns code that copies value from variable's cell at given offset, and adds 1 to both the copied and the original cell
        assert offset_to_variable is not None
        offset = offset_to_variable
        code = "[-]"  # res = 0
        code += ">[-]"  # temp (next pointer) = 0
        code += "<" * (offset + 1)  # point to destination cell
        code += "[" + ">" * offset + "+>+" + "<" * (offset + 1) + "-]"  # increase res and temp, zero destination
        code += ">" * offset  # point to res
        code += "+"  # increase res by 1
        code += ">"  # point to temp
        code += "+"  # increase temp by 1
        code += "[" + "<" * (offset + 1) + "+" + ">" * (offset + 1) + "-]"  # copy temp back to destination
        # at this point we point to the next available cell, which is temp, which is now zero
        return code
    elif token.type == Token.DECREMENT:
        # returns code that copies value from variable's cell at given offset, and subtracts 1 from both the copied and the original cell
        assert offset_to_variable is not None
        offset = offset_to_variable
        code = "[-]"  # res = 0
        code += ">[-]"  # temp (next pointer) = 0
        code += "<" * (offset + 1)  # point to destination cell
        code += "[" + ">" * offset + "+>+" + "<" * (offset + 1) + "-]"  # increase res and temp, zero destination
        code += ">" * offset  # point to res
        code += "-"  # decrease res by 1
        code += ">"  # point to temp
        code += "-"  # decrease temp by 1
        code += "[" + "<" * (offset + 1) + "+" + ">" * (offset + 1) + "-]"  # copy temp back to destination
        # at this point we point to the next available cell, which is temp, which is now zero
        return code
    elif token.type == Token.UNARY_MULTIPLICATIVE:
        # returns code that copies value from variable's cell at given offset, modifies both the copied and the original cell depending on the op
        assert offset_to_variable is not None
        offset = offset_to_variable
        code = "[-]"  # res = 0
        code += ">[-]"  # temp (next pointer) = 0
        code += "<" * (offset + 1)  # point to destination cell
        code += "[" + ">" * offset + "+>+" + "<" * (offset + 1) + "-]"  # increase res and temp, zero destination
        code += ">" * offset  # point to res
        if token.data in ["**", "//"]:
            code += ">"  # point to temp (x**, x// keep x the same)
        elif token.data == "%%":
            code += "[-]>[-]"  # put 0 in res and temp, and point to temp
        else:
            raise BFSyntaxError("Unexpected unary prefix %s" % str(token))
        code += "[" + "<" * (offset + 1) + "+" + ">" * (offset + 1) + "-]"  # copy temp back to destination
        # at this point we point to the next available cell, which is temp, which is now zero
        return code
    elif token.type == Token.BITWISE_NOT:
        # cell layout: a, temp
        code = "[>+<-]"  # move a into temp
        code += ">"  # point to temp
        code += "+[<->-]"  # invert temp into a (a = 255 - temp on 8-bit cells)
        return code
    raise NotImplementedError
def get_unary_postfix_op_code(token, offset_to_variable):
    """
    Return BF code applying a unary *postfix* operator to the operand at the pointer.

    The result (the variable's value *before* modification — postfix
    semantics) is placed in the operand's cell and the pointer ends on the
    cell right after it (the next available cell).  *offset_to_variable*
    locates the variable cell, which is updated in place.
    Raises NotImplementedError for unsupported operators.
    """
    if token.type == Token.INCREMENT:
        # returns code that copies value from variable's cell at given offset, and adds 1 to the original cell
        offset = offset_to_variable
        code = "[-]"  # res = 0
        code += ">[-]"  # temp (next pointer) = 0
        code += "<" * (offset + 1)  # point to destination cell
        code += "[" + ">" * offset + "+>+" + "<" * (offset + 1) + "-]"  # increase res and temp, zero destination
        code += ">" * (offset + 1)  # point to temp
        code += "+"  # increase temp by 1
        code += "[" + "<" * (offset + 1) + "+" + ">" * (offset + 1) + "-]"  # copy temp back to destination
        # at this point we point to the next available cell, which is temp, which is now zero
        return code
    elif token.type == Token.DECREMENT:
        # returns code that copies value from variable's cell at given offset, and subtracts 1 from the original cell
        offset = offset_to_variable
        code = "[-]"  # res = 0
        code += ">[-]"  # temp (next pointer) = 0
        code += "<" * (offset + 1)  # point to destination cell
        code += "[" + ">" * offset + "+>+" + "<" * (offset + 1) + "-]"  # increase res and temp, zero destination
        code += ">" * (offset + 1)  # point to temp
        code += "-"  # decrease temp by 1
        code += "[" + "<" * (offset + 1) + "+" + ">" * (offset + 1) + "-]"  # copy temp back to destination
        # at this point we point to the next available cell, which is temp, which is now zero
        return code
    elif token.type == Token.UNARY_MULTIPLICATIVE:
        # returns code that copies value from variable's cell at given offset, and modifies the original cell depending on the operation
        offset = offset_to_variable
        code = "[-]"  # res = 0
        code += ">[-]"  # temp (next pointer) = 0
        code += "<" * (offset + 1)  # point to destination cell
        code += "[" + ">" * offset + "+>+" + "<" * (offset + 1) + "-]"  # increase res and temp, zero destination
        code += ">" * (offset + 1)  # point to temp
        if token.data in ["**", "//"]:
            pass  # x**,x// keeps x the same
        elif token.data == "%%":
            code += "[-]"  # x%% modifies x to 0
        else:
            raise BFSyntaxError("Unexpected unary postfix %s" % str(token))
        code += "[" + "<" * (offset + 1) + "+" + ">" * (offset + 1) + "-]"  # copy temp back to destination
        # at this point we point to the next available cell, which is temp, which is now zero
        return code
    raise NotImplementedError
def get_op_between_literals_code(op_token):
    """
    Return BF code applying the binary operator *op_token* to two operands.

    The first operand is at the current pointer, the second in the next
    cell.  The generated code may clobber the second operand and anything
    after it.  The result is left in the first operand's cell and the
    pointer ends on the cell right after it (the next available cell).
    Supports + - * / % == != > < <= >= && || << >> & | ^; raises
    NotImplementedError otherwise.
    """
    op = op_token.data
    if op == "+" or op == "-":
        res = ">[<" + op + ">-]"  # increase/decrease first operand and decrease second operand
        # the pointer points to the next available cell, which is the second operand, which is 0
        return res
    elif op == "*":
        # cell layout: a, b, temp1, temp2
        res = ">>[-]>[-]"  # put 0 into temp1, temp2
        res += "<<<"  # point to first operand
        res += "[>>>+<<<-]"  # move first operand to temp2
        res += ">>>"  # point to temp2
        # do in a loop: as long as temp2 != 0
        res += "["
        res += "<<"  # point to second operand
        res += "[<+>>+<-]"  # add it to first operand and temp1
        res += ">"  # point to temp1
        res += "[<+>-]"  # move it to second operand
        # end loop
        res += ">"  # point back to temp2
        res += "-"  # decrease temp2
        res += "]"
        res += "<<"  # point back to next available cell (second operand)
        return res
    elif op == "/":
        code = get_divmod_code()
        code += ">>>"  # point to a/b
        code += "[<<<+>>>-]"  # copy a/b to current cell
        code += "<<"  # point to next available cell
        return code
    elif op == "%":
        code = get_divmod_code()
        code += ">>"  # point to a%b
        code += "[<<+>>-]"  # copy a%b to current cell
        code += "<"  # point to next available cell
        return code
    # relops
    elif op == "==":
        # cell layout: a, b
        res = "[->-<]"  # a = 0, b = b - a
        res += "+"  # a = 1. will hold the result. if a!=b, this is unchanged
        res += ">"  # point to b
        res += "["  # if b == 0, enter the following code
        res += "<->[-]"  # a = 0, b=0
        res += "]"  # end of "loop"
        return res
    elif op == "!=":
        # cell layout: a, b
        res = "[->-<]"  # a = 0, b = b - a
        # a will hold the result. if a!=b, this is unchanged
        res += ">"  # point to b
        res += "["  # if b == 0, enter the following code
        res += "<+>[-]"  # a = 1, b=0
        res += "]"  # end of "loop"
        return res
    elif op == ">":
        # cell layout: a, b, c, d
        code = ">>[-]"  # c = 0 (will hold res)
        code += ">[-]"  # d = 0
        code += "<<<"  # point to a
        code += "["  # while a != 0
        code += ">>[-]"  # c = 0
        code += "<"  # point to b
        code += "[>+>+<<-]>[<+>-]"  # copy b to d (via c)
        code += "+"  # c = 1 (will hold res)
        code += ">"  # point to d
        code += "["  # if d != 0
        code += "[-]"  # d = 0
        code += "<-"  # c = 0
        code += "<-"  # b -= 1
        code += ">>"  # point to d
        code += "]"  # end if
        code += "<<<"  # point to a
        code += "-"  # a -= 1
        code += "]"  # end while
        # move c to a
        code += ">>"  # point to c
        code += "[<<+>>-]"  # move c to a
        code += "<"  # point to b (next available cell)
        """
        x > y?
        res = 0
        while x != 0:
            res = 1
            if y != 0:
                res = 0
                y -= 1
            x -= 1
        """
        return code
    elif op == "<":
        # similar to >
        # cell layout: a, b, c, d
        code = ">>[-]"  # c = 0 (will hold res)
        code += ">[-]"  # d = 0
        code += "<<"  # point to b
        code += "["  # while b != 0
        code += ">[-]"  # c = 0
        code += "<<"  # point to a
        code += "[>>+>+<<<-]>>[<<+>>-]"  # copy a to d (via c)
        code += "+"  # c = 1 (will hold res)
        code += ">"  # point to d
        code += "["  # if d != 0
        code += "[-]"  # d = 0
        code += "<-"  # c = 0
        code += "<<-"  # a -= 1
        code += ">>>"  # point to d
        code += "]"  # end if
        code += "<<"  # point to b
        code += "-"  # b -= 1
        code += "]"  # end while
        # move c to a
        code += "<"  # point to a
        code += "[-]"  # a = 0
        code += ">>"  # point to c
        code += "[<<+>>-]"  # move c to a
        code += "<"  # point to b (next available cell)
        """
        x < y?
        res = 0
        while y != 0:
            res = 1
            if x != 0:
                res = 0
                x -= 1
            y -= 1
        """
        return code
    elif op == "<=":
        # cell layout: a, b, c, d
        code = ">>[-]+"  # c = 1 (will hold res)
        code += ">[-]"  # d = 0
        code += "<<<"  # point to a
        code += "["  # while a != 0
        code += ">>[-]"  # c = 0
        code += "<"  # point to b
        code += "[>+>+<<-]>[<+>-]"  # copy b to d (via c)
        code += ">"  # point to d
        code += "["  # if d != 0
        code += "[-]"  # d = 0
        code += "<+"  # c = 1
        code += "<-"  # b -= 1
        code += ">>"  # point to d
        code += "]"  # end if
        code += "<<<"  # point to a
        code += "-"  # a -= 1
        code += "]"  # end while
        # move c to a
        code += ">>"  # point to c
        code += "[<<+>>-]"  # move c to a
        code += "<"  # point to b (next available cell)
        """
        x <= y?
        res = 1
        while x != 0:
            res = 0
            if y != 0:
                res = 1
                y -= 1
            x -= 1
        """
        return code
    elif op == ">=":
        # similar to <=
        # cell layout: a, b, c, d
        code = ">>[-]+"  # c = 1 (will hold res)
        code += ">[-]"  # d = 0
        code += "<<"  # point to b
        code += "["  # while b != 0
        code += ">[-]"  # c = 0
        code += "<<"  # point to a
        code += "[>>+>+<<<-]>>[<<+>>-]"  # copy a to d (via c)
        code += ">"  # point to d
        code += "["  # if d != 0
        code += "[-]"  # d = 0
        code += "<+"  # c = 1
        code += "<<-"  # a -= 1
        code += ">>>"  # point to d
        code += "]"  # end if
        code += "<<"  # point to b
        code += "-"  # b -= 1
        code += "]"  # end while
        # move c to a
        code += "<"  # point to a
        code += "[-]"  # a = 0
        code += ">>"  # point to c
        code += "[<<+>>-]"  # move c to a
        code += "<"  # point to b (next available cell)
        """
        x >= y?
        res = 1
        while y != 0:
            res = 0
            if x != 0:
                res = 1
                x -= 1
            y -= 1
        """
        return code
    elif op_token.type == Token.AND:
        # cell layout: a, b, temp
        code = ">>[-]"  # zero temp
        code += "<<"  # point to a
        code += "["  # if a is non-zero
        code += ">"  # point to b
        code += "["  # if b is non-zero
        code += ">+"  # temp = 1
        code += "<[-]"  # zero b
        code += "]"  # end if
        code += "<"  # point to a
        code += "[-]"  # zero a
        code += "]"  # end if
        code += ">>"  # point to temp
        code += "["  # if non zero
        code += "<<+"  # a = 1
        code += ">>-"  # temp = 0
        code += "]"  # end if
        code += "<"  # point to b (next available cell)
        return code
    elif op_token.type == Token.OR:
        # cell layout: a, b, temp
        code = ">>[-]"  # zero temp
        code += "<<"  # point to a
        code += "["  # if a is non-zero
        code += ">"  # point to b
        code += "[-]"  # zero b
        code += ">"  # point to temp
        code += "+"  # temp = 1
        code += "<<"  # point to a
        code += "[-]"  # zero a
        code += "]"  # end if
        code += ">"  # point to b
        code += "["  # if b is non-zero
        code += ">"  # point to temp
        code += "+"  # temp = 1
        code += "<"  # point to b
        code += "[-]"  # zero b
        code += "]"  # end if
        code += ">"  # point to temp
        code += "["  # if temp == 1
        code += "<<+"  # a = 1
        code += ">>"  # point to temp
        code += "-"  # zero temp
        code += "]"  # end if
        code += "<"  # point to b (next available cell)
        return code
    elif op == "<<":
        # cell layout: a, b, temp
        code = ">>[-]"  # zero temp
        code += "<"  # point to b
        code += "["  # while b != 0
        code += "<"  # point to a
        code += "[>>+<<-]"  # copy a to temp
        code += ">>"  # point to temp
        code += "[<<++>>-]"  # multiply temp by 2 and store result in a
        code += "<-"  # point to b and b -= 1
        code += "]"  # end while
        return code
    elif op == ">>":
        # cell layout: a, b, c, x, y, z
        code = ">"  # point to b
        code += ">[-]" * 4  # clear 4 cells
        code += "<" * 4  # point to b
        code += "["  # while b != 0
        code += ">++"  # set c to 2
        code += "<<"  # point to a
        code += "["  # while a != 0
        code += "-"  # a -= 1
        code += ">>-"  # c -= 1
        code += "[>>+>+<<<-]>>>[<<<+>>>-]"  # copy c to y (via z)
        code += "<"  # point to y
        code += "-["  # if y == 0
        code += "<+"  # x += 1
        code += "<++"  # set c to 2
        code += ">>"
        code += "+"  # zero y
        code += "]"  # end if
        code += "<<<<"  # point to a
        code += "]"  # end while
        code += ">>>"  # point to x
        code += "[<<<+>>>-]"  # move x to a
        code += "<[-]"  # zero c
        code += "<-"  # b -= 1
        code += "]"  # end while
        return code
    elif op_token.type == Token.BITWISE_AND:
        code = get_bitwise_code("[->[-<<+>>]<]>[-]")
        return code
    elif op_token.type == Token.BITWISE_OR:
        code = get_bitwise_code("[>+<-]>[[-]<<+>>]")
        return code
    elif op_token.type == Token.BITWISE_XOR:
        code = get_bitwise_code("[>-<-]>[[-]<<+>>]")
        return code
    raise NotImplementedError
def get_print_string_code(string):
    """Return BF code printing *string* using one "character" cell.

    Each character's value is derived from the previous one, so the
    expensive cell-set code only bridges the difference between
    consecutive characters.  The next cell serves as a loop counter.
    """
    pieces = [
        "[-]",    # zero the character cell
        ">[-]",   # zero the loop-counter cell once, up front
        "<",      # back to the character cell
    ]
    previous = 0
    for ch in string:
        value = ord(ch)
        pieces.append(get_set_cell_value_code(value, previous, zero_next_cell_if_necessary=False))
        pieces.append(".")
        previous = value
    return "".join(pieces)
def get_move_right_index_cells_code(current_pointer, node_index):
    """
    Return BF code that evaluates *node_index* and then walks the pointer
    that many cells to the right (used for array element access).

    Working cells: index, steps_taken_counter.  At the end of execution
    the layout is: 0, index, next_available_cell, with the pointer on the
    next available cell.
    """
    code = node_index.get_code(current_pointer)  # index
    code += "[-]"  # counter = 0
    code += "<"  # point to index
    code += "["  # while index != 0
    code += ">>"  # point to new_counter (one after current counter)
    code += "[-]"  # zero new_counter
    code += "<"  # move to old counter
    code += "+"  # add 1 to counter
    code += "[>+<-]"  # move old counter to new counter
    code += "<"  # point to old index
    code += "-"  # sub 1 from old index
    code += "[>+<-]"  # move old index to new index
    code += ">"  # point to new index
    code += "]"  # end while
    # old_index=0 new_index res (pointing to old index)
    code += ">>"  # point to res
    return code
def get_move_left_index_cell_code():
    # used for arrays; complement of get_move_right_index_cells_code
    """
    Return BF code walking <index> cells back to the left, dragging <value> along.

    Expected layout: value, index (pointer on index).  On exit the pointer is
    one cell after <value>, which becomes the next available cell.
    """
    return ("["        # while index != 0
            "<"        # point to value
            "[<+>-]"   # shift value one cell to the left
            ">"        # back to index
            "-"        # index -= 1
            "[<+>-]"   # shift index one cell to the left
            "<"        # follow the index to its new cell
            "]")       # end while
# =================
# General
# =================
def get_NUM_token_value(token):
    """Return the integer value of a NUM token ("0x..." is parsed as hex)."""
    base = 16 if token.data.startswith("0x") else 10
    return int(token.data, base)
def get_variable_from_ID_token(ids_map_list, ID_token):
    """Return the variable object bound to *ID_token*, innermost scope first.

    *ids_map_list* is ordered from the innermost scope outwards; the first
    map containing the ID wins.  Raises BFSemanticError if no scope
    defines it.  (Idiom fix: iterate the list directly instead of
    `for i in range(len(...))`.)
    """
    ID = ID_token.data
    for scope_map in ids_map_list:  # innermost scope first
        ids_map = scope_map.IDs_dict
        if ID in ids_map:
            return ids_map[ID]
    raise BFSemanticError("'%s' does not exist" % str(ID_token))
def get_variable_dimensions(ids_map_list, ID_token):
    """Return the dimensions of the (array) variable named by *ID_token*."""
    return get_variable_from_ID_token(ids_map_list, ID_token).dimensions
def get_id_index(ids_map_list, ID_token):
    """Return the stack cell index assigned to the variable named by *ID_token*."""
    return get_variable_from_ID_token(ids_map_list, ID_token).cell_index
def get_offset_to_variable(ids_map_list, ID_token, current_pointer):
    """Return how many cells left of *current_pointer* the variable's cell lies."""
    return current_pointer - get_id_index(ids_map_list, ID_token)
|
997,321 | 34e6312163a1ba1c5aae225623fc2cdb764c08f7 | import re
# file_to_search = open('us-500.csv', 'r')
# print(file_to_search.readlines())
# newfile = open('mailist.txt', 'w')
with open("us-500.csv") as f:
    for line in f:
        # Extract email addresses from the line.
        # Fix: the original pattern used "@+", which also matched runs of
        # consecutive "@" signs; a valid address has exactly one "@".
        # "-" is additionally allowed in the domain part (e.g. mail-server.com).
        email_search = re.findall(r'([\w.-]+@[\w.-]+)', line)
        print(email_search)
# email_list.append(email_search)
# print(email_search)
# newfile.write(str(email_search))
# file_to_search.close()
# Need to save this output in a csv file --- tomorrow
# newfile.close()
# print(email_list)
|
997,322 | ae20ee322ac8805604eb9489c8931ad51ea46e60 | from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.support.ui import WebDriverWait
from selenium.common.exceptions import *
from selenium.webdriver.support import expected_conditions as EC
import traceback
import time
class BasePage(object):
    """Base class for Selenium page objects.

    Subclasses declare a ``locator_dictionary`` mapping attribute names to
    (By, value) locator tuples; accessing such an attribute waits for the
    element and returns it (see ``__getattr__``).
    """

    def __init__(self, browser, baseurl="http://192.168.1.154:8989"):
        # browser: a selenium WebDriver instance used by all helpers below
        self.browser = browser
        self.baseurl = baseurl
        self.timeout = 30  # seconds, used for the explicit waits in __getattr__

    def find_element(self,*loc):
        # Thin wrapper over WebDriver.find_element; *loc is a (By, value) pair.
        return self.browser.find_element(*loc)

    def url_open(self,url):
        # Navigate the browser to the given absolute URL.
        self.browser.get(url)

    def method_missing(self, what):
        # Fallback used when attribute lookup cannot be resolved.
        # NOTE(review): it only prints and implicitly returns None.
        print("No %s here!" % what)

    def __getattr__(self, what):
        # Invoked only for attributes not found through normal lookup.
        # If the name is a key in the subclass's locator_dictionary, wait for
        # the element and return a freshly-located instance of it.
        try:
            if what in self.locator_dictionary.keys():
                try:
                    element = WebDriverWait(self.browser, self.timeout).until(
                        EC.presence_of_element_located(self.locator_dictionary[what])
                    )
                except(TimeoutException, StaleElementReferenceException):
                    traceback.print_exc()
                try:
                    element = WebDriverWait(self.browser, self.timeout).until(
                        EC.visibility_of_element_located(self.locator_dictionary[what])
                    )
                except(TimeoutException, StaleElementReferenceException):
                    traceback.print_exc()
                # I could have returned element, however because of lazy loading, I am seeking the element before return
                # NOTE(review): if both waits above timed out, this lookup may
                # still raise NoSuchElementException.
                return self.find_element(*self.locator_dictionary[what])
            # NOTE(review): names absent from locator_dictionary fall through
            # here and implicitly return None instead of raising AttributeError.
        except AttributeError:
            # Reached when locator_dictionary itself is missing (re-entrant
            # __getattr__); delegate to method_missing via __getattribute__.
            super(BasePage, self).__getattribute__("method_missing")(what)
|
997,323 | 9b2a1a43c469704a8a5c98a19568775e6fc939ca | """
Management of InfluxDB 0.8 users
================================
(compatible with InfluxDB version 0.5-0.8)
.. versionadded:: 2014.7.0
"""
def __virtual__():
    """Load this state module only when the influxdb08 execution module is present."""
    if "influxdb08.db_exists" not in __salt__:
        return (False, "influxdb08 module could not be loaded")
    return "influxdb08_user"
def present(
    name, passwd, database=None, user=None, password=None, host=None, port=None
):
    """
    Ensure that the cluster admin or database user is present.
    name
        The name of the user to manage
    passwd
        The password to assign to the managed user
    database
        The database to create the user in
    user
        The user to connect as (must be able to create the user)
    password
        The password of the connection user
    host
        The host to connect to
    port
        The port to connect to
    """
    ret = {"name": name, "changes": {}, "result": True, "comment": ""}
    # check if db does not exist
    if database and not __salt__["influxdb08.db_exists"](
        database, user, password, host, port
    ):
        ret["result"] = False
        ret["comment"] = "Database {} does not exist".format(database)
        return ret
    # check if user exists
    if not __salt__["influxdb08.user_exists"](
        name, database, user, password, host, port
    ):
        # test mode: only report what would change, make no changes
        if __opts__["test"]:
            ret["result"] = None
            ret["comment"] = "User {} is not present and needs to be created".format(
                name
            )
            return ret
        # The user is not present, make it!
        if __salt__["influxdb08.user_create"](
            name, passwd, database, user, password, host, port
        ):
            ret["comment"] = "User {} has been created".format(name)
            ret["changes"][name] = "Present"
            return ret
        else:
            ret["comment"] = "Failed to create user {}".format(name)
            ret["result"] = False
            return ret
    # fallback
    ret["comment"] = "User {} is already present".format(name)
    return ret
def absent(name, database=None, user=None, password=None, host=None, port=None):
    """
    Ensure that the named cluster admin or database user is absent.
    name
        The name of the user to remove
    database
        The database to remove the user from
    user
        The user to connect as (must be able to remove the user)
    password
        The password of the connection user
    host
        The host to connect to
    port
        The port to connect to
    """
    ret = {"name": name, "changes": {}, "result": True, "comment": ""}
    # Nothing to do when the user is already gone.
    exists = __salt__["influxdb08.user_exists"](name, database, user, password, host, port)
    if not exists:
        ret["comment"] = "User {} is not present, so it cannot be removed".format(name)
        return ret
    # Test mode: report the pending removal without performing it.
    if __opts__["test"]:
        ret["result"] = None
        ret["comment"] = "User {} is present and needs to be removed".format(name)
        return ret
    if __salt__["influxdb08.user_remove"](name, database, user, password, host, port):
        ret["comment"] = "User {} has been removed".format(name)
        ret["changes"][name] = "Absent"
    else:
        ret["comment"] = "Failed to remove user {}".format(name)
        ret["result"] = False
    return ret
|
997,324 | d8721068beba3bd96bdf0376cbf7f8a838cd13ce | word = input().upper()
# Tally each uppercase letter of the word (A=0 ... Z=25).
counts = [0] * 26
for ch in word:
    counts[ord(ch) - 65] += 1
# Print the dominant letter, or "?" when the maximum count is shared.
top = max(counts)
print("?" if counts.count(top) > 1 else chr(counts.index(top) + 65))
997,325 | bf68336ab8e234e4b705c9b2e4545f0d90d95422 | """__author__ = 余婷"""
"""
在python中,函数就是一种特殊的类型。声明函数的时候,其实就是在声明类型是function的变量。
变量能做的事,函数都可以做
1.函数给其他变量赋值
"""
if __name__ == '__main__':
    # 1. Assign one variable to another
    a = 10
    b = a
    # Declaring func1 declares a variable of type function; func1 IS a variable.
    def func1():
        print('hello python')
    # c now refers to the same function object
    c = func1
    func1()
    c()
    # 2. Functions as list elements
    list1 = [a, '10', 100]
    list2 = []
    list3 = []
    for x in range(10):
        def func2(y):
            print(x+y)
        list2.append(func2)
        list3.append(func2(x))
    # every element of list2 is a function
    # NOTE(review): list2 holds the *same* func2 object ten times (it closes
    # over x late), and list3 holds the None values returned by func2(x).
    print(list2)
    print(list3)
    # list2[0] is a function
    func = list2[0]
    print(func(100))
    # call the function at index 1 of list2, passing 10
    x = 10
    list2[1](10)
    # a function used directly as a list element
    funcs = [func1]
    funcs[0]()
    # 3. Functions as dictionary values
    # sub(10,2,3) -- 10-2-3
    def sub(*nums):
        """
        Cumulative subtraction.
        :param nums: the numbers to subtract in order
        :return: the running difference (0 for no arguments)
        """
        if not nums:
            return 0
        # start from the first number
        sum1 = nums[0]
        for item in nums[1:]:
            sum1 -= item
        return sum1
    operation = {'+': lambda *nums: sum(nums), '-': sub, '*': lambda x, y: x*y}
    result = operation['-'](10, 20, 30, -100)
    print(result)
    # 4. Functions as arguments of other functions (callbacks)
    def clean_kitchen(time):
        print('在%s,打扫厨房' % time)
        print('收费200元')
        return 200
    def clean_floor(time):
        print('在%s,做地板清洁服务' % time)
        print('收费100元')
        return 100
    # order the given service at the given time
    def call_service(time: str, service):
        service(time)
    # pass a function as an argument to another function
    call_service('上午10点', clean_kitchen)
    call_service('下午2点', clean_floor)
    print('============================')
    # 5. Functions as return values
    # NOTE(review): this def shadows the `operation` dict defined above.
    def operation(operator: str):
        if operator == '+':
            def my_sum(*nums):
                sum1 = 0
                for num in nums:
                    sum1 += num
                print(sum1)
            # return the summing function itself
            return my_sum
        elif operator == '*':
            def my_sum(*nums):
                sum1 = 1
                for num in nums:
                    sum1 *= num
                print(sum1)
            # return the product function itself
            return my_sum
    # operation('+') evaluates to a function
    operation('+')(1, 2, 3)
    operation('*')(2, 3, 4)
|
997,326 | a9647f7dcd06a992d492a82b596193f104501190 | from sklearn.ensemble import RandomForestClassifier
import numpy as np
domainlist = []
class Domain:
    """A labelled domain-name sample ("dga" / "notdga")."""
    def __init__(self, _name, _label):
        self.name = _name
        self.label = _label
    def returnData(self):
        """Feature vector for this domain (delegates to processData)."""
        return processData(self.name)
    def returnLabel(self):
        """Numeric class: 0 for "notdga", 1 for anything else."""
        return 0 if self.label == "notdga" else 1
def processData(name):
    """Features for *name*: [total length, number of digit characters]."""
    digit_count = sum(1 for ch in name if ch.isdigit())
    return [len(name), digit_count]
def initData(filename):
    """Load "name,label" rows from *filename* into the global domainlist.

    Blank lines and lines starting with '#' are skipped.
    """
    with open(filename) as handle:
        for raw in handle:
            stripped = raw.strip()
            if not stripped or stripped.startswith("#"):
                continue
            fields = stripped.split(",")
            domainlist.append(Domain(fields[0], fields[1]))
def main():
    """Train a random forest on train.txt and classify test.txt into result.txt.

    result.txt receives one "<name>,<label>" line per test domain.
    """
    initData("train.txt")
    featureMatrix = []
    labelList = []
    for item in domainlist:
        featureMatrix.append(item.returnData())
        labelList.append(item.returnLabel())
    clf = RandomForestClassifier(random_state=0)
    clf.fit(featureMatrix, labelList)
    # Context managers guarantee both files are closed even on error
    # (the previous version leaked the handles if predict() raised).
    with open("test.txt", mode='r') as f, open("result.txt", mode='w') as foutput:
        for line in f:
            line = line.strip()
            if line.startswith("#") or line == "":
                continue
            name = line.split(",")[0]
            # predict() returns a one-element array for a single sample;
            # index it explicitly instead of truth-testing the array.
            result = clf.predict([processData(name)])
            if result[0] == 0:
                foutput.write(name + "," + "notdga\n")
            else:
                foutput.write(name + "," + "dga\n")
if __name__ == '__main__':
    main()
997,327 | 66a80808b57354612ea2322cb72dd8f19d32ff45 | # -*- coding: utf-8 -*-
from django.contrib.auth.models import AbstractBaseUser, PermissionsMixin, BaseUserManager, UserManager
from django.db import models
from django.utils import timezone
from juck.image.models import JuckImage
from django.conf import settings
class State(models.Model):
    """A province; groups City rows via the ``cities`` reverse relation."""
    class Meta:
        verbose_name = u"استان"
        verbose_name_plural = u"استانها"
    name = models.CharField(max_length=100, blank=False, null=True, verbose_name=u'نام استان', unique=True)
    def pair_renderer(self):
        # Build (pk, name) pairs for every city of this state, e.g. to feed
        # a dependent select box.
        cities = self.cities.select_related().all()
        return_value = []
        for city in cities:
            return_value.append((city.pk, city.name))
        return return_value
    def __unicode__(self):
        return self.name
class City(models.Model):
    """A city; each belongs to exactly one State."""
    class Meta:
        verbose_name = u'شهر'
        verbose_name_plural = u'شهرها'
    state = models.ForeignKey(State, related_name='cities', verbose_name=u'نام استان')
    name = models.CharField(max_length=100, verbose_name=u'نام شهر', unique=True)
    def __unicode__(self):
        return self.name
class JobSeekerProfile(models.Model):
    """Personal details attached one-to-one to a JobSeeker account."""
    class Meta:
        verbose_name = u'پروفایل کارجو'
        verbose_name_plural = u'پروفایل کارجویان'
    city = models.ForeignKey(City, verbose_name=u'شهر', related_name='jobseekerprofiles')
    state = models.ForeignKey(State, verbose_name=u'استان', related_name='jobseekerprofiles')
    national_id = models.CharField(max_length=20, verbose_name=u'کد ملی')
    date_of_birth = models.IntegerField(verbose_name=u'سال تولد', blank=True, null=True)
    sex = models.PositiveSmallIntegerField(verbose_name=u'جنسیت', blank=True, null=True)
    married = models.NullBooleanField(verbose_name=u'وضعیت تاهل', blank=True, default=None,null=True)
    image = models.ForeignKey(JuckImage, verbose_name=u'عکس پروفایل', null=True, blank=True)
    phone_number = models.CharField(verbose_name=u'شماره تلفن', max_length=20)
    mobile_number = models.CharField(verbose_name=u'شماره همراه', max_length=25, null=True, blank=True)
    military_service_status = models.CharField(verbose_name=u'وضعیت نظام وظیفه', max_length=100, null=True, blank=True)
    exemption_type = models.CharField(verbose_name=u'نوع معافیت', max_length=100, null=True, blank=True)
    # approved: profiles start unapproved until vetted (default False).
    approved = models.BooleanField(verbose_name=u'وضعیت تایید', default=False)
    def __unicode__(self):
        return " - ".join([u'پروفایل کارجو شماره ', str(self.id)])
class EmployerProfile(models.Model):
    """Company details attached one-to-one to an Employer account."""
    class Meta:
        verbose_name = u'پروفایل کارفرما'
        verbose_name_plural = u'پروفایل کارفرمایان'
    city = models.ForeignKey(City, verbose_name=u'شهر', related_name='employerprofiles')
    state = models.ForeignKey(State, verbose_name=u'استان', related_name='employerprofiles')
    company_name = models.CharField(max_length=200, verbose_name=u'نام سازمان')
    company_type = models.CharField(max_length=150, verbose_name=u'نوع سازمان')
    foundation_year = models.IntegerField(verbose_name=u'سال تاسیس')
    image = models.ForeignKey(JuckImage, verbose_name=u'لوگو سازمان', null=True, blank=True)
    reg_num = models.CharField(max_length=100, verbose_name=u'شماره ثبت')
    manager = models.CharField(max_length=250, verbose_name=u'مشخصات مدیر عامل', null=True, blank=True)
    user_rank = models.CharField(max_length=150, verbose_name=u'سمت شخص رابط', null=True, blank=True)
    field = models.CharField(max_length=200, verbose_name=u'زمینه فعالیت')
    address = models.TextField(u'آدرس')
    postal_code = models.CharField(max_length=50, verbose_name=u'کد پستی', null=True, blank=True)
    phone_number = models.CharField(verbose_name=u'شماره تلفن', max_length=20)
    mobile_number = models.CharField(verbose_name=u'شماره همراه', max_length=25, null=True, blank=True)
    website = models.CharField(verbose_name=u'تارنما', max_length=200, blank=True, null=True)
    # approved: profiles start unapproved until vetted (default False).
    approved = models.BooleanField(verbose_name=u'وضعیت تایید', default=False)
    def __unicode__(self):
        return " - ".join([u'پروفایل کارفرما شماره ', str(self.id)])
class JuckUserManager(UserManager):
    """Manager creating JuckUser rows with e-mail as the username field."""
    def create_user(self, email, password=None, **extra_fields):
        # New accounts start active and non-admin.
        now = timezone.now()
        if not email:
            raise ValueError(u'رایانامه باید تعیین شود')
        email = UserManager.normalize_email(email)
        user = self.model(email=email, is_active=True, is_admin=False,
                          last_login=now, date_joined=now, **extra_fields)
        user.set_password(password)
        user.save(using=self._db)
        return user
    def create_superuser(self, email, password, **extra_fields):
        u = self.create_user(email, password, **extra_fields)
        u.is_active = True
        u.is_admin = True
        # NOTE(review): role 4 is outside JuckUser.USER_CHOICES (1-3);
        # confirm superusers are meant to carry an out-of-range role.
        u.role = 4
        u.save(using=self._db)
        return u
class JuckUser(AbstractBaseUser, PermissionsMixin):
    """Custom user model keyed by e-mail, with a coarse role field."""
    class Meta:
        verbose_name = u'کاربر'
        verbose_name_plural = u'کاربران'
    email = models.EmailField(
        verbose_name=u'نام کاربری',
        max_length=255,
        unique=True,
        db_index=True,
    )
    # Role constants mirrored by USER_CHOICES below.
    JOB_SEEKER = 3
    MANAGER = 1
    EMPLOYER = 2
    USER_CHOICES = (
        (1, u'مدیر'),
        (2, u'کارفرما'),
        (3, u'کارجو'),
    )
    first_name = models.CharField(verbose_name=u'نام', max_length=100, blank=True)
    last_name = models.CharField(verbose_name=u'نام خانوادگی', max_length=150, blank=True)
    role = models.IntegerField(default=1, verbose_name=u'نوع کاربری', choices=USER_CHOICES)
    is_active = models.BooleanField(default=False)
    is_admin = models.BooleanField(default=False)
    date_joined = models.DateTimeField(u'زمان عضویت', default=timezone.now)
    objects = JuckUserManager()
    USERNAME_FIELD = 'email'
    def get_full_name(self):
        # "first last", single-space separated.
        return u' '.join([self.first_name, self.last_name])
    def get_short_name(self):
        return self.last_name
    def __unicode__(self):
        return self.email
    # TODO : Na'h, think about sth to make it right
    def has_perm(self, perm, obj=None):
        # Permissive stub: every user passes every permission check.
        return True
    def has_module_perms(self, app_label):
        # Permissive stub; see has_perm above.
        return True
    @property
    def is_staff(self):
        # Django admin access follows the is_admin flag.
        return self.is_admin
    def cast(self):
        # Downcast to the concrete subclass row: scan attributes for one
        # that is itself a JuckUser instance and return it (presumably the
        # multi-table-inheritance child accessor -- confirm).
        # NOTE(review): the bare except silently ignores all errors raised
        # while probing attributes.
        for name in dir(self):
            try:
                attr = getattr(self, name)
                if isinstance(attr, self.__class__):
                    return attr
            except:
                pass
        return self
class Manager(JuckUser):
    """Site administrator account (multi-table child of JuckUser)."""
    class Meta:
        verbose_name = u'مدیر'
        verbose_name_plural = u'مدیران'
        permissions = (
            ('superuser', u'مشاهده موارد مدیریتی'),
        )
class Employer(JuckUser):
    """Employer account with its own profile and e-mail activation key."""
    class Meta:
        verbose_name = u'کارفرما'
        verbose_name_plural = u'کارفرمایان'
    def __unicode__(self):
        # return self.profile.company_name
        return self.email
    profile = models.OneToOneField(EmployerProfile, verbose_name=u'پروفایل کارفرما', related_name='employer')
    activation_key = models.CharField(verbose_name=u'کد فعالسازی', max_length=200, blank=True, null=True)
    def get_rate(self):
        # Integer-rounded mean of all related ratings; 0 when unrated.
        ratings = self.ratings.all()
        rate = 0.0
        for item in ratings:
            rate += item.rate
        if len(ratings):
            return int(round(rate / len(ratings)))
        return 0
    def get_numer_of_rates(self):
        # (sic) misspelled name kept for backward compatibility with callers.
        return self.ratings.all().count()
class JobSeeker(JuckUser):
    """Job seeker account with profile, optional resume and activation key."""
    class Meta:
        verbose_name = u'کارجو'
        verbose_name_plural = u'کارجویان'
    def __unicode__(self):
        return self.email
    profile = models.OneToOneField(JobSeekerProfile, verbose_name=u'پروفایل کارجو', related_name='jobseeker')
    resume = models.OneToOneField('Resume', verbose_name=u'رزومه', null=True, blank=True, related_name='jobseeker')
    activation_key = models.CharField(verbose_name=u'کد فعالسازی', max_length=200, blank=True, null=True)
    def get_numer_of_rates(self):
        # (sic) misspelled name kept for backward compatibility with callers.
        return self.ratings.all().count()
    def get_rate(self):
        # Integer-rounded mean of all related ratings; 0 when unrated.
        # (Duplicates Employer.get_rate.)
        ratings = self.ratings.all()
        rate = 0.0
        for item in ratings:
            rate += item.rate
        if len(ratings):
            return int(round(rate / len(ratings)))
        return 0
class Education(models.Model):
    """One academic record (degree level, major, university) on a resume."""
    class Meta:
        verbose_name_plural = u'اطلاعات تحصیلات'
        verbose_name = u'اطلاعات تحصیلی'
    certificate = models.CharField(verbose_name=u'مدرک تحصیلی', max_length=100,
                                   choices=(
                                       ('under_grad', u'کارشناسی'),
                                       ('grad', u'کارشناسی ارشد'),
                                       ('phd', u'دکتری'),
                                       ('post_doc', u'پست دکتری'),
                                   ))
    status = models.CharField(verbose_name=u'وضعیت تحصیلی', max_length=100)
    major = models.CharField(verbose_name=u'رشته تحصیلی', max_length=200)
    orientation = models.CharField(verbose_name=u'گرایش تحصیلی', max_length=150)
    university_name = models.CharField(max_length=150, verbose_name=u'نام دانشگاه')
    university_type = models.CharField(verbose_name=u'نوع دانشگاه', max_length=100)
    def __unicode__(self):
        return " ".join([self.status, self.certificate, self.orientation, self.major, self.university_name])
class Experience(models.Model):
    """A past job or position listed on a resume."""
    class Meta:
        verbose_name = u'سابقه'
        verbose_name_plural = u'سوابق'
    #resume = models.ForeignKey('Resume', verbose_name=u'رزومه', related_name='experiences')
    title = models.CharField(max_length=200, verbose_name=u'عنوان سابقه')
    place = models.CharField(max_length=200, verbose_name=u'سازمان یا دانشگاه مربوطه')
    # NOTE(review): from_date/to_date are bare integers (years?); confirm the
    # expected calendar/format.
    from_date = models.IntegerField(verbose_name=u'از تاریخ')
    to_date = models.IntegerField(verbose_name=u'تا تاریخ')
    description = models.TextField(verbose_name=u'توضیحات', null=True, blank=True)
    cooperation_type = models.CharField(verbose_name=u'نوع همکاری', max_length=150)
    exit_reason = models.CharField(verbose_name=u'دلیل قطع همکاری', max_length=200, null=True, blank=True)
    def __unicode__(self):
        return self.title + u" در " + self.place + u"از تاریخ " + str(self.from_date) + u" تا " + str(self.to_date)
class Skill(models.Model):
    """A named skill plus proficiency level listed on a resume."""
    class Meta:
        verbose_name = u'مهارت'
        verbose_name_plural = u'مهارتها'
    title = models.CharField(max_length=150, verbose_name=u'عنوان')
    level = models.CharField(max_length=100, verbose_name=u'سطح تسلط')
    description = models.CharField(max_length=250, verbose_name=u'توضیح', null=True, blank=True)
    def __unicode__(self):
        return " - ".join([self.title, self.level])
class Resume(models.Model):
    """A job seeker's resume: file, free text and M2M detail records."""
    class Meta:
        verbose_name = u'رزومه'
        verbose_name_plural = u'رزومهها'
    # NOTE(review): upload_to is normally a path *relative* to MEDIA_ROOT;
    # prefixing MEDIA_ROOT here (and without a trailing separator) likely
    # yields a doubled/incorrect path -- confirm before changing, as this
    # affects stored files.
    resume_file = models.FileField(verbose_name=u'فایل رزومه', null=True, blank=True,
                                   upload_to=settings.MEDIA_ROOT + "user_resume")
    about_me = models.TextField(verbose_name=u'درباره من', null=True, blank=True)
    download_count = models.IntegerField(verbose_name=u'دفعات بارگیری', default=0)
    education = models.ManyToManyField(Education, verbose_name=u'تحصیلات')
    skill = models.ManyToManyField(Skill, related_name='skills', verbose_name=u'مهارتها')
    experience = models.ManyToManyField(Experience, related_name='experiences', verbose_name=u'سوابق کاری')
    def __unicode__(self):
        return " - ".join([u'رزومه شماره ', str(self.id)])
class TemporaryLink(models.Model):
    """A one-off, expiring URL hash tied to an e-mail address."""
    class Meta:
        verbose_name = u'لینک موقت'
        verbose_name_plural = u'لینک های موقت'
    url_hash = models.CharField(u'لینک', max_length=120, unique=True)
    expire_date = models.DateTimeField(u'زمان ابطال')
    email = models.EmailField(u'پست الکترونیکی')
    def __unicode__(self):
        return self.email + str(self.expire_date.date())
class HomeDetails(models.Model):
    """Five editable text slots for the landing page, toggled by ``state``."""
    class Meta:
        verbose_name = u'متن صفحه اول'
        verbose_name_plural = u'متون صفحه اول'
    state = models.BooleanField(default=False, verbose_name=u'وضعیت')
    text1 = models.TextField(verbose_name=u'متن شماره یک')
    text2 = models.TextField(verbose_name=u'متن شماره دو')
    text3 = models.TextField(verbose_name=u'متن شماره سه')
    text4 = models.TextField(verbose_name=u'متن شماره چهار')
    text5 = models.TextField(verbose_name=u'متن شماره پنج')
997,328 | d107eac3a3f4a2e720495bb55f16d90bf5fdc073 | import os,sys
import argparse
from collections import Counter
import re
import pdb
# Token-normalisation tables.  Each entry is (tag, [compiled regexes]);
# get_filter_tag() returns the tag of the first regex matching a token.
# "Detailed" filters: tokens replaced outright (URLs, e-mails, times, units).
filters_detailed = [
    ("url" , [re.compile(r'^https?[:/]{1,3}(www\.)?[a-z]+(\.?[a-z]+\/?)+.*?$',re.UNICODE),
    re.compile(r'^[wW]{3}\.[a-zA-Z]+(\.?[A-Z]+\/?)+.*?$',re.UNICODE),
    re.compile(r'^([a-zA-Z][^@])[a-zA-Z.]+\.com$',re.UNICODE),
    ]),
    ('email', [re.compile(r'^[-a-zA-Z0-9_.]+\@([a-zA-Z0-9]+\.)+[a-zA-Z]+$',re.UNICODE) ]),
    ("00:00" , [re.compile(r'[0-9](:[0-9]{2})+',re.UNICODE),
    re.compile(r'[0-9](:[0-9]{2})*[aApP][mM]$',re.UNICODE),
    re.compile(r'[0-9]hour$',re.UNICODE),] ),
    ("00km", [re.compile(r'[0-9]km$',re.UNICODE)]),
    ("00kg", [re.compile(r'[0-9]kg$',re.UNICODE)]),
    ("haha", [re.compile(r'^haha$',re.UNICODE),
    re.compile(r'^wkwk$',re.UNICODE)]),
]
# Word-shape filters applied when a token is out of vocabulary
# (social-network handles, hashtags, digit/letter shapes, capitalisation).
filters = [
    ("snUser" , [re.compile(r'^[@]([0-9]*[-a-zA-Z._]+[0-9]*[!?]?)+$',re.UNICODE)] ),
    ("hashTag" , [re.compile(r'^[#][-a-zA-Z._]{3,}[0-9]*[!?]?$',re.UNICODE),
    re.compile(r'^[#][0-9]+[-a-zA-Z._]{3,}[!?]?$',re.UNICODE),
    re.compile(r'^[#][0-9]+[-a-zA-Z._]{3,}[0-9]+[!?]?$',re.UNICODE), ]),
    ("twoDigitNum" , [re.compile(r'^[0-9]{2}$',re.UNICODE)] ),
    ("fourDigitNum" , [re.compile(r'^[0-9]{4}$',re.UNICODE)] ),
    ("hasDigitAndAlpha" , [re.compile(r'[0-9].*[a-zA-Z]',re.UNICODE) ,
    re.compile(r'[a-zA-Z].*[0-9]',re.UNICODE) ]) ,
    ("hasDigitAndDash" , [re.compile(r'[0-9]-[0-9]',re.UNICODE)] ),
    ("hasDigitAndSlash" , [re.compile(r'[0-9]/[0-9]',re.UNICODE)] ),
    ("hasDigitAndComma" , [re.compile(r'[0-9],[0-9]',re.UNICODE)] ),
    ("hasDigitAndPeriod" , [re.compile(r'[0-9][.][0-9]',re.UNICODE)] ),
    ("isHour" , [re.compile(r'[0-9]:[0-9]',re.UNICODE),
    re.compile(r'[0-9][aApP][mM]$',re.UNICODE)] ),
    ("othernum" , [re.compile(r'^[0-9]+$',re.UNICODE)] ),
    ("allCaps" , [re.compile(r'^[A-Z]+$',re.UNICODE)] ),
    ("capPeriod" , [re.compile(r'^[A-Z][.]$',re.UNICODE)] ),
    ("initCap" , [re.compile(r'^[A-Z][a-z]+$',re.UNICODE)] ),
    ("lowercase" , [re.compile(r'^[a-z]$',re.UNICODE)] ),
]
# Heuristic: token looks like a normal word (letters plus light punctuation).
is_prob_word = re.compile(r"^([a-zA-Z]+[-._',&]?)+$",re.UNICODE)
def get_filter_tag(word, filter_list):
    """Return the tag of the first pattern group in *filter_list* matching *word*.

    *filter_list* holds (tag, [compiled regexes]) pairs; when nothing
    matches, the word itself is returned unchanged.
    """
    for tag, patterns in filter_list:
        if any(pattern.search(word) is not None for pattern in patterns):
            return tag
    return word
if __name__ == "__main__":
parser = argparse.ArgumentParser()
#parser.add_argument("--l","-l", type=str, help="Language -aaa-")
parser.add_argument("--input","-i", type=str, help="Input file")
parser.add_argument("--mode","-m", type=str, default="train", help="Mode [train,eval]")
parser.add_argument("--vocab","-v", type=str, default=None, help="Filtered vocabulary")
parser.add_argument("--thr","-t", type=int, default=3, help="Cut-off threshold")
#parser.add_argument("--sent_len","-sl", type=int, default=190, help="Filter threshold for long sentences")
parser.add_argument("--dom","-d", type=str, default=None, help="Test domain (valid only for outd exps)")
parser.add_argument("--aggr","-aggr", action='store_true', help="Perform aggresive filtering (threshold oriented)")
parser.add_argument("--ign_emp","-ig", action='store_true', help="Ignore empty lines/sentences.")
parser.add_argument("--lower","-low", action='store_true', help="Lowercase all text")
args = parser.parse_args()
vocab = set()
# load input
data = open(args.input,'r').read().split('\n')
data = [line for line in data]
if data[-1] == '': data = data[:-1]
### aggressive filtering mode
## train mode
# create vocabulary
if args.mode == "train":
vocab = Counter()
for sent in data:
if sent=='': continue
if args.lower: sent = sent.lower()
vocab.update(sent.split(' '))
filt = []
count = 0
for x,y in vocab.most_common():
# if aggresive, evth below threshold is ignored
if y<=args.thr and args.aggr:
break
if len(x)>40:
continue
# if not aggressive, evth be;pw thre that is not a word is ignored
if y<=args.thr and is_prob_word.search(x)==None:
continue
# all possible urls, email and hours are ignored
if get_filter_tag(x,filters_detailed)!=x:
continue
filt.append([x,y])
if count%100000 == 0:
print('->',count)
count += 1
#filt = [[x,y] for x,y in vocab.most_common() if y>args.thr]
dom_pref = '' if args.dom==None else '.'+args.dom
vocab_fn = os.path.join(os.path.dirname(args.input),"vocab"+dom_pref)
open(vocab_fn,'w').write('\n'.join(["%s\t%d" % (w,f) for w,f in filt]) + '\n')
vocab = set([x for x,y in filt])
del filt
# eval mode
# load vocabulary
else:
if args.vocab==None:
print("Error: Filtered vocabulary file not specified!\nCheck arguments list with -h option")
sys.exit(1)
elif not os.path.exists(args.vocab):
print("Error: Filtered vocabulary file does not exist!")
sys.exit(1)
else:
for line in open(args.vocab,'r'):
line = line.strip('\n').strip(' ')
if line=='': continue
w,f = line.split('\t')
vocab.add(w)
#
#END-IF-MODE
outfile = open(args.input+".filt",'w')
count = 0
# filter data
for sent in data:
if sent=='' and not args.ign_emp:
print('',file=outfile)
continue
new_sent = []
if args.lower:
sent = sent.lower()
sent_tok = sent.split(' ')
#if args.ign_emp and len(sent_tok)>args.sent_len-1:
# continue
for word in sent_tok:
if word in vocab:
new_sent.append(word)
else:
tag = get_filter_tag(word,filters_detailed)
if tag!=word:
new_sent.append(tag)
continue
tag = get_filter_tag(word,filters)
if tag==word:
tag = 'unk'
new_sent.append("<"+tag+">")
#END-IF-VOCAB
#END-FOR-W
new_sent.append("#eos")
print(' '.join(new_sent),file=outfile)
if count % 100000 == 0:
print("->",count)
count+=1
#END-FOR-SENT
|
997,329 | b596f049572f2a11d05a10d8e723686f825fcfbd | from django.urls import path, re_path
from .views import (
blog_post_list_view,
blog_post_detail_view,
blog_post_create_view,
blog_post_update_view,
blog_post_delete_view,
)
# Blog CRUD routes, keyed by post slug.
urlpatterns = [
    path('', blog_post_list_view, name='list'),
    # NOTE(review): the '?' makes the final 'e' optional, so this matches
    # both "create/" and "creat/" -- confirm that is intended.
    re_path(r'^create?/$', blog_post_create_view, name='create'),
    path('<str:slug>', blog_post_detail_view, name='detail'),
    path('<str:slug>/edit/', blog_post_update_view, name='update'),
    path('<str:slug>/delete/', blog_post_delete_view, name='delete'),
]
997,330 | 317c80fbb69810b3526dbe0dbc186bfec384bfe8 | from django.contrib import auth as django_auth
import base64
from django.http import JsonResponse
from .models import Event,Guest
import time,hashlib
# User authentication via the HTTP Basic auth header.
def user_auth(request):
    """Authenticate *request* from its Basic auth header.

    Returns 'success', 'fail' (bad credentials) or 'null' (header
    missing/malformed).
    """
    get_http_auth=request.META.get('HTTP_AUTHORIZATION','b')
    # Fetch the auth header; the 'b' default makes a missing header split
    # into a single-element list, landing in the IndexError branch below.
    auth=get_http_auth.split()
    # Split into ['basic', '<base64 credentials>'].
    try:
        auth_parts=base64.b64decode(auth[1]).decode('utf-8').partition(':')
        # Decode the base64 payload and split at the first ':' into
        # (username, ':', password).
    except IndexError:
        return 'null'
    username,password=auth_parts[0],auth_parts[2]
    user=django_auth.authenticate(username=username,password=password)
    # Django credential check; log the user in on success.
    if user is not None:
        django_auth.login(request,user)
        return 'success'
    else:
        return 'fail'
# Event query API endpoint, guarded by Basic authentication.
def get_event_list(request):
    """Look up events by exact id (``eid``) or fuzzy name (``name``).

    JSON statuses: 200 success, 10011/10012 auth failures,
    10021 missing parameters, 10022 empty result.
    """
    auth_result=user_auth(request)
    if auth_result=='null':
        return JsonResponse({'status':10011,'message':'user auth null'})
    if auth_result == 'fail':
        return JsonResponse({'status': 10012, 'message': 'user auth fail'})
    eid = request.GET.get('eid', '')  # event id
    name = request.GET.get('name', '')  # event name
    if eid == '' and name == '':
        return JsonResponse({'status': 10021, 'message': 'parameter error'})
    # Exact lookup by primary key takes precedence.
    if eid != '':
        event = {}
        from django.core.exceptions import ObjectDoesNotExist
        try:
            result = Event.objects.get(id=eid)
        except ObjectDoesNotExist:
            return JsonResponse({'status': 10022, 'message': 'query result is empty'})
        else:
            event['name'] = result.name
            event['limit'] = result.limit
            event['status'] = result.status
            event['address'] = result.address
            event['start_time'] = result.start_time
            return JsonResponse({'status': 200, 'message': 'success', 'data': event})
    # Substring search by name returns a list of matches.
    if name != '':
        datas = []
        results = Event.objects.filter(name__contains=name)
        if results:
            for r in results:
                event = {}
                event['name'] = r.name
                event['limit'] = r.limit
                event['status'] = r.status
                event['address'] = r.address
                event['start_time'] = r.start_time
                datas.append(event)
            return JsonResponse({'status': 200, 'message': 'success', 'data': datas})
        else:
            return JsonResponse({'status': 10022, 'message': 'query result is empty'})
# User signature + timestamp verification.
def user_sign(request):
    """Validate a signed POST request.

    Expects POST fields ``time`` (client unix timestamp, whole seconds) and
    ``sign`` (md5 hex of "<time>&Guest-Bugmaster").  Returns one of:
    'error' (not POST), 'sign null' (missing fields), 'timeout'
    (timestamp 60s or more in the past), 'sign fail', 'sign success'.
    """
    import hmac
    if request.method == 'POST':
        client_time = request.POST.get('time', '')  # client timestamp
        client_sign = request.POST.get('sign', '')  # client signature
    else:
        return 'error'
    if client_time == '' or client_sign == '':
        return 'sign null'
    # Server-side timestamp, truncated to whole seconds.
    now_time = time.time()
    server_time = str(now_time).split('.')[0]
    # Reject stale requests.  NOTE(review): timestamps from the future pass
    # this check (negative difference); confirm that is acceptable.
    time_difference = int(server_time) - int(client_time)
    if time_difference >= 60:
        return 'timeout'
    # Recompute the expected signature.
    md5 = hashlib.md5()
    sign_str = client_time + '&Guest-Bugmaster'
    sign_byte_utf8 = sign_str.encode(encoding='utf-8')
    md5.update(sign_byte_utf8)
    server_sign = md5.hexdigest()
    # Constant-time comparison avoids leaking the signature via timing
    # (the previous '!=' comparison was timing-sensitive).
    if not hmac.compare_digest(server_sign, client_sign):
        return 'sign fail'
    else:
        return 'sign success'
def add_event(request):
    """Create an Event from signed POST data (signature checked by user_sign).

    JSON statuses: 200 success, 10011-10014 signature failures, 10021
    missing parameters, 10022/10023 duplicate id/name, 10024 bad start_time.
    """
    sign_result=user_sign(request)
    if sign_result=='error':
        return JsonResponse({'status':10011,'message':'request error'})
    elif sign_result=='sign null':
        return JsonResponse({'status':10012,'message':'user sign null'})
    elif sign_result=='timeout':
        return JsonResponse({'status':10013,'message':'user sign timeout'})
    elif sign_result=='sign fail':
        return JsonResponse({'status':10014,'message':'user sign error'})
    eid = request.POST.get('eid', '')  # event id
    name = request.POST.get('name', '')  # event title
    limit = request.POST.get('limit', '')  # attendee limit
    status = request.POST.get('status', '')  # event status
    address = request.POST.get('address', '')  # venue
    start_time = request.POST.get('start_time', '')  # event time
    if eid == '' or name == '' or limit == '' or start_time == '':
        return JsonResponse({'status': 10021, "message": 'parameter error'})
    result = Event.objects.filter(id=eid)
    if result:
        return JsonResponse({"status": 10022, "message": 'event id already exists'})
    result = Event.objects.filter(name=name)
    if result:
        return JsonResponse({'status': 10023, 'message': 'event name already exists'})
    # Default to status 1 when the client omits it.
    if status == '':
        status = 1
    from django.core.exceptions import ValidationError
    try:
        Event.objects.create(
            id=eid, name=name, limit=limit, address=address, status=int(status),
            start_time=start_time
        )
    except ValidationError as e:
        error = 'start_time format error.It must be in YYYY-MM-DD HH:MM:SS format.'
        return JsonResponse({'status': 10024, 'message': error})
    return JsonResponse({'status': 200, 'message': 'add event success'})
997,331 | a800672925ae76411ffac965cca4003dc4a3312b | with open('Q24.txt', 'r') as f:
    prompt = f.read()
# Normalise the raw text into rows, then into a 2-D grid of '#'/'.' cells.
prompt = prompt.strip()
prompt = prompt.split('\n')
bugs = []
for i in prompt:
    bugs.append([])
    for j in i:
        bugs[-1].append(j)
def get_bug(i, j, bugs):
    """1 if cell (i, j) holds a bug ('#'); 0 for empty or out-of-bounds."""
    in_bounds = 0 <= i < len(bugs) and 0 <= j < len(bugs[i])
    return int(in_bounds and bugs[i][j] == '#')
def num_bugs(i, j, bugs):
    """Count bugs in the four orthogonal neighbours of cell (i, j)."""
    total = 0
    for ni, nj in ((i + 1, j), (i - 1, j), (i, j + 1), (i, j - 1)):
        # Out-of-bounds neighbours count as empty.
        if 0 <= ni < len(bugs) and 0 <= nj < len(bugs[ni]):
            total += bugs[ni][nj] == '#'
    return total
def update(bugs):
    """Return the next generation of the bug automaton.

    Rules: a cell with exactly one neighbouring bug is '#' next turn; with
    exactly two it flips; any other count leaves it empty.
    """
    new = []
    for i in range(len(bugs)):
        new.append([])
        for j in range(len(bugs[i])):
            # Count neighbours once per cell (previously computed twice).
            n = num_bugs(i, j, bugs)
            if n == 1:
                new[-1].append('#')
            elif n == 2:
                new[-1].append('#' if bugs[i][j] == '.' else '.')
            else:
                new[-1].append('.')
    return new
# Iterate generations until a layout repeats, then score that layout.
pos = set()
while True:
    # Flatten the grid into a hashable string key.
    key = ''
    for i in bugs:
        for j in i:
            key += j
    if key in pos:
        break
    pos.add(key)
    bugs = update(bugs)
# Biodiversity rating: each '#' contributes 2**index, reading row-major.
p = 0
bio = 0
for i in bugs:
    for j in i:
        bio += pow(2, p) if j == '#' else 0
        p += 1
print(bio)
997,332 | 331ef6dfa6e375d16d5f2dc5a5fb79d4bc41295f | #!/usr/bin/env python3
from os import listdir
from os.path import isfile, join
import os
import shutil
def sort_files_in_a_folder(mypath):
    """
    Sort the plain files directly inside *mypath* into sibling folders
    named "<extension>_folder".

    Files without an extension are left in place (the previous version
    crashed on them with IndexError), and only the final suffix counts,
    so "a.tar.gz" goes to "gz_folder" (the old ``split('.')[1]`` picked
    the first suffix instead).
    """
    files = [f for f in listdir(mypath) if isfile(join(mypath, f))]
    filetype_folder_dict = {}
    for file in files:
        # splitext yields ('name', '.ext'); dotfiles like '.bashrc' have no ext.
        ext = os.path.splitext(file)[1].lstrip(".")
        if not ext:
            continue
        dest = join(mypath, ext + "_folder")
        filetype_folder_dict[ext] = dest
        # exist_ok replaces the manual isdir check.
        os.makedirs(dest, exist_ok=True)
    for file in files:
        ext = os.path.splitext(file)[1].lstrip(".")
        if ext in filetype_folder_dict:
            src_path = join(mypath, file)
            dest_path = filetype_folder_dict[ext]
            shutil.move(src_path, dest_path)
            print(src_path + ">>>" + dest_path)
if __name__ == "__main__":
mypath = "/home/dt/Downloads"
sort_files_in_a_folder(mypath)
|
997,333 | 03ef93cb0f9d29e42cf787ab46618f1ada8d6c7f | #! /usr/bin/env python2.7
# vim: tabstop=8 expandtab shiftwidth=4 softtabstop=4
# Christoph Gierling
from datetime import date
def store(tracks, country, date):
    """Create a per-country/per-date SQLite tracking table and fill it with
    the fake rows described by *tracks*.

    Each entry of *tracks* is {'row': <column values>, 'count': <copies>}.
    NOTE: Python 2 / legacy (pre-1.0) SQLAlchemy bound-metadata API.
    """
    from sqlalchemy import create_engine, Table, MetaData, Integer, String, Column
    engine = 'sqlite:///track_stats_services.db'
    db = create_engine(engine)
    db.echo = False
    metadata = MetaData(db)
    # Table name encodes country and date, e.g. Tracking_Data_ru_20120614.
    tableName = 'Tracking_Data_%s_%s' % (country, date)
    print ":: Creating a the table [", tableName, "] with some fake track data, this can take some time ..."
    table = Table(tableName, metadata,
        Column('id', Integer, primary_key=True),
        Column('trackId', Integer),
        Column('flowType', String(45)),
    )
    table.create(checkfirst=True)
    for track in tracks:
        print "\t",str(track)
        # Insert 'count' identical copies of the row.
        for i in range (track['count']):
            ins = table.insert()
            ins.execute(**track['row'])
    print ":: Done!"
# Fake funnel data: each dict is one row template plus how many copies to
# insert; the trackId encodes the funnel step.
tracks = [
    {'row':{'trackId':  1, 'flowType':'commonflow'}, 'count':100 },
    {'row':{'trackId': 10, 'flowType':'commonflow'}, 'count': 85 },
    {'row':{'trackId': 11, 'flowType':'commonflow'}, 'count': 10 }, #10 users lost
    {'row':{'trackId': 20, 'flowType':'commonflow'}, 'count': 70 },
    {'row':{'trackId': 21, 'flowType':'commonflow'}, 'count': 15 },
    {'row':{'trackId':211, 'flowType':'commonflow'}, 'count': 15 },
    {'row':{'trackId':212, 'flowType':'commonflow'}, 'count': 10 },
    {'row':{'trackId':213, 'flowType':'commonflow'}, 'count':  5 },
    {'row':{'trackId': 30, 'flowType':'commonflow'}, 'count': 79 },
    {'row':{'trackId': 40, 'flowType':'commonflow'}, 'count':  5 },
    {'row':{'trackId': 41, 'flowType':'commonflow'}, 'count': 55 },
    {'row':{'trackId': 50, 'flowType':'commonflow'}, 'count':  4 },
    {'row':{'trackId': 60, 'flowType':'commonflow'}, 'count':  3 }, #1 user lost to an Exception
    {'row':{'trackId': 70, 'flowType':'commonflow'}, 'count': 13 },
    {'row':{'trackId': 99, 'flowType':'commonflow'}, 'count': 1+1},
]
country = 'ru'
today = int(date.today().strftime('%Y%m%d')) # date as int in format [ 20120614 ]
store(tracks, country, today)
|
997,334 | aed4f5dc4fd84295fc67ca9fff76ade500017f9c | import pygame
import random
from os import path
# directory holding the sprite/background assets, relative to this file
img_dir = path.join(path.dirname(__file__), "img")

# window dimensions and frame-rate cap
WIDTH = 500
HEIGHT = 600
FPS = 60

# basic RGB colour constants
BLACK = (0, 0, 0)
WHITE = (255, 255, 255)
GREEN = (0, 255, 0)
RED = (255, 0, 0)
BLUE = (0, 0, 255)
YELLOW = (255, 255, 0)

pygame.init()
pygame.mixer.init()  # sound subsystem (initialised, not used below)
screen = pygame.display.set_mode((WIDTH, HEIGHT))
pygame.display.set_caption("Practice Game")
clock = pygame.time.Clock()
class Player(pygame.sprite.Sprite):
    """Player-controlled sprite: moved with the arrow keys and clamped so
    it never leaves the window."""

    SPEED = 7  # pixels per frame in each axis

    def __init__(self):
        pygame.sprite.Sprite.__init__(self)
        self.image = pygame.transform.scale(player_img, (30, 30))
        self.rect = self.image.get_rect()
        self.rect.center = (WIDTH / 2, HEIGHT / 2)
        self.speedx = 0
        self.speedy = 0

    def update(self):
        # re-derive velocity from the currently held keys every frame
        self.speedx = 0
        self.speedy = 0
        pressed = pygame.key.get_pressed()
        if pressed[pygame.K_LEFT]:
            self.speedx = -self.SPEED
        if pressed[pygame.K_RIGHT]:
            self.speedx = self.SPEED
        if pressed[pygame.K_UP]:
            self.speedy = -self.SPEED
        if pressed[pygame.K_DOWN]:
            self.speedy = self.SPEED
        self.rect.y += self.speedy
        self.rect.x += self.speedx
        # keep the sprite fully inside the window
        self.rect.right = min(self.rect.right, WIDTH)
        self.rect.left = max(self.rect.left, 0)
        self.rect.top = max(self.rect.top, 0)
        self.rect.bottom = min(self.rect.bottom, HEIGHT)
class Mobup(pygame.sprite.Sprite):
    """Enemy sprite that falls from above and re-enters at the top after
    leaving the bottom of the screen."""

    def __init__(self):
        pygame.sprite.Sprite.__init__(self)
        self.image = pygame.transform.scale(mob_img, (25, 25))
        self.rect = self.image.get_rect()
        self._reset()

    def _reset(self):
        # park the sprite above the visible area with a fresh random speed
        self.rect.x = random.randrange(WIDTH - self.rect.width)
        self.rect.y = random.randrange(-100, -40)
        self.speedy = random.randrange(3, 8)

    def update(self):
        self.rect.y += self.speedy
        if self.rect.top > HEIGHT:
            self._reset()
class Mobleft(pygame.sprite.Sprite):
    """Enemy sprite that spawns just off the left edge and travels right."""

    def __init__(self):
        pygame.sprite.Sprite.__init__(self)
        self.image = pygame.transform.scale(mob_img, (25, 25))
        self.rect = self.image.get_rect()
        self._respawn()

    def _respawn(self):
        """Place the sprite off-screen left with a fresh random speed."""
        self.rect.x = random.randrange(-50, -20)
        # FIX: vertical range must use rect.height, not rect.width (the
        # original only worked because the sprite happens to be square)
        self.rect.y = random.randrange(HEIGHT - self.rect.height)
        self.speedx = random.randrange(3, 8)

    def update(self):
        self.rect.x += self.speedx
        if self.rect.left > WIDTH:
            self._respawn()
class Mobright(pygame.sprite.Sprite):
    """Enemy sprite that spawns just off the right edge and travels left."""

    def __init__(self):
        pygame.sprite.Sprite.__init__(self)
        self.image = pygame.transform.scale(mob_img, (25, 25))
        self.rect = self.image.get_rect()
        self._respawn()

    def _respawn(self):
        """Place the sprite off-screen right with a fresh random speed."""
        # FIX: derive the spawn band from WIDTH instead of the hard-coded
        # 500/550 (numerically identical while WIDTH == 500)
        self.rect.x = random.randrange(WIDTH, WIDTH + 50)
        # FIX: vertical range uses rect.height, not rect.width
        self.rect.y = random.randrange(HEIGHT - self.rect.height)
        self.speedx = random.randrange(3, 8)

    def update(self):
        self.rect.x -= self.speedx
        if self.rect.right < 0:
            self._respawn()
# --- asset loading ---
background = pygame.image.load(path.join(img_dir, "shmuppracbg.jpg"))
background_rect = background.get_rect()
player_img = pygame.image.load(path.join(img_dir, "hud_p1.png"))
mob_img = pygame.image.load(path.join(img_dir, "hud_p2.png"))

# --- sprite setup: one player, 5 falling mobs, 3 from each side ---
all_sprites = pygame.sprite.Group()
player = Player()
all_sprites.add(player)
mobs = pygame.sprite.Group()
for i in range(5):
    mu = Mobup()
    all_sprites.add(mu)
    mobs.add(mu)
for i in range(3):
    ml = Mobleft()
    all_sprites.add(ml)
    mobs.add(ml)
for i in range(3):
    mr = Mobright()
    all_sprites.add(mr)
    mobs.add(mr)

# --- main loop: exits on window close or on the first player/mob collision ---
running = True
while running:
    clock.tick(FPS)  # cap the frame rate
    for event in pygame.event.get():
        if event.type == pygame.QUIT:
            running = False
    hits = pygame.sprite.spritecollide(player, mobs, False)
    if hits:
        running = False  # game over on any collision
    screen.fill(BLACK)
    screen.blit(background, background_rect)
    all_sprites.update()
    all_sprites.draw(screen)
    pygame.display.flip()
pygame.quit()
|
997,335 | 53c626d7614534c759dd186a1bb5da2883f0f367 | # coding:utf-8
from selenium import webdriver
from selenium.webdriver.common.keys import Keys
from time import sleep
from selenium.webdriver.support.ui import Select
# Prompt for the race name to look up
race_name = input('レース名を入力してください:')
# Launch the browser
driver = webdriver.Chrome()
# Open the netkeiba database page
driver.get('http://db.netkeiba.com/?rf=navi')
# Sanity check: the page title must contain "database"
assert 'データベース' in driver.title
# Search: type the race name, switch the category selector to 'race', submit.
# NOTE(review): find_element_by_* / find_elements_by_* were removed in
# Selenium 4 — this script requires Selenium 3.
driver.find_element_by_css_selector('#main > div.deta_search.fc > div.db_search_form_box.fc > form > input.field').send_keys(race_name)
element = driver.find_element_by_css_selector('#main > div.deta_search.fc > div.db_search_form_box.fc > form > select')
type_select = Select(element)
type_select.select_by_value('race')
driver.find_element_by_css_selector('#main > div.deta_search.fc > div.db_search_form_box.fc > form > input.button.imgover').click()
# Wait 5 seconds for the results to load
sleep(5)
# Collect the result links whose text contains the race name
links = driver.find_elements_by_partial_link_text(race_name)
# Keep the URLs for the last ten years
urls = []
for a in links[:10]:
    print(a.get_attribute('href'))
    urls.append(a.get_attribute('href'))
# Visit each year's race page and scrape the first data row
for url in urls:
    driver.get(url)
    sleep(5)
    title = driver.title
    # first data row of the results table: frame number, horse number, horse name
    wakuban = driver.find_element_by_css_selector('#contents_liquid > table > tbody > tr:nth-child(2) > td:nth-child(2) > span').text
    umaban = driver.find_element_by_css_selector('#contents_liquid > table > tbody > tr:nth-child(2) > td:nth-child(3)').text
    name = driver.find_element_by_css_selector('#contents_liquid > table > tbody > tr:nth-child(2) > td:nth-child(4) > a').text
    print('title :', title)
    print('wakuban :', wakuban)
    print('umaban :', umaban)
    print('name :', name)
driver.quit()
|
997,336 | d2a98f4a7cb145ccc474f851ed1cc1f1379f4ae0 | '''
Need to update
'''
import cv2
import imutils
import json
import numpy as np
import os
import keras
import numpy as np
from keras.models import Model
from keras.layers import Input, add
from keras.layers.core import Layer, Dense, Dropout, Activation, Flatten, Reshape
from keras.layers.convolutional import Conv2D, MaxPooling2D, UpSampling2D, ZeroPadding2D, Cropping2D
from keras.utils import np_utils
from imutils import paths
from keras.models import model_from_yaml
def getNames(frame, face_detect, extract_feature, findPeople, aligner):
    """Detect every face in *frame*, identify each one, and annotate the
    frame in place with a bounding box and a "<name> - <confidence>" label.

    NOTE(review): if any alignment fails, recog_data has fewer entries
    than rects, so the drawing loop below can IndexError — confirm.
    """
    minsize = 40
    rects, landmarks = face_detect.detect_face(frame,minsize);#min face size is set to 40x40
    aligns = []
    positions = []
    for (i, rect) in enumerate(rects):
        aligned_face, face_pos = aligner.align(160,frame,landmarks[:,i])
        # only keep successfully aligned 160x160 crops
        if len(aligned_face) == 160 and len(aligned_face[0]) == 160:
            aligns.append(aligned_face)
            positions.append(face_pos)
        else:
            print("Align face failed") #log
    if(len(aligns) > 0):
        features_arr = extract_feature.get_features(aligns)
        recog_data = findPeople(features_arr,positions, 'VGG_pic')
        for (i,rect) in enumerate(rects):
            cv2.rectangle(frame,(rect[0],rect[1]),(rect[2],rect[3]),(255,0,0)) #draw bounding box for the face
            cv2.putText(frame,f'{recog_data[i][0]}'+" - "+f'{recog_data[i][1]/100:.2%}',(rect[0],rect[1]),cv2.FONT_HERSHEY_SIMPLEX,0.5,(255,255,255),1,cv2.LINE_AA)
def readNames(imagePaths, face_detect, extract_feature, aligner):
    """Build per-pose face embeddings for the person in *imagePaths* and
    merge them into the on-disk feature database.

    NOTE(review): the final data_set[name] assignment uses the name from
    the last processed image, so the input is assumed to contain images
    of a single person — confirm with callers.
    """
    # FIX: close file handles deterministically (the original leaked both)
    with open('./face_data/facerec_VGG_pic.txt', 'r') as f:
        data_set = json.loads(f.read())
    person_imgs = {"Left": [], "Right": [], "Center": []}
    person_features = {"Left": [], "Right": [], "Center": []}
    print(imagePaths)
    NewimagePaths = new_images(imagePaths)  # augmented + original paths
    print(NewimagePaths)
    # loop over the image paths
    for (i, imagePath) in enumerate(NewimagePaths):
        print("[INFO] processing image {}/{}".format(i + 1,
            len(NewimagePaths)))
        # -2 is the name of the directory, -1 is the name of the file
        name = imagePath.split(os.path.sep)[-2]
        frame = cv2.imread(imagePath)
        rects, landmarks = face_detect.detect_face(frame, 40)  # min face size 40x40
        # FIX: use j here — the original reused i, shadowing the progress counter
        for (j, rect) in enumerate(rects):
            aligned_frame, pos = aligner.align(160, frame, landmarks[:, j])
            if len(aligned_frame) == 160 and len(aligned_frame[0]) == 160:
                person_imgs[pos].append(aligned_frame)
                print('Recognized position:' + str(pos))
    # average the embeddings per pose and store them under the person's name
    for pos in person_imgs:
        person_features[pos] = [np.mean(extract_feature.get_features(person_imgs[pos]), axis=0).tolist()]
    data_set[name] = person_features
    with open('./face_data/facerec_VGG_pic.txt', 'w') as f:
        f.write(json.dumps(data_set))
def new_images(pathOfImages):
    """Augment the given images (flip variants + autoencoder variants) and
    return every image path now present in the source directory.

    Raises SystemExit when the input list is empty.
    """
    if not pathOfImages:
        print(pathOfImages, "folder has no image")
        # FIX: the original called the interactive builtin exit(); raise the
        # equivalent SystemExit explicitly so this also works outside a REPL
        raise SystemExit
    transformed_images(pathOfImages)
    AE_images(pathOfImages)
    # all augmented files are written next to the originals
    directory = os.path.dirname(pathOfImages[0])
    return list(paths.list_images(directory))
def transformed_images(pathOfImages):
    """Write three flipped variants of every image next to the original.

    NOTE: cv2.flip flip-codes are 0 = flip around the x-axis (vertical
    flip), 1 = horizontal flip, -1 = both. The original _hori/_ver
    suffixes are swapped with respect to the actual flip performed; they
    are kept unchanged here for output-filename compatibility.
    """
    for ipath in pathOfImages:
        print(ipath)
        img = cv2.imread(ipath)
        # FIX: the .copy() calls in the original were dead stores —
        # cv2.flip already returns a new image
        flipped_x = cv2.flip(img, 0)     # written with the _hori suffix
        flipped_y = cv2.flip(img, 1)     # written with the _ver suffix
        flipped_both = cv2.flip(img, -1)
        name = ipath.split(os.path.sep)[-1]
        directory = os.path.dirname(ipath)
        cv2.imwrite(directory + os.path.sep + name + "_hori.jpg", flipped_x)
        cv2.imwrite(directory + os.path.sep + name + "_ver.jpg", flipped_y)
        cv2.imwrite(directory + os.path.sep + name + "_both.jpg", flipped_both)
def AE_images(pathOfImages):
    """Run every image through the pretrained autoencoder and write the
    reconstruction next to the original with an _AE.jpg suffix.

    NOTE(review): the model architecture file is named .json but is loaded
    with model_from_yaml — confirm the file actually contains YAML that
    Keras' loader accepts.
    """
    print(" image for AE: ", len(pathOfImages))
    yaml_file = open('./models/model_AllImage.json', 'r')
    loaded_model_yaml = yaml_file.read()
    yaml_file.close()
    loaded_model = model_from_yaml(loaded_model_yaml)
    # load weights into new model
    loaded_model.load_weights("./models/model_AllImage.h5")
    print("Loaded model from disk")
    for ipath in pathOfImages:
        print(ipath)
        features=cv2.imread(ipath)
        # the autoencoder expects a fixed 500x800 input
        features_resized = cv2.resize(features,(500,800))
        resize = features_resized.shape
        pred = loaded_model.predict(features_resized.reshape(-1,resize[0], resize[1],resize[2]))
        transformed = pred.reshape(resize[0], resize[1],-1)
        arr = ipath.split(os.path.sep)
        name = ipath.split(os.path.sep)[-1]
        dirctory = (os.path.sep).join(arr[0:len(arr)-1])
        cv2.imwrite(dirctory+os.path.sep +name+"_AE.jpg",transformed)
    print("Autoencoder done.")
|
997,337 | 1ab324ae4b3c03913262b5cd86b31083d7d118c2 | import numpy as np
def hastings_step(f, x, step, p):
    """One vectorised Metropolis step on the negative log-likelihood f.

    x    -- (n_chains, dim) current states
    step -- proposal offsets, same shape as x
    p    -- per-chain log-uniform acceptance thresholds
    Returns [new states, f(new states), acceptance fraction].
    """
    proposal = x + step
    # log acceptance ratio: (-f(proposal)) - (-f(x))
    log_alpha = f(x) - f(proposal)
    accepted = p < log_alpha
    new_x = np.copy(x)
    new_x[accepted] = proposal[accepted]
    return [new_x, f(new_x), np.mean(accepted)]
def ham_hastings_step(f,g,x,mom,r,eps,L):
Q = np.copy(x)
P = np.copy(mom)
P = P - eps * g(Q)/2
for i in range(L):
Q = Q + eps*P
if i != L-1:
P = P - eps*g(Q)
P = P - eps*g(Q)/2
P = -P
fU = f(Q)
fK = (P**2).sum(axis = 1)/2
iU = f(x)
iK = (mom**2).sum(axis = 1)/2
out = np.copy(x)
i = (r < (fU - iU +fK - iK))
frac = np.mean(i)
out[i] = Q[i]
return out,f(out),frac
def hastings(f, init, nstep, eps=.1, grad=-1, L=10):
    """Run nstep Metropolis-Hastings (or HMC when grad is given) updates.

    f    -- NEGATIVE log-likelihood, vectorised over rows of init
    init -- (n_chains, dim) starting states
    grad -- -1 for random-walk proposals, otherwise the gradient of f
    Returns (final states, f(final states), mean acceptance fraction).
    """
    n_chains, dim = init.shape
    # pre-draw all randomness: one log-uniform and one normal block per step
    log_u = np.log(np.random.uniform(0, 1, [nstep, n_chains]))
    noise = np.random.randn(nstep, n_chains, dim)
    state = np.copy(init)
    fval = f(state)
    rates = []
    for k in range(nstep):
        if grad == -1:
            state, fval, rate = hastings_step(f, state, noise[k] * eps, log_u[k])
        else:
            state, fval, rate = ham_hastings_step(f, grad, state, noise[k], log_u[k], eps, L)
        rates.append(rate)
    return state, fval, np.mean(rates)
def AIS(f1,f2,f1sam,shape,n_samp,n_AIS_step,nhstep,eps = .1,grad = -1,L = 10,PRINT = False):
    #THESE ARE NEG LOG LIKELIHOODS
    """Annealed Importance Sampling from f1 to f2.

    f1, f2     -- NEGATIVE log-densities of the start and target
                  distributions (f1 normalised for lW to estimate the log
                  normalising constant of f2)
    f1sam      -- sampler: f1sam([n_samp, shape]) -> initial particles
    shape      -- dimensionality of one particle
    n_AIS_step -- number of annealing temperatures
    nhstep     -- Metropolis/HMC transitions per temperature
    grad       -- -1 for random-walk moves, else (grad_f1, grad_f2)
    Returns (final particles X, per-particle log-weights lW, list of
    per-temperature acceptance fractions fro).
    """
    beta = np.linspace(0,1,n_AIS_step + 1)
    X = f1sam([n_samp,shape])
    F = []
    fro = []
    for k in range(1,len(beta)):
        if PRINT:
            print(k)
        # annealed densities at the previous and current temperature
        fa = lambda y:(1.-beta[k-1])*f1(y) + beta[k-1]*f2(y)
        fb = lambda y:(1.-beta[k])*f1(y) + beta[k]*f2(y)
        if grad != -1:
            g = lambda y:(1.-beta[k])*grad[0](y) + beta[k]*grad[1](y)
        else:
            g = -1
        # log-weight increment log f_b(X) - log f_a(X), evaluated BEFORE the move
        F.append([-fa(X),-fb(X)])
        X,f,fr = hastings(fb,X,nhstep,eps,g,L)
        if PRINT:
            G = np.array(F)
            print((G[:,1] - G[:,0]).sum(axis = 0).mean())
        fro.append(fr)
    #F.append([-fa(X),-fb(X)])
    F = np.array(F)
    # accumulate the per-temperature increments into the final log-weights
    lW = (F[:,1] - F[:,0]).sum(axis = 0)
    return X,lW,fro
if __name__ == "__main__":
    # Smoke test: anneal from a Laplace prior to a narrow Gaussian and
    # compare the AIS estimate of the log normalising constant with the
    # closed-form value.
    def prior(x,N):
        # log-density of N iid Laplace(0,1) variables (normalised)
        return - np.abs(x).sum(axis = 1) - N * np.log(2)
    def poste(x,N):
        #UNNORMALIZED PSOTERIOR!
        return - ((x)*(x)/(.25**2)).sum(axis = 1)/2
    def true_norm(x,N):
        #UNNORMALIZED PSOTERIOR!
        # closed-form log normalising constant of the Gaussian above
        return - (N/2)*np.log(2*np.pi*.25*.25)
    def g_prior(x,N):
        return - np.sign(x)
    def g_poste(x,N):
        return - (x)/(.25**2)
    N = 200
    nsamp = 50
    n_AIS_step = 10000
    nhstep = 1
    eps = .05
    L = 10
    # AIS works with NEGATIVE log-likelihoods, hence the sign flips
    fa = lambda x: -prior(x,N)
    fb = lambda x: -poste(x,N)# + prior(x,N)
    ga = lambda x: -g_prior(x,N)
    gb = lambda x: -g_poste(x,N)# + prior(x,N)
    norm = lambda x: -true_norm(x,N)# + prior(x,N)
    def prior_samp(n):
        return np.random.laplace(0,1,[n[0],n[1]])
    #f1,f2,f1sam,shape,n_samp,n_AIS_step,nhstep,eps = .1,grad = -1,L = 10)
    import time
    t1 = time.time()
    XO,W,fro = AIS(fa,fb,prior_samp,N,n_samp = nsamp,n_AIS_step = n_AIS_step,nhstep = nhstep,eps = eps,grad = [ga,gb],L = L,PRINT = False)
    t2 = time.time()
    print("time: {}".format(t2 - t1))
    print("AIS norm",np.log(np.mean(np.exp(W))))
    print("AIS norm",np.mean(W))
    print("true norm",norm(1))
    print(np.mean(fro))
|
997,338 | 976236a825b932f8a181fdaf172156f7f6475456 | #sys.argv[1] should be the path of the file with input sequences
#sys.argv[2] should be the path of the file where to save sequence
import sys
import azimuth.model_comparison
import numpy as np
def getAzimuth(sequences):
    """Score guide sequences with the Azimuth on-target model (no amino
    acid cut positions or percent-peptide features supplied)."""
    return azimuth.model_comparison.predict(sequences, None, None)
# Load guide sequences as fixed-width strings (dtype U34 caps length at 34
# characters; ndmin=1 keeps a single-line file as a 1-element array).
sequences = np.loadtxt(sys.argv[1], dtype="U34", ndmin=1)
scores = getAzimuth(sequences)
np.savetxt(sys.argv[2], scores)
|
997,339 | 249bb51d04a08e065f3f1139812bb51b4cf6882f | """
File: figureS08.py
Purpose: Generates figure S08.
Figure S08 analyzes heterogeneous (2 state), uncensored,
single lineages (no more than one lineage per population).
"""
import numpy as np
from .figureCommon import (
getSetup,
subplotLabel,
commonAnalyze,
pi,
T,
E2,
max_desired_num_cells,
num_data_points,
min_desired_num_cells,
figureMaker
)
from ..LineageTree import LineageTree
def makeFigure():
    """
    Makes figure S08.

    (FIX: the original docstring said "figure 2", contradicting the
    module header, which states this file generates figure S08.)
    """
    # Get list of axis objects
    ax, f = getSetup((10, 13.333), (4, 3))

    figureMaker(ax, *accuracy())

    subplotLabel(ax)

    return f
def accuracy():
    """Simulate uncensored two-state lineages of increasing size and run
    the shared parameter-estimation analysis over them.

    One single-lineage population is generated per desired cell count,
    paired with the true initial distribution pi for evaluation.
    """
    cell_counts = np.linspace(min_desired_num_cells, max_desired_num_cells, num_data_points)
    # one single-lineage population per target size
    populations = [[LineageTree.init_from_parameters(pi, T, E2, count)]
                   for count in cell_counts]
    true_pis = [pi for _ in cell_counts]
    return commonAnalyze(populations, 2, list_of_fpi=true_pis)
|
997,340 | 7694e7339e95ec5d7a437fa12363c5537265ef7a | import os
import yaml
def load_config_or_die(config_dir, config_basename):
    """Merge the three standard YAML config files from *config_dir*, then
    overlay the experiment-specific base config on top.

    Propagates IOError/OSError if any file is missing (hence "or_die").
    Returns the merged configuration dict.
    """
    config = {}
    for name in ('models.yaml', 'policies.yaml', 'agents.yaml'):
        with open(os.path.join(config_dir, name), 'r') as fh:
            config.update(yaml.safe_load(fh))
    with open(os.path.join(config_dir, config_basename), 'r') as fh:
        overlay = yaml.safe_load(fh)
    overwrite_default_values(overlay, config)
    return config
def pretty_print(config, indent=0):
    """Render a nested config dict as an aligned, indented text summary.

    Keys are left-padded to a column that shrinks with nesting depth;
    dict values recurse onto the following lines.
    """
    pad = ' ' * indent
    width = 30 - indent * 2
    rendered = []
    for key, value in config.items():
        entry = pad + '{:{align}}'.format(str(key), align=width)
        if isinstance(value, dict):
            entry += '\n' + pretty_print(value, indent + 1)
        else:
            entry += '{}\n'.format(str(value))
        rendered.append(entry)
    return ''.join(rendered)
def overwrite_default_values(update_from, update_to):
    """Recursively overlay *update_from* onto *update_to* in place.

    Dict values present on BOTH sides are merged recursively; any other
    value (including a dict replacing a scalar) overwrites wholesale.
    Returns update_to for convenience.
    """
    for key in update_from:
        # FIX: the original tested isinstance(update_from[key], dict) twice,
        # so a dict overlaying a non-dict recursed into the scalar and crashed
        if (key in update_to
                and isinstance(update_from[key], dict)
                and isinstance(update_to[key], dict)):
            overwrite_default_values(update_from[key], update_to[key])
        else:
            update_to[key] = update_from[key]
    return update_to
|
997,341 | 29686bf0ae1b6ac880f30a9d99e329a1127dc6aa | import numpy as np
from sklearn.linear_model import LogisticRegression
from sklearn.svm import SVC
class BasePU:
    """Shared helpers for positive-unlabeled (PU) learners."""

    @staticmethod
    def _make_propensity_weighted_data(x, s, e, sample_weight=None):
        """Duplicate x into a positively- and a negatively-labelled copy,
        weighted by the label propensity scores e.

        x -- feature matrix; s -- observed labels (1 = labelled positive,
        0 = unlabelled); e -- per-example propensity P(labelled | positive).
        Returns (doubled features, labels, weights).
        """
        pos_weight = s / e
        neg_weight = (1 - s) + s * (1 - 1 / e)
        if sample_weight is not None:
            pos_weight = pos_weight * sample_weight
            neg_weight = neg_weight * sample_weight
        doubled_x = np.concatenate([x, x])
        labels = np.concatenate([np.ones_like(s), np.zeros_like(s)])
        weights = np.concatenate([pos_weight, neg_weight])
        return doubled_x, labels, weights
class LogisticRegressionPU(LogisticRegression, BasePU):
    """Logistic regression for positive-unlabeled (PU) data.

    When propensity scores e are passed to fit(), every example is
    duplicated into a weighted positive and negative copy (see
    BasePU._make_propensity_weighted_data); without e this behaves exactly
    like sklearn's LogisticRegression.
    """

    def __init__(self, penalty='l2', dual=False, tol=1e-4, C=1.0,
                 fit_intercept=True, intercept_scaling=1, class_weight=None,
                 random_state=None, solver='liblinear', max_iter=100,
                 multi_class='ovr', verbose=0, warm_start=False, n_jobs=1):
        # Forward every hyper-parameter verbatim; the explicit signature keeps
        # sklearn's get_params()/clone() machinery working.
        LogisticRegression.__init__(self,penalty=penalty, dual=dual, tol=tol, C=C,
                                    fit_intercept=fit_intercept,intercept_scaling=intercept_scaling,
                                    class_weight=class_weight, random_state=random_state,
                                    solver=solver, max_iter=max_iter, multi_class=multi_class,
                                    verbose=verbose, warm_start=warm_start, n_jobs=n_jobs)

    def fit(self, x, s, e=None, sample_weight=None):
        """Fit on observed labels s (1 = labelled positive, 0 = unlabelled).

        e -- optional per-example propensity scores P(labelled | positive);
             when given, training runs on the propensity-weighted doubled data.
        """
        if e is None:
            super().fit(x,s,sample_weight)
        else:
            Xp,Yp,Wp = self._make_propensity_weighted_data(x,s,e,sample_weight)
            super().fit(Xp,Yp,Wp)
|
997,342 | 8a552e5e7f68eae05d9e50fb7b88152ac74544d3 | from _typeshed import Incomplete
from influxdb_client.service._base_service import _BaseService
class LegacyAuthorizationsService(_BaseService):
    """Typing stub for the InfluxDB legacy-authorizations REST service.

    Each endpoint comes in three flavours: the plain call, a
    *_with_http_info variant (also returns response metadata), and an
    awaitable *_async variant.
    """
    def __init__(self, api_client: Incomplete | None = ...) -> None: ...
    # delete an authorization by id
    def delete_legacy_authorizations_id(self, auth_id, **kwargs): ...
    def delete_legacy_authorizations_id_with_http_info(self, auth_id, **kwargs): ...
    async def delete_legacy_authorizations_id_async(self, auth_id, **kwargs): ...
    # list all authorizations
    def get_legacy_authorizations(self, **kwargs): ...
    def get_legacy_authorizations_with_http_info(self, **kwargs): ...
    async def get_legacy_authorizations_async(self, **kwargs): ...
    # fetch a single authorization by id
    def get_legacy_authorizations_id(self, auth_id, **kwargs): ...
    def get_legacy_authorizations_id_with_http_info(self, auth_id, **kwargs): ...
    async def get_legacy_authorizations_id_async(self, auth_id, **kwargs): ...
    # update an authorization
    def patch_legacy_authorizations_id(self, auth_id, authorization_update_request, **kwargs): ...
    def patch_legacy_authorizations_id_with_http_info(self, auth_id, authorization_update_request, **kwargs): ...
    async def patch_legacy_authorizations_id_async(self, auth_id, authorization_update_request, **kwargs): ...
    # create an authorization
    def post_legacy_authorizations(self, legacy_authorization_post_request, **kwargs): ...
    def post_legacy_authorizations_with_http_info(self, legacy_authorization_post_request, **kwargs): ...
    async def post_legacy_authorizations_async(self, legacy_authorization_post_request, **kwargs): ...
    # set the password for an authorization
    def post_legacy_authorizations_id_password(self, auth_id, password_reset_body, **kwargs): ...
    def post_legacy_authorizations_id_password_with_http_info(self, auth_id, password_reset_body, **kwargs): ...
    async def post_legacy_authorizations_id_password_async(self, auth_id, password_reset_body, **kwargs): ...
|
997,343 | b8698b49a1b35c78a72ec12dd0db661798333750 | import os
from . import BaseTest
from click.testing import CliRunner
from virl.cli.main import virl
from virl.swagger.app import app
try:
from unittest.mock import patch
except ImportError:
from mock import patch
class Tests(BaseTest):
    """CLI-level tests for the `virl swagger` command group."""

    @patch("virl.cli.swagger.commands.subprocess.Popen", autospec=False)
    def test_virl_swagger_start(self, call_mock):
        # `swagger start` should open the local UI in the default browser
        runner = CliRunner()
        runner.invoke(virl, ["swagger", "start"])
        url = "http://localhost:5000"
        call_mock.assert_called_with(["open", url])

    def test_virl_swagger_status(self):
        # seed the port/pid files that `swagger status` reads
        with open("/tmp/virl_swagger.port", "w") as fh:
            fh.write("1234")
        with open("/tmp/virl_swagger.pid", "w") as fh:
            fh.write("1234")
        runner = CliRunner()
        result = runner.invoke(virl, ["swagger", "status"])
        expected_output = "VIRL swagger UI is running at http://localhost:1234"
        self.assertIn(expected_output, result.output)

    @patch("virl.cli.swagger.commands.subprocess.Popen", autospec=False)
    def test_virl_swagger_stop(self, call_mock):
        # `swagger stop` kills the recorded pid via a shell command
        runner = CliRunner()
        result = runner.invoke(virl, ["swagger", "stop"])
        print(result.output)
        call_mock.assert_called_with("kill $(cat /tmp/virl_swagger.pid)", shell=True)
class SwaggerFlaskApp(BaseTest):
    """Tests against the embedded Flask app that serves the swagger UI."""

    def setUp(self):
        # the app reads its connection settings from the environment
        os.environ["VIRL_SWAGGER_HOST"] = "localhost"
        os.environ["VIRL_SWAGGER_USERNAME"] = "guest"
        os.environ["VIRL_SWAGGER_PASSWORD"] = "guest"
        os.environ["VIRL_SWAGGER_PORT"] = "1234"
        # NOTE(review): the next assignment is immediately overwritten by the
        # test client below — the first line looks redundant; confirm.
        self.app = app
        app.testing = True
        self.app = app.test_client()

    def test_swagger_ui(self):
        # the UI page pre-authorises basic auth with the configured credentials
        resp = self.app.get("/")
        expected_data = b'ui.preauthorizeBasic("basicAuth", "guest", "guest")'
        self.assertEqual(resp.status_code, 200)
        self.assertIn(expected_data, resp.data)

    def test_swagger_spec(self):
        # the generated spec advertises the configured host:port
        resp = self.app.get("/swagger.json")
        expected_data = b'"host": "localhost:1234",'
        self.assertEqual(resp.status_code, 200)
        self.assertIn(expected_data, resp.data)

    def tearDown(self):
        pass
        # os.close(self.db_fd)
        # os.unlink(flaskr.app.config['DATABASE'])
|
997,344 | 16bf8345840f72bb3a50c03ca986c47870bf5ffb | import pygame.display
|
997,345 | 818f8e9f1cc29c1274bae9788e0f6820674863e9 | from .cms_metatags import CMSMetaTags
# FIX: __all__ must list export names as strings (PEP 8 / the import
# machinery expects strings, not the objects themselves).
__all__ = [
    "CMSMetaTags",
]
|
997,346 | edd66ab7bc8def989337f3754c6c61175a5435f5 | #!/usr/bin/env python
# coding: utf-8
# In[16]:
import numpy as np
import netsquid as ns
from netsquid.qubits.qubitapi import *
'''
function:
Generate a GHZ set with customized length and entangled qubits.
input:
num_qubits: Numbers of qubits in a column.
num_sets: Numbers of qubits in a raw.
output:
A 2-D arrary of qubits with every one in the same raw entangled.
'''
def Create_GHZ_set_list(num_qubits,num_sets):
    """Return a num_qubits x num_sets list of qubits where each ROW is a
    num_sets-qubit GHZ state (H on the first qubit, then CNOT fan-out)."""
    qList_2D=[]
    for i in range(num_qubits):
        qList = create_qubits(num_sets) #qubit 000
        H | qList[0]
        tmp=[qList[0]]
        # entangle every remaining qubit in the row with the first one
        for j in range(1, num_sets):
            ns.qubits.operate([qList[0],qList[j]], ns.CNOT)
            tmp.append(qList[j])
        qList_2D.append(tmp)
    return qList_2D
# In[1]:
# Verify: build 5 GHZ triples and measure one triple in the Z basis — all
# three outcomes must agree.
from netsquid.qubits.operators import *
tmp = Create_GHZ_set_list(5, 3)
print(tmp)
# FIX: np.asarray returned a new array that was discarded, so the later
# column slice tmp[:, 2] crashed on the plain list; keep the result.
tmp = np.asarray(tmp)
print(tmp)
print(tmp[:, 2])
mes0 = ns.qubits.qubitapi.measure(tmp[2][0], observable=Z)
mes1 = ns.qubits.qubitapi.measure(tmp[2][1], observable=Z)
mes2 = ns.qubits.qubitapi.measure(tmp[2][2], observable=Z)
print(mes0)
print(mes1)
print(mes2)
# In[108]:
'''
function:
Generate a random serial number list.
input:
num_qubits: Length of serial number.
min: Minimum value possible in the list.
max: Maximum value possible in the list.
output:
A random serial number list.
'''
from random import randint
def SerialNumGen(num_bits, min, max):
    """Return num_bits consecutive serial numbers; the first is drawn
    uniformly so the whole run stays inside [min, max].

    NOTE: the parameter names shadow the builtins min/max; they are kept
    for backward compatibility with keyword callers.
    """
    first = randint(min, max - num_bits + 1)
    return [first + offset for offset in range(num_bits)]
# In[114]:
#verify: 7 consecutive serial numbers starting somewhere in [0, 4]
SerialNumGen(7,0,10)
# In[12]:
'''
function:
One way function which can be used in many place.
input:
any
output:
A qubit in this case.
'''
from netsquid.qubits import create_qubits
from netsquid.qubits.operators import *
# hash with the symmetric key, Unique Serial Number, the amound of money
# hash with the symmetric key, Unique Serial Number, the amound of money
def OneWayFunction(identity=None,symkey=[],randomSerialNumber=0,Money=0):
    """Deterministically map (symkey, serial number, amount) to a single
    qubit by rotating |0> about the X, Y and Z axes.

    NOTE(review): concatenating bin() strings is not injective (e.g.
    symkey [1, 10] and [11, 0] produce the same key) and the mutable
    default symkey=[] is shared across calls — confirm whether either
    matters to callers. `identity` is accepted but unused here.
    """
    owf_key=''
    # covert inputs to binary
    for i in symkey:
        owf_key+=str(bin(i)[2:])
    owf_key+=str(bin(randomSerialNumber)[2:])
    owf_key+=str(bin(Money)[2:])
    owf_key=int(owf_key)
    # make it qubit
    # apply three big prime numbers
    p1 = 33179
    p2 = 32537
    p3 = 31259
    # rotation angles are the key modulo each prime, interpreted in degrees
    MyRx=create_rotation_op(np.pi/180*(owf_key%p1), (1, 0, 0))
    MyRy=create_rotation_op(np.pi/180*(owf_key%p2), (0, 1, 0))
    MyRz=create_rotation_op(np.pi/180*(owf_key%p3), (0, 0, 1))
    tempQubit=create_qubits(1)
    tempQubit=tempQubit[0]
    MyRx | tempQubit
    MyRy | tempQubit
    MyRz | tempQubit
    #print(tempQubit.qstate.dm)
    return tempQubit
# In[ ]:
'''
function:
Cswap function.
input:
Three qubits.
output:
Three qubits applied Cswap.
'''
# C swap can be composed by T,H
# see https://www.mathstat.dal.ca/~selinger/quipper/doc/QuipperLib-GateDecompositions.html
from netsquid.qubits.operators import H,T
def Cswap(qA,qB,qC):
    """Controlled-SWAP (Fredkin) gate built from CNOT, H and T gates,
    following the Quipper decomposition linked above.

    NOTE(review): `operate` and `ops` are not imported in this cell; they
    presumably come from the wildcard qubitapi import at the top of the
    file — confirm.
    """
    invT=T.inv  # T-dagger
    operate([qC, qB], ops.CNOT)
    H | qC
    T | qA
    T | qB
    T | qC
    operate([qB, qA], ops.CNOT)
    operate([qC, qB], ops.CNOT)
    operate([qA, qC], ops.CNOT)
    T | qC
    invT | qB
    operate([qA, qB], ops.CNOT)
    invT | qA
    invT | qB
    operate([qC, qB], ops.CNOT)
    operate([qA, qC], ops.CNOT)
    operate([qB, qA], ops.CNOT)
    H | qC
    operate([qC, qB], ops.CNOT)
    return qA,qB,qC
# In[11]:
'''
function:
Swap test which exames the closeness of two qubits.
input:
two qubits.
output:
A tuple indecating the index and pobability.
(0,0.5) means orthogonal.
(0,1) means the two are equal.
'''
from netsquid.qubits import create_qubits
from netsquid.qubits.operators import H,Z
def SwapTest(qB,qC):
    """Swap test: measure the closeness of qB and qC.

    Returns the measure() tuple of the ancilla: outcome 0 with probability
    1.0 when the states are equal and 0.5 when they are orthogonal (per
    the comment block above).
    """
    qA=create_qubits(1)
    qA=qA[0]
    H | qA
    Cswap(qA,qB,qC)
    H | qA
    return ns.qubits.qubitapi.measure(qA,observable=Z)
# In[9]:
'''
function:
Create qubits list.
input:
numbers of qubits.
output:
A list of quantum states.(0,1,+,-)
And corespond quantum list.
'''
from netsquid.qubits import create_qubits
from random import randint
from netsquid.qubits.operators import H,X
def Create_random_qubits(num_bits):
    """Create num_bits qubits, each prepared uniformly at random in one of
    the four BB84 states.

    Returns (state codes, qubits) where the codes are 0:|0>, 1:|1>,
    2:|+>, 3:|->.
    """
    res_state=[]
    qlist=[]
    qlist=create_qubits(num_bits)
    for i in range(0,num_bits):
        res_state.append(randint(0,3)) # in four states
    for a,b in zip(res_state, qlist):
        if a == 0: # 0 state
            pass
        elif a == 1: # 1 state #X
            X | b
        elif a == 2: # + state #H
            H | b
        elif a == 3: # - state #XH
            X | b
            H | b
        else :
            print("Create random bits ERROR!!")
    return res_state, qlist
# In[13]:
'''
function:
Measuring qubits according to certain basis.
Names of qubits need to be indexed from 0
input:
A list of basis consised by 0/1. (0:standard, 1:Hadamard)
A list of qubits.
output:
A list of measurment tuple accordingly. Return merely 0 means missing such qubits
'''
import netsquid as ns
def Measure_by_basis(basisList, qList):
    """Measure each qubit in the basis selected for its index.

    basisList -- one entry per qubit position: 0 = standard (Z) basis,
                 1 = Hadamard (X) basis.
    qList     -- qubits whose index is parsed from the name (default first
                 qubit name = QS#0-0); missing qubits leave 0 in the result.
    Returns a list of measure() tuples (0 where no qubit was present), or
    0 if qList is longer than basisList.
    """
    if len(basisList) < len(qList):
        print("Quantum list is too long! ERROR!!")
        return 0
    res_measurement = [0] * len(basisList)  # init to 0
    for q in qList:
        # NOTE(review): name[5:] assumes a single-digit set id in 'QS#<i>-<n>'
        pos = int(q.name[5:])  # get qubit index
        if basisList[pos] == 0:
            res_measurement[pos] = ns.qubits.qubitapi.measure(q, observable=Z)  # standard basis
        elif basisList[pos] == 1:  # FIX: was basisList[a] — 'a' was never defined (NameError)
            res_measurement[pos] = ns.qubits.qubitapi.measure(q, observable=X)  # Hadamard basis
        else:
            print("measuring ERROR!!\n")
    return res_measurement
# In[ ]:
'''
function:
Wait certain amout of simulated time in simulation
This is the way NetSquid implements waiting action in simulated time.
By customizing a wait event, it will call End_waiting function after waiting.
More example at https://github.com/h-oll/netsquid-private/blob/master/Others/QMemory/QMemoryNoiceSim.py
'''
class example_class():
    """Sketch of how to wait for simulated time in NetSquid: schedule a
    custom event after a delay and resume in its handler."""

    # FIX: the original 'def example_function:' was a SyntaxError (missing
    # parameter list) which made the whole module unimportable.
    def example_function(self):
        # Put the following lines in functions you want to wait in.
        My_waitENVtype = EventType("WAIT_EVENT", "Wait for N nanoseconds")
        self._schedule_after(customized_delay, My_waitENVtype) # customized_delay
        self._wait_once(ns.EventHandler(self.End_waiting),entity=self,event_type=My_waitENVtype)
        # Put above lines in functions you want to wait.

    # called after waiting
    def End_waiting(self, event):
        # continue your protocol here
        # FIX: the original had no statement in the body (IndentationError)
        pass
# In[ ]:
'''
Assuming that qubit flip happens less likely than not flipping.
Correct qubit according to majority without measuring them.
input:
Qubit lists to compare and correct when qubit flips.
Idealy Qlist1=Qlist2=Qlist3.
Same column in different Qlist will be corrected accoring to majority.
output:
Corrected Qubit list
'''
import numpy as np
import netsquid as ns
from netsquid.qubits import create_qubits
from netsquid.qubits.operators import *
def QBitCorrection(Qlist1,Qlist2,Qlist3):
    """Majority-vote bit-flip correction across three redundant qubit lists
    without measuring the data qubits directly.

    For each column (q1, q2, q3) an error syndrome is extracted via CNOTs
    onto ancilla qubits; the minority value is then flipped with X so all
    three agree, and the first qubit of the column is returned.
    """
    ret=[]
    for q1,q2,q3 in zip(Qlist1,Qlist2,Qlist3):
        Qlist=[q1,q2,q3]
        # Qlist
        # get Error Syndrome: ancilla i records the parity of qubits i and i+1
        ErrorSyndromeLen=len(Qlist)-1
        ES_Qlist=create_qubits(ErrorSyndromeLen)
        #print(ES_Qlist)
        mes=[]
        for i in range(ErrorSyndromeLen):
            ns.qubits.operate([Qlist[i],ES_Qlist[i]], ns.CNOT)
            ns.qubits.operate([Qlist[i+1],ES_Qlist[i]], ns.CNOT)
            mes.append(ns.qubits.qubitapi.measure(ES_Qlist[i],observable=Z)[0])#
        #print(mes)
        # get Qlist idea from Error Syndrome: a syndrome bit of 1 marks a
        # value change between neighbours, so the True/False grouping flips
        res=[True]*len(Qlist)
        ind=True
        for i in range(len(mes)):
            if mes[i]==1:
                ind= not ind
                res[i+1]=ind
            else:
                res[i+1]=ind
        # count false cases
        F_count=0
        for i in res:
            if i ==False:
                F_count+=1
        # correct qubits: flip whichever group is in the minority
        if 2*F_count>len(mes): # case that false is more than true, than false might be the correct ones.
            for i in range(len(res)):
                if res[i] == True:
                    X|Qlist[i]
        else:
            for i in range(len(res)):
                if res[i] == False:
                    X|Qlist[i]
        ret.append(Qlist[0])
    return ret
# In[ ]:
#Verify: three 7-qubit lists with a bit flip injected on index 5 of two of
# them; after correction the majority (flipped) value should win there.
# NOTE(review): qlist1 is measured BEFORE correction, collapsing those
# qubits first — confirm this is intended for the demonstration.
qlist1=create_qubits(7)
qlist2=create_qubits(7)
qlist3=create_qubits(7)
#X|qlist1[5]
X|qlist2[5]
X|qlist3[5]
#X|qlist[0]
#X|qlist[2]
#X|qlist[1]
for i in qlist1:
    print(ns.qubits.qubitapi.measure(i,observable=Z))
print("--------")
res=QBitCorrection(qlist1,qlist2,qlist3)
for i in res:
    print(ns.qubits.qubitapi.measure(i,observable=Z))
# In[ ]:
'''
Create EPR pairs.
input:
Numbers of pairs.
output:
Two lists of qubits, with the corresponding slots entangled.
'''
import netsquid as ns
from netsquid.qubits import create_qubits
from netsquid.qubits.operators import *
def Create_multiEPR(num_bits):
    """Create num_bits EPR pairs.

    Returns two lists of qubits; qListA[i] and qListB[i] form the Bell
    state (|00> + |11>)/sqrt(2).
    """
    qListA=[]
    qListB=[]
    for i in range(num_bits):
        qA, qB = create_qubits(2) # qubit 00
        ns.qubits.operate(qA, ns.H)
        ns.qubits.operate([qA,qB], ns.CNOT)
        qListA.append(qA)
        qListB.append(qB)
    return qListA, qListB
# In[ ]:
# Verify: measuring one half of pair 2 in Z collapses its partner too, so
# the printed density matrices show the post-measurement states.
AA,BB=Create_multiEPR(5)
mes=ns.qubits.qubitapi.measure(AA[2],observable=Z)
for i in range(0,4):
    print(AA[i].qstate.dm)
    print(BB[i].qstate.dm)
print(mes)
# In[ ]:
'''
Compare two lists, find the unmatched index, then remove corresponding slots in loc_meas.
Input:
Two lists with elements 0-2 (0:Z, 1:X, 2:qubit miss).
Output:
measurement result left.
'''
def Compare_basis(loc_basis_list, res_basis_list, loc_meas):
    """Keep only the measurements where both basis lists agree.

    loc_meas is shrunk IN PLACE by popping the slots whose bases differ,
    and is also returned. Returns -1 when the basis lists have different
    lengths (elements are 0:Z, 1:X, 2:qubit missing).
    """
    if len(loc_basis_list) != len(res_basis_list):
        print("Comparing error! length issue!")
        print(loc_basis_list)
        print(res_basis_list)
        return -1
    mismatched = [idx
                  for idx, (mine, theirs) in enumerate(zip(loc_basis_list, res_basis_list))
                  if mine != theirs]
    # pop from the back so earlier indices stay valid
    for idx in reversed(mismatched):
        if loc_meas:
            loc_meas.pop(idx)
    return loc_meas
# In[ ]:
# Verify: bases differ at indices 0 and 2, so c is reduced in place to [8]
a=[1,2,3]
b=[4,2,6]
c=[7,8,9]
Compare_basis(a,b,c)
print(c)
# In[42]:
import netsquid as ns
from random import randint
from netsquid.qubits.operators import X,Z
'''
Randomly measure a qubits list by Z or X basis.
Input:
Numbers of qubits that should be >= the length of qlist. Equal case happens when no loss.
Qubit list to measure.
Output:
basisList: A list of basis applied(Z X -1). -1 means qubit missing. (detect by qubit name)
loc_res_measure: A list of measurment results. If there's a qubit loss,
both opList and loc_res_measure will have value -1 in the such slot in the list.
'''
def Random_ZX_measure(num_bits, qlist):
    """Measure each surviving qubit in a randomly chosen Z or X basis.

    num_bits -- expected number of qubits (>= len(qlist); equal when no loss)
    qlist    -- qubits named like 'QS#<i>-<n>'; the index after the last
                '-' identifies the slot.
    Returns (basisList, loc_res_measure); both hold -1 in the slots of
    lost qubits, otherwise 'Z'/'X' and the measurement outcome.
    """
    # FIX: the original parsed the start index with name[-len('-'):], which
    # only takes the LAST character and breaks for multi-digit indices;
    # split on '-' instead.
    num_start = int(qlist[0].name.split('-')[-1])
    basisList = []          # FIX: '[]*num_bits' in the original was a no-op
    loc_res_measure = []
    ind = 0
    for i in range(num_start, num_start + num_bits):
        # consume the next qubit only if it carries the expected index
        if ind <= len(qlist) - 1 and int(qlist[ind].name.split('-')[-1]) == i:
            if randint(0, 1):  # 0: Z basis, 1: X basis
                basisList.append('X')
                loc_res_measure.append(ns.qubits.qubitapi.
                    measure(qlist[ind], observable=X)[0])  # measure in Hadamard basis
            else:
                basisList.append('Z')
                loc_res_measure.append(ns.qubits.qubitapi.
                    measure(qlist[ind], observable=Z)[0])  # measure in standard basis
            ind += 1
        else:
            # qubit lost in transit: record placeholders
            basisList.append(-1)
            loc_res_measure.append(-1)
    return basisList, loc_res_measure
# In[47]:
# verify: 4 + 2 qubits but 6 expected — the basis/result lists should show
# -1 placeholders for the indices of the missing qubit
from netsquid.qubits import create_qubits
qList = create_qubits(4)
qList2 = create_qubits(3)
qList2.pop()
qList.extend(qList2)
print(qList)
oplist,mes=Random_ZX_measure(6,qList)
print(oplist,mes)
# In[ ]:
|
997,347 | d9b69c27742fdc5ea2aa923e210214b226d8ebd7 | # 爬取小说
import requests
from bs4 import BeautifulSoup
from python.study.other.config.logger import Logger
from time import sleep, time
from python.study.other.config.readconfig import MyConfig
import python.study.other.util.fileutil as fileutil
import python.study.other.util.strutil as strutil
logger = Logger().get_logger()
def get_response(url):
    """GET *url* with a desktop-Chrome User-Agent and return the parsed
    BeautifulSoup document.

    Non-200 status codes are only logged; whatever body was received is
    still parsed and returned.
    """
    start = time()
    header = ('Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 '
              '(KHTML, like Gecko) Chrome/74.0.3729.131 Safari/537.36')
    kv = {'user-agent': header}
    response = requests.get(url, headers=kv)
    if response.status_code != 200:
        logger.info(response.status_code)
    html = response.text
    soup = BeautifulSoup(html, 'html.parser')
    end = time()
    # log: time taken to fetch this page
    logger.info('request获取{}网页耗时{}'.format(url, end - start))
    return soup
def prase_char(soup, baseurl):
    """Walk the chapter list (ul#chapters-list) and download every chapter.

    <li> items without an <a> are volume headers: they reset the output
    directory under base_path. Items with an <a> are chapters fetched via
    get_content. (Function name keeps the original 'prase' typo so callers
    don't break.)
    """
    base_path = MyConfig.get_value('path', 'base_path') + 'book/'
    ul = soup.select_one('ul[id="chapters-list"]')
    li_list = ul.select('li')
    filepath = base_path
    # with open('bookurl.csv', 'w', encoding='GBK') as file:
    for li in li_list:
        a = li.select_one('a')
        if not a:
            # Volume header: start a fresh directory under base_path.
            filepath = base_path
            filepath += li.string.replace(' ', '')
            continue
        get_content(baseurl + a.attrs['href'], filepath + '/' + a.string + '.txt')
        logger.info('章节名 {}内容获取完毕'.format(a.string))
        # strurl += baseurl + a.attrs['href'] + ',' + a.string + '\n'
        # file.write(strurl)
        sleep(0.05)  # throttle requests to be polite to the server
def get_content(url, file_path):
    """Download one chapter page and write "title\\n + body text" to file_path."""
    soup = get_response(url)
    logger.info('true file path is {}'.format(file_path))
    fileutil.is_dir_live(file_path, True)  # ensure the parent directory exists
    with open(file_path, 'w', encoding='utf-8') as file:
        content = ''
        h1 = soup.select_one('h1')  # chapter title
        logger.debug('章节名 {}'.format(h1.string))
        content += h1.string + '\n'
        div = soup.select_one('div[id="txtContent"]')
        # Slice the raw HTML between the content div's tags, then normalize
        # <br/> to newlines and strip the injected ad block.
        divcontent = strutil.split_str(str(div), '<div id="txtContent">', '</div>', False)
        divcontent = divcontent.strip().replace('<br/>', '\n').replace(
            '<div class="gad2"><script type="text/javascript">try{mad1();} catch(ex){}</script></div>', '')
        content += ' ' + divcontent
        file.write(content)
if __name__ == '__main__':
    # Crawl this book's chapter index and save every chapter as a text file.
    prase_char(get_response('https://www.boquge.com/book/71607/'), 'https://www.boquge.com')
|
997,348 | 39d68c07de4f7e2f5d78aa9de395f0ea03ac02a5 | import math
from wpilib.joystick import Joystick
from wpilib.buttons.joystickbutton import JoystickButton
from commands.extendall import ExtendAll
from commands.retractall import RetractAll
from commands.extendfront import ExtendFront
from commands.extendback import ExtendBack
from commands.retractfront import RetractFront
from commands.rampextend import RampExtend
from commands.rampretract import RampRetract
import robotmap
class T16000M(Joystick):
    """Thrustmaster T.16000M flight stick with its axis channels mapped."""
    def __init__(self, port):
        super().__init__(port)
        self.port = port
        self.setXChannel(0)
        self.setYChannel(1)
        self.setZChannel(2)
        self.setThrottleChannel(3)
        self.setTwistChannel(2)  # twist shares channel 2 with the Z axis
# ----------------------------------------------------------
# Config Values
# ----------------------------------------------------------
class ConfigHolder:
    """Bare namespace object holding OI configuration constants."""
    pass

config = ConfigHolder()

# Driver sticks: dead-zone and input-curve tuning (see filterInput* below).
config.leftDriverStickNullZone = 0.1
config.rightDriverStickNullZone = 0.08
config.throttleFilterPower = 0.4
config.turnFilterPower = 0.4

# Left joystick button indices
config.btnDriveSlow = 1
config.btnResetEncodersIndex = 2
config.btnEnableLightSensorIndex = 3

# Right joystick button indices
config.btnResetYawAngleIndex = 7 # temporarily changed from 2 to 7
config.btnExtendAllIndex = 1
config.btnRetractAllIndex = 2
config.btnExtendFrontIndex = 3
config.btnExtendBackIndex = 4
config.btnRetractFrontIndex = 5
config.btnRetractBackIndex = 6

# GO Gamepad (Logitech) button/axis indices
config.btnHatchGrabTogIndex = 1 # 1 = A
config.btnCargoGrabOpenTogIndex = 3 # 3 = X
config.btnCargoGrabCloseTogIndex = 5 #????
config.btnRampExtendTogIndex = 6
config.btnRampRetractTogIndex = 7
config.axisElevatorIndex = 9 #???

# ----------------------------------------------------------
# Stick and Button Objects (populated by init(); None until then)
# ----------------------------------------------------------
leftDriverStick = None
rightDriverStick = None
goGamePad = None
resetYawBtn = None
btnResetEncoders = None
btnDriveSlow = None
btnLift = None # added to eject/retract cylinder(s)
btnRetract = None
btnEnableLightSensor = None
btnRampExtendTog = None
btnRampRetractTog = None
btnHatchGrabTog = None
btnCargoGrabTog = None
axisElevator = None
# ----------------------------------------------------------
# Init
# ----------------------------------------------------------
def init():
    """
    Assign commands to button actions, and publish your joysticks so you
    can read values from them later.
    """
    # Sticks are module globals so the rest of OI can read them directly.
    global leftDriverStick
    global rightDriverStick
    global goGamePad
    try:
        leftDriverStick = T16000M(0)
    except:
        print('OI: Error - Could not instantiate Left Driver Stick on USB port 0!!!')
    try:
        rightDriverStick = T16000M(1)
    except:
        print('OI: Error - Could not instantiate Right Driver Stick on USB port 0!!!')
    try:
        goGamePad = Joystick(2)
    except:
        print('OI: Error - Could not instantiate Right Driver Stick on USB port 2!!!')

    # ----------------------------------------------------------
    # Driver Controls
    # ----------------------------------------------------------
    #global resetYawBtn
    #resetYawBtn = JoystickButton(rightDriverStick, config.btnResetYawAngleIndex)
    #resetYawBtn.whenPressed(NavxResetYawAngle())

    global btnDriveSlow
    btnDriveSlow = JoystickButton(leftDriverStick, config.btnDriveSlow)

    global btnEnableLightSensor
    btnEnableLightSensor = JoystickButton(leftDriverStick, config.btnEnableLightSensorIndex)

    global btnExtendAll
    btnExtendAll = JoystickButton(rightDriverStick, config.btnExtendAllIndex)
    btnExtendAll.whenPressed(ExtendAll())

    global btnRetract
    btnRetract = JoystickButton(rightDriverStick, config.btnRetractAllIndex)
    btnRetract.whenPressed(RetractAll())

    global btnExtendFront
    btnExtendFront = JoystickButton(rightDriverStick, config.btnExtendFrontIndex)
    btnExtendFront.whenPressed(ExtendFront())

    global btnExtendBack
    btnExtendBack = JoystickButton(rightDriverStick, config.btnExtendBackIndex)
    btnExtendBack.whenPressed(ExtendBack())

    global btnRetractFront
    btnRetractFront = JoystickButton(rightDriverStick, config.btnRetractFrontIndex)
    btnRetractFront.whenPressed(RetractFront())

    # NOTE(review): named Cargo, bound to the HATCH button index, and wired
    # to ExtendBack() - the three disagree; confirm the intended wiring.
    global btnCargoGrabTog
    btnCargoGrabTog = JoystickButton(goGamePad, config.btnHatchGrabTogIndex)
    btnCargoGrabTog.whenPressed(ExtendBack())
    """
    global btnResetEncoders
    btnResetEncoders = JoystickButton(leftDriverStick, config.btnResetEncodersIndex)
    btnResetEncoders.whenPressed(TankDriveResetEncoders())
    """
    """
    global axisElevator
    axisElevator = JoystickAxis(goGamePad, config.axisElevatorIndex)
    axisElevator. #??? idk how to configure joystick axis
    """
    """
    global btnRampTog
    btnRampTog = JoystickButton(goGamePad, config.btnRampTogIndex)
    btnRampTog.whenPressed(ExtendFront())
    """
    #global btnResetEncoders
    #btnResetEncoders = JoystickButton(leftDriverStick, config.btnResetEncodersIndex)
    #btnResetEncoders.whenPressed(TankDriveResetEncoders())
    # These variable names are inconsistent, need to be fixed!!!!
    #global btnRampExtendTog
    #btnRampExtendTog = JoystickButton(goGamePad, config.btnRampExtendTogIndex)
    #btnRampExtendTog.whenPressed(RampExtend())
    #global btnRampRetractTog
    #btnRampRetractTog = JoystickButton(goGamePad, config.btnRampRetractTogIndex)
    #btnRampRetractTog.whenPressed(RampRetract())
# ----------------------------------------------------------
# Utility Functions
# ----------------------------------------------------------
# https://www.desmos.com/calculator/yopfm4gkno
# power should be > 0.1 and less than 4 or 5 ish on the outside
# If power is < 1.0, the curve is a logrithmic curve to give more power closer to center
# Powers greater than one give a more traditional curve with less sensitivity near center
def filterInputToPower(val, deadZone=0.0, power=2):
power = math.fabs(power)
if power < 0.1:
power = 0.1
if power > 5:
power = 5
sign = 1.0
if val < 0.0:
sign = -1.0
val = math.fabs(val)
deadZone = math.fabs(deadZone)
if val < deadZone:
val = 0.0
else:
val = val * ((val - deadZone) / (1 - deadZone))
output = val ** power
return output * sign
# View output: https://www.desmos.com/calculator/uh8th7djep
# to keep a straight line, scale = 0, and filterFactor = 1
# Keep filterFactor between 0 and 1
# Scale can go from 0 up, but values over 3-4 have dubious value
# Nice curve for game pad is filterFactor = 0.2, scale=1.5
def filterInput(val, deadZone=0.0, filterFactor=1.0, scale=0.0):
"""
Filter an input using a curve that makes the stick less sensitive at low input values
Take into account any dead zone required for values very close to 0.0
"""
sign = 1.0
if val < 0.0:
sign = -1.0
val = math.fabs(val)
deadZone = math.fabs(deadZone)
if val < deadZone:
val = 0.0
else:
val = val * ((val - deadZone) / (1 - deadZone))
output = val * ((filterFactor * (val**scale)) + ((1 - filterFactor) * val))
output *= sign
return output
#try using tanh with import numpy for a different scaling.
def applyDeadZone(val, deadZone):
    """
    Apply a dead zone to an input with no other smoothing. Values outside
    the dead zone are rescaled so the output still spans 0 to 1.0.
    :return:
        The float value of the adjusted input
    """
    sign = -1.0 if val < 0.0 else 1.0
    magnitude = math.fabs(val)
    zone = math.fabs(deadZone)
    if magnitude < zone:
        return 0.0 * sign
    return sign * (magnitude * ((magnitude - zone) / (1 - zone)))
def getRawThrottle():
    """
    Throttle from the left stick's Y axis, inverted so that pushing the
    stick forward (raw negative Y) yields +1.0.
    :return:
        Float throttle between -1.0 and 1.0
    """
    raw = leftDriverStick.getY()
    # Negate only non-zero values (avoids returning -0.0).
    return -raw if raw != 0.0 else raw
def getRawTurn():
    """Raw X axis of the right driver stick (turn input), unfiltered."""
    return rightDriverStick.getX()
|
997,349 | 5ae4a4dc869198bebbb7ffbb015dae04ae4adb78 | from peewee import *
from datetime import datetime
# Module-level database handle and the column headers used by the task table.
# NOTE(review): 'adressess.db' looks misspelled, but renaming the file would
# orphan existing data - confirm before changing.
base = SqliteDatabase('adressess.db')
fields = ['Id', 'Description', 'Add date', 'Is done', 'Remove']
class BaseModel(Model):
    """Common base binding every model to the module-level SQLite database."""
    class Meta:
        database = base
class Person(BaseModel):
    """An application user identified by a unique login."""
    login = CharField(null=False, unique=True)
    password = CharField()  # NOTE(review): stored in plain text - consider hashing
    class Meta:
        order_by = ('login',)
class Task(BaseModel):
    """A to-do item owned by a Person."""
    description = TextField(null=False)
    date = DateTimeField(default=datetime.now)  # creation timestamp
    isCompleted = BooleanField(default=False)
    assigned = ForeignKeyField(Person, related_name='task')  # owning user
    class Meta:
        order_by= ('date',)
def connect():
    """Open the database, create tables if missing, and seed demo data.

    Always returns True so callers can use it as a success flag.
    """
    base.connect()
    base.create_tables([Person, Task], safe=True)
    loadData()
    return True
def signin(login, password):
    """Return the Person for (login, password), creating it on first sign-in.

    Returns None when the login already exists with a different password:
    get_or_create's lookup misses and the create violates the unique login.
    """
    try:
        person, _created = Person.get_or_create(login=login, password=password)
        return person
    except IntegrityError:
        return None
def loadData():
    """Seed two demo users, each with three tasks, on an empty database."""
    if Person.select().count() > 0:
        return  # already seeded
    persons = ('Adam', 'Ewa')
    tasks = ('1. Task', '2. Task', '3. Task')
    for login in persons:
        person = Person(login=login, password='123')
        person.save()
        for description in tasks:
            task = Task(description=description, assigned=person)
            task.save()
    base.commit()
    # NOTE(review): this also closes the connection opened by connect();
    # later queries rely on the connection being reopened - confirm intended.
    base.close()
def parseTask (task):
    """Convert a Task model into the row layout used by the UI table:
    [id, description, formatted date, completed flag, remove flag]."""
    formatted_date = '{0:%Y-%m-%d %H:%M:%S}'.format(task.date)
    return [task.id, task.description, formatted_date, task.isCompleted, False]
def readData(person):
    """Return all of *person*'s tasks as UI rows (see parseTask)."""
    tasks = Task.select().where(Task.assigned == person)
    return list(map(parseTask, tasks))
def addTask(person, description):
    """Create and persist a new task for *person*; return its UI row."""
    task = Task(description=description, assigned=person, )
    task.save()
    return parseTask(task)
def saveData(tasks):
    """Persist edits from the UI rows back to the database.

    Rows flagged for removal (task[4]) are deleted from both the database
    and the in-place *tasks* list; all other rows have their description
    (task[1]) and completion flag (task[3]) saved.
    """
    # Bug fix: the original `for i, task in enumerate(tasks)` combined with
    # `del tasks[i]` skipped the element after every deletion. Walking the
    # indices in reverse keeps untraversed positions stable.
    for i in range(len(tasks) - 1, -1, -1):
        task = tasks[i]
        model = Task.select().where(Task.id == task[0]).get()
        if task[4]:
            model.delete_instance()
            del tasks[i]
        else:
            model.description = task[1]
            model.isCompleted = task[3]
            model.save()
|
997,350 | 8c787aec2d78e704a4b6b335c640534afc46deba | import os
import pandas as pd
import streamlit.components.v1 as components
# Toggle: False = load the component from a local dev server (hot reload),
# True = load the pre-built frontend bundle shipped next to this file.
_RELEASE = True

if not _RELEASE:
    _custom_table = components.declare_component(
        "custom_table",
        url="http://localhost:3001",
    )
else:
    parent_dir = os.path.dirname(os.path.abspath(__file__))
    build_dir = os.path.join(parent_dir, "frontend/build")
    _custom_table = components.declare_component(
        "custom_table", path=build_dir)
# # Create a wrapper function for the component. This is an optional
# # best practice - we could simply expose the component function returned by
# # `declare_component` and call it done. The wrapper allows us to customize
# # our component's API: we can pre-process its input args, post-process its
# # output value, and add a docstring for users.
def st_material_table(data, key=None):
    """Render *data* (a DataFrame) in the custom Material table component.

    `key` gives the component a stable Streamlit identity; until the
    frontend returns a value, the `default` empty DataFrame is returned.
    """
    return _custom_table(data=data, key=key, default=pd.DataFrame())
# Add some test code to play with the component while it's in development.
# During development, we can run this just as we would any other Streamlit
# app: `$ streamlit run my_component/__init__.py`
if not _RELEASE:
    # Manual test app: run with `streamlit run my_component/__init__.py`.
    import streamlit as st
    st.title('Streamlit Custom Material Table Test')
    course_df = pd.read_csv(st.secrets['db']['url'], index_col=0)
    # Create an instance of our component and discard its output value.
    _ = st_material_table(course_df)
    # Hide Streamlit's default footer and render a custom attribution one.
    footer = """
<style>
footer {
visibility: hidden;
}
.footer {
clear: both;
width: 100%;
height: 2.5rem;
border-top: 1px solid #f8f8f2A9;
position: relative;
bottom: 0;
padding-top: 20px;
left: 0px;
text-align: center;
}
</style>
<div class="footer">
© Brian L. Chen (<a href="https://github.com/icheft">@icheft</a>)
</div>"""
    st.markdown(footer, unsafe_allow_html=True)
|
997,351 | 7e638f7c0ee34a6ea44083808d4c3f691704bcf6 | from typing import Tuple, Union, Dict, List
from jsonpath_ng import parse
Mapping = Dict[str, Union["Mapping", str]]
def get_from_list(ls, index=0):
    """Return ls[index] (default: the first element), or None when the
    index is out of range."""
    if -len(ls) <= index < len(ls):
        return ls[index]
    return None
def map_jsonpath(source: dict, mapping: Mapping):
    """Build a dict shaped like *mapping*, with every leaf (a JSONPath
    string) replaced by the first value matching that path in *source*.

    Nested mappings produce correspondingly nested dicts in the result;
    leaves with no match become None (see get_from_list).
    """
    res = {}
    # Work list of (accumulated key path, leaf-or-sub-mapping) pairs.
    mappings: List[Tuple[Tuple[str, ...], Union[str, Mapping]]] = [
        ((k,), v) for k, v in mapping.items()
    ]
    while mappings:
        keys, value = mappings.pop()
        if isinstance(value, str):
            # Walk/create the nested dicts down to the leaf's parent.
            # Bug fix: the original advanced with `inner = res[k]`, which
            # always restarted from the root and broke paths deeper than 2.
            inner = res
            for k in keys[:-1]:
                inner = inner.setdefault(k, {})
            inner[keys[-1]] = get_from_list(
                [m.value for m in parse(value).find(source)]
            )
        else:
            # Bug fix: extend the accumulated path with the child key. The
            # original appended `tuple(list(k) + [k])`, which dropped the
            # parent path and split the key string into characters.
            for k, v in value.items():
                mappings.append((keys + (k,), v))
    return res
|
997,352 | f67434f911508507d5412ccf5156da2f46975981 | from heapq import heappop,heappush
def dijkstra(s,n,edge):
    """Single-source shortest paths via Dijkstra with a binary heap.

    Parameters
    ----------
    s : int
        source vertex (0-indexed)
    n : int
        number of vertices
    edge : list
        adjacency list; edge[v] holds (neighbor, weight) pairs

    Returns
    -------
    dist : list
        dist[v] = shortest distance from s to v (inf if unreachable)
    """
    INF = float("inf")
    dist = [INF] * n
    prev = [-1] * n  # predecessor on the shortest-path tree (not returned)
    dist[s] = 0
    heap = [[0, s]]
    while heap:
        d, v = heappop(heap)
        if d > dist[v]:
            continue  # stale heap entry: v already settled with a shorter path
        for to, w in edge[v]:
            cand = d + w
            if cand < dist[to]:
                dist[to] = cand
                prev[to] = v
                heappush(heap, [cand, to])
    return dist
# Read a weighted tree with n vertices, then answer q queries asking the
# distance from x to y through vertex K: dist(K, x) + dist(K, y)
# (AtCoder "Transit Tree Path"-style problem).
n=int(input())
edge=[[] for _ in range(n)]
for _ in range(n-1):
    a,b,c=map(int,input().split())
    a-=1  # convert to 0-indexed
    b-=1
    edge[a].append((b,c))
    edge[b].append((a,c))
q,k=map(int,input().split())
xy=[list(map(int,input().split()))for _ in range(q)]
d=dijkstra(k-1,n,edge)  # one Dijkstra from K answers every query
for x,y in xy:
    print(d[x-1]+d[y-1])
|
997,353 | 5385f05d189a9359f9d63d9f82c63bdcf6198928 | def make_quesadilla(protein, topping="sour cream"):
quesadilla = f"Here is a {protein} quesadilla with {topping}"
print(quesadilla)
make_quesadilla("chicken")
make_quesadilla("beef", "guacamole")
make_quesadilla(topping="ranch dressing", protein="chicken")
|
997,354 | e1a42c8541d4c8be2d211148c11e500de42a5999 | #!/usr/bin/env python
import csv
#import seaborn as sns
import numpy as np
#import matplotlib.mlab as mlab
#import matplotlib.pyplot as plt
import sys
#from matplotlib import rcParams
# Ids of the per-model score files aggregated below.
fid_list = [0,1,2,3,4,5,6,7,10,11,12,13,
22,23,24,28,29,33,35,37,39,40,
41,42,43,44,45,48,49,50,51,52,
55,57,58,59,61,62,63,65,68,71,
73,78,82,84,85,90,93,97,98,101,
105,106,107,108,110,111,116,118,
120,130,131,143]
scores = []
# Keep the best score from each model's scores.npz.
# NOTE(review): directories are indexed by position (model/id0..idN-1),
# not by the values in fid_list - confirm the directory layout matches.
for fid in range(len(fid_list)):
    tmp = np.load("model/id" + str(fid) + "/scores.npz")["scores"][0]
    scores.append(max(tmp))
np.savez("rst/rst_our_clf.npz",scores=np.array(scores))
#--------------------------------------------
"""
rcParams['xtick.labelsize'] = '18'
rcParams['ytick.labelsize'] = '18'
rcParams['legend.fontsize'] = '14'
rcParams['legend.frameon'] = False
rcParams['axes.labelsize'] = '18'
ours = [25.66,10032.34,3.78,1.87,47.85,0.56,0.18,0.31,0.18,0.24,0.43,0.26]
ind = np.arange(4)
bar_width = 0.15
colors=['r','b','g','k','y','purple']
patterns = ('x', '+', 'o', 'x', '\\', '//')
fig, axarr = plt.subplots(3, sharex=True)
for ii in range(3):
b = np.load("rst/rst_prediction.npz")
b = b["scores"]
b = b[:,ii,:]
maxs = []
for i in range(4):
maxs.append(b[:,range(i,20,4)].mean()*1.5)
#print maxs
a = np.load(sys.argv[ii+1])
stds = a["stds"]
means = a["means"]
rects=[]
for i in range(5):
mean = means[i]
mean = [mean[j]/maxs[j] for j in range(len(mean))]
std = stds[i]
std = [std[j]/maxs[j] for j in range(len(std))]
#print mean, std
rect = axarr[ii].bar(1.2*ind + i*bar_width, mean, bar_width, color=colors[i],fill=True, yerr=std)
for j in rect:
j.set_hatch(patterns[i])
rects.append(rect)
mean = [ours[i] for i in range(ii,12,3)]
mean = [mean[j]/maxs[j] for j in range(len(mean))]
std = stds.min(axis=0)
std = [std[j]/maxs[j]*0.95 for j in range(len(std)-2)] + [std[j]/maxs[j]*1.1 for j in range(2,len(std))]
#print mean, std
rect = axarr[ii].bar(1.2*ind + 5*bar_width, mean, bar_width, color=colors[5],fill=True, yerr=std)
for j in rect:
j.set_hatch(patterns[5])
rects.append(rect)
axarr[ii].set_xticklabels( ('MRE', 'MSE', 'HR@20%', 'HR@30%'))
axarr[ii].set_yticks(np.arange(0,1.1,0.2))
axarr[ii].set_xticks(1.2*ind+ 4*bar_width)
axarr[ii].set_ylabel('M(%d) scores' % ii)
axarr[ii].set_ylim([0,1.5])
#axarr[ii].legend(rects, ('Linear-SVR', 'SVR', 'LR', 'Bayes', 'CART', 'Our method'), loc=(0.4,0.75), ncol=3)
axarr[0].legend(rects, ('Linear-SVR', 'SVR', 'LR', 'Bayes', 'CART', 'Our method'), loc=(0.4,0.75), ncol=3)
plt.show()
""" |
997,355 | f0f630f6edc4f473ff2cfd01e6941768721ae30b | from django.contrib.sessions.backends.db import SessionStore
from django.shortcuts import render, redirect
from django.conf import settings
from rest_framework import authentication
from rest_framework import permissions
from rest_framework.views import APIView
from rest_framework.response import Response
from rest_framework.decorators import api_view, authentication_classes, permission_classes
from zeep import Client #zeep pakage(not django or python)
from .serializers import ProductSerializer, context_serialize, OrerSerializer
from cart.cart import Cart
from shop.models import Product
from shop.categories_set import categories_tree, categories_tree_fa
from shop.product_selects_methods import random_products, bestSellingProducts, searchEngine, listProductByCat, listProductByPrice
class ProductsLists(APIView):
    """Index endpoint: featured ("vije"), newest, and best-selling products."""
    def get(self, request):
        names = ['iPhone 8 Plus (Product) Red', 'V30s Thinq', 'Galaxy Note 9']
        products = Product.objects.filter(available=True)
        vije_products = [products.get(name=name) for name in names]
        new_products = products[:12]
        best_selling = bestSellingProducts(Product.objects.all())[ :-5:-1]
        all_products = [vije_products, new_products, best_selling]
        all_products_txt = ['vijeproducts', 'newproducts', 'bestselling']  # response keys; order must match all_products
        serializers = {}
        # NOTE(review): the loop variable shadows the `products` queryset above.
        for products, txt in zip(all_products, all_products_txt):
            serializers[txt] = ProductSerializer(products, many=True).data
        # Shape: {'cart':..., 'user':..., 'vijeproducts':..., 'newproducts':..., 'bestselling':...}
        return Response({**context_serialize(request.user, request), **serializers})
class CartDetail(APIView):
    """Cart add + cart detail endpoint."""
    def post(self, request, **kwargs):
        # API clients (unlike browsers) don't send the session cookie, so an
        # anonymous client passes its session key in the 'sessionid' header;
        # loading it here keeps all cart changes in one session.
        sessionid = request.META.get('HTTP_SESSIONID')
        if sessionid:
            # A wrong/fake key just makes SessionStore create a new session.
            request.session = SessionStore(session_key = sessionid)
        cart = Cart(request)
        product_id = self.kwargs['product_id']
        # URL args are strings: the literal 'None' means "just show the cart",
        # anything else means "add/update this product, then show the cart".
        if product_id != 'None':
            update_quantity = True if request.data.get('update_quantity')=='True' else False
            cart.add(product_id, int(request.data['quantity']), update_quantity)
        if request.user.is_authenticated:
            # Session is reachable via the user; no need to return its key.
            return Response({**context_serialize(request.user, request), 'range':list(range(21))})
        else:
            request.session.save()  # persist the (possibly new) anonymous session
            # Return the key so the anonymous client can send it back next time.
            return Response({**context_serialize(request.user, request), 'range':list(range(21)), 'sessionid':request.session.session_key})
class FingersSet(APIView):
    """Bulk-update cart quantities from posted {product_id: quantity} data."""
    def post(self, request):
        # NOTE(review): `cart` and `cart2` wrap the same request session;
        # iterating cart2 while mutating cart appears to rely on Cart
        # snapshotting its items during iteration - confirm in cart.Cart.
        cart = Cart(request)
        cart2 = Cart(request)
        for item in cart2:
            product = item['product']
            quantity = request.data[str(product.id)]  # careful: quantity arrives as str
            if product.stock>0:
                cart.add(product_id=product.id, quantity=int(quantity), update_quantity=True)
            else:
                cart.remove(product.id)  # out of stock: drop from cart
        return redirect('rest:order_create')
class CartRemove(APIView):
    """Remove a product from the cart, then render the cart via CartDetail."""
    def get(self, request, **kwargs):
        cart = Cart(request)
        cart.remove(self.kwargs['product_id'])
        # Re-dispatch to CartDetail as if this were an empty POST (CartDetail
        # only implements post); product_id=None means "just show the cart".
        request._request.method='POST'
        request._request.POST={}
        return CartDetail().dispatch(request._request, product_id=None)
class OrderCreate(APIView):
    """Order form data (GET) and order submission + payment hand-off (POST)."""
    def get(self, request):
        # The client inspects the serialized user to decide between showing
        # the order form and an authentication error. context_serialize is
        # sent with every page (like Django context processors).
        return Response(context_serialize(request.user, request))
    def post(self, request):
        if not request.user.is_authenticated:
            # Client side should surface a proper auth error from this.
            return Response(context_serialize(request.user, request))
        serializer = OrerSerializer(data=request.data)
        if serializer.is_valid():
            serializer.save(user=request.user)
            # Forward our POST data so zarinpal_request_verify sees it.
            request._request.POST = request.POST
            return zarinpal_request_verify(request._request)
        else:
            return Response({**context_serialize(request.user, request), **serializer.errors})
@api_view(['GET', 'POST'])
#@authentication_classes([authentication.TokenAuthentication, authentication.SessionAuthentication])
@permission_classes([permissions.IsAuthenticated])
def zarinpal_request_verify(request):
    """Zarinpal sandbox payment: POST returns the payment-request payload
    for the client; GET handles the gateway callback and verifies payment.
    (Function-based because APIView did not work with zarinpal.)
    """
    cart = Cart(request)
    MERCHANT = 'XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX'  # sandbox placeholder
    description = "توضیحات مربوط به تراکنش را در این قسمت وارد کنید"
    email = request.user.email
    mobile = request.user.phone if request.user.phone else None
    # NOTE(review): hard-coded callback; should be generated per deployment.
    CallbackURL = 'http://127.0.0.1:8000/rest/zarinpal/request_verify/'
    # NOTE(review): module-level global shared across requests - another
    # user's POST can overwrite `amount` before this user's GET verify runs.
    global amount
    amount = cart.get_total_price()
    if request.method == 'GET':
        # Callback from Zarinpal after the user leaves the payment page.
        client = Client('https://sandbox.zarinpal.com/pg/services/WebGate/wsdl')
        if request.GET.get('Status') == 'OK':
            result = client.service.PaymentVerification(MERCHANT, request.GET['Authority'], amount)
            if result.Status == 100:
                # Payment confirmed: decrement stock and empty the cart.
                for item in cart:
                    item['product'].stock -= 1
                    item['product'].save()
                cart.clear()
                message = 'عملیات با موفقیت به اتمام رسیده و خرید شما انجام شد. کد پیگیری: '
                return Response({**context_serialize(request.user, request), 'RefID':result.RefID, 'message':message})
            elif result.Status == 101:
                # Already verified earlier.
                message = 'عملیات پرداخت انجام شده است.'
                cart.clear()  # may be needed sometimes
                return Response({**context_serialize(request.user, request), 'status':str(result.Status), 'message':message})
            else:
                message = 'عمليات پرداخت ناموفق.'
                return Response({**context_serialize(request.user, request), 'status':str(result.Status), 'message':message})
        else:
            # Payment failed or cancelled by the user.
            # NOTE(review): `result` is undefined on this path (no
            # PaymentVerification call was made) - this raises NameError.
            message = 'عمليات پرداخت ناموفق يا توسط کاربر لغو شده است.'
            return Response({**context_serialize(request.user, request), 'status':str(result.Status), 'message':message})
    if request.method == 'POST':
        # Return the payment-request payload for the client to start payment.
        return Response({'MERCHANT':MERCHANT, 'description':description, 'email':email, 'mobile':str(mobile), 'CallbackURL':CallbackURL, 'amount':amount})
        # str(mobile): the phone object itself is not JSON serializable.
|
997,356 | e6a41fce674ecde154461ce4f9fb990df22bfdf3 | def trap(arr):
left = max(arr)
copy = arr
arr.remove(left)
right = max(arr)
total = 0
for i in range(len(copy)):
total = total + min(left,right) - copy
trap([3,0,2,0,4])
|
997,357 | f36d8d821fcc1e8c77c04fbc1fbfb127d9691c3d | import re, functools, unittest
# RECURSION EXERCISES
#
# Each one of these can be solved using recursion
##### Problem 1 #####
# choose via Pascal's Triangle
# The "choose" operation in combinatorics means the number of ways you can
# choose k items. For example: The number of unique pairs of socks you can
# generate from 10 disparate socks is "10 choose 2". One way to compute
# "choose" is surprisingly using Pascal's Triangle:
# https://en.wikipedia.org/wiki/Pascal%27s_triangle
#
# N choose K is the number in the Nth row and the Kth column of pascals
# triangle. Pascal's triangle is recursively defined (each element is the sum
# of the two above it). Compute N choose K knowing this recursive relationship
def choose(n, k):
    """Return N choose K via Pascal's triangle recursion (exercise stub).

    Recursive relation: each triangle entry is the sum of the two above it.
    Intentionally left unimplemented for the exercise.
    """
    raise NotImplementedError
##### LIST EXERCISES #####
# Here's a basic implementation of a linked list
# example usage:
# list = ListNode('r', ListNode('e', ListNode('g'))) ## looks like r -> e -> g
# list.first ## r
# list.rest ## e -> g
# list.rest.rest.first ## g
class ListNode:
    """Singly linked list node.

    Usage: ListNode('r', ListNode('e', ListNode('g'))) builds r -> e -> g;
    node.first() is the payload and node.rest() the next node (or None).
    """
    def __init__(self, data, next=None):
        # `next` now defaults to None so a tail node can be written as
        # ListNode('g'), matching the usage shown in the module header
        # (backward compatible - all existing two-argument calls still work).
        # The parameter name shadows the builtin next(); kept for interface
        # compatibility.
        self.data = data
        self.next = next
    def first(self):
        """Payload stored in this node."""
        return self.data
    def rest(self):
        """Remainder of the list (the next node), or None at the tail."""
        return self.next
##### Problem 2 #####
# Get the length of a singly linked list
# Remember to use recursion!
def list_length(list):
    """Number of nodes in the linked list (exercise stub; solve recursively).
    (Parameter name shadows the builtin `list`; kept for the exercise API.)"""
    raise NotImplementedError
##### Problem 3 #####
# Returns true if the two lists have
# all the same elements.
def lists_equal(a, b):
    """True iff linked lists a and b hold the same elements (exercise stub)."""
    raise NotImplementedError
##### Problem 4 #####
# Get the maximum number in a singly linked list
# e.g. 4 -> 3 -> 6 -> 2 => 6
def list_max(list):
    """Largest value in the linked list, e.g. 4 -> 3 -> 6 -> 2 => 6
    (exercise stub; parameter name shadows the builtin `list`)."""
    raise NotImplementedError
##### Problem 5 #####
# Return the last element of the list
# e.g. 4 -> 3 -> 6 -> 2 => 2
def last_elt(list):
    """Final element of the linked list, e.g. 4 -> 3 -> 6 -> 2 => 2
    (exercise stub; parameter name shadows the builtin `list`)."""
    raise NotImplementedError
def test_list():
    """A fresh three-node fixture list: 2 -> 6 -> 4."""
    tail = ListNode(4, None)
    middle = ListNode(6, tail)
    return ListNode(2, middle)
class RecursionTest(unittest.TestCase):
    """Unit tests for the recursion exercises above (fixture: 2 -> 6 -> 4)."""
    def test_choose(self):
        self.assertEqual(choose(10, 5), 252)
        self.assertEqual(choose(6, 6), 1)
    def test_list_length(self):
        self.assertEqual(list_length(test_list()), 3)
    def test_lists_equal(self):
        self.assertEqual(lists_equal(test_list(), test_list()), True)
        # Fixed: .rest is a method (it must be called), and a 2-element
        # list is NOT equal to the full 3-element list.
        self.assertEqual(lists_equal(test_list().rest(), test_list()), False)
    def test_list_max(self):
        self.assertEqual(list_max(test_list()), 6)
    def test_last_elt(self):
        # Fixed: this test previously called list_max (copy-paste error);
        # the last element of 2 -> 6 -> 4 is 4.
        self.assertEqual(last_elt(test_list()), 4)
if __name__ == '__main__':
unittest.main()
|
997,358 | 2473dc0697d2d46d9f8d6b8d0dc5a613f12acb97 | import json
basecamp_project_id = '4075148'
basecamp_url = f'https://3.basecampapi.com/{basecamp_project_id}'
class NotFoundError(Exception):
    """Raised when an expected Basecamp resource (e.g. a todoset) is missing."""
    pass
def get_software_project(sess):
    """Return the Basecamp project named 'Software', or None if not found."""
    response = sess.get(f'{basecamp_url}/projects.json')
    for project in response.json():
        if project['name'] == 'Software':
            return project
    return None
def extract_todoset_id(project):
    """Return the id of the project's 'todoset' dock entry, or None if absent."""
    # next() with a default keeps the original lazy first-match behavior.
    return next(
        (item['id'] for item in project['dock'] if item['name'] == 'todoset'),
        None,
    )
def extract_repos(description):
    """Parse comma-separated repo names out of a to-do list description.

    Basecamp uses rich text, so the description comes wrapped in <div>
    tags; strip them before splitting.
    """
    plain = description.replace('<div>', '').replace('</div>', '')
    return [part.strip() for part in plain.split(',')]
def get_todolist(sess, project, repo_name):
    """Find the project's to-do list whose description mentions *repo_name*.

    Raises NotFoundError when the project has no todoset at all. Returns
    None when no list mentions the repo - a normal, non-error condition.
    """
    todoset_id = extract_todoset_id(project)
    # We expect our project to have a list of todos, so missing is an error.
    if todoset_id is None:
        raise NotFoundError('List of to-dos not found for project')
    url = f'{basecamp_url}/buckets/{project["id"]}/todosets/{todoset_id}/todolists.json'
    for todolist in sess.get(url).json():
        if repo_name in extract_repos(todolist['description']):
            return todolist
    return None
def create_todo(sess, project, todolist, event):
    """POST a new to-do built from a GitHub event onto *todolist*;
    returns the response object."""
    url = f'{basecamp_url}/buckets/{project["id"]}/todolists/{todolist["id"]}/todos.json'
    body = {
        'content': event['title'],
        'description': f'<div>{event["desc"]}</div><div>{event["url"]}</div>'
    }
    return sess.post(url, json=body)
def handle_gh_event(sess, gh_event):
    """Create a Basecamp to-do for a GitHub event.

    Returns the POST response, or None when no to-do list mentions the
    event's repository.
    """
    project = get_software_project(sess)
    todolist = get_todolist(sess, project, gh_event['repo'])
    if todolist is None:
        return None
    return create_todo(sess, project, todolist, gh_event)
|
997,359 | 6f058c5f0b248eead29043f36c05c0237b94aa70 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import json
import xlrd
import glob
import time
import openpyxl
import subprocess
from optparse import OptionParser
def cell2str(data):
    """Render a spreadsheet cell value as a string, using '.' for empty cells.

    NOTE: Python 2 code - `long` does not exist on Python 3.
    """
    if data == None or data == '':
        return '.'
    elif isinstance(data, (int, long, float)) :
        return str(data)
    else:
        return data
def getValueWithMergeLookup(sheet, row, column):
    """Return the value at (row, column), walking up through merged rows.

    openpyxl reports None for the non-anchor cells of a merged range, so
    scan upward until a real value is found.
    """
    value = sheet.cell(row=row, column=column).value
    while value is None:
        row -= 1
        value = sheet.cell(row=row, column=column).value
    return value
### GATHERING PARAMETERS ############################################################
# Command-line options: -r is the run folder to process; -w is optional.
parser = OptionParser()
parser.add_option('-r', '--run-folder', help="full run path", dest='run_folder')
parser.add_option('-w', '--sbt-wait', help="(optional) if run SBT, exit", dest='sbt_wait', action='store_true')
(options, args) = parser.parse_args()
# Site-wide configuration and the per-run barcode -> target mapping.
with open('/DATA/work/global_parameters.json', 'r') as g:
    global_param = json.load(g)
with open(options.run_folder+'/barcodes.json', 'r') as g:
    barcodes_json = json.load(g)
bilan_mut = xlrd.open_workbook(global_param['bilan_mut_path'])
# Column order of the tab-separated variant list written below.
header = ['Run_name','Sample','Chr','Transcript','Gene','Exon','Intron','c.','p.','Start.Pos','Ref.Seq','Var.Seq','Class.','Var.Freq.',
    'Var.Cov.','Pos.Cov.','Region','Type','Consequence','Impact','COSMIC','dbSNP','InterVar','ClinVar.significance','ClinVar.disease',
    'NCI60','ESP','1000G_ALL','1000G_EUR','1000G_AMR','1000G_AFR','1000G_EAS','1000G_SAS','SIFT','POLYPHEN2_HVAR','Comm.Bio.']
#####################################################################################
# Locate every *finalReport.xlsx in the run folder (preferring the 2018
# reanalysis subfolder when present) and derive the run name from the path.
finalreports = []
if options.run_folder:
    if os.path.isdir(options.run_folder+'/_2018_reanalysis'):
        finalreports = glob.glob(options.run_folder+'/_2018_reanalysis/*/*finalReport.xlsx')
    else:
        finalreports = glob.glob(options.run_folder+'/*/*finalReport.xlsx')
    # '~' marks Excel lock/backup files; skip them.
    finalreports = [item for item in finalreports if not '~' in item]
    path_split = options.run_folder.split('/')
    if path_split[-1] == '':
        run_name = path_split[-2]
    else:
        run_name = path_split[-1]
    print "- Processing run %s ..." % run_name
else:
    print "Error - No run specified"
    exit()
# Map each sample to its final report and, via the barcode's target BED,
# to the unique variant list file it should contribute to.
sample2variantlist = {}
sample2finalreport = {}
for report in finalreports:
    sample = report.split('/')[-1].split('_IonXpress')[0]
    sample2finalreport[sample] = report
    barcode = 'IonXpress_' + report.split('IonXpress_')[-1].split('.finalReport')[0]
    target = barcodes_json[barcode]['target_region_filepath'].split('/')[-1]
    for _run_type in global_param['run_type']:
        if global_param['run_type'][_run_type]['target_bed'].split('/')[-1] == target:
            sample2variantlist[sample] = '/DATA/work/scripts/tests/unique_variantList_%s' % run_name
            break
# Main export loop: for each variant list file, scan every contributing
# sample's final report, deduplicate variants and write one row per variant.
variants_seen = []
for variantlist in list(set(sample2variantlist.values())):
    unique_VariantList = open(variantlist,'w')
    header_string = '\t'.join(header)
    unique_VariantList.write(header_string+'\n')
    # All samples whose target maps to this variant list file.
    sampleset = [key for key in sample2variantlist if sample2variantlist[key] == variantlist]
    for sample in sampleset:
        print "\t - %s" % sample
        finalreport = openpyxl.load_workbook(sample2finalreport[sample])
        annotation_sheet = finalreport['Annotation']
        if 'VEP' in finalreport.sheetnames:
            vep_sheet = finalreport['VEP']
        # column name to index
        column2index = {}
        for j in range(1,annotation_sheet.max_column+1):
            column2index[annotation_sheet.cell(row=1,column=j).value] = j
        for i in range(2,annotation_sheet.max_row+1):
            if annotation_sheet.cell(row=i,column=column2index['Chr']).value == None: # avoid empty line and "Amplicons < 300X: " line
                continue
            Transcript = cell2str(annotation_sheet.cell(row=i,column=column2index['Transcript']).value)
            c_nomen = cell2str(annotation_sheet.cell(row=i,column=column2index['c.']).value)
            Region = cell2str(annotation_sheet.cell(row=i,column=column2index['Region']).value)
            Type = cell2str(annotation_sheet.cell(row=i,column=column2index['Type']).value)
            # Deduplicate on (transcript, c-nomenclature) across all samples.
            if (Transcript,c_nomen) not in variants_seen:
                variants_seen.append((Transcript,c_nomen))
            else:
                continue
            # Skip non-coding regions and synonymous variants.
            if ((Region in ['?','intronic','UTR3','UTR5','ncRNA_intronic']) or (Type == 'synonymous')):
                continue
            Comm = cell2str(annotation_sheet.cell(row=i,column=column2index['Comm.Bio.']).value)
            # Replace French accented characters for plain-ASCII output.
            Comm = Comm.replace(u'é','e').replace(u'è','e').replace(u'ê','e').replace(u'à','a')
            Chr = cell2str(annotation_sheet.cell(row=i,column=column2index['Chr']).value)
            Gene = cell2str(annotation_sheet.cell(row=i,column=column2index['Gene']).value)
            Exon = cell2str(annotation_sheet.cell(row=i,column=column2index['Exon']).value)
            p_nomen = cell2str(annotation_sheet.cell(row=i,column=column2index['p.']).value)
            Start_Pos = cell2str(annotation_sheet.cell(row=i,column=column2index['Start.Pos']).value)
            Ref_Seq = cell2str(annotation_sheet.cell(row=i,column=column2index['Ref.Seq']).value)
            Var_Seq = cell2str(annotation_sheet.cell(row=i,column=column2index['Var.Seq']).value)
            Class = cell2str(annotation_sheet.cell(row=i,column=column2index['Class.']).value)
            Var_Freq = cell2str(annotation_sheet.cell(row=i,column=column2index['Var.Freq.']).value)
            Var_Cov = cell2str(annotation_sheet.cell(row=i,column=column2index['Var.Cov.']).value)
            Pos_Cov = cell2str(annotation_sheet.cell(row=i,column=column2index['Pos.Cov.']).value)
            COSMIC = cell2str(annotation_sheet.cell(row=i,column=column2index['COSMIC']).value)
            dbSNP = cell2str(annotation_sheet.cell(row=i,column=column2index['dbSNP']).value)
            InterVar = cell2str(annotation_sheet.cell(row=i,column=column2index['InterVar']).value)
            ClinVar_significance = cell2str(annotation_sheet.cell(row=i,column=column2index['ClinVar.significance']).value)
            ClinVar_disease = cell2str(annotation_sheet.cell(row=i,column=column2index['ClinVar.disease']).value)
            NCI60 = cell2str(annotation_sheet.cell(row=i,column=column2index['NCI60']).value)
            ESP = cell2str(annotation_sheet.cell(row=i,column=column2index['ESP']).value)
            _1000G_ALL = cell2str(annotation_sheet.cell(row=i,column=column2index['1000G_ALL']).value)
            _1000G_EUR = cell2str(annotation_sheet.cell(row=i,column=column2index['1000G_EUR']).value)
            _1000G_AMR = cell2str(annotation_sheet.cell(row=i,column=column2index['1000G_AMR']).value)
            _1000G_AFR = cell2str(annotation_sheet.cell(row=i,column=column2index['1000G_AFR']).value)
            _1000G_EAS = cell2str(annotation_sheet.cell(row=i,column=column2index['1000G_EAS']).value)
            _1000G_SAS = cell2str(annotation_sheet.cell(row=i,column=column2index['1000G_SAS']).value)
            SIFT = cell2str(annotation_sheet.cell(row=i,column=column2index['SIFT']).value)
            POLYPHEN2_HVAR = cell2str(annotation_sheet.cell(row=i,column=column2index['POLYPHEN2_HVAR']).value)
            # Default VEP annotations when no matching VEP row is found.
            vep_intron = '.'
            vep_consequence = '.'
            vep_impact = '.'
            if 'VEP' in finalreport.sheetnames:
                # Match the VEP row on (transcript, position, ref, alt);
                # merged cells are resolved by walking upward.
                for v in range(2,vep_sheet.max_row+1):
                    vep_transcript = cell2str(vep_sheet.cell(row=v,column=3).value.split('.')[0])
                    vep_pos = cell2str(getValueWithMergeLookup(vep_sheet,v,15))
                    vep_ref = cell2str(getValueWithMergeLookup(vep_sheet,v,16))
                    vep_alt = cell2str(getValueWithMergeLookup(vep_sheet,v,17))
                    if (vep_transcript,vep_pos,vep_ref,vep_alt) == (Transcript,Start_Pos,Ref_Seq,Var_Seq):
                        vep_intron = cell2str(vep_sheet.cell(row=v,column=6).value)
                        vep_consequence = cell2str(vep_sheet.cell(row=v,column=9).value)
                        vep_impact = cell2str(vep_sheet.cell(row=v,column=10).value)
                        break
            variant = [run_name,sample,Chr,Transcript,Gene,Exon,vep_intron,c_nomen,p_nomen,Start_Pos,Ref_Seq,Var_Seq,Class,Var_Freq,Var_Cov,Pos_Cov,Region,Type,vep_consequence,vep_impact,COSMIC,dbSNP,InterVar,ClinVar_significance,ClinVar_disease,NCI60,ESP,_1000G_ALL,_1000G_EUR,_1000G_AMR,_1000G_AFR,_1000G_EAS,_1000G_SAS,SIFT,POLYPHEN2_HVAR,Comm]
            variant_string = '\t'.join(variant)
            unique_VariantList.write(variant_string+'\n')
    unique_VariantList.close()
|
997,360 | b26a2602c837702b737f84b4a55d240839725333 | # Copyright 2015 Google Inc. All rights reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import functools
import itertools
class LazyOrderedDictionary(object):
  """An ordered key->value mapping populated lazily from two sources.

  Values can be produced either by |enumerator| (yields every (key, value)
  pair and fixes the canonical ordering) or by |constructor| (builds a single
  value on demand for one key).  Full enumeration is deferred until an
  operation actually requires the complete listing, so cheap single-key
  lookups never pay for it.
  """

  def __init__(self, enumerator, constructor):
    """Args:
      enumerator: callable returning an iterable of (key, value) pairs.
      constructor: callable mapping a key to a value (returning None when
          the key cannot be built); may itself be None.
    """
    self._enumerator = enumerator
    self._constructor = constructor
    self._cache_list = []  # [(key, value)] in enumeration order
    self._cache_index = {}  # key -> index of _cache_list
    self._enumerated = False

  def refresh(self):
    """Discards all cached entries and forces re-enumeration on next use."""
    del self._cache_list[:]
    self._cache_index.clear()
    self._enumerated = False

  def __len__(self):
    self._ensure_enumerated()
    return len(self._cache_list)

  def __iter__(self):
    return self.iterkeys()

  def iterkeys(self):
    self._ensure_enumerated()
    for key, _ in self._cache_list:
      yield key

  def itervalues(self):
    self._ensure_enumerated()
    for _, value in self._cache_list:
      yield value

  def iteritems(self):
    self._ensure_enumerated()
    for item in self._cache_list:
      yield item

  def keys(self):
    return list(self.iterkeys())

  def values(self):
    return list(self.itervalues())

  def items(self):
    return list(self.iteritems())

  def __getitem__(self, key):
    # An integer key is a positional index into the enumerated order.
    if isinstance(key, int):
      self._ensure_enumerated()
      return self._cache_list[key][1]
    index = self._cache_index.get(key)
    if index is not None:
      return self._cache_list[index][1]
    # Try cheap on-demand construction before paying for full enumeration.
    if self._constructor:
      value = self._constructor(key)
      if value is not None:
        index = len(self._cache_list)
        self._cache_index[key] = index
        self._cache_list.append((key, value))
        return value
    self._ensure_enumerated()
    index = self._cache_index.get(key)
    if index is None:
      raise KeyError(key)
    return self._cache_list[index][1]

  def get(self, key, default=None):
    try:
      return self[key]
    except KeyError:
      return default

  def _ensure_enumerated(self):
    if not self._enumerated:
      # Save partially constructed entries.
      saves = self._cache_list[:]
      # Initialize cache with the enumerator.
      del self._cache_list[:]
      self._cache_index.clear()
      for key, value in self._enumerator():
        self._cache_index[key] = len(self._cache_list)
        self._cache_list.append((key, value))
      # Restore saved entries, overriding any enumerated duplicates.
      for key, value in saves:
        index = self._cache_index.get(key)
        if index is None:
          index = len(self._cache_list)
          self._cache_list.append((None, None))
          # Bug fix: record the restored key in the index; previously a
          # later lookup of this key missed the cache and re-constructed
          # (and re-appended) a duplicate entry.
          self._cache_index[key] = index
        self._cache_list[index] = (key, value)
      self._enumerated = True
@functools.total_ordering
class CustomMutableFixedList(object):
  """Provides methods to mimic a mutable fixed-size Python list.

  Subclasses need to provide implementation of at least following methods:
  - __getitem__
  - __setitem__
  - __iter__
  - __len__

  NOTE(review): relies on Python 2 constructs (itertools.izip and
  sorted(cmp=...)); will not run unmodified on Python 3.
  """

  def __eq__(self, other):
    # Element-wise equality; lengths must agree first.
    if len(self) != len(other):
      return False
    for a, b in itertools.izip(self, other):
      if a != b:
        return False
    return True

  def __lt__(self, other):
    # Lexicographic ordering; functools.total_ordering derives the
    # remaining comparison operators from __eq__ and __lt__.
    for a, b in itertools.izip(self, other):
      if a != b:
        return a < b
    return len(self) < len(other)

  def __contains__(self, find_value):
    for value in self:
      if value == find_value:
        return True
    return False

  def count(self, find_value):
    # Number of elements equal to find_value.
    result = 0
    for value in self:
      if value == find_value:
        result += 1
    return result

  def index(self, find_value):
    # First index holding find_value; raises ValueError when absent,
    # matching list.index semantics.
    for i, value in enumerate(self):
      if value == find_value:
        return i
    raise ValueError('%r is not in list' % find_value)

  def reverse(self):
    # In-place reverse via __setitem__ so the size stays fixed.
    for i, new_value in enumerate(list(reversed(self))):
      self[i] = new_value

  def sort(self, cmp=None, key=None, reverse=False):
    # In-place sort via __setitem__; accepts Python 2's cmp argument.
    for i, new_value in enumerate(sorted(
        self, cmp=cmp, key=key, reverse=reverse)):
      self[i] = new_value

  # The list is fixed-size: every size-changing list method is disabled.
  def __delitem__(self, key):
    raise NotImplementedError('Methods changing the list size are unavailable')

  def append(self, x):
    raise NotImplementedError('Methods changing the list size are unavailable')

  def extend(self, x):
    raise NotImplementedError('Methods changing the list size are unavailable')

  def insert(self, i, x):
    raise NotImplementedError('Methods changing the list size are unavailable')

  def pop(self, i=None):
    raise NotImplementedError('Methods changing the list size are unavailable')

  def remove(self, x):
    raise NotImplementedError('Methods changing the list size are unavailable')
|
997,361 | 8eb9e92bf9db8b1c55028d2eaecd34fd2c4e1958 | from torch import autograd, eye, argmax, multinomial
from torch.nn import functional as F
class GreedySelection(autograd.Function):
    """Straight-through argmax: one-hot forward, identity backward."""

    @staticmethod
    def forward(ctx, scores):
        """Return a one-hot tensor marking the highest score along the last dim."""
        best = argmax(scores, dim=-1)
        identity = eye(scores.size(-1), device=scores.device)
        return identity[best]

    @staticmethod
    def backward(ctx, g):
        # Straight-through estimator: gradient flows through unchanged.
        return g
class ProbabilisticSelection(autograd.Function):
    """Stochastic selection: sample a one-hot weight from softmaxed scores."""

    @staticmethod
    def forward(ctx, scores):
        """Sample one index per row from softmax(scores) and return it one-hot."""
        shape = scores.size()
        weights = F.softmax(scores, dim=-1)
        flat = weights.view(-1, shape[-1])
        picks = multinomial(flat, num_samples=1,
                            replacement=False).view(shape[:-1])
        identity = eye(scores.size(-1), device=scores.device)
        return identity[picks]

    @staticmethod
    def backward(ctx, g):
        # Straight-through estimator: gradient flows through unchanged.
        return g
|
997,362 | fc04f626cabdab50f8f1d15f2c356ecb3e10a0d2 | """Field mappings for Access Preliminary Technical Data"""
from cases.forms import (
AttachmentImportForm,
PersonImportForm,
PreliminaryCaseImportForm,
PreliminaryFacilityImportForm,
)
from django_import_data import OneToOneFieldMap, ManyToOneFieldMap, FormMap
from importers.converters import (
coerce_feet_to_meters,
coerce_access_location,
coerce_none,
coerce_positive_float,
coerce_positive_int,
coerce_scientific_notation,
convert_access_path,
convert_case_num,
convert_case_num_and_site_num_to_nrqz_id,
coerce_bool,
coerce_access_none,
)
from utils.constants import ACCESS_PRELIM_TECHNICAL
# Columns in the Access preliminary technical source that the importer
# deliberately skips (not mapped by any FormMap below).
IGNORED_HEADERS = [
    "PROP_STUDY",
    "JSMS_DIFF",
    "JSMS_TROPO",
    "JSMS_SPACE",
    "JSMS_TPA",
    "JSMS_AERPD",
    "TAP_DIFF",
    "TAP_TROPO",
    "TAP_SPACE",
    "TAP_AERPD",
    "TAP_TPA",
    "MAP",
    "DATE",
]
class ApplicantFormMap(FormMap):
    """Maps the Access APPLICANT column onto a Person record."""

    field_maps = [
        OneToOneFieldMap(to_field="name", converter=None, from_field="APPLICANT")
    ]
    form_class = PersonImportForm
    form_defaults = {"data_source": ACCESS_PRELIM_TECHNICAL}
class PCaseImportFormMap(FormMap):
    """Maps the Access PNRQZ_NO column onto a PreliminaryCase."""

    field_maps = [
        OneToOneFieldMap(
            to_field="case_num", converter=convert_case_num, from_field="PNRQZ_NO"
        )
    ]
    form_class = PreliminaryCaseImportForm
    form_defaults = {"data_source": ACCESS_PRELIM_TECHNICAL}
class PFacilityImportFormMap(FormMap):
    """Maps Access preliminary technical columns onto a PreliminaryFacility."""

    field_maps = [
        ManyToOneFieldMap(
            from_fields={"case_num": "PNRQZ_NO", "site_num": "Site Number"},
            converter=convert_case_num_and_site_num_to_nrqz_id,
            to_field="nrqz_id",
        ),
        OneToOneFieldMap(
            to_field="site_num", converter=coerce_positive_int, from_field="Site Number"
        ),
        OneToOneFieldMap(
            to_field="freq_low", converter=coerce_positive_float, from_field="FREQUENCY"
        ),
        OneToOneFieldMap(
            to_field="antenna_model_number",
            converter=coerce_access_none,
            from_field="ANT_MODEL",
        ),
        OneToOneFieldMap(
            to_field="power_density_limit",
            converter=coerce_scientific_notation,
            from_field="PWD_LIMIT",
        ),
        OneToOneFieldMap(
            to_field="location_description",
            converter=coerce_access_none,
            from_field="LOCATION",
        ),
        OneToOneFieldMap(
            to_field="latitude", converter=coerce_none, from_field="LATITUDE"
        ),
        OneToOneFieldMap(
            to_field="longitude", converter=coerce_none, from_field="LONGITUDE"
        ),
        # Combined location from lat/long plus the NAD27/NAD83 datum flags.
        ManyToOneFieldMap(
            to_field="location",
            converter=coerce_access_location,
            from_fields=(
                {
                    "latitude": "LATITUDE",
                    "longitude": "LONGITUDE",
                    "nad27": "NAD27?",
                    "nad83": "NAD83?",
                }
            ),
        ),
        # Elevations arrive in feet and are stored in meters.
        OneToOneFieldMap(
            to_field="amsl", converter=coerce_feet_to_meters, from_field="GND_ELEV"
        ),
        OneToOneFieldMap(
            to_field="agl", converter=coerce_feet_to_meters, from_field="ANT_HEIGHT"
        ),
        OneToOneFieldMap(
            to_field="comments", converter=coerce_access_none, from_field="REMARKS"
        ),
        OneToOneFieldMap(
            to_field="tpa", converter=coerce_positive_float, from_field="NRAO_TPA"
        ),
        OneToOneFieldMap(
            to_field="radio_service", converter=coerce_none, from_field="CLASS"
        ),
        # NOTE(review): the next two pass a dict to from_field (the style
        # used for ManyToOneFieldMap's from_fields), while every other
        # OneToOneFieldMap passes a plain string — confirm that
        # OneToOneFieldMap accepts this aliased-dict form.
        OneToOneFieldMap(
            from_field={"topo_4_point": "FCC4-Point"},
            converter=coerce_bool,
            to_field="topo_4_point",
        ),
        OneToOneFieldMap(
            from_field={"topo_12_point": "12-Point"},
            converter=coerce_bool,
            to_field="topo_12_point",
        ),
        OneToOneFieldMap(
            from_field="NRAO_AZ_GB",
            converter=coerce_positive_float,
            to_field="az_bearing",
        ),
        OneToOneFieldMap(
            from_field="REQ_ERP",
            converter=coerce_positive_float,
            to_field="requested_max_erp_per_tx",
        ),
        OneToOneFieldMap(
            from_field="NRAO_AERPD_CDMA",
            converter=coerce_positive_float,
            to_field="nrao_aerpd_cdma",
        ),
        OneToOneFieldMap(
            from_field="NRAO_AERPD_Analog",
            converter=coerce_positive_float,
            to_field="nrao_aerpd_analog",
        ),
        OneToOneFieldMap(
            from_field="NRAO_DIFF",
            converter=coerce_positive_float,
            to_field="nrao_diff",
        ),
        OneToOneFieldMap(
            from_field="NRAO_SPACE",
            converter=coerce_positive_float,
            to_field="nrao_space",
        ),
        OneToOneFieldMap(
            from_field="NRAO_TROPO",
            converter=coerce_positive_float,
            to_field="nrao_tropo",
        ),
        OneToOneFieldMap(
            from_field="OUTSIDE",
            converter=coerce_bool,
            to_field="original_outside_nrqz",
        ),
    ]
    form_class = PreliminaryFacilityImportForm
    form_defaults = {"data_source": ACCESS_PRELIM_TECHNICAL}
# PROP STUDY
class PropagationStudyFormMap(FormMap):
    """Maps the PROP_STUDY_Link column onto a propagation-study Attachment."""

    field_maps = [
        OneToOneFieldMap(
            to_field="file_path",
            converter=convert_access_path,
            # Plain string literal: the original used an f-string with no
            # placeholders, which is just dead syntax.
            from_field="PROP_STUDY_Link",
        )
    ]
    form_class = AttachmentImportForm
    form_defaults = {
        "data_source": ACCESS_PRELIM_TECHNICAL,
        "comments": "Propagation Study",
    }
# Singleton FormMap instances consumed by the importer pipeline.
APPLICANT_FORM_MAP = ApplicantFormMap()
PCASE_FORM_MAP = PCaseImportFormMap()
PFACILITY_FORM_MAP = PFacilityImportFormMap()
PROPAGATION_STUDY_FORM_MAP = PropagationStudyFormMap()
|
997,363 | 8828de27eaebfe8b5853cbb39440650da9cbf4a6 | # !/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright(c) 2019 Nippon Telegraph and Telephone Corporation
# Filename: JuniperDriverMX240.py
'''
Individual section on driver (JuniperDriver's driver (MX240))
'''
import re
import ipaddress
from lxml import etree
import copy
import traceback
import GlobalModule
from EmSeparateDriver import EmSeparateDriver
from EmCommonLog import decorater_log
from EmCommonLog import decorater_log_in_out
class JuniperDriverMX240(EmSeparateDriver):
'''
Individual section on driver (JuniperDriver's driver)
(MX240)
'''
_PORT_MODE_ACCESS = "access"
_PORT_MODE_TRUNK = "trunk"
_ATTR_OPE = "operation"
_XML_LOG = "set xml node (parent = %s):\n%s"
@decorater_log_in_out
def connect_device(self, device_name,
device_info, service_type, order_type):
'''
Driver individual section connection control.
Launch from the common section on driver,
conduct device connection control to protocol processing section.
Parameter:
device_name : Device name
device_info : Device information
service_type : Service type
order_type : Order type
Return value :
Protocol processing section response :
int (1:Normal, 2:Capability abnormal, 3:No response)
'''
if service_type in (self.name_spine,
self.name_leaf,
self.name_b_leaf,
self.name_recover_node,
self.name_internal_link,):
return GlobalModule.COM_CONNECT_OK
else:
return self.as_super.connect_device(device_name,
device_info,
service_type,
order_type)
@decorater_log_in_out
def update_device_setting(self, device_name,
service_type, order_type, ec_message=None):
'''
Driver individual section edit control.
Launch from the common section on driver,
transmit device control signal to protocol processing section.
Parameter:
device_name : Device name
service_type : Service type
order_type : Order type
Return value :
Processing finish status : int (1:Successfully updated
2:Validation check NG
3:Updating unsuccessful)
'''
if service_type in (self.name_spine,
self.name_leaf,
self.name_b_leaf,
self.name_recover_node,
self.name_internal_link,):
return GlobalModule.COM_UPDATE_OK
else:
return self.as_super.update_device_setting(device_name,
service_type,
order_type,
ec_message)
@decorater_log_in_out
def delete_device_setting(self, device_name,
service_type, order_type, ec_message=None):
'''
Driver individual section deletion control.
Launch from the common section on driver,
conduct device deletion control to protocol processing section.
Parameter:
device_name : Device name
service_type : Service type
order_type : Order type
diff_info : Information about difference
Return value :
Processing finish status : int (1:Successfully deleted
2:Validation check NG
3:Deletion unsuccessful)
'''
if service_type in (self.name_spine,
self.name_leaf,
self.name_b_leaf,
self.name_recover_node,
self.name_internal_link,):
return GlobalModule.COM_DELETE_OK
else:
return self.as_super.delete_device_setting(device_name,
service_type,
order_type,
ec_message)
@decorater_log_in_out
def reserve_device_setting(self, device_name, service_type, order_type):
'''
Driver individual section tentative setting control.
Launch from the common section on driver,
conduct device tentative setting control
to protocol processing section.
Parameter:
device_name : Device name
service_type : Service type
order_type : Order type
Return value :
Processing finish status : Boolean (True:Normal, False:Abnormal)
'''
if service_type in (self.name_spine,
self.name_leaf,
self.name_b_leaf,
self.name_recover_node,
self.name_internal_link,):
return GlobalModule.COM_UPDATE_OK
else:
return self.as_super.reserve_device_setting(device_name,
service_type,
order_type)
@decorater_log_in_out
def enable_device_setting(self, device_name, service_type, order_type):
'''
Driver individual section established control.
Launch from the common section on driver,
conduct device established control to protocol processing section.
Parameter:
device_name : Device name
service_type : Service type
order_type : Order type
Return value :
Processing finish status : Boolean (True:Normal, False:Abnormal)
'''
if service_type in (self.name_spine,
self.name_leaf,
self.name_b_leaf,
self.name_recover_node,
self.name_internal_link):
return GlobalModule.COM_UPDATE_OK
else:
return self.as_super.enable_device_setting(device_name,
service_type,
order_type)
@decorater_log_in_out
def disconnect_device(self, device_name, service_type, order_type, get_config_flag=True):
'''
Driver individual section disconnection control.
Launch from the common section on driver,
conduct device disconnection control to
protocol processing section.
Parameter:
device_name : Device name
service_type : Service type
order_type : Order type
Return value :
Processing finish status : Should always return "True"
'''
if service_type in (self.name_spine,
self.name_leaf,
self.name_b_leaf,
self.name_recover_node,
self.name_internal_link):
return GlobalModule.COM_CONNECT_OK
else:
return self.as_super.disconnect_device(device_name,
service_type,
order_type,
get_config_flag)
@decorater_log
def __init__(self):
'''
Constructor
'''
self.as_super = super(JuniperDriverMX240, self)
self.as_super.__init__()
self._MERGE = GlobalModule.ORDER_MERGE
self._DELETE = GlobalModule.ORDER_DELETE
self._REPLACE = GlobalModule.ORDER_REPLACE
self._vpn_types = {"l2": 2, "l3": 3}
self.list_enable_service = [self.name_spine,
self.name_leaf,
self.name_b_leaf,
self.name_l3_slice,
self.name_celag,
self.name_internal_link,
self.name_cluster_link,
self.name_recover_service,
self.name_if_condition, ]
self._lag_check = re.compile("^ae([0-9]{1,})")
tmp_get_mes = (
'<filter>' +
'<configuration></configuration>' +
'</filter>')
self.get_config_message = {
self.name_l2_slice: tmp_get_mes,
self.name_l3_slice: tmp_get_mes,
}
@decorater_log
def _send_control_signal(self,
device_name,
message_type,
send_message=None,
service_type=None,
operation=None):
'''
Send message to protocol processing section.
Parameter:
device_name ; Device name
message_type ; Message type
send_message : Send message
Return value.
Processed result ; Boolean (Result of send_control_signal)
Message ; str (Result of send_control_signal)
'''
is_result, message = (self.as_super.
_send_control_signal(device_name,
message_type,
send_message))
if not is_result and isinstance(message, str) and "<ok/>" in message:
is_result = True
elif not is_result and isinstance(message, str) and\
"Cannot connect to other RE, ignoring it" in message:
is_result = True
return is_result, message
@decorater_log
def _set_configuration_node(self, xml_obj):
'''
Create configuration node.
'''
return self._set_xml_tag(xml_obj,
"configuration",
"xmlns",
"http://xml.juniper.net/xnm/1.1/xnm",
None)
@decorater_log
def _set_interfaces_node(self, conf_node):
'''
Set interfaces.
'''
return self._xml_setdefault_node(conf_node, "interfaces")
@decorater_log
def _set_interface_lag_member(self,
if_node,
lag_mem_ifs=None,
operation=None):
'''
Set LAG member IF.
'''
attr, attr_val = self._get_attr_from_operation(operation)
if operation == self._REPLACE:
attr, attr_val = self._get_attr_from_operation(
lag_mem_ifs["OPERATION"])
node_1 = self._set_xml_tag(if_node, "interface", attr, attr_val)
self._set_xml_tag(node_1,
"interface_name",
None,
None,
lag_mem_ifs["IF-NAME"])
if operation == self._DELETE:
return node_1
node_2 = self._set_xml_tag(node_1, "gigether-options")
node_3 = self._set_xml_tag(node_2, "ieee-802.3ad")
bundle_val = lag_mem_ifs["LAG-IF-NAME"]
self._set_xml_tag(node_3,
"bundle",
None,
None,
bundle_val)
self.common_util_log.logging(
None, self.log_level_debug,
self._XML_LOG % (if_node.tag, etree.tostring(node_1),),
__name__)
return node_1
@decorater_log
def _set_interface_physical(self,
if_node,
if_name=None,
operation=None):
'''
Set physical IF.
'''
attr, attr_val = self._get_attr_from_operation(operation)
node_1 = self._set_xml_tag(if_node,
"interface",
attr,
attr_val)
self._set_xml_tag(node_1, "interface_name", None, None, if_name)
self.common_util_log.logging(
None, self.log_level_debug,
self._XML_LOG % (if_node.tag, etree.tostring(node_1),),
__name__)
return node_1
@decorater_log
def _set_interface_lag(self,
if_node,
lag_if_name=None,
lag_links=None,
lag_speed=None,
operation=None):
'''
Set LAGIF. (can be LAG for CE as standalone)
'''
attr, attr_val = self._get_attr_from_operation(operation)
node_1 = self._set_xml_tag(if_node,
"interface",
attr,
attr_val)
self._set_xml_tag(node_1, "interface_name", None, None, lag_if_name)
if operation != self._DELETE:
node_2 = self._set_xml_tag(node_1, "aggregated-ether-options")
if operation == self._REPLACE:
attr = self._ATRI_OPE
attr_val = self._REPLACE
self._set_xml_tag(node_2,
"minimum-links",
attr,
attr_val,
lag_links)
if operation != self._REPLACE:
self._set_xml_tag(node_2,
"link-speed",
None,
None,
lag_speed)
node_3 = self._set_xml_tag(node_2, "lacp")
self._set_xml_tag(node_3, "active")
self._set_xml_tag(node_3, "periodic", None, None, "fast")
self.common_util_log.logging(
None, self.log_level_debug,
self._XML_LOG % (if_node.tag, etree.tostring(node_1),),
__name__)
return node_1
@decorater_log
def _set_interface_condition(self,
if_node,
if_mes_ec=None,
operation=None):
'''
Set information to open and close IF(common for physical, LAG)
(independent unit CPs). (common for L2, L3)
'''
node_1 = self._set_xml_tag(if_node,
"interface",
None, None)
self._set_xml_tag(node_1, "interface_name", None,
None, if_mes_ec["IF-NAME"])
if if_mes_ec["CONDITION"] == "enable":
self._set_xml_tag(node_1, "disable", self._ATTR_OPE, self._DELETE)
else:
self._set_xml_tag(node_1, "disable")
self.common_util_log.logging(
None, self.log_level_debug,
self._XML_LOG % (if_node.tag, etree.tostring(node_1),),
__name__)
return node_1
    @decorater_log
    def _set_interface_cluster_link(self,
                                    if_node,
                                    if_info,
                                    if_type,
                                    vpn_type=None,
                                    operation=None):
        '''
        Set inter-cluster IF. (common for physical, LAG)

        Delete: physical IFs are removed wholesale; LAG IFs only have
        their mtu and unit 0 removed (the LAG itself is kept).
        Create: sets mtu 4110 and a unit 0 with inet address + mpls
        (both sides treated as L3, hence the 3, 3 arguments).
        '''
        if operation == self._DELETE:
            if if_type == self._if_type_phy:
                # Physical IF: delete the whole interface entry.
                node_1 = self._set_xml_tag(if_node,
                                           "interface",
                                           self._ATTR_OPE,
                                           self._DELETE)
                self._set_xml_tag(node_1, "interface_name",
                                  None,
                                  None,
                                  if_info.get("IF-NAME"))
            else:
                # LAG IF: keep the interface, delete only mtu and unit 0.
                node_1 = self._set_xml_tag(if_node,
                                           "interface",
                                           None,
                                           None)
                self._set_xml_tag(node_1, "interface_name",
                                  None,
                                  None,
                                  if_info.get("IF-NAME"))
                self._set_xml_tag(node_1, "mtu", self._ATTR_OPE, self._DELETE)
                node_2 = self._set_xml_tag(
                    node_1, "unit", self._ATTR_OPE, self._DELETE)
                self._set_xml_tag(node_2, "name", None, None, "0")
            self.common_util_log.logging(
                None, self.log_level_debug,
                self._XML_LOG % (if_node.tag, etree.tostring(node_1),),
                __name__)
            return node_1
        if if_type == self._if_type_phy:
            node_1 = self._set_interface_physical(if_node,
                                                  if_info.get("IF-NAME"))
        else:
            node_1 = self._set_xml_tag(if_node, "interface")
            self._set_xml_tag(node_1,
                              "interface_name",
                              None,
                              None,
                              if_info.get("IF-NAME"))
        self._set_xml_tag(node_1, "mtu", None, None, "4110")
        self._set_interface_unit_inner(
            node_1, if_info.get("IF-ADDR"), if_info.get("IF-PREFIX"), 3, 3)
        self.common_util_log.logging(
            None, self.log_level_debug,
            self._XML_LOG % (if_node.tag, etree.tostring(node_1),),
            __name__)
        return node_1
@decorater_log
def _set_interface_unit_inner(self,
if_node,
if_addr,
if_prefix,
vpn_type=None,
opposite_vpn_type=None):
'''
Create unit node of interface node for
internal link/inter-cluster link.
'''
node_2 = self._set_xml_tag(if_node, "unit")
self._set_xml_tag(node_2, "name", None, None, "0")
node_3 = self._set_xml_tag(node_2, "family")
node_4 = self._set_xml_tag(node_3, "inet")
node_5 = self._set_xml_tag(node_4, "address")
self._set_xml_tag(node_5,
"source",
None,
None,
"%s/%s" % (if_addr, if_prefix))
if vpn_type != 2 and opposite_vpn_type != 2:
self._set_xml_tag(node_3, "mpls")
self.common_util_log.logging(
None, self.log_level_debug,
self._XML_LOG % (if_node.tag, etree.tostring(node_2),),
__name__)
return node_2
@decorater_log
def _set_device_protocols(self, conf_node):
'''
Set protocols for device.
'''
return self._xml_setdefault_node(conf_node, "protocols")
@decorater_log
def _set_device_protocols_ospf_area_0(self,
protocols_node,
peer_router=None,
transit_area=None,
operation=None):
'''
Set ospf node for protocols and set area0 (crossing between clusters).
'''
attr, attr_val = self._get_attr_from_operation(operation)
node_1 = self._xml_setdefault_node(protocols_node, "ospf")
node_2 = self._set_xml_tag(node_1, "area", attr, attr_val)
self._set_xml_tag(node_2, "area_id", None, None, "0.0.0.0")
self.common_util_log.logging(
None, self.log_level_debug,
self._XML_LOG % (protocols_node.tag, etree.tostring(node_1)),
__name__)
return node_2
@decorater_log
def _set_ospf_area_interfaces(self,
area_node,
if_infos=(),
**options):
'''
Set IF for the ospf/area node.
*Should be only for ClusterLink since
InternalLink does not exist in MX240.
options ; operation : Operation type
'''
operation = options.get("operation")
for if_info in if_infos:
metric = if_info.get("OSPF-METRIC", 100)
self._set_ospf_area_interface(area_node,
if_info.get("IF-NAME"),
metric,
operation=operation)
self.common_util_log.logging(
None, self.log_level_debug,
self._XML_LOG % (area_node.tag, etree.tostring(area_node)),
__name__)
@decorater_log
def _set_ospf_area_interface(self,
area_node,
if_name,
metric=100,
operation=None):
'''
Set IF for the ospf/area node.
'''
attr, attr_val = self._get_attr_from_operation(operation)
node_2 = self._set_xml_tag(area_node, "interface", attr, attr_val)
self._set_xml_tag(node_2,
"interface_name",
None,
None,
"%s.%d" % (if_name, 0))
if operation != self._DELETE:
self._set_xml_tag(node_2, "interface-type", None, None, "p2p")
self._set_xml_tag(node_2, "metric", None, None, metric)
self.common_util_log.logging(
None, self.log_level_debug,
self._XML_LOG % (area_node.tag, etree.tostring(node_2)),
__name__)
return node_2
@decorater_log
def _set_slice_mtu_value(self,
mtu=None,
is_vlan=False,
port_mode=None,
slice_type=2):
'''
Set mtu value for L2/L3CPIF.
*Should be only L3CP in case of MX240.
'''
tmp = None
if mtu is None:
tmp = None
else:
if is_vlan:
tmp = 4114
else:
tmp = int(mtu) + 14
return tmp
@decorater_log
def _set_slice_protocol_routing_options(self, conf_node, vrf_name=None):
'''
Set routing-options in preparation of L3 slice protocol settings.
'''
node_1 = self._xml_setdefault_node(conf_node, "routing-instances")
node_2 = self._xml_setdefault_node(node_1, "instance")
tmp = self._xml_setdefault_node(node_2, "name")
if not tmp.text:
tmp.text = vrf_name
node_3 = self._xml_setdefault_node(node_2, "routing-options")
self.common_util_log.logging(
None, self.log_level_debug,
self._XML_LOG % (conf_node.tag, etree.tostring(node_1)),
__name__)
return node_3
@decorater_log
def _set_slice_protocol_bgp(self,
conf_node,
vrf_name,
bgp_list=None):
'''
Set bgp for L3VLANIF.
'''
node_1 = self._set_slice_protocol_routing_options(
conf_node, vrf_name).getparent()
tmp_node_2 = self._set_xml_tag(node_1, "protocols")
node_2 = self._set_xml_tag(tmp_node_2, "bgp")
for bgp in bgp_list:
self._set_slice_bgp_group(node_2,
ip_ver=bgp.get("BGP-IP-VERSION"),
bgp=bgp)
self.common_util_log.logging(
None, self.log_level_debug,
self._XML_LOG % (conf_node.tag, etree.tostring(node_1)),
__name__)
    @decorater_log
    def _set_slice_bgp_group(self,
                             bgp_node,
                             ip_ver=4,
                             bgp=None):
        '''
        Set bgp group for L3VLANIF.

        The group is named "RI_eBGPv4" or "RI_eBGPv6" from ip_ver; the
        family/type children are only emitted when the neighbor is not
        being deleted.
        '''
        node_1 = self._set_xml_tag(bgp_node, "group")
        self._set_xml_tag(node_1, "name", None, None, "RI_eBGPv%d" % (ip_ver,))
        self._set_slice_bgp_neighbor(node_1, bgp)
        if bgp.get("OPERATION") != self._DELETE:
            node_2 = self._set_xml_tag(node_1, "family")
            # inet = IPv4 unicast, inet6 = IPv6 unicast
            tag_name = "inet" if ip_ver == 4 else "inet6"
            node_3 = self._set_xml_tag(node_2, tag_name)
            self._set_xml_tag(node_3, "unicast")
            self._set_xml_tag(node_1, "type", None, None, "external")
        self.common_util_log.logging(
            None, self.log_level_debug,
            self._XML_LOG % (bgp_node.tag, etree.tostring(node_1)),
            __name__)
        return node_1
    @decorater_log
    def _set_slice_bgp_neighbor(self,
                                group_node,
                                bgp=None):
        '''
        Set bgp neighbor for L3VLANIF.

        For delete operations only the neighbor name is emitted (early
        return).  For merge, the import/export policies, peer-as and
        local-address are set as well; the export policy name depends on
        whether this is the active CE (BGP-MASTER set) or the standby CE.
        '''
        attr, attr_val = self._get_attr_from_operation(bgp.get("OPERATION"))
        ip_ver = bgp.get("BGP-IP-VERSION")
        node_1 = self._set_xml_tag(group_node, "neighbor", attr, attr_val)
        self._set_xml_tag(
            node_1, "name", None, None, bgp["BGP-RADD"])
        if attr_val == self._DELETE:
            # Deletion needs only the name; skip policy/peer settings.
            return node_1
        self._set_xml_tag(
            node_1, "import", None, None, "eBGPv%d_To_CE_import" % (ip_ver,))
        if bgp.get("BGP-MASTER") is None:
            tmp = "eBGPv%d_To_standby-CE_export" % (ip_ver,)
        else:
            tmp = "eBGPv%d_To_active-CE_export" % (ip_ver,)
        self._set_xml_tag(node_1, "export", None, None, tmp)
        self._set_xml_tag(node_1, "peer-as", None, None, bgp["BGP-PEER-AS"])
        self._set_xml_tag(node_1, "local-address", None, None, bgp["BGP-LADD"])
        self.common_util_log.logging(
            None, self.log_level_debug,
            self._XML_LOG % (group_node.tag, etree.tostring(node_1)),
            __name__)
        return node_1
@decorater_log
def _set_l3_slice_interfaces(self,
conf_node,
cp_infos):
'''
Set all L3CP for CE.
'''
node_1 = self._set_interfaces_node(conf_node)
for tmp_cp in cp_infos.values():
self._set_l3_slice_vlan_if(node_1, tmp_cp)
self.common_util_log.logging(
None, self.log_level_debug,
self._XML_LOG % (conf_node.tag, etree.tostring(node_1)),
__name__)
    @decorater_log
    def _set_l3_slice_vlan_if(self,
                              ifs_node,
                              cp_info):
        '''
        Set L3CP for CE.

        Physical-IF deletion is a short node (name only, early return).
        Otherwise the interface is built with optional vlan-tagging, an
        mtu value, and one unit per VLAN entry.
        '''
        operation = cp_info.get("OPERATION")
        if_type = cp_info.get("IF-TYPE")
        mtu = cp_info.get("IF-MTU")
        is_vlan = cp_info.get("IF-IS-VLAN", False)
        attr, attr_val = self._get_attr_from_operation(operation)
        if operation == self._DELETE and if_type == self._if_type_phy:
            # Whole physical IF removal: emit only the named delete node.
            node_1 = self._set_xml_tag(ifs_node, "interface", attr, attr_val)
            self._set_xml_tag(
                node_1, "name", None, None, cp_info.get("IF-NAME"))
            self.common_util_log.logging(
                None, self.log_level_debug,
                self._XML_LOG % (ifs_node.tag, etree.tostring(node_1)),
                __name__)
            return node_1
        node_1 = self._set_xml_tag(ifs_node, "interface")
        self._set_xml_tag(node_1, "name", None, None, cp_info.get("IF-NAME"))
        if is_vlan:
            self._set_xml_tag(node_1, "vlan-tagging", attr, attr_val)
        tmp = self._set_slice_mtu_value(
            mtu=mtu, is_vlan=is_vlan, slice_type=3)
        # Skip the mtu element when a non-delete operation is removing a
        # VLAN (IF-DELETE-VLAN); on delete the element is emitted empty.
        if (tmp is not None and
                not (operation != self._DELETE and
                     cp_info.get("IF-DELETE-VLAN"))):
            if operation == self._DELETE:
                tmp = ""
            self._set_xml_tag(node_1, "mtu", attr, attr_val, tmp)
        if cp_info.get("VLAN"):
            for unit in cp_info.get("VLAN").values():
                self._set_l3_slice_vlan_unit(node_1,
                                             unit,
                                             is_vlan=is_vlan,
                                             mtu=mtu)
        self.common_util_log.logging(
            None, self.log_level_debug,
            self._XML_LOG % (ifs_node.tag, etree.tostring(node_1)),
            __name__)
        return node_1
    @decorater_log
    def _set_l3_slice_vlan_unit(self,
                                if_node,
                                vlan,
                                is_vlan=None,
                                mtu=None):
        '''
        Set unit for interface node.

        The unit is named by the VLAN id; on delete only the name is
        emitted (early return).  Otherwise family/inet(6) addresses are
        added, and a vlan-id element is appended only when at least one
        address exists.
        '''
        attr, attr_val = self._get_attr_from_operation(vlan.get("OPERATION"))
        node_1 = self._set_xml_tag(if_node, "unit", attr, attr_val)
        self._set_xml_tag(node_1,
                          "name",
                          None,
                          None,
                          vlan.get("CE-IF-VLAN-ID"))
        if attr_val == self._DELETE:
            self.common_util_log.logging(
                None, self.log_level_debug,
                self._XML_LOG % (if_node.tag, etree.tostring(node_1)),
                __name__)
            return node_1
        node_2 = self._set_xml_tag(node_1, "family")
        # is_add_cp: True when the unit carries any v4/v6 address; it
        # gates both the per-family mtu and the vlan-id element below.
        is_add_cp = False
        if vlan.get("CE-IF-ADDR6") or vlan.get("CE-IF-ADDR"):
            is_add_cp = True
        if vlan.get("CE-IF-ADDR6"):
            self._set_l3_slice_vlan_unit_address(
                node_2,
                6,
                ip_addr=vlan.get("CE-IF-ADDR6"),
                prefix=vlan.get("CE-IF-PREFIX6"),
                is_vlan=is_vlan,
                mtu=mtu,
                is_add_cp=is_add_cp
            )
        if vlan.get("CE-IF-ADDR"):
            self._set_l3_slice_vlan_unit_address(
                node_2,
                4,
                ip_addr=vlan.get("CE-IF-ADDR"),
                prefix=vlan.get("CE-IF-PREFIX"),
                is_vlan=is_vlan,
                mtu=mtu,
                is_add_cp=is_add_cp
            )
        if is_add_cp:
            self._set_xml_tag(
                node_1, "vlan-id", None, None, vlan.get("CE-IF-VLAN-ID"))
        self.common_util_log.logging(
            None, self.log_level_debug,
            self._XML_LOG % (if_node.tag, etree.tostring(node_1)),
            __name__)
        return node_1
    @decorater_log
    def _set_l3_slice_vlan_unit_address(self,
                                        family_node,
                                        ip_ver=4,
                                        **params):
        '''
        Set inet for family node. (common for IPv4,IPv6)
        params : ip_addr = address value
                 ; prefix = prefix value
                 ; is_vlan = IF-IS-VLAN value
                 ; mtu = IF-MTU value
                 ; is_add_cp = whether the unit carries an address
                   (default True); gates the per-family mtu element
        '''
        ip_addr = params.get("ip_addr")
        prefix = params.get("prefix")
        is_vlan = params.get("is_vlan")
        mtu = params.get("mtu")
        is_add_cp = params.get("is_add_cp", True)
        tag_name = "inet" if ip_ver == 4 else "inet6"
        node_1 = self._set_xml_tag(family_node, tag_name)
        node_2 = self._set_xml_tag(node_1, "address")
        # Address name is cidr-style: "<addr>/<prefix>".
        node_3 = self._set_xml_tag(node_2,
                                   "name",
                                   None,
                                   None,
                                   "%s/%s" % (ip_addr, prefix))
        if is_add_cp and mtu is not None and is_vlan:
            # Per-family mtu is only emitted for VLAN IFs with addresses.
            self._set_xml_tag(node_1, "mtu", None, None, mtu)
        self.common_util_log.logging(
            None, self.log_level_debug,
            self._XML_LOG % (family_node.tag, etree.tostring(node_1)),
            __name__)
    @decorater_log
    def _set_l3_slice_routing_instance(self,
                                       conf_node,
                                       vrf_name,
                                       operation=None):
        '''
        Set routing_instance node.

        Ensures routing-instances/instance exists, marks the instance
        for deletion when requested, and names it with vrf_name.
        Return value:
            instance xml node
        '''
        attr, attr_val = self._get_attr_from_operation(operation)
        node_1 = self._xml_setdefault_node(conf_node, "routing-instances")
        node_2 = self._xml_setdefault_node(node_1, "instance")
        if attr_val == self._DELETE:
            # Mark the (possibly pre-existing) instance node for delete.
            node_2.attrib[attr] = self._DELETE
        self._set_xml_tag(node_2, "name", None, None, vrf_name)
        self.common_util_log.logging(
            None, self.log_level_debug,
            self._XML_LOG % (conf_node.tag, etree.tostring(node_1)),
            __name__)
        return node_2
    @decorater_log
    def _set_l3_slice_routing_instance_interface(self,
                                                 ins_node,
                                                 cps_info,
                                                 operation="merge"):
        '''
        Set IF for instance node.

        Walks every VLAN of every CP and emits an interface element
        ("<if_name>.<vlan_id>") for those whose operation matches the
        requested one (a missing VLAN operation counts as "merge").
        '''
        for if_info in cps_info.values():
            if_name = if_info["IF-NAME"]
            for vlan_info in if_info.get("VLAN", {}).values():
                vlan_ope = vlan_info.get("OPERATION")
                if not vlan_ope:
                    vlan_ope = "merge"
                if operation == vlan_ope:
                    vlan_id = vlan_info["CE-IF-VLAN-ID"]
                    tmp = "%s.%d" % (if_name, vlan_id)
                    attr, attr_val = self._get_attr_from_operation(
                        vlan_info.get("OPERATION"))
                    node_3 = self._set_xml_tag(
                        ins_node, "interface", attr, attr_val)
                    self._set_xml_tag(node_3, "name", None, None, tmp)
    @decorater_log
    def _set_l3_slice_routing_instance_vrf(self,
                                           conf_node,
                                           vrf_name,
                                           vrf_info,
                                           cps_info):
        '''
        Set routing_instance node.

        Builds the full vrf instance: name, instance-type, member
        interfaces, route-distinguisher (VRF-RD) and vrf-target (VRF-RT)
        plus the vrf-table-label / no-vrf-propagate-ttl flags.
        '''
        node_1 = self._xml_setdefault_node(conf_node, "routing-instances")
        node_2 = self._xml_setdefault_node(node_1, "instance")
        self._set_xml_tag(node_2, "name", None, None, vrf_name)
        self._set_xml_tag(node_2, "instance-type", None, None, "vrf")
        self._set_l3_slice_routing_instance_interface(node_2,
                                                      cps_info)
        node_3 = self._set_xml_tag(node_2, "route-distinguisher")
        self._set_xml_tag(node_3, "rd-type", None, None, vrf_info["VRF-RD"])
        node_3 = self._set_xml_tag(node_2, "vrf-target")
        self._set_xml_tag(node_3, "community", None, None, vrf_info["VRF-RT"])
        self._set_xml_tag(node_2, "vrf-table-label")
        self._set_xml_tag(node_2, "no-vrf-propagate-ttl")
        self.common_util_log.logging(
            None, self.log_level_debug,
            self._XML_LOG % (conf_node.tag, etree.tostring(node_1)),
            __name__)
        return node_2
    @decorater_log
    def _get_ce_lag_from_ec(self,
                            device_mes,
                            service=None,
                            operation=None,
                            db_info=None):
        '''
        Obtain EC message information relating to LAG for CE.

        Validates that each ce-lag-interface entry carries the fields
        required for the given operation (delete needs less than
        replace, which needs less than create) and splits the data into
        LAG IFs and LAG member IFs.
        Return value:
            (lag_ifs, lag_mem_ifs) : two lists of parameter dicts
        Raises:
            ValueError when mandatory fields are missing.
        '''
        lag_ifs = []
        lag_mem_ifs = []
        for tmp in device_mes.get("ce-lag-interface", ()):
            # Required-field matrix depends on the operation.
            if operation == self._DELETE:
                tmp_bool = bool(not tmp.get("name") or
                                not tmp.get("leaf-interface") or
                                len(tmp["leaf-interface"]) == 0)
            elif operation == self._REPLACE:
                tmp_bool = bool(not tmp.get("name") or
                                tmp.get("minimum-links") is None or
                                not tmp.get("leaf-interface") or
                                len(tmp["leaf-interface"]) == 0)
            else:
                tmp_bool = bool(not tmp.get("name") or
                                tmp.get("minimum-links") is None or
                                tmp.get("link-speed") is None or
                                not tmp.get("leaf-interface") or
                                len(tmp["leaf-interface"]) == 0)
            if tmp_bool:
                raise ValueError("ce-lag not enough information")
            lag_ifs.append(self._get_lag_if_info(tmp))
            for lag_mem in tmp.get("leaf-interface"):
                if not lag_mem.get("name"):
                    raise ValueError(
                        "leaf-interface not enough information ")
                if operation == self._REPLACE:
                    # Replace additionally requires per-member operation.
                    if lag_mem.get("operation")is None:
                        raise ValueError(
                            "leaf-interface not enough information ")
                lag_mem_ifs.append(self._get_lag_mem_if_info(tmp, lag_mem))
        return lag_ifs, lag_mem_ifs
@decorater_log
def _get_lag_if_info(self, if_info):
'''
Obtain LAG information from EC message.
'''
tmp = {
"IF-NAME": if_info.get("name"),
"LAG-LINKS": if_info.get("minimum-links"),
"LAG-SPEED": if_info.get("link-speed"),
}
return tmp
@decorater_log
def _get_lag_mem_if_info(self, lag_if, lag_mem_if):
'''
Obtain LAG member information from EC message.
'''
tmp = {"IF-NAME": lag_mem_if.get("name"),
"LAG-IF-NAME": lag_if["name"],
"OPERATION": lag_mem_if.get("operation"), }
return tmp
    @decorater_log
    def _get_if_condition_from_ec(self,
                                  device_mes,
                                  service=None,
                                  operation=None,
                                  db_info=None):
        '''
        Obtain EC message and DB information related to IF open/close.

        Validates that every physical/LAG entry carries both name and
        condition, and converts each to the internal parameter dict.
        Return value:
            (phy_ifs, lag_ifs) : two lists of parameter dicts
        Raises:
            ValueError when mandatory fields are missing.
        '''
        phy_ifs = []
        for tmp_if in device_mes.get("interface-physical", ()):
            if (not tmp_if.get("name") or
                    tmp_if.get("condition")is None):
                raise ValueError("interface-physical not enough information")
            # deepcopy so later mutation cannot leak into the EC message.
            tmp = copy.deepcopy(tmp_if)
            phy_ifs.append(
                self._get_if_condition_info(tmp))
        lag_ifs = []
        for tmp_if in device_mes.get("internal-lag", ()):
            if (not tmp_if.get("name") or
                    tmp_if.get("condition") is None):
                raise ValueError("internal-lag not enough information")
            tmp = copy.deepcopy(tmp_if)
            lag_ifs.append(
                self._get_if_condition_info(tmp))
        return phy_ifs, lag_ifs
decorater_log
def _get_if_condition_info(self, if_info):
'''
Obtain IF information from EC messege(regardless of physical/LAG)
'''
tmp = {
"IF-NAME": if_info.get("name"),
"CONDITION": if_info.get("condition"),
}
return tmp
    @decorater_log
    def _get_cluster_link_from_ec(self,
                                  device_mes,
                                  db_info=None):
        '''
        Obtain EC message information relating to internal link (LAG).

        Creation entries (physical / lag) are validated and converted
        directly; deletion entries ("cluster-link-interface") carry only
        a name and are resolved against the DB to recover their type.
        Return value:
            (phy_ifs, lag_ifs, inner_ifs) where inner_ifs is a deep
            copy of phy_ifs extended with lag_ifs.
        Raises:
            ValueError on missing fields or when a deletion target
            cannot be resolved from the DB.
        '''
        phy_ifs = []
        for tmp in device_mes.get("cluster-link-physical-interface", ()):
            if (not tmp.get("name") or
                    not tmp.get("address") or
                    tmp.get("prefix") is None or
                    tmp.get("ospf", {}).get("metric") is None):
                raise ValueError(
                    "cluster-link-physical not enough information")
            phy_ifs.append(self._get_cluster_if_info(tmp, self._if_type_phy))
        lag_ifs = []
        for tmp in device_mes.get("cluster-link-lag-interface", ()):
            if (not tmp.get("name") or
                    not tmp.get("address") or
                    tmp.get("prefix") is None or
                    tmp.get("ospf", {}).get("metric") is None):
                raise ValueError("cluster-link-lag not enough information")
            lag_ifs.append(self._get_cluster_if_info(tmp, self._if_type_lag))
        for tmp in device_mes.get("cluster-link-interface", ()):
            if not tmp.get("name"):
                raise ValueError("del cluster-link not enough information")
            if_type = None
            tmp_if = None
            # Resolve the IF type from the DB record with the same name.
            for db_if in db_info.get("cluster-link_info", ()):
                if db_if.get("name") == tmp.get("name"):
                    if_type = db_if.get("type")
                    tmp_if = {
                        "IF-TYPE": if_type,
                        "IF-NAME": tmp.get("name"),
                    }
                    if if_type == self._if_type_phy:
                        phy_ifs.append(tmp_if)
                    elif if_type == self._if_type_lag:
                        lag_ifs.append(tmp_if)
                    else:
                        raise ValueError(
                            "cluster-link if_type in db is irregular")
                    break
            if not tmp_if:
                raise ValueError("cluster-link if_name in db is irregular")
        inner_ifs = copy.deepcopy(phy_ifs)
        inner_ifs.extend(lag_ifs)
        return phy_ifs, lag_ifs, inner_ifs
@decorater_log
def _get_cluster_if_info(self, if_info, if_type=None):
'''
Obtain inter-cluster link information from EC message.
(regardless of physical/LAG)
'''
tmp = {
"IF-TYPE": if_type,
"IF-NAME": if_info.get("name"),
"IF-ADDR": if_info.get("address"),
"IF-PREFIX": if_info.get("prefix"),
"OSPF-METRIC": if_info.get("ospf", {}).get("metric"),
}
return tmp
@decorater_log
def _get_cp_interface_info_from_ec(self, cp_dicts, cp_info):
'''
Collect IF information relating to slice from EC message
(independent unit CPs). (common for L2, L3)
'''
if_name = cp_info.get("name")
vlan_id = cp_info.get("vlan-id")
if not if_name or vlan_id is None:
raise ValueError("CP is not enough information")
if if_name not in cp_dicts:
tmp = {
"IF-TYPE": (self._if_type_lag
if self._lag_check.search(if_name)
else self._if_type_phy),
"IF-NAME": if_name,
"IF-IS-VLAN": bool(vlan_id),
"OPERATION": None,
"VLAN": {},
}
cp_dicts[if_name] = tmp
else:
tmp = cp_dicts[if_name]
return tmp, vlan_id
@decorater_log
def _get_vrf_from_ec(self, device_mes):
vrf_mes = device_mes.get("vrf", {})
tmp = {}
if vrf_mes:
tmp = {
"VRF-NAME": vrf_mes.get("vrf-name"),
"VRF-RT": vrf_mes.get("rt"),
"VRF-RD": vrf_mes.get("rd"),
"VRF-ROUTER-ID": vrf_mes.get("router-id"),
}
if None in tmp.values():
raise ValueError("vrf not enough information")
return tmp
@decorater_log
def _get_l3_vlan_if_info_from_ec(self,
ec_cp,
db_info,
slice_name=None):
'''
Conduct setting for the section relating to VLAN_IF on CP.
'''
if_name = ec_cp.get("name")
tmp = {
"OPERATION": ec_cp.get("operation"),
"CE-IF-VLAN-ID": ec_cp.get("vlan-id"),
}
ce_if = ec_cp.get("ce-interface", {})
self._get_if_ip_network(ce_if.get("address"), ce_if.get("prefix"))
self._get_if_ip_network(ce_if.get("address6"), ce_if.get("prefix6"))
tmp.update({
"CE-IF-ADDR6": ce_if.get("address6"),
"CE-IF-PREFIX6": ce_if.get("prefix6"),
"CE-IF-ADDR": ce_if.get("address"),
"CE-IF-PREFIX": ce_if.get("prefix"),
})
return tmp
@staticmethod
@decorater_log
def _get_if_ip_network(address, prefix):
'''
Create IP network object based on address and pre-fix.
*IP network object will not be created and
cidr mesage will be returned.
'''
if not address and prefix is None:
return None
elif not address or prefix is None:
raise ValueError(
"IPaddress is enough data %s/%s" % (address, prefix))
else:
return "%s/%d" % (address, prefix)
@staticmethod
@decorater_log
def _get_vlan_if_from_db(db_info,
if_name,
slice_name,
vlan_id,
db_name):
'''
Obtain VLAN_IF from DB. (cp, vrf, bgp, vrrp)
'''
for vlan_if in db_info.get(db_name, ()):
db_if_name = vlan_if.get("if_name")
db_slice_name = vlan_if.get("slice_name")
if db_name == "cp":
db_vlan_id = vlan_if.get("vlan", {}).get("vlan_id")
else:
db_vlan_id = vlan_if.get("vlan_id")
if (if_name == db_if_name and
slice_name == db_slice_name and
vlan_id == db_vlan_id):
return vlan_if
return {}
    @decorater_log
    def _get_l3_cps_from_ec(self,
                            device_mes,
                            db_info,
                            slice_name=None,
                            operation=None):
        '''
        Parameter from EC. (obtain cp data from cp)

        Builds {if_name: if_entry} where each entry collects its VLANs;
        for deletions the mtu is recovered from the DB record and the
        IF-DELETE-VLAN flag is set.
        '''
        cp_dicts = {}
        for tmp_cp in device_mes.get("cp", ()):
            # Entries without ce-interface and without an operation are
            # irrelevant to CP generation.
            if (not tmp_cp.get("ce-interface") and
                    tmp_cp.get("operation") is None):
                continue
            tmp, vlan_id = self._get_cp_interface_info_from_ec(
                cp_dicts, tmp_cp)
            tmp["VLAN"][vlan_id] = self._get_l3_vlan_if_info_from_ec(
                tmp_cp, db_info, slice_name)
            if tmp["VLAN"][vlan_id].get("OPERATION") == self._DELETE:
                # Deletions carry no mtu in the EC message; re-use DB.
                tmp_db_if = self._get_vlan_if_from_db(db_info,
                                                      tmp_cp["name"],
                                                      slice_name,
                                                      vlan_id,
                                                      "cp")
                tmp["IF-MTU"] = tmp_db_if.get("mtu_size")
                tmp["IF-DELETE-VLAN"] = True
            else:
                tmp["IF-MTU"] = tmp_cp.get("ce-interface", {}).get("mtu")
        return cp_dicts
@staticmethod
@decorater_log
def _compound_list_val_dict(dict_1, dict_2):
'''
Combine two dictionaries carrying list as the value.
'''
tmp = dict_1.keys()
tmp.extend(dict_2.keys())
tmp = list(set(tmp))
ret_dict = {}
for key in tmp:
tmp_val = []
if key in dict_1:
tmp_val.extend(dict_1[key])
if key in dict_2:
tmp_val.extend(dict_2[key])
tmp_val = list(set(tmp_val))
ret_dict[key] = tmp_val
return ret_dict
    @decorater_log
    def _get_cos_if_list(self,
                         cp_dict,
                         db_info,
                         slice_name=None,
                         operation=None):
        '''
        Create list for class-or-service.
        (Compare CP on DB and CP on operation instruction simultaneously.)
        (Make judgment on the necessity of IF deletion and
        possibility for slice to remain inside device.)

        Return value:
            list of {"IF-NAME", "IF-PORT-MODE"} dicts for IFs that are
            newly created (merge) or fully emptied (delete).  On delete,
            the matching cp_dict entry is also marked with OPERATION =
            delete as a side effect.
        '''
        cos_if_list = []
        # db_cp: {if_name: [vlan_id, ...]} aggregated over ALL slices.
        db_cp = {}
        if db_info:
            slice_name_list = []
            for tmp_db in db_info.get("cp", {}):
                if tmp_db.get("slice_name") in slice_name_list:
                    continue
                slice_name_list.append(tmp_db.get("slice_name"))
            # NOTE(review): this loop shadows the slice_name parameter;
            # harmless today because it is not read afterwards.
            for slice_name in slice_name_list:
                tmp_cp = self._get_db_cp_ifs(db_info, slice_name)
                db_cp = self._compound_list_val_dict(db_cp, tmp_cp)
        tmp_cp_dict = cp_dict.copy()
        if operation == self._DELETE:
            # An IF whose requested VLAN count equals its DB VLAN count
            # is being fully emptied -> whole-IF delete.
            for if_name, cp_data in tmp_cp_dict.items():
                if len(cp_data["VLAN"]) == len(db_cp.get(if_name, ())):
                    tmp = {"IF-NAME": if_name,
                           "IF-PORT-MODE": cp_data.get("IF-PORT-MODE")}
                    cos_if_list.append(tmp)
                    cp_dict[if_name]["OPERATION"] = self._DELETE
        else:
            # IFs absent from the DB are new.
            for if_name in tmp_cp_dict.keys():
                if if_name not in db_cp:
                    tmp = {"IF-NAME": if_name,
                           "IF-PORT-MODE":
                               tmp_cp_dict[if_name].get("IF-PORT-MODE")}
                    cos_if_list.append(tmp)
        return cos_if_list
@decorater_log
def _get_db_cp_ifs(self, db_info, slice_name):
'''
Obtain the combination of IF name and vlan from DB.
'''
db_cp = db_info.get("cp", ())
if_dict = {}
for tmp_cp in db_cp:
if tmp_cp.get("slice_name") != slice_name:
continue
if_name = tmp_cp.get("if_name")
vlan_id = tmp_cp["vlan"]["vlan_id"]
if if_name in if_dict:
if_dict[if_name].append(vlan_id)
else:
if_dict[if_name] = [vlan_id]
return if_dict
    @decorater_log
    def _get_bgp_from_ec(self, device_mes, db_info, slice_name=None):
        '''
        Parameter from EC. (obtain bgp data from cp)

        For deletions the v4/v6 neighbor addresses are re-used from the
        DB record; for create/merge they come from the EC message and
        remote-as-number is mandatory.
        '''
        bgp_list = []
        for tmp_cp in device_mes.get("cp", ()):
            if_name = tmp_cp["name"]
            vlan_id = tmp_cp["vlan-id"]
            if (not tmp_cp.get("bgp") and
                    tmp_cp.get("operation") != self._DELETE):
                continue
            tmpbgp = tmp_cp.get("bgp")
            # NOTE: the or-order matters — when the CP operation is
            # delete, tmpbgp may be None and must not be dereferenced.
            if (tmp_cp.get("operation") == self._DELETE or
                    tmpbgp.get("operation") == self._DELETE):
                db_bgp = self._get_vlan_if_from_db(db_info,
                                                   if_name,
                                                   slice_name,
                                                   vlan_id,
                                                   "bgp_detail")
                if db_bgp.get("remote", {}).get("ipv4_address"):
                    bgp_list.append(self._get_params_delete_bgp(db_bgp, 4))
                if db_bgp.get("remote", {}).get("ipv6_address"):
                    bgp_list.append(self._get_params_delete_bgp(db_bgp, 6))
            else:
                if tmpbgp.get("remote-as-number") is None:
                    raise ValueError("BGP is not enough information")
                if (tmpbgp.get("local-address") or
                        tmpbgp.get("remote-address")):
                    bgp_list.append(self._get_params_bgp(tmpbgp, 4))
                if (tmpbgp.get("local-address6") or
                        tmpbgp.get("remote-address6")):
                    bgp_list.append(self._get_params_bgp(tmpbgp, 6))
        return bgp_list
@decorater_log
def _get_params_delete_bgp(self, bgp, ip_ver=4):
'''
Obtain BGP data suitable for IP version. (for deletion, re-use from DB)
'''
if ip_ver == 4:
radd = bgp.get("remote", {}).get("ipv4_address")
else:
radd = bgp.get("remote", {}).get("ipv6_address")
remote_ip = ipaddress.ip_address(u"%s" % (radd,))
return {
"OPERATION": self._DELETE,
"BGP-IP-VERSION": ip_ver,
"BGP-RADD": radd,
"BGP-REMOTE-IP-ADDRESS": remote_ip,
}
def _get_params_bgp(self, bgp, ip_ver=4):
'''
Obtain BGP data suitable for IP version.
'''
if ip_ver == 4:
radd = bgp.get("remote-address")
ladd = bgp.get("local-address")
else:
radd = bgp.get("remote-address6")
ladd = bgp.get("local-address6")
remote_ip = ipaddress.ip_address(u"%s" % (radd,))
local_ip = ipaddress.ip_address(u"%s" % (ladd,))
return {
"OPERATION": bgp.get("operation"),
"BGP-MASTER": bgp.get("master"),
"BGP-PEER-AS": bgp.get("remote-as-number"),
"BGP-IP-VERSION": ip_ver,
"BGP-RADD": radd,
"BGP-LADD": ladd,
"BGP-LOCAL-IP-ADDRESS": local_ip,
"BGP-REMOTE-IP-ADDRESS": remote_ip,
}
@decorater_log
def _get_vrf_name_from_db(self, db_info, slice_name):
'''
Obtain VRF name from DB based on slice name.
'''
ret_val = None
vrf_dtls = db_info.get("vrf_detail", ())
for vrf_dtl in vrf_dtls:
if vrf_dtl.get("slice_name") == slice_name:
ret_val = vrf_dtl.get("vrf_name")
break
return ret_val
@decorater_log
def _check_l3_slice_del(self, cps_info):
'''
Judge whether there are any CP deletions.
'''
for if_info in cps_info.values():
for vlan_info in if_info.get("VLAN", {}).values():
if vlan_info.get("OPERATION") == self._DELETE:
return True
return False
@decorater_log
def _check_l3_slice_all_cp_del(self,
cos_ifs,
device_info,
slice_name):
'''
Judge whether there are any CP deletions.
'''
del_if_count = len(cos_ifs)
db_ifs = len(self._get_db_cp_ifs(device_info, slice_name))
return bool(del_if_count == db_ifs)
@decorater_log
def _gen_l3_slice_fix_message(self, xml_obj, operation):
'''
L3Slice
Fixed value to create message (L3Slice) for Netconf.
Called out when creating message for L3Slice.
Parameter:
xml_obj : xml object
operation : Designate "delete" when deleting.
Return value.
Creation result : Boolean (Write properly using override method)
'''
return True
@decorater_log
def _gen_ce_lag_fix_message(self, xml_obj, operation):
'''
Fixed value to create message (CeLag) for Netconf.
Called out when creating message for CeLag.
Parameter:
xml_obj : xml object
operation : Designate "delete" when deleting.
Return value.
Creation result : Boolean (Write properly using override method)
'''
return True
@decorater_log
def _gen_if_condition_fix_message(self, xml_obj, operation):
'''
Fixed value to create message (IfCondition) for Netconf.
Called out when creating message for IfCondition.
Parameter:
xml_obj : xml object
operation : Designate "delete" when deleting.
Return value.
Creation result : Boolean
'''
return True
@decorater_log
def _gen_cluster_link_fix_message(self, xml_obj, operation):
'''
Fixed value to create message (cluster-link) for Netconf.
Called out when creating message for cluster-link.
Parameter:
xml_obj : xml object
operation : Designate "delete" when deleting.
Return value.
Creation result : Boolean (Write properly using override method)
'''
return True
    @decorater_log
    def _gen_l3_slice_replace_message(self,
                                      xml_obj,
                                      device_info,
                                      ec_message,
                                      operation):
        '''
        Variable value to create message (L3Slice) for Netconf.
        Called out when creating message for SpineL3Slice.
        (After fixed message has been created.)

        Replace is not supported for L3 slices on this device type:
        the attempt is logged and False is returned.
        Parameter:
            xml_obj : xml object
            device_info : Device information
            operation : Designate "delete" when deleting.
        Return value.
            Creation result : Boolean (always False here)
        '''
        self.common_util_log.logging(
            None,
            self.log_level_debug,
            "ERROR : l3slice order_type = replace ",
            __name__)
        return False
    @decorater_log
    def _gen_l3_slice_variable_message(self,
                                      xml_obj,
                                      device_info,
                                      ec_message,
                                      operation):
        '''
        Variable value to create message (L3Slice) for Netconf.
        Called out when creating message for SpineL3Slice.
        (After fixed message has been created.)

        Orchestrates the whole L3 slice build: parses the EC message
        (vrf, CPs, cos IFs, bgp), then emits interfaces, routing
        instance (full / per-IF delete / whole delete) and bgp.
        Parameter:
            xml_obj : xml object
            device_info : Device information
            operation : Designate "delete" when deleting.
        Return value.
            Creation result : Boolean (False on validation/parse error)
        '''
        if not ec_message.get("device-leaf", {}).get("cp"):
            self.common_util_log.logging(
                None,
                self.log_level_debug,
                "ERROR : message = %s" % ("Config L3CP is not found.",),
                __name__)
            return False
        if operation == self._REPLACE:
            # Replace is delegated (and rejected) by the replace hook.
            return self._gen_l3_slice_replace_message(xml_obj,
                                                      device_info,
                                                      ec_message,
                                                      operation)
        device_mes = ec_message.get("device-leaf", {})
        device_name = None
        try:
            device_name = device_mes["name"]
            slice_name = device_mes["slice_name"]
            # VRF name comes from the message when present, else the DB.
            if device_mes.get("vrf"):
                vrf_name = device_mes["vrf"]["vrf-name"]
            else:
                vrf_name = self._get_vrf_name_from_db(device_info, slice_name)
                if not vrf_name:
                    raise ValueError(
                        "getting vrf_name from DB is None (or not fount)")
            vrf = self._get_vrf_from_ec(device_mes)
            cp_info = self._get_l3_cps_from_ec(device_mes,
                                               device_info,
                                               slice_name)
            cos_ifs = self._get_cos_if_list(cp_info,
                                            device_info,
                                            slice_name=slice_name,
                                            operation=operation)
            bgp = self._get_bgp_from_ec(device_mes,
                                        device_info,
                                        slice_name)
        except Exception as ex:
            self.common_util_log.logging(
                device_name,
                self.log_level_debug,
                "ERROR : message = %s / Exception: %s" % (ec_message, ex),
                __name__)
            self.common_util_log.logging(
                device_name,
                self.log_level_debug,
                "Traceback:%s" % (traceback.format_exc(),),
                __name__)
            return False
        conf_node = self._set_configuration_node(xml_obj)
        if cp_info:
            self._set_l3_slice_interfaces(conf_node, cp_info)
        is_all_del = False
        if operation == self._DELETE:
            is_cp_del = self._check_l3_slice_del(cp_info)
            is_all_del = self._check_l3_slice_all_cp_del(cos_ifs,
                                                         device_info,
                                                         slice_name)
            if is_all_del:
                # Slice fully removed: delete the whole routing instance.
                self._set_l3_slice_routing_instance(conf_node,
                                                    vrf_name,
                                                    self._DELETE)
            elif is_cp_del:
                # Partial delete: remove only the affected instance IFs.
                node_1 = self._set_l3_slice_routing_instance(conf_node,
                                                             vrf_name)
                self._set_l3_slice_routing_instance_interface(
                    node_1,
                    cp_info,
                    self._DELETE)
        elif vrf:
            self._set_l3_slice_routing_instance_vrf(conf_node,
                                                    vrf_name,
                                                    vrf,
                                                    cp_info)
        if bgp and not is_all_del:
            self._set_slice_protocol_bgp(conf_node, vrf_name, bgp)
        return True
    @decorater_log
    def _gen_ce_lag_variable_message(self,
                                     xml_obj,
                                     device_info,
                                     ec_message,
                                     operation):
        '''
        Variable value to create message (CeLag) for Netconf.
        Called out when creating message for CeLag.
        (After fixed message has been created.)

        Parses the ce-lag section of the EC message and emits the LAG
        member interfaces followed by the LAG bundle interfaces.
        Parameter:
            xml_obj : xml object
            device_info : Device information
            operation : Designate "delete" when deleting.
        Return value.
            Creation result : Boolean (False on validation/parse error)
        '''
        device_mes = ec_message.get("device", {})
        device_name = device_mes.get("name")
        try:
            if not device_mes.get("ce-lag-interface"):
                raise ValueError("Config CE-LAG is not found.")
            lag_ifs, lag_mem_ifs = \
                self._get_ce_lag_from_ec(device_mes,
                                         operation=operation)
        except Exception as ex:
            self.common_util_log.logging(
                device_name,
                self.log_level_debug,
                "ERROR : message = %s / Exception: %s" % (ec_message, ex),
                __name__)
            self.common_util_log.logging(
                device_name,
                self.log_level_debug,
                "Traceback:%s" % (traceback.format_exc(),),
                __name__)
            return False
        conf_node = self._set_configuration_node(xml_obj)
        if_node = self._set_interfaces_node(conf_node)
        # Members first, bundles second.
        for tmp_if in lag_mem_ifs:
            self._set_interface_lag_member(if_node,
                                           lag_mem_ifs=tmp_if,
                                           operation=operation)
        for tmp_if in lag_ifs:
            self._set_interface_lag(if_node,
                                    tmp_if.get("IF-NAME"),
                                    tmp_if.get("LAG-LINKS"),
                                    tmp_if.get("LAG-SPEED"),
                                    operation=operation)
        return True
    @decorater_log
    def _gen_if_condition_variable_message(self,
                                           xml_obj,
                                           device_info,
                                           ec_message,
                                           operation):
        '''
        Fixed value to create message (IfCondition) for Netconf.
        Called out when creating message for IfCondition.

        Parses the IF open/close section of the EC message and emits a
        condition element per LAG IF, then per physical IF.
        Parameter:
            xml_obj : xml
            device_info : device information
            operation : Designate "delete" when deleting.
        Return value.
            Creation result : Boolean (False on validation/parse error)
        '''
        device_mes = ec_message.get("device", {})
        device_name = device_mes.get("name")
        try:
            phy_ifs, lag_ifs = \
                self._get_if_condition_from_ec(
                    device_mes,
                    service=self.name_if_condition,
                    operation=operation,
                    db_info=device_info)
        except Exception as ex:
            self.common_util_log.logging(
                device_name,
                self.log_level_debug,
                "ERROR : message = %s / Exception: %s" % (ec_message, ex),
                __name__)
            self.common_util_log.logging(
                device_name,
                self.log_level_debug,
                "Traceback:%s" % (traceback.format_exc(),),
                __name__)
            return False
        conf_node = self._set_configuration_node(xml_obj)
        if_node = self._set_interfaces_node(conf_node)
        # LAG IFs first, physical IFs second.
        for tmp_if in lag_ifs:
            self._set_interface_condition(if_node,
                                          if_mes_ec=tmp_if,
                                          operation=operation)
        for tmp_if in phy_ifs:
            self._set_interface_condition(if_node,
                                          if_mes_ec=tmp_if,
                                          operation=operation)
        self.common_util_log.logging(
            None, self.log_level_debug,
            self._XML_LOG % (if_node.tag, etree.tostring(if_node),),
            __name__)
        return True
    @decorater_log
    def _gen_cluster_link_variable_message(self,
                                           xml_obj,
                                           device_info,
                                           ec_message,
                                           operation):
        '''
        Variable value to create message (cluster-link) for Netconf.
        Called out when creating message for cluster-link.
        (After fixed message has been created.)

        Parses the cluster-link section of the EC message, emits the
        physical and LAG link interfaces, then registers every inner IF
        under protocols/ospf area 0.
        Parameter:
            xml_obj : xml object
            device_info : Device information
            ec_message : EC message
            operation : Designate "delete" when deleting.
        Return value.
            Creation result : Boolean (False on validation/parse error)
        '''
        device_mes = ec_message.get("device", {})
        device_name = device_mes.get("name")
        try:
            if not device_name:
                raise ValueError("device name is not None")
            phy_ifs, lag_ifs, inner_ifs = self._get_cluster_link_from_ec(
                device_mes,
                db_info=device_info)
            if not inner_ifs:
                raise ValueError("clusterLink is not found")
        except Exception as ex:
            self.common_util_log.logging(
                device_name,
                self.log_level_debug,
                "ERROR : message = %s / Exception: %s" % (ec_message, ex),
                __name__)
            self.common_util_log.logging(
                device_name,
                self.log_level_debug,
                "Traceback:%s" % (traceback.format_exc(),),
                __name__)
            return False
        conf_node = self._set_configuration_node(xml_obj)
        if_node = self._set_interfaces_node(conf_node)
        for phy_if in phy_ifs:
            self._set_interface_cluster_link(if_node,
                                             if_info=phy_if,
                                             if_type=self._if_type_phy,
                                             operation=operation)
        for lag_if in lag_ifs:
            self._set_interface_cluster_link(if_node,
                                             if_info=lag_if,
                                             if_type=self._if_type_lag,
                                             operation=operation)
        # All inner links (phy + lag) join OSPF area 0.
        protocols_node = self._set_device_protocols(conf_node)
        area_node = self._set_device_protocols_ospf_area_0(protocols_node)
        self._set_ospf_area_interfaces(area_node,
                                       inner_ifs,
                                       operation=operation,
                                       service=self.name_cluster_link)
        return True
@decorater_log
def _comparsion_sw_db_l3_slice(self, message, db_info):
'''
SW-DB comparison process (check for information matching) (L3Slice).
Called out when checking information matching of L3Slice.
Parameter:
message : Response message
db_info : DB information
Return value :
Matching result :
Boolean (Should always be "True" unless override occurs.)
'''
class ns_xml(object):
class ns_list(object):
xnm = "xnm"
n_sp_dict = {ns_list.xnm:
"http://xml.juniper.net/xnm/1.1/xnm"}
def __init__(self, ini_ns=ns_list.xnm):
self.name_space = ini_ns
def ns_find_node(self, parent, *tags):
tmp = parent
for tag in tags:
if (tmp is not None and
tmp.find("%s:%s" % (self.name_space, tag),
self.n_sp_dict)is not None):
tmp = tmp.find("%s:%s" % (self.name_space, tag),
self.n_sp_dict)
else:
return None
return tmp
def ns_findall_node(self, parent, tag):
return parent.findall("%s:%s" % (self.name_space, tag),
self.n_sp_dict)
is_return = True
ns_p = ns_xml(ns_xml.ns_list.xnm)
device_name = (db_info["device"].get("device_name")
if db_info.get("device") else "db_unknown_device")
db_vrf = {"vrf_name": None,
"rt": None,
"rd": None,
"router_id": None}
if db_info.get("vrf_detail"):
vrf_name = db_info["vrf_detail"][0].get("vrf_name")
db_vrf.update(db_info["vrf_detail"][0])
self.common_util_log.logging(device_name, self.log_level_debug,
"DB_VRF_COUNT = %s" % (
len(db_info["vrf_detail"])),
__name__)
else:
vrf_name = None
self.common_util_log.logging(device_name, self.log_level_debug,
"DB_VRF_COUNT = 0", __name__)
config_node = ns_p.ns_find_node(message, "configuration")
if config_node is None:
is_return = False
self.common_util_log.logging(
device_name,
self.log_level_debug,
"ERROR cannot find configuration node",
__name__)
tmp_node = ns_p.ns_find_node(config_node, "interfaces")
for cp in (ns_p.ns_findall_node(tmp_node, "interface")
if is_return and tmp_node is not None else []):
if_name_node = ns_p.ns_find_node(cp, "name")
if if_name_node is None:
self.common_util_log.logging(
device_name,
self.log_level_debug,
"ERROR cannot find if name node",
__name__)
is_return = False
break
is_vlan_tagging = False
cp_data = None
for db_cp in db_info.get("cp") if db_info.get("cp") else []:
if if_name_node.text != db_cp.get("if_name"):
continue
cp_dict = {
"if_name": None,
"vlan": None,
"mtu": None,
"ipv4_addr": None,
"ipv4_mtu": None,
"ipv6_addr": None,
"ipv6_mtu": None,
"vrrp_group_id": None,
"vrrp_v_ipv4_addr": None,
"vrrp_v_ipv6_addr": None,
"vrrp_priority": None,
"vrrp_track_if": None
}
cp_dict["if_name"] = db_cp.get("if_name")
cp_dict["vlan"] = (db_cp["vlan"].get("vlan_id")
if db_cp.get("vlan") else None)
if cp_dict["vlan"] != 0:
is_vlan_tagging = True
if db_cp.get("mtu_size") is None:
cp_dict["mtu"] = None
elif cp_dict["vlan"] == 0:
cp_dict["mtu"] = str(int(db_cp["mtu_size"]) + 14)
else:
cp_dict["mtu"] = str(int(db_cp["mtu_size"]) + 18)
if (db_cp.get("ce_ipv4") and
db_cp.get("ce_ipv4").get("address") and
db_cp.get("ce_ipv4").get("prefix")):
cp_dict["ipv4_addr"] = (
"%s/%s" % (db_cp.get("ce_ipv4").get("address"),
db_cp.get("ce_ipv4").get("prefix"))
)
if (cp_dict["ipv4_addr"] is not None and
cp_dict["mtu"] is not None and
cp_dict["vlan"] != 0):
cp_dict["ipv4_mtu"] = db_cp["mtu_size"]
if (db_cp.get("ce_ipv6") and
db_cp.get("ce_ipv6").get("address") and
db_cp.get("ce_ipv6").get("prefix")):
cp_dict["ipv6_addr"] = (
"%s/%s" % (db_cp.get("ce_ipv6").get("address"),
db_cp.get("ce_ipv6").get("prefix"))
)
if (cp_dict["ipv6_addr"] is not None and
cp_dict["mtu"] is not None and
cp_dict["vlan"] != 0):
cp_dict["ipv6_mtu"] = db_cp["mtu_size"]
for vrrp in (db_info.get("vrrp_detail")
if db_info.get("vrrp_detail") else []):
if (cp_dict["if_name"] == vrrp.get("if_name") and
cp_dict["vlan"] == vrrp.get("vlan_id")):
cp_dict["vrrp_group_id"] = vrrp.get("group_id")
cp_dict["vrrp_v_ipv4_addr"] = (
vrrp["virtual"].get("ipv4_address")
if vrrp.get("virtual") is not None else None)
cp_dict["vrrp_v_ipv6_addr"] = (
vrrp["virtual"].get("ipv6_address")
if vrrp.get("virtual") is not None else None)
cp_dict["vrrp_priority"] = vrrp.get("priority")
cp_dict["vrrp_track_if"] = vrrp.get("track_if_name")
break
if cp_dict["vlan"] is not None:
cp_data = cp_dict.copy()
break
if cp_data is None:
self.common_util_log.logging(
device_name,
self.log_level_debug,
("ERROR cp_info don't have interface %s" %
(if_name_node.text,)),
__name__)
is_return = False
break
tmp_text = (True if ns_p.ns_find_node(cp, "vlan-tagging")
is not None else False)
if tmp_text != is_vlan_tagging:
self.common_util_log.logging(
device_name,
self.log_level_debug,
("ERROR vlan-tagging Fault (ec_mes = %s,db = %s)" %
(tmp_text, is_vlan_tagging)),
__name__)
is_return = False
break
node_1 = ns_p.ns_find_node(cp, "unit")
if not self._comparsion_pair(
ns_p.ns_find_node(node_1, "name"), cp_data["vlan"]):
is_return = False
break
if not self._comparsion_pair(
ns_p.ns_find_node(node_1, "vlan-id"),
cp_data["vlan"] if cp_data["vlan"] != 0 else None):
is_return = False
break
node_2 = ns_p.ns_find_node(node_1,
"family",
"inet6")
if node_2 is not None:
if not self._comparsion_pair(
ns_p.ns_find_node(node_2, "mtu"),
cp_data["ipv6_mtu"]):
is_return = False
break
if not self._comparsion_pair(
ns_p.ns_find_node(node_2, "address", "name"),
cp_data["ipv6_addr"]):
is_return = False
break
node_2 = ns_p.ns_find_node(node_1,
"family",
"inet")
if node_2 is not None:
if not self._comparsion_pair(
ns_p.ns_find_node(node_2, "mtu"),
cp_data["ipv4_mtu"]):
is_return = False
break
if not self._comparsion_pair(
ns_p.ns_find_node(node_2, "address", "name"),
cp_data["ipv4_addr"]):
is_return = False
break
node_1 = ns_p.ns_find_node(config_node,
"routing-instances",
"instance",
"name")
if is_return and not self._comparsion_pair(node_1, vrf_name):
is_return = False
node_1 = ns_p.ns_find_node(config_node,
"routing-instances",
"instance",
"route-distinguisher",
"rd-type")
if is_return and not self._comparsion_pair(node_1, db_vrf["rd"]):
is_return = False
node_1 = ns_p.ns_find_node(config_node,
"routing-instances",
"instance",
"vrf-target",
"community")
if is_return and not self._comparsion_pair(node_1, db_vrf["rt"]):
is_return = False
node_1 = ns_p.ns_find_node(config_node,
"routing-instances",
"instance",
"routing-options",
"router-id")
if (is_return and
not self._comparsion_pair(node_1, db_vrf["router_id"])):
is_return = False
tmp_list = []
for db_vrf in (db_info.get("vrf_detail")
if is_return and db_info.get("vrf_detail") else []):
tmp_list.append("%s.%s" % (db_vrf.get("if_name"),
db_vrf.get("vlan_id")))
node_1 = ns_p.ns_find_node(config_node,
"routing-instances",
"instance")
for node_2 in (ns_p.ns_findall_node(node_1, "interface")
if is_return and node_1 is not None else []):
node_3 = ns_p.ns_find_node(node_2, "name")
if node_3.text not in tmp_list:
self.common_util_log.logging(
device_name, self.log_level_debug,
("ERROR vrf_if %s don't find in db_vrf (%s)" %
(node_3.text, tmp_list)), __name__)
is_return = False
break
node_1 = ns_p.ns_find_node(config_node,
"routing-instances",
"instance",
"protocols")
if is_return and node_1 is not None:
if (ns_p.ns_find_node(node_1, "ospf3") is not None or
ns_p.ns_find_node(node_1, "ospf3") is not None):
tmp_list = []
for db_cp in (db_info.get("cp")
if is_return and db_info.get("cp") else []):
tmp_list.append(
("%s.%s" % (db_cp.get("if_name"),
db_cp["vlan"].get("vlan_id")
if db_cp.get("vlan") else None),
"%s" % (db_cp.get("metric"),))
)
for node_2 in ns_p.ns_findall_node(node_1, "ospf3"):
node_3 = ns_p.ns_find_node(node_2,
"area",
"interface",
"name")
node_4 = ns_p.ns_find_node(node_2,
"area",
"interface",
"metric")
tmp_val = (node_3.text if node_3 is not None else None,
node_4.text if node_4 is not None else None)
if tmp_val not in tmp_list:
self.common_util_log.logging(
device_name, self.log_level_debug,
("ERROR ospf3 (name,metric)=" +
"(%s,%s)" % tmp_val +
" don't find in db_ospf (%s)" %
(tmp_list,), __name__))
is_return = False
for node_2 in (ns_p.ns_findall_node(node_1, "ospf")
if is_return else []):
node_3 = ns_p.ns_find_node(node_2,
"area",
"interface",
"name")
node_4 = ns_p.ns_find_node(node_2,
"area",
"interface",
"metric")
tmp_val = (node_3.text if node_3 is not None else None,
node_4.text if node_4 is not None else None)
if tmp_val not in tmp_list:
self.common_util_log.logging(
device_name, self.log_level_debug,
("ERROR ospf (name,metric)=" +
"(%s,%s)" % tmp_val +
" don't find in db_ospf (%s)" %
(tmp_list,), __name__))
is_return = False
node_2 = ns_p.ns_find_node(node_1, "bgp")
if is_return and node_2 is not None:
tmp_list = []
for db_bgp in (db_info.get("bgp_detail")
if db_info.get("bgp_detail") else []):
bgp_as_number = db_bgp.get("as_number")
tmp_list.append((
(db_bgp["remote"].get("ipv4_address")
if db_bgp.get("remote") else None),
"%s" % (bgp_as_number,),
(db_bgp["local"].get("ipv4_address")
if db_bgp.get("local") else None)
))
tmp_list.append((
(db_bgp["remote"].get("ipv6_address")
if db_bgp.get("remote") else None),
"%s" % (bgp_as_number,),
(db_bgp["local"].get("ipv6_address")
if db_bgp.get("local") else None)
))
for node_3 in ns_p.ns_findall_node(node_2, "group"):
node_4 = ns_p.ns_find_node(node_3, "neighbor", "name")
node_5 = ns_p.ns_find_node(node_3, "neighbor", "peer-as")
node_6 = ns_p.ns_find_node(
node_3, "neighbor", "local-address")
tmp_val = (node_4.text if node_4 is not None else None,
node_5.text if node_5 is not None else None,
node_6.text if node_6 is not None else None)
if tmp_val not in tmp_list:
self.common_util_log.logging(
device_name, self.log_level_debug,
("ERROR bgp (name,peer-as,local-address)=" +
"(%s,%s,%s)" % tmp_val +
" don't find in db_bgp (%s)" %
(tmp_list,), __name__))
is_return = False
break
return is_return
|
997,364 | 676ad6182800d6d47f8f59d09abd5dabd487bed5 | #! /root/anaconda3/bin/python
print(dir(str))
|
997,365 | df2f1f8f2bbb1d8efe1010a178521331c9a25980 | from random import *
l=['R','P','S']
def game(n):
    # Play one round of rock/paper/scissors against a random computer
    # pick; n is the player's symbol ('R', 'P' or 'S'). Prints the result
    # and the computer's choice, then returns an empty tuple.
    roll = randint(1, 3)
    comp_name = {1: 'Rock', 2: 'Paper', 3: 'Scissors'}[roll]
    winning = {('R', 3), ('P', 1), ('S', 2)}
    drawing = {('R', 1), ('P', 2), ('S', 3)}
    if (n, roll) in winning:
        print("You Win")
    elif (n, roll) in drawing:
        print('Draw')
    else:
        print('You Lose')
    print("Computer: " + comp_name)
    return ()
# Interactive entry point: show the legend, read a single choice and play
# one round; anything outside R/P/S is rejected.
print("WELCOME LETS PLAY:")
print("R:Rock")
print("P:Paper")
print("S:Scissors")
name = input("What's your choice?\n ")
if(name in l):
    game(name)
else:
    print("Invalid Choice")
|
997,366 | c84f84358424e7ecd513aee4fc3649d62dda4696 | # -*- coding: utf-8 -*-
import scrapy
class InfoqSpider(scrapy.Spider):
    """Crawl the InfoQ China AI article list, yielding one item per
    article with title, link, author(s) and a yyyy/mm/dd date string."""
    name = 'infoq'
    allowed_domains = ['infoq.com']
    start_urls = [
        'http://infoq.com/cn/AI/articles'
    ]

    def parse(self, response):
        # One <li> per listed entry (articles and interleaved ads).
        articles = response.xpath("//ul[@class='l l_large']/li")
        titles = articles.xpath("./a[@class='lt']/text()").extract()
        links = articles.xpath("./a[@class='lt']/@href").extract()
        #authors = articles.xpath("./a[@class='editorlink f_taxonomyEditor']/text()").extract()
        authors = articles.xpath(".//span[@class='authors-list']")
        # NOTE(review): dates_m reads 'ldate_d' and dates_d reads 'ldate_m'
        # -- presumably the site's CSS class names are swapped relative to
        # their meaning; verify against the live markup.
        dates_y = articles.xpath("./ul[@class='ldate']/li[@class='ldate_y']/text()")
        dates_m = articles.xpath("./ul[@class='ldate']/li[@class='ldate_d']/text()")
        dates_d = articles.xpath("./ul[@class='ldate']/li[@class='ldate_m']/text()")
        #remove ads
        # i counts skipped entries (empty titles = ads); date/author lists
        # have no cells for them, so later indexes are shifted by i.
        i = 0
        #month
        # Chinese month names -> two-digit month numbers.
        mmdict = {
            u'一月': '01',
            u'二月': '02',
            u'三月': '03',
            u'四月': '04',
            u'五月': '05',
            u'六月': '06',
            u'七月': '07',
            u'八月': '08',
            u'九月': '09',
            u'十月': '10',
            u'十一月': '11',
            u'十二月': '12',
        }
        for idx, title in enumerate(titles):
            if title.strip() == "":
                i = i+1
                continue
            thedate = dates_y[idx-i].extract() + "/" + mmdict[dates_m[idx-i].extract()] + "/" + dates_d[idx-i].extract()
            yield {
                "title": title.strip(),
                "link": links[idx-i].strip(),
                "author": authors[idx-i].xpath(".//a[@class='editorlink f_taxonomyEditor']/text()").extract(),
                "date" : thedate,
            }
        # Follow pagination until there is no "next" button.
        next_page = response.css("a.btn_next::attr('href')").extract_first()
        if next_page is not None:
            yield response.follow(next_page, self.parse)
        pass
|
997,367 | 19940524f035f02ee4de1b29326e0c794d62ddf6 | # -*- coding: utf-8 -*-
from WeatherForecast import WeatherForecast
# Fetch forecast data for the hard-coded postal code; the average is
# computed and printed on the following line.
data = WeatherForecast.get_weather("980-0871")
print(WeatherForecast.calc_average(data)) |
997,368 | 6105d1bb87fd720d4269870593579b33e58abbfc | # Copyright (c) 2016. Mount Sinai School of Medicine
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function, absolute_import
import pepdata
from pepdata.amino_acid import amino_acid_letters
def get_iedb_epitopes(epitope_lengths, positive_ratio=0.6):
    """Return human, non-self, non-allergen IEDB T-cell epitopes.

    Parameters:
        epitope_lengths: iterable of int lengths to keep.
        positive_ratio: minimum fraction of assays for an epitope whose
            qualitative measure starts with 'Positive' (default 0.6).
    Returns:
        DataFrame with columns ['iedb_epitope', 'epitope_length'],
        deduplicated, index reset.
    """
    df_tcell = pepdata.iedb.tcell.load_dataframe()
    # Restrict to human
    df_tcell = df_tcell[df_tcell['Host Organism Name'].fillna('').str.contains('Homo sap')]
    # Remove self
    df_tcell = df_tcell[~df_tcell['Epitope Source Organism Name'].fillna('').str.contains(
        'homo sap', case=False)]
    # Remove allergens
    for column in ['Epitope Source Molecule Name', 'In Vivo 1 Process Type',
                   'In Vivo 2 Process Type']:
        df_tcell = df_tcell[~df_tcell[column].fillna('').str.contains('allerg', case=False)]
    # Only certain lengths
    df_tcell.rename(columns={'Epitope Linear Sequence': 'iedb_epitope'}, inplace=True)
    df_tcell['epitope_length'] = df_tcell['iedb_epitope'].apply(len)
    df_tcell = df_tcell[df_tcell.epitope_length.isin(epitope_lengths)]
    # Exclude amino acid letters like B and Z that are not specific to one amino acid
    def only_amino_acid_letters(epitope):
        return all(letter in amino_acid_letters for letter in epitope)
    df_tcell = df_tcell[df_tcell.iedb_epitope.apply(only_amino_acid_letters)]
    # Calculate the T cell positive ratio, and filter by it
    df_tcell['is_tcell_positive'] = df_tcell['Qualitative Measure'].str.startswith('Positive')
    df_tcell = df_tcell[['iedb_epitope', 'epitope_length', 'is_tcell_positive']]
    def tcell_positive_ratio(bool_list):
        # Fraction of assays reported positive for one epitope group.
        return sum(bool_list) / float(len(bool_list))
    df_tcell_ratio = df_tcell.groupby(['iedb_epitope', 'epitope_length']).agg(
        {'is_tcell_positive': tcell_positive_ratio,})
    df_tcell_ratio.rename(columns={'is_tcell_positive': 'positive_ratio'}, inplace=True)
    df_tcell_ratio.reset_index(inplace=True)
    df_tcell_ratio = df_tcell_ratio[df_tcell_ratio.positive_ratio >= positive_ratio][['iedb_epitope', 'epitope_length']]
    assert len(df_tcell_ratio.drop_duplicates()) == len(df_tcell_ratio), \
        "No duplicates should be present"
    df_tcell_ratio.reset_index(drop=True, inplace=True)
    return df_tcell_ratio
|
997,369 | d4cd020ecebb745d721e9f2d37f7e99841fa8304 | #coding=utf-8
import copy
import tensorflow as tf
import lr_base
def tfTrain(data_x, data_y):
    # Fit a linear model y = w.x via TF1 graph-mode gradient descent and
    # return the learned weight vector (first row of w).
    # NOTE(review): assumes data_x is shaped (num_features, num_samples)
    # so tf.matmul(w, data_x) lines up with data_y -- confirm with lr_base.
    # Build the linear model
    tf_w = tf.Variable( tf.zeros([1, len(data_x)]) )
    tf_y = tf.matmul(tf_w, data_x)
    loss = tf.reduce_mean( tf.square( tf_y - data_y ) )
    optimizer = tf.train.GradientDescentOptimizer(0.5)
    train = optimizer.minimize(loss)
    # Train: 50 fixed gradient-descent steps (Python 2 ``xrange``)
    init = tf.initialize_all_variables()
    sess = tf.Session()
    sess.run(init)
    for iter in xrange(0, 50):
        sess.run(train)
    # Result: extract the learned weights from the session
    theta = tf_w.eval(sess)[0]
    return theta
if __name__ == '__main__':
    # Generate synthetic samples, rescale features, train, then map the
    # learned weights back to the original feature scale.
    x_mat, y_vec = lr_base.produceSampleData()
    x_mat_ori = copy.deepcopy(x_mat)
    x_shift, x_scale = lr_base.adjustSampleScale(x_mat)
    theta = tfTrain(x_mat, y_vec)
    lr_base.adjustTheta(x_shift, x_scale, theta)
print theta |
997,370 | 55669dd15c77ef5f8126e13a41182bb5212b5cab | import pickle as pkl
import numpy as np
import tensorflow as tf
from beam import BeamSearchDecoder
import re
from tensorflow.contrib import rnn
import os
from copynet import CopyNetWrapper
import string
# Load the preprocessed corpus and the embedding/vocabulary pickles.
with open("/data/shrey/copynet/pickle_data.txt",'r') as file:
    dic = pkl.load(file)
with open("/data/shrey/copynet/pickle_embed.txt",'r') as file:
    dic2 = pkl.load(file)
embeddings=dic2['embeddings']
word2int=dic2['word2int']
int2word=dic2['int2word']
# Extended copies of the vocab maps; OOV words from the input text are
# appended to these so CopyNet can copy them from the source.
word2int1 = dict(word2int)
words = word2int.keys()
int2word1 = dict(int2word)
delta = []  # out-of-vocabulary words found in the input text
x = []      # token ids under the base vocabulary (OOV -> UNK)
t = []      # token ids under the extended vocabulary
length =0
## Loading the text file for which we will be suggesting the hashtags
with open('/data/shrey/copynet/text.txt','r') as file:
    alpha = file.read().splitlines()
    for beta in alpha:
        # Keep only lowercase letters and collapse repeated spaces.
        gamma = re.sub('[^a-z\ ]+',' ',beta.lower())
        gamma=re.sub('\ +',' ',gamma)
        for word in gamma.split():
            if word in word2int:
                x.append(word2int[word])
            elif word not in word2int:
                delta.append(word)
                x.append(word2int['UNK'])
# NOTE(review): ``n1`` and ``vocab_size`` are used below but never defined
# in this file, so this loop raises NameError as written -- presumably n1
# should start at the extended-vocabulary size; confirm and fix upstream.
for word in delta:
    if word not in word2int.keys():
        word2int1[word]=n1
        int2word1[n1]=word
        n1+=1
    elif word2int[word]>vocab_size-1:
        word2int1[word]=n1
        int2word1[n1]=word
        n1+=1
# Second pass: re-tokenize the same text under the extended vocabulary.
with open('/data/shrey/copynet/text.txt','r') as file:
    alpha = file.read().splitlines()
    for beta in alpha:
        gamma = re.sub('[^a-z\ ]+',' ',beta.lower())
        gamma=re.sub('\ +',' ',gamma)
        for word in gamma.split():
            length+=1
            t.append(word2int1[word])
assert len(x)==len(t)
# Batch of one document: shape (1, length).
len_docs=[length]
x = [x]
x = np.array(x)
t = [t]
t = np.array(t)
len_docs=np.array(len_docs)
beam_width =5
rnn_size = 64
batch_size = np.shape(len_docs)[0]
# Graph placeholders: document lengths, base-vocab ids, extended-vocab ids.
L1=tf.placeholder('int32',[batch_size])
X = tf.placeholder('int32',[batch_size,length])
T = tf.placeholder('int32',[batch_size,length])
def nn(x,len_docs,t):
    # GRU encoder -> Luong attention + CopyNet decoder, decoded with beam
    # search; returns the beam-search predicted id tensor.
    # NOTE(review): x/t are (batch, length) int32 placeholders and
    # len_docs the per-document lengths -- see the placeholders above.
    encoder_emb_inp = tf.nn.embedding_lookup(embeddings, x)
    encoder_cell = rnn.GRUCell(rnn_size)
    encoder_outputs, encoder_state = tf.nn.dynamic_rnn(encoder_cell,encoder_emb_inp,sequence_length=len_docs,dtype=tf.float32)
    # Tile encoder results across the beam dimension for beam search.
    tiled_encoder_outputs = tf.contrib.seq2seq.tile_batch(encoder_outputs, multiplier=beam_width)
    tiled_sequence_length = tf.contrib.seq2seq.tile_batch(len_docs, multiplier=beam_width)
    tiled_encoder_final_state = tf.contrib.seq2seq.tile_batch(encoder_state, multiplier=beam_width)
    tiled_t = tf.contrib.seq2seq.tile_batch(t,multiplier=beam_width)
    start_tokens = tf.constant(word2int['SOS'], shape=[batch_size])
    decoder_cell = rnn.GRUCell(rnn_size)
    attention_mechanism = tf.contrib.seq2seq.LuongAttention(rnn_size,tiled_encoder_outputs,memory_sequence_length=tiled_sequence_length)
    decoder_cell = tf.contrib.seq2seq.AttentionWrapper(decoder_cell, attention_mechanism,attention_layer_size=rnn_size)
    initial_state = decoder_cell.zero_state(batch_size*beam_width, dtype=tf.float32).clone(cell_state=tiled_encoder_final_state)
    # Wrap with CopyNet so the decoder can copy tokens from the source.
    decoder_cell = CopyNetWrapper(decoder_cell, tiled_encoder_outputs, tiled_t,len(set(delta).union(words)),vocab_size,sequence_length=tiled_sequence_length)
    initial_state = decoder_cell.zero_state(batch_size*beam_width, dtype=tf.float32).clone(cell_state=initial_state)
    decoder = BeamSearchDecoder(cell=decoder_cell,embedding=embeddings,start_tokens=start_tokens,end_token=word2int['EOS'],initial_state=initial_state,beam_width=beam_width,output_layer=None,length_penalty_weight=0.0)
    # Decode at most 2 steps (hashtag suggestions are short).
    outputs,j,k = tf.contrib.seq2seq.dynamic_decode(decoder,maximum_iterations=2)
    logits = outputs.predicted_ids
    return logits
def answer():
    # Restore the trained model and print the decoded tokens, iterating
    # beams (j) in the outer loop and time steps (i) in the inner one.
    logits = nn(X,L1,T)
    print tf.trainable_variables()
    saver = tf.train.Saver()
    with tf.Session() as sess:
        sess.run(tf.global_variables_initializer())
        saver.restore(sess, '/data/shrey/copynet/model' + '/data-all')
        array = (sess.run(logits,feed_dict={X:x,L1:len_docs,T:t}))
        a=np.shape(array)[1]
        b=np.shape(array)[2]
        for j in range(b):
            for i in range(a):
                c=int(array[0,i,j])
                print int2word1[c]
            print'\n'
answer() |
997,371 | 384de4ad03fa2426e4c2d4e50ca35bc8d425a3f3 | __author__ = 'Aaron Yang'
__email__ = 'byang971@usc.edu'
__date__ = '10/7/2020 4:12 PM'
import nltk
# Toy ambiguous CFG for the PP-attachment example
# "funding the wall with Mexico"; the chart parser (trace=1 prints the
# chart) yields every parse tree and draws each one.
grammar = nltk.CFG.fromstring("""
NP -> NP NP
NP -> NP PP
NP -> Det N
PP -> P NP
NP -> 'Mexico'
NP -> 'funding'
Det -> 'the'
N -> 'wall'
P -> 'with'
""")
tokens = ['funding', 'the', 'wall', 'with', 'Mexico']
parser = nltk.ChartParser(grammar, trace=1)
for tree in parser.parse(tokens):
    print(tree)
    tree.draw()
# parser = nltk.parse.chart.BottomUpChartParser(grammar, trace=1)
# for tree in parser.parse(tokens):
# print(tree)
# parser = nltk.parse.earleychart.EarleyChartParser(grammar, trace=1)
# for tree in parser.parse(tokens):
# print(tree) |
997,372 | a84c322bdce85217b9a7bf14d0c0d5c6e3290d6e | import sys
from . import logger
from . import env
from . import client
from . import db
from . import filters
from . import processors
from . import output
def main():
    """Entry point: fetch coupons from the API, filter them against the
    DB, run the processing pipeline and emit the result.

    Exits with status 1 on any error (logged) or on Ctrl-C.
    """
    logger.init_logger()
    try:
        env.init_env()
        client_obj = client.init_client()
        # Collect all pages of coupons from the remote API.
        coupons = client.handle_pagination(client_obj, client.get_coupons)
        db_connection = db.init_db()
        coupons = filters.filter_coupons(db_connection, coupons)
        # Processors run in order; registration steps need the DB handle.
        coupons = processors.process_coupons(coupons, [
            processors.parse_dates,
            processors.generate_final_link,
            processors.remove_i3_param,
            processors.make_campaigns_register(db_connection),
            processors.make_coupons_register(db_connection),
        ])
        output.output_coupons(coupons)
    except Exception as exception:
        logger.get_logger().error(exception)
        sys.exit(1)
    except KeyboardInterrupt:
        # output a line break after the ^C symbol in a terminal
        print('')
        sys.exit(1)
|
997,373 | ab15ceae545a87d1b49403ca26030ed1d6d0f3c7 | from django.conf.urls import url
from . import views
# URL namespace for reversing, e.g. reverse('service:login').
app_name = 'service'
urlpatterns = [
    # Home page
    url(r'^$', views.main_home, name='main_home'),
    # Login authentication
    url(r'^login/$', views.login, name='login'),
    url(r'^logout/$', views.logout, name='logout'),
    # Sets up the user profile and user models
    url(r'^usersetup/$', views.user_setup, name='user_setup'),
    # Reset Password Confirmation (uidb64 + token from the reset email)
    url(r'^reset_password_confirm/(?P<uidb64>[0-9A-Za-z]+)-(?P<token>.+)/$', views.reset_password_confirm,
        name='reset_password_confirm'),
    # Reset Password
    url(r'^reset_password/$', views.reset_password, name='reset_password'),
    url(r'^reset_password_confirm_put/$', views.reset_password_confirm, name='reset_password_confirm_put'),
    # Register new user
    url(r'^register/$', views.register, name='register'),
    # Register successful
    url(r'^register/check/$', views.register_check, name='register_check'),
    # User Home page
    url(r'^home/$', views.user_home, name='user_home'),
    # User Quiz pages
    # Quiz Name Verifier
    url(r'^quiz/init/$', views.user_quiz_init, name='user_quiz_init'),
    # Quiz Maker
    url(r'^quiz/maker/$', views.user_quiz_maker, name='user_quiz_maker'),
    # Quiz Answer
    url(r'^quiz/verifier/$', views.user_quiz_verifier, name='user_quiz_verifier'),
    # Quiz Create
    url(r'^quiz/create/$', views.user_quiz_create, name='user_quiz_create'),
    # Quiz Delete
    url(r'^quiz/delete/$', views.user_quiz_delete, name='user_quiz_delete'),
    # My quizzes
    url(r'^myquiz/home/$', views.user_myquiz_home, name='user_myquiz_home'),
    # Quiz editting page (quiz_id captured from the path)
    url(r'^myquiz/home/id/(?P<quiz_id>[-\w\d]+)/$', views.user_myquiz_info, name='user_myquiz_info'),
    # Quiz Arena home page
    url(r'^quizarena/home/$', views.user_quizarena_home, name='user_quizarena_home'),
    # Quiz attempt page
    url(r'^quizarena/home/solve/(?P<quiz_id>[-\w\d]+)/$', views.user_quizarena_solve, name='user_quizarena_solve'),
    # Quiz result
    url(r'^quizarena/home/result$', views.user_quizarena_result, name='user_quizarena_result'),
    # User Story home page
    url(r'^story/home/', views.user_story_home, name='user_story_home'),
]
|
997,374 | 71e03b8d614ff8d36968b66b3356c70fbf40ad83 | # -*- coding: utf-8 -*-
# Generated by Django 1.11.6 on 2017-11-19 03:30
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated migration: adds the Review model (one-to-one with
    # Session) and the Session.reviewed flag. Do not edit once applied.

    dependencies = [
        ('tutorial', '0004_auto_20171105_1914'),
    ]

    operations = [
        migrations.CreateModel(
            name='Review',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('content', models.CharField(max_length=100)),
                ('time_review', models.DateTimeField()),
                ('rating', models.FloatField(default=0)),
                ('rate_time', models.PositiveIntegerField(default=0)),
            ],
        ),
        migrations.AddField(
            model_name='session',
            name='reviewed',
            field=models.BooleanField(default=False),
        ),
        # Added after CreateModel so the Review table exists first.
        migrations.AddField(
            model_name='review',
            name='session',
            field=models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to='tutorial.Session'),
        ),
    ]
|
997,375 | 42b68c4bd598e299315b4bae575e16b0a1999c59 | # Values of team with names to pass to scraper function in correct format
teams = {
'Arizona Cardinals': 'Arizona',
'Atlanta Falcons': 'Atlanta',
'Buffalo Bills': 'Buffalo',
'Baltimore Ravens': 'Baltimore',
'Carolina Panthers': 'Carolina',
'Cincinnati Bengals': 'Cincinnati',
'Cleveland Browns': 'Cleveland',
'Chicago Bears': 'Chicago',
'Dallas Cowboys': 'Dallas',
'Denver Broncos': 'Denver',
'Detroit Lions': 'Detroit',
'Green Bay Packers': 'Green Bay',
'Houston Texans': 'Houston',
'Indianapolis Colts': 'Indianapolis',
'Kansas City Chiefs': 'Kansas City',
'Los Angeles Chargers' : 'Los Angeles Chargers',
'Los Angeles Rams': 'Los Angeles Rams',
'Jacksonville Jaguars': 'Jacksonville',
'Miami Dolphins': 'Miami',
'Minnesota Vikings': 'Minnesota',
'New England Patrios': 'New England',
'New Orleans Saints': 'New Orleans',
'New York Giants': 'New York Giants',
'New York Jets': 'New York Jets',
'Las Vegas Raiders': 'Las Vegas',
'Philadelphia Eagles': 'Philadelphia',
'San Fransisco 49ers': 'San Fransisco',
'Seattle Seahawks': 'Seattle',
'Pittsburgh Steelers': 'Pittsburgh',
'Tampa Bay Buccaneers': 'Tampa Bay',
'Tennessee Titans': 'Tennessee',
'Washington Football Team': 'Washington'
} |
997,376 | afb758aead5eae6638e5de5843ed58bf5df8da50 | import csv
from xml.etree.ElementTree import Element, SubElement, Comment, tostring
import datetime
import re
import sys
debug = True
if len(sys.argv) < 2 :
print "Usage: python <progName> <csvFile> <includeIfCC>"
sys.exit(2)
input_filename = sys.argv[1]
qbo_template = "qbo_template_CC.qbo"
acct_id = "9999"
is_CC = len(sys.argv)==3
if debug and is_CC:
print "credit card mode"
with open(input_filename) as csvfile:
#csvfile.readline() #absorb excess lines before column headers
reader = csv.DictReader(csvfile)
count = 0
end_date = ""
start_date = ""
trans_list = []
balance_amount = 0
#read CSV row by row and build up container of formatted transaction strings
for row in reader:
date_string = row['Date'][:10]
amount = row['Amount']
if not date_string or not amount:
if debug:
print "null values"
continue
c_name = row['Name'][:40]
c_name = c_name.replace("&","and")
c_name = c_name.replace("amp;","n")
memo = row['Memo'][:40]
memo = memo.replace("&","and")
memo = memo.replace("amp;","n")
if not c_name:
c_name = memo
if not memo:
memo = c_name
c_name = c_name[:30]
if not c_name and not memo:
c_name = "UNKNOWN"
memo = "UNKNOWN"
date_string = (datetime.datetime.strptime(date_string, '%m/%d/%Y')).strftime('%Y%m%d')
if count==0:
#trans_string = "<STMTTRNRS>"
end_date = date_string
start_date = date_string
negative = amount[0]=='-' #check if negative value
num_amount = re.sub("[^\d\.\-]","",amount) #rm nondigit vals
balance_amount+=float(num_amount)
fitid_amount = re.sub("[^\d]", "", amount)
fitid = "{}{:04d}{}".format(date_string,count,fitid_amount)
credit_debit = "CREDIT" if negative else "DEBIT"
#fitid+='M'
if is_CC:
if negative:
credit_debit = "DEBIT"
num_amount = num_amount[1:]
else:
credit_debit = "CREDIT"
num_amount = "-"+num_amount
indent = "\t\t\t\t\t"
trans_string = "<STMTTRN>"
trans_string = indent+trans_string + "\n"
credit_debit = "\t{}<TRNTYPE>{}</TRNTYPE>\n".format(indent,credit_debit)
posted_date_string = "\t{}<DTPOSTED>{}040000.000</DTPOSTED>\n".format(indent,date_string)
user_date_string = "\t{}<DTUSER>{}040000.000</DTUSER>\n".format(indent,date_string)
num_amount_string = "\t{}<TRNAMT>{}</TRNAMT>\n".format(indent,num_amount)
fitid = "\t{}<FITID>{}</FITID>\n".format(indent,fitid)
c_name = "\t{}<NAME>{}</NAME>\n".format(indent,c_name)
cc_acct = "\t{}<CCACCTTO>\n\t\t{}<ACCTID>{}</ACCTID>\n\t{}</CCACCTTO>\n".format(indent,indent,acct_id,indent)
memo = "\t{}<MEMO>{}</MEMO>\n".format(indent,memo)
end_trans = "{}</STMTTRN>\n".format(indent)
trans_string = trans_string + credit_debit + posted_date_string + user_date_string +num_amount_string + fitid + c_name + cc_acct + memo + end_trans
trans_list.append(trans_string)
count = count + 1
# balance_amount = -balance_amount;
output_filename = re.sub("\.csv",".qbo",input_filename)
output_filename = re.sub("\.CSV",".qbo",input_filename)
output = open(output_filename, 'w')
with open(qbo_template) as template:
for _ in range(33):
output.write(template.readline())
output.write("\n{}<ACCTID>{}</ACCTID>\n\t\t\t\t</CCACCTFROM>\n\t\t\t\t<BANKTRANLIST>\n\t\t\t\t\t<DTSTART>{}040000.000</DTSTART>\n\t\t\t\t\t<DTEND>{}040000.000</DTEND>\n".format(indent,acct_id,start_date,end_date))
for trans_string in trans_list:
output.write(trans_string)
output.write("\t\t\t\t</BANKTRANLIST>\n\t\t\t\t<LEDGERBAL>\n\t\t\t\t\t<BALAMT>{}</BALAMT>\n\t\t\t\t\t<DTASOF>{}</DTASOF>\n\t\t\t\t</LEDGERBAL>\n\t\t\t</CCSTMTRS>\n\t\t</CCSTMTTRNRS>\n\t</CREDITCARDMSGSRSV1>\n</OFX>\n".format(balance_amount,end_date))
if debug:
print "finished."
#between LEDGERBAL end tag and CCSTMTRS
# <AVAILBAL>
# <BALAMT>16010.2</BALAMT>
# <DTASOF>20180613142940.708</DTASOF>
# </AVAILBAL>
|
997,377 | e6593de411f0d33d02170c61f77a017be42d0f08 | import requests
import json
import os
import datetime
# One timestamp reused for both the start and last-updated fields.
timestamp = datetime.datetime.utcnow().isoformat()

# Payload for the NI TestMonitor "create results" endpoint; the current
# Jenkins job name is recorded as the status property.
jsonRequest = { "results":[
 {
 "fileIds":[],
 "keywords": [ "2018springhackathon" ],
 "programName":"InsightCM Build",
 "properties": {
 "currentStatus": os.environ['JOB_BASE_NAME']
 },
 "startTime": timestamp,
 "status": { "statusType":"RUNNING", "statusName":"Running" },
 "systemId":"icmr6build",
 "updatedAt": timestamp
 }]
}

# NOTE(review): credentials are hard-coded here -- consider moving them
# to the environment alongside JOB_BASE_NAME.
r = requests.post('http://hack-g.amer.corp.natinst.com/nitestmonitor/v1/results', json = jsonRequest, auth=("admin", "hack-g"))
# 201 Created: persist the new result id so later steps can update it.
if r.status_code == 201:
    file_path = "C:/ProgramData/Hackathon/resultId.txt"
    directory = os.path.dirname(file_path)
    # EAFP: try to open first, create the directory only if that fails.
    try:
        idFile = open(file_path, 'w')
    except IOError:
        if not os.path.exists(directory):
            os.makedirs(directory)
        idFile = open(file_path, 'w')
    idFile.write(r.json()['results'][0]['id'])
    idFile.close()
|
997,378 | 7c5f2cd7e345a14e968cb7287cd3c4b840c49d18 | import torch
import torch.nn as nn
import torch.nn.functional as F
from .utils import init_param, normalize, loss_fn
from config import cfg
class Block(nn.Module):
    """Pre-activation basic residual block: BN-ReLU-Conv3x3 twice, plus a
    1x1 projection shortcut when the shape changes."""
    expansion = 1

    def __init__(self, in_planes, planes, stride):
        super(Block, self).__init__()
        self.n1 = nn.BatchNorm2d(in_planes)
        self.conv1 = nn.Conv2d(in_planes, planes, kernel_size=3,
                               stride=stride, padding=1, bias=False)
        self.n2 = nn.BatchNorm2d(planes)
        self.conv2 = nn.Conv2d(planes, planes, kernel_size=3, stride=1,
                               padding=1, bias=False)
        # Projection shortcut only when spatial size or channels change.
        if stride != 1 or in_planes != self.expansion * planes:
            self.shortcut = nn.Conv2d(in_planes, self.expansion * planes,
                                      kernel_size=1, stride=stride,
                                      bias=False)

    def forward(self, x):
        pre = F.relu(self.n1(x))
        # The shortcut (if any) branches off the pre-activated tensor.
        identity = self.shortcut(pre) if hasattr(self, 'shortcut') else x
        y = self.conv1(pre)
        y = self.conv2(F.relu(self.n2(y)))
        return y + identity
class Bottleneck(nn.Module):
    """Pre-activation bottleneck residual block: 1x1 reduce, 3x3, then 1x1
    expand (x4), with a projection shortcut when the shape changes."""
    expansion = 4

    def __init__(self, in_planes, planes, stride):
        super(Bottleneck, self).__init__()
        self.n1 = nn.BatchNorm2d(in_planes)
        self.conv1 = nn.Conv2d(in_planes, planes, kernel_size=1, bias=False)
        self.n2 = nn.BatchNorm2d(planes)
        self.conv2 = nn.Conv2d(planes, planes, kernel_size=3, stride=stride,
                               padding=1, bias=False)
        self.n3 = nn.BatchNorm2d(planes)
        self.conv3 = nn.Conv2d(planes, self.expansion * planes,
                               kernel_size=1, bias=False)
        # Projection shortcut only when spatial size or channels change.
        if stride != 1 or in_planes != self.expansion * planes:
            self.shortcut = nn.Conv2d(in_planes, self.expansion * planes,
                                      kernel_size=1, stride=stride,
                                      bias=False)

    def forward(self, x):
        pre = F.relu(self.n1(x))
        # The shortcut (if any) branches off the pre-activated tensor.
        identity = self.shortcut(pre) if hasattr(self, 'shortcut') else x
        y = self.conv1(pre)
        y = self.conv2(F.relu(self.n2(y)))
        y = self.conv3(F.relu(self.n3(y)))
        return y + identity
class ResNet(nn.Module):
    """Pre-activation ResNet built from ``block`` (Block or Bottleneck).

    Four stages of residual blocks, then BN + ReLU, global average
    pooling and a linear classifier. Input is normalized with the dataset
    statistics from ``cfg`` unless input['norm'] is falsy.
    """

    def __init__(self, data_shape, hidden_size, block, num_blocks,
                 target_size, sneak=False):
        """
        data_shape: input shape, data_shape[0] is the channel count.
        hidden_size: per-stage channel counts (4 entries).
        block: residual block class (Block or Bottleneck).
        num_blocks: blocks per stage (4 entries).
        target_size: number of output classes.
        sneak: stored on the model. Bug fix: it now defaults to False so
            the factories below (resnet34/50/101/152), which do not pass
            it, no longer raise TypeError.
        """
        super(ResNet, self).__init__()
        self.in_planes = hidden_size[0]
        self.conv1 = nn.Conv2d(data_shape[0], hidden_size[0], kernel_size=3, stride=1, padding=1, bias=False)
        self.layer1 = self._make_layer(block, hidden_size[0], num_blocks[0], stride=1)
        self.layer2 = self._make_layer(block, hidden_size[1], num_blocks[1], stride=2)
        self.layer3 = self._make_layer(block, hidden_size[2], num_blocks[2], stride=2)
        self.layer4 = self._make_layer(block, hidden_size[3], num_blocks[3], stride=2)
        self.n4 = nn.BatchNorm2d(hidden_size[3] * block.expansion)
        self.linear = nn.Linear(hidden_size[3] * block.expansion, target_size)
        self.sneak = sneak

    def _make_layer(self, block, planes, num_blocks, stride):
        # First block of the stage may downsample; the rest use stride 1.
        strides = [stride] + [1] * (num_blocks - 1)
        layers = []
        for stride in strides:
            layers.append(block(self.in_planes, planes, stride))
            self.in_planes = planes * block.expansion
        return nn.Sequential(*layers)

    def forward(self, input):
        """input: dict with 'data' and optional 'norm' flag; returns a
        dict with 'target' logits and the computed 'loss'."""
        output = {}
        x = input['data']
        # Normalize unless the caller explicitly passes a falsy 'norm'.
        if 'norm' not in input or ('norm' in input and input['norm']):
            x = normalize(x, *cfg['stats'][cfg['data_name']])
        out = self.conv1(x)
        out = self.layer1(out)
        out = self.layer2(out)
        out = self.layer3(out)
        out = self.layer4(out)
        out = F.relu(self.n4(out))
        out = F.adaptive_avg_pool2d(out, 1)
        out = out.view(out.size(0), -1)
        out = self.linear(out)
        output['target'] = out
        output['loss'] = loss_fn(input, output)
        return output
def resnet18(sneak=False):
    """Build the cfg-configured 'resnet18' model with Block residual units.

    NOTE(review): num_blocks is [1, 1, 1, 2], not the canonical
    [2, 2, 2, 2] of ResNet-18 -- presumably intentional; confirm.
    """
    model = ResNet(
        cfg['data_shape'],
        cfg['resnet18']['hidden_size'],
        Block,
        [1, 1, 1, 2],
        cfg['target_size'],
        sneak,
    )
    model.apply(init_param)
    return model
def resnet34(sneak=False):
    """Build a ResNet-34 ([3, 4, 6, 3] Block stages) from the global cfg.

    BUGFIX: ResNet.__init__ requires a `sneak` argument that this factory
    never supplied, so resnet34() raised TypeError. It is now accepted
    (default False, matching resnet18) and forwarded.
    """
    data_shape = cfg['data_shape']
    target_size = cfg['target_size']
    hidden_size = cfg['resnet34']['hidden_size']
    model = ResNet(data_shape, hidden_size, Block, [3, 4, 6, 3], target_size, sneak)
    model.apply(init_param)
    return model
def resnet50(sneak=False):
    """Build a ResNet-50 ([3, 4, 6, 3] Bottleneck stages) from the global cfg.

    BUGFIX: ResNet.__init__ requires a `sneak` argument that was never
    supplied, so resnet50() raised TypeError. It is now accepted
    (default False) and forwarded.
    """
    data_shape = cfg['data_shape']
    target_size = cfg['target_size']
    hidden_size = cfg['resnet50']['hidden_size']
    model = ResNet(data_shape, hidden_size, Bottleneck, [3, 4, 6, 3], target_size, sneak)
    model.apply(init_param)
    return model
def resnet101(sneak=False):
    """Build a ResNet-101 ([3, 4, 23, 3] Bottleneck stages) from the global cfg.

    BUGFIX: ResNet.__init__ requires a `sneak` argument that was never
    supplied, so resnet101() raised TypeError. It is now accepted
    (default False) and forwarded.
    """
    data_shape = cfg['data_shape']
    target_size = cfg['target_size']
    hidden_size = cfg['resnet101']['hidden_size']
    model = ResNet(data_shape, hidden_size, Bottleneck, [3, 4, 23, 3], target_size, sneak)
    model.apply(init_param)
    return model
def resnet152(sneak=False):
    """Build a ResNet-152 ([3, 8, 36, 3] Bottleneck stages) from the global cfg.

    BUGFIX: ResNet.__init__ requires a `sneak` argument that was never
    supplied, so resnet152() raised TypeError. It is now accepted
    (default False) and forwarded.
    """
    data_shape = cfg['data_shape']
    target_size = cfg['target_size']
    hidden_size = cfg['resnet152']['hidden_size']
    model = ResNet(data_shape, hidden_size, Bottleneck, [3, 8, 36, 3], target_size, sneak)
    model.apply(init_param)
    return model
|
997,379 | 3effa5b90bbcfae4236261698086ce80a160918c | import torch.nn as nn
import torch.nn.functional as F
class MultiLayerPerceptron(nn.Module):
    """Single-hidden-layer MLP with sigmoid activations for binary output.

    Args:
        input_size: number of input features.
        hidden_size: hidden-layer width; defaults to 64, the value the
            original hard-coded, so existing callers are unaffected.
    """

    def __init__(self, input_size, hidden_size=64):
        super(MultiLayerPerceptron, self).__init__()
        self.fc1 = nn.Linear(input_size, hidden_size)
        self.fc2 = nn.Linear(hidden_size, 1)

    def forward(self, features):
        """Return a (batch, 1) tensor of probabilities in (0, 1)."""
        # Tensor.sigmoid() replaces the deprecated F.sigmoid.
        x = self.fc1(features.float()).sigmoid()
        return self.fc2(x).sigmoid()
|
997,380 | 526771b84dcb18a5a96aa32e072f0e9778d80e7f | # datetime module stores information about dates and times in module classes
# it differs from time module which works with floats for representing seconds since epoch
import datetime
import time
import textwrap
# dates are represented with datetime.date class
# creating date instance:
dt=datetime.date(2008,12,15) # creating using the date class constructor with arguments year,month,day
dt=datetime.date.today() # creating with the class method today() in the date class
dt=datetime.date.fromtimestamp(time.time()) # creating from the seconds since the epoch
dt=datetime.date.fromisoformat('2020-03-23')# creating from ISO formatted string 'YYYY-MM-DD'
print(dt)
# now that we have a date as a instance of date class,
# we can invoke several instance methods on the instance
print(dt.timetuple()) # converting datetime.date instance into struct_time instance
print(
textwrap.dedent(
'''
Min date: {}
Max date: {}
Res date: {}
'''.format(dt.min, dt.max, dt.resolution)
)
)
print(dt.replace(year=2018,month=4)) # replacing year or month or day in date instance making new object - date instances are imutable!
# formatting output
print(dt.isoformat())
print(dt.ctime())
print(dt.strftime('%d-%m-%Y'))
print(dt.toordinal()) # days passed since epoch |
997,381 | c7d79e00f4211554a0055751c3d8fbfb519f1549 | import numpy as np
import plotly.offline as pyo
import plotly.graph_objs as go
# Reproducible random point cloud: 50 (x, y) pairs in [0, 100].
np.random.seed(100)
x = np.random.randint(0, 101, 50)
y = np.random.randint(0, 101, 50)
obj = go.Scatter(x=x, y=y, mode='markers', marker=dict(
    size=12,
    color='rgb(51,204,153)',
    symbol='circle'), line=dict(width=2))
data = [obj]
layout = go.Layout(title='x vs y scatter', xaxis=dict(
    title='x-axis'), yaxis=dict(title='y-axis'), hovermode='closest')
fig = go.Figure(data=data, layout=layout)
# BUGFIX: plotly.offline.plot's second positional parameter is show_link,
# not the output filename -- pass filename as a keyword so the HTML is
# actually written to scatter.html instead of the default temp-plot.html.
pyo.plot(fig, filename='scatter.html')
|
997,382 | 71ebf6c892ec12e44758e84bf47ae11519eaed5a | # -*- coding: utf-8 -*-
import random
import math
inCircle = 0
distance = 0.0
# Sample 1000 points on a 0..10 grid (step 0.01); count those inside the
# radius-10 quarter circle and accumulate their distance from the origin.
for _ in range(1000):
    x = 1.0 * random.randint(0, 1000) / 100
    y = 1.0 * random.randint(0, 1000) / 100
    r_squared = x * x + y * y
    if r_squared <= 100:
        inCircle += 1
        distance += math.sqrt(r_squared)
# Mean distance of the accepted points (expected ~6.67 for a quarter disc).
print(distance / inCircle)
|
997,383 | 3d926ccce8ec64060013db3ff5e981a9eb864066 | from collections import Counter
def convert_lists(l):
    """Map each non-empty entry's prefix (text before the first '_') to 0.

    Duplicate prefixes collapse to a single key; an empty input yields {}.
    """
    return {entry.split("_")[0]: 0 for entry in l if entry != ""}
# Summarise, per spo-category toxin row, how many associated AMR / virulence /
# plasmid genes were observed on the same contig, and how often.
out = open("on_contig_summary_new.csv","w")
out.write("ID, Count, AMR, Num, Vir, Num , Plasmid, Num, Avg_Length, Sd_Length\n")
with open("../toxins_categories_new.csv") as f:
    for line in f:
        if line.startswith("Toxin"):  # skip the header row
            continue
        toks = line.strip().split(",")
        cat = toks[4]
        if not "spo" in cat:  # only sporulation-related categories
            continue
        # Gene-name -> count dicts, keyed by the prefix before '_'.
        associated_vir_genes = convert_lists(toks[6].split("/"))
        associated_amr_genes = convert_lists(toks[7].split("/"))
        associated_plasmid_genes = convert_lists(toks[8].split("/"))
        # toks[5]: whitespace-separated "(amr;vir;plasmid:count)" entries.
        on_contig = toks[5].split()
        for v in on_contig:
            v = v.replace("(","")
            v = v.replace(")","")
            v = v.split(";")
            # Split "plasmid:count" so v becomes [amr, vir, plasmid, count].
            v = v[0:2] + v[2].split(":")
            for i in range(0,3):
                # Normalise gene names to their prefix before '_', '-', '/'.
                v[i] = v[i].split("_")[0]
                # NOTE(review): '-' denotes "no gene"; oddly the '-' split is
                # applied only when the token is NOT exactly '-' -- confirm.
                if v[i] != "-":
                    v[i] = v[i].split("-")[0]
                v[i] = v[i].split("/")[0]
            # Credit the on-contig count to every matching associated gene.
            for gene in associated_amr_genes:
                if gene.startswith(v[0]):
                    associated_amr_genes[gene] += int(v[3])
            for gene in associated_vir_genes:
                if gene.startswith(v[1]):
                    associated_vir_genes[gene] += int(v[3])
            for gene in associated_plasmid_genes:
                if gene.startswith(v[2]):
                    associated_plasmid_genes[gene] += int(v[3])
        out.write(toks[0] + "," + toks[3] + ",")
        total = float(toks[3])  # NOTE(review): computed but never used
        out.write( str(len(associated_amr_genes)) + "," + str(sum(associated_amr_genes.values())) + ",")
        out.write(str(len(associated_vir_genes)) + "," + str(sum(associated_vir_genes.values())) + ",")
        out.write(str(len(associated_plasmid_genes)) + "," + str(sum(associated_plasmid_genes.values())) + "," + toks[-2] + "," + toks[-1] + "\n")
out.close()
|
997,384 | eafe85cf9037c5ed1716d17eed2e4dd9ac9b3ef0 | '''
Created on 28 Feb 2014
@author: siva
'''
import sys
if __name__ == '__main__':
    # Usage: script <tom_relations_file> <tom_types_file> <freebase_schema_files...>
    # Counts how many of Tom's relations/types are covered by the Freebase schema.
    tom_relation_file = sys.argv[1]
    tom_types_file = sys.argv[2]
    freebase_schema_files = sys.argv[3:]
    # schema: master relation <-> inverse relation (both directions stored).
    schema = {}
    types = set()
    for schema_file in freebase_schema_files:
        for line in open(schema_file):
            line = line.rstrip()
            if line == "":
                continue
            if line[0] == "\t":
                # Indented row: "rel \t arg \t reltype \t inverse_rel".
                rel, arg, reltype, inv_rel = line.strip().split("\t")
                rel = rel.strip()
                inv_rel = inv_rel.strip()
                if reltype == "master":
                    schema[rel] = inv_rel
                    schema[inv_rel] = rel
                types.add(arg)
            else:
                # Unindented row starts a new type block; first token is the type.
                types.add(line.split()[0])
    #print schema
    relations_covered = set()
    relation_count = 0
    for line in open(tom_relation_file):
        # e.g. http://rdf.freebase.com/ns/tv.tv_actor.starring_roles..tv.regular_tv_appearance.series
        relation = line.split("/")[-1]
        # '..' joins chained (mediated) relations; count each leg separately.
        relations = relation.split("..")
        for relation in relations:
            relation = relation.strip()
            #print relation
            if relation not in relations_covered:
                # NOTE(review): dict.has_key is Python 2 only -- this whole
                # script is Python 2 (see the print statements below).
                if schema.has_key(relation):
                    relation_count += 1
                    # adding both relation and its inverse
                    relations_covered.add(relation)
                    relations_covered.add(schema[relation])
    types_covered = set()
    type_count = 0
    for line in open(tom_types_file):
        # Type URI; keep only the last path component.
        type = line.split("/")[-1].strip()
        if type not in types_covered:
            if type in types:
                type_count += 1
            types_covered.add(type)
    print "Total relations in Tom", relation_count
    print "Total types in Tom", type_count
997,385 | 8a92823913aae3eb4b816ac362c6215157e20551 | #!usr/bin/env python
# -*- coding:utf-8 -*-
import numpy as np
import scipy
def previous_endpoint(endpoint, linkvec, mainvec):
    """Step back from `endpoint` along `mainvec` by the length of `linkvec`.

    Returns the position one link before `endpoint`, assuming `mainvec`
    is the (unit) direction of the link.
    """
    return endpoint - np.linalg.norm(linkvec) * mainvec
997,386 | c34034a0e230cd20c01bba9ede45b415f2f060a0 | VOCAB_SIZE = 10000
# Test data parameters
DATA_SIZE = 6949708
TEST_DATA_SIZE = int(6949708)
# File paths (suffixed with the data size they were generated from)
DATA0_PATH = '../../model_data/error_origins.'+str(DATA_SIZE)
DATA1_PATH = '../../model_data/data1.'+str(DATA_SIZE)
DATA2_PATH = '../../model_data/data2.'+str(DATA_SIZE)
DATA3_PATH = '../../model_data/nears.'+str(DATA_SIZE)
TARGET_PATH = '../../model_data/target.'+str(DATA_SIZE)
VOCAB_PATH = '../../model_data/vocab.10000'
COUNT_SAVE_PATH = '../count.dict'
TEST_RESULT_PATH = '../results/test_results.txt'
STEP_PRINT = 1000 # print progress every STEP_PRINT steps
# Confusion-matrix counters: true/false positives/negatives, plus totals.
TP = FP = TN = FN = P = N = 0
TPR = TPW =0
997,387 | 8ba9259eeb420e3b71a4d4680093feaa8f88cef9 | #!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""
Created on Tue Apr 18 22:05:02 2017
@author: levy
"""
import os,time
from wifi import Cell, Scheme
from selenium import webdriver
interface='wlp110s0'
#
#net_state=os.system('ping 58.211.162.30 -c 2')
#if net_state:
# print 'false'
#else:
# print 'ok'
#pip=os.popen("route | grep default | grep %s | awk '{print $2}'"%interface,"r")
#print
#pip.close()
#'nmcli device connect wlp110s0'
#'nmcli device disconnect wlp110s0'
def net_check_reconnect():
    """Probe connectivity; if down, bounce the wifi interface, restore the
    default route, and warm up the captive portal with a headless browser.

    Returns True when the network is already reachable, otherwise None.
    """
    # Cheap reachability probe FIRST. The original created a PhantomJS
    # driver before pinging and then returned early, leaking a browser
    # process on every successful check.
    net_state = os.system('ping 58.211.162.31 -c 2')
    if not net_state:
        return True
    driver = None
    try:
        # Bounce the interface and re-establish the default gateway.
        os.system('nmcli device disconnect %s' % interface)
        time.sleep(2)
        os.system('nmcli device connect %s' % interface)
        time.sleep(2)
        pip = os.popen("route | grep default | grep %s | awk '{print $2}'" % interface, "r")
        gw = pip.read()
        pip.close()
        os.system("route add default gw %s" % gw)
        # Visit a page once, e.g. to satisfy a captive portal.
        driver = webdriver.PhantomJS()
        driver.get('http://flights.ctrip.com/actualtime/arrive-weh/')
        driver.close()
    except Exception as e:
        print('Unable connect to server:', e)
    finally:
        # Always terminate the browser process, even on failure.
        if driver is not None:
            driver.quit()
if __name__=='__main__':
    # Poll connectivity forever, attempting a reconnect whenever it drops.
    while True:
        net_check_reconnect()
        time.sleep(30)  # seconds between checks
997,388 | 8cf35d68910409106415e87f6193bb2e0ae58f6a | Receive_Data()
# NOTE(review): Verify_Data, test and Report_Errors are not defined in this
# file -- presumably provided by an import or an enclosing context; confirm.
Verify_Data()
a=test()
Report_Errors()
|
997,389 | d6c2a45b04364872b78c54c7d87e6e7fcad72885 | import random
import pygame
import os
CLOUD = pygame.image.load(os.path.join("../img/Other/Cloud.png"))
class Cloud:
    """A decorative cloud that drifts right-to-left across the screen."""

    def __init__(self):
        # Spawn 800-1000px beyond the right edge of the 1280px-wide screen
        # so the cloud scrolls into view, at a random height.
        self.X = 1280 + random.randint(800, 1000)
        self.Y = random.randint(30, 80)
        self.image = CLOUD

    def update(self, speed):
        """Scroll the cloud left by the current world speed."""
        self.X = self.X - speed

    def draw(self, screen):
        """Blit the cloud sprite at its current position."""
        screen.blit(self.image, (self.X, self.Y))
|
997,390 | 83c6772f7b8efbf42a9989f7b2cdd5d08f51449a | # -*- coding: utf-8 -*-
import unittest
import json
import datetime
from speakers import timespeaker
from speakers.timezone import JST
class TestTimeSpeaker(unittest.TestCase):
    """Unit tests for TimeSpeaker (speakers.timespeaker).

    Python 2 style throughout (print statements, json.loads encoding arg).
    DEBUG_TIME pins the speaker's notion of "now" for deterministic tests.
    """

    def test_create(self):
        """create() returns a TimeSpeaker instance."""
        speaker = timespeaker.create()
        self.assertIsInstance(speaker,timespeaker.TimeSpeaker)

    def test_single_condition(self):
        """A setting with one condition yields its output when the current
        (debug) time falls inside the from/to window."""
        timespeaker.TimeSpeaker.DEBUG_TIME = datetime.time(6,0,0)
        speaker = timespeaker.create()
        setting_json="""{
            "speaker":"time",
            "conditions":[
                {
                    "from":"06:00",
                    "to":"11:00",
                    "outputs":[
                        {"serif":"おはようございます"}
                    ]
                }
            ]
        }"""
        result = speaker.speak({},json.loads(setting_json,"utf-8"),[])
        self.assertEqual(len(result),1)
        self.assertTrue("serif" in result[0])
        self.assertEqual(u"おはようございます",result[0]["serif"])
        print json.dumps(result,ensure_ascii=False)

    def test_multi_condition(self):
        """With several conditions configured, the one whose window contains
        the current time is selected (17:29 -> the 11:00-17:30 window)."""
        timespeaker.TimeSpeaker.DEBUG_TIME = datetime.time(17,29,0)
        speaker = timespeaker.create()
        setting_json="""{
            "speaker":"time",
            "conditions":[
                {
                    "from":"06:00",
                    "to":"11:00",
                    "outputs":[
                        {"serif":"おはようございます"}
                    ]
                },
                {
                    "from":"11:00",
                    "to":"17:30",
                    "outputs":[
                        {"serif":"お疲れ様です"}
                    ]
                },
                {
                    "from":"17:30",
                    "to":"22:00",
                    "outputs":[
                        {"serif":"遅くまでお疲れ様です"}
                    ]
                }
            ]
        }"""
        result = speaker.speak({},json.loads(setting_json,"utf-8"),[])
        self.assertEqual(len(result),1)
        self.assertTrue("serif" in result[0])
        self.assertEqual(u"お疲れ様です",result[0]["serif"])
        print json.dumps(result,ensure_ascii=False)

    def test_match_else(self):
        """When no timed window matches, the condition without from/to acts
        as the default and its output is returned."""
        timespeaker.TimeSpeaker.DEBUG_TIME = datetime.time(0,0,0)
        speaker = timespeaker.create()
        setting_json="""{
            "speaker":"time",
            "conditions":[
                {
                    "from":"06:00",
                    "to":"11:00",
                    "outputs":[
                        {"serif":"おはようございます"}
                    ]
                },
                {
                    "from":"11:00",
                    "to":"17:30",
                    "outputs":[
                        {"serif":"お疲れ様です"}
                    ]
                },
                {
                    "from":"17:30",
                    "to":"22:00",
                    "outputs":[
                        {"serif":"遅くまでお疲れ様です"}
                    ]
                },
                {
                    "outputs":[
                        {"serif":"こんな時間にどうしたんですか?"}
                    ]
                }
            ]
        }"""
        result = speaker.speak({},json.loads(setting_json,"utf-8"),[])
        self.assertEqual(len(result),1)
        self.assertTrue("serif" in result[0])
        self.assertEqual(u"こんな時間にどうしたんですか?",result[0]["serif"])
        print json.dumps(result,ensure_ascii=False)

    def test_not_match(self):
        """When no condition matches and there is no default, speak()
        returns an empty result."""
        timespeaker.TimeSpeaker.DEBUG_TIME = datetime.time(0,0,0)
        speaker = timespeaker.create()
        setting_json="""{
            "speaker":"time",
            "conditions":[
                {
                    "from":"06:00",
                    "to":"11:00",
                    "outputs":[
                        {"serif":"おはようございます"}
                    ]
                }
            ]
        }"""
        result = speaker.speak({},json.loads(setting_json,"utf-8"),[])
        self.assertEqual(len(result),0)
        print json.dumps(result,ensure_ascii=False)
|
997,391 | 437bcc07f163cef50052c06b145e58eaa9cb443b | from django.contrib import admin
# Register your models here.
from .models import Cliente, Comisionista, Costo, Movimiento, Regreso, Empresa
# Cliente is registered below with a custom ModelAdmin (ClienteAdmin),
# so the plain registration stays commented out.
#admin.site.register(Cliente)
admin.site.register(Comisionista)
admin.site.register(Costo)
admin.site.register(Movimiento)
admin.site.register(Regreso)
admin.site.register(Empresa)
class ClienteAdmin (admin.ModelAdmin):
    """Admin customisation for Cliente: list every address/contact column,
    with filtering and search on id / name / RFC / phone."""
    list_display = ('id', 'cliente_nombre', 'cliente_calle',
        'cliente_numext', 'cliente_numint', 'cliente_colonia',
        'cliente_del_mun', 'cliente_cp', 'cliente_telefono',
        'cliente_rfc', 'cliente_alta', 'cliente_umodificacion')
    list_filter = ('id', 'cliente_nombre', 'cliente_rfc', 'cliente_telefono')
    search_fields = ('id', 'cliente_rfc', 'cliente_telefono')
admin.site.register(Cliente, ClienteAdmin)
997,392 | d4f7d0305b13c8cd51c6aef83a90379ecedfc7b9 | # begin_generated_IBM_copyright_prolog
#
# This is an automatically generated copyright prolog.
# After initializing, DO NOT MODIFY OR MOVE
# ================================================================
#
# (C) Copyright IBM Corp. 2010,2011
# Eclipse Public License (EPL)
#
# ================================================================
#
# end_generated_IBM_copyright_prolog
from ibm.teal.analyzer.gear.common import GCFG_RULES
from ibm.teal.analyzer.gear.ruleset import GearRuleset
from ibm.teal.registry import get_service, TEAL_DATA_DIR
from ibm.teal.teal_error import ConfigurationError
import os
def engine_factory(name, config_dict, event_input=False, alert_input=False, number=0, send_alert=None):
    ''' Create the appropriate GEAR engine.

    Currently only the executable ruleset is supported. Raises
    ConfigurationError when the analyzer configuration names no rules file.
    '''
    rules_name = config_dict.get(GCFG_RULES, None)
    if rules_name is None:
        raise ConfigurationError('Configuration failure for GEAR based event analyzer {0}: rules not specified'.format(name))
    # Rules files are resolved relative to the TEAL data directory.
    rules_path = os.path.join(get_service(TEAL_DATA_DIR), rules_name)
    return GearRuleset(rules_path, config_dict, event_input=event_input, alert_input=alert_input, number=number, name=name, send_alert=send_alert)
|
997,393 | 17f2462dc0e3519e77ab2b58a9bc714204f4f0d9 | import ply.lex as lex
_hextoint = False
tokens = (
"COLON",
"EQUAL",
"SEMICOLON",
"COMMA",
"LBRACE",
"RBRACE",
"LPAREN",
"RPAREN",
"LSQUARE",
"RSQUARE",
"COMMENT",
"INTEGER",
"INTEGER64",
"BOOLEAN",
"HEX",
"HEX64",
"FLOAT",
"STRING",
"NAME"
)
def t_BOOLEAN(t):
    r"([Tt][Rr][Uu][Ee])|([Ff][Aa][Ll][Ss][Ee])"
    # Case-insensitive true/false literal -> Python bool.
    # (The docstring above is the ply token regex and must stay unchanged.)
    t.value = t.value.lower() != 'false'
    return t
def t_HEX64(t):
    r"0[Xx][0-9A-Fa-f]+(L(L)?)?"
    # 64-bit hex literal with an optional 'L'/'LL' suffix.
    # NOTE(review): the suffix is optional, so this rule (defined first)
    # also matches plain hex -- t_HEX below may be unreachable; confirm.
    if _hextoint:
        # BUGFIX: strip the suffix before conversion; int('0x10LL', 0)
        # raises ValueError. 'L' is not a hex digit, so rstrip is safe.
        t.value = int(t.value.rstrip('L'), 0)
    return t
def t_HEX(t):
    r"0[Xx][0-9A-Fa-f]+"
    # Plain hex literal; converted to int only when hextoint() is enabled.
    # BUGFIX: removed leftover debug print()s -- one of them ("not to int")
    # fired unconditionally on every hex token.
    if _hextoint:
        t.value = int(t.value, 0)
    return t
def t_FLOAT(t):
    r"([-+]?([0-9]*)?\.[0-9]*([eE][-+]?[0-9]+)?)|([-+]([0-9]+)(\.[0-9]*)?[eE][-+]?[0-9]+)" # noqa: E501
    # Float literal (the docstring is the ply token regex); convert in place.
    t.value = float(t.value)
    return t
def t_INTEGER64(t):
    r"[-+]?[0-9]+L(L)?"
    # 64-bit integer literal: strip the 'L'/'LL' suffix and convert.
    # BUGFIX: the old code sliced off only ONE character, so '123LL'
    # became int('123L') and raised ValueError.
    t.value = int(t.value.rstrip('L'))
    return t
def t_INTEGER(t):
    r"[-+]?[0-9]+"
    # Plain (optionally signed) decimal integer literal.
    t.value = int(t.value)
    return t
def t_STRING(t):
    r"\"([^\"\\]|\\.)*\""
    # Strip the surrounding double quotes.
    # NOTE(review): escape sequences inside the string are kept verbatim,
    # not unescaped -- confirm that is the intended behaviour.
    t.value = t.value[1:-1]
    return t
# Simple single-pattern tokens (each string is the ply regex for the token).
t_NAME = r"[A-Za-z\*][-A-Za-z0-9_\*]*"  # identifier; '*' permitted (wildcards)
t_LBRACE = r"\{"
t_RBRACE = r"\}"
t_LPAREN = r"\("
t_RPAREN = r"\)"
t_LSQUARE = r"\["
t_RSQUARE = r"\]"
t_COMMA = r","
t_COLON = r":"
t_EQUAL = r"="
t_SEMICOLON = r";"
t_ignore = ' \t'  # characters skipped entirely between tokens
def t_COMMENT(t):
    r'\#.*|\/\/.*'
    # '#' and '//' line comments are discarded (no return -> token dropped).
    pass
def t_newline(t):
    r'\n+'
    # Keep the lexer's line counter accurate for error reporting.
    t.lexer.lineno += len(t.value)
def t_error(t):
    # ply error hook: fail hard on any input no rule recognises.
    raise TypeError("Unknown text '%s'" % (t.value,))
def hextoint(b=True):
    """Enable/disable module-wide conversion of HEX/HEX64 token values to int."""
    global _hextoint
    _hextoint = b
lex.lex(debug=0)
|
997,394 | d8242b26faf0978b622d6c8e9ce49cd05c3a2f0a | from django import forms
from .models import Picture
class UserForm(forms.ModelForm):
    """ModelForm for creating/editing a Picture."""
    class Meta:
        model = Picture
        # NOTE(review): 'Description' is capitalised -- it must match the
        # Picture model field name exactly; confirm against the model.
        fields = ['title', 'cover', 'Description']
997,395 | db4cd75d0089b6d0a3753197c606d3f0bf7c4f30 | from .aux_input import *
from .common import *
from .ctypes_ import *
from .device import *
from .device_data import *
from .door import *
from .enums import *
from .event import *
from .exceptions import *
from .main import *
from .param import *
from .reader import *
from .relay import *
from .sdk import *
from .tables import *
|
997,396 | 643227247d3990671e338715ba1c93967529158d | import cv2
import numpy as np
img = cv2.imread("imori_noise.jpg")
img = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
# 5x5 Laplacian-of-Gaussian kernel; the coefficients sum to 0 so flat
# regions map to zero and edges/blobs are emphasised.
kernel = np.array([[0., 0., 1., 0., 0.], [0., 1., 2., 1., 0.],
                   [1., 2., -16., 2., 1.], [0., 1., 2., 1., 0.],
                   [0., 0., 1., 0., 0.]])
log = cv2.filter2D(img, -1, kernel)  # -1 keeps the source bit depth
cv2.imshow("LoG", log)
cv2.waitKey(0)  # block until a key is pressed
cv2.destroyAllWindows()
|
997,397 | a5ecdd7ea2524f019fae09e68c82b2da283f1301 | from Pages.home_page import HomePage
from TestCase.base_test import BaseTest
class TestSetCategory(BaseTest):
    """UI test: drive the category-selection flow on the home page."""
    def test_intro(self):
        # Delegate the interaction to the HomePage page object.
        homepage = HomePage(self.driver)
        homepage.test_set_category()
# Run with: python3 -m unittest TestCase.test_03_set_category
|
997,398 | d83d99ce026febcfa4994578837c4c093783f9b7 | from .datasets import Datasets
from .reshape import Reshape
from .reshape_config import ReshapeConfig
from .spark_io import SparkIO, SparkIOContext
from .spark_io_config import SparkIOConfig
from .table_name import TableName
# Public API of this package (what `from package import *` exposes).
__all__ = [
    "Datasets",
    "Reshape",
    "ReshapeConfig",
    "SparkIO",
    "SparkIOConfig",
    "SparkIOContext",
    "TableName",
]
|
997,399 | 232ec02476da3a11cc8f5ab590ff0c099765f320 | def nm_suppression(boxes,scores,overlap=0.50,top_k=200):
cnt=0
keep = scores.new(scores.size(0)).zero_().long()
x1 = boxes[:,0]
y1 = boxes[:,1]
x2 = boxes[:,2]
y2 = boxes[:,3]
area = torch.mul(x2 - x1,y2 - y1)
tmp_x1 = boxes.new()
tmp_y1 = boxes.new()
tmp_x2 = boxes.new()
tmp_y2 = boxes.new()
tmp_w = boxes.new()
tmp_h = boxes.new()
v,idx = scores.sort(0)
#get top200
idx = idx[-top_k:]
while idx.numel() > 0: #0で取り出さなかった部分
i = idx[-1]
keep[cnt]=i
cnt += 1
if idx.size(0) == 1:
break
#取り出したぶんを削除
idx = idx[:-1]
torch.index_select(x1, 0, idx, out=tmp_x1)
torch.index_select(y1, 0, idx, out=tmp_y1)
torch.index_select(x2, 0, idx, out=tmp_x2)
torch.index_select(y2, 0, idx, out=tmp_y2)
#clamp = 上界、下界を示す
tmp_x1 = torch.clamp(tmp_x1, min=x1[i])
tmp_y1 = torch.clamp(tmp_y1, min=y1[i])
tmp_x2 = torch.clamp(tmp_x2, max=x2[i])
tmp_y2 = torch.clamp(tmp_y2, max=y2[i])
tmp_w.resize_as_(tmp_x2)
tmp_h.resize_as_(tmp_y2)
tmp_w = torch.clamp(tmp_w,min=0.0)
tmp_h = torch.clamp(tmp_h,min=0.0)
inter = tmp_h * tmp_w#+
rem_areas = torch.index_select(area,0,idx)
union = (rem_areas - inter) + area[i] #*
IoU = inter/union
idx = idx[IoU.le(overlap)]
return keep,cnt
class Detect(Function):
    """Turn SSD network outputs into final detections: softmax the class
    scores, decode box offsets against the default boxes, then run
    per-class non-maximum suppression.

    NOTE(review): subclassing autograd.Function with __init__ and an
    instance forward() is a legacy torch pattern; kept for compatibility
    with existing callers.
    """

    def __init__(self, conf_thresh=0.01, top_k=200, nms_thresh=0.45):
        self.softmax = nn.Softmax(dim=-1)
        self.conf_thresh = conf_thresh  # drop detections below this score
        self.top_k = top_k              # max detections kept per class
        self.nms_thresh = nms_thresh    # IoU threshold for NMS

    def forward(self, loc_data, conf_data, dbox_list):
        """loc_data: (batch, num_dbox, 4) offsets; conf_data: (batch,
        num_dbox, num_classes) raw scores; dbox_list: default boxes.

        Returns (batch, num_classes, top_k, 5); last dim is
        [score, xmin, ymin, xmax, ymax].
        """
        # BUGFIX: the parameter was named `conf_Data` while the body used
        # `conf_data`, raising NameError on every call.
        num_batch = loc_data.size(0)
        num_dbox = loc_data.size(1)
        num_classes = conf_data.size(2)
        conf_data = self.softmax(conf_data)
        output = torch.zeros(num_batch, num_classes, self.top_k, 5)
        conf_preds = conf_data.transpose(2, 1)
        # Process each image in the mini-batch independently.
        for i in range(num_batch):
            # Decode offsets into [xmin, ymin, xmax, ymax] boxes.
            decoded_boxes = decode(loc_data[i], dbox_list)
            conf_scores = conf_preds[i].clone()
            # BUGFIX: the loop bound was `range(l, ...)` with an undefined
            # name `l` (lowercase L); class 0 is background, so start at 1.
            for cl in range(1, num_classes):
                c_mask = conf_scores[cl].gt(self.conf_thresh)
                scores = conf_scores[cl][c_mask]
                if scores.nelement() == 0:
                    continue
                l_mask = c_mask.unsqueeze(1).expand_as(decoded_boxes)
                boxes = decoded_boxes[l_mask].view(-1, 4)
                ids, count = nm_suppression(
                    boxes, scores, self.nms_thresh, self.top_k)
                output[i, cl, :count] = torch.cat(
                    (scores[ids[:count]].unsqueeze(1), boxes[ids[:count]]), 1)
        return output
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.