index int64 0 1,000k | blob_id stringlengths 40 40 | code stringlengths 7 10.4M |
|---|---|---|
15,500 | d7627fbc6f0c4cd1faac9356d2fbb36def84b3d1 | from Id import *
import random
from helper import Helper as hlp
class Vaisseau():
    """A player-owned ship that can steer toward a target object.

    The target (``cible``) is any object exposing ``x``, ``y`` and ``taille``.
    """

    def __init__(self, nom, x, y):
        # Unique identifier handed out by the Id sequence.
        self.id = Id.prochainid()
        self.proprietaire = nom
        self.x = x
        self.y = y
        self.cargo = 0
        self.energie = 100
        self.vitesse = 2
        self.cible = None

    def avancer(self):
        """Advance one step along the straight line toward the target."""
        if not self.cible:
            print("PAS DE CIBLE")
            return
        tx = self.cible.x
        ty = self.cible.y
        heading = hlp.calcAngle(self.x, self.y, tx, ty)
        self.x, self.y = hlp.getAngledPoint(heading, self.vitesse, self.x, self.y)
        # Within one step of the target: consider it reached and clear it.
        if hlp.calcDistance(self.x, self.y, tx, ty) <= self.vitesse:
            self.cible = None

    def avancer1(self):
        """Advance one axis-aligned (Manhattan) step toward the target."""
        if not self.cible:
            return
        tx = self.cible.x
        ty = self.cible.y
        if self.x != tx:
            self.x += self.vitesse if self.x < tx else -self.vitesse
        if self.y != ty:
            self.y += self.vitesse if self.y < ty else -self.vitesse
        # Arrived when within twice the target's size on both axes.
        if abs(self.x - tx) < (2 * self.cible.taille) and abs(self.y - ty) < (2 * self.cible.taille):
            self.cible = None
# Read one line and print the smaller of its length and 2*count('a') - 1.
line = input()
a_total = line.count("a")
print(min(len(line), 2 * a_total - 1))
15,502 | 67e788e44ace82ec489d85c707ae1d6bd95c14f6 | from helpers import render_frames
from graphs.TemporalDelay import TemporalDelay as g
from falcor import *
# Render-test script: load the TemporalDelay graph into the scripting
# context and capture frames at different 'delay' settings.
# NOTE(review): 'm' is presumably the Mogwai scripting object exported by
# 'from falcor import *' -- confirm against the test harness.
m.addGraph(g)
m.loadScene('Arcade/Arcade.fscene')
ctx = locals()
# default
render_frames(ctx, 'default', frames=[16,17])
# delay
# NOTE(review): the pass is configured with delay=0 but the capture is named
# 'delay.1', and no 'frames' argument is passed (unlike the delay=32 case
# below) -- confirm this is intended.
g.updatePass('TemporalDelayPass', {'delay': 0})
render_frames(ctx, 'delay.' + str(1))
g.updatePass('TemporalDelayPass', {'delay': 32})
render_frames(ctx, 'delay.' + str(32), frames=[32,33])
exit()
|
15,503 | 93f84a750317bced09ecf830422752f2b5ce18ca | import tkinter
class ClientView:
    """Tkinter view for the chat client.

    Builds the window (received-message list, input field, send buttons) and
    forwards every user action to *controller* (msg_to_send, find_file, close).
    """

    def __init__(self, controller):
        """Handles the view, creating a window with received messages, input field and a send button"""
        self.controller = controller
        self.top = tkinter.Tk()
        self.top.title("Chatter")
        self.input = tkinter.StringVar()  # For the messages to be sent.
        self.input.set("")

        # Message box: scrollable listbox of received messages.
        messages_frame = tkinter.Frame(self.top)
        scrollbar = tkinter.Scrollbar(messages_frame)
        self.msg_list = tkinter.Listbox(messages_frame, height=15, width=60, yscrollcommand=scrollbar.set)
        scrollbar.pack(side=tkinter.RIGHT, fill=tkinter.Y)
        # Fix: the listbox was packed twice (once with options, once without);
        # a single pack() call with the layout options is sufficient.
        self.msg_list.pack(side=tkinter.LEFT, fill=tkinter.BOTH)
        messages_frame.pack()

        # Input box and send button.
        # NOTE(review): <Return> passes a Tk event to msg_to_send while the
        # button passes the StringVar -- confirm the controller handles both.
        entry_field = tkinter.Entry(self.top, textvariable=self.input)
        entry_field.bind("<Return>", self.controller.msg_to_send)
        entry_field.pack()
        send_button = tkinter.Button(self.top, text="Send", command=lambda: self.controller.msg_to_send(self.input))
        send_button.pack()

        # Send file button.
        file_button = tkinter.Button(self.top, text="Send file", command=lambda: self.controller.find_file())
        file_button.pack()

        # Ask the controller to shut down cleanly when the window is closed.
        self.top.protocol("WM_DELETE_WINDOW", self.controller.close)
|
# Print a Korean greeting ("Hello.") three times, then show some basic types.
message = '안녕하세요.'
for _ in range(3):
    print(message)

number = 123
boolean = True  # the other boolean literal is False

# Show each value alongside its runtime type.
print(type(number), number)
print(type(message), message)
print(type(boolean), boolean)

# Command Line Interface / CLI
# e.g. cmd, gshell, terminal
15,505 | 8dd996a307f067aea7fecba91afb7d413a281664 | import pandas.io.data as web
import datetime
import financial
from pyspark import SparkContext

# NOTE(review): this script relies on pandas.io.data (imported above as
# 'web'), which was removed from pandas in favour of the separate
# pandas-datareader package -- the import will fail on modern pandas.

print("*** Ejercicio 1 ***")

# Date window for the downloaded quotes.
start = datetime.datetime(2015, 1, 1)
end = datetime.datetime(2016, 7, 27)

# get web data: daily quotes from Yahoo Finance for three tickers
msft = web.DataReader("MSFT", "yahoo", start, end)
print("MSFT", msft.head())
# print(msft.shape)
goog = web.DataReader("GOOG", "yahoo", start, end)
print("GOOG", goog.head())
# print(goog.shape)
aapl = web.DataReader("AAPL", "yahoo", start, end)
print("AAPL", aapl.head())
# print(aapl.shape)

# write to file, one CSV per ticker, re-read below through Spark
msft.to_csv(path_or_buf="msft.csv", sep=",")
goog.to_csv(path_or_buf="goog.csv", sep=",")
aapl.to_csv(path_or_buf="aapl.csv", sep=",")

# create Spark context (local mode)
sc = SparkContext("local", "Simple App")
print(sc)

# parse file and filter by years 2015 and 2016
def parse_and_filter(file):
    """Load *file* into an RDD via financial.transform and keep 2015/2016 rows.

    Uses the module-level SparkContext ``sc``. Rows are assumed to expose a
    datetime-like ``Date`` attribute (set by the project-local 'financial'
    module) -- TODO confirm.
    """
    file_rdd = sc.textFile(file)
    split_rdd = financial.transform(file_rdd)
    print(split_rdd.take(5))
    filter_rdd = split_rdd.filter(lambda x: x.Date.year in [2015, 2016])
    # print(filter_rdd.take(5))
    return filter_rdd

msft_RDD = parse_and_filter("msft.csv")
print("MSFT", msft_RDD.take(5))
goog_RDD = parse_and_filter("goog.csv")
print("GOOG", goog_RDD.take(5))
aapl_RDD = parse_and_filter("aapl.csv")
print("AAPL", aapl_RDD.take(5))

# Slice the original DataFrames by their date index: January 2016 only.
print("MSFT", msft["2016-01-01":"2016-01-31"])
print("GOOG", goog["2016-01-01":"2016-01-31"])
print("AAPL", aapl["2016-01-01":"2016-01-31"])
|
15,506 | 503f07d811d69aa17f375d7a53404dce1ebacd07 | # class and helper function for scene definition
import numpy as np
from read_helper import *
class Scene:
    """Holds everything a render needs: camera, lights and geometry.

    Geometry is appended via the *_init methods as the scene file is parsed.
    """

    @staticmethod
    def norm_vec(vec):
        """Return *vec* scaled to unit length."""
        return vec / np.linalg.norm(vec)

    def __init__(self):
        # Basic render settings.
        self.width = 0
        self.height = 0
        self.max_depth = 5
        self.output_name = 'ray_tracing.png'
        # Default light attenuation: constant term only.
        self.light_attenu = np.array([1.0, 0.0, 0.0])
        # Elements of the scene.
        self.camera = {}
        self.lights = []
        self.vertices = []
        self.spheres = []
        self.triangles = []

    def cam_init(self, input):
        """Build the camera dict from a directive: from(3) at(3) up(3) fovy."""
        values = [float(coor) for coor in input]
        eye = np.array(values[0:3])
        look_at = np.array(values[3:6])
        up = np.array(values[6:9])
        cam = {'loc': eye, 'fovy': values[-1]}
        cam['fovy_rad'] = cam['fovy'] / 180.0 * np.pi
        # Horizontal FOV follows from the vertical FOV and the aspect ratio.
        cam['fovx_rad'] = 2 * np.arctan(np.tan(cam['fovy_rad'] / 2) * self.width / self.height)
        cam['fovx'] = cam['fovx_rad'] / np.pi * 180.0
        # The camera looks down -z of its own frame.
        cam['dir'] = self.norm_vec(eye - look_at)
        # Orthonormal camera frame (u right, v up).
        cam['u'] = self.norm_vec(np.cross(up, cam['dir']))
        cam['v'] = self.norm_vec(np.cross(cam['dir'], cam['u']))
        self.camera = cam

    def triangle_init(self, input, mtx, material):
        """Append a triangle whose vertex indices are in *input*, transformed by *mtx*."""
        corners = [(mtx @ np.append(np.array(self.vertices[idx]), 1))[0:3] for idx in input]
        normal = self.norm_vec(np.cross(corners[0] - corners[1], corners[0] - corners[2]))
        self.triangles.append({'A': corners[0], 'B': corners[1], 'C': corners[2],
                               'transform': np.linalg.inv(mtx), 'surface': normal, **material})

    def sphere_init(self, input, mtx, material):
        """Append a sphere (centre xyz + radius) with the inverse transform stored."""
        self.spheres.append({'loc': np.array(input[0:3]), 'radius': input[-1],
                             'transform': np.linalg.inv(mtx), **material})
class SceneReader:
    """Parses a scene description file and populates a Scene from it."""

    # Maps each directive keyword to its handler (provided by read_helper).
    def_mapping = {'size': def_size, 'camera': def_cam, 'maxdepth': def_depth, 'output': def_filename,
                   'directional': def_dirlight, 'point': def_ptlight, 'pushTransform': def_push,
                   'popTransform': def_pop, 'translate': def_translate, 'rotate': def_rotation,
                   'scale': def_scale, 'sphere': def_sphere, 'tri': def_triangle,
                   'vertex': def_vertex, 'attenuation': def_attenuation, 'transparent': def_trans}

    def __init__(self, file_name):
        # Scene files are assumed to live in the current directory.
        self.file_name = './' + file_name
        # Transform stack, seeded with the identity matrix.
        self.transform = [np.eye(4)]
        # Current material state; updated in place as directives are read.
        self.material = {'ambient': 0.2 * np.ones(3), 'diffuse': np.zeros(3),
                         'specular': np.zeros(3), 'emission': np.zeros(3),
                         'shininess': np.array(0.0)}
        self.scene = Scene()

    def read_file(self, file_name=None):
        """Read the scene file line by line and return the populated Scene."""
        path = self.file_name if file_name is None else file_name
        print('Load Scene Config')
        scene_def = open(path, 'r')
        for line in scene_def:
            # Skip blank lines and '#' comments.
            if len(line) <= 1 or line[0] == "#":
                continue
            tokens = line.split()
            keyword = tokens[0]
            if keyword in self.def_mapping.keys():
                # Directive: dispatch to its handler with the remaining args.
                self.def_mapping[keyword](tokens[1:], self)
            elif keyword in self.material.keys():
                # Material property: overwrite the current material state.
                self.material[keyword] = np.array([float(val) for val in tokens[1:]])
        scene_def.close()
        # Transpose so each column holds one vertex.
        self.scene.vertices = np.array(self.scene.vertices).T
        return self.scene
|
15,507 | 28d80e2c6566a85da6bac12355b3ee1adbeed456 | # coding: utf-8
import dbapi
import traceback
import sap_hana_credentials as credentials
from twitterstream import TwitterFetcher
META_INFO_DIRECTORY = 'meta-info'
TABLE_NAME = '"DEMOUSER00"."uni.vlba.gdelt.data::twitter_stream"'
"""
Author : Viktor Dmitriyev
Goal : Establish connection to SAP HANA DB using shipped with SAP HANA python client routine(dpapi)
and loading GDELT database from daily updates.
Date : 13.08.2014
"""
class TwitterDataLoader():
def __init__(self):
"""
Init method that creates connection and iterates data folder.
"""
self.connection = self.get_connection()
def get_connection(self):
"""
(obj) -> (obj)
Method that will return connection to the database using given credentials.
"""
return dbapi.connect(credentials.SERVER,\
credentials.PORT,\
credentials.USER,\
credentials.PASSWORD)
def _build_test_query01(self):
"""
(obj) -> (str)
Building query for execution
"""
query = 'select "GLOBALEVENTID", "SQLDATE", "MonthYear", "Year" ' + \
'from "DEMOUSER00"."uni.vlba.gdelt.data::gdelt_dailyupdates"'
return query
def fetch_row_into_str(self, row):
"""
(list) -> (str)
Fetching values from the given row(tuple) that are presented in form of list.
"""
str_row = ""
for value in row:
str_row = str_row + str(value) + ' | \t\t'
return str_row[:-5]
def execute_query(self, query, fetch=False):
"""
(obj, str) -> NoneType
Running given query and using given connection.
Fetching result rows and printing them to standard output.
"""
cursor = self.connection.cursor()
executed_cur = cursor.execute(query)
if executed_cur:
if fetch:
result_cur = cursor.fetchall()
for row in result_cur:
print fetch_row_into_str(row)
else:
print "[e] Something wrong with execution."
def line_to_list(self, _line):
"""
(obj, str) -> list()
Converting input line that suppose to be an csv to the separated list.
"""
result = list()
_line_splited = _line.split('\t')
for value in _line_splited:
value_stripped = value.strip().rstrip()
result.append(value_stripped)
return result
def escapeinput_data_for_sql(self, value, sql_type):
"""
(obj, str) -> str
Escape symbols to be used in sql statements.
"""
# print value
value = value.replace('\'', '"')
value = value.replace(',', '_')
if len(value) == 0:
if sql_type in ('BIGINT', 'INTEGER', 'FLOAT', 'DOUBLE'):
return '0'
if sql_type == 'NVARCHAR':
return '\'\''
else:
if sql_type in ('BIGINT', 'INTEGER', 'FLOAT', 'DOUBLE'):
# return value
return '\'' + value + '\''
if sql_type == 'NVARCHAR':
return '\'' + value + '\''
return '\'' + value + '\''
def build_query_part(self, input_data, table_fields_types, query_part):
"""
(obj, list, list, list, boolean) -> (str)
Building part of the query, according to the value passed with 'query_part' parameter (should be 1 or 2).
"""
result_query = '('
for index in xrange(len(input_data)):
if query_part == 1:
proper_value = '"' + input_data[index] + '"'
if query_part == 2:
if "nextval" not in input_data[index]:
proper_value = self.escapeinput_data_for_sql(input_data[index], table_fields_types[index])
else:
proper_value = input_data[index]
result_query = result_query + proper_value + ','
# if query_part == 2:
# result_query = result_query + '\'\'' + ','
result_query = result_query[:len(result_query)-1]
result_query = result_query + ')'
return result_query
def form_insert_query(self, table_name, input_data, table_fields_names=None, table_fields_types=None):
"""
(obj, str, list, list) -> (str)
Returning "insert" SQL statement with values.
"""
# creating first part of the query -> section with columns' names
query_table_structure = self.build_query_part(table_fields_names, table_fields_types, query_part=1)
# creating second part of the query -> section with values
query_values = self.build_query_part(input_data, table_fields_types, query_part=2)
# form query
query = 'INSERT INTO ' + table_name + ' ' + query_table_structure + ' VALUES ' + query_values
return query
def identify_table_mask(self, maskdata_file_name='daily_update_table-mask.txt', delim=';'):
"""
(obj, str) -> (list(), list())
Extracting table identifiers from the ".txt" mask file.
'Table Definitions' are taken by simple "Copy->Paste" from 'Open Definition' visual interface of table in SAP HANA Studio.
"""
table_fields_names, table_fields_types = list(), list()
mask_f = open(META_INFO_DIRECTORY + '/' + maskdata_file_name, "r")
# skipping line with descriptions of attributes
line = mask_f.readline()
# first line with data
line = mask_f.readline()
while line:
value_list = line.split(delim)
table_fields_names.append(value_list[0])
table_fields_types.append(value_list[1])
line = mask_f.readline()
mask_f.close()
return table_fields_names, table_fields_types
def check_if_row_already_loaded(self, row, file_name):
"""
(obj,) -> boolean
Checking if data is already loaded into db's table.
"""
query = "SELECT count(*) FROM " + TABLE_NAME + " WHERE GLOBALEVENTID = " + "'" + row[0] + "'"
try:
# print query
cursor = self.connection.cursor()
executed_cur = cursor.execute(query)
if executed_cur:
result_cur = cursor.fetchall()
for row in result_cur:
if int(row[0]) > 0:
return True
else:
print "[e] Something wrong with execution."
except Exception, e:
print '[e] Exeption: %s while processing "%s" file in method %s' % \
(str(e), DATA_DIRECTORY + '/' + file_name, "check_if_row_already_loaded")
print '\t[q] Query that caused exception \n %s' % (query)
return False
def is_valid_row_to_insert(self, row):
"""
(obj, list) -> boolean
Checking if row is to be valid to inserrted.
"""
if row[5] == COUNTRY or row[15] == COUNTRY:
return True
return False
def insert_data(self, row, table_fields_names, table_fields_types):
"""
(obj, list, list, list) -> NoneType
Inserting one single row to table.
"""
query = ''
try:
query = self.form_insert_query(TABLE_NAME, row, table_fields_names, table_fields_types)
# print query
self.execute_query(query)
except Exception, e:
print '[e] Exeption: %s' % (str(e))
print '\t[q] Query that caused exception \n %s' % (query)
return False
return True
def load_twitter_data_to_db(self, truncate_table=False, skip_loaded_files=False):
"""
(obj) -> NoneType
Fetching data from CSV with GDELT data and loading to database (with insert statements).
"""
table_fields_names, table_fields_types = self.identify_table_mask('twitter_stream_table-mask.txt')
# Truncating table
if truncate_table:
query = 'TRUNCATE TABLE ' + TABLE_NAME;
try:
self.execute_query(query)
except Exception, e:
print '[e] Exeption: %s' % (str(e))
total_queries = 0
error_queries = 0
success_queries = 0
fetcher = TwitterFetcher()
fetched_tweets = fetcher.fetchsamples(10)
for tweet in fetched_tweets:
tweet_as_list = list()
tweet_as_list.append('("uni.vlba.gdelt.data::seq_twitter_stream_id".nextval)')
tweet_as_list.append(tweet)
#print tweet_as_list
if self.insert_data(tweet_as_list, table_fields_names, table_fields_types):
success_queries = success_queries + 1
else:
error_queries = error_queries + 1
total_queries = success_queries + error_queries
print '\n[i] Queries processed in total: %d\n' % (total_queries)
if error_queries > 0:
print '[i] Queries processed in total with errors: %d' % (error_queries)
def main():
    """
    (NoneType) -> NoneType
    Entry point: build the loader and push Twitter data into SAP HANA.
    """
    loader = TwitterDataLoader()
    loader.load_twitter_data_to_db(truncate_table=False, skip_loaded_files=True)

if __name__ == '__main__':
    main()
|
15,508 | 4a0cb4f09932e8c6789c73a398fadb830d7b681c | def calcstep(begin, end=1, step=1): # 디폴트값은 뒤로 다 몰아야 한다.
total = 0
for num in range(begin, end+1, step):
total += num
return total
print('1~10 =', calcstep(1, 10, 2))
print('1~100 =', calcstep(1, 100))
print('1 =', calcstep(1))
|
15,509 | 8e32949a1dcb53e5c5ea3b9d8a3f785f4c0d6e1b | from django.db import models
from social_django.models import UserSocialAuth
import datetime
import requests
def get_header_token():
    """Build the Authorization header from the stored drchrono OAuth token."""
    social_auth = UserSocialAuth.objects.get(provider='drchrono')
    token = social_auth.extra_data['access_token']
    return {'Authorization': 'Bearer %s' % token}
def api_get_request(url, params):
    """Issue an authenticated GET against the drchrono API."""
    headers = get_header_token()
    return requests.get(url, params=params, headers=headers)
def api_post_request(url, params):
    """Issue an authenticated POST against the drchrono API."""
    headers = get_header_token()
    return requests.post(url, params=params, headers=headers)
def api_update_request(url, params):
    """Issue an authenticated partial update against the drchrono API.

    BUGFIX: this previously issued requests.get, which cannot modify a
    resource; drchrono resource updates use HTTP PATCH.
    """
    return requests.patch(url, params=params, headers=get_header_token())
class AppointmentApi():
    """Thin wrapper around the drchrono appointment/patient endpoints."""

    def appointments_today(self):
        """Fetch appointments scheduled for today."""
        today = datetime.date.today().isoformat()
        return api_get_request('https://drchrono.com/api/appointments', {'date': today})

    def appointments_this_year(self):
        """Fetch appointments since today (API 'since' filter)."""
        today = datetime.date.today().isoformat()
        return api_get_request('https://drchrono.com/api/appointments', {'since': today})

    def set_status(self, id, new_status):
        """Change the status of one appointment."""
        url = 'https://drchrono.com/api/appointments/' + str(id)
        return api_update_request(url, {'status': new_status})

    def get_patient_info(self, paitent_id):
        # Stub: not implemented yet; always returns 1.
        return 1

    def update_patient_info(self, paitent_id, new_info):
        """Push updated demographics for one patient."""
        url = 'https://drchrono.com/api/paitents/' + str(paitent_id)
        return api_update_request(url, new_info)
# Add your models here
class Appointment(models.Model):
    """One drchrono appointment mirrored locally for wait-time tracking."""
    # BUGFIX: models.IntField does not exist in Django; the integer field
    # class is models.IntegerField.
    drchrono_id = models.IntegerField(null=False)
    paitent_drchron_id = models.IntegerField(null=False)
    status = models.CharField(max_length=10)
    # NOTE(review): blank=True only affects form validation; these fields
    # probably also need null=True to allow empty DB values -- confirm schema.
    time_checked_in = models.DateTimeField(blank=True)
    time_seen = models.DateTimeField(blank=True)
    wait_time = models.IntegerField()
|
15,510 | 4bb23be7a30e02530846424394b772154f07f974 | """
You have an array of numbers.
Your task is to sort ascending odd numbers but even numbers must be on their places.
Zero isn't an odd number and you don't need to move it. If you have an empty array, you need to return it.
Example
sort_array([5, 3, 2, 8, 1, 4]) == [1, 3, 2, 8, 5, 4]
"""
def sort_array(source_array):
    """Return a copy of *source_array* with odd numbers sorted ascending.

    Even numbers (including zero) keep their original positions. An empty
    input returns an empty list.
    """
    # Sort the odd values once, then consume them in order wherever an odd
    # value sat in the original list (clearer than the conditional-expression
    # side effects used previously).
    sorted_odds = iter(sorted(n for n in source_array if n % 2))
    return [next(sorted_odds) if n % 2 else n for n in source_array]
|
15,511 | 5d6b7d57b78aa34d45178b1f59e182cd45e4edcf | import re,time,json,logging,hashlib,base64,asyncio
from www.coroweb import get,post
from www.models import User,Comment,Blog,next_id
@get('/')
async def index(request):
    """GET / -- render the test template with every registered user."""
    all_users = await User.findAll()
    return {
        '__template__': 'test.html',
        'users': all_users,
    }
|
15,512 | 5ed3a3ed00f4618070a7aabeee869707a8b758c8 | """
Description
-----------
This file contains functions written to test the functions in the grand.samplers sub-module
Marley Samways
"""
import os
import unittest
import numpy as np
from copy import deepcopy
from simtk.openmm.app import *
from simtk.openmm import *
from simtk.unit import *
from grand import samplers
from grand import utils
from openmmtools.integrators import NonequilibriumLangevinIntegrator
outdir = os.path.join(os.path.dirname(__file__), 'output', 'samplers')
def setup_BaseGrandCanonicalMonteCarloSampler():
    """
    Set up variables for the BaseGrandCanonicalMonteCarloSampler
    """
    # Make variables global so that they can be used by the test cases
    global base_gcmc_sampler
    global base_gcmc_simulation

    # Build the BPTI test system with amber14/TIP3P.
    pdb = PDBFile(utils.get_data_file(os.path.join('tests', 'bpti-ghosts.pdb')))
    ff = ForceField('amber14-all.xml', 'amber14/tip3p.xml')
    system = ff.createSystem(pdb.topology, nonbondedMethod=PME, nonbondedCutoff=12 * angstroms,
                             constraints=HBonds)

    base_gcmc_sampler = samplers.BaseGrandCanonicalMonteCarloSampler(system=system, topology=pdb.topology,
                                                                     temperature=300*kelvin,
                                                                     ghostFile=os.path.join(outdir, 'bpti-ghost-wats.txt'),
                                                                     log=os.path.join(outdir, 'basegcmcsampler.log'))

    # Define a simulation
    integrator = NonequilibriumLangevinIntegrator(temperature=300*kelvin, collision_rate=1./picosecond, timestep=2.*femtoseconds)

    # Prefer the fastest platform available: CUDA -> OpenCL -> CPU.
    # FIX: a bare 'except:' would also swallow KeyboardInterrupt/SystemExit.
    platform = None
    for platform_name in ('CUDA', 'OpenCL'):
        try:
            platform = Platform.getPlatformByName(platform_name)
            break
        except Exception:
            continue
    if platform is None:
        platform = Platform.getPlatformByName('CPU')

    base_gcmc_simulation = Simulation(pdb.topology, system, integrator, platform)
    base_gcmc_simulation.context.setPositions(pdb.positions)
    base_gcmc_simulation.context.setVelocitiesToTemperature(300*kelvin)
    base_gcmc_simulation.context.setPeriodicBoxVectors(*pdb.topology.getPeriodicBoxVectors())

    # Set up the sampler (the base class is only given the context directly)
    base_gcmc_sampler.context = base_gcmc_simulation.context

    return None
def setup_GCMCSphereSampler():
    """
    Set up variables for the GCMCSphereSampler
    """
    # Make variables global so that they can be used by the test cases
    global gcmc_sphere_sampler
    global gcmc_sphere_simulation

    # Build the BPTI test system with amber14/TIP3P.
    pdb = PDBFile(utils.get_data_file(os.path.join('tests', 'bpti-ghosts.pdb')))
    ff = ForceField('amber14-all.xml', 'amber14/tip3p.xml')
    system = ff.createSystem(pdb.topology, nonbondedMethod=PME, nonbondedCutoff=12 * angstroms,
                             constraints=HBonds)

    # GCMC sphere is centred between two CA reference atoms.
    ref_atoms = [{'name': 'CA', 'resname': 'TYR', 'resid': '10'},
                 {'name': 'CA', 'resname': 'ASN', 'resid': '43'}]
    gcmc_sphere_sampler = samplers.GCMCSphereSampler(system=system, topology=pdb.topology, temperature=300*kelvin,
                                                     referenceAtoms=ref_atoms, sphereRadius=4*angstroms,
                                                     ghostFile=os.path.join(outdir, 'bpti-ghost-wats.txt'),
                                                     log=os.path.join(outdir, 'gcmcspheresampler.log'))

    # Define a simulation
    integrator = NonequilibriumLangevinIntegrator(temperature=300*kelvin, collision_rate=1./picosecond, timestep=2.*femtoseconds)

    # Prefer the fastest platform available: CUDA -> OpenCL -> CPU.
    # FIX: a bare 'except:' would also swallow KeyboardInterrupt/SystemExit.
    platform = None
    for platform_name in ('CUDA', 'OpenCL'):
        try:
            platform = Platform.getPlatformByName(platform_name)
            break
        except Exception:
            continue
    if platform is None:
        platform = Platform.getPlatformByName('CPU')

    gcmc_sphere_simulation = Simulation(pdb.topology, system, integrator, platform)
    gcmc_sphere_simulation.context.setPositions(pdb.positions)
    gcmc_sphere_simulation.context.setVelocitiesToTemperature(300*kelvin)
    gcmc_sphere_simulation.context.setPeriodicBoxVectors(*pdb.topology.getPeriodicBoxVectors())

    # Set up the sampler with the known ghost water residue IDs.
    gcmc_sphere_sampler.initialise(gcmc_sphere_simulation.context, [3054, 3055, 3056, 3057, 3058])

    return None
def setup_StandardGCMCSphereSampler():
    """
    Set up variables for the StandardGCMCSphereSampler
    """
    # Make variables global so that they can be used by the test cases
    global std_gcmc_sphere_sampler
    global std_gcmc_sphere_simulation

    # Build the BPTI test system with amber14/TIP3P.
    pdb = PDBFile(utils.get_data_file(os.path.join('tests', 'bpti-ghosts.pdb')))
    ff = ForceField('amber14-all.xml', 'amber14/tip3p.xml')
    system = ff.createSystem(pdb.topology, nonbondedMethod=PME, nonbondedCutoff=12 * angstroms,
                             constraints=HBonds)

    # GCMC sphere is centred between two CA reference atoms.
    ref_atoms = [{'name': 'CA', 'resname': 'TYR', 'resid': '10'},
                 {'name': 'CA', 'resname': 'ASN', 'resid': '43'}]
    std_gcmc_sphere_sampler = samplers.StandardGCMCSphereSampler(system=system, topology=pdb.topology,
                                                                 temperature=300*kelvin, referenceAtoms=ref_atoms,
                                                                 sphereRadius=4*angstroms,
                                                                 ghostFile=os.path.join(outdir, 'bpti-ghost-wats.txt'),
                                                                 log=os.path.join(outdir, 'stdgcmcspheresampler.log'))

    # Define a simulation
    integrator = NonequilibriumLangevinIntegrator(temperature=300*kelvin, collision_rate=1./picosecond, timestep=2.*femtoseconds)

    # Prefer the fastest platform available: CUDA -> OpenCL -> CPU.
    # FIX: a bare 'except:' would also swallow KeyboardInterrupt/SystemExit.
    platform = None
    for platform_name in ('CUDA', 'OpenCL'):
        try:
            platform = Platform.getPlatformByName(platform_name)
            break
        except Exception:
            continue
    if platform is None:
        platform = Platform.getPlatformByName('CPU')

    std_gcmc_sphere_simulation = Simulation(pdb.topology, system, integrator, platform)
    std_gcmc_sphere_simulation.context.setPositions(pdb.positions)
    std_gcmc_sphere_simulation.context.setVelocitiesToTemperature(300*kelvin)
    std_gcmc_sphere_simulation.context.setPeriodicBoxVectors(*pdb.topology.getPeriodicBoxVectors())

    # Set up the sampler with the known ghost water residue IDs.
    std_gcmc_sphere_sampler.initialise(std_gcmc_sphere_simulation.context, [3054, 3055, 3056, 3057, 3058])

    return None
def setup_NonequilibriumGCMCSphereSampler():
    """
    Set up variables for the NonequilibriumGCMCSphereSampler
    """
    # Make variables global so that they can be used by the test cases
    global neq_gcmc_sphere_sampler
    global neq_gcmc_sphere_simulation

    # Build the BPTI test system with amber14/TIP3P.
    pdb = PDBFile(utils.get_data_file(os.path.join('tests', 'bpti-ghosts.pdb')))
    ff = ForceField('amber14-all.xml', 'amber14/tip3p.xml')
    system = ff.createSystem(pdb.topology, nonbondedMethod=PME, nonbondedCutoff=12 * angstroms,
                             constraints=HBonds)

    # GCMC sphere is centred between two CA reference atoms.
    ref_atoms = [{'name': 'CA', 'resname': 'TYR', 'resid': '10'},
                 {'name': 'CA', 'resname': 'ASN', 'resid': '43'}]

    # The nonequilibrium sampler owns the integrator (wrapped in a compound integrator).
    integrator = NonequilibriumLangevinIntegrator(temperature=300*kelvin, collision_rate=1./picosecond, timestep=2.*femtoseconds)

    # NOTE(review): nPropStepsPerPert=99 / nPertSteps=50 here is the reverse of
    # the system-sampler setup below (50/99) -- confirm which is intended.
    neq_gcmc_sphere_sampler = samplers.NonequilibriumGCMCSphereSampler(system=system, topology=pdb.topology,
                                                                       temperature=300*kelvin, referenceAtoms=ref_atoms,
                                                                       sphereRadius=4*angstroms,
                                                                       integrator=integrator,
                                                                       nPropStepsPerPert=99, nPertSteps=50,
                                                                       ghostFile=os.path.join(outdir, 'bpti-ghost-wats.txt'),
                                                                       log=os.path.join(outdir, 'neqgcmcspheresampler.log'))

    # Define a simulation
    # Prefer the fastest platform available: CUDA -> OpenCL -> CPU.
    # FIX: a bare 'except:' would also swallow KeyboardInterrupt/SystemExit.
    platform = None
    for platform_name in ('CUDA', 'OpenCL'):
        try:
            platform = Platform.getPlatformByName(platform_name)
            break
        except Exception:
            continue
    if platform is None:
        platform = Platform.getPlatformByName('CPU')

    neq_gcmc_sphere_simulation = Simulation(pdb.topology, system, neq_gcmc_sphere_sampler.compound_integrator, platform)
    neq_gcmc_sphere_simulation.context.setPositions(pdb.positions)
    neq_gcmc_sphere_simulation.context.setVelocitiesToTemperature(300*kelvin)
    neq_gcmc_sphere_simulation.context.setPeriodicBoxVectors(*pdb.topology.getPeriodicBoxVectors())

    # Set up the sampler with the known ghost water residue IDs.
    neq_gcmc_sphere_sampler.initialise(neq_gcmc_sphere_simulation.context, [3054, 3055, 3056, 3057, 3058])

    return None
def setup_GCMCSystemSampler():
    """
    Set up variables for the GCMCSystemSampler
    """
    # Make variables global so that they can be used by the test cases
    global gcmc_system_sampler
    global gcmc_system_simulation

    # Build a pure-water test box with TIP3P.
    pdb = PDBFile(utils.get_data_file(os.path.join('tests', 'water-ghosts.pdb')))
    ff = ForceField('tip3p.xml')
    system = ff.createSystem(pdb.topology, nonbondedMethod=PME, nonbondedCutoff=12 * angstroms,
                             constraints=HBonds)

    gcmc_system_sampler = samplers.GCMCSystemSampler(system=system, topology=pdb.topology, temperature=300*kelvin,
                                                     boxVectors=np.array(pdb.topology.getPeriodicBoxVectors()),
                                                     ghostFile=os.path.join(outdir, 'water-ghost-wats.txt'),
                                                     log=os.path.join(outdir, 'gcmcsystemsampler.log'))

    # Define a simulation
    integrator = NonequilibriumLangevinIntegrator(temperature=300*kelvin, collision_rate=1./picosecond, timestep=2.*femtoseconds)

    # Prefer the fastest platform available: CUDA -> OpenCL -> CPU.
    # FIX: a bare 'except:' would also swallow KeyboardInterrupt/SystemExit.
    platform = None
    for platform_name in ('CUDA', 'OpenCL'):
        try:
            platform = Platform.getPlatformByName(platform_name)
            break
        except Exception:
            continue
    if platform is None:
        platform = Platform.getPlatformByName('CPU')

    gcmc_system_simulation = Simulation(pdb.topology, system, integrator, platform)
    gcmc_system_simulation.context.setPositions(pdb.positions)
    gcmc_system_simulation.context.setVelocitiesToTemperature(300*kelvin)
    gcmc_system_simulation.context.setPeriodicBoxVectors(*pdb.topology.getPeriodicBoxVectors())

    # Set up the sampler with the known ghost water residue IDs.
    gcmc_system_sampler.initialise(gcmc_system_simulation.context, [2094, 2095, 2096, 2097, 2098])

    return None
def setup_StandardGCMCSystemSampler():
    """
    Set up variables for the StandardGCMCSystemSampler
    """
    # Make variables global so that they can be used by the test cases
    global std_gcmc_system_sampler
    global std_gcmc_system_simulation

    # Build a pure-water test box with TIP3P.
    pdb = PDBFile(utils.get_data_file(os.path.join('tests', 'water-ghosts.pdb')))
    ff = ForceField('tip3p.xml')
    system = ff.createSystem(pdb.topology, nonbondedMethod=PME, nonbondedCutoff=12 * angstroms,
                             constraints=HBonds)

    std_gcmc_system_sampler = samplers.StandardGCMCSystemSampler(system=system, topology=pdb.topology,
                                                                 temperature=300*kelvin,
                                                                 boxVectors=np.array(pdb.topology.getPeriodicBoxVectors()),
                                                                 ghostFile=os.path.join(outdir, 'water-ghost-wats.txt'),
                                                                 log=os.path.join(outdir, 'stdgcmcsystemsampler.log'))

    # Define a simulation
    integrator = NonequilibriumLangevinIntegrator(temperature=300*kelvin, collision_rate=1./picosecond, timestep=2.*femtoseconds)

    # Prefer the fastest platform available: CUDA -> OpenCL -> CPU.
    # FIX: a bare 'except:' would also swallow KeyboardInterrupt/SystemExit.
    platform = None
    for platform_name in ('CUDA', 'OpenCL'):
        try:
            platform = Platform.getPlatformByName(platform_name)
            break
        except Exception:
            continue
    if platform is None:
        platform = Platform.getPlatformByName('CPU')

    std_gcmc_system_simulation = Simulation(pdb.topology, system, integrator, platform)
    std_gcmc_system_simulation.context.setPositions(pdb.positions)
    std_gcmc_system_simulation.context.setVelocitiesToTemperature(300*kelvin)
    std_gcmc_system_simulation.context.setPeriodicBoxVectors(*pdb.topology.getPeriodicBoxVectors())

    # Set up the sampler with the known ghost water residue IDs.
    std_gcmc_system_sampler.initialise(std_gcmc_system_simulation.context, [2094, 2095, 2096, 2097, 2098])

    return None
def setup_NonequilibriumGCMCSystemSampler():
    """
    Set up variables for the NonequilibriumGCMCSystemSampler
    """
    # Make variables global so that they can be used by the test cases
    global neq_gcmc_system_sampler
    global neq_gcmc_system_simulation

    # Build a pure-water test box with TIP3P.
    pdb = PDBFile(utils.get_data_file(os.path.join('tests', 'water-ghosts.pdb')))
    ff = ForceField('tip3p.xml')
    system = ff.createSystem(pdb.topology, nonbondedMethod=PME, nonbondedCutoff=12 * angstroms,
                             constraints=HBonds)

    # The nonequilibrium sampler owns the integrator (wrapped in a compound integrator).
    integrator = NonequilibriumLangevinIntegrator(temperature=300*kelvin, collision_rate=1./picosecond, timestep=2.*femtoseconds)

    # NOTE(review): nPropStepsPerPert=50 / nPertSteps=99 here is the reverse of
    # the sphere-sampler setup above (99/50) -- confirm which is intended.
    neq_gcmc_system_sampler = samplers.NonequilibriumGCMCSystemSampler(system=system, topology=pdb.topology,
                                                                       temperature=300*kelvin, integrator=integrator,
                                                                       boxVectors=np.array(pdb.topology.getPeriodicBoxVectors()),
                                                                       ghostFile=os.path.join(outdir,
                                                                                              'water-ghost-wats.txt'),
                                                                       log=os.path.join(outdir,
                                                                                        'neqgcmcsystemsampler.log'),
                                                                       nPropStepsPerPert=50, nPertSteps=99)

    # Define a simulation
    # Prefer the fastest platform available: CUDA -> OpenCL -> CPU.
    # FIX: a bare 'except:' would also swallow KeyboardInterrupt/SystemExit.
    platform = None
    for platform_name in ('CUDA', 'OpenCL'):
        try:
            platform = Platform.getPlatformByName(platform_name)
            break
        except Exception:
            continue
    if platform is None:
        platform = Platform.getPlatformByName('CPU')

    neq_gcmc_system_simulation = Simulation(pdb.topology, system, neq_gcmc_system_sampler.compound_integrator, platform)
    neq_gcmc_system_simulation.context.setPositions(pdb.positions)
    neq_gcmc_system_simulation.context.setVelocitiesToTemperature(300*kelvin)
    neq_gcmc_system_simulation.context.setPeriodicBoxVectors(*pdb.topology.getPeriodicBoxVectors())

    # Set up the sampler with the known ghost water residue IDs.
    neq_gcmc_system_sampler.initialise(neq_gcmc_system_simulation.context, [2094, 2095, 2096, 2097, 2098])

    return None
class TestBaseGrandCanonicalMonteCarloSampler(unittest.TestCase):
    """
    Class to store the tests for the GrandCanonicalMonteCarloSampler class
    """
    @classmethod
    def setUpClass(cls):
        """
        Get things ready to run these tests
        """
        # Make the output directory if needed
        if not os.path.isdir(os.path.join(os.path.dirname(__file__), 'output')):
            os.mkdir(os.path.join(os.path.dirname(__file__), 'output'))
        # Create a new directory if needed
        if not os.path.isdir(outdir):
            os.mkdir(outdir)
        # If not, then clear any files already in the output directory so that they don't influence tests
        else:
            for file in os.listdir(outdir):
                os.remove(os.path.join(outdir, file))
        # Need to create the sampler
        # (populates the module-level base_gcmc_sampler / base_gcmc_simulation globals)
        setup_BaseGrandCanonicalMonteCarloSampler()
        return None

    def test_move(self):
        """
        Make sure the GrandCanonicalMonteCarloSampler.move() method works correctly
        """
        # Shouldn't be able to run a move with this sampler
        # (move() is abstract on the base class and must raise NotImplementedError)
        self.assertRaises(NotImplementedError, lambda: base_gcmc_sampler.move(base_gcmc_simulation.context))
        return None

    def test_report(self):
        """
        Make sure the BaseGrandCanonicalMonteCarloSampler.report() method works correctly
        """
        # Delete some ghost waters so they can be written out
        ghosts = [3054, 3055, 3056, 3057, 3058]
        base_gcmc_sampler.deleteGhostWaters(ghostResids=ghosts)
        # Report
        base_gcmc_sampler.report(base_gcmc_simulation)
        # Check the output to the ghost file
        assert os.path.isfile(os.path.join(outdir, 'bpti-ghost-wats.txt'))
        # Read which ghosts were written
        with open(os.path.join(outdir, 'bpti-ghost-wats.txt'), 'r') as f:
            n_lines = 0
            lines = f.readlines()
            for line in lines:
                if len(line.split()) > 0:
                    n_lines += 1
        # Exactly one report has been made, so exactly one non-empty line
        assert n_lines == 1
        # The written comma-separated resids must match the deleted ghosts
        ghosts_read = [int(resid) for resid in lines[0].split(',')]
        assert all(np.isclose(ghosts, ghosts_read))
        return None

    def test_reset(self):
        """
        Make sure the BaseGrandCanonicalMonteCarloSampler.reset() method works correctly
        """
        # Set tracked variables to some non-zero values
        base_gcmc_sampler.n_accepted = 1
        base_gcmc_sampler.n_moves = 1
        base_gcmc_sampler.Ns = [1]
        # Reset base_gcmc_sampler
        base_gcmc_sampler.reset()
        # Check that the values have been reset
        assert base_gcmc_sampler.n_accepted == 0
        assert base_gcmc_sampler.n_moves == 0
        assert len(base_gcmc_sampler.Ns) == 0
        return None
class TestGCMCSphereSampler(unittest.TestCase):
"""
Class to store the tests for the GCMCSphereSampler class
"""
    @classmethod
    def setUpClass(cls):
        """
        Get things ready to run these tests
        """
        # Make the output directory if needed
        if not os.path.isdir(os.path.join(os.path.dirname(__file__), 'output')):
            os.mkdir(os.path.join(os.path.dirname(__file__), 'output'))
        # Create a new directory if needed
        if not os.path.isdir(outdir):
            os.mkdir(outdir)
        # If not, then clear any files already in the output directory so that they don't influence tests
        else:
            for file in os.listdir(outdir):
                os.remove(os.path.join(outdir, file))
        # Need to create the sampler
        # (populates the module-level gcmc_sphere_sampler / gcmc_sphere_simulation globals)
        setup_GCMCSphereSampler()
        return None
def test_initialise(self):
"""
Make sure the GCMCSphereSampler.initialise() method works correctly
"""
# Make sure the variables are all updated
assert isinstance(gcmc_sphere_sampler.context, Context)
assert isinstance(gcmc_sphere_sampler.positions, Quantity)
assert isinstance(gcmc_sphere_sampler.sphere_centre, Quantity)
return None
def test_deleteWatersInGCMCSphere(self):
"""
Make sure the GCMCSphereSampler.deleteWatersInGCMCSphere() method works correctly
"""
# Now delete the waters in the sphere
gcmc_sphere_sampler.deleteWatersInGCMCSphere()
new_ghosts = gcmc_sphere_sampler.getWaterStatusResids(0)
# Check that the list of ghosts is correct
assert new_ghosts == [70, 71, 3054, 3055, 3056, 3057, 3058]
# Check that the variables match there being no waters in the GCMC region
assert gcmc_sphere_sampler.N == 0
assert all([x in [0, 2] for x in gcmc_sphere_sampler.water_status.values()])
return None
def test_updateGCMCSphere(self):
"""
Make sure the GCMCSphereSampler.updateGCMCSphere() method works correctly
"""
# Get initial gcmc_resids and status
gcmc_waters = deepcopy(gcmc_sphere_sampler.getWaterStatusResids(1))
sphere_centre = deepcopy(gcmc_sphere_sampler.sphere_centre)
N = gcmc_sphere_sampler.N
# Update the GCMC sphere (shouldn't change as the system won't have moved)
state = gcmc_sphere_simulation.context.getState(getPositions=True, getVelocities=True)
gcmc_sphere_sampler.updateGCMCSphere(state)
# Make sure that these values are all still the same
assert all(np.isclose(gcmc_waters, gcmc_sphere_sampler.getWaterStatusResids(1)))
assert all(np.isclose(sphere_centre._value, gcmc_sphere_sampler.sphere_centre._value))
assert N == gcmc_sphere_sampler.N
return None
def test_move(self):
"""
Make sure the GCMCSphereSampler.move() method works correctly
"""
# Shouldn't be able to run a move with this sampler
self.assertRaises(NotImplementedError, lambda: gcmc_sphere_sampler.move(gcmc_sphere_simulation.context))
return None
def test_insertRandomWater(self):
"""
Make sure the GCMCSphereSampler.insertRandomWater() method works correctly
"""
# Insert a random water
new_positions, wat_id, atom_ids = gcmc_sphere_sampler.insertRandomWater()
# Check that the indices returned are integers - may not be type int
assert wat_id == int(wat_id)
assert all([i == int(i) for i in atom_ids])
# Check that the new positions are different to the old positions
assert all([any([new_positions[i][j] != gcmc_sphere_sampler.positions[i][j] for j in range(3)])
for i in atom_ids])
assert all([all([new_positions[i][j] == gcmc_sphere_sampler.positions[i][j] for j in range(3)])
for i in range(len(new_positions)) if i not in atom_ids])
return None
def test_deleteRandomWater(self):
"""
Make sure the GCMCSphereSampler.deleteRandomWater() method works correctly
"""
# Insert a random water
delete_water, atom_indices = gcmc_sphere_sampler.deleteRandomWater()
# Check that the indices returned are integers
assert delete_water == int(delete_water)
assert all([i == int(i) for i in atom_indices])
return None
class TestStandardGCMCSphereSampler(unittest.TestCase):
    """
    Tests for the StandardGCMCSphereSampler class.
    """
    @classmethod
    def setUpClass(cls):
        """
        Prepare a clean output directory and build the sampler under test.
        """
        base_out = os.path.join(os.path.dirname(__file__), 'output')
        if not os.path.isdir(base_out):
            os.mkdir(base_out)
        # Make sure outdir exists and holds no files from earlier runs
        if os.path.isdir(outdir):
            for stale in os.listdir(outdir):
                os.remove(os.path.join(outdir, stale))
        else:
            os.mkdir(outdir)
        # Create sampler
        setup_StandardGCMCSphereSampler()
        return None
    def test_move(self):
        """
        StandardGCMCSphereSampler.move() must update all of the sampler's
        book-keeping. Individual moves are rarely accepted, so only the
        aggregate behaviour is checked.
        """
        sampler = std_gcmc_sphere_sampler
        n_moves = 10
        sampler.move(std_gcmc_sphere_simulation.context, n_moves)
        assert sampler.n_moves == n_moves
        assert 0 <= sampler.n_accepted <= n_moves
        assert len(sampler.Ns) == n_moves
        assert len(sampler.acceptance_probabilities) == n_moves
        # The stored energy must be a unit-bearing quantity in energy units
        assert isinstance(sampler.energy, Quantity)
        assert sampler.energy.unit.is_compatible(kilocalories_per_mole)
        return None
class TestNonequilibriumGCMCSphereSampler(unittest.TestCase):
    """
    Class to store the tests for the NonequilibriumGCMCSphereSampler class.

    NOTE: the insertion/deletion tests drive the shared sampler's compound
    integrator directly, so the set/reset sequence around each move is
    order-critical.
    """
    @classmethod
    def setUpClass(cls):
        """
        Get things ready to run these tests: create/clean the output directory
        and build the shared sampler fixture.
        """
        # Make the output directory if needed
        if not os.path.isdir(os.path.join(os.path.dirname(__file__), 'output')):
            os.mkdir(os.path.join(os.path.dirname(__file__), 'output'))
        # Create a new directory if needed
        if not os.path.isdir(outdir):
            os.mkdir(outdir)
        # If not, then clear any files already in the output directory so that they don't influence tests
        else:
            for file in os.listdir(outdir):
                os.remove(os.path.join(outdir, file))
        # Create sampler
        setup_NonequilibriumGCMCSphereSampler()
        return None
    def test_move(self):
        """
        Make sure the NonequilibriumGCMCSphereSampler.move() method works correctly.
        NCMC moves are expensive, so only a single move is run.
        """
        neq_gcmc_sphere_sampler.reset()
        # Just run one move, as they are a bit more expensive
        neq_gcmc_sphere_sampler.move(neq_gcmc_sphere_simulation.context, 1)
        # Check some of the variables have been updated as appropriate
        assert neq_gcmc_sphere_sampler.n_moves == 1
        assert 0 <= neq_gcmc_sphere_sampler.n_accepted <= 1
        assert len(neq_gcmc_sphere_sampler.Ns) == 1
        assert len(neq_gcmc_sphere_sampler.acceptance_probabilities) == 1
        # Check the NCMC-specific variables
        assert isinstance(neq_gcmc_sphere_sampler.velocities, Quantity)
        assert neq_gcmc_sphere_sampler.velocities.unit.is_compatible(nanometers/picosecond)
        # One move = exactly one protocol work value, either insertion or deletion
        assert len(neq_gcmc_sphere_sampler.insert_works) + len(neq_gcmc_sphere_sampler.delete_works) == 1
        assert 0 <= neq_gcmc_sphere_sampler.n_left_sphere <= 1
        assert 0 <= neq_gcmc_sphere_sampler.n_explosions <= 1
        return None
    def test_insertionMove(self):
        """
        Make sure the NonequilibriumGCMCSphereSampler.insertionMove() method runs
        without crashing when driven directly (smoke test).
        """
        # Prep for a move
        # Read in positions
        neq_gcmc_sphere_sampler.context = neq_gcmc_sphere_simulation.context
        state = neq_gcmc_sphere_sampler.context.getState(getPositions=True, enforcePeriodicBox=True, getVelocities=True)
        neq_gcmc_sphere_sampler.positions = deepcopy(state.getPositions(asNumpy=True))
        neq_gcmc_sphere_sampler.velocities = deepcopy(state.getVelocities(asNumpy=True))
        # Update GCMC region based on current state
        neq_gcmc_sphere_sampler.updateGCMCSphere(state)
        # Set to NCMC integrator (index 1 in the compound integrator)
        neq_gcmc_sphere_sampler.compound_integrator.setCurrentIntegrator(1)
        # Just run one move to make sure it doesn't crash
        neq_gcmc_sphere_sampler.insertionMove()
        # Reset the compound integrator back to the equilibrium integrator (index 0)
        neq_gcmc_sphere_sampler.compound_integrator.setCurrentIntegrator(0)
        return None
    def test_deletionMove(self):
        """
        Make sure the NonequilibriumGCMCSphereSampler.deletionMove() method runs
        without crashing when driven directly (smoke test).
        """
        # Prep for a move
        # Read in positions
        neq_gcmc_sphere_sampler.context = neq_gcmc_sphere_simulation.context
        state = neq_gcmc_sphere_sampler.context.getState(getPositions=True, enforcePeriodicBox=True, getVelocities=True)
        neq_gcmc_sphere_sampler.positions = deepcopy(state.getPositions(asNumpy=True))
        neq_gcmc_sphere_sampler.velocities = deepcopy(state.getVelocities(asNumpy=True))
        # Update GCMC region based on current state
        neq_gcmc_sphere_sampler.updateGCMCSphere(state)
        # Set to NCMC integrator (index 1 in the compound integrator)
        neq_gcmc_sphere_sampler.compound_integrator.setCurrentIntegrator(1)
        # Just run one move to make sure it doesn't crash
        neq_gcmc_sphere_sampler.deletionMove()
        # Reset the compound integrator back to the equilibrium integrator (index 0)
        neq_gcmc_sphere_sampler.compound_integrator.setCurrentIntegrator(0)
        return None
class TestGCMCSystemSampler(unittest.TestCase):
    """
    Class to store the tests for the GCMCSystemSampler class (whole-box GCMC,
    no sphere restriction).
    """
    @classmethod
    def setUpClass(cls):
        """
        Get things ready to run these tests: create/clean the output directory
        and build the shared sampler fixture.
        """
        # Make the output directory if needed
        if not os.path.isdir(os.path.join(os.path.dirname(__file__), 'output')):
            os.mkdir(os.path.join(os.path.dirname(__file__), 'output'))
        # Create a new directory if needed
        if not os.path.isdir(outdir):
            os.mkdir(outdir)
        # If not, then clear any files already in the output directory so that they don't influence tests
        else:
            for file in os.listdir(outdir):
                os.remove(os.path.join(outdir, file))
        # Need to create the sampler
        setup_GCMCSystemSampler()
        return None
    def test_initialise(self):
        """
        Make sure the GCMCSystemSampler.initialise() method works correctly:
        context, positions and simulation box must all have been populated.
        """
        # Make sure the variables are all updated
        assert isinstance(gcmc_system_sampler.context, Context)
        assert isinstance(gcmc_system_sampler.positions, Quantity)
        assert isinstance(gcmc_system_sampler.simulation_box, Quantity)
        return None
    def test_move(self):
        """
        Make sure the GCMCSystemSampler.move() method works correctly:
        the base class must refuse to run moves.
        """
        # Shouldn't be able to run a move with this sampler
        self.assertRaises(NotImplementedError, lambda: gcmc_system_sampler.move(gcmc_system_simulation.context))
        return None
    def test_insertRandomWater(self):
        """
        Make sure the GCMCSystemSampler.insertRandomWater() method works correctly:
        it must return integer indices and move only the inserted water's atoms.
        """
        # Insert a random water
        new_positions, wat_id, atom_ids = gcmc_system_sampler.insertRandomWater()
        # Check that the indices returned are integers - may not be type int
        assert wat_id == int(wat_id)
        assert all([i == int(i) for i in atom_ids])
        # Check that the new positions are different to the old positions
        assert all([any([new_positions[i][j] != gcmc_system_sampler.positions[i][j] for j in range(3)])
                    for i in atom_ids])
        # All other atoms must be untouched
        assert all([all([new_positions[i][j] == gcmc_system_sampler.positions[i][j] for j in range(3)])
                    for i in range(len(new_positions)) if i not in atom_ids])
        return None
    def test_deleteRandomWater(self):
        """
        Make sure the GCMCSystemSampler.deleteRandomWater() method works correctly:
        it must return integer residue/atom indices for the chosen water.
        """
        # Delete a random water
        delete_water, atom_ids = gcmc_system_sampler.deleteRandomWater()
        # Check that the indices returned are integers
        assert delete_water == int(delete_water)
        assert all([i == int(i) for i in atom_ids])
        return None
class TestStandardGCMCSystemSampler(unittest.TestCase):
    """
    Tests for the StandardGCMCSystemSampler class.
    """
    @classmethod
    def setUpClass(cls):
        """
        Prepare a clean output directory and build the sampler under test.
        """
        base_out = os.path.join(os.path.dirname(__file__), 'output')
        if not os.path.isdir(base_out):
            os.mkdir(base_out)
        # Make sure outdir exists and holds no files from earlier runs
        if os.path.isdir(outdir):
            for stale in os.listdir(outdir):
                os.remove(os.path.join(outdir, stale))
        else:
            os.mkdir(outdir)
        # Create sampler
        setup_StandardGCMCSystemSampler()
        return None
    def test_move(self):
        """
        StandardGCMCSystemSampler.move() must update all of the sampler's
        book-keeping. Individual moves are rarely accepted, so only the
        aggregate behaviour is checked.
        """
        sampler = std_gcmc_system_sampler
        n_moves = 10
        sampler.move(std_gcmc_system_simulation.context, n_moves)
        assert sampler.n_moves == n_moves
        assert 0 <= sampler.n_accepted <= n_moves
        assert len(sampler.Ns) == n_moves
        assert len(sampler.acceptance_probabilities) == n_moves
        # The stored energy must be a unit-bearing quantity in energy units
        assert isinstance(sampler.energy, Quantity)
        assert sampler.energy.unit.is_compatible(kilocalories_per_mole)
        return None
class TestNonequilibriumGCMCSystemSampler(unittest.TestCase):
    """
    Class to store the tests for the NonequilibriumGCMCSystemSampler class.
    """
    @classmethod
    def setUpClass(cls):
        """
        Get things ready to run these tests: create/clean the output directory
        and build the shared sampler fixture.
        """
        # Make the output directory if needed
        if not os.path.isdir(os.path.join(os.path.dirname(__file__), 'output')):
            os.mkdir(os.path.join(os.path.dirname(__file__), 'output'))
        # Create a new directory if needed
        if not os.path.isdir(outdir):
            os.mkdir(outdir)
        # If not, then clear any files already in the output directory so that they don't influence tests
        else:
            for file in os.listdir(outdir):
                os.remove(os.path.join(outdir, file))
        # Create sampler
        setup_NonequilibriumGCMCSystemSampler()
        return None
    def test_move(self):
        """
        Make sure the NonequilibriumGCMCSystemSampler.move() method works correctly.
        NCMC moves are expensive, so only a single move is run.
        """
        neq_gcmc_system_sampler.reset()
        # Just run one move, as they are a bit more expensive
        neq_gcmc_system_sampler.move(neq_gcmc_system_simulation.context, 1)
        # Check some of the variables have been updated as appropriate
        assert neq_gcmc_system_sampler.n_moves == 1
        assert 0 <= neq_gcmc_system_sampler.n_accepted <= 1
        assert len(neq_gcmc_system_sampler.Ns) == 1
        assert len(neq_gcmc_system_sampler.acceptance_probabilities) == 1
        # Check the NCMC-specific variables
        assert isinstance(neq_gcmc_system_sampler.velocities, Quantity)
        assert neq_gcmc_system_sampler.velocities.unit.is_compatible(nanometers/picosecond)
        # One move = exactly one protocol work value, either insertion or deletion
        assert len(neq_gcmc_system_sampler.insert_works) + len(neq_gcmc_system_sampler.delete_works) == 1
        assert 0 <= neq_gcmc_system_sampler.n_explosions <= 1
        return None
    def test_insertionMove(self):
        """
        Make sure the NonequilibriumGCMCSystemSampler.insertionMove() method runs
        without crashing when driven directly (smoke test).
        """
        # Prep for a move
        # Read in positions
        neq_gcmc_system_sampler.context = neq_gcmc_system_simulation.context
        state = neq_gcmc_system_sampler.context.getState(getPositions=True, enforcePeriodicBox=True, getVelocities=True)
        neq_gcmc_system_sampler.positions = deepcopy(state.getPositions(asNumpy=True))
        neq_gcmc_system_sampler.velocities = deepcopy(state.getVelocities(asNumpy=True))
        # Set to NCMC integrator (index 1 in the compound integrator)
        neq_gcmc_system_sampler.compound_integrator.setCurrentIntegrator(1)
        # Just run one move to make sure it doesn't crash
        neq_gcmc_system_sampler.insertionMove()
        # Reset the compound integrator back to the equilibrium integrator.
        # Bug fix: this previously reset the *sphere* sampler's integrator
        # (neq_gcmc_sphere_sampler), leaving this sampler on the NCMC integrator.
        neq_gcmc_system_sampler.compound_integrator.setCurrentIntegrator(0)
        return None
    def test_deletionMove(self):
        """
        Make sure the NonequilibriumGCMCSystemSampler.deletionMove() method runs
        without crashing when driven directly (smoke test).
        """
        # Prep for a move
        # Read in positions
        neq_gcmc_system_sampler.context = neq_gcmc_system_simulation.context
        state = neq_gcmc_system_sampler.context.getState(getPositions=True, enforcePeriodicBox=True, getVelocities=True)
        neq_gcmc_system_sampler.positions = deepcopy(state.getPositions(asNumpy=True))
        neq_gcmc_system_sampler.velocities = deepcopy(state.getVelocities(asNumpy=True))
        # Set to NCMC integrator (index 1 in the compound integrator)
        neq_gcmc_system_sampler.compound_integrator.setCurrentIntegrator(1)
        # Just run one move to make sure it doesn't crash
        neq_gcmc_system_sampler.deletionMove()
        # Reset the compound integrator back to the equilibrium integrator.
        # Bug fix: this previously reset the *sphere* sampler's integrator
        # (neq_gcmc_sphere_sampler), leaving this sampler on the NCMC integrator.
        neq_gcmc_system_sampler.compound_integrator.setCurrentIntegrator(0)
        return None
|
15,513 | b44dad2673237bd414afe746e0223dccc94dd4fb | def spiralPrint(m, n, a) :
    # Print the entries of the square matrix `a` in a clockwise spiral order,
    # space-separated on one line.
    # NOTE(review): elements are accessed as a[i][k] / a[n-1][i] etc. (column
    # index first), so this prints the spiral of the TRANSPOSE of `a`. For the
    # square matrix read below this is well-defined, but confirm the intended
    # orientation against the expected output.
    k = 0; l = 0
    ''' k - starting row index
        m - ending row index
        l - starting column index
        n - ending column index
        i - iterator '''
    while (k < m and l < n) :
        # Top boundary of the remaining sub-matrix (in the transposed view)
        for i in range(l, n) :
            print(a[i][k], end = " ")
        k += 1
        # Right boundary, top to bottom
        for i in range(k, m) :
            print(a[n-1][i], end = " ")
        n -= 1
        # Bottom boundary, right to left (only if a row remains)
        if ( k < m) :
            for i in range(n - 1, (l - 1), -1) :
                print(a[i][m-1], end = " ")
            m -= 1
        # Left boundary, bottom to top (only if a column remains)
        if (l < n) :
            for i in range(m - 1, k - 1, -1) :
                print(a[l][i], end = " ")
            l += 1
# Read an n x n integer matrix from stdin (size on the first line, then one
# row per line) and print it in spiral order.
p = int(input())
grid = [list(map(int, input().split())) for _ in range(p)]
spiralPrint(p, p, grid)
|
15,514 | 202f9f723e0b5f882bb1c1e04aed778b1c9cf0d7 |
from django.conf import settings
from django.conf.urls import include, patterns, url
from django.conf.urls.static import static
from django.contrib.sitemaps.views import sitemap
from django.views.generic.base import RedirectView, TemplateView
from feincms.module.page.sitemap import PageSitemap
from leonardo.views.page import Handler
from leonardo.views.defaults import trigger_error
from leonardo.views.select2 import Select2ResponseView
from constance import config
from .base import leonardo
# Public API of this URLconf: the custom error handlers Django looks up by name.
__all__ = ['handler400', 'handler403', 'handler404', 'handler500']
# NOTE(review): this module uses django.conf.urls.patterns(), which was
# deprecated in Django 1.8 and removed in 1.10 — this URLconf targets a legacy
# Django release. Verify against the project's pinned Django version.
# Core URLs: horizon-contrib helpers and the select2 autocomplete endpoint.
urlpatterns = [
    url(r'^contrib/', include('horizon_contrib.urls'),),
    url(r"^fields/auto.json$",
        Select2ResponseView.as_view(), name="django_select2-json"),
]
# Optional authentication URLs (enabled unless LEONARDO_AUTH is False).
if getattr(settings, 'LEONARDO_AUTH', True):
    urlpatterns += patterns('',
                            url(r'^auth/',
                                include('leonardo.module.leonardo_auth.auth_urls')),
                            )
# URLs contributed by leonardo modules.
urlpatterns += leonardo.urlpatterns
# Horizon dashboard URLs (enabled unless HORIZON_ENABLED is False).
if getattr(settings, 'HORIZON_ENABLED', True):
    import horizon
    urlpatterns += patterns('',
                            url(r'', include(horizon.urls)),
                            )
# sentry verification url
urlpatterns += patterns('',
                        url(r'^sentry-debug/$', trigger_error)
                        )
# translation / i18n endpoints (JS catalog, language switching)
urlpatterns += patterns('',
                        url(r'^i18n/js/(?P<packages>\S+?)/$',
                            'django.views.i18n.javascript_catalog',
                            name='jsi18n'),
                        url(r'^i18n/setlang/$',
                            'django.views.i18n.set_language',
                            name="set_language"),
                        url(r'^i18n/', include('django.conf.urls.i18n'))
                        )
# secure media served through the leonardo media server
urlpatterns += patterns('',
                        url(r'^', include('leonardo.module.media.server.urls'))
                        )
# Serve static/media files directly (development, or when explicitly enabled).
if settings.DEBUG or getattr(settings, 'LEONARDO_SERVE_STATIC', False):
    urlpatterns += static(settings.STATIC_URL,
                          document_root=settings.STATIC_ROOT)
    urlpatterns += static(settings.MEDIA_URL,
                          document_root=settings.MEDIA_ROOT)
    # Debug toolbar is best-effort: only mounted when the package is installed.
    try:
        import debug_toolbar
        urlpatterns += patterns('',
                                url(r'^__debug__/', include(debug_toolbar.urls)),
                                )
    except ImportError:
        pass
# FeinCMS page handler: catches the root and every remaining path (keep last).
handler = Handler.as_view()
urlpatterns += patterns(
    '',
    url(r'^$', handler, name='feincms_home'),
    url(r'^(.*)/$', handler, name='feincms_handler'),
)
sitemaps = {
    'pages': PageSitemap(extended_navigation=True),
}
# Use the dev robots.txt (disallow all) outside of production-like settings.
if not settings.DEBUG or not getattr(settings, 'LEONARDO_PREVIEW', False):
    robots_template = 'robots.txt'
else:
    robots_template = 'robots_dev.txt'
urlpatterns += patterns('',
                        (r'^sitemap\.xml$', sitemap, {'sitemaps': sitemaps}),
                        (r'^favicon\.ico$', RedirectView.as_view(
                            **{'permanent': True, 'url': config.FAVICON_PATH}),),
                        (r'^robots\.txt$',
                         TemplateView.as_view(template_name=robots_template)),
                        (r'^crossdomain\.xml$',
                         TemplateView.as_view(template_name='crossdomain.xml')),
                        )
# Error handlers, overridable via settings (dotted-path strings).
handler400 = getattr(settings, "HANDLER_400",
                     'leonardo.views.defaults.bad_request')
handler403 = getattr(settings, "HANDLER_403",
                     'leonardo.views.defaults.permission_denied')
handler404 = getattr(settings, "HANDLER_404",
                     'leonardo.views.defaults.page_not_found')
handler500 = getattr(settings, "HANDLER_500",
                     'leonardo.views.defaults.server_error')
|
15,515 | 2ea15cf44b4fd5622fc0e931fe4ed287652c1b6c | from abc import ABC, abstractmethod
import torch
from .metrics import nltk_bleu
import numpy as np
import os
import sys
from .useful_utils import string_split_v3, string_split_v1, chunks
import pytrec_eval
import json
import subprocess
import csv
import re
import ast
from tqdm.auto import tqdm
from .bleu_score import compute_bleu
class Experiment(ABC):
    """
    Abstract base class for an evaluation experiment over input/target pairs.
    Subclasses implement evaluate(); save()/load() persist experiments via torch.
    """
    def __init__(self, task_data):
        """
        task_data: [(str, str)]: input/target pairs for translation evaluation.
        """
        self.task_data = task_data
    @abstractmethod
    def evaluate(self, prediction_fn):
        """
        This function should compute all relevant metrics to the task.
        prediction_fn: (inp) -> (pred): an end-to-end prediction function from any model.
        returns: dict: metrics
        """
        pass
    def save(self, path):
        """
        Saves the entire object ready to be loaded with Experiment.load().
        """
        torch.save(self, path)
    @staticmethod
    def load(path):
        """
        Loads a pre-existing experiment saved with save().
        Bug fix: the docstring always said "STATIC METHOD", but without the
        @staticmethod decorator calling it on an *instance* passed `self` as
        `path`. The decorator makes both Experiment.load(p) and exp.load(p) work.
        """
        return torch.load(path)
class TranslationExperiment(Experiment):
    """
    Experiment computing (sentence-averaged) BLEU for a translation model.
    """
    def __init__(self, task_data, src_splitter=string_split_v1, tgt_splitter=string_split_v1):
        """
        task_data: [(str, str)]: input/target pairs.
        src_splitter/tgt_splitter: tokenisers applied before BLEU scoring.

        >>> from src.Experiments import TranslationExperiment
        >>> translation_experiment = TranslationExperiment(validation_pairs)
        >>> def simple_translate(src):
        >>>     return "return output"
        >>> translation_experiment.evaluate(simple_translate)
        {'BLEU': 1.4384882092392364e-09}
        """
        super().__init__(task_data)
        self.src_splitter = src_splitter
        self.tgt_splitter = tgt_splitter
    def evaluate(self, prediction_fn, save_dir=None, save_name="translation_eval.txt", batched=None):
        """
        Produces evaluation scores and (optionally) saves per-example results to a file.
        Tokenisation uses the configured splitters, so non-spaced text counts as one token.
        prediction_fn: (str)->(str), or [str]->[str] when `batched` is set
        save_dir: str or None: folder to save the file (no file written when None)
        save_name: str: name of file
        batched: int or None: chunk size to feed the prediction function
        returns: dict: {"BLEU": average sentence BLEU}

        Bug fix: this module imports `from tqdm.auto import tqdm`, so `tqdm` is
        already the progress-bar class; the previous `tqdm.tqdm(...)` calls
        raised AttributeError.
        """
        if batched:
            src_sents = [src for (src, tgt) in self.task_data]
            chunked_sents = list(chunks(src_sents, batched))
            predictions = [prediction_fn(sents) for sents in tqdm(chunked_sents, desc="predicting", total=len(chunked_sents))]
            predictions = [val for sublist in predictions for val in sublist]  # flattening
        else:
            predictions = [prediction_fn(src) for (src, tgt) in tqdm(self.task_data, desc="predicting")]
        # BLEU calculation: one sentence-level score per pair, then averaged
        BLEU_scores = []
        for (src, tgt), pred in tqdm(list(zip(self.task_data, predictions)), desc="calculating bleu"):
            BLEU_score = nltk_bleu(self.tgt_splitter(tgt), self.tgt_splitter(pred))
            BLEU_scores.append(BLEU_score)
        total_BLEU = np.average(BLEU_scores)
        # Write the per-example breakdown to file
        if save_dir != None:
            save_path = os.path.join(save_dir, save_name)
            print(f"saving translation eval to file: {save_path}")
            with open(save_path, "w", encoding="utf-8") as out_fp:
                for (src, tgt), pred, BLEU in zip(self.task_data, predictions, BLEU_scores):
                    out_fp.write("SRC :" + src + "\n")
                    out_fp.write("TGT :" + tgt + "\n")
                    out_fp.write("PRED :" + pred + "\n")
                    out_fp.write("BLEU :" + str(BLEU) + "\n")
                    out_fp.write("\n")
                out_fp.write("\n\n| EVALUATION | BLEU: {:5.2f} |\n".format(total_BLEU))
        print("| EVALUATION | BLEU: {:5.3f} |".format(total_BLEU))
        return {"BLEU": total_BLEU}
class CAsT_experiment(Experiment):
    """
    Retrieval experiment over TREC CAsT-style topics, scored with pytrec_eval.
    """
    def __init__(self, topics):
        '''
        topics: [(context:[q_ids], q_id, q_rel:[d_ids])]
        '''
        self.topics = topics
    def evaluate(self, prediction_fn, save_dir=None, save_name="translation_eval.txt", hits=100):
        """
        prediction_fn: (topic, hits) -> [(d_id, score)]: retrieval function.
        hits: int: number of documents to request per topic.
        returns: (aggregate metrics dict, per-query results dict)

        Bug fix: `hits` was previously ignored — the call hard-coded hits=100.
        save_dir/save_name are currently unused; kept for interface compatibility.
        """
        full_q_rels = {}
        run = {}
        for topic in self.topics:
            pred_d_ids = prediction_fn(topic, hits=hits)
            context, q_id, q_rels = topic
            # Binary relevance: every judged document counts as relevant
            full_q_rels[q_id] = {d_id: 1 for d_id in q_rels}
            run[q_id] = {d_id: score for (d_id, score) in pred_d_ids}
        evaluator = pytrec_eval.RelevanceEvaluator(full_q_rels, {'map', 'ndcg'})
        results = evaluator.evaluate(run)
        aggregate = self.dict_mean(list(results.values()))
        return aggregate, results
    def dict_mean(self, dict_list):
        """Average a list of metric dicts key-wise (all dicts share the same keys)."""
        mean_dict = {}
        for key in dict_list[0].keys():
            mean_dict[key] = sum(d[key] for d in dict_list) / len(dict_list)
        return mean_dict
class TREC_Eval_Command_Experiment():
    # Scores retrieval runs by shelling out to the official `trec_eval` binary
    # and parsing its per-query output back onto the samples.
    def __init__(self, trec_eval_command='trec_eval -q -c -M1000 -m ndcg_cut.3,5,10,15,20,100,1000 -m all_trec qRELS RUN_FILE',
                 relevant_metrics=['ndcg_cut_3', 'ndcg_cut_5', 'ndcg_cut_1000', 'map_cut_1000', 'recall_500', 'recall_1000'],
                 q_rel_file='datasets/TREC_CAsT/2020qrels.txt'):
        '''
        This is an experiment transform that uses the official trec_eval command to compute scores for each query
        and return valid results according to the command specified.

        trec_eval_command: str: command template; the literal placeholders
            `qRELS` and `RUN_FILE` are substituted at call time.
        relevant_metrics: [str]: metric names to keep from trec_eval's output.
        q_rel_file: str: path to the TREC qrels file.
        '''
        self.trec_eval_command = trec_eval_command
        self.relevant_metrics = relevant_metrics
        self.q_rel_file = q_rel_file
        # The samples are first exported to a temporary RUN file for trec_eval.
        self.temp_run_file = '/tmp/temp_run_by_carlos.run'
        self.run_file_exporter = RUN_File_Transform_Exporter(self.temp_run_file, model_name='temp_model_by_carlos')
    def __call__(self, samples):
        '''
        samples: [dict]: [{'q_id':"xxx", 'search_results':[("MARCO_xxx", 0.63)...]},...]
        returns: [dict]: [{'q_id':"xxx", 'search_results':[("MARCO_xxx", 0.63)...], 'ndcg_cut_3':0.33, 'ndcg_cut_5'...},...]
        '''
        # Export the run, substitute the placeholders, and run trec_eval via the shell.
        # NOTE(review): os.system builds a shell string from the configured paths —
        # only safe with trusted configuration; consider subprocess.run([...]).
        self.run_file_exporter(samples)
        resolved_command = self.trec_eval_command.replace('qRELS', self.q_rel_file).replace('RUN_FILE', self.temp_run_file)
        print(f'Running the following command: {resolved_command} > /tmp/temp_run.eval')
        os.system(f'{resolved_command} > /tmp/temp_run.eval')
        # Parse trec_eval's whitespace-separated "metric q_id score" rows,
        # keeping only the configured metrics.
        with open('/tmp/temp_run.eval', 'r') as eval_f:
            eval_results = {}
            for row in eval_f:
                if not any([metric in row for metric in self.relevant_metrics]):
                    continue
                metric, q_id, score = row.split()
                if q_id not in eval_results:
                    eval_results[q_id] = {}
                eval_results[q_id][metric] = float(score)
        # Attach the parsed scores to each sample; skip queries with no qrels.
        for sample in samples:
            if sample['q_id'] not in eval_results:
                print(f"q_rel missing for q_id {sample['q_id']}. No scores added to sample")
                continue
            sample.update(eval_results[sample['q_id']])
        return samples
class Ranking_Experiment():
    """
    Scores ranked retrieval results against relevance judgements using
    pytrec_eval, attaching per-query metrics to each sample.
    """
    def __init__(self, q_rels, save_dir=None, save_name="rerank_eval.run"):
        '''
        q_rels: dict: {'q_id':[d_id, d_id,...],...}
        save_dir/save_name are accepted for interface compatibility.
        '''
        # Binary relevance: every judged document gets relevance 1
        pytrec_q_rels = {q_id: {d_id: 1 for d_id in d_ids}
                         for q_id, d_ids in q_rels.items()}
        self.evaluator = pytrec_eval.RelevanceEvaluator(
            pytrec_q_rels, {'map', 'ndcg_cut_3', 'set_recall', 'recip_rank'})
    def dict_mean(self, dict_list):
        """Average a list of metric dicts key-wise (all dicts share the same keys)."""
        count = len(dict_list)
        return {key: sum(d[key] for d in dict_list) / count
                for key in dict_list[0].keys()}
    def __call__(self, samples):
        '''
        samples: [dict]: [{'q_id':"xxx", 'search_results':[("MARCO_xxx", 0.63)...]},...]
        returns: dict: metrics averaged over all queries (samples are updated in place)
        '''
        pytrec_run = {sample_obj['q_id']: dict(sample_obj['search_results'])
                      for sample_obj in samples}
        results = self.evaluator.evaluate(pytrec_run)
        # Attach each query's metrics back onto its sample
        for sample_obj, result in zip(samples, results.values()):
            sample_obj.update(result)
        return self.dict_mean(list(results.values()))
class Sequence_BLEU_Experiment():
    """
    Evaluates sequence similarity (BLEU-style metrics) between predicted and
    target sequences, attaching per-sample scores.
    """
    def __init__(self, fields={}, debug=True):
        '''
        fields: dict: overrides for the sample-dict keys holding the predicted
        and target sequences.
        debug: bool: print corpus/average BLEU on every __call__.
        '''
        self.fields = {'predicted_seq': 'predicted_seq', 'target_seq': 'target_seq'}
        self.debug = debug
        self.fields.update(fields)
    def __call__(self, samples):
        '''
        samples: [dict]: [{'target_seq':"taget text", 'predicted_seq':"pred text"},...]
        returns: [dict]: [{'target_seq':"taget text", 'predicted_seq':"pred text", "BELU":0.6},...]
        '''
        pred_key = self.fields['predicted_seq']
        tgt_key = self.fields['target_seq']
        for sample_obj in samples:
            # An empty prediction still needs one (empty) token for nltk_bleu
            pred_tokens = self.tokenize_for_bleu_eval(sample_obj[pred_key]) or ['']
            refrence_tokens = self.tokenize_for_bleu_eval(sample_obj[tgt_key])
            sample_obj["nltk_BLEU"] = nltk_bleu(refrence_tokens, pred_tokens)
        if self.debug:
            references = [[self.tokenize_for_bleu_eval(s[tgt_key])] for s in samples]
            hypotheses = [self.tokenize_for_bleu_eval(s[pred_key]) for s in samples]
            corpus_bleu = compute_bleu(references, hypotheses, smooth=False)[0]
            nltk_BLEU = np.average([s["nltk_BLEU"] for s in samples])
            print(f'corpus_official_BLEU: {corpus_bleu}')
            print(f'nltk_BLEU: {nltk_BLEU}')
        return samples
    def overall(self, samples):
        """Score all samples and return the corpus-level and averaged sentence BLEU."""
        samples = self(samples)
        references = [[self.tokenize_for_bleu_eval(s[self.fields['target_seq']])] for s in samples]
        hypotheses = [self.tokenize_for_bleu_eval(s[self.fields['predicted_seq']]) for s in samples]
        corpus_bleu = compute_bleu(references, hypotheses, smooth=False)[0]
        nltk_BLEU = np.average([s["nltk_BLEU"] for s in samples])
        return {'nltk_BLEU': nltk_BLEU, 'corpus_BLEU': corpus_bleu}
    def tokenize_for_bleu_eval(self, code):
        """
        The tokenizer that we use for code submissions, from Wang Ling et al.,
        Latent Predictor Networks for Code Generation (2016).
        @param code: string containing a code snippet
        @return: list of code tokens
        """
        # Space out punctuation, split camelCase, collapse whitespace,
        # then normalise both quote characters to backticks.
        code = re.sub(r'([^A-Za-z0-9_])', r' \1 ', code)
        code = re.sub(r'([a-z])([A-Z])', r'\1 \2', code)
        code = re.sub(r'\s+', ' ', code)
        code = code.replace('"', '`')
        code = code.replace('\'', '`')
        return [tok for tok in code.split(' ') if tok]
class Compilability_Experiment():
    """
    An experiment to evaluate the validity of a sequence as actual compilable
    code (here: Python 3, checked by parsing with ast).
    """
    def __init__(self, fields={}):
        '''
        fields: dict: override for the sample-dict key holding the code string
        (default key: 'code').
        '''
        self.fields = {'code_field': 'code'}
        self.fields.update(fields)
    def __call__(self, samples):
        '''
        samples: [dict]: [{'code':'print("foo")'},...]
        returns: [dict]: [{'code':'print("foo")', 'compiles':1},...]
        '''
        for sample_obj in samples:
            # Look up the code outside the try so a missing key raises loudly
            # instead of being silently scored as "does not compile".
            code = sample_obj[self.fields['code_field']]
            try:
                ast.parse(code)
                sample_obj['compiles'] = 1
            # Bug fix: was a bare `except:`, which also swallowed
            # KeyboardInterrupt/SystemExit and data errors. ast.parse raises
            # SyntaxError for invalid code, ValueError for e.g. null bytes,
            # and TypeError for non-string input.
            except (SyntaxError, ValueError, TypeError):
                sample_obj['compiles'] = 0
        return samples
    def overall(self, samples):
        """Score all samples and return the fraction that parse successfully."""
        samples = self(samples)
        compilability_score = np.average([s["compiles"] for s in samples])
        return {'compilability_score': compilability_score}
class RUN_File_Transform_Exporter():
    """
    A Transform Exporter that writes search-engine samples out as a
    TREC-format RUN file.
    """
    def __init__(self, run_file_path, model_name='model_by_carlos'):
        '''
        run_file_path: str: destination path of the RUN file.
        model_name: str: run tag written in the last column of every row.
        '''
        self.run_file_path = run_file_path
        self.model_name = model_name
    def __call__(self, samples):
        '''
        samples: [dict]: [{'q_id':"xxx", 'search_results':[("MARCO_xxx", 0.63)...]},...]
        '''
        total_samples = 0
        with open(self.run_file_path, 'w') as run_file:
            for sample_obj in tqdm(samples, desc='Writing to RUN file', leave=False):
                # TREC runs are rank-ordered: best score first, ranks start at 1
                ranked = sorted(sample_obj['search_results'], key=lambda hit: hit[1], reverse=True)
                for rank, (d_id, score) in enumerate(ranked, start=1):
                    total_samples += 1
                    run_file.write(f"{sample_obj['q_id']} Q0 {d_id} {rank} {score} {self.model_name}\n")
        print(f"Successfully written {total_samples} samples from {len(samples)} queries run to: {self.run_file_path}")
15,516 | f18db4d3ffe0a545f087b8d6c9e422736bb652b3 | import networkx as nx
import matplotlib.pyplot as plt
from collections import Counter
import numpy as np
import scipy.linalg as la
class plot():
def degree_distribution(self,filename, scale='log'):
G = nx.read_gexf(filename)
data = [G.degree(n) for n in G.nodes()]
data = dict(Counter(data))
fig = plt.figure()
ax = fig.add_subplot(1, 1, 1)
plt.grid()
plt.scatter(list(data.keys()), list(data.values()))
if scale == 'log':
ax.set_yscale('log')
ax.set_xscale('log')
plt.xlabel("Degree(d)")
plt.ylabel("Frequency")
plt.title('Degree Distribution')
def cumulative_dd(self,filename):
G = nx.read_gexf(filename)
M = nx.to_scipy_sparse_matrix(G)
degrees = M.sum(0).A[0]
degree_distribution = np.bincount(degrees)
s = float(degree_distribution.sum())
cdf = degree_distribution.cumsum(0) / s
ccdf = 1 - cdf
fig = plt.figure()
ax = fig.add_subplot(1, 1, 1)
plt.grid()
plt.plot(range(len(ccdf)), ccdf)
ax.set_yscale('log')
ax.set_xscale('log')
plt.ylabel('P(x>=d)')
plt.xlabel('Degree(d) [vertices]')
plt.title("Cumulative Degree Distribution")
def assortativity(self,filename):
G = nx.read_gexf(filename)
temp = nx.average_neighbor_degree(G)
avg_neigh = list(temp.values())
degree = [G.degree(n) for n in G.nodes()]
plt.scatter(degree, avg_neigh, s=0.75)
plt.xlabel("Degree(d)")
plt.ylabel("Average Neighbour Degree")
plt.xscale('log')
plt.yscale('log')
plt.title('Assortativity')
plt.show()
def gini(self,arr):
sorted_arr = arr.copy()
sorted_arr.sort()
n = arr.size
coef_ = 2. / n
const_ = (n + 1.) / n
weighted_sum = sum([(i + 1) * yi for i, yi in enumerate(sorted_arr)])
return coef_ * weighted_sum / (sorted_arr.sum()) - const_
def closest_node(self,node1, node2):
node2 = np.asarray(node2)
deltas = node2 - node1
dist_2 = np.einsum('ij,ij->i', deltas, deltas)
return np.argmin(dist_2)
def lorenz_curve(self,filename):
G = nx.read_gexf(filename)
temp_deg = [G.degree(n) for n in G.nodes()]
temp_deg.sort()
X = np.array(temp_deg)
X_lorenz = X.cumsum() / X.sum()
X_lorenz = np.insert(X_lorenz, 0, 0)
X_lorenz[0], X_lorenz[-1]
fig, ax = plt.subplots(figsize=[6, 6])
ax.plot(np.arange(X_lorenz.size) / (X_lorenz.size - 1), X_lorenz, color='darkgreen')
ax.plot([0, 1], [0, 1], color='k', linestyle=":")
ax.plot([1, 0], [0, 1], color='k', linestyle=":")
y_value = ['{:,.0f}'.format(x * 100) + '%' for x in ax.get_yticks()]
x_value = ['{:,.0f}'.format(x * 100) + '%' for x in ax.get_xticks()]
ax.set_yticklabels(y_value)
ax.set_xticklabels(x_value)
lor = []
temp = np.arange(X_lorenz.size) / (X_lorenz.size - 1)
lor.append(list(temp))
temp = X_lorenz
lor.append(list(temp))
lor = np.array(lor)
lor = lor.transpose()
opp_d = []
temp = np.arange(0, len(lor), 1)
temp = [(i / len(lor)) for i in temp]
opp_d.append(list(temp))
temp.reverse()
opp_d.append(temp)
opp_d = np.array(opp_d)
opp_d = opp_d.transpose()
int_point = lor[self.closest_node(opp_d, lor)]
ax.scatter(int_point[0], int_point[1], color='red')
ax.set_xlabel("Share of nodes with smallest degree")
ax.set_ylabel("Share of edges")
ax.annotate("P = {:,.2f}%".format(int_point[1] * 100),
xy=(int_point[0], int_point[1]), xycoords='data',
xytext=(0.8, 0), textcoords='data',
arrowprops=dict(arrowstyle="->",
connectionstyle="arc3"),
)
ax.set_title("Lorenz Curve")
ax.text(0.25, 0.2, "G = {:,.2f}%".format(self.gini(X) * 100))
    def spectral_plot(self, filename):
        """Plot histograms of the eigenvalue spectra of the adjacency (A),
        normalized Laplacian (N) and Laplacian (L) matrices of the graph in
        the GEXF file *filename*, side by side on one figure.
        """
        G = nx.read_gexf(filename)
        A = nx.adjacency_matrix(G)
        N = nx.normalized_laplacian_matrix(G)
        L = nx.laplacian_matrix(G)
        # Keep only real parts; round(..., -1) buckets each eigenvalue to the
        # nearest multiple of 10 -- NOTE(review): very coarse, confirm intended.
        A_eig = la.eigvals(A.toarray())
        A_eig = [round(i.real, -1) for i in A_eig]
        N_eig = la.eigvals(N.toarray())
        N_eig = [round(i.real, -1) for i in N_eig]
        L_eig = la.eigvals(L.toarray())
        L_eig = [round(i.real, -1) for i in L_eig]
        f = plt.figure(figsize=(12, 3))
        ax1 = f.add_subplot(131)
        ax2 = f.add_subplot(132)
        ax3 = f.add_subplot(133)
        # One histogram per matrix: x-axis mirrored about zero, log-scaled counts.
        ax1.hist(A_eig)
        l1 = ax1.get_xlim()
        ax1.set_xlim(-l1[1], l1[1])
        ax1.set_yscale('log')
        ax1.set_xlabel("Eigenvalue")
        ax1.set_ylabel('Frequency')
        ax1.set_title("Spec Dist of the eigenvalues of A")
        ax2.hist(N_eig)
        l2 = ax2.get_xlim()
        ax2.set_xlim(-l2[1], l2[1])
        ax2.set_yscale('log')
        ax2.set_xlabel("Eigenvalue")
        ax2.set_ylabel('Frequency')
        ax2.set_title("Spec Dist of the eigenvalues of N")
        ax3.hist(L_eig)
        l3 = ax3.get_xlim()
        ax3.set_xlim(-l3[1], l3[1])
        ax3.set_yscale('log')
        ax3.set_xlabel("Eigenvalue")
        ax3.set_ylabel('Frequency')
        ax3.set_title("Spec Dist of the eigenvalues of L")
15,517 | 0b6c0c1b6bd6e5a1f47d31163c5bca3d2ceb49cf | import json
import os
from datetime import datetime
from flask import Blueprint,request,current_app,send_from_directory
from flask_login import current_user,login_user,logout_user
from ..models.note import Note,File,Compliments
from ..extensions import db
from ..models.user import User
from ..utils import random_filename
note_bp = Blueprint('note',__name__)
@note_bp.before_request
def login_project():
    """Blueprint-wide guard: anonymous GETs of avatar/file routes pass
    through; every other request needs a logged-in user who is either
    real-name verified or an admin."""
    open_fragments = ['avatar', 'file']
    is_open_path = any(fragment in request.path for fragment in open_fragments)
    if request.method == 'GET' and is_open_path:
        return None
    result = {}
    if current_user.is_authenticated == False:
        result['code'] = -1
        result['msg'] = '您当前未登录!'
        return json.dumps(result)
    user = User.query.get(current_user.get_id())
    if user.is_verify == False and user.is_admin == False:
        result['code'] = -2
        result['msg'] = '请先实名制认证!'
        return json.dumps(result)
@note_bp.route('',methods=['POST'])
def release():
    """Create a note from the posted form fields and return its new id."""
    new_note = Note(
        title=request.form.get('title'),
        tag=request.form.get('tag'),
        content=request.form.get('content'),
        note_date=datetime.today(),
        user_id=int(current_user.get_id()),
    )
    db.session.add(new_note)
    db.session.commit()
    # The id only exists after the commit assigns the primary key.
    return json.dumps({
        'code': 0,
        'msg': '发布成功',
        'data': {'id': new_note.id},
    })
@note_bp.route('/published',methods=['GET'])
def my_published():
    """List the current user's own notes, newest first, each carrying a
    flag telling whether the user has liked it."""
    uid = int(current_user.get_id())
    notes = Note.query.filter_by(user_id=uid).order_by(Note.note_date.desc()).all()
    data = []
    if notes is not None:
        for item in notes:
            liked = Compliments.query.filter_by(note_id=item.id, user_id=uid).first() is not None
            data.append({
                "note_id": item.id,
                "title": item.title,
                "tag": item.tag,
                # Truncated preview; full text comes from the detail endpoint.
                "content": item.content[0:32] + '...',
                "compliments": item.compliments,
                "flag": liked,
                "note_date": item.note_date.strftime('%Y-%m-%d'),
            })
    return json.dumps({'code': 0, 'msg': 'success', 'data': data})
@note_bp.route('/search',methods=['GET'])
def search():
    """Search notes whose tag contains any whitespace-separated search word.

    Bug fix: the original did ``str(request.args.get('word'))``, which turns
    a missing parameter into the literal string "None" (so the always-true
    ``word != None`` check then matched notes tagged "None").  A missing or
    empty parameter now simply yields no matches.  Duplicate detection uses a
    set of note ids instead of an O(n) membership scan over ORM objects.
    """
    data = []
    word = request.args.get('word')
    if word:
        notes = Note.query.order_by(Note.note_date.desc()).all()
        seen_ids = set()  # notes already emitted, keyed by primary key
        for token in word.split(' '):
            for note in notes:
                if token in note.tag and note.id not in seen_ids:
                    seen_ids.add(note.id)
                    liked = Compliments.query.filter_by(
                        note_id=note.id, user_id=current_user.id).first() is not None
                    data.append({
                        "publisher_id": note.user_id,
                        'publisher_nickname': note.user.nickname,
                        'note_id': note.id,
                        'title': note.title,
                        'tag': note.tag,
                        'content': note.content,
                        "note_date": note.note_date.strftime('%Y-%m-%d'),
                        "compliments": note.compliments,
                        "flag": liked,
                    })
    return json.dumps({'code': 0, 'msg': "查找成功", 'data': data})
@note_bp.route('/<int:id>',methods=['GET'])
def note(id):
    """Return the full detail of one note plus whether the viewer liked it."""
    record = Note.query.get(id)
    liked = Compliments.query.filter_by(note_id=id, user_id=current_user.id).first() is not None
    payload = {
        "publisher_id": record.user_id,
        "title": record.title,
        "tag": record.tag,
        "content": record.content,
        "note_date": record.note_date.strftime('%Y-%m-%d'),
        "compliments": record.compliments,
        "flag": liked,
    }
    return json.dumps({'code': 0, 'msg': '查看成功', 'data': payload})
@note_bp.route('/upload',methods=['POST'])
def upload():
    """Store an uploaded file under a randomized name and return its record id."""
    uploaded = request.files.get('file')
    # Randomize the stored name to avoid collisions / unsafe client names.
    stored_name = random_filename(uploaded.filename)
    uploaded.save(os.path.join(current_app.config['FILE_PATH'], stored_name))
    record = File(filename=stored_name)
    db.session.add(record)
    db.session.commit()
    return json.dumps({'code': 0, 'msg': '上传成功', 'data': {'file_id': record.id}})
@note_bp.route('/file/<int:id>',methods=['GET'])
def get_file(id):
    """Serve a previously uploaded file by its database id."""
    record = File.query.get(id)
    return send_from_directory(current_app.config['FILE_PATH'], record.filename)
@note_bp.route('/categories',methods=['GET'])
def categories():
    """Return all distinct note tags, in first-seen order."""
    tags = []
    for item in Note.query.all():
        if item.tag not in tags:
            tags.append(item.tag)
    return json.dumps({'code': 0, 'msg': 'success', 'data': tags})
@note_bp.route('/index',methods=['GET'])
def index():
    """Paginated listing of the notes for one tag, newest first.

    Query parameters: ``tag``, 1-based ``page`` and ``each_page`` size.
    The response also carries ``length``, the total match count.
    """
    tag = request.args.get('tag')
    page = int(request.args.get('page'))
    each_page = int(request.args.get('each_page'))
    tagged = Note.query.filter_by(tag=tag)
    length = tagged.count()
    pagination = tagged.order_by(Note.note_date.desc()).paginate(page, per_page=each_page)
    data = []
    for note in pagination.items:
        liked = Compliments.query.filter_by(note_id=note.id, user_id=current_user.id).first() is not None
        data.append({
            "publisher_id": note.user_id,
            "publisher_nickname": note.user.nickname,
            "note_id": note.id,
            "title": note.title,
            # Truncated preview; the detail endpoint returns full content.
            "content": note.content[0:32] + '...',
            "note_date": note.note_date.strftime('%Y-%m-%d'),
            "compliments": note.compliments,
            'flag': liked,
        })
    return json.dumps({'code': 0, 'msg': '返回成功', 'data': data, 'length': length})
@note_bp.route('/edit',methods=['POST'])
def edit():
    """Replace a note's content and refresh its date to today."""
    target = Note.query.get(request.form.get('note_id'))
    target.content = request.form.get('content')
    target.note_date = datetime.today()
    db.session.commit()
    return json.dumps({'code': 0, 'msg': '修改成功'})
@note_bp.route('/compliments',methods=['POST'])
def compliments():
    """Like a note once per user; a second like from the same user is rejected."""
    note_id = request.form.get('note_id')
    uid = current_user.id
    existing = Compliments.query.filter_by(note_id=note_id, user_id=uid).first()
    if existing is not None:
        return json.dumps({'code': 1, 'msg': '你已经点过赞了'})
    # Record the like and bump the denormalized counter in one transaction.
    db.session.add(Compliments(note_id=note_id, user_id=uid))
    Note.query.get(note_id).compliments += 1
    db.session.commit()
    return json.dumps({'code': 0, 'msg': '点赞成功'})
@note_bp.route('/recomp',methods=['POST'])
def recomp():
    """Withdraw the current user's like on a note.

    Bug fix: the original unconditionally called ``db.session.delete(c)`` and
    decremented the counter, crashing when the user had never liked the note
    and allowing the counter to go negative.  That case now returns a clean
    error response and the counter is floored at zero.
    """
    note_id = request.form.get('note_id')
    uid = current_user.id
    c = Compliments.query.filter_by(note_id=note_id, user_id=uid).first()
    if c is None:
        return json.dumps({'code': 1, 'msg': '你还没有点过赞'})
    note = Note.query.get(note_id)
    if note.compliments > 0:
        note.compliments -= 1
    db.session.delete(c)
    db.session.commit()
    return json.dumps({'code': 0, 'msg': '取消点赞成功'})
15,518 | 9d34133cc54538b8e471cf9cb837f72f02253c24 | from argparse import ArgumentParser
from csv import DictReader, DictWriter
def get_args():
    """Parse the command line: input file, number of chunks, optional path."""
    parser = ArgumentParser(description='splits infile into n smaller chunks')
    parser.add_argument('-f', '--filename', help='file to import and split')
    parser.add_argument('-n', '--number', help='number of output files to split into')
    parser.add_argument('-p', '--path', help='path to file')
    return parser.parse_args()
def read_file(infile):
    """Read a CSV file and return (fieldnames, list of row dicts).

    Fix: the old mode ``'rU'`` is deprecated since Python 3.4 and was removed
    in Python 3.11; plain ``'r'`` is used instead.
    """
    with open(infile, 'r') as f:
        reader = DictReader(f)
        fields = reader.fieldnames
        data = [row for row in reader]
    return fields, data
def write_file(outfile, fields, data):
    """Write the row dicts *data* to *outfile* as CSV with header *fields*."""
    with open(outfile, 'w') as f:
        writer = DictWriter(f, fieldnames=fields)
        writer.writeheader()
        writer.writerows(data)
def split():
    """Split the CSV named on the command line into --number roughly equal chunks.

    NOTE(review): Python 2 code (``print`` statement; ``/`` here is integer
    division, which the chunk arithmetic relies on).  Also note that the
    ``path`` derived from ``--path`` is immediately clobbered by the
    hard-coded path below -- confirm which is intended.
    """
    args = get_args()
    split_num = int(args.number)
    infile = args.filename
    if args.path:
        path = args.path
    else:
        path = ''
    # path = '/Users/jcolazzi/Dropbox/BIP Production/candidates/reports/social/'
    # path = '/Users/jcolazzi/bip/candidate_classifier/web/srsplit/'
    path = '/Users/jcolazzi/bip/candidate_classifier/twitter/srsplit/'
    fields, data = read_file(path+infile)
    # x and y are indices for the m sized split files
    # R is the remainder and will be tacked on the final chunk
    m = len(data) / split_num
    R = len(data) % split_num
    x = 0
    y = x + m
    print 'SPLITS WILL BE LEN {}'.format(m)
    for i in range(split_num):
        outfile = 'SPLIT_{}_{}'.format(i, infile)
        write_file(path+outfile, fields, data[x:y])
        # Slide the [x, y) window to the next chunk.
        x = y
        y = x + m
        # NOTE(review): the remainder is added one iteration early
        # (i == split_num - 2) so it lands on the final chunk's upper bound.
        if i == (split_num - 2):
            y += R
if __name__ == '__main__':
    split()
|
15,519 | bb2194f0c4212588fcb4477352f5eb860a94818c | #Write a Python function that takes a list of words and returns the length of the
# longest one.
# Read N words from the user and report the longest one.
# Fixes: the original shadowed the builtin ``max`` with an int, and computed
# an unused ``length`` from the loop-leaked ``element`` variable.
words = []
word_count = int(input("enter number of words you want to enter:"))
for _ in range(0, word_count):
    words.append(str(input()))
print(words)
# First maximum wins on ties, matching the original scan order.
longest = words[0]
longest_len = len(words[0])
for word in words:
    if len(word) > longest_len:
        longest_len = len(word)
        longest = word
print("The word with the longest length is:")
print(longest)
|
15,520 | 6c276cd1e2eccd1784afeb7f4fdf8b2755629e73 | """
LICENCE
-------
Copyright 2013 by Kitware, Inc. All Rights Reserved. Please refer to
KITWARE_LICENSE.TXT for licensing information, or contact General Counsel,
Kitware, Inc., 28 Corporate Drive, Clifton Park, NY 12065.
"""
__author__ = 'dhan'
from WebUI import app
import nltk
NLTK_DATAPATH = app.config["NLTK_DATAPATH"]
nltk.data.path = [NLTK_DATAPATH]
from nltk.corpus import wordnet as wn
import flask
import json
import os
mod = flask.Blueprint('api_suggest', __name__)
@mod.route('/suggest_similar', methods=["GET"])
def suggest_similar():
    """Rank the static vocabulary by WordNet path similarity to the first
    word of the ``search`` query parameter, highest score first.

    Fixes: the vocabulary file handle was opened and never closed (now read
    inside a ``with`` block via ``json.load``), and the query word's synsets
    were recomputed for every vocabulary word (now hoisted out of the loop).
    """
    obj = {}
    obj["query"] = {}
    obj["query"]["search"] = flask.request.args.get("search", "")
    words = obj["query"]["search"].split(" ")
    obj["query"]["word"] = words[0]
    query_synsets = wn.synsets(words[0])
    scores = []
    vocab_path = os.path.join(app.config['STATIC_DIR'], "data/vocabulary.js")
    with open(vocab_path, "r") as fin:
        vocab = json.load(fin)
    # Score every vocabulary word against the query word's first synset.
    for aword in vocab:
        cand_synsets = wn.synsets(aword)
        if len(query_synsets) < 1 or len(cand_synsets) < 1:
            continue
        score = query_synsets[0].path_similarity(cand_synsets[0])
        scores.append([score, aword])
    obj["suggestions"] = sorted(scores, reverse=True)
    return flask.jsonify(obj)
|
15,521 | f928760c48ef5e3240041038dac436fdd0e4bb62 | from sner.classes import Rule, Token
from sner.scripts.ner import rulesperformance
def test_rulesperformance_rateRulePerformance():
    """A single fully-probable name should yield strength
    (1 + alpha) / (1 + k * alpha) and exactly one occurrence."""
    alpha = 0.1
    k = 2.0
    accept_threshold = 0.9
    rule = Rule(Rule.Type.spelling, 'bb', 1.0)
    name = Token('aa-aa', 'bb-bb', 'cc-cc', Token.Type.personal_name)
    name.name_probability = 1.0
    rulesperformance.rateRulePerformance({name}, rule, alpha, k, accept_threshold)
    assert rule.strength == (1.0 + alpha) / (1.0 + k * alpha)
    assert rule.occurrences == 1.0
|
15,522 | bc56475db6b8f77d7c3c61f23be6a2f742bb9609 |
import timeit
from collections import defaultdict
from itertools import product
import heapq
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.cm as cm
from mpl_toolkits.mplot3d import Axes3D
from matplotlib.ticker import LinearLocator, FormatStrFormatter
def timeit_compare(funcs, inputs, setups='pass', **kwargs):
    """Compares speed of functions across input conditions.
    'funcs' should be a list of functions expressed as strings.
    String substitution is done on each function while iterating
    over ranges of values in 'inputs' to compare speed.
    'inputs' should be an iterable range of values over which 'funcs'
    should be tested.
    'setups' can be 'pass' for no setup, 'main' to import each function
    from the local environment, or a list of setup strings that maps
    to the list 'funcs'.
    Other inputs:
    'number' - the number of times to run the function (a timeit option)
    'print_conditions' - if True, print the combinations of input conditions
    >>> functions = ['"-".join(str(n) for n in range({0}))',
    '"-".join([str(n) for n in range({0})])',
    '"-".join(map(str, range({0})))']
    >>> data = timeit_compare(functions, [range(10,101,10)], number=10000)
    testing "-".join(str(n) for n in range({0}))...
    testing "-".join([str(n) for n in range({0})])...
    testing "-".join(map(str, range({0})))...
    Returns a defaultdict that has function names as keys, results as values.
    """
    # NOTE(review): Python 2 module (bare ``print`` statements below).
    number = kwargs.get('number', 100000)
    print_conditions = kwargs.get('print_conditions', False)
    performance = defaultdict(list)
    if isinstance(setups, list):
        # user specifies their own list of setups corresponding to funcs
        pass
    elif setups == 'pass':
        # specify no setups for built-in functions like join
        setups = ['pass' for f in funcs]
    elif setups == 'main':
        # uniformly import all setups from the local environment
        fnames = [f[:f.find("(")] for f in funcs]
        setups = ["from __main__ import " + fname for fname in fnames]
    # convert the input ranges to a set of conditions
    conditions = get_conditions(inputs)
    if print_conditions:
        # NOTE(review): 'conditions' is a list here, so this string
        # concatenation raises TypeError -- confirm and fix separately.
        print "conditions: " + conditions
    # Helper binding one substituted function string to a timeit.Timer.
    def timer(func, value, setup):
        return timeit.Timer(func.format(*value), setup=setup)
    for i, f in enumerate(funcs):
        print "testing " + f + "..."
        for value in conditions:
            test = timer(f, value, setups[i])
            result = test.timeit(number=number)
            # Store each condition tuple followed by its measured time.
            performance[f].append(list(value) + [result])
    return performance
def get_conditions(inputs):
    """Expand per-variable ranges into the full list of test conditions.

    When the first element is itself iterable, the cartesian product of all
    ranges is returned; otherwise each scalar is wrapped in a list so the
    timer can index into it (bare tuples like (0,) would confuse it).
    """
    if hasattr(inputs[0], '__iter__'):
        # itertools.product enumerates every ordered combination.
        return list(product(*inputs))
    return [n if isinstance(n, (list, tuple)) else [n] for n in inputs]
# TODO: function to filter a large dataset
# TODO: filter and options to plot 2 and 3 grams in different colors
# on the same chart
def timeit_plot2D(data, xlabel='xlabel', title='title', **kwargs):
    """Plots the results from a defaultdict returned by timeit_compare.
    Each function will be plotted as a different series.
    timeit_compare may test many conditions, and the order of the conditions
    in the results data can be understood from the string substitutions
    noted in the keys of the defaultdict. By default series=0 means
    that the first series is plotted, but this can be changed to plot
    any of the testing conditions available.
    """
    # NOTE(review): Python 2 module -- the transposed rows are indexed with
    # v[series] below, which requires zip() to return a list (py3 zip
    # objects are not subscriptable).
    series = kwargs.get('series', 0)
    style = kwargs.get('style', 'line')
    size = kwargs.get('size', 500)
    ylabel = kwargs.get('ylabel', 'time')
    cmap = kwargs.get('cmap', 'rainbow')
    lloc = kwargs.get('lloc', 2)
    dataT = {}
    # set color scheme
    c = iter(plt.get_cmap(cmap)(np.linspace(0, 1, len(data))))
    # transpose the data from [x, y, z]... into ([x...], [y...], [z...])
    for k, v in data.items():
        dataT[k] = zip(*v)
    fig, ax = plt.subplots()
    for k, v in dataT.items():
        if style == 'scatter':
            ax.scatter(v[series], v[-1], s=size, c=next(c), alpha=.75)
        elif style == 'bubble':
            # Bubble area encodes the timing value over a 2-D condition grid.
            x, y, z = v[series[0]], v[series[1]], v[-1]
            ax.scatter(x, y, s=[size*i for i in z], c=next(c), alpha=.5)
        else:
            ax.plot(v[series], v[-1], c=next(c), lw=2)
    # TODO: BUG: no way to set other parameters manually (README fig2)
    ax.legend([substitute_titles(k,series) for k in dataT.keys()], loc=lloc)
    ax.set_xlabel(xlabel)
    ax.set_ylabel(ylabel)
    ax.set_title(title)
    ax.grid(True)
    return fig
def timeit_plot3D(data, xlabel='xlabel', ylabel='ylabel', **kwargs):
    """3D plot of timeit data, one chart per function.
    """
    # NOTE(review): Python 2 module -- relies on list-returning zip() and on
    # integer division when computing the reshape dimensions below.
    dataT = {}
    figs = []
    series = kwargs.get('series', (0,1))
    cmap = kwargs.get('cmap', cm.coolwarm)
    for k, v in data.items():
        dataT[k] = zip(*v)
        fig = plt.figure()
        ax = fig.gca(projection='3d')
        X, Y, Z = dataT[k][series[0]], dataT[k][series[1]], dataT[k][-1]
        # Grid extent of the two swept conditions.
        wide, tall = (max(X)-min(X)+1), (max(Y)-min(Y)+1)
        # Sampling interval = gap between the two largest distinct values.
        intervalX = max(X) - min(heapq.nlargest(2,set(X)))
        intervalY = max(Y) - min(heapq.nlargest(2,set(Y)))
        wide, tall = 1+wide/intervalX, 1+tall/intervalY
        X = np.reshape(X, [wide, tall])
        Y = np.reshape(Y, [wide, tall])
        # TODO: BUG: fix so that Z transposes with x & y reversed
        Z = np.reshape(Z, [wide, tall])
        surf = ax.plot_surface(X, Y, Z, rstride=1, cstride=1, cmap=cmap, linewidth=0, antialiased=False)
        ax.zaxis.set_major_locator(LinearLocator(10))
        ax.zaxis.set_major_formatter(FormatStrFormatter('%.02f'))
        ax.set_xlabel(xlabel)
        ax.set_ylabel(ylabel)
        ax.set_title(substitute_titles(k,series))
        fig.colorbar(surf, shrink=0.5, aspect=5)
        figs.append(fig)
    return figs
def timeit_heatmap(data, xlabel='xlabel', ylabel='ylabel', **kwargs):
    """Heatmap plot of timeit data, one chart per function.
    """
    # NOTE(review): Python 2 module -- relies on list-returning zip() and on
    # integer division when computing the reshape dimensions below.
    dataT = {}
    figs = []
    series = kwargs.get('series', (0,1))
    cmap = kwargs.get('cmap', cm.coolwarm)
    for k, v in data.items():
        dataT[k] = zip(*v)
        X, Y, Z = dataT[k][series[0]], dataT[k][series[1]], dataT[k][-1]
        # Image extents come straight from the swept condition ranges.
        left, right = min(X), max(X)
        bottom, top = min(Y), max(Y)
        extent = [left, right, bottom, top]
        wide, tall = (max(X)-min(X)+1), (max(Y)-min(Y)+1)
        # Sampling interval = gap between the two largest distinct values.
        intervalX = max(X) - min(heapq.nlargest(2,set(X)))
        intervalY = max(Y) - min(heapq.nlargest(2,set(Y)))
        if intervalX > 1:
            wide = 1 + wide/intervalX
        else:
            wide = 1
        if intervalY > 1:
            tall = 1 + tall/intervalY
        else:
            tall = 1
        # TODO: BUG: fix so that Z transposes with x & y series reversed
        Z = np.reshape(Z, [wide, tall])
        Z = list(zip(*Z)) # Z is transposed
        Z = [i for i in Z[::-1]] # Z is upside down
        fig, ax = plt.subplots()
        hmap = ax.imshow(Z, extent=extent, cmap=cmap, interpolation='nearest')
        fig.colorbar(hmap).set_label("time")
        ax.set_xlabel(xlabel)
        ax.set_ylabel(ylabel)
        ax.set_title(substitute_titles(k,series))
        figs.append(fig)
    return figs
def substitute_titles(label, series):
    """Replace '{i}' placeholders in *label* with axis letters x, y, z.

    *series* may be a tuple of placeholder indices (mapped in order to the
    axis names) or a single int (mapped to 'x').

    Fix: the original used a bare ``except:`` which also swallowed unrelated
    errors; only the TypeError raised by iterating a scalar is expected here.
    """
    ordered_axes = ["x", "y", "z"]
    try:
        for i, v in enumerate(series):
            label = label.replace("{" + str(v) + "}", ordered_axes[i])
    except TypeError:
        # *series* is a single index, not an iterable of them.
        label = label.replace("{" + str(series) + "}", ordered_axes[0])
    return label
|
15,523 | 52f6348ea45f17068f6e3369d7dd0e84b469e960 | from Util.MemoryDataInterpreter import MemoryDataInterpreter
class CreateMutexA:
    """Argument decoder for the Windows ``CreateMutexA`` API call."""

    # Number of bytes to read when recovering the mutex name string.
    DEFAULT_READ_SIZE = 50

    @staticmethod
    def get_arguments_list(vivisect_engine):
        """Return [lpMutexAttributes as hex, bInitialOwner as bool, mutex name]."""
        args = vivisect_engine.get_function_arguments(3)
        attrs = args[0]
        initial_owner = args[1]
        name_ptr = args[2]
        mutex_name = ''
        # A null pointer means the mutex is unnamed.
        if name_ptr != 0x0:
            raw = vivisect_engine.read_memory(name_ptr, CreateMutexA.DEFAULT_READ_SIZE)
            mutex_name = MemoryDataInterpreter.bytearray_to_ascii_string(raw)
        return [hex(attrs), bool(initial_owner), mutex_name]
15,524 | bbe5b4726b9c8f6572f848cfe1a6ff3c2477c9bd | class Plateau(object):
def __init__(self, plateau_size):
self.plateau_size = plateau_size
def set_robotic_rover(self, robotic_rover):
self.robotic_rover = robotic_rover
def move_robotic_rover(self, moves):
for move in moves:
if move == 'L' or move == 'R':
self.robotic_rover.rotate(move)
else: # M
self.robotic_rover.move() |
15,525 | 2b09074838f5d68152b3f15adc4b4440d9b2f756 | from django.urls import path
from . import views
# Namespace used for URL reversing, e.g. 'products:product_list'.
app_name = 'products'
# Product CRUD routes.  Detail/update/delete/image routes address a product
# by slug + numeric id; the category listing reuses the product list view.
urlpatterns = [
    path('',views.productlist,name='product_list'),
    path('add_product/', views.add_product, name='add_pro'),
    path('update_product/<str:slug>/<int:id>/',views.update_product, name='update_product'),
    path('add_product_image/<str:slug>/<int:id>/', views.add_image_product, name='add_product_image'),
    path('<str:category_slug>/',views.productlist,name = 'product_list_category'),
    path('details/<str:slug>/<int:id>/',views.productdetail,name = 'product_detail'),
    path('delete/<str:slug>/<int:id>/',views.delete_product,name = 'delete_product'),
]
|
15,526 | 08e51b5f380be7bc3a6f7d127cf0d721b9017901 | import os
import unittest
from eva_cttv_pipeline.evidence_string_generation import utilities
class GetResourceFileTest(unittest.TestCase):
    """utilities.get_resource_file for real and bogus packages."""

    def test_get_resource_file_existent(self):
        # A real package/resource pair should resolve to a truthy path.
        resolved = utilities.get_resource_file("eva_cttv_pipeline", "resources/json_schema")
        self.assertTrue(resolved)

    def test_get_resource_file_nonexistent(self):
        # A made-up package must yield None rather than raising.
        resolved = utilities.get_resource_file("not_a_real_package_39146", "not_a_real_file")
        self.assertEqual(resolved, None)
class ArgParserTest(unittest.TestCase):
    """Each CLI flag should land on the matching ArgParser attribute."""

    ignore = '/path/to/ignore/file'
    out = '/path/to/out/file'
    efo_map_file = '/path/to/efo/file'
    snp_2_gene_file = '/path/to/snp/to/gene/file'
    ot_schema_path = '/path/to/ot/schema/json'

    @classmethod
    def setUpClass(cls):
        # One parser instance built from a synthetic argv serves all tests.
        argv = ['clinvar_to_evidence_strings.py',
                '--ignore', cls.ignore,
                '--out', cls.out,
                '-e', cls.efo_map_file,
                '-g', cls.snp_2_gene_file,
                '--ot-schema', cls.ot_schema_path]
        cls.argparser = utilities.ArgParser(argv)

    def test_ignore(self):
        self.assertEqual(self.argparser.ignore_terms_file, self.ignore)

    def test_out(self):
        self.assertEqual(self.argparser.out, self.out)

    def test_efo_map_file(self):
        self.assertEqual(self.argparser.efo_mapping_file, self.efo_map_file)

    def test_snp_2_gene_file(self):
        self.assertEqual(self.argparser.snp_2_gene_file, self.snp_2_gene_file)

    def test_ot_schema_path(self):
        self.assertEqual(self.argparser.ot_schema, self.ot_schema_path)
class CheckDirExistsCreateTest(unittest.TestCase):
    """check_dir_exists_create should create a missing directory."""

    def test_create(self):
        target = "./test_tmp"
        utilities.check_dir_exists_create(target)
        # The directory must now exist; remove it so the test is repeatable.
        self.assertTrue(os.path.exists(target))
        os.rmdir(target)
|
15,527 | 7c3b24930327ed17ab07690cb67e2fd0316cf755 | # encoding: utf-8
from django.contrib.auth.decorators import login_required
from django.contrib.auth import authenticate, login, logout
from django.contrib.auth.models import User
from django.shortcuts import render_to_response
from django.conf import settings
from django.template import RequestContext
from django.utils import simplejson
from django.http import *
from django.utils.translation import ugettext_lazy as _
from django.utils.functional import Promise
from django.utils.encoding import force_unicode
from django.views.decorators.csrf import ensure_csrf_cookie
#from django.core.context_processors import csrf
from teacher.models import Teacher
from configurations.models import Configuration
import datetime
import json
class LazyEncoder(simplejson.JSONEncoder):
    """JSON encoder that renders Django lazy i18n strings as unicode."""

    def default(self, obj):
        # Lazy translation strings are Promise instances; force them to text.
        if isinstance(obj, Promise):
            return force_unicode(obj)
        return obj
def login_user(request):
    """Drop any current session, then authenticate a teacher from POST data.

    Only users with a Teacher row may log in; everyone else gets an error
    message on the login template.
    """
    logout(request)
    username = password = ''
    msg = ""
    if request.POST:
        username = request.POST['username']
        password = request.POST['password']
        account = authenticate(username=username, password=password)
        if account is None:
            msg = "No existe este usuario o la contraseña es incorrecta"
        elif account.is_active:
            try:
                # A missing Teacher row raises, falling through to the message.
                Teacher.objects.get(user=account)
                login(request, account)
                return HttpResponseRedirect('/aulas/lista/')
            except:
                msg = "Usuario debe ser un Docente"
    return render_to_response('login.html', {'msg': msg}, context_instance=RequestContext(request))
@login_required(login_url='/login/')
def inicio(request):
    """Render the landing page; only reachable when logged in."""
    return render_to_response('index.html', context_instance=RequestContext(request))
def logout_view(request):
    """End the session and bounce back to the login page."""
    logout(request)
    return HttpResponseRedirect("/login/")
def register(request):
    """Create a Teacher account from the registration form and log it in.

    NOTE(review): Python 2 code (bare ``print`` statements).  The bare
    ``except`` blocks double as control flow: a failed Teacher lookup means
    the username is free, and a failed date parse falls back to today.
    """
    registered = False
    msg = ""
    if request.method == 'POST':
        username = request.POST['username']
        email = request.POST['email']
        password = request.POST['password']
        first_name = request.POST['firstname']
        last_name = request.POST['lastname']
        gender = request.POST['gender']
        str_dob = request.POST['date_birth']
        #print str_dob, 'fecha nac'
        # Fall back to today's date when the submitted birth date is malformed.
        try:
            dob = datetime.datetime.strptime(str_dob,"%Y-%m-%d").date()
            print 'converti exitosamente'
        except:
            dob = datetime.datetime.now().date()
            print 'NOOO converti exitosamente'
        # EAFP uniqueness check: a successful lookup means the name is taken.
        try:
            t = Teacher.objects.get(user__username=username)
            msg = "Ya existe un usuario con este usuario"
        except:
            user = User.objects.create_user(username,email,password)
            user.is_staff = False
            user.is_active = True
            user.save()
            t = Teacher(
                name=first_name,
                last_name=last_name,
                date_of_birth=dob,
                gender=gender,
                user=user)
            t.save()
            # Every new teacher gets a default Configuration row.
            c = Configuration(teacher=t)
            c.save()
            # Authenticate the freshly created user and start the session.
            user = authenticate(username=username, password=password)
            if user is not None:
                if user.is_active:
                    login(request, user)
                    msg = "Gracias por tu registro"
                    registered = True
                    return HttpResponseRedirect("/registro_exitoso/")
    return render_to_response('register.html', {'registered': registered, 'msg':msg}, context_instance=RequestContext(request))
def user_validation(request):
    """AJAX endpoint: report whether a POSTed username already exists.

    Bug fix: ``message`` was only assigned inside the ``except`` branch, so
    every successful request raised a NameError when building the JSON
    response; it is now initialised up front.  Python 2 debug ``print``
    statements were removed and the builtin-shadowing local ``type`` was
    renamed (the response key "type" is unchanged).
    """
    exist_user = False
    message = ""
    status = "error"
    try:
        if request.POST:
            user_post = request.POST['user']
            try:
                #user = User.objects.get(username=user_post)
                exist_user = True
            except Exception:
                pass
            status = "success"
    except Exception:
        message = "Hubo un error"
    result = simplejson.dumps({
        "exist_user": exist_user,
        "message": message,
        "type": status,
    }, cls=LazyEncoder)
    return HttpResponse(result, mimetype='application/javascript')
def register_success(request):
    """Redirect usable sessions onward; otherwise render the success page."""
    try:
        # Touching the username first: an unusable user object raises here.
        username = request.user.username
        return HttpResponseRedirect("/aulas/lista/")
    except:
        username = ""
    return render_to_response('register_success.html', {'username': username},
                              context_instance=RequestContext(request))
def redirect_login(request):
    """Catch-all redirect to the login page."""
    return HttpResponseRedirect("/login/")
|
15,528 | 2a4f1c2ccfebd6feb26d19990a9ddc97e965be42 | #**************Bisection search square root *********///
#num = 25
#epsilon = 0.01
#step = 0.01
#start = 1.0
#end = num
#totalSteps = 0
#ans = (start + end) /2
#while abs(ans**2 - num) >= epsilon:
# if(ans**2 > num):
# end = ans
# ans = (start + end)/2
# totalSteps += 1
# else:
# start = ans
# ans = (start + end)/2
# totalSteps += 1
#print ("number of guesses are: ", str(totalSteps))
#print(str(ans), "is close to square root of ", str(num))
#**************Bisection search cube root *********///
#num = 27
#epsilon = 0.00001
##step = 0.01
#start = 1.0
#end = num
#totalSteps = 0
#ans = (start + end) /2
#while abs(ans**3 - num) >= epsilon:
# if(ans**3 > num):
# end = ans
# ans = (start + end)/2
# totalSteps += 1
# else:
# start = ans
# ans = (start + end)/2
# totalSteps += 1
#print ("number of guesses are: ", str(totalSteps))
#print(str(ans), "is close to cube root of ", str(num)) |
15,529 | ceffe5b756cfa7ff69650d53344706a525f3b604 | from django.contrib import admin
from .models import CouncilSampleModel
# Register your models here.
# Make CouncilSampleModel manageable through the Django admin interface.
admin.site.register(CouncilSampleModel)
|
15,530 | 9e83190ba101ff865ded4f3fba7ad806dcc6226b | 6691161,兵庫県,神戸市北区,道場町生野
6600000,兵庫県,尼崎市,
6610981,兵庫県,尼崎市,猪名寺
6600064,兵庫県,尼崎市,稲葉荘
6600055,兵庫県,尼崎市,稲葉元町
6600812,兵庫県,尼崎市,今福
6600096,兵庫県,尼崎市,扇町
6600076,兵庫県,尼崎市,大島
6600072,兵庫県,尼崎市,大庄川田町
6600075,兵庫県,尼崎市,大庄中通
6600077,兵庫県,尼崎市,大庄西町
6600063,兵庫県,尼崎市,大庄北
6600842,兵庫県,尼崎市,大高洲町
6610023,兵庫県,尼崎市,大西町
6600095,兵庫県,尼崎市,大浜町
6610022,兵庫県,尼崎市,尾浜町
6600862,兵庫県,尼崎市,開明町
6600821,兵庫県,尼崎市,梶ケ島
6610979,兵庫県,尼崎市,上坂部
6610014,兵庫県,尼崎市,上ノ島町
6610971,兵庫県,尼崎市,瓦宮
6610964,兵庫県,尼崎市,神崎町
6600884,兵庫県,尼崎市,神田中通
6600885,兵庫県,尼崎市,神田南通
6600883,兵庫県,尼崎市,神田北通
6600826,兵庫県,尼崎市,北城内
6600878,兵庫県,尼崎市,北竹谷町
6600804,兵庫県,尼崎市,北大物町
6600834,兵庫県,尼崎市,北初島町
6600806,兵庫県,尼崎市,金楽寺町
6600813,兵庫県,尼崎市,杭瀬寺島
6600814,兵庫県,尼崎市,杭瀬本町
6600822,兵庫県,尼崎市,杭瀬南新町
6600815,兵庫県,尼崎市,杭瀬北新町
6610977,兵庫県,尼崎市,久々知
6610978,兵庫県,尼崎市,久々知西町
6610983,兵庫県,尼崎市,口田中
6610013,兵庫県,尼崎市,栗山町
6600873,兵庫県,尼崎市,玄番南之町
6600872,兵庫県,尼崎市,玄番北之町
6610982,兵庫県,尼崎市,食満
6600074,兵庫県,尼崎市,琴浦町
6610972,兵庫県,尼崎市,小中島
6610024,兵庫県,尼崎市,三反田町
6600808,兵庫県,尼崎市,潮江
6610976,兵庫県,尼崎市,潮江
6600866,兵庫県,尼崎市,汐町
6610952,兵庫県,尼崎市,椎堂
6610975,兵庫県,尼崎市,下坂部
6600811,兵庫県,尼崎市,常光寺
6600881,兵庫県,尼崎市,昭和通
6600882,兵庫県,尼崎市,昭和南通
6600082,兵庫県,尼崎市,水明町
6600094,兵庫県,尼崎市,末広町
6600071,兵庫県,尼崎市,崇徳院
6610973,兵庫県,尼崎市,善法寺町
6600823,兵庫県,尼崎市,大物町
6610963,兵庫県,尼崎市,高田町
6600876,兵庫県,尼崎市,竹谷町
6610025,兵庫県,尼崎市,立花町
6600871,兵庫県,尼崎市,建家町
6610951,兵庫県,尼崎市,田能
6610002,兵庫県,尼崎市,塚口町
6610001,兵庫県,尼崎市,塚口本町
6600858,兵庫県,尼崎市,築地
6610965,兵庫県,尼崎市,次屋
6610046,兵庫県,尼崎市,常松
6610042,兵庫県,尼崎市,常吉
6600092,兵庫県,尼崎市,鶴町
6600867,兵庫県,尼崎市,寺町
6600083,兵庫県,尼崎市,道意町
6610961,兵庫県,尼崎市,戸ノ内町
6610003,兵庫県,尼崎市,富松町
6600851,兵庫県,尼崎市,中在家町
6600091,兵庫県,尼崎市,中浜町
6600802,兵庫県,尼崎市,長洲中通
6600801,兵庫県,尼崎市,長洲東通
6600807,兵庫県,尼崎市,長洲西通
6600803,兵庫県,尼崎市,長洲本通
6600073,兵庫県,尼崎市,菜切山町
6610974,兵庫県,尼崎市,若王寺
6600052,兵庫県,尼崎市,七松町
6600093,兵庫県,尼崎市,西海岸町
6610966,兵庫県,尼崎市,西川
6610047,兵庫県,尼崎市,西昆陽
6600865,兵庫県,尼崎市,西桜木町
6600845,兵庫県,尼崎市,西高洲町
6600054,兵庫県,尼崎市,西立花町
6600827,兵庫県,尼崎市,西大物町
6600805,兵庫県,尼崎市,西長洲町
6600893,兵庫県,尼崎市,西難波町
6600874,兵庫県,尼崎市,西本町
6600863,兵庫県,尼崎市,西本町北通
6600837,兵庫県,尼崎市,西松島町
6600868,兵庫県,尼崎市,西御園町
6600857,兵庫県,尼崎市,西向島町
6610962,兵庫県,尼崎市,額田町
6610967,兵庫県,尼崎市,浜
6600062,兵庫県,尼崎市,浜田町
6600843,兵庫県,尼崎市,東海岸町
6600864,兵庫県,尼崎市,東桜木町
6610953,兵庫県,尼崎市,東園田町
6600841,兵庫県,尼崎市,東高洲町
6600828,兵庫県,尼崎市,東大物町
6610011,兵庫県,尼崎市,東塚口町
6600051,兵庫県,尼崎市,東七松町
6600892,兵庫県,尼崎市,東難波町
6600832,兵庫県,尼崎市,東初島町
6600844,兵庫県,尼崎市,東浜町
6600824,兵庫県,尼崎市,東本町
6600831,兵庫県,尼崎市,東松島町
6600835,兵庫県,尼崎市,東向島東之町
6600856,兵庫県,尼崎市,東向島西之町
6600891,兵庫県,尼崎市,扶桑町
6600846,兵庫県,尼崎市,船出
6600087,兵庫県,尼崎市,平左衛門町
6600086,兵庫県,尼崎市,丸島町
6610026,兵庫県,尼崎市,水堂町
6610984,兵庫県,尼崎市,御園
6600861,兵庫県,尼崎市,御園町
6610985,兵庫県,尼崎市,南清水
6600825,兵庫県,尼崎市,南城内
6600875,兵庫県,尼崎市,南竹谷町
6610012,兵庫県,尼崎市,南塚口町
6600053,兵庫県,尼崎市,南七松町
6600833,兵庫県,尼崎市,南初島町
6610033,兵庫県,尼崎市,南武庫之荘
6600877,兵庫県,尼崎市,宮内町
6600084,兵庫県,尼崎市,武庫川町
6610044,兵庫県,尼崎市,武庫町
6610041,兵庫県,尼崎市,武庫の里
6610035,兵庫県,尼崎市,武庫之荘
6610031,兵庫県,尼崎市,武庫之荘本町
6610032,兵庫県,尼崎市,武庫之荘東
6610034,兵庫県,尼崎市,武庫之荘西
6610043,兵庫県,尼崎市,武庫元町
6610045,兵庫県,尼崎市,武庫豊町
6610021,兵庫県,尼崎市,名神町
6600085,兵庫県,尼崎市,元浜町
6610970,兵庫県,尼崎市,弥生ケ丘町
6600061,兵庫県,尼崎市,蓬川荘園
6600081,兵庫県,尼崎市,蓬川町
6620000,兵庫県,西宮市,
6620063,兵庫県,西宮市,相生町
6620862,兵庫県,西宮市,青木町
6691122,兵庫県,西宮市,青葉台
6638186,兵庫県,西宮市,上鳴尾町
6620925,兵庫県,西宮市,朝凪町
6620842,兵庫県,西宮市,芦原町
6620871,兵庫県,西宮市,愛宕山
6620946,兵庫県,西宮市,荒戎町
6638025,兵庫県,西宮市,荒木町
6620911,兵庫県,西宮市,池田町
6638137,兵庫県,西宮市,池開町
6620928,兵庫県,西宮市,石在町
6620074,兵庫県,西宮市,石刎町
6620932,兵庫県,西宮市,泉町
6620873,兵庫県,西宮市,一ケ谷町
6620975,兵庫県,西宮市,市庭町
6638002,兵庫県,西宮市,一里山町
6620972,兵庫県,西宮市,今在家町
6638214,兵庫県,西宮市,今津曙町
6638213,兵庫県,西宮市,今津上野町
6638221,兵庫県,西宮市,今津大東町
6638222,兵庫県,西宮市,今津久寿川町
6638229,兵庫県,西宮市,今津社前町
6638223,兵庫県,西宮市,今津巽町
6638227,兵庫県,西宮市,今津出在家町
6638225,兵庫県,西宮市,今津西浜町
6638212,兵庫県,西宮市,今津野田町
6638228,兵庫県,西宮市,今津二葉町
6638224,兵庫県,西宮市,今津真砂町
6638215,兵庫県,西宮市,今津水波町
6638226,兵庫県,西宮市,今津港町
6638211,兵庫県,西宮市,今津山中町
6620886,兵庫県,西宮市,上ケ原山田町
6620885,兵庫県,西宮市,上ケ原山手町
6620891,兵庫県,西宮市,上ケ原一番町
6620892,兵庫県,西宮市,上ケ原二番町
6620893,兵庫県,西宮市,上ケ原三番町
6620894,兵庫県,西宮市,上ケ原四番町
6620895,兵庫県,西宮市,上ケ原五番町
6620896,兵庫県,西宮市,上ケ原六番町
6620881,兵庫県,西宮市,上ケ原七番町
6620882,兵庫県,西宮市,上ケ原八番町
6620883,兵庫県,西宮市,上ケ原九番町
6620884,兵庫県,西宮市,上ケ原十番町
6638134,兵庫県,西宮市,上田中町
6638133,兵庫県,西宮市,上田東町
6638135,兵庫県,西宮市,上田西町
6620855,兵庫県,西宮市,江上町
6638143,兵庫県,西宮市,枝川町
6620085,兵庫県,西宮市,老松町
6620036,兵庫県,西宮市,大井手町
6638017,兵庫県,西宮市,大島町
6620054,兵庫県,西宮市,大谷町
6620836,兵庫県,西宮市,大畑町
6620957,兵庫県,西宮市,大浜町
6638023,兵庫県,西宮市,大森町
6638106,兵庫県,西宮市,大屋町
6620827,兵庫県,西宮市,岡田山
6620022,兵庫県,西宮市,奥畑
6620961,兵庫県,西宮市,御茶家所町
6638182,兵庫県,西宮市,学文殿町
6620977,兵庫県,西宮市,神楽町
6638136,兵庫県,西宮市,笠屋町
6620052,兵庫県,西宮市,霞町
6620001,兵庫県,西宮市,甲山町
6638003,兵庫県,西宮市,上大市
6620865,兵庫県,西宮市,神垣町
6638114,兵庫県,西宮市,上甲子園
6620813,兵庫県,西宮市,上甲東園
6620027,兵庫県,西宮市,神園町
6638021,兵庫県,西宮市,上之町
6620954,兵庫県,西宮市,上葭原町
6620097,兵庫県,西宮市,柏堂町
6620098,兵庫県,西宮市,柏堂西町
6620944,兵庫県,西宮市,川添町
6620951,兵庫県,西宮市,川西町
6620861,兵庫県,西宮市,河原町
6620945,兵庫県,西宮市,川東町
6638107,兵庫県,西宮市,瓦林町
6620823,兵庫県,西宮市,神呪町
6620021,兵庫県,西宮市,神原
6620078,兵庫県,西宮市,菊谷町
6620062,兵庫県,西宮市,木津山町
6638035,兵庫県,西宮市,北口町
6620833,兵庫県,西宮市,北昭和町
6620025,兵庫県,西宮市,北名次町
6620091,兵庫県,西宮市,北山町
6691131,兵庫県,西宮市,清瀬台
6620077,兵庫県,西宮市,久出ケ谷町
6691135,兵庫県,西宮市,国見台
6620927,兵庫県,西宮市,久保町
6638103,兵庫県,西宮市,熊野町
6620064,兵庫県,西宮市,雲井町
6620926,兵庫県,西宮市,鞍掛町
6620083,兵庫県,西宮市,苦楽園一番町
6620082,兵庫県,西宮市,苦楽園二番町
6620081,兵庫県,西宮市,苦楽園三番町
6620088,兵庫県,西宮市,苦楽園四番町
6620087,兵庫県,西宮市,苦楽園五番町
6620086,兵庫県,西宮市,苦楽園六番町
6620037,兵庫県,西宮市,結善町
6620099,兵庫県,西宮市,剣谷町
6638156,兵庫県,西宮市,甲子園網引町
6638165,兵庫県,西宮市,甲子園浦風町
6638151,兵庫県,西宮市,甲子園洲鳥町
6638162,兵庫県,西宮市,甲子園砂田町
6638166,兵庫県,西宮市,甲子園高潮町
6638167,兵庫県,西宮市,甲子園浜田町
6638161,兵庫県,西宮市,甲子園春風町
6638163,兵庫県,西宮市,甲子園三保町
6638164,兵庫県,西宮市,甲子園六石町
6638171,兵庫県,西宮市,甲子園一番町
6638172,兵庫県,西宮市,甲子園二番町
6638173,兵庫県,西宮市,甲子園三番町
6638174,兵庫県,西宮市,甲子園四番町
6638175,兵庫県,西宮市,甲子園五番町
6638176,兵庫県,西宮市,甲子園六番町
6638177,兵庫県,西宮市,甲子園七番町
6638178,兵庫県,西宮市,甲子園八番町
6638179,兵庫県,西宮市,甲子園九番町
6638113,兵庫県,西宮市,甲子園口
6638112,兵庫県,西宮市,甲子園口北町
6638152,兵庫県,西宮市,甲子園町
6638155,兵庫県,西宮市,甲子園浜
6620812,兵庫県,西宮市,甲東園
6620832,兵庫県,西宮市,甲風園
6620965,兵庫県,西宮市,郷免町
6620018,兵庫県,西宮市,甲陽園山王町
6620012,兵庫県,西宮市,甲陽園東山町
6620017,兵庫県,西宮市,甲陽園西山町
6620014,兵庫県,西宮市,甲陽園日之出町
6620015,兵庫県,西宮市,甲陽園本庄町
6620011,兵庫県,西宮市,甲陽園目神山町
6620016,兵庫県,西宮市,甲陽園若江町
6620092,兵庫県,西宮市,甑岩町
6620006,兵庫県,西宮市,越水社家郷山
6620864,兵庫県,西宮市,越水町
6638122,兵庫県,西宮市,小曽根町
6620047,兵庫県,西宮市,寿町
6638123,兵庫県,西宮市,小松東町
6638125,兵庫県,西宮市,小松西町
6638124,兵庫県,西宮市,小松南町
6638126,兵庫県,西宮市,小松北町
6638127,兵庫県,西宮市,小松町
6620844,兵庫県,西宮市,西福町
6620032,兵庫県,西宮市,桜谷町
6620071,兵庫県,西宮市,桜町
6620875,兵庫県,西宮市,五月ケ丘
6638183,兵庫県,西宮市,里中町
6620978,兵庫県,西宮市,産所町
6691101,兵庫県,西宮市,塩瀬町生瀬
6691251,兵庫県,西宮市,塩瀬町名塩
6691141,兵庫県,西宮市,塩瀬町名塩
6620026,兵庫県,西宮市,獅子ケ口町
6620033,兵庫県,西宮市,清水町
6638004,兵庫県,西宮市,下大市東町
6638005,兵庫県,西宮市,下大市西町
6620956,兵庫県,西宮市,下葭原町
6620974,兵庫県,西宮市,社家町
6620004,兵庫県,西宮市,鷲林寺
6620003,兵庫県,西宮市,鷲林寺町
6620002,兵庫県,西宮市,鷲林寺南町
6620856,兵庫県,西宮市,城ケ堀町
6620822,兵庫県,西宮市,松籟荘
6620023,兵庫県,西宮市,城山
6620843,兵庫県,西宮市,神祇官町
6620013,兵庫県,西宮市,新甲陽町
6620845,兵庫県,西宮市,神明町
6620041,兵庫県,西宮市,末広町
6620096,兵庫県,西宮市,角石町
6620913,兵庫県,西宮市,染殿町
6620867,兵庫県,西宮市,大社町
6638033,兵庫県,西宮市,高木東町
6638032,兵庫県,西宮市,高木西町
6620872,兵庫県,西宮市,高座町
6638141,兵庫県,西宮市,高須町
6620066,兵庫県,西宮市,高塚町
6638202,兵庫県,西宮市,高畑町
6638204,兵庫県,西宮市,高松町
6638201,兵庫県,西宮市,田代町
6638001,兵庫県,西宮市,田近野町
6620943,兵庫県,西宮市,建石町
6620973,兵庫県,西宮市,田中町
6638006,兵庫県,西宮市,段上町
6620046,兵庫県,西宮市,千歳町
6620853,兵庫県,西宮市,津田町
6638012,兵庫県,西宮市,堤町
6638244,兵庫県,西宮市,津門綾羽町
6638242,兵庫県,西宮市,津門飯田町
6638247,兵庫県,西宮市,津門稲荷町
6638243,兵庫県,西宮市,津門大箇町
6638241,兵庫県,西宮市,津門大塚町
6638245,兵庫県,西宮市,津門呉羽町
6638234,兵庫県,西宮市,津門住江町
6638231,兵庫県,西宮市,津門西口町
6638246,兵庫県,西宮市,津門仁辺町
6638232,兵庫県,西宮市,津門宝津町
6638233,兵庫県,西宮市,津門川町
6638104,兵庫県,西宮市,天道町
6620043,兵庫県,西宮市,常磐町
6638121,兵庫県,西宮市,戸崎町
6620916,兵庫県,西宮市,戸田町
6620065,兵庫県,西宮市,殿山町
6638105,兵庫県,西宮市,中島町
6620851,兵庫県,西宮市,中須佐町
6620852,兵庫県,西宮市,中殿町
6620952,兵庫県,西宮市,中浜町
6620857,兵庫県,西宮市,中前田町
6620868,兵庫県,西宮市,中屋町
6620955,兵庫県,西宮市,中葭原町
6638034,兵庫県,西宮市,長田町
6691147,兵庫県,西宮市,名塩
6691149,兵庫県,西宮市,名塩赤坂
6691143,兵庫県,西宮市,名塩ガーデン
6691136,兵庫県,西宮市,名塩木之元
6691146,兵庫県,西宮市,名塩さくら台
6691142,兵庫県,西宮市,名塩山荘
6691144,兵庫県,西宮市,名塩茶園町
6691148,兵庫県,西宮市,名塩東久保
6691145,兵庫県,西宮市,名塩平成台
6691132,兵庫県,西宮市,名塩南台
6691162,兵庫県,西宮市,名塩美山
6691134,兵庫県,西宮市,名塩新町
6620024,兵庫県,西宮市,名次町
6691103,兵庫県,西宮市,生瀬東町
6691111,兵庫県,西宮市,生瀬高台
6691102,兵庫県,西宮市,生瀬町
6691104,兵庫県,西宮市,生瀬武庫川町
6638184,兵庫県,西宮市,鳴尾町
6638142,兵庫県,西宮市,鳴尾浜
6620038,兵庫県,西宮市,南郷町
6620814,兵庫県,西宮市,仁川五ケ山町
6620815,兵庫県,西宮市,仁川百合野町
6620811,兵庫県,西宮市,仁川町
6620034,兵庫県,西宮市,西田町
6620934,兵庫県,西宮市,西宮浜
6620933,兵庫県,西宮市,西波止町
6620093,兵庫県,西宮市,西平町
6620838,兵庫県,西宮市,能登町
6638015,兵庫県,西宮市,野間町
6620051,兵庫県,西宮市,羽衣町
6620854,兵庫県,西宮市,櫨塚町
6638187,兵庫県,西宮市,花園町
6691121,兵庫県,西宮市,花の峯
6620915,兵庫県,西宮市,馬場町
6638154,兵庫県,西宮市,浜甲子園
6620942,兵庫県,西宮市,浜町
6620923,兵庫県,西宮市,浜松原町
6620941,兵庫県,西宮市,浜脇町
6638014,兵庫県,西宮市,林田町
6638132,兵庫県,西宮市,東鳴尾町
6620924,兵庫県,西宮市,東浜町
6620922,兵庫県,西宮市,東町
6691133,兵庫県,西宮市,東山台
6620094,兵庫県,西宮市,毘沙門町
6620084,兵庫県,西宮市,樋之池町
6638011,兵庫県,西宮市,樋ノ口町
6638022,兵庫県,西宮市,日野町
6620835,兵庫県,西宮市,平木町
6620044,兵庫県,西宮市,平松町
6620837,兵庫県,西宮市,広田町
6638203,兵庫県,西宮市,深津町
6620067,兵庫県,西宮市,深谷町
6638031,兵庫県,西宮市,伏原町
6638111,兵庫県,西宮市,二見町
6638185,兵庫県,西宮市,古川町
6620042,兵庫県,西宮市,分銅町
6691112,兵庫県,西宮市,宝生ケ丘
6620072,兵庫県,西宮市,豊楽町
6620953,兵庫県,西宮市,堀切町
6620914,兵庫県,西宮市,本町
6620931,兵庫県,西宮市,前浜町
6620076,兵庫県,西宮市,松生町
6620073,兵庫県,西宮市,松風町
6620061,兵庫県,西宮市,松ケ丘町
6620962,兵庫県,西宮市,松下町
6620053,兵庫県,西宮市,松園町
6638102,兵庫県,西宮市,松並町
6620912,兵庫県,西宮市,松原町
6638101,兵庫県,西宮市,松山町
6620831,兵庫県,西宮市,丸橋町
6620031,兵庫県,西宮市,満池谷町
6620095,兵庫県,西宮市,美作町
6638153,兵庫県,西宮市,南甲子園
6620075,兵庫県,西宮市,南越木岩町
6620834,兵庫県,西宮市,南昭和町
6620976,兵庫県,西宮市,宮西町
6620947,兵庫県,西宮市,宮前町
6638131,兵庫県,西宮市,武庫川町
6620863,兵庫県,西宮市,室川町
6620846,兵庫県,西宮市,森下町
6638013,兵庫県,西宮市,門前町
6620826,兵庫県,西宮市,門戸岡田町
6620824,兵庫県,西宮市,門戸東町
6620828,兵庫県,西宮市,門戸西町
6620825,兵庫県,西宮市,門戸荘
6638024,兵庫県,西宮市,薬師町
6620963,兵庫県,西宮市,屋敷町
6620045,兵庫県,西宮市,安井町
6620866,兵庫県,西宮市,柳本町
6620005,兵庫県,西宮市,湯元町
6620964,兵庫県,西宮市,弓場町
6620921,兵庫県,西宮市,用海町
6620917,兵庫県,西宮市,与古道町
6620841,兵庫県,西宮市,両度町
6620918,兵庫県,西宮市,六湛寺町
6620874,兵庫県,西宮市,六軒町
6638181,兵庫県,西宮市,若草町
6620035,兵庫県,西宮市,若松町
6638016,兵庫県,西宮市,若山町
6620971,兵庫県,西宮市,和上町
6640000,兵庫県,伊丹市,
6640001,兵庫県,伊丹市,荒牧
6640008,兵庫県,伊丹市,荒牧南
6640864,兵庫県,伊丹市,安堂寺町
6640027,兵庫県,伊丹市,池尻
6640846,兵庫県,伊丹市,伊丹
6640861,兵庫県,伊丹市,稲野町
6640011,兵庫県,伊丹市,鋳物師
6640843,兵庫県,伊丹市,岩屋
6640856,兵庫県,伊丹市,梅ノ木
6640899,兵庫県,伊丹市,大鹿
6640003,兵庫県,伊丹市,大野
6640002,兵庫県,伊丹市,荻野
6640031,兵庫県,伊丹市,荻野西
6640025,兵庫県,伊丹市,奥畑
6640833,兵庫県,伊丹市,小阪田
6640863,兵庫県,伊丹市,柏木町
6640893,兵庫県,伊丹市,春日丘
6640831,兵庫県,伊丹市,北伊丹
6640837,兵庫県,伊丹市,北河原
6640891,兵庫県,伊丹市,北園
6640007,兵庫県,伊丹市,北野
6640836,兵庫県,伊丹市,北本町
6640857,兵庫県,伊丹市,行基町
6640844,兵庫県,伊丹市,口酒井
6640872,兵庫県,伊丹市,車塚
6640839,兵庫県,伊丹市,桑津
6640006,兵庫県,伊丹市,鴻池
6640855,兵庫県,伊丹市,御願塚
6640881,兵庫県,伊丹市,昆陽
6640015,兵庫県,伊丹市,昆陽池
6640885,兵庫県,伊丹市,昆陽泉町
6640016,兵庫県,伊丹市,昆陽北
6640886,兵庫県,伊丹市,昆陽東
6640888,兵庫県,伊丹市,昆陽南
6640897,兵庫県,伊丹市,桜ケ丘
6640894,兵庫県,伊丹市,清水
6640832,兵庫県,伊丹市,下河原
6640882,兵庫県,伊丹市,鈴原町
6640898,兵庫県,伊丹市,千僧
6640892,兵庫県,伊丹市,高台
6640851,兵庫県,伊丹市,中央
6640026,兵庫県,伊丹市,寺本
6640020,兵庫県,伊丹市,寺本東
6640022,兵庫県,伊丹市,中野東
6640023,兵庫県,伊丹市,中野西
6640029,兵庫県,伊丹市,中野北
6640838,兵庫県,伊丹市,中村
6640834,兵庫県,伊丹市,西桑津
6640858,兵庫県,伊丹市,西台
6640028,兵庫県,伊丹市,西野
6640873,兵庫県,伊丹市,野間
6640875,兵庫県,伊丹市,野間北
6640845,兵庫県,伊丹市,東有岡
6640835,兵庫県,伊丹市,東桑津
6640004,兵庫県,伊丹市,東野
6640853,兵庫県,伊丹市,平松
6640014,兵庫県,伊丹市,広畑
6640847,兵庫県,伊丹市,藤ノ木
6640896,兵庫県,伊丹市,船原
6640871,兵庫県,伊丹市,堀池
6640024,兵庫県,伊丹市,松ケ丘
6640884,兵庫県,伊丹市,美鈴町
6640017,兵庫県,伊丹市,瑞ケ丘
6640005,兵庫県,伊丹市,瑞原
6640013,兵庫県,伊丹市,瑞穂町
6640012,兵庫県,伊丹市,緑ケ丘
6640883,兵庫県,伊丹市,南鈴原
6640854,兵庫県,伊丹市,南町
6640865,兵庫県,伊丹市,南野
6640887,兵庫県,伊丹市,南野北
6640852,兵庫県,伊丹市,南本町
6640895,兵庫県,伊丹市,宮ノ前
6640842,兵庫県,伊丹市,森本
6640874,兵庫県,伊丹市,山田
6640862,兵庫県,伊丹市,若菱町
6680000,兵庫県,豊岡市,
6680801,兵庫県,豊岡市,赤石
6680001,兵庫県,豊岡市,伊賀谷
6680261,兵庫県,豊岡市,出石町荒木
6680213,兵庫県,豊岡市,出石町伊木
6680207,兵庫県,豊岡市,出石町伊豆
6680218,兵庫県,豊岡市,出石町入佐
6680244,兵庫県,豊岡市,出石町上野
6680215,兵庫県,豊岡市,出石町魚屋
6680214,兵庫県,豊岡市,出石町内町
6680271,兵庫県,豊岡市,出石町大谷
6680201,兵庫県,豊岡市,出石町奥小野
6680251,兵庫県,豊岡市,出石町奥山
6680235,兵庫県,豊岡市,出石町鍛冶屋
6680279,兵庫県,豊岡市,出石町片間
6680255,兵庫県,豊岡市,出石町上村
6680231,兵庫県,豊岡市,出石町川原
6680242,兵庫県,豊岡市,出石町桐野
6680202,兵庫県,豊岡市,出石町口小野
6680264,兵庫県,豊岡市,出石町暮坂
6680256,兵庫県,豊岡市,出石町小人
6680216,兵庫県,豊岡市,出石町材木
6680205,兵庫県,豊岡市,出石町嶋
6680211,兵庫県,豊岡市,出石町下谷
6680233,兵庫県,豊岡市,出石町田結庄
6680209,兵庫県,豊岡市,出石町田多地
6680212,兵庫県,豊岡市,出石町谷山
6680273,兵庫県,豊岡市,出石町坪井
6680257,兵庫県,豊岡市,出石町坪口
6680223,兵庫県,豊岡市,出石町鉄砲
6680241,兵庫県,豊岡市,出石町寺坂
6680222,兵庫県,豊岡市,出石町寺町
6680217,兵庫県,豊岡市,出石町東條
6680272,兵庫県,豊岡市,出石町鳥居
6680275,兵庫県,豊岡市,出石町長砂
6680243,兵庫県,豊岡市,出石町中野
6680254,兵庫県,豊岡市,出石町中村
6680203,兵庫県,豊岡市,出石町袴狭
6680237,兵庫県,豊岡市,出石町馬場
6680246,兵庫県,豊岡市,出石町日野辺
6680266,兵庫県,豊岡市,出石町平田
6680238,兵庫県,豊岡市,出石町弘原
6680206,兵庫県,豊岡市,出石町福居
6680263,兵庫県,豊岡市,出石町福住
6680265,兵庫県,豊岡市,出石町福見
6680262,兵庫県,豊岡市,出石町細見
6680224,兵庫県,豊岡市,出石町本町
6680221,兵庫県,豊岡市,出石町町分
6680236,兵庫県,豊岡市,出石町松枝
6680277,兵庫県,豊岡市,出石町丸中
6680278,兵庫県,豊岡市,出石町三木
6680204,兵庫県,豊岡市,出石町宮内
6680274,兵庫県,豊岡市,出石町水上
6680276,兵庫県,豊岡市,出石町森井
6680225,兵庫県,豊岡市,出石町八木
6680208,兵庫県,豊岡市,出石町安良
6680234,兵庫県,豊岡市,出石町柳
6680245,兵庫県,豊岡市,出石町百合
6680232,兵庫県,豊岡市,出石町宵田
6680253,兵庫県,豊岡市,出石町榎見
6680252,兵庫県,豊岡市,出石町和屋
6680021,兵庫県,豊岡市,泉町
6680821,兵庫県,豊岡市,市場
6680851,兵庫県,豊岡市,今森
6680081,兵庫県,豊岡市,岩井
6680002,兵庫県,豊岡市,岩熊
6680071,兵庫県,豊岡市,内町
6680852,兵庫県,豊岡市,江本
6680041,兵庫県,豊岡市,大磯町
6680861,兵庫県,豊岡市,大篠岡
6680072,兵庫県,豊岡市,大谷
6680031,兵庫県,豊岡市,大手町
6680822,兵庫県,豊岡市,奥野
6696123,兵庫県,豊岡市,小島
6680022,兵庫県,豊岡市,小田井町
6680871,兵庫県,豊岡市,梶原
6680862,兵庫県,豊岡市,香住
6680023,兵庫県,豊岡市,加広町
6680811,兵庫県,豊岡市,鎌田
6680011,兵庫県,豊岡市,上陰
6680061,兵庫県,豊岡市,上佐野
6680863,兵庫県,豊岡市,上鉢山
6680831,兵庫県,豊岡市,神美台
6680841,兵庫県,豊岡市,加陽
6680864,兵庫県,豊岡市,木内
6696103,兵庫県,豊岡市,城崎町今津
6696116,兵庫県,豊岡市,城崎町上山
6696115,兵庫県,豊岡市,城崎町来日
6696111,兵庫県,豊岡市,城崎町楽々浦
6696114,兵庫県,豊岡市,城崎町戸島
6696112,兵庫県,豊岡市,城崎町飯谷
6696113,兵庫県,豊岡市,城崎町結
6696102,兵庫県,豊岡市,城崎町桃島
6696101,兵庫県,豊岡市,城崎町湯島
6680042,兵庫県,豊岡市,京町
6680832,兵庫県,豊岡市,倉見
6696124,兵庫県,豊岡市,気比
6680872,兵庫県,豊岡市,河谷
6680003,兵庫県,豊岡市,江野
6680051,兵庫県,豊岡市,九日市上町
6680053,兵庫県,豊岡市,九日市中町
6680052,兵庫県,豊岡市,九日市下町
6680024,兵庫県,豊岡市,寿町
6680802,兵庫県,豊岡市,金剛寺
6680025,兵庫県,豊岡市,幸町
6680812,兵庫県,豊岡市,栄町
6680043,兵庫県,豊岡市,桜町
6680062,兵庫県,豊岡市,佐野
6680044,兵庫県,豊岡市,山王町
6680054,兵庫県,豊岡市,塩津町
6680012,兵庫県,豊岡市,下陰
6680803,兵庫県,豊岡市,下鶴井
6680813,兵庫県,豊岡市,下宮
6680865,兵庫県,豊岡市,下鉢山
6680082,兵庫県,豊岡市,庄
6680814,兵庫県,豊岡市,祥雲寺
6680873,兵庫県,豊岡市,庄境
6680045,兵庫県,豊岡市,城南町
6680063,兵庫県,豊岡市,正法寺
6680853,兵庫県,豊岡市,清冷寺
6680055,兵庫県,豊岡市,昭和町
6680004,兵庫県,豊岡市,新堂
6696122,兵庫県,豊岡市,瀬戸
6696125,兵庫県,豊岡市,田結
6680064,兵庫県,豊岡市,高屋
6680005,兵庫県,豊岡市,滝
6696214,兵庫県,豊岡市,竹野町阿金谷
6696223,兵庫県,豊岡市,竹野町芦谷
6696202,兵庫県,豊岡市,竹野町宇日
6696352,兵庫県,豊岡市,竹野町大森
6696218,兵庫県,豊岡市,竹野町奥須井
6696224,兵庫県,豊岡市,竹野町鬼神谷
6696333,兵庫県,豊岡市,竹野町御又
6696354,兵庫県,豊岡市,竹野町川南谷
6696334,兵庫県,豊岡市,竹野町河内
6696216,兵庫県,豊岡市,竹野町切浜
6696229,兵庫県,豊岡市,竹野町金原
6696215,兵庫県,豊岡市,竹野町草飼
6696353,兵庫県,豊岡市,竹野町桑野本
6696341,兵庫県,豊岡市,竹野町小城
6696222,兵庫県,豊岡市,竹野町小丸
6696226,兵庫県,豊岡市,竹野町下塚
6696221,兵庫県,豊岡市,竹野町須谷
6696351,兵庫県,豊岡市,竹野町須野谷
6696203,兵庫県,豊岡市,竹野町田久日
6696201,兵庫県,豊岡市,竹野町竹野
6696343,兵庫県,豊岡市,竹野町段
6696225,兵庫県,豊岡市,竹野町轟
6696342,兵庫県,豊岡市,竹野町二連原
6696344,兵庫県,豊岡市,竹野町椒
6696213,兵庫県,豊岡市,竹野町羽入
6696217,兵庫県,豊岡市,竹野町浜須井
6696227,兵庫県,豊岡市,竹野町林
6696228,兵庫県,豊岡市,竹野町東大谷
6696331,兵庫県,豊岡市,竹野町坊岡
6696212,兵庫県,豊岡市,竹野町松本
6696355,兵庫県,豊岡市,竹野町三原
6696332,兵庫県,豊岡市,竹野町森本
6696335,兵庫県,豊岡市,竹野町門谷
6696211,兵庫県,豊岡市,竹野町和田
6680866,兵庫県,豊岡市,駄坂
6680046,兵庫県,豊岡市,立野町
6680833,兵庫県,豊岡市,立石
6680324,兵庫県,豊岡市,但東町相田
6680352,兵庫県,豊岡市,但東町赤花
6680316,兵庫県,豊岡市,但東町天谷
6680374,兵庫県,豊岡市,但東町後
6680372,兵庫県,豊岡市,但東町大河内
6680335,兵庫県,豊岡市,但東町太田
6680353,兵庫県,豊岡市,但東町奥赤
6680343,兵庫県,豊岡市,但東町奥藤
6680301,兵庫県,豊岡市,但東町奥矢根
6680323,兵庫県,豊岡市,但東町小谷
6680321,兵庫県,豊岡市,但東町唐川
6680331,兵庫県,豊岡市,但東町木村
6680341,兵庫県,豊岡市,但東町口藤
6680373,兵庫県,豊岡市,但東町久畑
6680363,兵庫県,豊岡市,但東町栗尾
6680315,兵庫県,豊岡市,但東町河本
6680333,兵庫県,豊岡市,但東町高龍寺
6680376,兵庫県,豊岡市,但東町小坂
6680354,兵庫県,豊岡市,但東町坂津
6680334,兵庫県,豊岡市,但東町坂野
6680325,兵庫県,豊岡市,但東町佐々木
6680364,兵庫県,豊岡市,但東町佐田
6680361,兵庫県,豊岡市,但東町正法寺
6680311,兵庫県,豊岡市,但東町出合
6680313,兵庫県,豊岡市,但東町出合市場
6680337,兵庫県,豊岡市,但東町東里
6680342,兵庫県,豊岡市,但東町中藤
6680345,兵庫県,豊岡市,但東町中山
6680317,兵庫県,豊岡市,但東町西谷
6680332,兵庫県,豊岡市,但東町西野々
6680303,兵庫県,豊岡市,但東町畑
6680351,兵庫県,豊岡市,但東町畑山
6680375,兵庫県,豊岡市,但東町東中
6680314,兵庫県,豊岡市,但東町日殿
6680336,兵庫県,豊岡市,但東町日向
6680362,兵庫県,豊岡市,但東町平田
6680304,兵庫県,豊岡市,但東町水石
6680312,兵庫県,豊岡市,但東町南尾
6680322,兵庫県,豊岡市,但東町三原
6680344,兵庫県,豊岡市,但東町虫生
6680371,兵庫県,豊岡市,但東町薬王寺
6680302,兵庫県,豊岡市,但東町矢根
6680033,兵庫県,豊岡市,中央町
6680032,兵庫県,豊岡市,千代田町
6696121,兵庫県,豊岡市,津居山
6680073,兵庫県,豊岡市,辻
6680083,兵庫県,豊岡市,栃江
6680065,兵庫県,豊岡市,戸牧
6680013,兵庫県,豊岡市,中陰
6680842,兵庫県,豊岡市,中郷
6680874,兵庫県,豊岡市,中谷
6680834,兵庫県,豊岡市,長谷
6680074,兵庫県,豊岡市,野垣
6680804,兵庫県,豊岡市,野上
6680014,兵庫県,豊岡市,野田
6680854,兵庫県,豊岡市,八社宮
6696127,兵庫県,豊岡市,畑上
6680843,兵庫県,豊岡市,引野
6680844,兵庫県,豊岡市,土渕
6695314,兵庫県,豊岡市,日高町赤崎
6695331,兵庫県,豊岡市,日高町上石
6695315,兵庫県,豊岡市,日高町浅倉
6695356,兵庫県,豊岡市,日高町荒川
6695326,兵庫県,豊岡市,日高町池上
6695367,兵庫県,豊岡市,日高町石井
6695375,兵庫県,豊岡市,日高町稲葉
6695357,兵庫県,豊岡市,日高町猪子垣
6695338,兵庫県,豊岡市,日高町猪爪
6695346,兵庫県,豊岡市,日高町伊府
6695302,兵庫県,豊岡市,日高町岩中
6695301,兵庫県,豊岡市,日高町江原
6695339,兵庫県,豊岡市,日高町大岡
6695324,兵庫県,豊岡市,日高町上郷
6695354,兵庫県,豊岡市,日高町観音寺
6695345,兵庫県,豊岡市,日高町久田谷
6695342,兵庫県,豊岡市,日高町久斗
6695372,兵庫県,豊岡市,日高町栗栖野
6695361,兵庫県,豊岡市,日高町栗山
6695341,兵庫県,豊岡市,日高町国分寺
6695366,兵庫県,豊岡市,日高町頃垣
6695351,兵庫県,豊岡市,日高町佐田
6695355,兵庫県,豊岡市,日高町篠垣
6695362,兵庫県,豊岡市,日高町芝
6695365,兵庫県,豊岡市,日高町十戸
6695364,兵庫県,豊岡市,日高町庄境
6695332,兵庫県,豊岡市,日高町竹貫
6695371,兵庫県,豊岡市,日高町太田
6695336,兵庫県,豊岡市,日高町谷
6695352,兵庫県,豊岡市,日高町知見
6695313,兵庫県,豊岡市,日高町鶴岡
6695321,兵庫県,豊岡市,日高町土居
6695343,兵庫県,豊岡市,日高町道場
6695369,兵庫県,豊岡市,日高町栃本
6695358,兵庫県,豊岡市,日高町殿
6695337,兵庫県,豊岡市,日高町中
6695335,兵庫県,豊岡市,日高町奈佐路
6695379,兵庫県,豊岡市,日高町名色
6695344,兵庫県,豊岡市,日高町夏栗
6695328,兵庫県,豊岡市,日高町西芝
6695305,兵庫県,豊岡市,日高町祢布
6695363,兵庫県,豊岡市,日高町野
6695327,兵庫県,豊岡市,日高町野々庄
6695359,兵庫県,豊岡市,日高町羽尻
6695311,兵庫県,豊岡市,日高町日置
6695303,兵庫県,豊岡市,日高町東構
6695373,兵庫県,豊岡市,日高町東河内
6695312,兵庫県,豊岡市,日高町日高
6695322,兵庫県,豊岡市,日高町府市場
6695333,兵庫県,豊岡市,日高町藤井
6695323,兵庫県,豊岡市,日高町府中新
6695325,兵庫県,豊岡市,日高町堀
6695307,兵庫県,豊岡市,日高町松岡
6695376,兵庫県,豊岡市,日高町万劫
6695378,兵庫県,豊岡市,日高町万場
6695306,兵庫県,豊岡市,日高町水上
6695374,兵庫県,豊岡市,日高町水口
6695353,兵庫県,豊岡市,日高町森山
6695377,兵庫県,豊岡市,日高町山田
6695368,兵庫県,豊岡市,日高町山宮
6695334,兵庫県,豊岡市,日高町山本
6695304,兵庫県,豊岡市,日高町宵田
6680015,兵庫県,豊岡市,一日市
6680815,兵庫県,豊岡市,日撫
6680075,兵庫県,豊岡市,福成寺
6680084,兵庫県,豊岡市,福田
6680855,兵庫県,豊岡市,伏
6680076,兵庫県,豊岡市,船谷
6680805,兵庫県,豊岡市,船町
6680816,兵庫県,豊岡市,法花寺
6680047,兵庫県,豊岡市,三坂町
6696126,兵庫県,豊岡市,三原
6680085,兵庫県,豊岡市,宮井
6680823,兵庫県,豊岡市,三宅
6680016,兵庫県,豊岡市,宮島
6680056,兵庫県,豊岡市,妙楽寺
6680077,兵庫県,豊岡市,目坂
6680026,兵庫県,豊岡市,元町
6680806,兵庫県,豊岡市,森
6680824,兵庫県,豊岡市,森尾
6680006,兵庫県,豊岡市,森津
6680057,兵庫県,豊岡市,弥栄町
6680807,兵庫県,豊岡市,山本
6680875,兵庫県,豊岡市,百合地
6680078,兵庫県,豊岡市,吉井
6680817,兵庫県,豊岡市,六地蔵
6680027,兵庫県,豊岡市,若松町
6650000,兵庫県,宝塚市,
6650014,兵庫県,宝塚市,青葉台
6650822,兵庫県,宝塚市,安倉中
6650825,兵庫県,宝塚市,安倉西
6650823,兵庫県,宝塚市,安倉南
6650821,兵庫県,宝塚市,安倉北
6650835,兵庫県,宝塚市,旭町
6650851,兵庫県,宝塚市,泉ガ丘
6650864,兵庫県,宝塚市,泉町
6650033,兵庫県,宝塚市,伊孑志
6650007,兵庫県,宝塚市,伊孑志
6650862,兵庫県,宝塚市,今里町
6650004,兵庫県,宝塚市,梅野町
6691211,兵庫県,宝塚市,大原野
6650057,兵庫県,宝塚市,大吹町
6650034,兵庫県,宝塚市,小林
6650017,兵庫県,宝塚市,小林西山
6650054,兵庫県,宝塚市,鹿塩
6650824,兵庫県,宝塚市,金井町
6691202,兵庫県,宝塚市,上佐曽利
6650047,兵庫県,宝塚市,亀井町
6650842,兵庫県,宝塚市,川面
6650848,兵庫県,宝塚市,川面
6650836,兵庫県,宝塚市,清荒神
6691241,兵庫県,宝塚市,切畑
6660161,兵庫県,宝塚市,切畑
6650808,兵庫県,宝塚市,切畑
6650812,兵庫県,宝塚市,口谷東
6650813,兵庫県,宝塚市,口谷西
6650023,兵庫県,宝塚市,蔵人
6650832,兵庫県,宝塚市,向月町
6691201,兵庫県,宝塚市,香合新田
6650045,兵庫県,宝塚市,光明町
6650041,兵庫県,宝塚市,御所の前町
6650841,兵庫県,宝塚市,御殿山
6650865,兵庫県,宝塚市,寿町
6650827,兵庫県,宝塚市,小浜
6650053,兵庫県,宝塚市,駒の町
6691222,兵庫県,宝塚市,境野
6650845,兵庫県,宝塚市,栄町
6650035,兵庫県,宝塚市,逆瀬川
6650024,兵庫県,宝塚市,逆瀬台
6650846,兵庫県,宝塚市,桜ガ丘
6691205,兵庫県,宝塚市,芝辻新田
6691203,兵庫県,宝塚市,下佐曽利
6650012,兵庫県,宝塚市,寿楽荘
6650052,兵庫県,宝塚市,新明和町
6650044,兵庫県,宝塚市,末成町
6650031,兵庫県,宝塚市,末広町
6650847,兵庫県,宝塚市,すみれガ丘
6650055,兵庫県,宝塚市,大成町
6650051,兵庫県,宝塚市,高司
6650043,兵庫県,宝塚市,高松町
6650076,兵庫県,宝塚市,谷口町
6691231,兵庫県,宝塚市,玉瀬
6650072,兵庫県,宝塚市,千種
6650001,兵庫県,宝塚市,長寿ガ丘
6650002,兵庫県,宝塚市,月見山
6650833,兵庫県,宝塚市,鶴の荘
6650073,兵庫県,宝塚市,塔の町
6650032,兵庫県,宝塚市,東洋町
6650021,兵庫県,宝塚市,中州
6650874,兵庫県,宝塚市,中筋
6650872,兵庫県,宝塚市,中筋
6650875,兵庫県,宝塚市,中筋山手
6650056,兵庫県,宝塚市,中野町
6650877,兵庫県,宝塚市,中山桜台
6650871,兵庫県,宝塚市,中山五月台
6650868,兵庫県,宝塚市,中山荘園
6650876,兵庫県,宝塚市,中山台
6650861,兵庫県,宝塚市,中山寺
6650807,兵庫県,宝塚市,長尾台
6650873,兵庫県,宝塚市,長尾町
6691204,兵庫県,宝塚市,長谷
6650065,兵庫県,宝塚市,仁川旭ガ丘
6650064,兵庫県,宝塚市,仁川うぐいす台
6650062,兵庫県,宝塚市,仁川高台
6650063,兵庫県,宝塚市,仁川高丸
6650066,兵庫県,宝塚市,仁川団地
6650067,兵庫県,宝塚市,仁川月見ガ丘
6650075,兵庫県,宝塚市,仁川宮西町
6650061,兵庫県,宝塚市,仁川北
6650074,兵庫県,宝塚市,仁川台
6650022,兵庫県,宝塚市,野上
6691221,兵庫県,宝塚市,波豆
6650802,兵庫県,宝塚市,花屋敷荘園
6650803,兵庫県,宝塚市,花屋敷つつじガ丘
6650801,兵庫県,宝塚市,花屋敷松ガ丘
6660162,兵庫県,宝塚市,花屋敷緑ガ丘
6650015,兵庫県,宝塚市,光ガ丘
6650805,兵庫県,宝塚市,雲雀丘
6650804,兵庫県,宝塚市,雲雀丘山手
6650816,兵庫県,宝塚市,平井
6650817,兵庫県,宝塚市,平井山荘
6650046,兵庫県,宝塚市,福井町
6650806,兵庫県,宝塚市,ふじガ丘
6650016,兵庫県,宝塚市,宝松苑
6650013,兵庫県,宝塚市,宝梅
6650866,兵庫県,宝塚市,星の荘
6650831,兵庫県,宝塚市,米谷
6650837,兵庫県,宝塚市,米谷清
6650863,兵庫県,宝塚市,三笠町
6650834,兵庫県,宝塚市,美座
6650011,兵庫県,宝塚市,南口
6650811,兵庫県,宝塚市,南ひばりガ丘
6650843,兵庫県,宝塚市,宮の町
6650042,兵庫県,宝塚市,美幸町
6650844,兵庫県,宝塚市,武庫川町
6650005,兵庫県,宝塚市,武庫山
6650852,兵庫県,宝塚市,売布
6650855,兵庫県,宝塚市,売布きよしガ丘
6650856,兵庫県,宝塚市,売布自由ガ丘
6650867,兵庫県,宝塚市,売布東の町
6650854,兵庫県,宝塚市,売布山手町
6650853,兵庫県,宝塚市,売布ガ丘
6650006,兵庫県,宝塚市,紅葉ガ丘
6650071,兵庫県,宝塚市,社町
6650887,兵庫県,宝塚市,山手台東
6650886,兵庫県,宝塚市,山手台西
6650883,兵庫県,宝塚市,山本中
6650814,兵庫県,宝塚市,山本野里
6650815,兵庫県,宝塚市,山本丸橋
6650881,兵庫県,宝塚市,山本東
6650884,兵庫県,宝塚市,山本西
6650882,兵庫県,宝塚市,山本南
6650885,兵庫県,宝塚市,山本台
6650826,兵庫県,宝塚市,弥生町
6650025,兵庫県,宝塚市,ゆずり葉台
6650003,兵庫県,宝塚市,湯本町
6660000,兵庫県,川西市,
6660148,兵庫県,川西市,赤松
6660156,兵庫県,川西市,石道
6660146,兵庫県,川西市,芋生
6660007,兵庫県,川西市,鴬が丘
6660133,兵庫県,川西市,鴬台
6660001,兵庫県,川西市,鴬の森町
6660014,兵庫県,川西市,小戸
6660015,兵庫県,川西市,小花
6660031,兵庫県,川西市,霞ケ丘
6660025,兵庫県,川西市,加茂
6660012,兵庫県,川西市,絹延町
6660135,兵庫県,川西市,錦松台
6660024,兵庫県,川西市,久代
6660103,兵庫県,川西市,国崎
6660101,兵庫県,川西市,黒川
6660107,兵庫県,川西市,下財町
6660145,兵庫県,川西市,けやき坂
6660115,兵庫県,川西市,向陽台
6660033,兵庫県,川西市,栄町
6660021,兵庫県,川西市,栄根
6660104,兵庫県,川西市,笹部
6660022,兵庫県,川西市,下加茂
6660125,兵庫県,川西市,新田
6660116,兵庫県,川西市,水明台
6660158,兵庫県,川西市,清流台
6660142,兵庫県,川西市,清和台東
6660143,兵庫県,川西市,清和台西
6660111,兵庫県,川西市,大和東
6660112,兵庫県,川西市,大和西
6660002,兵庫県,川西市,滝山町
6660126,兵庫県,川西市,多田院
6660127,兵庫県,川西市,多田院多田所町
6660128,兵庫県,川西市,多田院西
6660124,兵庫県,川西市,多田桜木
6660016,兵庫県,川西市,中央町
6660123,兵庫県,川西市,鼓が滝
6660011,兵庫県,川西市,出在家町
6660034,兵庫県,川西市,寺畑
6660113,兵庫県,川西市,長尾町
6660155,兵庫県,川西市,西畦野
6660138,兵庫県,川西市,西多田
6660004,兵庫県,川西市,萩原
6660005,兵庫県,川西市,萩原台東
6660006,兵庫県,川西市,萩原台西
6660134,兵庫県,川西市,萩原台西
6660035,兵庫県,川西市,花屋敷
6660036,兵庫県,川西市,花屋敷山手町
6660017,兵庫県,川西市,火打
6660117,兵庫県,川西市,東畦野
6660114,兵庫県,川西市,東畦野山手
6660023,兵庫県,川西市,東久代
6660122,兵庫県,川西市,東多田
6660032,兵庫県,川西市,日高町
6660153,兵庫県,川西市,一庫
6660121,兵庫県,川西市,平野
6660037,兵庫県,川西市,松が丘町
6660003,兵庫県,川西市,丸の内町
6660152,兵庫県,川西市,丸山台
6650891,兵庫県,川西市,満願寺町
6660013,兵庫県,川西市,美園町
6660157,兵庫県,川西市,緑が丘
6660129,兵庫県,川西市,緑台
6660136,兵庫県,川西市,南野坂
6660026,兵庫県,川西市,南花屋敷
6660105,兵庫県,川西市,見野
6660151,兵庫県,川西市,美山台
6660141,兵庫県,川西市,虫生
6660131,兵庫県,川西市,矢問
6660132,兵庫県,川西市,矢問東町
6660144,兵庫県,川西市,柳谷
6660106,兵庫県,川西市,山下町
6660154,兵庫県,川西市,山原
6660137,兵庫県,川西市,湯山台
6660102,兵庫県,川西市,横路
6660147,兵庫県,川西市,若宮
6691300,兵庫県,三田市,
6691536,兵庫県,三田市,三田市の次に番地がくる場合
6691526,兵庫県,三田市,相生町
6691358,兵庫県,三田市,藍本
6691323,兵庫県,三田市,あかしあ台
6691548,兵庫県,三田市,池尻
6691414,兵庫県,三田市,市之瀬
6691356,兵庫県,三田市,井ノ草
6691336,兵庫県,三田市,馬渡
6691502,兵庫県,三田市,永沢寺
6691528,兵庫県,三田市,駅前町
6691349,兵庫県,三田市,大川瀬
6691354,兵庫県,三田市,大畑
6691515,兵庫県,三田市,大原
6691503,兵庫県,三田市,乙原
6691504,兵庫県,三田市,小野
6691337,兵庫県,三田市,学園
6691507,兵庫県,三田市,香下
6691346,兵庫県,三田市,上相野
6691301,兵庫県,三田市,上青野
6691316,兵庫県,三田市,上井沢
6691338,兵庫県,三田市,上内神
6691415,兵庫県,三田市,上槻瀬
6691542,兵庫県,三田市,上深田
6691351,兵庫県,三田市,上本庄
6691311,兵庫県,三田市,加茂
6691514,兵庫県,三田市,川除
6691403,兵庫県,三田市,川原
6691541,兵庫県,三田市,貴志
6691304,兵庫県,三田市,北浦
6691521,兵庫県,三田市,桑原
6691321,兵庫県,三田市,けやき台
6691412,兵庫県,三田市,木器
6691401,兵庫県,三田市,小柿
6691405,兵庫県,三田市,酒井
6691325,兵庫県,三田市,さくら坂
6691335,兵庫県,三田市,沢谷
6691533,兵庫県,三田市,三田町
6691506,兵庫県,三田市,志手原
6691345,兵庫県,三田市,下相野
6691302,兵庫県,三田市,下青野
6691315,兵庫県,三田市,下井沢
6691333,兵庫県,三田市,下内神
6691416,兵庫県,三田市,下里
6691522,兵庫県,三田市,下田中
6691413,兵庫県,三田市,下槻瀬
6691543,兵庫県,三田市,下深田
6691303,兵庫県,三田市,末
6691402,兵庫県,三田市,末吉
6691417,兵庫県,三田市,鈴鹿
6691322,兵庫県,三田市,すずかけ台
6691352,兵庫県,三田市,須磨田
6691525,兵庫県,三田市,対中町
6691512,兵庫県,三田市,高次
6691406,兵庫県,三田市,田中
6691529,兵庫県,三田市,中央町
6691347,兵庫県,三田市,つつじが丘南
6691348,兵庫県,三田市,つつじが丘北
6691339,兵庫県,三田市,テクノパーク
6691523,兵庫県,三田市,寺村町
6691531,兵庫県,三田市,天神
6691404,兵庫県,三田市,十倉
6691516,兵庫県,三田市,友が丘
6691334,兵庫県,三田市,中内神
6691527,兵庫県,三田市,中町
6691355,兵庫県,三田市,長坂
6691517,兵庫県,三田市,成谷
6691341,兵庫県,三田市,西相野
6691314,兵庫県,三田市,西野上
6691537,兵庫県,三田市,西山
6691505,兵庫県,三田市,尼寺
6691545,兵庫県,三田市,狭間が丘
6691411,兵庫県,三田市,波豆川
6691524,兵庫県,三田市,八景町
6691312,兵庫県,三田市,東野上
6691357,兵庫県,三田市,東本庄
6691353,兵庫県,三田市,東山
6691332,兵庫県,三田市,広沢
6691331,兵庫県,三田市,広野
6691407,兵庫県,三田市,布木
6691313,兵庫県,三田市,福島
6691547,兵庫県,三田市,富士が丘
6691343,兵庫県,三田市,洞
6691344,兵庫県,三田市,溝口
6691535,兵庫県,三田市,南が丘
6691317,兵庫県,三田市,宮脇
6691513,兵庫県,三田市,三輪
6691544,兵庫県,三田市,武庫が丘
6691501,兵庫県,三田市,母子
6691532,兵庫県,三田市,屋敷町
6691511,兵庫県,三田市,山田
6691546,兵庫県,三田市,弥生が丘
6691324,兵庫県,三田市,ゆりのき台
6691534,兵庫県,三田市,横山町
6691342,兵庫県,三田市,四ツ辻
6692300,兵庫県,篠山市,
6692205,兵庫県,篠山市,網掛
6692402,兵庫県,篠山市,県守
6692822,兵庫県,篠山市,明野
6692223,兵庫県,篠山市,味間奥
6692224,兵庫県,篠山市,味間北
6692214,兵庫県,篠山市,味間新
6692222,兵庫県,篠山市,味間南
6692114,兵庫県,篠山市,油井
6692804,兵庫県,篠山市,荒子新田
6692354,兵庫県,篠山市,有居
6692541,兵庫県,篠山市,井串
6692436,兵庫県,篠山市,池上
6692811,兵庫県,篠山市,石住
6692406,兵庫県,篠山市,泉
6692813,兵庫県,篠山市,一印谷
6692613,兵庫県,篠山市,市野々
6692718,兵庫県,篠山市,市山
6692335,兵庫県,篠山市,乾新町
6692105,兵庫県,篠山市,犬飼
6692416,兵庫県,篠山市,井ノ上
6692304,兵庫県,篠山市,今谷
6692357,兵庫県,篠山市,今福
6692454,兵庫県,篠山市,岩崎
6692336,兵庫県,篠山市,魚屋町
6692106,兵庫県,篠山市,牛ケ瀬
6692715,兵庫県,篠山市,打坂
6692455,兵庫県,篠山市,宇土
6692801,兵庫県,篠山市,追入
6692306,兵庫県,篠山市,大上
6692318,兵庫県,篠山市,大熊
6692212,兵庫県,篠山市,大沢
6692211,兵庫県,篠山市,大沢新
6692363,兵庫県,篠山市,大谷
6692355,兵庫県,篠山市,大野
6692611,兵庫県,篠山市,大藤
6692309,兵庫県,篠山市,大渕
6692803,兵庫県,篠山市,大山上
6692823,兵庫県,篠山市,大山下
6692827,兵庫県,篠山市,大山新
6692802,兵庫県,篠山市,大山宮
6692326,兵庫県,篠山市,小川町
6692401,兵庫県,篠山市,奥県守
6692302,兵庫県,篠山市,奥畑
6692501,兵庫県,篠山市,奥原山
6692616,兵庫県,篠山市,奥山
6692614,兵庫県,篠山市,小倉
6692731,兵庫県,篠山市,小坂
6692435,兵庫県,篠山市,小多田
6692532,兵庫県,篠山市,小立
6692704,兵庫県,篠山市,遠方
6692522,兵庫県,篠山市,小野奥谷
6692523,兵庫県,篠山市,小野新
6692604,兵庫県,篠山市,小原
6692554,兵庫県,篠山市,貝田
6692714,兵庫県,篠山市,垣屋
6692408,兵庫県,篠山市,春日江
6692732,兵庫県,篠山市,上板井
6692534,兵庫県,篠山市,上筱見
6692415,兵庫県,篠山市,上宿
6692722,兵庫県,篠山市,河内台
6692724,兵庫県,篠山市,川北
6692725,兵庫県,篠山市,川北新田
6692701,兵庫県,篠山市,川阪
6692735,兵庫県,篠山市,川西
6692512,兵庫県,篠山市,川原
6692325,兵庫県,篠山市,河原町
6692451,兵庫県,篠山市,北
6692317,兵庫県,篠山市,北沢田
6692417,兵庫県,篠山市,北島
6692332,兵庫県,篠山市,北新町
6692824,兵庫県,篠山市,北野
6692825,兵庫県,篠山市,北野新田
6692438,兵庫県,篠山市,京町
6692113,兵庫県,篠山市,草野
6692544,兵庫県,篠山市,草ノ上
6692723,兵庫県,篠山市,口阪本
6692367,兵庫県,篠山市,熊谷
6692407,兵庫県,篠山市,倉谷
6692713,兵庫県,篠山市,倉本
6692711,兵庫県,篠山市,栗柄
6692465,兵庫県,篠山市,栗栖野
6692321,兵庫県,篠山市,黒岡
6692726,兵庫県,篠山市,黒田
6692703,兵庫県,篠山市,桑原
6692341,兵庫県,篠山市,郡家
6692112,兵庫県,篠山市,古森
6692545,兵庫県,篠山市,小田中
6692412,兵庫県,篠山市,小中
6692124,兵庫県,篠山市,不来坂
6692322,兵庫県,篠山市,呉服町
6692461,兵庫県,篠山市,小枕
6692144,兵庫県,篠山市,今田町間新田
6692155,兵庫県,篠山市,今田町芦原新田
6692154,兵庫県,篠山市,今田町市原
6692152,兵庫県,篠山市,今田町荻野分
6692143,兵庫県,篠山市,今田町釜屋
6692132,兵庫県,篠山市,今田町上小野原
6692135,兵庫県,篠山市,今田町上立杭
6692162,兵庫県,篠山市,今田町黒石
6692145,兵庫県,篠山市,今田町木津
6692151,兵庫県,篠山市,今田町今田
6692153,兵庫県,篠山市,今田町今田新田
6692156,兵庫県,篠山市,今田町佐曽良新田
6692161,兵庫県,篠山市,今田町四斗谷
6692133,兵庫県,篠山市,今田町下小野原
6692141,兵庫県,篠山市,今田町下立杭
6692131,兵庫県,篠山市,今田町辰巳
6692142,兵庫県,篠山市,今田町東庄
6692163,兵庫県,篠山市,今田町本荘
6692134,兵庫県,篠山市,今田町休場
6692542,兵庫県,篠山市,細工所
6692712,兵庫県,篠山市,坂本
6692365,兵庫県,篠山市,佐倉
6692405,兵庫県,篠山市,佐貫谷
6692425,兵庫県,篠山市,後川奥
6692422,兵庫県,篠山市,後川上
6692424,兵庫県,篠山市,後川下
6692421,兵庫県,篠山市,後川新田
6692423,兵庫県,篠山市,後川中
6692439,兵庫県,篠山市,渋谷
6692733,兵庫県,篠山市,下板井
6692535,兵庫県,篠山市,下筱見
6692503,兵庫県,篠山市,下原山
6692543,兵庫県,篠山市,塩岡
6692311,兵庫県,篠山市,新荘
6692204,兵庫県,篠山市,杉
6692312,兵庫県,篠山市,菅
6692315,兵庫県,篠山市,筋山
6692125,兵庫県,篠山市,住山
6692231,兵庫県,篠山市,住吉台
6692303,兵庫県,篠山市,瀬利
6692444,兵庫県,篠山市,曽地奥
6692442,兵庫県,篠山市,曽地口
6692443,兵庫県,篠山市,曽地中
6692805,兵庫県,篠山市,園田分
6692812,兵庫県,篠山市,高倉
6692717,兵庫県,篠山市,高坂
6692727,兵庫県,篠山市,高屋
6692612,兵庫県,篠山市,立金
6692323,兵庫県,篠山市,立町
6692453,兵庫県,篠山市,谷山
6692531,兵庫県,篠山市,垂水
6692362,兵庫県,篠山市,知足
6692814,兵庫県,篠山市,長安寺
6692815,兵庫県,篠山市,町ノ田
6692411,兵庫県,篠山市,辻
6692366,兵庫県,篠山市,寺内
6692821,兵庫県,篠山市,東河地
6692111,兵庫県,篠山市,当野
6692826,兵庫県,篠山市,徳永
6692525,兵庫県,篠山市,栃梨
6692434,兵庫県,篠山市,殿町
6692601,兵庫県,篠山市,中
6692213,兵庫県,篠山市,中野
6692502,兵庫県,篠山市,中原山
6692331,兵庫県,篠山市,二階町
6692346,兵庫県,篠山市,西岡屋
6692736,兵庫県,篠山市,西木之部
6692221,兵庫県,篠山市,西古佐
6692728,兵庫県,篠山市,西阪本
6692447,兵庫県,篠山市,西荘
6692334,兵庫県,篠山市,西新町
6692721,兵庫県,篠山市,西谷
6692504,兵庫県,篠山市,西野々
6692352,兵庫県,篠山市,西浜谷
6692206,兵庫県,篠山市,西吹
6692404,兵庫県,篠山市,西本荘
6692342,兵庫県,篠山市,西町
6692433,兵庫県,篠山市,西八上
6692521,兵庫県,篠山市,二之坪
6692353,兵庫県,篠山市,野尻
6692452,兵庫県,篠山市,野中
6692445,兵庫県,篠山市,野々垣
6692313,兵庫県,篠山市,野間
6692716,兵庫県,篠山市,乗竹
6692103,兵庫県,篠山市,波賀野
6692122,兵庫県,篠山市,波賀野新田
6692524,兵庫県,篠山市,箱谷
6692418,兵庫県,篠山市,畑井
6692413,兵庫県,篠山市,畑市
6692505,兵庫県,篠山市,安口
6692553,兵庫県,篠山市,幡路
6692305,兵庫県,篠山市,畑宮
6692101,兵庫県,篠山市,初田
6692307,兵庫県,篠山市,般若寺
6692301,兵庫県,篠山市,火打岩
6692441,兵庫県,篠山市,日置
6692345,兵庫県,篠山市,東岡屋
6692737,兵庫県,篠山市,東木之部
6692201,兵庫県,篠山市,東古佐
6692314,兵庫県,篠山市,東沢田
6692324,兵庫県,篠山市,東新町
6692351,兵庫県,篠山市,東浜谷
6692202,兵庫県,篠山市,東吹
6692403,兵庫県,篠山市,東本荘
6692343,兵庫県,篠山市,風深
6692344,兵庫県,篠山市,吹上
6692203,兵庫県,篠山市,吹新
6692603,兵庫県,篠山市,福井
6692513,兵庫県,篠山市,福住
6692369,兵庫県,篠山市,藤岡奥
6692368,兵庫県,篠山市,藤岡口
6692605,兵庫県,篠山市,藤坂
6692552,兵庫県,篠山市,藤之木
6692123,兵庫県,篠山市,古市
6692702,兵庫県,篠山市,本郷
6692511,兵庫県,篠山市,本明谷
6692316,兵庫県,篠山市,前沢田
6692527,兵庫県,篠山市,松ケ鼻
6692462,兵庫県,篠山市,真南条上
6692464,兵庫県,篠山市,真南条下
6692463,兵庫県,篠山市,真南条中
6692361,兵庫県,篠山市,丸山
6692121,兵庫県,篠山市,見内
6692602,兵庫県,篠山市,三熊
6692333,兵庫県,篠山市,南新町
6692102,兵庫県,篠山市,南矢代
6692615,兵庫県,篠山市,宮代
6692734,兵庫県,篠山市,宮田
6692414,兵庫県,篠山市,宮ノ前
6692526,兵庫県,篠山市,向井
6692437,兵庫県,篠山市,糯ケ坪
6692432,兵庫県,篠山市,八上内
6692446,兵庫県,篠山市,八上上
6692431,兵庫県,篠山市,八上下
6692356,兵庫県,篠山市,矢代
6692104,兵庫県,篠山市,矢代新
6692551,兵庫県,篠山市,安田
6692337,兵庫県,篠山市,山内町
6692533,兵庫県,篠山市,山田
6692364,兵庫県,篠山市,鷲尾
6692308,兵庫県,篠山市,和田
6670000,兵庫県,養父市,
6670115,兵庫県,養父市,上箇
6670103,兵庫県,養父市,浅野
6670104,兵庫県,養父市,伊豆
6670132,兵庫県,養父市,稲津
6670131,兵庫県,養父市,上野
6671119,兵庫県,養父市,鵜縄
6671127,兵庫県,養父市,大久保
6671102,兵庫県,養父市,大谷
6670125,兵庫県,養父市,大塚
6670134,兵庫県,養父市,大坪
6670436,兵庫県,養父市,大屋町明延
6670322,兵庫県,養父市,大屋町筏
6670431,兵庫県,養父市,大屋町糸原
6670301,兵庫県,養父市,大屋町上山
6670303,兵庫県,養父市,大屋町おうみ
6670314,兵庫県,養父市,大屋町大杉
6670311,兵庫県,養父市,大屋町大屋市場
6670313,兵庫県,養父市,大屋町笠谷
6670433,兵庫県,養父市,大屋町門野
6670315,兵庫県,養父市,大屋町加保
6670321,兵庫県,養父市,大屋町蔵垣
6670325,兵庫県,養父市,大屋町栗ノ下
6670434,兵庫県,養父市,大屋町須西
6670302,兵庫県,養父市,大屋町樽見
6670304,兵庫県,養父市,大屋町中
6670305,兵庫県,養父市,大屋町夏梅
6670306,兵庫県,養父市,大屋町宮垣
6670432,兵庫県,養父市,大屋町宮本
6670312,兵庫県,養父市,大屋町山路
6670324,兵庫県,養父市,大屋町横行
6670323,兵庫県,養父市,大屋町若杉
6670435,兵庫県,養父市,大屋町和田
6670111,兵庫県,養父市,大薮
6670121,兵庫県,養父市,奥米地
6671104,兵庫県,養父市,尾崎
6671114,兵庫県,養父市,小路頃
6671115,兵庫県,養父市,葛畑
6670123,兵庫県,養父市,鉄屋米地
6671113,兵庫県,養父市,川原場
6671122,兵庫県,養父市,草出
6670124,兵庫県,養父市,口米地
6670114,兵庫県,養父市,小城
6670105,兵庫県,養父市,左近山
6670102,兵庫県,養父市,十二所
6670107,兵庫県,養父市,新津
6671105,兵庫県,養父市,関宮
6670142,兵庫県,養父市,建屋
6670106,兵庫県,養父市,玉見
6671124,兵庫県,養父市,丹戸
6671112,兵庫県,養父市,出合
6671117,兵庫県,養父市,轟
6671121,兵庫県,養父市,外野
6671116,兵庫県,養父市,中瀬
6670145,兵庫県,養父市,長野
6670122,兵庫県,養父市,中米地
6671123,兵庫県,養父市,梨ケ原
6671125,兵庫県,養父市,奈良尾
6670143,兵庫県,養父市,能座
6670133,兵庫県,養父市,畑
6670101,兵庫県,養父市,広谷
6671126,兵庫県,養父市,福定
6670135,兵庫県,養父市,船谷
6671128,兵庫県,養父市,別宮
6670126,兵庫県,養父市,堀畑
6671103,兵庫県,養父市,万久里
6670136,兵庫県,養父市,三谷
6671101,兵庫県,養父市,三宅
6670144,兵庫県,養父市,餅耕地
6670141,兵庫県,養父市,森
6671118,兵庫県,養父市,安井
6670112,兵庫県,養父市,養父市場
6670113,兵庫県,養父市,薮崎
6670001,兵庫県,養父市,八鹿町青山
6670024,兵庫県,養父市,八鹿町朝倉
6670011,兵庫県,養父市,八鹿町浅間
6670012,兵庫県,養父市,八鹿町伊佐
6670051,兵庫県,養父市,八鹿町石原
6670014,兵庫県,養父市,八鹿町岩崎
6670015,兵庫県,養父市,八鹿町大江
6670053,兵庫県,養父市,八鹿町小佐
6670004,兵庫県,養父市,八鹿町上小田
6670023,兵庫県,養父市,八鹿町上網場
6670044,兵庫県,養父市,八鹿町国木
6670031,兵庫県,養父市,八鹿町九鹿
6670032,兵庫県,養父市,八鹿町小山
6670042,兵庫県,養父市,八鹿町今滝寺
6670013,兵庫県,養父市,八鹿町坂本
6670005,兵庫県,養父市,八鹿町下小田
6670022,兵庫県,養父市,八鹿町下網場
6670003,兵庫県,養父市,八鹿町宿南
6670043,兵庫県,養父市,八鹿町高柳
6670052,兵庫県,養父市,八鹿町日畑
6670016,兵庫県,養父市,八鹿町舞狂
6670002,兵庫県,養父市,八鹿町三谷
6670045,兵庫県,養父市,八鹿町米里
6670041,兵庫県,養父市,八鹿町八木
6670021,兵庫県,養父市,八鹿町八鹿
6671111,兵庫県,養父市,吉井
6693300,兵庫県,丹波市,
6693831,兵庫県,丹波市,青垣町市原
6693827,兵庫県,丹波市,青垣町稲土
6693843,兵庫県,丹波市,青垣町奥塩久
6693812,兵庫県,丹波市,青垣町小倉
6693822,兵庫県,丹波市,青垣町大名草
6693823,兵庫県,丹波市,青垣町大稗
6693841,兵庫県,丹波市,青垣町口塩久
6693802,兵庫県,丹波市,青垣町栗住野
6693824,兵庫県,丹波市,青垣町小稗
6693811,兵庫県,丹波市,青垣町佐治
6693842,兵庫県,丹波市,青垣町沢野
6693825,兵庫県,丹波市,青垣町惣持
6693804,兵庫県,丹波市,青垣町田井縄
6693832,兵庫県,丹波市,青垣町遠阪
6693834,兵庫県,丹波市,青垣町中佐治
6693803,兵庫県,丹波市,青垣町西芦田
6693801,兵庫県,丹波市,青垣町東芦田
6693821,兵庫県,丹波市,青垣町桧倉
6693826,兵庫県,丹波市,青垣町文室
6693833,兵庫県,丹波市,青垣町山垣
6694324,兵庫県,丹波市,市島町市島
6694316,兵庫県,丹波市,市島町岩戸
6694321,兵庫県,丹波市,市島町上垣
6694317,兵庫県,丹波市,市島町上牧
6694336,兵庫県,丹波市,市島町乙河内
6694323,兵庫県,丹波市,市島町梶原
6694344,兵庫県,丹波市,市島町上鴨阪
6694322,兵庫県,丹波市,市島町上田
6694341,兵庫県,丹波市,市島町上竹田
6694315,兵庫県,丹波市,市島町喜多
6694325,兵庫県,丹波市,市島町北岡本
6694312,兵庫県,丹波市,市島町北奥
6694337,兵庫県,丹波市,市島町酒梨
6694343,兵庫県,丹波市,市島町下鴨阪
6694301,兵庫県,丹波市,市島町下竹田
6694313,兵庫県,丹波市,市島町多利
6694332,兵庫県,丹波市,市島町勅使
6694345,兵庫県,丹波市,市島町徳尾
6694333,兵庫県,丹波市,市島町戸坂
6694311,兵庫県,丹波市,市島町戸平
6694302,兵庫県,丹波市,市島町中竹田
6694331,兵庫県,丹波市,市島町東勅使
6694334,兵庫県,丹波市,市島町白毫寺
6694314,兵庫県,丹波市,市島町南
6694342,兵庫県,丹波市,市島町矢代
6694335,兵庫県,丹波市,市島町与戸
6693314,兵庫県,丹波市,柏原町挙田
6693307,兵庫県,丹波市,柏原町石戸
6693315,兵庫県,丹波市,柏原町大新屋
6693309,兵庫県,丹波市,柏原町柏原
6693304,兵庫県,丹波市,柏原町上小倉
6693316,兵庫県,丹波市,柏原町鴨野
6693306,兵庫県,丹波市,柏原町北中
6693313,兵庫県,丹波市,柏原町北山
6693308,兵庫県,丹波市,柏原町小南
6693305,兵庫県,丹波市,柏原町下小倉
6693312,兵庫県,丹波市,柏原町田路
6693302,兵庫県,丹波市,柏原町東奥
6693311,兵庫県,丹波市,柏原町母坪
6693303,兵庫県,丹波市,柏原町見長
6693301,兵庫県,丹波市,柏原町南多田
6693411,兵庫県,丹波市,春日町朝日
6694121,兵庫県,丹波市,春日町池尾
6693413,兵庫県,丹波市,春日町石才
6694135,兵庫県,丹波市,春日町稲塚
6693404,兵庫県,丹波市,春日町牛河内
6693414,兵庫県,丹波市,春日町歌道谷
6694251,兵庫県,丹波市,春日町上三井庄
6694262,兵庫県,丹波市,春日町栢野
6694253,兵庫県,丹波市,春日町鹿場
6694141,兵庫県,丹波市,春日町黒井
6694273,兵庫県,丹波市,春日町国領
6694122,兵庫県,丹波市,春日町小多利
6693415,兵庫県,丹波市,春日町坂
6694252,兵庫県,丹波市,春日町下三井庄
6693402,兵庫県,丹波市,春日町新才
6693412,兵庫県,丹波市,春日町園部
6694125,兵庫県,丹波市,春日町多田
6694274,兵庫県,丹波市,春日町棚原
6694123,兵庫県,丹波市,春日町多利
6693403,兵庫県,丹波市,春日町長王
6694265,兵庫県,丹波市,春日町中山
6694131,兵庫県,丹波市,春日町七日市
6694124,兵庫県,丹波市,春日町野上野
6694261,兵庫県,丹波市,春日町野瀬
6694132,兵庫県,丹波市,春日町野村
6693416,兵庫県,丹波市,春日町野山
6694272,兵庫県,丹波市,春日町東中
6694133,兵庫県,丹波市,春日町平松
6694263,兵庫県,丹波市,春日町広瀬
6694134,兵庫県,丹波市,春日町古河
6694264,兵庫県,丹波市,春日町松森
6693401,兵庫県,丹波市,春日町山田
6694271,兵庫県,丹波市,春日町柚津
6693112,兵庫県,丹波市,山南町青田
6693111,兵庫県,丹波市,山南町阿草
6693125,兵庫県,丹波市,山南町池谷
6693143,兵庫県,丹波市,山南町井原
6693141,兵庫県,丹波市,山南町岩屋
6693158,兵庫県,丹波市,山南町応地
6693123,兵庫県,丹波市,山南町大河
6693113,兵庫県,丹波市,山南町太田
6693124,兵庫県,丹波市,山南町大谷
6693127,兵庫県,丹波市,山南町岡本
6693144,兵庫県,丹波市,山南町奥
6693121,兵庫県,丹波市,山南町奥野々
6693166,兵庫県,丹波市,山南町小野尻
6693167,兵庫県,丹波市,山南町小畑
6693154,兵庫県,丹波市,山南町梶
6693156,兵庫県,丹波市,山南町金倉
6693128,兵庫県,丹波市,山南町金屋
6693101,兵庫県,丹波市,山南町上滝
6693105,兵庫県,丹波市,山南町北太田
6693152,兵庫県,丹波市,山南町北和田
6693148,兵庫県,丹波市,山南町きらら通
6693151,兵庫県,丹波市,山南町草部
6693161,兵庫県,丹波市,山南町五ケ野
6693155,兵庫県,丹波市,山南町小新屋
6693162,兵庫県,丹波市,山南町坂尻
6693103,兵庫県,丹波市,山南町篠場
6693147,兵庫県,丹波市,山南町子茂田
6693102,兵庫県,丹波市,山南町下滝
6693131,兵庫県,丹波市,山南町谷川
6693122,兵庫県,丹波市,山南町玉巻
6693165,兵庫県,丹波市,山南町富田
6693126,兵庫県,丹波市,山南町長野
6693168,兵庫県,丹波市,山南町西谷
6693145,兵庫県,丹波市,山南町野坂
6693104,兵庫県,丹波市,山南町畑内
6693153,兵庫県,丹波市,山南町前川
6693146,兵庫県,丹波市,山南町南中
6693159,兵庫県,丹波市,山南町美和
6693142,兵庫県,丹波市,山南町村森
6693132,兵庫県,丹波市,山南町山崎
6693163,兵庫県,丹波市,山南町山本
6693164,兵庫県,丹波市,山南町若林
6693157,兵庫県,丹波市,山南町和田
6693606,兵庫県,丹波市,氷上町上成松
6693574,兵庫県,丹波市,氷上町朝阪
6693643,兵庫県,丹波市,氷上町伊佐口
6693464,兵庫県,丹波市,氷上町石生
6693461,兵庫県,丹波市,氷上町市辺
6693632,兵庫県,丹波市,氷上町井中
6693466,兵庫県,丹波市,氷上町稲継
6693581,兵庫県,丹波市,氷上町稲畑
6693604,兵庫県,丹波市,氷上町犬岡
6693462,兵庫県,丹波市,氷上町大崎
6693621,兵庫県,丹波市,氷上町大谷
6693612,兵庫県,丹波市,氷上町長野
6693575,兵庫県,丹波市,氷上町小野
6693611,兵庫県,丹波市,氷上町柿柴
6693613,兵庫県,丹波市,氷上町上新庄
6693631,兵庫県,丹波市,氷上町賀茂
6693645,兵庫県,丹波市,氷上町鴨内
6693463,兵庫県,丹波市,氷上町北野
6693653,兵庫県,丹波市,氷上町北油良
6693641,兵庫県,丹波市,氷上町絹山
6693626,兵庫県,丹波市,氷上町清住
6693605,兵庫県,丹波市,氷上町黒田
6693642,兵庫県,丹波市,氷上町香良
6693646,兵庫県,丹波市,氷上町小谷
6693633,兵庫県,丹波市,氷上町御油
6693652,兵庫県,丹波市,氷上町桟敷
6693582,兵庫県,丹波市,氷上町佐野
6693614,兵庫県,丹波市,氷上町下新庄
6693602,兵庫県,丹波市,氷上町常楽
6693571,兵庫県,丹波市,氷上町新郷
6693572,兵庫県,丹波市,氷上町谷村
6693625,兵庫県,丹波市,氷上町中
6693623,兵庫県,丹波市,氷上町中野
6693601,兵庫県,丹波市,氷上町成松
6693603,兵庫県,丹波市,氷上町西中
6693634,兵庫県,丹波市,氷上町沼
6693651,兵庫県,丹波市,氷上町氷上
6693644,兵庫県,丹波市,氷上町日比宇
6693583,兵庫県,丹波市,氷上町福田
6693467,兵庫県,丹波市,氷上町本郷
6693624,兵庫県,丹波市,氷上町三方
6693654,兵庫県,丹波市,氷上町南油良
6693622,兵庫県,丹波市,氷上町三原
6693573,兵庫県,丹波市,氷上町油利
6693465,兵庫県,丹波市,氷上町横田
6695200,兵庫県,朝来市,
6695125,兵庫県,朝来市,山東町粟鹿
6695123,兵庫県,朝来市,山東町一品
6695112,兵庫県,朝来市,山東町大内
6695102,兵庫県,朝来市,山東町大垣
6695153,兵庫県,朝来市,山東町大月
6695142,兵庫県,朝来市,山東町越田
6695143,兵庫県,朝来市,山東町柿坪
6695152,兵庫県,朝来市,山東町楽音寺
6695115,兵庫県,朝来市,山東町金浦
6695133,兵庫県,朝来市,山東町喜多垣
6695151,兵庫県,朝来市,山東町小谷
6695113,兵庫県,朝来市,山東町塩田
6695124,兵庫県,朝来市,山東町柴
6695111,兵庫県,朝来市,山東町新堂
6695101,兵庫県,朝来市,山東町滝田
6695114,兵庫県,朝来市,山東町野間
6695134,兵庫県,朝来市,山東町迫間
6695131,兵庫県,朝来市,山東町柊木
6695104,兵庫県,朝来市,山東町末歳
6695132,兵庫県,朝来市,山東町溝黒
6695141,兵庫県,朝来市,山東町三保
6695135,兵庫県,朝来市,山東町森
6695103,兵庫県,朝来市,山東町矢名瀬町
6695136,兵庫県,朝来市,山東町与布土
6695121,兵庫県,朝来市,山東町和賀
6695122,兵庫県,朝来市,山東町早田
6695221,兵庫県,朝来市,和田山町秋葉台
6695238,兵庫県,朝来市,和田山町朝日
6695234,兵庫県,朝来市,和田山町市場
6695262,兵庫県,朝来市,和田山町市御堂
6695236,兵庫県,朝来市,和田山町内海
6695204,兵庫県,朝来市,和田山町駅北
6695246,兵庫県,朝来市,和田山町岡
6695224,兵庫県,朝来市,和田山町岡田
6695264,兵庫県,朝来市,和田山町加都
6695253,兵庫県,朝来市,和田山町久世田
6695228,兵庫県,朝来市,和田山町久田和
6695263,兵庫県,朝来市,和田山町久留引
6695214,兵庫県,朝来市,和田山町桑原
6695251,兵庫県,朝来市,和田山町栄町
6695256,兵庫県,朝来市,和田山町三波
6695258,兵庫県,朝来市,和田山町城南台
6695220,兵庫県,朝来市,和田山町白井
6695243,兵庫県,朝来市,和田山町高田
6695252,兵庫県,朝来市,和田山町竹田
6695237,兵庫県,朝来市,和田山町竹ノ内
6695233,兵庫県,朝来市,和田山町高生田
6695268,兵庫県,朝来市,和田山町立ノ原
6695213,兵庫県,朝来市,和田山町玉置
6695265,兵庫県,朝来市,和田山町筒江
6695232,兵庫県,朝来市,和田山町寺内
6695203,兵庫県,朝来市,和田山町寺谷
6695255,兵庫県,朝来市,和田山町殿
6695226,兵庫県,朝来市,和田山町中
6695225,兵庫県,朝来市,和田山町野村
6695231,兵庫県,朝来市,和田山町林垣
6695241,兵庫県,朝来市,和田山町土田
6695202,兵庫県,朝来市,和田山町東谷
6695227,兵庫県,朝来市,和田山町東和田
6695266,兵庫県,朝来市,和田山町比治
6695261,兵庫県,朝来市,和田山町枚田
6695215,兵庫県,朝来市,和田山町枚田岡
6695211,兵庫県,朝来市,和田山町平野
6695257,兵庫県,朝来市,和田山町藤和
6695245,兵庫県,朝来市,和田山町法道寺
6695267,兵庫県,朝来市,和田山町法興寺
6695222,兵庫県,朝来市,和田山町万葉台
6695229,兵庫県,朝来市,和田山町宮
6695244,兵庫県,朝来市,和田山町宮内
6695242,兵庫県,朝来市,和田山町宮田
6695223,兵庫県,朝来市,和田山町室尾
6695254,兵庫県,朝来市,和田山町安井
6695212,兵庫県,朝来市,和田山町柳原
6695216,兵庫県,朝来市,和田山町弥生が丘
6695235,兵庫県,朝来市,和田山町和田
6695201,兵庫県,朝来市,和田山町和田山
6660200,兵庫県,川辺郡猪名川町,
6660212,兵庫県,川辺郡猪名川町,旭ケ丘
6660246,兵庫県,川辺郡猪名川町,猪名川台
6660255,兵庫県,川辺郡猪名川町,猪渕
6660244,兵庫県,川辺郡猪名川町,上野
6660241,兵庫県,川辺郡猪名川町,内馬場
6660243,兵庫県,川辺郡猪名川町,柏梨田
6660204,兵庫県,川辺郡猪名川町,柏原
6660202,兵庫県,川辺郡猪名川町,鎌倉
6660231,兵庫県,川辺郡猪名川町,上阿古谷
6660236,兵庫県,川辺郡猪名川町,北田原
6660234,兵庫県,川辺郡猪名川町,北野
6660225,兵庫県,川辺郡猪名川町,木津
6660254,兵庫県,川辺郡猪名川町,肝川
6660256,兵庫県,川辺郡猪名川町,銀山
6660223,兵庫県,川辺郡猪名川町,木間生
6660227,兵庫県,川辺郡猪名川町,笹尾
6660253,兵庫県,川辺郡猪名川町,差組
6660215,兵庫県,川辺郡猪名川町,島
6660214,兵庫県,川辺郡猪名川町,清水
6660213,兵庫県,川辺郡猪名川町,清水東
6660237,兵庫県,川辺郡猪名川町,下阿古谷
6660257,兵庫県,川辺郡猪名川町,白金
6660201,兵庫県,川辺郡猪名川町,杉生
6660238,兵庫県,川辺郡猪名川町,荘苑
6660232,兵庫県,川辺郡猪名川町,民田
6660222,兵庫県,川辺郡猪名川町,槻並
6660245,兵庫県,川辺郡猪名川町,つつじが丘
6660221,兵庫県,川辺郡猪名川町,杤原
6660203,兵庫県,川辺郡猪名川町,西畑
6660211,兵庫県,川辺郡猪名川町,仁頂寺
6660226,兵庫県,川辺郡猪名川町,林田
6660242,兵庫県,川辺郡猪名川町,原
6660252,兵庫県,川辺郡猪名川町,広根
6660262,兵庫県,川辺郡猪名川町,伏見台
6660261,兵庫県,川辺郡猪名川町,松尾台
6660224,兵庫県,川辺郡猪名川町,万善
6660235,兵庫県,川辺郡猪名川町,南田原
6660233,兵庫県,川辺郡猪名川町,紫合
6660251,兵庫県,川辺郡猪名川町,若葉
6696500,兵庫県,美方郡香美町,
6671533,兵庫県,美方郡香美町,小代区秋岡
6671512,兵庫県,美方郡香美町,小代区石寺
6671503,兵庫県,美方郡香美町,小代区大谷
6671542,兵庫県,美方郡香美町,小代区鍛治屋
6671531,兵庫県,美方郡香美町,小代区茅野
6671511,兵庫県,美方郡香美町,小代区神水
6671514,兵庫県,美方郡香美町,小代区神場
6671501,兵庫県,美方郡香美町,小代区久須部
6671522,兵庫県,美方郡香美町,小代区実山
6671541,兵庫県,美方郡香美町,小代区佐坊
6671502,兵庫県,美方郡香美町,小代区城山
6671543,兵庫県,美方郡香美町,小代区忠宮
6671532,兵庫県,美方郡香美町,小代区新屋
6671544,兵庫県,美方郡香美町,小代区貫田
6671521,兵庫県,美方郡香美町,小代区野間谷
6671545,兵庫県,美方郡香美町,小代区東垣
6671523,兵庫県,美方郡香美町,小代区平野
6671513,兵庫県,美方郡香美町,小代区広井
6671515,兵庫県,美方郡香美町,小代区水間
6696404,兵庫県,美方郡香美町,香住区相谷
6696432,兵庫県,美方郡香美町,香住区上計
6696671,兵庫県,美方郡香美町,香住区余部
6696431,兵庫県,美方郡香美町,香住区浦上
6696425,兵庫県,美方郡香美町,香住区大梶
6696554,兵庫県,美方郡香美町,香住区大谷
6696555,兵庫県,美方郡香美町,香住区大野
6696433,兵庫県,美方郡香美町,香住区沖浦
6696552,兵庫県,美方郡香美町,香住区加鹿野
6696544,兵庫県,美方郡香美町,香住区香住
6696414,兵庫県,美方郡香美町,香住区上岡
6696411,兵庫県,美方郡香美町,香住区九斗
6696402,兵庫県,美方郡香美町,香住区訓谷
6696559,兵庫県,美方郡香美町,香住区小原
6696541,兵庫県,美方郡香美町,香住区境
6696413,兵庫県,美方郡香美町,香住区下岡
6696564,兵庫県,美方郡香美町,香住区下浜
6696551,兵庫県,美方郡香美町,香住区守柄
6696556,兵庫県,美方郡香美町,香住区中野
6696546,兵庫県,美方郡香美町,香住区七日市
6696416,兵庫県,美方郡香美町,香住区丹生地
6696415,兵庫県,美方郡香美町,香住区西下岡
6696423,兵庫県,美方郡香美町,香住区畑
6696422,兵庫県,美方郡香美町,香住区土生
6696424,兵庫県,美方郡香美町,香住区隼人
6696542,兵庫県,美方郡香美町,香住区一日市
6696557,兵庫県,美方郡香美町,香住区藤
6696561,兵庫県,美方郡香美町,香住区間室
6696426,兵庫県,美方郡香美町,香住区三川
6696553,兵庫県,美方郡香美町,香住区三谷
6696401,兵庫県,美方郡香美町,香住区無南垣
6696412,兵庫県,美方郡香美町,香住区米地
6696421,兵庫県,美方郡香美町,香住区本見塚
6696545,兵庫県,美方郡香美町,香住区森
6696403,兵庫県,美方郡香美町,香住区安木
6696563,兵庫県,美方郡香美町,香住区矢田
6696558,兵庫県,美方郡香美町,香住区八原
6696562,兵庫県,美方郡香美町,香住区油良
6696672,兵庫県,美方郡香美町,香住区鎧
6696543,兵庫県,美方郡香美町,香住区若松
6671312,兵庫県,美方郡香美町,村岡区相田
6671346,兵庫県,美方郡香美町,村岡区池ケ平
6671315,兵庫県,美方郡香美町,村岡区板仕野
6671324,兵庫県,美方郡香美町,村岡区市原
6671368,兵庫県,美方郡香美町,村岡区入江
6671344,兵庫県,美方郡香美町,村岡区大笹
6671321,兵庫県,美方郡香美町,村岡区大糠
6671333,兵庫県,美方郡香美町,村岡区大野
6671323,兵庫県,美方郡香美町,村岡区耀山
6671366,兵庫県,美方郡香美町,村岡区川会
6671313,兵庫県,美方郡香美町,村岡区神坂
6671342,兵庫県,美方郡香美町,村岡区口大谷
6671353,兵庫県,美方郡香美町,村岡区熊波
6671335,兵庫県,美方郡香美町,村岡区黒田
6671354,兵庫県,美方郡香美町,村岡区柤岡
6671326,兵庫県,美方郡香美町,村岡区光陽
6671303,兵庫県,美方郡香美町,村岡区小城
6671301,兵庫県,美方郡香美町,村岡区境
6671316,兵庫県,美方郡香美町,村岡区鹿田
6671325,兵庫県,美方郡香美町,村岡区高井
6671345,兵庫県,美方郡香美町,村岡区高坂
6671365,兵庫県,美方郡香美町,村岡区高津
6671331,兵庫県,美方郡香美町,村岡区作山
6671322,兵庫県,美方郡香美町,村岡区寺河内
6671352,兵庫県,美方郡香美町,村岡区長板
6671343,兵庫県,美方郡香美町,村岡区中大谷
6671364,兵庫県,美方郡香美町,村岡区長須
6671361,兵庫県,美方郡香美町,村岡区長瀬
6671314,兵庫県,美方郡香美町,村岡区萩山
6671362,兵庫県,美方郡香美町,村岡区原
6671337,兵庫県,美方郡香美町,村岡区日影
6671334,兵庫県,美方郡香美町,村岡区福岡
6671351,兵庫県,美方郡香美町,村岡区丸味
6671363,兵庫県,美方郡香美町,村岡区味取
6671311,兵庫県,美方郡香美町,村岡区村岡
6671341,兵庫県,美方郡香美町,村岡区森脇
6671332,兵庫県,美方郡香美町,村岡区八井谷
6671336,兵庫県,美方郡香美町,村岡区宿
6671302,兵庫県,美方郡香美町,村岡区山田
6671317,兵庫県,美方郡香美町,村岡区用野
6671304,兵庫県,美方郡香美町,村岡区和佐父
6671367,兵庫県,美方郡香美町,村岡区和田
6671347,兵庫県,美方郡香美町,村岡区和池
6696700,兵庫県,美方郡新温泉町,
6696714,兵庫県,美方郡新温泉町,赤崎
6696701,兵庫県,美方郡新温泉町,芦屋
6696832,兵庫県,美方郡新温泉町,飯野
6696751,兵庫県,美方郡新温泉町,居組
6696953,兵庫県,美方郡新温泉町,石橋
6696805,兵庫県,美方郡新温泉町,伊角
6696801,兵庫県,美方郡新温泉町,井土
6696803,兵庫県,美方郡新温泉町,今岡
6696808,兵庫県,美方郡新温泉町,歌長
6696945,兵庫県,美方郡新温泉町,内山
6696952,兵庫県,美方郡新温泉町,海上
6696811,兵庫県,美方郡新温泉町,多子
6696946,兵庫県,美方郡新温泉町,越坂
6696802,兵庫県,美方郡新温泉町,金屋
6696942,兵庫県,美方郡新温泉町,鐘尾
6696752,兵庫県,美方郡新温泉町,釜屋
6696954,兵庫県,美方郡新温泉町,岸田
6696711,兵庫県,美方郡新温泉町,清富
6696815,兵庫県,美方郡新温泉町,桐岡
6696812,兵庫県,美方郡新温泉町,切畑
6696721,兵庫県,美方郡新温泉町,久谷
6696727,兵庫県,美方郡新温泉町,久斗山
6696804,兵庫県,美方郡新温泉町,熊谷
6696726,兵庫県,美方郡新温泉町,境
6696712,兵庫県,美方郡新温泉町,指杭
6696833,兵庫県,美方郡新温泉町,塩山
6696741,兵庫県,美方郡新温泉町,七釜
6696723,兵庫県,美方郡新温泉町,正法庵
6696742,兵庫県,美方郡新温泉町,新市
6696713,兵庫県,美方郡新温泉町,田井
6696728,兵庫県,美方郡新温泉町,対田
6696722,兵庫県,美方郡新温泉町,高末
6696761,兵庫県,美方郡新温泉町,竹田
6696831,兵庫県,美方郡新温泉町,竹田
6696813,兵庫県,美方郡新温泉町,丹土
6696943,兵庫県,美方郡新温泉町,千谷
6696941,兵庫県,美方郡新温泉町,千原
6696745,兵庫県,美方郡新温泉町,栃谷
6696814,兵庫県,美方郡新温泉町,中辻
6696702,兵庫県,美方郡新温泉町,浜坂
6696807,兵庫県,美方郡新温泉町,春来
6696806,兵庫県,美方郡新温泉町,桧尾
6696732,兵庫県,美方郡新温泉町,福富
6696725,兵庫県,美方郡新温泉町,藤尾
6696731,兵庫県,美方郡新温泉町,二日市
6696743,兵庫県,美方郡新温泉町,古市
6696746,兵庫県,美方郡新温泉町,戸田
6696724,兵庫県,美方郡新温泉町,辺地
6696822,兵庫県,美方郡新温泉町,細田
6696951,兵庫県,美方郡新温泉町,前
6696715,兵庫県,美方郡新温泉町,三尾
6696747,兵庫県,美方郡新温泉町,三谷
6696944,兵庫県,美方郡新温泉町,宮脇
6696753,兵庫県,美方郡新温泉町,諸寄
6696821,兵庫県,美方郡新温泉町,湯
6696744,兵庫県,美方郡新温泉町,用土
6696716,兵庫県,美方郡新温泉町,和田 |
15,531 | 46d004c8fc46bc84455695244b53980c8e4dee63 |
#THIS CODE IS AUTO-GENERATED by simple_template, DO NOT DIRECTLY EDIT UNLESS YOU KNOW WHAT YOU ARE DOING!
from numba import njit, void, prange
import numpy as np
import math
from numpy import float64, int32, int64
@njit(nogil=True, cache=True)
def LAPSE_PREMIUM_FREQ_IDX(PREM_FREQ, LAPSE_PREM_FREQ_IND):
    """Return the lapse premium-frequency index for a single record.

    When the lapse premium-frequency indicator is 0 the constant 999999 is
    returned; otherwise the record's premium frequency is passed through.
    """
    return 999999 if LAPSE_PREM_FREQ_IND == 0 else PREM_FREQ
@njit(nogil=True, parallel=True, cache=True)
def wrapped_LAPSE_PREMIUM_FREQ_IDX(PREM_FREQ, LAPSE_PREM_FREQ_IND):
    """Row-wise, parallel wrapper over LAPSE_PREMIUM_FREQ_IDX.

    Both inputs are (n, 1) arrays; the result is an (n, 1) int64 array.
    """
    n_rows = PREM_FREQ.shape[0]
    out = np.zeros((n_rows, 1), dtype=int64)
    for row in prange(n_rows):
        out[row, 0] = LAPSE_PREMIUM_FREQ_IDX(PREM_FREQ[row, 0], LAPSE_PREM_FREQ_IND[row, 0])
    return out
|
15,532 | 41e1ae57f31a5466d2a91ad338e7c1fc227f4e7d | a,b,c,d = map(int,input().split())
# Manhattan distance between (a, b) and (c, d): |a - c| + |b - d|.
# a, b, c, d are read from stdin on the preceding line.
e = abs(a - c) + abs(b - d)
print(e)
15,533 | 00ba6d7e2816ee41d2496e91ee935e85ae2804a5 |
from shared import Object
class Item(Object):
    """A game item; a thin subclass of shared.Object with no extra behaviour."""
    pass
# Module-level item catalogue.  Each Item takes (name, glyph, <number>);
# NOTE(review): the meaning of the third positional argument (1000 / 1 / 1000)
# is defined by shared.Object -- confirm whether it is weight or value.
celtic_knife = Item("Knife", '🔪', 1000)
celtic_knife.description = """
A knife with a celtic knot engraved on the hilt. The knife is sunk into a heavy stone
and cannot be removed.
"""
clover = Item("Clover", '🍀', 1)
clover.description = """
A clover leaf.
"""
lent_doll = Item("Doll", '࿄', 1000)
lent_doll.description = """
A doll, crudely sown out of burlap with a simple linen clothes with
a repeating geometric pattern, wearing a small lump of vaguely shaped
wrought iron as a medallion; its face is left blank without features. It
is suspended from a coarse pole on a hemp rope attached to its upper back.
The pole is firmly implanted into the ground.
"""
# Collect every Item instance defined above by scanning the module namespace.
items = [i for i in locals().values() if isinstance(i, Item)]
|
15,534 | 884c72ac9ee443f65a260807e7a45d65d737ed24 | import OSC
import collections
import gobject
import gtk
import itertools
import json
import os
import scalpel.gtkui
import sys
import threading
import time
# Enable GTK's thread support: OSC handlers below run on background threads
# and hand work to GTK (e.g. via gobject.idle_add).
gtk.gdk.threads_init()
def coord_str(c):
    """Join the elements of *c* into a comma-separated string, e.g. [1, 2] -> '1,2'."""
    return ','.join(map(str, c))
class GTKSound:
    """Bridges a scalpel (GTK audio editor) sound to OSC control.

    An OSC server listens for monome encoder events; a background thread
    continuously re-applies the current (_start, _end) selection to the editor.
    """
    def __init__(self, recv_port=8001):
        # Start listening for encoder OSC messages immediately.
        self.start_osc_server(recv_port)
        self._state = 0      # 0 = selection-edge mode, 1 = zoom mode (set via OSC key)
        self._iteration = 0  # counter used to throttle zoom deltas
    def load_file(self, filename):
        """Load *filename* into scalpel, select the whole sound, and start the
        loop that keeps the editor selection synced to self._start/_end."""
        self.filename = os.path.realpath(filename)
        self.sound = scalpel.gtkui.app.edit.Sound(filename)
        self.player = scalpel.gtkui.app.player.Player(self.sound)
        self.graph = scalpel.gtkui.app.graphmodel.Graph(self.sound)
        self.cursor = scalpel.gtkui.app.cursor.Cursor(self.graph, self.player)
        self.selection = scalpel.gtkui.app.selection.Selection(
            self.graph, self.cursor
        )
        self.controller = scalpel.gtkui.app.control.Controller(
            self.sound, self.player, self.graph, self.selection
        )
        scalpel.gtkui.app.new_sound_loaded(
            self.controller, self.graph, self.selection, self.cursor
        )
        self.graph.zoom_out_full()
        self.selection.select_all()
        # Cache the full-selection bounds; the sync loop below re-applies them.
        self._start, self._end = self.selection.get()
        t = threading.Thread(target=self.update_selection_loop)
        t.start()
    def start_osc_server(self, port):
        """Run a threaded OSC server dispatching monome encoder key/delta events."""
        s = OSC.ThreadingOSCServer(('localhost', port))
        s.addDefaultHandlers()
        s.addMsgHandler('/monome/enc/key', self.osc_dispatch)
        s.addMsgHandler('/monome/enc/delta', self.osc_dispatch)
        t = threading.Thread(target=s.serve_forever)
        t.start()
    def update_selection_loop(self):
        # time.sleep() returns None, so "not time.sleep(.05)" loops forever at ~20 Hz.
        while not time.sleep(.05):
            try:
                self.selection.set(self._start, self._end)
            except:
                # NOTE(review): bare except silently swallows every error,
                # including programming mistakes -- consider narrowing.
                pass
    def osc_dispatch(self, pattern, tags, data, client_address):
        """Handle encoder OSC deltas.

        In zoom mode (_state == 1) every 25th delta zooms the graph around a
        selection edge; in edge mode (_state == 0) a delta nudges the selection
        start (encoder 0) or end (encoder 1), scaled by the visible view width.
        """
        if pattern == '/monome/enc/delta':
            if self._state == 1:
                if self._iteration == 25:
                    self.graph.zoom_on(
                        self.selection.pixels()[data[0]], (100 - (data[1] * 10)) / 100.
                    )
                    self._iteration = 0
                else:
                    self._iteration += 1
            elif self._state == 0:
                start, end = self.selection.get()
                v_start, v_end = self.graph.view()
                v_width = v_end - v_start
                # One encoder tick moves an edge by 1/800th of the visible width.
                mod_ratio = (v_width / 800.)
                if data[1] > 0:
                    mod = int(abs(data[1]) * mod_ratio)
                else:
                    mod = -int(abs(data[1]) * mod_ratio)
                if data[0] == 0:
                    # Encoder 0: move the selection start, clamped to 0.
                    start += mod
                    if start < 0:
                        start = 0
                    self._start = start
                elif data[0] == 1:
                    # Encoder 1: move the selection end, clamped to the sound length.
                    end += mod
                    if end > self.graph.numframes():
                        end = self.graph.numframes()
                    self._end = end
class MonomeCutInterface:
    """Grid-button UI for cutting audio selections with a monome controller.

    The bottom half of an 8x8 grid is "playable" buttons, one row selects
    pages, and the top-left/top-right corners form a control panel (zoom,
    record/clear/edit/print selection).  Selections map grid blocks to frame
    ranges in the wrapped GTKSound editor.
    """
    def __init__(self, xmit_host='127.0.0.1', xmit_port=17448, recv_port=8000,
                 model=64):
        self.xmit_host = xmit_host
        self.xmit_port = xmit_port
        self.recv_port = recv_port
        # page_id -> {block_coords_tuple: {'filename', 'frames', 'coords'}}
        self.recorded_selections = {}
        # In-progress selection: two corner coords plus the expanded coord list.
        self.current_selection = {}
        self.current_selection['coord_one'] = None
        self.current_selection['coord_two'] = None
        self.current_selection['coords'] = []
        self.model = model
        self.setup_model()
        self.page_id = 0
        self.start_osc_server()
        self.set_level_all(0)
        # blink / blink_on drive the selection-blinking background thread.
        self.blink = threading.Event()
        self.blink_on = threading.Event()
        self.blink_thread = threading.Thread(target=self.blink_loop)
        self.blink_thread.start()
        # Control-panel buttons, keyed by 'x,y' coordinate string.
        self.control_panel_map = {
            '0,1': self._update_zoom_state,
            '1,0': self._zoom_out_left,
            '0,0': self._zoom_in_left,
            '6,0': self._edit_recorded_selection,
            '6,1': self._print_recorded_selections,
            '7,0': self._clear_selection,
            '7,1': self._record_selection,
        }
        self.gtk_sound = GTKSound()
        self.set_page()
        # Last two playable coords pressed (used by play_coord).
        self.playable_coord_stack = collections.deque(maxlen=2)
    def start_osc_server(self):
        """Listen for monome grid key events on recv_port (threaded server)."""
        s = OSC.ThreadingOSCServer(('127.0.0.1', self.recv_port))
        s.addDefaultHandlers()
        s.addMsgHandler('/monome/grid/key', self.osc_dispatch)
        t = threading.Thread(target=s.serve_forever)
        t.start()
        self.osc_client = s.client
    def set_page(self, page_id=None):
        """Switch to *page_id* (or refresh the current page) and light its button.

        Refused while a selection is in progress."""
        if self.current_selection['coords']:
            print 'WARN: cannot change pages while selection is active'
            return None
        if page_id is not None:
            self.page_id = page_id
        self.set_levels(self.page_button_coords, 0)
        self.set_level(self.page_button_coords[self.page_id], 15)
        self._clear_selection()
    def osc_dispatch(self, pattern, tags, data, client_address):
        """Route a grid key event to the control panel, selection logic,
        recorded-selection playback, or page switching."""
        if pattern == '/monome/grid/key':
            coord, state = coord_str(data[:2]), data[2]
            self.button_states[coord] = state
            if coord in self.control_panel_map:
                return self.control_panel_map[coord](coord, state)
            elif (
                coord not in self.playable_button_coords and
                coord not in self.page_button_coords
            ):
                print 'unknown button:', data
            # NOTE(review): 'state is 1' relies on CPython small-int caching;
            # '== 1' would be the safe comparison.
            if state is 1:
                self.playable_coord_stack.append(coord)
                if coord in self.playable_button_coords:
                    if coord in self.current_selection['coords']:
                        print 'coord in current selection', coord
                    elif self._recorded_selection_for_coord(coord):
                        # NOTE(review): button_states is keyed by 'x,y' strings
                        # (see setup_model), so the tuple key (6, 0) here looks
                        # like a KeyError waiting to happen -- confirm.
                        if self.button_states[(6, 0)] is 1:
                            self._update_selection_with_prerecorded_block(coord)
                        else:
                            return self.play_coord(coord)
                    if self.current_selection['coord_one'] is None:
                        return self._start_selection(coord)
                    else:
                        if self.current_selection['coord_two'] is None:
                            return self._stage_selection(coord)
                        else:
                            if self.blink.is_set():
                                # Second corner confirms, first corner cancels,
                                # anything else re-stages the selection.
                                if coord == self.current_selection['coord_two']:
                                    self._update_selection(coord)
                                elif coord == self.current_selection['coord_one']:
                                    self._clear_selection()
                                else:
                                    self._stage_selection(coord)
                            else:
                                print 'would play button'
                elif coord in self.page_button_coords:
                    self.set_page(self.page_button_coords.index(coord))
    def _edit_recorded_selection(self, coord=None, state=None):
        """Control-panel hook: announce edit mode (held state checked elsewhere)."""
        if not state: return
        print 'preparing to edit recorded selection'
    def _update_zoom_state(self, coord, state):
        # Mirrors the button state into the GTKSound zoom/edge mode switch.
        self.gtk_sound._state = state
    def _zoom_out_left(self, coord, state):
        """Zoom out around the left edge of the editor selection."""
        if state:
            pix = self.gtk_sound.selection.pixels()[0]
            self.gtk_sound.graph.zoom_on(pix, 1.5)
    def _zoom_in_left(self, coord, state):
        """Zoom in around the left edge of the editor selection."""
        if state:
            pix = self.gtk_sound.selection.pixels()[0]
            self.gtk_sound.graph.zoom_on(pix, 0.5)
    def _recorded_selection_for_coord(self, coord):
        """Return (block, selection) for the recorded block containing *coord*,
        or an empty tuple when none matches on the current page."""
        retval = ()
        for block, selection in self.recorded_selections[self.page_id].items():
            if coord in [ x[0] for x in selection['coords'] ]:
                retval = (block, selection)
        return retval
    def _start_selection(self, coord):
        """Begin a new selection at *coord* and start the blink indicator."""
        print 'starting selection'
        self.current_selection['coord_one'] = coord
        self.current_selection['coords'] = [coord]
        self.blink.set()
    def _stage_selection(self, coord):
        """Set *coord* as the second corner and light the full rectangle."""
        print 'staging current selection'
        self.set_levels(self.current_selection['coords'], 0)
        self.current_selection['coord_two'] = coord
        self.current_selection['coords'] = self.coords_in_block(','.join([
            self.current_selection['coord_one'], self.current_selection['coord_two']
        ]))
        self.set_levels(self.current_selection['coords'], 15)
    def _clear_selection(self, coord=None, state=None):
        """Drop the in-progress selection and redraw recorded blocks dimly."""
        if not state: return
        print 'clearing selection'
        self.current_selection['coord_one'] = None
        self.current_selection['coord_two'] = None
        self.blink.clear()
        self.set_levels(self.playable_button_coords, 0)
        # NOTE(review): recorded_selections keys are tuples of coord strings,
        # but coords_in_block() calls block.split(',') -- a tuple here would
        # raise AttributeError; confirm the key format.
        for block in self.recorded_selections[self.page_id]:
            self.set_levels(self.coords_in_block(block), 5)
        self.current_selection['coords'] = []
    def _update_selection(self, coord):
        """Confirm the staged selection: snapshot the editor frame range."""
        print 'updating current selection'
        self.current_selection.update({
            'graph': {'selection': self.gtk_sound.selection.get()},
            'latest_coord': self.current_selection['coords'][0],
        })
        self.blink.clear()
        self.set_levels(self.current_selection['coords'], 5)
    def _update_selection_with_prerecorded_block(self, coord):
        """Re-open a recorded block containing *coord* for editing: restore its
        frame range, re-stage its corners, and remove the stored entry."""
        print 'updating selection w/prerecorded block'
        for coords in self.recorded_selections[self.page_id]:
            if coord in self.coords_in_block(coords):
                frames = self.recorded_selections[self.page_id][coords]['frames'][:]
                break
        self.current_selection['graph']['selection'] = frames
        self._start_selection(coords[0])
        self._stage_selection(coords[1])
        del(self.recorded_selections[self.page_id][coords])
    def _record_selection(self, coord=None, state=None):
        """Persist the confirmed selection: store the file/frame range keyed by
        the block's corner coords, with per-button frame sub-slices."""
        if not state: return
        print 'recording selection'
        block_coords = (
            self.current_selection['coords'][0:1][0],
            self.current_selection['coords'][-1:][0],
        )
        self.recorded_selections[self.page_id].update({
            block_coords: {
                'filename': self.gtk_sound.filename,
                'frames': self.current_selection['graph']['selection'],
                'coords': [],
            },
        })
        for coord in self.current_selection['coords']:
            i = self.current_selection['coords'].index(coord)
            self.recorded_selections[self.page_id][block_coords]['coords'].append(
                (coord, {'frames': self._selection_slice_frames(i)})
            )
        self._clear_selection()
    def _print_recorded_selections(self, coord, state):
        """Debug dump of all recorded selections.

        NOTE(review): json.dumps cannot serialize dicts with tuple keys, so
        this likely raises TypeError once anything is recorded -- confirm."""
        if not state: return
        print json.dumps(dict(self.recorded_selections), sort_keys=True, indent=2)
    def _selection_slice_frames(self, index):
        """Frame range for the *index*-th button of the current selection.

        Each slice starts at an even offset into the selection; every slice
        ends at the selection's end frame (slice_end is always frames[1])."""
        frames = self.current_selection['graph']['selection']
        len_frames = frames[1] - frames[0]
        slice_size = len_frames / len(self.current_selection['coords'])
        slice_start = (slice_size * index) + frames[0]
        slice_end = frames[1]
        return slice_start, slice_end
    def play_coord(self, coord):
        """Play the slice recorded for *coord*; a repeated press instead stores
        the editor's current frame range back into that button's slice."""
        block, selection = self._recorded_selection_for_coord(coord)
        # NOTE(review): playable_coord_stack may hold fewer than two entries,
        # in which case [-2] raises IndexError -- confirm intended.
        if self.playable_coord_stack[-2] == coord:
            for x in self.recorded_selections[self.page_id][block]['coords']:
                if x[0] == coord:
                    i = self.recorded_selections[self.page_id][block]['coords'].index(x)
                    self.recorded_selections[self.page_id][block]['coords'][i] = \
                        (coord, {'frames': (self.gtk_sound._start, self.gtk_sound._end)})
                    break
        else:
            # NOTE(review): selection['coords'] is a list of (coord, dict)
            # tuples; indexing it with the string *coord* raises TypeError --
            # a lookup by first element seems to be intended.
            data = selection['coords'][coord]
            self.gtk_sound.selection.set(*data['frames'])
            self.gtk_sound._start = data['frames'][0]
            self.gtk_sound._end = data['frames'][1]
            self.gtk_sound.controller.play()
    def coords_in_block(self, block):
        """Expand a 'ax,ay,bx,by' corner string into the coord strings of the
        enclosed rectangle, sorted by (y, x) descending."""
        ax, ay, bx, by = [ int(x) for x in block.split(',') ]
        coords = []
        for y in range(sorted((ay, by))[0], sorted((ay, by))[1] + 1):
            for x in range(sorted((ax, bx))[0], sorted((ax, bx))[1] + 1):
                coords.append((x, y))
        coords = sorted(coords, key=lambda x: (x[1], x[0]), reverse=True)
        return tuple([ coord_str(x) for x in coords ])
    def setup_model(self):
        """Lay out the grid for the configured monome model (only 64 handled):
        playable rows, page row, and zeroed button-state map (string keys)."""
        if self.model == 64:
            self.x_size = 8
            self.y_size = 8
            self.playable_button_coords = self.coords_in_block('0,7,7,4')
            self.page_button_coords = self.coords_in_block('0,3,7,3')
            all_button_coords = self.coords_in_block('0,0,7,7')
            self.button_states = dict([ (x, 0) for x in all_button_coords ])
            for x in range(len(self.page_button_coords)):
                self.recorded_selections[x] = {}
    def blink_loop(self):
        """Background thread: while self.blink is set, toggle the current
        selection LEDs roughly every 0.5 s, polling so it can stop quickly."""
        self.blink_on.set()
        while True:
            self.blink.wait()
            if self.blink_on.is_set():
                self.set_levels(self.current_selection['coords'], 15)
                for x in range(500):
                    if self.blink_on.is_set():
                        time.sleep(0.001)
                    else:
                        break
            if self.blink.is_set():
                self.blink_on.set()
                self.set_levels(self.current_selection['coords'], 0)
                for x in range(500):
                    if self.blink.is_set():
                        time.sleep(0.001)
                    else:
                        break
    def set_level_all(self, level):
        """Set every LED on the device to *level* via /monome/grid/led/all."""
        msg = OSC.OSCMessage('/monome/grid/led/all')
        msg.append(level)
        self.osc_client.sendto(msg, (self.xmit_host, self.xmit_port))
    def set_levels(self, coords, level):
        """Set each coord in *coords* to *level* (one OSC message per LED)."""
        for coord in coords:
            self.set_level(coord, level)
    def set_level(self, coord, level):
        """Set a single LED ('x,y' string) to *level* via /monome/grid/led/set."""
        msg = OSC.OSCMessage('/monome/grid/led/set')
        msg += [ int(x) for x in coord.split(',') ] + [level]
        self.osc_client.sendto(msg, (self.xmit_host, self.xmit_port))
class MockMonome(gtk.Window):
    """A GTK window that emulates a monome grid over OSC.

    Button presses/releases are sent as /monome/grid/key messages to
    xmit_port; incoming /monome/grid/led/set messages on recv_port update
    the corresponding button label (the mock "LED level").
    """
    def __init__(self, height=8, width=8, recv_port=9500, xmit_port=9000):
        self.height = height
        self.width = width
        self.recv_port = recv_port
        self.xmit_port = xmit_port
        self.buttons = {}  # (x, y) -> gtk.Button
        self.start_osc_server()
        self.setup_gtk_window()
    def setup_gtk_window(self):
        """Build the window: one framed row of buttons per grid row,
        top row (highest y) first."""
        # Create the toplevel window
        gtk.Window.__init__(self)
        self.set_title(self.__class__.__name__)
        self.set_border_width(10)
        main_vbox = gtk.VBox()
        self.add(main_vbox)
        frame_horiz = gtk.Frame(str(self.recv_port))
        main_vbox.pack_start(frame_horiz, padding=10)
        vbox = gtk.VBox()
        vbox.set_border_width(10)
        frame_horiz.add(vbox)
        for y in sorted(range(self.height), reverse=True):
            vbox.pack_start(self.create_button_row(y), padding=0)
        self.show_all()
    def create_button_row(self, y):
        """Create one row of width buttons; the widget name encodes 'x_y'."""
        frame = gtk.Frame(None)
        bbox = gtk.HButtonBox()
        bbox.set_border_width(5)
        bbox.set_layout(gtk.BUTTONBOX_SPREAD)
        bbox.set_spacing(0)
        frame.add(bbox)
        print 'adding', bbox
        for x in range(self.width):
            button = self.buttons[(x, y)] = gtk.Button(label='0')
            button.set_name('%d_%d' % (x, y))
            button.connect('pressed', self.pressed)
            button.connect('released', self.released)
            bbox.add(button)
        return frame
    def pressed(self, button):
        """GTK callback: emit a key-down OSC message for this button."""
        dat = [ int(x) for x in button.name.split('_') ] + [1]
        msg = OSC.OSCMessage('/monome/grid/key') + dat
        self.osc_client.sendto(msg, ('127.0.0.1', self.xmit_port))
    def released(self, button):
        """GTK callback: emit a key-up OSC message for this button."""
        dat = [ int(x) for x in button.name.split('_') ] + [0]
        msg = OSC.OSCMessage('/monome/grid/key') + dat
        self.osc_client.sendto(msg, ('127.0.0.1', self.xmit_port))
    def start_osc_server(self):
        """Listen for LED-set messages on recv_port (threaded server)."""
        s = OSC.ThreadingOSCServer(('127.0.0.1', self.recv_port))
        s.addDefaultHandlers()
        s.addMsgHandler('/monome/grid/led/set', self.osc_dispatch)
        t = threading.Thread(target=s.serve_forever)
        t.start()
        self.osc_client = s.client
    def osc_dispatch(self, pattern, tags, data, client_address):
        # LED updates must happen on the GTK main loop, hence idle_add.
        if pattern == '/monome/grid/led/set':
            gobject.idle_add(self.set_led, *data)
    def set_led(self, x, y, level):
        """Show the LED level as the button's label."""
        self.buttons[(x, y)].set_label(str(level))
# Entry point: load a hard-coded sound file into the monome cut interface
# and hand control to the scalpel/GTK main loop.
if __name__ == '__main__':
    filename = '/Users/josh/tmp/5_gongs.wav'
    #filename = '/Users/josh/tmp/cw.wav'
    #mock_monome = MockMonome(recv_port=17448, xmit_port=8000)
    # to set up the remote device port create an OSCClient as c and do:
    # c.sendto(OSC.OSCMessage('/sys/port') + 8001, ('127.0.0.1', 17441))
    monome_cut_interface = MonomeCutInterface()
    monome_cut_interface.gtk_sound.load_file(filename)
    scalpel.gtkui.main_loop()
|
15,535 | aa32bd6bb798d5025f43549de594f8f58d543c0e | class Coord:
def __init__(self,x=0,y=0):
self.x = x
self.y = y
def __sum__(self, other):
x_distance = (self.x - other.x)**2
y_distance = (self.y - other.y)**2
return ( (x_distance+y_distance) ** (1/2) )
class Entity:
    """Something placed in the world; simply records its coordinate."""

    def __init__(self, coord):
        self.coord = coord
def colorSeen(human, cloud):
    """Return "grey" when the cloud is within 5 units of the human, else "blue".

    Bug fixed: the original evaluated ``cloud.coord + human.coord``, but the
    Coord class defines ``__sum__`` rather than ``__add__``, so ``+`` raised
    TypeError.  The Euclidean distance is now computed directly from the
    coordinates, so this function works regardless of the dunder's name.
    """
    distance = ((cloud.coord.x - human.coord.x) ** 2
                + (cloud.coord.y - human.coord.y) ** 2) ** 0.5
    if distance < 5:
        return "grey"
    return "blue"
# Demo: human and cloud share the same spot, so the distance is 0 ("grey").
human_entity = Entity(Coord(1,1))
cloud_entity = Entity(Coord(1,1))
# NOTE(review): with the class as defined above (``__sum__`` instead of
# ``__add__``), the ``+`` inside colorSeen raises TypeError here -- confirm
# the dunder rename has been applied.
print( f"I see {colorSeen(human_entity, cloud_entity)}" )
15,536 | 4083ea9396d33eea956881afe777fc01391602f0 | # Copyright 2022 MosaicML Composer authors
# SPDX-License-Identifier: Apache-2.0
"""Loss-related utilities."""
from __future__ import annotations
import warnings
from typing import Optional
import torch
__all__ = ['infer_target_type', 'ensure_targets_one_hot', 'check_for_index_targets']
def infer_target_type(input: torch.Tensor, targets: torch.Tensor) -> str:
    """Classify *targets* as ``'one_hot'`` (same shape as *input*) or
    ``'indices'`` (one dimension fewer than *input*).

    Example indices format: [1, 4, 7]; example one_hot format
    [[0, 1, 0], [1, 0, 0], ...].
    """
    if input.shape == targets.shape:
        return 'one_hot'
    if input.ndim == targets.ndim + 1:
        return 'indices'
    raise RuntimeError(f'Unable to infer indices or one_hot. Targets has shape {targets.shape}'
                       f' and the inputs to cross entropy has shape {input.shape}. For one_hot, '
                       'expect targets.shape == inputs.shape. For indices, expect '
                       'inputs.ndim == targets.ndim + 1')
def ensure_targets_one_hot(input: torch.Tensor,
                           targets: torch.Tensor,
                           num_classes: Optional[int] = None) -> torch.Tensor:
    r"""Return *targets* as float one-hot labels, converting from index form
    when necessary.

    Args:
        input (torch.Tensor): :math:`(N, C)` (or :math:`(N, C, d_1, ..., d_K)`)
            unnormalized scores, often referred to as logits.
        targets (torch.Tensor): class indices of shape :math:`(N)` /
            :math:`(N, d_1, ..., d_K)`, or class probabilities shaped like
            *input*.
        num_classes (int, optional): number of classes; inferred from
            ``input.shape[1]`` when omitted. Default: ``None``
    """
    if infer_target_type(input, targets) == 'indices':
        # Infer the class count from the input's class dimension if needed.
        cls_count = input.shape[1] if num_classes is None else num_classes
        targets = _one_hot(targets, num_classes=cls_count, dim=1)
    return targets.float()
def check_for_index_targets(targets: torch.Tensor) -> bool:
    """True when *targets* holds integer class indices (judged by dtype)."""
    return targets.dtype in (torch.uint8, torch.int8, torch.int16, torch.int32, torch.int64)
def _one_hot(tensor: torch.Tensor, num_classes: int = -1, dim: int = -1) -> torch.Tensor:
    """Convert a tensor of index class labels to one-hot class labels.

    Implementation is based on MONAI's one-hot conversion function:
    `<https://github.com/Project-MONAI/MONAI/blob/b390b0956334325edc0e5000afb58e2be7cbe550/monai/networks/utils.py#L49>`_.

    Args:
        tensor (torch.Tensor): integer class labels.
        num_classes (int): size of the class dimension of the output; ``-1``
            infers it as one more than the largest value in *tensor*.
        dim (int): where the new class dimension of size ``num_classes`` goes.

    Returns:
        torch.Tensor: one-hot labels shaped like *tensor* with an extra
        ``num_classes``-sized dimension inserted at ``dim``.
    """
    if not check_for_index_targets(tensor):
        raise ValueError(f'tensor must be integer type, current type: {tensor.dtype}')

    max_index = tensor.max() + 1
    if num_classes == -1:
        num_classes = int(max_index)
    if num_classes < max_index:
        raise ValueError(f'num_classes must be greater than or equal to tensor.max() + 1: {num_classes} < {max_index}')

    # Negative labels cannot be one-hot encoded; remap them to a throwaway
    # extra class that is sliced off at the end.
    neg_indices = tensor.min() < 0
    if neg_indices:
        warnings.warn('Negative label indices are being ignored in conversion to one-hot labels')
        tensor = tensor.clone().long()
        tensor[tensor < 0] = num_classes
        num_classes += 1  # Add extra class for negative indices

    # Insert the class dimension, then scatter ones at each label position.
    tensor = tensor.unsqueeze(dim)
    out_shape = list(tensor.shape)
    out_shape[dim] = num_classes
    encoded = torch.zeros(size=out_shape, dtype=tensor.dtype, device=tensor.device)
    encoded.scatter_(dim=dim, index=tensor, value=1)

    # Drop the throwaway class used for negative labels.
    if neg_indices:
        encoded = encoded[:, 0:-1]
    return encoded
15,537 | ebe6c8578e5671dfb34bb23cf7a3d2a8bc2242dc | from selenium import webdriver
from selenium.webdriver.common.keys import Keys
import time
driver = webdriver.Chrome()
driver.get("https://www.messenger.com/t/link-to-chat-conversation")
def test_function_template(sleeptime, keyword):
    """Template for Messenger UI tests driven by the module-level *driver*.

    Customize this function according to your testing needs.  You can get the
    XPATH, SELECTOR, ID, NAME, etc. of elements via a browser, e.g.
    Ctrl-Shift-I in Chrome.  For more info, visit:
    https://selenium-python.readthedocs.io/

    Args:
        sleeptime: seconds to wait at the end so elements can load.
        keyword: text typed into the element located by *selector*.

    Bug fixed: the body referenced the undefined name ``text`` (NameError at
    runtime) while the ``keyword`` parameter went unused; it now uses
    ``keyword``.
    """
    xpath = "Replace me with XPATH value."
    selector = "Replace me with SELECTOR value."
    # Locating an element via its XPATH.
    driver.find_element_by_xpath(xpath).click()
    # Locating an element via its CSS SELECTOR and typing in text.
    driver.find_element_by_css_selector(selector).send_keys(keyword)
    # Example of how to press ENTER.
    driver.find_element_by_css_selector(selector).send_keys(Keys.RETURN)
    # Click a link with text "Start Over".
    driver.find_element_by_link_text("Start Over").click()
    # TODO: Sleep is a little cheat to wait for the elements to load. Refactor later on.
    time.sleep(sleeptime)
def test_sample_flow(sleeptime, text):
    """Type *text* into the Messenger composer and press ENTER to send it,
    sleeping *sleeptime* seconds after each interaction."""
    selector = "#cch_f1c03c147c28ab8 > div._20bp > div._4_j4 > div._4rv3._7og6 > div > div._7kpk > div > div > div:nth-child(1) > div > div._5rpb > div"
    # Type the message into the composer.
    driver.find_element_by_css_selector(selector).send_keys(text)
    time.sleep(sleeptime)
    # Re-locate the composer and hit ENTER to send.
    driver.find_element_by_css_selector(selector).send_keys(Keys.RETURN)
    time.sleep(sleeptime)
def main():
    """Run the sample flow once per keyword listed in ``keywords.txt``
    (one keyword per line).

    Bugs fixed: the original called the undefined name ``test_nagtetake``
    (``test_sample_flow`` is the only flow defined in this module -- confirm
    that is the intended target) and never closed the file handle (now
    handled by ``with``).
    """
    with open('keywords.txt', 'r') as f:
        keywords = f.readlines()
    # NOTE(review): lines keep their trailing newline, matching the original
    # behaviour of passing raw readlines() output straight through.
    for kw in keywords:
        test_sample_flow(1, kw)
15,538 | f15d4e2baf7a88ff1c33afe84904c856657bc87e | #!/usr/bin/env python2.7
from scapy.all import *
from utility.wpa_struct_for_scapy import *
from utility.util import *
from Crypto.Hash import HMAC , SHA
from Crypto.Cipher import ARC4
from crypto.util import *
import binascii
import inspect
class HandleEAPoL:
""" Handle Extensible Authentication Protocol (EAP) over LAN (EAPoL) Frames.
"""
######################################################################################
### Initializer ######################################################################
######################################################################################
def __init__( self , logger , iface , addr1 , addr2 , addr3 , ssid ):
""" Initializer.
"""
self.logger = logger
self.iface = iface
self.addr1 = addr1
self.addr2 = addr2
self.addr3 = addr3
self.broadcast = 'ff:ff:ff:ff:ff:ff'
self.ssid = ssid
# Settings
self.passphrase = None
self.ANonce = None
self.SNonce = os.urandom( 32 ) # Random 32-octet nonce.
self.A = 'Pairwise key expansion'
self.B = None
self.keyID = 'idx0' # The default Key Identifier.
# Replay Counter
self.replayCounter = 0
# Keys
self.PMK = None # Pairwise Master Key
self.PTK = None # Pairwise Transient Key
self.KCK = None # EAPOL-Key Confirmation Key
self.KEK = None # EAPOL-Key Encryption Key
self.TK = None # Temporal Key
self.MMICTxK = None # Michael MIC Authenticator Tx Key
self.MMICRxK = None # Michael MIC Authenticator Rx Key
self.GTK = None # Group Temporal Key
# Handlers for cryptographic encapsulation and decapsulation.
self.handleTKIP = None
self.handleAES = None
def setPassphrase( self , passphrase ):
    """ Set the passphrase used in TKIP and AES, and generate the PMK.

    The PMK is derived via pbkdf2_bin(passphrase, ssid, 4096, 32) --
    presumably PBKDF2 per the WPA2 PMK derivation; confirm in crypto.util.
    The resulting key is recorded through the key logger.
    """
    self.passphrase = passphrase
    # Generate and log the generated PMK.
    self.PMK = pbkdf2_bin( self.passphrase , self.ssid , 4096 , 32 )
    self.logger.logKey( 'Pairwise Master Key' , self.PMK )
def setCryptographicHandlers(self, tkip=None, aes=None):
    """Register the handlers used for cryptographic encapsulation and
    decapsulation (one for TKIP, one for AES)."""
    self.handleTKIP = tkip
    self.handleAES = aes
def __assertWPAKeyMIC( self , packet , digest ):
    """ Assert that the EAPoL WPA Key layer has a valid MIC.

    Recomputes HMAC(KCK, EAPOL frame with the MIC field zeroed) using the
    supplied hash module and compares its first 16 bytes with the MIC
    carried in the frame.

    Args:
        packet: scapy packet containing an EAPOL_WPAKey layer.
        digest: hash module passed to HMAC (e.g. Crypto.Hash MD5/SHA).

    Raises:
        AssertionError: if the MIC flag is unset or the MICs differ.
    """
    # Get the Key Information and assert that the MIC bit was set.
    keyinfo = packet.getlayer( EAPOL_WPAKey ).KeyInfo
    flaglist = self.__getFlaglist( keyinfo )
    assert( 'mic' in flaglist ), \
        'The MIC flag in the EAPoL WPA Key layer was not set.'
    # Save the received MIC.
    micReceived = packet.getlayer( EAPOL_WPAKey ).WPAKeyMIC
    # Retrieve the EAPoL layer and clear its original MIC.
    # Re-calculate the MIC over the resulting string.
    eapolPacket = packet.getlayer( EAPOL )
    eapolPacket.getlayer( EAPOL_WPAKey ).WPAKeyMIC = '\x00'*16
    data = str( eapolPacket )
    micCalculated = HMAC.new( self.KCK , msg=data , digestmod=digest )
    # Only the first 16 bytes of the HMAC output form the WPA Key MIC.
    micCalculated = micCalculated.digest()[:16]
    # Assert the integrity by comparing the original and calculated digest.
    assert( micReceived == micCalculated ), \
        'The received WPA Key MIC "%s" does not match the calculated WPA Key MIC ' \
        '"%s".' % ( micReceived.encode('hex') , micCalculated.encode('hex') )
def __getKeyInformation(self, flaglist):
    """Build the EAPOL-Key "Key Information" integer from a list of flag names.

    Note that not all the bits defined in the specification are supported
    here.  'group' and 'idx0' correspond to cleared bits, so they require no
    action.  Ref. IEEE 802.11i specification; EAPOL-Key frames.
    """
    bit_for_flag = (
        ('HMAC_MD5_RC4', 0),
        ('HMAC_SHA1_AES', 1),
        ('pairwise', 3),
        ('idx1', 4),
        ('idx2', 5),
        ('install', 6),
        ('ack', 7),
        ('mic', 8),
        ('secure', 9),
        ('error', 10),
        ('request', 11),
        ('encrypted', 12),
    )
    keyinfo = 0
    # Same ascending-bit order as the original if-chain.
    for flag, bit in bit_for_flag:
        if flag in flaglist:
            keyinfo = setBit(keyinfo, bit)
    return keyinfo
def __getFlaglist(self, keyinfo):
    """Decode the EAPOL-Key "Key Information" integer into a list of flag names.

    Note that not all the bits defined in the specification are supported
    here.  Bit 3 distinguishes 'group' (0) from 'pairwise' (1); the key index
    is 'idx0' only when bits 4 and 5 are both clear.
    Ref. IEEE 802.11i specification; EAPOL-Key frames.
    """
    flaglist = []
    if getBit(keyinfo, 0) == 1:
        flaglist.append('HMAC_MD5_RC4')
    if getBit(keyinfo, 1) == 1:
        flaglist.append('HMAC_SHA1_AES')
    if getBit(keyinfo, 3) == 0:
        flaglist.append('group')
    if getBit(keyinfo, 3) == 1:
        flaglist.append('pairwise')
    if getBit(keyinfo, 4) == 0 and getBit(keyinfo, 5) == 0:
        flaglist.append('idx0')
    # Remaining bits map one-to-one onto flag names (same order as before).
    for flag, bit in (('idx1', 4), ('idx2', 5), ('install', 6), ('ack', 7),
                      ('mic', 8), ('secure', 9), ('error', 10),
                      ('request', 11), ('encrypted', 12)):
        if getBit(keyinfo, bit) == 1:
            flaglist.append(flag)
    return flaglist
def __setKeyIDFromFlaglist(self, flaglist):
    """Remember the key index ('idx0'/'idx1'/'idx2') present in *flaglist*.

    When several indices are present the highest one wins, matching the
    original sequence of independent if-statements; when none is present
    self.keyID is left untouched.
    """
    for key_index in ('idx0', 'idx1', 'idx2'):
        if key_index in flaglist:
            self.keyID = key_index
######################################################################################
### Four Way Handshake ###############################################################
######################################################################################
# ------------------------------------------------------------------------------------
# --- Four Way Handshake 1/4 ---------------------------------------------------------
# ------------------------------------------------------------------------------------
def fw_handshake_1_4( self , packet ):
    """ 4-Way Handshake 1/4.

    Validates the received EAPOL-Key message 1/4, stores the authenticator
    nonce and replay counter, then derives the PTK from the PMK and splits
    it into its sub-keys (KCK, KEK, TK, Michael MIC Tx/Rx).

    Args:
        packet: scapy frame containing the EAPOL_WPAKey layer of message 1/4.

    Raises:
        AssertionError: on FCS mismatch or when the key-information flags do
            not match message 1/4 (pairwise + ack, no install/mic/secure).
    """
    # Check if the Frame Check Sequence (FCS) flag is set in the Radiotap header, and
    # if so assert the correctness of the FCS.
    radiotapFCSFlag = hasFCS( packet )
    if radiotapFCSFlag is True:
        assertDot11FCS( packet )
        packet.getlayer( EAPOL_WPAKey ).remove_payload() # Remove the FCS.
    # Assert on the flags in the Key Information to verify it is FWHS Message 1/4.
    # It is either HMAC_MD5_RC4 or HMAC_SHA1_AES.
    flaglist = self.__getFlaglist( packet.getlayer( EAPOL_WPAKey ).KeyInfo )
    errorMessage = 'The received packet is not 4-Way Handshake Message 1/4.'
    assert( 'pairwise' in flaglist ), errorMessage
    assert( 'install' not in flaglist ), errorMessage
    assert( 'ack' in flaglist ), errorMessage
    assert( 'mic' not in flaglist ), errorMessage
    assert( 'secure' not in flaglist ), errorMessage
    self.logger.log( self.logger.RECEIVED , 'EAPOL 4-Way Handshake Message 1/4' )
    # Retrieve the authenticator nonce and calculate the pre-requirements for the PTK.
    nonce = packet.getlayer( EAPOL_WPAKey ).Nonce
    addr1 = binascii.a2b_hex( self.addr1.replace( ':' , '' ) )
    addr2 = binascii.a2b_hex( self.addr2.replace( ':' , '' ) )
    self.ANonce = binascii.a2b_hex( nonce.encode('hex') )
    # B = min/max ordering of the two MACs followed by min/max ordering of
    # the two nonces (PRF input for pairwise key expansion).
    self.B = min( addr1 , addr2 ) + max( addr1 , addr2 )
    self.B += min( self.ANonce , self.SNonce ) + max( self.ANonce , self. SNonce )
    # Update the Replay Counter.
    self.replayCounter = packet.getlayer( EAPOL_WPAKey ).ReplayCounter
    # Generate the PTK and set the KCK, KEK, TK, MMICTxK and MMICRxK.
    self.PTK = customPRF512( self.PMK , self.A , self.B )
    self.KCK = self.PTK[00:16]
    self.KEK = self.PTK[16:32]
    self.TK = self.PTK[32:48]
    self.MMICTxK = self.PTK[48:56]
    self.MMICRxK = self.PTK[56:64]
    # Log the generated keys.
    self.logger.logKey( 'Pairwise Transient Key' , self.PTK )
    self.logger.logKey( 'EAPOL-Key Confirmation Key' , self.KCK )
    self.logger.logKey( 'EAPOL-Key Encryption Key' , self.KEK )
    self.logger.logKey( 'Temporal Key' , self.TK )
    self.logger.logKey( 'Michael MIC Authenticator Tx Key' , self.MMICTxK )
    self.logger.logKey( 'Michael MIC Authenticator Rx Key' , self.MMICRxK )
# ------------------------------------------------------------------------------------
# --- Four Way Handshake 2/4 ---------------------------------------------------------
# ------------------------------------------------------------------------------------
def fw_handshake_2_4_tkip( self , vendor , eapolMIC = True , eapolMICFlag = True , customFlaglist = None , customRC = None ):
    """ 4-Way Handshake 2/4 (TKIP).

    Builds and transmits EAPOL-Key message 2/4 with the HMAC-MD5/RC4
    descriptor, carrying the supplicant nonce (SNonce).

    Args:
        vendor: vendor IE type embedded in the key data, or 'NONE' to omit it.
        eapolMIC: when True, compute and attach the HMAC-MD5 key MIC.
        eapolMICFlag: when True, set the 'mic' bit in Key Information.
        customFlaglist: replaces the default flag list entirely (fuzzing hook).
        customRC: 'lower'/'higher' perturbs the replay counter (fuzzing hook).
    """
    parameterList = 'vendor=' + str(vendor) + ',eapolMIC=' + str(eapolMIC) + ',eapolMICFlag=' + str(eapolMICFlag) + ',customFlaglist=' + str(customFlaglist) + ',customRC=' + str(customRC)
    self.logger.log( self.logger.TRANSMIT , 'EAPOL 4-Way Handshake Message 2/4 TKIP (' + parameterList + ')')
    try:
        # Create an empty EAPOL WPA Key packet.
        packet = EAPOL( version=1 , type='EAPOL-Key' )/EAPOL_Key()/EAPOL_WPAKey()
        packetKey = packet.getlayer( EAPOL_WPAKey )
        if vendor != 'NONE':
            vendorInfo = Dot11Elt( ID='vendor' , info=getVendorInfo( type=vendor ) )
        flaglist = ['HMAC_MD5_RC4','idx0','pairwise']
        if eapolMICFlag is True:
            flaglist.append('mic')
        # Fill in the fields.
        if customFlaglist is not None:
            flaglist = customFlaglist
        packetKey.KeyInfo = self.__getKeyInformation( flaglist )
        if customRC is not None:
            # NOTE(review): this mutates self.replayCounter persistently, not
            # just for this one frame -- confirm that is intended.
            if customRC == 'lower':
                self.replayCounter -= 1
            elif customRC == 'higher':
                self.replayCounter += 1
        packetKey.ReplayCounter = self.replayCounter
        packetKey.Nonce = self.SNonce
        if vendor != 'NONE':
            packetKey.WPAKeyLength = len( vendorInfo )
            packetKey.WPAKey = vendorInfo
        # Calculate and add the MIC.
        if eapolMIC is True:
            mic = HMAC.new( self.KCK , msg=str( packet ) , digestmod=Crypto.Hash.MD5 )
            packetKey.WPAKeyMIC = mic.digest()
        # Transmit.
        sendp(RadioTap()/
            Dot11( addr1=self.addr1 , addr2=self.addr2 , addr3=self.addr1 , type='Data' , subtype=0x00 , FCfield='to-DS' )/
            LLC( dsap=0xaa , ssap=0xaa , ctrl=0x03 )/
            SNAP( OUI=0x000000 , code=0x888e )/
            packet,
            iface=self.iface , verbose=False )
    except:
        # Re-raise unchanged; the try/except only marks the frame-build scope.
        raise
def fw_handshake_2_4_aes( self , vendor , eapolMIC = True , eapolMICFlag = True , customFlaglist = None , customRC = None ):
    """ 4-Way Handshake 2/4 (WPA).

        AES/CCMP counterpart of fw_handshake_2_4_tkip: same frame layout,
        but the Key Information advertises HMAC-SHA1-AES and the EAPoL MIC
        is HMAC-SHA1 keyed with the KCK.

        vendor         -- vendor information element for the key-data field,
                          or 'NONE' to omit it.
        eapolMIC       -- compute and insert the EAPoL MIC when True.
        eapolMICFlag   -- set the 'mic' bit in Key Information when True.
        customFlaglist -- override the default Key Information flag list.
        customRC       -- 'lower'/'higher' to deliberately desynchronise the
                          replay counter (fuzzing support).
    """
    parameterList = 'vendor=' + str(vendor) + ',eapolMIC=' + str(eapolMIC) + ',eapolMICFlag=' + str(eapolMICFlag) + ',customFlaglist=' + str(customFlaglist) + ',customRC=' + str(customRC)
    self.logger.log( self.logger.TRANSMIT , 'EAPOL 4-Way Handshake Message 2/4 AES (' + parameterList + ')')
    # The original wrapped everything below in "try: ... except: raise",
    # which is a no-op and has been removed without changing behaviour.
    # Create an empty EAPOL WPA Key packet.
    packet = EAPOL( version=1 , type='EAPOL-Key' )/EAPOL_Key()/EAPOL_WPAKey()
    packetKey = packet.getlayer( EAPOL_WPAKey )
    if vendor != 'NONE':
        vendorInfo = Dot11Elt( ID='vendor' , info=getVendorInfo( type=vendor ) )
    flaglist = ['HMAC_SHA1_AES','idx0','pairwise']
    if eapolMICFlag is True:
        flaglist.append('mic')
    # Fill in the fields.
    if customFlaglist is not None:
        flaglist = customFlaglist
    packetKey.KeyInfo = self.__getKeyInformation( flaglist )
    if customRC is not None:
        if customRC == 'lower':
            self.replayCounter -= 1
        elif customRC == 'higher':
            self.replayCounter += 1
    packetKey.ReplayCounter = self.replayCounter
    packetKey.Nonce = self.SNonce
    if vendor != 'NONE':
        packetKey.WPAKeyLength = len( vendorInfo )
        packetKey.WPAKey = vendorInfo
    # Calculate and add the MIC (HMAC-SHA1 over the serialized frame, keyed
    # with the Key Confirmation Key).
    if eapolMIC is True:
        mic = HMAC.new( self.KCK , msg=str( packet ) , digestmod=Crypto.Hash.SHA )
        packetKey.WPAKeyMIC = mic.digest()
    # Transmit.
    sendp(RadioTap()/
          Dot11( addr1=self.addr1 , addr2=self.addr2 , addr3=self.addr1 , type='Data' , subtype=0x00 , FCfield='to-DS' )/
          LLC( dsap=0xaa , ssap=0xaa , ctrl=0x03 )/
          SNAP( OUI=0x000000 , code=0x888e )/
          packet,
          iface=self.iface , verbose=False )
# ------------------------------------------------------------------------------------
# --- Four Way Handshake 3/4 ---------------------------------------------------------
# ------------------------------------------------------------------------------------
def fw_handshake_3_4_tkip( self , packet ):
    """ 4-Way Handshake 3/4 (TKIP).

        Validates that *packet* is message 3/4: checks the optional Radiotap
        FCS, matches the Key Information flags, adopts the authenticator's
        replay counter (so message 4/4 can echo it) and verifies the
        EAPoL-Key MIC with HMAC-MD5.  Raises AssertionError on any mismatch.
    """
    # Check if the Frame Check Sequence (FCS) flag is set in the Radiotap header, and
    # if so assert the correctness of the FCS.
    radiotapFCSFlag = hasFCS( packet )
    if radiotapFCSFlag is True:
        assertDot11FCS( packet )
        packet.getlayer( EAPOL_WPAKey ).remove_payload() # Remove the FCS.
    # Assert on the flags in the Key Information to verify it is FWHS Message 3/4.
    keyinfoReceived = packet.getlayer( EAPOL_WPAKey ).KeyInfo
    # Adopt the authenticator's replay counter before validating the flags.
    self.replayCounter = packet.getlayer( EAPOL_WPAKey ).ReplayCounter
    flaglist = ['HMAC_MD5_RC4','idx0','pairwise','install','ack','mic']
    keyinfoCalculated = self.__getKeyInformation( flaglist )
    assert( keyinfoReceived == keyinfoCalculated ), \
        'The received packet is not 4-Way Handshake Message 3/4.'
    self.logger.log( self.logger.RECEIVED , 'EAPOL 4-Way Handshake Message 3/4 TKIP' )
    # Assert that the EAPoL WPA Key layer has a valid MIC.
    self.__assertWPAKeyMIC( packet , Crypto.Hash.MD5 )
def fw_handshake_3_4_aes( self , packet ):
    """ 4-Way Handshake 3/4 (WPA).

        AES/CCMP counterpart of fw_handshake_3_4_tkip: expects the
        HMAC-SHA1-AES Key Information flags and verifies the MIC with
        HMAC-SHA1.  Raises AssertionError on any mismatch.
    """
    # Check if the Frame Check Sequence (FCS) flag is set in the Radiotap header, and
    # if so assert the correctness of the FCS.
    radiotapFCSFlag = hasFCS( packet )
    if radiotapFCSFlag is True:
        assertDot11FCS( packet )
        packet.getlayer( EAPOL_WPAKey ).remove_payload() # Remove the FCS.
    # Assert on the flags in the Key Information to verify it is FWHS Message 3/4.
    keyinfoReceived = packet.getlayer( EAPOL_WPAKey ).KeyInfo
    # Adopt the authenticator's replay counter before validating the flags.
    self.replayCounter = packet.getlayer( EAPOL_WPAKey ).ReplayCounter
    flaglist = ['HMAC_SHA1_AES','idx0','pairwise','install','ack','mic']
    keyinfoCalculated = self.__getKeyInformation( flaglist )
    assert( keyinfoReceived == keyinfoCalculated ), \
        'The received packet is not 4-Way Handshake Message 3/4.'
    self.logger.log( self.logger.RECEIVED , 'EAPOL 4-Way Handshake Message 3/4 AES' )
    # Assert that the EAPoL WPA Key layer has a valid MIC.
    self.__assertWPAKeyMIC( packet , Crypto.Hash.SHA )
# ------------------------------------------------------------------------------------
# --- Four Way Handshake 4/4 ---------------------------------------------------------
# ------------------------------------------------------------------------------------
def fw_handshake_4_4_tkip( self , eapolMIC = True , eapolMICFlag = True , customFlaglist = None , addNonce = None , customRC = None , addData = None ):
    """ 4-Way Handshake 4/4 (TKIP).

        Builds and transmits the supplicant's final 4-Way Handshake message.
        The optional parameters deliberately malform the frame (fuzzing):

        eapolMIC       -- compute and insert the HMAC-MD5 EAPoL MIC when True.
        eapolMICFlag   -- set the 'mic' bit in Key Information when True.
        customFlaglist -- override the default Key Information flag list.
        addNonce       -- 'supplicant'/'authenticator'/'random': include a nonce.
        customRC       -- 'lower'/'higher': desynchronise the replay counter.
        addData        -- 'data'/'dataNoLength'/'dataShortLength'/'dataLongLength':
                          attach 32 random key-data bytes with a correct, zero,
                          short or long length field.

        NOTE: IEEE 802.11i specification requires 'secure' flag. Works with and
              without, yet Wireshark does not identify message as 4/4 when the
              secure flag has been set.
    """
    parameterList = 'eapolMIC=' + str(eapolMIC) + ',eapolMICFlag=' + str(eapolMICFlag) + ',customFlaglist=' + str(customFlaglist) + ',addNonce=' + str(addNonce) + ',customRC=' + str(customRC) + ',addData=' + str(addData)
    self.logger.log( self.logger.TRANSMIT , 'EAPOL 4-Way Handshake Message 4/4 TKIP (' + parameterList + ')')
    # The try/except below only re-raises; kept for symmetry with siblings.
    try:
        # Create an empty EAPOL WPA Key packet.
        packet = EAPOL( version=1 , type='EAPOL-Key' )/EAPOL_Key()/EAPOL_WPAKey()
        packetKey = packet.getlayer( EAPOL_WPAKey )
        flaglist = ['HMAC_MD5_RC4','idx0','pairwise']
        if eapolMICFlag is True:
            flaglist.append('mic')
        # Fill in the fields.
        if customFlaglist is not None:
            flaglist = customFlaglist
        packetKey.KeyInfo = self.__getKeyInformation( flaglist )
        if customRC is not None:
            if customRC == 'lower':
                self.replayCounter -= 1
            elif customRC == 'higher':
                self.replayCounter += 1
        packetKey.ReplayCounter = self.replayCounter
        if addNonce is not None:
            if addNonce == 'supplicant':
                packetKey.Nonce = self.SNonce
            if addNonce == 'authenticator':
                packetKey.Nonce = self.ANonce
            if addNonce == 'random':
                packetKey.Nonce = binascii.a2b_hex( os.urandom( 32 ).encode('hex') )
        if addData is not None:
            if addData == 'data':
                packetKey.WPAKeyLength = 32
                packetKey.WPAKey = binascii.a2b_hex( os.urandom( 32 ).encode('hex') )
            if addData == 'dataNoLength':
                packetKey.WPAKeyLength = 0
                packetKey.WPAKey = binascii.a2b_hex( os.urandom( 32 ).encode('hex') )
            if addData == 'dataShortLength':
                packetKey.WPAKeyLength = 16
                packetKey.WPAKey = binascii.a2b_hex( os.urandom( 32 ).encode('hex') )
            if addData == 'dataLongLength':
                packetKey.WPAKeyLength = 48
                packetKey.WPAKey = binascii.a2b_hex( os.urandom( 32 ).encode('hex') )
        # Calculate and add the MIC.
        if eapolMIC is True:
            mic = HMAC.new( self.KCK , msg=str( packet ) , digestmod=Crypto.Hash.MD5 )
            packetKey.WPAKeyMIC = mic.digest()
        # Transmit.
        sendp(RadioTap()/
              Dot11( addr1=self.addr1 , addr2=self.addr2 , addr3=self.addr1 , type='Data' , subtype=0x00 , FCfield='to-DS' )/
              LLC( dsap=0xaa , ssap=0xaa , ctrl=0x03 )/
              SNAP( OUI=0x000000 , code=0x888e )/
              packet,
              iface=self.iface , verbose=False )
    except:
        raise
def fw_handshake_4_4_aes( self , eapolMIC = True , eapolMICFlag = True , customFlaglist = None , addNonce = None , customRC = None , addData = None ):
    """ 4-Way Handshake 4/4 (WPA).

        AES/CCMP counterpart of fw_handshake_4_4_tkip: HMAC-SHA1-AES flags
        and an HMAC-SHA1 EAPoL MIC.  The optional parameters deliberately
        malform the frame (fuzzing); see fw_handshake_4_4_tkip for details.

        NOTE: IEEE 802.11i specification requires 'secure' flag. Works with and
              without, yet Wireshark does not identify message as 4/4 when the
              secure flag has been set.
    """
    parameterList = 'eapolMIC=' + str(eapolMIC) + ',eapolMICFlag=' + str(eapolMICFlag) + ',customFlaglist=' + str(customFlaglist) + ',addNonce=' + str(addNonce) + ',customRC=' + str(customRC) + ',addData=' + str(addData)
    self.logger.log( self.logger.TRANSMIT , 'EAPOL 4-Way Handshake Message 4/4 AES (' + parameterList + ')')
    # The try/except below only re-raises; kept for symmetry with siblings.
    try:
        # Create an empty EAPOL WPA Key packet.
        packet = EAPOL( version=1 , type='EAPOL-Key' )/EAPOL_Key()/EAPOL_WPAKey()
        packetKey = packet.getlayer( EAPOL_WPAKey )
        flaglist = ['HMAC_SHA1_AES','idx0','pairwise']
        if eapolMICFlag is True:
            flaglist.append('mic')
        # Fill in the fields.
        if customFlaglist is not None:
            flaglist = customFlaglist
        packetKey.KeyInfo = self.__getKeyInformation( flaglist )
        if customRC is not None:
            if customRC == 'lower':
                self.replayCounter -= 1
            elif customRC == 'higher':
                self.replayCounter += 1
        packetKey.ReplayCounter = self.replayCounter
        if addNonce is not None:
            if addNonce == 'supplicant':
                packetKey.Nonce = self.SNonce
            if addNonce == 'authenticator':
                packetKey.Nonce = self.ANonce
            if addNonce == 'random':
                packetKey.Nonce = binascii.a2b_hex( os.urandom( 32 ).encode('hex') )
        if addData is not None:
            if addData == 'data':
                packetKey.WPAKeyLength = 32
                packetKey.WPAKey = binascii.a2b_hex( os.urandom( 32 ).encode('hex') )
            if addData == 'dataNoLength':
                packetKey.WPAKeyLength = 0
                packetKey.WPAKey = binascii.a2b_hex( os.urandom( 32 ).encode('hex') )
            if addData == 'dataShortLength':
                packetKey.WPAKeyLength = 16
                packetKey.WPAKey = binascii.a2b_hex( os.urandom( 32 ).encode('hex') )
            if addData == 'dataLongLength':
                packetKey.WPAKeyLength = 48
                packetKey.WPAKey = binascii.a2b_hex( os.urandom( 32 ).encode('hex') )
        # Calculate and add the MIC.
        if eapolMIC is True:
            mic = HMAC.new( self.KCK , msg=str( packet ) , digestmod=Crypto.Hash.SHA )
            packetKey.WPAKeyMIC = mic.digest()
        # Transmit.
        sendp(RadioTap()/
              Dot11( addr1=self.addr1 , addr2=self.addr2 , addr3=self.addr1 , type='Data' , subtype=0x00 , FCfield='to-DS' )/
              LLC( dsap=0xaa , ssap=0xaa , ctrl=0x03 )/
              SNAP( OUI=0x000000 , code=0x888e )/
              packet,
              iface=self.iface , verbose=False )
    except:
        raise
######################################################################################
### Group Key Handshake ##############################################################
######################################################################################
# ------------------------------------------------------------------------------------
# --- Group Key Handshake 1/2 --------------------------------------------------------
# ------------------------------------------------------------------------------------
def gk_handshake_1_2_tkip( self , packet ):
    """ Group Key Handshake 1/2 (TKIP).

        Decrypts the received TKIP-protected EAPOL-Key frame, verifies it is
        Group Key Handshake message 1/2 (flags + HMAC-MD5 MIC), stores the
        replay counter, and recovers the Group Temporal Key by ARC4-decrypting
        the key-data field with KeyIV||KEK (skipping the first 256 keystream
        bytes, per Key Descriptor Version 1).
    """
    try:
        # Decapsulate the TKIP packet, and rebuild the plaintext packet.
        plaintext = self.handleTKIP.decapsulate( packet , self.TK , self.MMICTxK )
        packet = LLC()/SNAP()/EAPOL()/EAPOL_Key()/EAPOL_WPAKey()
        new_packet = packet.__class__( plaintext )
        # Assert on the flags in the Key Information to verify it is GKHS Message 1/2.
        keyinfoReceived = new_packet.getlayer( EAPOL_WPAKey ).KeyInfo
        self.__setKeyIDFromFlaglist( self.__getFlaglist( keyinfoReceived ) )
        flaglist = ['HMAC_MD5_RC4','group','ack','mic','secure']
        flaglist.append( self.keyID ) # Copying the Key ID from the received packet.
        keyinfoCalculated = self.__getKeyInformation( flaglist )
        assert( keyinfoReceived == keyinfoCalculated ), \
            'The received packet is not Group Key Handshake Message 1/2.'
        self.logger.log( self.logger.RECEIVED , 'EAPOL Group Key Handshake Message 1/2 TKIP' )
        # Assert that the EAPoL WPA Key layer has a valid MIC.
        self.__assertWPAKeyMIC( new_packet , Crypto.Hash.MD5 )
        # Update the Replay Counter.
        self.replayCounter = new_packet.getlayer( EAPOL_WPAKey ).ReplayCounter
        # Use ARC4 to decrypt the WPAKey-field, containing the Group Temporal Key.
        # First skip the first 256 bytes of ARC4, then decrypt the cipher.
        # Ref. IEEE 802.11i specification (2004); EAPOL-Key frames (Key Descriptor
        # Version 1).
        key = new_packet.KeyIV + self.KEK
        arc4 = ARC4.new( key )
        arc4.decrypt( '\x00'*256 )
        self.GTK = arc4.decrypt( new_packet.WPAKey ) # Resulting key of 32 octets.
        self.logger.logKey( 'Group Temporal Key' , self.GTK )
    except:
        raise
def gk_handshake_1_2_aes( self , packet ):
    """ Group Key Handshake 1/2 (WPA).

        The packet is decrypted with AES under the CTR with CBC-MAC Protocol (CCMP).
        CCM combines CTR for data confidentiality and CBC-MAC for authentication and
        integrity.  Verifies flags and HMAC-SHA1 MIC, stores the replay
        counter, and recovers the GTK via AES key unwrap with the KEK.
    """
    try:
        # Decapsulate the CCMP (AES) packet, and rebuild the plaintext packet.
        plaintext = self.handleAES.decapsulate( packet , self.TK )
        packet = LLC()/SNAP()/EAPOL()/EAPOL_Key()/EAPOL_WPAKey()
        new_packet = packet.__class__( plaintext )
        # Assert on the flags in the Key Information to verify it is GKHS Message 1/2.
        keyinfoReceived = new_packet.getlayer( EAPOL_WPAKey ).KeyInfo
        self.__setKeyIDFromFlaglist( self.__getFlaglist( keyinfoReceived ) )
        flaglist = ['HMAC_SHA1_AES','group','ack','mic','secure']
        flaglist.append( self.keyID ) # Copying the Key ID from the received packet.
        keyinfoCalculated = self.__getKeyInformation( flaglist )
        assert( keyinfoReceived == keyinfoCalculated ), \
            'The received packet is not Group Key Handshake Message 1/2.'
        self.logger.log( self.logger.RECEIVED , 'EAPOL Group Key Handshake Message 1/2 AES' )
        # Assert that the EAPoL WPA Key layer has a valid MIC.
        self.__assertWPAKeyMIC( new_packet , Crypto.Hash.SHA )
        # Update the Replay Counter.
        self.replayCounter = new_packet.getlayer( EAPOL_WPAKey ).ReplayCounter
        # Retrieve the Group Temporal key.
        self.GTK = self.handleAES.unwrapKey( new_packet.WPAKey , self.KEK ) # Resulting key of 16/32 octets.
        self.logger.logKey( 'Group Temporal Key' , self.GTK )
    except:
        raise
# ------------------------------------------------------------------------------------
# --- Group Key Handshake 2/2 --------------------------------------------------------
# ------------------------------------------------------------------------------------
def gk_handshake_2_2_tkip( self , eapolMIC = True , eapolMICFlag = True , wepMIC = True , customFlaglist = None , addNonce = None , customRC = None , addData = None ):
    """ Group Key Handshake 2/2 (TKIP).

        Builds the supplicant's Group Key Handshake acknowledgement,
        TKIP-encapsulates it and transmits it.  Optional parameters
        deliberately malform the frame (fuzzing):

        eapolMIC       -- compute and insert the HMAC-MD5 EAPoL MIC when True.
        eapolMICFlag   -- set the 'mic' bit in Key Information when True.
        wepMIC         -- when False, zero the TKIP ICV of the encapsulated
                          frame (MICHAEL itself is left intact).
        customFlaglist -- override the default Key Information flag list.
        addNonce       -- 'supplicant'/'authenticator'/'random' nonce to add.
        customRC       -- 'lower'/'higher': desynchronise the replay counter.
        addData        -- attach 32 random key-data bytes with a correct,
                          zero, short or long length field.
    """
    parameterList = 'eapolMIC=' + str(eapolMIC) + ',eapolMICFlag=' + str(eapolMICFlag) + ',wepMIC=' + str(wepMIC) + ',customFlaglist=' + str(customFlaglist) + ',addNonce=' + str(addNonce) + ',customRC=' + str(customRC) + ',addData=' + str(addData)
    self.logger.log( self.logger.TRANSMIT , 'EAPOL Group Key Handshake Message 2/2 TKIP (' + parameterList + ')')
    # The try/except below only re-raises; kept for symmetry with siblings.
    try:
        # Create an empty EAPOL WPA Key packet.
        packet = EAPOL( version=1 , type='EAPOL-Key' )/EAPOL_Key()/EAPOL_WPAKey()
        packetKey = packet.getlayer( EAPOL_WPAKey )
        flaglist = ['HMAC_MD5_RC4','group','secure']
        flaglist.append( self.keyID )
        if eapolMICFlag is True:
            flaglist.append('mic')
        # Fill in the fields.
        if customFlaglist is not None:
            flaglist = customFlaglist
        packetKey.KeyInfo = self.__getKeyInformation( flaglist )
        if customRC is not None:
            if customRC == 'lower':
                self.replayCounter -= 1
            elif customRC == 'higher':
                self.replayCounter += 1
        packetKey.ReplayCounter = self.replayCounter
        if addNonce is not None:
            if addNonce == 'supplicant':
                packetKey.Nonce = self.SNonce
            if addNonce == 'authenticator':
                packetKey.Nonce = self.ANonce
            if addNonce == 'random':
                packetKey.Nonce = binascii.a2b_hex( os.urandom( 32 ).encode('hex') )
        if addData is not None:
            if addData == 'data':
                packetKey.WPAKeyLength = 32
                packetKey.WPAKey = binascii.a2b_hex( os.urandom( 32 ).encode('hex') )
            if addData == 'dataNoLength':
                packetKey.WPAKeyLength = 0
                packetKey.WPAKey = binascii.a2b_hex( os.urandom( 32 ).encode('hex') )
            if addData == 'dataShortLength':
                packetKey.WPAKeyLength = 16
                packetKey.WPAKey = binascii.a2b_hex( os.urandom( 32 ).encode('hex') )
            if addData == 'dataLongLength':
                packetKey.WPAKeyLength = 48
                packetKey.WPAKey = binascii.a2b_hex( os.urandom( 32 ).encode('hex') )
        # Calculate and add the MIC.
        if eapolMIC is True:
            mic = HMAC.new( self.KCK , msg=str( packet ) , digestmod=Crypto.Hash.MD5 )
            packetKey.WPAKeyMIC = mic.digest()
        # Get the plaintext and generate the Logical-Link Control (LLC),
        # and Subnetwork Access Protocol (SNAP).
        plaintext = str( packet )
        llcSnap = LLC( dsap=0xaa , ssap=0xaa , ctrl=0x03 )
        llcSnap /= SNAP( OUI=0x000000 , code=0x888e )
        plaintext = str( llcSnap ) + plaintext
        # Generate the dot11 header and request the encapsulated dot11wep message.
        dot11 = Dot11( addr1=self.addr1 , addr2=self.addr2 , addr3=self.addr1 , FCfield='wep+to-DS' , type='Data' , subtype=0 )
        addr1 = binascii.a2b_hex( self.addr1.replace( ':' , '' ) )
        addr2 = binascii.a2b_hex( self.addr2.replace( ':' , '' ) )
        priority = 0
        dot11wep = self.handleTKIP.encapsulate( plaintext , addr2 , addr1 , priority , self.MMICRxK , self.TK )
        if wepMIC is False:
            dot11wep.icv = 0 # NOTE: This only clears the ICV, not MICHAEL.
        # Transmit the packet.
        packet = RadioTap()/dot11/dot11wep
        sendp( packet , iface=self.iface , verbose=False )
    except:
        raise
def gk_handshake_2_2_aes( self , eapolMIC = True , eapolMICFlag = True , wepMIC = True , customFlaglist = None , addNonce = None , customRC = None , addData = None ):
    """ Group Key Handshake 2/2 (WPA).

        AES/CCMP counterpart of gk_handshake_2_2_tkip: HMAC-SHA1-AES flags,
        HMAC-SHA1 EAPoL MIC and CCMP encapsulation.  Optional parameters
        deliberately malform the frame (fuzzing); see the TKIP variant for
        their meaning.
    """
    parameterList = 'eapolMIC=' + str(eapolMIC) + ',eapolMICFlag=' + str(eapolMICFlag) + ',wepMIC=' + str(wepMIC) + ',customFlaglist=' + str(customFlaglist) + ',addNonce=' + str(addNonce) + ',customRC=' + str(customRC) + ',addData=' + str(addData)
    self.logger.log( self.logger.TRANSMIT , 'EAPOL Group Key Handshake Message 2/2 AES (' + parameterList + ')')
    # The try/except below only re-raises; kept for symmetry with siblings.
    try:
        # Create an empty EAPOL WPA Key packet.
        packet = EAPOL( version=1 , type='EAPOL-Key' )/EAPOL_Key()/EAPOL_WPAKey()
        packetKey = packet.getlayer( EAPOL_WPAKey )
        flaglist = ['HMAC_SHA1_AES','group','secure']
        flaglist.append( self.keyID )
        if eapolMICFlag is True:
            flaglist.append('mic')
        # Fill in the fields.
        if customFlaglist is not None:
            flaglist = customFlaglist
        packetKey.KeyInfo = self.__getKeyInformation( flaglist )
        if customRC is not None:
            if customRC == 'lower':
                self.replayCounter -= 1
            elif customRC == 'higher':
                self.replayCounter += 1
        packetKey.ReplayCounter = self.replayCounter
        if addNonce is not None:
            if addNonce == 'supplicant':
                packetKey.Nonce = self.SNonce
            if addNonce == 'authenticator':
                packetKey.Nonce = self.ANonce
            if addNonce == 'random':
                packetKey.Nonce = binascii.a2b_hex( os.urandom( 32 ).encode('hex') )
        if addData is not None:
            if addData == 'data':
                packetKey.WPAKeyLength = 32
                packetKey.WPAKey = binascii.a2b_hex( os.urandom( 32 ).encode('hex') )
            if addData == 'dataNoLength':
                packetKey.WPAKeyLength = 0
                packetKey.WPAKey = binascii.a2b_hex( os.urandom( 32 ).encode('hex') )
            if addData == 'dataShortLength':
                packetKey.WPAKeyLength = 16
                packetKey.WPAKey = binascii.a2b_hex( os.urandom( 32 ).encode('hex') )
            if addData == 'dataLongLength':
                packetKey.WPAKeyLength = 48
                packetKey.WPAKey = binascii.a2b_hex( os.urandom( 32 ).encode('hex') )
        # Calculate and add the MIC.
        if eapolMIC is True:
            mic = HMAC.new( self.KCK , msg=str( packet ) , digestmod=Crypto.Hash.SHA )
            packetKey.WPAKeyMIC = mic.digest()
        # Get the plaintext and generate the Logical-Link Control (LLC),
        # and Subnetwork Access Protocol (SNAP).
        plaintext = str( packet )
        llcSnap = LLC( dsap=0xaa , ssap=0xaa , ctrl=0x03 )
        llcSnap /= SNAP( OUI=0x000000 , code=0x888e )
        plaintext = str( llcSnap ) + plaintext
        # Generate the dot11 header and request the encapsulated dot11wep message.
        dot11 = Dot11( addr1=self.addr1 , addr2=self.addr2 , addr3=self.addr1 , FCfield='wep+to-DS' , type='Data' , subtype=0 )
        dot11wep = self.handleAES.encapsulate( plaintext , self.TK , self.addr1 , self.addr2 , self.addr3 )
        if wepMIC is False:
            dot11wep.icv = 0 # NOTE/FIXME: This only clears part of the MIC, still making it incorrect though.
        # Transmit the packet.
        packet = RadioTap()/dot11/dot11wep
        sendp( packet , iface=self.iface , verbose=False )
    except:
        raise
|
15,539 | 768624fc443c6f1b1a5b3b7c1f00dff65f1a11d4 | from printer import Printer
def read_file(printers):
    """Read a comma-separated printers file and build Printer objects.

    Each non-empty line must contain at least three comma-separated fields,
    which are passed positionally to Printer.

    :param printers: path of the file to read.
    :return: list of Printer instances, one per data line.
    """
    printers_list = []
    with open(printers) as fh:
        for line in fh:
            # strip() removes the trailing newline that the original code
            # left attached to the last field, and trims stray whitespace.
            fields = line.strip().split(",")
            # Skip blank lines (e.g. a trailing newline at end of file),
            # which previously crashed with IndexError.
            if fields == [""]:
                continue
            printers_list.append(Printer(fields[0], fields[1], fields[2]))
    return printers_list
|
15,540 | 29a070649cbd98be639377950e6a84ad0d24b714 |
from circle import Circle
from circle import Sphere
from math import pi
def test_init():
    # Constructing a Circle from a radius must not raise.
    circle = Circle(10)
#Step 1
def test_radius():
    """Step 1: the radius passed to the constructor is stored."""
    circ = Circle(25)
    assert circ.radius == 25
# Step 2
def test_diameter():
    """Step 2: diameter is derived from the radius (2 * r)."""
    circ = Circle(5)
    assert circ.diameter == 2 * 5
# Step 3
def test_set_radius():
    """Step 3: assigning the diameter updates the radius to match."""
    circ = Circle(8)
    circ.diameter = 14
    assert circ.diameter == 14
    assert circ.radius == 7
# Step 4
def test_area():
    """Step 4: area is pi * r**2."""
    circ = Circle(2)
    assert circ.area == pi * 2 ** 2
# Step 5 Alternate Constructor???
# Step 6
def test_str():
    """Step 6: str() renders the radius with six decimals."""
    assert str(Circle(10)) == 'Circle with radius: 10.000000'
def test_repr():
    """repr() round-trips to a constructor call."""
    assert repr(Circle(4)) == 'Circle(4)'
# Step 7
def test_addition():
    """Step 7: adding two circles sums their radii."""
    combined = Circle(2) + Circle(4)
    assert combined.radius == 6
def test_multiply():
    """Multiplying a circle by a scalar scales its radius."""
    scaled = Circle(4) * 3
    assert scaled.radius == 12
def test_reverse_multiply():
    """Scalar * Circle must also work (via the reflected operator)."""
    # Removed an unused local (`c = Circle(3)`) left over in the original.
    scaled = 3 * Circle(3)
    assert scaled.radius == 9.0
# Comparing Circles
# Greter than
def test_gt():
    """Circles compare by radius, so the larger radius is greater."""
    c1 = Circle(3)
    c2 = Circle(5)
    # BUG FIX: the original asserted `c1 > c2` (i.e. 3 > 5), which
    # contradicts test_lt with the same values and could never pass;
    # compare in the correct direction.
    assert c2 > c1
# Less than
def test_lt():
    """A circle with a smaller radius compares less-than."""
    small, big = Circle(3), Circle(5)
    assert small < big
# Equal
def test_not_eq():
    """Circles with different radii are unequal."""
    assert Circle(3) != Circle(5)
# Equal is true
def test_eq():
    """Circles with equal radii compare equal."""
    # Removed an unused local (`c1 = Circle(3)`) left over in the original.
    assert Circle(5) == Circle(5)
# # Circles sorted
"""How do I use the sorted method for this???"""
# def test_sorted_circles():
# circ_list = [Circle(0), Circle(1), Circle(2), Circle(3), Circle(4),
# Circle(5), Circle(6), Circle(7), Circle(8),
# ]
# # circ_list.sorted()
# assert circ_list[0] == Circle(0)
# assert circ_list[6] == Circle(6)
# assert [circ_list[0] < circ_list[1] < circ_list[2] < circ_list[3] < circ_list[4]
# < circ_list[5] < circ_list[6] < circ_list[7] < circ_list[8]
# ]
# def test_reverse_sorted_circles():
# circ_list = [Circle(0), Circle(1), Circle(2), Circle(3), Circle(4),
# Circle(5), Circle(6), Circle(7), Circle(8),
# ]
# assert circ_list[0] == Circle(8)
# assert circ_list[7] == Circle(0)
# Using the example test from class
def test_sort():
    """Step 8: a list of circles sorts ascending by radius."""
    circles = [Circle(20), Circle(10), Circle(15), Circle(5)]
    circles.sort()
    assert circles[0] == Circle(5)
    assert circles[-1] == Circle(20)
    assert all(circles[i] < circles[i + 1] for i in range(len(circles) - 1))
# Step 9 Subclasing
def test_sphere_volume():
    """Step 9: sphere volume for r = 5, pinned as a regression value."""
    # Removed a leftover debug print(); the exact float comparison against
    # the current implementation's output is kept unchanged.
    s = Sphere(5)
    assert s.volume() == 654.4984694978737
def test_sphere_area():
    # NOTE(review): `assert NotImplementedError` only checks that the
    # exception class object is truthy, so this test can never fail.
    # It probably should assert that accessing Sphere.area raises
    # NotImplementedError -- confirm against the Sphere implementation.
    s = Sphere(4)
    assert NotImplementedError
def test_str_sphere():
    """str() of a Sphere names the type and its radius."""
    assert str(Sphere(5)) == "Sphere with radius: 5"
def test_repr_sphere():
    """repr() of a Sphere round-trips to a constructor call."""
    assert repr(Sphere(12)) == "Sphere(12)"
15,541 | 31c336621e519be21c175ce837a9f921f8900d68 | # coding: utf-8
"""
Xero Accounting API
No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501
Contact: api@xero.com
Generated by: https://openapi-generator.tech
"""
import re # noqa: F401
from xero_python.models import BaseModel
class PaymentTerm(BaseModel):
    """Payment terms applied to bills and sales.

    NOTE: This class is auto generated by OpenAPI Generator
    (https://openapi-generator.tech); regenerate it rather than editing by
    hand.
    """

    # openapi_types: attribute name -> declared OpenAPI type.
    openapi_types = {"bills": "Bill", "sales": "Bill"}
    # attribute_map: attribute name -> JSON key in the API payload.
    attribute_map = {"bills": "Bills", "sales": "Sales"}

    def __init__(self, bills=None, sales=None):  # noqa: E501
        """PaymentTerm - a model defined in OpenAPI"""  # noqa: E501
        self._bills = None
        self._sales = None
        self.discriminator = None

        # Route non-None arguments through the property setters.
        if bills is not None:
            self.bills = bills
        if sales is not None:
            self.sales = sales

    @property
    def bills(self):
        """Payment term for bills.

        :rtype: Bill
        """
        return self._bills

    @bills.setter
    def bills(self, bills):
        """Set the payment term for bills.

        :type bills: Bill
        """
        self._bills = bills

    @property
    def sales(self):
        """Payment term for sales.

        :rtype: Bill
        """
        return self._sales

    @sales.setter
    def sales(self, sales):
        """Set the payment term for sales.

        :type sales: Bill
        """
        self._sales = sales
15,542 | 81c618c759b852df250ce5c88fa2e2e9893783f2 | from tools.ProjectionTools.Gated2RGB.lib.warp_gatedimage import WarpingClass
from tools.ProjectionTools.Gated2RGB.lib.data_loader import load_vehicle_speed, load_time, load_stearing_ange
from tools.Raw2LUTImages.conversion_lib.process import Rectify_image
from tools.CreateTFRecords.generic_tf_tools.resize import resize
from tools.ProjectionTools.Gated2RGB.lib.image_transformer import disparity2depth_psm
import cv2
import os
import numpy as np
import argparse
def parsArgs():
    """Parse the command-line options of the Gated2RGB projection tool."""
    parser = argparse.ArgumentParser(description='Gated2RGB projection tool')
    parser.add_argument('--root', '-r', default='./example_data',
                        help='Enter the root folder')
    parser.add_argument('--depth_folder', '-d', default='psmnet_sweden',
                        choices=['cam_stereo_sgm', 'psmnet_sweden'],
                        help='Data folder precise depth')
    parser.add_argument('--debug', '-deb', type=bool, default=True,
                        help='Save human readable image')
    parser.add_argument('--suffix', '-s', type=str, default='psm_warped',
                        help='Define suffix for warped images')
    return parser.parse_args()
class DepthWarpingWrapper():
    """Loads raw RGB and gated frames and warps the gated slices into the
    RGB view using per-pixel depth plus the ego motion (speed, steering,
    capture delay) between the exposures.

    ``gated_keys``/``image_keys`` name the sensor folders read below
    ``source_dir``; warped results are written below ``dest_root`` into
    folders named ``<gated-prefix>_<suffix>``.
    """
    # Class loads Raw Images
    gated_keys = ['gated0_raw', 'gated1_raw', 'gated2_raw']
    image_keys = ['cam_stereo_left'] # Add raw
    history_images = ['cam_stereo_left_raw_history_%d'%i for i in range(-6,5)]
    def __init__(self, source_dir=None, dest_root=None, suffix=None, DEBUG=True, depthfolder='psm_sweden'):
        # NOTE(review): the default 'psm_sweden' differs from the CLI default
        # 'psmnet_sweden'; only folder names containing 'psmnet' take the
        # disparity-doubling branch in read_data_and_process -- confirm which
        # default is intended.
        self.source_dir = source_dir
        self.dest_root = dest_root
        self.suffix = suffix
        self.r = resize('RGB2Gatedv2')
        self.DEBUG = DEBUG
        self.depth_folder = depthfolder
        # Warps gated images given depth + ego motion; calibrated from source_dir.
        self.WarpGated = WarpingClass()
        self.WarpGated.InitTransformer(source_dir)
        # Rectifiers for the left/right RGB cameras and the gated camera.
        self.RL = Rectify_image(self.source_dir, 'calib_cam_stereo_left.json')
        self.RR = Rectify_image(self.source_dir, 'calib_cam_stereo_right.json')
        self.RG = Rectify_image(self.source_dir, 'calib_gated_bwv.json', DEBUG=False)
    def read_data_and_process(self, entry_id, vehicle_speed, delay, angle):
        """Load the RGB frame (DEBUG only) and all gated slices for
        *entry_id*, warp each gated slice with its per-slice delay, and
        return a dict with image data, shapes and the entry name.

        delay -- dict mapping 'gated0'/'gated1'/'gated2' to seconds.
        """
        dist_images = {}
        gated_images = {}
        dist_images_shape = {}
        gated_images_shape = {}
        if self.DEBUG == True:
            for folder in self.image_keys:
                file_path = os.path.join(self.source_dir, folder, entry_id + '.tiff')
                img = self.r.crop(self.RL.process_lut(file_path))
                img_height, img_width, _ = img.shape
                dist_images[folder] = img
                dist_images_shape[folder] = ([img_height, img_width, 3])
        for folder in self.gated_keys:
            file_path = os.path.join(self.source_dir, folder, entry_id + '.tiff')
            if self.DEBUG==True:
                img = self.RG.process_rect_lut_gated8(file_path)
            else:
                img = self.RG.process_rect_gated(file_path)
            img_height, img_width = img.shape
            # Replicate the single gated channel to a 3-channel image.
            img = img[:,:, np.newaxis]
            img = np.concatenate([img]*3, axis=2)
            if 'psmnet' in self.depth_folder:
                # Take care PSMNet was trained on half the resolution! Therefore, the disparity has to be multiplied by two!!
                # Also the cam_stereo_sgm disparity maps are caclulated on half the resolution
                depth_single = cv2.resize(disparity2depth_psm(2*np.load(os.path.join(self.source_dir, self.depth_folder, entry_id + '.npz'))['arr_0']), (1920, 1024)) #
            else:
                depth_single = cv2.resize(disparity2depth_psm(np.load(os.path.join(self.source_dir, self.depth_folder, entry_id + '.npz'))['arr_0']), (1920, 1024)) #
            # Warp with ego motion; per-slice delay looked up by gated prefix.
            img = self.WarpGated.process_image_ego_motion((768, 1280), img, depth_single, vehicle_speed, angle, delay[folder.split('_')[0]], self.RG.PC.K)
            gated_images[folder] = img
            gated_images_shape[folder] = ([img_height, img_width, 1])
        data = {}
        data['image_data'] = dist_images
        data['gated_data'] = gated_images
        data['image_shape'] = dist_images_shape
        data['gated_shape'] = gated_images_shape
        data['name'] = entry_id
        return data
    def save_gated_data(self, data, key):
        """Persist warped gated images for *key* under dest_root.

        In DEBUG mode writes RGB/gated overlays as PNG and returns
        (overlay, rgb, combined-gated); otherwise writes raw TIFFs and
        returns (None, None, None).  Returns nothing if dest_root is unset.
        """
        if self.dest_root is not None:
            alpha = 0.5
            if self.DEBUG==True:
                for folder in self.gated_keys:
                    overlay1 = cv2.addWeighted(data['image_data']['cam_stereo_left'], alpha,
                                               data['gated_data'][folder], 1 - alpha, 0)
                    path = os.path.join(self.dest_root, folder.split('_')[0] + '_' + self.suffix + '_debug')
                    if not os.path.exists(path):
                        os.makedirs(path)
                    cv2.imwrite(os.path.join(path, key + '.png'), overlay1)
                # Combine the three gated slices by per-pixel maximum.
                output = np.max((data['gated_data']['gated0_raw'],data['gated_data']['gated1_raw'],data['gated_data']['gated2_raw']),axis=-1).astype(np.uint8).transpose((1,2,0))
                print(output.shape, data['image_data']['cam_stereo_left'].shape)
                overlay = cv2.addWeighted(data['image_data']['cam_stereo_left'], alpha,
                                          cv2.cvtColor(cv2.cvtColor(output, cv2.COLOR_BGR2GRAY),cv2.COLOR_GRAY2BGR), 1 - alpha, 0)
                return overlay, data['image_data']['cam_stereo_left'], output
            else:
                for folder in self.gated_keys:
                    path = os.path.join(self.dest_root, folder.split('_')[0] + '_' + self.suffix)
                    if not os.path.exists(path):
                        os.makedirs(path)
                    print(data['gated_data'][folder].dtype, np.min(data['gated_data'][folder]), np.max(data['gated_data'][folder]))
                    cv2.imwrite(os.path.join(path, key + '.tiff'), data['gated_data'][folder])
                return None, None, None
if __name__ == '__main__':
    args = parsArgs()
    if args.debug:
        cv2.namedWindow("DEBUG", cv2.WINDOW_NORMAL)
    # T warps with the measured delays/steering; T2 (debug only) repeats the
    # pipeline with zero correction for a side-by-side comparison.
    T = DepthWarpingWrapper(source_dir=args.root, dest_root=args.root, suffix=args.suffix, DEBUG=args.debug, depthfolder=args.depth_folder)
    T2 = None
    if args.debug:
        T2 = DepthWarpingWrapper(source_dir=args.root, dest_root=args.root, suffix=args.suffix+'_no_correction', DEBUG=args.debug, depthfolder=args.depth_folder)
        cv2.namedWindow('DEBUG', cv2.WINDOW_NORMAL)
    # Read files
    files = os.listdir(os.path.join(args.root, 'cam_stereo_left'))
    print(files)
    for key in files:
        key = key.split('.tiff')[0]
        print(key)
        # Per-slice capture delay relative to the RGB frame, ns -> s.
        # NOTE(review): load_time is called without args.root, unlike
        # load_vehicle_speed/load_stearing_ange below -- confirm signature.
        delta0 = float(load_time('gated0',key)[1] - load_time('rgb', key)[1])/10**9
        delta1 = float(load_time('gated1',key)[1] - load_time('rgb', key)[1])/10**9
        delta2 = float(load_time('gated2',key)[1] - load_time('rgb', key)[1])/10**9
        delays = {
            'gated0': delta0,
            'gated1': delta1,
            'gated2': delta2
        }
        speed = load_vehicle_speed(args.root, key)/3.6 # conversion from km/h to m/s.
        angle = load_stearing_ange(args.root, key)/520*30 # conversion from steering angle to heading. Assumption of 3 steering wheel rotations from end to end and a maximum heading of 30°.
        data = T.read_data_and_process(key, speed, delays, angle)
        img1, rgb1, output1 = T.save_gated_data(data, key)
        if args.debug == True:
            # Uncorrected pass: zero delays and zero steering.
            delays2 = {
                'gated0': 0,
                'gated1': 0,
                'gated2': 0
            }
            data2 = T2.read_data_and_process(key, speed, delays2, 0)
            img2, rgb2, output2 = T2.save_gated_data(data2, key)
            # Show corrected vs. uncorrected overlays; waitKey blocks per image.
            cv2.imshow('DEBUG', np.hstack((img1, img2)))
            print(speed, angle, delays['gated0'])
            cv2.waitKey()
            cv2.imshow('DEBUG', np.hstack((output1, output2)))
            cv2.waitKey()
            cv2.imshow('DEBUG', np.vstack((np.hstack((rgb1, output1)),np.hstack((img1, img2)))))
            cv2.waitKey()
15,543 | 2a0d2bce0ce5ae0d1da93c5d10a4930d305c9750 | import datetime
import json
import urllib
import webapp2
from datamodel import Event, Friendship, Likes, Page, Place, RSVP, Swipe, User
from exceptions import MissingProperty
from google.appengine.ext import ndb
class RestAPI(webapp2.RequestHandler):
    """Shared JSON helpers for the concrete REST handlers below."""

    def create_envelope(self, api):
        """Return a new response envelope tagged with the calling API name."""
        return {'API': api}

    def jsonify(self, data):
        """Serialise *data* to a JSON string."""
        return json.dumps(data)

    def parse_request(self):
        """Decode the URL-quoted JSON request body into a Python object."""
        body = urllib.unquote(self.request.body)
        return json.loads(body)

    def get_property(self, data, prop, required=False):
        """Fetch *prop* from the *data* dict.

        Returns None for a missing optional property; raises MissingProperty
        for a missing required one.
        """
        if prop not in data:
            if required is False:
                return None
            raise MissingProperty(prop)
        return data[prop]

    def write_response(self, payload):
        """Write *payload* to the client with a JSON content type."""
        self.response.headers['Content-Type'] = 'application/json; charset=utf-8'
        self.response.out.write(payload)

    def print_error(self, message):
        """Report *message* to the client with HTTP status 500."""
        self.response.headers['Content-Type'] = 'application/json; charset=utf-8'
        self.error(500)
        self.response.out.write(message)
class EventAPI(RestAPI):
    """REST handlers for Event entities (Google App Engine ndb)."""
    def get_all_events(self):
        # GET: list every Event as a JSON envelope.
        envelope = self.create_envelope('get_all_events')
        envelope['results'] = [ event.to_dict() for event in Event.query().fetch() ]
        self.write_response(self.jsonify(envelope))
    def get_single_event(self, fid):
        # GET: one Event (looked up by its Facebook id) plus its RSVPs.
        envelope = self.create_envelope('get_single_event')
        event = Event.query(Event.fid == fid).get()
        if event is None:
            self.print_error("event does not exist")
            return
        envelope['event'] = event.to_dict()
        envelope['rsvps'] = [ rsvp.to_dict() for rsvp in RSVP.query(RSVP.event == fid).fetch() ]
        self.write_response(self.jsonify(envelope))
    def post_single_event(self):
        # POST: create an Event from the JSON request body.
        request = self.parse_request()
        try:
            fid = self.get_property(request, 'fid', required=True)
            name = self.get_property(request, 'name', required=True)
            place = self.get_property(request, 'place', required=True)
            description = self.get_property(request, 'description', required=True)
            cover = self.get_property(request, 'cover')
            start_time_s = self.get_property(request, 'start_time')
            # XXX fix this code
            # NOTE(review): datetime.datetime(...) expects integer year/month/
            # day fields, not a string -- this raises TypeError; the time
            # strings need proper parsing (e.g. strptime with a known format).
            start_time = datetime.datetime(start_time_s)
            end_time_s = self.get_property(request, 'end_time')
            end_time = datetime.datetime(end_time_s)
        except MissingProperty, e:
            self.print_error(e.printable_error())
            return
        except ValueError, e:
            # NOTE(review): ValueError has no printable_error(); this handler
            # would itself raise AttributeError -- confirm intended behaviour.
            self.print_error(e.printable_error())
            return
        # NOTE(review): ndb models take ancestor keys via the 'parent' keyword;
        # 'ancor' (also a misspelling of 'anchor') is passed as if it were a
        # model property -- verify against the Event model definition.
        ancor = ndb.Key("Place", place)
        event = Event(ancor=ancor, id=fid, fid=fid, name=name, description=description, place=place, cover=cover, start_time=start_time, end_time=end_time)
        event.put()
class PageAPI(RestAPI):
    """REST handlers for Page entities (Python 2 / App Engine ndb code)."""
    def get_all_pages(self):
        """Respond with every stored Page serialized to a dict."""
        envelope = self.create_envelope('get_all_pages')
        envelope['results'] = [ page.to_dict() for page in Page.query().fetch() ]
        self.write_response(self.jsonify(envelope))
    def get_single_page(self, fid):
        """Respond with one Page (keyed by fid) plus its Likes, or a 500 if absent."""
        envelope = self.create_envelope('get_single_page')
        page = ndb.Key("Page", fid).get()
        if page is None:
            self.print_error("page does not exist")
            return
        envelope['page'] = page.to_dict()
        envelope['likes'] = [ like.to_dict() for like in Likes.query(Likes.page == fid).fetch() ]
        self.write_response(self.jsonify(envelope))
    def post_single_page(self):
        """Create and persist a Page from the posted JSON body."""
        request = self.parse_request()
        try:
            fid = self.get_property(request, 'fid', required=True)
            name = self.get_property(request, 'name', required=True)
            icon = self.get_property(request, 'icon')
        except MissingProperty, e:
            self.print_error(e.printable_error())
            return
        page = Page(id=fid, fid=fid, name=name, icon=icon)
        page.put()
class PlaceAPI(RestAPI):
    """REST handlers for Place entities (Python 2 / App Engine ndb code)."""
    def get_all_places(self):
        """Respond with every stored Place serialized to a dict."""
        envelope = self.create_envelope('get_all_places')
        envelope['results'] = [ place.to_dict() for place in Place.query().fetch() ]
        self.write_response(self.jsonify(envelope))
    def get_single_place(self, fid):
        """Respond with one Place plus its child Events and Likes, or a 500 if absent."""
        envelope = self.create_envelope('get_single_place')
        place = ndb.Key("Place", fid).get()
        if place is None:
            self.print_error("place does not exist")
            return
        envelope['place'] = place.to_dict()
        envelope['events'] = [ event.to_dict() for event in Event.query(ancestor=place).fetch() ]
        envelope['likes'] = [ like.to_dict() for like in Likes.query(Likes.page == fid).fetch() ]
        self.write_response(self.jsonify(envelope))
    def post_single_place(self):
        """Create and persist a Place from the posted JSON body."""
        request = self.parse_request()
        try:
            fid = self.get_property(request, 'fid', required=True)
            name = self.get_property(request, 'name', required=True)
            description = self.get_property(request, 'description', required=True)
            icon = self.get_property(request, 'icon')
            place_type = self.get_property(request, 'place_type', required=True)
            # NOTE(review): location_data/latitude are validated as required but
            # never stored on the Place below -- the location is silently dropped.
            location_data = self.get_property(request, 'location', required=True)
            latitude = self.get_property(location_data, 'latitude', required=True)
        except MissingProperty, e:
            self.print_error(e.printable_error())
            return
        place = Place(id=fid, fid=fid, name=name, description=description, icon=icon, place_type=place_type)
        place.put()
class UserAPI(RestAPI):
    """REST handlers for User entities (Python 2 / App Engine ndb code)."""
    def get_all_users(self):
        """Respond with every stored User serialized to a dict."""
        envelope = self.create_envelope('get_all_users')
        envelope['results'] = [ user.to_dict() for user in User.query().fetch() ]
        self.write_response(self.jsonify(envelope))
    def get_single_user(self, fid):
        """Respond with a user's friends, likes, rsvps and swipe activity, or a 500 if absent."""
        envelope = self.create_envelope('get_single_user')
        user = ndb.Key("User", fid).get()
        if user is None:
            self.print_error("user does not exist")
            return
        envelope['friends'] = [ friend.to_dict() for friend in Friendship.query(ancestor=user).fetch() ]
        envelope['likes'] = [ like.to_dict() for like in Likes.query(ancestor=user).fetch() ]
        envelope['rsvps'] = [ rsvp.to_dict() for rsvp in RSVP.query(ancestor=user).fetch() ]
        envelope['swiped'] = [ swipe.to_dict() for swipe in Swipe.query(ancestor=user).fetch() ]
        envelope['swiped_by'] = [ swiped.to_dict() for swiped in Swipe.query(Swipe.destination == fid).fetch() ]
        self.write_response(self.jsonify(envelope))
|
15,544 | d82dc1d9cd353e7f47a6abb9ca3678f0dfd81aba | import lyse
import numpy as np
import matplotlib.pyplot as plt
import mloop_config
from fake_result import fake_result
# Scatter-plot each M-LOOP optimization parameter against the cost for the
# most recent mloop_session found in the lyse DataFrame. Any failure is
# caught and printed so the lyse analysis loop keeps running.
try:
    df = lyse.data()
    config = mloop_config.get()
    x=[]
    # print(len(config['mloop_params']))
    # Collect the parameter (column) names to plot on the x axes.
    for ind in np.arange(len(config['mloop_params'])):
        # print(ind)
        x.append( list(config['mloop_params'])[ind])
    y = config['cost_key']
    # print(y)
    try:
        # Try to use the most recent mloop_session ID
        gb = df.groupby('mloop_session')
        mloop_session = list(gb.groups.keys())[-1]
        subdf = gb.get_group(mloop_session)
    except Exception as e:
        # Fallback to the entire lyse DataFrame
        subdf = df
        mloop_session = None
        print(e)
    # One figure per optimization parameter.
    for ind in np.arange(len(config['mloop_params'])):
        plt.figure(str(x[ind]))
        subdf.plot(x=x[ind], y=y, kind='scatter')
        # print(x[ind],y)
        x_p = np.linspace(df[x[ind]].min(), df[x[ind]].max(), 200)
        # plt.plot(x_p, fake_result(x_p, s=0))
        # plt.axis(ymin=18500, ymax=27500)
        plt.title('M-LOOP session: {:}'.format(mloop_session))
        plt.rcParams.update({'font.size': 20})
        plt.subplots_adjust(left=0.2, right=0.9, top=0.9, bottom=0.2)
except Exception as e:
    print (e)
15,545 | d170ee39a4fe90dac08536dea83abcaeeed3b21f | # The program will ask for the length and width of two rectangles and tell
# the user which rectangle has greater area, or if two rectangles have equal areas.
# 10/01/19
# CTI-110 P3T1 - Areas of Rectangles
# Farnaz Jeddi Moghaddam
# Get the length and width of the rectangles
# Get the length and width of the rectangles
l1 = float(input('Please enter the length of the first rectangle:'))
w1 = float(input('Please enter the width of the first rectangle:'))
l2 = float(input('Please enter the length of the second rectangle:'))
w2 = float(input('Please enter the width of the second rectangle:'))
# Calculate the areas of the two rectangles. The inputs are already floats,
# so the redundant float(...) casts around the products were removed.
area1 = l1 * w1
area2 = l2 * w2
# Compare the areas of the rectangles and show the result
if area1 > area2:
    print('The first rectangle has a greater area')
elif area1 < area2:
    print('The second rectangle has a greater area')
else:
    print('The rectangles have equal areas')
|
def list_filter(l):
    """Partition *l* into odd and even numbers.

    Returns a dict with "odd" and "even" entries, each holding the matching
    numbers (in original order), their sum, and their count.
    """
    evens = [n for n in l if n % 2 == 0]
    odds = [n for n in l if n % 2 != 0]
    return {
        "odd": {
            "sum": sum(odds),
            "list": odds,
            "len": len(odds),
        },
        "even": {
            "sum": sum(evens),
            "list": evens,
            "len": len(evens),
        },
    }
numbers = [2,100,3,12,313,44,33,22]
print(list_filter(numbers))
print('-----------------')
palindrome = 'Talat'
def check_palindrome(string):
    """Return True when *string* reads the same forwards and backwards, ignoring case."""
    lowered = string.lower()
    if lowered == lowered[::-1]:
        return True
    return False
15,547 | 5b9a3fe45c40706f07962c22f17f6bf822c621f5 | # Generated by Django 2.2.4 on 2019-10-04 07:21
import datetime
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated Django migration altering two Hospital fields.
    # NOTE(review): the login_date default was evaluated at makemigrations time,
    # so it is the fixed date 2019-10-04, not "today" at runtime.
    dependencies = [
        ('Hospital', '0003_auto_20191003_2226'),
    ]
    operations = [
        migrations.AlterField(
            model_name='hospital',
            name='hospitalprofile',
            field=models.ImageField(upload_to='hospitalimage'),
        ),
        migrations.AlterField(
            model_name='hospital',
            name='login_date',
            field=models.DateField(default=datetime.date(2019, 10, 4)),
        ),
    ]
|
15,548 | 7fd74d0a419b55cfbbf06d4cc9897e1ea098e91d | """This module contains the general information for PkiKeyRing ManagedObject."""
from ...ucsmo import ManagedObject
from ...ucscoremeta import MoPropertyMeta, MoMeta
from ...ucsmeta import VersionMeta
class PkiKeyRingConsts:
    """Allowed string values for PkiKeyRing properties (auto-generated SDK metadata)."""
    ADMIN_STATE_COMPLETED = "completed"
    ADMIN_STATE_CREATED = "created"
    ADMIN_STATE_REQ_CREATED = "reqCreated"
    ADMIN_STATE_STARTED = "started"
    ADMIN_STATE_TP_SET = "tpSet"
    CERT_STATUS_CERT_CHAIN_TOO_LONG = "certChainTooLong"
    CERT_STATUS_EMPTY_CERT = "emptyCert"
    CERT_STATUS_EXPIRED = "expired"
    CERT_STATUS_FAILED_TO_VERIFY_WITH_PRIVATE_KEY = "failedToVerifyWithPrivateKey"
    CERT_STATUS_FAILED_TO_VERIFY_WITH_TP = "failedToVerifyWithTp"
    CERT_STATUS_NOT_YET_VALID = "notYetValid"
    CERT_STATUS_REVOKED = "revoked"
    CERT_STATUS_SELF_SIGNED_CERTIFICATE = "selfSignedCertificate"
    CERT_STATUS_UNKNOWN = "unknown"
    CERT_STATUS_VALID = "valid"
    CONFIG_STATE_NOT_APPLIED = "not-applied"
    CONFIG_STATE_OK = "ok"
    INT_ID_NONE = "none"
    MODULUS_MOD2048 = "mod2048"
    MODULUS_MOD2560 = "mod2560"
    MODULUS_MOD3072 = "mod3072"
    MODULUS_MOD3584 = "mod3584"
    MODULUS_MOD4096 = "mod4096"
    MODULUS_MODINVALID = "modinvalid"
    POLICY_OWNER_LOCAL = "local"
    POLICY_OWNER_PENDING_POLICY = "pending-policy"
    POLICY_OWNER_POLICY = "policy"
    REGEN_FALSE = "false"
    REGEN_NO = "no"
    REGEN_TRUE = "true"
    REGEN_YES = "yes"
class PkiKeyRing(ManagedObject):
    """This is PkiKeyRing class."""
    # Auto-generated UCS SDK metadata; do not hand-edit the tables below.
    consts = PkiKeyRingConsts()
    naming_props = set(['name'])
    mo_meta = MoMeta("PkiKeyRing", "pkiKeyRing", "keyring-[name]", VersionMeta.Version101e, "InputOutput", 0xfff, [], ["aaa", "admin"], ['pkiEp'], ['faultInst', 'pkiCertReq'], ["Add", "Get", "Remove", "Set"])
    # Per-property metadata: access level, version introduced, validation
    # regex/ranges, and the set of allowed values.
    prop_meta = {
        "admin_state": MoPropertyMeta("admin_state", "adminState", "string", VersionMeta.Version101e, MoPropertyMeta.READ_ONLY, None, None, None, None, ["completed", "created", "reqCreated", "started", "tpSet"], []),
        "cert": MoPropertyMeta("cert", "cert", "string", VersionMeta.Version101e, MoPropertyMeta.READ_WRITE, 0x2, None, None, None, [], []),
        "cert_status": MoPropertyMeta("cert_status", "certStatus", "string", VersionMeta.Version203a, MoPropertyMeta.READ_ONLY, None, None, None, None, ["certChainTooLong", "emptyCert", "expired", "failedToVerifyWithPrivateKey", "failedToVerifyWithTp", "notYetValid", "revoked", "selfSignedCertificate", "unknown", "valid"], []),
        "child_action": MoPropertyMeta("child_action", "childAction", "string", VersionMeta.Version101e, MoPropertyMeta.INTERNAL, 0x4, None, None, r"""((deleteAll|ignore|deleteNonPresent),){0,2}(deleteAll|ignore|deleteNonPresent){0,1}""", [], []),
        "config_state": MoPropertyMeta("config_state", "configState", "string", VersionMeta.Version211a, MoPropertyMeta.READ_ONLY, None, None, None, None, ["not-applied", "ok"], []),
        "config_status_message": MoPropertyMeta("config_status_message", "configStatusMessage", "string", VersionMeta.Version211a, MoPropertyMeta.READ_ONLY, None, 0, 510, None, [], []),
        "descr": MoPropertyMeta("descr", "descr", "string", VersionMeta.Version101e, MoPropertyMeta.READ_WRITE, 0x8, None, None, r"""[ !#$%&\(\)\*\+,\-\./:;\?@\[\]_\{\|\}~a-zA-Z0-9]{0,256}""", [], []),
        "dn": MoPropertyMeta("dn", "dn", "string", VersionMeta.Version101e, MoPropertyMeta.READ_ONLY, 0x10, 0, 256, None, [], []),
        "int_id": MoPropertyMeta("int_id", "intId", "string", VersionMeta.Version101e, MoPropertyMeta.INTERNAL, None, None, None, None, ["none"], ["0-4294967295"]),
        "key": MoPropertyMeta("key", "key", "string", VersionMeta.Version101e, MoPropertyMeta.READ_ONLY, None, None, None, None, [], []),
        "modulus": MoPropertyMeta("modulus", "modulus", "string", VersionMeta.Version101e, MoPropertyMeta.READ_WRITE, 0x20, None, None, None, ["mod2048", "mod2560", "mod3072", "mod3584", "mod4096", "modinvalid"], []),
        "name": MoPropertyMeta("name", "name", "string", VersionMeta.Version101e, MoPropertyMeta.NAMING, 0x40, None, None, r"""[\-\.:_a-zA-Z0-9]{1,16}""", [], []),
        "policy_level": MoPropertyMeta("policy_level", "policyLevel", "uint", VersionMeta.Version211a, MoPropertyMeta.READ_ONLY, None, None, None, None, [], []),
        "policy_owner": MoPropertyMeta("policy_owner", "policyOwner", "string", VersionMeta.Version211a, MoPropertyMeta.READ_WRITE, 0x80, None, None, None, ["local", "pending-policy", "policy"], []),
        "regen": MoPropertyMeta("regen", "regen", "string", VersionMeta.Version101e, MoPropertyMeta.READ_WRITE, 0x100, None, None, None, ["false", "no", "true", "yes"], []),
        "rn": MoPropertyMeta("rn", "rn", "string", VersionMeta.Version101e, MoPropertyMeta.READ_ONLY, 0x200, 0, 256, None, [], []),
        "sacl": MoPropertyMeta("sacl", "sacl", "string", VersionMeta.Version302c, MoPropertyMeta.READ_ONLY, None, None, None, r"""((none|del|mod|addchild|cascade),){0,4}(none|del|mod|addchild|cascade){0,1}""", [], []),
        "status": MoPropertyMeta("status", "status", "string", VersionMeta.Version101e, MoPropertyMeta.READ_WRITE, 0x400, None, None, r"""((removed|created|modified|deleted),){0,3}(removed|created|modified|deleted){0,1}""", [], []),
        "tp": MoPropertyMeta("tp", "tp", "string", VersionMeta.Version101e, MoPropertyMeta.READ_WRITE, 0x800, None, None, r"""[\-\.:_a-zA-Z0-9]{0,16}""", [], []),
    }
    # XML attribute name -> python attribute name mapping.
    prop_map = {
        "adminState": "admin_state",
        "cert": "cert",
        "certStatus": "cert_status",
        "childAction": "child_action",
        "configState": "config_state",
        "configStatusMessage": "config_status_message",
        "descr": "descr",
        "dn": "dn",
        "intId": "int_id",
        "key": "key",
        "modulus": "modulus",
        "name": "name",
        "policyLevel": "policy_level",
        "policyOwner": "policy_owner",
        "regen": "regen",
        "rn": "rn",
        "sacl": "sacl",
        "status": "status",
        "tp": "tp",
    }
    def __init__(self, parent_mo_or_dn, name, **kwargs):
        """Create a PkiKeyRing child of *parent_mo_or_dn* with naming property *name*."""
        self._dirty_mask = 0
        self.name = name
        self.admin_state = None
        self.cert = None
        self.cert_status = None
        self.child_action = None
        self.config_state = None
        self.config_status_message = None
        self.descr = None
        self.int_id = None
        self.key = None
        self.modulus = None
        self.policy_level = None
        self.policy_owner = None
        self.regen = None
        self.sacl = None
        self.status = None
        self.tp = None
        ManagedObject.__init__(self, "PkiKeyRing", parent_mo_or_dn, **kwargs)
|
15,549 | 08381a435043f1bb535e326d14f8f42b81b0b204 | #!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""
Created on Wed May 10 23:19:43 2017
@author: dharma naidu and vikashsingh
"""
#CS 188 Medical Imaging Project
import pandas as pd
import sklearn
import numpy as np
import matplotlib.pyplot as plt
from sklearn.metrics import roc_curve, auc
from sklearn.metrics import roc_auc_score
from sklearn.cross_validation import StratifiedKFold
from sklearn.naive_bayes import GaussianNB
from sklearn import linear_model, datasets
from sklearn.cross_validation import train_test_split
rows_to_train = 50000
data = pd.read_csv('cs188TD.csv', header=None) #Load the data from the csv file using pandas, and store it in "data"
print("Data loaded New")
print(data.shape)
data=data.dropna() #Drop empty rows just in case data isn't formatted perfectly
print(data.shape)
X=data.iloc[0:rows_to_train,4:622] #Store first 622 column, or our training parameters, in "X"
Y=data.iloc[0:rows_to_train, 622] #store last column, or result column, in y
#convert to matrix to avoid errors
# NOTE(review): DataFrame.as_matrix() and sklearn.cross_validation were
# removed in modern pandas/scikit-learn; this script targets old versions.
X = X.as_matrix()
Y= Y.as_matrix()
AUC=[] #empty auc array to store AUCs of each k fold
#make a kfold data structure with 10 folds, a shuffle, and a random seed of 5
kfold = StratifiedKFold(y = Y, n_folds = 10, shuffle = True, random_state=5)
#empty global prediction array to store prediction probability of each k fold (Y value in AUC graph)
globpred=[]
#empty global prediction array to store test probability of each k fold (X value in AUC graph)
globy_test=[]
for i, (train, test) in enumerate(kfold): #for each fold in the kfold
    print(i)
    gnb = GaussianNB() #create a new Gaussian Naive Bayes model
    #print(test);
    gnb.fit(X[train], Y[train]) #train the model using data from X and Y
    predictionsproba = gnb.predict_proba(X[test])[:,1]#store prediction probability in predictproba
    AUC.append(roc_auc_score(Y[test], predictionsproba)) #append predict proba to our auc array
    globpred += predictionsproba.tolist() #add predictproba info to globred (for use in AUC graph)
    globy_test += Y[test].tolist() #add Y test info to globytest(for use in AUC graph)
print(np.mean(AUC)) #print the mean AUC of all our k folds
#graph the AUC graph
false_positive_rate, true_positive_rate, thresholds=roc_curve(globy_test, globpred)
roc_auc = auc(false_positive_rate, true_positive_rate)
plt.title('Receiver Operating Characteristic GNB')
plt.plot(false_positive_rate, true_positive_rate, 'b',
label='AUC = %0.2f'% roc_auc)
plt.legend(loc='lower right')
plt.plot([0,1],[0,1],'r--')
plt.xlim([-0.1,1.2])
plt.ylim([-0.1,1.2])
plt.ylabel('True Positive Rate')
plt.xlabel('False Positive Rate')
plt.show()
#plt.savefig("savedFigs/gnb")
15,550 | 9b6db1a771c75f106ed3d940d9f906358450cc94 | from selenium import webdriver
from selenium.webdriver.common.keys import Keys
# NOTE(review): the chromedriver path below is machine-specific.
my_driver = webdriver.Chrome(executable_path="/Users/avielb/Downloads/chromedriver")
my_driver.get("https://github.com")
# Type "devops" into the GitHub search box and submit with Enter.
my_driver.find_element_by_name("q").send_keys("devops" + Keys.ENTER)
15,551 | 5eb38e040f0b40311f0294673416dd4b3037db3e | import os
import sys
from subprocess import Popen, PIPE
def get_commit():
    """Return the first 8 characters of the current git HEAD hash, or None.

    None is returned when git is unavailable, this directory is not a git
    checkout, or the command fails for any other reason.
    """
    try:
        p = Popen(['git', 'rev-parse', 'HEAD'], cwd=os.path.dirname(__file__),
                  stdout=PIPE, stderr=PIPE)
    except OSError:
        # Bug fix: a missing git binary used to raise instead of returning None.
        return None
    std, _ = p.communicate()  # communicate() already waits; the old p.wait() was redundant
    if p.returncode:
        return None
    if sys.version_info[0] == 3:
        # Bug fix: sys.stdout.encoding is None when stdout is piped, which made
        # decode() raise TypeError; fall back to UTF-8 (git hashes are ASCII).
        return std[:8].decode(sys.stdout.encoding or 'utf-8')
    else:
        return std[:8]
|
15,552 | 1faccf75bcdf56edc44ccdfbc7bdf6ccf8f76259 | from django.contrib import admin
from rtwilio.models import TwilioResponse
class TwilioResponseAdmin(admin.ModelAdmin):
    """Django admin listing for TwilioResponse records, newest first."""
    list_display = ('message', 'date', 'sender', 'recipient',
                    'sent')
    ordering = ('-date',)
    list_filter = ('date',)
    def sent(self, response):
        """Computed boolean column: True when the Twilio status is 'sent'."""
        return response.status == 'sent'
    sent.boolean = True  # render as the admin's green/red boolean icon
admin.site.register(TwilioResponse, TwilioResponseAdmin)
15,553 | 65865711004a8b791385090459fb4d6dedb8e1c1 | from .JsonObject import *
from exceptions.Exceptions import *
from util.Stringify import *
class JSONArray(list):
    """A JSON array wrapper.

    NOTE(review): although this subclasses ``list``, elements are stored in
    the internal ``self.data`` list, not in the ``list`` base itself.
    """
    def __init__(self):
        self.data = list()
    def size(self):
        """Number of stored elements."""
        return len(self.data)
    def get(self, i):
        """Return element *i*."""
        return self.data[i]
    def append(self, i):
        """Append element *i*."""
        self.data.append(i)
    def __getitem__(self, i):
        return self.data[i]
    def __setitem__(self, i, value):
        self.data[i] = value
    def getJSONObject(self, i):
        """Return element *i*, raising JsonTypeErrorException unless it is a JsonObject."""
        ret = self.data[i]
        if isinstance(ret, JsonObject):
            return ret
        raise JsonTypeErrorException('JSONObject', str(type(ret)))
    def getJSONArray(self, i):
        """Return element *i*, raising JsonTypeErrorException unless it is a JSONArray."""
        ret = self.data[i]
        if isinstance(ret, JSONArray):
            return ret
        raise JsonTypeErrorException('JSONArray', str(type(ret)))
    def __str__(self):
        return arrayToString(self, 0)
    def __iter__(self):
        return iter(self.data)
    def __eq__(self, array):
        """Element-wise equality against another JSONArray.

        Bug fix: the original compared ``self.get[i]`` -- subscripting the
        bound method object -- which raised TypeError for any non-empty array.
        """
        if not isinstance(array, JSONArray):
            return False
        if self.size() != array.size():
            return False
        for i in range(self.size()):
            if self.get(i) != array.get(i):
                return False
        return True
15,554 | 6083439f5d4ddffcb94a6c4c154dd09f44fbfd72 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.apps import AppConfig
class CombinerConfig(AppConfig):
    """Django application configuration for the ``combiner`` app."""
    name = 'combiner'
15,555 | 0afe581390dc2360746f4df9ddde9d70736c5a9d | """SplunkStore plugin to index data in Splunk using HEC token."""
import json
import logging
import requests
_log = logging.getLogger(__name__)
class SplunkHECStore:
    """SplunkHECStore plugin to index cloud data in Splunk using HEC token."""
    def __init__(self, uri, token, index, ca_cert, buffer_size=1000):
        """Create an instance of :class:`SplunkHECStore` plugin.

        Arguments:
            uri (str): Splunk collector service URI.
            token (str): Splunk HEC token.
            index (str): Splunk HEC token accessible index.
            ca_cert (str): Location of cetificate file to verify the identity
                of host in URI, or False to disable verification
            buffer_size (int): Maximum number of records to hold in
                in-memory buffer for each record type.
        """
        self._uri = uri
        self._token = token
        self._index = index
        self._ca_cert = ca_cert
        self._buffer_size = buffer_size
        self._buffer = []
        # For maintaining session between multiple _flush calls
        self._session = requests.session()
    def write(self, record):
        """Save the record in a bulk-buffer.

        Also, flush the buffer by saving its content to Splunk when the buffer
        size exceeds configured self._buffer_size

        Arguments:
            record (dict): Data to save to the Splunk.
        """
        # Make Splunk ready payload data and append it to self._buffers list.
        self._buffer.append({
            'index': self._index,
            'sourcetype': 'json',
            'event': record
        })
        # If the records count in self._buffer is more than allowed by
        # self._buffer_size, send those records to Splunk.
        if len(self._buffer) >= self._buffer_size:
            self._flush()
    def _flush(self):
        """Perform bulk insert of buffered records into Splunk.

        Every failure path logs and returns without clearing the buffer, so
        the same records are retried on the next flush.
        """
        buffer_len = len(self._buffer)
        if buffer_len == 0:
            _log.info('No pending records to index; URI: %s; index: %s',
                      self._uri, self._index)
            return
        _log.info('Indexing %d records; URI: %s; index: %s ...',
                  buffer_len, self._uri, self._index)
        headers = {'Authorization': 'Splunk ' + self._token}
        try:
            response = self._session.post(self._uri,
                                          headers=headers,
                                          data=json.dumps(self._buffer),
                                          verify=self._ca_cert)
            log_data = ('URI: {}; index: {}; response status: {}; '
                        'response content: {}'
                        .format(self._uri, self._index,
                                response.status_code, response.text))
            if response.status_code != 200:
                _log.error('Failed to index %d records; HTTP status '
                           'code indicates error; %s',
                           buffer_len, log_data)
                return
            try:
                j = response.json()
            except Exception as e:
                _log.error('Failed to get JSON from response; %s; '
                           'error: %s; %s', log_data, type(e).__name__, e)
                return
            # Splunk HEC reports success with code 0 inside the JSON body.
            if j['code'] != 0:
                _log.error('Failed to index %d records; Splunk status '
                           'code in JSON indicates error; %s',
                           buffer_len, log_data)
                return
            _log.info('Indexed %d records; %s', buffer_len, log_data)
            # Only clear the buffer after a confirmed successful index.
            del self._buffer[:]
        except requests.ConnectionError as e:
            _log.error('Failed to index %d records; connection error; '
                       'URI: %s; index: %s; error: %s: %s; ',
                       buffer_len, self._uri, self._index,
                       type(e).__name__, e)
        except Exception as e:
            _log.error('Failed to index %d records; unexpected error; '
                       'URI: %s; index: %s; error: %s: %s',
                       buffer_len, self._uri, self._index,
                       type(e).__name__, e)
    def done(self):
        """Flush any remaining records."""
        self._flush()
15,556 | 1bec559c7d653959e66ca2b6c6c781a578be7e43 | import os
import re

import numpy as np
import pyresample
import yaml
def get_project_root_path():
    """Return the project root directory.

    Uses the ICE_HOME environment variable when set; otherwise falls back to
    the parent directory of this module.
    """
    try:
        project_path = os.environ['ICE_HOME']
    except KeyError:
        # Bug fix: was a bare "except:", which also swallowed SystemExit and
        # KeyboardInterrupt; only a missing env var should trigger the fallback.
        project_path = os.path.split(os.path.abspath(os.path.dirname(__file__)))[0]
    return project_path
def get_area_filepath():
    """Absolute path of the ``areas.cfg`` file under the project root."""
    return os.path.join(get_project_root_path(), 'areas.cfg')
def load_area_def(area_name):
    """Load area definition *area_name* from the project's areas.cfg via pyresample."""
    path_to_area_cfg = get_area_filepath()
    area_def = pyresample.utils.load_area(path_to_area_cfg, area_name)
    return area_def
def parse_extension(filepath):
    """
    Map a file extension to a format-driver name.

    Returns "netcdf", "mitiff" or "geotiff" depending on the extension of
    *filepath*; raises Exception for anything unrecognised.
    """
    extension = os.path.splitext(filepath)[1][1:]
    driver_extensions = {"netcdf": ['nc'],
                         "mitiff": ['mitiff'],
                         "geotiff": ['gtiff', 'tiff', 'tif']}
    for driver_name, known_extensions in driver_extensions.items():
        if extension in known_extensions:
            return driver_name
    raise Exception("Unknown file extension, cannot guess file format")
def load_yaml_config(filepath):
    """Parse the YAML file at *filepath* and return the resulting objects.

    Security fix: uses yaml.safe_load -- plain yaml.load can instantiate
    arbitrary Python objects from untrusted input, and modern PyYAML requires
    an explicit Loader argument for it.
    """
    with open(filepath, 'r') as fh:
        yaml_dict = yaml.safe_load(fh)
    return yaml_dict
def window_blocks(large_array, window_size):
    """
    Split a large 1D array into smaller non-overlapping arrays

    Args:
        large_array (numpy.ndarray): 1d array to be split in smaller blocks
        window_size (int): window size, array shape should be divisible by this number

    Returns:
        numpy.ndarray: Resulting array with multiple small blocks of size `window_size`
    """
    # Bug fix: "/" yields a float under Python 3 and ndarray.reshape rejects
    # float dimensions; use integer floor division.
    y_size = large_array.shape[0] // window_size
    blocks_array = large_array.reshape(y_size, window_size)
    return blocks_array
def rescale_lac_array_to_gac(lac_array):
    """
    Create a GAC AVHRR array by averaging 4 consecutive LAC pixels
    Take only every forth scan line, omit the rest

    Args:
        lac_array (numpy.ndtype): array with scan width of 2001 pixels

    Returns:
        gac_array (numpy.ndtype): array with scan width of 400 pixels

    Note:
        Original GAC data contains 401 pixels per scanline, for the sake
        of simplicity we take only 400 pixels.
    """
    window_size = 5
    lac_array_with_omitted_lines = lac_array[::4]  # keep every 4th scanline
    lac_array_2000px = lac_array_with_omitted_lines[:, :-1]  # drop the 2001st pixel
    flat_lac_array = lac_array_2000px.flatten()
    # Bug fix: integer floor division ("//") -- plain "/" produces float shapes
    # that reshape rejects under Python 3. The window_blocks helper is inlined
    # here as a plain reshape.
    blocks = flat_lac_array.reshape(flat_lac_array.shape[0] // window_size, window_size)
    # Average the first 4 pixels of every 5-pixel window (the 5th is skipped).
    gac_array_flat = np.mean(blocks[:, :-1], axis=1)
    gac_length = gac_array_flat.shape[0]
    gac_array_2d = gac_array_flat.reshape(gac_length // 400, 400)
    return gac_array_2d
def parse_proj_string(proj_string):
    """
    Parse proj4 string and create a dictionary out of it
    """
    # Raw string avoids backslash-escape ambiguity. NOTE: the original file
    # used "re" without importing it (NameError at runtime); the import is
    # added at module level.
    pattern = re.compile(r"\+(\w+)=([A-Z\d+\w+\.]*)")
    proj_dict = {}
    for param_name, param_value in pattern.findall(proj_string):
        proj_dict[param_name] = param_value
    return proj_dict
def geotiff_meta_to_areadef(meta):
    """
    Transform (Rasterio) geotiff meta dictionary to pyresample area definition

    Arguments:
        meta (dictionary) : dictionary containing projection and image geometry
                            information (formed by Rasterio)

    Returns:
        area_def (pyresample.geometry.AreaDefinition) : Area definition object
    """
    area_id = ""
    name = ""
    proj_id = "Generated from GeoTIFF"
    proj_dict = meta['crs']
    # pyresample expects plain string keys/values in the projection dict.
    proj_dict_with_string_values = dict(zip([str(key) for key in proj_dict.keys()], [str(value) for value in proj_dict.values()]))
    x_size = meta['width']
    x_res = meta['transform'][1]
    # GeoTIFF affine transforms store a negative y pixel size; flip the sign.
    y_res = meta['transform'][5] * -1
    y_size = meta['height']
    # transform[0]/transform[3] are the coordinates of the upper-left corner;
    # derive the remaining corners from the pixel sizes.
    x_ll = meta['transform'][0]
    y_ur = meta['transform'][3]
    y_ll = y_ur - y_size * y_res
    x_ur = x_ll + x_size * x_res
    area_extent = [x_ll, y_ll, x_ur, y_ur]
    area_def = pyresample.geometry.AreaDefinition(area_id,
                                                  name,
                                                  proj_id,
                                                  proj_dict_with_string_values,
                                                  x_size,
                                                  y_size,
                                                  area_extent)
    return area_def
15,557 | 011d4f41f04573ed456a38c9ae01e10b1e258e08 | from crane import db, app
from flask import jsonify, request
import json
class Template(db.Model):
    """SQLAlchemy model storing a named template as a JSON text blob."""
    id = db.Column(db.Integer, primary_key=True)
    name = db.Column(db.String(256))
    template = db.Column(db.Text())
    def __init__(self, name, template):
        self.name = name
        self.template = template
@app.route('/template', methods=['POST'])
def new_template():
    """Create and persist a Template from posted JSON {"name": ..., "template": ...}."""
    data = request.get_json();
    template = Template(data['name'],json.dumps(data['template']));
    db.session.add(template)
    db.session.commit()
    return ""
@app.route('/template', methods=['GET'])
def get_templates():
    """Return all templates as JSON, each decoded blob augmented with id and name."""
    templates = Template.query.all()
    result = []
    for template in templates:
        template_json = json.loads(template.template)
        template_json['id'] = template.id
        template_json['name'] = template.name
        result.append(template_json)
    return jsonify(result=result)
@app.route('/template/<template_id>', methods=['DELETE'])
def delete_template(template_id):
    """Delete the template with *template_id*; silently succeeds when it does not exist."""
    template = Template.query.filter_by(id=template_id).first()
    if template:
        db.session.delete(template)
        db.session.commit()
    return ""
15,558 | bb4f75f27cd39f60dfd26aa4b6dfb65b651e672d | import cv2
cap = cv2.VideoCapture(0) #common source = 0
# Show the raw webcam frame next to two Canny edge maps with different
# hysteresis thresholds until 'q' is pressed.
while (True):
    ret, frame = cap.read()
    edge1 = cv2.Canny(frame, 100, 200)
    edge2 = cv2.Canny(frame, 10, 20)
    cv2.imshow("Original", frame)
    cv2.imshow("Canny 1", edge1)
    cv2.imshow("Canny 2", edge2)
    #print(edge)
    # waitKey(1) pumps the GUI event loop; mask to the low byte for the key code.
    if cv2.waitKey(1) & 0xFF ==ord('q'):
        break
cap.release()
cv2.destroyAllWindows()
|
15,559 | 92f764416d4acb62accdf9eea1b8cec7d811d0b4 | # -*- coding: utf-8 -*-
import pytest
import time
from .server import InfinispanServer, Mode
from infinispan.client import Infinispan
from infinispan import error
class TestClientStandalone(object):
@classmethod
def setup_class(cls):
cls.server = InfinispanServer()
cls.server.start()
@classmethod
def teardown_class(cls):
try:
cls.server.stop()
except RuntimeError:
# is ok, already stopped
pass
@pytest.yield_fixture
def client(self):
client = Infinispan()
yield client
client.disconnect()
def test_ping(self, client):
assert client.ping()
def test_put(self, client):
result = client.put("key1", "value1")
assert result is True
def test_get(self, client):
value = client.get("key1")
assert value == "value1"
def test_get_non_existing(self, client):
value = client.get("notexisting")
assert value is None
def test_get_with_version(self, client):
value, version = client.get_with_version("key1")
assert value == "value1"
assert version == b'\x00\x00\x00\x00\x00\x00\x00\x01'
def test_get_with_version_non_existing(self, client):
value, version = client.get_with_version("nonexisting")
assert value is None
assert version is None
def test_put_with_lifespan(self, client):
result = client.put("key2", "value2", lifespan='2s')
assert result is True
value = client.get("key2")
assert value == "value2"
time.sleep(2)
value = client.get("key2")
assert value is None
def test_put_with_max_idle(self, client):
result = client.put("key3", "value3", max_idle='2s')
assert result is True
time.sleep(1)
value = client.get("key3")
assert value == "value3"
time.sleep(1)
value = client.get("key3")
assert value == "value3"
time.sleep(2)
value = client.get("key3")
assert value is None
def test_get_with_metadata_infinite(self, client):
value, metadata = client.get_with_metadata("key1")
assert value == "value1"
assert "created" not in metadata
assert "lifespan" not in metadata
assert "last_used" not in metadata
assert "max_idle" not in metadata
assert metadata["version"] is not None
def test_get_with_metadata_ephemeral(self, client):
client.put("key4", "value4", lifespan='10s', max_idle='2s')
value, metadata = client.get_with_metadata("key4")
assert value == "value4"
assert "created" in metadata
assert metadata["lifespan"] == 10
assert "last_used" in metadata
assert metadata["max_idle"] == 2
assert metadata["version"] is not None
def test_get_with_metadata_nonexisting(self, client):
value, metadata = client.get_with_metadata("nonexisting_key1")
assert value is None
assert metadata == {}
def test_put_with_force_previous_value(self, client):
result = client.put("key1", "value2", previous=True)
assert result == "value1"
def test_put_when_absent_with_force_previous_value(self, client):
result = client.put("key1_absent_until_now", "value2", previous=True)
assert result is None
def test_put_if_absent_when_absent(self, client):
result = client.put_if_absent("absent_key", "value")
assert result is True
def test_put_if_absent_when_not_absent(self, client):
result = client.put_if_absent("key1", "value3")
assert result is False
def test_put_if_absent_when_absent_force_previous_value(self, client):
result = client.put_if_absent("absent_key2", "value2", previous=True)
assert result is None
def test_put_if_absent_when_not_absent_force_previous_value(self, client):
result = client.put_if_absent("key1", "value3", previous=True)
assert result == "value2"
def test_replace_when_absent(self, client):
result = client.replace("absent_key3", "value")
assert result is False
def test_replace_when_not_absent(self, client):
result = client.replace("key1", "value4")
assert result is True
def test_replace_when_absent_force_previous_value(self, client):
result = client.replace("absent_key3", "value2", previous=True)
assert result is None
def test_replace_when_not_absent_force_previous_value(self, client):
result = client.replace("key1", "value5", previous=True)
assert result == "value4"
def test_replace_with_version_when_version_matches(self, client):
_, version = client.get_with_version("key1")
result = client.replace_with_version("key1", "value6", version)
assert result is True
def test_replace_with_version_when_version_doesnt_match(self, client):
result = client.replace_with_version(
"key1", "value7", b'\x00\x00\x00\x00\x00\x00\x00\xff')
assert result is False
def test_replace_with_version_when_absent(
self, client):
non_existing_version = b'\x00\x00\x00\x00\x00\x00\x00\xff'
result = client.replace_with_version(
"absent_key1", "value1", non_existing_version)
assert result is False
def test_replace_with_version_when_version_matches_force_prev_value(
self, client):
_, version = client.get_with_version("key1")
result = client.replace_with_version(
"key1", "value7", version, previous=True)
assert result == "value6"
def test_replace_with_version_when_version_doesnt_match_force_prev_value(
self, client):
non_existing_version = b'\x00\x00\x00\x00\x00\x00\x00\xff'
result = client.replace_with_version(
"key1", "value8", non_existing_version, previous=True)
assert result == "value7"
def test_replace_with_version_when_absent_force_prev_value(
self, client):
non_existing_version = b'\x00\x00\x00\x00\x00\x00\x00\xff'
result = client.replace_with_version(
"absent_key1", "value1", non_existing_version, previous=True)
assert result is None
def test_remove(self, client):
result = client.remove("key1")
assert result is True
def test_remove_when_absent(self, client):
result = client.remove("absent_key1")
assert result is False
def test_remove_with_force_previous_value(self, client):
client.put("key1", "value1")
result = client.remove("key1", previous=True)
assert result == "value1"
def test_remove_when_absent_with_force_previous_value(self, client):
result = client.remove("absent_key1", previous=True)
assert result is None
    # CAS-style remove: only succeeds when the caller's version token matches.
    def test_remove_with_version_when_version_matches(self, client):
        client.put("key1", "value1")
        _, version = client.get_with_version("key1")
        result = client.remove_with_version("key1", version)
        assert result is True
    def test_remove_with_version_when_version_matches_force_prev_value(
            self, client):
        client.put("key1", "value1")
        _, version = client.get_with_version("key1")
        result = client.remove_with_version("key1", version, previous=True)
        assert result == "value1"
    def test_remove_with_version_when_version_doesnt_match(self, client):
        client.put("key1", "value1")
        non_existing_version = b'\x00\x00\x00\x00\x00\x00\x00\xff'
        result = client.remove_with_version("key1", non_existing_version)
        assert result is False
    # Relies on "key1" == "value1" left behind by the previous test (order-dependent).
    def test_remove_with_version_when_version_doesnt_match_force_prev_value(
            self, client):
        non_existing_version = b'\x00\x00\x00\x00\x00\x00\x00\xff'
        result = client.remove_with_version(
            "key1", non_existing_version, previous=True)
        assert result == "value1"
    def test_contains_key(self, client):
        # Force a known-absent state first; the remove() result is irrelevant here.
        client.remove("key1")
        assert client.contains_key("key1") is False
        client.put("key1", "value1")
        assert client.contains_key("key1") is True
    # Switching cache_name must route operations to a separate, independent cache.
    def test_put_to_different_cache(self, client):
        client.cache_name = "memcachedCache"
        result = client.put("key2", "value2")
        value2 = client.get("key2")
        value1 = client.get("key1")
        assert result is True
        assert value2 == "value2"
        assert value1 is None
    # An unknown cache name must surface as a ClientError, not a silent failure.
    def test_put_to_non_existing_cache(self, client):
        client.cache_name = "nonexistingCache"
        with pytest.raises(error.ClientError):
            client.put("key1", "value1")
    # The client is a context manager: connected inside the block, closed after.
    def test_context_manager(self):
        with Infinispan() as client:
            assert client.protocol.conn.connected is True
        assert client.protocol.conn.connected is False
    # put_async returns a future; result() blocks until the put completes.
    # NOTE(review): the f.done() check can race a fast server -- potential flake.
    def test_async(self, client):
        f = client.put_async("test_async", "value")
        assert f.done() is False
        assert f.result() is True
        assert client.get("test_async") == "value"
    def test_stats(self, client):
        result = client.stats()
        assert "stores" in result
    # clear() empties the whole cache.
    def test_clear(self, client):
        client.put("key1", "value1")
        client.put("key2", "value2")
        result = client.clear()
        assert result is True
        assert client.get("key1") is None
        assert client.get("key2") is None
class TestClientDomain(object):
    """Integration tests against an Infinispan server started in domain (clustered) mode."""
    @classmethod
    def setup_class(cls):
        # One server instance shared by the whole class.
        cls.server = InfinispanServer(mode=Mode.DOMAIN)
        cls.server.start()
    @classmethod
    def teardown_class(cls):
        try:
            cls.server.stop()
        except RuntimeError:
            # is ok, already stopped
            pass
    # BUG FIX: pytest.yield_fixture has been deprecated since pytest 3.0 and was
    # removed in pytest 6.2; plain pytest.fixture supports yield-style teardown.
    @pytest.fixture
    def client(self):
        # Fresh client per test; always disconnect, even if the test fails.
        client = Infinispan()
        yield client
        client.disconnect()
    def test_ping_with_topology_change(self, client):
        # The first ping should trigger a topology update from one node to two.
        assert client.protocol.conn.size == 1
        assert client.ping()
        assert client.protocol.conn.size == 2
|
15,560 | 3a8c851f14a09fb1ea24b7acfca9d8a8564f713f | import requests
import xml.etree.ElementTree as ET
def get_article_ids():
    """Query PubMed ESearch and return up to 10 matching article IDs (strings).

    NOTE(review): the search term ends with a dangling "+AND" -- confirm the
    intended query; the URL is left unchanged to preserve request behavior.
    """
    url = "https://eutils.ncbi.nlm.nih.gov/entrez/eutils/esearch.fcgi?db=pubmed&term=post+traumatic+stress+disorder+AND&retmode=json&retmax=10"
    # requests.get replaces requests.request("GET", ...) with unused empty
    # payload/headers; same HTTP request on the wire.
    response = requests.get(url)
    articles = response.json()
    # Return a copy of the id list (the original built it element by element).
    return list(articles['esearchresult']['idlist'])
def get_articles_with_details():
    """Fetch PubMed abstracts for the IDs returned by get_article_ids().

    Returns a list of [pmid, title, abstract, authors] rows (network I/O).
    """
    # Get articles from method get_article_ids()
    articles = get_article_ids()
    # Join article id for posting in URL. For instance, '34022747', '34022659', '34020974'.
    articles_id = ','.join(articles)
    print(articles)
    # Create posting url.
    url = "https://eutils.ncbi.nlm.nih.gov/entrez/eutils/efetch.fcgi?db=pubmed&id="+articles_id+"&rettype=abstract"
    print(url)
    payload={}
    headers = {}
    response = requests.request("GET", url, headers=headers, data=payload)
    # Create xml tree using Element Tree.
    abs_tree = ET.fromstring(response.content)
    #root=abs_tree.tag
    ## New empty list. We will append all information into this list.
    ## articles list will not be used.
    all_articles = []
    # 'PubmedArticle/MedlineCitation' will find each article.
    # Using each article we will find PMID, Title, Abstract and Authors
    for title in abs_tree.findall('PubmedArticle/MedlineCitation'):
        # Find ID of article.
        id = title.find('PMID').text
        # Find title of article.
        article_title = title.find('Article/ArticleTitle').text
        # Abstract field can have multiple AbstractText fields. We should get all of them.
        # NOTE(review): str(abstract.text) turns a None .text (markup-only nodes)
        # into the literal string "None".
        abstract_all = ""
        for abstract in title.findall('Article/Abstract/AbstractText'):
            abstract_all += str(abstract.text)
        # AuthorList can have multiple Author fields; merge "LastName ForeName"
        # entries with ";" separators, then strip the trailing ";".
        # NOTE(review): collective authors lack LastName/ForeName and would raise
        # AttributeError on .text here.
        author_list = ""
        for author in title.findall('Article/AuthorList/Author'):
            author_name = author.find('LastName').text + " " + author.find('ForeName').text + ";"
            author_list += author_name
        author_list = author_list.strip(';')
        #print(author_list)
        # For each article, put id, title, abstract_all and author_list into a row.
        each_article_row = [id, article_title, abstract_all, author_list]
        # Append into "all_articles": [[id1, title1, abstract1, authors1], ...]
        all_articles.append(each_article_row)
    return all_articles
# Module self-test: performs live network calls when this file is imported/run.
print(get_articles_with_details())
|
# Append one line to the results file ("a" creates the file if it is missing).
# BUG FIX: use a context manager so the handle is closed even if write() fails.
with open('w3res.txt', 'a') as f:
    f.write("Olzhas appended the text!\n")
|
15,562 | d9b1fbd718a7c41899132b99c699742b745de87b | from django.contrib import admin
from .models import Booking, Property, AvailabilityDate, PricingPeriod
from .booking_form import BookingForm
from .pricing_period_form import PricingPeriodForm
class BookingAdmin(admin.ModelAdmin):
    """Admin for bookings; 'dates' is derived via BookingForm rather than edited directly."""
    view_on_site = False
    exclude = ('dates',)
    form = BookingForm
    list_display = ('property', 'start_date', 'end_date', 'customer_name')
    list_filter = ('property', 'start_date',)
class AvailabilityAdmin(admin.ModelAdmin):
    """Per-date availability and pricing overview for both properties."""
    view_on_site = False
    list_display = ('date', 'cottage_booking_status', 'barn_booking_status',
        'cottage_week_price', 'cottage_week_price_discount', 'barn_week_price', 'barn_week_price_discount')
    list_filter = ('date', 'cottage_booking_status', 'barn_booking_status',)
class PropertyAdmin(admin.ModelAdmin):
    view_on_site = False
class PricingPeriodAdmin(admin.ModelAdmin):
    """Admin for pricing periods; mirrors BookingAdmin's derived-dates handling."""
    view_on_site = False
    exclude = ('dates',)
    form = PricingPeriodForm
    list_display = ('property', 'start_date', 'end_date', 'price', 'discount')
    list_filter = ('property', 'start_date',)
# Register your models here.
admin.site.register(AvailabilityDate, AvailabilityAdmin)
admin.site.register(Booking, BookingAdmin)
admin.site.register(Property, PropertyAdmin)
admin.site.register(PricingPeriod, PricingPeriodAdmin)
|
15,563 | 177a03d26d847206ca1a74590b350b1f7900a127 | from django.urls import path
from . import views
# URL routes for the accounts views.
# NOTE(review): paths are case-sensitive ("ForgotPassword", "Signup") and the
# routes are unnamed, so they cannot be reversed with {% url %}; confirm intended.
urlpatterns=[
    path('',views.login),
    path('ForgotPassword',views.forgotPassword),
    path('Signup',views.signup),
    path('user',views.logged),
]
15,564 | f65f3861e4af73cac09ac8e60fa12fe17a8b0d70 | #!/usr/bin/python3
""" This documents declares the class rectangle """
class Rectangle ():
"""This is the start, is empty for now"""
pass
|
15,565 | b557c39a4627d036020f21e73de0ae89c068dc80 | from selenium import webdriver
from time import sleep
# Smoke-test: open 51zxw.net and click through to a lesson page, then quit.
# NOTE(review): the find_element_by_* API was removed in Selenium 4; this
# script requires Selenium 3.x (or a rewrite to driver.find_element(By...)).
driver=webdriver.Firefox()
driver.get("http://www.51zxw.net")
driver.find_element_by_link_text("平面设计").click()
sleep(1)
driver.find_element_by_partial_link_text(" CC的工作界面(2)").click()
sleep(2)
driver.quit()
15,566 | 8c84df07a40d34aec5bb79f0442813c65d7f2895 | from string import Template
class FileTemplate:
    """Incrementally substitute $-placeholders in a template string.

    Uses safe_substitute, so placeholders without a value yet are left
    intact and can be filled in by later calls.
    """
    def __init__(self, template_string):
        self.template_string = template_string
    def replace(self, template_key, value):
        """Substitute $template_key with *value*, keeping other placeholders."""
        substituted = Template(self.template_string).safe_substitute({template_key: value})
        self.template_string = substituted
    def replace_with_values(self, template_key, values):
        """Substitute $template_key with the matching entry of mapping *values*."""
        self.replace(template_key, values[template_key])
    def contents(self):
        """Return the template text with all substitutions applied so far."""
        return self.template_string
|
15,567 | c19430c3e1b5dce7247d58e7ce285a427536f8dc | import numpy
print("Printing Original array")
sampleArray = numpy.array([[34,43,73],[82,22,12],[53,94,66]])
print (sampleArray)
minOfAxisOne = numpy.amin(sampleArray, 1)
print("Printing amin Of Axis 1")
print(minOfAxisOne)
maxOfAxisOne = numpy.amax(sampleArray, 0)
print("Printing amax Of Axis 0")
print(maxOfAxisOne)
|
15,568 | c417b59d47e03ef7927cc58c7ad16a5c69dbc28e | import functools
import re
from typing import Dict, Sequence, Pattern, Callable, List, Optional, Tuple, Any
from solver import Clue, ClueValue, Location, Clues, ConstraintSolver, Intersection, Letter
from solver import EquationSolver
from solver.equation_solver import KnownClueDict, KnownLetterDict
GRID = """
X.XXXXXXXX
XX.X...X..
.X....X...
X.XXX.XXXX
XX.X.XXX..
X...X..X..
X..X..X...
"""
ACROSS = """
1 (MAR)^C + H (4) (5)
4 COM(E + T) (3) (4)
7 TRIT + O – N (3) (4)
10 CO(M + B) + A + T + S (3) (3)
12 DA(M + E) (3) (4)
13 ROU – T (3) (4)
14 A(NG + S) + T! (3) (4)
15 VIV + A + S (3) (4)
16 INC(U – R + S + I + O + N) + S (3) (4)
18 BANT(E + R – S) – M + E + T (4) (5)
21 TA + R^R + IE + R – S (3) (4)
25 G + O – I + N – G + B + A + N^A + N^(A + S) (3) (4)
28 A(KA + V)A (3) (3)
30 W^(A – S) – O + K (3) (3)
31 A(I + D) + E! (3) (3)
32 COOL – S (3) (4)
33 B^(O – B)SL + E^(I – G) + HT + EA + M (3) (6)
34 NIN + E – S (3) (3)
35 MALT (4) (4)
"""
DOWN = """
1 BBILB + (O + B)(A + G) – GI + N + S (4) (5)
2 WAR(L – O + C + K) (3) (4)
3 R^A – T + (E + N – T + E – R + S)^A(BIN – N) (4) (6)
4 T – O + T + A^I – WAN (3) (4)
5 U – S – E + B^(E – A)N + TI(N + S) (4) (4)
6 L^(E – C) + (T + U + R(E + R) + O/O)M (3) (5)
7 M(A – G – N + E + TI + S)M (4) (5)
8 GL(OB + U – L – E) (3) (4)
9 MARACA + S (3) (4)
11 TALL – M + E – N (3) (4)
17 C^(AR) + N(I + V + A(L + S)) (4) (5)
19 VERV + E – S (4) (4)
20 WRIN + K – L + E + S (4) (4)
22 (R – A)B^A – T (3) (4)
23 B^ANN + TV (4) (5)
24 L(U + M + BE + R + S) (3) (4)
25 MO(O + D) (3) (4)
26 (L + O + G)I^C (3) (4)
27 C^E(R – A + M + I + C) – S (3) (4)
29 T^A (3) (3)
"""
class OuterSolver(EquationSolver):
    """Stage 1: assign values 1..19 to letters so every clue equation fits its grid entry."""
    @staticmethod
    def run() -> None:
        grid = Clues.get_locations_from_grid(GRID)
        clues = OuterSolver.create_from_text(ACROSS, DOWN, grid)
        solver = OuterSolver(clues)
        for clue in clues:
            # These three entries must be palindromes; all others must not be.
            if clue.name in ('8d', '24d', '29d'):
                solver.add_constraint([clue], lambda value: value == value[::-1])
            else:
                solver.add_constraint([clue], lambda value: value != value[::-1])
        solver.solve(debug=True)
    @staticmethod
    def create_from_text(across: str, down: str, locations: Sequence[Location]) -> Sequence[Clue]:
        """Parse clue lines of the form "<num> <expr> (<squares>) (<digits>)" into Clues."""
        result: List[Clue] = []
        for lines, is_across, letter in ((across, True, 'a'), (down, False, 'd')):
            for line in lines.splitlines():
                line = line.strip()
                if not line:
                    continue
                match = re.fullmatch(r'(\d+) (.*) \((\d+)\) \((\d+)\)', line)
                assert match
                number = int(match.group(1))
                location = locations[number - 1]
                clue = Clue(f'{number}{letter}', is_across, location, int(match.group(3)), expression=match.group(2))
                # context = total digit count; exceeds the square count when a
                # square holds a two-digit number (10..19).
                clue.context = int(match.group(4))
                result.append(clue)
        return result
    def __init__(self, clues: Sequence[Clue]):
        # Letter values are drawn from 1..19.
        super().__init__(clues, items=range(1, 20))
    def make_pattern_generator(self, clue: Clue, intersections: Sequence[Intersection]) -> \
            Callable[[Dict[Clue, ClueValue]], Pattern[str]]:
        """Build a regexp factory checking a candidate value against crossing entries."""
        assert(all(intersection.this_clue == clue for intersection in intersections))
        if clue.length == clue.context:
            # One digit per square; no square may hold 0.
            default_item = '[1-9]'
            lookahead = ''
        else:
            # Some squares hold 1..19; also pin the total digit count.
            default_item = '(1?[1-9]|10)'
            lookahead = f'(?=[0-9]{{{clue.context}}}$)'
        def getter(known_clues: Dict[Clue, ClueValue]) -> Pattern[str]:
            pattern_list = [default_item] * clue.length
            for intersection in intersections:
                other_clue = intersection.other_clue
                pattern = self.get_nth_digit_pattern(other_clue, known_clues[other_clue], intersection.other_index)
                pattern_list[intersection.this_index] = pattern
            pattern = ''.join(pattern_list)
            regexp = lookahead + pattern
            return re.compile(regexp)
        return getter
    @staticmethod
    def get_nth_digit_pattern(clue: Clue, value: str, index: int) -> str:
        """Regex alternatives for what square *index* of *clue* may contain given *value*."""
        # Shortcut. A normal clue in which only one letter can go into each square
        if clue.context == clue.length:
            return value[index]
        parsings = OuterSolver.parse_with_pairs(clue.context - clue.length, value)
        results = {parse[index] for parse in parsings}
        return '(' + '|'.join(results) + ')'
    @staticmethod
    @functools.lru_cache(None)
    def parse_with_pairs(pairs: int, value: str) -> List[Tuple[str, ...]]:
        """All splits of *value* into squares where exactly *pairs* squares hold two digits."""
        if pairs == 0:
            return [tuple(value)]
        if value == '' or value[0] == '0':
            return []
        start = value[0]
        result = [(start,) + x for x in OuterSolver.parse_with_pairs(pairs, value[1:])]
        if value[0] == '1':
            # A two-digit square (10..19) can only start with '1'.
            start = value[0:2]
            result.extend((start,) + x for x in OuterSolver.parse_with_pairs(pairs - 1, value[2:]))
        return result
    def show_solution(self, known_clues: KnownClueDict, known_letters: KnownLetterDict) -> None:
        print(tuple((letter, value) for letter, value in known_letters.items()))
        print(tuple((clue.name, value) for clue, value in known_clues.items()))
        self.show_letter_values(known_letters)
        # Hand off to stage 2, which places digits into individual squares.
        InnerSolver.run(self._clue_list, known_clues, known_letters)
class InnerSolver(ConstraintSolver):
    """Stage 2: decide, per clue, how its value splits into one- and two-digit squares."""
    clue_values: KnownClueDict
    letter_values: KnownLetterDict
    @staticmethod
    def run(clue_list: Sequence[Clue], clue_values: KnownClueDict, letter_values: KnownLetterDict) -> None:
        solver = InnerSolver(clue_list, clue_values, letter_values)
        solver.solve(debug=True)
    @staticmethod
    def test() -> None:
        """Run stage 2 from a known-good letter assignment (skips the slow stage 1)."""
        letters = (('A', 3), ('T', 7), ('K', 18), ('V', 17), ('B', 10), ('R', 5), ('N', 9), ('L', 11), ('M', 19),
                   ('E', 6), ('S', 1), ('I', 8), ('W', 13), ('G', 4), ('D', 15), ('C', 2), ('O', 14), ('U', 16),
                   ('H', 12))
        print(', '.join(x for x, _ in letters), '=', ', '.join(str(x) for _, x in letters))
        letter_values = {Letter(letter): value for letter, value in letters}
        grid = Clues.get_locations_from_grid(GRID)
        clues = OuterSolver.create_from_text(ACROSS, DOWN, grid)
        clue_values: KnownClueDict = {clue: clue.evaluators[0](letter_values) for clue in clues}
        solver = InnerSolver(clues, clue_values, letter_values)
        solver.solve(debug=True)
    def __init__(self, clue_list: Sequence[Clue], clue_values: KnownClueDict, letter_values: KnownLetterDict):
        super().__init__(clue_list)
        for clue in clue_list:
            clue.generator = self.generator
        self.clue_values = clue_values
        self.letter_values = letter_values
    def generator(self, clue: Clue) -> Sequence[str]:
        """Yield encoded fills: each square's number n is stored as chr(n + 48)."""
        value = self.clue_values[clue]  # The calculated value of this clue (as a string)
        parsings = OuterSolver.parse_with_pairs(clue.context - clue.length, value)
        for parsing in parsings:
            result = [chr(int(digit) + 48) for digit in parsing]
            yield ''.join(result)
    def draw_grid(self, **args: Any) -> None:
        """Draw twice: once with per-square numbers, once with the letters they stand for."""
        location_to_entry: Dict[Location, str] = args['location_to_entry']
        # Decode chr(n + 48) back to the number for display.
        args['location_to_entry'] = {location : str(ord(code) - 48) for location, code in location_to_entry.items()}
        super().draw_grid(**args)
        mapping = { value: letter for letter, value in self.letter_values.items() }
        args['location_to_entry'] = {location : mapping[ord(code) - 48] for location, code in location_to_entry.items()}
        super().draw_grid(**args)
# Entry point: run stage 2 with the known letter assignment (bypasses the slow search).
if __name__ == '__main__':
    InnerSolver.test()
|
# 7-row x 5-column glyphs for '0'..'9' plus (index 10) '+', flattened row-major.
digits = [
    'xxxxxx...xx...xx...xx...xx...xxxxxx',
    '....x....x....x....x....x....x....x',
    'xxxxx....x....xxxxxxx....x....xxxxx',
    'xxxxx....x....xxxxxx....x....xxxxxx',
    'x...xx...xx...xxxxxx....x....x....x',
    'xxxxxx....x....xxxxx....x....xxxxxx',
    'xxxxxx....x....xxxxxx...xx...xxxxxx',
    'xxxxx....x....x....x....x....x....x',
    'xxxxxx...xx...xxxxxxx...xx...xxxxxx',
    'xxxxxx...xx...xxxxxx....x....xxxxxx',
    '.......x....x..xxxxx..x....x.......']
ROWS = 7   # glyph height
WIDTH = 5  # glyph width
STEP = 6   # glyph width plus one separator column

def parse_banner(grid):
    """Decode a 7-line ASCII-art banner into an expression string like "12+34".

    grid: sequence of ROWS equally long strings.
    """
    symbols = []
    for col in range(0, len(grid[0]), STEP):
        pattern = ''.join(grid[row][col:col + WIDTH] for row in range(ROWS))
        idx = digits.index(pattern)
        symbols.append(str(idx) if idx < 10 else '+')
    return ''.join(symbols)

def render_banner(text):
    """Render a string of digit characters as a 7-line banner ('.' background)."""
    out = [['.'] * (len(text) * STEP - 1) for _ in range(ROWS)]
    for pos, ch in enumerate(text):
        glyph = digits[int(ch)]
        for row in range(ROWS):
            out[row][pos * STEP:pos * STEP + WIDTH] = glyph[row * WIDTH:(row + 1) * WIDTH]
    return '\n'.join(''.join(r) for r in out)

def main():
    """Read a banner from stdin, evaluate its expression, print the result banner."""
    # BUG FIX: the original ran input() at import time, making the module
    # impossible to import (or test) without a connected stdin.
    grid = [input() for _ in range(ROWS)]
    # eval() is safe here: parse_banner only ever emits digits and '+'.
    print(render_banner(str(eval(parse_banner(grid)))))

if __name__ == '__main__':
    main()
|
15,570 | 027bab62904e1d050d8ac83507b04bcecb888af9 | import pandas as pd
import numpy as np
train_data = pd.read_csv("clean_train.csv")
X_clean = train_data.iloc[:,[4]].values
y_clean = train_data.iloc[:,[80]].values
import matplotlib.pyplot as plt
plt.boxplot(y_clean)
"""
First try of removing outlier function
"""
def reject_outliers(data, m=2):
    """Return only the entries of *data* within m standard deviations of its mean."""
    deviations = np.abs(data - np.mean(data))
    return data[deviations < m * np.std(data)]
y_rej = reject_outliers(y_clean)
plt.boxplot(X_clean)
X_rej = reject_outliers(X_clean)
X_df = pd.DataFrame(X_clean)
y_df = pd.DataFrame(y_clean)
train = pd.concat([X_df,y_df],axis=1)
train.columns = ['Area','Price']
# NOTE(review): X and y below are unused -- the split uses X_clean/y_clean.
X = train.iloc[0:1397,[0]].values
y = train.iloc[0:1397,[1]].values
# Splitting the dataset into the Training set and Test set
from sklearn.model_selection import train_test_split
X_train, X_test, y_train, y_test = train_test_split(X_clean, y_clean, test_size = 1/3, random_state = 0)
# Fitting Simple Linear Regression to the Training set
from sklearn.linear_model import LinearRegression
regressor = LinearRegression()
regressor.fit(X_train, y_train)
# Predicting the Test set results
y_pred = regressor.predict(X_test)
# Training-set fit.
plt.scatter(X_train, y_train, color = 'red')
plt.plot(X_train, regressor.predict(X_train), color = 'blue')
plt.title('Housing Price ')
plt.xlabel('Area')
plt.ylabel('Price')
plt.show()
# Test-set fit against the same regression line.
plt.scatter(X_test, y_test, color = 'red')
plt.plot(X_train, regressor.predict(X_train), color = 'blue')
plt.title('Housing Price ')
plt.xlabel('Area')
plt.ylabel('Price')
plt.show()
"""
####################################################################################
"""
import pandas as pd
train_data = pd.read_csv("clean_train.csv")
train = train_data.iloc[:,[4,80]].values
train_df = pd.DataFrame(train, index= None)
train_df.columns = ['Area','Price']
train_df.isnull().values.any()
def replace(group):
    """Clamp outliers (more than 3 sigma from the mean) in *group* to the mean, in place."""
    center = group.mean()
    spread = group.std()
    is_outlier = (group - center).abs() > 3 * spread
    # or "group[~is_outlier].mean()"
    group[is_outlier] = center
    return group
X = train_df.groupby('Area').transform(replace)
# NOTE(review): y is referenced here before this section assigns it (works only
# because an earlier cell defined y); order looks accidental.
plt.boxplot(y['Area'])
plt.show()
y = train_df.groupby('Price').transform(replace)
plt.scatter(train_df['Area'], train_df['Price'], color = 'red')
rej = reject_outliers(train)
"""
##################################################################################
"""
import pandas as pd
train_data = pd.read_csv("clean_train.csv")
train = train_data.iloc[:,[4,80]].values
train_df = pd.DataFrame(train, index= None)
train_df.columns = ['Area','Price']
train_df.isnull().values.any()
# Removing Outliers
a = train_df[train_df.apply(lambda x: np.abs(x - x.mean()) / x.std() < 3).all(axis=1)]
a.isnull().values.any()
# Again Removing outliers
b = a[a.apply(lambda x: np.abs(x - x.mean()) / x.std() < 3).all(axis=1)]
b.isnull().values.any()
# Converting dataframe into csv file
b.to_csv("clean_train1.csv")
# Checking boxplot
plt.boxplot(train_df['Area'])
plt.boxplot(train_df['Price'])
# Setting data variables
clean_train1 = pd.read_csv("clean_train1.csv")
# NOTE(review): positional axis in drop() is removed in pandas 2.x; use axis=1.
clean_train1 = clean_train1.drop('Unnamed: 0', 1)
X = clean_train1.iloc[:,[0]].values
y = clean_train1.iloc[:,[1]].values
# Splitting the dataset into the Training set and Test set
from sklearn.model_selection import train_test_split
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size = 1/3, random_state = 0)
# Fitting Simple Linear Regression to the Training set
from sklearn.linear_model import LinearRegression
regressor = LinearRegression()
regressor.fit(X_train, y_train)
# Predicting the Test set results
y_pred = regressor.predict(X_test)
plt.scatter(X_train, y_train, color = 'red')
plt.plot(X_train, regressor.predict(X_train), color = 'blue')
plt.title('Housing Price ')
plt.xlabel('Area')
plt.ylabel('Price')
plt.show()
plt.scatter(X_test, y_test, color = 'red')
plt.plot(X_train, regressor.predict(X_train), color = 'blue')
plt.title('Housing Price ')
plt.xlabel('Area')
plt.ylabel('Price')
plt.show()
# Fit-quality metrics.
import math
from sklearn.metrics import r2_score
from sklearn.metrics import mean_squared_error
from sklearn.metrics import mean_absolute_error
err = mean_squared_error(y_test,y_pred)
r2_score(y_test, y_pred)
rmse = math.sqrt(err)
abserr = mean_absolute_error(y_test, y_pred)
# NOTE(review): the 'normed' keyword was removed from matplotlib's hist().
plt.hist(X, normed=False, bins=40)
|
15,571 | faf7cafa9803b88bbbc85ce546f3d1582f7e28ab | from __future__ import print_function
import ROOT
from libs.bin_cls import Bin
from libs.funcs import binned, extend
from collections import OrderedDict
from itertools import product
import glob
import json
import argparse
# Command-line interface for the tag-and-probe efficiency fits.
parser = argparse.ArgumentParser()
parser.add_argument('--muonid' , required = True , type = str, help = 'muon ID under test')
parser.add_argument('--sample' , required = True , type = str, help = 'sample under test')
parser.add_argument('--version' , default = '' , type = str, help = 'fit version (label)')
parser.add_argument('--n_threads', default = 1 , type = int, help = 'number of threads')
parser.add_argument('--visualize', action = 'store_true', help = 'won\'t run in batch mode')
args = parser.parse_args()
# Batch mode (no GUI windows) unless --visualize was given.
ROOT.gROOT.SetBatch(not args.visualize)
if args.n_threads > 1:
    ROOT.ROOT.EnableImplicitMT(args.n_threads)
OUTDIR = 'Efficiencies{}'.format(args.version)
if __name__ == '__main__':
    input_files = ROOT.std.vector('std::string')()
    json_dict = OrderedDict()
    # Input ntuples: J/psi tag-and-probe trees for MC and the 2017 data eras.
    files = {
        'mc' : glob.glob('/eos/cms/store/group/phys_muon/TagAndProbe/ULRereco/2017/102X/Jpsi/12Jun2020/Mu8Mu17Mu20/MC_Jpsi_pt8GeV/*.root'),
        '2017B': glob.glob('/eos/cms/store/group/phys_muon/TagAndProbe/ULRereco/2017/102X/Jpsi/12Jun2020/Mu8Mu17Mu20/Run2017B/*.root'),
        '2017C': glob.glob('/eos/cms/store/group/phys_muon/TagAndProbe/ULRereco/2017/102X/Jpsi/12Jun2020/Mu8Mu17Mu20/Run2017C/*.root'),
        '2017D': glob.glob('/eos/cms/store/group/phys_muon/TagAndProbe/ULRereco/2017/102X/Jpsi/12Jun2020/Mu8Mu17Mu20/Run2017D/*.root'),
        '2017E': glob.glob('/eos/cms/store/group/phys_muon/TagAndProbe/ULRereco/2017/102X/Jpsi/12Jun2020/Mu8Mu17Mu20/Run2017E/*.root'),
        '2017F': glob.glob('/eos/cms/store/group/phys_muon/TagAndProbe/ULRereco/2017/102X/Jpsi/12Jun2020/Mu8Mu17Mu20/Run2017F/*.root'),
    } ; files['2017all'] = [jj for key, paths in files.items() for jj in paths if not key == 'mc']
    for file in files[args.sample]:
        input_files.push_back(str(file))
    dataframe = ROOT.RDataFrame('tpTree/fitter_tree', input_files)
    from libs.models import cbgauss_sum, expo_pass, expo_fail, crystalball, gaussian, doublegauss
    # Signal + background PDFs used by the passing/failing mass fits.
    wspace = ROOT.RooWorkspace('wspace')
    getattr(wspace, 'import')(crystalball.Clone('signal'))
    getattr(wspace, 'import')(expo_pass.Clone('backgroundPass'))
    getattr(wspace, 'import')(expo_fail.Clone('backgroundFail'))
    extend(wspace)
    # Kinematic binning of the probes.
    binned_variables = OrderedDict()
    #binned_variables['abseta'] = binned([0, 0.9, 1.2, 2.1, 2.4])
    binned_variables['abseta'] = binned([0, 2.4])
    binned_variables['pt' ] = binned([2, 2.5, 2.75, 3, 3.25, 3.5, 3.75, 4, 4.5, 5, 6, 8, 10, 15, 20, 25, 30, 40, 50, 9999])#, 60, 120, 200, 300, 500, 700, 1200])
    tight2016 = ('tight2016', 'Glb == 1 && PF == 1 && glbChi2 < 10 && glbValidMuHits > 0 && numberOfMatchedStations > 1 && dB < 0.2 && dzPV < 0.5 && tkValidPixelHits > 0 && tkTrackerLay > 5')
    soft2016 = ('soft2016' , 'TMOST == 1 && Track_HP == 1 && tkTrackerLay > 5 && tkPixelLay > 0 && abs(dzPV) < 20. && abs(dB) < 0.3')
    # Common tag/probe preselection applied to every bin.
    denominator = ' && '.join([
        'tag_pt > 8',
        'pt > 2',
        'abseta > 0 && abseta < 2.4',
        '(tag_Mu8 || tag_Mu17 || tag_Mu20)',
        'pair_dz < 0.5',
        'pair_drM1 >= 0.3',
    ])
    dataframe = dataframe.Define(*tight2016).Define(*soft2016)
    dataframe = dataframe.Filter(denominator)
    # Numerator selection per --muonid choice.
    numerator = {
        'tight2016' : 'tight2016',
        'medium2016': 'Medium2016',
        'loose' : 'Loose',
        'soft2016' : 'soft2016',
        'mediumNOTtight': 'Medium2016 && !tight2016',
        'looseNOTmedium': 'Loose && !Medium2016 && !tight2016',
        'softNOTloose' : 'soft2016 && !Loose && !Medium2016 && !tight2016',
    }
    ## convert all the possible combination of binned_variables in a smart structure
    bin_list = [[(a, b) for b in binned_variables[a]] for a in binned_variables.keys()]
    bin_list = [[('{K}/{LO}_{UP}'.format(K = k, LO = x[0], UP = x[1]), '{K} >= {LO} && {K} < {UP}'.format(K = k, LO = x[0], UP = x[1])) for k, x in X] for X in bin_list]
    bin_list = [x for x in product(*[X for X in bin_list])]
    bin_list = [('/'.join([x[0] for x in X]), ' && '.join([x[1] for x in X])) for X in bin_list]
    #for bb in bin_list: bb[1] = '({}) && ({})'.format(denominator, bb[1])
    bins = [
        Bin(den = den, num = numerator[args.muonid], tdir = tdir, dataframe = dataframe, workspace = wspace.Clone(), isMC = args.sample == 'mc')
        for tdir, den in bin_list
    ]
    print ('[INFO] loading histograms')
    for bb in bins:
        bb.load_histograms()
    for bb in bins:
        bb.run_fit(out_dir = '{}/{}_{}'.format(OUTDIR, args.sample, args.muonid), json_dict = json_dict)
    print ('[INFO] all done. RDF was read', getattr(dataframe, 'GetNRuns', lambda: '[RDF.GetNRuns() not available in this ROOT version]')(), 'time(s)')
    # NOTE(review): the output file handle is never closed explicitly.
    json.dump(json_dict, open('{}/{}_{}.json'.format(OUTDIR, args.sample, args.muonid), 'w'), indent = 4)
15,572 | 8f9c12cc661061fbbdfbdd226145ba80deb31cf3 |
from backend.asm.inttype import IntType
from backend.asm.statistics import Statistics
from backend.asm.assembly import Comment
from backend.asm.assembly import Label
from backend.asm.assembly import Directive
from backend.asm.assembly import Instruction
from backend.asm.operand import DirectMemoryReference
from backend.asm.operand import AbsoluteAddress
from backend.asm.operand import Operand
from backend.asm.operand import IndirectMemoryReference
from backend.asm.literal import Symbol
from backend.asm.literal import Literal
from backend.asm.literal import IntegerLiteral
from backend.sys_dep.x86.register import RegisterClass
from backend.sys_dep.x86.register import x86Register
# Describe a kind of ASM DSL (Domain Specific Languages) so that it can convert from ASM objects to ASM codes easily
class AssemblyCode():
    """Builder DSL for x86 AT&T-syntax assembly: collects Assembly objects and renders them."""
    def __init__(self, natural_type, stack_wordsize, label_Symbols, verbose):
        self.natural_type = natural_type      # default operand IntType for untyped insns
        self.stack_wordsize = stack_wordsize  # bytes per virtual-stack slot
        self.label_symbols = label_Symbols    # symbol table used when rendering labels
        self.verbose = verbose
        # NOTE(review): VirtualStack is not among the visible imports; presumably
        # defined later in this module -- confirm.
        self.virtual_stack = VirtualStack(self.natural_type)
        self._assemblies = []                 # ordered list of emitted Assembly objects
        self._comment_indent_level = 0
        self._statistics = Statistics ()
    def assemblies(self):
        """Return the accumulated Assembly objects."""
        return self._assemblies
    def add_all (self, assemblies):
        """Append a sequence of Assembly objects."""
        return self._assemblies.extend(assemblies)
    def to_source (self):
        """Render every assembly line to one newline-terminated source string."""
        buf = ""
        for asm in self._assemblies:
            buf += asm.to_source (self.label_symbols)
            buf+= "\n"
        return buf
    def dump (self):
        for asm in self._assemblies:
            print (asm.dump())
    def apply (self, opt):
        # optimization: no-op for now (peephole optimizer not implemented)
        return
    def __statistics (self):
        # NOTE(review): _statistics is initialized to a Statistics() instance in
        # __init__, so this guard is never true and collect() never runs; it was
        # likely meant to start as None for lazy collection -- confirm.
        if not self._statistics:
            self._statistics = Statistics.collect(self._assemblies)
        return self._statistics
    def does_uses (self, reg):
        # NOTE(review): reads self._statistics directly, bypassing __statistics().
        return self._statistics.does_register_used (reg.base_name())
    def comment (self, code_str):
        """Append a comment line at the current comment indent level."""
        self._assemblies.append (Comment (code_str, self._comment_indent_level))
    def indent_comment (self):
        self._comment_indent_level += 1
    def unindent_comment (self):
        self._comment_indent_level -= 1
def label (self, sym):
if isinstance(sym, Symbol):
self._assemblies.append(Label (sym))
elif isinstance(sym, Symbol):
self._assemblies.append(sym)
    def reduce_label (self):
        """Drop labels that the collected statistics report as unreferenced."""
        stats = self._statistics
        result = []
        for asm in self._assemblies:
            # NOTE(review): wrapping asm in Label(asm) looks suspicious -- other
            # call sites construct Label from a Symbol; confirm what
            # does_symbol_used expects.
            if (asm.is_label ()) and (not (stats.does_symbol_used (Label(asm)))):
                continue
            else:
                result.append(asm)
        self._assemblies = result
    def _directive (self, direc):
        """Append a raw assembler directive line."""
        self._assemblies.append(Directive(direc))
    def _insn (self, t = None, op = None, suffix = None, a = None,b = None):
        """Append an Instruction, dispatching on which keyword arguments are set.

        *t* (an operand type) and *suffix* (an explicit suffix string) are
        mutually exclusive.  NOTE(review): dispatch tests truthiness, so a
        falsy operand (e.g. integer 0 passed as a/b) would select the wrong
        branch or raise; callers appear to always pass operand objects.
        """
        if op and not t and not suffix and not a and not b:
            self._assemblies.append(Instruction (mnemonic = op))
        elif op and not t and not suffix and a and not b:
            self._assemblies.append(Instruction(mnemonic = op, suffix = "", a1 = a))
        elif op and not t and suffix and a and not b:
            self._assemblies.append(Instruction(mnemonic = op, suffix= suffix, a1 = a))
        elif op and t and not suffix and a and not b:
            self._assemblies.append(Instruction (mnemonic = op, suffix = self._type_suffix(t), a1 = a))
        elif op and not t and suffix and a and b:
            self._assemblies.append(Instruction(mnemonic = op, suffix = suffix, a1 = a, a2 = b))
        elif op and t and not suffix and a and b:
            self._assemblies.append(Instruction(mnemonic = op, suffix = self._type_suffix(t), a1= a, a2=b))
        else:
            raise Exception ("wrong operand")
def _type_suffix (self, t1, t2 = None):
type_str = ""
if t1:
if t1.size() == IntType.INT8:
type_str += "b"
elif t1.size() == IntType.INT16:
type_str += "w"
elif t1.size() == IntType.INT32:
type_str += "l"
elif t1.size() == IntType.INT64:
type_str += "q"
else:
raise Exception ("unknown register type: " + t1.size())
if t2:
if t2 == IntType.INT8:
type_str += "b"
elif t2 == IntType.INT16:
type_str += "w"
elif t2 == IntType.INT32:
type_str += "l"
elif t2 == IntType.INT64:
type_str += "q"
else:
raise Exception("unknown register type: " + t2.size())
return type_str
    # --- assembler directive emitters ---
    def _file (self, name):
        """Emit a .file directive naming the source file."""
        self._directive(".file\t" + name)
    def _text (self):
        self._directive("\t.text")
    def _data (self):
        self._directive("\t.data")
def _section (self, name, flags = None, code_type = None, group = None, linkage = None):
if flags and code_type and group and linkage:
self._directive("\t.section\t" + name + "," + flags + "," + code_type + "," + group + "," + linkage)
elif (not flags) and (not type) and (not group) and (not linkage):
self._directive("\t.section\t" + name)
    def _globl (self, sym):
        """Mark *sym* as globally visible."""
        self._directive(".globl " + sym.name())
    def _local (self, sym):
        self._directive(".local " + sym.name())
def _hidden (self, sym):
self._directive("\t.hidden\t", sym.name())
    def _comm (self, sym, size, alignment):
        """Emit .comm (common/BSS allocation of *size* bytes)."""
        self._directive("\t.comm\t" + sym.name() + "," + str (size) + "," + str (alignment))
    def _align (self, n):
        self._directive("\t.align\t" + str (n))
    def _type (self, sym, code_type):
        # NOTE(review): uses sym._name directly while other emitters call
        # sym.name(); confirm intentional.
        self._directive("\t.type\t" + sym._name + "," + code_type)
    def _size (self, sym, size):
        self._directive("\t.size\t" + sym._name + "," + str (size))
    def _byte (self, val):
        """Emit .byte from either an int or a Literal."""
        if isinstance(val, int):
            self._directive(".byte\t" + IntegerLiteral((val)).to_source())
        elif isinstance(val, Literal):
            self._directive(".byte\t" + val.to_source())
    def _value(self, val):
        # NOTE(review): emits the literal directive name "._value", which looks
        # like a rename artifact of the assembler's ".value" directive -- confirm.
        if isinstance(val, int):
            self._directive("._value\t" + IntegerLiteral((val)).to_source())
        elif isinstance(val, Literal):
            self._directive("._value\t" + val.to_source())
    def _long(self, val):
        if isinstance(val, int):
            self._directive(".long\t" + IntegerLiteral((val)).to_source())
        elif isinstance(val, Literal):
            self._directive(".long\t" + val.to_source())
    def _quad(self, val):
        if isinstance(val, int):
            self._directive(".quad\t" + IntegerLiteral((val)).to_source())
        elif isinstance(val, Literal):
            self._directive(".quad\t" + val.to_source())
    def _string(self, code_str):
        self._directive("\t.string\t" + code_str)
def virtual_push(self, reg):
if self.verbose:
self.comment("push " + reg.base_name() + " -> " + self.virtual_stack.top())
else:
self.virtual_stack.extent(self.stack_wordsize)
self.mov (reg, self.virtual_stack.top())
def virtual_pop(self, reg):
if self.verbose:
self.comment("pop " + reg.base_name() + " <- " + self.virtual_stack.top())
else:
self.mov(self.virtual_stack.top(), reg)
self.virtual_stack.rewind(self.stack_wordsize)
    def jmp (self, label):
        self._insn(op = "jmp", a = DirectMemoryReference (label.symbol()))
    def jne (self, label):
        # NOTE(review): emits the synonymous mnemonic "jnz" for jne.
        self._insn(op = "jnz", a = DirectMemoryReference (label.symbol()))
    def je (self, label):
        self._insn(op = "je", a = DirectMemoryReference (label.symbol()))
    def cmp (self, a, b):
        self._insn(t = b.type, op = "cmp", a = a, b = b)
    # setcc family: store the named condition flag into a byte register.
    def sete (self, reg):
        self._insn(op = "sete", a = reg)
    def setne (self, reg):
        self._insn(op = "setne", a = reg)
    def seta (self, reg):
        self._insn(op = "seta", a = reg)
    def setae (self, reg):
        self._insn(op = "setae", a = reg)
    def setb (self, reg):
        self._insn(op = "setb", a = reg)
    def setbe (self, reg):
        self._insn(op = "setbe", a = reg)
    def setg (self, reg):
        self._insn(op = "setg", a = reg)
    def setge (self, reg):
        self._insn(op = "setge", a = reg)
    def setl (self, reg):
        self._insn(op = "setl", a = reg)
    def setle (self, reg):
        self._insn(op = "setle", a = reg)
    def test (self, a, b):
        self._insn(t = b.type, op = "test", a = a, b = b)
    # Machine stack push/pop use the platform's natural word size.
    def push (self, reg):
        self._insn(op = "push", suffix = self._type_suffix(self.natural_type), a = reg)
    def pop (self, reg):
        self._insn(op = "pop", suffix = self._type_suffix(self.natural_type), a = reg)
    # call function by relative address
    def call (self, sym):
        self._insn(op = "call", a = DirectMemoryReference (sym))
    # call function by absolute address (register-indirect)
    def call_absolute (self, reg):
        self._insn(op = "call", a = AbsoluteAddress (reg))
    def ret (self):
        self._insn(op = "ret")
def mov (self, src, dest):
    """Emit a mov, choosing the operand size from the operand types.

    register->register uses the natural word size; otherwise the size is
    taken from whichever side is the register. Raises for any operand
    pairing that is not register/Operand.
    """
    if isinstance(src, x86Register) and isinstance(dest, x86Register):
        code_type = self.natural_type
    elif isinstance(src, Operand) and isinstance(dest, x86Register):
        code_type = dest.type
    elif isinstance(src, x86Register) and isinstance( dest, Operand):
        code_type = src.type
    else:
        raise Exception ("Wrong src or dest type")
    self._insn(t = code_type, op = "mov", a = src, b = dest)
# Stack access: relocatable moves and sign/zero-extending moves.
def reloca_table_mov (self, src, dest):
    # Relocatable mov: its offset may be patched later once the final
    # frame layout is known (see VirtualStack.fix_offset).
    self._assemblies.append(Instruction ("mov", self._type_suffix(self.natural_type), src, dest, True))
def movsx (self, src, dest):
    # Sign-extending move; the suffix encodes both source and dest widths.
    self._insn(op = "movs", suffix = self._type_suffix(src.type, dest.type), a = src, b = dest)
def movzx (self, src, dest):
    # Zero-extending move.
    self._insn(op = "movz", suffix = self._type_suffix(src.type, dest.type), a = src, b = dest)
def movzb (self, src, dest):
    # Zero-extend from a byte source ("b" prepended to the width suffix).
    self._insn(op = "movz", suffix = "b" + str (self._type_suffix(src.type, dest.type)), a = src, b = dest)
def lea (self, src, dest):
    # Load the effective address of src into dest (natural word size).
    self._insn(t = self.natural_type, op = "lea", a = src, b = dest)
def neg (self, reg):
    """Two's-complement negate reg in place."""
    self._insn(t = reg.type, op = "neg", a = reg)
def add (self, diff, base):
    self._insn(t = base.type, op = "add", a = diff, b = base)
def sub (self, diff, base):
    self._insn(t = base.type, op = "sub", a = diff, b = base)
def imul (self, m, base):
    self._insn(t = base.type, op = "imul",a = m, b = base)
def cltd (self):
    # Sign-extend the accumulator into the data register pair,
    # conventionally done before a signed divide.
    self._insn(op = "cltd")
def div (self, base):
    """Unsigned divide by base."""
    self._insn(t = base.type, op = "div", a = base)
def idiv (self, base):
    """Signed divide by base."""
    self._insn(t = base.type, op = "idiv", a = base)
# Bitwise ops; leading underscores avoid clashing with Python keywords.
def _not (self, reg):
    self._insn(t = reg.type, op = "not", a = reg)
def _and (self, bits, base):
    self._insn(t = base.type, op = "and", a = bits, b = base)
def _or (self, bits, base):
    self._insn(t = base.type, op = "or", a = bits, b = base)
def xor (self, bits, base):
    self._insn(t = base.type, op = "xor", a = bits, b = base)
def sar (self, bits, base):
    # Arithmetic (sign-preserving) shift right.
    self._insn(t = base.type, op = "sar", a = bits, b = base)
def sal (self, bits, base):
    self._insn(t = base.type, op = "sal", a = bits, b = base)
def shr (self, bits, base):
    # Logical shift right.
    self._insn(t = base.type, op = "shr", a = bits, b = base)
# Virtual Stack: compiler-managed spill area addressed relative to the base pointer.
class VirtualStack ():
    """Tracks the extent of a function's virtual (spill) stack.

    Memory references handed out by top() are relocatable: once the final
    frame layout is known, fix_offset() shifts all of them at once.
    """

    def __init__ (self, natural_type):
        self.__natural_type = natural_type
        # reset() establishes offset/max/mem_refs; the previous version
        # duplicated those three assignments here before calling reset().
        self.reset()

    def reset (self):
        """Forget all allocations and previously handed-out references."""
        self.__offset = 0
        self.__max = 0
        self.__mem_refs = []

    def max_size(self):
        """High-water mark of the stack in bytes (used to size the frame)."""
        return self.__max

    def extent (self, stack_len):
        """Grow the stack by stack_len bytes, updating the high-water mark."""
        self.__offset += stack_len
        self.__max = max (self.__offset, self.__max)

    def rewind (self, stack_len):
        """Shrink the stack by stack_len bytes."""
        self.__offset -= stack_len

    def top (self):
        """Return a relocatable memory reference to the current stack top."""
        mem = self.__reloca_table_mem (-self.__offset, self.__bp())
        self.__mem_refs.append(mem)
        return mem

    def __reloca_table_mem (self, offset, base):
        return IndirectMemoryReference.reloca_table(offset, base)

    def __bp(self):
        # Base-pointer register at the platform's natural width.
        return x86Register (RegisterClass.BP, self.__natural_type)

    def fix_offset (self, diff):
        """Shift every handed-out reference by diff bytes."""
        for mem in self.__mem_refs:
            mem.fix_offset (diff)
|
15,573 | 480c41f96e9eaef192b23f69ae4498cad5b171ad | #!/usr/bin/env python3
import os
import os.path
import argparse
import math
# Extensions counted when -e is not given.
DEFAULT_EXT = [
    'java', 'jsp', 'xml', 'yml', 'yaml', 'properties', 'sql', 'css', 'js', 'cs'
]

# Directory names that are never descended into.
exclude_dirs = [
    '.svn', 'exploded', 'node_modules'
]

TRACE = False

# CLI: -e comma-separated extensions, -t trace flag; remaining arguments
# are the source directories to scan. (The parser was previously bound to
# the name `argparse`, shadowing the module; renamed to `parser`.)
parser = argparse.ArgumentParser(description='Simple source code stats')
parser.add_argument('-e',
                    type=str,
                    help='list of extensions')
parser.add_argument('-t',
                    action='store_true',
                    help='trace')
args, unknown = parser.parse_known_args()

exts = args.e.split(",") if args.e else DEFAULT_EXT
TRACE = bool(args.t)
src_dirs = unknown
def process_dir(_dir):
    """Recursively scan _dir, feeding every file whose extension is tracked
    to process_file(); directories named in exclude_dirs are skipped."""
    if os.path.basename(_dir) in exclude_dirs:
        return
    if TRACE: print('Scanning', _dir)
    # A tuple of suffixes lets str.endswith test every extension in one call
    # (previously an any() over a freshly built list per file).
    suffixes = tuple('.' + ext for ext in exts)
    for entry in os.listdir(_dir):
        # os.path.join is portable; the original concatenated with '/'.
        path = os.path.join(_dir, entry)
        if os.path.isdir(path):
            process_dir(path)
        elif os.path.isfile(path) and path.endswith(suffixes):
            process_file(path)
def process_file(f_name):
    """Accumulate line count, byte size and file count for f_name's extension
    into the global `stat` table (created by init())."""
    # Context manager guarantees the handle is closed even on error;
    # undecodable bytes are ignored, as before.
    with open(f_name, errors='ignore') as f:
        file_content = f.read()
    # Compute once; the original called get_loc/get_f_size twice when tracing.
    loc = get_loc(file_content)
    size = get_f_size(f_name)
    if TRACE: print('\tFile:', f_name, loc, size)
    ext = get_ext(f_name)
    stat[ext]["loc"] += loc
    stat[ext]["size"] += size
    stat[ext]["count"] += 1
def get_ext(f_name):
    """Return the text after the last '.' in f_name, or '' when there is none."""
    _, dot, suffix = f_name.rpartition('.')
    return suffix if dot else ''
def get_loc(content):
    """Line count of content: one more than the number of newlines
    (identical to len(content.split('\n')))."""
    return content.count('\n') + 1
def get_f_size(f_name):
    """Size of the file at f_name, in bytes."""
    return os.stat(f_name).st_size
def init():
    """(Re)create the global `stat` table with a zeroed entry per tracked extension."""
    global stat
    stat = {ext: {"count": 0, "loc": 0, "size": 0} for ext in exts}
def report():
    """Print one summary line per extension plus a TOTAL row."""
    total_cnt = total_loc = total_size = 0
    print("Ext | count | loc | size")
    for ext, row in stat.items():
        cells = [ext, row["count"], row["loc"], renderFileSize(row["size"])]
        print(" | ".join(str(c) for c in cells))
        total_cnt += row["count"]
        total_loc += row["loc"]
        total_size += row["size"]
    footer = ["TOTAL", total_cnt, total_loc, renderFileSize(total_size)]
    print(" | ".join(str(c) for c in footer))
# Renders human-readable file size. Ex.: "15 KB". Based on
# http://stackoverflow.com/a/3758880/104522
#
# @param bytes file size in bytes
# @return human-readable file size
def renderFileSize(bytes: int):
    # Binary units with the "i" dropped: "KiB" becomes "KB", etc.
    rendered = humanReadableByteCount(bytes, False)
    return rendered.replace("i", "")
# Renders human-readable file size. Ex.: "15 KB". Based on
# http://stackoverflow.com/a/3758880/104522
#
# @param bytes file size in bytes
# @param si true to use units of 1000, otherwise 1024
# @return human-readable file size
def humanReadableByteCount(bytes: int, si: bool):
    unit = 1000 if si else 1024
    if bytes < unit:
        return "{} B".format(bytes)
    exp = int(math.log(bytes) / math.log(unit))
    prefix = ("kMGTPE" if si else "KMGTPE")[exp - 1]
    if not si:
        prefix += "i"
    return "{:.1f} {}B".format(bytes / math.pow(unit, exp), prefix)
if __name__ == '__main__':
    # Build the stats table, walk each directory given on the command line,
    # then print the per-extension summary.
    init()
    for d in src_dirs:
        process_dir(d)
    report()
|
15,574 | 34379cd3946cff3630370348e2ce9f330f60a167 | MAIN_ = ".main"
OUT = "main.out"  # presumably the default output file name -- TODO confirm against callers
IN = "main.in"    # presumably the default input file name -- TODO confirm against callers
|
15,575 | aa7e42d36224240ddccc40898d3a4e78813a6a66 | from .BSM import *
from .MCS_PURE import * |
15,576 | 4db820642f8c58c5283af645112548da824d05a1 | import json
from assetapp import assetapp_handler_view
from assetapp import models
from assetapp.models import Asset
from django.http import HttpResponse
from django.shortcuts import render, get_object_or_404
from django.views.decorators.csrf import csrf_exempt
@csrf_exempt  # CSRF check skipped: agents POST here without a token
def report(request):
    """Asset reporting endpoint.

    Agents POST a JSON payload in the 'asset_data' field. A payload
    carrying an 'sn' serial number of an already commissioned asset
    updates that asset in place; an unknown 'sn' enters the new-asset
    approval zone. Responses are plain-text status messages (Chinese).
    """
    if request.method == 'POST':
        asset_data = request.POST.get('asset_data', None)
        data = json.loads(asset_data)
        # Data validation intentionally skipped here (original author's note);
        # json.loads will raise on a malformed payload.
        if not data:
            return HttpResponse('没有数据!')
        if not isinstance(data, dict):
            return HttpResponse('数据必须为字典格式!')
        # An 'sn' serial number routes the payload into the approval flow.
        sn = data.get('sn', None)
        if sn:
            # First check whether an onboarded asset with this sn exists.
            asset_obj = Asset.objects.filter(sn=sn)
            if asset_obj:
                # Update flow for an already commissioned asset.
                update_asset = assetapp_handler_view.UpdateAsset(request, asset_obj[0], data)
                update_asset.asset_update()
                return HttpResponse('资产数据已经更新!')
            else:
                # Unknown sn: create or refresh an entry in the new-assets zone.
                asset_new = assetapp_handler_view.NewAsset(request, data)
                resp = asset_new.add_to_new_assets_zone()
                return HttpResponse(resp)
        else:
            return HttpResponse('没有该资产序列号,请检查数据!')
    # NOTE(review): non-POST requests fall through and return None -- confirm
    # whether an explicit 405/empty response is intended.
# Asset master list: all assets rendered as a table.
def index(request):
    """Overview page listing every asset (template: assetapp/index.html)."""
    assets = models.Asset.objects.all()
    # locals() exposes `assets` (and `request`) to the template context.
    return render(request=request, template_name='assetapp/index.html', context=locals())
# Dashboard: graphical summary of asset status and device-type counts.
def dashboard(request):
    """Render status percentages plus per-device-type counts.

    Status codes (per the original comments): 0 online, 1 offline,
    2 unknown, 3 faulty, 4 standby. With an empty asset table the old
    code raised ZeroDivisionError; all rates now report 0 instead.
    """
    asset_obj = models.Asset.objects
    total = asset_obj.count()
    upline = asset_obj.filter(status=0).count()     # online
    offline = asset_obj.filter(status=1).count()    # offline
    unknown = asset_obj.filter(status=2).count()    # unknown
    breakdown = asset_obj.filter(status=3).count()  # faulty
    backup = asset_obj.filter(status=4).count()     # standby
    # Guard against division by zero when no assets exist yet.
    denominator = total if total else 1
    up_rate = round(upline / denominator * 100)
    o_rate = round(offline / denominator * 100)
    un_rate = round(unknown / denominator * 100)
    bd_rate = round(breakdown / denominator * 100)
    bu_rate = round(backup / denominator * 100)
    server_number = models.Server.objects.count()
    networkdevice_number = models.NetworkDevice.objects.count()
    storagedevice_number = models.StorageDevice.objects.count()
    securitydevice_number = models.SecurityDevice.objects.count()
    software_number = models.Software.objects.count()
    return render(request, template_name='assetapp/dashboard.html', context=locals())
# Detail page for a single asset.
def detail(request, asset_id):
    """Show one asset's details; 404 when asset_id does not exist.

    Original note: server-type assets are shown as the example; security,
    storage and network devices follow the same pattern.
    """
    asset = get_object_or_404(models.Asset, id=asset_id)
    return render(request, 'assetapp/detail.html', context=locals())
|
15,577 | 09433aa752a3047cd691462db3801bc606daf544 |
# Quick script to print the lines of one file that do not appear in another.
def output_diff(f1, f2):
    """Print every line of f1 absent from f2 (input order, trailing char stripped).

    f2 is expected to contain only unique lines; a duplicate aborts the run
    with the original diagnostic. Uses a set for O(1) membership instead of
    a dict, and context managers instead of manual close().
    """
    import sys  # module-level `import sys` only happens under __main__
    seen = set()
    with open(f2, "r") as fh:
        for line in fh:
            if line in seen:
                sys.exit("This shouldn't happen: only unique files should be here")
            seen.add(line)
    with open(f1, "r") as fh:
        for line in fh:
            if line not in seen:
                # The original sliced off the trailing newline with [:-1].
                print(line[:-1])
if __name__ == "__main__":
    # Usage: python script.py <file1> <file2>
    import sys
    f1 = sys.argv[1]
    f2 = sys.argv[2]
    output_diff(f1,f2)
|
15,578 | de3af4c27ab9fc39f5a4a2f65f08709c57beadbf | from django.contrib import admin
from django.utils.html import format_html
from django.utils.safestring import mark_safe
from ordered_model.admin import OrderedModelAdmin

from .models import Sponsor
class SponsorAdmin(OrderedModelAdmin):
    """Admin for Sponsor: orderable list with a clickable website column."""
    fields = ('name', 'current_logo', 'logo', 'website', 'active', 'description', )
    readonly_fields = ['current_logo']
    list_display = ('name', 'current_logo', 'active', 'website_safe', 'move_up_down_links', )
    list_filter = ('active', )

    def website_safe(self, obj):
        # format_html escapes obj.website and quotes the href attribute;
        # the previous mark_safe + str.format interpolated the value
        # unescaped and unquoted (HTML/attribute injection risk).
        return format_html('<a href="{0}">{0}</a>', obj.website)
    website_safe.short_description = 'Website'
admin.site.register(Sponsor, SponsorAdmin)
|
15,579 | b6fd0f2bf458c00a19757e1b27ef7200379d8c7f | {
"_id" : ObjectId("607796ead3436a3788a6f399"),
"data" : {
"webhookEvent" : "jira:issue_updated",
"user" : {
"self" : "https://timsb.telechips.com:8443/rest/api/2/user?username=timsadmin",
"name" : "timsadmin",
"emailAddress" : "gisa1@telechips.com",
"avatarUrls" : {
"16x16" : "https://timsb.telechips.com:8443/secure/useravatar?size=xsmall&avatarId=10122",
"24x24" : "https://timsb.telechips.com:8443/secure/useravatar?size=small&avatarId=10122",
"32x32" : "https://timsb.telechips.com:8443/secure/useravatar?size=medium&avatarId=10122",
"48x48" : "https://timsb.telechips.com:8443/secure/useravatar?avatarId=10122"
},
"displayName" : "TIMS Administrator",
"active" : true
},
"issue" : {
"id" : "63008",
"self" : "https://timsb.telechips.com:8443/rest/api/2/issue/63008",
"key" : "IS011A-19",
"fields" : {
"summary" : "테스트2",
"issuetype" : {
"self" : "https://timsb.telechips.com:8443/rest/api/2/issuetype/1",
"id" : "1",
"description" : "A problem which impairs or prevents the functions of the product.",
"iconUrl" : "https://timsb.telechips.com:8443/images/icons/issuetypes/bug.png",
"name" : "Bug",
"subtask" : false
},
"timespent" : null,
"reporter" : {
"self" : "https://timsb.telechips.com:8443/rest/api/2/user?username=timsadmin",
"name" : "timsadmin",
"emailAddress" : "gisa1@telechips.com",
"avatarUrls" : {
"16x16" : "https://timsb.telechips.com:8443/secure/useravatar?size=xsmall&avatarId=10122",
"24x24" : "https://timsb.telechips.com:8443/secure/useravatar?size=small&avatarId=10122",
"32x32" : "https://timsb.telechips.com:8443/secure/useravatar?size=medium&avatarId=10122",
"48x48" : "https://timsb.telechips.com:8443/secure/useravatar?avatarId=10122"
},
"displayName" : "TIMS Administrator",
"active" : true
},
"customfield_12100" : null,
"customfield_12101" : null,
"customfield_10431" : null,
"customfield_10432" : null,
"customfield_11800" : "<br><br><font color=\"gray\">-------------------------------------------------------------------------- QA Section ----------------------------------------------------------------------</font><br>",
"customfield_10433" : null,
"customfield_11801" : "<font color=\"gray\">-----------------------------------------------------------------------------------------------------------------------------------------------------------------</font><br><br>",
"customfield_11802" : null,
"created" : "2021-04-14T20:40:18.200+0900",
"customfield_11803" : null,
"customfield_12102" : null,
"customfield_11804" : null,
"customfield_11805" : null,
"customfield_11806" : null,
"customfield_11807" : {
"self" : "https://timsb.telechips.com:8443/rest/api/2/customFieldOption/13051",
"value" : "None",
"id" : "13051"
},
"customfield_11808" : {
"self" : "https://timsb.telechips.com:8443/rest/api/2/customFieldOption/13050",
"value" : "None",
"id" : "13050"
},
"customfield_12003" : null,
"customfield_12004" : null,
"customfield_12001" : {
"self" : "https://timsb.telechips.com:8443/rest/api/2/customFieldOption/13100",
"value" : "20140115 DMP-Host SDK(r2841) with BT",
"id" : "13100"
},
"customfield_12002" : "<html>\r\n<head>\r\n<style type=\"text/css\">\r\n\r\nbody { color:#1D56F1; font-size:10pt;}\r\n\r\n</style>\r\n</head>\r\n\r\n<script>\r\n\r\nvar blink_speed = 20000; var t = setInterval(function () { var ele = document.getElementById('blinker'); ele.style.visibility = (ele.style.visibility == 'hidden' ? '' : 'hidden'); }, blink_speed);\r\n\r\n\r\n</script>\r\n\r\n<body style=\"background-color:#E5EBF7\" style=\"font-weight:bold\">\r\n <span style=\"font-weight:bold\"> [본문 필수 입력 항목(Required items)]</span><br>\r\n - 재현빈도(Occurrence Freq.),사전조건(Precondition),재현절차(Reproduction steps)<br>\r\n - 기대결과(Expected Result),실제결과(Actual Result),상세설명(Detailed Description)<br>\r\n</body>\r\n</html>",
"customfield_12000" : null,
"project" : {
"self" : "https://timsb.telechips.com:8443/rest/api/2/project/11102",
"id" : "11102",
"key" : "IS011A",
"name" : "IS-SOC-TCC893x",
"avatarUrls" : {
"16x16" : "https://timsb.telechips.com:8443/secure/projectavatar?size=xsmall&pid=11102&avatarId=10011",
"24x24" : "https://timsb.telechips.com:8443/secure/projectavatar?size=small&pid=11102&avatarId=10011",
"32x32" : "https://timsb.telechips.com:8443/secure/projectavatar?size=medium&pid=11102&avatarId=10011",
"48x48" : "https://timsb.telechips.com:8443/secure/projectavatar?pid=11102&avatarId=10011"
},
"projectCategory" : {
"self" : "https://timsb.telechips.com:8443/rest/api/2/projectCategory/10001",
"id" : "10001",
"description" : "Customer issue project supported by R&D",
"name" : "IS"
}
},
"customfield_11602" : "<font color=\"gray\">-----------------------------------------------------------------------------------------------------------------------------------------------------------------</font><br><br>",
"lastViewed" : "2021-04-14T22:35:51.663+0900",
"customfield_11600" : "<br><br><font color=\"gray\">---------------------------------------------------------- Sub Task Field (FAR / IAR / ETC.) --------------------------------------------------------</font><br>",
"components" : [],
"comment" : {
"startAt" : 0,
"maxResults" : 2,
"total" : 2,
"comments" : [
{
"self" : "https://timsb.telechips.com:8443/rest/api/2/issue/63008/comment/316210",
"id" : "316210",
"author" : {
"self" : "https://timsb.telechips.com:8443/rest/api/2/user?username=timsadmin",
"name" : "timsadmin",
"emailAddress" : "gisa1@telechips.com",
"avatarUrls" : {
"16x16" : "https://timsb.telechips.com:8443/secure/useravatar?size=xsmall&avatarId=10122",
"24x24" : "https://timsb.telechips.com:8443/secure/useravatar?size=small&avatarId=10122",
"32x32" : "https://timsb.telechips.com:8443/secure/useravatar?size=medium&avatarId=10122",
"48x48" : "https://timsb.telechips.com:8443/secure/useravatar?avatarId=10122"
},
"displayName" : "TIMS Administrator",
"active" : true
},
"body" : "<p>Comment 테스트 입니다.</p>\r\n",
"updateAuthor" : {
"self" : "https://timsb.telechips.com:8443/rest/api/2/user?username=timsadmin",
"name" : "timsadmin",
"emailAddress" : "gisa1@telechips.com",
"avatarUrls" : {
"16x16" : "https://timsb.telechips.com:8443/secure/useravatar?size=xsmall&avatarId=10122",
"24x24" : "https://timsb.telechips.com:8443/secure/useravatar?size=small&avatarId=10122",
"32x32" : "https://timsb.telechips.com:8443/secure/useravatar?size=medium&avatarId=10122",
"48x48" : "https://timsb.telechips.com:8443/secure/useravatar?avatarId=10122"
},
"displayName" : "TIMS Administrator",
"active" : true
},
"created" : "2021-04-14T22:34:13.213+0900",
"updated" : "2021-04-14T22:34:13.213+0900"
},
{
"self" : "https://timsb.telechips.com:8443/rest/api/2/issue/63008/comment/316211",
"id" : "316211",
"author" : {
"self" : "https://timsb.telechips.com:8443/rest/api/2/user?username=timsadmin",
"name" : "timsadmin",
"emailAddress" : "gisa1@telechips.com",
"avatarUrls" : {
"16x16" : "https://timsb.telechips.com:8443/secure/useravatar?size=xsmall&avatarId=10122",
"24x24" : "https://timsb.telechips.com:8443/secure/useravatar?size=small&avatarId=10122",
"32x32" : "https://timsb.telechips.com:8443/secure/useravatar?size=medium&avatarId=10122",
"48x48" : "https://timsb.telechips.com:8443/secure/useravatar?avatarId=10122"
},
"displayName" : "TIMS Administrator",
"active" : true
},
"body" : "<p>Comment 테스트 입니다.222222</p>\r\n",
"updateAuthor" : {
"self" : "https://timsb.telechips.com:8443/rest/api/2/user?username=timsadmin",
"name" : "timsadmin",
"emailAddress" : "gisa1@telechips.com",
"avatarUrls" : {
"16x16" : "https://timsb.telechips.com:8443/secure/useravatar?size=xsmall&avatarId=10122",
"24x24" : "https://timsb.telechips.com:8443/secure/useravatar?size=small&avatarId=10122",
"32x32" : "https://timsb.telechips.com:8443/secure/useravatar?size=medium&avatarId=10122",
"48x48" : "https://timsb.telechips.com:8443/secure/useravatar?avatarId=10122"
},
"displayName" : "TIMS Administrator",
"active" : true
},
"created" : "2021-04-14T22:35:51.673+0900",
"updated" : "2021-04-14T22:35:51.673+0900"
}
]
},
"timeoriginalestimate" : null,
"customfield_11604" : {
"self" : "https://timsb.telechips.com:8443/rest/api/2/customFieldOption/12706",
"value" : "No",
"id" : "12706"
},
"customfield_11605" : {
"self" : "https://timsb.telechips.com:8443/rest/api/2/customFieldOption/12708",
"value" : "No",
"id" : "12708"
},
"customfield_11439" : null,
"customfield_10309" : null,
"customfield_10308" : null,
"customfield_10303" : null,
"customfield_10302" : null,
"customfield_11438" : null,
"customfield_10304" : null,
"customfield_11437" : "<html>\r\n<head>\r\n<style type=\"text/css\">\r\n\r\nbody { color:#1D56F1; font-size:10pt;}\r\n\r\n</style>\r\n</head>\r\n\r\n<script>\r\n\r\nvar blink_speed = 1000; var t = setInterval(function () { var ele = document.getElementById('blinker'); ele.style.visibility = (ele.style.visibility == 'hidden' ? '' : 'hidden'); }, blink_speed);\r\n\r\n\r\n</script>\r\n\r\n<body style=\"background-color:#E5EBF7\">\r\n\r\n<br><br>\r\n\r\n<span style=\"font-weight:bold\" id=\"blinker\">--------------------------------------------------------------Request for SOC failure analysis--------------------------------------------------------------</span><br>\r\n\r\n<br>\r\n</body>\r\n</html>",
"resolutiondate" : null,
"customfield_10203" : null,
"customfield_10204" : null,
"duedate" : null,
"customfield_10310" : null,
"customfield_10311" : null,
"customfield_10312" : null,
"customfield_10104" : null,
"watches" : {
"self" : "https://timsb.telechips.com:8443/rest/api/2/issue/IS011A-19/watchers",
"watchCount" : 1,
"isWatching" : true
},
"worklog" : {
"startAt" : 0,
"maxResults" : 20,
"total" : 0,
"worklogs" : []
},
"customfield_10106" : null,
"customfield_11440" : null,
"customfield_11441" : null,
"customfield_10103" : null,
"customfield_10102" : null,
"customfield_10101" : null,
"customfield_10100" : null,
"customfield_10202" : null,
"customfield_10201" : null,
"customfield_10200" : null,
"customfield_10501" : "",
"customfield_10500" : "",
"customfield_10704" : null,
"customfield_10705" : null,
"timeestimate" : null,
"customfield_10300" : null,
"customfield_10301" : null,
"customfield_11434" : null,
"customfield_11431" : null,
"customfield_11430" : "<html>\r\n<head>\r\n<style type=\"text/css\">\r\n\r\nbody { color:#1D56F1; font-size:10pt;}\r\n\r\n</style>\r\n</head>\r\n\r\n<script>\r\n\r\nvar blink_speed = 20000; var t = setInterval(function () { var ele = document.getElementById('blinker'); ele.style.visibility = (ele.style.visibility == 'hidden' ? '' : 'hidden'); }, blink_speed);\r\n\r\n\r\n</script>\r\n\r\n<body style=\"background-color:#E5EBF7\">\r\n\r\n <span style=\"font-weight:bold\"> [본문 필수 입력 항목(Required items)]</span><br>\r\n - 재현빈도(Occurrence Freq.),사전조건(Precondition),재현절차(Reproduction steps)<br>\r\n - 기대결과(Expected Result),실제결과(Actual Result),상세설명(Detailed Description)<br>\r\n ※ 비디오 파일 같은 자료도 첨부해주세요 (please attach a material like a video file)<br>\r\n</body>\r\n</html>",
"customfield_10801" : null,
"progress" : {
"progress" : 0,
"total" : 0
},
"timetracking" : {},
"updated" : "2021-04-14T22:35:51.681+0900",
"description" : "<p>본문에 이미지 삽입입니다.</p>\r\n\r\n<p><img alt=\"\" height=\"864\" src=\"https://timsb.telechips.com:8443/secure/attachment/44800/%EC%9D%B4%EB%AF%B8%EC%A7%80%201.png\" width=\"1245\" /></p>\r\n",
"priority" : {
"self" : "https://timsb.telechips.com:8443/rest/api/2/priority/4",
"iconUrl" : "https://timsb.telechips.com:8443/images/icons/priorities/minor.png",
"name" : "Minor",
"id" : "4"
},
"customfield_10001" : null,
"customfield_10003" : null,
"issuelinks" : [],
"customfield_10004" : null,
"customfield_10000" : null,
"subtasks" : [],
"customfield_10009" : null,
"status" : {
"self" : "https://timsb.telechips.com:8443/rest/api/2/status/4",
"description" : "This issue was once resolved, but the resolution was deemed incorrect. From here issues are either marked assigned or resolved.",
"iconUrl" : "https://timsb.telechips.com:8443/images/icons/statuses/reopened.png",
"name" : "Reopened",
"id" : "4",
"statusCategory" : {
"self" : "https://timsb.telechips.com:8443/rest/api/2/statuscategory/2",
"id" : 2,
"key" : "new",
"colorName" : "blue-gray",
"name" : "New"
}
},
"customfield_10006" : null,
"customfield_10005" : null,
"labels" : [],
"customfield_11502" : {
"self" : "https://timsb.telechips.com:8443/rest/api/2/customFieldOption/12609",
"value" : "IAR / FAR",
"id" : "12609"
},
"workratio" : -1,
"customfield_10313" : null,
"customfield_10314" : null,
"customfield_10319" : null,
"customfield_11702" : null,
"environment" : null,
"customfield_10014" : null,
"aggregateprogress" : {
"progress" : 0,
"total" : 0
},
"customfield_10015" : null,
"customfield_10012" : null,
"customfield_10013" : null,
"customfield_10010" : "",
"customfield_10011" : null,
"customfield_11100" : null,
"fixVersions" : [],
"security" : {
"self" : "https://timsb.telechips.com:8443/rest/api/2/securitylevel/10000",
"id" : "10000",
"description" : "Member",
"name" : "Level 1"
},
"resolution" : null,
"customfield_11102" : null,
"creator" : {
"self" : "https://timsb.telechips.com:8443/rest/api/2/user?username=timsadmin",
"name" : "timsadmin",
"emailAddress" : "gisa1@telechips.com",
"avatarUrls" : {
"16x16" : "https://timsb.telechips.com:8443/secure/useravatar?size=xsmall&avatarId=10122",
"24x24" : "https://timsb.telechips.com:8443/secure/useravatar?size=small&avatarId=10122",
"32x32" : "https://timsb.telechips.com:8443/secure/useravatar?size=medium&avatarId=10122",
"48x48" : "https://timsb.telechips.com:8443/secure/useravatar?avatarId=10122"
},
"displayName" : "TIMS Administrator",
"active" : true
},
"aggregatetimeoriginalestimate" : null,
"customfield_11500" : {
"self" : "https://timsb.telechips.com:8443/rest/api/2/customFieldOption/12602",
"value" : "General",
"id" : "12602"
},
"assignee" : {
"self" : "https://timsb.telechips.com:8443/rest/api/2/user?username=timsadmin",
"name" : "timsadmin",
"emailAddress" : "gisa1@telechips.com",
"avatarUrls" : {
"16x16" : "https://timsb.telechips.com:8443/secure/useravatar?size=xsmall&avatarId=10122",
"24x24" : "https://timsb.telechips.com:8443/secure/useravatar?size=small&avatarId=10122",
"32x32" : "https://timsb.telechips.com:8443/secure/useravatar?size=medium&avatarId=10122",
"48x48" : "https://timsb.telechips.com:8443/secure/useravatar?avatarId=10122"
},
"displayName" : "TIMS Administrator",
"active" : true
},
"attachment" : [
{
"self" : "https://timsb.telechips.com:8443/rest/api/2/attachment/44800",
"id" : "44800",
"filename" : "이미지 1.png",
"author" : {
"self" : "https://timsb.telechips.com:8443/rest/api/2/user?username=timsadmin",
"name" : "timsadmin",
"emailAddress" : "gisa1@telechips.com",
"avatarUrls" : {
"16x16" : "https://timsb.telechips.com:8443/secure/useravatar?size=xsmall&avatarId=10122",
"24x24" : "https://timsb.telechips.com:8443/secure/useravatar?size=small&avatarId=10122",
"32x32" : "https://timsb.telechips.com:8443/secure/useravatar?size=medium&avatarId=10122",
"48x48" : "https://timsb.telechips.com:8443/secure/useravatar?avatarId=10122"
},
"displayName" : "TIMS Administrator",
"active" : true
},
"created" : "2021-04-14T22:31:53.357+0900",
"size" : 53658,
"mimeType" : "image/png",
"content" : "https://timsb.telechips.com:8443/secure/attachment/44800/%EC%9D%B4%EB%AF%B8%EC%A7%80+1.png"
}
],
"aggregatetimeestimate" : null,
"versions" : [],
"customfield_10440" : null,
"aggregatetimespent" : null
}
},
"comment" : {
"self" : "https://timsb.telechips.com:8443/rest/api/2/issue/63008/comment/316211",
"id" : "316211",
"author" : {
"self" : "https://timsb.telechips.com:8443/rest/api/2/user?username=timsadmin",
"name" : "timsadmin",
"emailAddress" : "gisa1@telechips.com",
"avatarUrls" : {
"16x16" : "https://timsb.telechips.com:8443/secure/useravatar?size=xsmall&avatarId=10122",
"24x24" : "https://timsb.telechips.com:8443/secure/useravatar?size=small&avatarId=10122",
"32x32" : "https://timsb.telechips.com:8443/secure/useravatar?size=medium&avatarId=10122",
"48x48" : "https://timsb.telechips.com:8443/secure/useravatar?avatarId=10122"
},
"displayName" : "TIMS Administrator",
"active" : true
},
"body" : "<p>Comment 테스트 입니다.222222</p>\r\n",
"updateAuthor" : {
"self" : "https://timsb.telechips.com:8443/rest/api/2/user?username=timsadmin",
"name" : "timsadmin",
"emailAddress" : "gisa1@telechips.com",
"avatarUrls" : {
"16x16" : "https://timsb.telechips.com:8443/secure/useravatar?size=xsmall&avatarId=10122",
"24x24" : "https://timsb.telechips.com:8443/secure/useravatar?size=small&avatarId=10122",
"32x32" : "https://timsb.telechips.com:8443/secure/useravatar?size=medium&avatarId=10122",
"48x48" : "https://timsb.telechips.com:8443/secure/useravatar?avatarId=10122"
},
"displayName" : "TIMS Administrator",
"active" : true
},
"created" : "2021-04-14T22:35:51.673+0900",
"updated" : "2021-04-14T22:35:51.673+0900"
},
"timestamp" : NumberLong(1618407351683)
},
"prrameter" : {
"user_id" : "timsadmin",
"user_key" : "admin"
},
"date" : ISODate("2021-04-15T10:29:14.660Z")
} |
15,580 | 0c177473bc74a70856f960f0b78d6d27a93c2384 | # -*- coding: utf-8 -*-
# Generated by Django 1.10.3 on 2016-11-05 20:47
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Initial schema for the `gm` app (auto-generated by Django 1.10.3).

    Creates the catalogue models (Categoria, Materia, Programa, Tema),
    scheduling models (Curso, Sesion), people (Profesor) and sign-up
    models (RegistroMateria, Suscripciones), then their FKs and unique
    constraints. Auto-generated: do not hand-edit operation contents.
    """

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Categoria',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('nombre', models.CharField(max_length=100)),
            ],
            options={
                'ordering': ('nombre',),
                'db_table': 'categories',
                'verbose_name': 'Category',
                'verbose_name_plural': 'Categories',
            },
        ),
        migrations.CreateModel(
            name='Curso',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('nombre', models.CharField(max_length=200)),
                ('inicio', models.DateField(blank=True, null=True)),
                ('fin', models.DateField(blank=True, null=True)),
            ],
            options={
                'ordering': ('sesion', 'inicio'),
                'db_table': 'courses',
                'verbose_name': 'Course',
                'verbose_name_plural': 'Courses',
            },
        ),
        migrations.CreateModel(
            name='Materia',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('nombre', models.CharField(max_length=100)),
                ('descripcion', models.TextField(default=b'')),
                ('company', models.CharField(blank=True, max_length=100, null=True)),
                ('aniocreacion', models.IntegerField(blank=True, null=True)),
                ('ultimaversion', models.CharField(blank=True, max_length=100, null=True)),
                ('link', models.CharField(blank=True, max_length=100, null=True)),
                ('imagen1', models.FileField(blank=True, null=True, upload_to=b'imagenes1')),
                ('imagen2', models.FileField(blank=True, null=True, upload_to=b'imagenes2')),
                ('categoria', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='gm.Categoria')),
            ],
            options={
                'ordering': ('-nombre',),
                'db_table': 'subjects',
                'verbose_name': 'Subject',
                'verbose_name_plural': 'Subjects',
            },
        ),
        migrations.CreateModel(
            name='Profesor',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('nombres', models.CharField(max_length=200)),
                ('apellido1', models.CharField(max_length=200)),
                ('apellido2', models.CharField(blank=True, max_length=200, null=True)),
                ('cedula', models.CharField(blank=True, max_length=10, null=True)),
                ('email', models.CharField(blank=True, max_length=100, null=True)),
                ('twitter', models.CharField(blank=True, max_length=100, null=True)),
                ('linkedin', models.CharField(blank=True, max_length=100, null=True)),
            ],
            options={
                'ordering': ('apellido1', 'apellido2', 'nombres'),
                'db_table': 'teachers',
                'verbose_name': 'Teacher',
                'verbose_name_plural': 'Teachers',
            },
        ),
        migrations.CreateModel(
            name='Programa',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('titulo', models.CharField(max_length=200)),
                ('objetivo', models.TextField(blank=True, null=True)),
                ('horas', models.IntegerField(default=0)),
                ('materia', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='gm.Materia')),
                ('profesor', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='gm.Profesor')),
            ],
            options={
                'ordering': ('materia',),
                'db_table': 'programs',
                'verbose_name': 'Program',
                'verbose_name_plural': 'Programs',
            },
        ),
        migrations.CreateModel(
            name='RegistroMateria',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('persona', models.CharField(max_length=300)),
                ('email', models.CharField(max_length=200)),
                ('comentario', models.TextField(blank=True, null=True)),
                ('fecha', models.DateField()),
                ('hora', models.TimeField()),
                ('materia', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='gm.Materia')),
            ],
            options={
                'ordering': ('materia', 'fecha'),
                'db_table': 'subject_register',
                'verbose_name': 'Subject Register',
                'verbose_name_plural': 'Subjects Registers',
            },
        ),
        migrations.CreateModel(
            name='Sesion',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('nombre', models.CharField(max_length=100)),
                ('horainicio', models.TimeField()),
                ('horafin', models.TimeField()),
            ],
            options={
                'ordering': ('horainicio', 'horafin'),
                'db_table': 'sessions',
                'verbose_name': 'Session',
                'verbose_name_plural': 'Sessions',
            },
        ),
        migrations.CreateModel(
            name='Suscripciones',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('email', models.CharField(max_length=100)),
                ('fecha', models.DateField()),
                ('hora', models.TimeField()),
            ],
            options={
                'ordering': ('fecha', 'hora', 'email'),
                'db_table': 'newsletters',
                'verbose_name': 'Newsletter',
                'verbose_name_plural': 'Newsletters',
            },
        ),
        migrations.CreateModel(
            name='Tema',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('titulo', models.CharField(max_length=200)),
                ('descripcion', models.TextField()),
                ('horas', models.IntegerField(default=0)),
                ('programa', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='gm.Programa')),
            ],
            options={
                'ordering': ('programa',),
                'db_table': 'issues',
                'verbose_name': 'Issue',
                'verbose_name_plural': 'Issues',
            },
        ),
        # Constraints and remaining foreign keys, added after all tables exist.
        migrations.AlterUniqueTogether(
            name='sesion',
            unique_together=set([('nombre',)]),
        ),
        migrations.AlterUniqueTogether(
            name='profesor',
            unique_together=set([('cedula',)]),
        ),
        migrations.AddField(
            model_name='curso',
            name='programa',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='gm.Programa'),
        ),
        migrations.AddField(
            model_name='curso',
            name='sesion',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='gm.Sesion'),
        ),
        migrations.AlterUniqueTogether(
            name='categoria',
            unique_together=set([('nombre',)]),
        ),
        migrations.AlterUniqueTogether(
            name='programa',
            unique_together=set([('titulo', 'materia', 'profesor')]),
        ),
        migrations.AlterUniqueTogether(
            name='materia',
            unique_together=set([('nombre', 'categoria')]),
        ),
        migrations.AlterUniqueTogether(
            name='curso',
            unique_together=set([('nombre', 'programa')]),
        ),
    ]
|
15,581 | 6ddfa76fd9d132cfd402bc5507b1413e619c96a1 |
import numpy as np
import ur_data as ur
import goal_calc as goal
import cPickle as pickle
import force_calculator as f
'''
NOTES... Do i need all the offsetting calculations if
im only using relative move commands? I think
i could just add the offset just before the robot command
is sent.
Need to include some iterative function to get the next
iterative position.
'''
def state_calc():
    """Plan the robot's next incremental move toward the user-defined goal.

    Reads the current tool pose (ur_data), the goal position (goal_calc),
    converts their separation to polar form, checks the predicted bending
    force for the next 10 mm step (force_calculator), pickles the next
    incremental target for the robot controller, and returns the polar
    length and angle (in degrees) of that step.

    NOTE(review): Python 2 module (print statements, cPickle import).
    """
    print "Starting state_calc"
    ''' this code takes in the robot's current position and user defined goal position, and
    calculates the distance between them.
    it then breaks down that distance into intervals and returns the next intervale as an angle
    and distance which then gets imported into force_calculator.
    '''
    # Origin: orifice position in space (metres, robot base frame --
    # presumably; confirm units against ur_data).
    home_pos = {"x": 0.31329, "y": (-0.65148)}  # y changed from -0.75148 which is robot home_pos.
    # Import current robot position and add to dictionary pose.
    pose = {}
    pose["x"], pose["y"], pose["z"], pose["Rx"], pose["Ry"], pose["Rz"] = ur.where_now()
    # user defined current robot position (manual-entry fallback, disabled).
    #x_1 = float(raw_input("x_1 :"))
    #y_1 = float(raw_input("y_1 :"))
    # Create a dictionary of the current x,y position.
    current_pos = {"x_1": pose["x"], "y_1": pose["y"]}
    # Import user defined goal position.
    x, y = goal.goal_pos_calc()
    x_2 = x  # goal x position in robot's frame.
    y_2 = y
    print "x_2", x_2
    print "y_2", y_2
    # Create a dictionary of the goal x,y pos.
    goal_pos = {"x_2": x_2, "y_2": y_2}
    '''offset current robot pos x,y coordinates relative to orifice which is the home_pos -100mm in y.
    For ease of implimentation, the home position has been updated to include the -100mm offset in y,
    meaning the home_pos is in fact the orifice pos.
    '''
    current_pos["x_1"] = current_pos["x_1"] - home_pos["x"]
    current_pos["y_1"] = (current_pos["y_1"] - home_pos["y"])
    # Un-offset goal pos x,y coordinates relative to home_pos which is the origin.
    '''This is necessary because even though goal_calc already applide an offset
    to the goal_pos, the raw goal_pos is needed to do trajectory calculations.
    However, the robot controller will need the offsetted values to move.
    '''
    goal_pos["x_2"] = (goal_pos["x_2"] - home_pos["x"])
    goal_pos["y_2"] = (goal_pos["y_2"] - home_pos["y"])
    print "goal_pos:", goal_pos
    '''build new dict to find lengths of opposite and adjacent sides of triangle
    formed between current position and goal position. These are then used to
    find the hypotenuse which is the trajectory between the current and goal positions.
    '''
    dict_3 = {}
    dict_3["x_3"] = goal_pos["x_2"] - current_pos["x_1"]  # opposite
    dict_3["y_3"] = goal_pos["y_2"] - current_pos["y_1"]  # adjacent
    # Take absolute values for opp and adj to avoid quadrant math anomalies.
    dict_3["x_3"] = np.abs(dict_3["x_3"])
    dict_3["y_3"] = np.abs(dict_3["y_3"])
    print "dict_3", dict_3
    # Square the opposite and adjacent sides.
    x_3_sq = (dict_3["x_3"])**2  # adj^2
    y_3_sq = (dict_3["y_3"])**2  # opp^2
    # Calculate hypotenuse between current and goal pos.
    # This is needed in order to build a new triangle to feed back into the goal calc.
    polar_rad = np.sqrt(x_3_sq + y_3_sq)
    '''set dict items to variable because combining dicts
    and trig functions seemed to be causing a problem
    '''
    x = dict_3["x_3"]  # distance in x from current to goal pos.
    y = dict_3["y_3"]  # distance in y from current to goal pos.
    '''find the new angle in degrees to feed back into goal calc.
    '''
    # NOTE(review): arctan(x/y) divides by zero when y == 0 (goal directly
    # offset in x only) -- confirm that case cannot occur upstream.
    polar_angle = np.arctan(x/y)  # in radians.
    polar_angle = polar_angle * (180/np.pi)  # convert rad to deg.
    print "polar_rad =", polar_rad
    print "polar_angle", polar_angle
    # Segment the hypotenuse into 10mm intervals.
    interval = 0.01  # 10mm interval #polar_rad
    # There is no point working out the x,y if the force is too high!
    # Input angle,interval into force calc.
    force = f.force_calc(polar_angle, interval)
    print "force =", force
    #print "interval", interval
    # Send new angle and distance to goal_pos_calc to find new x,y interval.
    x, y = goal.goal_pos_calc(polar_angle, interval)
    # NOTE(review): the next two assignments are no-ops.
    x = x
    y = y
    print "x", x
    print "y", y
    '''what is the force at this new x,y iteration?
    '''
    # Build new dict containing the next move increment.
    # NOTE(review): adding the scalar interval to both x and y steps along
    # the 45-degree diagonal, not along polar_angle -- confirm intended.
    delta_x = pose["x"] + interval
    delta_y = pose["y"] + interval
    dict_delta = {"x": delta_x, "y": delta_y}
    #print "delta_x" , delta_x
    #print "delta_y" , delta_y
    # Subtract the delta poss' from origin??
    # Not needed as we calibrated our positions relative to the origin to begin with.
    # Square the deltas to find length of hyp (beam bend equ).
    delta_x_sq = delta_x**2
    delta_y_sq = delta_y**2
    print delta_x
    print delta_y
    # Find the polar coordinates for the next iterative position relative to origin.
    length = np.sqrt(delta_x_sq + delta_y_sq)
    print "length", length
    # Determine the cosine of the angle of the next move.
    cosin_angle = np.sqrt(delta_y_sq)/length
    # Determine the angle of the next move.
    angle = np.arccos(cosin_angle)
    print "angle", angle
    # Change angle from rad to deg.
    # NOTE(review): uses 3.142 instead of np.pi (slight precision loss).
    angle_deg = angle * (180/3.142)
    print "angle_deg", angle_deg
    #dict_delta = state_calc()
    print_out1 = dict_delta["x"]
    print_out_2 = dict_delta["y"]
    print "x iteration = ", print_out1
    print "y iteration = ", print_out_2
    print "Angle =", angle_deg
    print "Move distance", interval
    # Persist the next increment for the robot-controller process to pick up.
    with open('pickle_file.pickle', "wb") as file:
        pickle.dump(dict_delta, file, -1)
    print "End of state_calc"
    return length, angle_deg


if __name__ == '__main__':
    state_calc()
|
15,582 | 27c436c9d9c93fd7006845a277fcc851cdc5f603 | # coding: utf-8
import cgi
import time
def notfound_404(environ, start_response):
    """Fallback WSGI handler: emit a plain-text 404 response."""
    headers = [('Content-type', 'text/plain')]
    start_response('404 Not Found', headers)
    return [b'Not Found']
class PathDispatcher:
    """Minimal WSGI front controller.

    Maps (HTTP method, path) pairs to handler callables; unmatched
    requests fall through to ``notfound_404``.
    """

    def __init__(self):
        # (method, path) -> handler callable
        self.pathmap = {}

    def __call__(self, environ, start_response):
        """WSGI entry point: parse query/form parameters and dispatch."""
        path = environ['PATH_INFO']
        # FieldStorage parses both the query string and POST bodies.
        # NOTE(review): the cgi module is deprecated (removed in 3.13).
        params = cgi.FieldStorage(environ['wsgi.input'],
                                  environ=environ)
        method = environ['REQUEST_METHOD'].lower()
        # Expose a plain dict of parameters to downstream handlers.
        environ['params'] = {key: params.getvalue(key) for key in params}
        handler = self.pathmap.get((method, path), notfound_404)
        return handler(environ, start_response)

    def register(self, method, path, func):
        """Register *func* for (method, path); returns *func* so it can be used as a decorator."""
        self.pathmap[method.lower(), path] = func
        return func
_hello_resp = '''\
<html>
<head>
<title>Hello {name}</title>
</head>
<body>
<h1>Hello {name}!</h1>
</body>
</html>'''
def hello_world(environ, start_response):
    """WSGI handler: render the hello page, greeting the ``name`` parameter."""
    start_response('200 OK', [('Content-type', 'text/html')])
    name = environ['params'].get('name')
    body = _hello_resp.format(name=name)
    yield body.encode('utf-8')
_localtime_resp = '''\
<?xml version="1.0"?>
<time>
<year>{t.tm_year}</year>
<month>{t.tm_mon}</month>
<day>{t.tm_mday}</day>
<hour>{t.tm_hour}</hour>
<minute>{t.tm_min}</minute>
<second>{t.tm_sec}</second>
</time>'''
_upload_file = """
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<title>上传文件</title>
</head>
<body>
<form action="" method="post" enctype="multipart/form-data">
<table>
<tr>
<td>头像:</td>
<td><input type="file" name="pichead"></td>
</tr>
<tr>
<td>心情:</td>
<td><input type="text" name="desc"></td>
</tr>
<tr>
<td></td>
<td><input type="submit" value="提交"></td>
</tr>
</table>
</form>
</body>
</html>
"""
def localtime(*args):
    """WSGI handler: return the server's local time as an XML document."""
    start_response = args[1]
    start_response('200 OK', [('Content-type', 'application/xml')])
    body = _localtime_resp.format(t=time.localtime())
    yield body.encode('utf-8')
def upload(*args):
    """WSGI handler: serve the static file-upload form."""
    start_response = args[1]
    start_response('200 OK', [('Content-type', 'text/html')])
    yield _upload_file
def save_file(*args):
    """WSGI handler for the upload POST: acknowledge receipt.

    Fixes two protocol violations in the original: it never called
    ``start_response`` before yielding the body, and it yielded ``str``
    where WSGI (PEP 3333) requires ``bytes``.
    """
    environ, start_response = args[0], args[1]
    start_response('200 OK', [('Content-type', 'text/plain')])
    # TODO: actually persist the uploaded file from environ['params'].
    yield b'200 OK'
if __name__ == '__main__':
    from wsgiref.simple_server import make_server

    # Create the dispatcher and register handler functions.
    dispatcher = PathDispatcher()
    dispatcher.register('GET', '/hello', hello_world)
    dispatcher.register('GET', '/localtime', localtime)
    dispatcher.register('get', '/upload', upload)
    # NOTE(review): '/upload?' (with a literal '?') can never match
    # PATH_INFO, which excludes the query string -- confirm intent.
    dispatcher.register('post', '/upload?', save_file)
    # Launch a basic development server.
    httpd = make_server('', 8080, dispatcher)
    print('Serving on port 8080...')
    httpd.serve_forever()
|
15,583 | 78c672f4c14db52271db4ba03ce0d3f131eb700f | from rest_framework import serializers
from ..models import Board
class BoardSerializer(serializers.ModelSerializer):
    """DRF serializer for Board, looked up by its ``board`` field instead of pk."""

    # NOTE(review): presumably a property/annotation on Board -- confirm.
    last_num = serializers.ReadOnlyField()

    class Meta:
        model = Board
        # Internal/admin-only columns are kept out of the API payload.
        exclude = ('id', 'category', 'trip_required', 'is_hidden')
        lookup_field = 'board'
        extra_kwargs = {
            'url': {'lookup_field': 'board'}
        }
|
15,584 | 762021ca95d01b6f7a3a647f018115f22c06d6f0 | #!/usr/bin/python
#
# Copyright 2012 Ezox Systems, LLC
#
# This software is licensed. If a license was not provided with
# these files please contact Ezox Systems, LLC.
#
# Unless required by applicable law or agreed to in writing, this
# software is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
# OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under
# the License.
"""OpenID login and user setup handlers."""
import logging
import os
import sys
import webapp2
from google.appengine.ext import ndb
from webapp2_extras.security import check_password_hash
from config import webapp_config
from main import TemplateHandler
from sosbeacon.user import User
# Add lib to path.
libs_dir = os.path.join(os.path.dirname(__file__), 'lib')
if libs_dir not in sys.path:
logging.debug('Adding lib to path.')
sys.path.insert(0, libs_dir)
class LoginUserHandler(TemplateHandler):
    """Log a (non-admin) user in and resolve which school the session uses."""

    def get(self, *args, **kwargs):
        """Show the login form, or route an already-logged-in user onward."""
        # No 'u' session entry: not logged in yet, show the plain form.
        if not 'u' in self.session:
            self.render_user_login(is_loggedin=False, error="")
            return

        urlsafe = self.session.get('u')
        user_key = ndb.Key(urlsafe=urlsafe)
        user = user_key.get()

        school = self.session.get('s')
        # School already chosen (or there is only one): go straight home.
        if len(user.schools) == 1 or school:
            self.redirect("/")
            return
        # Several schools and none chosen yet: ask the user to pick one.
        if len(user.schools) > 1:
            schools = [school_key.get() for school_key in user.schools]
            self.render_user_login(is_loggedin = True, schools = schools)
            return
        self.render_user_login(is_loggedin = True, schools = None, error="You don't have any schools!.")

    def post(self, *args, **kwargs):
        """Validate credentials, then pick/confirm the session's school."""
        if not 'u' in self.session:
            email = self.request.POST['email']
            password = self.request.POST['password']
            user = User.query(ndb.AND(User.email == email),
                              namespace = '_x_')
            # Same generic error for unknown email, admin account and bad
            # password, so the form does not leak which addresses exist.
            if user.get() is None:
                self.render_user_login(is_loggedin = False, error='Email or Password is wrong!.')
                return
            if user.get().is_admin:
                self.render_user_login(is_loggedin = False, error='Email or Password is wrong!.')
                return
            else:
                if check_password_hash(password, user.get().password):
                    # Fresh session (timezone preserved) before logging in.
                    self.delete_session()
                    self.set_current_user(user)
                else:
                    self.render_user_login(is_loggedin = False, error='Email or Password is wrong!.')
                    return

        user_key = self.session.get('u')
        user = ndb.Key(urlsafe=user_key).get()
        school_length = len(user.schools)
        # Check schools that the user was assigned.
        if school_length == 1:
            school_key = user.schools[0]
            school_key = school_key.get().key.urlsafe()
            self.set_current_school(school_key)
            self.redirect("/")
            return
        if school_length == 0:
            self.render_user_login(is_loggedin = False, error="You don't have any schools!. Please contact with admin for this reason.")
            self.delete_session()
            return
        # Multiple schools and no choice submitted yet: show the picker.
        if school_length > 1 and 'school' not in self.request.POST:
            schools = [school_key.get() for school_key in user.schools]
            self.render_user_login(is_loggedin = True, schools=schools)
            return
        school_key = self.request.POST['school']
        self.set_current_school(school_key)
        self.redirect("/")

    def set_current_user(self, user):
        """Store the logged-in user's key in the session under 'u'."""
        self.session['u'] = user.get().key.urlsafe()

    def set_current_school(self, school_key):
        """Store the school chosen at login in the session under 's'."""
        self.session['s'] = school_key

    def render_user_login(self, **context):
        # Render the login template with the given context into the response.
        out = self.render(template_name='login.mako', **context)
        self.response.out.write(out)

    def delete_session(self):
        """Delete every session entry except the timezone ('tz')."""
        for key in self.session.keys():
            if key != 'tz':
                del self.session[key]
class LogoutUserHandler(TemplateHandler):
    """Log out and delete all session entries except the session timezone."""

    def get(self, *args, **kwargs):
        # Keep 'tz' so the UI stays in the user's timezone after logout.
        if self.session:
            for key in self.session.keys():
                if key != 'tz':
                    del self.session[key]
        self.redirect("/authentication/login")
class LoginAdminHandler(TemplateHandler):
    """Admin login: same flow as the user login but restricted to is_admin accounts."""

    def get(self):
        # Show the form unless an admin session ('ad') already exists.
        if not 'ad' in self.session:
            self.render_admin_login(error="")
        else:
            self.redirect("/admin")

    def post(self, *args, **kwargs):
        """Validate admin credentials and establish the 'ad' session."""
        if not 'ad' in self.session:
            email = self.request.POST['email']
            password = self.request.POST['password']
            # Only accounts flagged is_admin may pass this handler.
            user = User.query(ndb.AND(User.email == email,
                                      User.is_admin == True),
                              namespace = '_x_')
            # Generic error so the form does not leak which emails exist.
            if user.get() is None:
                self.render_admin_login(error='Email or Password is wrong!.')
                return
            else:
                if check_password_hash(password, user.get().password):
                    self.setup_admin_session(user)
                else:
                    self.render_admin_login(error='Email or Password is wrong!.')
                    return
        self.redirect("/admin")

    def setup_admin_session(self, user):
        """Store the logged-in admin's key in the session under 'ad'."""
        self.session['ad'] = user.get().key.urlsafe()

    def render_admin_login(self, **context):
        # Render the admin login template into the response.
        out = self.render(template_name='admin_login.mako', **context)
        self.response.out.write(out)
class LogoutAdminHandler(TemplateHandler):
    """Admin logout: clear the whole session (timezone included) and re-show login."""

    def get(self, *args, **kwargs):
        """Logout and delete all session entries."""
        for key in self.session.keys():
            del self.session[key]
        self.redirect("/admin/authentication/login")
# Route table: (path, handler) pairs for the authentication endpoints.
url_map = [
    # ('/_ah/login_required', LoginHandler),
    ('/authentication/login', LoginUserHandler),
    ('/authentication/logout', LogoutUserHandler),
    ('/admin/authentication/login', LoginAdminHandler),
    ('/admin/authentication/logout', LogoutAdminHandler),
]

# WSGI application consumed by App Engine.
app = webapp2.WSGIApplication(
    url_map,
    config=webapp_config)
|
15,585 | 46d89f7448094a5d6ec018eb94680346c0d97fd7 | from random import *
# Boarding simulation: 50 customers, each needing a random 5..50 minutes;
# only those needing 5..15 minutes board ([0]), everyone else is skipped.
count = 0
for i in range(1, 51):
    minutes = randrange(5, 51)  # required time in minutes
    if 5 <= minutes <= 15:
        print("[0] {0}번째 손님 (소요시간 : {1}분)".format(i, minutes))
        count += 1
    else:
        # Fixed: the original message literal was missing its closing ')'.
        print("[] {0}번째 손님 (소요시간 : {1}분)".format(i, minutes))
# Fixed: the summary is a passenger count, so the unit is 명 (people), not 분.
print("총 탑승 승객 : {0}명".format(count))
|
15,586 | 5c8187997e2b63b18b83781bee2b669d41e8179e | from django.apps import AppConfig
class JournalBackendConfig(AppConfig):
    """Django application configuration for the journal_backend app."""

    name = 'journal_backend'
|
15,587 | 1b8034bedef98ebbc51aaed83acf0609241c70f3 | # -*- coding: utf-8 -*-
"""Legal start web scraper test."""
import json
import argparse
import requests
import lxml.html
from scraper import ItemParser, Scraper
INITIAL_URL = "https://yolaw-tokeep-hiring-env.herokuapp.com/"
class WebScraper(object):
    """LegalStart web scraper: follows the page chain described by a JSON rule file."""

    # Key (as int) of the first rule/page in the rules JSON.
    _initial_action = 0

    def process(self, json_path, initial_url=INITIAL_URL, location="local"):
        """process the data.

        Walks pages starting at *initial_url*, parsing each response with
        the rule matching the current page, until a page fails to parse.

        :param json_path, String
        :param initial_url, String
        :param location, String -- NOTE(review): unused here; rules are
            always loaded locally (get_json_data suggests a remote mode
            was planned).
        """
        rules = self.__get_local_json_data(json_path)
        item_parser = ItemParser(rules[str(self._initial_action)])
        scraper = Scraper(auth="auth", user="Thumb",
                          passw="Scraper",
                          initial_url=initial_url)
        response = scraper.start_request()
        current_page = self._initial_action
        while True:
            scraped_item = scraper.parse_item(response, item_parser)
            # A parse failure means the "next page" link was tampered with.
            if scraped_item is None:
                print("ALERT - Can’t move to page {prev_page}: page {current_page} link has been malevolently tampered with!!".format(
                    prev_page=item_parser.get_next_parser(), current_page=current_page
                ))
                break
            print ("Move to page {current_page}".format(current_page=current_page))
            next_parser = scraped_item['next_parser']
            next_url = scraped_item['next_url']
            item_parser = ItemParser(rules[next_parser])
            response = scraper.start_request(url=next_url)
            current_page = next_parser

    def get_json_data(self, json_path, location):
        # Dispatch on location; the remote branch is not implemented yet.
        if location == 'web':
            return self.__get_remote_json_data(json_path)
        return self.__get_local_json_data(json_path)

    def __get_local_json_data(self, json_path):
        # Load the scraping rules from a local JSON file.
        with open(json_path) as js_data:
            return json.load(js_data)

    def __get_remote_json_data(self, json_path):
        # TODO: fetch rules over HTTP (requests is imported for this).
        pass
def get_parameters():
    """Check the script parameters and return a dict with the values.

    :return: Dict with the parsed parameters. Ex:
        {
        json_path: /home/user/test/test.json,
        location: web
        }
    """
    parser = argparse.ArgumentParser(
        prog='python web_scraper.py',
        description='web scraper test for legal start.')
    parser.add_argument('json_path', help='Path to the json file')
    parser.add_argument(
        '-l', '--location',
        choices=["web", "local"],
        required=True,
        help='If the path is a local file or a web url')
    parser.add_argument(
        '-i', '--initial_url',
        default=INITIAL_URL,
        help="Initial url")
    return vars(parser.parse_args())
if __name__ == "__main__":
params = get_parameters()
WebScraper().process(**params)
|
15,588 | 50e3f4605bc374f2c6be290805d5160f063a0cc6 | """Annotations for codepaths that deal with security in some way."""
from typing import Callable, TypeVar

X = TypeVar("X")


def secure_fn(f: Callable[..., X]) -> Callable[..., X]:  # type: ignore
    """Tag *f* as security-relevant (auth, crypto, ...); a runtime no-op."""
    return f


def secure_class(cls):  # type: ignore
    """Tag *cls* as security-relevant (auth, crypto, ...); a runtime no-op."""
    return cls
|
15,589 | 9b319bf27c30537d4848d7329ac70566cbe22f05 | import pyspark as ps
from pyspark.sql import functions as F
from pyspark.sql.types import StringType, DoubleType, IntegerType, StructType, StructField, ArrayType
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
# !pip install geopy
import folium
from geopy.geocoders import Nominatim
import time
pd.set_option('display.max_columns', None)
pd.set_option('display.float_format', lambda x: '%.5f' % x)
spark = (ps.sql.SparkSession
.builder
.master('local[4]')
.appName('lecture')
.getOrCreate()
)
sc = spark.sparkContext
park = spark.read.csv('../data/Parking_Violations_Issued_FY_2019.csv',
header=True,
inferSchema=True)
park2 = (park
.drop('Violation In Front Of Or Opposite',
'Violation Post Code',
'To Hours In Effect',
'From Hours In Effect',
'Intersecting Street',
'Violation Description',
'Violation Legal Code',
'Meter Number',
'Unregistered Vehicle?',
'Time First Observed',
'No Standing or Stopping Violation',
'Hydrant Violation',
'Double Parking Violation')
.na.drop()
)
park3 = park2.filter((park2['Violation Code'].isin(21,38,14,20,46)))
park_sample = park3.sample(False, 0.01).toPandas()
park_sample['Address'] = park_sample['House Number'] + ' ' + park_sample['Street Name'] + ' NYC'
addr = pd.read_csv('../data/address.csv')
def unpack(row):
    """Split a geocoded row's ``location`` (lat, lon) pair into named keys.

    Returns ``{'lat': ..., 'long': ...}``, or ``np.nan`` when the address
    could not be geocoded (``location`` missing, None, or too short).
    """
    try:
        return {'lat': row['location'][0], 'long': row['location'][1]}
    except (KeyError, IndexError, TypeError):
        # Narrowed from a bare ``except``: only "location absent/malformed"
        # is expected here; any other error should surface.
        return np.nan
coord = pd.concat([addr, addr.apply(unpack, axis=1, result_type='expand')], axis=1) |
15,590 | e245f5e21f7abe35b3377d7a19132e4e8107731b | #!/bin/python3
import math
import os
import random
import re
import sys
#least = sys.minsize
least = -1
def updateRange(lazy, arr, tree, node, start, end, l, r, value):
    """Lazily add *value* to every element of the range [l..r] (inclusive).

    *tree* is a max segment tree over [start..end]; *lazy* holds pending
    additions that have not yet been pushed down to children.
    """
    # Push any pending addition for this node before using it.
    if lazy[node] != 0:
        tree[node] = tree[node] + lazy[node]
        if start != end:
            lazy[2 * node + 1] = lazy[2 * node + 1] + lazy[node]
            lazy[2 * node + 2] = lazy[2 * node + 2] + lazy[node]
        lazy[node] = 0
    # No overlap with [l..r]: nothing to do.
    if start > end or l > end or r < start:
        return
    # Total overlap: apply here, defer the children via lazy marks.
    if l <= start and end <= r:
        tree[node] = tree[node] + value
        if end != start:
            lazy[2 * node + 1] = lazy[2 * node + 1] + value
            lazy[2 * node + 2] = lazy[2 * node + 2] + value
        return
    # Partial overlap: recurse and recombine the max.
    mid = (start + end) // 2
    updateRange(lazy, arr, tree, 2 * node + 1, start, mid, l, r, value)
    updateRange(lazy, arr, tree, 2 * node + 2, mid + 1, end, l, r, value)
    tree[node] = max(tree[2 * node + 1], tree[2 * node + 2])


def queryRange(lazy, arr, tree, node, start, end, l, r):
    """Return the maximum over [l..r], honouring pending lazy additions."""
    if start > end or start > r or end < l:
        return 0
    # Push pending additions before reading this node.
    if lazy[node] != 0:
        tree[node] = tree[node] + lazy[node]
        if start != end:
            lazy[2 * node + 1] = lazy[2 * node + 1] + lazy[node]
            lazy[2 * node + 2] = lazy[2 * node + 2] + lazy[node]
        lazy[node] = 0
    if start >= l and end <= r:
        return tree[node]
    mid = (start + end) // 2
    p1 = queryRange(lazy, arr, tree, 2 * node + 1, start, mid, l, r)
    p2 = queryRange(lazy, arr, tree, 2 * node + 2, mid + 1, end, l, r)
    return max(p1, p2)


def arrayManipulation(n, m, queries):
    """HackerRank "Array Manipulation": max value after *m* range additions.

    Each query (a, b, k) adds k to positions a..b (1-based, inclusive) of
    an initially-zero array of length *n*; returns the final maximum.
    (Debug prints and the dead ``maximum`` variable were removed.)
    """
    arr = [0] * n
    # Segment tree capacity: 2^(floor(log2(n)) + 2) - 1 slots is enough.
    height = int(math.log(n, 2)) + 2
    size = int(math.pow(2, height) - 1)
    tree = [0] * size
    lazy = [0] * size
    for a, b, value in queries[:m]:
        # Convert 1-based inclusive [a, b] to 0-based inclusive [a-1, b-1].
        updateRange(lazy, arr, tree, 0, 0, n - 1, a - 1, b - 1, value)
    return queryRange(lazy, arr, tree, 0, 0, n - 1, 0, n - 1)
# if __name__ == '__main__':
# fptr = open(os.environ['OUTPUT_PATH'], 'w')
inputFile = open('inputFile', 'r')
nm = inputFile.readline().split()
n = int(nm[0])
m = int(nm[1])
queries = []
for _ in range(m):
queries.append(list(map(int, inputFile.readline().rstrip().split())))
print(n)
print(m)
print(queries[0])
print('----')
result = arrayManipulation(n, m, queries)
print(result)
# fptr.write(str(result) + '\n')
# fptr.close()
|
15,591 | 35141644b1cffa9a04bb5ba6d11de0f8dd28b936 | # -*- coding: utf-8 -*-
from atividade3 import Tres

''' Exercício – Ciclo 2 - Conceitos da Linguagem e Python POO
Módulos, Classes e Métodos
Atividade 3
Faça um programa que leia 3 números inteiros e mostre o menor deles. '''

# Exercise - Cycle 2, Activity 3 (translated): read three integers and
# print the smallest; the logic lives in atividade3.Tres.
if __name__ == '__main__':
    # Activity 03
    Tres.executa()
|
15,592 | 1606c047002eaf0b0387304df3016334f0e88897 | from selenium import webdriver
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium.common.exceptions import NoSuchElementException, TimeoutException
from utils.input import inputWPM
from utils.characters import wordCount, charCount
from utils.delay import timeDelay
import time
# Target typing speed requested by the user (words per minute).
wpm = inputWPM()

driver = webdriver.Chrome()
driver.get('https://play.typeracer.com/?universe=insane')
assert "TypeRacer" in driver.title

wait = WebDriverWait(driver, 10)
# "Practice" link (deep XPath into the TypeRacer UI).
practice = wait.until(EC.element_to_be_clickable((By.XPATH, '//*[@id="dUI"]/table/tbody/tr[2]/td[2]/div/div[1]/div/table/tbody/tr[3]/td/table/tbody/tr/td[2]/table/tbody/tr[1]/td/a')))
practice.click()

# Panel containing the excerpt the player must type.
html = wait.until(EC.presence_of_element_located((By.CLASS_NAME, 'inputPanel')))
excerpt = html.find_element_by_css_selector('tbody > tr:nth-child(1) > td > table > tbody > tr:nth-child(1) > td > div')
print("Text: " + excerpt.text)
print("Words: " + str(wordCount(excerpt.text)))
print("Chars: " + str(charCount(excerpt.text)))

# Per-word pause needed to hit the requested WPM for this excerpt.
delay = timeDelay(wpm, wordCount(excerpt.text), charCount(excerpt.text))
print(delay)

textInput = wait.until(EC.element_to_be_clickable((By.XPATH, '/html/body/div[1]/div/div[1]/div[2]/table/tbody/tr[2]/td[2]/div/div[1]/table/tbody/tr[3]/td/div/div/table/tbody/tr[2]/td[3]/table/tbody/tr[2]/td/input')))
# Send the excerpt character by character, pausing after each space to
# pace the typing. NOTE(review): the loop variable is a single character,
# not a word, despite its name.
for word in excerpt.text:
    textInput.send_keys(word)
    if word == " ":
        time.sleep(delay)
15,593 | c1446abee33c505423bac43a9425b27d446bd42e | """
a simple implementation of the circular linked list
"""
class Node:
    """Singly linked node."""

    __slots__ = '_element', '_next'

    def __init__(self, element, next):
        self._element = element
        self._next = next


class CircularLinkedList:
    """Simple circular singly linked list; new values are appended at the tail."""

    def __init__(self):
        self._size = 0
        self._tail = Node(None, None)  # placeholder until the first add()

    def is_empty(self):
        """Return True when the list holds no elements."""
        return self._size == 0

    def add(self, value):
        """Append *value* after the current tail and make it the new tail."""
        node = Node(value, None)
        if self.is_empty():
            node._next = node  # a single node wraps to itself
        else:
            node._next = self._tail._next  # new tail wraps to the head
            self._tail._next = node
        self._tail = node
        self._size += 1

    def show(self):
        """Debug print: elements head..tail followed by the head again."""
        if self.is_empty():
            print("Empty linked list")
            return
        print("head => tail => head")
        cursor = self._tail._next
        while cursor is not self._tail:
            print(cursor._element, end=' ')
            cursor = cursor._next
        print(cursor._element, end=' ')        # the tail itself
        print(cursor._next._element, end=' ')  # wrap back to the head
        print()
if __name__ == '__main__':
q = CircularLinkedList()
for i in range(20): q.add(i)
q.show()
|
15,594 | c0a4f105b85d6909dee7891c8b22bfff244c186c | from poium.common import logging
from poium.common.exceptions import CSSFindElementError
from selenium.common.exceptions import JavascriptException
class CSSElement(object):
"""
Only CSS selectors are supported.
Please see help: http://www.w3school.com.cn/cssref/css_selectors.asp
>> from page_objects import Page, CSSElements
>> class MyPage(Page):
input = CSSElements('.s_ipt')
button = CSSElements('#su')
"""
driver = None
def __init__(self, css: str, index: int = 0, describe: str = ""):
self.css = css
self.index = str(index)
self.desc = describe
def __get__(self, instance, owner):
if instance is None:
return None
global driver
driver = instance.driver
return self
def _execute_javascript(self, js):
"""
Run the javascript script
"""
try:
return driver.execute_script(js)
except JavascriptException:
raise CSSFindElementError("Element discovery failure. ", js)
def clear(self) -> None:
"""
JavaScript API
Clears the text if it's a text entry element, Only support css positioning
"""
logging.info(f"Clear input field. {self.desc}")
js = f"""var elm = document.querySelectorAll("{self.css}")[{self.index}];
elm.style.border="2px solid red";
elm.value = "";"""
self._execute_javascript(js)
def get_text(self, i: int = None) -> str:
"""
JavaScript API
Get element text content.
:param i: index
"""
if i is None:
i = self.index
else:
i = str(i)
logging.info(f"get text. {self.desc}")
js = f"""return document.querySelectorAll("{self.css}")[{i}].textContent;"""
return self._execute_javascript(js)
def set_text(self, value: str) -> None:
"""
JavaScript API
Simulates typing into the element.
:param value: input text
"""
logging.info(f"set text. {self.desc}")
js = f"""var elm = document.querySelectorAll("{self.css}")[{self.index}];
elm.style.border="2px solid red";
elm.value = "{value}";"""
self._execute_javascript(js)
def click(self) -> None:
"""
JavaScript API
Click element.
"""
logging.info(f"click element. {self.desc}")
js = f"""var elm = document.querySelectorAll("{self.css}")[{self.index}];
elm.style.border="2px solid red";
elm.click();"""
self._execute_javascript(js)
def click_display(self) -> None:
"""
JavaScript API
Click on the displayed element, otherwise skip it.
"""
logging.info(f"Click on the displayed element. {self.desc}")
js = 'var elm = document.querySelector("' + self.css + '");' \
' if(elm != null){elm.style.border="2px solid red";elm.click();}'
self._execute_javascript(js)
def display(self) -> None:
"""
JavaScript API
Display hidden elements
"""
logging.info(f"display hidden element. {self.desc}")
js = f"""var elm = document.querySelectorAll("{self.css}")[{self.index}];
elm.style.display = "block";"""
self._execute_javascript(js)
def remove_attribute(self, attribute) -> None:
"""
JavaScript API
Remove element attribute
:param attribute:
"""
logging.info(f"remove element attribute. {self.desc}")
js = f"""var elm = document.querySelectorAll("{self.css}")[{self.index}];
elm.removeAttribute("{attribute}");"""
self._execute_javascript(js)
def set_attribute(self, attribute, value) -> None:
"""
JavaScript API
Setting element attribute
:param attribute:
:param value:
"""
logging.info(f"setting element attribute. {self.desc}")
js = f"""var elm = document.querySelectorAll("{self.css}")[{self.index}];
elm.setAttribute("{attribute}", "{value}");
"""
self._execute_javascript(js)
def clear_style(self) -> None:
"""
JavaScript API
Clear element styles.
"""
logging.info(f"clear element styles. {self.desc}")
js = f"""var elm = document.querySelectorAll("{self.css}")[{self.index}];
elm.style="";"""
self._execute_javascript(js)
def clear_class(self) -> None:
"""
JavaScript API
Clear element class
"""
logging.info(f"clear element class. {self.desc}")
js = f"""var elm = document.querySelectorAll("{self.css}")[{self.index}];
elm.removeAttribute("class");"""
self._execute_javascript(js)
def inner_text(self, text) -> None:
"""
JavaScript API
The innerText property sets the text content of the specified element, Only support css positioning
:param text: Inserted text
"""
logging.info(f"inner text. {self.desc}")
js = f"""var elm = document.querySelectorAll("{self.css}")[{self.index}];
elm.innerText="{text}";"""
self._execute_javascript(js)
def remove_child(self, child: int = 0) -> None:
"""
JavaScript API
Remove a node from the child node list
:param child: child of the child node
"""
logging.info(f"Remove a node from the child node list. {self.desc}")
js = f"""var elm = document.querySelectorAll("{self.css}")[{self.index}];
elm.removeChild(elm.childNodes[{child}]);"""
self._execute_javascript(js)
def click_parent(self) -> None:
"""
JavaScript API
Click the parent element of the element
"""
logging.info(f"click the parent element of the element. {self.desc}")
js = f"""var elm = document.querySelectorAll("{self.css}")[{self.index}];
elm.parentElement.click();"""
self._execute_javascript(js)
def scroll(self, top=0, left=0) -> None:
"""
JavaScript API
Scroll the div element on the page
"""
logging.info(f"scroll the div element on the page. {self.desc}")
if top != 0:
js = f"""var elm = document.querySelectorAll("{self.css}")[{self.index}];
elm.scrollTop={top};"""
self._execute_javascript(js)
if left != 0:
js = f"""var elm = document.querySelectorAll("{self.css}")[{self.index}];
elm.scrollLeft={left};"""
self._execute_javascript(js)
def move_to(self) -> None:
"""
JavaScript API
Move the mouse over the element
"""
logging.info(f"Move the mouse over the element. {self.desc}")
js = f"""var elm = document.querySelectorAll("{self.css}")[{self.index}];
elm.dispatchEvent(new Event("mouseover"));"""
self._execute_javascript(js)
@property
def value(self) -> str:
"""
JavaScript API
Gets the value of the element.
"""
logging.info(f"get element value. {self.desc}")
js = f"""return document.querySelectorAll("{self.css}")[{self.index}].value;"""
return self._execute_javascript(js)
|
15,595 | 36c2b40a9345b92c188d1276014575d3aab40161 | # # https://blog.csdn.net/qq_38595487/article/details/79686081
class Solution:
# def climbStairs_memo(self, n, ls):
# if ls[n] is not None:
# return ls[n]
# elif n == 1:
# return 1
# elif n == 2:
# return 2
# res = self.climbStairs_memo(n - 1, ls) + self.climbStairs_memo(n - 2, ls)
# ls[n] = res
# return ls[n]
#
# def climbStairs(self, n):
# ls = [None] * (n + 1)
# return self.climbStairs_memo(n, ls)
#
#
def no_loop_climbStairs(self, n):
if n == 1:
return 1
elif n == 2:
return 2
ls = [None] * (n + 1)
ls[1] = 1
ls[2] = 2
for i in range(3, n + 1):
ls[i] = ls[i - 1] + ls[i - 2]
return ls[n]
#
#
from functools import lru_cache


@lru_cache(maxsize=None)
def _ways(n):
    """Ways to climb *n* steps taking 1 or 2 at a time (0 for n < 1)."""
    if n == 1:
        return 1
    elif n == 2:
        return 2
    elif n < 1:
        return 0
    return _ways(n - 1) + _ways(n - 2)


class Solution2:
    """Memoized recursive climbing-stairs solver."""

    def climbStairs(self, n):
        """Return the number of distinct ways to climb *n* steps.

        The cache lives on a module-level helper instead of the method, so
        it does not key on (and keep alive) ``self`` instances — the
        classic ``lru_cache``-on-a-method leak.
        """
        return _ways(n)
if __name__ == '__main__':
sol2 = Solution2()
assert sol2.climbStairs(2) == 2
assert sol2.climbStairs(3) == 3
assert sol2.climbStairs(4) == 5
|
15,596 | f842543b25a0bf938a9f5855f91399b9220cf4a6 | from typing import Tuple, Dict, List
def preprocess_text(text: str) -> Tuple[List[str], Dict]:
    """Clean and tokenize *text* ahead of feature creation.

    Intended to return the token list together with a meta dict describing
    the preprocessing performed (tokens removed, original length, ...).

    Not implemented yet.
    """
    raise NotImplementedError
15,597 | b7702ad1549a7cb98ceb506822017af98631bbbe | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
import time
import openpyxl
import subprocess
# SCRIPT set up with incrontab to run every time the XLSX is modified and closed. ## NE MARCHE PAS AVEC NTFS, utiliser crontab plutot
# /media/n06lbth/sauvegardes_pgm/checkMut_requests.xlsx IN_CLOSE_WRITE python .../scripts/checkMut_requests.py
# sudo service incron restart
pipeline_folder = os.environ['NGS_PIPELINE_BX_DIR']
# IF CHECKMUT-REQUESTS ALREADY RUNNING, QUIT
p = subprocess.Popen(['pgrep', '-f', 'your_script.py'], stdout=subprocess.PIPE)
out, err = p.communicate()
# print len(out.strip())
# print out
# print err
if len(out.strip()) > 1: # if another instance of this script is running, value would be 2 (other instance + this instance)
print "checkMut requests is already running"
exit()
# START
# Parse the shared XLSX request sheet: each row describes one checkMut request.
wb = openpyxl.load_workbook('/media/n06lbth/sauvegardes_pgm/checkMut_requests.xlsx')
ws = wb['checkMut']
checkMut_path = '%s/checkMut/checkMut.py' % pipeline_folder
print "parsing file..."
# Rows start at 10; rows above are presumably headers/instructions -- confirm
# against the spreadsheet layout.
for row_idx in range(10, ws.max_row+1):
    # Columns 1-6: request parameters typed in by users.
    run_path = ws.cell(row=row_idx,column=1).value
    gene = ws.cell(row=row_idx,column=2).value.upper().replace(' ','')
    cpos = ws.cell(row=row_idx,column=3).value.replace(' ','')
    run_type = ws.cell(row=row_idx,column=4).value.replace(' ','')
    min_sample = ws.cell(row=row_idx,column=5).value
    use_processed = ws.cell(row=row_idx,column=6).value
    if min_sample != None:
        # Excel stores numbers as floats; normalise to an integer string for argv.
        min_sample = str(int(min_sample))
    # Column 7 holds the processing state; only untouched rows (state None) run.
    state = ws.cell(row=row_idx,column=7).value
    if (run_path and gene and cpos and run_type) and (state is None): # (state != 'OK')
        # Convert the Windows UNC path pasted by users to the local mount point.
        run_folder = run_path.replace('\\','/') # e.g. input was: \\ZISILON01\N06lbth\sauvegardes_pgm\SBT\Run_500-599\Auto_user_S5...
        run_folder = run_folder.split('sauvegardes_pgm/')[-1]
        run_folder = '/media/n06lbth/sauvegardes_pgm/%s' % run_folder
        print "Running checkMut with param:"
        print " - run_folder : %s" % run_folder
        print " - gene : %s" % gene
        print " - cpos : %s" % cpos
        print " - run_type : %s" % run_type
        print " - min_sample : %s" % min_sample
        # Build the checkMut command line; optional flags only when provided.
        cmd_args = ['python',checkMut_path,'--run-folder',run_folder,'--gene',gene,'--cpos',cpos,'--run-type',run_type]
        if min_sample != None:
            cmd_args.append('--min-sample')
            cmd_args.append(str(min_sample))
        if use_processed != None:
            if use_processed.lower() == 'true':
                cmd_args.append('--use-processed')
        print ' '.join(cmd_args)
        cmd = subprocess.Popen(cmd_args,stdout=subprocess.PIPE, stderr=subprocess.PIPE)
        stdout, stderr = cmd.communicate()
        # Success is detected by the PNG figure checkMut is expected to write.
        fig_path = '%s/_checkMut/%s_%s.png' % (run_folder,gene,cpos.replace('>','_'))
        if os.path.exists(fig_path):
            state = 'OK'
            # Store a Windows-style relative hyperlink to the figure in column 8.
            link = fig_path.replace('/media/n06lbth/sauvegardes_pgm/','').replace('/','\\')
            ws.cell(row=row_idx,column=8).hyperlink = link
            ws.cell(row=row_idx,column=8).value = 'résultat'
            ws.cell(row=row_idx,column=8).style = 'Hyperlink'
        else:
            # No figure: mark the row failed and keep the subprocess output
            # for debugging (columns 9 and 10).
            state = 'error'
            ws.cell(row=row_idx,column=9).value = stdout
            ws.cell(row=row_idx,column=10).value = stderr
        ws.cell(row=row_idx,column=7).value = state
# Save with retries: the shared workbook may be locked by another user.
write_success = False
for i in range(6):
    try:
        wb.save('/media/n06lbth/sauvegardes_pgm/checkMut_requests.xlsx')
        write_success = True
        break
    except Exception as e:
        # NOTE(review): e.message is Python-2-only; the message says 10sec but
        # the sleep is 5s -- confirm the intended retry interval.
        print e.message
        print "- retrying in 10sec..."
        time.sleep(5)
if not write_success :
    print "Cannot write file after 6 try in 30 sec. Aborting."
|
15,598 | f2ecbe9b4409acbf7f1a0f2c97993eae07f85b47 | from pitftgpio import PiTFT_GPIO
pitft = PiTFT_GPIO()
while True:
if pitft.Button1:
print "Button 1 pressed - screen off"
#pitftf.Backlight(False)
if pitft.Button2:
print "Button 2 pressed - screen on"
#pitft.Backlight(True)
if pitft.Button3:
print "Button 3 pressed"
if pitft.Button4:
print "Button 4 pressed" |
15,599 | 97e9978c8b7b8bbaee3533e21460a1fca2a274ab | import cv2
from picamera2 import Picamera2
import time
# Configure the camera for a 1280x720 preview stream at 30 fps.
picam2=Picamera2()
dispW = 1280
dispH = 720
picam2.preview_configuration.main.size=(dispW,dispH)
# NOTE(review): format "RGB888" -- confirm the delivered channel order matches
# OpenCV's BGR expectation before relying on colours.
picam2.preview_configuration.main.format="RGB888"
picam2.preview_configuration.controls.FrameRate=30
picam2.preview_configuration.align()
picam2.configure("preview")
picam2.start()
# Settings for the FPS counter overlaid on each frame.
fps=0
pos=(30,60)
font=cv2.FONT_HERSHEY_SIMPLEX
height=1.5
myColor=(0,0,255)
weight=3
while True:
    tStart=time.time()
    frame=picam2.capture_array()
    # Take the top-left quadrant as the Region of Interest (a view, not a copy)...
    ROI=frame[0:int(dispH/2),0:int(dispW/2)]
    # ...and copy it into the other three quadrants, tiling the frame 2x2.
    frame[int(dispH/2):,int(dispW/2):] = ROI
    frame[:int(dispH/2),int(dispW/2):] = ROI
    frame[int(dispH/2):,:int(dispW/2)] = ROI
    cv2.putText(frame,str(int(fps))+' FPS',pos,font,height,myColor,weight)
    cv2.imshow("picam2",frame)
    cv2.imshow('ROI', ROI)
    # Press 'q' in the preview window to quit.
    if cv2.waitKey(1)==ord('q'):
        break
    tEnd=time.time()
    loopTime=tEnd-tStart
    # Exponential moving average smooths the displayed frame rate.
    fps=.9*fps +.1*(1/loopTime)
    #print(int(fps))
cv2.destroyAllWindows()
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.