text stringlengths 38 1.54M |
|---|
from flask import Flask
# FIX: the flask.ext.* namespace was removed in Flask 1.0; import the extension package directly.
from flask_sqlalchemy import SQLAlchemy

app = Flask(__name__)
app.debug = True  # NOTE(review): debug should be off / config-driven in production
app.config.from_object('config')
db = SQLAlchemy(app)

# Rotating-file logging, enabled only outside debug mode (kept disabled for reference):
#if not app.debug:
#    import logging
#    from logging.handlers import RotatingFileHandler
#    error = RotatingFileHandler('tmp/error.log', 'a', 1 * 1024 * 1024, 10)
#    error.setFormatter(logging.Formatter('%(asctime)s %(levelname)s: %(message)s [in %(pathname)s:%(lineno)d]'))
#    app.logger.setLevel(logging.INFO)
#    error.setLevel(logging.INFO)
#    app.logger.addHandler(error)
#    app.logger.info('Jane - syncing with SkyNet')

# imported last on purpose: the views/models need `app` and `db` to exist first
from app.core import api_views, web_views, models  # noqa: E402
|
#!/usr/bin/env python3
""" Python typing annotations """
from typing import Sequence, Union, Any, TypeVar, Mapping

T = TypeVar('T', bound=Any)


def safely_get_value(dct: Mapping,
                     key: Any,
                     default: Union[T, None] = None) -> Union[Any, T]:
    """Generic, exception-free mapping lookup: dct[key] if present, else default."""
    try:
        return dct[key]
    except KeyError:
        return default
|
class SwaggerDocumentationError(Exception):
    """Raised when package tests fail."""
class CaseError(Exception):
    """Raised when items are not cased correctly."""
class OpenAPISchemaError(Exception):
    """Raised for invalid schema specifications."""
|
import numpy as np
import glob
import pandas as pd
import os
import time
from tqdm.auto import tqdm
from .misc import dfMirror
########################################################################################################################
# it's best to use asciiToDfMulti() (which exploits this asciiToDf()) also for single file opening
def asciiToDf(
    nameFormat,
    asciiMap,
    nLinesEv = 1,
    descFrac = 1,
    mirrorMap = (),  # a tuple here, but a dictionary in asciiToDfMulti() (i.e. the "main" function)
    bVerbose = False,
    bProgress = False,
):
    """Open ASCII data file(s) matching nameFormat into one DataFrame.

    nameFormat may contain "YYYYYY" as a wildcard placeholder; nLinesEv joins
    every nLinesEv physical lines into one event row; descFrac keeps only a
    fraction of the rows; mirrorMap is forwarded to dfMirror().
    Returns (df, dt) where dt is the elapsed time in seconds.
    """
    t0 = time.time()  # chronometer start
    names = sorted(glob.glob(nameFormat.replace("YYYYYY", "*")))  # all the filenames of the current run
    # clamp the descaling fraction into (0, 1]
    descFrac = 1e-12 if descFrac <= 0 else min(descFrac, 1)
    frames = []
    iterable = tqdm(names) if (bVerbose and bProgress) else names
    for iName in iterable:
        if os.stat(iName).st_size == 0:
            continue  # skip empty files: np.loadtxt would fail on them
        if nLinesEv == 1:
            dataTableTemp = np.loadtxt(iName, unpack=False, ndmin=2)
        else:
            # join every nLinesEv physical lines into a single event line
            # FIX: close the file handle (the original leaked it)
            with open(iName, 'r') as inFile:
                rawLines = inFile.read().splitlines()
            joined = []
            for i, iLine in enumerate(rawLines):
                sep = "\n" if (i % nLinesEv == nLinesEv - 1) else " "
                joined.append(iLine + sep)
            dataTableTemp = np.loadtxt("".join(joined).splitlines())
        dfTemp = pd.DataFrame(dataTableTemp, columns=asciiMap)
        # keep only one row out of every 1/descFrac
        frames.append(dfTemp[dfTemp.index % int(1 / descFrac) == 0])
    # FIX: DataFrame.append was removed in pandas 2.0 (and was quadratic);
    # collect the per-file frames and concatenate once.
    df = pd.concat(frames, ignore_index=True, sort=False) if frames else pd.DataFrame()
    df = dfMirror(df, mirrorMap)
    t1 = time.time()  # chronometer stop
    return df, t1 - t0
########################################################################################################################
def asciiToDfMulti(
    nameFormat,
    fileIndex,
    asciiMap,
    fileIndexName = "iIndex",
    nLinesEv = 1,
    descFrac = None,
    mirrorMap = None,
    bVerbose = False,
    bProgress = False,
):
    """Open ASCII data files for several file indices and concatenate them.

    nameFormat contains an "XXXXXX" placeholder replaced by each entry of
    fileIndex. descFrac maps index -> descaling fraction (missing entries
    default to 1); mirrorMap maps index -> variables passed to dfMirror().
    Returns (df, dt) where dt is the elapsed time in seconds.
    """
    t0 = time.time()  # chronometer start
    # FIX: the original used mutable default arguments ({}) — shared between
    # calls — and mutated the caller's descFrac dict in place; copy defensively.
    descFrac = dict(descFrac) if descFrac else {}
    mirrorMap = mirrorMap if mirrorMap else {}
    frames = []
    for i, iIndex in enumerate(sorted(fileIndex)):
        if iIndex not in descFrac:
            descFrac[iIndex] = 1  # all the undefined descaling factors are trivially set to 1
        if bVerbose:
            print("(%d/%d) %s -- descaling fraction: %14.12f" % (i+1, len(fileIndex), iIndex, descFrac[iIndex]))
        dfTemp, _ = asciiToDf(nameFormat.replace("XXXXXX", iIndex), asciiMap, nLinesEv, descFrac[iIndex], bVerbose=bVerbose, bProgress=bProgress)
        # data mirroring according to mirrorMap, which differs from iLayer to iLayer
        if iIndex in mirrorMap:
            if bVerbose:
                print("mirroring (from mirror map given) "+str(mirrorMap[iIndex]))
            dfTemp = dfMirror(dfTemp, mirrorMap[iIndex])
        else:
            if bVerbose:
                print("no variables to mirror")
        # fileIndexName column creation (if requested & not already existing)
        if len(fileIndexName) > 0:
            if bVerbose:
                print("%s also added to df" % fileIndexName)
            if fileIndexName not in dfTemp.columns:
                dfTemp[fileIndexName] = str(iIndex)
            else:
                dfTemp[fileIndexName] = dfTemp[fileIndexName].astype(str)
        frames.append(dfTemp)
    # FIX: DataFrame.append was removed in pandas 2.0; concatenate once instead.
    df = pd.concat(frames, ignore_index=True, sort=False) if frames else pd.DataFrame()
    t1 = time.time()  # chronometer stop
    return df, t1 - t0
|
from datetime import datetime
import os
#reinvertimos la fecha
def inv_fecha(fech):
    """Convert a 'dd/mm/yyyy' date string into ISO 'yyyy-mm-dd' form."""
    return datetime.strptime(fech, '%d/%m/%Y').strftime('%Y-%m-%d')
#se hace el calculo del Bono1 y Bono2
def pagar(basesueldo, h, v1, v2):
    """Compute the salary plus a bonus chosen by weekly overtime hours `h`.

    Up to 5 hours the general bonus v1 applies; above that, the efficiency
    bonus v2. Returns the resulting salary as a float.
    """
    base = float(basesueldo)
    if int(h) <= 5:
        return base + base / 0.05 * float(v1)
    return base + base / 1.5 * float(v2)
#datos a solicitar para calcular los bonos segun horas extras semanales
# Ask for the weekly-overtime bonus values used by pagar()
B1 = input('Intoduzca bono General:')
B2 = input('Intoduzca bono Eficiencia:')
# FIX: use `with` for both files so handles are closed even on error
# (the original opened the output file manually and leaked it on exceptions).
with open("pago.txt", "r") as pago, open("pago_nomina_12sep2021.py", "w") as fichero:
    for linea in pago:
        campo = linea.split()
        if campo[0].isdigit():
            # data row: reformat the date and recompute the salary
            fech = inv_fecha(campo[1])
            sueld = pagar(campo[4].replace("$", ""), campo[5], B1, B2)
        else:
            # header row: copy fields through unchanged
            fech = campo[1]
            sueld = campo[3]
        fichero.write(campo[0] + " ")
        fichero.write(fech + " ")
        fichero.write(campo[2] + " ")
        fichero.write(campo[3] + " ")
        fichero.write(campo[4] + " ")
        if campo[0].isdigit():
            fichero.write(campo[5] + " ")
            fichero.write(str(sueld))
            fichero.write(os.linesep)
        else:
            fichero.write(os.linesep)
|
import sys
import numpy as np
import cv2
import qnn_utils
from enum import Enum
from termcolor import colored, cprint
np.set_printoptions(threshold=sys.maxsize)  # print full arrays in the debug dumps
# ruler used by print_debug() to sketch value ranges on a number line
NUMBER_LINE = '├━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━┤'
# Option
use_ReLU = True  # NOTE(review): not referenced in this chunk — confirm before relying on it

class Inference(Enum):
    """Numeric mode selector for the inference pipeline."""
    FP32 = 1
    INT8 = 2

debug_option = {'print_all_layer': True}  # debug printing switches
def softmax(x):
    """Compute softmax values for each set of scores in x (normalised over axis 0)."""
    exps = np.exp(x)
    return exps / exps.sum(axis=0)
def print_debug(layer: str, min: float, max: float, scale: float):
    """
    Print debug

    Prints a quantisation summary table plus two number-line sketches: the
    float range [min, max] and the quantised range [min/scale, max/scale].
    :param str layer: layer name
    :param float min: tensor minimum (note: shadows the builtin `min` here)
    :param float max: tensor maximum (note: shadows the builtin `max` here)
    :param float scale: quantisation scale
    :return: None
    """
    print('|-----------|---------|---------|---------|---------|---------|---------|')
    print('| Layer | min | max | scale | 1/scale | min * S | max * S |')
    print('|-----------|---------|---------|---------|---------|---------|---------|')
    print('| {0:9} | {1:7.2} | {2:7.2} | {3:7.2} | {4:7.4} | {5:7.4} | {6:7.4} |'.format(layer, min, max, scale, 1 / scale, min / scale, max / scale))
    print('|-----------|---------|---------|---------|---------|---------|---------|')
    print('')
    # float-range sketch; the middle positional argument (0) is skipped by the
    # format string on purpose ({1} is never referenced)
    print('┏ {0: <30}{2: >30} ┒'.format(min, 0, max))
    print(NUMBER_LINE)
    print('')
    # integer (quantised) range sketch
    quantized_min = round(min / scale)
    quantized_max = round(max / scale)
    print('┏ {0: <30}{1: >30} ┒'.format(quantized_min, quantized_max))
    print(NUMBER_LINE)
    print('')
def get_scale(l: list, num_bits: int):
    """Return the quantisation scale: the value range of `l` over 2**num_bits levels."""
    lo = np.min(l)
    hi = np.max(l)
    return (hi - lo) / (2 ** num_bits)
def quantization(path: str, num_bits: int=8, use_zp: bool=False):
    """
    Quantization

    Loads a weight tensor from an .npy file and quantises it to int8.
    :param str path: ndarray file path
    :param int num_bits: bits
    :param bool use_zp: use zero point
    :returns:
        - tensor - the original FP32 ndarray
        - quantized_tensor - int8 quantised ndarray
        - scale - quantisation scale
        - zero_point - int8 zero point (0 when use_zp is False)
    """
    # FC1 Weight
    tensor = np.load(path)
    # NOTE(review): the '[fc1w]' label is hard-coded even though `path` may be
    # any tensor (FC2/FC3 are also loaded through this function) — confirm intent.
    print(' [fc1w] Shape : {} / dtype : {}'.format(tensor.shape, tensor.dtype))
    # Max, Min, Scale
    temp_max = np.max(tensor)
    temp_min = np.min(tensor)
    # signed quantisation: the value range spread over 2**(num_bits-1) levels
    scale = (temp_max - temp_min) / (2 ** (num_bits - 1))
    # Print Debug
    if __debug__:
        print_debug('L1', temp_min, temp_max, scale)
    if use_zp is True:
        # offset so that temp_min maps near zero after scaling
        zero_point = 0 - (temp_min // scale)
        zero_point = zero_point.astype(np.int8)
    else:
        zero_point = 0
    """
    Encoding zero point
    example)
    [-3, -1, 5] -> +3(-min) -> [3, 2, 8]
    [ 2, 4, 5] -> -2(-min) -> [0, 2, 3]
    """
    quantized_tensor = (tensor // scale) + zero_point
    quantized_tensor = quantized_tensor.astype(np.int8)
    return tensor, quantized_tensor, scale, zero_point
def print_debug_hex(a, b, c):
    """Pretty-print the top-left 6 columns of a * b = c as hex byte matrices.

    Only 2-D inputs are supported; arrays are reinterpreted byte-wise via
    view(np.uint8) before printing, so multi-byte dtypes show raw bytes.
    """
    if a.ndim != 2 or b.ndim != 2 or c.ndim != 2:
        print('Only 2 dim')
        return
    # reinterpret the underlying buffers as unsigned bytes (no copy)
    a_uint8 = a.view(dtype=np.uint8)
    b_uint8 = b.view(dtype=np.uint8)
    c_uint8 = c.view(dtype=np.uint8)
    a = a_uint8
    b = b_uint8
    c = c_uint8
    # ASCII-art template: 'xxx' slots take shapes, 'xx' slots take hex bytes
    msg = []
    msg.append('       xxx                         xxx                          xxx ')
    msg.append('     ┌──────────────────────┐    ┌──────────────────────┐     ┌──────────────────────┐')
    msg.append('     │ xx xx xx xx xx xx .. │    │ xx xx xx xx xx xx .. │     │ xx xx xx xx xx xx .. │')
    msg.append('     │ xx xx xx xx xx xx .. │  * │ xx xx xx xx xx xx .. │  =  │ xx xx xx xx xx xx .. │')
    msg.append('     │ xx xx xx xx xx xx .. │    │ xx xx xx xx xx xx .. │     │ xx xx xx xx xx xx .. │')
    msg.append('     │ xx xx xx xx xx xx .. │    │ xx xx xx xx xx xx .. │     │ xx xx xx xx xx xx .. │')
    msg.append('     │ .. .. .. .. .. .. .. │    │ .. .. .. .. .. .. .. │     │ .. .. .. .. .. .. .. │')
    msg.append(' xxx └──────────────────────┘ xxx└──────────────────────┘ xxx └──────────────────────┘')
    val = [a, b, c]
    for row in range(len(msg)):
        # shape slots become {:03} placeholders, filled by the .format calls below
        msg[row] = msg[row].replace('xxx', '{:03}')
        for v in range(3):
            temp = val[v]
            for k in range(6):
                # rows 2..7 of the template map to matrix rows 0..5
                if 1 < row and row - 2 < temp.shape[0] and k < temp.shape[1]:
                    msg[row] = msg[row].replace('xx', '{:02x}'.format(temp[row-2][k]), 1)
                else:
                    msg[row] = msg[row].replace('xx', '--', 1)  # out of bounds
    print(msg[0].format(a.shape[1], b.shape[1], c.shape[1]))
    print(msg[1])
    print(msg[2])
    print(msg[3])
    print(msg[4])
    print(msg[5])
    print(msg[6])
    print(msg[7].format(a.shape[0], b.shape[0], c.shape[0]))
def _matmul(a, b):
    """Matrix-multiply a @ b, print debug views, then requantise the result.

    NOTE: this is not a plain matmul — the product is divided by a per-tensor
    scale derived from its own min/max and cast to int before returning.
    """
    print(a)  # raw input dump (debug)
    if __debug__:
        a_shape = a.shape
        b_shape = b.shape
        # promote vectors to 1-row matrices for shape reporting only
        if a.ndim <= 1:
            a_shape = (1, a.shape[0])
        if b.ndim <= 1:
            b_shape = (1, b.shape[0])
        c_shape = (a_shape[0], b_shape[1])
        print('A shape :{} dtype: {}'.format(a_shape, a.dtype))
        print('B shape :{} dtype: {}'.format(b_shape, b.dtype))
        print('   {0:5}               {1:5}                {2:5}'.format(a_shape[1], b_shape[1], c_shape[1]))
        print('    ┌───────────────┐   ┌────────────────┐    ┌─── ───  ─  ────┐')
        print('    │ {:02x} {:02x} {:02x} ...   │   │ {:02x} {:02x} {:02x} ...    │    │ {:02x} {:02x} {:02x} ...     │'.format(10, 20, 30, 40, 50, 60, 100, 101, 102))
        print('    │ {:02x} {:02x} {:02x} ...   │ * │ {:02x} {:02x} {:02x} ...    │ =  │ {:02x} {:02x} {:02x} ...     │'.format(10, 20, 30, 10, 20, 30, 110, 111, 112))
        print('    │ {:02x} {:02x} {:02x} ...   │   │ {:02x} {:02x} {:02x} ...    │    │ {:02x} {:02x} {:02x} ...     │'.format(11, 12, 13, 14, 15, 16, 120, 121, 122))
        print('    │ .. .. .. ...   │   │ .. .. .. ...    │    │ .. .. .. ...     │')
        print(' {:4} └───────────────┘ {:4} └────────────────┘ {:4} └───────── ─  ──┘'.format(a_shape[0], b_shape[0], c_shape[0]))
        print('')
    # matmul
    ret = np.matmul(a, b)
    print_debug_hex(a, b, ret)
    if __debug__:
        print('C shape :{} dtype: {}'.format(ret.shape, ret.dtype))
        # FIX: the builtin min()/max() iterate a 2-D array row by row and raise
        # "truth value is ambiguous" for more than one row; use the ndarray
        # reductions, which also work for any shape.
        print('min : {} max : {}'.format(ret.min(), ret.max()))
        print('C :', ret[:5])
    # Get scale from the actual output range
    temp_scale = get_scale([ret.min(), ret.max()], 8)
    print('temp_scale :', temp_scale, 1/temp_scale)
    # assumes ret is 2-D (row 0 sampled for display) — TODO confirm
    print(colored('FC1 out', 'green', 'on_yellow'), colored(ret[0][:5], 'cyan'))
    ret = ret / temp_scale
    print(colored('* scale', 'green', 'on_yellow'), colored(ret[0][:5], 'cyan'))
    ret = ret.astype(int)
    print(colored('ast int', 'green', 'on_yellow'), colored(ret[0][:5], 'cyan'))
    return ret
def inference(path: str, inference_mode=None):
    """Run the 3-layer MLP on a grayscale image file and return the argmax digit.

    :param str path: image file path, read as grayscale and flattened to 1x784
    :param inference_mode: Inference.FP32 or Inference.INT8; INT8 also dumps
        the quantised weights to ./bin/*.bin and runs on the int8 weights
    :return: predicted class index (int)
    """
    use_zp = False
    inference_scale_resize = True  # NOTE(review): not used below — confirm intent
    inp = cv2.imread(path, cv2.IMREAD_GRAYSCALE)
    # assumes a 28x28 input image -> 784 features; TODO confirm
    inp = inp.reshape(1, 784)
    inp = inp.astype(np.int32)
    print('inp shape :', inp.shape)
    # Load tensor (FP32 weights + int8 quantised copies, per layer)
    fc1w, quantized_fc1w, fc1w_scale, fc1w_zp = quantization('mnist_dkdk_FP32_20170708_v1/FC1.npy', use_zp=use_zp)
    fc2w, quantized_fc2w, fc2w_scale, fc2w_zp = quantization('mnist_dkdk_FP32_20170708_v1/FC2.npy', use_zp=use_zp)
    fc3w, quantized_fc3w, fc3w_scale, fc3w_zp = quantization('mnist_dkdk_FP32_20170708_v1/FC3.npy', use_zp=use_zp)
    if __debug__:
        print('fc1w (FP32) :', colored(fc1w[0][:5], 'red'))
        print('fc1w (INT8) :', colored(quantized_fc1w[0][:5], 'red'))
        print('fc1w scale :', colored(fc1w_scale, 'red'))
        print('fc2w scale :', colored(fc2w_scale, 'red'))
        print('fc3w scale :', colored(fc3w_scale, 'red'))
        print('fc1w zp :', colored(fc1w_zp, 'red'))
        print('fc1w - zp :', colored((quantized_fc1w[0][:5] - fc1w_zp), 'red'))
        print('fc1w - zp * s :', colored((quantized_fc1w[0][:5] - fc1w_zp) * fc1w_scale, 'red'))
    if inference_mode == Inference.INT8:
        # dump the quantised weights for the embedded/hardware path
        qnn_utils.ndarray_to_bin(quantized_fc1w, './bin/FC1.bin')
        qnn_utils.ndarray_to_bin(quantized_fc2w, './bin/FC2.bin')
        qnn_utils.ndarray_to_bin(quantized_fc3w, './bin/FC3.bin')
        # zero point calibration (decoding): switch to the int8 weights
        fc1w = quantized_fc1w
        fc2w = quantized_fc2w
        fc3w = quantized_fc3w
        if use_zp:
            fc1w -= fc1w_zp
            fc2w -= fc2w_zp
            fc3w -= fc3w_zp
    # FC1
    temp = _matmul(inp, fc1w)
    temp = np.maximum(0, temp)  # ReLU
    # FC2
    temp = _matmul(temp, fc2w)
    temp = np.maximum(0, temp)  # ReLU
    # FC3 (logits)
    temp = _matmul(temp, fc3w)
    print(temp)
    from scipy.special import softmax
    temp = softmax(temp)
    print(temp)
    result = np.argmax(temp)
    print(result)
    return result
if __name__ == '__main__':
    test_type = Inference.INT8  # Inference.INT8 or Inference.FP32
    # run inference on the ten digit test images and check each prediction
    for i in range(10):
        ret = inference('test_image/{}.png'.format(i), inference_mode=test_type)
        print(colored('{} {}'.format(ret, i), 'blue'))
        assert ret == i  # NOTE(review): assert is stripped under `python -O`
        print(ret)
|
from googlesearch import search
from threading import Thread
from bs4 import BeautifulSoup
from clint.textui import progress
import urllib2
import requests
import sys
import os
import time
import itertools
# set to 1 by the worker thread to stop the spinner below
flag = 0

def progess():
    """Console spinner shown while searching; runs until the global `flag` is set.

    (Name kept as-is — the main block starts a thread targeting `progess`.)
    """
    global flag
    sys.stdout.write("Searching...")
    for c in itertools.cycle('/-\|'):
        sys.stdout.write('\r\t\t' + c)
        sys.stdout.flush()
        time.sleep(0.2)
        if flag:
            break
def play_song(file_path):
    """Ask the user and, on yes, play the file via the `play` shell command.

    Recurses until a Y/N answer is given. (Python 2 code: raw_input.)
    """
    reply = raw_input("Do you want to play the song?(Y/N)")
    if reply == 'Y' or reply == 'y':
        # quote the path so spaces survive the shell
        cli = 'play ' + '"' + file_path + '"'
        os.system(cli)
    elif reply == 'N' or reply == 'n':
        return
    else:
        # unrecognised answer: ask again
        play_song(file_path)
def parse_url(url):
    """Split a URL into its components after the scheme: [host, seg1, seg2, ...]."""
    after_scheme = url.split('//')
    return after_scheme[1].split('/')
def download_song(download_url):
    """Stream download_url into ~/Music with a progress bar.

    Also sets the global `flag` so the spinner thread stops.
    Returns (file_path, file_name).
    """
    file_name = download_url.split('/')[-1]
    music_path = os.path.expanduser('~') + "/Music"
    file_path = os.path.join(music_path, file_name)
    r = requests.get(download_url, stream=True)
    with open(file_path, 'wb') as f:
        global flag
        flag = 1  # tell the spinner thread to stop
        sys.stdout.write("\t[DONE]\n")
        total_length = int(r.headers.get('content-length'))
        for chunk in progress.bar(r.iter_content(
                chunk_size=1024), expected_size=(total_length / 1024) + 1):
            if chunk:
                f.write(chunk)
                f.flush()
    return file_path, file_name
def get_download_link(song_url, keyword):
    """Fetch song_url and return the first .mp3 href whose anchor text contains keyword.

    Returns None when no matching link is found. (Python 2 code: urllib2.)
    """
    hdr = {'User-Agent': 'Mozilla/5.0'}
    req = urllib2.Request(song_url, headers=hdr)
    response = urllib2.urlopen(req)
    soup = BeautifulSoup(response, "lxml")
    # print soup.prettify()
    for link in soup.find_all('a'):
        # FIX: the original called Tag.find(keyword), which searches for child
        # *tags* and returns a Tag/None — never -1 — so the test matched every
        # anchor. Search the anchor's text instead.
        if link.get_text().find(keyword) != -1:
            download_url = link.get('href')
            # guard against anchors without an href attribute
            if download_url and download_url.split('.')[-1] == 'mp3':
                return download_url
    return
def google_search(search_term, keyword):
    """Google for search_term + keyword[0] and return the first matching hit.

    A hit matches when its host equals keyword[1] and its first path segment
    contains keyword[2]. Returns the URL, or None when nothing matches.
    """
    search_term = search_term + keyword[0]
    song_url = ''
    for url in search(search_term, lang='en', stop=1):
        components = parse_url(url)
        # print(url)
        if components[0] == keyword[1] and components[1].find(
                keyword[2]) != -1:
            song_url = url
            return song_url
    return None
def search_song(search_term):
    """Try the known mirror sites in order; return (song_url, link_keyword).

    The keyword is the anchor text used later to locate the download link.
    Exits the process (after stopping the spinner) when no source matches.
    """
    # mp3mad source currently disabled:
    # song_url_mp3mad = google_search(
    #     search_term, [
    #         " mp3 download mp3mad", "mp3mad.site", "download"])
    song_url_mp3mad = None
    song_url_pagal = google_search(
        search_term, [
            " mp3 download pagalworld", "pagalworld.info", "kbps"])
    # song_url_pagal = None
    # mr-jatt source currently disabled:
    # song_url_jatt = google_search(
    #     search_term, [
    #         " mp3 download mr jatt", "mr-jatt.com", "download"])
    song_url_jatt = None
    if song_url_mp3mad:
        return song_url_mp3mad, "Download In High Quality"
    if song_url_pagal:
        return song_url_pagal, "[ Download File ]"
    if song_url_jatt:
        return song_url_jatt, "Download in 128 kbps"
    else:
        global flag
        flag = 1  # stop the spinner thread before exiting
        sys.stdout.write("\tFailed !!\n")
        sys.exit()
def start(search_term):
    """End-to-end flow: find the song page, extract the mp3 link, download, play."""
    song_url, keyword = search_song(search_term)
    download_url = get_download_link(song_url, keyword)
    file_path, file_name = download_song(download_url)
    sys.stdout.write(file_name + " saved to Music !!\n")
    play_song(file_path)
if __name__ == '__main__':
    try:
        # spinner thread + worker thread (search -> download -> play);
        # the search term comes from the command line
        t1 = Thread(target=progess, args=())
        t1.start()
        t2 = Thread(target=start, args=(sys.argv[1],))
        t2.start()
    except BaseException:
        # NOTE(review): BaseException also swallows KeyboardInterrupt/SystemExit
        # (and missing sys.argv[1] IndexError) — consider narrowing.
        print("Error: unable to start thread")
|
import sys
from PyQt5.QtWidgets import QApplication,QMainWindow
from handn import *
class HanyuForm(QMainWindow, Ui_MainWindow):
    """Main window combining QMainWindow with the Designer-generated Ui_MainWindow
    (imported from handn)."""
    def __init__(self, parent=None):
        super(HanyuForm, self).__init__(parent)
        self.setupUi(self)  # build the widgets defined by the generated UI class
if __name__ == '__main__':
    # QtCore presumably comes from handn's wildcard import — TODO confirm
    QtCore.QCoreApplication.setAttribute(QtCore.Qt.AA_EnableHighDpiScaling)
    app = QApplication(sys.argv)
    hanyu = HanyuForm()
    hanyu.show()
    sys.exit(app.exec_())
# -*- coding: utf-8 -*-
# @PRODUCTION MODULE
__author__ = "radek.augustyn@email.cz"
import time
import psycopg2
import pygeotoolbox.sharedtools.log as log
import pygeotoolbox.sharedtools.config as config
from pygeotoolbox.sharedtools import setParameters, Container
from base import DatabaseTemplate
config.registerValue("database.onCreateDatabaseSQL", "create extension postgis;", "Database EPSG Code", "Database EPSG Code")
class Database(DatabaseTemplate):
    """PostgreSQL/PostGIS access layer built on psycopg2 (Python 2 codebase).

    NOTE(review): SQL is assembled via %-interpolation from configuration and
    internal values; do not pass untrusted input into these methods.
    """

    def disconnect(self):
        """Close and drop the cached connection, if one exists."""
        # FIX: self.__connection is name-mangled to _Database__connection, so
        # the original hasattr(self, "__connection") could never succeed and
        # connections were never actually closed.
        if getattr(self, "_Database__connection", None) is not None:
            self.__connection.close()
            self.__connection = None

    @property
    def connectionParams(self):
        """Build the psycopg2 DSN string from the shared configuration."""
        import pygeotoolbox.sharedtools.config as config
        items = []
        for key, value in {
            "dbname": config.database.name,
            "user": config.database.user,
            "password": config.database.password
        }.iteritems():
            if value:
                items.append("%s=%s" % (key, value))
        # FIX: replaced the legacy `<>` operator (removed in Python 3) with `!=`.
        if config.database.host != "localhost":
            items.append("host=%s" % config.database.host)
        return " ".join(items)

    @property
    def connection(self):
        """Lazily open (and cache) the psycopg2 connection."""
        # FIX: mangled-name check; the original hasattr(self, "__connection")
        # was always false, so every access opened a brand new connection.
        if getattr(self, "_Database__connection", None) is None:
            self.__connection = psycopg2.connect(self.connectionParams)
        return self.__connection

    @staticmethod
    def extractSchemaAndTableName(tableSpec):
        """Split "schema.table" into (schema, tableName); default schema when missing."""
        assert isinstance(tableSpec, basestring)
        delPos = tableSpec.find(".")
        if delPos >= 0:
            schema = tableSpec[:delPos]
            tableName = tableSpec[delPos+1:]
        else:
            schema = Database.getDefaultSchemaIfNotAssigned("")
            tableName = tableSpec
        return schema, tableName

    def getTableNames(self, schema):
        """Return table names in a given database schema.

        :param str schema: schema name
        :return list of str: list of table names
        """
        assert isinstance(schema, basestring)
        result = []
        cursor = self.executeSelectSQL("SELECT table_name FROM information_schema.tables WHERE table_schema='%s'" % schema)
        if cursor:
            rows = cursor.fetchall()
            for row in rows:
                result.append(row[0])
        return result

    def createSchema(self, schema, dropIfExists = False):
        """Creates database schema with given name.

        :param str schema: schema name
        :param bool dropIfExists: drops schema before creating, if already exists.
        """
        assert isinstance(schema, basestring)
        assert isinstance(dropIfExists, bool)
        # FIX: the original overwrote the create statement with the drop, so a
        # dropped schema was never re-created; drop first, then create.
        if dropIfExists:
            self.execute("drop schema if exists %s;" % schema)
        self.execute("create schema if not exists %s;" % schema)

    def getRowCount(self, tableFullName, whereClause=None):
        """ Returns table row count.

        :param String tableFullName: full "schema.table" name
        :param String whereClause: optional filter, without the `where` keyword
        :return LongInt: Row count for the table specified.

        >>> connection = Database()
        >>> connection.getRowCount("public.z_terennirelief_l")
        1099736L
        """
        sql = "select count(*) from %s" % tableFullName
        if whereClause:
            sql += " where " + whereClause
        return self.firstRowFromSelect(sql)[0]

    def execute(self, sql, parameters = {}):
        """Executes sql command. Before execution, it replaces parameters if provided.

        Errors are logged (with the failing SQL) rather than raised.
        :param str sql: Command sequence to be executed;
        :param dict parameters: Parameters to be used in a query.
        """
        assert isinstance(sql, basestring)
        assert isinstance(parameters, dict)
        if self.muted or not sql:
            return
        sql = setParameters(sql, parameters)
        connection = self.connection
        cursor = connection.cursor()
        try:
            startTime = time.time()
            cursor.execute(sql)
            self.executeElapsedTime += time.time() - startTime
            connection.commit()
        except psycopg2.Error as e:
            log.error(str(e) + "\n" + sql)
        finally:
            cursor.close()

    def firstRowFromSelect(self, sql, parameters = {}):
        """Return the first row of the query result, or None when it fails."""
        cursor = self.executeSelectSQL(sql, parameters)
        if cursor:
            result = cursor.fetchone()
            cursor.close()
            return result
        else:
            return None

    def loadShapes(self, sql, fieldNames, geomFieldName):
        """Load rows into Containers, decoding geomFieldName from hex WKB via shapely."""
        from shapely.wkb import loads
        from pygeotoolbox.sharedtools import Container
        result = []
        for row in self.executeSelectSQL(sql):
            item = Container()
            for value, fieldName in zip(row, fieldNames):
                if fieldName == geomFieldName:
                    value = loads(value, hex=True)
                setattr(item, fieldName, value)
            result.append(item)
        return result

    def executeSelectSQL(self, sql, parameters = {}, iterSize=0):
        """Runs SQL query and returns database cursor with query result.

        :param str sql: Query to be executed.
        :param dict parameters: Parameters to be used in a query.
        :param int iterSize: server-side cursor batch size (0 keeps the default).
        :return cursor : Cursor with result or None if fails.
        """
        assert isinstance(sql, basestring)
        assert isinstance(parameters, dict)
        assert isinstance(iterSize, int)
        sql = setParameters(sql, parameters)
        cursor = self.connection.cursor()
        if iterSize:
            cursor.itersize = iterSize
        startTime = time.time()
        cursor.execute(sql)
        self.selectElapsedTime += time.time() - startTime
        return cursor

    @staticmethod
    def getDefaultSchemaIfNotAssigned(schemaName):
        """Return 'public' for an empty/None schema name, else the name unchanged."""
        assert isinstance(schemaName, basestring) or schemaName is None
        if not schemaName:
            schemaName = 'public'
        return schemaName

    def cleanTableContent(self, tableName, schema = "temp", whereClause = ""):
        """Delete rows matching whereClause, or truncate the table when empty."""
        if tableName.find(".") == -1:
            tableName = schema + "." + tableName
        if whereClause:
            # FIX: the original format string had a single %s with two
            # arguments, which raised TypeError whenever a whereClause was given.
            sql = "delete from %s where %s" % (tableName, whereClause)
        else:
            sql = "truncate table " + tableName
        self.execute(sql)

    def getFieldDefinition(self, schemaName, tableName):
        """Return information_schema.columns rows for schemaName.tableName."""
        assert isinstance(schemaName, basestring) or schemaName is None
        assert isinstance(tableName, basestring)
        schemaName = Database.getDefaultSchemaIfNotAssigned(schemaName)
        result = []
        cursor = self.connection.cursor()
        try:
            sql = "select * from information_schema.columns where table_schema = '%s' and table_name = '%s'" % (schemaName, tableName)
            cursor.execute(sql)
            result = cursor.fetchall()
        except Exception as inst:
            log.logger.error("PostGISConnector.getFieldDefinition('%s', '%s'):%s" % (schemaName, tableName, str(inst)))
        finally:
            cursor.close()
        return result

    def schemaExists(self, schemaName):
        """True when the schema exists (information_schema.schemata lookup)."""
        return self.executeSelectSQL("SELECT exists(select schema_name FROM information_schema.schemata WHERE schema_name = '%s');" % schemaName).fetchone()[0]

    def findSRID(self, schemaName, tableName, fieldName):
        """Return the PostGIS SRID of a geometry column via Find_SRID()."""
        return self.firstRowFromSelect("SELECT Find_SRID('%s', '%s', '%s');" % (schemaName, tableName, fieldName))[0]

    @staticmethod
    def getFieldString(fieldDefinition):
        """Map a field definition's python type to an SQL type name string."""
        typeNames = {
            int: "int",
            basestring: "text"
        }
        return typeNames.get(fieldDefinition.type, "None")

    def fieldExists(self, fieldName, tableOrFieldDefinitions):
        """True when fieldName is a column of the table spec or definitions list."""
        assert isinstance(fieldName, basestring)
        assert isinstance(tableOrFieldDefinitions, list) or isinstance(tableOrFieldDefinitions, basestring)
        if isinstance(tableOrFieldDefinitions, basestring):
            schemaName, tableName = Database.extractSchemaAndTableName(tableOrFieldDefinitions)
            fieldDefinitions = self.getFieldDefinition(schemaName, tableName)
        else:
            fieldDefinitions = tableOrFieldDefinitions
        for fieldDefinition in fieldDefinitions:
            # information_schema.columns: index 3 is column_name
            if fieldName == fieldDefinition[3]:
                return True
        return False

    def databaseExists(self, databaseName):
        """Returns True if a database with name databaseName exists.

        https://dba.stackexchange.com/questions/45143/check-if-postgresql-database-exists-case-insensitive-way
        :param str databaseName: database name
        :return: bool
        """
        connection = psycopg2.connect(user=config.database.user, host=config.database.host, password=config.database.password)
        cursor = connection.cursor()
        sql = "select exists(SELECT datname FROM pg_catalog.pg_database WHERE lower(datname) = lower('dbname'));".replace("dbname", databaseName)
        cursor.execute(sql)
        result = cursor.fetchone()[0]
        cursor.close()
        connection.close()
        return result

    def createDatabase(self, databaseName):
        """Create a database (autocommit connection) and run the configured
        post-create SQL (e.g. `create extension postgis;`).

        :param databaseName: name of the database to create
        """
        from psycopg2.extensions import ISOLATION_LEVEL_AUTOCOMMIT
        log.openSection("Creating database '%s'" % config.database.name, level=log.DEBUG)
        sql = "create database %s;" % databaseName
        log.debug(sql)
        # CREATE DATABASE cannot run inside a transaction -> autocommit connection
        connection = psycopg2.connect(user=config.database.user, host=config.database.host, password=config.database.password)
        connection.set_isolation_level(ISOLATION_LEVEL_AUTOCOMMIT)
        cursor = connection.cursor()
        cursor.execute(sql)
        cursor.close()
        connection.close()
        if config.database.onCreateDatabaseSQL:
            log.debug(config.database.onCreateDatabaseSQL)
            self.execute(config.database.onCreateDatabaseSQL)
        log.closeSection()

    def geometryValueFromShape(self, shape):
        """SQL geometry literal for a shapely shape (via its WKT)."""
        return self.geometryValueFromWKT(shape.wkt)

    def geometryValueFromWKT(self, wkt):
        """SQL st_geometryfromtext(...) expression in the configured EPSG."""
        return "st_geometryfromtext('%s', %s)" % (wkt, config.database.epsg)

    def normalizeName(self, name):
        """CamelCase -> snake_case: each uppercase letter becomes '_' + lowercase."""
        result = ""
        for letter in name:
            if letter.isupper():
                result += "_" + letter.lower()
            else:
                result += letter
        return result

    def getFieldTypeFromValue(self, value):
        """SQL type name for a python value (bool deliberately checked before int)."""
        if isinstance(value, bool):
            return "boolean"
        elif isinstance(value, float):
            return "float"
        elif isinstance(value, int):
            return "integer"
        elif isinstance(value, basestring):
            return "varchar"
        # FIX: `raise "Field type not recognized"` is a string exception, which
        # itself raises TypeError on modern interpreters; raise a real exception.
        raise TypeError("Field type not recognized")
# @NO-PRODUCTION CODE
if __name__ == "__main__":
    import database  # No dummy, creating module variables
    db = Database()
    # Python 2 print statement: dump all element ids from data.elements
    print "ids:", db.executeSelectSQL("select array_agg(elm_id) from data.elements").fetchone()[0]
from __future__ import unicode_literals
from django.db import models
from django.contrib import admin
# Create your models here.
class Description(models.Model):
    """Simple name/description pair shown on the site."""
    # short display name
    name = models.CharField(max_length=50)
    description = models.CharField(max_length=100)

    def __unicode__(self):
        # admin/list display label (Python 2 Django)
        return self.name
class Education(models.Model):
    """One education entry (school, dates, degree) for the resume page."""
    name = models.CharField(max_length=50)
    location = models.CharField(max_length=50)
    # dates stored as free-form text, not DateFields
    start_date = models.CharField(max_length=20)
    end_date = models.CharField(max_length=20)
    degree = models.CharField(max_length=100)
    description = models.TextField(max_length=1000)
    # whether this entry is shown on the site
    display = models.BooleanField()

    def __unicode__(self):
        # admin/list display label (Python 2 Django)
        return self.name
class Career(models.Model):
    """One work-experience entry (employer, dates, title) for the resume page."""
    name = models.CharField(max_length=50)
    location = models.CharField(max_length=50)
    # dates stored as free-form text, not DateFields
    start_date = models.CharField(max_length=20)
    end_date = models.CharField(max_length=20)
    title = models.CharField(max_length=100)
    description = models.TextField(max_length=1000)
    # whether this entry is shown on the site
    display = models.BooleanField()

    def __unicode__(self):
        # admin/list display label (Python 2 Django)
        return self.name
# expose the models in the Django admin site
admin.site.register(Description)
admin.site.register(Education)
admin.site.register(Career)
|
from PIL import Image
import matplotlib.pyplot as plt
import numpy as np
import os
path = "/home/feng/darknet/data/cele_A/ac/img_celeba/"  # image directory
files = os.listdir(path)  # all file names in the directory
s = [[], [], []]  # parallel lists: [file name, width, height]
count = 0
for file in files:
    s[0].append(file)
    # FIX: close each image handle; the original leaked one file descriptor
    # per image and could hit the open-file limit on large directories.
    with Image.open(path + file) as img:
        s[1].append(img.size[0])
        s[2].append(img.size[1])
    count += 1
    print(count)  # progress indicator
s1 = np.array(s)
s2 = s1.T  # one row per file: (name, width, height)
s3 = s2[s2[:, 0].argsort()]  # sort rows by file name
np.savetxt('_weight11.txt', s3, fmt='%s')
|
from dec_tree import *
from pre_processor import *
import sys
# Names of the two datasets we are using
# Names of the two datasets we are using
bank_attr_names = ['age', 'job', 'marital', 'education', 'default', 'balance', 'housing', 'loan', 'contact', 'day',
                   'month', 'duration', 'campaign', 'pdays', 'previous', 'poutcome', 'y']
car_attr_names = ['buying', 'maint', 'doors', 'persons', 'lug_boot', 'safety', 'label']
# Decision tree variables (filled in by the training code below)
attr_names = []
attrs = []
attr_vals = []
examples = []
# Configuration variables (overridable from the command line, see below)
car = False
train_file = 'bank/train.csv'
test_file = 'bank/test.csv'
max_depth = 100
purity = 'entropy'  # split criterion name passed to learnTree
accept_unknown = True  # treat 'unknown' as a regular attribute value
label_index = 0  # index of the label column, set after attrs is chosen
# Evaluate the data on the learned decision trees
# Evaluate the data on the learned decision trees
def evaluateExamples(msg, examples):
    """Print the accuracy of the learned tree (module-level `root`) on `examples`."""
    print('')
    print(msg)
    correct = 0
    total = 0
    for ex in examples:
        total += 1
        label = evaluateExample(root, ex)
        if label == ex[label_index]:
            correct += 1
    # FIX: guard against ZeroDivisionError when `examples` is empty
    if total == 0:
        print('Evaluated:', 0)
        return
    correct_ratio = correct / total
    print('Evaluated:', total)
    print('Correct: ', correct)
    print('Error: ', round((1 - correct_ratio) * 100, 2), '%', sep='')
# Get the arguments passed in to the script
# Get the arguments passed in to the script:
# <car|bank> <train_file> <test_file> <max_depth> <purity> <accept_unknown>
if len(sys.argv) == 7:
    car = sys.argv[1] == 'car'
    train_file = sys.argv[2]
    test_file = sys.argv[3]
    max_depth = int(sys.argv[4])
    purity = sys.argv[5]
    accept_unknown = sys.argv[6] == 'True'
# Get the training and testing data from file
examples = populateExamples(train_file)
test_examples = populateExamples(test_file)
# Populate attr_vals
attr_vals = learnDataFormat(examples)
if car:
    attrs = car_attr_names
else:
    attrs = bank_attr_names
# the label is always the last column of the chosen attribute list
label_index = len(attrs) - 1
# Handle numeric values by taking the median and comparing each of them
# to it, replacing the number with either 'over' or 'under'
calculateMedians(examples, test_examples)
# Populate common_vals and replace unknown if necessary
if not accept_unknown:
    remove_unknown(examples, test_examples, attr_vals)
# Learn the decision tree
root = learnTree(attrs, attr_vals, examples, max_depth, purity, [])
# Evaluate the quality of the learned tree on the training and test data
evaluateExamples('EVALUATING TRAINING EXAMPLES', examples)
evaluateExamples('EVALUATING TEST EXAMPLES', test_examples)
|
# import helper functions from another module
from Zadanie7ocenyfun import drukuj, srednia, mediana, odchylenie

przedmioty = set()  # empty set of subject names
oceny = []  # empty list of grades
drukuj(przedmioty, "Lista przedmiotów: ")
# subject entry loop: empty input switches to grade entry
print("\nWciśnięcie Enter powoduje przerwanie wprowadzania przedmiotów.")
while True:
    przedmiot = input("Podaj nazwę przedmiotu: ")
    if len(przedmiot):  # non-empty input: add the subject
        if przedmiot in przedmioty:  # warn about duplicates (set ignores them anyway)
            print("Ten przedmiot jest już wprowadzony.")
        przedmioty.add(przedmiot)
    else:
        drukuj(przedmioty, "\nWprowadzone przedmioty: ")
        przedmiot = input("\nZ ktorego przedmiotu chcesz wprowadzić oceny? ")
        if przedmiot not in przedmioty:  # unknown subject: go back to entry
            print("Brak przedmiotu w zbiorze, możesz go dodać.")
        else:
            break  # subject chosen, leave the loop
print("\nWprowadzenie wartości 0 spowoduje przerwanie wprowadzania ocen.")
ocena = None  # loop sentinel
while not ocena:
    try:
        ocena = int(input("Podaj ocenę (1-6): "))
        if (ocena > 0 and ocena < 7):  # grade within range: store as float
            oceny.append(float(ocena))
            # FIX: reset the sentinel so more grades can be entered; the
            # original left `ocena` truthy and stopped after a single grade,
            # contradicting the "enter 0 to stop" prompt above.
            ocena = None
        elif ocena == 0:  # 0 ends grade entry
            break
        else:
            print("Wprowadzono błędną ocenę.")
            ocena = None
    except ValueError:
        print("Wprowadzono błędne dane.")
drukuj(oceny, przedmiot.capitalize() + " - wprowadzone oceny: ")
s = srednia(oceny)
m = mediana(oceny)
o = odchylenie(oceny, s)
print("\nŚrednia ocen wynosi: {0:5.2f}".format(s))
print("Mediana wynosi: {0:5.2f}".format(m))
# FIX: the original format string was "(1:5.2f)" — wrong braces and index —
# so the placeholder printed literally instead of the deviation value.
print("Odchylenie wynosi: {0:5.2f}".format(o))
|
# -*- coding:utf-8 -*-
"""
OKEx Future Trade module.
https://www.okex.me/docs/zh/
NOTE: Only Cross Margin Mode is supported in Trade module currently. Please change Margin Mode to `Cross`, not `Fixed`!
Author: HuangTao
Date: 2019/01/19
Email: huangtao@ifclover.com
"""
import time
import zlib
import json
import copy
import hmac
import base64
from urllib.parse import urljoin
from quant.error import Error
from quant.order import Order
from quant.utils import tools
from quant.utils import logger
from quant.tasks import SingleTask
from quant.position import Position
from quant.const import OKEX_FUTURE
from quant.utils.websocket import Websocket
from quant.asset import Asset, AssetSubscribe
from quant.utils.http_client import AsyncHttpRequests
from quant.utils.decorator import async_method_locker
from quant.order import ORDER_ACTION_BUY, ORDER_ACTION_SELL
from quant.order import ORDER_TYPE_LIMIT, ORDER_TYPE_MARKET
from quant.order import ORDER_STATUS_SUBMITTED, ORDER_STATUS_PARTIAL_FILLED, ORDER_STATUS_FILLED, \
ORDER_STATUS_CANCELED, ORDER_STATUS_FAILED
__all__ = ("OKExFutureRestAPI", "OKExFutureTrade", )
class OKExFutureRestAPI:
    """ OKEx Future REST API client.

    Attributes:
        host: HTTP request host.
        access_key: Account's ACCESS KEY.
        secret_key: Account's SECRET KEY.
        passphrase: API KEY Passphrase.
    """

    def __init__(self, host, access_key, secret_key, passphrase):
        """Initialize REST API client with credentials used for request signing."""
        self._host = host
        self._access_key = access_key
        self._secret_key = secret_key
        self._passphrase = passphrase

    async def get_user_account(self):
        """ Get account asset information.

        Returns:
            success: Success results, otherwise it's None.
            error: Error information, otherwise it's None.
        """
        success, error = await self.request("GET", "/api/futures/v3/accounts", auth=True)
        return success, error

    async def get_position(self, instrument_id):
        """ Get the information of holding positions of a contract.

        Args:
            instrument_id: Contract ID, e.g. BTC-USD-180213.

        Returns:
            success: Success results, otherwise it's None.
            error: Error information, otherwise it's None.
        """
        uri = "/api/futures/v3/{instrument_id}/position".format(instrument_id=instrument_id)
        success, error = await self.request("GET", uri, auth=True)
        return success, error

    async def create_order(self, instrument_id, trade_type, price, size, match_price=0, leverage=20):
        """ Create an order.

        Args:
            instrument_id: Contract ID, e.g. BTC-USD-180213.
            trade_type: Trade type, 1: open long, 2: open short, 3: close long, 4: close short.
            price: Price of each contract.
            size: The buying or selling quantity.
            match_price: Order at best counter party price? (0: no, 1: yes). When posting orders
                at best bid price, order_type can only be 0 (regular order).
            leverage: The leverage of order, default is 20.

        Returns:
            success: Success results, otherwise it's None.
            error: Error information, otherwise it's None.
        """
        uri = "/api/futures/v3/order"
        body = {
            "instrument_id": instrument_id,
            "type": str(trade_type),
            "price": price,
            "size": size,
            "match_price": match_price,
            "leverage": leverage
        }
        success, error = await self.request("POST", uri, body=body, auth=True)
        return success, error

    async def revoke_order(self, instrument_id, order_no):
        """ Cancel an unfilled order.

        Args:
            instrument_id: Contract ID, e.g. BTC-USD-180213.
            order_no: order ID.

        Returns:
            success: Success results, otherwise it's None.
            error: Error information, otherwise it's None.
        """
        uri = "/api/futures/v3/cancel_order/{instrument_id}/{order_id}".format(
            instrument_id=instrument_id, order_id=order_no)
        success, error = await self.request("POST", uri, auth=True)
        if error:
            return None, error
        # The HTTP call can succeed while the exchange still rejects the cancel;
        # treat a falsy "result" field as an error payload.
        if not success["result"]:
            return None, success
        return success, None

    async def revoke_orders(self, instrument_id, order_ids):
        """ Cancel multiple open orders with order_id. Maximum 10 orders can be
        cancelled at a time for each trading pair.

        Args:
            instrument_id: Contract ID, e.g. BTC-USD-180213.
            order_ids: order ID list.

        Returns:
            success: Success results, otherwise it's None.
            error: Error information, otherwise it's None.
        """
        assert isinstance(order_ids, list)
        if len(order_ids) > 10:
            logger.warn("order id list too long! no more than 10!", caller=self)
        uri = "/api/futures/v3/cancel_batch_orders/{instrument_id}".format(instrument_id=instrument_id)
        body = {
            "order_ids": order_ids
        }
        success, error = await self.request("POST", uri, body=body, auth=True)
        if error:
            return None, error
        if not success["result"]:
            return None, success
        return success, None

    async def get_order_info(self, instrument_id, order_id):
        """ Get order detail by order ID. Canceled unfilled orders will be kept
        in record for 2 hours only.

        Args:
            instrument_id: Contract ID, e.g. BTC-USD-180213.
            order_id: order ID.

        Returns:
            success: Success results, otherwise it's None.
            error: Error information, otherwise it's None.
        """
        uri = "/api/futures/v3/orders/{instrument_id}/{order_id}".format(
            instrument_id=instrument_id, order_id=order_id)
        success, error = await self.request("GET", uri, auth=True)
        return success, error

    async def get_order_list(self, instrument_id, state, limit=100):
        """ List your orders. This API can retrieve the latest 20000 entries of data this week.

        Args:
            instrument_id: Contract ID, e.g. BTC-USD-SWAP.
            state: Order state for filter. ("-2": Failed, "-1": Cancelled, "0": Open,
                "1": Partially Filled, "2": Fully Filled, "3": Submitting, "4": Cancelling,
                "6": Incomplete(open + partially filled), "7": Complete(cancelled + fully filled)).
            limit: Number of results per request. Maximum 100. (default 100)

        Returns:
            success: Success results, otherwise it's None.
            error: Error information, otherwise it's None.

        TODO: Add args `from` & `to`.
        """
        uri = "/api/futures/v3/orders/{instrument_id}".format(instrument_id=instrument_id)
        params = {
            "state": state,
            "limit": limit
        }
        success, error = await self.request("GET", uri, params=params, auth=True)
        return success, error

    async def request(self, method, uri, params=None, body=None, headers=None, auth=False):
        """ Do HTTP request.

        Args:
            method: HTTP request method. GET, POST, DELETE, PUT.
            uri: HTTP request uri.
            params: HTTP query params.
            body: HTTP request body.
            headers: HTTP request headers.
            auth: If this request requires authentication.

        Returns:
            success: Success results, otherwise it's None.
            error: Error information, otherwise it's None.
        """
        if params:
            # Deterministic query string (sorted keys) so the signed uri is stable.
            query = "&".join(["{}={}".format(k, params[k]) for k in sorted(params.keys())])
            uri += "?" + query
        url = urljoin(self._host, uri)
        if auth:
            # Millisecond-precision timestamp built from a SINGLE clock read.
            # (The original called time.time() twice, which could straddle a
            # tick and produce an inconsistent timestamp.)
            now = str(time.time())
            timestamp = now.split(".")[0] + "." + now.split(".")[1][:3]
            if body:
                body = json.dumps(body)
            else:
                body = ""
            # Signature payload per OKEx v3: timestamp + METHOD + request path + body.
            message = str(timestamp) + str.upper(method) + uri + str(body)
            mac = hmac.new(bytes(self._secret_key, encoding="utf8"), bytes(message, encoding="utf-8"),
                           digestmod="sha256")
            d = mac.digest()
            sign = base64.b64encode(d)
            if not headers:
                headers = {}
            headers["Content-Type"] = "application/json"
            headers["OK-ACCESS-KEY"] = self._access_key  # no-op .encode().decode() removed
            headers["OK-ACCESS-SIGN"] = sign.decode()
            headers["OK-ACCESS-TIMESTAMP"] = str(timestamp)
            headers["OK-ACCESS-PASSPHRASE"] = self._passphrase
        _, success, error = await AsyncHttpRequests.fetch(method, url, body=body, headers=headers, timeout=10)
        return success, error
class OKExFutureTrade(Websocket):
    """ OKEx Future Trade module. You can initialize trade object with some attributes in kwargs.

    Attributes:
        account: Account name for this trade exchange.
        strategy: What's name would you want to created for you strategy.
        symbol: Symbol name for your trade.
        host: HTTP request host. (default "https://www.okex.com")
        wss: Websocket address. (default "wss://real.okex.com:8443")
        access_key: Account's ACCESS KEY.
        secret_key Account's SECRET KEY.
        passphrase API KEY Passphrase.
        asset_update_callback: You can use this param to specific a async callback function when you initializing Trade
            object. `asset_update_callback` is like `async def on_asset_update_callback(asset: Asset): pass` and this
            callback function will be executed asynchronous when received AssetEvent.
        order_update_callback: You can use this param to specific a async callback function when you initializing Trade
            object. `order_update_callback` is like `async def on_order_update_callback(order: Order): pass` and this
            callback function will be executed asynchronous when some order state updated.
        position_update_callback: You can use this param to specific a async callback function when you initializing Trade
            object. `position_update_callback` is like `async def on_position_update_callback(order: Position): pass` and
            this callback function will be executed asynchronous when some position state updated.
        init_success_callback: You can use this param to specific a async callback function when you initializing Trade
            object. `init_success_callback` is like `async def on_init_success_callback(success: bool, error: Error, **kwargs): pass`
            and this callback function will be executed asynchronous after Trade module object initialized successfully.
    """

    def __init__(self, **kwargs):
        """Initialize."""
        # Validate required params; note that successive failures overwrite `e`,
        # so only the LAST missing param is reported.
        e = None
        if not kwargs.get("account"):
            e = Error("param account miss")
        if not kwargs.get("strategy"):
            e = Error("param strategy miss")
        if not kwargs.get("symbol"):
            e = Error("param symbol miss")
        if not kwargs.get("host"):
            kwargs["host"] = "https://www.okex.com"
        if not kwargs.get("wss"):
            kwargs["wss"] = "wss://real.okex.com:8443"
        if not kwargs.get("access_key"):
            e = Error("param access_key miss")
        if not kwargs.get("secret_key"):
            e = Error("param secret_key miss")
        if not kwargs.get("passphrase"):
            e = Error("param passphrase miss")
        if e:
            # Report the failure through the init callback (if any) and bail out
            # without touching the websocket.
            logger.error(e, caller=self)
            if kwargs.get("init_success_callback"):
                SingleTask.run(kwargs["init_success_callback"], False, e)
            return
        self._account = kwargs["account"]
        self._strategy = kwargs["strategy"]
        self._platform = OKEX_FUTURE
        self._symbol = kwargs["symbol"]
        self._host = kwargs["host"]
        self._wss = kwargs["wss"]
        self._access_key = kwargs["access_key"]
        self._secret_key = kwargs["secret_key"]
        self._passphrase = kwargs["passphrase"]
        self._asset_update_callback = kwargs.get("asset_update_callback")
        self._order_update_callback = kwargs.get("order_update_callback")
        self._position_update_callback = kwargs.get("position_update_callback")
        self._init_success_callback = kwargs.get("init_success_callback")
        url = self._wss + "/ws/v3"
        # 5-second heartbeat keeps the OKEx websocket alive ("ping"/"pong").
        super(OKExFutureTrade, self).__init__(url, send_hb_interval=5)
        self.heartbeat_msg = "ping"
        self._assets = {}  # Asset object. e.g. {"BTC": {"free": "1.1", "locked": "2.2", "total": "3.3"}, ... }
        self._orders = {}  # Order objects. e.g. {"order_no": Order, ... }
        self._position = Position(self._platform, self._account, self._strategy, self._symbol)
        # Subscribing our channels.
        self._order_channel = "futures/order:{symbol}".format(symbol=self._symbol)
        self._position_channel = "futures/position:{symbol}".format(symbol=self._symbol)
        # If our channels that subscribed successfully.
        self._subscribe_order_ok = False
        self._subscribe_position_ok = False
        # Initializing our REST API client.
        self._rest_api = OKExFutureRestAPI(self._host, self._access_key, self._secret_key, self._passphrase)
        # Subscribing our asset event.
        if self._asset_update_callback:
            AssetSubscribe(self._platform, self._account, self.on_event_asset_update)
        self.initialize()

    @property
    def assets(self):
        # Shallow copy so callers cannot mutate internal state.
        return copy.copy(self._assets)

    @property
    def orders(self):
        return copy.copy(self._orders)

    @property
    def position(self):
        return copy.copy(self._position)

    @property
    def rest_api(self):
        return self._rest_api

    async def connected_callback(self):
        """After websocket connection created successfully, we will send a message to server for authentication."""
        # NOTE(review): time.time() is read twice to build one timestamp; a tick
        # between the two reads would yield an inconsistent value — confirm.
        timestamp = str(time.time()).split(".")[0] + "." + str(time.time()).split(".")[1][:3]
        # Login signature payload: timestamp + "GET" + "/users/self/verify".
        message = str(timestamp) + "GET" + "/users/self/verify"
        mac = hmac.new(bytes(self._secret_key, encoding="utf8"), bytes(message, encoding="utf8"), digestmod="sha256")
        d = mac.digest()
        signature = base64.b64encode(d).decode()
        data = {
            "op": "login",
            "args": [self._access_key, self._passphrase, timestamp, signature]
        }
        await self.ws.send_json(data)

    @async_method_locker("OKExFutureTrade.process_binary.locker")
    async def process_binary(self, raw):
        """ Process binary message that received from websocket.

        Args:
            raw: Binary message received from websocket.

        Returns:
            None.
        """
        # Frames arrive raw-deflate compressed (negative wbits = no zlib header).
        decompress = zlib.decompressobj(-zlib.MAX_WBITS)
        msg = decompress.decompress(raw)
        msg += decompress.flush()
        msg = msg.decode()
        if msg == "pong":  # heartbeat reply, nothing to do
            return
        logger.debug("msg:", msg, caller=self)
        msg = json.loads(msg)
        # Authorization message received.
        if msg.get("event") == "login":
            if not msg.get("success"):
                e = Error("Websocket connection authorized failed: {}".format(msg))
                logger.error(e, caller=self)
                SingleTask.run(self._init_success_callback, False, e)
                return
            logger.info("Websocket connection authorized successfully.", caller=self)
            # Fetch orders from server. (state 6 = open + partially filled)
            result, error = await self._rest_api.get_order_list(self._symbol, 6)
            if error:
                e = Error("get open orders error: {}".format(error))
                SingleTask.run(self._init_success_callback, False, e)
                return
            if len(result) > 100:
                logger.warn("order length too long! (more than 100)", caller=self)
            for order_info in result["order_info"]:
                self._update_order(order_info)
            # Fetch positions from server.
            position, error = await self._rest_api.get_position(self._symbol)
            if error:
                e = Error("get position error: {}".format(error))
                SingleTask.run(self._init_success_callback, False, e)
                return
            if len(position["holding"]) > 0:
                self._update_position(position["holding"][0])
            # Subscribe order channel and position channel.
            data = {
                "op": "subscribe",
                "args": [self._order_channel, self._position_channel]
            }
            await self.ws.send_json(data)
            return
        # Subscribe response message received; init succeeds once BOTH channels are up.
        if msg.get("event") == "subscribe":
            if msg.get("channel") == self._order_channel:
                self._subscribe_order_ok = True
            if msg.get("channel") == self._position_channel:
                self._subscribe_position_ok = True
            if self._subscribe_order_ok and self._subscribe_position_ok:
                SingleTask.run(self._init_success_callback, True, None)
            return
        # Order update message received.
        if msg.get("table") == "futures/order":
            for data in msg["data"]:
                self._update_order(data)
            return
        # Position update message receive.
        if msg.get("table") == "futures/position":
            for data in msg["data"]:
                self._update_position(data)

    async def create_order(self, action, price, quantity, order_type=ORDER_TYPE_LIMIT, *args, **kwargs):
        """ Create an order.

        Args:
            action: Trade direction, `BUY` or `SELL`.
            price: Price of each contract.
            quantity: The buying or selling quantity; a negative quantity
                targets the short side (see trade_type mapping below).
            order_type: Order type, `MARKET` or `LIMIT`.

        Returns:
            order_no: Order ID if created successfully, otherwise it's None.
            error: Error information, otherwise it's None.
        """
        # OKEx trade types: 1 open long, 2 open short, 3 close long, 4 close short.
        if int(quantity) > 0:
            if action == ORDER_ACTION_BUY:
                trade_type = "1"
            else:
                trade_type = "3"
        else:
            if action == ORDER_ACTION_BUY:
                trade_type = "4"
            else:
                trade_type = "2"
        quantity = abs(int(quantity))
        result, error = await self._rest_api.create_order(self._symbol, trade_type, price, quantity)
        if error:
            return None, error
        return result["order_id"], None

    async def revoke_order(self, *order_nos):
        """ Revoke (an) order(s).

        Args:
            order_nos: Order id list, you can set this param to 0 or multiple items. If you set 0 param, you can cancel
                all orders for this symbol(initialized in Trade object). If you set 1 param, you can cancel an order.
                If you set multiple param, you can cancel multiple orders. Do not set param length more than 100.

        Returns:
            Success or error, see bellow.

        NOTEs:
            DO NOT INPUT MORE THAT 10 ORDER NOs, you can invoke many times.
        """
        # If len(order_nos) == 0, you will cancel all orders for this symbol(initialized in Trade object).
        if len(order_nos) == 0:
            result, error = await self._rest_api.get_order_list(self._symbol, 6)
            if error:
                return False, error
            if len(result) > 100:
                logger.warn("order length too long! (more than 100)", caller=self)
            for order_info in result["order_info"]:
                order_no = order_info["order_id"]
                _, error = await self._rest_api.revoke_order(self._symbol, order_no)
                if error:
                    return False, error
            return True, None
        # If len(order_nos) == 1, you will cancel an order.
        if len(order_nos) == 1:
            success, error = await self._rest_api.revoke_order(self._symbol, order_nos[0])
            if error:
                return order_nos[0], error
            else:
                return order_nos[0], None
        # If len(order_nos) > 1, you will cancel multiple orders.
        if len(order_nos) > 1:
            success, error = [], []
            for order_no in order_nos:
                _, e = await self._rest_api.revoke_order(self._symbol, order_no)
                if e:
                    error.append((order_no, e))
                else:
                    success.append(order_no)
            return success, error

    async def get_open_order_nos(self):
        """ Get open order id list.

        Args:
            None.

        Returns:
            order_nos: Open order id list, otherwise it's None.
            error: Error information, otherwise it's None.
        """
        success, error = await self._rest_api.get_order_list(self._symbol, 6)
        if error:
            return None, error
        else:
            if len(success) > 100:
                logger.warn("order length too long! (more than 100)", caller=self)
            order_nos = []
            for order_info in success["order_info"]:
                order_nos.append(order_info["order_id"])
            return order_nos, None

    def _update_order(self, order_info):
        """ Order update.

        Args:
            order_info: Order information (one entry of the exchange's order payload).

        Returns:
            None.
        """
        order_no = str(order_info["order_id"])
        state = order_info["state"]
        # Unfilled remainder of the order.
        remain = int(order_info["size"]) - int(order_info["filled_qty"])
        ctime = tools.utctime_str_to_mts(order_info["timestamp"])
        # Map OKEx order states onto the framework's status constants.
        if state == "-2":
            status = ORDER_STATUS_FAILED
        elif state == "-1":
            status = ORDER_STATUS_CANCELED
        elif state == "0":
            status = ORDER_STATUS_SUBMITTED
        elif state == "1":
            status = ORDER_STATUS_PARTIAL_FILLED
        elif state == "2":
            status = ORDER_STATUS_FILLED
        else:
            # Unknown state: ignore the update entirely.
            return None
        order = self._orders.get(order_no)
        if not order:
            # First time we see this order: build a new Order object.
            # trade types "1"/"4" act on the buy side, "2"/"3" on the sell side.
            info = {
                "platform": self._platform,
                "account": self._account,
                "strategy": self._strategy,
                "order_no": order_no,
                "action": ORDER_ACTION_BUY if order_info["type"] in ["1", "4"] else ORDER_ACTION_SELL,
                "symbol": self._symbol,
                "price": order_info["price"],
                "quantity": order_info["size"],
                "trade_type": int(order_info["type"])
            }
            order = Order(**info)
        order.remain = remain
        order.status = status
        order.avg_price = order_info["price_avg"]
        order.ctime = ctime
        order.utime = ctime
        self._orders[order_no] = order
        SingleTask.run(self._order_update_callback, copy.copy(order))
        # Terminal states are dropped from the in-memory book.
        if status in [ORDER_STATUS_FAILED, ORDER_STATUS_CANCELED, ORDER_STATUS_FILLED]:
            self._orders.pop(order_no)

    def _update_position(self, position_info):
        """ Position update.

        Args:
            position_info: Position information (one "holding" entry).

        Returns:
            None.
        """
        self._position.long_quantity = int(position_info["long_qty"])
        self._position.long_avg_price = position_info["long_avg_cost"]
        self._position.short_quantity = int(position_info["short_qty"])
        self._position.short_avg_price = position_info["short_avg_cost"]
        self._position.liquid_price = position_info["liquidation_price"]
        self._position.utime = tools.utctime_str_to_mts(position_info["updated_at"])
        SingleTask.run(self._position_update_callback, copy.copy(self.position))

    async def on_event_asset_update(self, asset: Asset):
        """ Asset event data callback.

        Args:
            asset: Asset object callback from EventCenter.

        Returns:
            None.
        """
        self._assets = asset
        SingleTask.run(self._asset_update_callback, asset)
|
# allows to import own functions
import sys
import os
import re

# Locate the project root (the path prefix ending in "TFM") so project-local
# modules under src/ can be imported regardless of the working directory.
root_project = re.findall(r'(^\S*TFM)', os.getcwd())[0]
sys.path.append(root_project)
from src.utils.help_func import results_searchcv,plot_predictions,\
    errors_distribution, plot_visualizations, get_model_data
from sklearn.model_selection import GridSearchCV, train_test_split
import pandas as pd
from scipy.stats import randint
import joblib
import seaborn as sns
sns.set()
import time
from scipy.stats import uniform, randint, loguniform
from sklearn.dummy import DummyRegressor

# Get the data
# NOTE(review): 500000 looks like a row-count argument — confirm against
# get_model_data's signature.
df_train_val = get_model_data(500000)

# Feature selection
features = [
    'Tr',
    'inf_pow_1',
    'inf_pow_2',
    'mort_pow_1',
    'mort_pow_2',
    'mort_pow_3',
    'n_closed',
    'react_time',
    'total_deceased',
    'betweenness',
    'degree',
    'closeness',
    'country_pop',
    'country_departures',
    'exposed_pop',
    'inf_pow_1_log',
    'inf_pow_2_log',
    'mort_pow_1_log',
    'mort_pow_2_log',
    'mort_pow_3_log',
]
df_train_val = df_train_val[features]
print("=" * 20)
print(f"Train_validation size: {df_train_val.shape}")
print("=" * 20)

# Split features/target; 'total_deceased' is the regression target.
X_train_val = df_train_val.drop('total_deceased', axis=1)
y_train_val = df_train_val['total_deceased']
X_train, X_val, y_train, y_val = train_test_split(X_train_val,
                                                  y_train_val,
                                                  random_state=42)

# Baseline: grid-search a DummyRegressor over its two constant strategies,
# refitting on R2 while also tracking RMSE and MAE.
param_grid = dict(
    strategy=['mean', 'median'])
scoring = {'R2': 'r2', 'RMSE': 'neg_root_mean_squared_error',
           'MAE': 'neg_mean_absolute_error'}
grid_search = GridSearchCV(DummyRegressor(),
                           param_grid=param_grid,
                           scoring=scoring,
                           refit='R2',
                           verbose=1, n_jobs=-1)
grid_search.fit(X_train_val, y_train_val)

# Train the model with only train data and best parameters of random search
estimator = DummyRegressor(**grid_search.best_params_)
estimator.fit(X_train, y_train)
results_searchcv(grid_search, estimator, X_val, y_val)
import FileUtil
import FeatureExtractor
import math
################## Notes #####################
# What is tq_0, t_q, and t_i?
# How do you obtan the desired output vectors?
##############################################
sigma = 3.5 #d_max
numOfFeatureVectors = 0
def getAverageVector(author):
    """Return the element-wise mean feature vector over all articles whose
    file name contains `author`.

    Bugs fixed:
      * `sumElement` was never reset between features, so every average
        also accumulated all previous features' sums;
      * the divisor was `len(x[0])` (vector length) instead of `len(x)`
        (number of article vectors).

    NOTE(review): the original computed a normalized vector but appended the
    raw one; the raw vector is kept here — confirm whether normalization was
    intended (append_vectors_to_t appends the normalized form).
    """
    x = []
    articlesDirectory = "../Articles/"
    fileNames = FileUtil.getFileNames(articlesDirectory)
    for file in fileNames:
        if author in file:
            fileArg = articlesDirectory + file
            x.append(FeatureExtractor.createFeatureVector(fileArg))
    avgVector = []
    for i in range(len(x[0])):
        sumElement = 0  # reset for every feature position
        for j in range(len(x)):
            sumElement += x[j][i]
        avgVector.append(sumElement / len(x))  # mean over article vectors
    return avgVector
# Compute the final outputs for the General Regression Neural Network for some
# query tq_i.
# tq_i: query vector
# t: list of training vectors
def compute_outputs(t, tq_i, hfs, d, numSpots, dq_i): #Denoted as dq
    """GRNN forward pass: fills `hfs` with kernel values and `dq_i` with the
    normalized output vector for query `tq_i` (both lists mutated in place,
    result also printed).

    NOTE(review): `hfs[i] * d[i][j]` looks transposed (one would expect
    `hfs[j] * d[j][i]` for a sum over training instances); because `d` is an
    identity matrix here the two give identical results — confirm before
    changing how `d` is built.
    """
    result = 0  # NOTE(review): this initial value is unused (reset inside the loop below)
    # NOTE(review): this creates a LOCAL name shadowing the module-level
    # numOfFeatureVectors; the global is never updated.
    numOfFeatureVectors = len(t)
    compute_desired_output_vectors(d, numSpots)
    for i in range(0, len(t)):
        hfs.append(hf(tq_i, t[i], sigma)) # Compute the kernels for each training instance with query tq_i
    for i in range(0, len(t)):
        result = 0
        for j in range(0, len(t)):
            result += hfs[i] * d[i][j] #First summation Unit
        dq_i.append(result)
    sum_hfs = 0.0
    for i in range(0, len(t)):
        sum_hfs += hfs[i] #Second summation unit
    # Normalize by the total kernel mass so the outputs sum to 1.
    for j in range(0, len(t)):
        dq_i[j] = dq_i[j] / sum_hfs
    print (dq_i)
def append_vectors_to_t(t):
    """Read every article file, build its normalized feature vector, and
    append each vector to the list `t` (mutated in place)."""
    articlesDirectory = "../Articles/"
    fileNames = FileUtil.getFileNames(articlesDirectory)
    # NOTE(review): this binds a LOCAL numOfFeatureVectors; the module-level
    # global of the same name is not updated — confirm intent.
    numOfFeatureVectors = len(fileNames)
    for name in fileNames:
        # Each file path is resolved, vectorized, normalized, then appended.
        path = articlesDirectory + name
        raw_vector = FeatureExtractor.createFeatureVector(path)
        t.append(FeatureExtractor.normalize(raw_vector))
def create_empty_vectors(numSpots):
    """Return a fresh list containing numSpots zeros."""
    return [0 for _ in range(numSpots)]
def compute_desired_output_vectors(d, numSpots):
    """Append numSpots one-hot desired-output rows to `d` (an identity
    matrix when `d` starts empty); `d` is mutated in place."""
    # Append the zero rows first (row creation inlined from the old helper).
    for _ in range(numSpots):
        d.append([0] * numSpots)
    # Then set the diagonal over every row currently in d (matches the
    # original's double loop, which only ever wrote where k == j).
    for idx in range(len(d)):
        d[idx][idx] = 1
def dist_sqrd(t_q, t_i):
    """Squared Euclidean distance between vectors t_q and t_i."""
    total = 0.0
    for idx, q_val in enumerate(t_q):
        total += math.pow(q_val - t_i[idx], 2.0)
    return total
def hf(t_q, t_i, sigma):
    """Gaussian kernel value of query t_q against training vector t_i.

    NOTE(review): the denominator is (2*sigma)**2 == 4*sigma**2, while the
    textbook Gaussian/GRNN kernel uses 2*sigma**2 — confirm the intended
    bandwidth before relying on sigma's numeric value.
    """
    denom = (2.0 * sigma) ** 2.0
    return math.exp(-dist_sqrd(t_q, t_i) / denom)
|
from PIL import Image
import numpy as np
import scipy.misc
"""
AUTHOR: Himanshu Sharma
TITLE: Bit Plane Generator
==========================
"""
def generateGray(bit):
    """Map a bit value to a gray level: '1' -> white (255), else black (0)."""
    return 255 if str(bit) == '1' else 0
def bitplane(binary, plane_no):
    """Return the plane_no-th bit of `binary` as a one-character string.

    plane_no is 1-indexed from the most significant (leftmost) bit.
    """
    # str() guards against non-string input, as in the original.
    return str(binary)[plane_no - 1]
def binary_generate(decimal_number):
    """Return the 8-bit binary string for a non-negative integer.

    Bug fixed: the original used `decimal_number /= 2` (float division on
    Python 3), which turned the remainder sequence into garbage; integer
    division `//=` restores the intended repeated-division algorithm.
    """
    bits = 8
    # collect remainders from repeated division by 2 (LSB first)
    rem = []
    while decimal_number != 0:
        rem.append(str(decimal_number % 2))
        decimal_number //= 2  # integer division (was `/= 2`)
    # remainders come out LSB-first, so reverse them
    binary = ''.join(reversed(rem))
    # if it is less than 8 bits add zeros to front.
    while len(binary) < bits:
        binary = '0' + binary
    return binary
def string_to_integer(string):
    """Parse a base-10 digit string into an int."""
    value = int(string)
    return value
def generateBitPlane(image, plane_no=1):
    """Return the plane_no-th bit plane of an image as a 0/255 uint8 array.

    Plane no 1 refers to the MSB and plane 8 to the LSB of each 8-bit
    grayscale pixel.

    Rewritten with numpy bitwise operations: the original np.vectorize'd
    three per-pixel Python helpers (one of which, binary_generate, was
    broken by float division), which was both incorrect and very slow.
    """
    # open the image as grayscale (8-bit pixels, values 0..255).
    img = Image.open(image).convert('L')
    arr_image = np.asarray(img, dtype=np.uint8)
    # Shift the wanted bit down to position 0 and mask it: plane_no=1 selects
    # bit 7 (MSB), plane_no=8 selects bit 0 (LSB) — same indexing as before.
    plane = (arr_image >> (8 - plane_no)) & 1
    # Scale 1 -> 255 so the plane renders as a black/white image.
    return (plane * 255).astype(np.uint8)
|
import numpy as np
def gen():
    """Build a small training set: for every base value and every inserted
    value, emit the three 3-slot arrangements of two base copies plus the
    inserted value, labelled with the base value's result code."""
    vals = [100, 50, 30, 20]
    results = [1, 5, 3, 2]
    x_train = []
    y_train = []
    for idx, base in enumerate(vals):
        for extra in vals:
            # The inserted value occupies each of the three slots in turn,
            # matching the original emission order.
            for sample in ([base, base, extra],
                           [base, extra, base],
                           [extra, base, base]):
                x_train.append(sample)
                y_train.append([results[idx]])
    return x_train, y_train
def gen2():
    """Build a labelled training set from 3-slot value patterns.

    NOTE(review): the `elif` branch below can never run — its condition
    `x_set.count(val) <= x_set.count(addval)` is already a disjunct of the
    `if` above it, so labels `results[count]` are never emitted; confirm
    the intended labelling rule.
    NOTE(review): the file's indentation was lost; the placement of the two
    prints is a best-effort reconstruction.
    """
    vals = [100, 50, 0]
    results = [1, 5, 0]
    x_train = []
    y_train = []
    # cnt = 0
    for val in vals:
        for addval in vals:
            for count in range(len(vals)):
                # skip duplicate patterns when base and inserted value coincide
                if(count!=0 and val == addval):
                    continue
                x_set = [val]*len(vals) # array made of one repeated element
                # y_set = [val]*len(vals)
                x_set[count] = addval  # place the inserted value at slot `count`
                x_train.append(x_set)
                # label 0 when a zero appears or the inserted value dominates
                if(x_set.count(0) or x_set.count(val) <= x_set.count(addval)):
                    y_train.append([0])
                elif(x_set.count(val) <= x_set.count(addval)):  # dead branch — see docstring
                    y_train.append([results[count]])
                print(x_set, y_train[-1])
    print('len: ', len(x_train), '\n')
    # cnt+=1
    # x_train = np.unique(np.array(x_train), axis = 0)
    # y_train = np.array(y_train)
    return x_train, y_train
# def gen3():
# dsets = [[0, 1, 10, 100], [0, 2, 20, 200]]
# results = [0, 5, 10]
# x_train = []
# y_train = []
# # cnt = 0
# # for dset in dsets: # по массивам
# # for el in dset: # по элементам
# # for addset in dsets: # по доп массивам
# # for addel in addset: # по элементам доп массива
# # # for count in range(len(dsets)):
# # x_set = [el]
# # x_set = [0]*(len(dset)-1)
# # x_set.insert(0, el)
# # # x_set.insert()
# # x_train.append(x_set)
# # print(x_set)
# # print()
# count = len(dsets[0])
# for dset in dsets:
# for count1 in range(count):
# x_set = [0]*count
# x_set.pop(count1)
# xset = x_set.insert(count1-1, dset[count1])
# for addset in dsets:
# for count2 in range(count):
# x_set.pop(count2)
# xset = x_set.insert(count2-1, addset[count2])
# for count3 in range(count):
# x_set.pop(count3)
# xset = x_set.insert(count3-1, addset[count3])
# x_train.extend(x_set)
# print("xs:", x_set)
# print()
# xnp = np.unique(np.array(x_train), axis = 0)
# print("x:", x_train)
# print("xnp:", np.unique(np.array(x_train).reshape((-1, 4)), axis = 0))
# return x_train, y_train
# # print(gen3()[0])
# gen3()
# print("end")
# Restored as a function: the original commented out only the `def gen4():`
# line, leaving this body at module level with a bare `return` — a
# SyntaxError that broke the whole module. The call site below is still
# commented out, so restoring the def changes no runtime behavior.
def gen4():
    """Experimental dataset builder (unused; kept for reference)."""
    dsets = [[1, 10, 100], [2, 20, 200]]
    x_train = []
    y_train = []  # was never initialized in the original body
    dset = []
    for i in dsets:
        dset.extend(dsets)
    count = len(dset)
    res = []
    for i in range(count):
        res.append(i)
    for dset in dsets:
        xnp = np.unique(np.array(x_train), axis = 0)
        print("x:", x_train)
        print("xnp:", np.unique(np.array(x_train).reshape((-1, 4)), axis = 0))
    return x_train, y_train
# gen4()
# print("end")
# import numpy as np
# # dsets = [[0], [1, 2, 3], [4, 5, 6]]
# dsets = [[0], [1, 2, 3]]
# dset = []
# for i in dsets:
# dset.extend(i)
# count = len(dset)
# res = []
# xset = []
# for j in dset:
# for i in dset:
# res.append(i)
# xset.append()
# dset.insert(0, dset.pop())
# print("dset:", dset, '\n')
# print("res:", res, '\n')
# print("np.array(res).reshape((-1, len(dset))):\n", np.array(res).reshape((-1, len(dset))))
# # print("np.array(res).reshape((-1, len(dsets[0]))):\n", np.array(res).reshape((-1, len(dsets[-1]))))
|
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.autograd import Function
import numpy as np
import soft_renderer.cuda.standard_rasterize as standard_rasterize_cuda
def standard_rasterize(face_vertices, image_size=256):
    """Rasterize face vertices into fresh depth / triangle-id / barycentric buffers.

    Args:
        face_vertices: CUDA tensor of per-face vertices; the leading dimension
            is the batch size (further shape requirements are defined by the
            CUDA extension — TODO confirm).
        image_size: output height and width in pixels (square image).

    Returns:
        (depth_buffer, triangle_buffer, baryw_buffer) as filled by the kernel.

    Raises:
        TypeError: if the input tensor lives on the CPU.
    """
    # Compare the device *type*: `face_vertices.device == "cpu"` compared a
    # torch.device with a str, which is unreliable across PyTorch versions.
    if face_vertices.device.type == "cpu":
        raise TypeError('Rasterize module supports only cuda Tensors')
    h = w = image_size
    # Buffer init: "far" depth (1e6), -1 = no triangle, zero barycentric weights.
    depth_buffer = torch.zeros([face_vertices.shape[0], h, w]).float().cuda() + 1e6
    triangle_buffer = torch.zeros([face_vertices.shape[0], h, w]).int().cuda() - 1
    baryw_buffer = torch.zeros([face_vertices.shape[0], h, w, 3]).float().cuda()
    depth_buffer, triangle_buffer, baryw_buffer = standard_rasterize_cuda.standard_rasterize(face_vertices, depth_buffer, triangle_buffer, baryw_buffer, h, w)
    return depth_buffer, triangle_buffer, baryw_buffer
def standard_rasterize_colors(face_vertices, face_colors, depth_buffer, triangle_buffer, images, h, w):
    """Rasterize per-face colors into caller-provided buffers.

    All buffers (depth_buffer, triangle_buffer, images) are filled IN PLACE by
    the CUDA kernel; nothing is returned. Callers must preallocate them on the
    GPU (see standard_rasterize for the expected initial values).

    Raises:
        TypeError: if the input tensor lives on the CPU.
    """
    # Same device-type fix as standard_rasterize: compare .device.type, not
    # a torch.device against a plain string.
    if face_vertices.device.type == "cpu":
        raise TypeError('Rasterize module supports only cuda Tensors')
    standard_rasterize_cuda.standard_rasterize_colors(face_vertices, face_colors, depth_buffer, triangle_buffer, images, h, w)
|
from ipaddress import ip_network
from random import choice
def genv6(n):
    """Randomly select *n* /56 subnets out of the 1234:1234::/30 block.

    Walks the /56 subnets of the block in order and keeps each one with
    the same pseudo-random acceptance test as before (values 11..29 out
    of 0..29, i.e. a 19/30 chance), stopping once *n* have been kept.
    Returns the selected subnets as a list of ``IPv6Network`` objects.
    """
    print('generating {} addresses...'.format(n))
    wanted = int(n)
    chosen = []
    for candidate in ip_network('1234:1234::/30').subnets(new_prefix=56):
        # Same acceptance test (and therefore the same RNG call sequence)
        # as the original implementation.
        if choice(range(30)) > 10:
            chosen.append(candidate)
            if len(chosen) == wanted:
                break
    print('... done')
    return chosen
testdata = [
ip_network('134.162.82.0/24'),
ip_network('134.162.83.0/24'),
ip_network('134.162.84.0/24'),
ip_network('168.226.240.0/24'),
ip_network('168.226.241.0/24'),
ip_network('168.226.242.0/24'),
ip_network('168.226.243.0/24'),
ip_network('168.226.244.0/24'),
ip_network('168.226.245.0/24'),
ip_network('168.226.246.0/24'),
ip_network('168.226.247.0/24'),
ip_network('168.226.248.0/24'),
ip_network('168.226.249.0/24'),
ip_network('168.226.250.0/24'),
ip_network('168.226.251.0/24'),
ip_network('168.226.252.0/24'),
ip_network('168.226.253.0/24'),
ip_network('168.226.254.0/24'),
ip_network('168.226.255.0/24'),
ip_network('170.155.1.0/24'),
ip_network('170.155.2.0/24'),
ip_network('170.155.3.0/24'),
ip_network('170.155.4.0/24'),
ip_network('170.155.5.0/24'),
ip_network('170.155.6.0/24'),
ip_network('170.155.9.0/24'),
ip_network('170.155.10.0/24'),
ip_network('170.155.11.0/24'),
ip_network('170.155.16.0/24'),
ip_network('170.155.17.0/24'),
ip_network('170.155.19.0/24'),
ip_network('170.155.20.0/24'),
ip_network('170.155.21.0/24'),
ip_network('170.155.26.0/24'),
ip_network('170.155.27.0/24'),
ip_network('170.155.33.0/24'),
ip_network('170.155.34.0/24'),
ip_network('170.155.35.0/24'),
ip_network('170.155.36.0/24'),
ip_network('170.155.37.0/24'),
ip_network('170.155.42.0/24'),
ip_network('170.155.44.0/24'),
ip_network('170.155.46.0/24'),
ip_network('170.155.47.0/24'),
ip_network('170.155.48.0/24'),
ip_network('170.155.54.0/24'),
ip_network('170.155.60.0/24'),
ip_network('170.155.61.0/24'),
ip_network('170.155.65.0/24'),
ip_network('170.155.66.0/24'),
ip_network('170.155.69.0/24'),
ip_network('170.155.73.0/24'),
ip_network('170.155.77.0/24'),
ip_network('170.155.78.0/24'),
ip_network('170.155.79.0/24'),
ip_network('170.155.80.0/24'),
ip_network('170.155.83.0/24'),
ip_network('170.155.85.0/24'),
ip_network('170.155.88.0/24'),
ip_network('170.155.89.0/24'),
ip_network('170.155.90.0/24'),
ip_network('170.155.91.0/24'),
ip_network('170.155.92.0/24'),
ip_network('170.155.93.0/24'),
ip_network('170.155.97.0/24'),
ip_network('170.155.99.0/24'),
ip_network('170.155.100.0/24'),
ip_network('170.155.101.0/24'),
ip_network('170.155.102.0/24'),
ip_network('170.155.103.0/24'),
ip_network('170.155.105.0/24'),
ip_network('170.155.106.0/24'),
ip_network('170.155.107.0/24'),
ip_network('170.155.109.0/24'),
ip_network('170.155.110.0/24'),
ip_network('170.155.111.0/24'),
ip_network('170.155.113.0/24'),
ip_network('170.155.115.0/24'),
ip_network('170.155.118.0/24'),
ip_network('170.155.120.0/24'),
ip_network('170.155.121.0/24'),
ip_network('170.155.122.0/24'),
ip_network('170.155.123.0/24'),
ip_network('170.155.126.0/24'),
ip_network('170.155.127.0/24'),
ip_network('170.155.142.0/24'),
ip_network('170.155.144.0/24'),
ip_network('170.155.145.0/24'),
ip_network('170.155.146.0/24'),
ip_network('170.155.148.0/24'),
ip_network('170.155.160.0/24'),
ip_network('170.155.163.0/24'),
ip_network('170.155.168.0/24'),
ip_network('170.155.180.0/24'),
ip_network('170.155.181.0/24'),
ip_network('170.155.182.0/24'),
ip_network('170.155.200.0/24'),
ip_network('170.155.201.0/24'),
ip_network('170.155.202.0/24'),
ip_network('170.155.220.0/24'),
ip_network('170.155.222.0/24'),
ip_network('170.155.225.0/24'),
ip_network('170.155.254.0/24'),
ip_network('181.0.0.0/19'),
ip_network('181.0.32.0/19'),
ip_network('181.0.64.0/19'),
ip_network('181.0.96.0/19'),
ip_network('181.0.128.0/19'),
ip_network('181.0.160.0/19'),
ip_network('181.0.192.0/19'),
ip_network('181.0.224.0/19'),
ip_network('181.1.0.0/19'),
ip_network('181.1.32.0/19'),
ip_network('181.1.64.0/19'),
ip_network('181.1.96.0/19'),
ip_network('181.1.128.0/21'),
ip_network('181.1.136.0/21'),
ip_network('181.1.144.0/21'),
ip_network('181.1.152.0/21'),
ip_network('181.1.160.0/21'),
ip_network('181.1.168.0/21'),
ip_network('181.1.176.0/21'),
ip_network('181.1.184.0/21'),
ip_network('181.1.192.0/21'),
ip_network('181.1.200.0/21'),
ip_network('181.1.208.0/21'),
ip_network('181.1.216.0/21'),
ip_network('181.1.224.0/21'),
ip_network('181.1.232.0/21'),
ip_network('181.1.240.0/21'),
ip_network('181.1.248.0/21'),
ip_network('181.2.0.0/20'),
ip_network('181.2.16.0/20'),
ip_network('181.2.32.0/20'),
ip_network('181.2.48.0/20'),
ip_network('181.2.64.0/20'),
ip_network('181.2.80.0/20'),
ip_network('181.2.96.0/20'),
ip_network('181.2.112.0/20'),
ip_network('181.2.128.0/20'),
ip_network('181.2.144.0/20'),
ip_network('181.2.160.0/20'),
ip_network('181.2.176.0/20'),
ip_network('181.2.192.0/20'),
ip_network('181.2.208.0/20'),
ip_network('181.2.224.0/20'),
ip_network('181.2.240.0/20'),
ip_network('181.3.0.0/20'),
ip_network('181.3.16.0/20'),
ip_network('181.3.32.0/20'),
ip_network('181.3.48.0/20'),
ip_network('181.3.64.0/20'),
ip_network('181.3.80.0/20'),
ip_network('181.3.96.0/20'),
ip_network('181.3.112.0/20'),
ip_network('181.3.128.0/20'),
ip_network('181.3.144.0/20'),
ip_network('181.3.160.0/20'),
ip_network('181.3.176.0/20'),
ip_network('181.3.192.0/20'),
ip_network('181.3.208.0/20'),
ip_network('181.3.224.0/20'),
ip_network('181.3.240.0/20'),
ip_network('181.4.0.0/20'),
ip_network('181.4.16.0/20'),
ip_network('181.4.32.0/20'),
ip_network('181.4.48.0/20'),
ip_network('181.4.64.0/20'),
ip_network('181.4.80.0/20'),
ip_network('181.4.96.0/20'),
ip_network('181.4.112.0/20'),
ip_network('181.4.128.0/20'),
ip_network('181.4.144.0/20'),
ip_network('181.4.160.0/20'),
ip_network('181.4.176.0/20'),
ip_network('181.4.192.0/20'),
ip_network('181.4.208.0/20'),
ip_network('181.4.224.0/20'),
ip_network('181.4.240.0/20'),
ip_network('181.5.0.0/20'),
ip_network('181.5.16.0/20'),
ip_network('181.5.32.0/20'),
ip_network('181.5.48.0/20'),
ip_network('181.5.64.0/20'),
ip_network('181.5.80.0/20'),
ip_network('181.5.96.0/20'),
ip_network('181.5.112.0/20'),
ip_network('181.5.128.0/20'),
ip_network('181.5.144.0/20'),
ip_network('181.5.160.0/20'),
ip_network('181.5.176.0/20'),
ip_network('181.5.192.0/20'),
ip_network('181.5.208.0/20'),
ip_network('181.5.224.0/20'),
ip_network('181.5.240.0/20'),
ip_network('181.6.0.0/20'),
ip_network('181.6.16.0/20'),
ip_network('181.6.32.0/20'),
ip_network('181.6.48.0/20'),
ip_network('181.6.64.0/20'),
ip_network('181.6.80.0/20'),
ip_network('181.6.96.0/20'),
ip_network('181.6.112.0/20'),
ip_network('181.6.128.0/20'),
ip_network('181.6.144.0/20'),
ip_network('181.6.160.0/20'),
ip_network('181.6.176.0/20'),
ip_network('181.6.192.0/20'),
ip_network('181.6.208.0/20'),
ip_network('181.6.224.0/20'),
ip_network('181.6.240.0/20'),
ip_network('181.7.0.0/20'),
ip_network('181.7.16.0/20'),
ip_network('181.7.32.0/20'),
ip_network('181.7.48.0/20'),
ip_network('181.7.64.0/20'),
ip_network('181.7.80.0/20'),
ip_network('181.7.96.0/20'),
ip_network('181.7.112.0/20'),
ip_network('181.7.128.0/20'),
ip_network('181.7.144.0/20'),
ip_network('181.7.160.0/20'),
ip_network('181.7.176.0/20'),
ip_network('181.7.192.0/20'),
ip_network('181.7.208.0/20'),
ip_network('181.7.224.0/20'),
ip_network('181.7.240.0/20'),
ip_network('181.8.0.0/18'),
ip_network('181.8.64.0/18'),
ip_network('181.8.128.0/17'),
ip_network('181.9.0.0/18'),
ip_network('181.9.64.0/18'),
ip_network('181.10.0.0/20'),
ip_network('181.10.16.0/20'),
ip_network('181.10.32.0/20'),
ip_network('181.10.48.0/20'),
ip_network('181.10.64.0/20'),
ip_network('181.10.80.0/20'),
ip_network('181.10.96.0/20'),
ip_network('181.10.112.0/20'),
ip_network('181.10.128.0/20'),
ip_network('181.10.144.0/20'),
ip_network('181.10.160.0/20'),
ip_network('181.10.176.0/20'),
ip_network('181.10.192.0/20'),
ip_network('181.10.208.0/20'),
ip_network('181.10.224.0/20'),
ip_network('181.10.240.0/20'),
ip_network('181.11.0.0/18'),
ip_network('181.12.0.0/20'),
ip_network('181.12.16.0/20'),
ip_network('181.12.32.0/20'),
ip_network('181.12.48.0/20'),
ip_network('181.12.64.0/20'),
ip_network('181.12.80.0/20'),
ip_network('181.12.96.0/20'),
ip_network('181.12.112.0/20'),
ip_network('181.12.128.0/20'),
ip_network('181.12.144.0/20'),
ip_network('181.12.160.0/20'),
ip_network('181.12.176.0/20'),
ip_network('181.12.192.0/20'),
ip_network('181.12.208.0/20'),
ip_network('181.12.224.0/20'),
ip_network('181.12.240.0/20'),
ip_network('181.13.0.0/20'),
ip_network('181.13.16.0/20'),
ip_network('181.13.32.0/20'),
ip_network('181.13.48.0/20'),
ip_network('181.13.64.0/20'),
ip_network('181.13.80.0/20'),
ip_network('181.13.96.0/20'),
ip_network('181.13.112.0/20'),
ip_network('181.13.128.0/20'),
ip_network('181.13.144.0/20'),
ip_network('181.13.160.0/20'),
ip_network('181.13.176.0/20'),
ip_network('181.13.192.0/20'),
ip_network('181.13.208.0/20'),
ip_network('181.13.224.0/20'),
ip_network('181.13.240.0/20'),
ip_network('181.14.0.0/20'),
ip_network('181.14.64.0/21'),
ip_network('181.14.72.0/21'),
ip_network('181.14.80.0/21'),
ip_network('181.14.88.0/21'),
ip_network('181.14.96.0/21'),
ip_network('181.14.104.0/21'),
ip_network('181.14.112.0/21'),
ip_network('181.14.120.0/21'),
ip_network('181.14.128.0/21'),
ip_network('181.14.136.0/21'),
ip_network('181.14.144.0/21'),
ip_network('181.14.152.0/21'),
ip_network('181.14.160.0/21'),
ip_network('181.14.168.0/21'),
ip_network('181.14.176.0/21'),
ip_network('181.14.184.0/21'),
ip_network('181.14.192.0/21'),
ip_network('181.14.200.0/21'),
ip_network('181.14.208.0/21'),
ip_network('181.14.216.0/21'),
ip_network('181.14.224.0/21'),
ip_network('181.14.232.0/21'),
ip_network('181.14.240.0/21'),
ip_network('181.14.248.0/21'),
ip_network('181.15.56.0/21'),
ip_network('181.15.64.0/20'),
ip_network('181.15.80.0/21'),
ip_network('181.15.88.0/21'),
ip_network('181.15.96.0/21'),
ip_network('181.15.104.0/21'),
ip_network('181.15.112.0/21'),
ip_network('181.15.120.0/21'),
ip_network('181.15.136.0/21'),
ip_network('181.15.144.0/21'),
ip_network('181.15.152.0/21'),
ip_network('181.15.160.0/21'),
ip_network('181.15.168.0/21'),
ip_network('181.15.176.0/21'),
ip_network('181.15.184.0/21'),
ip_network('181.15.192.0/21'),
ip_network('181.15.200.0/21'),
ip_network('181.15.208.0/21'),
ip_network('181.15.216.0/21'),
ip_network('181.15.224.0/21'),
ip_network('181.15.232.0/21'),
ip_network('181.15.240.0/21'),
ip_network('181.15.248.0/21'),
ip_network('181.16.129.0/24'),
ip_network('181.16.130.0/24'),
ip_network('181.16.131.0/24'),
ip_network('181.16.132.0/24'),
ip_network('181.16.151.0/24'),
ip_network('181.16.159.0/24'),
ip_network('181.16.162.0/24'),
ip_network('181.16.163.0/24'),
ip_network('181.16.164.0/24'),
ip_network('181.16.168.0/24'),
ip_network('181.16.171.0/24'),
ip_network('181.16.172.0/24'),
ip_network('181.16.173.0/24'),
ip_network('181.16.174.0/24'),
ip_network('181.16.175.0/24'),
ip_network('181.16.181.0/24'),
ip_network('181.16.190.0/24'),
ip_network('181.80.0.0/20'),
ip_network('181.80.16.0/20'),
ip_network('181.80.32.0/20'),
ip_network('181.80.48.0/20'),
ip_network('181.80.64.0/20'),
ip_network('181.80.80.0/20'),
ip_network('181.80.96.0/20'),
ip_network('181.80.112.0/20'),
ip_network('181.80.128.0/20'),
ip_network('181.80.144.0/20'),
ip_network('181.80.160.0/20'),
ip_network('181.80.176.0/20'),
ip_network('181.80.192.0/20'),
ip_network('181.80.208.0/20'),
ip_network('181.80.224.0/20'),
ip_network('181.80.240.0/20'),
ip_network('181.81.0.0/20'),
ip_network('181.81.16.0/20'),
ip_network('181.81.32.0/20'),
ip_network('181.81.48.0/20'),
ip_network('181.81.64.0/20'),
ip_network('181.81.80.0/20'),
ip_network('181.81.96.0/20'),
ip_network('181.81.112.0/20'),
ip_network('181.81.128.0/20'),
ip_network('181.81.144.0/20'),
ip_network('181.81.160.0/20'),
ip_network('181.81.176.0/20'),
ip_network('181.81.192.0/20'),
ip_network('181.81.208.0/20'),
ip_network('181.81.224.0/20'),
ip_network('181.81.240.0/20'),
ip_network('181.82.0.0/20'),
ip_network('181.82.16.0/20'),
ip_network('181.82.32.0/20'),
ip_network('181.82.48.0/20'),
ip_network('181.82.64.0/20'),
ip_network('181.82.80.0/20'),
ip_network('181.82.96.0/20'),
ip_network('181.82.112.0/20'),
ip_network('181.82.128.0/20'),
ip_network('181.82.144.0/20'),
ip_network('181.82.160.0/20'),
ip_network('181.82.176.0/20'),
ip_network('181.82.192.0/20'),
ip_network('181.82.208.0/20'),
ip_network('181.82.224.0/20'),
ip_network('181.82.240.0/20'),
ip_network('181.83.0.0/20'),
ip_network('181.83.16.0/20'),
ip_network('181.83.32.0/20'),
ip_network('181.83.48.0/20'),
ip_network('181.83.64.0/20'),
ip_network('181.83.80.0/20'),
ip_network('181.83.96.0/20'),
ip_network('181.83.112.0/20'),
ip_network('181.83.128.0/20'),
ip_network('181.83.144.0/20'),
ip_network('181.83.160.0/20'),
ip_network('181.83.176.0/20'),
ip_network('181.83.192.0/20'),
ip_network('181.83.208.0/20'),
ip_network('181.83.224.0/20'),
ip_network('181.83.240.0/20'),
ip_network('181.84.0.0/19'),
ip_network('181.84.32.0/19'),
ip_network('181.84.64.0/19'),
ip_network('181.84.96.0/19'),
ip_network('181.84.128.0/19'),
ip_network('181.84.160.0/19'),
ip_network('181.84.192.0/19'),
ip_network('181.84.224.0/19'),
ip_network('181.85.0.0/20'),
ip_network('181.85.16.0/20'),
ip_network('181.85.32.0/20'),
ip_network('181.85.48.0/20'),
ip_network('181.85.64.0/20'),
ip_network('181.85.80.0/20'),
ip_network('181.85.96.0/20'),
ip_network('181.85.112.0/20'),
ip_network('181.85.128.0/20'),
ip_network('181.85.144.0/20'),
ip_network('181.85.160.0/20'),
ip_network('181.85.176.0/20'),
ip_network('181.85.192.0/20'),
ip_network('181.85.208.0/20'),
ip_network('181.85.224.0/20'),
ip_network('181.85.240.0/20'),
ip_network('181.86.0.0/19'),
ip_network('181.86.32.0/19'),
ip_network('181.86.64.0/19'),
ip_network('181.86.96.0/19'),
ip_network('181.86.128.0/19'),
ip_network('181.86.160.0/19'),
ip_network('181.86.192.0/19'),
ip_network('181.86.224.0/19'),
ip_network('181.87.0.0/20'),
ip_network('181.87.16.0/20'),
ip_network('181.87.32.0/20'),
ip_network('181.87.48.0/20'),
ip_network('181.87.64.0/20'),
ip_network('181.87.80.0/20'),
ip_network('181.87.96.0/20'),
ip_network('181.87.112.0/20'),
ip_network('181.87.128.0/20'),
ip_network('181.87.144.0/20'),
ip_network('181.87.160.0/20'),
ip_network('181.87.176.0/20'),
ip_network('181.87.192.0/20'),
ip_network('181.87.208.0/20'),
ip_network('181.87.224.0/20'),
ip_network('181.87.240.0/20'),
ip_network('181.89.0.0/20'),
ip_network('181.89.16.0/20'),
ip_network('181.89.32.0/20'),
ip_network('181.89.48.0/20'),
ip_network('181.89.64.0/20'),
ip_network('181.89.80.0/20'),
ip_network('181.89.96.0/20'),
ip_network('181.89.112.0/20'),
ip_network('181.89.128.0/20'),
ip_network('181.89.144.0/20'),
ip_network('181.89.160.0/20'),
ip_network('181.89.176.0/20'),
ip_network('181.89.192.0/20'),
ip_network('181.89.208.0/20'),
ip_network('181.89.224.0/20'),
ip_network('181.89.240.0/20'),
ip_network('181.90.0.0/20'),
ip_network('181.90.16.0/20'),
ip_network('181.90.32.0/20'),
ip_network('181.90.48.0/20'),
ip_network('181.90.64.0/20'),
ip_network('181.90.80.0/20'),
ip_network('181.90.96.0/20'),
ip_network('181.90.112.0/20'),
ip_network('181.90.128.0/20'),
ip_network('181.90.144.0/20'),
ip_network('181.90.160.0/20'),
ip_network('181.90.176.0/20'),
ip_network('181.90.192.0/20'),
ip_network('181.90.208.0/20'),
ip_network('181.90.224.0/20'),
ip_network('181.90.240.0/20'),
ip_network('181.91.0.0/20'),
ip_network('181.91.16.0/20'),
ip_network('181.91.32.0/20'),
ip_network('181.91.48.0/20'),
ip_network('181.91.64.0/20'),
ip_network('181.91.80.0/20'),
ip_network('181.91.96.0/20'),
ip_network('181.91.112.0/20'),
ip_network('181.91.128.0/20'),
ip_network('181.91.144.0/20'),
ip_network('181.91.160.0/20'),
ip_network('181.91.176.0/20'),
ip_network('181.91.192.0/20'),
ip_network('181.91.208.0/20'),
ip_network('181.91.224.0/20'),
ip_network('181.91.240.0/20'),
ip_network('181.92.0.0/20'),
ip_network('181.92.16.0/20'),
ip_network('181.92.32.0/20'),
ip_network('181.92.48.0/20'),
ip_network('181.92.64.0/20'),
ip_network('181.92.80.0/20'),
ip_network('181.92.96.0/20'),
ip_network('181.92.112.0/20'),
ip_network('181.92.128.0/20'),
ip_network('181.92.144.0/20'),
ip_network('181.92.160.0/20'),
ip_network('181.92.176.0/20'),
ip_network('181.92.192.0/20'),
ip_network('181.92.208.0/20'),
ip_network('181.92.224.0/20'),
ip_network('181.92.240.0/20'),
ip_network('181.93.0.0/20'),
ip_network('181.93.16.0/20'),
ip_network('181.93.32.0/20'),
ip_network('181.93.48.0/20'),
ip_network('181.93.64.0/20'),
ip_network('181.93.80.0/20'),
ip_network('181.93.96.0/20'),
ip_network('181.93.112.0/20'),
ip_network('181.93.128.0/20'),
ip_network('181.93.144.0/20'),
ip_network('181.93.160.0/20'),
ip_network('181.93.176.0/20'),
ip_network('181.93.192.0/20'),
ip_network('181.93.208.0/20'),
ip_network('181.93.224.0/20'),
ip_network('181.93.240.0/20'),
ip_network('181.94.0.0/20'),
ip_network('181.94.16.0/20'),
ip_network('181.94.32.0/20'),
ip_network('181.94.48.0/20'),
ip_network('181.94.64.0/20'),
ip_network('181.94.80.0/20'),
ip_network('181.94.96.0/20'),
ip_network('181.94.112.0/20'),
ip_network('181.94.128.0/20'),
ip_network('181.94.144.0/20'),
ip_network('181.94.160.0/20'),
ip_network('181.94.176.0/20'),
ip_network('181.94.192.0/20'),
ip_network('181.94.208.0/20'),
ip_network('181.94.224.0/20'),
ip_network('181.94.240.0/20'),
ip_network('181.95.0.0/21'),
ip_network('181.95.8.0/21'),
ip_network('181.95.16.0/21'),
ip_network('181.95.24.0/21'),
ip_network('181.95.32.0/21'),
ip_network('181.95.40.0/21'),
ip_network('181.95.48.0/21'),
ip_network('181.95.56.0/21'),
ip_network('181.95.64.0/20'),
ip_network('181.95.80.0/20'),
ip_network('181.95.96.0/20'),
ip_network('181.95.112.0/20'),
ip_network('181.95.128.0/20'),
ip_network('181.95.144.0/20'),
ip_network('181.95.160.0/20'),
ip_network('181.95.176.0/21'),
ip_network('181.95.184.0/21'),
ip_network('181.95.192.0/21'),
ip_network('181.95.200.0/21'),
ip_network('181.95.208.0/21'),
ip_network('181.95.216.0/21'),
ip_network('181.95.224.0/21'),
ip_network('181.95.232.0/21'),
ip_network('181.95.240.0/21'),
ip_network('181.95.248.0/21'),
ip_network('181.96.0.0/20'),
ip_network('181.96.16.0/20'),
ip_network('181.96.32.0/20'),
ip_network('181.96.48.0/20'),
ip_network('181.96.64.0/20'),
ip_network('181.96.80.0/20'),
ip_network('181.96.96.0/20'),
ip_network('181.96.112.0/20'),
ip_network('181.96.128.0/20'),
ip_network('181.96.144.0/20'),
ip_network('181.96.160.0/20'),
ip_network('181.96.176.0/20'),
ip_network('181.96.192.0/20'),
ip_network('181.96.208.0/20'),
ip_network('181.96.224.0/20'),
ip_network('181.96.240.0/20'),
ip_network('181.97.0.0/20'),
ip_network('181.97.16.0/20'),
ip_network('181.97.32.0/20'),
ip_network('181.97.48.0/20'),
ip_network('181.97.64.0/20'),
ip_network('181.97.80.0/20'),
ip_network('181.97.96.0/20'),
ip_network('181.97.112.0/20'),
ip_network('181.97.128.0/20'),
ip_network('181.97.144.0/20'),
ip_network('181.97.160.0/20'),
ip_network('181.97.176.0/20'),
ip_network('181.97.192.0/20'),
ip_network('181.97.208.0/20'),
ip_network('181.97.224.0/20'),
ip_network('181.97.240.0/20'),
ip_network('181.98.0.0/20'),
ip_network('181.98.16.0/20'),
ip_network('181.98.32.0/20'),
ip_network('181.98.48.0/20'),
ip_network('181.98.64.0/20'),
ip_network('181.98.80.0/20'),
ip_network('181.98.96.0/20'),
ip_network('181.98.112.0/20'),
ip_network('181.98.128.0/20'),
ip_network('181.98.144.0/20'),
ip_network('181.98.160.0/20'),
ip_network('181.98.176.0/20'),
ip_network('181.98.192.0/20'),
ip_network('181.98.208.0/20'),
ip_network('181.98.224.0/20'),
ip_network('181.98.240.0/20'),
ip_network('181.99.0.0/20'),
ip_network('181.99.16.0/20'),
ip_network('181.99.32.0/20'),
ip_network('181.99.48.0/20'),
ip_network('181.99.64.0/20'),
ip_network('181.99.80.0/20'),
ip_network('181.99.96.0/20'),
ip_network('181.99.112.0/20'),
ip_network('181.99.128.0/20'),
ip_network('181.99.144.0/20'),
ip_network('181.99.160.0/20'),
ip_network('181.99.176.0/20'),
ip_network('181.99.192.0/20'),
ip_network('181.99.208.0/20'),
ip_network('181.99.224.0/20'),
ip_network('181.99.240.0/20'),
ip_network('181.100.0.0/20'),
ip_network('181.100.16.0/20'),
ip_network('181.100.32.0/20'),
ip_network('181.100.48.0/20'),
ip_network('181.100.64.0/20'),
ip_network('181.100.80.0/20'),
ip_network('181.100.96.0/20'),
ip_network('181.100.112.0/20'),
ip_network('181.100.128.0/20'),
ip_network('181.100.144.0/20'),
ip_network('181.100.160.0/20'),
ip_network('181.100.176.0/20'),
ip_network('181.100.192.0/20'),
ip_network('181.100.208.0/20'),
ip_network('181.100.224.0/20'),
ip_network('181.100.240.0/20'),
ip_network('181.101.0.0/20'),
ip_network('181.101.16.0/20'),
ip_network('181.101.32.0/20'),
ip_network('181.101.48.0/20'),
ip_network('181.101.64.0/20'),
ip_network('181.101.80.0/20'),
ip_network('181.101.96.0/20'),
ip_network('181.101.112.0/20'),
ip_network('181.101.128.0/20'),
ip_network('181.101.144.0/20'),
ip_network('181.101.160.0/20'),
ip_network('181.101.176.0/20'),
ip_network('181.101.192.0/20'),
ip_network('181.101.208.0/20'),
ip_network('181.101.224.0/20'),
ip_network('181.101.240.0/20'),
ip_network('181.102.0.0/20'),
ip_network('181.102.16.0/20'),
ip_network('181.102.32.0/20'),
ip_network('181.102.48.0/20'),
ip_network('181.102.64.0/20'),
ip_network('181.102.80.0/20'),
ip_network('181.102.96.0/20'),
ip_network('181.102.112.0/20'),
ip_network('181.102.128.0/20'),
ip_network('181.102.144.0/20'),
ip_network('181.102.160.0/20'),
ip_network('181.102.176.0/20'),
ip_network('181.102.192.0/20'),
ip_network('181.102.208.0/20'),
ip_network('181.102.224.0/20'),
ip_network('181.102.240.0/20'),
ip_network('181.103.0.0/20'),
ip_network('181.103.16.0/20'),
ip_network('181.103.32.0/20'),
ip_network('181.103.48.0/20'),
ip_network('181.103.64.0/20'),
ip_network('181.103.80.0/20'),
ip_network('181.103.96.0/20'),
ip_network('181.103.112.0/20'),
ip_network('181.103.128.0/20'),
ip_network('181.103.144.0/20'),
ip_network('181.103.160.0/20'),
ip_network('181.103.176.0/20'),
ip_network('181.103.192.0/20'),
ip_network('181.103.208.0/20'),
ip_network('181.103.224.0/20'),
ip_network('181.103.240.0/20'),
ip_network('181.104.0.0/20'),
ip_network('181.104.16.0/20'),
ip_network('181.104.32.0/20'),
ip_network('181.104.48.0/20'),
ip_network('181.104.64.0/20'),
ip_network('181.104.80.0/20'),
ip_network('181.104.96.0/20'),
ip_network('181.104.112.0/20'),
ip_network('181.104.128.0/20'),
ip_network('181.104.144.0/20'),
ip_network('181.104.160.0/20'),
ip_network('181.104.176.0/20'),
ip_network('181.104.192.0/20'),
ip_network('181.104.208.0/20'),
ip_network('181.104.224.0/20'),
ip_network('181.104.240.0/20'),
ip_network('181.105.0.0/20'),
ip_network('181.105.16.0/20'),
ip_network('181.105.32.0/20'),
ip_network('181.105.48.0/20'),
ip_network('181.105.64.0/20'),
ip_network('181.105.80.0/20'),
ip_network('181.105.96.0/20'),
ip_network('181.105.112.0/20'),
ip_network('181.105.128.0/20'),
ip_network('181.105.144.0/20'),
ip_network('181.105.160.0/20'),
ip_network('181.105.176.0/20'),
ip_network('181.105.192.0/20'),
ip_network('181.105.208.0/20'),
ip_network('181.105.224.0/20'),
ip_network('181.105.240.0/20'),
ip_network('181.106.0.0/20'),
ip_network('181.106.16.0/20'),
ip_network('181.106.32.0/20'),
ip_network('181.106.48.0/20'),
ip_network('181.106.64.0/20'),
ip_network('181.106.80.0/20'),
ip_network('181.106.96.0/20'),
ip_network('181.106.112.0/20'),
ip_network('181.106.128.0/20'),
ip_network('181.106.144.0/20'),
ip_network('181.106.160.0/20'),
ip_network('181.106.176.0/20'),
ip_network('181.106.192.0/20'),
ip_network('181.106.208.0/20'),
ip_network('181.106.224.0/20'),
ip_network('181.106.240.0/20'),
ip_network('181.107.0.0/20'),
ip_network('181.107.16.0/20'),
ip_network('181.107.32.0/20'),
ip_network('181.107.48.0/20'),
ip_network('181.107.64.0/20'),
ip_network('181.107.80.0/20'),
ip_network('181.107.96.0/20'),
ip_network('181.107.112.0/20'),
ip_network('181.107.128.0/20'),
ip_network('181.107.144.0/20'),
ip_network('181.107.160.0/20'),
ip_network('181.107.176.0/20'),
ip_network('181.107.192.0/20'),
ip_network('181.107.208.0/20'),
ip_network('181.107.224.0/20'),
ip_network('181.107.240.0/20'),
ip_network('181.108.0.0/20'),
ip_network('181.108.16.0/20'),
ip_network('181.108.32.0/20'),
ip_network('181.108.48.0/20'),
ip_network('181.108.64.0/20'),
ip_network('181.108.80.0/20'),
ip_network('181.108.96.0/20'),
ip_network('181.108.112.0/20'),
ip_network('181.108.128.0/20'),
ip_network('181.108.144.0/20'),
ip_network('181.108.160.0/20'),
ip_network('181.108.176.0/20'),
ip_network('181.108.192.0/20'),
ip_network('181.108.208.0/20'),
ip_network('181.108.224.0/20'),
ip_network('181.108.240.0/20'),
ip_network('181.109.0.0/20'),
ip_network('181.109.16.0/20'),
ip_network('181.109.32.0/20'),
ip_network('181.109.48.0/20'),
ip_network('181.109.64.0/20'),
ip_network('181.109.80.0/20'),
ip_network('181.109.96.0/20'),
ip_network('181.109.112.0/20'),
ip_network('181.109.128.0/20'),
ip_network('181.109.144.0/20'),
ip_network('181.109.160.0/20'),
ip_network('181.109.176.0/20'),
ip_network('181.109.192.0/20'),
ip_network('181.109.208.0/20'),
ip_network('181.109.224.0/20'),
ip_network('181.109.240.0/20'),
ip_network('181.110.0.0/21'),
ip_network('181.110.8.0/21'),
ip_network('181.110.16.0/21'),
ip_network('181.110.24.0/21'),
ip_network('181.110.32.0/21'),
ip_network('181.110.40.0/21'),
ip_network('181.110.48.0/21'),
ip_network('181.110.56.0/21'),
ip_network('181.110.64.0/21'),
ip_network('181.110.72.0/21'),
ip_network('181.110.80.0/21'),
ip_network('181.110.88.0/21'),
ip_network('181.110.96.0/21'),
ip_network('181.110.104.0/21'),
ip_network('181.110.112.0/21'),
ip_network('181.110.120.0/21'),
ip_network('181.110.128.0/21'),
ip_network('181.110.136.0/21'),
ip_network('181.110.144.0/21'),
ip_network('181.118.128.0/20'),
ip_network('181.177.0.0/24'),
ip_network('181.177.1.0/24'),
ip_network('181.177.2.0/24'),
ip_network('181.177.3.0/24'),
ip_network('181.177.4.0/24'),
ip_network('181.177.5.0/24'),
ip_network('181.177.6.0/24'),
ip_network('181.177.7.0/24'),
ip_network('181.177.8.0/24'),
ip_network('181.177.9.0/24'),
ip_network('181.177.10.0/24'),
ip_network('181.177.11.0/24'),
ip_network('181.177.12.0/24'),
ip_network('181.177.13.0/24'),
ip_network('181.177.14.0/24'),
ip_network('181.177.15.0/24'),
ip_network('181.177.16.0/24'),
ip_network('181.177.17.0/24'),
ip_network('181.177.18.0/24'),
ip_network('181.177.19.0/24'),
ip_network('181.177.20.0/24'),
ip_network('181.177.200.0/21'),
ip_network('186.108.64.0/21'),
ip_network('186.108.72.0/21'),
ip_network('186.108.80.0/21'),
ip_network('186.108.88.0/21'),
ip_network('186.108.96.0/21'),
ip_network('186.108.104.0/21'),
ip_network('186.108.112.0/21'),
ip_network('186.108.120.0/21'),
ip_network('186.108.144.0/21'),
ip_network('186.108.152.0/21'),
ip_network('186.108.160.0/21'),
ip_network('186.108.168.0/21'),
ip_network('186.108.176.0/21'),
ip_network('186.108.184.0/21'),
ip_network('186.108.192.0/21'),
ip_network('186.108.200.0/21'),
ip_network('186.108.208.0/21'),
ip_network('186.108.216.0/21'),
ip_network('186.108.224.0/21'),
ip_network('186.108.232.0/21'),
ip_network('186.108.240.0/21'),
ip_network('186.108.248.0/21'),
ip_network('186.109.0.0/21'),
ip_network('186.109.8.0/21'),
ip_network('186.109.16.0/21'),
ip_network('186.109.24.0/21'),
ip_network('186.109.32.0/21'),
ip_network('186.109.40.0/21'),
ip_network('186.109.48.0/21'),
ip_network('186.109.56.0/21'),
ip_network('186.109.64.0/21'),
ip_network('186.109.72.0/21'),
ip_network('186.109.80.0/21'),
ip_network('186.109.88.0/21'),
ip_network('186.109.96.0/21'),
ip_network('186.109.104.0/21'),
ip_network('186.109.112.0/21'),
ip_network('186.109.120.0/21'),
ip_network('186.109.128.0/21'),
ip_network('186.109.136.0/21'),
ip_network('186.109.144.0/21'),
ip_network('186.109.152.0/21'),
ip_network('186.109.160.0/21'),
ip_network('186.109.168.0/21'),
ip_network('186.109.176.0/21'),
ip_network('186.109.184.0/21'),
ip_network('186.109.192.0/21'),
ip_network('186.109.200.0/21'),
ip_network('186.109.208.0/21'),
ip_network('186.109.216.0/21'),
ip_network('186.109.224.0/21'),
ip_network('186.109.232.0/21'),
ip_network('186.109.240.0/21'),
ip_network('186.109.248.0/21'),
ip_network('186.110.0.0/21'),
ip_network('186.110.8.0/21'),
ip_network('186.110.16.0/21'),
ip_network('186.110.24.0/21'),
ip_network('186.110.32.0/20'),
ip_network('186.110.32.0/21'),
ip_network('186.110.40.0/21'),
ip_network('186.110.48.0/20'),
ip_network('186.110.48.0/21'),
ip_network('186.110.56.0/21'),
ip_network('186.110.64.0/21'),
ip_network('186.110.72.0/21'),
ip_network('186.110.80.0/21'),
ip_network('186.110.88.0/21'),
ip_network('186.110.96.0/21'),
ip_network('186.110.104.0/21'),
ip_network('186.110.112.0/21'),
ip_network('186.110.120.0/21'),
ip_network('186.110.128.0/21'),
ip_network('186.110.136.0/21'),
ip_network('186.110.144.0/21'),
ip_network('186.110.152.0/21'),
ip_network('186.110.160.0/21'),
ip_network('186.110.168.0/21'),
ip_network('186.110.176.0/21'),
ip_network('186.110.184.0/21'),
ip_network('186.110.192.0/21'),
ip_network('186.110.200.0/21'),
ip_network('186.110.208.0/21'),
ip_network('186.110.216.0/21'),
ip_network('186.110.224.0/21'),
ip_network('186.110.232.0/21'),
ip_network('186.110.240.0/21'),
ip_network('186.110.248.0/21'),
ip_network('186.111.0.0/20'),
ip_network('186.111.16.0/20'),
ip_network('186.111.32.0/20'),
ip_network('186.111.48.0/20'),
ip_network('186.111.64.0/20'),
ip_network('186.111.80.0/20'),
ip_network('186.111.96.0/20'),
ip_network('186.111.112.0/20'),
ip_network('186.111.128.0/20'),
ip_network('186.111.144.0/20'),
ip_network('186.111.160.0/20'),
ip_network('186.111.176.0/20'),
ip_network('186.111.192.0/20'),
ip_network('186.111.208.0/20'),
ip_network('186.111.224.0/20'),
ip_network('186.111.240.0/20'),
ip_network('186.121.179.0/24'),
ip_network('186.124.0.0/21'),
ip_network('186.124.8.0/21'),
ip_network('186.124.12.0/22'),
ip_network('186.124.16.0/21'),
ip_network('186.124.24.0/21'),
ip_network('186.124.32.0/21'),
ip_network('186.124.40.0/21'),
ip_network('186.124.48.0/21'),
ip_network('186.124.56.0/21'),
ip_network('186.124.112.0/21'),
ip_network('186.124.120.0/21'),
ip_network('186.124.128.0/21'),
ip_network('186.124.136.0/21'),
ip_network('186.124.144.0/21'),
ip_network('186.124.152.0/21'),
ip_network('186.124.160.0/21'),
ip_network('186.124.168.0/21'),
ip_network('186.124.176.0/21'),
ip_network('186.124.184.0/21'),
ip_network('186.124.192.0/21'),
ip_network('186.124.200.0/21'),
ip_network('186.124.208.0/21'),
ip_network('186.124.216.0/21'),
ip_network('186.124.224.0/21'),
ip_network('186.124.232.0/21'),
ip_network('186.124.240.0/21'),
ip_network('186.124.248.0/21'),
ip_network('186.125.0.0/21'),
ip_network('186.125.8.0/21'),
ip_network('186.125.16.0/21'),
ip_network('186.125.24.0/21'),
ip_network('186.125.32.0/21'),
ip_network('186.125.40.0/21'),
ip_network('186.125.48.0/21'),
ip_network('186.125.56.0/21'),
ip_network('186.125.64.0/21'),
ip_network('186.125.72.0/21'),
ip_network('186.125.80.0/21'),
ip_network('186.125.88.0/21'),
ip_network('186.125.96.0/21'),
ip_network('186.125.104.0/21'),
ip_network('186.125.112.0/21'),
ip_network('186.125.120.0/21'),
ip_network('186.125.128.0/21'),
ip_network('186.125.136.0/21'),
ip_network('186.125.144.0/21'),
ip_network('186.125.152.0/21'),
ip_network('186.125.160.0/21'),
ip_network('186.125.168.0/21'),
ip_network('186.125.176.0/21'),
ip_network('186.125.184.0/21'),
ip_network('186.125.187.0/24'),
ip_network('186.125.188.0/24'),
ip_network('186.125.189.0/24'),
ip_network('186.125.192.0/21'),
ip_network('186.125.195.0/24'),
ip_network('186.125.200.0/21'),
ip_network('186.125.208.0/21'),
ip_network('186.125.211.0/24'),
ip_network('186.125.216.0/21'),
ip_network('186.125.224.0/21'),
ip_network('186.125.232.0/21'),
ip_network('186.125.240.0/21'),
ip_network('186.125.240.0/24'),
ip_network('186.125.241.0/24'),
ip_network('186.125.242.0/24'),
ip_network('186.125.248.0/21'),
ip_network('186.126.0.0/20'),
ip_network('186.126.16.0/20'),
ip_network('186.126.32.0/20'),
ip_network('186.126.48.0/20'),
ip_network('186.126.64.0/20'),
ip_network('186.126.80.0/20'),
ip_network('186.126.96.0/20'),
ip_network('186.126.112.0/20'),
ip_network('186.126.128.0/20'),
ip_network('186.126.144.0/20'),
ip_network('186.126.160.0/20'),
ip_network('186.126.176.0/20'),
ip_network('186.126.192.0/20'),
ip_network('186.126.208.0/20'),
ip_network('186.126.224.0/20'),
ip_network('186.126.240.0/20'),
ip_network('186.127.0.0/20'),
ip_network('186.127.16.0/20'),
ip_network('186.127.32.0/20'),
ip_network('186.127.48.0/20'),
ip_network('186.127.64.0/20'),
ip_network('186.127.80.0/20'),
ip_network('186.127.96.0/20'),
ip_network('186.127.112.0/20'),
ip_network('186.127.128.0/20'),
ip_network('186.127.144.0/20'),
ip_network('186.127.160.0/20'),
ip_network('186.127.176.0/20'),
ip_network('186.127.192.0/20'),
ip_network('186.127.208.0/20'),
ip_network('186.127.224.0/20'),
ip_network('186.127.240.0/20'),
ip_network('186.152.0.0/20'),
ip_network('186.152.16.0/20'),
ip_network('186.152.32.0/20'),
ip_network('186.152.48.0/20'),
ip_network('186.152.64.0/20'),
ip_network('186.152.80.0/20'),
ip_network('186.152.96.0/20'),
ip_network('186.152.112.0/20'),
ip_network('186.152.128.0/20'),
ip_network('186.152.144.0/20'),
ip_network('186.152.160.0/20'),
ip_network('186.152.176.0/20'),
ip_network('186.152.192.0/20'),
ip_network('186.152.208.0/20'),
ip_network('186.152.224.0/20'),
ip_network('186.152.240.0/20'),
ip_network('186.153.0.0/21'),
ip_network('186.153.7.0/24'),
ip_network('186.153.8.0/21'),
ip_network('186.153.16.0/21'),
ip_network('186.153.32.0/21'),
ip_network('186.153.40.0/21'),
ip_network('186.153.48.0/21'),
ip_network('186.153.64.0/21'),
ip_network('186.153.72.0/21'),
ip_network('186.153.80.0/21'),
ip_network('186.153.88.0/21'),
ip_network('186.153.96.0/21'),
ip_network('186.153.104.0/21'),
ip_network('186.153.112.0/21'),
ip_network('186.153.120.0/21'),
ip_network('186.153.128.0/21'),
ip_network('186.153.136.0/21'),
ip_network('186.153.144.0/21'),
ip_network('186.153.160.0/21'),
ip_network('186.153.168.0/21'),
ip_network('186.153.176.0/21'),
ip_network('186.153.184.0/21'),
ip_network('186.153.192.0/21'),
ip_network('186.153.200.0/21'),
ip_network('186.153.208.0/21'),
ip_network('186.153.216.0/21'),
ip_network('186.153.224.0/21'),
ip_network('186.153.232.0/21'),
ip_network('186.153.240.0/21'),
ip_network('186.153.248.0/21'),
ip_network('186.159.124.0/23'),
ip_network('186.159.126.0/23'),
ip_network('186.190.128.0/24'),
ip_network('186.190.129.0/24'),
ip_network('186.190.130.0/24'),
ip_network('186.190.131.0/24'),
ip_network('186.190.132.0/24'),
ip_network('186.190.133.0/24'),
ip_network('186.190.134.0/24'),
ip_network('186.190.135.0/24'),
ip_network('186.190.136.0/24'),
ip_network('186.190.137.0/24'),
ip_network('186.190.138.0/24'),
ip_network('186.190.139.0/24'),
ip_network('186.190.140.0/24'),
ip_network('186.190.141.0/24'),
ip_network('186.190.142.0/24'),
ip_network('186.190.143.0/24'),
ip_network('186.190.144.0/24'),
ip_network('186.190.145.0/24'),
ip_network('186.190.146.0/24'),
ip_network('186.190.147.0/24'),
ip_network('186.190.148.0/24'),
ip_network('186.190.149.0/24'),
ip_network('186.190.150.0/24'),
ip_network('186.190.151.0/24'),
ip_network('186.190.152.0/24'),
ip_network('186.190.153.0/24'),
ip_network('186.190.154.0/24'),
ip_network('186.190.155.0/24'),
ip_network('186.190.156.0/24'),
ip_network('186.190.157.0/24'),
ip_network('186.190.158.0/24'),
ip_network('186.190.159.0/24'),
ip_network('190.8.190.0/24'),
ip_network('190.8.191.0/24'),
ip_network('190.14.170.0/24'),
ip_network('190.14.171.0/24'),
ip_network('190.30.0.0/21'),
ip_network('190.30.8.0/21'),
ip_network('190.30.16.0/21'),
ip_network('190.30.24.0/21'),
ip_network('190.30.32.0/21'),
ip_network('190.30.40.0/21'),
ip_network('190.30.48.0/21'),
ip_network('190.30.56.0/21'),
ip_network('190.30.64.0/21'),
ip_network('190.30.72.0/21'),
ip_network('190.30.80.0/21'),
ip_network('190.30.88.0/21'),
ip_network('190.30.96.0/21'),
ip_network('190.30.104.0/21'),
ip_network('190.30.112.0/21'),
ip_network('190.30.120.0/21'),
ip_network('190.30.128.0/21'),
ip_network('190.30.136.0/21'),
ip_network('190.30.144.0/21'),
ip_network('190.30.152.0/21'),
ip_network('190.30.160.0/21'),
ip_network('190.30.168.0/21'),
ip_network('190.30.176.0/21'),
ip_network('190.30.184.0/21'),
ip_network('190.30.192.0/21'),
ip_network('190.30.200.0/21'),
ip_network('190.30.208.0/21'),
ip_network('190.30.216.0/21'),
ip_network('190.30.224.0/21'),
ip_network('190.30.232.0/21'),
ip_network('190.30.238.0/24'),
ip_network('190.30.240.0/21'),
ip_network('190.30.248.0/21'),
ip_network('190.31.0.0/16'),
ip_network('190.31.0.0/21'),
ip_network('190.31.8.0/21'),
ip_network('190.31.16.0/21'),
ip_network('190.31.24.0/21'),
ip_network('190.31.32.0/21'),
ip_network('190.31.40.0/21'),
ip_network('190.31.48.0/21'),
ip_network('190.31.56.0/21'),
ip_network('190.31.64.0/21'),
ip_network('190.31.72.0/21'),
ip_network('190.31.80.0/21'),
ip_network('190.31.96.0/21'),
ip_network('190.31.104.0/21'),
ip_network('190.31.128.0/21'),
ip_network('190.31.136.0/21'),
ip_network('190.31.144.0/21'),
ip_network('190.31.152.0/21'),
ip_network('190.31.160.0/21'),
ip_network('190.31.184.0/21'),
ip_network('190.31.192.0/21'),
ip_network('190.31.200.0/21'),
ip_network('190.31.208.0/21'),
ip_network('190.31.216.0/21'),
ip_network('190.31.224.0/21'),
ip_network('190.31.232.0/21'),
ip_network('190.31.240.0/21'),
ip_network('190.31.248.0/21'),
ip_network('190.103.176.0/22'),
ip_network('190.103.231.0/24'),
ip_network('190.106.80.0/24'),
ip_network('190.106.81.0/24'),
ip_network('190.106.82.0/24'),
ip_network('190.106.83.0/24'),
ip_network('190.106.84.0/24'),
ip_network('190.106.85.0/24'),
ip_network('190.108.79.0/24'),
ip_network('190.114.140.0/24'),
ip_network('190.114.141.0/24'),
ip_network('190.114.142.0/24'),
ip_network('190.114.191.0/24'),
ip_network('190.123.241.0/24'),
ip_network('190.123.242.0/24'),
ip_network('190.123.243.0/24'),
ip_network('190.123.244.0/24'),
ip_network('190.123.245.0/24'),
ip_network('190.123.246.0/24'),
ip_network('190.124.252.0/22'),
ip_network('190.136.0.0/16'),
ip_network('190.136.8.0/21'),
ip_network('190.136.16.0/21'),
ip_network('190.136.18.0/24'),
ip_network('190.136.32.0/21'),
ip_network('190.136.40.0/21'),
ip_network('190.136.42.0/24'),
ip_network('190.136.43.0/24'),
ip_network('190.136.54.0/24'),
ip_network('190.136.56.0/21'),
ip_network('190.136.64.0/21'),
ip_network('190.136.72.0/21'),
ip_network('190.136.80.0/21'),
ip_network('190.136.88.0/21'),
ip_network('190.136.96.0/21'),
ip_network('190.136.104.0/21'),
ip_network('190.136.112.0/21'),
ip_network('190.136.120.0/21'),
ip_network('190.136.128.0/21'),
ip_network('190.136.143.0/24'),
ip_network('190.136.152.0/21'),
ip_network('190.136.160.0/21'),
ip_network('190.136.168.0/21'),
ip_network('190.136.176.0/21'),
ip_network('190.136.184.0/21'),
ip_network('190.136.192.0/21'),
ip_network('190.136.200.0/21'),
ip_network('190.136.208.0/21'),
ip_network('190.136.216.0/21'),
ip_network('190.136.224.0/21'),
ip_network('190.136.232.0/21'),
ip_network('190.136.240.0/21'),
ip_network('190.136.248.0/21'),
ip_network('190.137.0.0/21'),
ip_network('190.137.8.0/21'),
ip_network('190.137.16.0/21'),
ip_network('190.137.24.0/21'),
ip_network('190.137.32.0/21'),
ip_network('190.137.40.0/21'),
ip_network('190.137.48.0/21'),
ip_network('190.137.56.0/21'),
ip_network('190.137.64.0/21'),
ip_network('190.137.72.0/21'),
ip_network('190.137.80.0/21'),
ip_network('190.137.88.0/21'),
ip_network('190.137.96.0/21'),
ip_network('190.137.104.0/21'),
ip_network('190.137.112.0/21'),
ip_network('190.137.120.0/21'),
ip_network('190.137.128.0/21'),
ip_network('190.137.136.0/21'),
ip_network('190.137.144.0/21'),
ip_network('190.137.152.0/21'),
ip_network('190.137.157.0/24'),
ip_network('190.137.160.0/21'),
ip_network('190.137.162.0/24'),
ip_network('190.137.163.0/24'),
ip_network('190.137.168.0/21'),
ip_network('190.137.176.0/21'),
ip_network('190.137.184.0/21'),
ip_network('190.137.192.0/21'),
ip_network('190.137.200.0/21'),
ip_network('190.137.208.0/21'),
ip_network('190.137.216.0/21'),
ip_network('190.137.224.0/21'),
ip_network('190.137.232.0/21'),
ip_network('190.137.240.0/21'),
ip_network('190.137.248.0/21'),
ip_network('190.138.0.0/21'),
ip_network('190.138.8.0/21'),
ip_network('190.138.16.0/21'),
ip_network('190.138.24.0/21'),
ip_network('190.138.32.0/21'),
ip_network('190.138.40.0/21'),
ip_network('190.138.48.0/21'),
ip_network('190.138.56.0/21'),
ip_network('190.138.64.0/21'),
ip_network('190.138.72.0/21'),
ip_network('190.138.80.0/21'),
ip_network('190.138.88.0/21'),
ip_network('190.138.96.0/21'),
ip_network('190.138.104.0/21'),
ip_network('190.138.112.0/21'),
ip_network('190.138.120.0/21'),
ip_network('190.138.128.0/21'),
ip_network('190.138.136.0/21'),
ip_network('190.138.144.0/21'),
ip_network('190.138.152.0/21'),
ip_network('190.138.160.0/21'),
ip_network('190.138.168.0/21'),
ip_network('190.138.176.0/21'),
ip_network('190.138.184.0/21'),
ip_network('190.138.192.0/21'),
ip_network('190.138.200.0/21'),
ip_network('190.138.208.0/21'),
ip_network('190.138.216.0/21'),
ip_network('190.138.224.0/21'),
ip_network('190.138.232.0/21'),
ip_network('190.138.240.0/23'),
ip_network('190.138.248.0/21'),
ip_network('190.139.0.0/20'),
ip_network('190.139.16.0/21'),
ip_network('190.139.24.0/21'),
ip_network('190.139.32.0/21'),
ip_network('190.139.40.0/21'),
ip_network('190.139.48.0/21'),
ip_network('190.139.56.0/21'),
ip_network('190.139.64.0/21'),
ip_network('190.139.72.0/21'),
ip_network('190.139.80.0/21'),
ip_network('190.139.88.0/21'),
ip_network('190.139.96.0/21'),
ip_network('190.139.104.0/21'),
ip_network('190.139.112.0/21'),
ip_network('190.139.120.0/21'),
ip_network('190.139.128.0/21'),
ip_network('190.139.136.0/21'),
ip_network('190.139.144.0/21'),
ip_network('190.139.152.0/21'),
ip_network('190.139.160.0/21'),
ip_network('190.139.168.0/21'),
ip_network('190.139.176.0/21'),
ip_network('190.139.184.0/21'),
ip_network('190.139.192.0/21'),
ip_network('190.139.200.0/21'),
ip_network('190.139.208.0/21'),
ip_network('190.139.216.0/21'),
ip_network('190.139.224.0/21'),
ip_network('190.139.232.0/21'),
ip_network('190.139.240.0/21'),
ip_network('190.139.248.0/21'),
ip_network('190.182.252.0/22'),
ip_network('190.185.192.0/23'),
ip_network('190.185.227.0/24'),
ip_network('190.224.16.0/21'),
ip_network('190.224.32.0/21'),
ip_network('190.224.40.0/21'),
ip_network('190.224.46.0/24'),
ip_network('190.224.48.0/21'),
ip_network('190.224.56.0/21'),
ip_network('190.224.64.0/21'),
ip_network('190.224.72.0/21'),
ip_network('190.224.80.0/21'),
ip_network('190.224.88.0/21'),
ip_network('190.224.96.0/21'),
ip_network('190.224.104.0/21'),
ip_network('190.224.112.0/21'),
ip_network('190.224.120.0/21'),
ip_network('190.224.128.0/21'),
ip_network('190.224.136.0/21'),
ip_network('190.224.144.0/21'),
ip_network('190.224.152.0/21'),
ip_network('190.224.160.0/22'),
ip_network('190.224.168.0/21'),
ip_network('190.224.184.0/21'),
ip_network('190.224.196.0/22'),
ip_network('190.224.200.0/21'),
ip_network('190.224.208.0/21'),
ip_network('190.224.216.0/21'),
ip_network('190.224.224.0/21'),
ip_network('190.224.232.0/21'),
ip_network('190.224.240.0/21'),
ip_network('190.224.248.0/21'),
ip_network('190.225.0.0/21'),
ip_network('190.225.8.0/21'),
ip_network('190.225.16.0/21'),
ip_network('190.225.24.0/21'),
ip_network('190.225.32.0/21'),
ip_network('190.225.40.0/21'),
ip_network('190.225.48.0/21'),
ip_network('190.225.56.0/21'),
ip_network('190.225.64.0/21'),
ip_network('190.225.72.0/21'),
ip_network('190.225.80.0/21'),
ip_network('190.225.88.0/21'),
ip_network('190.225.96.0/21'),
ip_network('190.225.104.0/21'),
ip_network('190.225.112.0/21'),
ip_network('190.225.120.0/21'),
ip_network('190.225.128.0/21'),
ip_network('190.225.136.0/21'),
ip_network('190.225.144.0/21'),
ip_network('190.225.152.0/21'),
ip_network('190.225.160.0/21'),
ip_network('190.225.168.0/21'),
ip_network('190.225.176.0/21'),
ip_network('190.225.184.0/21'),
ip_network('190.225.192.0/21'),
ip_network('190.225.200.0/21'),
ip_network('190.225.208.0/21'),
ip_network('190.225.216.0/21'),
ip_network('190.225.224.0/21'),
ip_network('190.225.232.0/21'),
ip_network('190.225.240.0/21'),
ip_network('190.226.0.0/21'),
ip_network('190.226.8.0/21'),
ip_network('190.226.16.0/21'),
ip_network('190.226.24.0/21'),
ip_network('190.226.32.0/21'),
ip_network('190.226.40.0/21'),
ip_network('190.226.48.0/21'),
ip_network('190.226.56.0/21'),
ip_network('190.226.64.0/20'),
ip_network('190.226.80.0/20'),
ip_network('190.226.96.0/21'),
ip_network('190.226.104.0/21'),
ip_network('190.226.112.0/21'),
ip_network('190.226.120.0/21'),
ip_network('190.226.128.0/21'),
ip_network('190.226.136.0/21'),
ip_network('190.226.144.0/21'),
ip_network('190.226.152.0/21'),
ip_network('190.226.176.0/21'),
ip_network('190.226.184.0/21'),
ip_network('190.226.192.0/21'),
ip_network('190.226.200.0/21'),
ip_network('190.226.208.0/20'),
ip_network('190.226.224.0/21'),
ip_network('190.226.232.0/21'),
ip_network('190.226.240.0/21'),
ip_network('190.226.248.0/21'),
ip_network('190.227.8.0/21'),
ip_network('190.227.16.0/21'),
ip_network('190.227.24.0/21'),
ip_network('190.227.32.0/21'),
ip_network('190.227.40.0/21'),
ip_network('190.227.48.0/21'),
ip_network('190.227.56.0/21'),
ip_network('190.227.64.0/18'),
ip_network('190.227.128.0/21'),
ip_network('190.227.136.0/21'),
ip_network('190.227.144.0/21'),
ip_network('190.227.152.0/21'),
ip_network('190.227.160.0/21'),
ip_network('190.227.168.0/21'),
ip_network('190.227.176.0/21'),
ip_network('190.227.184.0/21'),
ip_network('190.227.208.0/21'),
ip_network('190.227.248.0/21'),
ip_network('190.228.0.0/21'),
ip_network('190.228.8.0/21'),
ip_network('190.228.16.0/21'),
ip_network('190.228.24.0/21'),
ip_network('190.228.32.0/21'),
ip_network('190.228.40.0/21'),
ip_network('190.228.48.0/21'),
ip_network('190.228.64.0/21'),
ip_network('190.228.72.0/21'),
ip_network('190.228.80.0/21'),
ip_network('190.228.84.0/24'),
ip_network('190.228.88.0/21'),
ip_network('190.228.96.0/21'),
ip_network('190.228.99.0/24'),
ip_network('190.228.104.0/21'),
ip_network('190.228.112.0/21'),
ip_network('190.228.120.0/21'),
ip_network('190.228.128.0/21'),
ip_network('190.228.136.0/21'),
ip_network('190.228.144.0/21'),
ip_network('190.228.152.0/21'),
ip_network('190.228.160.0/21'),
ip_network('190.228.162.0/24'),
ip_network('190.228.165.0/24'),
ip_network('190.228.168.0/21'),
ip_network('190.228.200.0/21'),
ip_network('190.228.208.0/20'),
ip_network('190.228.216.0/21'),
ip_network('190.228.224.0/20'),
ip_network('190.228.232.0/21'),
ip_network('190.228.240.0/20'),
ip_network('190.228.248.0/21'),
ip_network('190.229.0.0/20'),
ip_network('190.229.16.0/21'),
ip_network('190.229.24.0/21'),
ip_network('190.229.32.0/21'),
ip_network('190.229.40.0/21'),
ip_network('190.229.56.0/21'),
ip_network('190.229.64.0/20'),
ip_network('190.229.80.0/20'),
ip_network('190.229.96.0/20'),
ip_network('190.229.112.0/20'),
ip_network('190.229.128.0/20'),
ip_network('190.229.144.0/20'),
ip_network('190.229.160.0/20'),
ip_network('190.229.176.0/20'),
ip_network('190.229.192.0/21'),
ip_network('190.229.200.0/21'),
ip_network('190.229.208.0/21'),
ip_network('190.229.216.0/21'),
ip_network('190.229.224.0/21'),
ip_network('190.229.232.0/21'),
ip_network('190.229.240.0/21'),
ip_network('190.229.248.0/21'),
ip_network('190.230.0.0/20'),
ip_network('190.230.16.0/21'),
ip_network('190.230.24.0/21'),
ip_network('190.230.32.0/20'),
ip_network('190.230.48.0/20'),
ip_network('190.230.64.0/20'),
ip_network('190.230.80.0/20'),
ip_network('190.230.96.0/21'),
ip_network('190.230.104.0/21'),
ip_network('190.230.112.0/21'),
ip_network('190.230.120.0/21'),
ip_network('190.230.128.0/21'),
ip_network('190.230.136.0/21'),
ip_network('190.230.144.0/20'),
ip_network('190.230.160.0/20'),
ip_network('190.230.176.0/21'),
ip_network('190.230.184.0/21'),
ip_network('190.230.192.0/20'),
ip_network('190.230.192.0/21'),
ip_network('190.230.208.0/20'),
ip_network('190.230.224.0/20'),
ip_network('190.230.240.0/21'),
ip_network('190.230.248.0/21'),
ip_network('190.231.8.0/21'),
ip_network('190.231.16.0/21'),
ip_network('190.231.24.0/21'),
ip_network('190.231.32.0/20'),
ip_network('190.231.48.0/20'),
ip_network('190.231.64.0/20'),
ip_network('190.231.80.0/21'),
ip_network('190.231.88.0/21'),
ip_network('190.231.96.0/21'),
ip_network('190.231.104.0/21'),
ip_network('190.231.112.0/21'),
ip_network('190.231.120.0/21'),
ip_network('190.231.128.0/21'),
ip_network('190.231.136.0/21'),
ip_network('190.231.144.0/21'),
ip_network('190.231.152.0/21'),
ip_network('190.231.160.0/21'),
ip_network('190.231.168.0/21'),
ip_network('190.231.176.0/21'),
ip_network('190.231.184.0/21'),
ip_network('190.231.192.0/21'),
ip_network('190.231.200.0/21'),
ip_network('190.231.208.0/21'),
ip_network('190.231.216.0/21'),
ip_network('190.231.224.0/21'),
ip_network('190.231.232.0/21'),
ip_network('190.231.240.0/21'),
ip_network('190.231.248.0/21'),
ip_network('200.3.48.0/24'),
ip_network('200.3.76.0/24'),
ip_network('200.3.78.0/24'),
ip_network('200.3.84.0/24'),
ip_network('200.3.90.0/23'),
ip_network('200.3.94.0/23'),
ip_network('200.10.201.0/24'),
ip_network('200.43.0.0/21'),
ip_network('200.43.8.0/21'),
ip_network('200.43.16.0/20'),
ip_network('200.43.32.0/20'),
ip_network('200.43.48.0/20'),
ip_network('200.43.64.0/21'),
ip_network('200.43.72.0/21'),
ip_network('200.43.80.0/21'),
ip_network('200.43.88.0/21'),
ip_network('200.43.96.0/21'),
ip_network('200.43.104.0/21'),
ip_network('200.43.112.0/21'),
ip_network('200.43.120.0/21'),
ip_network('200.43.128.0/21'),
ip_network('200.43.128.0/24'),
ip_network('200.43.129.0/24'),
ip_network('200.43.136.0/21'),
ip_network('200.43.144.0/21'),
ip_network('200.43.152.0/21'),
ip_network('200.43.160.0/21'),
ip_network('200.43.168.0/21'),
ip_network('200.43.176.0/21'),
ip_network('200.43.184.0/21'),
ip_network('200.43.192.0/21'),
ip_network('200.43.200.0/21'),
ip_network('200.43.208.0/21'),
ip_network('200.43.216.0/21'),
ip_network('200.43.224.0/21'),
ip_network('200.43.232.0/21'),
ip_network('200.43.240.0/21'),
ip_network('200.43.248.0/21'),
ip_network('200.45.0.0/21'),
ip_network('200.45.8.0/21'),
ip_network('200.45.16.0/21'),
ip_network('200.45.24.0/21'),
ip_network('200.45.32.0/21'),
ip_network('200.45.40.0/21'),
ip_network('200.45.48.0/21'),
ip_network('200.45.56.0/21'),
ip_network('200.45.64.0/21'),
ip_network('200.45.72.0/21'),
ip_network('200.45.80.0/21'),
ip_network('200.45.88.0/21'),
ip_network('200.45.96.0/21'),
ip_network('200.45.104.0/21'),
ip_network('200.45.112.0/21'),
ip_network('200.45.112.0/24'),
ip_network('200.45.116.0/22'),
ip_network('200.45.119.0/24'),
ip_network('200.45.120.0/21'),
ip_network('200.45.128.0/21'),
ip_network('200.45.136.0/21'),
ip_network('200.45.144.0/21'),
ip_network('200.45.152.0/21'),
ip_network('200.45.160.0/21'),
ip_network('200.45.168.0/21'),
ip_network('200.45.176.0/21'),
ip_network('200.45.184.0/21'),
ip_network('200.45.192.0/21'),
ip_network('200.45.200.0/21'),
ip_network('200.45.208.0/21'),
ip_network('200.45.216.0/21'),
ip_network('200.45.224.0/21'),
ip_network('200.45.232.0/21'),
ip_network('200.45.240.0/21'),
ip_network('200.45.248.0/21'),
ip_network('200.61.208.0/20'),
ip_network('200.63.92.0/24'),
ip_network('200.71.224.0/21'),
ip_network('200.71.232.0/21'),
ip_network('200.82.0.0/21'),
ip_network('200.82.8.0/21'),
ip_network('200.82.16.0/21'),
ip_network('200.82.24.0/21'),
ip_network('200.82.32.0/21'),
ip_network('200.82.40.0/21'),
ip_network('200.82.48.0/21'),
ip_network('200.82.56.0/21'),
ip_network('200.82.64.0/21'),
ip_network('200.82.72.0/21'),
ip_network('200.82.80.0/21'),
ip_network('200.82.88.0/21'),
ip_network('200.82.96.0/21'),
ip_network('200.82.104.0/21'),
ip_network('200.82.112.0/21'),
ip_network('200.82.120.0/21'),
ip_network('200.91.56.0/24'),
ip_network('200.91.57.0/24'),
ip_network('200.91.58.0/24'),
ip_network('200.91.59.0/24'),
ip_network('200.115.27.0/24'),
ip_network('200.117.0.0/21'),
ip_network('200.117.8.0/21'),
ip_network('200.117.16.0/21'),
ip_network('200.117.24.0/21'),
ip_network('200.117.32.0/21'),
ip_network('200.117.40.0/21'),
ip_network('200.117.48.0/21'),
ip_network('200.117.56.0/21'),
ip_network('200.117.80.0/21'),
ip_network('200.117.88.0/21'),
ip_network('200.117.96.0/21'),
ip_network('200.117.104.0/21'),
ip_network('200.117.112.0/21'),
ip_network('200.117.120.0/22'),
ip_network('200.117.128.0/20'),
ip_network('200.117.144.0/20'),
ip_network('200.117.160.0/21'),
ip_network('200.117.168.0/21'),
ip_network('200.117.176.0/21'),
ip_network('200.117.184.0/21'),
ip_network('200.117.192.0/20'),
ip_network('200.117.208.0/21'),
ip_network('200.117.216.0/21'),
ip_network('200.117.224.0/21'),
ip_network('200.117.232.0/21'),
ip_network('200.117.240.0/21'),
ip_network('200.117.248.0/21'),
ip_network('200.123.50.0/24'),
ip_network('201.252.0.0/21'),
ip_network('201.252.8.0/21'),
ip_network('201.252.16.0/21'),
ip_network('201.252.24.0/21'),
ip_network('201.252.32.0/21'),
ip_network('201.252.40.0/21'),
ip_network('201.252.48.0/21'),
ip_network('201.252.56.0/21'),
ip_network('201.252.64.0/21'),
ip_network('201.252.72.0/21'),
ip_network('201.252.80.0/21'),
ip_network('201.252.88.0/21'),
ip_network('201.252.96.0/21'),
ip_network('201.252.104.0/21'),
ip_network('201.252.112.0/21'),
ip_network('201.252.120.0/21'),
ip_network('201.252.128.0/21'),
ip_network('201.252.136.0/21'),
ip_network('201.252.144.0/21'),
ip_network('201.252.152.0/21'),
ip_network('201.252.160.0/21'),
ip_network('201.252.168.0/21'),
ip_network('201.252.176.0/21'),
ip_network('201.252.184.0/21'),
ip_network('201.252.192.0/21'),
ip_network('201.252.200.0/21'),
ip_network('201.252.208.0/21'),
ip_network('201.252.216.0/21'),
ip_network('201.252.224.0/21'),
ip_network('201.252.232.0/21'),
ip_network('201.252.240.0/21'),
ip_network('201.252.248.0/21'),
ip_network('201.253.0.0/21'),
ip_network('201.253.8.0/21'),
ip_network('201.253.16.0/21'),
ip_network('201.253.24.0/21'),
ip_network('201.253.32.0/21'),
ip_network('201.253.40.0/21'),
ip_network('201.253.48.0/21'),
ip_network('201.253.56.0/21'),
ip_network('201.253.64.0/21'),
ip_network('201.253.72.0/21'),
ip_network('201.253.80.0/21'),
ip_network('201.253.88.0/21'),
ip_network('201.253.96.0/21'),
ip_network('201.253.104.0/21'),
ip_network('201.253.112.0/21'),
ip_network('201.253.120.0/21'),
ip_network('201.253.122.0/24'),
ip_network('201.253.128.0/21'),
ip_network('201.253.136.0/21'),
ip_network('201.253.144.0/21'),
ip_network('201.253.152.0/21'),
ip_network('201.253.160.0/21'),
ip_network('201.253.168.0/21'),
ip_network('201.253.176.0/21'),
ip_network('201.253.184.0/21'),
ip_network('201.253.192.0/21'),
ip_network('201.253.200.0/21'),
ip_network('201.253.208.0/21'),
ip_network('201.253.216.0/21'),
ip_network('201.253.224.0/21'),
ip_network('201.253.232.0/21'),
ip_network('201.253.240.0/21'),
ip_network('201.253.248.0/21'),
]
|
## Heap Sort
def heapsort(alist):
    """Sort *alist* in place, ascending, using heap sort.

    Builds a max-heap over the whole list, then repeatedly swaps the root
    (current maximum) with the last element of the unsorted region and
    restores the heap property on the shrunk prefix.  O(n log n) time,
    O(1) extra space.  Handles empty and single-element lists.

    Fixes vs. original: ``length / 2`` is a float under Python 3 and breaks
    ``range()`` -- use floor division; the leftover debug ``print`` of the
    intermediate heap is removed; the helper is nested so the function is
    self-contained instead of depending on module-level moveDown/swap.
    """
    def _sift_down(first, last):
        # Push alist[first] down until both children are <= it, restricting
        # the heap to indices first..last (inclusive).
        largest = 2 * first + 1
        while largest <= last:
            # pick the larger child when a right child exists
            if largest < last and alist[largest] < alist[largest + 1]:
                largest += 1
            if alist[largest] > alist[first]:
                alist[largest], alist[first] = alist[first], alist[largest]
                first = largest
                largest = 2 * first + 1
            else:
                return

    last = len(alist) - 1
    # heapify: sift down every internal node, deepest first
    for i in range(last // 2, -1, -1):
        _sift_down(i, last)
    # repeatedly move the current maximum to the end of the unsorted region
    for end in range(last, 0, -1):
        alist[0], alist[end] = alist[end], alist[0]
        _sift_down(0, end - 1)
def moveDown(alist, first, last):
    """Sift alist[first] down until the max-heap property holds again,
    treating only indices first..last (inclusive) as part of the heap."""
    child = 2 * first + 1
    while child <= last:
        # prefer the right sibling when it exists and is bigger
        right = child + 1
        if right <= last and alist[child] < alist[right]:
            child = right
        if alist[child] <= alist[first]:
            # parent already dominates both children -- heap restored
            break
        alist[first], alist[child] = alist[child], alist[first]
        first = child
        child = 2 * first + 1
def swap(alist, x, y):
    """Exchange the elements of *alist* at positions x and y in place."""
    tmp = alist[x]
    alist[x] = alist[y]
    alist[y] = tmp
# Demo driver for heapsort.
# NOTE: Python 2 print statements -- this script targets Python 2 and cannot
# be restyled to print() calls without changing the printed output.
a = [5, 15, 10, 2]
print 'input array', a
heapsort(a)
print 'sorted array', a
|
from __future__ import generators
import config.package
class Configure(config.package.Package):
  '''Configure support for CUSP, a header-only CUDA sparse linear algebra library.'''
  def __init__(self, framework):
    config.package.Package.__init__(self, framework)
    self.includes      = ['cusp/version.h']
    self.includedir    = ['','include']
    self.forceLanguage = 'CUDA'   # headers must be compiled with the CUDA compiler
    self.cxx           = 0
    return

  def setupDependencies(self, framework):
    '''CUSP is built on top of Thrust, so register that package as a dependency.'''
    config.package.Package.setupDependencies(self, framework)
    self.thrust = framework.require('config.packages.thrust', self)
    self.deps   = [self.thrust]
    return

  def Install(self):
    '''Copy the CUSP headers into <installDir>/include/cusp (header-only: nothing to build).'''
    import shutil
    import os
    self.framework.log.write('cuspDir = '+self.packageDir+' installDir '+self.installDir+'\n')
    srcdir  = self.packageDir
    destdir = os.path.join(self.installDir, 'include', 'cusp')
    try:
      # replace any previous install so stale headers cannot linger
      if os.path.isdir(destdir): shutil.rmtree(destdir)
      shutil.copytree(srcdir, destdir)
    # fixed: 'except RuntimeError,e' is pre-2.6 syntax and a SyntaxError on Python 3
    except RuntimeError as e:
      raise RuntimeError('Error installing Cusp include files: '+str(e))
    self.includedir = 'include' # default and --download have different includedirs
    return self.installDir

  def getSearchDirectories(self):
    '''Yield candidate root directories in which to look for the CUSP headers.'''
    import os
    yield ''
    yield os.path.join('/usr','local','cuda')
    yield os.path.join('/usr','local','cuda','cusp')
    return

  def configurePC(self):
    '''Probe whether this CUSP provides smoothed-aggregation preconditioning.

    CUSP moved smoothed_aggregation.h under precond/aggregation/ at version 400,
    so the test compile covers both locations.'''
    self.pushLanguage('CUDA')
    oldFlags = self.compilers.CUDAPPFLAGS
    self.compilers.CUDAPPFLAGS += ' '+self.headers.toString(self.include)
    self.compilers.CUDAPPFLAGS += ' '+self.headers.toString(self.thrust.include)
    if self.checkCompile('#include <cusp/version.h>\n#if CUSP_VERSION >= 400\n#include <cusp/precond/aggregation/smoothed_aggregation.h>\n#else\n#include <cusp/precond/smoothed_aggregation.h>\n#endif\n', ''):
      self.addDefine('HAVE_CUSP_SMOOTHED_AGGREGATION','1')
    # restore flags so the probe has no lasting side effect
    self.compilers.CUDAPPFLAGS = oldFlags
    self.popLanguage()
    return

  def configureLibrary(self):
    '''Calls the regular package configureLibrary and then does an additional tests needed by CUSP'''
    if not self.thrust.found:
      raise RuntimeError('CUSP support requires the THRUST package\nRerun configure using --with-thrust-dir')
    config.package.Package.configureLibrary(self)
    self.executeTest(self.configurePC)
    return
|
# Demonstration of terminal text colouring with ANSI escape sequences:
# \033[<style>;<fg>(;<bg>)m switches attributes, \033[0m resets them.

# bright red: the colour stays active for every following line until reset
print('\033[1;31m')
print('**********')
print('hello world!')
print('**********')
print('\033[0m')

# bright green text
print('\033[1;32mgreen\033[0m')
# bright blue text
print('\033[1;34mblue\033[0m')
# yellow with underline
print('\033[4;33myellow\033[0m')
# black text on a red background
print('\033[1;30;41mblack\033[0m')
# black text on a white background
print('\033[1;30;47mwhite\033[0m')
print('normal')
|
from django.shortcuts import render
from django.template import Context, loader, Template
from django.http import HttpResponse
from backend import collect
from analyse import analyse,build_markers
from .models import *
import threading,time
from django.db import connection
# Raw DB-API cursor on the default Django connection, opened at import time.
# NOTE(review): not referenced anywhere in this module's visible code --
# confirm a caller needs it before removing.
cursor = connection.cursor()
class collectThread(object):
    """One-shot background collector.

    Instantiating the class immediately spawns a daemon thread that runs
    collect() once and then sleeps for ``interval`` seconds before the
    thread exits.
    """

    def __init__(self, interval=1):
        self.interval = interval
        worker = threading.Thread(target=self.run, args=())
        worker.daemon = True  # daemon: never block interpreter shutdown
        worker.start()

    def run(self):
        # Executed on the background thread.
        collect()
        time.sleep(self.interval)
def homepage(request):
    """Kick off a one-shot background collection pass, then render the landing page."""
    collectThread()
    return HttpResponse(loader.get_template('scholarship/index.html').render(request))
def highchart(request):
    """Run the analysis step, then render the chart page."""
    analyse()
    return HttpResponse(loader.get_template('scholarship/highchart.html').render(request))
def map(request):
    """Rebuild the map markers, then render the map page.

    (The name shadows the builtin map(), but the URLconf references it,
    so it must stay.)
    """
    build_markers()
    return HttpResponse(loader.get_template('scholarship/map.html').render(request))
def search(request):
    """Render the search form, populating each dropdown with its distinct values."""
    context = {
        'all_category': Scholarship.objects.values('category').distinct(),
        'all_country': Scholarship.objects.values('country').distinct(),
        'all_university': Scholarship.objects.values('university').distinct(),
    }
    return HttpResponse(loader.get_template('scholarship/search.html').render(context, request))
def search_result(request):
    """Render scholarships filtered by whichever of category/country/university
    was posted; show "No Scholarship Available" when no filter was given or the
    query fails.

    Fix vs. original: when none of the three POST keys was present, ``result``
    was never bound, so the success ``return`` raised a NameError that the
    blanket ``except`` silently converted into the error page.  The no-filter
    path is now explicit.
    """
    template = loader.get_template('scholarship/result.html')
    try:
        value = None
        result = None
        for field in ('category', 'country', 'university'):
            value = request.POST.get(field)
            if value:
                result = Scholarship.objects.filter(**{field: value}).distinct()
                break
        if result is None:
            # no recognised filter submitted
            return HttpResponse(template.render({'error': "No Scholarship Available", 'value': ''}, request))
        return HttpResponse(template.render({'records': result, 'value': value}, request))
    except Exception:
        # deliberate best-effort fallback: any query/render failure shows the error page
        return HttpResponse(template.render({'error': "No Scholarship Available", 'value': ''}, request))
''' TO DO
add source link https://twitter.com/statuses/ID
remove "," from url
add facebook
add back buttons
''' |
# https://hwwong168.wordpress.com/2019/09/25/esp32-micropython-implementation-of-cryptographic/
import uos
from ucryptolib import aes, MODE_CBC
# Module-level IVs shared by the "global variable access" fixtures below.
# NOTE: hard-coding a CBC IV is intentionally insecure -- the p_/n_ functions
# in this file are positive/negative fixtures for a crypto-misuse static
# analyzer, so their exact form must not be "fixed".
g_iv1 = b"1234567812345678"
g_iv2 = bytes("1234567812345678", "utf8")
def p_example1_hard_coded1(key, data):
    """Positive fixture: CBC IV hard-coded as a bytes literal at the call site."""
    cipher = aes(key, MODE_CBC, b"1234567812345678")
    cipher_text = cipher.encrypt(data)
    return cipher_text

def p_example2_hard_coded2(key, data):
    """Positive fixture: CBC IV built inline with bytes() from a string constant."""
    cipher = aes(key, MODE_CBC, bytes("1234567812345678", "utf8"))
    cipher_text = cipher.encrypt(data)
    return cipher_text
def p_example3_local_variable1(key, data):
    """Positive fixture: constant IV assigned to a local (bytes literal), then used."""
    iv = b"1234567812345678"
    cipher = aes(key, MODE_CBC, iv)
    cipher_text = cipher.encrypt(data)
    return cipher_text

def p_example4_local_variable2(key, data):
    """Positive fixture: constant IV assigned to a local via bytes(), then used."""
    iv = bytes("1234567812345678", "utf8")
    cipher = aes(key, MODE_CBC, iv)
    cipher_text = cipher.encrypt(data)
    return cipher_text
def p_example5_nested_local_variable1(key, data):
    """Positive fixture: literal IV reaches aes() through a chain of local aliases."""
    iv1 = b"1234567812345678"
    iv2 = iv1
    iv3 = iv2
    cipher = aes(key, MODE_CBC, iv3)
    cipher_text = cipher.encrypt(data)
    return cipher_text

def p_example6_nested_local_variable2(key, data):
    """Positive fixture: bytes()-built IV reaches aes() through a chain of local aliases."""
    iv1 = bytes("1234567812345678", "utf8")
    iv2 = iv1
    iv3 = iv2
    cipher = aes(key, MODE_CBC, iv3)
    cipher_text = cipher.encrypt(data)
    return cipher_text
def p_example_method_call(key, iv, data):
    """Helper: encrypts with a caller-supplied IV (any finding depends on the caller)."""
    cipher = aes(key, MODE_CBC, iv)
    cipher_text = cipher.encrypt(data)
    return cipher_text

def p_example_nested_method_call(key, iv, data):
    """Helper: adds one extra call level on top of p_example_method_call."""
    return p_example_method_call(key, iv, data)
def p_example7_direct_method_call1(key, data):
    """Positive fixture: literal IV passed into the helper, one call deep."""
    iv = b"1234567812345678"
    return p_example_method_call(key, iv, data)

def p_example8_direct_method_call2(key, data):
    """Positive fixture: bytes()-built IV passed into the helper, one call deep."""
    iv = bytes("1234567812345678", "utf8")
    return p_example_method_call(key, iv, data)

def p_example9_nested_method_call1(key, data):
    """Positive fixture: literal IV passed through two helper call levels."""
    iv = b"1234567812345678"
    return p_example_nested_method_call(key, iv, data)

def p_example10_nested_method_call2(key, data):
    """Positive fixture: bytes()-built IV passed through two helper call levels."""
    iv = bytes("1234567812345678", "utf8")
    return p_example_nested_method_call(key, iv, data)
def p_example11_direct_g_variable_access1(key, data):
    """Positive fixture: IV read directly from module-level g_iv1."""
    cipher = aes(key, MODE_CBC, g_iv1)
    cipher_text = cipher.encrypt(data)
    return cipher_text

def p_example12_direct_g_variable_access2(key, data):
    """Positive fixture: IV read directly from module-level g_iv2."""
    cipher = aes(key, MODE_CBC, g_iv2)
    cipher_text = cipher.encrypt(data)
    return cipher_text

def p_example13_indirect_g_variable_access1(key, data):
    """Positive fixture: module-level g_iv1 aliased to a local before use."""
    iv = g_iv1
    cipher = aes(key, MODE_CBC, iv)
    cipher_text = cipher.encrypt(data)
    return cipher_text

def p_example14_indirect_g_variable_access2(key, data):
    """Positive fixture: module-level g_iv2 aliased to a local before use."""
    iv = g_iv2
    cipher = aes(key, MODE_CBC, iv)
    cipher_text = cipher.encrypt(data)
    return cipher_text
def p_example15_warning_parameter_not_resolvable(key, iv, data):
    """Warning fixture: IV comes from a parameter the analyzer cannot resolve."""
    cipher = aes(key, MODE_CBC, iv)
    cipher_text = cipher.encrypt(data)
    return cipher_text
def n_example1_secrets_system_random(key, data):
    """Negative fixture: fresh random IV from uos.urandom -- no finding expected."""
    iv = uos.urandom(16)
    cipher = aes(key, MODE_CBC, iv)
    cipher_text = cipher.encrypt(data)
    return cipher_text
|
import csv
def prob_dist(int1, int2, n):
    """Probability that two points drawn uniformly from int1 = [a1, b1] and
    int2 = [a2, b2] are at least distance n apart.

    int1, int2 -- pairs/lists [a, b] of integers (possibly degenerate, a == b)
    n          -- the resolution: minimum separation required

    Returns a float in [0, 1] (the int 1 or 0 in degenerate cases).

    NOTE(review): in the general case the correction terms `top` and `bot`
    appear to be assigned in every geometric configuration, but if a
    configuration were missed this would raise NameError at the final
    expression -- confirm the case analysis is exhaustive.
    NOTE: the debug output uses Python 2 print statements; this script
    targets Python 2.
    """
    #int1 is a pair or list [a1, b1] of integers
    #int2 is a pair or list [a2, b2] of integers
    #n is the resolution
    #we calculate the probability that randomly
    #chosen numbers from int1 and int2 are at least
    #distance n apart.
    a1 = int1[0]
    a2 = int2[0]
    b1 = int1[1]
    b2 = int2[1]
    # both intervals degenerate (single points): answer is deterministic
    if b1 - a1 == 0 and b2 - a2 ==0:
        if abs(b1-b2) >= n:
            return 1
        else:
            return 0
    # int1 degenerate: reduces to a one-dimensional computation over int2
    if b1 - a1 == 0:
        tot = b2 - a2
        if b1 <= a2 - n:
            return 1
        elif a2 - n< b1 < b2 + n:
            return (tot-float(min(b1+n,b2)-max(b1-n,a2)))/tot
        elif b2 + n<= b1:
            return 1
    # int2 degenerate: symmetric one-dimensional case over int1
    if b2 - a2 == 0:
        tot = b1 - a1
        if b2 <= a1 - n:
            return 1
        elif a1 - n< b2 < b1 + n:
            return (tot-float(min(b2+n,b1)-max(b2-n,a1)))/tot
        elif b1 + n<= b2:
            return 1
    # General case: area computation over the (x1, x2) rectangle.
    # `top` is the correction contributed by where b2 falls relative to int1.
    if b2 >= b1+n:
        top = 0
    if a1-n >= b2:
        top = 2*n*(b1-a1)
    # wide interval: the a1+n .. b1-n plateau exists
    if a1 + n <= b1 - n:
        if (b1 + n >= b2 >= b1 - n):
            top = float((b1+n-b2)**2)/2
        elif b1 - n > b2 >= a1 + n:
            top = 2*n**2 + 2*n*(b1 -n - b2)
        elif a1+n > b2 >= a1 - n:
            top = 2*n*(b1 - a1) - float((b2 - a1 + n)**2)/2
    # narrow interval: the plateau collapses
    if b1 - n <= a1 + n:
        if b1 + n >= b2 >= a1+n:
            top = float((b1+n-b2)**2)/2
        elif a1 + n > b2 >= b1 - n:
            top = float((b1-a1)**2)/2 + (b1-a1)*(a1+n-b2)
        elif b1-n > b2 >= a1-n:
            top = 2*n*(b1 -a1 ) - float((b2-a1+n)**2)/2
    #now calculate effect of location of a2.
    # `bot` is the mirror-image correction contributed by where a2 falls.
    if a2 <= a1 -n:
        bot = 0
    if a2 >= b1 + n:
        bot = 2*n*(b1-a1)
    if a1 + n <= b1 -n:
        if a1 - n <= a2 <= a1 +n :
            bot = float((a2 - a1 + n)**2)/2
        elif a1 +n < a2 <= b1 - n:
            bot = 2*n**2 + 2*n*(a2 - a1 - n)
        elif b1 - n < a2 <= b1 + n:
            bot = 2*n*(b1 - a1) - float((b1 + n - a2)**2)/2
    if b1 - n <= a1 + n:
        if a1 - n <= a2 <= b1-n:
            bot = float((a2 - a1 +n)**2)/2
        elif b1 - n < a2 <= a1 + n:
            bot = float((b1-a1)**2)/2 + (b1-a1)*(a2-b1+n)
        elif a1 + n < a2 <= b1 + n:
            bot = 2*n*(b1 - a1) - float((b1 + n - a2)**2)/2
    tot = (b1 - a1)*(b2 - a2)
    # sanity check: a negative probability indicates a case-analysis bug;
    # dump the offending inputs and intermediate terms (Python 2 prints)
    if float(tot - (b1 -a1)*2*n + top + bot)/tot < 0:
        print int1
        print int2
        print str(tot) + " " + str(top) + " " + str(bot) +" " + str((b1-a1)*2*n) + " " + str(float(tot - (b1 -a1)*2*n + top + bot)/tot)
    return float(tot - (b1 -a1)*2*n + top + bot)/tot
# Reads per-gene primer-window intervals, scores every pair of windows by the
# probability they are distinguishable at 5 bp resolution, and writes the
# genes sorted by that score.
win = open('wins_primers-maxmin-np.csv', 'r')
# NOTE(review): quotechar=None with the default quoting mode raises in
# Python 3's csv module (needs quoting=csv.QUOTE_NONE) -- confirm the
# target interpreter version.
wincsv = csv.reader(win, delimiter='\t', quotechar=None, doublequote=False)
winlist = []
for row in wincsv:
    winlist.append(row)
win.close()
#this removes the header, but saves it with name header
header = winlist.pop(0)
#Open file to write to, will make rows for csv file
out = open('ordered_windows-maxmin-np.csv', 'w')
outcsv = csv.writer(out, delimiter='\t', quotechar=None)
outlist = [row for row in winlist]
# First interval column; columns offset..offset+17 hold 9 (min, max) pairs.
offset = 6
#this is the resolution used in calling distinguishing or not
resn = 5
# Append the 36 pairwise distinguishability probabilities to each row.
for row in outlist:
    distlist = []
    for q in range(0,9):
        r = q+1
        while r < 9:
            int1 = [int(row[2*q+offset]), int(row[2*q+offset+1])]
            int2 = [int(row[2*r+offset]), int(row[2*r+offset+1])]
            distlist.append(prob_dist(int1, int2, resn))
            r += 1
    row.extend(distlist)
#this adds a column which says how many pairs of variants
#can be distinguished for a given gene. The threshold for
#distinguishing variants is 5bp
for row in outlist:
    # assumes the original rows had 24 columns, so [24:60] is exactly the
    # 36 probabilities appended above -- TODO confirm input width
    matrix = row[24:60]
    score = 0
    for entry in matrix:
        if entry > .7:
            score += 1
    row.append(score)
#this adds a column which is roughly the L0 distance of the
#pairwise bottleneck distance vector
for row in outlist:
    matrix = row[24:60]
    score = 0
    for entry in matrix:
        score += float(entry)
    score = score/36
    row.append(score)
#this sorts the genes by how many pairs of variants can be distinguished
# Stable sorts: secondary key (L0 score) first, primary key (pair count) last.
# NOTE(review): int(row[61]) truncates the fractional L0 score -- presumably
# intentional coarse tie-breaking; confirm.
outlist = sorted(outlist, key=lambda row: int(row[61]), reverse=True)
outlist = sorted(outlist, key=lambda row: int(row[60]), reverse=True)
#this makes the header
for q in range(9):
    r = q + 1
    while r < 9:
        header.append("["+str(header[2*q+offset])[:-8] + ", " +
                      str(header[2*r+offset])[:-8] + "]")
        r += 1
header.append("# distinguished pairs")
header.append("L0 score")
#print header[59]
#this writes the rows to outcsv.
outcsv.writerow(header)
for row in outlist:
    outcsv.writerow(row)
out.close()
|
def read_numbers(count=7):
    """Prompt for *count* integers, re-prompting on invalid entries.

    Fix: the original tested ``num != int`` (a string compared against the
    ``int`` type), which is always True, so the program rejected every entry
    and broke out of the loop immediately. It also kept values as strings,
    which would have sorted lexicographically. We now convert with int()
    and, per the "try again" message, re-prompt instead of aborting.
    """
    values = []
    while len(values) < count:
        num = input("Please enter a number:")
        try:
            values.append(int(num))
        except ValueError:
            print("Invalid entry try again.")
    return values


def bubble_sort(values):
    """Sort *values* in place with bubble sort and return the list.

    Prints the list after each swap (original behavior) so the algorithm's
    progress is visible.
    """
    n = len(values)
    swapped = True
    while swapped:
        swapped = False
        for x in range(1, n):
            if values[x - 1] > values[x]:
                values[x - 1], values[x] = values[x], values[x - 1]
                swapped = True
                print(values)
    return values


if __name__ == "__main__":
    print("Welcome to the bubble sort algorithm")
    S = read_numbers(7)
    bubble_sort(S)
    print(S)
|
#!/usr/bin/python3
import subprocess
import sys
CLIENTS_PROP = "clientsnode"
SERVERS_PROP = "serversnode"
def run_with_log_and_exit(cmd):
    """Run *cmd* through the shell, logging it; terminate the program on failure."""
    print(f"RUNNING | {cmd}")
    result = subprocess.run(cmd, shell=True)
    if result.returncode == 0:
        return
    # Non-zero exit: report and propagate the command's status code.
    print(f"{cmd} returned {result.returncode}")
    exit(result.returncode)
def get_nodes():
    """Return the (stripped) names of every node in the cluster via kubectl."""
    cmd = 'kubectl get nodes --template \'{{range .items}}{{.metadata.name}}{{"\\n"}}{{end}}\''
    raw = subprocess.getoutput(cmd)
    names = []
    for chunk in raw.split("\n"):
        names.append(chunk.strip())
    return names
def clear_prop_for_node(node, prop):
    """Remove label *prop* from *node* (kubectl's trailing '-' deletes a label)."""
    run_with_log_and_exit(f'kubectl label nodes {node} {prop}-')
def clear_client_and_server_prop_for_node(node):
    """Remove both the client and server role labels from *node*.

    Bug fix: clear_prop_for_node() takes (node, prop), but the original
    passed only the prop constant, so calling this raised TypeError.
    """
    clear_prop_for_node(node, CLIENTS_PROP)
    clear_prop_for_node(node, SERVERS_PROP)
def set_prop_for_node(node, prop):
    """Attach the label *prop*=true to *node*."""
    run_with_log_and_exit(f'kubectl label nodes {node} {prop}=true')
def set_as_client_node(node):
    # Mark *node* to schedule client pods (adds the clientsnode=true label).
    set_prop_for_node(node, CLIENTS_PROP)
def set_as_service_node(node):
    # Mark *node* to schedule server pods (adds the serversnode=true label).
    set_prop_for_node(node, SERVERS_PROP)
def clear_props(nodes):
    """Strip the client and server role labels from every node in *nodes*."""
    for current in nodes:
        clear_prop_for_node(current, CLIENTS_PROP)
        clear_prop_for_node(current, SERVERS_PROP)
    print("Cleared props!")
def main():
    """Label cluster nodes as servers/clients per the single CLI argument."""
    cli_args = sys.argv[1:]
    if len(cli_args) != 1:
        print("Usage: python3 setup_nodes.py <number_of_client_nodes>")
        exit(1)
    number_of_clients = int(cli_args[0])
    nodes = get_nodes()
    print(f"Got nodes {nodes}")
    # Start from a clean slate before assigning roles.
    clear_props(nodes)
    if len(nodes) == 1:
        node = nodes[0]
        print(f"Only 1 node ({node}). Applying both server and client prop...")
        set_prop_for_node(node, CLIENTS_PROP)
        set_prop_for_node(node, SERVERS_PROP)
        return
    # First len(nodes) - number_of_clients nodes serve; the rest are clients.
    server_count = len(nodes) - number_of_clients
    for idx in range(server_count):
        node = nodes[idx]
        set_as_service_node(node)
        print(f"{node} is a server node")
    for idx in range(server_count, len(nodes)):
        node = nodes[idx]
        set_as_client_node(node)
        print(f"{node} is a client node")
# Script entry point: run only when executed directly, not on import.
if __name__ == '__main__':
    main()
|
'''
装饰器
由于函数也是一个对象,而且函数对象可以被赋值给变量,所以,通过变量也能调用该函数。
函数对象有一个__name__属性,可以拿到函数的名字:
现在,假设我们要增强now()函数的功能,比如,在函数调用前后自动打印日志,但又不希望修改now()函数的定义,这种在代码运行期间动态增加功能的方式,称之为“装饰器”(Decorator)。
本质上,decorator就是一个返回函数的高阶函数。所以,我们要定义一个能打印日志的decorator,可以定义如下:
'''
def log(func):
    """Decorator that prints a call notice before invoking *func*.

    Improvement: functools.wraps copies __name__/__doc__ etc. from *func*
    onto the wrapper, so decorated functions keep their metadata (the
    original wrapper reported its own name as 'wrapper').
    """
    import functools

    @functools.wraps(func)
    def wrapper(*args, **kw):
        # Log, then delegate to the original function unchanged.
        print('call %s():' % func.__name__)
        return func(*args, **kw)
    return wrapper
'''
观察上面的log,因为它是一个decorator,所以接受一个函数作为参数,并返回一个函数。我们要借助Python的@语法,把decorator置于函数的定义处:
'''
@log  # equivalent to: now = log(now)
def now():
    print('2016-1-4')
now()
# call now():
# 2016-1-4
'''
把@log放到now()函数的定义处,相当于执行了语句:
now = log(now)
由于log()是一个decorator,返回一个函数,所以,原来的now()函数仍然存在,只是现在同名的now变量指向了新的函数,于是调用now()将执行新函数,即在log()函数中返回的wrapper()函数。
wrapper()函数的参数定义是(*args, **kw),因此,wrapper()函数可以接受任意参数的调用。在wrapper()函数内,首先打印日志,再紧接着调用原始函数。
''' |
# Generated by Django 3.1 on 2020-11-28 20:20
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated migration: converts Student.classroom from a single
    # (presumably FK) field into a many-to-many relation.
    dependencies = [
        ('classrooms', '0002_classroom_creator'),
        ('users', '0004_auto_20201128_1628'),
    ]
    operations = [
        # Drop the old single-classroom field...
        migrations.RemoveField(
            model_name='student',
            name='classroom',
        ),
        # ...and recreate it as M2M. The '+'-suffixed related_name disables
        # the reverse accessor on Classroom.
        migrations.AddField(
            model_name='student',
            name='classroom',
            field=models.ManyToManyField(blank=True, related_name='_student_classroom_+', to='classrooms.Classroom'),
        ),
    ]
|
import numpy as np
from .Base_Automaton import Base_Automaton
class F3_Fly(object):
    """One fly agent in the F3 garden swarm optimiser.

    Tracks a current position, a personal best, and the MESO position
    associated with that best. A learning automaton (one cell per bee
    "shell" plus two) adapts which local-search neighbourhood law is used,
    via reward/punish feedback from the garden's cost function.
    """

    def __init__(self, garden, id_no, gender, orientation):
        self.G = garden # the F3_Garden object
        self.id_no = id_no # a reference number to identify this fly
        self.gender = gender # 'M', 'N' or 'F'
        self.orientation = orientation # list of one or more genders
        self.automaton = Base_Automaton(2 + self.G.bee_shells, self.G.bee_reward, self.G.bee_punish)
        self.X = None # current position
        self.best_X = None # best personal position
        self.best_XM = None # associated MESO position

    def X_from_MESO(self):
        """Return best_X with differing coordinates randomly (p=0.5) swapped
        to their MESO counterparts; logs which indices were swapped."""
        indices = []
        if np.array_equal(self.best_X, self.best_XM):
            X = self.best_X
        else:
            # Copy so the stored personal best is never mutated in place.
            X = np.copy(self.best_X)
            for ix in range(0, len(X)):
                if X[ix] != self.best_XM[ix]:
                    if np.random.rand(1) < 0.5:
                        X[ix] = self.best_XM[ix]
                        indices.append(ix)
            if self.G.costfn.verbose:
                print('  >8< Bee: MESO = {i}'.format(i=indices))
        return X

    def bees(self, count):
        """Probe *count* candidate positions ("bees") around the MESO-mixed
        personal best; the automaton cell selects the neighbourhood law and
        radius, and is rewarded/punished by whether the probe improved."""
        if self.G.costfn.verbose:
            print('==== Fly {p} (gender={g}, orientation={o}): #bees={b}, radius={r}'.format(p=self.id_no, g=self.gender, o=self.orientation, b=count, r=self.G.bee_radius))
        for b in range(0, count):
            meso_X = self.X_from_MESO()
            cell = self.automaton.cell()
            if cell == 0:
                # Cell 0: gaussian neighbourhood at the base radius.
                new_X = self.G.new_position_in_neighbourhood(meso_X, self.G.bee_radius, 'gauss')
            elif cell < (self.automaton.count - 1):
                # Middle cells: spherical shells of increasing radius.
                new_X = self.G.new_position_in_neighbourhood(meso_X, self.G.bee_radius * cell, 'sphere')
            else:
                # Last cell: far field -- largest shell plus an exponential tail.
                radius = self.G.bee_radius * (self.automaton.count - 1)
                radius = radius + self.G.rand_exp(radius)
                new_X = self.G.new_position_in_neighbourhood(meso_X, radius, 'sphere')
            if self.G.costfn.calculate_cost(new_X) is not None:
                if self.G.plotter is not None:
                    self.G.plotter.bee(self.G.costfn.XA)
                if self.G.compare(self.G.costfn.XA, self.best_X):
                    if self.G.costfn.verbose:
                        print('(updating personal best)')
                    if self.G.plotter is not None:
                        self.G.plotter.fly(self.gender, self.G.costfn.XA, self.X, None)
                    self.best_X = self.G.costfn.XA
                    self.best_XM = self.G.costfn.XM
                    self.X = self.G.costfn.XA
                    self.automaton.reward(cell)
                else:
                    self.automaton.punish(cell)
        if False: # this is very noisy
            self.automaton.summarise()

    def new_local_search(self, flies, ranks, radius, jitter):
        """Move towards the better-ranked attractor *flies* (rank-weighted),
        add jitter, evaluate, and update the personal best/current position.
        Ends with a (possibly stochastic) gender transition."""
        if self.G.costfn.verbose:
            print('==== Fly {p} (gender={g}, orientation={o}): rank={k}, radius={r}'.format(p=self.id_no, g=self.gender, o=self.orientation, k=ranks[0], r=radius))
        if ranks[0] == 0: # self-fly is superior to any it is attracted to; let's be narcissistic
            new_X = self.best_X
        else:
            old_X = self.G.baseline(self.X, radius)
            new_X = np.zeros(self.G.Ndim)
            weight = np.zeros(len(flies))
            for f in range(1, len(flies)):
                if ranks[f] < ranks[0]: # a better fly than self-fly
                    weight[f] = 1 / (1 + ranks[f])
            weight = weight / sum(weight) # weight must sum to 1; it's a probability set
            for f in range(1, len(flies)):
                if ranks[f] < ranks[0]: # a better fly than self-fly
                    new_X = new_X + weight[f] * self.G.attraction(flies[f].best_X - old_X, radius)
            new_X = new_X + old_X
        new_X = self.G.new_position_in_neighbourhood(new_X, jitter)
        if self.G.costfn.calculate_cost(new_X) is not None:
            if self.G.compare(self.G.costfn.XA, self.best_X):
                if self.G.costfn.verbose:
                    print('(updating personal best)')
                if self.G.plotter is not None:
                    self.G.plotter.fly(self.gender, self.G.costfn.XA, self.X, None)
                self.best_X = self.G.costfn.XA
                self.best_XM = self.G.costfn.XM
                self.X = self.G.costfn.XA
            else:
                if self.G.plotter is not None:
                    self.G.plotter.fly(self.gender, self.G.costfn.XA, self.X, self.best_X)
                self.X = self.G.costfn.XA
        else:
            # Candidate rejected by the cost function: stay put, just replot.
            if self.G.plotter is not None:
                self.G.plotter.fly(self.gender, self.X, None, self.best_X)
        self.gender = self.G.transition(self.gender)
        return self.best_X # return the local best solution, even if old

    def new_global_search(self):
        """Restart this fly from a banked scout solution (scheduling and
        evaluating a new scout if the bank is unexpectedly empty)."""
        cost, XA, XM = self.G.scout.pop()
        while cost is None: # shouldn't happen, but could (if solution space is small), so just in case...
            print('* * * No scouts banked! * * *')
            self.G.scout.schedule(1)
            self.G.scout.evaluate(1)
            cost, XA, XM = self.G.scout.pop() # although, if we exhaust all of space, this will go infinite
        self.X = XA
        self.best_X = XA
        self.best_XM = XM
        if self.G.plotter is not None:
            self.G.plotter.fly(self.gender, self.X, None, None)
        return self.best_X # return the local best solution, even if old
|
import torch.nn as nn
import torch.utils.model_zoo as model_zoo
from CA import CoordAtt
# Torchvision download URLs for ImageNet-pretrained checkpoints, keyed by
# architecture name; consumed by the ResNetXX factory functions below.
model_urls = {
    'resnet18': 'https://download.pytorch.org/models/resnet18-5c106cde.pth',
    'resnet34': 'https://download.pytorch.org/models/resnet34-333f7ec4.pth',
    'resnet50': 'https://download.pytorch.org/models/resnet50-19c8e357.pth',
    'resnet101': 'https://download.pytorch.org/models/resnet101-5d3b4d8f.pth',
    'resnet152': 'https://download.pytorch.org/models/resnet152-b121ed2d.pth',
}
def ResNet18(pretrained=True, in_c=3):
    """Build a ResNet-18 backbone (BasicBlock, [2, 2, 2, 2]).

    output, low_level_feat:
    512, 256, 128, 64, 64
    """
    net = ResNet(BasicBlock, [2, 2, 2, 2], in_c=in_c)
    # Pretrained checkpoints assume 3-channel RGB input.
    if pretrained and in_c == 3:
        net._load_pretrained_model(model_urls['resnet18'])
    return net
def ResNet34(pretrained=True, in_c=3):
    """Build a ResNet-34 backbone (BasicBlock, [3, 4, 6, 3]).

    output, low_level_feat:
    512, 64
    """
    net = ResNet(BasicBlock, [3, 4, 6, 3], in_c=in_c)
    # Pretrained checkpoints assume 3-channel RGB input.
    if pretrained and in_c == 3:
        net._load_pretrained_model(model_urls['resnet34'])
    return net
def ResNet50(pretrained=True, in_c=3):
    """Build a ResNet-50 backbone (Bottleneck, [3, 4, 6, 3]).

    output, low_level_feat:
    2048, 256
    """
    net = ResNet(Bottleneck, [3, 4, 6, 3], in_c=in_c)
    # Pretrained checkpoints assume 3-channel RGB input.
    if pretrained and in_c == 3:
        net._load_pretrained_model(model_urls['resnet50'])
    return net
def ResNet101(pretrained=True, in_c=3):
    """Build a ResNet-101 backbone (Bottleneck, [3, 4, 23, 3]).

    output, low_level_feat:
    2048, 256

    Bug fix: previously loaded the *resnet50* checkpoint into a ResNet-101
    graph (model_urls['resnet50']); now loads the matching 'resnet101'
    weights (key filtering in _load_pretrained_model hid the mismatch).
    """
    model = ResNet(Bottleneck, [3, 4, 23, 3], in_c=in_c)
    if in_c != 3:
        # Pretrained checkpoints assume 3-channel RGB input.
        pretrained = False
    if pretrained:
        model._load_pretrained_model(model_urls['resnet101'])
    return model
def ResNet152(pretrained=True, in_c=3):
    """Build a ResNet-152 backbone (Bottleneck, [3, 8, 36, 3]).

    output, low_level_feat:
    2048, 256

    Bug fix: previously loaded the *resnet50* checkpoint into a ResNet-152
    graph (model_urls['resnet50']); now loads the matching 'resnet152'
    weights.
    """
    model = ResNet(Bottleneck, [3, 8, 36, 3], in_c=in_c)
    if in_c != 3:
        # Pretrained checkpoints assume 3-channel RGB input.
        pretrained = False
    if pretrained:
        model._load_pretrained_model(model_urls['resnet152'])
    return model
class BasicBlock(nn.Module):
    """Two 3x3 convs with BN/ReLU and an additive residual shortcut."""
    expansion = 1

    def __init__(self, inplanes, planes, stride=1, dilation=1, downsample=None):
        super(BasicBlock, self).__init__()
        # Submodule names must stay as-is so pretrained state_dicts load.
        self.conv1 = nn.Conv2d(inplanes, planes, kernel_size=3, stride=stride,
                               dilation=dilation, padding=dilation, bias=False)
        self.bn1 = nn.BatchNorm2d(planes)
        self.relu = nn.ReLU(inplace=True)
        self.conv2 = nn.Conv2d(planes, planes, kernel_size=3, stride=1,
                               dilation=dilation, padding=dilation, bias=False)
        self.bn2 = nn.BatchNorm2d(planes)
        self.downsample = downsample
        self.stride = stride

    def forward(self, x):
        # Shortcut path: identity, or a projection when shapes change.
        shortcut = x if self.downsample is None else self.downsample(x)
        y = self.relu(self.bn1(self.conv1(x)))
        y = self.bn2(self.conv2(y))
        y = y + shortcut
        return self.relu(y)
class Bottleneck(nn.Module):
    """1x1 reduce -> 3x3 -> 1x1 expand (x4) residual bottleneck block."""
    expansion = 4

    def __init__(self, inplanes, planes, stride=1, dilation=1, downsample=None):
        super(Bottleneck, self).__init__()
        # Submodule names must stay as-is so pretrained state_dicts load.
        self.conv1 = nn.Conv2d(inplanes, planes, kernel_size=1, bias=False)
        self.bn1 = nn.BatchNorm2d(planes)
        self.conv2 = nn.Conv2d(planes, planes, kernel_size=3, stride=stride,
                               dilation=dilation, padding=dilation, bias=False)
        self.bn2 = nn.BatchNorm2d(planes)
        self.conv3 = nn.Conv2d(planes, planes * self.expansion, kernel_size=1, bias=False)
        self.bn3 = nn.BatchNorm2d(planes * self.expansion)
        self.relu = nn.ReLU(inplace=True)
        self.downsample = downsample
        self.stride = stride
        self.dilation = dilation

    def forward(self, x):
        # Shortcut path: identity, or a projection when shapes change.
        shortcut = x if self.downsample is None else self.downsample(x)
        y = self.relu(self.bn1(self.conv1(x)))
        y = self.relu(self.bn2(self.conv2(y)))
        y = self.bn3(self.conv3(y))
        y = y + shortcut
        return self.relu(y)
class ResNet(nn.Module):
    """Configurable ResNet trunk returning features at every scale.

    forward() yields (x5, x4, x3, x2, x1) from coarsest to the stem output.
    The stem is a 3x3 stride-1 conv (not torchvision's 7x7/2), so all
    spatial reduction happens inside the four residual stages (stride 2
    each). self.maxpool is constructed but not used in forward().
    """

    def __init__(self, block, layers, in_c=3):
        super(ResNet, self).__init__()
        self.inplanes = 64
        self.in_c = in_c
        # Attribute names must stay as-is for checkpoint compatibility.
        self.conv1 = nn.Conv2d(self.in_c, 64, kernel_size=3, stride=1, padding=1, bias=False)
        self.bn1 = nn.BatchNorm2d(64)
        self.relu = nn.ReLU(inplace=True)
        self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1)
        self.resblock1 = self._make_layer(block, 64, layers[0], stride=2)
        self.resblock2 = self._make_layer(block, 128, layers[1], stride=2)
        self.resblock3 = self._make_layer(block, 256, layers[2], stride=2)
        self.resblock4 = self._make_layer(block, 512, layers[3], stride=2)

    def _make_layer(self, block, planes, num_blocks, stride=1, dilation=1):
        """Stack *num_blocks* blocks; only the first may downsample or widen."""
        shortcut = None
        if stride != 1 or self.inplanes != planes * block.expansion:
            # Projection shortcut to match spatial size / channel count.
            shortcut = nn.Sequential(
                nn.Conv2d(self.inplanes, planes * block.expansion, kernel_size=1, stride=stride, bias=False),
                nn.BatchNorm2d(planes * block.expansion),
            )
        stages = [block(self.inplanes, planes, stride, dilation, downsample=shortcut)]
        self.inplanes = planes * block.expansion
        for _ in range(1, num_blocks):
            stages.append(block(self.inplanes, planes, dilation=dilation))
        return nn.Sequential(*stages)

    def forward(self, input):
        stem = self.relu(self.bn1(self.conv1(input)))
        f2 = self.resblock1(stem)  # 1/2 resolution
        f3 = self.resblock2(f2)    # 1/4
        f4 = self.resblock3(f3)    # 1/8
        f5 = self.resblock4(f4)    # 1/16
        return f5, f4, f3, f2, stem

    def _load_pretrained_model(self, model_path):
        """Download a checkpoint and load only the keys this model also has."""
        pretrain_dict = model_zoo.load_url(model_path)
        state_dict = self.state_dict()
        overlap = {k: v for k, v in pretrain_dict.items() if k in state_dict}
        state_dict.update(overlap)
        self.load_state_dict(state_dict)
def build_backbone(backbone, pretrained, in_c=3):
    """Instantiate a ResNet backbone by name; NotImplementedError if unknown."""
    factories = {
        'resnet18': ResNet18,
        'resnet34': ResNet34,
        'resnet50': ResNet50,
        'resnet101': ResNet101,
        'resnet152': ResNet152,
    }
    if backbone not in factories:
        raise NotImplementedError
    return factories[backbone](pretrained, in_c=in_c)
import re
from typing import List, Union, Optional, Generator, TypeVar, Iterable, Iterator
from . import LineInterface, Comment as oldComment, LineError, NoOptionError, UnitBase
class NoSectionError(Exception):
    """Raised when the requested section is missing."""
    def __init__(self, section: str):
        super().__init__('No section: {!r}'.format(section))
class DuplicateSectionError(Exception):
    """Raised when a section with the same name already exists."""
    def __init__(self, section: str):
        super().__init__('Section already exists: {!r}'.format(section))
# noinspection PyClassHasNoInit
class Comment(oldComment):
    """Comment line; recognizes both '#' and ';' as comment prefixes."""
    COMMENT_CHARS = ('#', ';')
OptionType = TypeVar('OptionType', bound='Option')
class Option(LineInterface):
    """Option line: ``name = value`` (':' is also accepted as delimiter).

    ``value`` is a list: repeated occurrences of the same option are merged
    (see __add__) and re-emitted by __str__ as one ``name=value`` line per
    stored value.
    """
    DELIMITER_CHAR = '='
    DELIMITER_CHARS = (DELIMITER_CHAR, ':')
    _TMPL = r"""
    (?P<option>.*?)
    \s*(?P<vi>[{delim}])\s*
    (?P<name>.*)$
    """
    _PATTERN = re.compile(_TMPL.format(delim=''.join(DELIMITER_CHARS)), re.VERBOSE)
    BOOLEAN_STATES = {'1': True, '0': False,
                      'yes': True, 'no': False,
                      'true': True, 'false': False,
                      'on': True, 'off': False}
    BOOLEAN_STR = ('no', 'yes')
    # name: the option key; value: every value seen for this key.
    name: str
    value: List[str] = None

    @classmethod
    def test(cls, line: str) -> bool:
        # True when the line parses as 'key<delim>value'.
        return bool(cls._PATTERN.match(line))

    def __init__(self, line: str, value: List[str] = None, *, test: bool = False):
        """
        :raise: LineError
        """
        self.value = []  # type: List[str]
        if not self.test(line):
            if test:
                raise LineError
            # Pre-parsed form: *line* is the bare name, *value* the values.
            # NOTE(review): assert-based validation is stripped under -O.
            assert value
            self.name = line  # type: str
            self.value.extend(value)
            return
        mo = self._PATTERN.match(line)
        # Group names are historical: 'option' is the key, 'name' the value.
        self.name, value = mo.group('option', 'name')  # type: str, str
        self.value.append(value)

    def __repr__(self) -> str:
        return '{}({}{}{!r})'.format(self.__class__.__name__, self.name, self.DELIMITER_CHAR, self.value)

    def __str__(self) -> str:
        # One 'name=value' line per stored value.
        return '\n'.join(map(lambda x: '{}{}{}'.format(self.name, self.DELIMITER_CHAR, self._from_type(x)),
                             self.value)) + '\n'

    @classmethod
    def _from_type(cls, value: Union[str, bool, int, float]) -> str:
        """Serialize a typed value to its ini text form (bool -> yes/no)."""
        if isinstance(value, bool):
            value = cls.BOOLEAN_STR[int(value)]
        elif isinstance(value, int):
            value = '{:d}'.format(value)
        elif isinstance(value, float):
            value = '{:f}'.format(value)
        return str(value)

    def __add__(self, other: OptionType):
        # Merge another option's values, skipping duplicates; mutates self.
        for value in other.value:
            if value not in self.value:
                self.value.append(value)
        return self
class Section(LineInterface):
    """Section line: ``[name]`` plus the option/comment lines it contains.

    A Section constructed with ``line=None`` is the implicit anonymous
    (global) section used for lines that appear before the first header.
    """
    _lines: List[Union[Option, Comment]]
    name: str = None

    @classmethod
    def test(cls, line: str) -> bool:
        # A section header is a (stripped) line wrapped in square brackets.
        line = line.strip()
        return line.startswith('[') and line.endswith(']')

    def __init__(self, line: Optional[str], *, test: bool = False):
        """
        :raise: LineError
        """
        self._lines = []  # type: List[Union[Option, Comment]]
        if line is None:
            # Anonymous (global) section: name stays None.
            return
        if not self.test(line):
            if test:
                raise LineError
            # Not a bracketed header: treat the raw text as the name.
            self.name = line
            return
        self.name = line.strip().strip('[]')  # type: str

    def __repr__(self) -> str:
        return '{}({!r} {!r})'.format(self.__class__.__name__, self.name, self._lines)

    def __str__(self) -> str:
        # Render contained lines; prepend the '[name]' header when named.
        text = ''.join(map(lambda x: str(x), self._lines)) + '\n'
        if self.name:
            return '[{}]\n{}'.format(self.name, text)
        return text

    def __bool__(self) -> bool:
        # The anonymous section is truthy only when it holds lines.
        if self.name is None:
            return bool(self._lines)
        return True

    def append(self, token: Union[Option, Comment], option_name: str = None, before: bool = True):
        """Insert *token*, optionally anchored before/after *option_name*.

        If *token* is an Option whose name already exists, its values are
        merged into the existing Option (Option.__add__) at that position
        instead of inserting a new line.
        """
        index = None
        option = None
        if option_name is not None:
            index = self._lines.index(self.get(option_name))
            if not before:
                index += 1
        if isinstance(token, Option):
            try:
                option = self.get(token.name)
            except NoOptionError:
                pass
            else:
                # An existing option wins: merge at its current position.
                index = self._lines.index(option)
        if index is None:
            self._lines.append(token)
        else:
            if option:
                self._lines[index] += token
            else:
                self._lines.insert(index, token)

    @property
    def comments(self) -> Generator[Comment, None, None]:
        # All comment lines of this section, in order.
        return (x for x in self._lines if isinstance(x, Comment))

    @property
    def options(self) -> Generator[Option, None, None]:
        # All option lines of this section, in order.
        return (x for x in self._lines if isinstance(x, Option))

    def get(self, option_name: str) -> Option:
        """
        Return the option named *option_name*.

        :raise: NoOptionError
        """
        for value in self.options:
            if value.name == option_name:
                return value
        raise NoOptionError(option_name)

    def remove(self, token: Union[Option, Comment]):
        # Remove a previously appended line object.
        self._lines.remove(token)
class Unit(UnitBase):
    """Parser unit for an ini file.

    Holds a list of Section objects; the first is always an unnamed
    (global) section collecting lines that precede any section header.

    Bug fix: the ``sections`` property mapped ``x.value`` over the stored
    Section objects, but Section defines ``.name`` (there is no ``.value``
    attribute), so iterating it raised AttributeError. It now yields names,
    matching its declared Iterator[str] type.
    """
    _lines: List[Section]

    def __init__(self):
        super().__init__()
        self._lines = [Section(None)]

    def read(self, lines: Iterable[str]):
        """Parse raw lines into sections/options/comments.

        Repeated section headers are merged into the first occurrence.
        """
        sections = {}
        section_cur = self.get()
        # Determine each line's type and route it to the current section.
        for line in self._line_split_backslash(lines):
            token = self.__line_to_type(line)
            if token is None:
                pass
            elif isinstance(token, Section):
                try:
                    section_cur = sections[token.name]
                except KeyError:
                    section_cur = token
                    sections[token.name] = section_cur
                    self.append(section_cur)
            else:
                section_cur.append(token)

    @staticmethod
    def __line_to_type(line: str) -> Union[Comment, Option, Section, None]:
        """Classify *line*; unparseable lines fall back to Comment."""
        if not line:
            return
        try:
            token = Comment(line, test=True)
        except LineError:
            try:
                token = Section(line, test=True)
            except LineError:
                try:
                    token = Option(line, test=True)
                except LineError:
                    token = Comment(line)
        return token

    @staticmethod
    def is_token(line: str) -> bool:
        # True when the line starts a new comment/section/option token.
        return bool(Comment.test(line) or Section.test(line) or Option.test(line))

    def _line_split_backslash(self, lines: Iterable[str]) -> Generator[str, None, None]:
        """Join physical lines ending in '\\' into logical lines.

        A continuation is terminated early when a blank line or a new token
        is encountered, so a stray trailing backslash cannot swallow the
        next section/option.
        """
        line_save = ''
        for line in lines:
            line = line.strip()
            if not line_save:
                if not line:
                    pass
                elif line[-1] == '\\':
                    line_save = line[:-1]
                else:
                    yield line
                continue
            if not line:
                yield line_save
                line_save = ''
            elif line[-1] == '\\':
                line = line[:-1]
                if self.is_token(line):
                    yield line_save
                    line_save = line
                else:
                    line_save += ' ' + line
            elif self.is_token(line):
                yield line_save
                yield line
                line_save = ''
            else:
                line_save += ' ' + line
                yield line_save
                line_save = ''
        if line_save:
            yield line_save

    @property
    def sections(self) -> Iterator[str]:
        # Fixed: Section exposes .name, not .value.
        return map(lambda x: x.name, self._lines)

    def get(self, section_name: Optional[str] = None) -> Section:
        """
        Return the section named *section_name* (None = the global section).

        :raise: NoSectionError
        """
        for value in self._lines:
            if value.name == section_name:
                return value
        raise NoSectionError(section_name)

    def remove(self, section: Section):
        # Remove a previously appended Section object.
        self._lines.remove(section)

    def append(self, section_new: Section, section: Optional[Section] = None, before: bool = False):
        """
        Insert *section_new*, optionally positioned relative to *section*.
        The anonymous section (name None) always goes first.

        :raise: DuplicateSectionError
        """
        try:
            self.get(section_new.name)
        except NoSectionError:
            pass
        else:
            raise DuplicateSectionError(section_new.name) from None
        index = None
        if section_new.name is None:
            index = 0
        elif section is not None:
            index = self._lines.index(section)
            if not before:
                index += 1
        if index is None:
            self._lines.append(section_new)
        else:
            self._lines.insert(index, section_new)
|
#!/usr/bin/env python
"""
This is a basic example of how to use the swiftradio library to
fetch radio telemetry information from a SWIFT-SDR and print the information
to the console. The SwiftRadioClient class is used to connect to a Swift-SDR
unit, execute the 'sysstat' command and then process and print the received
information to stdout.
"""
__author__ = "Steve Alvarado"
__maintainer__ = "Steve Alvarado"
__email__ = "alvarado@tethers.com"
__company__ = "Tethers Unlimited Inc."
__version__ = "1.0.2"
__date__ = "Late Updated: 05/18/16 (SRA)"
#-------------------------------------------------------------------------------------------------------------
# Imports
#-------------------------------------------------------------------------------------------------------------
import sys
import traceback
import argparse
import time
sys.path.insert(1, "../../Packages")
import swiftradio
from swiftradio.clients import SwiftRadioEthernet
#---------------------------------------------------------------------------------------------------
# Main Program
#---------------------------------------------------------------------------------------------------
if __name__ == "__main__":
radio_interface = None
# create command line parser
parser = argparse.ArgumentParser(prog = __file__, description = __doc__, add_help=True)
parser.add_argument("ip", type = str, help = "ip address of radio.")
args = parser.parse_args()
print "--"
print "Display Radio Status"
print "Version {}".format(__version__)
print "Tethers Unlimited Inc. (c)"
try:
# Create a SWIFT-SDR Interface and Connect to Radio
radio_interface = SwiftRadioEthernet(args.ip)
# connect to radio
if radio_interface.connect():
# display connected radio information
sysinfo = radio_interface.execute_command("sysinfo")
print "\n[Radio Info]"
print " Device ID : {}".format( sysinfo["id"] )
print " Platform : {}".format( sysinfo["platform"] )
print " Programmer : {}".format( sysinfo["software_builder"] )
print " Build Revision : {}".format( sysinfo["build_revision"] )
while(1):
# execute sysstat command to get real-time temp and uptime data
sysstat = radio_interface.execute_command("sysstat")
if "temp" in sysstat and "uptime" in sysstat:
temp = sysstat["temp"]
uptime = sysstat["uptime"]
# note that sysstat outputs are not registered in some FPGA builds yet. However
# we can use find_command_data_by_name() to parse returned sysstat data.
else:
temp = swiftradio.tools.find_command_data_by_name(sysstat, "temp", "float")
uptime = swiftradio.tools.find_command_data_by_name(sysstat, "uptime", "uint")
if (temp is None) or (uptime is None):
print "\n**dropped packets**"
continue
hrs = (uptime / 60) / 60
mins = (uptime / 60) % 60
secs = uptime % 60
# Print Telemetry Data
print "\n---------------------------------------------"
print " Telemetry"
print "---------------------------------------------"
print " Uptime: {} hours {} mins {} secs".format( hrs, mins, secs)
print " Temperature: {:.2f} C".format( temp )
time.sleep(1)
except KeyboardInterrupt:
print "\nexiting program...\n"
except:
traceback.print_exc()
# Exit Program
if radio_interface != None:
radio_interface.disconnect() # Always close connection before program exit!
|
# -*- coding: utf-8 -*-
# snapshottest: v1 - https://goo.gl/zC4yUc
from __future__ import unicode_literals
from snapshottest import Snapshot
snapshots = Snapshot()

# Expected file-tree outputs captured by snapshottest: keys are relative
# paths, values the rendered file contents. Regenerate with the test
# runner's snapshot-update mode rather than editing these by hand.
snapshots['test_BatchCompiler_1 1'] = {
    'b/c/base': '''head:
base - head
body:
base - body
''',
    'b/f2': '''f2 -- BBB
There out her child sir his lived.''',
    'f1': '''head:
base - head
body:
f1 -- AAA
In post mean shot ye.
'''
}

snapshots['test_compile_dir_2_zip 1'] = {
    'b/c/base': '''head:
base - head
body:
base - body
''',
    'b/f2': '''f2 -- BBB compile_dir_to_zip
There out her child sir his lived.''',
    'f1': '''head:
base - head
body:
f1 -- AAA compile_dir_to_zip
In post mean shot ye.
'''
}

snapshots['test_compile_zip_2_zip 1'] = {
    'b/c/base': '''head:
base - head
body:
base - body
''',
    'b/f2': '''f2 -- BBB zip
There out her child sir his lived.''',
    'f1': '''head:
base - head
body:
f1 -- AAA zip
In post mean shot ye.
'''
}

snapshots['test_compile_file 1'] = '''hello compile_file!
a is
b is '''

snapshots['test_compile_zip_2_dir 1'] = {
    'b/c/base': '''head:
base - head
body:
base - body
''',
    'b/f2': '''f2 -- BBB zip
There out her child sir his lived.''',
    'f1': '''head:
base - head
body:
f1 -- AAA zip
In post mean shot ye.
'''
}

snapshots['test_compile_file_usisng_extensions 1'] = "[1, 2, 'jack']"
|
from cascade.input_data.configuration.form import Configuration
from flask import Blueprint, abort, jsonify, request
bp = Blueprint("service", __name__)
@bp.route("/", methods=("POST",))
def validate():
if not request.is_json:
abort(406)
raw_settings = request.get_json()
settings = Configuration(raw_settings)
errors = settings.validate_and_normalize()
# strip display names
errors = [[key, error] for key, _, error in errors]
return jsonify(errors)
|
# 5. Lambda Functions - Anonymous function
# simple function
def func(x):
    # Named-function counterpart of the func2 lambda defined below.
    return x + 5
# lambda inside function
def func3(x):
    # NOTE(review): the lambda parameter x shadows func3's x, and the call
    # passes the constant 20, so func3 returns 30 regardless of its
    # argument -- presumably deliberate for the shadowing demo; confirm.
    func4 = lambda x: x + 10
    return func4(20)
func2 = lambda x: x + 5  # simple lambda
func5 = lambda x, y: x + y  # lambda with 2 variables
func6 = lambda x, y=5: x * y  # lambda with default value
# lambda, map, filter
a = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
new_list = list(map(lambda x: x + 5, a))  # lambda with map
new_list2 = list(filter(lambda x: x % 2 == 0, a))  # lambda with filter
print(new_list2)  # -> [2, 4, 6, 8, 10]
|
'''
@author: Necro
'''
import tensorflow as tf
import numpy as np
# Fit y = 0.3*x + 0.1 by gradient descent on synthetic data.
# NOTE(review): TensorFlow 1.x graph/session API (tf.Session,
# global_variables_initializer); under TF2 this requires tf.compat.v1 --
# confirm the pinned TF version.
xdata = np.random.rand(100).astype(np.float32)  # 100 inputs in [0, 1)
ydata=0.3*xdata+0.1  # ground truth: slope 0.3, intercept 0.1
weight=tf.Variable([0.])  # trainable slope
bias=tf.Variable([0.])  # trainable intercept
y=weight*xdata+bias
#loss=tf.reduce_mean(tf.square(y-ydata))
loss=tf.losses.mean_squared_error(y, ydata)
# optimizer=tf.train.GradientDescentOptimizer(0.1)
# learning rate 1.0 is unusually large -- presumably for fast demo runs
optimizer=tf.train.AdamOptimizer(1)
train=optimizer.minimize(loss)
init=tf.global_variables_initializer()
sess=tf.Session()
sess.run(init)
R=100  # training steps
print(sess.run(weight),sess.run(bias))
for step in range(R) :
    sess.run(train)
    if step % 50 == 0:
        # periodic progress dump of the fitted parameters
        print(sess.run(weight),sess.run(bias))
#
# for step in range(R) :
# sess.run(optimizer.minimize(tf.losses.absolute_difference(y, ydata)))
#
# print(sess.run(weight),sess.run(bias))
#
# for step in range(R) :
# sess.run(optimizer.minimize(tf.losses.compute_weighted_loss(y, ydata)))
#
# print(sess.run(weight),sess.run(bias))
#
# for step in range(R) :
# sess.run(optimizer.minimize(tf.losses.hinge_loss(y, ydata)))
#
# print(sess.run(weight),sess.run(bias))
#
# for step in range(R) :
# sess.run(optimizer.minimize(tf.losses.huber_loss(y, ydata)))
#
# print(sess.run(weight),sess.run(bias))
#
# for step in range(R) :
# sess.run(optimizer.minimize(tf.losses.log_loss(y, ydata)))
#
# print(sess.run(weight),sess.run(bias))
#
# for step in range(R) :
# sess.run(optimizer.minimize(tf.losses.mean_pairwise_squared_error(y, ydata)))
#
# print(sess.run(weight),sess.run(bias))
#
# for step in range(R) :
# sess.run(optimizer.minimize(tf.losses.sigmoid_cross_entropy(y, ydata)))
#
# print(sess.run(weight),sess.run(bias)) |
from django.conf.urls import url
from . import views
urlpatterns = [
    #login START
    url(r'^$', views.index),
    url(r'^home$', views.home),
    url(r'^login$', views.login),
    url(r'^create$', views.create),
    url(r'^success$', views.success),
    url(r'^logout$', views.logout),
    #login END
    # Fix: these two patterns were missing the '$' anchor used everywhere
    # else, so any URL *starting with* 'add_item'/'create_item' matched.
    url(r'^add_item$', views.add_item),
    url(r'^create_item$', views.create_item),
    url(r'^detail/(?P<id>\d+)$', views.detail),
    url(r'^add_list/(?P<id>\d+)$', views.add_list),
    # Fix: the '$' anchor belongs outside the capture group
    # (was '(?P<...>\d+$)'); captured value is unchanged.
    url(r'^delete/(?P<product_id>\d+)$', views.delete),
    url(r'^remove_list/(?P<id>\d+)$', views.remove_list),
]
#!/usr/bin/env python3
#
# # +type xyc
# # - x are strings correspondig to centers of the violins (as if individual bars in a barplot)
# # - y is the y-axis placement
# # - c is [0-1] value indicating the shade
# cat xyc.txt | mplot sina +type xyc
# snv 10 0.5
# snv 100 1
# snv 20 0
# indel 20 0.3
# indel 5 0.1
#
# cat xy.txt | mplot sina +type xy # same as +type xyc but using 1 for shading
# cat xy.txt | mplot sina +type xycb # same as +type xyc but overlay a boxplot created from 4-th column
#
import matplotlib as mpl
mpl.use('Agg')
import matplotlib.pyplot as plt
import numpy as np
from matplotlib.gridspec import GridSpec
from matplotlib.patches import Polygon
import matplotlib.cm as cm
import sys,random
# Template configuration consumed by the 'mplot' preprocessor: the
# '# key: value' marker lines below are substitution points. With the
# literal defaults, some lines fail (see notes) -- presumably this block
# is never executed unsubstituted; confirm against the mplot driver.
fs = None # +fs 18 # increase font size of all texts
# fs: fs
if fs!=None: plt.rcParams.update({'font.size': fs})
type = 'xyc' # +type <type> .. xyc xy xycb
# type: 'type'
if type!='xyc' and type!='xy' and type!='xycb':
    print('Only +type xyc, xy, or xycb is supported atm')
    sys.exit(1)
do_color = False
do_box = False
if type=='xycb': do_box = True
if type=='xycb' or type=='xyc': do_color = True
files = []
# FILES
# NOTE(review): with the unsubstituted default [] this raises IndexError.
fname = files[0]
sty = 'mine' # +sty ggplot
# sty: 'sty'
wh = (5,4) # height ratios
# wh: wh
nbin = 100 # +n 100
# n: nbin
jitter = ['0,0'] # +jr 0.5,0.5 # x=fraction of the maximum value,y=abs,y%=rel
# jr: 'jitter'
# NOTE(review): list has no .split(); this works only after substitution
# replaces jitter with a string -- confirm.
jitter = jitter.split(',')
xmin = None
xmax = None
xmax = None
def color_scale(c1, c2, mix=0):
    """Linearly interpolate between colors *c1* and *c2*; mix in [0, 1]."""
    rgb_a = np.array(mpl.colors.to_rgb(c1))
    rgb_b = np.array(mpl.colors.to_rgb(c2))
    blended = (1 - mix) * rgb_a + mix * rgb_b
    return mpl.colors.to_hex(blended)
def percentile(vals, p):
    """Empirical percentile of the sorted sequence *vals* using the
    (N+1)-rank linear-interpolation rule; clamps to the data range."""
    N = len(vals)
    rank = p * (N + 1)
    k = int(rank)
    frac = rank - k
    if k <= 0:
        return vals[0]
    if k >= N:
        return vals[N - 1]
    # Interpolate between the k-th and (k+1)-th order statistics.
    return vals[k - 1] + frac * (vals[k] - vals[k - 1])
def adjacent_values(vals):
    """Return [lower, upper] adjacent values (Tukey 1.5*IQR fences) for the
    sorted sequence *vals*, clamped to the data range and to the quartiles.

    Bug fix: the quartiles were computed from a global ``sdat`` instead of
    the ``vals`` argument, so the fences did not describe the data passed
    in (and raised NameError when no such global existed).
    """
    q1 = percentile(vals, 0.25)
    q3 = percentile(vals, 0.75)
    uav = q3 + (q3 - q1) * 1.5
    if uav > vals[-1]: uav = vals[-1]
    if uav < q3: uav = q3
    lav = q1 - (q3 - q1) * 1.5
    if lav < vals[0]: lav = vals[0]
    if lav > q1: lav = q1
    return [lav, uav]
# -- Read the data --
def read_data(fname):
    """Read tab-separated rows (bar-name, y[, shade[, box-value]]) from fname.

    Updates the module-level xmin/xmax plot range and converts the jitter
    settings to numbers. Returns (dat, xdat, ydat, cdat, bdat): the raw and
    binned per-bar data with tick positions, the scatter x/y/shade arrays,
    and the boxplot payload (used only with +type xycb).
    """
    global xmin, xmax, jitter
    ymin = ymax = None
    dat = {'raw': {}, 'binned': {}, 'box': {}, 'xticks': [], 'xtick_labels': []}
    # with-statement: the original handle was never closed
    with open(fname, 'r') as file:
        for line in file:
            row = line.rstrip('\n').split('\t')
            if row[0][0] == '#': continue  # skip comment rows
            bar = row[0]
            if bar not in dat['raw']:
                dat['raw'][bar] = []
                dat['box'][bar] = []
                dat['xticks'].append(len(dat['raw']) + 1)
                dat['xtick_labels'].append(row[0])
            y = float(row[1])
            if ymin == None or ymin > y: ymin = y
            if ymax == None or ymax < y: ymax = y
            c = 1
            if do_color: c = float(row[2])
            dat['raw'][bar].append({'y': y, 'c': c})
            if do_box: dat['box'][bar].append(float(row[3]))
    # histogram each bar's y values into nbin+1 equal-width bins
    for bar in dat['raw']:
        bins = [{'y': 0, 'n': 0, 'c': []} for x in range(nbin + 1)]
        raw = dat['raw'][bar]
        for row in raw:
            i = int(nbin * (row['y'] - ymin) / (ymax - ymin))
            bins[i]['y'] = row['y']
            bins[i]['n'] += 1
            bins[i]['c'].append(row['c'])
        dat['binned'][bar] = bins
    # nmax = tallest bin across all bars; it sets the horizontal slot width per bar
    nmax = 0
    for bar in dat['binned']:
        bins = dat['binned'][bar]
        for bin in bins:
            if nmax < bin['n']: nmax = bin['n']
    xmin = 0.5 * nmax
    xmax = len(dat['binned']) * nmax + 0.5 * nmax
    xjitter = nmax * float(jitter[0])
    yjitter = jitter[1]
    # a trailing '%' means the y jitter is relative to the point's y value;
    # encode "relative" as a negative number
    if yjitter[-1] == '%': yjitter = -float(yjitter[:-1])
    else: yjitter = float(yjitter)
    for i in range(len(dat['xticks'])):
        dat['xticks'][i] = (dat['xticks'][i] - 1) * nmax
    xdat = []
    ydat = []
    cdat = []
    bdat = {'dat': [], 'pos': [], 'wd': []}
    ibar = 0
    for bar in dat['binned']:
        ibar += 1
        bins = dat['binned'][bar]
        for bin in bins:
            x0 = -0.5 * bin['n'] + ibar * nmax
            cvals = sorted(bin['c'], reverse=True)
            for i in range(bin['n']):
                xrand = random.random() * xjitter - 0.5 * xjitter
                yjmax = yjitter
                # fixed: the relative jitter was computed but the result was
                # silently discarded, so y% jitter had no effect at all
                if yjmax < 0: yjmax = bin['y'] * (-yjmax) / 100.
                yrand = random.random() * yjmax - 0.5 * yjmax
                # this can only be used when colorbar is not needed, otherwise cmap has to be created:
                #   color = color_scale(colors[0],colors[1],cvals[i])
                color = cvals[i]
                xdat.append(xrand + x0 + i)
                ydat.append(yrand + bin['y'])
                cdat.append(color)
        if do_box:
            bdat['dat'].append(dat['box'][bar])
            bdat['pos'].append(ibar * nmax)
            bdat['wd'].append(xmax * 0.15)
    return (dat, xdat, ydat, cdat, bdat)
dat,xdat,ydat,cdat,bdat = read_data(fname)
# -- create colormap --
colors = None # +cl '#242424,#faa300,#f4640d' # 3rd color for boxplot, optional
# cl: 'colors'
if colors != None:
    colors = colors.split(',')
    # fixed: assigning colors[2]/colors[1] on a one-element list raised
    # IndexError; replicate the single color into all three slots instead
    if len(colors) == 1: colors = colors * 3
else:
    colors = ['#242424', '#faa300', '#f4640d']
# build 'mycmap': a linear ramp from colors[0] (shade 0) to colors[1] (shade 1)
clo = mpl.colors.to_rgb(colors[0])
chi = mpl.colors.to_rgb(colors[1])
cm.register_cmap(cmap=mpl.colors.LinearSegmentedColormap('mycmap',{
    'red':   [(0.,clo[0],clo[0]), (1.,chi[0],chi[0])],
    'green': [(0.,clo[1],clo[1]), (1.,chi[1],chi[1])],
    'blue':  [(0.,clo[2],clo[2]), (1.,chi[2],chi[2])],
    }))
# -- plot the data --
plt_args = {} # for example: +pa "mec='grey',mfc='grey',zorder=100,clip_on=False,alpha=0.5"
plt_args = {'zorder':100,'clip_on':False,'alpha':0.5}
# pa: {plt_args}
fig, ax1 = plt.subplots(1, 1, figsize=wh)
sc = ax1.scatter(xdat,ydat, s=10,marker='o',c=cdat,cmap='mycmap',**plt_args)
if do_box:
parts = ax1.boxplot(bdat['dat'],positions=bdat['pos'],widths=bdat['wd'],patch_artist=True,vert=True,showfliers=False)
ec = 'black'
for i in range(len(parts['boxes'])):
pc = parts['boxes'][i]
fc = [1,1,1,0]
ec = colors[2]
pc.set_facecolor(fc)
pc.set_edgecolor(ec)
for item in ['whiskers', 'fliers', 'medians', 'caps']:
for pc in parts[item]:
pc.set_color(ec)
pc.set_linewidth(1.5)
cbl = None # +cbl 'Colorbar label'
# cbl: 'cbl'
if cbl!=None:
# https://matplotlib.org/3.1.0/api/_as_gen/matplotlib.pyplot.colorbar.html
cb = fig.colorbar(sc,shrink=0.7,aspect=15,pad=0.0) #, orientation='horizontal',#aspect=10, pad=0.04)
cb.set_label(cbl,labelpad=10)
cb.set_clim(0,1)
if sty=='mine':
    # minimal custom style: drop top/right spines, grey out the remaining chrome
    ax1.spines['top'].set_visible(False)
    ax1.spines['right'].set_visible(False)
    ax1.get_xaxis().tick_bottom()
    ax1.get_yaxis().tick_left()
    ax1.spines['bottom'].set_color('grey')
    ax1.spines['left'].set_color('grey')
    # NOTE(review): '555555' lacks the leading '#' used for colors elsewhere --
    # confirm matplotlib accepts it in the targeted version.
    mpl.rcParams['text.color'] = '555555'
    #args = {'color':'#555555'}
    ax1.patch.set_visible(False)
yscale = None # +ys log,symlog
# ys: 'yscale'
if yscale!=None: ax1.set_yscale(yscale) # log, symlog
ta_args = {'y':1.08} # for example: +ta "y=1.08"
# ta: {ta_args}
title = None
# title: 'title'
if title!=None: ax1.set_title(title,**ta_args)
ylabel = None
# yl: 'ylabel'
if ylabel!=None: ax1.set_ylabel(ylabel,labelpad=5)
xlabel = None
# xl: 'xlabel'
if xlabel!=None: ax1.set_xlabel(xlabel,labelpad=10)
xlim = None # +xr 1.5 # extend nmax (+10% in this example)
# xr: xlim
if xlim==None: xlim = 1.1  # default: widen the x range by 10%
# center the view on the data and scale its width by xlim
x = 0.5*(xmax+xmin)
w = (xmax-xmin)*xlim
ax1.set_xlim(x-w*0.5,x+w*0.5)
xsci = None
ysci = None
# xsci: (xsci)
# ysci: (ysci)
if xsci!=None: ax1.ticklabel_format(style='sci', scilimits=xsci, axis='x') # +xsci -2,2
if ysci!=None: ax1.ticklabel_format(style='sci', scilimits=ysci, axis='y')
lbl_args = {'rotation':35,'ha':'right','multialignment':'center'} # for example: +la "rotation=35,ha='right',ma='center',fontsize=9"
# la: {lbl_args}
ax1.set_xticks(dat['xticks'])
ax1.set_xticklabels(dat['xtick_labels'],**lbl_args)
hdt = None # hide ticks: +hdt 1
# hdt: hdt
if hdt:
    ax1.xaxis.set_tick_params(length=0)
adjust = None
# adj: {adjust}
if adjust!=None: plt.subplots_adjust(**adjust) # for example: +adj bottom=0.2,left=0.1,right=0.95
# dpi: dpi
# the wrapper inserts the actual savefig call at the "# SAVE" anchor below
# SAVE
plt.close()
|
import utils as u
import QUtils as qu
import numpy as np
import multiprocessing as mp
import scipy.fftpack as sp
import time
import matplotlib.pyplot as plt
import pickle
from numpy import linalg as LA
import scipy.stats as st
import sys
import yt; yt.enable_parallelism()
import sys
simName = "Gr_r2"  # data directory name under ../Data/ (and Data/ for metadata)
decimate = 2       # analyze every 2nd saved snapshot
label = ""         # NOTE(review): never passed to main() below -- confirm intent
PLOT = True        # render figures after the analysis pass
class figObj(object):
    """Mutable bag of run metadata shared across this module.

    Every field starts as None and is filled in later by setFigObj()/main().
    """
    def __init__(self):
        for field in ('meta', 'tags', 'N', 'dt', 'framesteps', 'IC', 'phi',
                      'name', 'fileNames_psi', 'indToTuple', 'tupleToInd',
                      'decimate'):
            setattr(self, field, None)
fo = figObj()  # module-level singleton; populated by setFigObj() and main()
def GetOffDiag(psi):
    """Accumulate the off-diagonal part of the one-body matrix from state `psi`.

    For every basis state with nonzero amplitude, hops one particle from mode
    a to mode b (a > b) and adds the bosonic matrix element when the hopped
    state exists in the basis; the (b,a)/(a,b) pair is kept Hermitian.
    Relies on fo.N, fo.indToTuple and fo.tupleToInd being set by qu.GetDicts.
    """
    M = np.zeros((fo.N, fo.N)) + 0j
    for j in range(len(fo.indToTuple)):
        state_j = np.array(fo.indToTuple[j])
        if np.abs(psi[j]) > 0:  # skip basis states with zero amplitude
            for b in range(fo.N):
                for a in range(b+1, fo.N):
                    # hop one particle: mode a loses one, mode b gains one
                    state_i = state_j.copy()
                    state_i[a] = state_j[a] - 1
                    state_i[b] = state_j[b] + 1
                    if tuple(state_i) in fo.tupleToInd:
                        i_ = fo.tupleToInd[ tuple(state_i) ]
                        # bosonic ladder factors: sqrt(n_b + 1) * sqrt(n_a)
                        val_ = psi[j]
                        val_ *= np.sqrt(state_j[b] + 1)
                        val_ *= np.sqrt(state_j[a])
                        val_ *= np.conj(psi[i_])
                        M[b,a] += val_
                        M[a,b] += np.conj(val_)  # Hermitian counterpart
    return M
def Getaa(psi):
    """Evaluate the pair matrix aa[i,j] and vector a[i] via qu.calcOp.

    NOTE(review): qu.calcOp presumably evaluates the expectation value of the
    annihilation-operator product listed in `a=` -- verify against QUtils.
    aa is symmetrized since the two operators commute in that convention.
    """
    aa = np.zeros((fo.N, fo.N)) + 0j
    a = np.zeros(fo.N) + 0j
    for i in range(fo.N):
        a[i] += qu.calcOp(psi, fo, a = [i])
        for j in range(i, fo.N):
            aa_ = qu.calcOp(psi, fo, a = [i,j])
            aa[i,j] += aa_
            if i != j:
                aa[j,i] += aa_  # mirror the upper triangle
    return aa, a
def analyzeTimeStep(i):
    """Load snapshot i and compute its observables.

    Returns (t, N, M, eigs, aa, a, Q, norm): timestamp, mode occupations,
    the one-body matrix, its eigenvalues sorted by magnitude, the pair/mode
    expectation values, the Q parameter, and the state norm (should stay
    close to 1 for unitary evolution -- checked in analyze()).
    """
    psi, N = qu.GetPsiAndN(i, fo)
    norm = np.sum(np.abs(psi)**2)
    # time stamp
    # NOTE(review): the attribute is spelled `framsteps` (set in setFigObj);
    # figObj.__init__ defines an unused `framesteps` -- unify both at once.
    t = fo.dt*fo.framsteps*(i+1)
    # calculate M hat: diagonal = occupations, off-diagonal = coherences
    M = np.zeros((fo.N, fo.N)) + 0j
    M += np.diag(N)
    M += GetOffDiag(psi)
    # calculate lam (eigenvalues, sorted by magnitude)
    eigs, _ = LA.eig(M)
    eigs = qu.sortE(np.abs(eigs),eigs)
    # get aa op
    aa, a = Getaa(psi)
    # get Q param
    Q = np.sum( N - a*np.conj(a) ) / np.sum(fo.IC)
    return t, N, M, eigs, aa, a, Q, norm
def analyze():
    """Fan the per-snapshot analysis out over yt workers and collect results.

    Returns (t, N, M, eigs, aa, a, Q) indexed by snapshot, and warns when the
    state norm drifted from 1 (non-unitary evolution).
    """
    print("Starting di_analysis...")
    outputs = {}
    nkeys = len(fo.fileNames_psi)
    for sto, key in yt.parallel_objects( range(0, len(fo.fileNames_psi), fo.decimate) , 0, storage=outputs):
        sys.stdout.flush()
        outputs_ = analyzeTimeStep(key)
        sto.result = outputs_
        sto.result_id = key
        print("Latest Analyzed: %i of %i" %(key, nkeys))
    print("Data analyzed...")
    n_out = len(outputs)
    t = np.zeros(n_out)
    N = np.zeros((n_out,fo.N)) + 0j
    M = np.zeros((n_out, fo.N, fo.N)) + 0j
    eigs = np.zeros((n_out,fo.N)) + 0j
    aa = np.zeros((n_out, fo.N, fo.N)) + 0j
    a = np.zeros((n_out,fo.N)) + 0j
    Q = np.zeros(n_out) + 0j
    norm = np.zeros(n_out) + 0j
    # fixed: `outputs.keys()[i]` raises TypeError on Python 3 (dict_keys is
    # not subscriptable); enumerate the dict directly instead. Ordering is
    # irrelevant here because SaveStuff() re-sorts every series by t.
    for i, key_ in enumerate(outputs):
        t_, N_, M_, eigs_, aa_, a_, Q_, norm_ = outputs[key_]
        t[i] = t_
        N[i] = N_
        M[i] = M_
        eigs[i] = eigs_
        aa[i] = aa_
        a[i] = a_
        Q[i] = Q_
        norm[i] = norm_
    print("min norm: %f\nmax norm: %f" %(np.min(norm), np.max(norm)) )
    if np.abs(np.min(norm) - 1) > .05 or np.abs(np.max(norm) - 1) > .05:
        print("WARNING: evolution was not unitary, consider a smaller timestep.")
    return t, N, M, eigs, aa, a, Q
def setFigObj(name):
    """Populate the global `fo` with metadata for simulation `name`."""
    # read in simulation parameters
    meta = u.getMetaKno(name, dir = 'Data/', N = "N", dt = "dt", frames = "frames",
        framesteps = "framesteps", IC = "IC", omega0 = "omega0", Lambda0 = "Lambda0")
    fo.meta = meta
    # sets the figure object with these parameters
    # this is basically just so I can access them in the global scope
    fo.name = name
    fo.N = fo.meta["N"]
    fo.dt = fo.meta["dt"]
    # NOTE(review): misspelled attribute `framsteps`; analyzeTimeStep reads
    # the same spelling, so renaming must happen in both places at once.
    fo.framsteps = fo.meta["framesteps"]
    fo.IC = fo.meta["IC"]
    np.random.seed(1)  # fixed seed: reproducible random phases across runs
    fo.phi = np.random.uniform(0, 2 * np.pi, fo.N)
    # this is basically just to see how many time drops there were
    fo.fileNames_psi = u.getNamesInds('Data/' + name + "/" + "psi" + fo.tags[0])
    qu.GetDicts(fo)
def makeNFig(t, N):
    """Plot each mode's expected occupation vs time; saves <name>_Num.pdf."""
    fig, ax = plt.subplots(figsize=(6, 6))
    ax.set_xlabel(r'$t$')
    for mode in range(fo.N):
        ax.plot(t, N[:, mode], label=r'$E[\hat N_%i]$' % mode)
    ax.set_xlim(0, np.max(t))
    ax.set_ylim(0, np.max(N) * 1.05)
    ax.legend()
    fig.savefig("../Figs/" + fo.name + "_Num.pdf", bbox_inches='tight')
def makeMFig(t, lams):
    """Plot the one-body matrix eigenvalues vs time; saves <name>_lams.pdf."""
    fig, ax = plt.subplots(figsize=(6, 6))
    ax.set_xlabel(r'$t$')
    for mode in range(fo.N):
        ax.plot(t, lams[:, mode], label=r'$\lambda_%i$' % mode)
    ax.set_xlim(0, np.max(t))
    ax.set_ylim(0, np.max(lams) * 1.05)
    ax.legend()
    fig.savefig("../Figs/" + fo.name + "_lams.pdf", bbox_inches='tight')
def makePOQFig(t, eigs, Q):
    """Plot depletion (1 - lambda_p/n_tot) and the Q parameter; saves <name>_POQ.pdf."""
    fig, ax = plt.subplots(figsize=(6, 6))
    n_total = np.sum(fo.IC)
    ax.set_xlabel(r'$t$')
    depletion = 1. - (eigs[:, -1] / n_total)
    ax.plot(t, depletion, label=r'$1 - \lambda_p/n_{tot}$')
    ax.plot(t, Q, label=r'$Q$')
    ax.set_xlim(0, np.max(t))
    ax.set_ylim(0, 1.05)
    ax.legend()
    fig.savefig("../Figs/" + fo.name + "_POQ.pdf", bbox_inches='tight')
def constructSq(a,aa,M):
    """Squeezing-parameter time series from <a>, <aa> and the one-body matrix M.

    At each time step, projects onto the principal eigenvector of M (the
    dominant mode) and returns 1 + 2*E[da^dag da] - 2*|Var[a_S]|; the plot in
    makeSqueezeFig treats values below 1 as squeezing.
    """
    N = len(a[0])
    n = np.sum(np.diag(M[0]))  # NOTE(review): unused -- confirm before removing
    xi_p = np.zeros( (len(a), N) ) + 0j
    aaS = np.zeros( len(a) ) + 0j
    baS = np.zeros( len(a) ) + 0j
    aS = np.zeros( len(a) ) + 0j
    for i in range(len(a)):
        M_ = M[i]
        eigs, psis = LA.eig(M_)
        psis = qu.sortVects(np.abs(eigs),psis)
        eigs = qu.sortE(np.abs(eigs),eigs)
        principle = psis[:,-1]  # eigenvector of the largest-|.| eigenvalue
        xi_p[i,:] = principle#*np.sqrt(eigs[-1])
        # project a, aa and M onto the principal mode
        for k in range(N):
            k_ = (-1*k -1)%N
            #xi_k = np.conj(xi_p[i,k_])
            xi_k = xi_p[i,k]
            aS[i] += xi_k*a[i,k]
            for j in range(N):
                j_ = (-1*j -1)%N
                #xi_j = np.conj(xi_p[i,j_])
                xi_j = xi_p[i,j]
                aaS[i] += xi_k*xi_j*aa[i,k,j]
                baS[i] += np.conj(xi_k)*xi_j*M[i,k,j]
    dbaS = baS - np.conj(aS)*aS  # fluctuation of a^dag a in the principal mode
    daaS = aaS - aS*aS           # fluctuation (variance) of a_S
    return 1 + 2*dbaS - 2*np.abs(daaS)
def makeSqueezeFig(t, aa, M, a):
    """Plot the squeezing parameter vs time and save <name>_Sq.pdf.

    Marks the analytic prediction point and the curve's minimum.
    """
    sq = constructSq(a, aa, M)
    fig, ax = plt.subplots(figsize = (6,6))
    ax.set_xlabel(r'$t$')
    ax.plot(t, sq)
    ax.text(.5,.9,r'$1 + 2 E[\delta \hat a_S^\dagger \delta \hat a_S ] - 2 |Var[\hat a_S]|$', ha='center', va='center', transform= ax.transAxes,
        bbox = {'facecolor': 'white', 'pad': 5})
    ax.plot([0, np.max(t)], [1,1], "r:")  # no-squeezing reference line
    # fixed: `fo.n` is never assigned anywhere in this module (AttributeError);
    # use the total particle number from the initial conditions, as the other
    # figures do.
    n_tot = np.sum(fo.IC)
    r_pred = np.log(n_tot**(1/6.))
    t_pred = .6/(5*.1)  # NOTE(review): magic numbers -- document their origin
    ax.plot([t_pred], [np.exp(-2*r_pred)], 'ko')
    index = np.argmin(sq)
    ax.plot([t[index]], [sq[index]], 'bo')  # deepest squeezing reached
    ax.set_xlim(0, np.max(t[sq<2]) )
    ax.set_ylim(0,2.)
    ax.legend(loc = 'lower right')
    fig.savefig("../Figs/" + fo.name + "_Sq.pdf",bbox_inches = 'tight')
def SaveStuff(t, Num, M, eigs, aa, a, Q):
    """Sort every series by the (unsorted) timestamps and dump each array to
    ../Data/<name>/_<tag>.npy; t itself is sorted last."""
    ordered = [
        ("_t", None),  # placeholder: t must be sorted after the others
        ("_N", qu.sortE(t, Num)),
        ("_M", qu.sortE(t, M)),
        ("_eigs", qu.sortE(t, eigs)),
        ("_aa", qu.sortE(t, aa)),
        ("_a", qu.sortE(t, a)),
        ("_Q", qu.sortE(t, Q)),
    ]
    ordered[0] = ("_t", qu.sortE(t, t))
    base = "../Data/" + fo.name + "/"
    for tag, series in ordered:
        np.save(base + tag + ".npy", series)
def main(name, tags = [], label = "", decimate = 1, plot = PLOT):
    """Run the full pipeline for simulation `name`: analyze, save, plot.

    NOTE(review): `label` is accepted but unused; the mutable default
    `tags = []` is only read, so it is harmless but worth replacing with None.
    """
    time0 = time.time()
    fo.tags = tags
    fo.decimate = decimate
    setFigObj(name)
    t, N, M, eigs, aa, a, Q = analyze()
    SaveStuff(t, N, M, eigs, aa, a, Q)
    if plot:
        u.orientPlot()
        makeNFig(t, N)
        makeMFig(t, eigs)
        makePOQFig(t, eigs, Q)
        # fixed: N (per-step 1-D occupation vectors) was passed where
        # makeSqueezeFig/constructSq expect the matrix series M --
        # LA.eig on a 1-D row cannot work; pass M.
        makeSqueezeFig(t, aa, M, a)
    print('completed in %i hrs, %i mins, %i s' %u.hms(time.time()-time0))
if __name__ == "__main__":
    # load in the tags on the data directories
    try:
        fo.tags = np.load("../Data/" + simName + "/tags.npy")
    except IOError:  # no tags file: fall back to a single unlabeled run
        fo.tags = [""]
    main(simName, fo.tags, decimate=decimate)
|
count = 0     # how many qualifying numbers have been printed so far
maxcount = 0  # how many the current test case asks for (set in the driver)
def prime(n):
    """Trial-division primality test.

    Returns False for n < 2 -- the original reported 0 and 1 as prime
    (harmless for this script's inputs, which are always >= 3, but wrong
    as a general predicate). Odd divisors up to n/2 are checked.
    """
    if n < 2:
        return False
    if n % 2 == 0 and n > 2:
        return False
    for i in range(3, int(n / 2 + 1), 2):
        if n % i == 0:
            return False
    return True
def getFactor(n, factors):
    """Return the largest prime factor of n (n itself when n is prime).

    The `factors` argument is unused but kept for call compatibility with
    checkbase(), which passes a copy of its running factor list.
    """
    divisor = 2
    remaining = n
    while divisor * divisor <= remaining:
        if remaining % divisor == 0:
            remaining //= divisor
        else:
            divisor += 1
    return remaining
def checkbase(num):
    """Interpret the 0/1 string `num` in every base 2..10; if no interpreted
    value is prime, print the number followed by one nontrivial factor per
    base and bump the global count.

    NOTE(review): this looks like Google Code Jam 2016 "Coin Jam" output
    format -- unverified.
    """
    global count
    numrev = str(num)[::-1]  # reversed so index j holds the base**j digit
    primeflg = False
    factors = []
    for i in range(2,11):
        value = 0
        for j in range(0,len(num)):
            digit = int(numrev[j])
            value = value+(i**j)*digit
        if prime(value):
            primeflg = True  # prime in any base disqualifies the number
            break
        else:
            factors.append(getFactor(value,factors[:]))
    if primeflg == False:
        # Python 2 print statements; trailing commas keep one output line
        print num,
        for fct in factors:
            print fct,
        print ""
        count = count + 1
def makebinary(leng):
    """Enumerate strings of length `leng` that start and end with '1',
    incrementing the middle bits like a binary counter, and feed each to
    checkbase() until the global count reaches maxcount.
    """
    global count
    global maxcount
    leng = leng - 2  # middle bits only; the outer '1's are fixed
    flg = False
    arr = []
    for i in range(0,leng):
        arr.append(0)
    while True:
        if flg:
            break;  # counter overflowed: all combinations tried
        # increment arr as a binary number (ripple-carry from the right)
        i = len(arr) - 1
        while True:
            if arr[i] == 0:
                arr[i] = 1;
                break;
            else:
                arr[i] = 0
                i = i - 1
                if i == -1:
                    flg = True
                    break;
        # assemble "1" + middle bits + "1"
        s = "1"
        for i in arr:
            s = s + str(i)
        s = s + "1"
        # print s
        checkbase(s)
        if count == maxcount:
            break
t = int(raw_input())
for i in xrange(1, t + 1):
n, maxcount = [int(s) for s in raw_input().split(" ")] # read a list of integers, 2 in this case
print "Case #{}:".format(i)
makebinary(n)
|
def isPalindrome(s):
    """
    Return True when `s` is a palindrome after lowercasing and stripping
    every character that is not an ASCII letter.
    :type s: str
    :rtype: bool
    """
    # fixed: the original line `t=t+x for x in s.lower() if x in abc` was a
    # SyntaxError; build the filtered string with a generator + join instead.
    letters = 'asdfghjklqwertyuiopzxcvbnm'
    t = ''.join(x for x in s.lower() if x in letters)
    return t[::-1] == t
# smoke test: classic mixed-case/punctuation palindrome; expected output: True
st="A man, a plan, a canal: Panama"
print(isPalindrome(st))
|
from rest_framework import serializers
from computerapp.models import Product,Manufacturer,Category,UserProfile,DeliveryAddress,Order
from django.contrib.auth.models import User
class UserProfileSerializer(serializers.ModelSerializer):
    """Profile record for a user; `user` is fixed server-side (read-only)."""
    class Meta:
        model = UserProfile
        fields = ('id','user','mobile_phone','nickname','description','icon','created','updated',)
        read_only_fields = ('user',)
class UserInfoSerializer(serializers.ModelSerializer):
    """Read-side user representation with the nested profile embedded."""
    profile_of = UserProfileSerializer()
    class Meta:
        model = User
        fields = ('id','username','email','first_name','last_name','date_joined','profile_of',)
class UserSerializer(serializers.ModelSerializer):
    """Registration serializer: hashes the password and creates a profile."""
    class Meta:
        model = User
        fields = ('id','username','password','email','first_name','last_name',)
        extra_kwargs = {'password':{'write_only':True}}  # never echo passwords
    def create(self,validated_data):
        """Create the user with a hashed password plus an empty profile row."""
        user = User(**validated_data)
        user.set_password(validated_data['password'])  # hash, don't store raw
        user.save()
        user_profile = UserProfile(user = user)
        user_profile.save()
        return user
class ManufacturerSerializer(serializers.ModelSerializer):
    """Minimal manufacturer representation (id + name)."""
    class Meta:
        model = Manufacturer
        fields = ('id','name',)
class CategorySerializer(serializers.ModelSerializer):
    """Minimal category representation (id + name)."""
    class Meta:
        model = Category
        fields = ('id','name',)
class ProductListSerializer(serializers.ModelSerializer):
    """Compact product row for list views (related fields stay as PKs)."""
    class Meta:
        model = Product
        fields = ('id','model','image','price','sold','category','manufacturer',)
class ProductRetrieveSerializer(serializers.ModelSerializer):
    """Full product detail with nested manufacturer and category objects."""
    manufacturer = ManufacturerSerializer()
    category = CategorySerializer()
    class Meta:
        model = Product
        fields = ('id','model','image','price','sold','category','manufacturer','description','created','updated',)
class DeliveryAddressSerializer(serializers.ModelSerializer):
    """Delivery address; `user` is fixed server-side (read-only)."""
    class Meta:
        model = DeliveryAddress
        fields = ('id','user','contact_person','contact_mobile_phone','delivery_address','created','updated',)
        read_only_fields = ('user',)
class OrderListSerializer(serializers.ModelSerializer):
    """Order row for list views with product and address expanded."""
    product = ProductListSerializer()
    address = DeliveryAddressSerializer()
    class Meta:
        model = Order
        fields = ('id','status','user','product','price','quantity','remark','address','created','updated',)
class OrderCreateSerializer(serializers.ModelSerializer):
    """Order creation: client supplies product/quantity/remark; the view is
    expected to fill user, price, address and status (all read-only here)."""
    class Meta:
        model = Order
        fields = ('id','status','user','product','price','quantity','remark','address','created','updated',)
        read_only_fields = ('user','price','address','status',)
class OrderRUDSerializer(serializers.ModelSerializer):
    """Retrieve/update/destroy serializer.

    NOTE(review): exposes only `id`, so updates through it change nothing --
    confirm this is intentional.
    """
    class Meta:
        model = Order
        fields = ('id',)
|
from django.shortcuts import render
from first_app import models,forms
#from django.http import HttpResponse
# Create your views here.
# NOTE(review): module-level leftovers -- `word` is shadowed by the local in
# result() and `wordscount` is never used; candidates for removal.
word=""
wordscount={}
def home(request):
    """Render the site landing page."""
    return render(request, "html/home.html")
def wordcounthome(request):
    """Render the word-counter start page."""
    return render(request, "html/wordcounthome.html")
def wordcount(request):
    """Render the word-count input page."""
    return render(request, "html/wordcount.html")
def result(request):
    """Count whitespace-separated words in the GET parameter 'word' and
    render the result page.

    Raises a MultiValueDictKeyError (-> 500) when 'word' is absent, matching
    the original behavior.
    """
    word = request.GET['word']
    # idiomatic: str.split() with no arguments splits on runs of whitespace,
    # exactly what the original manual counting loop iterated over
    count = len(word.split())
    return render(request, "html/result.html", {'count': count})
def users(request):
    """List every User record via the users.html template."""
    j=0  # NOTE(review): passed to the template as 'j' -- confirm its use there
    users_details=models.User.objects.all()
    users_details_dict={'user_details_list':users_details,'j':j}
    return render(request,"html/users.html",context=users_details_dict)
def formcreation(request):
    """Show (GET) or validate (POST) the plain Django form."""
    form = forms.formcreate()  # unbound form for the initial GET
    if request.method == "POST":
        form = forms.formcreate(request.POST)  # re-bind with submitted data
        if form.is_valid():
            # demo form: just log the cleaned values, nothing is persisted
            print("success")
            print("sno "+str(form.cleaned_data['sno']))
            print("name "+form.cleaned_data['name'])
            print("testtext "+form.cleaned_data['testtext'])
    return render(request,"html/form.html",{'form':form})
def modelFormcreate(request):
    """Show (GET) or validate-and-save (POST) the model form.

    Fixed: the original bound request.POST unconditionally, so a plain GET
    rendered a bound-but-empty form covered in validation errors (and a
    stray debug print of is_valid()). Mirrors formcreation()'s flow.
    """
    if request.method == "POST":
        form = forms.modelFormcreate(request.POST)
        if form.is_valid():
            form.save()  # persist the validated model instance
    else:
        form = forms.modelFormcreate()  # unbound form for the initial GET
    return render(request,"html/formmodel.html",{'form':form})
|
from django.shortcuts import render
from .models import Pixel
from django.db import models
def home(request):
    """Render the gallery home page with every Pixel record."""
    context = {'pixels': Pixel.objects.all()}
    return render(request, 'pixels/home.html', context)
def search_results(request):
    """Search images by category from the 'pixel' GET parameter.

    A missing or empty parameter renders the page with a hint message only.
    """
    search_term = request.GET.get("pixel")
    if search_term:
        matches = Pixel.search_by_category(search_term)
        return render(request, 'pixels/search.html',
                      {"message": search_term, "pixels": matches})
    return render(request, 'pixels/search.html',
                  {"message": "You haven't searched for any image category"})
|
from sqlalchemy import create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker
# Relative SQLite path: the DB file is created in the process's working dir.
SQLALCHEMY_DATABASE_URI = "sqlite:///./DB.db"
engine = create_engine(SQLALCHEMY_DATABASE_URI)
# autocommit/autoflush off: callers control transaction boundaries explicitly
SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
Base = declarative_base()  # shared declarative base for the ORM models
|
# NOTE(security): credentials are stored in plain text -- hash the password
# before persisting if this is ever more than an exercise.
name = input("Vedite login: ")
password = input("Vvedite password: ")
# with-statement guarantees the file is closed even if the write fails
# (the original opened the handle before input() and never used a context)
with open('/home/sezim/zadachi/rabota_s_files/users.txt', 'w') as f:
    f.write(f"login: {name} \nPassword: {password}")
|
# -*- coding: utf-8 -*-
"""
Created on Thu Apr 3 18:49:26 2014
@author: anneubuntu
"""
# -*- coding: utf-8 -*-
"""
Created on Wed Apr 2 00:32:11 2014
@author: anneubuntu
"""
import pygame
import math
from pygame.locals import *
import random
import time
from abc import ABCMeta, abstractmethod
import planes
from planes import Plane
import planes.gui
# window geometry and movement-button sizes, in pixels
WINDOWWIDTH = 600
WINDOWHEIGHT = 600
MOVEBUTTON_HEIGHT = 100
MOVEBUTTON_WIDTH = 75
# RGB color constants
BLACK = (0, 0, 0)
GREEN = (0, 255, 0)
WHITE = (255, 255, 255)
BLUE = (0, 0, 255)
class Actor(planes.Plane):
    """Rectangular sprite with a float position and per-frame velocity."""
    def __init__(self, x, y, width, height, name, draggable=False, grab=False):
        planes.Plane.__init__(self, name, pygame.Rect(x,y,width,height), draggable, grab)
        self.x = x
        self.y = y
        self.width = width
        self.height = height
        self.rect = pygame.Rect(self.x, self.y, self.width, self.height)
        self.vx = 0.0  # velocity in pixels per update() call
        self.vy = 0.0
    def update(self):
        '''updates the actor's position and updates the rectangle object of the
        actor'''
        self.x += self.vx
        self.y += self.vy
        self.rect = pygame.Rect(self.x, self.y, self.width, self.height)
class Robot(Actor):
    """An Actor whose plane name is fixed to "robot"."""
    def __init__(self, x, y, width, height):
        Actor.__init__(self, x, y, width, height, "robot")
class Model:
    """Owns the four movement buttons plus a command label; ticks the buttons."""
    def __init__(self):
        # 2x2 button grid: up (0,0) and down (W+5,0) on top, left/right below
        self.left = LeftButton("left",pygame.Rect(0,MOVEBUTTON_HEIGHT+5,MOVEBUTTON_WIDTH,MOVEBUTTON_HEIGHT),LeftButton.clicked)
        self.right = RightButton("right",pygame.Rect(MOVEBUTTON_WIDTH+5,MOVEBUTTON_HEIGHT+5,MOVEBUTTON_WIDTH,MOVEBUTTON_HEIGHT),RightButton.clicked)
        self.up = UpButton("up",pygame.Rect(0,0,MOVEBUTTON_WIDTH,MOVEBUTTON_HEIGHT),UpButton.clicked)
        self.down = DownButton("down",pygame.Rect(MOVEBUTTON_WIDTH+5,0,MOVEBUTTON_WIDTH,MOVEBUTTON_HEIGHT),DownButton.clicked)
        self.commands = planes.gui.Label("commands", "testing", pygame.Rect(2*MOVEBUTTON_WIDTH + 10, 10, WINDOWWIDTH-2*MOVEBUTTON_WIDTH + 10, 2*MOVEBUTTON_HEIGHT))
        # NOTE(review): self.commands is not in actors, so it is never updated here
        self.actors = [self.left,self.right,self.up,self.down]
    def update(self):
        """Tick every tracked actor (the four buttons)."""
        for actor in self.actors:
            actor.update()
class View:
    """Draws every actor in the model onto the screen surface."""
    def __init__(self, model, screen):
        self.model = model
        self.screen = screen

    def draw(self):
        """Clear to black, draw each actor as a white rect, flip the display."""
        self.screen.fill(BLACK)
        for sprite in self.model.actors:
            pygame.draw.rect(self.screen, WHITE, sprite.rect)
        pygame.display.update()
class Button(planes.gui.Button):
    """Grey base button; subclasses override clicked() with their direction."""
    def __init__(self, label, rect, callback):
        planes.gui.Button.__init__(self, label, rect, callback)
        self.image.fill((150, 150, 150))  # grey background
    def clicked(self, button_name):
        # Python 2 print statement; placeholder behavior
        print "clicked it woot"
    def update(self):
        # buttons are static; nothing to animate per frame
        pass
class UpButton(Button):
    """Up-movement button; clicked() only logs for now (movement is a TODO)."""
    def __init__(self, label, rect, callback):
        Button.__init__(self, label, rect, callback)
    def clicked(self, button_name):
        print "will go up"
class DownButton(Button):
    """Down-movement button; clicked() only logs for now."""
    def __init__(self, label, rect, callback):
        Button.__init__(self, label, rect, callback)
    def clicked(self, button_name):
        print "will go down"
class LeftButton(Button):
    """Left-movement button; clicked() only logs for now."""
    def __init__(self, label, rect, callback):
        Button.__init__(self, label, rect, callback)
    def clicked(self, button_name):
        print "will go left"
class RightButton(Button):
    """Right-movement button; clicked() only logs for now."""
    def __init__(self, label, rect, callback):
        Button.__init__(self, label, rect, callback)
    def clicked(self, button_name):
        print "will go right"
class MoveScreen:
    # NOTE(review): this entire body executes at class-DEFINITION time (i.e.
    # on import), not when MoveScreen() is instantiated -- consider moving it
    # into a method or a module-level main() function.
    pygame.init()
    size = (WINDOWWIDTH,WINDOWHEIGHT)
    screen = pygame.display.set_mode(size)
    model = Model()
    view = View(model,screen)
    running = True
    # second assignment replaces the plain pygame surface with a planes display
    screen = planes.Display((900, 700))
    screen.grab = True
    screen.image.fill((0, 128, 0))
    for actor in model.actors:
        screen.sub(actor)  # register buttons as sub-planes of the display
    while running:
        events = pygame.event.get()
        for event in events:
            if event.type == pygame.QUIT:
                print("got pygame.QUIT, terminating")
                raise SystemExit
        screen.process(events)
        screen.update()
        screen.render()
        model.update()
        view.draw()
        time.sleep(.001)  # tiny sleep: keeps the busy loop from pegging a core
    pygame.quit()
if __name__ == '__main__':
    # fixed: the original called moveScreen(), a name that is never defined
    # (NameError). The class is MoveScreen; its body has already run at
    # class-definition time, so instantiating it here is effectively a no-op
    # kept for clarity.
    MoveScreen()
|
# Generated by Django 3.0.3 on 2020-04-14 09:01
from django.db import migrations, models
class Migration(migrations.Migration):
    """Refine the `distance` model: widen/annotate its fields and make each
    (distance_km, competition) pair unique."""
    dependencies = [
        ('competition_calendar', '0001_initial'),
    ]
    operations = [
        # track length as a decimal with 4 fractional digits
        migrations.AlterField(
            model_name='distance',
            name='distance_km',
            field=models.DecimalField(decimal_places=4, help_text='The ideal track length in kilometer.', max_digits=7, verbose_name='distance'),
        ),
        # optional human-readable label
        migrations.AlterField(
            model_name='distance',
            name='name',
            field=models.CharField(blank=True, help_text='Name of the distance', max_length=255, null=True, verbose_name='Name'),
        ),
        # a competition cannot list the same distance twice
        migrations.AlterUniqueTogether(
            name='distance',
            unique_together={('distance_km', 'competition')},
        ),
    ]
|
from abc import ABC, abstractmethod
from data.field import Field
import numpy as np
class Predictor(ABC):
    """Abstract interface for trainable predictors over Field data."""
    @abstractmethod
    def train(self, features: 'list[Field]', responses: 'list[Field]', **kwargs):
        """Fit the predictor on feature/response fields."""
        pass
    @abstractmethod
    def predict(self, features: 'list[Field]') -> np.ndarray:
        """Return predictions for the given feature fields as an array."""
        pass
    @abstractmethod
    def get_name(self):
        """Return a human-readable name identifying this predictor."""
        pass
|
from ga_init import *
maxgen = 5   # number of GA generations
npool = 5    # population pool size
pmut = 0.15  # mutation probability
ncross = 2   # NOTE(review): crossover setting -- confirm semantics in ga_init
t1 = initialize_GA_calc(npool,ncross,pmut,maxgen)
|
# -*- coding: utf-8 -*-
"""
Created on Wed May 10 10:52:00 2017
@author: kkonudul
"""
from random import randint, random
import matplotlib.pyplot as plt
def individual(length, min, max):
    """Create one candidate: `length` random ints drawn from [min, max]
    inclusive. (min/max shadow the builtins; kept for call compatibility.)"""
    return [randint(min, max) for _ in range(length)]
def population(count, length, min, max):
    """Create a population of `count` individuals.

    count: number of individuals
    length: number of gene values per individual
    min/max: inclusive range each gene is drawn from
    """
    return [individual(length, min, max) for _ in range(count)]
def fitness(individual, target):
    """Distance of the individual's gene sum from `target`; lower is better,
    zero is a perfect match."""
    return abs(target - sum(individual))
def grade(pop, target):
    """Mean fitness across the whole population (lower is better)."""
    total = sum(fitness(member, target) for member in pop)
    return total / (len(pop) * 1.0)
def evolve(pop, target, min, max, retain = 0.2, random_select = 0.05, mutate = 0.01):
    """Produce the next generation: elitism + random survivors + mutation +
    single-point crossover.

    retain: fraction of best-fitness individuals kept as parents
    random_select: chance for any non-elite individual to survive anyway
    mutate: chance to randomize one gene of each parent
    min/max: gene value range (shadow the builtins; kept for compatibility)
    """
    # Get the fitness score for each individual in the population
    graded = [(fitness(x, target), x) for x in pop]
    # Sort the individuals based on the fitness score and drop the fitness score. Our fitness score is calculated
    # so that the least is the best. If we calculated the score to be highest best, then we would sort in reverse.
    graded = [x[1] for x in sorted(graded)]
    # Getting the index to select the parents and select those individuals from the population
    retain_length = int(len(graded)*retain)
    parents = graded[:retain_length]
    # Randomly add other elements to promote genetic diversity
    # NOTE(review): the loop variable `individual` shadows the module-level
    # individual() function -- harmless here, but worth renaming.
    for individual in graded[retain_length:]:
        if random_select > random():
            parents.append(individual)
    # mutate some individuals (in place: one random gene is replaced)
    for individual in parents:
        if mutate > random():
            pos_to_mutate = randint(0, len(individual) - 1)
            individual[pos_to_mutate] = randint(min, max)
    # crossover parents to create children until the population is refilled
    parents_length = len(parents)
    desired_length = len(pop) - parents_length
    children = []
    while len(children) < desired_length:
        male = randint(0, parents_length - 1)
        female = randint(0, parents_length - 1)
        if male != female:
            # Getting the male and female from the positions we got earlier
            male = parents[male]
            female = parents[female]
            half = int(len(male) / 2)
            child = male[:half] + female[half:]  # single-point crossover
            children.append(child)
    parents.extend(children)
    return parents
# Main code to run the program
target = 200       # the gene-sum each individual is evolved toward
p_count = 100      # population size
i_length = 5       # genes per individual
i_min = 0          # minimum gene value
i_max = 100        # maximum gene value
iterations = 100   # number of generations to evolve
p = population(p_count, i_length, i_min, i_max)
fitness_history = [grade(p, target)]  # average fitness before evolving
for i in range(iterations):
    p = evolve(p, target, i_min, i_max)
    fitness_history.append(grade(p, target))
# average fitness should trend toward 0 as the population converges
plt.plot(fitness_history)
plt.xlabel('Evolution curve')
plt.show()
from django import forms
class FilterForm(forms.Form):
    """Radio-button pair filter; the form auto-submits on selection change."""
    filter = forms.ChoiceField(choices=[(1, "True pairs"), (0, "Not pairs"), (-1, "No Filter")],
        widget=forms.RadioSelect(
            attrs={'onchange': 'form.submit()',  # submit immediately on change
                   'id':'filter'},
        ))
# ClearML - Fastai example code, automatic logging the model and scalars
#
import argparse
from clearml import Task
import fastai
try:
from fastai.vision import untar_data, URLs, ImageDataBunch, rand_pad, imagenet_stats, cnn_learner, models, accuracy
except ImportError:
raise ImportError("FastAI version %s imported, but this example is for FastAI v1." % fastai.__version__)
def main(epochs):
    """Train a fastai v1 MNIST-sample classifier for `epochs` epochs.

    Task.init hooks ClearML so the model and scalars are logged automatically.
    """
    Task.init(project_name="examples", task_name="fastai v1")
    path = untar_data(URLs.MNIST_SAMPLE)
    data = ImageDataBunch.from_folder(path, ds_tfms=(rand_pad(2, 28), []), bs=64, num_workers=0)
    data.normalize(imagenet_stats)
    learn = cnn_learner(data, models.resnet18, metrics=accuracy)
    accuracy(*learn.get_preds())  # why: records pre-training accuracy
    learn.fit_one_cycle(epochs, 0.01)
if __name__ == "__main__":
    parser = argparse.ArgumentParser()
    # fixed: without type=int a value passed on the command line arrives as a
    # string, which fit_one_cycle cannot use as an epoch count; the default
    # of 3 already behaved as an int, so defaults are unchanged.
    parser.add_argument("--epochs", type=int, default=3)
    args = parser.parse_args()
    main(args.epochs)
|
'''
Script to get events information from CATALOG obspy
(https://docs.obspy.org/packages/autogen/obspy.clients.fdsn.client.Client.get_events.html)
'''
import numpy as np
from obspy import UTCDateTime
import os
import json
from obspy import read_events
from obspy.clients.fdsn import Client
from parameters_py.config import (
OUTPUT_JSON_FILE_DIR,INITIAL_DATE_EVENT,FINAL_DATE_EVENT,EV_MAGNITUDE_MB,LATITUDE_MIN,LATITUDE_MAX,
LONGITUDE_MIN,LONGITUDE_MAX
)
print('Get Event Parameters')
print('\n')
irisclient=Client("IRIS")
starttime = UTCDateTime(INITIAL_DATE_EVENT)
endtime = UTCDateTime(FINAL_DATE_EVENT)
# magnitude-limited global query; the lat/lon box is applied locally below
events = irisclient.get_events(starttime=starttime, endtime=endtime,minmagnitude=EV_MAGNITUDE_MB)
# accumulator: one parallel list per event attribute
dic_event = {
    'ev_timeUTC':[],
    'ev_year':[],
    'ev_month':[],
    'ev_day':[],
    'ev_julday':[],
    'ev_hour':[],
    'ev_minute':[],
    'ev_second':[],
    'ev_microsecond':[],
    'evla':[],
    'evlo':[],
    'evdp':[],
    'mag':[]}
for i,j in enumerate(events):
    # keep only events inside the configured latitude/longitude box
    if LATITUDE_MIN <= j['origins'][0]['latitude'] <= LATITUDE_MAX and LONGITUDE_MIN <= j['origins'][0]['longitude'] <= LONGITUDE_MAX:
        print('event - '+str(j['origins'][0]['time']))
        temp = j['origins'][0]['time']
        # zero-padded string components of the origin time
        dic_event['ev_year'].append('{:04}'.format(temp.year))
        dic_event['ev_month'].append('{:02}'.format(temp.month))
        dic_event['ev_julday'].append('{:03}'.format(temp.julday))
        dic_event['ev_day'].append('{:02}'.format(temp.day))
        dic_event['ev_hour'].append('{:02}'.format(temp.hour))
        dic_event['ev_minute'].append('{:02}'.format(temp.minute))
        dic_event['ev_second'].append('{:02}'.format(temp.second))
        dic_event['ev_microsecond'].append('{:04}'.format(temp.microsecond))
        dic_event['ev_timeUTC'].append(str(temp))
        dic_event['evla'].append(j['origins'][0]['latitude'])
        dic_event['evlo'].append(j['origins'][0]['longitude'])
        # NOTE(review): depth > 1000 is assumed to be meters and converted to
        # km -- confirm the catalog's depth units before relying on this.
        if j['origins'][0]['depth'] > 1000:
            dic_event['evdp'].append(float(j['origins'][0]['depth'])/1000)
        else:
            dic_event['evdp'].append(j['origins'][0]['depth'])
        dic_event['mag'].append(j['magnitudes'][0]['mag'])
print('Number of Events: '+str(len(dic_event['mag'])))
print('\n')
print('Saving Event Parameters in JSON file')
print('\n')
os.makedirs(OUTPUT_JSON_FILE_DIR,exist_ok=True)
with open(OUTPUT_JSON_FILE_DIR+'EVENT_dic.json', 'w') as fp:
    json.dump(dic_event, fp)
import cv2
import sys
import os
import sys
sys.path.insert(0, 'Yolo')
from yolo import use_yolo
path = "Annotator/Videos/" # set path for video directory
selection = []  # mouse-click (x, y) points collected for the current frame
def check_file(vid, class_name):
    """Return True when `vid` is already listed in the class's log file.

    A missing or unreadable log file counts as "not yet processed" (False).
    """
    try:
        with open(path + class_name + "_log.txt", "r") as f:
            for line in f:
                # each log line is one video name; strip the trailing newline
                if line.split('\n')[0] == vid:
                    return True
    # narrowed from a bare `except:`, which also swallowed programming errors
    # (NameError, KeyboardInterrupt, ...); only I/O-style failures mean
    # "no log yet".
    except (OSError, UnicodeError):
        return False
    return False
def on_Mouse_Event(event, x, y, flags, param):
    """cv2 mouse callback: record each left-click position in the global
    `selection` list (consumed later by assign_boxes)."""
    global selection
    if event == cv2.EVENT_LBUTTONDOWN:
        selection.append([x, y])
def assign_boxes(bb):
    """Map each clicked point in the global `selection` to the label of the
    yolo box strictly containing it ('-' when no box matches; the last
    matching box wins)."""
    labels = []
    for click in selection:
        label = "-"
        for box in bb:
            inside_x = box[0] < click[0] < box[0] + box[2]
            inside_y = box[1] < click[1] < box[1] + box[3]
            if inside_x and inside_y:
                label = box[4]
        labels.append(label)
    print(labels)
    return labels
def driver(class_name):
    """Interactive annotation loop for every unprocessed video in the class
    directory: every 10th frame is shown with yolo boxes; the user presses
    'i'/'o' (in/out) and clicks boxes to assign roles. Results go to
    Roles/<vid>.txt and the video is appended to the class log.
    """
    global selection
    files = os.listdir(path+class_name)
    for vid in files:
        bool_chk = check_file(vid, class_name)
        if bool_chk == True:
            continue  # already annotated in a previous session
        res = {}  # frame index -> [in/out, box label per click]
        cap = cv2.VideoCapture(path+class_name+"/"+vid)
        frame_length = int(cap.get(cv2.CAP_PROP_FRAME_COUNT))
        cnt = 0
        for i in range(frame_length):
            cnt += 1
            if cnt % 10 != 0:
                continue  # annotate only every 10th frame
            selection = [] # make sure it is global
            chk, frame = cap.read()
            cv2.namedWindow("image")
            cv2.setMouseCallback("image", on_Mouse_Event)
            frame = cv2.resize(frame, (224, 224), interpolation=cv2.INTER_AREA)
            bb = use_yolo(frame)
            # draw each detection with its label above the box
            for box in bb:
                cv2.rectangle(
                    frame, (box[0], box[1]), (box[0]+box[2], box[1]+box[3]), (0, 255, 0), 1)
                cv2.putText(frame, box[4], (box[0], box[1] - 5),
                    cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 255, 0), 2)
            cv2.imshow("image", frame)
            k = cv2.waitKey(100000)  # wait up to 100 s for a key press
            temp_rec = []
            if k == ord('o'):
                temp_rec.append("out")
                temp_rec.extend(assign_boxes(bb))
                res[i] = temp_rec
                continue
            elif k == ord('i'):
                temp_rec.append("in")
                temp_rec.extend(assign_boxes(bb))
                res[i] = temp_rec
                continue
            # cv2.destroyAllWindows()
            else:
                continue  # any other key skips the frame
        # write one CSV-ish line per annotated frame: index,label,role,role,...
        with open(path+"Roles/"+vid+".txt", 'w') as f:
            for item in res.keys():
                feature = str(item) + ","
                for i in res[item]:
                    feature += i + ","
                f.write(feature+"\n")
        # mark the video as processed so reruns skip it
        with open(path+class_name+"_log.txt", "a") as f:
            f.write(vid+'\n')
        cap.release()
class_name = sys.argv[1]  # video sub-directory / action class to annotate
driver(class_name)
|
    @cherrypy.expose
    def connect(self):
        """Serve the HTML form asking for a server and port; submitting it
        issues a GET to /connecting."""
        return """<html>
        <head><title>Connect Server</title></head>
        <body>
        <form method="get" action="connecting">
        server:<br>
        <input type="text" name="_server"><br>
        port:<br>
        <input type="text" name="_port"><br>
        <input type="submit" value="Connect">
        </form>
        </body>
        </html>"""
@cherrypy.expose
def connecting(self, _server, _port):
response = os.system("nc -z -v " + _server + " " + _port)
if response == 0:
cherrypy.session['server'] = _server
return "Successfully connected " + _server + " : " + _port + self.back
else:
return "Failed to connect " + _server + " : " + _port + self.back
if __name__ == "__main__":
    # Sessions must be on so /connecting can store the chosen server.
    conf = {
        '/': {
            'tools.sessions.on': True
        }
    }
    # NOTE(review): binds to a hard-coded LAN address — confirm before deploying.
    cherrypy.server.socket_host = '192.168.0.161'
    cherrypy.quickstart(ImgTransfer(), '/', conf)
|
import pygame
from pygame.locals import (K_DOWN, K_LEFT, K_RIGHT, K_UP)
class Player(pygame.sprite.Sprite):
    """Rectangular player sprite moved with the arrow keys."""

    def __init__(self, x: int,
                 y: int,
                 width: int,
                 height: int,
                 color: "tuple[int, int, int]",
                 vel: int = 3):
        """
        Args:
            x, y: initial top-left position in pixels.
            width, height: rectangle size in pixels.
            color: RGB triple used when drawing.
            vel: movement speed in pixels per call to move().
        """
        super(Player, self).__init__()
        self.x = x
        self.y = y
        self.width = width
        self.height = height
        self.color = color
        self.vel = vel
        # self.surf = pygame.Surface((self.width, self.height))
        self.set_rect()

    def set_rect(self):
        """(Re)build self.rect from the stored position and size.

        NOTE(review): move() updates self.rect only, so self.x / self.y go
        stale after movement — confirm nothing reads them afterwards.
        """
        self.rect = pygame.Rect(self.x, self.y, self.width, self.height)

    def draw(self, surf):
        """Draw the player rectangle onto a surface.

        Args:
            surf (pygame.Surface): surface to draw on.
        """
        # BUG FIX (doc): the docstring previously described a parameter
        # named `win` that does not exist.
        pygame.draw.rect(surf, self.color, self.rect)

    def move(self, max_width, max_height):
        """Move per currently pressed arrow keys, then clamp the rect inside
        a max_width x max_height window."""
        keys = pygame.key.get_pressed()
        if keys[K_LEFT]:
            self.rect.move_ip(-self.vel, 0)
        if keys[K_RIGHT]:
            self.rect.move_ip(self.vel, 0)
        if keys[K_UP]:
            self.rect.move_ip(0, -self.vel)
        if keys[K_DOWN]:
            self.rect.move_ip(0, self.vel)
        # Keep player on the screen
        if self.rect.left < 0:
            self.rect.left = 0
        if self.rect.right > max_width:
            self.rect.right = max_width
        if self.rect.top <= 0:
            self.rect.top = 0
        if self.rect.bottom >= max_height:
            self.rect.bottom = max_height
|
# Code for beta-VAE
# Paper: https://openreview.net/forum?id=Sy2fzU9gl
from abc import ABC
import torch
from torch import Tensor
from torch import nn
from torch.nn import functional as F
from . import ConvNet, GaussianLayer, GenerativeAE, UpsampledConvNet, FCBlock, DittadiConvNet, DittadiUpsampledConv
from .utils import act_switch
class VAEBase(nn.Module, GenerativeAE, ABC):
    """Abstract base for VAE-style generative autoencoders.

    Wraps a subclass-provided `self.encoder` / `self.decoder` pair with a
    Gaussian latent layer and implements the standard ELBO loss.
    NOTE(review): loss_function reads `self.beta`, which is only set by
    subclasses — confirm every concrete subclass defines it.
    """
    def __init__(self, params):
        super().__init__()
        self.params = params
        self.latent_size = params["latent_size"]
        self.gaussian_latent = GaussianLayer(self.latent_size, self.latent_size, params["gaussian_init"])
        self.act = nn.Sigmoid()
    def encode(self, inputs: Tensor):
        """Return [z, mu, logvar] for the given batch.

        NOTE(review): the Gaussian layer is unpacked as (z, logvar, mu) but
        returned as [z, mu, logvar] — assumes GaussianLayer really returns
        (z, logvar, mu); confirm against its forward().
        """
        codes = self.encoder(inputs)
        z, logvar, mu = self.gaussian_latent(codes)
        return [z, mu, logvar]
    def encode_mu(self, inputs:Tensor, **kwargs) -> Tensor:
        """ returns latent code (not noise) for given input"""
        return self.encode(inputs)[1]
    def decode(self, noise: Tensor, activate:bool) -> Tensor:
        """Decode latent noise; apply the sigmoid only when `activate`."""
        out = self.decoder(noise)
        if activate: out = self.act(out)
        return out
    def sample_noise_from_prior(self, num_samples:int):
        """Draw `num_samples` latent codes from the standard-normal prior."""
        return self.gaussian_latent.sample_standard(num_samples)
    def sample_noise_from_posterior(self, inputs: Tensor):
        """Return the reparameterised sample z for the given inputs."""
        #TODO: change here
        return self.encode(inputs)[0]
    def generate(self, x: Tensor, activate:bool) -> Tensor:
        """ Simply wrapper to directly obtain the reconstructed image from
        the net"""
        return self.forward(x, activate)[0]
    def forward(self, inputs: Tensor, activate:bool=False) -> list:
        """Full pass: returns [reconstruction, mu, logvar]."""
        z, mu, logvar = self.encode(inputs)
        return [self.decode(z, activate), mu, logvar]
    def loss_function(self, *args, **kwargs) -> dict:
        """beta-VAE ELBO: reconstruction (MSE or BCE) + beta-weighted KL."""
        X_hat = args[0]
        mu = args[1]
        log_var = args[2]
        X = kwargs["X"]
        # In this context it makes sense to normalise β by latent z size
        # m and input x size n in order to compare its different values
        # across different latent layer sizes and different datasets
        KL_weight = kwargs["KL_weight"] # usually weight = M/N
        use_MSE = kwargs.get("use_MSE",True)
        # ELBO = reconstruction term + prior-matching term
        # Note: for both losses we take the average over the batch and sum over the other dimensions
        BCE = torch.sum(F.binary_cross_entropy_with_logits(X_hat, X, reduction="none"),
                        tuple(range(X_hat.dim()))[1:]).mean() #sum over all dimensions except the first one (batch)
        MSE = torch.sum(F.mse_loss(self.act(X_hat), X, reduction="none"),
                        tuple(range(X_hat.dim()))[1:]).mean()
        recons_loss = MSE if use_MSE else BCE
        # Analytic KL between N(mu, sigma) and the standard normal prior.
        KL_loss = -0.5 * torch.sum(1 + log_var - mu.pow(2) - log_var.exp(), 1).mean()
        loss = recons_loss + self.beta * KL_weight * KL_loss
        return {'loss': loss, 'Reconstruction_loss':recons_loss, 'KL':KL_loss, 'MSE':MSE, 'BCE':BCE}
    def get_prior_range(self):
        """ returns a range in format [(min, max)] for every dimension that should contain
        most of the data density (presumably 90% — original said "905")"""
        return self.gaussian_latent.prior_range
class VAE(VAEBase):
    """Convolutional beta-VAE for image data.

    Two encoder/decoder layouts, selected by params["dittadi"]: the default
    ConvNet/UpsampledConvNet pair bridged by fully-connected blocks, or the
    Dittadi architecture.
    """
    def __init__(self, params: dict, dim_in) -> None:
        super().__init__(params)
        self.beta = params["beta"]
        self.dittadi_v = params["dittadi"] # boolean flag determining whether or not to use Dittadi convolutional structure
        self.dim_in = dim_in # C, H, W
        # Building encoder
        conv_net = ConvNet(dim_in, depth=params["enc_depth"], **params) if not self.dittadi_v \
            else DittadiConvNet(self.latent_size)
        if not self.dittadi_v:
            fc_enc = FCBlock(conv_net.final_dim, [128, 64, self.latent_size], act_switch(params["act"]))
            fc_dec = FCBlock(self.latent_size, [64, 128, conv_net.final_dim], act_switch(params["act"]))
        self.encoder = conv_net if self.dittadi_v else nn.Sequential(conv_net, fc_enc)
        self.decoder_initial_shape = conv_net.final_shape
        deconv_net = UpsampledConvNet(self.decoder_initial_shape, self.dim_in, depth=params["dec_depth"], **params) \
            if not self.dittadi_v else DittadiUpsampledConv(self.latent_size)
        self.decoder = deconv_net if self.dittadi_v else nn.ModuleList([fc_dec, deconv_net])
    def decode(self, noise: Tensor, activate:bool) -> Tensor: #overriding parent class implementation to insert reshaping
        # NOTE(review): indexing self.decoder is valid for the ModuleList
        # branch; when dittadi_v is True, self.decoder is a single module,
        # so this override looks incompatible with that branch — confirm.
        noise = self.decoder[0](noise)
        noise = noise.view((-1, )+self.decoder_initial_shape) # reshaping into image format
        out = self.decoder[1](noise)
        if activate: out = self.act(out)
        return out
class XVAE(VAEBase):
    """ Explicit latent block + VAE """
    #TODO: placeholder subclass — not implemented yet.
    pass
class VecVAE(VAEBase):
    """Version of VAE model for vector based (not image based) data"""
    def __init__(self, params: dict, dim_in: int, **kwargs) -> None:
        """ full: whether to use the VecSCMDecoder layer as a decoder"""
        super().__init__(params)
        self.dim_in = dim_in[0]
        self.beta = params["beta"]
        # dim_in is a single number (since the input is a vector)
        # Layer widths interpolate linearly from input size down to latent size.
        layers = list(torch.linspace(self.dim_in, self.latent_size, steps=params["depth"]).int().numpy())
        self.encoder = FCBlock(self.dim_in, layers, act_switch(params.get("act")))
        # NOTE(review): reversed(layers) is a one-shot iterator — assumes
        # FCBlock materialises it immediately; confirm.
        self.decoder = FCBlock(self.latent_size, reversed(layers), act_switch(params.get("act")))
    def decode(self, noise:Tensor, activate:bool):
        # NOTE(review): unlike VAEBase.decode, `activate` is ignored here
        # (no sigmoid applied) — confirm this is intentional for vector data.
        # since x is a constant we're always going to get the same output
        output = self.decoder(noise)
        return output
class words_filter():
    """Strips newline and tab characters from a string.

    The filtered text is computed once in the constructor and retrieved via
    output().  (Non-PEP8 class name kept for backward compatibility.)
    """
    def __init__(self, str1):
        # Keep every character except '\n' and '\t'.  A single join is O(n),
        # unlike the old repeated string concatenation (quadratic).
        self.lis3 = "".join(ch for ch in str1 if ch != '\n' and ch != '\t')
    def output(self):
        """Return the filtered string."""
        return self.lis3
|
"""Path utilities based on :mod:`posixpath`.
:func:`normpath` has been modified not to allow relative paths.
:mod:`posixpath` is part of the Python standard library, and is licensed under
the `Python Software Foundation License <http://docs.python.org/license.html>`_,
which can be linked with libraries of other licenses and allows changes to be
released under different licenses. See LICENSE.txt for a copy of the PSFL.
The original code can be found `here
<http://hg.python.org/releasing/2.7.3/file/7bb96963d067/Lib/posixpath.py>`_.
The following license text refers to changes to the original code:
Copyright 2012 the original author or authors
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
"AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
"""
def normpath(path):
    """Normalize path, eliminating double slashes, etc.

    Unlike posixpath.normpath, relative components ('.', '..') are not
    resolved: they raise ValueError.  (Python 2 module: the `unicode`
    check below does not exist on Python 3.)
    """
    comps = path.split('/')
    new_comps = []
    for comp in comps:
        # Empty components come from doubled or trailing slashes.
        if comp == '':
            continue
        if comp in ('.', '..'):
            raise ValueError('relative paths not allowed')
        new_comps.append(comp)
    # Preserve the unicode-ness of the input when re-joining.
    slash = u'/' if isinstance(path, unicode) else '/'
    new_path = slash.join(new_comps)
    # Keep a single leading slash for absolute paths.
    if path.startswith('/'):
        return slash + new_path
    return new_path
def join(a, *p):
    """Join two or more pathname components, inserting '/' as needed.

    If any component is an absolute path, all previous path components
    will be discarded.
    """
    path = a
    for part in p:
        if part.startswith('/'):
            # An absolute component resets everything accumulated so far.
            path = part
            continue
        if path and not path.endswith('/'):
            path += '/'
        path += part
    return path
def isabs(s):
    """Test whether a path is absolute (i.e. begins with a slash)."""
    return s[:1] == '/'
def basename(p):
    """Return the final component of a pathname."""
    # rpartition splits on the last '/'; the tail is the final component
    # (the whole string when no slash is present, '' for a trailing slash).
    return p.rpartition('/')[2]
|
'''
CS370 Assignment 8
Solves ProjectEuler #82
Taylor He, Jacob Manzelmann, Tommy Osterman
I pledge my honor that I have abided by the Stevens Honor System
'''
import sys
def backtrack(matrix, dp_matrix, row):
    '''Backtracks the matrix, starting at the given row.

    Walks from the rightmost column back to column 0; at each step,
    subtracting the cell value from the DP value reveals which neighbour
    (left / down / up) the minimum came from.
    '''
    col = len(matrix) - 1
    path = []
    while col != 0:
        diff = dp_matrix[row][col] - matrix[row][col]
        path.append(matrix[row][col])
        # If we can traverse left, do it
        if diff == dp_matrix[row][col-1]:
            col -= 1
        # If col == 0 (We are at the left col), then break because we are done
        # If we should go down, do that
        elif row + 1 < len(matrix) and diff == dp_matrix[row+1][col]:
            row += 1
        # Else we should traverse up
        elif row >= 1 and diff == dp_matrix[row-1][col]:
            row -= 1
        # NOTE(review): if no branch ever matches, this loop would spin
        # forever; assumes dp_matrix was produced by solve() so one branch
        # always matches.
    # When col == 0, append the last item and break
    path.append(matrix[row][0])
    # The path was collected right-to-left; reverse before returning.
    return path[::-1]
def solve(matrix):
    '''Solves ProjectEuler 82 using DP
    returns (solution, path)

    Columns are processed left to right; each column is relaxed twice —
    once top-down (left/up moves) and once bottom-up (down moves) — so all
    three allowed directions are covered.
    '''
    size = len(matrix)
    # If size == 1, trivial case
    if size == 1:
        return (matrix[0][0], matrix[0])
    # Allocate dp matrix
    dp_matrix = [[None] * size for _ in range(size)]
    # Populate the left column with the only option
    for row in range(size):
        dp_matrix[row][0] = matrix[row][0]
    # Start building the dp matrix
    for col in range(1, size):
        # The upper row can only come from the left
        dp_matrix[0][col] = dp_matrix[0][col - 1] + matrix[0][col]
        # Traverse the row moving downward
        for row in range(1, size):
            dp_matrix[row][col] = matrix[row][col] + min(dp_matrix[row-1][col], dp_matrix[row][col-1])
        # Then traverse back up the row, changing values when necessary
        for row in range(size-2, -1, -1):
            dp_matrix[row][col] = min(dp_matrix[row][col], matrix[row][col] + dp_matrix[row+1][col])
    # After we are done building the table, find the min sum and its index
    min_sum = dp_matrix[0][size-1]
    start = 0
    for row in range(1, size):
        if dp_matrix[row][size-1] < min_sum:
            min_sum = dp_matrix[row][size-1]
            start = row
    return (min_sum, backtrack(matrix, dp_matrix, start))
def print_sol(min_sum, path):
    '''Prints the solution as specified in the homework'''
    print('Min sum: %s' % (min_sum,))
    print('Values: %s' % (path,))
if __name__ == '__main__':
    # If given a parameter, use that as filename instead.
    filename = 'matrix.txt' if len(sys.argv) == 1 else sys.argv[1]
    matrix = []
    # Input file: one comma-separated row of integers per line.
    with open(filename, 'r') as f:
        for line in f:
            matrix.append([int(x) for x in line.split(',')])
    # Solve and print
    (min_sum, path) = solve(matrix)
    print_sol(min_sum, path)
|
from django.db import models
from django.utils.timezone import now
# Create your models here.
class Template(models.Model):
    """Uploaded document template plus extracted metadata."""
    # Upload target is date-partitioned: templateuri/<year>/<month>/<day>/.
    docfile = models.FileField(upload_to='templateuri/%Y/%m/%d/')
    filename = models.CharField(max_length=100, null=True, blank=True)
    created_on = models.DateTimeField(blank=False, default=now)
    fields = models.CharField(max_length=2500, null=True, blank=True, default="")
    filetype = models.CharField(max_length=100, null=True, blank=True, default="")
    def __str__(self):
        return '{}'.format(self.filename)
    def file_link(self):
        """Return an HTML download link for the file, or a placeholder string.

        NOTE(review): the returned HTML is neither escaped nor marked safe;
        confirm how callers render it (Django admin would need format_html).
        """
        if self.docfile:
            return "<a download href='%s'>download</a>" % (self.docfile.url,)
        else:
            return "No attachment"
|
class WrongDocumentUpdateStrategy(Exception):
    """Raised when an unsupported document update strategy is requested."""
    pass
class DocumentNotFound(Exception):
    """Raised when a requested document cannot be located."""
    pass
class DocumentAlreadyCreated(Exception):
    """Raised when attempting to create a document that already exists."""
    pass
class DocumentWasNotSaved(Exception):
    """Raised when persisting a document fails."""
    pass
|
import xml.etree.cElementTree as ET
import codecs
import pprint
import json
import re
import audit #local *.py file
import os
#Set the proper current working directory
# NOTE(review): hard-coded per-machine path — adjust before running elsewhere.
os.getcwd()
os.chdir('C:/Users/sheethal/Desktop/DWMDB')
# Key-name classifiers: all-lowercase keys, lowercase keys with one colon,
# and keys containing characters problematic for MongoDB field names.
lower = re.compile(r'^([a-z]|_)*$')
lower_colon = re.compile(r'^([a-z]|_)*:([a-z]|_)*$')
problemchars = re.compile(r'[=\+/&<>;\'"\?%#$@\,\. \t\r\n]')
# Attributes gathered under the 'created' sub-document of each node.
CREATED = ["version", "changeset", "timestamp", "user", "uid"]
def get_pos(element):
    """Return the element's [latitude, longitude] as a list of floats."""
    return [float(element.attrib['lat']), float(element.attrib['lon'])]
def ignoring(k):
    """Return True if key `k` should be ignored during processing."""
    # Whole-key matches plus known 5-character prefixes to skip.
    skipped_keys = ('ele', 'import_uuid', 'source', 'wikipedia')
    skipped_prefixes = ('gnis:', 'is_in', 'nhd-s')
    return k in skipped_keys or k[:5] in skipped_prefixes
def fix_postcode(v):
    """
    Reduces postcodes to 5 digit strings. Some zips take the form
    'NC12345' or '12345-6789' hindering MongoDB aggregations.
    """
    # Collect the digits in order and keep at most the first five.
    digits = [ch for ch in v if ch.isdigit()]
    return ''.join(digits[:5])
def node_update_k(node, value, tag):
    """Adds 'k' and 'v' values from tag as new key:value pair to node.

    Address keys ('addr:*') are nested under node['address']; a few special
    keys are renamed so they do not clobber existing node fields.
    """
    k = value
    v = tag.attrib['v']
    if k.startswith('addr:'):
        # Ignore 'addr:street:' keys with 2 colons
        if k.count(':') == 1:
            if 'address' not in node:
                node['address'] = {}
            if k == 'addr:postcode' and len(v) > 5:
                v = fix_postcode(v)
            # Fix all substrings of street names using a
            # more generalized update method from audit.py
            elif k == 'addr:street':
                v = audit.update(v, audit.mapping)
            # Store under the suffix after 'addr:' (e.g. 'street').
            node['address'][k[5:]] = v
    # Check for highway exit number nodes
    elif k == 'ref' and node['type'] == 'node':
        node['exit_number'] = v
    # Check for 'k' values that equal the string 'type'
    # which would overwrite previously written node['type']
    elif k == 'type':
        node['service_type'] = v
    # Process other k:v pairs normally
    else:
        node[k] = v
    return node
def process_tiger(node, value, tag):
    """
    Adds a Tiger GPS value ('tiger:__') from the tag as a new
    key:value pair to node['address']
    """
    # Street-name fragments that get assembled later by join_segments().
    name_segments = ['name_type', 'name_base', 'name_direction_prefix',
                     'name_direction_suffix', 'name_direction_suffix_1']
    k = value[6:] # the substring following 'tiger:'
    v = tag.attrib['v']
    if 'address' not in node:
        node['address'] = {}
    # NOTE(review): this branch is not conditioned on k, so once a node has
    # a 'name', the zip/segment branches below are never taken — confirm
    # that is intended.
    if 'name' in node:
        node['address']['street'] = node['name']
    elif k == 'zip_left':
        node['address']['postcode'] = v
    elif k in name_segments:
        # First segment seen creates an empty segments dict; note the value
        # of that first segment is not stored on this call.
        if 'street' not in node['address']:
            node['address']['street'] = {k:'' for k in name_segments}
        elif isinstance(node['address']['street'], dict):
            node['address']['street'][k] = v
    return node
def join_segments(s):
    """
    Joins 'tiger:__' street name substring values (prefix, base,
    type, suffix) in dict s to a string
    """
    # Expand abbreviated segments via the audit mapping (mutates s in place).
    # NOTE(review): this tests the segment *key* (e.g. 'name_type') against
    # audit.mapping rather than the segment value — looks suspicious; confirm.
    for segment in s:
        if segment in audit.mapping:
            s[segment] = audit.mapping[segment]
    # Fixed display order; empty segments are dropped before joining.
    ordered = [ s['name_direction_prefix'], s['name_base'],
                s['name_type'], s['name_direction_suffix'],
                s['name_direction_suffix_1'] ]
    segments = [s for s in ordered if s]
    return ' '.join(segments)
def shape_element(element):
    """
    Takes an XML tag as input and returns a cleaned and reshaped
    dictionary for JSON ouput. If the element contains an abbreviated
    street name, it returns with an updated full street name.

    Only 'node' and 'way' elements are shaped; anything else yields None.
    """
    node = {}
    if element.tag == "node" or element.tag == "way" :
        node['type'] = element.tag
        node['created'] = {}
        if 'lat' in element.attrib:
            # Get coordinates
            node['pos'] = get_pos(element)
        # Begin iterating over subtags
        for tag in element.iter():
            for key, value in tag.items():
                if key in CREATED:
                    node['created'][key] = value
                # Check for problem characters and ignored values
                # in second-level tag 'k' attribute
                elif key == 'k' and not re.search(problemchars, value):
                    if not ignoring(value):
                        if value.startswith('tiger:'):
                            node = process_tiger(node, value, tag)
                        else:
                            node = node_update_k(node, value, tag)
                # Create/update array 'node_refs'
                elif key == 'ref':
                    if 'node_refs' not in node:
                        node['node_refs'] = []
                    node['node_refs'].append(value)
                # Process remaining tags
                elif key not in ['v', 'lat', 'lon']:
                    node[key] = value
        if 'address' in node and 'street' in node['address']:
            if isinstance(node['address']['street'], dict):
                # Replace saved dict of street name segments with full street name
                node['address']['street'] = join_segments(node['address']['street'])
        # Safe to clear() now that element has been processed
        # (keeps iterparse memory usage flat on large OSM files).
        element.clear()
        return node
    else:
        return None
def process_map(file_in, pretty = False):
    """
    Outputs a JSON file with the above structure.
    Returns the data as a list of dictionaries.
    If running main_test(), comment out all array 'data' operations.

    One JSON document per line is written to '<file_in>.json'.
    """
    file_out = "{0}.json".format(file_in)
    #data = []
    with codecs.open(file_out, "w") as fo:
        # Streaming parse: elements are shaped and released one at a time.
        parser = ET.iterparse(file_in)
        for __, elem in parser:
            el = shape_element(elem)
            if el:
                #data.append(el)
                # Output to JSON
                if pretty:
                    fo.write(json.dumps(el, indent=2)+"\n")
                else:
                    fo.write(json.dumps(el) + "\n")
    # Drop the parser explicitly to free its buffers.
    del parser
    #return data
def main_test():
    """Process the Raleigh OSM extract (Python 2 script: print statement)."""
    data = process_map('raleigh_north-carolina.osm', False)
    #print len(data)
    print 'Map processed'
# Script entry point.
if __name__ == '__main__':
    main_test()
|
from gym.envs.registration import register
# Register the templated Gym environment so gym.make('<name>-v0') works.
# The {{cookiecutter.*}} placeholders are substituted when the project
# template is rendered by cookiecutter.
register(
    id='{{cookiecutter.env_name}}-v0',
    entry_point='gym_{{cookiecutter.env_name.lower()}}.envs:{{cookiecutter.env_name}}Env',
)
|
#! /usr/bin/python
import naive_bayes_EM
import os,sys,getopt,random,csv
import numpy as np
from data_utilities import countData,readData,random_gen
# Default dataset locations (per-machine paths; overridable on the CLI).
DATAPATH="/home/wei/data_processing/data/car/car.data"
DATAPATHS=['data/car/car.unacc.data','data/car/car.acc.data','data/car/car.good.data','data/car/car.vgood.data']
# Default EM iteration counts: non-stochastic (ITERCN) and stochastic (ITERSN).
ITERCN = 20
ITERSN = 1
# Runtime flags toggled by command-line options in main().
_VERBOSE = False
_MAXLOG = False
_OUTPUT = False
_DATE = False
# Feature names of the UCI 'car' dataset.
ATTRIBUTES = ['buyPrice','maintPrice','numDoors','numPersons','lugBoot','safety']
OUTPUTDIR ='/home/wei/share/nbem/outputs/'
# Likelihood type selector (see usage(); only 0 is implemented).
LTYPE = 0
def usage():
    """Print command-line usage information to stdout (Python 2 script)."""
    print "%s [-c type_of_likelihood] [-n nonstochastic_iteration_times] [-s stochastic_iteration_times] [-v] [-o] [-d] [-k initial_clustering_number] [-i initial_method] [-a] [-u] [filenames]"%sys.argv[0]
    print "	[-c type_of_likelihood]: 0 for normal likelihood;1 for classification likelihood;2 for naive bayesian network. 0 By default"
    print "	[-n iteration_times]: set nonstochastic iteration times for EM method. Default is 20"
    print "	[-s stochastic_iteration_times]: set stochastic iteration times for EM method. Default is 1"
    print "	[-v]: set verbose mode. Print other detail infomation"
    print "	[-o]: output predicted class label and original label as well for further analysis"
    print "	[-d]: output file name with time stamp, only valid together with -o option"
    print "	[-k initial_clustering_number]: set an initial clustering number for EMNB or ECMNB."
    print """	[-i initial_method]: set initial methods of label for EM method.
		initial_method: an integer specifying the initial method
		0: uniform initialization
		1: k points methods. Refer to PHAM2009 'Unsupervised training of bayesian networks for data clustering'"""
    print "	[-a]: set default attributes information"
    print "	[-b]: add bayes smooth operation at last"
    print "	[--alpha value_of_alpha]: specify value of alpha for prior dirichelet distribution"
    print "	[-u]: has no label info"
def main(argv):
    """Parse command-line options and run the NBEM clustering pipeline.

    `argv` is the option list (sys.argv[1:]); positional arguments override
    DATAPATHS.  Python 2 script (print statements).
    """
    try:
        opts, args = getopt.getopt(argv,"hc:n:s:k:i:bvodpu",["help","alpha="])
    except getopt.GetoptError:
        print 'option parsing error!'
        usage()
        sys.exit(2)
    # Option values are written into module-level globals.
    global ITERCN
    global ITERSN
    global _VERBOSE
    global _MAXLOG
    global _OUTPUT
    global _DATE
    global LTYPE
    global OUTPUTDIR
    # NOTE(review): LOGDIR is declared global but never defined at module
    # level in this file — confirm it is unused or defined elsewhere.
    global LOGDIR
    global DATAPATHS
    initMethod = 0
    _PARTITION = False
    _attr = False
    _bayes= False
    numc = 4
    alpha = 2.0
    _labeled = True
    # NOTE(review): `opt in ("-c")` tests substring membership in a plain
    # string, not tuple membership; it works for these exact options but a
    # tuple ("-c",) would be safer.
    for opt,arg in opts:
        if opt in ("-h","--help"):
            usage()
            sys.exit(0)
        elif opt in ("-c"):
            LTYPE = int(arg)
            #if LTYPE != 0 and LTYPE !=1 and LTYPE!=2:
            #print "Oh I don't know this type of likelihood: %d"
        elif opt in ("-n"):
            ITERCN = int(arg)
        elif opt in ("-s"):
            ITERSN = int(arg)
        elif opt in ("-v"):
            _VERBOSE = True
        #elif opt in ("-l"):
        #_MAXLOG= True
        elif opt in ("-o"):
            _OUTPUT= True
        elif opt in ("-d"):
            _DATE= True
        elif opt in ("-p"):
            _PARTITION= True
        elif opt in ("-k"):
            numc = int(arg)
        elif opt in ("-i"):
            initMethod = int(arg)
        elif opt in ("-a"):
            _attr=True
        elif opt in ("-b"):
            _bayes=True
        elif opt in ("-u"):
            _labeled=False
        elif opt in ("--alpha"):
            print arg
            alpha=float(arg)
    if LTYPE == 0:
        random.seed()
        if len(args) > 0:
            DATAPATHS = args
        # Shuffle the input rows and persist a copy for reproducibility.
        rdata = random_gen(DATAPATHS,_random=True)
        out_rdata = open('rdata.csv','w')
        writer = csv.writer(out_rdata)
        writer.writerows(rdata)
        out_rdata.close()
        nbem=naive_bayes_EM.MultinomialNBEM(alpha=alpha)
        nbem.setVerbose(_VERBOSE)
        if _OUTPUT:
            nbem.setOutput(OUTPUTDIR)
        # Vectorise the raw rows, with or without labels / attribute names.
        if _labeled:
            if _attr:
                xdata_ml,ydata=nbem.fit_transformRaw(rdata,True,ATTRIBUTES)
            else:
                xdata_ml,ydata=nbem.fit_transformRaw(rdata,True)
        else:
            if _attr:
                xdata_ml=nbem.fit_transformRaw(rdata,False,ATTRIBUTES)
            else:
                xdata_ml=nbem.fit_transformRaw(rdata,False)
        # Fit the model, then evaluate (with labels when available).
        nbem.build(numc,xdata_ml,ITERSN,ITERCN,initMethod,_DATE,_bayes=_bayes)
        if _labeled:
            nbem.testModel(xdata_ml,ydata,_DATE)
        else:
            nbem.testModel(xdata_ml,timestamp=_DATE)
        #out_proba = open('predict_prob','w')
        #res = nbem.predict_proba(xdata_ml)
        #for item in res:
        #print >>out_proba,item
        #out_proba.close()
    else:
        raise ValueError("Oh I just know NBEM. The corresponding LTYPE is 0")
if __name__=='__main__':
    # main() expects a list of CLI options; sys.argv[1:] is already [] when
    # no arguments are given, so a single call covers both cases.
    # BUG FIX: the old else-branch called main("") — a string, not a list —
    # relying on getopt accepting it by accident.
    main(sys.argv[1:])
|
import mlconf
# Paired fixtures: the same data expressed in nested and dot-flattened form.
nested = {'a':{'c': 1, 'd': (2, {'a': 3}), 'e': {'f': 3, 'g': 4}}, 'b': 5}
flat = {'a.c': 1, 'a.d': (2, {'a': 3}), 'a.e.f': 3, 'a.e.g': 4, 'b': 5}
def test_to_flat_dict():
    """Flattening the nested fixture yields the flat fixture."""
    assert mlconf.to_flat_dict(nested) == flat
def test_to_flat_on_flat():
    """Flattening an already-flat dict is a no-op."""
    assert mlconf.to_flat_dict(flat) == flat
def test_to_flat_no_copy():
    """In-place flattening: to_flat_dict(..., copy=False) mutates its argument.

    BUG FIX: this test previously duplicated test_to_nested_no_copy — it
    called to_nested_dict on a copy of `flat` and compared against `nested`,
    so the to_flat in-place path was never exercised.
    """
    nested_copy = dict(nested)
    mlconf.to_flat_dict(nested_copy, copy=False)
    assert(nested_copy == flat)
def test_to_nested_dict():
    """Nesting the flat fixture yields the nested fixture."""
    assert mlconf.to_nested_dict(flat) == nested
def test_to_nested_on_nested():
    """Nesting an already-nested dict is a no-op."""
    assert mlconf.to_nested_dict(nested) == nested
def test_to_nested_no_copy():
    # copy=False mutates the argument in place into nested form.
    flat_copy = dict(flat)
    mlconf.to_nested_dict(flat_copy, copy=False)
    assert(flat_copy == nested)
def test_to_and_from():
    """Flatten and nest are mutually inverse on both fixtures."""
    round_trip_flat = mlconf.to_flat_dict(mlconf.to_nested_dict(flat))
    round_trip_nested = mlconf.to_nested_dict(mlconf.to_flat_dict(nested))
    assert flat == round_trip_flat
    assert nested == round_trip_nested
|
# -*- coding: utf-8 -*-
"""
-------------------------------------------------
File Name: bullet
Description :
Author : zhengbin
date: 2019/11/11
-------------------------------------------------
Change Activity:
2019/11/11:
-------------------------------------------------
"""
__author__ = 'zhengbin <rjguanwen001@163.com>'
import pyglet
from version_6.game import physicalobject, resources, player
class Bullet(physicalobject.PhysicalObject):
    """A bullet fired by the player ship."""
    def __init__(self, *args, **kwargs):
        super(Bullet, self).__init__(
            resources.bullet_image,
            *args,
            **kwargs
        )
        # Schedule die() to fire once, 1 second from now.
        # (The original comment said 0.5 s, but the delay passed here is 1.)
        pyglet.clock.schedule_once(self.die, 1)
        # Mark this object as a bullet for collision handling elsewhere.
        self.is_bullet = True
    # Remove the bullet from the screen.
    def die(self, dt):
        self.dead = True
    def handle_collision_with(self, obj2):
        """Action after a collision: the bullet always dies.

        The commented-out branch would have ignored collisions with the
        player's own ship.
        """
        # if isinstance(obj2, player.Player):
        #     # ship collided with its own bullet
        #     pass
        # else:
        #     self.dead = True
        self.dead = True
import copy
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.patches as patches
class Simulator:
    """Replays precomputed robot paths on a map and scores survivor visits."""
    def __init__(self, world_map, robots):
        """
        Inputs:
            world_map: The map to be explored
            robots: list of Robot objects from RobotClass.py
        """
        # Deep copies so the simulation cannot mutate the caller's objects.
        self.map = copy.deepcopy(world_map)
        self.robots = copy.deepcopy(robots)
        #Container to identify, which survivors have been seen by the robots
        self.visited_survivors = set()
        self.score = 0
        self.iterations = 0
        self.found_goal = False
    def run(self):
        """Tick until the longest robot path is exhausted or an end condition hits."""
        duration = max([len(r.final_path) for r in self.robots])
        for x in range(0, duration):
            end = self.tick()
            if end:
                break
    def tick(self):
        """Advance the simulation one step; return True when it should stop."""
        self.iterations += 1
        #Update the location of the robots
        for r in self.robots:
            # Generate an action from the robot path
            # action = r.follow_direction_path()
            action = r.follow_path()
            # Move the robot
            r.move(action)
        # Update the explored map based on robot position
        self._update_map()
        # Update the score
        self.score = len(self.visited_survivors)*10
        #End when all survivors have been reached OR 1,000 iterations
        if (len(self.visited_survivors) == self.map.num_survivors) or (self.iterations == 1000):
            self.found_goal = len(self.visited_survivors) == self.map.num_survivors
            return True
        else:
            return False
    def reset_game(self):
        """Clear iteration count, score and visited-survivor state."""
        self.iterations = 0
        self.score = 0
        self.visited_survivors = set()
    def get_score(self):
        return self.score
    def get_iteration(self):
        return self.iterations
    def _update_map(self):
        """Mark survivors within sensing range of any robot as visited."""
        # Sanity check the robot is in bounds
        r_locs = list()
        for r in self.robots:
            if not r.check_valid_loc():
                print(r.get_loc())
                raise ValueError(f"Robot has left the map. It is at position: {r.get_loc()}, outside of the map boundary")
            r_locs.append(r.get_loc())
        # NOTE(review): uses robots[0].sensing_range for every robot —
        # assumes a homogeneous team; confirm.
        new_survivors = self.map.nearby_survivors(r_locs, self.robots[0].sensing_range)
        self.visited_survivors = self.visited_survivors.union(new_survivors)
    def visualize(self):
        """Plot survivors (red=missed, green=visited), hotspots, holes and robot paths."""
        plt.xlim(self.map.bounds[0]-.5, self.map.bounds[1]+(self.map.bounds[1]*.05))
        plt.ylim(self.map.bounds[0]-.5, self.map.bounds[1]+(self.map.bounds[1]*.05))
        ax = plt.gca()
        survivor_x = [i[0] for i in self.map.survivor_locs]
        survivor_y = [i[1] for i in self.map.survivor_locs]
        plt.scatter(survivor_x, survivor_y, color='tab:red')
        survivor_x = [i[0] for i in self.visited_survivors]
        survivor_y = [i[1] for i in self.visited_survivors]
        plt.scatter(survivor_x, survivor_y, color='tab:green')
        hotspot_x = [i[0] for i in self.map.hotspots]
        hotspot_y = [i[1] for i in self.map.hotspots]
        plt.scatter(hotspot_x, hotspot_y, color='black', marker="x")
        for spot in self.map.invalid_locations:
            hole = patches.Rectangle(spot, 1, 1, linewidth=2, facecolor='black')
            ax.add_patch(hole)
        for r in self.robots:
            robot_x = [p.location[0] for p in r.final_path]
            robot_y = [p.location[1] for p in r.final_path]
            plt.plot(robot_x, robot_y)
        plt.show()
|
# Interactive script: read two integers and print their difference.
# BUG FIX (user-facing text): "Substraction" -> "Subtraction", and the second
# prompt now starts with a capital letter like the first.
print("Subtraction of two integers!!!\n")
x = int(input("Please enter the first number: "))
y = int(input("Please enter the second number: "))
z = x - y
print("Subtraction is: ", z)
|
# coding=utf-8
"""
@author: mirrorChen
@license: (C) Copyright 2011-2018, mirror personal Limited.
@contact: chenjingxu3@dafycredit.com
@software: JY_Android_AT
@file: add.py
@time: 2019/6/26 19:34
@desc:
"""
import time, datetime
import pymysql.cursors
import random
from excel_pub import ExcelUtil
# Default connection settings for the local 'atp' database.
# NOTE(review): credentials are hard-coded in source — move to config/env.
mysql_info = {"host": 'localhost',
              "port": 3306,
              "user": 'root',
              "passwd": 'cjx123456',
              "db": 'atp',
              "charset": 'utf8'}
class MysqlUtil():
    '''
    MySQL helper.

    Connection settings come from the module-level `mysql_info` dict.
    mysql_execute: run one SQL statement with commit/rollback handling.
    cdata: read rows from the configured Excel sheet; the INSERT logic is
    currently commented out, so it only prints each row.
    '''
    def __init__(self, mysql_info=mysql_info):
        # Excel workbook used as the data source for cdata().
        # NOTE(review): hard-coded local path — adjust per machine.
        self.Excel = ExcelUtil("D:\\5435.xlsx", "ai")
        self.db_info = mysql_info
        u'''连接池方式'''
        self.conn = MysqlUtil.__getConnect(self.db_info)
    @staticmethod
    def __getConnect(db_info):
        '''Open and return a pymysql connection from the given settings.

        NOTE(review): on failure this prints and implicitly returns None,
        so later attribute access on self.conn would raise — confirm.
        '''
        try:
            conn = pymysql.connect(host=db_info['host'],
                                   port=db_info['port'],
                                   user=db_info['user'],
                                   passwd=db_info['passwd'],
                                   db=db_info['db'],
                                   charset=db_info['charset'])
            return conn
        except Exception as a:
            print("数据库连接异常:%s" % a)
    def mysql_execute(self, sql):
        '''Execute one SQL statement: commit on success, rollback on error.'''
        cur = self.conn.cursor()
        try:
            cur.execute(sql)
        except Exception as a:
            self.conn.rollback() # roll back when execution raised
            print("执行SQL语句出现异常:%s" % a)
        else:
            cur.close()
            self.conn.commit() # commit when no exception occurred
    def cdata(self):
        """Iterate the Excel rows and print them.

        The phone/ident INSERT statements below are all commented out, so
        this currently has no database side effects; constT/phoneO/st and
        the tt/ti conversions are leftovers of that disabled logic.
        """
        constT = 500
        phoneO = 15433331001
        info = self.Excel.next()
        st = []
        #for i in range(0, constT):
        for k in info:
            tt = int(k["ACCOUNT_NO"])
            ti = k["IDENT"]
            print(k)
            #st = 'INSERT INTO CD(phone,bankNo,ident) VALUES (' + str(phoneO) + str(tt)+str(ti)+ ')'
            # if ti[-1] == "X":
            #     pass
            # else:
            #     sql = st+str(ti)+ ')'
            #self.mysql_execute(st)
            #phoneO = phoneO + 2
        # for j in info:
        #     if j["IDENT"][-1] == "X":
        #         print("身份证尾号是X")
        #     else:
        #         tt = int(j["IDENT"])
        #         sql = 'INSERT INTO CD(ident) VALUES(' + str(tt) + ')'
        #         self.mysql_execute(sql)
        # print("ident Done!")
# Script entry point: connect and dump the Excel rows.
if __name__ == "__main__":
    cda = MysqlUtil()
    cda.cdata()
|
import numpy as np
# Position-side constants used by the strategy loops.
SELL = 0
BUY = 1
# annual average market return (expected); used as the risk-free rate
# in the ratio metrics below
RISK_FREE_RATE = 0.07
####################################
#            Strategies            #
####################################
# Contract:
# each strategy function must produce a
# tuple of (buys, sells) price lists
def getMomentum(prices, ma1, ma2):
    """
    Momentum strategy: buy when the short-term moving average crosses above
    the long-term one, sell when it crosses back below.  Long-only; a still
    open position is force-closed at the final price.  Returns (buys, sells).
    """
    # The shorter array corresponds to the longer averaging window.
    if len(ma1) < len(ma2):
        short_ma, long_ma = ma2, ma1
    else:
        short_ma, long_ma = ma1, ma2
    # Align all three series on the long MA's length (drop the oldest points).
    n = len(long_ma)
    prices = prices[len(prices) - n:]
    short_ma = short_ma[len(short_ma) - n:]
    buys, sells = [], []
    holding = False
    for i in range(1, len(prices)):
        prev_above = short_ma[i - 1] > long_ma[i - 1]
        now_above = short_ma[i] > long_ma[i]
        if not holding:
            if not prev_above and now_above:  # upward cross: buy signal
                buys.append(prices[i])
                holding = True
        elif prev_above and not now_above:    # downward cross: sell signal
            sells.append(prices[i])
            holding = False
    # Force a final exit if still holding a position.
    if len(buys) > len(sells):
        sells.append(prices[-1])
    return buys, sells
def getMeanReversion(prices, ma1, ma2):
    """
    Mean-reversion strategy: buy when the short-term moving average crosses
    below the long-term one (price abnormally low), sell when it crosses
    back above.  Long-only; a still open position is force-closed at the
    final price.  Returns (buys, sells).
    """
    # The shorter array corresponds to the longer averaging window.
    if len(ma1) < len(ma2):
        short_ma, long_ma = ma2, ma1
    else:
        short_ma, long_ma = ma1, ma2
    # Align all three series on the long MA's length (drop the oldest points).
    n = len(long_ma)
    prices = prices[len(prices) - n:]
    short_ma = short_ma[len(short_ma) - n:]
    buys, sells = [], []
    holding = False
    for i in range(1, len(prices)):
        prev_above = short_ma[i - 1] > long_ma[i - 1]
        now_above = short_ma[i] > long_ma[i]
        if not holding:
            if prev_above and not now_above:  # downward cross: buy signal
                buys.append(prices[i])
                holding = True
        elif not prev_above and now_above:    # upward cross: sell signal
            sells.append(prices[i])
            holding = False
    # Force a final exit if still holding a position.
    if len(buys) > len(sells):
        sells.append(prices[-1])
    return buys, sells
def getBuyAndHold(prices):
    """
    CONTROL STRATEGY
    Buy at the first price, sell at the last.

    :param prices: sequence of asset prices
    :return: ([first price], [last price])
    """
    return [prices[0]], [prices[-1]]
####################################
# Performance Metrics #
####################################
# Contract:
# Each performance metric must
# take a list of buy and sell prices
def getCumulativeProfit(buys, sells):
    """Return the total compounded profit as a fraction (0.0 == break-even).

    Raises ValueError when the buy and sell lists differ in length.

    BUG FIX: the original computed `(cumProfit - 1) / 100`, which is neither
    a fraction nor a percentage and is inconsistent with the ratio metrics
    below (they all use `cumProfit -= 1`).
    """
    if len(buys) != len(sells):
        raise ValueError("Incompatible dimensions")
    cumProfit = 1
    for buyPrice, sellPrice in zip(buys, sells):
        cumProfit *= sellPrice / buyPrice
    # A ratio product of 1 means no profit, so shift to a 0-centred fraction.
    return cumProfit - 1
def getAverageProfitPerTrade(buys, sells):
    """Return the mean per-trade profit (sell price minus buy price).

    Raises ValueError when the buy and sell lists differ in length.
    """
    if len(buys) != len(sells):
        raise ValueError("Incompatible dimensions")
    total = sum(sellPrice - buyPrice for buyPrice, sellPrice in zip(buys, sells))
    # Divide by the number of trades (ZeroDivisionError on empty input,
    # exactly like the original accumulator loop).
    return total / len(buys)
def getSharpeRatio(buys, sells):
    """Sharpe-style ratio: excess cumulative return over the risk-free rate,
    divided by the standard deviation of per-trade returns (denominator 1
    when there is only a single trade)."""
    if len(buys) != len(sells):
        raise ValueError("Incompatible dimensions")
    returns = [sellPrice / buyPrice for buyPrice, sellPrice in zip(buys, sells)]
    cumProfit = 1
    for r in returns:
        cumProfit *= r
    # A ratio product of 1 means break-even.
    cumProfit -= 1
    denominator = 1 if len(returns) == 1 else np.std(returns)
    return (cumProfit - RISK_FREE_RATE) / denominator
def getSortinoRatio(buys, sells):
    """
    Sortino ratio: excess cumulative return over the standard deviation
    of the losing trades' returns only.
    :param buys: sequence of buy prices
    :param sells: sequence of sell prices, paired index-wise with buys
    :raises ValueError: if the two sequences differ in length
    """
    if len(buys) != len(sells):
        raise ValueError("Incompatible dimensions")
    cumProfit = 1
    for buyPrice, sellPrice in zip(buys, sells):
        cumProfit *= sellPrice / buyPrice
    # A product of 1 means break-even.
    cumProfit -= 1
    # Only losing trades (bought higher than sold) enter the denominator.
    negReturns = [s / b for b, s in zip(buys, sells) if b > s]
    divisor = 1 if len(negReturns) <= 1 else np.std(negReturns)
    return (cumProfit - RISK_FREE_RATE) / divisor
def getSterlingRatio(buys, sells):
    """
    Sterling ratio: excess cumulative return over the standard deviation
    of the absolute drawdowns (losing trades' sell - buy differences).
    :param buys: sequence of buy prices
    :param sells: sequence of sell prices, paired index-wise with buys
    :raises ValueError: if the two sequences differ in length
    """
    if len(buys) != len(sells):
        raise ValueError("Incompatible dimensions")
    cumProfit = 1
    for buyPrice, sellPrice in zip(buys, sells):
        cumProfit *= sellPrice / buyPrice
    # A product of 1 means break-even.
    cumProfit -= 1
    # Drawdowns are the (negative) price differences of losing trades.
    drawdowns = [s - b for b, s in zip(buys, sells) if b > s]
    divisor = 1 if len(drawdowns) <= 1 else np.std(drawdowns)
    return (cumProfit - RISK_FREE_RATE) / divisor
# testing code
if __name__ == "__main__":
    # Ad-hoc smoke test with a fabricated price series and two precomputed
    # moving-average arrays.  NOTE(review): ma1 has 16 elements while
    # prices/ma2 have 17 — presumably getMACrossover tolerates the shorter
    # array; confirm, since results are silently discarded below anyway.
    prices = np.array([1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17])
    ma2 = np.array([0.841470985,0.909297427,0.141120008,-0.756802495,-0.958924275,-0.279415498,0.656986599,0.989358247,0.412118485,-0.544021111,-0.999990207,-0.536572918,0.420167037,0.990607356,0.65028784,-0.287903317,-0.961397492])
    ma1 = np.array([0.479425539,0.841470985,0.997494987,0.909297427,0.598472144,0.141120008,-0.350783228,-0.756802495,-0.977530118,-0.958924275,-0.705540326,-0.279415498,0.215119988,0.656986599,0.937999977,0.989358247])
    buys, sells = getMACrossover(prices, ma1, ma2)
    cumProfit = getCumulativeProfit(buys, sells)
    average = getAverageProfitPerTrade(buys, sells)
    buys, sells = getBuyAndHold(prices)
    cumProfit = getCumulativeProfit(buys, sells)
    average = getAverageProfitPerTrade(buys, sells)
from rest_framework import serializers
from sale.models import Sale, SaleProduct
from user.models import User
class SaleSerializer(serializers.ModelSerializer):
    """Serialize every field of the Sale model."""
    class Meta:
        model = Sale
        fields = '__all__'
class SaleProductSerializer(serializers.ModelSerializer):
    """Serialize every field of the SaleProduct model (sale line items)."""
    class Meta:
        model = SaleProduct
        fields = '__all__'
|
#-------------------------------------------------------------------------------
# Name: hangman
# Purpose:
#
# Author: D.Kiselevsky
#
# Created: 26.05.2020
# Copyright: (c) D.Kiselevsky 2020
# Licence: GPL
#-------------------------------------------------------------------------------
def main():
    """Placeholder entry point; the game logic currently runs at module level."""
# NOTE(review): main() is an empty stub, so this guard is a no-op; the
# actual game below runs unconditionally at import time.  Consider moving
# the game loop into main().
if __name__ == '__main__':
    main()
import random
print('H A N G M A N\n')
def game_cycle():
    """Play one round of hangman: pick a random word and give the player
    8 wrong guesses before they are hanged."""
    wordlist = ['python', 'java', 'kotlin', 'javascript']
    secret = random.choice(wordlist)
    correct_letters = []
    tried_letters = []
    attempts = 8
    alphabet = 'qwertyuiopasdfghjklzxcvbnm'
    while attempts > 0:
        # Show the word with unguessed letters masked by hyphens.
        revealed = set(correct_letters)
        masked = ''.join(ch if ch in revealed else '-' for ch in secret)
        print(f'\n{masked}')
        letter = input('Input a letter:')
        # Validation guards: invalid input never costs an attempt.
        if len(letter) != 1:
            print('You should input a single letter')
            continue
        if not (letter.islower() and letter in set(alphabet)):
            print('It is not an ASCII lowercase letter')
            continue
        if letter in set(tried_letters):
            print('You already typed this letter')
            continue
        if letter in secret:
            correct_letters.append(letter)
        else:
            attempts -= 1
            print('No such letter in the word')
        tried_letters.append(letter)
        # Win: every distinct letter of the word has been found.
        if set(correct_letters) == set(secret):
            attempts = 0
            print(f'\n{secret}\nYou guessed the word!\nYou survived!\n')
        # Loss: attempts exhausted without completing the word.
        if attempts == 0 and not (set(correct_letters) == set(secret)):
            print('You are hanged!\n')
# Top-level menu: repeat until the user asks to exit.
while True:
    command_line = input('Type "play" to play the game, "exit" to quit:')
    if command_line == 'exit':
        break
    if command_line == 'play':
        game_cycle()
|
import json
import os
import numpy
from datetime import datetime
from shutil import rmtree
import keras
import keras_applications
# Wire keras_applications to this Keras installation so that the model
# factory below (MobileNetV2) uses the local backend/engine/layers/utils.
keras_applications.set_keras_submodules(
    backend=keras.backend,
    engine=keras.engine,
    layers=keras.layers,
    models=keras.models,
    utils=keras.utils)
from keras_applications.mobilenet_v2 import MobileNetV2
from keras.models import Model
from keras.layers import GlobalAveragePooling2D, Dense
from keras.optimizers import SGD
from keras.callbacks import TensorBoard
from coremltools.converters.keras import convert
from .generator import make_generator
from .database import categories
def train_mobilenets(epochs=None):
    """Fine-tune an ImageNet-pretrained MobileNetV2 on the project dataset.

    Two phases: (1) train only the new sigmoid classifier head with the
    backbone frozen, then (2) unfreeze layers from index 70 upward and
    fine-tune at a lower learning rate.  Saves the Keras model, the label
    list, and a CoreML conversion under ./model, with TensorBoard logs
    under ./log/<timestamp>.

    :param epochs: epochs per phase, forwarded to fit_generator.
        NOTE(review): the default None would fail inside Keras —
        presumably callers always pass a value; confirm.
    """
    batch_size = 16
    # Pretrained feature extractor without its ImageNet classifier head.
    base_model = MobileNetV2(
        input_shape=(224, 224, 3),
        include_top=False,
        weights='imagenet',
    )
    category_ids, labels, class_weights = categories()
    # New head: global average pooling + one sigmoid unit per label.
    x = base_model.output
    x = GlobalAveragePooling2D()(x)
    x = Dense(len(labels), activation='sigmoid', name='sigmoid')(x)
    model = Model(inputs=base_model.input, outputs=x)
    # Phase 1: freeze the backbone so only the new head learns.
    for layer in base_model.layers:
        layer.trainable = False
    model.compile(
        optimizer=SGD(lr=0.01, momentum=0.9),
        loss='categorical_crossentropy'
    )
    # Per-run TensorBoard log directory keyed by timestamp.
    timestamp = datetime.now().isoformat(' ')[:19]
    log_dir = os.path.join('log', timestamp)
    if not os.path.exists(log_dir):
        os.makedirs(log_dir)
    tb_callback = TensorBoard(
        log_dir=log_dir,
        batch_size=batch_size,
        write_grads=True,
        write_images=True
    )
    train_generator, steps = make_generator(
        target_size=(224, 224, ),
        batch_size=batch_size,
        category_ids=category_ids,
    )
    # train the top of the model
    model.fit_generator(
        train_generator(),
        steps_per_epoch=steps,
        epochs=epochs,
        class_weight=class_weights,
        callbacks=[ tb_callback ],
    )
    # Phase 2: fine tune lower layers, only the top 2 blocks (index 70+).
    for layer in model.layers[:70]: layer.trainable = False
    for layer in model.layers[70:]: layer.trainable = True
    # slower learning rate for fine tuning
    model.compile(
        optimizer=SGD(lr=0.0001, momentum=0.9),
        loss='categorical_crossentropy'
    )
    model.fit_generator(
        train_generator(),
        steps_per_epoch=steps,
        epochs=epochs,
        class_weight=class_weights,
        callbacks=[ tb_callback ],
    )
    # Persist labels + trained model for serving/conversion.
    if not os.path.exists('model'):
        os.makedirs('model')
    with open('model/labels.json', 'w') as fp:
        json.dump(labels, fp)
    model.save('model/mobilenets.h5')
    # CoreML export; scale/bias map [0,255] pixels into [-1,1].
    coreml_model = convert(
        model,
        input_names='image',
        image_input_names='image',
        class_labels=labels,
        image_scale=2./255,
        red_bias=-1,
        green_bias=-1,
        blue_bias=-1,
    )
    coreml_model.save('model/mobilenets.mlmodel')
|
import logging
import re
import ipaddress
import datetime
import broker
import broker.bro
from select import select
from enum import Enum, unique
logger = logging.getLogger(__name__)
def convertRecord(name, m):
    """Convert a broker record (a list of field values) into a dict keyed
    by the field names for the given record type.

    Nested records are converted recursively using the path-style name
    "parent->field".  Returns None (after logging) for unknown names.

    :param name: record type path, e.g. "rule" or "rule->entity->conn"
    :param m: the record's values as a list (logged and still processed
        best-effort if it is not a list, as before)
    """
    if not isinstance(m, list):
        logger.error("Got non record element")
    rec = m
    # Field names per record type, in wire order.
    if name == "rule":
        elements = ['ty', 'target', 'entity', 'expire', 'priority', 'location', 'out_port', 'mod', 'id', 'cid']
    elif name == "rule->entity":
        elements = ['ty', 'conn', 'flow', 'ip', 'mac']
    elif name == "rule->entity->conn":
        elements = ['orig_h', 'orig_p', 'resp_h', 'resp_p']
    elif name == "rule->entity->flow":
        elements = ['src_h', 'src_p', 'dst_h', 'dst_p', 'src_m', 'dst_m']
    elif name == "rule->mod":
        elements = ['src_h', 'src_p', 'dst_h', 'dst_p', 'src_m', 'dst_m', 'redirect_port']
    else:
        logger.error("Unknown record type %s", name)
        return None
    # IDIOM: pair names with values via zip instead of manual indexing;
    # also avoids shadowing the builtin `dict` as the original did.
    result = {}
    for field, value in zip(elements, rec):
        if value is None:
            result[field] = None
        elif isinstance(value, list):
            # Nested record: recurse with the extended path name.
            result[field] = convertRecord(name + "->" + field, value)
        else:
            result[field] = convertElement(value)
    return result
def convertElement(el):
    """Convert a single broker value into a plain Python value.

    Counts become ints, addresses/networks become strings, ports become
    (number, protocol) tuples, enums lose their namespace prefix, lists
    are converted element-wise, and already-plain types pass through.
    Unsupported types are logged and returned unchanged.
    """
    if isinstance(el, broker.Count):
        return el.value
    # IDIOM: the four consecutive ipaddress checks collapse into one
    # isinstance with a tuple; stray semicolons removed throughout.
    if isinstance(el, (ipaddress.IPv4Address, ipaddress.IPv6Address,
                       ipaddress.IPv4Network, ipaddress.IPv6Network)):
        return str(el)
    if isinstance(el, broker.Port):
        # Split "1234/tcp"-style strings into (port, protocol).
        p = str(el)
        ex = re.compile('([0-9]+)(.*)')
        res = ex.match(p)
        return (res.group(1), res.group(2))
    if isinstance(el, broker.Enum):
        # Strip the "Module::" namespace prefix from the enum name.
        tmp = el.name
        return re.sub(r'.*::', r'', tmp)
    if isinstance(el, list):
        return [convertElement(ell) for ell in el]
    # Plain types need no conversion.
    if isinstance(el, (datetime.datetime, datetime.timedelta, int, str)):
        return el
    logger.error("Unsupported type %s", type(el))
    return el
@unique
class ResponseType(Enum):
    """Kinds of responses getNextCommand/handleBrokerMessage can yield."""
    ConnectionEstablished = 1
    Error = 2
    AddRule = 3
    RemoveRule = 4
    SelfEvent = 5
class NetControlResponse:
    """Value object describing one NetControl event received over broker.

    BUGFIX: the original defined __init__ twice; the first (no-argument)
    definition was dead code, silently overridden by the second.  Only
    the definition that actually ran is kept.
    """
    def __init__(self, rty, **kwargs):
        """
        :param rty: a ResponseType member classifying this response
        :param kwargs: optional fields — errormsg (str), pluginid,
            rule (converted dict), rawrule (original broker record)
        """
        self.type = rty
        self.errormsg = kwargs.get('errormsg', '')
        self.pluginid = kwargs.get('pluginid', None)
        self.rule = kwargs.get('rule', None)
        self.rawrule = kwargs.get('rawrule', None)
class Endpoint:
    """Broker endpoint that listens for Bro/Zeek NetControl events on one
    topic and translates them into NetControlResponse objects."""
    def __init__(self, queue, host, port):
        # Topic this endpoint subscribes to and publishes replies on.
        self.queuename = queue
        self.epl = broker.Endpoint()
        self.epl.listen(host, port)
        # Status subscriber delivers peering events (e.g. PeerAdded).
        self.status_subscriber = self.epl.make_status_subscriber(True)
        self.subscriber = self.epl.make_subscriber(self.queuename)
        logger.debug("Set up listener for "+host+":"+str(port)+" ("+queue+")")
    def getNextCommand(self):
        """Block until a relevant broker message arrives; return the
        corresponding NetControlResponse."""
        while True:
            logger.debug("Waiting for broker message...")
            # Wait on both subscriber fds so either a status update or a
            # data message wakes us.
            readable, writable, exceptional = select(
                [self.status_subscriber.fd(), self.subscriber.fd()],
                [], [])
            if ( self.status_subscriber.fd() in readable ):
                logger.debug("Handling broker status message...")
                msg = self.status_subscriber.get()
                if isinstance(msg, broker.Status):
                    if msg.code() == broker.SC.PeerAdded:
                        logger.info("Incoming connection established")
                        return NetControlResponse(ResponseType.ConnectionEstablished)
                # Other status messages are ignored; keep waiting.
                continue
            elif ( self.subscriber.fd() in readable ):
                logger.debug("Handling broker message...")
                msg = self.subscriber.get()
                return self.handleBrokerMessage(msg)
    def handleBrokerMessage(self, m):
        """Decode one (topic, event) tuple into a NetControlResponse."""
        if type(m).__name__ != "tuple":
            logger.error("Unexpected type %s, expected tuple", type(m).__name__)
            return NetControlResponse(ResponseType.Error)
        if len(m) < 1:
            logger.error("Tuple without content?")
            return NetControlResponse(ResponseType.Error)
        (topic, event) = m
        ev = broker.bro.Event(event)
        event_name = ev.name()
        logger.debug("Got event "+event_name)
        if event_name == "NetControl::broker_add_rule":
            return self._add_remove_rule(ev.args(), ResponseType.AddRule)
        elif event_name == "NetControl::broker_remove_rule":
            return self._add_remove_rule(ev.args(), ResponseType.RemoveRule)
        # Events we published ourselves come back to us; tag them so the
        # caller can ignore them.
        elif event_name == "NetControl::broker_rule_added":
            return NetControlResponse(ResponseType.SelfEvent)
        elif event_name == "NetControl::broker_rule_removed":
            return NetControlResponse(ResponseType.SelfEvent)
        elif event_name == "NetControl::broker_rule_error":
            return NetControlResponse(ResponseType.SelfEvent)
        elif event_name == "NetControl::broker_rule_timeout":
            return NetControlResponse(ResponseType.SelfEvent)
        else:
            logger.warning("Unknown event %s", event_name)
            return NetControlResponse(ResponseType.Error, errormsg="Unknown event"+event_name)
    def _add_remove_rule(self, m, rtype):
        """Validate and decode an add_rule/remove_rule event payload."""
        # add_rule carries 2 arguments, remove_rule carries 3.
        if ( (rtype == ResponseType.AddRule) and ( len(m) != 2 ) ) or ( (rtype == ResponseType.RemoveRule) and ( len(m) != 3 ) ):
            logger.error("wrong number of elements or type in tuple for add/remove_rule event")
            return NetControlResponse(ResponseType.Error, errormsg="wrong number of elements or type in tuple for add/remove_rule event")
        if ( not isinstance(m[0], broker.Count) or
            not isinstance(m[1], list) ):
            logger.error("wrong types of elements or type in tuple for add/remove_rule event")
            return NetControlResponse(ResponseType.Error, errormsg="wrong types of elements or type in tuple for add/remove_rule event")
        id = m[0].value
        rule = convertRecord("rule", m[1])
        return NetControlResponse(rtype, pluginid=id, rule=rule, rawrule=m[1])
    def sendRuleAdded(self, response, msg):
        """Acknowledge a successfully added rule back to the peer."""
        self._rule_event("added", response, msg)
    def sendRuleRemoved(self, response, msg):
        """Acknowledge a successfully removed rule back to the peer."""
        self._rule_event("removed", response, msg)
    def sendRuleError(self, response, msg):
        """Report a rule failure back to the peer."""
        self._rule_event("error", response, msg)
    def _rule_event(self, event, response, msg):
        # Publish NetControl::broker_rule_<event> with the original raw rule.
        args = [broker.Count(response.pluginid), response.rawrule, msg]
        ev = broker.bro.Event("NetControl::broker_rule_"+event, args)
        self.epl.publish(self.queuename, ev)
|
import pygame
import sys
from settings import Settings
from board import Board
class ChessGame:
    """Top-level pygame application: window setup, input handling, and the
    render loop for the 2D chess board."""
    def __init__(self):
        pygame.init()
        self.settings = Settings()
        self.screen = pygame.display.set_mode((self.settings.screen_width, self.settings.screen_height))
        pygame.display.set_caption("2d Chess by Jason Chen")
        self.board = Board(self)
        # Currently highlighted square as (row, col); empty tuple = none.
        self.square_selected = ()
        # Up to two selections: [source, destination] for a move attempt.
        self.select_history = []
        # True while it is white's turn.
        self.w_turn = True
    def run_game(self):
        """Run the blocking event/render loop until the window is closed."""
        while True:
            for event in pygame.event.get():
                if event.type == pygame.QUIT:
                    sys.exit()
                elif event.type == pygame.MOUSEBUTTONDOWN:
                    # Convert pixel coordinates to board coordinates.
                    # NOTE(review): board_object is indexed [col][row]
                    # below — confirm row/col naming matches Board.
                    mouse_location = pygame.mouse.get_pos()
                    row = mouse_location[0] // self.settings.square_size
                    col = mouse_location[1] // self.settings.square_size
                    if self.square_selected == (row, col):
                        # Clicking the same square again deselects it.
                        self.square_selected = ()
                        self.select_history = []
                    else:
                        if len(self.select_history) == 0:
                            # First click: only allow selecting a piece of
                            # the side whose turn it is.
                            if self.board.board_object[col][row] is None:
                                pass
                            elif self.w_turn:
                                if self.board.board_object[col][row].team == 'w':
                                    self.square_selected = (row, col)
                                    self.select_history.append(self.square_selected)
                            else:
                                if self.board.board_object[col][row].team == 'b':
                                    self.square_selected = (row, col)
                                    self.select_history.append(self.square_selected)
                        else:
                            # Second click: any square is a valid target.
                            self.square_selected = (row, col)
                            self.select_history.append(self.square_selected)
                    if len(self.select_history) == 2:
                        # Attempt the move; only a legal move flips the turn.
                        if self.board.move(self.select_history):
                            self.w_turn = not self.w_turn
                        self.select_history = []
                        self.square_selected = ()
                elif event.type == pygame.KEYDOWN:
                    # 'u' undoes the last move.
                    if event.key == pygame.K_u:
                        self.board.undo_move()
            # Redraw the full frame every iteration.
            self.screen.fill(self.settings.bg_color)
            self.board.draw_board()
            self.board.draw_piece()
            pygame.display.flip()
if __name__ == '__main__':
    # Create the game instance and enter the blocking event loop.
    chess_game = ChessGame()
    chess_game.run_game()
|
from HTTPServer.httpserver import *
from HTTPServer.httpserver_thread import *
from SA.Tree import *
if __name__ == '__main__':
    # Build the root directory entry and its file content for the toy tree.
    snake = Tree('snake', 'rarararororoelatemehumaso')
    # Start the server on localhost port 8888; edit here for another port.
    # The user picks the threaded or the single-threaded implementation.
    print("Escolha qual tipo de server você quer inicializar\n")
    print("Servidor com thread - (1)\n" + "Servidor sem thread(2)\n")
    a = input()
    if a == '1':
        server = HTTPServer_thread('localhost', 8888, snake)
        server.iniciaServer()
    elif a == '2':
        server = HTTPServer('localhost', 8888, snake)
        server.iniciaServer()
    else:
        print("Entrada invalida!")
|
import re
import os
import json
import socket
from subprocess import Popen, PIPE, check_output, CalledProcessError
import base64
HOST, PORT = '', 8080  # bind all interfaces on port 8080
#https://docs.python.org/fr/3/howto/sockets.html
#INET:IPV4, STREAM:TCP
#By default, sockets are always created in blocking mode
listen_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
# SO_REUSEADDR lets the server restart immediately without waiting for
# the previous socket to leave TIME_WAIT.
listen_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
listen_socket.bind((HOST, PORT))
# The listen() backlog tells the socket library to queue up to this many
# (1 here) pending connection requests before refusing external
# connections.  If the rest of the code is written correctly this is enough.
listen_socket.listen(1)
print ('Serving HTTP on port %s ...' % PORT)
# Main accept/serve loop: parses two GET endpoints (/getFilter and
# /getReducedEdges), shells out to the Release/latuile and Release/bombix
# binaries, and returns their JSON over the raw socket.
while True:
    # accept() blocks and waits for an incoming connection. When a client connects, it returns a new socket
    # object representing the connection and a tuple holding the address of the client. The tuple will contain
    # (host, port) for IPv4 connections or (host, port, flowinfo, scopeid) for IPv6.
    # One thing that’s imperative to understand is that we now have a new socket object from accept(). This is
    # important since it’s the socket that you’ll use to communicate with the client. It’s distinct from the
    # listening socket that the server is using to accept new connections.
    # To see the current state of sockets on your host, use netstat -an
    client_connection, client_address = listen_socket.accept()
    # The bufsize argument of 2048 used below is the maximum amount of data to be received at once.
    # It doesn’t mean that recv() will return 2048 bytes.
    request = client_connection.recv(2048)
    # When recv returns 0 bytes, the peer has closed (or is closing) the
    # connection; no more data will ever arrive on it.
    #if len(request)==0:
    #    continue
    print(str(request))
    # Route by regex on the raw request line.
    m1 = re.search(r'GET /getFilter\?([^ ]*) HTTP', str(request))
    m2 = re.search(r'GET /getReducedEdges\?data=([^ ]*) HTTP', str(request))
    try:
        if m1:
            # /getFilter payload is hex-packed: 3-digit count, then
            # 6 hex chars per rectangle, then 3-digit link count, then
            # 6 hex chars per link.
            data = m1.group(1)
            nr_rects = int(data[:3], 16)
            print('nr_rects')
            print(str(nr_rects))
            data = data[3:]
            rectdim = data[:nr_rects*6]
            data = data[nr_rects*6:]
            nr_links = int(data[:3],16)
            data = data[3:]
            print('nr_links')
            print(str(nr_links))
            links = data[:nr_links*6]
            print(rectdim)
            print(links)
            #"http_get_param":"01408d0280a203804006807807808d03810b04804708804e0480d304808505806a06806907803903803f04809a0580620380700380700480320380c407801700100e00200e00200500300400501100600e00600500700000701000800900900f00a01200a01000a00900b00d00c00e00e00701000301100701300a01300701300801300b"
            #http://localhost:8080/getFilter?0110af0880940180550180b00180940280850580a10880a20880940380b70180940180c40d811118808603807806806303807805801200300100c00b00c00800e00800d00800a00800a00900100000700800700500700600700200700100700200900700f00801000f002004ffff1
            #http://localhost:8080/getFilter?01a04e06807f08808510807104808d0380630180860180860180710180b618807801809401806301809401807f0280780380a90580550180470180710380780580710180a901806a02808d02807107802201000201001301401601401500e00900e00d01700401901501901601901801800400900800900600900700900a00900b00900400901000900400900400900f00901400900200900e00901900900000900c000002000001002001003004004000004002006005ffffff3
            # First stage: layout via the latuile binary.
            command=['Release/latuile', '--rectdim', rectdim, '--links', links]
            print(str(command))
            json1 = check_output(command).decode("ascii")
            # Re-split the packed strings into per-item chunks.
            rectdim = [rectdim[i:i+6] for i in range(0, len(rectdim), 6)]
            print('rectdim')
            print(str(rectdim))
            edges = [(int(links[i:i+3],16), int(links[i+3:i+6],16)) for i in range(0, len(links), 6)]
            print('links')
            print(str(edges))
            data = json.loads(json1)
            # Second stage: route edges per context via the bombix binary.
            for context in data['contexts']:
                frame = context['frame']
                print('frame')
                print(str(frame))
                frame="{:04x}{:04x}{:04x}{:04x}".format(frame['left'],frame['right'],frame['top'],frame['bottom'])
                print('frame')
                print(frame)
                translations = "".join("{:03x}{:03x}".format(tB['translation']['x'],tB['translation']['y']) for tB in context['translatedBoxes'])
                print(translations)
                # Map original box ids to dense indices for this context.
                idmap={}
                for tB in context['translatedBoxes']:
                    idmap[int(tB['id'])] = len(idmap)
                print('idmap')
                print(idmap)
                reverse_idmap = {v:k for k,v in idmap.items()}
                print('reverse_idmap')
                print(reverse_idmap)
                rectdim_ = "".join([rectdim[int(tB['id'])] for tB in context['translatedBoxes']])
                print('rectdim_')
                print(rectdim_)
                assert(len(rectdim_)==len(translations))
                print('translations')
                print(translations)
                # Keep only edges whose both endpoints are in this context.
                links_ = "".join(["{:02x}{:02x}".format(idmap[s],idmap[t]) for s,t in edges if s in idmap and t in idmap])
                print('links_')
                print(links_)
                command=['Release/bombix','--frame', frame,'--rectdim', rectdim_,'--translations', translations,'--links', links_]
                print(str(command))
                json2 = check_output(command).decode("ascii")
                print('json2')
                print(json2)
                polylines = json.loads(json2)
                # Translate dense indices back to the original box ids.
                for polyline in polylines:
                    for u in ['from','to']:
                        polyline[u] = reverse_idmap[ polyline[u] ]
                context['links'] = polylines
            print('contexts')
            print(json.dumps(data))
            http_response = bytearray(json.dumps(data),'ascii')
            client_connection.sendall(b'HTTP/1.0 200 OK\r\nAccess-Control-Allow-Origin:*\r\nAccess-Control-Allow-Headers: Origin, X-Requested-With, Content-Type, Accept\r\nContent-Length: %d\r\nContent-Type: text/html; charset=UTF-8\r\n\r\n%s\r\n' % (len(http_response), http_response))
        #	client_connection.close()
        elif m2:
            # /getReducedEdges payload: 16-hex frame, rect count, rect
            # dims, translations, link count, links — bombix only.
            data = m2.group(1)
            print(data)
            frame = data[:16]
            print('frame')
            print(frame)
            data = data[16:]
            nr_rects = int(data[:3], 16)
            print('nr_rects');
            print(nr_rects);
            data = data[3:]
            rectdim = data[:6*nr_rects]
            print('rectdim')
            print(rectdim)
            data = data[6*nr_rects:]
            translations = data[:6*nr_rects]
            print('translations')
            print(translations)
            data = data[6*nr_rects:]
            nr_links = int(data[:3],16)
            links = data[3:]
            print('links')
            print(links)
            command=['Release/bombix','--frame', frame,'--rectdim', rectdim,'--translations', translations,'--links', links]
            print(str(command))
            json2 = check_output(command).decode("ascii")
            print('json2')
            print(json2)
            http_response = bytearray(json2,'ascii')
            client_connection.sendall(b'HTTP/1.0 200 OK\r\nAccess-Control-Allow-Origin:*\r\nAccess-Control-Allow-Headers: Origin, X-Requested-With, Content-Type, Accept\r\nContent-Length: %d\r\nContent-Type: text/html; charset=UTF-8\r\n\r\n%s\r\n' % (len(http_response), http_response))
        #	client_connection.close()
    except CalledProcessError as e:
        # NOTE(review): check_output returns bytes, so e.output is bytes;
        # bytearray(bytes, 'ascii') raises TypeError — this handler likely
        # needs e.output.decode('ascii') or bytearray(e.output). Confirm.
        http_response = bytearray(e.output,'ascii')
        client_connection.sendall(b'HTTP/1.0 200 OK\r\nAccess-Control-Allow-Origin:*\r\nAccess-Control-Allow-Headers: Origin, X-Requested-With, Content-Type, Accept\r\nContent-Length: %d\r\nContent-Type: text/html; charset=UTF-8\r\n\r\n%s\r\n' % (len(http_response), http_response))
        client_connection.close()
        print(e.output)
    except ValueError:
        http_response = bytearray("Could not convert data to an integer.",'ascii')
        client_connection.sendall(b'HTTP/1.0 200 OK\r\nAccess-Control-Allow-Origin:*\r\nAccess-Control-Allow-Headers: Origin, X-Requested-With, Content-Type, Accept\r\nContent-Length: %d\r\nContent-Type: text/html; charset=UTF-8\r\n\r\n%s\r\n' % (len(http_response), http_response))
        client_connection.close()
        print("Could not convert data to an integer.")
|
from scipy import stats
from sklearn.feature_extraction.text import CountVectorizer
import numpy as np
import pickle
import os
import random
from matplotlib import pyplot as plt
from sklearn.feature_extraction.text import TfidfTransformer
from sklearn.svm import SVC, LinearSVC
from sklearn.feature_extraction.text import CountVectorizer
from nltk.stem.snowball import EnglishStemmer
import graphs
def stemmed_words(doc):
    """Tokenize *doc* and yield the stem of each token.

    Relies on the module-level `stemmer` and `analyzer` globals defined
    just below; stemming keeps only each word's root, which improves
    accuracy and speed of the vectorizer.
    """
    return (stemmer.stem(w) for w in analyzer(doc))
# Build the corpus: 250 books by women + 250 by men, each tagged with a
# category letter, plus the shared stemmer/analyzer and vectorizer.
# Make the directory Graphs if it is not there.
if "Graphs" not in os.listdir():
    os.mkdir("Graphs")
# Define what stemmer will be used and which function will turn words into numbers.
stemmer = EnglishStemmer()
analyzer = CountVectorizer().build_analyzer()
# Make the directory Women if it is not there.
if "Women" not in os.listdir():
    os.mkdir("Women")
women_list = []
os.chdir("Women")
# open all the books in the directory and save the content in the proper list.
# NOTE(review): the bare except silently skips unreadable/undecodable
# files — intentional best-effort loading, but it hides real errors.
for _file in os.listdir():
    try:
        with open(_file, "r") as read_file:
            women_list.append(read_file.read())
    except:
        continue
# Give the category tag to each book
women_list = [("W", x) for x in women_list]
# Take only the first 250 books.
women_list = women_list[:250]
os.chdir("..")
men_list = []
# Make the directory Men if it is not there.
if "Men" not in os.listdir():
    os.mkdir("Men")
os.chdir("Men")
# open all the books in the directory and save the content in the proper list.
for _file in os.listdir():
    try:
        with open(_file, "r") as read_file:
            men_list.append(read_file.read())
    except:
        continue
os.chdir("..")
# Give a category tag to each book in this list
men_list = [("M", x) for x in men_list]
# Take only the first 250 books
men_list = men_list[:250]
# Put together the 250 books by men and the 250 books by women.
both_list = women_list + men_list
# make a word count to vector object which takes single words, bigrams, trigrams and 4-grams,
# deletes stopwords and stems the remaining
cv = CountVectorizer(ngram_range = (1,4),
                     stop_words = "english",
                     analyzer = stemmed_words)
######
#
# PLOTTING OF DISTRIBUTIONS HERE
# This takes time.
"""
dist_number = 0
for book in both_list:
    if len(book[1]) > 1000:
        try:
            f=plt.figure(figsize=(14,14))
            book_vec = cv.fit_transform([book[1]])
            book_vec = book_vec.toarray()
            print(book_vec.shape)
            graphs.graphics(book_vec)
            plt.savefig("./Graphs/distribution-%s.png" % dist_number)
            plt.close()
            dist_number += 1
        except:
            continue
"""
#######
# Run the classification experiment 10 times on random train/test splits
# and collect the LinearSVC accuracy of each run.
# The scores of each time you execute the experiment will be stored here.
scoresLinear = []
for testing in range(10):
    # make the selection random.
    random.shuffle(both_list)
    # get the books contents, not their category
    texts = [book[1] for book in both_list]
    print("Number of Books:", len(both_list))
    # get the cateogries for each book (either "Man" or "Woman")
    categories = [x[0] for x in both_list]
    print("Categories: ", set(categories))
    # choose the training set size
    texts_train = texts[:350]
    categories_train = categories[:350]
    # choose the testing set size
    texts_test = texts[350:]
    categories_test = categories[350:]
    print("Fitting the Data...\n")
    # fit the training set set.
    cv_fit= cv.fit_transform(texts_train)
    cv_trans = cv.transform(texts_train)
    # get the names of the features.
    names = cv.get_feature_names()
    # convert the data into a matrix (np array)
    arr = cv_fit.toarray()
    # apparently in bag-of-word methodology this algorithm is often applied
    # Term-Frequency-Inverse-Document-Frequency.
    transformer = TfidfTransformer().fit(cv_fit)
    # Turn the huge sparse matrix to a dense one to save resources.
    x_train_tf = transformer.transform(cv_fit).todense()
    print("Training the algorithm...\n")
    # Feed the matrix to the LinearSVC algorithm.
    linear = LinearSVC().fit(x_train_tf, categories_train)
    # Transform the testing set as well now
    X_new_counts = cv.transform(texts_test)
    X_new_tfidf = transformer.transform(X_new_counts)
    # Use the LinearSVC trained algorithm on the testing set
    predicted = linear.predict(X_new_tfidf)
    # get the average of accuracy and append it to the list of scores.
    print("linear: ", np.mean(predicted == categories_test))
    scoresLinear.append(np.mean(predicted == categories_test))
# print the scores, save them in a pickle file.
print(scoresLinear)
pickle.dump(scoresLinear, open( "scoresLinear.p", "wb" ))
|
--- setup.py Mon Oct 4 05:39:34 2004
+++ setup.py.new Tue Oct 5 09:22:31 2004
@@ -29,7 +29,7 @@
],
packages = ['cfgparse'],
data_files = [
- ('share/doc/cfgparse-%s' % VERSION, ['README', 'LICENSE-PSF',
+ ('share/doc/py-cfgparse', ['README', 'LICENSE-PSF',
'LICENSE', 'Changelog',
'html/index.html',
'html/style.css',
|
from django.http import HttpResponse
from django.shortcuts import redirect,render
from django.http import HttpResponseRedirect
from django.shortcuts import render_to_response
from techquiz.models import user
from django.contrib import auth
from django.template.context_processors import csrf
from django.views import generic
from django.core.mail import send_mail
def login(request):
    """Render the login page with a CSRF token in the template context."""
    context = {}
    context.update(csrf(request))
    return render_to_response('login.html', context)
def register(request):
    """Render the registration page with a CSRF token in the context."""
    context = {}
    context.update(csrf(request))
    return render_to_response('Register.html', context)
def addUser(request):
    """Create a quiz user from the registration POST data, then redirect
    to the login page.

    NOTE(review): the password is stored in plaintext — it should be
    hashed (e.g. via Django's auth framework) before this ships.
    """
    username = request.POST.get('user', '')
    password = request.POST.get('password', '')
    email = request.POST.get('Email', '')
    birth_date = request.POST.get('BDate', '')
    gender = request.POST.get('gender', '')
    new_user = user(
        User_Role="user",
        User_Email=email,
        User_Birth_Date=birth_date,
        User_Password=password,
        User_Gender=gender,
        User_Name=username,
    )
    new_user.save()
    return redirect('login:login')
def homepage(request):
    """Render the static home page."""
    return render_to_response('Homepage.html')
def forgot(request):
    """Render the password-recovery page with a CSRF token in the context."""
    context = {}
    context.update(csrf(request))
    return render_to_response('forgot.html', context)
def forgotdetail(request):
    """Handle the password-recovery form: if the username/email pair
    matches a user, e-mail them their password, then redirect to login.

    SECURITY NOTE(review): this mails the stored plaintext password (the
    message text says "new password" but no password is generated) —
    passwords should be hashed and a reset token sent instead.
    """
    username = request.POST.get('user', '')
    email = request.POST.get('email', '')
    # PERF/IDIOM: one query via first() instead of the original
    # filter() + exists() + get() triple round-trip.
    matched = user.objects.filter(User_Name=username, User_Email=email).first()
    if matched is not None:
        content = "your new password is :=" + str(matched.User_Password)
        send_mail('Hello ' + str(matched.User_Name), content, 'ramsky2021@gmail.com', [matched.User_Email], fail_silently=True)
    return redirect('login:login')
|
__author__ = 'Bill'
class Board:
"""
Class for the Connect 5 Game Board. The Board is always a 7 long by
9 high. A board always remembers the last move made.
"""
# Class Constants
BLANK_SPACE = "_" # Blank Space
PLAYER_ONE = "R" # Red Chip
PLAYER_TWO = "B" # Black Chip
BOARD_HEIGHT = 7
BOARD_LENGTH = 9
SCORE_TO_WIN = 5
    def __init__(self):
        """
        Create a new board.
        """
        # Column-major grid built by create_grid(); indexed grid[col][row].
        self.grid = self.create_grid()
        # [piece, col, row] of the most recent drop, or None before any move.
        self.last_move = None
def piece_at(self, row, col):
"""
Get the value of the piece at the specified location.
:param row: specified row
:param col: specified column
:return: the value of the piece at row 'row' and column 'col'.
"""
return self.grid[col][row]
def can_drop(self, col_num):
"""
:param col_num: the column number .
:return: An array containing a boolean and an integer. The boolean is used to determine
if a piece can be dropped and the integer is used to tell which row the piece will be dropped into.
"""
blank_found = False
row_num = self.BOARD_HEIGHT - 1
while not blank_found and row_num >= 0:
if self.grid[col_num][row_num] is not self.BLANK_SPACE:
row_num -= 1
else:
blank_found = True
return [blank_found, row_num]
def drop(self, dropped_char, col_num):
"""
Drop a piece into a column.
:param dropped_char: the value of the piece being dropped.
:param col_num: the column to drop the piece into.
:return: true if the drop was successful or false if the column is full.
"""
drop = self.can_drop(col_num)
can_drop = drop[0]
row_num = drop[1]
if can_drop:
self.grid[col_num][row_num] = dropped_char
self.last_move = [dropped_char, col_num, row_num]
def is_filled(self):
"""
:return: true if there are no blank spaces on the board.
"""
blank_found = False
for i in range(0, self.BOARD_HEIGHT):
if blank_found:
break
for j in range(0, self.BOARD_LENGTH):
if blank_found:
break
if self.grid[j][i] == self.BLANK_SPACE:
blank_found = True
return not blank_found
def find_n_in_a_row(self, piece, n):
"""
Find the number of times on the board where there are N adjacent
non-blank pieces of the same type on the board.
:param piece: the value of the piece.
:param n: Look for n in_a_row
:return: the number of n in_a_row for the specific piece on this board.
"""
count = 0
for row in range(0, Board.get_height()):
for col in range(0, Board.get_length()):
current_piece = self.grid[col][row]
if current_piece == piece:
count += self.num_n_in_a_row_horizontal(row, col, current_piece, n) # 0 or 1
count += self.num_n_in_a_row_vertical(row, col, current_piece, n) # 0 or 1
count += self.num_n_in_a_row_diagonal(row, col, current_piece, n) # 0, 1, or 2
return count
def num_n_in_a_row_horizontal(self, row, col, piece, n):
"""
Find the number of times on the board where there are N adjacent non-blank
pieces of the same type in a horizontal line on the board.
:param row: the row number of the piece
:param col: the column number of the piece
:param piece: the value of the piece
:param n: Look for n_in_a_row
:return: the number of times this piece satisfies the n_in_a_row condition horizontally. (Either 0 or 1)
"""
streak = 0
for curr_col in range(col, col + n): # count the initial piece automatically
if curr_col > Board.get_length() - 1: # check out of bounds
break
if self.grid[curr_col][row] != piece:
return 0
else:
streak += 1
if streak == n:
return 1
else:
return 0
def num_n_in_a_row_vertical(self, row, col, piece, n):
"""
Find the number of times on the board where there are N adjacent non-blank
pieces of the same type in a vertical line on the board.
:param row: the row number of the piece
:param col: the column number of the piece
:param piece: the value of the piece
:param n: Look for n_in_a_row
:return: the number of times this piece satisfies the n_in_a_row condition vertically. (Either 0 or 1)
"""
streak = 0
for curr_row in range(row - n + 1, row + 1): # count the initial piece automatically
if curr_row < 0: # check out of bounds
break
if self.grid[col][curr_row] != piece:
return 0
else:
streak += 1
if streak == n:
return 1
else:
return 0
def num_n_in_a_row_diagonal(self, row, col, piece, n):
    """
    Check for n adjacent identical pieces on the two diagonals that start at
    (col, row) and climb toward row 0 (up-right and up-left).
    :param row: the row number of the piece
    :param col: the column number of the piece
    :param piece: the value of the piece
    :param n: Look for n_in_a_row
    :return: 0, 1, or 2 — one point per diagonal direction that matches.
    """
    def full_run(col_step):
        # True when n consecutive matching cells run upward from (col, row)
        # in the given horizontal direction, all within the board.
        for offset in range(n):
            c = col + col_step * offset
            r = row - offset
            if c < 0 or c > Board.get_length() - 1 or r < 0:
                return False
            if self.grid[c][r] != piece:
                return False
        return True

    return int(full_run(1)) + int(full_run(-1))
def find_winner(self):
    """
    Check if the current board has a winner (SCORE_TO_WIN in a row in any
    direction).
    :return: the winning player's piece, or None if there is no winner.
    """
    needed = self.SCORE_TO_WIN
    # Player one is checked first, matching the turn order elsewhere.
    for player in (Board.PLAYER_ONE, Board.PLAYER_TWO):
        if self.find_n_in_a_row(player, needed) >= 1:
            return player
    return None
def find_disconnected_wins(self, piece):
    """
    Count blank cells that, when filled with *piece*, would complete a win.
    NOTE(review): only horizontal and diagonal completions are considered —
    there is no vertical check here; confirm that is intentional.
    :param piece: the player piece
    :return: the number of such winning blank cells.
    """
    total = 0
    for r in range(Board.get_height()):
        for c in range(Board.get_length()):
            if self.grid[c][r] == Board.BLANK_SPACE:
                total += self.find_horizontal_disconnected_wins(piece, c, r)
                total += self.find_disconnected_diagonal_wins(piece, c, r)
    return total
def find_horizontal_disconnected_wins(self, piece, col_num, row_num):
    """
    Would filling (col_num, row_num) complete a horizontal win for *piece*?
    :param piece: the character to check for.
    :param col_num: the column number of the blank cell.
    :param row_num: the row number of the blank cell.
    :return: 1 if the filled-in cell joins at least 4 pieces in a row, else 0.
    """
    def run_length(step):
        # Count consecutive matching cells starting one step away from the
        # blank cell and walking in the given horizontal direction.
        length = 0
        c = col_num + step
        while 0 <= c < self.BOARD_LENGTH and self.grid[c][row_num] == piece:
            length += 1
            c += step
        return length

    # The blank cell itself counts as 1; extend left and right.
    # NOTE(review): threshold is hard-coded to 4 while find_winner uses
    # SCORE_TO_WIN — confirm this heuristic is intentional.
    connection = 1 + run_length(-1) + run_length(1)
    return 1 if connection >= 4 else 0
def find_disconnected_diagonal_wins(self, piece, col_num, row_num):
    """
    Would filling (col_num, row_num) complete a diagonal win for *piece*?
    Both diagonals through the cell are checked independently.
    :param piece: the character to check for.
    :param col_num: the column number of the blank cell.
    :param row_num: the row number of the blank cell.
    :return: 0, 1, or 2 — one point per completed diagonal.
    """
    def run_length(col_step, row_step):
        # Count consecutive matching cells starting one step away from the
        # blank cell and walking in the given diagonal direction.
        length = 0
        c = col_num + col_step
        r = row_num + row_step
        while (0 <= c < self.BOARD_LENGTH and 0 <= r < self.BOARD_HEIGHT
               and self.grid[c][r] == piece):
            length += 1
            c += col_step
            r += row_step
        return length

    result = 0
    # Upper-left to lower-right diagonal (the blank cell counts as 1).
    # NOTE(review): threshold is hard-coded to 4 while find_winner uses
    # SCORE_TO_WIN — confirm this heuristic is intentional.
    if 1 + run_length(-1, -1) + run_length(1, 1) >= 4:
        result += 1
    # Lower-left to upper-right diagonal.
    if 1 + run_length(1, -1) + run_length(-1, 1) >= 4:
        result += 1
    return result
def create_grid(self):
    """
    Build the empty playing area as a list of columns.
    :return: a 2-dimensional array (column-major) representing the game grid.
    """
    return [self.create_empty_column() for _ in range(self.BOARD_LENGTH)]
def get_last_move(self):
    """
    :return: An array containing information about the last move, including the value
    of the piece, the column, and the row.
    """
    # NOTE(review): the exact element order of last_move is set by the
    # move-making code, which is not visible here — confirm before indexing.
    return self.last_move
def __str__(self):
    """
    Render the board row by row; each cell is printed as " x " and each row
    ends with a newline.
    :return: a string representation of the board.
    """
    rows = []
    for r in range(self.BOARD_HEIGHT):
        cells = (" " + self.grid[c][r] + " " for c in range(self.BOARD_LENGTH))
        rows.append("".join(cells))
    return "".join(row + "\n" for row in rows)
def create_empty_column(self):
    """
    :return: a fresh list of BOARD_HEIGHT blank cells for one column.
    """
    # BLANK_SPACE is an immutable value, so repeating it is safe.
    return [self.BLANK_SPACE for _ in range(self.BOARD_HEIGHT)]
@staticmethod
def get_length():
    """
    :return: the length (number of columns) of this board, a class constant.
    """
    return Board.BOARD_LENGTH
@staticmethod
def get_height():
    """
    :return: the height (number of rows) of this board, a class constant.
    """
    return Board.BOARD_HEIGHT
|
import json
import os
import asyncio
import discord
from discord import Permissions
from discord.ext import commands
from datetime import datetime
import utils
import io
from utils.Utility import clean_code, Pag
import contextlib
from traceback import format_exception
import ast
import importlib
import textwrap
# part of the pruge members
class MemberIDConverter(commands.MemberConverter):
    """Converter that resolves an argument to a Member when possible and
    otherwise falls back to a raw integer user id (used by the member-purge
    command to target users who may have left the guild)."""

    async def convert(self, ctx, argument):
        """Return a Member, or an int id, or raise BadArgument."""
        try:
            return await super().convert(ctx, argument)
        except commands.BadArgument:
            # Not a resolvable member — accept a plain numeric id instead.
            try:
                return int(argument)
            except ValueError:
                raise commands.BadArgument()
def insert_returns(body):
    """Recursively rewrite the last statement of an AST statement list so that
    its value is returned (used to turn the final expression of an eval'd
    snippet into the function's return value).

    :param body: a list of ast statement nodes (a function/`if`/`with` suite);
                 mutated in place.
    """
    # BUG FIX: guard against an empty suite. An `if` with no `else` has
    # orelse == [], and the recursive call below used to crash with
    # IndexError on body[-1].
    if not body:
        return
    last = body[-1]
    # insert return stmt if the last expression is a expression statement
    if isinstance(last, ast.Expr):
        body[-1] = ast.Return(last.value)
        ast.fix_missing_locations(body[-1])
    # for if statements, we insert returns into the body and the orelse
    if isinstance(last, ast.If):
        insert_returns(last.body)
        insert_returns(last.orelse)
    # for with blocks, again we insert returns into the body
    if isinstance(last, ast.With):
        insert_returns(last.body)
class devlopment(commands.Cog, command_attrs=dict(hidden=True)):
    """Owner-only development and maintenance cog: cog management, usage
    statistics, moderation helpers, logging, eval and restart. All commands
    are hidden and gated behind ``commands.is_owner()``."""

    def __init__(self, bot):
        self.bot = bot

    @commands.Cog.listener()
    async def on_ready(self):
        """Announce on the console that this cog finished loading."""
        print(f"{self.__class__.__name__}: [Loaded]")

    @commands.Cog.listener()
    async def on_command_completion(self, ctx):
        """Record one usage of every successfully completed command in the
        command_usage collection (``logout`` is deliberately not tracked)."""
        if ctx.command.qualified_name == "logout":
            return
        if await self.bot.command_usage.find(ctx.command.qualified_name) is None:
            await self.bot.command_usage.upsert(
                {"_id": ctx.command.qualified_name, "usage_count": 1}
            )
        else:
            await self.bot.command_usage.increment(
                ctx.command.qualified_name, 1, "usage_count"
            )

    @commands.command(aliases=["cs"])
    @commands.is_owner()
    @commands.cooldown(1, 5, commands.BucketType.guild)
    async def command_stats(self, ctx):
        """Paginate usage statistics for every tracked command."""
        data = await self.bot.command_usage.get_all()
        command_map = {item["_id"]: item["usage_count"] for item in data}
        # get total commands run
        total_commands_run = sum(command_map.values())
        # Sort by usage count, most used first
        sorted_list = sorted(command_map.items(), key=lambda x: x[1], reverse=True)
        pages = []
        cmd_per_page = 10
        for i in range(0, len(sorted_list), cmd_per_page):
            message = "Command Name: `Usage % | Num of command runs`\n\n"
            next_commands = sorted_list[i: i + cmd_per_page]
            for item in next_commands:
                use_percent = item[1] / total_commands_run
                message += f"**{item[0]}**: `{use_percent: .2%} | Ran {item[1]} times`\n"
            pages.append(message)
        await Pag(title="Command Usage Statistics!", color=0xff7700, entries=pages, length=1).start(ctx)

    # list servers
    @commands.command(aliases=['list'])
    @commands.is_owner()
    async def servers(self, ctx):
        """List every guild the bot is in (id, member count, name, owner)."""
        message = '```'
        message += '{!s:19s} | {!s:>5s} | {} | {}\n'.format('ID:', 'Member:', 'Name:', 'Owner:')
        for guild in self.bot.guilds:
            message += '{!s:19s} | {!s:>5s}| {} | {}\n'.format(guild.id, guild.member_count, guild.name, guild.owner)
        message += '```'
        embed = discord.Embed(
            color = 0xff7700
        )
        embed.add_field(name="Here are the servers i'm in:", value=f'{message}', inline=False)
        await ctx.send(embed=embed)

    # list bans
    @commands.command(name="listbans", description="shows all the bans in the server")
    @commands.is_owner()
    async def bans(self, ctx):
        """Show every ban entry of the current guild (id, name, reason)."""
        users = await ctx.guild.bans()
        if len(users) > 0:
            message = f'``{"ID:":21}{"Name:":25}Reason:\n'
            for entry in users:
                userID = entry.user.id
                userName = str(entry.user)
                if entry.user.bot:
                    userName = '🤖' + userName  #:robot: emoji
                reason = str(entry.reason)  # Could be None
                message += f'{userID:<21}{userName:25} {reason}\n'
            embed = discord.Embed(colour=0xff7700)
            embed.add_field(name='List of bans users:', value=message + '``', inline=False)
            await ctx.send(embed=embed)
        else:
            await ctx.send('There are no banned users!')

    # log command: dump channel history to a file and DM it to the owner
    @commands.command()
    @commands.is_owner()
    async def log(self, ctx, messages : int = 10000, *, chan : discord.TextChannel = None):
        """Save up to *messages* messages of *chan* (default: current channel)
        to a timestamped text file, DM it to the invoker, then delete it."""
        timeStamp = datetime.today().strftime("%Y-%m-%d %H.%M")
        logFile = 'Logs-{}.txt'.format(timeStamp)
        if not chan:
            chan = ctx
        # Remove original message
        await ctx.message.delete()
        mess = await ctx.send('Saving logs to *{}*...'.format(logFile))
        # history() only reads — nothing is deleted
        counter = 0
        msg = ''
        async for message in chan.history(limit=messages):
            counter += 1
            msg += message.content + "\n"
            msg += '----Sent-By: ' + message.author.name + '#' + message.author.discriminator + "\n"
            msg += '---------At: ' + message.created_at.strftime("%Y-%m-%d %H.%M") + "\n"
            if message.edited_at:
                msg += '--Edited-At: ' + message.edited_at.strftime("%Y-%m-%d %H.%M") + "\n"
            msg += '\n'
        # drop the final blank separator before writing
        msg = msg[:-2].encode("utf-8")
        with open(logFile, "wb") as myfile:
            myfile.write(msg)
        await mess.edit(content='Uploading *{}*...'.format(logFile), delete_after = 5)
        await ctx.author.send(file=discord.File(fp=logFile))
        await mess.edit(content='Uploaded *{}!*'.format(logFile), delete_after = 5)
        os.remove(logFile)

    # load cogs
    @commands.command()
    @commands.is_owner()
    async def load(self, ctx, extension):
        """Load the cog ``cogs.<extension>``."""
        extension = extension.lower()
        self.bot.load_extension(f'cogs.{extension}')
        embed = discord.Embed(title = f"The cog ``{extension}`` has loaded", color = 0xff7700, timestamp=ctx.message.created_at)
        embed.set_footer(text=f"{ctx.author}", icon_url=ctx.author.avatar_url)
        await ctx.message.delete()
        await ctx.send(embed=embed, delete_after=4)

    # unload cogs
    @commands.command()
    @commands.is_owner()
    async def unload(self, ctx, extension):
        """Unload the cog ``cogs.<extension>``."""
        extension = extension.lower()
        self.bot.unload_extension(f'cogs.{extension}')
        embed = discord.Embed(title = f"The cog ``{extension}`` has unloaded", color = 0xff7700, timestamp=ctx.message.created_at)
        embed.set_footer(text=f"{ctx.author}", icon_url=ctx.author.avatar_url)
        await ctx.message.delete()
        await ctx.send(embed=embed, delete_after=4)

    # reload cogs
    @commands.command()
    @commands.is_owner()
    async def reload(self, ctx, extension):
        """Reload the cog ``cogs.<extension>`` (unload then load)."""
        extension = extension.lower()
        self.bot.unload_extension(f'cogs.{extension}')
        self.bot.load_extension(f'cogs.{extension}')
        embed = discord.Embed(title = f"The cog ``{extension}`` has reloaded", color = 0xff7700, timestamp=ctx.message.created_at)
        embed.set_footer(text=f"{ctx.author}", icon_url=ctx.author.avatar_url)
        await ctx.message.delete()
        await ctx.send(embed=embed, delete_after=4)

    @commands.command(aliases=['ra'])
    @commands.is_owner()
    async def reloadall(self, ctx):
        """Reload every cog in ./cogs, updating a progress embed as it goes."""
        firstTime = True
        reloaded = []
        for filename in os.listdir('./cogs'):
            if filename.endswith('.py') and not filename.startswith("_"):
                self.bot.reload_extension(f'cogs.{filename[:-3]}')
                reloaded += [filename[:-3], ]
                if firstTime:
                    embedvar = discord.Embed(title='Reloading Cogs...', description='If you see this message for more than 10 seconds, an error most likely occurred, no cogs were reloaded', color = 0xff7700)
                    message = await ctx.send(embed=embedvar)
                    firstTime = False
                else:
                    embedvar1 = discord.Embed(title='Reloading Cogs...', description=f"Reloaded cog(s): {', '.join(reloaded)}", color = 0xff7700)
                    await asyncio.sleep(1)
                    await message.edit(embed=embedvar1)
        # NOTE(review): if ./cogs holds no reloadable files, `message` is
        # never bound and the edit below raises — confirm that cannot happen.
        embedvar1 = discord.Embed(title='Reloading Cogs...', description=f"Reloaded cog(s): {', '.join(reloaded)}", color = 0xff7700)
        embedvar1.add_field(name='Success!', value="Successfully reloaded all Cogs")
        await message.edit(embed=embedvar1, delete_after=15)

    # purge messages of a single member
    @commands.command(aliases=['dl', 'member'])
    @commands.is_owner()
    async def dmember(self, ctx, member: MemberIDConverter, limit: int=100, channel: discord.TextChannel=None):
        """Delete up to *limit* recent messages by *member* (a Member or a raw
        user id) in *channel* (default: the current channel)."""
        if channel is None:
            channel = ctx.channel
        if isinstance(member, discord.Member):
            def predicate(message):
                return message.author == member
        else:
            # MemberIDConverter fell back to a plain int id
            def predicate(message):
                return message.author.id == member
        # noinspection PyUnresolvedReferences
        await channel.purge(limit=limit, check=predicate)
        await ctx.message.delete()

    @commands.command(aliases=['bbl'])
    @commands.is_owner()
    async def bblacklist_add(self, ctx, user: discord.Member):
        """Add *user* to the bot blacklist (memory + persisted json)."""
        if ctx.message.author.id == user.id:
            await ctx.send("Hey, you cannot blacklist yourself!")
            return
        self.bot.blacklisted_users.append(user.id)
        data = utils.dbloader.read_json("blacklist")
        data["blacklistedUsers"].append(user.id)
        utils.dbloader.write_json(data, "blacklist")
        embed = discord.Embed(title = f"Hey, I have blacklisted `{user.name}` for you.", color = 0xff7700)
        await ctx.message.delete()
        await ctx.send(embed=embed, delete_after=10)

    @commands.command(aliases=['ubl'])
    @commands.is_owner()
    async def bblacklist_remove(self, ctx, user: discord.Member):
        """Remove *user* from the bot blacklist (memory + persisted json)."""
        self.bot.blacklisted_users.remove(user.id)
        data = utils.dbloader.read_json("blacklist")
        data["blacklistedUsers"].remove(user.id)
        utils.dbloader.write_json(data, "blacklist")
        embed = discord.Embed(title = f"Hey, I have unblacklisted `{user.name}` for you.", color = 0xff7700)
        await ctx.message.delete()
        await ctx.send(embed=embed, delete_after=10)

    # restart command
    @commands.command()
    @commands.is_owner()
    async def restart(self, ctx):
        """Announce a restart, close the bot cleanly, then spawn a new process."""
        embed = discord.Embed(title="Restarting...", color = 0xff7700)
        try:
            print("---------------------Restarting---------------------")
            await ctx.send(embed=embed)
            # BUG FIX: was `self.bot.cwlose()` — the AttributeError was
            # silently swallowed below, so the old session never closed.
            await self.bot.close()
        except Exception:
            pass
        finally:
            os.system("python main.py --cmd run")

    # hack ban (mass, by raw id)
    @commands.command()
    @commands.is_owner()
    async def masshackban(self, ctx, objects: commands.Greedy[discord.Object], *, reason: str=None):
        """Ban a list of raw user ids (users need not be in the guild)."""
        reason = reason or "Part of a mass ban!"
        for obj in objects:
            await ctx.guild.ban(obj, reason=reason)
        total = len(objects)
        embed = discord.Embed(
            title = f"hackbanned {total} user{'s' if total not in [0,1] else ''} from this guild.",
            color = 0xff7700
        )
        await ctx.send(embed=embed, delete_after=15)

    # mass ban (by member)
    @commands.command()
    @commands.is_owner()
    async def massban(self, ctx, objects: commands.Greedy[discord.Member], *, reason: str=None):
        """Ban a list of current guild members."""
        reason = reason or "Part of a mass ban!"
        for obj in objects:
            await ctx.guild.ban(obj, reason=reason)
        total = len(objects)
        embed = discord.Embed(
            title = f"banned {total} user{'s' if total not in [0,1] else ''} from this guild.",
            color = 0xff7700
        )
        await ctx.send(embed=embed, delete_after=15)

    @commands.command(aliases=["e"])
    @commands.is_owner()
    async def enabled(self, ctx, *, command):
        """Toggle the enabled state of another command by name."""
        command = self.bot.get_command(command)
        if command is None:
            embed = discord.Embed(
                title = "I can't find a command with that name!",
                color = 0xff7700
            )
            await ctx.send(embed=embed)
        elif ctx.command == command:
            # Refuse to disable the toggle itself — there would be no way back.
            # BUG FIX: corrected user-facing typo "cannnot" -> "cannot".
            embed = discord.Embed(
                title = "You cannot disable this command!",
                color = 0xff7700
            )
            await ctx.send(embed=embed)
        else:
            command.enabled = not command.enabled
            ternary = "Enabled" if command.enabled else "Disabled"
            embed = discord.Embed(
                title = f"I Have **{ternary}** ``{command.qualified_name}`` for you!",
                color = 0xff7700
            )
            await ctx.send(embed=embed)

    @commands.command(name="eval", aliases=["exec"])
    @commands.is_owner()
    async def _eval(self,ctx, *, code):
        """Execute arbitrary Python and paginate stdout plus the return value.
        SECURITY: this is exec() of user input — only safe because of the
        is_owner() check above; never loosen that gate."""
        code = clean_code(code)
        local_variables = {
            "discord": discord,
            "commands": commands,
            "bot": self.bot,
            "ctx": ctx,
            "channel": ctx.channel,
            "author": ctx.author,
            "guild": ctx.guild,
            "message": ctx.message
        }
        stdout = io.StringIO()
        try:
            with contextlib.redirect_stdout(stdout):
                exec(
                    f"async def func():\n{textwrap.indent(code, '    ')}", local_variables,
                )
                obj = await local_variables["func"]()
                result = f"{stdout.getvalue()}\n-- {obj}\n"
        except Exception as e:
            result = "".join(format_exception(e, e, e.__traceback__))
        pager = Pag(
            timeout=100,
            entries=[result[i: i + 2000] for i in range(0, len(result), 2000)],
            length=1,
            prefix="```py\n",
            suffix="```"
        )
        await pager.start(ctx)

    # part of the snipe command
    @commands.command()
    @commands.is_owner()
    async def catch_snipes(self, ctx):
        """Ensure every guild the bot is in has an entry in settings/snipe.json."""
        # BUG FIX: initialise before the try — a failed read used to leave
        # `guilds`/`snipes` unbound and the code below raised NameError.
        snipes = []
        guilds = []
        try:
            with open('settings/snipe.json') as snipeFile:
                snipes = json.load(snipeFile)
            for x in snipes:
                guilds.append(x['id'])
        except Exception as e:
            print(e)
        for y in ctx.bot.guilds:
            if y.id not in guilds:
                try:
                    with open('settings/snipe.json') as snipeFile:
                        snipes = json.load(snipeFile)
                except Exception as e:
                    print(e)
                newGuild = {
                    "id": y.id,
                    "author_avatar": "",
                    "content": "",
                }
                snipes.append(newGuild)
        embed = discord.Embed(
            title = "Done!",
            color = 0xff7700
        )
        await ctx.send(embed=embed)
        try:
            with open('settings/snipe.json', 'w') as outfile:
                json.dump(snipes, outfile, indent=4)
        except Exception as e:
            print(e)

    # get server info command
    @commands.command(aliases=["gsi"])
    @commands.is_owner()
    async def getserverinfo(self, ctx, *, guild_id: int):
        """Show detailed information about a guild the bot can see."""
        guild = self.bot.get_guild(guild_id)
        if guild is None:
            return await ctx.send("Hmph.. I got nothing..")
        members = set(guild.members)
        bots = filter(lambda m: m.bot, members)
        bots = set(bots)
        members = len(members) - len(bots)
        if guild == ctx.guild:
            # BUG FIX: original iterated over the boolean expression
            # `guild.roles != "@everyone"` (TypeError); filter the roles.
            roles = " ".join([x.mention for x in guild.roles if x.name != "@everyone"])
        else:
            roles = ", ".join([x.name for x in guild.roles if x.name != "@everyone"])
        embed = discord.Embed(
            title=f"**Guild info:** ``{guild.name}``",
            color = 0xff7700,
            timestamp=ctx.message.created_at
        )
        embed.add_field(name="**Owner:**", value=f"{guild.owner}\n{guild.id}", inline=True)
        embed.add_field(name="**Owner Nick:**", value=f"{guild.owner.nick}", inline=True)
        embed.add_field(name="**Owner Status:**", value=f"{guild.owner.status}", inline=True)
        embed.add_field(name="**Members/Bots:**", value=f"{members}:{len(bots)}", inline=True)
        embed.add_field(name="**Created at:**", value=guild.created_at.__format__('%A, %d. %B %Y'), inline=True)
        embed.add_field(name="**Region:**", value=f"{guild.region}", inline=True)
        embed.add_field(name="**Channels:**", value=len(guild.channels), inline=True)
        embed.add_field(name="**Voice Channels:**", value=len(guild.voice_channels), inline=True)
        embed.add_field(name="**Boosters:**", value=guild.premium_subscription_count, inline=True)
        embed.add_field(name="**Highest role:**", value=guild.roles[-1], inline=True)
        embed.add_field(name="**Verification Level:**", value=str(guild.verification_level), inline=True)
        embed.add_field(name="**Number of emotes:**", value=len(guild.emojis), inline=True)
        embed.add_field(name="**Number of roles:**", value=len(guild.roles), inline=True)
        embed.add_field(name="**Roles:**", value=f"{roles}", inline=True)
        embed.set_footer(text=f"{ctx.author.name}#{ctx.author.discriminator}", icon_url=ctx.author.avatar_url)
        embed.set_thumbnail(url=guild.icon_url)
        await ctx.send(embed=embed)
def setup(bot):
    """Entry point used by discord.py's load_extension to register this cog."""
    bot.add_cog(devlopment(bot))
|
from pandas import DataFrame
from typing import Callable, List
from snapshottest.pytest import PyTestSnapshotTest
from tests.model.lib import helper_descriptive_stats_groupby, helper_filesnapshot, helper_descriptive_stats
from saitama_data.datasetup.models.info import ClassIdSchoolId, City, SchoolQestionInfo
from saitama_data.datasetup.models.info.classid_schoolid import SchidSchoolid, SchoolClass
from saitama_data.datasetup.models.info.correspondence.model import Correspondence
def test_class_id_school_id(snapshot: PyTestSnapshotTest):
    """Snapshot the 'count' descriptive stats of the ClassIdSchoolId dataset."""
    df: DataFrame = (
        ClassIdSchoolId().read().data
        .pipe(helper_descriptive_stats, funcs=['count'])
    )
    helper_filesnapshot(snapshot=snapshot, dfx=df, name="class_id_school_id")
def test_sch_id_school_id(snapshot: PyTestSnapshotTest):
    """Snapshot the SchidSchoolid dataset ordered by sch_id for stable diffs."""
    df: DataFrame = (
        SchidSchoolid().read().data
        .sort_values(['sch_id'])
    )
    helper_filesnapshot(snapshot=snapshot, dfx=df, name="sch_id_school_id")
def test_school_class(snapshot: PyTestSnapshotTest):
    """Snapshot the SchoolClass dataset ordered by class_id for stable diffs."""
    df: DataFrame = (
        SchoolClass().read().data
        .sort_values(['class_id'])
    )
    helper_filesnapshot(snapshot=snapshot, dfx=df, name="school_class")
def test_correspondence(snapshot: PyTestSnapshotTest):
    """Snapshot the Correspondence dataset ordered by its composite key."""
    df: DataFrame = (
        Correspondence().read().data
        .sort_values(['qes', 'question_id', 'year'])
    )
    helper_filesnapshot(snapshot=snapshot, dfx=df, name="correspondence")
def test_school_qestion_info(snapshot: PyTestSnapshotTest):
    """Snapshot the SchoolQestionInfo dataset ordered by its unique key."""
    df: DataFrame = (
        SchoolQestionInfo().read().data
        .sort_values(['key_unique'])
    )
    helper_filesnapshot(snapshot=snapshot, dfx=df, name="school_qestion_info")
def test_city(snapshot: PyTestSnapshotTest):
    """Snapshot the City dataset ordered by city_id."""
    # NOTE(review): every sibling test accesses `.read().data`, but `.data`
    # is missing here — confirm City().read() itself returns a DataFrame,
    # otherwise this should read `City().read().data`.
    df: DataFrame = (
        City().read()
        .sort_values(['city_id'])
    )
    helper_filesnapshot(snapshot=snapshot, dfx=df, name="city")
|
def recursive_x(x, caminho):
    """Print caminho, caminho + 2, caminho + 4, ... while the value does not
    exceed x (one number per line). Produces no output when caminho > x."""
    while caminho <= x:
        print(caminho)
        caminho += 2
def main():
    """Read an integer upper bound from stdin and print 1, 3, 5, ... up to it."""
    x = int(input())
    recursive_x(x, 1)


# Runs immediately on import/execution (no __main__ guard in this script).
main()
from random import randint
from time import sleep

# Read the player's guess; only values 1..5 are treated as valid below.
n = int(input('Digite um valor entre [1 e 5]: '))
# Draw the computer's number in the same range (inclusive).
s = randint(1, 5)
print('Processando!')
sleep(3)  # artificial suspense delay
if n >= 1 and n <= 5:
    if n == s:
        print('Parabens! Nº Sorteado {} / Nº Escolhido {}'.format(s, n))
    else:
        print('Infelizmente não deu! Nº Sorteado {} / Nº Escolhido {}'.format(s, n))
else:
    # Out-of-range guess: no draw comparison is made.
    print('Você escolheu um valor invalido! {}... por favor digite entre 1 e 5.'.format(n))
|
#!/usr/bin/env python
__coding__ = "utf-8"
__author__ = " Ng WaiMing "
from pandas import DataFrame, Series
import pandas as pd
import numpy as np
import statsmodels.api as sm
from matplotlib import pyplot as plt
def peak_to_peak(arr):
    """Return the spread (max - min) of *arr* (Series/array-like with
    ``min``/``max`` methods)."""
    lowest = arr.min()
    highest = arr.max()
    return highest - lowest
def demean(arr):
    """Center *arr* by subtracting its mean (element-wise)."""
    center = arr.mean()
    return arr - center
def top(df, n=5, column='tip_pct'):
    """Return the *n* rows of *df* with the largest values in *column*,
    in ascending order of that column."""
    ordered = df.sort_values(by=column)
    return ordered.iloc[-n:]
def get_stats(group):
    """Return a summary dict (min/max/count/mean) for a Series-like group."""
    return {
        'min': group.min(),
        'max': group.max(),
        'count': group.count(),
        'mean': group.mean(),
    }
def draw(deck, n=5):
    """Randomly sample *n* elements from *deck* (without replacement)."""
    shuffled_positions = np.random.permutation(len(deck))
    return deck.take(shuffled_positions[:n])
def regress(data, yvar, xvars):
    """Fit an OLS regression of data[yvar] on data[xvars] plus an intercept.

    :param data: DataFrame holding both the response and predictor columns.
    :param yvar: name of the response column.
    :param xvars: list of predictor column names.
    :return: the fitted parameters (pandas Series indexed by predictor name).
    """
    Y = data[yvar]
    # BUG FIX: copy the predictor slice — assigning the intercept column on
    # the raw selection mutates (or warns about mutating) the caller's frame.
    X = data[xvars].copy()
    X['intercept'] = 1.
    result = sm.OLS(Y, X).fit()
    return result.params
def get_top_amounts(group, key, n=5):
    """Return the *n* largest contribution totals within *group*, aggregated
    by *key* (sums the 'contb_receipt_amt' column), in descending order."""
    totals = group.groupby(key)['contb_receipt_amt'].sum()
    ranked = totals.sort_values(ascending=False)
    return ranked[:n]
if __name__ == "__main__":
np.random.seed(0)
pd.set_option('display.width', 100000)
df = DataFrame({'key1': ['a', 'a', 'b', 'b', 'a'],
'key2': ['one', 'two', 'one', 'two', 'one'],
'data1': np.random.randn(5),
'data2': np.random.randn(5)})
# print(df, '\n')
# # # # 按key1进行分组,并计算data1列的平均值
grouped = df['data1'].groupby(df['key1'])
# # # # 变量grouped是一个GroupBy对象.它实际上还没有进行任何计算,只是含有一些有关分组建df['key1']的中间数据而已.即,grouped已经有了接下来对各组执行运算所需的一切信息
# # # # 数据(Series)根据分组键进行了聚合,产生了一个新的Series,其索引为key1列中的唯一值.之所以结果索引的名字为key1,是因为原始DF的列df['key1']就叫这个名字
# print(grouped.mean())
# # # # 一次传入多个数组,通过两个键对数据进行了分组,得到的Series具有一个层次化索引(由唯一的键对组成)
means = df['data1'].groupby([df['key1'], df['key2']]).mean()
# print(means)
# # # # 分组键可以是任何长度适当的数组
states = np.array(['Ohio', 'California', 'California', 'Ohio', 'Ohio'])
years = np.array([2005, 2005, 2006, 2005, 2006])
# print(df['data1'].groupby([states,years]).mean())
# # # # 将列名(可以是字符串,数字或其它python对象)用作分组键
# print(df.groupby('key1').mean())
# print(df.groupby(['key1','key2']).mean())
# # # # groupby的size返回一个含有分组大小的series
size = df.groupby(['key1', 'key2']).size()
# print(size)
# # # 对分组进行迭代
# # # # Groupby对象支持迭代,可以产生一组二元元组(由分组名和数据块组成).
# for name,group in df.groupby('key1'):
# print(name)
# print(group)
# # # # 对于多重键的情况,元组的第一个元素将会是由键值组成的元组
# for (k1,k2),group in df.groupby(['key1','key2']):
# print(k1,k2)
# print(group)
# # # # 将数据片段做成一个字典
pieces = dict(list(df.groupby('key1')))
# print(pieces['b'])
# # # # groupby默认实在axis=0上进行分组的,通过设置也可以在其他任何轴上进行分组
# print(df.dtypes)
grouped = df.groupby(df.dtypes, axis=1)
dic = dict(list(grouped))
# print(dic)
# # # 选取一个或一组列
# # # # 对于由DF产生的groupby对象,如果用一个(单个字符串)或一组(字符串数组)列名对其进行索引.就能实现选取部分列进行聚合的目的
# print(df['data1'].groupby(df['key1']))
# print(df[['data2']].groupby(df['key1']))
# # # # 尤其对于大数据集,很可能只需要对部分列进行聚合.例如,在前面那个数据集中,如果只需要计算data2列的平均值并以DF形式得到结果:
# print(df.groupby(['key1','key2'])[['data2']].mean())
# # # # 这种索引操作返回的对象是一个已分组的DF(如果传入的是列表或数组)或已分组的Series(如果传入的是标量形式的单个列名):
s_grouped = df.groupby(['key1', 'key2'])['data2']
# print(s_grouped.mean())
# # # 通过字典或Series进行分组
# # # # 除数组以外,分组信息还可以其它形式存在.
people = DataFrame(np.random.randn(5, 5),
columns=['a', 'b', 'c', 'd', 'e'],
index=['Joe', 'Steve', 'Wes', 'Jim', 'Travis'])
people.loc[2:3, ['b', 'c']] = np.nan
# print(people)
# # # # 假设一直列的分组关系,并希望根据分组计算列的总计:
mapping = {'a': 'red', 'b': 'red', 'c': 'blue',
'd': 'blue', 'e': 'red', 'f': 'orange'}
# # # # 将上面这组字典传给groupby即可:
by_column = people.groupby(mapping, axis=1)
# print(by_column.sum())
# # # # Series也有同样的功能,它可以被看作一个固定大小的映射,对于上面那个例子,如果用Series作为分组键,则pandas会检查Series以确保其索引跟分组轴对齐的:
map_series = Series(mapping)
# print(people.groupby(map_series, axis=1).count())
# # # 通过函数进行分组
# # # # 相较于字典或Series,python函数在定义分组映射关系时可以更有创意且更为抽象.任何被当作分组键的函数都会在各个索引值上被调用一次,其返回值就会被用作分组名称.
# print(people.groupby(len).sum())
# # # # 将函数跟数组,列表,字典,Seris混合使用也不是问题,因为任何东西最终都会被转换为数组
key_list = ['one', 'one', 'one', 'two', 'two']
# print(people.groupby([len,key_list]).min())
# # # 根据索引级别分组
# # # # 层次化索引数据集最方便的地方就在于它能够根据索引级别进行聚合.要实现该目的,通过level关键字传入级别编号或名称即可
columns = pd.MultiIndex.from_arrays([['US', 'US', 'US', 'JP', 'JP'], [1, 3, 5, 1, 3]], names=['cty', 'tenor'])
hier_df = DataFrame(np.random.randn(4, 5), columns=columns)
# print(hier_df)
# print(hier_df.groupby(level='cty',axis=1).count())
# # # 数据聚合
# # # # 许多常见的聚合运算都有就地计算数据集统计信息的优化实现
grouped = df.groupby('key1')
# # # # groupby会高效地对series进行切片,然后对各片调用piece.quantile(0.9),最后将这些结果组装成最终结果
# print(grouped['data1'].quantile(0.9))
# # # # 要使用自定义聚合函数,只需将其传入aggregate或agg方法即可
# print(grouped.agg(peak_to_peak))
# # # # 有些方法(如describe)也是可以用在这里的,即使严格来讲,它们并非聚合运算:
# print(grouped.describe())
# # # 案例:
tips = pd.read_csv('../../data/examples/tips.csv')
tips['tip_pct'] = tips['tip'] / tips['total_bill']
# print(tips.head())
# # # 面向列得多函数应用
# # # # 对Series或DF列得聚合运算其实就是使用aggregate(使用自定义函数)或嗲用诸如mean,std之类的方法.然而,如何针对不同的列使用不同的聚合函数,或一次应用多个函数?
grouped = tips.groupby(['sex', 'smoker'])
grouped_oct = grouped['tip_pct']
# print(grouped_oct.agg('mean'))
# # # # 如果传入一组函数或函数名,得到的DF的列就会以相应的函数命名
# print(grouped_oct.agg(['mean','std',peak_to_peak]))
# # # # 并非一定要接受groupby自动给出的那些列名,特别是lambda函数,他们的名称是'<lambda>',这样的辨识度就很低了.如果传入的是一个由(name,function)猿族组成的列表,则各元组的第一个元素就会被用作DF列名
# print(grouped_oct.agg([('foo','mean'),('bar',np.std)]))
# # # # 对于DF,还可以定义一组应用于全部列的函数,或不同的列应用不同的函数.
functions = ['count', 'mean', 'max']
result = grouped['tip_pct', 'total_bill'].agg(functions)
# print(result)
# # # # 结果DF拥有层次化的列,相当于分别对各列进行聚合,然后用concat将结果组装到一起(列名用作keys参数)
# print(result['tip_pct'])
# # # # 这里也可以传入带有自定义名称的远足列表
ftuples = [('Durchschnitt', 'mean'), ('Abweichung', np.var)]
# print(grouped['tip_pct','total_bill'].agg(ftuples))
# # # # 要对不同的列应用不同的函数.具体的办法是向agg传入一个从列明映射到函数的字典:
# print(grouped.agg({'tip':np.max,'size':'sum'}))
# print(grouped.agg({'tip_pct':['min','max','mean','std'],'size':'sum'}))
# # # 以"无索引"的形式返回聚合数据
# # # # 由于并不总是需要聚合数据都由唯一的分组键组成索引(可能还是层次化的),可以向groupby传入as_index=False以禁用该功能
# print(tips.groupby(['sex','smoker'],as_index=False).mean())
# # # 分组级运算和转换
# # # # 聚合不过是分组运算的其中一种而已,它是数据转换的一个特例.即,它接受能够将一维数组简化为标量值的函数.
# # # # transform和apply方法能够执行更多其它的分组运算
# # # # 例1:假设想要为一个DF添加一个用于存放各索引分组平均值的列,其中一个办法是先聚合再合并
# print(df)
k1_means = df.groupby('key1').mean().add_prefix('mean_')
# print(k1_means)
# print(pd.merge(df,k1_means,left_on='key1',right_index=True))
# # # # 在groupby上使用transform
key = ['one', 'two', 'one', 'two', 'one']
# print(people.groupby(key).mean(),'\n')
# # # # transform会将一个函数应用到各个分组,然后将结果放置到合适的位置上.如果各分组产生的是一个标量值,则该值就会被广播出去.
# print(people.groupby(key).transform(np.mean))
# # # # 从各组中减去平均值,为此,先创建一个距平化函数,然后将其传给transform
demeaned = people.groupby(key).transform(demean)
# print(demeaned)
# # # # 查看各组平均值是否为0
# print(demeaned.groupby(key).mean())
# # # Apply:一般性的"拆分-应用-合并"
# # # # 跟aggregate一样,transform也是一个有着严格条件的特殊函数,传入的函数只能产生两种结果,瑶妹产生一个可以广播的标量值(如np.mean),要么产生一个相同大小的结果数组.
# # # # 最一般化的groupby方法是apply.apply会将待处理的对象拆分成多个片段,然后对各片段调用传入的函数,最后尝试将各片段组合到一起
# # # # 例:根据分组选出最高的5各tip_pct值.首先,编写一个选取指定列具有最大值的行的函数
# print(top(tips,n=6))
# # # # 对smoker分组并用该函数调用apply
# # # # top函数在DF的各个片段上调用,然后结果由pandas.concat组装到一起,并以分组名称进行了标记.于是,最终结果就有了一个层次化索引,其内层索引值来自原DF
# print(tips.groupby('smoker').apply(top))
# # # # 如果传给apply的函数能够接受其它参数或关键字,则可以将这些内容放在函数名后面一并传入
# print(tips.groupby(['smoker', 'day']).apply(top, n=1, column='total_bill'))
result = tips.groupby('smoker')['tip_pct'].describe()
# print(result,'\n')
# print(result.unstack('smoker'))
# # # # 在groupby中,调用诸如describe之类的方法时,实际上只是应用了下面两条代码的快捷方式而已
f = lambda x: x.describe()
grouped.apply(f)
# # # 禁止分组键
# # # # 分组键会跟原始对象的索引共同构成结果对象中的层次化索引,将group_keys=False传入groupby即可禁止该效果
# print(tips.groupby('smoker',group_keys=False).apply(top))
# # # 分位数和桶分析
# # # # pandas有一些能够根据指定面元或样本分位数将数据拆分成多块的工具(比如cuthe qcut).将这些函数跟groupby结合起来,就能非常轻松地实现对数据集的桶(bucket)或分位数(quantile)分析了
frame = DataFrame({'data1': np.random.randn(1000),
'data2': np.random.randn(1000)})
factor = pd.cut(frame.data1, 4)
# print(factor[:10], '\n')
# # # # 由cut返回的factor对象可直接用于groupby,因此,可以像下面这样对data2做一些统计计算
grouped = frame.data2.groupby(factor)
# print(grouped.apply(get_stats).unstack(), '\n')
# # # # 这些都是长度相等的桶,要根据样本分位数得到大小相等的桶,使用qcut即可.传入labels=False即可只获取分位数的编号
grouping = pd.qcut(frame.data1, 10, labels=False)
grouped = frame.data2.groupby(grouping)
# print(grouped.apply(get_stats).unstack())
# # # 示例:用于特定分组的值填充缺失值
# # # # 对于缺失数据的清理工作,有时会用dropna将其过滤,而有时则可能希望用一个固定值或由数据集本身所衍生出来的值去填充NA值.这时就得使用fillna这个工具了
s = Series(np.random.randn(6))
s[::2] = np.nan
# print(s)
# print(s.fillna(s.mean()))
# # # # 如果需要对不同的分组填充不同的值,只需将数据分组,并使用apply和一个能够对各数据块调用fillna的函数即可
state = ['Ohio', 'New York', 'Vermont', 'Florida', 'Oregon', 'Nevada', 'California', 'Indaho']
group_by = ['East'] * 4 + ['West'] * 4
data = Series(np.random.randn(8), index=state)
data[['Vermont', 'Nevada', 'Indaho']] = np.nan
# print(data)
# # # # 用分组平均值去填充NA值
fill_mean = lambda g: g.fillna(g.mean())
# print(data.groupby(group_by).apply(fill_mean))
# # # # 此外,可以在代码中预定义各组的填充值,由于分组具有一个name属性,所以我们可以拿来用一下
fill_values = {'East': 0.5, 'West': -1}
fill_func = lambda g: g.fillna(fill_values[g.name])
# print(data.groupby(group_by).apply(fill_func))
# # # 示例:随机采样和排列
# # # # 假设想要从一个大数据集中随机抽取样本以进行蒙特卡罗模拟或其他分析工作."抽取"的方式有很多,其中一些的效率会比其他的高很多.一个办法是,选取np.random.permutation(N)的前K个元素,其中N为完整数据的大小,K为期望的样本大小.
# 红桃(Hearts),黑桃(Spades),梅花(Clubs),方片(Diamonds)
suits = ['H', 'S', 'C', 'D']
card_val = (list(range(1, 11)) + [10] * 3) * 4
base_names = ['A'] + list(range(2, 11)) + ['J', 'K', 'Q']
cards = []
for suit in suits:
cards.extend(str(num) + suit for num in base_names)
deck = Series(card_val, index=cards)
# print(deck)
# print(draw(deck))
# # # # 假设想要从每种花色中随机抽取两张牌.由于花色是牌名的最后一个字符,所以可以据此进行分组,并使用apply
get_suit = lambda card: card[-1] # 只要最后一个字母就可以了
# print(deck.groupby(get_suit).apply(draw, n=2))
# # # # 另一种办法
# print(deck.groupby(get_suit, group_keys=False).apply(draw, n=2))
# # # 分组加权平均数和相关系数
# # # # 根据groupby的"拆分-应用-合并"范式,df的列于列之间或两个series之间的运算(比如分组加权平均)成为一种标准作业
df = DataFrame({'category': ['a', 'a', 'a', 'a', 'b', 'b', 'b', 'b'],
'data': np.random.randn(8),
'weights': np.random.rand(8)})
# print(df)
# # # # 利用category计算分组加权平均数
grouped = df.groupby('category')
get_wavg = lambda g: np.average(g['data'], weights=g['weights'])
# print(grouped.apply(get_wavg))
# # # 来自Yahoo!Finance的数据集
close_px = pd.read_csv('../../data/examples/stock_px.csv', parse_dates=True, index_col=0)
# print(close_px.head(1))
# # # # 计算一个由日收益率(通过百分数变化计算)于SPX之间的年度相关系数组成的DF
rets = close_px.pct_change().dropna()
spx_corr = lambda x: x.corrwith(x['SPX'])
by_year = rets.groupby(lambda x: x.year)
# print(by_year.apply(spx_corr).head())
# # # # 计算列与列之间的相关系数
# print(by_year.apply(lambda g:g['AAPL'].corr(g['MSFT'])))
# # # 示例:面向分组的线性回归
# # # # 利用grouby执行更为复杂的分组统计分析,只要函数返回的是pandas对象或标量值即可.
# # # # 例如:定义一个函数对各数据块执行普通最小二乘法回归
# # # # 按年计算AAPL对SPX收益率的线性回归
# print(by_year.apply(regress,'AAPL',['SPX']).head())
# # # 透视表和交叉表
# # # # 透视表是各种电子表格程序和其他数据分析软件中一种常见的数据汇总工具.他根据一个或多个键对数据进行聚合,并根据行和列上的分组键将数据分配到各个矩形区域中.在pandas中,可以通过groupby功能以及(能够利用层次化索引的)重塑运算制作透视表.DF有一个pivot_table方法,此外还有一个顶级的pandas.pivot_table函数.除能为groupby提供便利之外,pivot_table还可以添加分项小计(也叫margins)
# # # # 根据sex和smoker计算分组平均数,并将sex和smoker放到行上
# print(tips.pivot_table(index=['sex', 'smoker']))
# # # # 聚合tip_pct和size,而且根据day进行分组
# print(tips.pivot_table(['tip_pct','size'],index=['sex','day'],columns='smoker'))
# # # # 对透视表做进一步处理,传入margins=True添加分项小计.这将会添加标签为all的行和列,其值对应于单个等级中所有数据的分组统计.
# print(tips.pivot_table(['tip_pct', 'size'], index=['sex', 'day'], columns='smoker', margins=True))
# # # # 要使用其他的聚合函数,将其传给aggfunc即可.例如,使用count或len即可得到有关分组大小的交叉表
# print(tips.pivot_table('tip_pct', index=['sex', 'smoker'], columns='day', aggfunc=len, margins=True))
# # # # 针对空值,可以设置fill_value
# print(tips.pivot_table('size', index=['time', 'sex', 'smoker'], columns='day', aggfunc='sum', fill_value=0))
# # # 交叉表:crosstab
# # # # 交叉表是一种用于计算分组频率的特殊透视表
data = pd.read_csv('../../data/examples/Wikipedia.csv')
# # # # 根据性别和用手习惯对这段数据进行汇总统计,虽然用pivot_table可以实现该功能,但是用pandas.crosstab函数会更方便
# print(pd.crosstab(data.Gender,data.Handedness,margins=True))
# # # # crosstab的前两个参数可以是数组,Series或数组列表
# print(pd.crosstab([tips.time,tips.day],tips.smoker,margins=True))
# # # 示例:2010联邦选举委员会数据库
fec = pd.read_csv('../../data/dataSets/fec/P00000001-ALL.csv', low_memory=False)
# print(fec.info(),'\n')
# print(fec.loc[123456])
# # # # 通过unique获取全部的候选人名单
unique_cands = fec.cand_nm.unique()
# print(unique_cands)
# # # # 利用字典说明党派关系
parties = {'Bachmann, Michelle': 'Republican',
'Cain, Herman': 'Republican',
'Gingrich, Newt': 'Republican',
'Huntsman, Jon': 'Republican',
'Johnson, Gary Earl': 'Republican',
'McCotter, Thaddeus G': 'Republican',
'Obama, Barack': 'Democrat',
'Paul, Ron': 'Republican',
'Pawlenty, Timothy': 'Republican',
'Perry, Rick': 'Republican',
"Roemer, Charles E. 'Buddy' III": 'Republican',
'Romney, Mitt': 'Republican',
'Santorum, Rick': 'Republican'}
# # # # 通过以上映射以及Series对象的map方法,可以根据候选人姓名得到一组党派信息
# print(fec.cand_nm[123456:123461])
fec['party'] = fec.cand_nm.map(parties)
# print(fec['party'].value_counts())
# # # # 注意,1.该数据既包括赞助也包括退款(负的出资额)
# print((fec.contb_receipt_amt>0).value_counts())
# # # # 为简化分析,限定该数据集只能由正的出资额
fec = fec[fec.contb_receipt_amt > 0]
# # # # 创建一个只包含主要候选人的子集
fec_mrbo = fec[fec.cand_nm.isin(['Obama, Barack', 'Romney, Mitt'])]
# # # 根据职业和雇主计赞助信息
# # # # 根据职业机算出资总额
# print(fec.contbr_occupation.value_counts()[:10])
occ_mapping = {
'INFORMATION REQUESTED PER BEST EFFORTS': 'NOT PROVIDED',
'INFORMATION REQUESTED': 'NOT PROVIDED',
'INFORMATION REQUESTED (BEST EFFORTS)': 'NOT PROVIDED',
'C.E.O.': 'CEO'
}
# # # # 如果没有提供相关映射,则返回x
f = lambda x: occ_mapping.get(x, x)
fec.contbr_occupation = fec.contbr_occupation.map(f)
emp_mapping = {
'INFORMATION REQUESTED PER BEST EFFORTS': 'NOT PROVIDED',
'INFORMATION REQUESTED': 'NOT PROVIDED',
'SELF': 'SELF-EMPLOYED',
'SELF EMPLOYED': 'SELF-EMPLOYED',
}
f = lambda x: emp_mapping.get(x, x)
fec.contbr_employer = fec.contbr_employer.map(f)
# # # # 通过pivot_table根据党派和职业对数据进行聚合,然后过滤掉总出资额不足200万美元的数据
by_occupation = fec.pivot_table('contb_receipt_amt', index='contbr_occupation', columns='party', aggfunc='sum')
over_2mm = by_occupation[by_occupation.sum(1) > 2000000]
# print(over_2mm.head())
# over_2mm.plot(kind='barh')
# plt.show()
# # # # 根据职业和估值进行聚合
grouped = fec_mrbo.groupby('cand_nm')
# print(grouped.apply(get_top_amounts, 'contbr_occupation', n=7))
# print(grouped.apply(get_top_amounts, 'contbr_employer', n=10))
# # # 对出资额分组
# # # # 利用cut函数根据出资额的大小将数据离散化到多个面元中
bins = np.array([0, 1, 10, 100, 1000, 10000, 100000, 1000000, 10000000])
labels = pd.cut(fec_mrbo.contb_receipt_amt, bins)
# print(labels)
# # # # 根据侯选人姓名以及面元标签对数据进行分组
grouped = fec_mrbo.groupby(['cand_nm', labels])
# print(grouped.size().unstack(0))
bucket_sums = grouped.contb_receipt_amt.sum().unstack(0)
# print(bucket_sums)
normed_sums = bucket_sums.div(bucket_sums.sum(axis=1), axis=0)
# print(normed_sums)
# normed_sums[:-2].plot(kind='barh',stacked=True)
# plt.show()
# # # 根据州统计赞助信息
# # # # 根据候选人和州对数据进行聚合
grouped = fec_mrbo.groupby(['cand_nm', 'contbr_st'])
totals = grouped.contb_receipt_amt.sum().unstack(0).fillna(0)
totals = totals[totals.sum(1) > 100000]
# print(totals.head(10))
# # # # 对各行除以总赞助额,就会得到各候选人在各州的总赞助额比例
percent = totals.div(totals.sum(1), axis=0)
# print(percent.head(10))
|
import random
import turtle
import time
# Tuning constants for the genetic-algorithm rocket demo.
lifespan = 200  # number of movement ticks each rocket lives per generation
popSize = 10  # number of rockets in the population
speed = 10  # max step per tick along each axis (each move stays in a +/-speed box)
screen = turtle.Screen()  # higher lifespan/popSize slows the sim; lower speed shortens steps
turtle.tracer(0,0)  # disable animation delay so turtles redraw as fast as possible
def Update ():
    """Run one generation: simulate every rocket for `lifespan` ticks,
    score them, breed a replacement population by fitness-weighted
    selection with crossover/mutation, then recurse into the next
    generation."""
    for i in range(lifespan):
        # One simulation tick: each rocket applies its i-th gene.
        # NOTE(review): RandomVectors appends a whole fresh batch of genes
        # every tick, so vectorList keeps growing; it looks like it was
        # meant to be called once per rocket — confirm before changing.
        for a in population:
            a.RandomVectors()
            a.UpdatePosition(i)
            a.UpdateFitness()
            #print(a.fitness)
    for a in population:
        # After a rocket has lived out its lifespan, hide its turtle.
        a.turt.hideturtle()
    # chancePool is a roulette wheel: each rocket appears round(fitness)
    # times, so fitter rockets are proportionally more likely parents.
    chancePool = []
    for a in population:
        chance = round(a.fitness)
        copy = a  # NOTE(review): this binds a reference, not an actual copy
        for b in range(chance):
            chancePool.append(copy)
    #print(len(chancePool))
    # fakePop collects children separately so breeding does not disturb
    # the current population while it is still being read.
    fakePop = []
    for b in range(popSize):
        # Pick two parents at random from the fitness-weighted pool.
        parA = chancePool[random.randint(0, len(chancePool)-1)]
        parB = chancePool[random.randint(0, len(chancePool)-1)]
        child = Rocket()
        child.RandomVectors()  # seed genes so SetVector can overwrite them
        for c in range(lifespan):
            # Crossover: gene from parent A (z <= 25) or parent B
            # (25 < z < 49); z == 50 mutates to a brand-new random gene.
            # NOTE(review): z == 49 matches no branch, so that gene
            # silently keeps the child's own random seed value.
            z = random.randint(0,50)
            if z <= 25:
                child.SetVector(parA.vectorList[c], c)
            elif z < 49 and z > 25:
                child.SetVector(parB.vectorList[c], c)
            elif z == 50:
                child.SetVector((random.randint(-speed, speed),(random.randint(-speed, speed))), c)
        fakePop.append(child)
    for i in range(popSize):
        population[i] = fakePop[i]
    # NOTE(review): unbounded recursion — every generation adds a stack
    # frame, so this eventually hits Python's recursion limit.
    Update()
class Rocket():
    """One rocket in the population.

    Attributes:
        vectorList: per-tick (dx, dy) steps; acts as the genome.
        position: current [x, y] position on the screen.
        turt: the turtle used to draw this rocket.
        fitness: score based on distance to the target circle at (0, 200).
        won: True once the rocket has entered the target box.
        startTime: creation time, used to reward faster winners.
    """

    def __init__(self):
        self.vectorList = []
        self.position = [0, 0]
        self.turt = turtle.Turtle()
        self.fitness = 0
        self.won = False
        # Fix: time.clock() was removed in Python 3.8; perf_counter() is
        # the monotonic replacement for measuring elapsed time.
        self.startTime = time.perf_counter()
        self.turt.penup()
        self.turt.setpos(0, 0)
        self.turt.shape("turtle")
        self.turt.speed("fastest")
        # self.turt.pendown()

    def RandomVectors(self):
        """Append one random (dx, dy) step per tick of lifespan."""
        for i in range(lifespan):
            self.vectorList.append((random.randint(-speed, speed), random.randint(-speed, speed)))

    def SetVector(self, vector, vectorIndex):
        """Replace the gene at vectorIndex with the given vector."""
        self.vectorList[vectorIndex] = vector

    def UpdatePosition(self, vectIndex):
        """Advance the rocket by its vectIndex-th step and redraw."""
        if not self.won:
            self.position[0] = self.position[0] + self.vectorList[vectIndex][0]
            self.position[1] = self.position[1] + self.vectorList[vectIndex][1]
            self.turt.setpos(self.position[0], self.position[1])
        else:
            # Winners park on the target so they stay visible.
            self.turt.setpos(0, 200)
        turtle.update()

    def UpdateFitness(self):
        """Score the rocket: large time-based bonus inside the 40x40
        target box around (0, 200), otherwise inverse distance."""
        d = abs(self.turt.distance(0, 200))
        if (self.turt.pos()[0] >= -20 and self.turt.pos()[0] <= 20) and (self.turt.pos()[1] >= 180 and self.turt.pos()[1] <= 220):
            # Faster rockets score higher (inverse of elapsed time).
            self.fitness = (100 / (time.perf_counter() - self.startTime) * .5)
            self.won = True
        else:
            self.fitness = (1 / (d + 0.1) * 1000)
# Draw the target circle at (0, 200) with a throwaway turtle.
circ = turtle.Turtle()
circ.penup()
circ.setpos(0, 200)
circ.pendown()
circ.circle(10)
circ.hideturtle()
# Create the initial population and start the (recursive) generation loop.
population = [Rocket() for i in range(popSize)]
Update()
input()  # keep the window open until the user presses Enter
|
#!/usr/bin/env python
# encoding: utf-8
"""
@author: Wayne
@contact: wangye.hope@gmail.com
@software: PyCharm
@file: Find the Smallest Divisor Given a Threshold
@time: 2019/12/9 15:46
"""
class Solution:
    def smallestDivisor(self, nums, threshold):
        """Return the smallest divisor d >= 1 such that
        sum(ceil(x / d) for x in nums) <= threshold.

        Binary search over d: the sum is non-increasing in d, so the
        answer is the leftmost d whose sum fits under the threshold.
        """
        lo, hi = 1, max(nums)
        while lo < hi:
            mid = (lo + hi) // 2
            # -(-value // mid) is integer ceiling division.
            total = sum(-(-value // mid) for value in nums)
            if total > threshold:
                # mid is too small; discard it and everything below.
                lo = mid + 1
            else:
                # mid works; keep it as a candidate and search lower.
                hi = mid
        return lo
# Ad-hoc smoke tests (expected output: 1, 5, 3, 4).
so = Solution()
print(so.smallestDivisor([1, 2, 3], 1000000))
print(so.smallestDivisor(nums=[1, 2, 5, 9], threshold=6))
print(so.smallestDivisor(nums=[2, 3, 5, 7, 11], threshold=11))
print(so.smallestDivisor(nums=[19], threshold=5))
|
import socket, sys, time, platform, struct
from PyQt4 import QtGui, QtCore
def ip(self, packet, extractedAttIndex, printKey):
    """Unpack the IPv4 header of a captured packet.

    Args:
        self: sniffer object providing `os`, `windows`, and the
            `unpackedInfo` output list.
        packet: raw captured bytes. On Windows raw sockets this is the
            bare IP datagram; elsewhere (e.g. Linux AF_PACKET) the
            14-byte Ethernet header comes first — TODO confirm against
            the capture code, which is outside this function.
        extractedAttIndex: 0 to handle every attribute, or 1-11 to pick
            one (version, header length, total length, identification,
            flags, fragment offset, TTL, protocol, checksum, source
            address, destination address).
        printKey: True appends the selected line(s) to self.unpackedInfo;
            False returns the selected attribute as a string instead.

    Returns:
        The selected attribute string when printKey is False and
        extractedAttIndex is 1-11; otherwise None.
    """
    # Header lengths.
    ethHeaderLength = 14
    ip_hlen = 20
    # Locate the 20-byte IP header inside the packet.
    if self.os == self.windows:
        begin = 0
    else:
        # Fix: `begin`/`end` were previously assigned only on the Windows
        # branch, raising NameError on every other platform. Non-Windows
        # captures skip the Ethernet header first (which is why
        # ethHeaderLength was defined but unused).
        begin = ethHeaderLength
    end = begin + ip_hlen
    ipHeader = packet[begin:end]
    # Unpack from network (big-endian) byte order:
    # B = 1-byte int, H = 2-byte int, 4s = 4-byte string.
    ipHeaderUnpacked = struct.unpack('!BBHHHBBH4s4s', ipHeader)
    # First byte holds version (high nibble) and header length (low nibble).
    ipVersionAndHeaderLength = ipHeaderUnpacked[0]
    ipVersion = ipVersionAndHeaderLength >> 4
    ip_hlen = ipVersionAndHeaderLength & 0xF
    # Total length and identification (2 bytes each).
    ipTotalLength = ipHeaderUnpacked[2]
    ipIdentification = ipHeaderUnpacked[3]
    # Flags (top 3 bits) and fragment offset (low 13 bits) share 2 bytes.
    ipFlagsAndFragmentOffset = ipHeaderUnpacked[4]
    ipFlags = ipFlagsAndFragmentOffset >> 13
    ipFragmentOffset = ipFlagsAndFragmentOffset & 0x1FFF
    ip_TTL = ipHeaderUnpacked[5]
    ipProtocol = ipHeaderUnpacked[6]
    ipHeaderChecksum = ipHeaderUnpacked[7]
    ip_src_addr = socket.inet_ntoa(ipHeaderUnpacked[8])
    ip_dest_addr = socket.inet_ntoa(ipHeaderUnpacked[9])
    # One (display line, raw value) pair per selectable attribute index.
    # Hex formatting mirrors how wireshark shows these fields.
    fields = {
        1: ('IP Version: ' + str(ipVersion), str(ipVersion)),
        2: ('Header Length: ' + str(ip_hlen) + ' 32-bit words', str(ip_hlen)),
        3: ('Total Length: ' + str(ipTotalLength) + ' bytes', str(ipTotalLength)),
        4: ('Identification: ' + format(ipIdentification, '#04X') + ' , ' + str(ipIdentification), format(ipIdentification, '#04X')),
        5: ('Flags: ' + format(ipFlags, '#04X') + ' , ' + str(ipFlags), format(ipFlags, '#04X')),
        6: ('Fragment Offset: ' + str(ipFragmentOffset) + ' eight-byte blocks', str(ipFragmentOffset)),
        7: ('Time to Live: ' + str(ip_TTL) + ' hops', str(ip_TTL)),
        8: ('Protocol: ' + str(ipProtocol), str(ipProtocol)),
        9: ('Header Checksum: ' + format(ipHeaderChecksum, '#04X'), format(ipHeaderChecksum, '#04X')),
        10: ('Source Address: ' + str(ip_src_addr), str(ip_src_addr)),
        11: ('Destination Address: ' + str(ip_dest_addr), str(ip_dest_addr)),
    }
    if printKey == True:
        # Print mode: banner first, then each requested attribute line.
        self.unpackedInfo.append('\n********************\n******** IP ********\n********************')
        for index in sorted(fields):
            if (extractedAttIndex == index) or (extractedAttIndex == 0):
                self.unpackedInfo.append(fields[index][0])
    else:
        # Return mode: hand back just the requested raw value
        # (None when extractedAttIndex is 0 or unknown, as before).
        selected = fields.get(extractedAttIndex)
        if selected is not None:
            return selected[1]
from aws_parsecf.parser import Parser
import boto3
import json
import yaml
def load_json(stream, default_region=boto3.Session().region_name, parameters={}):
    """Parse a CloudFormation JSON template from a file-like stream and
    resolve its conditions/intrinsics via _load.

    NOTE(review): both defaults are evaluated once at import time — the
    boto3 session region is frozen then, and the shared {} default is
    the classic mutable-default pitfall (harmless only while it is
    never mutated downstream — verify in Parser).
    """
    return _load(json.load(stream), default_region, parameters)
def loads_json(string, default_region=boto3.Session().region_name, parameters={}):
    """Parse a CloudFormation JSON template from a string and resolve
    its conditions/intrinsics via _load.

    NOTE(review): defaults are evaluated once at import time; see the
    same caveat on load_json about the frozen region and shared {}.
    """
    return _load(json.loads(string), default_region, parameters)
def load_yaml(stream_or_string, default_region=boto3.Session().region_name, parameters={}):
    """Parse a CloudFormation YAML template (stream or string) and
    resolve its conditions/intrinsics via _load.

    WARNING(review): yaml.load without an explicit Loader can run
    arbitrary constructors on untrusted input; prefer yaml.safe_load or
    yaml.load(..., Loader=yaml.SafeLoader) if templates are untrusted.
    NOTE(review): defaults are evaluated once at import time; see
    load_json for the frozen-region / mutable-{} caveat.
    """
    return _load(yaml.load(stream_or_string), default_region, parameters)
def _load(root, default_region, parameters={}):
    """Resolve a parsed CloudFormation template in place and return it:
    condition expressions are evaluated to booleans and resources whose
    conditions are false are dropped (see doctests below).

    >>> import json
    >>> print(json.dumps(_load({
    ...     'Conditions': {'ConditionName': {'Fn::Equals': [1, 2]}},
    ...     'Resources': {'SomeResource': {'Condition': 'ConditionName', 'Type': 'AWS::Lambda::Function'}}
    ...     }, 'us-east-1'), sort_keys=True))
    {"Conditions": {"ConditionName": false}, "Resources": {}}

    >>> print(json.dumps(_load({
    ...     'Conditions': {'ConditionName': {'Fn::Equals': [1, 1]}},
    ...     'Resources': {'SomeResource': {'Condition': 'ConditionName', 'Type': 'AWS::Lambda::Function'}}
    ...     }, 'us-east-1'), sort_keys=True))
    {"Conditions": {"ConditionName": true}, "Resources": {"SomeResource": {"Condition": "ConditionName", "Type": "AWS::Lambda::Function"}}}

    >>> print(json.dumps(_load({
    ...     'Resources': {'SomeResource': {'Condition': {'DateGreaterThan': {'aws:CurrentTime': '2013-12-15T12:00:00Z'}, 'Type': 'AWS::IAM::Role'}}}
    ...     }, 'us-east-1'), sort_keys=True))
    {"Resources": {"SomeResource": {"Condition": {"DateGreaterThan": {"aws:CurrentTime": "2013-12-15T12:00:00Z"}, "Type": "AWS::IAM::Role"}}}}

    >>> print(json.dumps(_load({
    ...     'Conditions': {'ConditionName': {'Fn::Equals': [1, 2]}},
    ...     'Resources': {'SomeResource': {'Attribute': {'Fn::If': ['ConditionName', '1', {'Ref': 'AWS::NoValue'}]}, 'Type': 'AWS::Lambda::Function'}}
    ...     }, 'us-east-1'), sort_keys=True))
    {"Conditions": {"ConditionName": false}, "Resources": {"SomeResource": {"Type": "AWS::Lambda::Function"}}}

    >>> print(json.dumps(_load({
    ...     'Conditions': {'ConditionName': {'Fn::Equals': [{'Ref': 'SomeBucket'}, 'SomeBucketName']}},
    ...     'Resources':
    ...     {'SomeBucket': {'Properties': {'BucketName': 'SomeBucketName'}, 'Type': 'AWS::S3::Bucket'},
    ...     'SomeResource': {'Attribute': {'Fn::If': ['ConditionName', '1', '2']}, 'Type': 'AWS::Lambda::Function'}}
    ...     }, 'us-east-1'), sort_keys=True, indent=4))
    {
        "Conditions": {
            "ConditionName": true
        },
        "Resources": {
            "SomeBucket": {
                "Properties": {
                    "BucketName": "SomeBucketName"
                },
                "Type": "AWS::S3::Bucket"
            },
            "SomeResource": {
                "Attribute": "1",
                "Type": "AWS::Lambda::Function"
            }
        }
    }
    """
    # A region is required to resolve region-dependent constructs.
    if not default_region:
        raise TypeError("No default region in aws configuration, please specify one (with `aws configure` or `default_region=`)")
    parser = Parser(root, default_region, parameters)
    # Presumably explode() rewrites intrinsic functions/conditions in
    # place and cleanup() removes false-conditioned resources — confirm
    # against aws_parsecf.parser.
    parser.explode(root)
    parser.cleanup(root)
    return root
|
from django.urls import path
from . import views
# Route table: two role-specific home pages plus a help page.
# URL names mirror the view names so {% url %} lookups match.
urlpatterns = [
    path("buyerHome", views.buyerHome, name="buyerHome"),
    path("bidderHome", views.bidderHome, name="bidderHome"),
    path("help", views.help, name="help"),  # NOTE: view named `help` shadows the builtin in views.py
]
|
import json
from flask import jsonify, request, abort
from marshmallow import ValidationError
from api.db import db
from api.utils.utils import SUBSECTIONS_GAME
from api.views.validate import validate
from api.db.model import PageSchema, FilterSchema
from api.utils.authorizers import require_api_key
def _process_response(resource_attributes: str, games: tuple, pagination=False):
    """builds a response list of objects for each game"""
    # Field names grouped by subsection. A subsection is included when it
    # was requested explicitly, or when no subsection was specified at
    # all (falsy resource_attributes means "include everything").
    sections = (
        ('info', ('name', 'description', 'developer')),
        ('minimum_requirements', ('ram_min', 'cpu_min', 'storage_min', 'gpu_min', 'OS_min')),
        ('recommended_requirements', ('ram_rec', 'cpu_rec', 'storage_rec', 'gpu_rec', 'OS_rec')),
    )
    payload = []
    for game in games:
        entry = {'id': game.get('id')}
        for section_name, keys in sections:
            if resource_attributes == section_name or not resource_attributes:
                entry[section_name] = {key: game.get(key) for key in keys}
        payload.append(entry)
    return jsonify(payload)
@require_api_key
def get_game(resource=None):
    """View for game queries: validate query parameters (page, filters,
    name, dev) and pass them to the db layer.

    resource optionally names one subsection of the game record and must
    be a key of SUBSECTIONS_GAME; unknown subsections yield a 404.
    """
    if resource and resource not in SUBSECTIONS_GAME:
        return json.dumps(f'No endpoint for resource {resource}'), 404
    page=None
    filters = None
    name = request.args.get('name')
    developer = request.args.get('dev')
    # Optional pagination object, passed as a JSON-encoded query param.
    if request.args.get('page'):
        page_schema = PageSchema()
        try:
            page = page_schema.loads(request.args['page'])
        except json.JSONDecodeError as err:
            # Malformed JSON in the query string.
            return validate({'page': err.msg})
        except ValidationError as err:
            # JSON parsed but failed schema validation.
            return validate(err.messages)
    # Optional list of filter objects, also JSON-encoded.
    if request.args.get('filters'):
        filter_schema = FilterSchema(many=True)
        try:
            filters = filter_schema.loads(request.args['filters'])
        except json.JSONDecodeError as err:
            return validate({'filters': err.msg})
        except ValidationError as err:
            return validate(err.messages)
    # With a subsection, restrict the query to that subsection's columns.
    if resource:
        games = db.game_query(*SUBSECTIONS_GAME[resource],
                              page=page,
                              filters=filters,
                              name=name,
                              developer=developer)
    else:
        games = db.game_query(page=page,
                              filters=filters,
                              name=name,
                              developer=developer)
    if games:
        return _process_response(resource, games)
    else:
        return json.dumps('No resources to get'), 404
#This program is to calculate the Area of a Cylinder
from math import pi
# Read the cylinder dimensions, then print its total surface area:
# two circular caps (2*pi*r^2) plus the lateral surface (2*pi*r*h).
radius = float(input("Enter a value for radius: "))
height = float(input("Enter a value for height: "))  # fix: prompt said "heigh"
Area = (2*pi*radius**2)+height*(2*pi*radius)
print("Area of the Cylinder is ", round(Area,4))
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
from django.conf import settings
import django.utils.timezone
class Migration(migrations.Migration):
    """Initial migration: creates the Appointment and Newpatient tables,
    both with an `author` foreign key to the project's user model.

    NOTE(review): ForeignKey without on_delete is pre-Django-2.0 syntax;
    this migration will not load on Django >= 2.0 as written.
    """

    dependencies = [
        # Required because both models reference AUTH_USER_MODEL.
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]

    operations = [
        migrations.CreateModel(
            name='Appointment',
            fields=[
                ('id', models.AutoField(serialize=False, verbose_name='ID', auto_created=True, primary_key=True)),
                ('patient_first_name', models.CharField(max_length=15, null=True)),
                ('patient_last_name', models.CharField(max_length=15, null=True)),
                ('patient_phone_number', models.CharField(max_length=10, null=True)),
                ('reason', models.TextField()),
                ('created_date', models.DateTimeField(default=django.utils.timezone.now)),
                ('appointment_date', models.DateTimeField(blank=True, null=True)),
                ('author', models.ForeignKey(to=settings.AUTH_USER_MODEL)),
            ],
            options={
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='Newpatient',
            fields=[
                ('id', models.AutoField(serialize=False, verbose_name='ID', auto_created=True, primary_key=True)),
                ('patient_first_name', models.CharField(max_length=15, null=True)),
                ('patient_last_name', models.CharField(max_length=15, null=True)),
                ('patient_phone_number', models.CharField(max_length=10, null=True)),
                ('patient_date_of_birth', models.DateField(max_length=8)),
                ('patient_address', models.CharField(max_length=100)),
                ('city', models.CharField(max_length=50)),
                ('author', models.ForeignKey(to=settings.AUTH_USER_MODEL)),
            ],
            options={
            },
            bases=(models.Model,),
        ),
    ]
|
import requests
# DCP PDF locations by catalog year:
## 2015 - 2016 : https://www.liberty.edu/media/1270/dcps/1516/
## 2016 - 2019 : https://www.liberty.edu/media/1270/
base_url = "https://www.liberty.edu/media/1270/"
pdf = '.pdf'  # substring used to detect PDF entries in the list file
def download_pdf(my_url, file_name):
    """Stream my_url into 'PDF/DCP 2018 - 2019/<file_name>' in 1 KiB chunks."""
    # Fix: use the response as a context manager so the streamed
    # connection is released even if writing fails; previously the
    # response was never closed. Empty keep-alive chunks are skipped.
    with requests.get(my_url, stream=True) as r:
        with open('PDF/DCP 2018 - 2019/' + file_name, 'wb') as f:
            for chunk in r.iter_content(chunk_size=1024):
                if chunk:
                    f.write(chunk)
# Read the list of PDF file names (one per line) and download each.
with open("2018 - 2019 PDF.txt", 'r') as pdf_list:
    lines = pdf_list.readlines()
    for line in lines:
        if pdf in line:
            # Keep everything up to and including '.pdf', dropping the
            # trailing newline or any trailing text on the line.
            index = line.index('.pdf') + 4
            file_name = line[0:index]
            my_url = base_url + file_name
            download_pdf(my_url, file_name)
# Exercise (translated): read two numbers (a and b) from the user and
# print which of them is larger, in the form: "a jest wieksze!"
# ("a is larger!"). User-facing strings stay in Polish.
print(" w tym cwiczeniu bedziemy porownywac liczby")
a = int(input("prosze podaj liczbe a"))
b = int(input("prosze podaj liczbe b"))
if a > b:
    print(" a jest wieksze")
elif a < b:
    print("b jest wieksze")
else:
    # Fix: the original claimed b was larger when the values were equal.
    print("liczby sa rowne")
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.