text stringlengths 38 1.54M |
|---|
import socket

# NOTE(review): an AF_INET6 socket is created but never used, and
# gethostbyname() only performs IPv4 resolution (getaddrinfo() would be
# needed for IPv6) -- confirm which protocol family is actually wanted.
s = socket.socket(socket.AF_INET6, socket.SOCK_STREAM)
ip = socket.gethostbyname("www.google.com")
# Bug fix: "print ip" is Python 2 statement syntax; print is a function in Python 3.
print(ip)
|
"""
Created on 30 oct. 11:20 2020
@author: HaroldKS
"""
from PyQt5 import QtWidgets, QtGui, QtCore
from PyQt5.QtWidgets import *
from gui.panel import Panel
from gui.board import BoardGUI
from faronona import FarononaState
from core import Color
from faronona import FarononaRules
from faronona import FarononaAction
from gui.div import Div
from copy import deepcopy
from utils.timer import Timer
from utils.trace import Trace
import argparse
import time
import sys
class FarononaGUI(QMainWindow):
    """Main window of the MAIC 2021 Fanorona game.

    Hosts the board widget and the score panel, wires up the menus, and
    runs the agent-vs-agent game loop with per-player timing and trace
    recording for replay.
    """

    depth_to_cover = 9
    # When True, a finished game is written to disk without prompting for a filename.
    automatic_save_game = False

    def __init__(self, app, shape, players, allowed_time=5.0, sleep_time=.500, first_player=-1, boring_limit=50,
                 parent=None):
        """Build the window and initialise a fresh game.

        Args:
            app: the running QApplication, used to pump events during animations.
            shape: board dimensions, e.g. (5, 9).
            players: dict mapping the player ids -1 and 1 to agent objects.
            allowed_time: total thinking time (seconds) credited to each player.
            sleep_time: pause (seconds) between animation steps.
            first_player: id (-1 or 1) of the player who moves first.
            boring_limit: number of uneventful moves before the game is stopped.
        """
        super(FarononaGUI, self).__init__(parent)
        self.app = app
        # Direction markers drawn on a square to show where a piece moved
        # (haut=up, bas=down, avant=forward, arriere=back, d*/g* = right/left diagonals).
        self.haut = Div("h")
        self.bas = Div("b")
        self.avant = Div("av")
        self.arriere = Div("ar")
        self.droitehaut = Div("dh")
        self.droitebas = Div("db")
        self.gauchehaut = Div("gh")
        self.gauchebas = Div("gb")
        self.red = Div("red")  # highlight applied to captured squares
        self.saved = True
        self.board_shape = shape
        self.players = players
        self.allowed_time = allowed_time
        self.sleep_time = sleep_time
        self.first_player = first_player
        self.just_stop = boring_limit
        self.setWindowTitle("[*] MAIC 2021 - Fanorona Game")
        self.statusBar()
        self.setWindowIcon(QtGui.QIcon("assets/icon.png"))
        # Board on the left, score/info panel on the right.
        layout = QHBoxLayout()
        layout.addStretch()
        self.board_gui = BoardGUI(self.board_shape)
        layout.addWidget(self.board_gui)
        layout.addSpacing(15)
        self.panel = Panel([players[-1].name, players[1].name])
        layout.addWidget(self.panel)
        layout.addStretch()
        content = QWidget()
        content.setLayout(layout)
        self.setCentralWidget(content)
        self.create_menu()
        self._reset()
        # self.trace = Trace(self.board.get_board_array())
        # self.random_player = AI(self.board.currentPlayer, self.board_size)

    def _reset(self):
        """Create a fresh state, board and trace for a new game."""
        self.done = False
        self.rewarding_move = False
        # NOTE(review): this BoardGUI is distinct from self.board_gui shown in
        # the layout; here it only serves as the source of the initial board state.
        self.board = BoardGUI(self.board_shape)
        self.board_gui.init_board(self.players)  # place the pieces on the visible board
        self.state = FarononaState(board=self.board.get_board_state(), next_player=self.first_player,
                                   boring_limit=self.just_stop)
        self.trace = Trace(self.state, players={-1: self.players[-1].name, 1: self.players[1].name})
        self.current_player = self.first_player

    def reset(self):
        """Public wrapper around _reset()."""
        self._reset()

    def create_menu(self):
        """Build the Game and Help menus with their actions and shortcuts."""
        menu = self.menuBar()
        # Game Menu
        game_menu = menu.addMenu("Game")
        # New Game Submenu
        new_game_action = QAction(QtGui.QIcon.fromTheme("document-new", QtGui.QIcon("../assets/New file.png")),
                                  'New Game',
                                  self)
        new_game_action.setShortcut(QtGui.QKeySequence.New)
        new_game_action.setStatusTip("New game Luncher")
        new_game_action.triggered.connect(self.new_game_trigger)
        game_menu.addAction(new_game_action)
        game_menu.addSeparator()
        # Load Game Submenu
        load_game_action = QAction(QtGui.QIcon.fromTheme("document-new", QtGui.QIcon("../assets/Open file.png")),
                                   'Load Game', self)
        load_game_action.setShortcut(QtGui.QKeySequence.Open)
        load_game_action.setStatusTip("Load a previous game")
        load_game_action.triggered.connect(self.load_game_trigger)
        game_menu.addAction(load_game_action)
        # Save Game
        save_game_action = QAction(QtGui.QIcon.fromTheme("document-new", QtGui.QIcon("pieces/Save.png")), 'Save Game',
                                   self)
        save_game_action.setShortcut(QtGui.QKeySequence.Save)
        save_game_action.setStatusTip("Save current game")
        save_game_action.triggered.connect(self.save_game_trigger)
        game_menu.addAction(save_game_action)
        game_menu.addSeparator()
        # Exit and close game
        exit_game_action = QAction(QtGui.QIcon.fromTheme("document-new", QtGui.QIcon("pieces/Close.png")), 'Exit Game',
                                   self)
        exit_game_action.setShortcut(QtGui.QKeySequence.Quit)
        exit_game_action.setMenuRole(QAction.QuitRole)
        exit_game_action.setStatusTip("Exit and close window")
        exit_game_action.triggered.connect(self.exit_game_trigger)
        game_menu.addAction(exit_game_action)
        menu.addSeparator()
        # Help Menu
        help_menu = menu.addMenu("Help")
        # Rules
        game_rules_action = QAction(QtGui.QIcon.fromTheme("document-new", QtGui.QIcon("pieces/Help.png")), 'Rules',
                                    self)
        game_rules_action.setMenuRole(QAction.AboutRole)
        game_rules_action.triggered.connect(self.game_rules_trigger)
        help_menu.addAction(game_rules_action)
        help_menu.addSeparator()
        # About
        about_action = QAction('About', self)
        about_action.setMenuRole(QAction.AboutRole)
        about_action.triggered.connect(self.about_trigger)
        help_menu.addAction(about_action)

    def new_game_trigger(self):
        """Menu handler: ask for confirmation, then reset and start a new game."""
        new_game = QMessageBox.question(self, 'New Game', "You're about to start a new Game.",
                                        QMessageBox.Yes | QMessageBox.No, QMessageBox.Yes)
        if new_game == QMessageBox.Yes:
            self._reset_for_new_game()
            self.play_game()
        else:
            pass

    def _reset_for_new_game(self):
        """Reset board, scores, panel and per-player bookkeeping for a restart."""
        self.board.reset_board()
        self.board.score = {-1: 0, 1: 0}
        self.done = False
        self.board.enable_all_squares()
        self.panel.reset_panel_player()
        self.board.current_player = -1
        self.current_player = self.first_player
        self.panel.update_current_player(self.current_player)
        self.board.init_board(self.players)
        self.state = FarononaState(board=self.board.get_board_state(), next_player=self.first_player,
                                   boring_limit=self.just_stop)
        self.board.set_default_colors()
        for key in self.players.keys():
            self.players[key].reset_player_informations()

    def step(self, action):
        """Play one step of the game: apply *action* to the environment.

        Args:
            action (FarononaAction): the move proposed by the current player.

        Returns:
            bool: True if the move was legal and performed, False otherwise
            (FarononaRules.act returns a bool on rejection, a (state, done)
            pair on success).
        """
        assert isinstance(action, FarononaAction), "action has to be an Action class object"
        result = FarononaRules.act(self.state, action, self.current_player)
        if isinstance(result, bool):
            return False
        else:
            self.state, self.done = result
            FarononaRules.moment_player(self.state, self.players)
            self.current_player = self.state.get_next_player()
            return True

    def play_game(self):
        """Run the main loop: alternate players until the game is done.

        Each player is asked for a move under its remaining time budget; an
        illegal move or an exhausted clock falls back to a random move.
        """
        hit = 0
        timer_first_player = Timer("first_player", total_time=self.allowed_time, logger=None)
        timer_second_player = Timer("second_player", total_time=self.allowed_time, logger=None)
        turn = self.first_player
        while not self.done:
            hit += 1
            time.sleep(self.sleep_time)
            # Give the agent a copy so it cannot mutate the real game state.
            state = deepcopy(self.state)
            remain_time = timer_first_player.remain_time() if turn == -1 else timer_second_player.remain_time()
            remain_time_copy = deepcopy(remain_time)
            if remain_time > 0:
                timer_first_player.start() if turn == -1 else timer_second_player.start()
                action = self.players[turn].play(state, remain_time_copy)
                elapsed_time = timer_first_player.stop() if turn == -1 else timer_second_player.stop()
                remain_time = timer_first_player.remain_time() if turn == -1 else timer_second_player.remain_time()
                if self.step(action):
                    print('Action performed successfully by', turn, ' in', str(elapsed_time), ' rest ', remain_time)
                else:
                    # NOTE(review): the fallback random action is computed but
                    # never passed to self.step() -- confirm whether it is
                    # expected to be applied here.
                    print("An illegal move were given. Performing a random move")
                    print(f"Lunching a random move for {turn}, and reward is {state.rewarding_move}")
                    action = FarononaRules.random_play(state, turn)  # TODO: Should we use the original state?
            else:
                print("Not remain time for ", turn, " Performing a random move")
                print(f"Lunching a random move for {turn}, and reward is {state.rewarding_move}")
                action = FarononaRules.random_play(state, turn)  # TODO: Should we use the original state?
            self._update_gui()
            self.trace.add(self.state)
            self.players[turn].update_player_infos(self.get_player_info(turn))
            FarononaRules.moment_player(state, self.players)
            turn = self.state.get_next_player()
        self._update_gui()
        self._results()
        self.save_game_trigger()
        print("\nIt's over.")

    def _update_gui(self):
        """Animate the latest move on the board and refresh score/turn display."""
        action = self.state.get_latest_move()
        self.app.processEvents()
        if action['action_type'] == 'MOVE':
            to = action['action']['to']
            at = action['action']['at']
            # Show a direction arrow on the source square before moving the piece.
            self.setFleche(at,to)
            self.app.processEvents()
            time.sleep(self.sleep_time)
            self.board_gui.move_piece(at, to, self.state.get_latest_player())
            self.app.processEvents()
            time.sleep(self.sleep_time)
        # Flash then remove every captured piece.
        if self.state.captured is not None:
            for capture in self.state.captured:
                i, j = capture
                self.board_gui.squares[i][j].set_div(self.red)
                self.app.processEvents()
                time.sleep(self.sleep_time)
                self.board_gui.remove_piece(capture)
                self.app.processEvents()
                time.sleep(self.sleep_time)
        self.panel.update_score(self.state.score, self.state.on_board)
        self.board_gui.set_default_colors()
        self.panel.update_current_player(self.state.get_next_player())  # to review

    def get_player_info(self, player):
        """Return the state's bookkeeping info for *player* (-1 or 1)."""
        return self.state.get_player_info(player)

    def is_end_game(self):
        """True once the current game has finished."""
        return self.done

    def _results(self):
        """Pop up the end-of-game dialog (winner or tie) when the game is done."""
        if self.done:
            self.trace.done = self.done
            results = FarononaRules.get_results(self.state)
            if not results['tie']:
                end = QMessageBox.information(self, "End", f"{self.players[results['winner']].name} wins.")
            else:
                end = QMessageBox.information(self, "End", "No winners.")

    def load_battle(self, states, delay=0.5, done=True):
        """Replay a recorded sequence of states with *delay* seconds between frames."""
        hit = 0
        self.board.set_default_colors()
        self.state = states[0]
        for state in states[1:]:
            action = state.get_latest_move()  # NOTE(review): unused; kept for parity with play_game
            self.state = state
            self._update_gui()
            time.sleep(delay)
        self.done = done
        self._results()
        print("It's over.")

    def load_game_trigger(self):
        """Menu handler: pick a trace file, then replay it at a chosen speed."""
        self.board.set_default_colors()
        name = QtWidgets.QFileDialog.getOpenFileName(self, 'Load Game', options=QFileDialog.DontUseNativeDialog)
        print(name[0])
        trace = self.trace.load(name[0])
        print(trace.players)
        self._reset_for_new_game()
        actions = trace.get_actions()
        delay, ok = QInputDialog.getDouble(self, 'Enter the delay', '')
        players_name = trace.players
        self.panel.update_players_name(players_name)
        self.load_battle(actions, delay, trace.done)

    def save_game_trigger(self):
        """Menu handler: save the finished game's trace (dialog or automatic)."""
        if self.done:
            if self.automatic_save_game:
                self.trace.write(self.players[-1].name + "-" + self.players[1].name)
            else:
                name = QtWidgets.QFileDialog.getSaveFileName(self, 'Save Game', options=QFileDialog.DontUseNativeDialog)
                if name[0] == "":
                    pass
                else:
                    self.trace.write(name[0])
        else:
            warning = QMessageBox.warning(self, "Warning", "No game ongoing")

    def exit_game_trigger(self):
        """Menu handler: quit the application."""
        sys.exit(self.app.exec_())

    def game_rules_trigger(self):
        """Menu handler: show the Fanorona rules in an about box."""
        rules = "Fanorona Rules \n " \
                "The game is played on a 5×9 lines board where some intersections are connected to each other by diagonals. The pieces are placed on all intersections except the central intersection at the beginning. Each player has twenty two pieces. Players decide together the first player and alternate turns. In a move, a player may either: \n" \
                "-Move one of his pieces on the board to an empty adjacent intersection along the lines. \n" \
                "-Capture one or more opponent's piece by approaching or moving away from themby approaching or moving away from them. The captured pieces are removed from the board. \n" \
                "-Can chain captures provided that the piece does not occupy the same position twice or take the same direction twice consecutively. \n" \
                "The first capture is compulsory. \n" \
                "The player who captures all the opponent's pieces or prevents them from moving is the winner. \n" \
                "For more informations : https://www.lecomptoirdesjeux.com/regle-fanorona-madagascar.htm";
        box = QMessageBox()
        box.about(self, "Rules", rules)

    def about_trigger(self):
        """Menu handler: show the credits box."""
        about = "MAIC 2021 fanorona Game by MIFY and AAAI Benin"
        box = QMessageBox()
        box.about(self, "About", about)

    def closeEvent(self, a0: QtGui.QCloseEvent):
        """Window-close hook.

        NOTE(review): exit_game_trigger() calls sys.exit and never returns
        True, so the accept() branch looks unreachable -- confirm intent.
        """
        if self.exit_game_trigger() == True:
            a0.accept()
        else:
            a0.ignore()

    def setFleche(self, at, to):
        """Display on square *at* the arrow pointing toward the destination *to*."""
        if(to[0]== at[0] + 1 and to[1] == at[1] + 1):  #droitehaut
            self.board_gui.squares[at[0]][at[1]].set_div(self.droitehaut)
        elif(to[0]== at[0] and to[1] == at[1] + 1):  #avant
            self.board_gui.squares[at[0]][at[1]].set_div(self.avant)
        elif(to[0]== at[0] - 1 and to[1] == at[1] + 1):  #droitebas
            self.board_gui.squares[at[0]][at[1]].set_div(self.droitebas)
        elif(to[0]== at[0] - 1 and to[1] == at[1]):  #bas
            self.board_gui.squares[at[0]][at[1]].set_div(self.bas)
        if(to[0]== at[0] - 1 and to[1] == at[1] - 1):  #gauchebas
            self.board_gui.squares[at[0]][at[1]].set_div(self.gauchebas)
        elif(to[0]== at[0] and to[1] == at[1] - 1):  #arriere
            self.board_gui.squares[at[0]][at[1]].set_div(self.arriere)
        elif(to[0]== at[0] + 1 and to[1] == at[1] - 1):  #gauchehaut
            self.board_gui.squares[at[0]][at[1]].set_div(self.gauchehaut)
        elif(to[0]== at[0] + 1 and to[1] == at[1]):  #haut
            self.board_gui.squares[at[0]][at[1]].set_div(self.haut)
if __name__ == "__main__":
    import sys
    import ctypes

    app_id = 'myfi.maic.faronona.1.0'
    try:
        # Windows-only: give the process its own taskbar identity/icon.
        ctypes.windll.shell32.SetCurrentProcessExplicitAppUserModelID(app_id)
    except AttributeError:
        # Not on Windows (ctypes.windll does not exist) -- ignore.
        pass

    app = QApplication(sys.argv)
    parser = argparse.ArgumentParser()
    parser.add_argument('-t', help='total number of seconds credited to each player')
    parser.add_argument('-ai0', help='path to the ai that will play as player 0')
    parser.add_argument('-ai1', help='path to the ai that will play as player 1')
    parser.add_argument('-s', help='time to show the board')
    args = parser.parse_args()

    # set the time to play
    allowed_time = float(args.t) if args.t is not None else .1
    sleep_time = float(args.s) if args.s is not None else 0.

    # Idiom fix: compare to None with "is not", not "!=".
    player_type = ['human', 'human']
    player_type[0] = args.ai0 if args.ai0 is not None else 'human'
    player_type[1] = args.ai1 if args.ai1 is not None else 'human'
    for i in range(2):
        if player_type[i].endswith('.py'):
            player_type[i] = player_type[i][:-3]

    agents = {}
    # load the agents: player 0 takes id -1, player 1 takes id 1
    k = -1
    for i in range(2):
        if player_type[i] != 'human':
            j = player_type[i].rfind('/')
            # extract the dir from the agent (renamed from `dir`, which shadowed the builtin)
            agent_dir = player_type[i][:j]
            # add the dir to the system path
            sys.path.append(agent_dir)
            # extract the agent filename
            file = player_type[i][j + 1:]
            # create the agent instance
            agents[k] = getattr(__import__(file), 'AI')(Color(k))
        k *= -1

    # Bug fix: the original tested `None in agents`, which can never be true
    # (a failed/skipped load simply leaves the key missing) -- check both
    # player seats explicitly instead.
    if -1 not in agents or 1 not in agents:
        raise Exception('Problems in AI players instances. \n'
                        'Usage:\n'
                        '-t time credited \n'
                        '\t total number of seconds credited to each player \n'
                        '-ai0 ai0_file.py \n'
                        '\t path to the ai that will play as player 0 \n'
                        '-ai1 ai1_file.py\n'
                        '\t path to the ai that will play as player 1 \n'
                        '-s sleep time \n'
                        '\t time(in second) to show the board(or move)')
    # Bug fix: FarononaGUI.__init__ takes the QApplication as its first
    # argument; the original omitted it, shifting every positional argument.
    game = FarononaGUI(app, (5, 9), agents, sleep_time=sleep_time, allowed_time=allowed_time)
    game.show()
    sys.exit(app.exec_())
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from alipay.aop.api.response.AlipayResponse import AlipayResponse
class MybankPaymentTradeSubaccountCreateResponse(AlipayResponse):
    """Response model for the mybank.payment.trade.subaccount.create API.

    Exposes each field returned by the gateway as a read/write property.
    """

    # Every field name this API can return; drives both __init__ and parsing.
    _FIELD_NAMES = (
        'account_name',
        'branch_name',
        'branch_no',
        'currency_value',
        'open_account_time',
        'out_channel_id',
        'parent_card_no',
        'request_no',
        'retry',
        'sub_card_no',
    )

    def __init__(self):
        super(MybankPaymentTradeSubaccountCreateResponse, self).__init__()
        # Initialise every backing attribute to None until a response is parsed.
        for name in self._FIELD_NAMES:
            setattr(self, '_' + name, None)

    @property
    def account_name(self):
        return self._account_name

    @account_name.setter
    def account_name(self, value):
        self._account_name = value

    @property
    def branch_name(self):
        return self._branch_name

    @branch_name.setter
    def branch_name(self, value):
        self._branch_name = value

    @property
    def branch_no(self):
        return self._branch_no

    @branch_no.setter
    def branch_no(self, value):
        self._branch_no = value

    @property
    def currency_value(self):
        return self._currency_value

    @currency_value.setter
    def currency_value(self, value):
        self._currency_value = value

    @property
    def open_account_time(self):
        return self._open_account_time

    @open_account_time.setter
    def open_account_time(self, value):
        self._open_account_time = value

    @property
    def out_channel_id(self):
        return self._out_channel_id

    @out_channel_id.setter
    def out_channel_id(self, value):
        self._out_channel_id = value

    @property
    def parent_card_no(self):
        return self._parent_card_no

    @parent_card_no.setter
    def parent_card_no(self, value):
        self._parent_card_no = value

    @property
    def request_no(self):
        return self._request_no

    @request_no.setter
    def request_no(self, value):
        self._request_no = value

    @property
    def retry(self):
        return self._retry

    @retry.setter
    def retry(self, value):
        self._retry = value

    @property
    def sub_card_no(self):
        return self._sub_card_no

    @sub_card_no.setter
    def sub_card_no(self, value):
        self._sub_card_no = value

    def parse_response_content(self, response_content):
        """Populate this object's fields from the decoded gateway response.

        Replaces ten copy-pasted `if 'x' in response:` blocks with a single
        data-driven loop; behaviour is unchanged (only keys present in the
        response are assigned, via the property setters).
        """
        response = super(MybankPaymentTradeSubaccountCreateResponse, self).parse_response_content(response_content)
        for name in self._FIELD_NAMES:
            if name in response:
                setattr(self, name, response[name])
|
from random import random
class Perceptron():
    """A single perceptron with a hard-threshold activation (+1 / -1)."""

    def __init__(self, n):
        """Create *n* weights drawn uniformly from [-1, 1)."""
        self.weights = []
        self.learn_const = 0.01  # learning rate for the perceptron rule
        for i in range(0, n):
            self.weights.append(random() * 2 - 1)

    def feedforward(self, inputs):
        """Return the activated weighted sum of *inputs*."""
        total = 0.0  # renamed from `sum`, which shadowed the builtin
        for i, e in enumerate(inputs):
            total += self.weights[i] * e
        return self.activate(total)

    def activate(self, sum):
        """Hard threshold: +1 for a strictly positive sum, -1 otherwise."""
        if sum > 0:
            return 1
        else:
            return -1

    def train(self, inputs, desired):
        """Apply one perceptron-rule update for a labelled sample.

        Bug fix: the original wrote `for i, e in enumerate(self.weights):
        e += ...`, which only mutated the local loop variable -- the weights
        were never updated and the perceptron could not learn. Update
        self.weights[i] in place instead.
        """
        guess = self.feedforward(inputs)
        error = float(desired) - float(guess)
        print(guess, error)
        for i in range(len(self.weights)):
            self.weights[i] += self.learn_const * error * inputs[i]
class Trainer():
    """One labelled training sample: point (x, y) plus a bias input of 1."""

    def __init__(self, x, y, a):
        # Third component is the constant bias input.
        self.inputs = [x, y, 1]
        self.answer = a
def f(x):
    """The target line y = 2x + 1 that the perceptron should learn."""
    return x * 2 + 1


def findAns(x, y):
    """Label a point: +1 when the line's value at x exceeds y, else -1."""
    return 1 if f(x) > y else -1
def makeSamplePoints(n):
    """Build *n* random labelled Trainer samples with x, y in [-100, 100)."""
    samples = []
    for _ in range(0, n):
        px = random() * 200 - 100
        py = random() * 200 - 100
        samples.append(Trainer(px, py, findAns(px, py)))
    return samples
def trainPerc(p, t):
    """Run one training pass of perceptron *p* over every sample in *t*."""
    for sample in t:
        p.train(sample.inputs, sample.answer)
# Train a fresh perceptron on 2000 random samples, then query it twice.
p = Perceptron(3)
t = makeSamplePoints(2000)
trainPerc(p, t)
for _ in range(2):
    print(p.feedforward([-10, 0, 1]))
import cv2 as cv
import os

# Source folder of raw hand images and destination for the resized copies.
path = '/home/hussam/Desktop/hands/'
new_path = '/home/hussam/Desktop/Project/Data/'

itr = 1  # sequential output filename counter
for file in os.listdir(path):
    img = cv.imread(os.path.join(path, file))
    # Bug fix: cv.imread returns None for unreadable/non-image files; the
    # original relied on a broad `except` around the crash in cv.resize.
    # Skip such files explicitly instead of swallowing every exception.
    if img is None:
        print('skipping non-image file:', file)
        continue
    resized = cv.resize(img, (900, 900))
    name = str(itr) + '.jpg'
    print(name)
    cv.imwrite(os.path.join(new_path, name), resized)
    itr += 1
|
import bpy
import sys
import os

# Make the helper module that lives next to this script importable from
# inside Blender's embedded Python.
dir = "/home/alkhemi/Documents/thesis/speech-animation-training-tools/blender/"
if not dir in sys.path:
    sys.path.append(dir)
from blender_DisplacementLandmarkPredictor import DisplacementLandmarkPredictor
import pandas as pd

# video_filepath = "/mnt/data/thesis data jangan dihapus/video/s19/bbby1s.mpg"
wav_filepath = "/mnt/data/thesis data jangan dihapus/audio/s32/pwwu1s.wav"
video_filepath = "/mnt/data/thesis data jangan dihapus/video/s32/pwwu1s.mpg"
# wav_filepath = "/home/alkhemi/untitled.wav"
identity_landmark = "/home/alkhemi/Documents/thesis/animated/s19/bbby1s_anim.csv"

scene = bpy.context.scene
# Make sure a sequence editor exists, then clear any strips from a previous run.
if not scene.sequence_editor:
    scene.sequence_editor_create()
for seq in scene.sequence_editor.sequences:
    scene.sequence_editor.sequences.remove(seq)
#Sequences.new_sound(name, filepath, channel, frame_start)
# NOTE(review): the sound strip is built from video_filepath, not
# wav_filepath (which only feeds the predictor below) -- confirm this is
# intentional.
soundstrip = scene.sequence_editor.sequences.new_sound("speech", video_filepath, 2, 0)
videostrip = scene.sequence_editor.sequences.new_movie("video", video_filepath, 1, 0)

# Transformer-based predictor mapping audio features to landmark displacements.
predictor = DisplacementLandmarkPredictor(
    X_feature_size=39,
    Y_feature_size=107,
    x_num_pre=12,
    x_num_post=12,
    y_num_pre=0,
    y_num_post=0,
    d_model=64,
    num_heads=8,
    dff=256,
    model_weights=dir + "Attention64_h8_dff256_batch64_lr1e-06-100-0.0036.h5",
    x_scaler=dir + "x_scaler_coef.csv",
    y_scaler=dir + "y_scaler_coef.csv",
)
animation_magnitude = 2  # scale factor applied to the predicted displacements
# Neutral (identity) face landmarks: first row of the animation CSV.
identity_landmark_df = pd.read_csv(identity_landmark).iloc[0]
# Per-frame displacement of each landmark, predicted from the wav file.
displacement_landmark_df = predictor.predictAsDF(wav_filepath)
effectstrip = scene.sequence_editor.sequences.new_effect("speed","SPEED", channel=3, frame_start=0,frame_end=len(displacement_landmark_df),seq1=videostrip)
scene.sequence_editor.sequences_all["video"].frame_final_duration = len(displacement_landmark_df)

# Rebuild the landmark collection from scratch so repeated runs stay clean.
collection = bpy.data.collections.get("FaceLandmarkAnimation")
if collection:
    for o in collection.children:
        bpy.data.objects.remove(o)
    bpy.context.scene.collection.children.unlink(collection)
    bpy.data.collections.remove(collection)
collection = bpy.data.collections.new("FaceLandmarkAnimation")
bpy.context.scene.collection.children.link(collection)
# NOTE the use of 'collection.name' to account for potential automatic renaming
layer_collection = bpy.context.view_layer.layer_collection.children[collection.name]
bpy.context.view_layer.active_layer_collection = layer_collection

# Create one small sphere per landmark (68 total) at the identity position.
# Axis order: CSV X -> Blender X, CSV Z -> Blender Y, CSV Y -> Blender Z;
# CSV values are in millimetres, hence the /1000.
for i in range(68):
    if bpy.data.objects.get(f"LM_{i}"):
        bpy.data.objects.remove(bpy.data.objects.get(f"LM_{i}"))
    if bpy.data.meshes.get(f"LM_{i}"):
        bpy.data.meshes.remove(bpy.data.meshes.get(f"LM_{i}"))
    bpy.ops.mesh.primitive_uv_sphere_add(
        radius=0.001,
        enter_editmode=False,
        align="WORLD",
        location=(
            identity_landmark_df[f"X_{i}"] / 1000,
            identity_landmark_df[f"Z_{i}"] / 1000,
            identity_landmark_df[f"Y_{i}"] / 1000,
        ),
    )
    ob = bpy.context.object
    me = ob.data
    ob.name = f"LM_{i}"
    me.name = f"LM_{i}"

# Simple but ULTRA MEGA SLOW
# for frame in range(len(displacement_landmark_df)):
#     for i in range(68):
#         bpy.context.scene.frame_set(frame)
#         ob = bpy.data.objects[f"LM_{i}"]
#         ob.location = (
#             (identity_landmark_df[f"X_{i}"]+displacement_landmark_df[f"X_{i}"][frame])/1000,
#             (identity_landmark_df[f"Z_{i}"]+displacement_landmark_df[f"Z_{i}"][frame])/1000,
#             (identity_landmark_df[f"Y_{i}"]+displacement_landmark_df[f"Y_{i}"][frame])/1000,
#         )
#         ob.keyframe_insert(data_path="location",index=-1)

# FAST: write keyframe points directly into freshly created f-curves,
# one location channel at a time, instead of frame-by-frame keyframe_insert.
for i in range(68):
    if bpy.data.actions.get(f"LM_{i}_Action"):
        bpy.data.actions.remove(bpy.data.actions.get(f"LM_{i}_Action"))
    obj = bpy.data.objects[f"LM_{i}"]
    obj.animation_data_create()
    obj.animation_data.action = bpy.data.actions.new(name=f"LM_{i}_Action")
    # location index 0 (Blender X) <- CSV X displacement
    fcu_z = obj.animation_data.action.fcurves.new(data_path="location", index=0)
    fcu_z.keyframe_points.add(len(displacement_landmark_df))
    for frame in range(len(displacement_landmark_df)):
        fcu_z.keyframe_points[frame].co = (
            frame,
            (
                identity_landmark_df[f"X_{i}"]
                + animation_magnitude * displacement_landmark_df[f"X_{i}"][frame]
            )
            / 1000,
        )
    # location index 1 (Blender Y) <- CSV Z displacement
    fcu_z = obj.animation_data.action.fcurves.new(data_path="location", index=1)
    fcu_z.keyframe_points.add(len(displacement_landmark_df))
    for frame in range(len(displacement_landmark_df)):
        fcu_z.keyframe_points[frame].co = (
            frame,
            (
                identity_landmark_df[f"Z_{i}"]
                + animation_magnitude * displacement_landmark_df[f"Z_{i}"][frame]
            )
            / 1000,
        )
    # location index 2 (Blender Z) <- CSV Y displacement
    fcu_z = obj.animation_data.action.fcurves.new(data_path="location", index=2)
    fcu_z.keyframe_points.add(len(displacement_landmark_df))
    for frame in range(len(displacement_landmark_df)):
        fcu_z.keyframe_points[frame].co = (
            frame,
            (
                identity_landmark_df[f"Y_{i}"]
                + animation_magnitude * displacement_landmark_df[f"Y_{i}"][frame]
            )
            / 1000,
        )
|
from InBedManagementDialog import *
from Gui.ItemModels.LithologyInBedItemModel import *
from Gui.ItemViews.LithologyInBedItemView import *
class LithologyInBedManagementDialog(InBedManagementDialog):
    """Dialog for managing the lithologies attached to a bed.

    Bug fix: the original __init__ signature contained the string literal
    'Lithologies' in its parameter list, which is a SyntaxError; the literal
    belongs only in the call to the base-class initialiser.
    """

    def __init__(self, parent, bed):
        InBedManagementDialog.__init__(self, parent, bed, 'Lithologies')
        self.addManagementWidget(LithologyInBedItemView)
        self.addCloseButton()
|
#! /usr/bin/python
#run python -m chardetails.tests.chardetails_test from root directory
#of the repository
import unittest
from chardetails import getInstance
class TestChardetails(unittest.TestCase):
    """Unit tests for the chardetails module."""

    def setUp(self):
        # A fresh module instance for every test case.
        self.instance = getInstance()

    def test_info(self):
        expected = 'Shows the Unicode Character Details of a given character'
        self.assertEqual(expected, self.instance.get_info())

    def test_english_alphabets(self):
        details = self.instance.getdetails(u'AB')
        self.assertEqual(details[u'A']['HTML Entity'], '65')
def main():
    """Build and run the test suite for this module.

    Bug fix: the original loaded tests from `TransliterationTest`, a name
    that does not exist in this module (copy-paste from another test file);
    it must load `TestChardetails` instead.
    """
    suite = unittest.TestLoader().loadTestsFromTestCase(TestChardetails)
    unittest.TextTestRunner(verbosity=2).run(suite)


if __name__ == "__main__":
    main()
|
# Conditionals tutorial: grading, membership tests and boolean operators.
nilai = 50
if nilai == 75:
    print("nilai anda: ", nilai)
# Bug fix: `is not` compares identity, which is unreliable for int literals
# (and a SyntaxWarning in modern Python) -- use != for value comparison.
if nilai != 60:
    print("nilai anda: ", nilai)
print(20*"=")

nilai = int(input("masukkan nilai anda: "))
# NOTE(review): the range boundaries overlap (e.g. 80 matches both A and B);
# the elif chain resolves ties in favour of the higher grade.
if 80 <= nilai <= 100:
    print("nilai anda adalah A")
elif 70 <= nilai <= 80:
    print("nilai anda adalah B")
elif 60 <= nilai <= 70:
    print("nilai anda adalah C")
elif 50 <= nilai <= 60:
    print("nilai anda adalah D, mohon perbaikan nilai")
else:
    print("maap nilai anda: ", nilai)
    print("mohon segera remedial")
print(20*"+")

print("operator logika untuk list dan string")
print(" ")
gorengan = ["bakwan", "cireng", "bala-bala", "gehu", "combro", "pisang goreng", "pukis", "risoles"]
beli = "jus"
if beli in gorengan:
    print('Mamang bilang, " ya, saya jual', beli, '"')
# Idiom fix: `x not in y` instead of `not x in y`.
if beli not in gorengan:
    print('" saya gak jual', beli, '"')
karakter = "u"
if karakter in beli:
    print("ada", karakter, "di", beli)
else:
    print("tidak ada", karakter, "di", beli)

uang = input("berapa uang anda? ")
utang = 20000
if int(uang) < utang:
    print("maap coy duit lu kurang ni")
elif int(uang) == utang:
    print("thx coy pas nih!")
else:
    print("waduh uangnya lebih!")

"""
lebih dari satu syarat
and or not
& | !=
"""
print("PUTRI MENCARI JODOH!")
tamu = "pria"
baik = True
rajin = False
# Idiom fix: use the boolean operator `and` instead of bitwise `&` when
# combining conditions (same result on bools, clearer and short-circuits).
if baik and rajin and tamu == "pria":
    print("Kita nikah yuk!")
elif baik and rajin and tamu == "cewe":
    print("kita saudaraan yuk!")
else:
    print("Pergi sana!")
from selenium import webdriver
from selenium.webdriver.chrome.options import Options
import openpyxl
import os
class BuscarValor():
    """Scrape bicycle listings (title, locality, price) from OLX search
    result pages into an Excel spreadsheet."""

    def __init__(self):
        chrome_options = Options()
        chrome_options.add_argument('--lang=pt-BR')
        # Bug fix: chrome_options was created but never handed to the driver,
        # so the --lang argument had no effect.
        self.driver = webdriver.Chrome(
            executable_path=os.getcwd() + os.sep + 'chromedriver.exe',
            options=chrome_options
        )

    def Iniciar(self):
        """Open the first results page, prepare the workbook and start scraping."""
        self.numero_pagina = 1
        self.driver.get(f'https://sp.olx.com.br/sao-paulo-e-regiao/centro?o={self.numero_pagina}&q=bicicleta')
        self.criar_planilha()
        self.capturar_informacoes_do_site()

    def criar_planilha(self):
        """Create the workbook and the header row of the results sheet."""
        self.planilha = openpyxl.Workbook()
        self.guia_planilha = self.planilha.create_sheet('Valores da OLX')
        self.guia_planilha.cell(row=1, column=1, value='Titulo')
        self.guia_planilha.cell(row=1, column=2, value='Localidade')
        self.guia_planilha.cell(row=1, column=3, value='Preço')
        # planilha.save('Projeto_WEB_excel.xlsx')

    def capturar_informacoes_do_site(self):
        """Loop over result pages until one comes back empty, saving each page."""
        # try:
        while True:
            self.titulos = self.driver.find_elements_by_xpath('//*[@class="fnmrjs-6 iNpuEh"]')
            self.localidades = self.driver.find_elements_by_xpath('//*[@class="sc-ifAKCX sc-7l84qu-1 fVmnUX"]')
            self.preco = self.driver.find_elements_by_xpath('//*[@class="fnmrjs-9 dCffJM"]')
            # An empty title list means we ran past the last results page.
            if not self.titulos:
                print('Pagina não encontrada !!')
                self.driver.close()
                break
            self.armazenar_dados_na_planilha()
            self.navegar_para_proxima_pagina()
        # except Exception as erro:
        #     print('Não existe mais paginas !!')
        #     print(erro)

    def armazenar_dados_na_planilha(self):
        """Append one row per listing and persist the workbook to disk."""
        for indice in range(0, len(self.titulos)):
            linha = [self.titulos[indice].text,
                     self.localidades[indice].text,
                     self.preco[indice].text]
            self.guia_planilha.append(linha)
        self.planilha.save('Preços_Bicicletas_centro_SP.xlsx')

    def navegar_para_proxima_pagina(self):
        """Advance the page counter and load the next results page."""
        self.numero_pagina += 1
        self.driver.get(f'https://sp.olx.com.br/sao-paulo-e-regiao/centro?o={self.numero_pagina}&q=bicicleta')
busca = BuscarValor()  # builds the scraper (opens a Chrome window)
busca.Iniciar()  # create the spreadsheet and start scraping
|
""" interpolation.f90 test suite
WARNING: don't forget that Fortran's default array indexing is (1...size) unlike Python's
(0...size-1)
"""
import math
from sys import float_info
from interpolation import binary_search, interp_values, log_interp_values, dist_interp_values
def test_binary_search():
    """Exercise binary_search on increasing haystacks (1-based Fortran indices)."""
    # Exact hit, even-length haystack
    assert binary_search(50, list(range(100))) == (51, 51)
    haystack = list(range(99))
    # Exact hit, odd-length haystack
    assert binary_search(50, haystack) == (51, 51)
    # (needle, expected bracket): in-range, outside-range and corner cases
    cases = [
        (3.1, (4, 5)),
        (97.9, (98, 99)),
        (0.1, (1, 2)),
        (-1, (1, 1)),
        (99, (99, 99)),
        (0, (1, 1)),
        (98, (99, 99)),
    ]
    for needle, expected in cases:
        assert binary_search(needle, haystack) == expected
def test_binary_search_reversed():
    """Same coverage as test_binary_search, but on decreasing haystacks."""
    # Exact hit, even-length haystack
    assert binary_search(50, list(reversed(range(100))), decreasing=True) == (50, 50)
    haystack = list(reversed(range(99)))
    # Exact hit, odd-length haystack
    assert binary_search(50, haystack, decreasing=True) == (49, 49)
    # (needle, expected bracket): in-range, outside-range and corner cases
    cases = [
        (3.1, (95, 96)),
        (97.9, (1, 2)),
        (0.1, (98, 99)),
        (-1, (99, 99)),
        (99, (1, 1)),
        (0, (99, 99)),
        (98, (1, 1)),
    ]
    for needle, expected in cases:
        assert binary_search(needle, haystack, decreasing=True) == expected
# def test_linear_interpolation():
# xs = [0, 1, 2, 3, 4]
# ys = [0, 0, 1, 0, 5]
# assert ys == [interp_values(x, xs, ys) for x in xs]
# assert 0 == interp_values(0.5, xs, ys)
# assert 0.5 == interp_values(1.5, xs, ys) == interp_values(2.5, xs, ys)
# assert abs(4 - interp_values(3.8, xs, ys)) < 10 * float_info.epsilon
def test_logarithmic_interpolation():
    """log_interp_values should reproduce an exponential within float tolerance.

    Bug fix: the original asserted a non-empty *list* of booleans
    (`assert [cond for ...]`), which is always truthy, so the test could
    never fail. Wrap the per-point checks in all().
    """
    def f(x):
        return math.exp(-math.pi*x)
    xs = range(-5, 5)
    ys = list(map(f, xs))
    xxs = [x / 5. for x in range(-25, 21)]
    yys = list(map(f, xxs))
    # At the interpolation knots themselves
    assert all(abs(log_interp_values(x, xs, ys) - y) < 10 * float_info.epsilon
               for x, y in zip(xs, ys))
    # Between the knots (log-interpolation of an exponential is exact in theory)
    assert all(abs(log_interp_values(x, xs, ys) - y) < 10 * float_info.epsilon
               for x, y in zip(xxs, yys))
def test_distribution_interpolation():
    """dist_interp_values should reproduce a logistic curve within float tolerance.

    Bug fix: same always-true assertion as in test_logarithmic_interpolation
    (`assert [cond for ...]` is truthy for any non-empty list); wrap the
    per-point checks in all().
    """
    def f(x):
        return 1. / (math.exp(x / math.pi) + 1)
    xs = range(-5, 5)
    ys = list(map(f, xs))
    xxs = [x / 5. for x in range(-25, 21)]
    yys = list(map(f, xxs))
    # At the interpolation knots themselves
    assert all(abs(dist_interp_values(x, xs, ys) - y) < 10 * float_info.epsilon
               for x, y in zip(xs, ys))
    # Between the knots
    assert all(abs(dist_interp_values(x, xs, ys) - y) < 10 * float_info.epsilon
               for x, y in zip(xxs, yys))
|
# pylint: disable=W0621
import pytest
import deedee
@pytest.fixture
def context():
    """Provide a fresh deedee dependency-injection context for each test."""
    ctx = deedee.Context()
    return ctx
def test_positional(context):
    """A defaulted parameter resolves from the context however args are passed."""
    @deedee.resolve
    def example(param1, param2, param3=context.example):
        return (param1, param2, param3)

    context.register("example", "foobar")
    expected = ("a", "b", "foobar")
    assert example("a", "b") == expected
    assert example(param1="a", param2="b") == expected
    assert example(param2="b", param1="a") == expected
def test_override(context):
    """An explicitly supplied argument wins over the context-resolved default."""
    @deedee.resolve
    def example(param1, param2, param3=context.example):
        return (param1, param2, param3)

    context.register("example", "foobar")
    expected = ("a", "b", "c")
    assert example("a", "b", "c") == expected
    assert example("a", "b", param3="c") == expected
    assert example(param3="c", param1="a", param2="b") == expected
def test_kwargs(context):
    """Resolution also works for keyword-only parameters."""
    @deedee.resolve
    def example_kwargs(param1, param2, *, param3=context.example_kwargs):
        return (param1, param2, param3)

    context.register("example_kwargs", "foobar")
    result = example_kwargs("a", "b")
    assert result == ("a", "b", "foobar")
def test_multiple(context):
    """Every defaulted parameter resolves; positional args override left-to-right."""
    @deedee.resolve
    def example_multiple(param1=context.p1, param2=context.p2, param3=context.p3):
        return (param1, param2, param3)

    for key, value in (("p1", "v1"), ("p2", "v2"), ("p3", "v3")):
        context.register(key, value)
    assert example_multiple() == ("v1", "v2", "v3")
    assert example_multiple(1) == (1, "v2", "v3")
    assert example_multiple(1, 2) == (1, 2, "v3")
    assert example_multiple(1, 2, 3) == (1, 2, 3)
def test_mutable(context):
    """Registered values are not copied: later mutations stay visible and
    the very same object is returned."""
    @deedee.resolve
    def example_mutable(param1=context.p1):
        return param1

    shared = [1]
    context.register("p1", shared)
    shared.append(5)
    assert example_mutable() == [1, 5]
    assert example_mutable() is shared
def test_force_register(context):
    """Re-registering a key with force=True updates the resolved value."""
    @deedee.resolve
    def example(param1=context.p1):
        return param1

    context.register("p1", 1)
    assert example() == 1
    # Overwrite the earlier registration.
    context.register("p1", 2, force=True)
    assert example() == 2
|
import dash
import dash_core_components as dcc
import dash_html_components as html
import pandas as pd
import plotly.express as px
import plotly.graph_objects as go
from dash.dependencies import Input, Output
from plotly.subplots import make_subplots
# Read the pre-processed COVID time-series CSVs; each carries a stray
# "Unnamed: 0" index column from the export, dropped on load.
dfConfirmed = pd.read_csv("confirmed.csv").drop("Unnamed: 0", axis=1)
dfDeaths = pd.read_csv("deaths.csv").drop("Unnamed: 0", axis=1)
dailyGlobalNewCases = pd.read_csv("dailyGlobalNewCases.csv")
dfActive = pd.read_csv("active.csv").drop("Unnamed: 0", axis=1)
dfRecovered = pd.read_csv("recovered.csv").drop("Unnamed: 0", axis=1)
dfgbConfirmed = pd.read_csv("globalTtl.csv").drop("Unnamed: 0", axis=1)

# Country names start at column 1 (column 0 is the date).
listOfCountries = list(dfConfirmed.columns[1:])
# Dropdown options for Dash: one {"label", "value"} dict per country
# (comprehension replaces the manual append loop).
optCountries = [{"label": country, "value": country} for country in listOfCountries]
# coding: utf-8
# In[66]:
9
1 2 3 4 5 6 7 8 9
9
10 1 2 3 11 21 55 6 8
# In[72]:
# Symmetric difference: count the elements that appear in exactly one of
# the two sets read from stdin (sizes are read first, then the elements).
a=int(input())  # declared size of the first set
x=set(map(int,input().split()[:a]))  # [:a] ignores any extra tokens
n=int(input())  # declared size of the second set
y=set(map(int,input().split()[:n]))
m=x^y  # elements in x or y but not both
print(len(m))
# In[62]:
a
|
from tkinter import filedialog
import tkinter
from ia import *
from random import choice
def showmaze(Maze):
    """Print the maze to the console, one row per line."""
    for row in Maze:
        print(row)
def showmazetk(canvas,Maze): # draw the maze onto the tkinter canvas
    # Drawing area: 20..980 canvas pixels; one maze cell maps to one
    # (ratiowidth x ratioheight) rectangle.
    minimum = 20
    maximum = 980
    height = len(Maze)
    width = len(Maze[0])
    ratiowidth = maximum/width
    ratioheight = maximum/height
    for i in range(height):
        # rows are strings; convert to list for per-cell indexing,
        # re-joined at the bottom of the loop
        Maze[i] = list(Maze[i])
        for j in range(width):
            if Maze[i][j] == "─":
                # horizontal wall segment centred on the cell
                canvas.create_line(minimum+ratiowidth*(j-0.5),minimum+ratioheight*i,minimum+ratiowidth*(j+0.5),minimum+ratioheight*i)
            elif Maze[i][j] == "│":
                # vertical wall segment centred on the cell
                canvas.create_line(minimum+ratiowidth*j,minimum+ratioheight*(i-0.5),minimum+ratiowidth*j,minimum+ratioheight*(i+0.5))
            elif Maze[i][j] == "┼" or Maze[i][j] == "┘" or Maze[i][j] == "┐" or Maze[i][j] == "┬" or Maze[i][j] == "┴" or Maze[i][j] == "├" or Maze[i][j] == "┤" or Maze[i][j] == "┌" or Maze[i][j] == "└":
                # junction glyph: ask Plus() which of the four neighbouring
                # walls exist and draw a half-segment toward each one
                pos = Plus(Maze,j,i)
                if pos[0] != 0:
                    canvas.create_line(minimum+ratiowidth*j,minimum+ratioheight*i,minimum+ratiowidth*(j-0.5),minimum+ratioheight*i)
                if pos[1] != 0:
                    canvas.create_line(minimum+ratiowidth*j,minimum+ratioheight*i,minimum+ratiowidth*(j+0.5),minimum+ratioheight*i)
                if pos[2] != 0:
                    canvas.create_line(minimum+ratiowidth*j,minimum+ratioheight*i,minimum+ratiowidth*j,minimum+ratioheight*(i-0.5))
                if pos[3] != 0:
                    canvas.create_line(minimum+ratiowidth*j,minimum+ratioheight*i,minimum+ratiowidth*j,minimum+ratioheight*(i+0.5))
            elif Maze[i][j] == "1":
                # "1": entry/exit marker (green square)
                canvas.create_rectangle(minimum+ratiowidth*(j-0.9),minimum+ratioheight*(i-0.9),minimum+ratiowidth*(j+0.9),minimum+ratioheight*(i+0.9),fill="#000fff000")
            elif Maze[i][j] == "2":
                # "2": second marker (red square)
                canvas.create_rectangle(minimum+ratiowidth*(j-0.9),minimum+ratioheight*(i-0.9),minimum+ratiowidth*(j+0.9),minimum+ratioheight*(i+0.9),fill="red")
            elif Maze[i][j] == ".":
                # ".": path marker (orange square)
                canvas.create_rectangle(minimum+ratiowidth*(j-0.75),minimum+ratioheight*(i-0.75),minimum+ratiowidth*(j+0.75),minimum+ratioheight*(i+0.75),fill="orange")
        Maze[i] = "".join(Maze[i])
def showgraphTk(canvas,Maze): # draw the maze's connectivity graph onto the canvas
    minimum = 20
    maximum = 980
    height = len(Maze)
    width = len(Maze[0])
    ratiowidth = maximum/width
    ratioheight = maximum/height
    # getGraph comes from `ia`; presumably returns (vertices, edges,
    # vertex positions, paths) — TODO confirm against ia.getGraph
    Graph = getGraph(Maze)
    S = Graph[0]
    A = Graph[1]
    Spos= Graph[2]
    P = Graph[3]
    for i in range(len(P)):
        # one random colour per path so paths are visually distinct
        color = "#"
        for c in range(6):
            color = color + choice(["1","2","3","4","5","6","7","8","9","a","b","c","d","e","f"])
        for j in range(len(P[i])):
            if j != 0:
                # draw a segment from the previous path point to this one
                a = P[i][j-1][0]
                b = P[i][j-1][1]
                x = P[i][j][0]
                y = P[i][j][1]
                canvas.create_line(minimum+ratiowidth*a,minimum+ratioheight*b,minimum+ratiowidth*x,minimum+ratioheight*y,width=(ratiowidth+ratioheight)/20, fill=color)
    for i in range(len(S)):
        # mark every graph vertex with a small coloured circle
        x = Spos[i][0]
        y = Spos[i][1]
        create_circle(minimum+ratiowidth*x,minimum+ratioheight*y,(ratiowidth+ratioheight)/2/8,canvas)
def showsolvedTk(canvas,Maze): # draw the shortest exit path through the maze
    minimum = 20
    maximum = 980
    height = len(Maze)
    width = len(Maze[0])
    ratiowidth = maximum/width
    ratioheight = maximum/height
    # Djisktra comes from `ia`; result is unpacked the same way as
    # getGraph in showgraphTk — TODO confirm against ia.Djisktra
    Graph = Djisktra(Maze)
    S = Graph[0]
    A = Graph[1]
    Spos= Graph[2]
    P = Graph[3]
    for i in range(len(P)):
        for j in range(len(P[i])):
            if j != 0:
                # connect consecutive path points with a thick red segment
                a = P[i][j-1][0]
                b = P[i][j-1][1]
                x = P[i][j][0]
                y = P[i][j][1]
                canvas.create_line(minimum+ratiowidth*a,minimum+ratioheight*b,minimum+ratiowidth*x,minimum+ratioheight*y,width=(ratiowidth+ratioheight)/20, fill="red")
def create_circle(x, y, r, canvasName):
    """Draw a circle of radius *r* centred on (x, y) with a random fill
    colour; returns the canvas item id from create_oval."""
    color = "#" + "".join(choice("0123456789abcdef") for _ in range(6))
    return canvasName.create_oval(x - r, y - r, x + r, y + r, fill=color)
def Plus(Maze, x, y):
    """Report which neighbours of cell (x, y) carry a wall character.

    Returns [left, right, up, down] where each entry is 0 (no wall) or a
    letter flag: "G"/"D" (gauche/droite) for a horizontal wall character
    to the left/right, "H"/"B" (haut/bas) for a vertical wall character
    above/below.  Used by showmazetk to decide which half-segments to
    draw around a junction glyph.
    """
    pos = [0, 0, 0, 0]
    h = len(Maze)
    w = len(Maze[0])
    # Rows may be strings or lists; direct indexing works for both, so the
    # original per-check list() copies were dropped.  The first branch also
    # now assigns pos[0] instead of rebinding the whole list, matching the
    # other three branches (same result: pos starts all-zero).
    if x != 0 and Maze[y][x - 1] in ("-", "+", "─"):
        pos[0] = "G"
    if x != w - 1 and Maze[y][x + 1] in ("-", "+", "─"):
        pos[1] = "D"
    if y != 0 and Maze[y - 1][x] in ("|", "+", "│"):
        pos[2] = "H"
    if y != h - 1 and Maze[y + 1][x] in ("|", "+", "│"):
        pos[3] = "B"
    return pos
def ExportFile():  # handles the "export" click in the "labyrinthe" menu
    """Ask the user for a .maze destination file and print its path."""
    f = tkinter.filedialog.asksaveasfile(
        title="Enregistrer le labyrinthe",
        filetypes=[('MAZE files', '.maze')])
    # asksaveasfile returns None when the user cancels the dialog; the
    # original unconditionally read f.name and crashed on cancel.
    if f is not None:
        print(f.name)
def ImportFile():  # handles the "import" click in the "labyrinthe" menu
    """Ask the user for a .maze file to open and print its path."""
    f = tkinter.filedialog.askopenfilename(
        title="Ouvrir une labyrinthe",
        filetypes=[('MAZE files', '.maze')])
    # BUG FIX: askopenfilename returns a path *string* ('' on cancel),
    # not a file object — the original `print(f.name)` always raised
    # AttributeError.
    if f:
        print(f)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import re
import sys
from collections import defaultdict
# Matches AoC-2018 day-3 claim lines like "#123 @ 3,2: 5x4" and captures
# id, x, y, width, height as named groups.
LINE_RE = re.compile(
    r'#(?P<id>\d+)\s+@\s+(?P<x>\d+),(?P<y>\d+):'
    r'\s+(?P<width>\d+)x(?P<height>\d+)'
)
def get_input(filename):
inp = []
with open(filename, 'r') as fp:
for line in fp:
line = line.strip()
if line:
inp.append(line)
return inp
def fill_area(area, x, y, width, height):
    """Increment the claim count of every square covered by the
    (x, y, width, height) rectangle and return the (mutated) area map.

    Keys are (column, row) tuples; *area* is expected to default missing
    squares to 0 (e.g. a defaultdict(int)).
    """
    for row in range(y, y + height):
        for col in range(x, x + width):
            area[(col, row)] += 1
    return area
def check_overlapping(area, x, y, width, height):
    """Return True iff every square of the (x, y, width, height) rectangle
    is claimed exactly once in *area* (i.e. the claim overlaps nothing)."""
    return all(
        area[(col, row)] == 1
        for row in range(y, y + height)
        for col in range(x, x + width)
    )
def main(filename):
    """Solve both parts of the puzzle for the claims in *filename*.

    Part 1: number of squares claimed more than once.
    Part 2: id of the single claim that overlaps no other.
    """
    area = defaultdict(int)
    claims = []
    for line in get_input(filename):
        match = LINE_RE.search(line)
        if not match:
            continue
        claim = tuple(map(int, match.groups()))
        claims.append(claim)
        area = fill_area(area, *claim[1:])
    count = sum(1 for overlaps in area.values() if overlaps > 1)
    print(f'Part 1: {count}')
    for claim in claims:
        if check_overlapping(area, *claim[1:]):
            print(f'Part 2: #{claim[0]}')
            break
if __name__ == '__main__':
    # The input filename is the first CLI argument; a missing argument
    # raises IndexError, which we turn into a usage message.
    try:
        main(sys.argv[1])
    except IndexError:
        print(f'Usage: {sys.argv[0]} <filename>')
|
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from sklearn.metrics import r2_score
# --- Assumptions (all counts are absolute dose/person numbers) ---
TotalPop = 1360000000
HerdImmPop = 0.75 * TotalPop          # people needed for herd immunity
Jabs = 215000000                      # doses already administered
CovidRecovered = 300000000
TargetJabs = HerdImmPop*2 - CovidRecovered  # 2 doses each, minus recovered
ToBeJabbed = TargetJabs - Jabs
# Monthly production per vaccine (doses/month)
CovShProd = 60000000
CovaxProd = 20000000
SputnikProd = 3000000
CovaxProdTarget = 70000000
CovShProdTarget = 65000000
OtherVaxTarget = 5000000
MonthlyVax = CovaxProd + CovShProd + SputnikProd
TargetMonthlyVax = 300000000
TargetJabs2 = 2000000000
# Cumulative doses month-by-month at the current rate
y = np.arange(Jabs,TargetJabs, MonthlyVax)
n = len(y)
x = range(1,n+1)
# Government-target-rate trajectory, branching off at month 3
y2 = np.arange(y[2],TargetJabs2, TargetMonthlyVax)
n2 = len(y2)
x2 = range(3,n2+3)
# Third (currently unplotted) trajectory branching off at month 4.
# BUG FIX: n3/x3 were computed from y2/n2 (copy-paste) instead of y3/n3.
y3 = np.arange(y[3],TargetJabs, TargetMonthlyVax)
n3 = len(y3)
x3 = range(4,n3+4)
# Horizontal line marking the herd-immunity dose target
targLine = [y[n-1]] * 26
MonthRef = ['Jun', 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec', 'Jan', 'Feb', 'Mar', 'Apr', 'May']
YearRef = ['2021', '2022', '2023']
CurRateLabel = str(MonthlyVax/1000000) + ' Mil.'
TargetRateLabel = str(TargetMonthlyVax/1000000) + ' Mil.'
plt.scatter(x,y)
plt.scatter(x2, y2, color = 'green')
# Annotate the current-rate series every third month (plus first/last)
for i in range(0, n):
    j = y[i]
    if(i < 12):
        month = MonthRef[i]
    if(i >= 12):
        month = MonthRef[(i -12)]
    # month 0 is Jun 2021; year rolls over after Dec (index 7 / 19)
    if(i <= 7):
        xlab = month + ' ' + YearRef[0]
    if(i > 7 and i <= 19):
        xlab = month + ' ' + YearRef[1]
    if(i > 19):
        xlab = month + ' ' + YearRef[2]
    yinMil = j/1000000
    ylab = str(yinMil) + ' Mil.'
    if(i == 0 or i%3 == 0 or i == n-1):
        plt.annotate( '('+ xlab + ', ' + ylab +')', (i+1.5,j-25000000), color = 'blue')
# Annotate every point of the target-rate series (offset +2 months)
for i in range(0,n2):
    j = y2[i]
    if(i < 12):
        month = MonthRef[i+2]
    if(i >= 12):
        month = MonthRef[(i -12+2)]
    if(i <= 7):
        xlab = month + ' ' + YearRef[0]
    if(i > 7 and i <= 19):
        xlab = month + ' ' + YearRef[1]
    if(i > 19):
        xlab = month + ' ' + YearRef[2]
    yinMil = j/1000000
    ylab = str(yinMil) + ' Mil.'
    print(j)
    plt.annotate( '('+ xlab + ', ' + ylab +')', (i-1.25,j-25000000), color = 'green')
plt.title('Vaccination Trend in India at Current and Government Target Rate')
plt.plot(x,y, label = 'Total Vaccine jabs at Current Rate of: ' + CurRateLabel + " Per Month")
plt.plot(x2, y2, color = 'green', label = 'Total Vaccine Jabs at Target Rate of: ' + TargetRateLabel + " Per Month")
plt.plot(targLine, color = 'red')
plt.xlabel('Number of Months from May 2021')
plt.ylabel('Total number of Covid 19 Vaccination Shots Administered in India')
plt.annotate('Target No. of Vaccine shots: ' + str(y[n-1]/1000000) + ' Mil.', (8, y[n-1]+10000000), color = 'red')
plt.xlim(0,25)
plt.ylim(125000000,1900000000)
plt.legend()
plt.show()
|
from django.urls import path,include
from appone import views
# URL namespace: reverse these as 'appone:join' / 'appone:members'.
app_name = 'appone'
urlpatterns = [
    path('join/', views.form_name_view, name='join'),
    path('members/',views.members, name='members')
]
|
from unittest.mock import patch
import datetime
from freezegun import freeze_time
import pytest
import app
from app.schedule_scaler import ScheduleScaler
# Fixed reference timestamps used with freeze_time:
# 2018-03-15 was a Thursday (workday), 2018-03-17 a Saturday (weekend).
WORKDAY_1420 = datetime.datetime(2018, 3, 15, 14, 20, 00)
WORKDAY_1020 = datetime.datetime(2018, 3, 15, 10, 20, 00)
WEEKEND_1420 = datetime.datetime(2018, 3, 17, 14, 20, 00)
WEEKEND_1020 = datetime.datetime(2018, 3, 17, 10, 20, 00)
class TestScheduleScaler:
    """Unit tests for ScheduleScaler's constructor and schedule logic."""

    def test_init_assigns_basic_values(self):
        """Constructor copies min/max/threshold and lifts scale_factor
        out of the schedule dict."""
        input_attrs = {
            'min_instances': 1,
            'max_instances': 2,
            'threshold': 1500,
            'schedule': {'workdays': ['08:00-10:00'], 'scale_factor': 0.4}
        }
        schedule_scaler = ScheduleScaler(**input_attrs)
        assert schedule_scaler.min_instances == input_attrs['min_instances']
        assert schedule_scaler.max_instances == input_attrs['max_instances']
        assert schedule_scaler.threshold == input_attrs['threshold']
        assert schedule_scaler.scale_factor == 0.4
    @pytest.mark.parametrize('now,expected', [
        (WORKDAY_1420, 3),
        (WORKDAY_1020, 1),
        (WEEKEND_1420, 3),
        (WEEKEND_1020, 1),
    ], ids=[
        "Workday in schedule",
        "Workday off schedule",
        "Weekend in schedule",
        "Weekend off schedule",
    ])
    def test_get_desired_instance_count_schedule(self, now, expected):
        """Inside the 13:00-15:00 window the scaler returns the scaled
        count (scale_factor 0.6 of max 5 -> 3); outside it, min_instances."""
        input_attrs = {
            'min_instances': 1,
            'max_instances': 5,
            'threshold': 1500,
            'schedule': {'workdays': ['13:00-15:00'], 'weekends': ['13:00-15:00'], 'scale_factor': 0.6}
        }
        # freeze_time pins "now" so the schedule window check is deterministic
        with freeze_time(now):
            schedule_scaler = ScheduleScaler(**input_attrs)
            assert schedule_scaler.get_desired_instance_count() == expected
    @pytest.mark.parametrize('enabled,expected', [
        (True, 3),
        (False, 1),
    ], ids=[
        "Scheduled scaler enabled",
        "Scheduled scaler disabled",
    ])
    def test_disabled_schedule(self, enabled, expected):
        """With an always-on window, the SCHEDULE_SCALER_ENABLED config
        flag alone decides between scaled count and min_instances."""
        input_attrs = {
            'min_instances': 1,
            'max_instances': 5,
            'threshold': 1500,
            'schedule': {'workdays': ['00:00-23:59'], 'weekends': ['00:00-23:59'], 'scale_factor': 0.6}
        }
        schedule_scaler = ScheduleScaler(**input_attrs)
        with patch.dict(app.config.config, {'SCALERS': {'SCHEDULE_SCALER_ENABLED': enabled}}):
            assert schedule_scaler.get_desired_instance_count() == expected
|
# Generated by Django 3.0.6 on 2020-07-29 15:03
# Modificado por Dante Travisany 2020-07-29 15:06
from django.db import migrations, models
from django.conf import settings
def create_data(apps, schema_editor):
    """Seed the user with pk=1 ("auth0user") via the historical user model.

    Uses apps.get_model so the migration works against the model state at
    this point in the migration history.
    """
    user_model = apps.get_model(settings.AUTH_USER_MODEL)
    account = user_model(pk=1, username="auth0user", is_active=True , email="dtravisany@dim.uchile.cl")
    account.save()
class Migration(migrations.Migration):
    # Seeds the auth0 user (create_data above) and widens sample.title
    # to 400 characters.

    dependencies = [
        ('cov2cl', '0008_snv_cdsseq'),
    ]
    operations = [
        migrations.RunPython(create_data),
        migrations.AlterField(
            model_name='sample',
            name='title',
            field=models.CharField(max_length=400),
        ),
    ]
|
from appium import webdriver
from appium.webdriver.common.touch_action import TouchAction
from selenium.webdriver.common.action_chains import ActionChains
from .config import desired_cap
from datetime import datetime
def InitDriver(logger, command_executor=""):
    """Create and return an Appium Remote driver.

    Falls back to the local Appium server when *command_executor* is empty;
    logs how long the splash screen took to come up.
    """
    executor = command_executor or "http://localhost:4723/wd/hub"
    driver = webdriver.Remote(
        command_executor=executor,
        desired_capabilities=desired_cap,
        keep_alive=True,
    )
    driver.implicitly_wait(30)
    started = datetime.now()
    logger.logging.info("---------------------------------splash screen invoked---------------------------------")
    logger.logging.info(datetime.now() - started)
    return driver
#switch to webview
#webview = driver.contexts.last
#driver.switch_to.context(webview)
|
from django.contrib import admin
from .models import (
PresentacionProducto,
Marca,
Producto,
Proveedor,
VisitaProveedor,
Compra,
Venta,
DetalleVenta,
Transaccion,
Lote,
Categoria
)
# Register every shop model with the default Django admin; the empty
# ModelAdmin subclasses accept the admin's default behaviour.
@admin.register(PresentacionProducto)
class PresentacionProductoAdmin(admin.ModelAdmin):
    pass
@admin.register(Marca)
class MarcaAdmin(admin.ModelAdmin):
    pass
@admin.register(Categoria)
class CategoriaAdmin(admin.ModelAdmin):
    pass
@admin.register(Producto)
class ProductoAdmin(admin.ModelAdmin):
    pass
@admin.register(Lote)
class LoteAdmin(admin.ModelAdmin):
    pass
@admin.register(Proveedor)
class ProveedorAdmin(admin.ModelAdmin):
    pass
@admin.register(VisitaProveedor)
class VisitaProveedorAdmin(admin.ModelAdmin):
    pass
@admin.register(Compra)
class CompraAdmin(admin.ModelAdmin):
    pass
@admin.register(Venta)
class VentaAdmin(admin.ModelAdmin):
    pass
@admin.register(DetalleVenta)
class DetalleVentaAdmin(admin.ModelAdmin):
    pass
@admin.register(Transaccion)
class TransaccionAdmin(admin.ModelAdmin):
    pass
from django.db import models
from djCell.apps.catalogos.models import Ciudad,CP,Colonia, Estado
from djCell.apps.ventas.models import Venta
from djCell.apps.clientes.models import ClienteFacturacion
from django.contrib.auth.models import User
from datetime import datetime, timedelta
class EstadoSubdistribuidor(models.Model):
    """Lookup table of sub-distributor statuses."""
    estado = models.CharField(max_length=80, unique=True)
    # NOTE(review): __unicode__ implies Python 2 / Django < 2 — confirm
    # before porting; Python 3 would need __str__.
    def __unicode__(self):
        return self.estado
    class Meta:
        ordering = ['estado']
class Subdistribuidor(models.Model):
    """A sub-distributor: a billing client with a credit limit and status."""
    cliente = models.ForeignKey(ClienteFacturacion)
    limCredito = models.DecimalField(max_digits=10,decimal_places=2)  # credit limit
    fxIngreso = models.DateTimeField(auto_now=True)  # registration date (auto-updated on save)
    edo = models.ForeignKey(EstadoSubdistribuidor)  # current status
    telefono = models.CharField(max_length=200, null=True, blank=True)
    def __str__(self):
        # shown as "razon social --- RFC"
        cliente ="%s --- %s"%(self.cliente.razonSocial,self.cliente.rfc)
        return cliente
    class Meta:
        ordering = ['-fxIngreso','cliente']
class EstadoCredito(models.Model):
    """Lookup table of credit statuses (paid, unpaid, late, ...)."""
    estado = models.CharField(max_length=80)
    def __unicode__(self):
        return self.estado
    class Meta:
        ordering = ['estado']
class Credito(models.Model):
    """A credit extended to a sub-distributor for a specific sale."""
    folioc = models.CharField(max_length=80)  # generated key: CR-AAMMDD-INT_Cred
    subdist = models.ForeignKey(Subdistribuidor)
    venta = models.ForeignKey(Venta, related_name='venta_a_credito')
    totalvta = models.DecimalField(max_digits=10,decimal_places=2)  # sale amount at credit time
    plazo = models.IntegerField()  # term (presumably days/months — confirm with callers)
    fxCredito = models.DateTimeField(auto_now=True)  # credit date
    edo = models.ForeignKey(EstadoCredito)  # paid / unpaid / late / on-time; default "Nuevo Credito"
    observacion = models.TextField(null=True, blank=True)
    def __unicode__(self):
        credSubdist ="%s - %s --- %s . %s . %s"%(self.subdist, self.venta.folioVenta, self.totalvta, self.fxCredito.strftime("%d-%m-%Y %X"), self.edo.estado)
        return credSubdist
    class Meta:
        ordering = ['-fxCredito','subdist','folioc']
class HistorialSubdistribuidor(models.Model):
    """Payment history: one row per installment paid toward a credit."""
    credito = models.ForeignKey(Credito)
    abono = models.DecimalField(max_digits=10,decimal_places=2)  # amount paid
    fxAbono = models.DateTimeField(auto_now=True)  # payment date
    def __unicode__(self):
        historial="%s %s %s"%(self.credito, self.abono, self.fxAbono.strftime("%d-%m-%Y %X"))
        return historial
    class Meta:
        ordering = ['-fxAbono','credito','abono']
class Solution:
def wordSquares(self, words: List[str]) -> List[List[str]]:
self.words = words
self.n = len(words[0])
self.buildPrefTable(self.words);
ans, wordSquares = [], []
for word in words:
wordSquares = [word]
self.backtrack(1, wordSquares, ans)
return ans
def buildPrefTable(self, words):
self.prefTable = {}
for word in words:
for prefix in (word[:i] for i in range(1, len(word))):
self.prefTable.setdefault(prefix, set()).add(word);
def backtrack(self, step, wordSquares, ans):
if step == self.n:
ans.append(wordSquares[:])
return
prefix = ''.join([word[step] for word in wordSquares])
for candidate in self.getWordsWithPrefix(prefix):
wordSquares.append(candidate)
self.backtrack(step + 1, wordSquares, ans)
wordSquares.pop()
def getWordsWithPrefix(self, prefix):
if prefix in self.prefTable:
return self.prefTable[prefix]
else:
return set([]) |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
import inspect
from peg import *
import grammar
# Composite PEG: optional leading spacing followed by a rule definition.
# NOTE(review): `*` here is presumably the peg library's overloaded
# sequence operator, not multiplication — confirm against the peg module.
Definition = grammar.Spacing* grammar.Definition
def Definition_action(yytext):
    # Evaluate only the last child (the Definition), discarding the spacing.
    return yytext.child[-1]()
Definition.action = Definition_action
class PEGerror(Exception):
    """Raised when a rule's docstring cannot be parsed as a PEG definition."""

    def __init__(self, msg):
        # Keep the message on a public attribute for callers to inspect.
        self.message = msg

    def __str__(self):
        return self.message
class PEGrule(object):
    ''' A decorator class used to decorate action functions.

    The decorated function's docstring holds the PEG rule; the function
    itself is the semantic action.  (See examples.)

    NOTE(review): uses Python-2-only features (func_doc, func_name,
    print statements below) — this module will not run under Python 3.
    '''
    def __init__(self, action):
        # The rule text is the action function's docstring.
        self.rule = action.func_doc
        if self.rule is None:
            self.rule = ''
        self.peg = Definition.match(self.rule)
        if self.peg is None:
            raise PEGerror('Invalid rule syntax in function ' + action.func_name)
        self.symbol, self.PEGobject = self.peg()
        self.action = action
        # Debug output left in by the author.
        print self.PEGobject
        print [type(x) for x in self.PEGobject.symbol]
    def __call__(self, *args, **kwargs):
        # Calling the decorated object invokes the original action.
        return self.action(*args, **kwargs)
    @property
    def symbolName(self):
        ''' Returns left part of the rule.
        '''
        return str(self.peg.child[1].child[0])
    @property
    def definition(self):
        ''' Returns right part of the rule.
        '''
        return self.peg.child[1].child[2]
    def __str__(self):
        return '%s <- %s' % (self.symbolName, self.definition)
    def match(self, inputSequence, pos = None):
        # Delegate matching to the compiled PEG object.
        return self.PEGobject.match(inputSequence, pos)
class Parser(object):
    ''' A PEG parser generator.

    Collects every PEGrule-decorated function defined in the *calling*
    module (via frame introspection) and treats the first one as the
    start symbol.
    '''
    def __init__(self):
        caller = inspect.currentframe().f_back
        symbols = {} # Left-rule symbols (a dict of lists)
        self.rules = [x[1] for x in
            inspect.getmembers(sys.modules[caller.f_globals['__name__']])
            if isinstance(x[1], PEGrule)]
        if not self.rules:
            raise PEGerror('No rules defined.')
        start = self.rules[0].symbolName # First symbol will be the start one
        for obj in self.rules:
            symbols[obj.symbolName] = symbols.get(obj.symbolName, []) + [obj]
        # NOTE(review): `start` and `symbols` are computed but never stored
        # on self — presumably intended for the unimplemented parse().
    def __str__(self):
        return '\n'.join(x.rule.strip() for x in self.rules)
    def parse(self, inputSequence):
        # Not implemented yet.
        pass
|
# encoding: utf-8
# author: wm-chen
# main_case.py
# 2021/3/10 11:08 上午
# desc:测试各个页面之间的跳转
import unittest
from actions.main_action import MainAction
from common.base_page import BasePage
from common.base_selenium import BaseSelenium
from common.config_utils import local_config
from common.test_data_utils import TestData
from actions.login_action import LoginAction
class MainCase(BaseSelenium):
    """Navigation smoke tests: each test logs in with the default account,
    clicks one main-menu entry via MainAction, and asserts the resulting
    page title matches the expected value from the test-data file.
    Each case can be switched off via its 'is_not' flag in the data."""
    # Test data for this module, keyed by test method name.
    testdata = TestData(local_config.get_test_case_path, 'main_test').get_test_data()
    @unittest.skipUnless(testdata['test_qa_page']['is_not'], '为否时不执行跳过')
    def test_qa_page(self):
        test_data = self.testdata['test_qa_page']
        self._testMethodDoc = test_data['test_name']
        login_action = LoginAction(self.base_page.driver).login_success_default()
        MainAction(login_action.driver).qa()
        self.assertEqual(self.base_page.get_title(), test_data['export'], '跳转测试页面失败')
    @unittest.skipUnless(testdata['test_my_page']['is_not'], '为否时不执行跳过')
    def test_my_page(self):
        test_data = self.testdata['test_my_page']
        self._testMethodDoc = test_data['test_name']
        login_action = LoginAction(self.base_page.driver).login_success_default()
        MainAction(login_action.driver).my()
        self.assertEqual(self.base_page.get_title(), test_data['export'], '跳转我的地盘页面失败')
    @unittest.skipUnless(testdata['test_product_page']['is_not'], '为否时不执行跳过')
    def test_product_page(self):
        test_data = self.testdata['test_product_page']
        self._testMethodDoc = test_data['test_name']
        login_action = LoginAction(self.base_page.driver).login_success_default()
        MainAction(login_action.driver).product()
        self.assertEqual(self.base_page.get_title(), test_data['export'], '跳转产品页面失败')
    @unittest.skipUnless(testdata['test_project_page']['is_not'], '为否时不执行跳过')
    def test_project_page(self):
        test_data = self.testdata['test_project_page']
        self._testMethodDoc = test_data['test_name']
        login_action = LoginAction(self.base_page.driver).login_success_default()
        MainAction(login_action.driver).project()
        self.assertEqual(self.base_page.get_title(), test_data['export'], '跳转项目页面失败')
    @unittest.skipUnless(testdata['test_doc_page']['is_not'], '为否时不执行跳过')
    def test_doc_page(self):
        test_data = self.testdata['test_doc_page']
        self._testMethodDoc = test_data['test_name']
        login_action = LoginAction(self.base_page.driver).login_success_default()
        MainAction(login_action.driver).doc()
        self.assertEqual(self.base_page.get_title(), test_data['export'], '跳转文档页面失败')
    @unittest.skipUnless(testdata['test_report_page']['is_not'], '为否时不执行跳过')
    def test_report_page(self):
        test_data = self.testdata['test_report_page']
        self._testMethodDoc = test_data['test_name']
        login_action = LoginAction(self.base_page.driver).login_success_default()
        MainAction(login_action.driver).report()
        self.assertEqual(self.base_page.get_title(), test_data['export'], '跳转统计页面失败')
    @unittest.skipUnless(testdata['test_company_page']['is_not'], '为否时不执行跳过')
    def test_company_page(self):
        test_data = self.testdata['test_company_page']
        self._testMethodDoc = test_data['test_name']
        login_action = LoginAction(self.base_page.driver).login_success_default()
        MainAction(login_action.driver).company()
        self.assertEqual(self.base_page.get_title(), test_data['export'], '跳转组织页面失败')
    @unittest.skipUnless(testdata['test_admin_page']['is_not'], '为否时不执行跳过')
    def test_admin_page(self):
        test_data = self.testdata['test_admin_page']
        self._testMethodDoc = test_data['test_name']
        login_action = LoginAction(self.base_page.driver).login_success_default()
        MainAction(login_action.driver).admin()
        self.assertEqual(self.base_page.get_title(), test_data['export'], '跳转后台页面失败')
    @unittest.skipUnless(testdata['test_search_bug']['is_not'], '为否时不执行跳过')
    def test_search_bug(self):
        test_data = self.testdata['test_search_bug']
        self._testMethodDoc = test_data['test_name']
        login_action = LoginAction(self.base_page.driver).login_success_default()
        MainAction(login_action.driver).search_bug(test_data['test_para']['index'], test_data['test_para']['text'])
        # Pass when test_data['export'] is contained in base_page.get_title()
        self.assertIn(test_data['export'], self.base_page.get_title(), '提交bug失败')
if __name__ == '__main__':
    # BUG FIX: `unittest.main` was never *called* (missing parentheses),
    # so running this file directly executed no tests at all.
    unittest.main()
from monopyly import *
import random
class GenerousDaddyAI(PlayerAIBase):
    '''
    An AI that plays like a dad (or at least, similarly to how
    I play when I'm playing with my children).
    - It initially buys any properties it can.
    - It builds houses when it has complete sets.
    - It makes favourable deals with other players.
    - It keeps a small reserve of cash.
    '''
    def __init__(self):
        '''
        The 'constructor'.
        '''
        # Cash floor the AI tries never to dip below.
        self.cash_reserve = 500

    def get_name(self):
        '''
        Returns the name shown for this AI.
        '''
        return "Generous Daddy"

    def landed_on_unowned_property(self, game_state, player, property):
        '''
        Called when we land on an unowned property. We always buy it if we
        can while keeping a small cash reserve.
        '''
        if player.state.cash > (self.cash_reserve + property.price):
            return PlayerAIBase.Action.BUY
        else:
            return PlayerAIBase.Action.DO_NOT_BUY

    def deal_proposed(self, game_state, player, deal_proposal):
        '''
        Called when a deal is proposed by another player.

        Only accepts deals asking for exactly one of our properties with
        nothing offered in exchange, at above the original selling price.
        '''
        # We only accept deals for single properties wanted from us...
        if len(deal_proposal.properties_offered) > 0:
            return DealResponse(DealResponse.Action.REJECT)
        # BUG FIX: the original only rejected `> 1`, so an empty
        # properties_wanted list fell through and crashed on [0] below.
        if len(deal_proposal.properties_wanted) != 1:
            return DealResponse(DealResponse.Action.REJECT)
        # We'll accept as long as the price offered is greater than
        # the original selling price...
        property = deal_proposal.properties_wanted[0]
        return DealResponse(
            action=DealResponse.Action.ACCEPT,
            minimum_cash_wanted=property.price+1)

    def build_houses(self, game_state, player):
        '''
        Gives us the opportunity to build houses.

        Builds one house on every property of the first buildable set we
        can afford while keeping the cash reserve; returns [] otherwise.
        '''
        # We find the first set we own that we can build on...
        for owned_set in player.state.owned_unmortgaged_sets:
            # We can't build on stations or utilities, or if the
            # set already has hotels on all the properties...
            if not owned_set.can_build_houses:
                continue
            # We see how much money we need for one house on each property...
            cost = owned_set.house_price * owned_set.number_of_properties
            if player.state.cash > (self.cash_reserve + cost):
                # We build one house on each property...
                return [(p, 1) for p in owned_set.properties]
        # We can't build...
        return []

    def property_offered_for_auction(self, game_state, player, property):
        '''
        We offer the face value plus or minus a random amount.
        '''
        return property.price + random.randint(-50, 50)
|
# Demonstrates list.append and list.extend.

# append: add a single element to the end, in place.
l = [1, 2, 3]
a = 10
l.append(a)
print(l)

# extend: concatenate two lists into a fresh third one.
l1 = list(range(1, 11))
l2 = list(range(11, 21))
l3 = []
for chunk in (l1, l2):
    l3.extend(chunk)
print(l3)
|
"""
simple POC server for:
- receive face images, timestamp from Pis
- classify faces into old customer/new customer/employee/...
- facial analysis: age, gender analysis for new customer
- receive timely heatmap update from Nanos
"""
import os
import time
from datetime import datetime
import json
import base64
import io
import logging
import configparser
import cv2
import numpy as np
from bson import ObjectId
from pymongo import MongoClient
from sys import argv
from http.server import BaseHTTPRequestHandler, HTTPServer
class S(BaseHTTPRequestHandler):
    """POC HTTP handler: accepts base64 face crops POSTed by edge devices,
    writes them to disk, and answers CORS preflight requests."""

    def __init__(self, *args, **kwargs):
        # Config is loaded before super().__init__ because the base class
        # handles the request inside __init__, and the do_* methods below
        # already need these attributes.
        self.config = configparser.ConfigParser()
        self.config.read("config.ini")
        self.customer_img_dir = self.config["IMG_DIR"]["Customer"]
        self.employee_img_dir = self.config["IMG_DIR"]["Employee"]
        self.face_logging = self.config["SERVICE"]['Face_logging']
        super().__init__(*args, **kwargs)

    def do_OPTIONS(self):
        """Answer a CORS preflight with permissive headers and a JSON body."""
        self.send_response(200, "ok")
        self.send_header('Access-Control-Allow-Origin', '*')
        self.send_header("Access-Control-Allow-Credentials", "true")
        self.send_header('Access-Control-Allow-Methods',
                         'GET,PUT,POST,DELETE,PATCH,OPTIONS')
        # BUG FIX: "Authorizaion" was misspelled, so browsers would not
        # allow an Authorization header on the actual request.
        self.send_header("Access-Control-Allow-Headers",
                         "Content-Range, Content-Disposition, Authorization, Access-Control-Allow-Headers, Origin, Accept, X-Requested-With, Content-Type, Access-Control-Request-Method, Access-Control-Request-Headers")
        self.end_headers()
        response = {
            'error_code': 0
        }
        response_js = json.dumps(response)
        self.wfile.write(response_js.encode('utf-8'))

    def _set_response(self):
        """Send a 200 with CORS-friendly headers (body written by caller)."""
        self.send_response(200)
        self.send_header('Content-type', 'text/html')
        self.send_header('Access-Control-Allow-Origin', '*')
        self.end_headers()

    def loadLogData(self):
        """Return the MongoDB log collection configured in config.ini.

        NOTE: opens a new MongoClient per call — acceptable for a POC,
        should be pooled/shared for production use.
        """
        url = self.config["MONGO"]['Url']
        port = int(self.config["MONGO"]['Port'])
        db_name = self.config["MONGO"]['Database']
        col_name = self.config["MONGO"]['LogCollection']
        client = MongoClient(url, port)
        db = client[db_name]
        collection = db[col_name]
        return collection

    def face_analysis(self, data_dict):
        """Decode each person's base64 face crops and store them as JPEGs
        in customer_img_dir, then reply with a JSON success body.

        *data_dict* is a list of {'faces': [base64, ...]} entries.
        """
        now = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
        for i, person in enumerate(data_dict):
            faces = person['faces']
            for j, face in enumerate(faces):
                face_bin = base64.b64decode(face)
                # BUG FIX: np.fromstring is deprecated (removed in NumPy 2);
                # np.frombuffer reads the bytes directly, so the BytesIO
                # round-trip was dropped too.
                face_cv = cv2.imdecode(np.frombuffer(face_bin, np.uint8), 1)
                img_name = "{}_{}_{}{}".format(now, i, j, ".jpg")
                # filenames must not contain '-', ':' or spaces
                img_name = img_name.replace("-", "_")
                img_name = img_name.replace(":", "_")
                img_name = img_name.replace(" ", "_")
                img_path = os.path.join(self.customer_img_dir, img_name)
                cv2.imwrite(img_path, face_cv)
        self._set_response()
        response = {
            'error_code': 0
        }
        response_js = json.dumps(response)
        self.wfile.write(response_js.encode('utf-8'))
        return 0

    def do_GET(self):
        """Debug endpoint: log the request and echo the path back."""
        logging.info("GET request,\nPath: %s\nHeaders:\n%s\n",
                     str(self.path), str(self.headers))
        self._set_response()
        self.wfile.write("GET request for {}".format(
            self.path).encode('utf-8'))

    def do_POST(self):
        """Route JSON POST bodies; face uploads go to face_analysis."""
        content_length = int(self.headers['Content-Length'])
        post_data = self.rfile.read(content_length)
        data_dict = json.loads(post_data.decode('utf-8'))
        print(content_length)
        if self.path == self.face_logging:
            self.face_analysis(data_dict)
def run(server_class=HTTPServer, handler_class=S, port=9999):
    """Serve *handler_class* on all interfaces at *port* until Ctrl-C."""
    logging.basicConfig(level=logging.INFO)
    server_address = ('', port)
    print(server_address)
    httpd = server_class(server_address, handler_class)
    logging.info('Starting httpd...\n')
    try:
        httpd.serve_forever()
    except KeyboardInterrupt:
        # Ctrl-C is the normal way to stop the POC server.
        pass
    httpd.server_close()
    logging.info('Stopping httpd...\n')
if __name__ == '__main__':
    # Optional CLI argument overrides the default port 9999.
    if len(argv) == 2:
        run(port=int(argv[1]))
    else:
        run()
|
import pygame
from os import path
from src.Utils import randomVector, randomDirection
class Animal:
    """A sprite that drifts across the screen and bounces off the edges."""

    def __init__(self, obj_name, screen, size, height, width, bound=0):
        # Load and scale the image first; its size feeds the random spawn.
        self.size = self.loadImg(obj_name)
        self.screen = screen
        self.pX, self.pY, self.direction = randomVector(
            width, height, self.size, bound)
        self.width = width
        self.height = height
        self.bound = bound
        # Per-axis speed (sign gives direction).
        self.sX, self.sY = randomDirection()
        self.die = False

    def loadImg(self, obj_name):
        """Load *obj_name* from the assets folder, scale it to 1/5 size,
        store it on self.obj and return the scaled (width, height)."""
        img = pygame.image.load(path.join("./assets/images/", obj_name))
        scaled = (int(img.get_width()/5), int(img.get_height()/5))
        self.obj = pygame.transform.scale(img, scaled)
        return scaled

    def placeObj(self, x, y):
        """Blit the sprite at (x, y)."""
        self.screen.blit(self.obj, (x, y))

    def rePosition(self, obj_name):
        """Swap in a new image and respawn at a fresh random position."""
        self.size = self.loadImg(obj_name)
        self.pX, self.pY, self.direction = randomVector(
            self.width, self.height, self.size, self.bound)
        self.sX, self.sY = randomDirection()

    def updatePosition(self):
        """Draw the sprite, advance it one step, and bounce at the edges."""
        self.placeObj(self.pX, self.pY)
        self.pX += self.sX
        self.pY += self.sY
        # left/right wall: flip horizontal speed
        if self.pX + self.size[0] > self.width + self.bound or self.pX < self.bound:
            self.sX = -self.sX
        # top/bottom wall: flip vertical speed
        if self.pY < 0 or self.pY + self.size[1] > self.height:
            self.sY = -self.sY
|
#!/usr/bin/env python
import argutil
env = {}  # extra names exposed to the argutil definition; none needed here
# argutil builds the ArgumentParser from a definition file located via
# this script's path — presumably a sibling .json file; confirm with the
# argutil docs.
parser = argutil.get_parser(__file__, env)
opts = parser.parse_args()
|
from fireo.fields import TextField, ReferenceField
from fireo.models import Model
def test_fix_issue_78():
    """Regression test for fireo issue #78: filtering by a reference
    field's key must return the referencing documents."""
    class CompanyIssue78(Model):
        name = TextField()

    class EmployeeIssue78(Model):
        name = TextField()
        company = ReferenceField(CompanyIssue78, auto_load=False)

    company = CompanyIssue78(name="Abc_company")
    company.save()
    company_key = company.key

    employee = EmployeeIssue78()
    employee.name = 'Employee Name'
    employee.company = company
    employee.save()

    matches = EmployeeIssue78.collection.filter(company=company_key).fetch()
    found = 0
    for match in matches:
        found = 1
        assert match.key is not None
    assert found != 0
from django.db import models
from .Consumer import Consumer
class Complaint(models.Model):
    """A consumer complaint stored in the COMPLAINTS table."""

    consumer = models.ForeignKey(Consumer, on_delete=models.CASCADE)
    complaint_data = models.TextField(max_length=150)
    is_resolved = models.BooleanField(default=False, null=True)

    class Meta:
        db_table = 'COMPLAINTS'

    @staticmethod
    def get_all_complaints():
        """Return a queryset of every complaint."""
        return Complaint.objects.all()

    @staticmethod
    def delete_by_id(id_):
        """Delete the complaint with primary key *id_* (was missing @staticmethod)."""
        return Complaint.objects.filter(id=id_).delete()

    def register(self):
        """Persist this complaint instance."""
        self.save()

    @staticmethod
    def get_complaint_by_complaint_id(c_id):
        """Return the complaint with id *c_id*, or False when not found.

        Narrowed from a bare except: only a missing record or an invalid
        id value maps to False; other errors now propagate.
        """
        try:
            return Complaint.objects.get(id=c_id)
        except (Complaint.DoesNotExist, ValueError):
            return False

    @staticmethod
    def get_complaint_by_consumer(consumer):
        """Return all complaints filed by *consumer* (was missing @staticmethod)."""
        return Complaint.objects.filter(consumer=consumer)
|
# -*- coding:utf-8 -*-
from API_Automation.Common import dbselect
import pytest
def test_getuser(db):
    """The test database is expected to contain exactly 13 users."""
    user_rows = dbselect.select_user(db)
    print(len(user_rows))
    assert len(user_rows) == 13
|
from flask import Flask
from flask_ask import Ask, statement, question, session
import jsonify
import requests
import time
import unidecode
from alexa_emails import alexa_mail
app = Flask(__name__)
# Mount the Alexa skill endpoint at /mailexa.
ask = Ask(app, "/mailexa")
# A 'hello world' message in the / route - just for test
@app.route("/")
def hello():
    """Plain liveness check on the root route."""
    return "Hello, World!"
# the welcome message said by alexa when the application is called
@ask.launch
def start_skill():
    """Launch handler: greet the user and keep the session open for a reply."""
    welcome_message = "Hello there, I am here to help you send emails easily .."
    # question() (unlike statement()) waits for the user's next utterance.
    return question(welcome_message)
# if the intent 'remainder' is detected, send an appropriate email
@ask.intent('remainder', mapping={'name': 'name'})
def send_remainder(name):
    """Handle the 'remainder' intent: email a reminder to *name*.

    Returns a spoken statement with the mail result or an error message.
    """
    try:
        answer = alexa_mail(name, 'remainder')
    except Exception:  # narrowed from a bare except; still reply with a spoken error
        answer = "Error !"
    return statement(answer)
# if the intent 'assistance' is detected, send an appropriate email
@ask.intent('assistance', mapping={'name': 'name'})
def send_assistance(name):
    """Handle the 'assistance' intent: email an assistance request for *name*.

    Renamed from a duplicate `send_remainder` definition that shadowed the
    'remainder' handler; flask_ask dispatches by intent name, not function name.
    """
    try:
        answer = alexa_mail(name, 'assistance')
    except Exception:  # narrowed from a bare except
        answer = "Error !"
    return statement(answer)
# if the intent 'absence' is detected, send an appropriate email
@ask.intent('absence', mapping={'name': 'name'})
def send_absence(name):
    """Handle the 'absence' intent: email an absence notice for *name*.

    Renamed from a duplicate `send_remainder` definition that shadowed the
    other handlers; flask_ask dispatches by intent name, not function name.
    """
    try:
        answer = alexa_mail(name, 'absence')
    except Exception:  # narrowed from a bare except
        answer = "Error !"
    return statement(answer)
if __name__ == "__main__":
    # Run the Flask development server (debug mode; not for production use).
    app.run(debug=True)
|
# Fahrenheit -> Celsius conversion.
# Derivation: c/5 = (f-32)/9, so c = (f-32) * 5 / 9
fahrenheit = float(input("Input the temp in F: "))
print("Temp in C is ", (fahrenheit - 32) * 5 / 9)
"""
This file contains user configured variables for the example code
"""
# User Configuration
dbaas_manager_ip = "dbaas900.hyperp-dbaas.cloud.ibm.com" # Dallas DBaaS DBaaSManager
port = "20000"
cluster_guid = "your_cluster_id"
api_key = "your_ibmcloud_api_key"
path_to_cert = "local/path/to/cert.pem"
|
# Third-party packages.
import numpy as np
import torch
import xgboost as xgb
from hyperopt import STATUS_OK, Trials, fmin, hp, tpe
from sklearn.metrics import log_loss
def load_tensors_data_fn(tensors_path):
    """Load feature/label data from a file saved with torch.save.

    The loaded object is sliced as a 2-D array: every column but the last
    is a feature, the last column is the integer label.
    NOTE(review): `.copy()` and `.astype()` are numpy methods - this assumes
    the saved object behaves like a numpy array, not a torch.Tensor; confirm.

    Args:
        tensors_path (str): a full filename path.
    Returns:
        X (array): a numpy array of features.
        y (array): a numpy array of labels.
    """
    data = torch.load(tensors_path)
    features = data[:, :-1].copy()
    labels = data[:, -1].copy().astype(int)
    return (features, labels)
def optimize_model_fn(X_train, X_valid, y_train, y_valid, max_evals=10):
    """Search xgboost hyperparameters with hyperopt's TPE algorithm.

    Args:
        X_train, y_train: training features and labels.
        X_valid, y_valid: validation features and labels.
        max_evals (int): number of hyperopt trials to run.
    Returns:
        dict: the best hyperparameter values found by fmin.
    """
    def score_model_fn(params):
        """Train one model with *params*; return its validation log-loss.

        Returns:
            dict: {'loss': ..., 'status': STATUS_OK} as hyperopt requires.
        """
        # Copy so hyperopt's sampled params dict is not mutated by the pop.
        params = dict(params)
        num_round = int(params.pop('n_estimators'))
        dtrain = xgb.DMatrix(X_train, label=y_train)
        dvalid = xgb.DMatrix(X_valid, label=y_valid)
        model = xgb.train(params, dtrain, num_round)
        # multi:softprob returns flat probabilities; reshape to (n, num_class).
        predictions = model.predict(dvalid).reshape((X_valid.shape[0], 5))
        score = log_loss(y_valid, predictions)
        return {'loss': score, 'status': STATUS_OK}

    params_space = {'n_estimators' : hp.quniform('n_estimators', 100, 1000, 1),
            'eta' : hp.quniform('eta', 0.025, 0.5, 0.025),
            'max_depth' : hp.choice('max_depth', np.arange(1, 14, dtype=int)),
            'min_child_weight' : hp.quniform('min_child_weight', 1, 6, 1),
            'subsample' : hp.quniform('subsample', 0.5, 1, 0.05),
            'gamma' : hp.quniform('gamma', 0.5, 1, 0.05),
            'colsample_bytree' : hp.quniform('colsample_bytree', 0.5, 1, 0.05),
            'num_class' : 5,
            'eval_metric': 'mlogloss',
            'objective': 'multi:softprob',
            'nthread' : 6,
            'verbosity' : 1}
    best_params = fmin(score_model_fn,
                       params_space,
                       algo=tpe.suggest,
                       trials=Trials(),
                       max_evals=max_evals)
    return best_params
import zipfile
import re
# Follow the chain of files inside channel.zip: each file names the next one,
# collecting each member's zip comment along the way.
comments = []
filename = '90052.txt'
with zipfile.ZipFile('channel.zip') as archive:
    count = len(archive.infolist()) - 1
    for _ in range(count):
        comments.append(archive.getinfo(filename).comment.decode("utf-8"))
        data = archive.read(filename).decode("utf-8")
        # Fix: regex patterns are now raw strings ('\d' is an invalid
        # escape in a plain string literal and deprecated).
        match = re.search(r'\d+', data)
        if match:
            nothing = match.group()
        else:
            # End of the chain: the file no longer points to a number.
            print('Else: ', data)
        filename = nothing + '.txt'
print("".join(comments))
|
import jwt
import json
from functools import wraps
from django.utils import timezone
from django.http import JsonResponse
from user.models import User
from local_settings import SECRET_KEY, ALGORITHM
def id_auth(func):
    """Decorator for view methods: authenticate the request via a JWT.

    Reads the Authorization header, decodes it with SECRET_KEY/ALGORITHM,
    loads the matching User onto request.user, then calls the view.
    Expired/invalid tokens and unknown users return a 400 JsonResponse.
    """
    @wraps(func)
    def wrapper(self, request, *args, **kwargs):
        try:
            token = request.headers.get('Authorization')
            # Accept both "Bearer <token>" and a bare token.
            token = token.split(' ')[-1]
            payload = jwt.decode(token, SECRET_KEY, algorithms=ALGORITHM)
            request.user = User.objects.get(id=payload['id'])
            return func(self, request, *args, **kwargs)
        except jwt.ExpiredSignatureError:
            return JsonResponse({"message": "EXPIRED_TOKEN"}, status=400)
        except jwt.exceptions.DecodeError:
            return JsonResponse({"message": "INVALID_TOKEN"}, status=400)
        except User.DoesNotExist:
            return JsonResponse({"message": "INVALID_USER"}, status=400)
    return wrapper
|
from unittest import TestCase
from core import Core
from os import path
from datetime import date
# Directory of saved HTML pages used as test inputs.
fixtures = path.join(path.dirname(__file__), "test_fixtures")
class TestCore(TestCase):
    def test_weekday(self):
        "On a regular weekday we expect a normal menu"
        weekday = date(2018, 4, 20)
        # Expected Slack-formatted menu (Norwegian cafeteria names/dishes).
        expected = """*MH-kafeen* (stenger *16:00* i dag) serverer:
>*Lunsj*
>• *35,-* Dagens suppe med focaccia
>• *41,-* Risgrøt med saft
>• *52,-* MHs go' blanding
>• *55,-* Rømmegrøt med saft
>*Middag*
>• *74,-* Nachosform med stæsj
*Teorifagskafeen* (stenger *17:00* i dag) serverer:
>*Ukategorisert*
>• *41,-* Fredagsgrøt (Risengrynsgrøt med sukker, kanel, smør og saft.)
>• *43,-* Dagens suppe (Dagens hjemmelagede gluten og laktosefrie suppe)
>*Middag*
>• Garasjesalg
"""
        # NOTE(review): fixture file handles are opened without being closed.
        weekday_menu = Core(
            source=open(path.join(fixtures, "weekday.html")),
            date=weekday
        )
        self.assertEqual(expected, weekday_menu.response())
    def test_weekend(self):
        "On the weekends everything is closed, expect an empty response"
        weekend = date(2018, 4, 21)
        expected = "The cafeterias are closed today."
        weekend_menu = Core(
            source=open(path.join(fixtures, "weekend.html")),
            date=weekend
        )
        self.assertEqual(expected, weekend_menu.response())
    def test_triggers(self):
        "Ensure that plugin responds to the correct keywords"
        # Both Norwegian and English lunch/dinner keywords should trigger.
        self.assertTrue(Core.can_respond_to("lunsj pls"))
        self.assertTrue(Core.can_respond_to("I want some lunch"))
        self.assertTrue(Core.can_respond_to("hva er til middag?"))
        self.assertTrue(Core.can_respond_to("din din dinner me up"))
|
"""
This exercise stub and the test suite contain several enumerated constants.
Since Python 2 does not have the enum module, the idiomatic way to write
enumerated constants has traditionally been a NAME assigned to an arbitrary,
but unique value. An integer is traditionally used because it’s memory
efficient.
It is a common practice to export both constants and functions that work with
those constants (ex. the constants in the os, subprocess and re modules).
You can learn more here: https://en.wikipedia.org/wiki/Enumerated_type
"""
# Score categories.
YACHT = "yacht"
ONES = "ones"
TWOS = "twos"
THREES = "threes"
FOURS = "fours"
FIVES = "fives"
SIXES = "sixes"
FULL_HOUSE = "full_house"
FOUR_OF_A_KIND = "four_of_a_kind"
LITTLE_STRAIGHT = "little_straight"
BIG_STRAIGHT = "big_straight"
CHOICE = "choice"
# Number categories score (face value) * (count of dice showing that face).
_NUMBER_CATEGORIES = {ONES: 1, TWOS: 2, THREES: 3, FOURS: 4, FIVES: 5, SIXES: 6}
def score(dice, category):
    """Return the Yacht score of *dice* (five ints, 1-6) for *category*.

    Implements the standard Yacht rules; unmatched combinations score 0.
    (Previously an unimplemented `pass` stub.)
    """
    if category == YACHT:
        # All five dice identical scores a flat 50.
        return 50 if len(set(dice)) == 1 else 0
    if category in _NUMBER_CATEGORIES:
        face = _NUMBER_CATEGORIES[category]
        return face * dice.count(face)
    if category == FULL_HOUSE:
        # Exactly three of one face and two of another; five-of-a-kind is not
        # a full house. Scores the sum of all dice.
        tallies = sorted(dice.count(face) for face in set(dice))
        return sum(dice) if tallies == [2, 3] else 0
    if category == FOUR_OF_A_KIND:
        # At least four alike scores four times that face (yacht qualifies).
        for face in set(dice):
            if dice.count(face) >= 4:
                return 4 * face
        return 0
    if category == LITTLE_STRAIGHT:
        return 30 if sorted(dice) == [1, 2, 3, 4, 5] else 0
    if category == BIG_STRAIGHT:
        return 30 if sorted(dice) == [2, 3, 4, 5, 6] else 0
    if category == CHOICE:
        return sum(dice)
    return 0
|
import json
import os
import re
import sqlite3
from collections import Counter
from typing import Set, Collection
import pandas as pd
from traceutils.file2 import fopen2, fopen
from traceutils.progress import Progress
from traceutils.radix.ip2as import IP2AS
from bdrmapit.algorithm.algorithm import Bdrmapit
from bdrmapit.algorithm.updates_dict import Updates, UpdateObj
from bdrmapit.graph.node import Interface, Router
from scripts.traceparser import ParseResults
class Save:
    """Persist bdrmapit annotation results into a sqlite database."""

    def __init__(self, filename, bdrmapit: Bdrmapit, rupdates: Updates = None, iupdates: Updates = None, replace=True):
        """Open (and, when replace/new, recreate) the sqlite file with the schema.

        rupdates/iupdates default to the bdrmapit instance's own update maps.
        """
        self.filename = filename
        self.bdrmapit = bdrmapit
        self.rupdates = rupdates if rupdates is not None else bdrmapit.rupdates
        self.iupdates = iupdates if iupdates is not None else bdrmapit.iupdates
        exists = os.path.exists(filename)
        if not exists or replace:
            if exists:
                os.remove(filename)
            # The schema script lives in tables.sql next to this module.
            dir_path = os.path.dirname(os.path.realpath(__file__))
            with open(os.path.join(dir_path, 'tables.sql')) as f:
                script = f.read()
            con = sqlite3.connect(filename)
            cur = con.cursor()
            cur.executescript(script)
            con.commit()
            con.close()

    def save_annotations(self):
        """Write one 'annotation' row per interface, batched 100k rows at a time."""
        interface: Interface
        values = []
        pb = Progress(len(self.bdrmapit.graph.interfaces), 'Writing annotations', increment=100000)
        con = sqlite3.connect(self.filename)
        cur = con.cursor()
        for interface in pb.iterator(self.bdrmapit.graph.interfaces.values()):
            addr = interface.addr
            router: Router = interface.router
            rupdate: UpdateObj = self.rupdates[router]
            iupdate: UpdateObj = self.iupdates[interface]
            if rupdate is None:
                rasn = -1
                rorg = -1
                rtype = -1
            else:
                rasn = rupdate.asn
                rorg = rupdate.org
                rtype = rupdate.utype
            # Fall back to the interface's own origin AS when there is no
            # interface update or its org disagrees with the router's org.
            if iupdate is None or interface.org != rorg:
                iasn = interface.asn
                iorg = interface.org
                itype = -1 if iupdate is None else 0
            else:
                iasn = iupdate.asn
                iorg = iupdate.org
                itype = iupdate.utype
            phop = bool(interface.pred)
            row = {'addr': addr, 'router': router.name, 'asn': rasn, 'org': rorg, 'conn_asn': iasn, 'conn_org': iorg, 'echo': False, 'nexthop': router.nexthop, 'phop': phop, 'rtype': rtype, 'itype': itype, 'iasn': interface.asn}
            values.append(row)
            if len(values) > 100000:
                cur.executemany('INSERT INTO annotation (addr, router, asn, org, conn_asn, conn_org, echo, nexthop, phop, rtype, itype, iasn) VALUES (:addr, :router, :asn, :org, :conn_asn, :conn_org, :echo, :nexthop, :phop, :rtype, :itype, :iasn)', values)
                con.commit()
                values.clear()
        if values:
            cur.executemany('INSERT INTO annotation (addr, router, asn, org, conn_asn, conn_org, echo, nexthop, phop, rtype, itype, iasn) VALUES (:addr, :router, :asn, :org, :conn_asn, :conn_org, :echo, :nexthop, :phop, :rtype, :itype, :iasn)', values)
            con.commit()
        cur.close()
        con.close()

    def save_echos(self, echos, ip2as, as2org):
        """Write annotation rows for echo-only addresses (router == addr)."""
        interface: Interface
        values = []
        con = sqlite3.connect(self.filename)
        cur = con.cursor()
        pb = Progress(len(echos), 'Writing echos', increment=100000)
        for addr in pb.iterator(echos):
            # Echo-only addresses are annotated by their origin AS directly.
            rasn = iasn = ip2as[addr]
            rorg = iorg = as2org[rasn]
            rtype = 0
            itype = 0
            row = {'addr': addr, 'router': addr, 'asn': rasn, 'org': rorg, 'conn_asn': iasn, 'conn_org': iorg, 'echo': True, 'rtype': rtype, 'itype': itype}
            values.append(row)
            if len(values) > 100000:
                cur.executemany(
                    'INSERT INTO annotation (addr, router, asn, org, conn_asn, conn_org, echo, rtype, itype) VALUES (:addr, :router, :asn, :org, :conn_asn, :conn_org, :echo, :rtype, :itype)',
                    values)
                con.commit()
                values.clear()
        if values:
            cur.executemany('INSERT INTO annotation (addr, router, asn, org, conn_asn, conn_org, echo, rtype, itype) VALUES (:addr, :router, :asn, :org, :conn_asn, :conn_org, :echo, :rtype, :itype)', values)
            con.commit()
        cur.close()
        con.close()

    def save_ixps(self):
        """Write one 'ixp' row per IXP successor interface (asn <= -100)."""
        values = []
        for router in self.bdrmapit.routers_succ:
            conn_asn = self.rupdates[router].asn
            conn_org = self.bdrmapit.as2org[conn_asn]
            for isucc in router.succ:
                if isucc.asn <= -100:
                    # IXP interfaces encode the peering ID as -(pid + 100).
                    pid = (isucc.asn * -1) - 100
                    rsucc = isucc.router
                    asn = self.rupdates[rsucc].asn
                    org = self.bdrmapit.as2org[asn]
                    value = {'addr': isucc.addr, 'router': router.name, 'asn': asn, 'org': org, 'conn_asn': conn_asn, 'conn_org': conn_org, 'pid': pid, 'nexthop': router.nexthop}
                    values.append(value)
        con = sqlite3.connect(self.filename)
        cur = con.cursor()
        cur.executemany('INSERT INTO ixp (addr, router, asn, org, conn_asn, conn_org, pid, nexthop) VALUES (:addr, :router, :asn, :org, :conn_asn, :conn_org, :pid, :nexthop)', values)
        con.commit()
        con.close()

    def save_links(self):
        """Write one 'link' row per inter-org predecessor/successor pair."""
        values = []
        for isucc in self.bdrmapit.interfaces_pred:
            rsucc = isucc.router
            asn = self.rupdates[rsucc].asn
            org = self.bdrmapit.as2org[asn]
            ixp = isucc.asn <= -100
            for router in isucc.pred:
                conn_asn = self.rupdates[router].asn
                conn_org = self.bdrmapit.as2org[conn_asn]
                # Only cross-organization adjacencies are links.
                if conn_org != org:
                    value = {'addr': isucc.addr, 'router': router.name, 'asn': asn, 'org': org, 'conn_asn': conn_asn, 'conn_org': conn_org, 'ixp': ixp}
                    values.append(value)
        con = sqlite3.connect(self.filename)
        cur = con.cursor()
        cur.executemany('INSERT INTO link (addr, router, asn, org, conn_asn, conn_org, ixp) VALUES (:addr, :router, :asn, :org, :conn_asn, :conn_org, :ixp)', values)
        con.commit()
        con.close()

    def save_caches(self):
        """Write one 'cache' row per cached inter-org interface update."""
        values = []
        for isucc, iupdate in self.bdrmapit.caches.items():
            rsucc = isucc.router
            asn = self.rupdates[rsucc].asn
            org = self.bdrmapit.as2org[asn]
            ixp = isucc.asn <= -100
            conn_asn = iupdate.asn
            conn_org = iupdate.org
            if conn_org != org:
                value = {'addr': isucc.addr, 'router': rsucc.name, 'asn': asn, 'org': org, 'conn_asn': conn_asn, 'conn_org': conn_org, 'ixp': ixp}
                values.append(value)
        con = sqlite3.connect(self.filename)
        cur = con.cursor()
        cur.executemany('INSERT INTO cache (addr, router, asn, org, conn_asn, conn_org, ixp) VALUES (:addr, :router, :asn, :org, :conn_asn, :conn_org, :ixp)', values)
        con.commit()
        con.close()

    def extras(self, parseres: ParseResults, ip2as: IP2AS):
        """Record addresses excluded from the graph (loop-only and echo-only).

        Bug fix: each row now includes the 'org' key required by the :org
        named placeholder in the INSERT statement; previously executemany
        raised because that binding was never supplied.
        """
        values = []
        loops = set()
        for addrs in parseres.loopadjs:
            for addr in addrs:
                if addr not in self.bdrmapit.graph.interfaces:
                    loops.add(addr)
                    asn = ip2as[addr]
                    org = self.bdrmapit.as2org[asn]
                    row = {'addr': addr, 'asn': asn, 'org': org, 'reason': 'loop'}
                    values.append(row)
        echos = set()
        for addr in parseres.echos:
            if addr not in loops and addr not in self.bdrmapit.graph.interfaces:
                echos.add(addr)
                asn = ip2as[addr]
                org = self.bdrmapit.as2org[asn]
                row = {'addr': addr, 'asn': asn, 'org': org, 'reason': 'echo'}
                values.append(row)
        con = sqlite3.connect(self.filename)
        cur = con.cursor()
        cur.executemany('INSERT INTO excluded (addr, asn, org, reason) VALUES (:addr, :asn, :org, :reason)', values)
        con.commit()
        con.close()

    def save_node_as(self, filename, include_all=False):
        """Write node.AS lines for nameless ('N…') routers with a method tag.

        NOTE(review): `include_all` is currently unused, and this method reads
        self.bdrmapit.rupdates rather than self.rupdates - confirm intent.
        """
        with fopen(filename, 'wt') as f:
            for router in self.bdrmapit.graph.routers.values():
                if router.name[0] == 'N':
                    update = self.bdrmapit.rupdates[router]
                    if update.asn <= 0:
                        continue
                    if update.utype == 1:
                        method = 'interfaces'
                    elif update.utype < 10:
                        method = 'last_hop'
                    else:
                        method = 'refinement'
                    f.write('node.AS {}: {} {}\n'.format(router.name, update.asn, method))
def isregexasn(utype):
    """True when the full 0xff00 'regex ASN' flag byte is set in *utype*."""
    mask = 0xff00
    return (utype & mask) == mask
def isregexorg(utype):
    """True when the 0xfe00 'regex org' flag bits are all set in *utype*.

    Bug fix: `==` binds tighter than `&` in Python, so the original
    `utype & 0xfe00 == 0xfe00` evaluated as `utype & True` (i.e. the low
    bit of utype). Parenthesized to match the sibling isregexasn().
    """
    return (utype & 0xfe00) == 0xfe00
def regexreasons(utype):
    """Decode the reason bits of a regex-derived utype into label strings."""
    # Exactly 0xff00 means the regex matched but carried no reason bits.
    reasons = ['noinfo'] if utype == 0xff00 else []
    for bit, label in ((0x0001, 'origin'),
                       (0x0002, 'subsequent'),
                       (0x0004, 'dest'),
                       (0x0008, 'provider')):
        if utype & bit:
            reasons.append(label)
    if not reasons:
        # Unexpected value: no recognized bits were set.
        print(utype)
    return reasons
class ITDK:
    """Write bdrmapit router annotations in ITDK-style output formats."""
    def __init__(self, bdrmapit: Bdrmapit, rupdates: Updates = None):
        self.bdrmapit = bdrmapit
        # Fall back to the bdrmapit instance's own router updates.
        self.rupdates = rupdates if rupdates is not None else bdrmapit.rupdates
    def default_reason(self, router, update: UpdateObj):
        # Explain where a router's AS annotation came from, in priority order.
        if update.asn <= 0:
            return 'unknown'
        elif router.hints and update.asn in router.hints:
            return 'as-hints'
        elif router.succ:
            return 'refinement'
        elif router.dests:
            return 'lasthop'
        else:
            return 'origins'
    def hint_reason(self, router, update: UpdateObj):
        # Reason string for hint-derived annotations: join the regex reason
        # labels when the utype carries regex asn/org flag bits.
        utype = update.utype
        if isregexasn(utype):
            reasons = regexreasons(utype)
            reason = '|'.join(reasons)
        elif isregexorg(utype):
            reasons = regexreasons(utype)
            reason = '|'.join(reasons)
        else:
            reason = 'normal'
        return reason
    def write_nodes(self, filename, reason_func=None, include_all=False):
        # Write tab-separated node.AS lines; only nameless ('N…') routers
        # unless include_all is set.
        if reason_func is None:
            reason_func = self.default_reason
        with fopen2(filename, 'wt') as f:
            for name, router in self.bdrmapit.graph.routers.items():
                if include_all or name[0] == 'N':
                    update = self.rupdates[router]
                    asn = update.asn
                    reason = reason_func(router, update)
                    f.write('node.AS\t{}\t{}\t{}\n'.format(name, asn, reason))
    def node_info(self, filename):
        # Dump per-router annotation details as one JSON object per line.
        with fopen2(filename, 'wt') as f:
            for name, router in self.bdrmapit.graph.routers.items():
                if name[0] == 'N':
                    update = self.rupdates[router]
                    if not router.hints:
                        hints = []
                    else:
                        hints = [int(h) for h in router.hints]
                    if isregexasn(update.utype):
                        restype = 'asn'
                    elif isregexorg(update.utype):
                        restype = 'org'
                    else:
                        restype = None
                    if restype is not None:
                        reasons = regexreasons(update.utype)
                    else:
                        reasons = []
                    # Bucket all negative (IXP/reserved) origin ASes as -100.
                    origins = dict(Counter(interface.asn if interface.asn >= 0 else -100 for interface in router.interfaces))
                    succs = dict(Counter(interface.asn if interface.asn >= 0 else -100 for interface in router.succ))
                    dests = list(router.dests)
                    d = {'node': name, 'asn': int(update.asn), 'hints': hints, 'match': restype, 'reasons': reasons,
                         'origins': origins, 'subsequent': succs, 'dests': dests}
                    f.write(json.dumps(d) + '\n')
class Analyze:
    """Ad-hoc inspection helpers over a Bdrmapit result set."""

    def __init__(self, bdrmapit: Bdrmapit):
        self.bdrmapit = bdrmapit

    def todf(self, interfaces: Collection[Interface], rupdates: Updates = None):
        """Build a DataFrame of router annotations for *interfaces*."""
        if rupdates is None:
            rupdates = self.bdrmapit.rupdates
        records = []
        for iface in interfaces:
            update = rupdates[iface.router]
            annotated_asn = update.asn
            records.append({
                'addr': iface.addr,
                'asn': annotated_asn,
                'org': self.bdrmapit.as2org[annotated_asn],
                'rtype': update.utype,
            })
        return pd.DataFrame(records)

    def todf_addrs(self, addrs, *args, **kwargs):
        """Like todf, but look interfaces up by address, skipping unknown ones."""
        known = self.bdrmapit.graph.interfaces
        return self.todf([known[a] for a in addrs if a in known], *args, **kwargs)
class Test:
    """Compare bdrmapit annotations against ground truth derived from
    reverse-DNS naming conventions (msn.net router names)."""
    def __init__(self, bdrmapit, names, file, rupdates: Updates = None, iupdates: Updates = None):
        self.bdrmapit = bdrmapit
        self.rupdates = rupdates if rupdates is not None else bdrmapit.rupdates
        self.iupdates = iupdates if iupdates is not None else bdrmapit.iupdates
        # names: addr -> hostname; file: tag-to-ASNs ground-truth mapping.
        self.names = names
        self.file = file
    def create_tags(self):
        # Map addresses to expected ASN tuples by extracting the leading
        # tag from each msn.net hostname and looking it up in self.file.
        interre = re.compile(r'([a-z]+)\..*\.ntwk\.msn\.net')
        tags = {}
        with open(self.file) as f:
            for line in f:
                if line.startswith('#'):
                    continue
                # Each line: <tag> [asn ...]; tags with no ASNs are skipped.
                tag, *asns = line.split()
                if asns:
                    tags[tag] = tuple({int(asn) for asn in asns})
        valmap = {}
        for addr, name in self.names.items():
            m = interre.match(name)
            if m:
                tag = m.group(1)
                if tag in tags:
                    valmap[addr] = tags[tag]
        self.valmap = valmap
    def todf(self, addrs=None):
        # Build a DataFrame of router and interface annotations, either for
        # the given addresses or for every interface in the graph.
        if addrs is not None:
            interfaces: Set[Interface] = {self.bdrmapit.graph.interfaces[a] for a in addrs if a in self.bdrmapit.graph.interfaces}
        else:
            interfaces: Set[Interface] = self.bdrmapit.graph.interfaces.values()
        values = []
        for interface in interfaces:
            addr = interface.addr
            router: Router = interface.router
            rupdate: UpdateObj = self.rupdates[router]
            iupdate: UpdateObj = self.iupdates[interface]
            if rupdate is None:
                rasn = -1
                rorg = -1
                rtype = -1
            else:
                rasn = rupdate.asn
                rorg = rupdate.org
                rtype = rupdate.utype
            # Fall back to the interface's own origin AS when there is no
            # interface update or its org disagrees with the router's org.
            if iupdate is None or interface.org != rorg:
                iasn = interface.asn
                iorg = interface.org
                itype = -1 if iupdate is None else 0
            else:
                iasn = iupdate.asn
                iorg = iupdate.org
                itype = iupdate.utype
            row = {'addr': addr, 'router': router.name, 'asn': rasn, 'org': rorg, 'conn_asn': iasn, 'conn_org': iorg,
                   'rtype': rtype, 'itype': itype}
            values.append(row)
        return pd.DataFrame(values)
    def ixps(self, start_asn=None):
        # DataFrame of IXP successor interfaces (asn <= -100) for routers
        # annotated with start_asn; each address is reported once.
        values = []
        seen = set()
        for router in self.bdrmapit.routers_succ:
            conn_asn = self.rupdates[router].asn
            if conn_asn != start_asn:
                continue
            conn_org = self.bdrmapit.as2org[conn_asn]
            for isucc in router.succ:
                if isucc.asn <= -100:
                    if isucc.addr in seen:
                        continue
                    seen.add(isucc.addr)
                    # IXP interfaces encode the peering ID as -(pid + 100).
                    pid = (isucc.asn * -1) - 100
                    rsucc = isucc.router
                    asn = self.rupdates[rsucc].asn
                    org = self.bdrmapit.as2org[asn]
                    value = {'addr': isucc.addr, 'router': router.name, 'asn': asn, 'org': org, 'conn_asn': conn_asn, 'conn_org': conn_org, 'pid': pid}
                    values.append(value)
        return pd.DataFrame(values)
|
# -*- coding: utf-8 -*-
from openerp import models, fields
class AccountMoveInherit(models.Model):
    """Extend account.move with a required accounting period."""
    _inherit = "account.move"
    # Accounting period; read-only once the move is posted.
    period_id = fields.Many2one('account.period', 'Periodo', required=True,
        states={'posted':[('readonly',True)]},
        )
    #def _get_period(self):
        #ctx = dict(context or {})
        #period_id = self.env['account.period'].search([('state', '=', 'draft')], limit=1, order='date_start')
        #return period_id
import pandas as pd
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import StandardScaler
import matplotlib.pyplot as plt
import numpy as np
import pickle
import os
import itertools
class logistic_regression():
    """Binary logistic regression trained with batch gradient descent."""

    def __init__(self, learning_rate, iterations):
        self.learning_rate = learning_rate
        self.iterations = iterations

    def fit(self, X, Y):
        """Fit weights to features X (m x n) and 0/1 labels Y; return self."""
        self.m, self.n = X.shape
        self.W = np.zeros(self.n)
        self.b = 0
        self.X = X
        self.Y = Y
        for _ in range(self.iterations):
            self.update_weights()
        return self

    def update_weights(self):
        """Apply one gradient-descent step to the weights and bias."""
        activations = 1 / (1 + np.exp(-(self.X.dot(self.W) + self.b)))
        # Prediction error per sample, flattened to shape (m,).
        error = np.reshape(activations - self.Y.T, self.m)
        grad_w = np.dot(self.X.T, error) / self.m
        grad_b = np.sum(error) / self.m
        self.W = self.W - self.learning_rate * grad_w
        self.b = self.b - self.learning_rate * grad_b
        return self

    def predict(self, X):
        """Return hard 0/1 predictions for feature matrix X."""
        probabilities = 1 / (1 + np.exp(-(X.dot(self.W) + self.b)))
        return np.where(probabilities > 0.5, 1, 0)
def status_to_int(s):
    """Map a status label to its class id: 'benign' -> 0, 'malware' -> 1.

    Any other label yields None (same as the original fall-through).
    """
    return {'benign': 0, 'malware': 1}.get(s)
# Collect the per-batch benign and malware CSV file paths with `find`.
os.system('find ./CSV_FILES_BATCH/ -name "*_benign.csv" > ./CSV_FILES_PATH_LIST/benign_batch_csv_files_list.txt')
os.system('find ./CSV_FILES_BATCH/ -name "*_malware.csv" > ./CSV_FILES_PATH_LIST/malware_batch_csv_files_list.txt')
benign_file_list = open('./CSV_FILES_PATH_LIST/benign_batch_csv_files_list.txt').read()
malware_file_list = open('./CSV_FILES_PATH_LIST/malware_batch_csv_files_list.txt').read()
# Drop the trailing empty entry produced by the final newline.
benign_file_list = benign_file_list.split('\n')[0:-1]
malware_file_list = malware_file_list.split('\n')[0:-1]
# combine all files list
benign_csv_combined = pd.concat([pd.read_csv(f) for f in benign_file_list ], ignore_index = True)
malware_csv_combined = pd.concat([pd.read_csv(f) for f in malware_file_list ], ignore_index = True)
# export to csv
benign_csv_combined.to_csv( "./CSV_FILES_BATCH/benign.csv", index = False)
malware_csv_combined.to_csv( "./CSV_FILES_BATCH/malware.csv", index = False)
# read combined dataframes
df_benign = pd.read_csv('./CSV_FILES_BATCH/benign.csv')
df_malware = pd.read_csv('./CSV_FILES_BATCH/malware.csv')
df = [df_benign, df_malware]
df = pd.concat(df, ignore_index = True)
df = df.fillna(0)
# Encode the textual status label as 0 (benign) / 1 (malware).
df['status'] = df['status'].apply(status_to_int)
X_drop_columns = ['status']
# Feature columns excluded from X; y keeps only the status column.
drop = ['ip.src.len.entropy', 'ip.src.len.cvq','ip.dst.len.entropy', 'ip.dst.len.cvq', 'sport.entropy', 'sport.cvq', 'dport.entropy', 'dport.cvq', 'tcp.flags.entropy', 'tcp.flags.cvq',]
X_drop_columns_more = ['ip.src.len.median', 'ip.src.len.var', 'ip.src.len.std','ip.src.len.cv', 'ip.src.len.rte', 'ip.dst.len.median', 'ip.dst.len.var', 'ip.dst.len.std', 'ip.dst.len.cv', 'ip.dst.len.rte','tcp.flags.mean', 'tcp.flags.median', 'tcp.flags.var', 'tcp.flags.std', 'tcp.flags.entropy', 'tcp.flags.cv', 'tcp.flags.cvq', 'tcp.flags.rte']
y_drop_columns = ['ip.proto', 'ip.src.len.mean', 'ip.src.len.median', 'ip.src.len.var', 'ip.src.len.std', 'ip.src.len.entropy', 'ip.src.len.cv', 'ip.src.len.cvq', 'ip.src.len.rte', 'ip.dst.len.mean', 'ip.dst.len.median', 'ip.dst.len.var', 'ip.dst.len.std', 'ip.dst.len.entropy', 'ip.dst.len.cv', 'ip.dst.len.cvq', 'ip.dst.len.rte', 'sport.mean', 'sport.median', 'sport.var', 'sport.std', 'sport.entropy', 'sport.cv', 'sport.cvq', 'sport.rte', 'dport.mean', 'dport.median', 'dport.var', 'dport.std', 'dport.entropy', 'dport.cv', 'dport.cvq', 'dport.rte', 'tcp.flags.mean', 'tcp.flags.median', 'tcp.flags.var', 'tcp.flags.std', 'tcp.flags.entropy', 'tcp.flags.cv', 'tcp.flags.cvq', 'tcp.flags.rte']
X = df.drop(drop + X_drop_columns_more + ['status'], axis = 1).values
y = df.drop(y_drop_columns, axis = 1).values
# convert dtype to float
X = X.astype(float)
y = y.astype(float)
X_train, X_test, y_train, y_test = train_test_split(X, y)
# == PREPROCESSING ==
scaler = StandardScaler()
scaler.fit(X_train)
X_train_scaled = scaler.transform(X_train)
X_test_scaled = scaler.transform(X_test)
# == TRAINING ==
logistic_model = logistic_regression(0.01,1000)
logistic_model.fit(X_train_scaled, y_train.ravel())
# Persist the trained classifier with pickle.
filename = 'batch_network_traffic_logistic_classifier.sav'
pickle.dump(logistic_model , open(filename, 'wb'))
y_pred = logistic_model.predict(X_test_scaled)
# Tally accuracy and confusion-matrix counts on the held-out test set.
correctly_classified = 0
count = 0
tp = tn = fp = fn = 0
for i in range(np.size(y_pred)):
    if y_test[i] == 1:
        if y_pred[i] == 1:
            correctly_classified+=1
            tp+=1
        else:
            fn+=1
    else:
        if y_pred[i] == 0:
            correctly_classified+=1
            tn+=1
        else:
            fp+=1
    count+=1
confusion_matrix = [[tp,fp],[fn,tn]]
# print accuracy
print("Accuracy: " + str((correctly_classified/count)*100))
# == PRINT METRICS ==
# Confusion Matrix
def plot_confusion_matrix(cm, classes,
                          title='Confusion matrix',
                          cmap=plt.cm.Blues):
    """Print *cm* and render it as a matplotlib image.

    Docstring fix: the previous docstring (copied from a sklearn example)
    mentioned a `normalize` parameter this function does not have.

    Args:
        cm: NxN confusion-matrix values (nested list or array).
        classes: axis tick labels, one per class.
        title: figure title.
        cmap: matplotlib colormap for the image.
    """
    cm = np.asarray(cm)
    plt.imshow(cm, interpolation='nearest', cmap=cmap)
    plt.title(title)
    plt.colorbar()
    tick_marks = np.arange(len(classes))
    plt.xticks(tick_marks, classes, rotation=45)
    plt.yticks(tick_marks, classes)
    print('Confusion matrix')
    print(cm)
    # Pick the text color for contrast against each cell's background.
    thresh = cm.max() / 2.
    for i, j in itertools.product(range(cm.shape[0]), range(cm.shape[1])):
        plt.text(j, i, cm[i, j],
                 horizontalalignment="center",
                 color="white" if cm[i, j] > thresh else "black")
    plt.tight_layout()
    plt.ylabel('True label')
    plt.xlabel('Predicted label')
    plt.show()
# Compute confusion matrix
np.set_printoptions(precision=2)
# Plot confusion matrix (label typo fixed: 'Trafic' -> 'Traffic').
plt.figure()
plot_confusion_matrix(confusion_matrix, classes=['Safe Traffic', 'DDoS Traffic'], title='Confusion matrix, without normalization')
|
from flask import Flask
from flask_apscheduler import APScheduler
from flask_sqlalchemy import SQLAlchemy
app = Flask(__name__)
app.config.from_object('config')
# Disable autoflush so scheduled tasks control when changes hit the DB.
db = SQLAlchemy(app, session_options={"autoflush": False})
app.scheduler = APScheduler()
# Imported late so views/models/tasks can import `app` and `db` from this module.
from app import views, models, tasks
# Set up logging
import logging
logging.basicConfig()
from logging.handlers import RotatingFileHandler
# Rotate the log at 1 MB and keep 10 backup files.
file_handler = RotatingFileHandler(app.config['LOG_FILE'], 'a', 1 * 1024 * 1024, 10)
file_handler.setFormatter(logging.Formatter('%(asctime)s %(levelname)s: %(message)s'))
file_handler.setLevel(logging.INFO)
app.logger.setLevel(logging.INFO)
app.logger.addHandler(file_handler)
# Quiet apscheduler's own logger; warnings still go to the log file.
aplogger = logging.getLogger('apscheduler')
aplogger.setLevel(logging.WARNING)
aplogger.addHandler(file_handler)
from django.urls import path
from app_users.views import login_view, AnotherLoginView, logout_view, AnotherLogoutView, register_view, \
another_register_view
urlpatterns = [
    # Function-based and class-based variants of each auth view.
    path('login/', login_view, name="login"),
    path('another_login/', AnotherLoginView.as_view(), name="another_login"),
    path('logout/', logout_view, name="logout"),
    path('another_logout/', AnotherLogoutView.as_view(), name="another_logout"),
    path('register/', register_view, name="register"),
    path('another_register/', another_register_view, name="another_register"),
]
|
# -*- coding: utf-8 -*-
"""
Created on Mon Jan 28 02:58:27 2019
@author: PeterXu
"""
import linear_regression as lr
import numpy as np
import time
#def
str_to_write = ""
with open('../t2_results_tmp3.txt','w', buffering=1) as fout_t2r:
#str_to_write = k + " " + str(v) + "\n"
#fout_t2r.write(str_to_write)
#X = np.array([[0.86, 1], [0.09, 1], [-0.85, 1], [0.87, 1], [-0.44, 1], [-0.43, 1], [-1.10, 1], [0.40, 1], [-0.96, 1], [0.17, 1]])
#X = np.array([[0.86], [0.09], [-0.85], [0.87], [-0.44], [-0.43], [-1.10], [0.40], [-0.96], [0.17]])
X = np.array([[0.75, 0.86, 1], [0.01, 0.09, 1], [0.73, -0.85, 1], [0.76, 0.87, 1], [0.19, -0.44, 1], [0.18, -0.43, 1], [1.22, -1.10, 1], [0.16, 0.40, 1], [0.93, -0.96, 1], [0.03, 0.17, 1]])
#print("X = \n", X)
#print(X.shape)
str_to_write = str(X) + "\n"
fout_t2r.write(str_to_write)
print(str_to_write)
str_to_write = str(X.shape) + "\n"
fout_t2r.write(str_to_write)
print(str_to_write)
Y = np.array([[2.49], [0.83], [-0.25], [3.10], [0.87], [0.02], [-0.12], [1.81], [-0.83], [0.43]])
#print("Y = \n", Y)
#print(Y.shape)
str_to_write = str(Y) + "\n"
fout_t2r.write(str_to_write)
print(str_to_write)
str_to_write = str(Y.shape) + "\n"
fout_t2r.write(str_to_write)
print(str_to_write)
X_training_set, X_validation_set = np.split(X, [8])
str_to_write = "X_training_set = \n" + str(X_training_set) + "\n"
str_to_write += "X_validation_set = \n" + str(X_validation_set) + "\n"
fout_t2r.write(str_to_write)
print(str_to_write)
#print("X_training_set = \n", X_training_set)
#print("X_validation_set = \n", X_validation_set)
Y_training_set, Y_validation_set = np.split(Y, [8])
#print("Y_training_set = \n", Y_training_set)
#print("Y_validation_set = \n", Y_validation_set)
str_to_write = "Y_training_set = \n" + str(Y_training_set) + "\n"
str_to_write += "Y_validation_set = \n" + str(Y_validation_set) + "\n"
fout_t2r.write(str_to_write)
print(str_to_write)
start_time = time.time()
# your code
W, str_to_write = lr.least_squares_estimate_linear_regression_alg(X, Y)
fout_t2r.write(str_to_write)
print(str_to_write)
elapsed_time = time.time() - start_time
#print("lr.least_squares_estimate_linear_regression_alg W = \n", W)
#print("elapsed_time = ", elapsed_time)
str_to_write = "lr.least_squares_estimate_linear_regression_alg W = \n" + str(W) + "\n"
str_to_write += "elapsed_time = " + str(elapsed_time) + "\n\n\n\n\n"
fout_t2r.write(str_to_write)
print(str_to_write)
"""
start_time = time.time()
W, str_to_write = lr.gradient_descent_linear_regression_alg(X, Y)
fout_t2r.write(str_to_write)
print(str_to_write)
elapsed_time = time.time() - start_time
#print("lr.gradient_descent_linear_regression_alg W = \n", W)
#print("elapsed_time = ", elapsed_time)
str_to_write = "lr.gradient_descent_linear_regression_alg W = \n" + str(W) + "\n"
str_to_write += "elapsed_time = " + str(elapsed_time) + "\n"
fout_t2r.write(str_to_write)
print(str_to_write)
"""
#print("\n\n\n\n\n\n\n\n")
#print("for training and validation \n\n")
str_to_write = "\n\n\n\n\n\n\n\n" + "for training and validation \n\n"
fout_t2r.write(str_to_write)
print(str_to_write)
start_time = time.time()
W, str_to_write = lr.least_squares_estimate_linear_regression_alg(X_training_set, Y_training_set)
#print("lr.least_squares_estimate_linear_regression_alg W = \n", W)
fout_t2r.write(str_to_write)
print(str_to_write)
str_to_write = "lr.least_squares_estimate_linear_regression_alg W = \n" + str(W) + "\n"
elapsed_time = time.time() - start_time
str_to_write += "elapsed_time = " + str(elapsed_time) + "\n"
fout_t2r.write(str_to_write)
print(str_to_write)
#print("elapsed_time = ", elapsed_time)
est_Y = np.dot(X_validation_set, W)
tmp_mse = lr.mean_squared_error(est_Y, Y_validation_set)
#print("est_Y = \n", est_Y)
#print("tmp_mse =", tmp_mse)
str_to_write = "est_Y = \n" + str(est_Y) + "\n"
str_to_write += "tmp_mse = " + str(tmp_mse) + "\n"
fout_t2r.write(str_to_write)
print(str_to_write)
# -2 ~ -8
epsilon_power = -2
# 1 ~ -3
beta_power = 1
#for beta_power in range(0, -4, -1):
#for beta_power in range(0, -4, -1):
#for epsilon_power in range(-5, -10, -1):
for epsilon_power in range(-6, -10, -1):
#print("\n\n\n\n")
str_to_write = "\n\n\n\n"
fout_t2r.write(str_to_write)
print(str_to_write)
start_time = time.time()
W, str_to_write = lr.gradient_descent_linear_regression_alg(X_training_set, Y_training_set, 10**epsilon_power)
fout_t2r.write(str_to_write)
print(str_to_write)
#print("lr.gradient_descent_linear_regression_alg W = \n", W)
str_to_write = "lr.gradient_descent_linear_regression_alg W = \n" + str(W) + "\n"
fout_t2r.write(str_to_write)
print(str_to_write)
elapsed_time = time.time() - start_time
#print("elapsed_time = ", elapsed_time)
est_Y = np.dot(X_validation_set, W)
tmp_mse = lr.mean_squared_error(est_Y, Y_validation_set)
#print("est_Y = \n", est_Y)
#print("tmp_mse =", tmp_mse)
str_to_write = "elapsed_time = " + str(elapsed_time) + "\n"
str_to_write += "est_Y = \n" + str(est_Y) + "\n"
str_to_write += "tmp_mse = " + str(tmp_mse) + "\n"
fout_t2r.write(str_to_write)
print(str_to_write)
|
import nltk
import spacy
import sys
from nltk.corpus import stopwords
from nltk.probability import FreqDist
from nltk.stem import PorterStemmer
from nltk.stem.wordnet import WordNetLemmatizer
from nltk.tokenize import sent_tokenize, word_tokenize
sys.path.append('../')
from entity import Entity
nlp = spacy.load("en_core_web_lg")
# Load the large English spaCy pipeline (tagger, parser, NER, vectors).
nlp = spacy.load("en_core_web_lg")
# NOTE(review): hard-coded absolute input path - only works on this machine.
file = open("/home/harsh/Downloads/data/abc_datafiles/01.txt", "r")
text = file.read()
text_nlp = nlp(text)
# English stop words to filter out of the token stream.
stop_words = set(stopwords.words("english"))
ps = PorterStemmer()
lem = WordNetLemmatizer()
result = []
# Wrap every non-stop-word token in an Entity, attaching its Porter stem
# and its verb-mode ("v") lemma, and echo each one as it is collected.
for token in text_nlp:
    if token.text not in stop_words:
        entity = Entity(token)
        entity.stem = ps.stem(token.text)
        entity.lem = lem.lemmatize(token.text, "v")
        result.append(entity)
        print(entity.lem, entity.token.text, entity.token.pos_)
|
import subprocess
import configparser
import os
# Supported CPython versions (as "XY" digit strings) and bitnesses.
py_versions = ('27', '33', '34')
architectures = ('32', '64')


def python_homes():
    """Yield (arch, install_dir) for every version/bitness combination.

    Paths follow the ``C:\\PythonXY-AA`` layout this build setup expects.
    """
    for version in py_versions:
        for bits in architectures:
            yield bits, 'C:\\Python{py}-{arch}'.format(py=version, arch=bits)
def normalize(name):
    """Convert a project name to canonical PyPI form (lowercase, dashes)."""
    return name.replace('_', '-').lower()
def build_wheels(project, extras=None):
    """Build wheels for *project* under every configured Python home.

    If ``Config/<project>.cfg.template`` exists, it is rendered to a
    ``pydistutils.cfg`` in the current directory (with ``HOME`` pointed
    here so distutils picks it up) once per architecture before building.

    :param project: PyPI project name; normalized to canonical form first.
    :param extras: unused placeholder kept for interface compatibility.
    """
    env = os.environ.copy()
    project = normalize(project)
    # Bug fix: the template path previously kept the literal "{}"
    # placeholder (it was never .format()-ed), so the per-project config
    # template could never be found.
    tmpl = os.path.join('Config', '{}.cfg.template').format(project)
    template = None
    if os.path.exists(tmpl):
        home = os.path.abspath(os.curdir)
        env['HOME'] = home
        with open(tmpl) as f:
            template = f.read()
    for arch, pyhome in python_homes():
        if template:
            # Render the distutils config for this architecture.
            with open('pydistutils.cfg', 'w') as f:
                f.write(template.format(lib=os.path.join(home, 'Lib'),
                                        arch=arch))
        python = os.path.join(pyhome, 'python.exe')
        cmd = [python, '-m', 'pip', 'wheel', '--no-deps',
               '--find-links', 'wheelhouse', '--wheel-dir', 'wheelhouse',
               project]
        print(cmd)
        subprocess.check_call(cmd, env=env)
def read_config(config):
    """Parse *config* with an INI parser that tolerates value-less keys."""
    parser = configparser.ConfigParser(allow_no_value=True)
    parser.read(config)
    return parser
if __name__ == '__main__':
    # Build wheels for every project listed in the [projects] section of
    # build.ini (section keys only; values are ignored).
    cp = read_config('build.ini')
    for project in cp['projects']:
        build_wheels(project)
|
from abc import ABC, abstractmethod
class Employee(ABC):
def __init__(self, first_name, last_name):
self.first_name = first_name
self.last_name = last_name
@property
def full_name(self):
return f"{self.first_name} {self.last_name}"
@abstractmethod
def get_salary(self):
pass
|
import matplotlib.pyplot as plt
import matplotlib as mpl
from DataOperation import DataOperation
from mpl_toolkits.mplot3d import Axes3D
class PlotCreator:
    """Creates plots from flight data recorded by the program.

    Every plotting method takes ``show_or_not``: pass True to display the
    plot interactively, False to only get the ``pyplot`` figure object
    (e.g. for embedding in a PyQt window). Each method also saves a PNG
    under ``img/``. Axis/ title strings are kept in Polish as shipped.
    """

    def __init__(self, data, time_interval):
        """
        :param data: 2-D array read from the .txt file saved by the program;
            columns are time, longitude, latitude, altitude, roll, pitch,
            heading (in that order).
        :param time_interval: sampling time interval - the unit of time.
        """
        self.data = data
        self.time = data[:, 0]
        self.lo = data[:, 1]
        self.la = data[:, 2]
        self.alt = data[:, 3]
        self.roll = data[:, 4]
        self.pitch = data[:, 5]
        self.heading = data[:, 6]
        self.time_interval = time_interval

    def altitude_time(self, show_or_not):
        """Altitude vs. time; saves img/alt.png."""
        fig = plt.figure()
        plt.plot(self.time, self.alt)
        plt.xlabel('time (s)')
        plt.ylabel('wysokosc (m n.p.m)')
        plt.title('Zmiany wysokosci')
        plt.grid(True)
        plt.savefig("img/alt.png")
        if show_or_not:
            plt.show()
        plt.close()
        return fig

    def position_time(self, show_or_not):
        """3-D flight path (longitude/latitude/altitude); saves img/path.png."""
        mpl.rcParams['legend.fontsize'] = 10
        fig = plt.figure()
        # Bug fix: fig.gca(projection='3d') was deprecated in Matplotlib 3.4
        # and removed in 3.6; add_subplot(projection='3d') is the supported
        # spelling and works on old versions too.
        ax = fig.add_subplot(projection='3d')
        ax.plot(self.lo, self.la, self.alt, label='parametric curve')
        ax.set_zlabel('wysokosc (m n.p.m)')
        ax.title.set_color('red')
        ax.set_xlabel('dl. geograficzna')
        ax.set_ylabel('sz. geograficzna')
        ax.legend()
        plt.title('Trasa samolotu')
        plt.savefig("img/path.png")
        if show_or_not:
            plt.show()
        # NOTE(review): unlike the other methods this one never calls
        # plt.close() - confirm whether that is intentional.
        return fig

    def pitch_time(self, show_or_not):
        """Pitch angle vs. time; saves img/pitch.png."""
        fig = plt.figure()
        plt.plot(self.time, self.pitch)
        plt.xlabel('time (s)')
        plt.ylabel('kat wznoszenia (stopnie)')
        plt.title('Zmiany kata wznoszenia')
        plt.grid(True)
        plt.savefig("img/pitch.png")
        if show_or_not:
            plt.show()
        plt.close()
        return fig

    def roll_time(self, show_or_not):
        """Roll angle vs. time; saves img/roll.png."""
        fig = plt.figure()
        plt.plot(self.time, self.roll)
        plt.xlabel('time (s)')
        plt.ylabel('przechyl boczny (stopnie) lewo<--->prawo')
        plt.title('Zmiany przechylu')
        plt.grid(True)
        plt.savefig("img/roll.png")
        if show_or_not:
            plt.show()
        plt.close()
        return fig

    def dst_accu_time(self, show_or_not):
        """Distance covered in each sample interval; saves img/dst.png."""
        dst = DataOperation.geo_m(self.data)
        fig = plt.figure()
        # dst has one element fewer than time (per-step deltas), hence [1:].
        plt.plot(self.time[1:], dst)
        plt.xlabel('time (s)')
        plt.ylabel('pokonana odleglosc (m)')
        plt.title('Odleglosc pokonana w danej sekundzie')
        plt.grid(True)
        plt.savefig("img/dst.png")
        if show_or_not:
            plt.show()
        plt.close()
        return fig

    def dst_time(self, show_or_not):
        """Cumulative distance over time; saves img/dst_accu.png."""
        dst = DataOperation.geo_m_accumulate(self.data)
        fig = plt.figure()
        plt.plot(self.time[1:], dst)
        plt.xlabel('time (s)')
        plt.ylabel('pokonana odleglosc (m)')
        plt.title('Sumaryczna odleglosc pokonana w czasie')
        plt.grid(True)
        plt.savefig("img/dst_accu.png")
        if show_or_not:
            plt.show()
        plt.close()
        return fig

    def speed_time(self, show_or_not):
        """Speed over time; saves img/speed.png."""
        speed = DataOperation.speed(self.data, self.time_interval)
        fig = plt.figure()
        plt.plot(self.time[1:], speed)
        plt.xlabel('time (s)')
        plt.ylabel('predkosc (km/h)')
        plt.title('Predkosc w czasie')
        plt.grid(True)
        plt.savefig("img/speed.png")
        if show_or_not:
            plt.show()
        plt.close()
        return fig

    def acceleration_time(self, show_or_not):
        """Acceleration over time; saves img/acceleration.png."""
        acc = DataOperation.acceleration(self.data, self.time_interval)
        fig = plt.figure()
        plt.plot(self.time[1:], acc)
        plt.xlabel('time (s)')
        plt.ylabel('pprzyspieszenie (m/s)')
        plt.title('Przyspieszenie w czasie')
        plt.grid(True)
        plt.savefig("img/acceleration.png")
        if show_or_not:
            plt.show()
        plt.close()
        return fig
|
# encoding: utf-8
"""
open booking connect
Copyright (c) 2021, binary butterfly GmbH
Use of this source code is governed by an MIT-style license that can be found in the LICENSE file.
"""
import json
import aiohttp
from aiohttp.client_exceptions import ClientConnectorError, ClientOSError, ContentTypeError
from quart import Quart
from typing import Union
from .misc import DefaultJSONEncoder
from ..extensions import logger
class Requests:
    """Async HTTP helper around a single shared aiohttp ClientSession.

    The session is created on app startup (via Quart's before_serving hook)
    or lazily on first use, and closed when the app stops serving.
    """
    initialized = False

    def init_app(self, app: Quart) -> None:
        """Register session setup/teardown on the Quart app lifecycle."""
        app.before_serving(self._before_serving)
        app.after_serving(self._after_serving)

    async def _before_serving(self) -> None:
        # Lazily created shared session for all outgoing requests.
        self.session = aiohttp.ClientSession()
        self.initialized = True

    async def _after_serving(self) -> None:
        await self.session.close()

    async def close(self):
        """Explicitly close the shared session."""
        await self.session.close()

    async def get(self, url: str, log_file: str, log_message: str, auth: Union[tuple, None] = None, headers: Union[dict, None] = None):
        """GET *url*; returns parsed JSON (or text), or None after logging
        *log_message* to *log_file* on connection/content errors."""
        if not self.initialized:
            await self._before_serving()
        kwargs = {}
        if auth:
            kwargs['auth'] = aiohttp.BasicAuth(auth[0], auth[1])
        if headers:
            kwargs['headers'] = headers
        try:
            async with self.session.get(url, **kwargs) as response:
                if response.content_type == 'application/json':
                    return await response.json()
                return await response.text()
        except (ClientConnectorError, ClientOSError, ContentTypeError):
            logger.info(log_file, log_message)

    async def post(self, url: str, data: str, log_file: str, log_message: str, auth: Union[tuple, None] = None, headers: Union[dict, None] = None):
        """POST *data* to *url*; dicts/lists are JSON-encoded. Returns the
        parsed body, or None on non-200 status or connection errors."""
        if not self.initialized:
            await self._before_serving()
        kwargs = {}
        if not headers:
            headers = {}
        # isinstance instead of `type(data) in [dict, list]` (idiom fix).
        if isinstance(data, (dict, list)):
            kwargs['data'] = json.dumps(data, cls=DefaultJSONEncoder)
            headers['Content-Type'] = 'application/json'
        else:
            kwargs['data'] = data
        if auth:
            kwargs['auth'] = aiohttp.BasicAuth(auth[0], auth[1])
        # Bug fix: the original tested `headers is not {}`, an identity
        # comparison against a fresh dict literal that is always True;
        # a truthiness check expresses the intended "only if non-empty".
        if headers:
            kwargs['headers'] = headers
        try:
            async with self.session.post(url, **kwargs) as response:
                if response.status != 200:
                    logger.info(log_file, 'bad status code %s: %s' % (response.status, await response.text()))
                    return None
                if response.content_type == 'application/json':
                    return await response.json()
                return await response.text()
        except (ClientConnectorError, ClientOSError, ContentTypeError):
            logger.info(log_file, log_message)
|
from menu.interface_menu import InterfaceMenu
from telebot import types
class SettingsMenu(InterfaceMenu):
    """Settings menu: create/manage lots, change address, toggle store mode."""

    def __init__(self, bot, user):
        super().__init__(bot, user)
        self.lots_manager = bot.lots_manager
        self.msgs.append(self.lang.SETTINGS_MSG)
        self.markup = types.InlineKeyboardMarkup()
        # One inline button per action; callback data mirrors the label.
        for label in (self.lang.NEW_LOT_BTN,
                      self.lang.MANAGE_LOTS_BTN,
                      self.lang.SET_NEW_ADDRESS_BTN,
                      self.lang.TOGGLE_STORE_MODE_BTN):
            self.markup.add(types.InlineKeyboardButton(text=label,
                                                       callback_data=label))

    def handle_message(self, message):
        return super().handle_message(message)

    def handle_callback(self, call):
        """Dispatch a button press; returns 1 when handled here."""
        data = call.data
        if data == self.lang.NEW_LOT_BTN:
            self.user.set_property('lot_id', 0)
            self.user.change_menu('InputTitleMenu')
            return 1
        if data == self.lang.MANAGE_LOTS_BTN:
            self.user.change_menu('ManageLotsMenu')
            return 1
        if data == self.lang.SET_NEW_ADDRESS_BTN:
            self.user.change_menu('InputAddressMenu')
            return 1
        if data == self.lang.TOGGLE_STORE_MODE_BTN:
            # Flip the show_all flag and tell the user which mode is active.
            if self.user.get_property('show_all'):
                new_mode, notice = 0, self.lang.FAV_MODE_ON
            else:
                new_mode, notice = 1, self.lang.FAV_MODE_OFF
            self.respond(notice, types.InlineKeyboardMarkup())
            self.user.set_property('show_all', new_mode)
            return 1
        return super().handle_callback(call)
#!/usr/bin/env python2
# Raphael Javaux <raphaeljavaux@gmail.com> - December 2011
import sys
from PyQt4 import QtGui
from gui import rainbow
if __name__ == '__main__':
    # Start the Qt application, show the rainbow window, and exit with
    # Qt's event-loop return code when it closes.
    app = QtGui.QApplication(sys.argv)
    w = rainbow.Rainbow()
    w.show()
    sys.exit(app.exec_())
import vishelper.config as config
import vishelper.helpers as helpers
def add_hline(ax, y, **kwargs):
    """Draw a horizontal line spanning the full x-range of `ax` at `y`."""
    left, right = ax.get_xlim()
    ax.hlines(y, left, right, **kwargs)
    return ax
def add_vline(ax, x, **kwargs):
    """Draw a vertical line spanning the full y-range of `ax` at `x`."""
    bottom, top = ax.get_ylim()
    ax.vlines(x, bottom, top, **kwargs)
    return ax
def add_dline(ax, m=1, b=0, **kwargs):
    """Draw the line y = m*x + b across the current x-range of `ax`.

    Note: mirrors the original contract - the return value is whatever
    ``ax.plot`` returns (a list of Line2D), not the axis itself.
    """
    left, right = ax.get_xlim()
    ax = ax.plot([left, right], [m * left + b, m * right + b], **kwargs)
    return ax
def line(x, y, ax=None, color=None, **kwargs):
    """Plot y versus x as a line using the package's default formatting.

    Style options (marker, markersize, linestyle, linewidth, alpha) may be
    overridden via kwargs; anything else is forwarded to ``ax.plot``.
    Returns ``ax`` when an axis was supplied, otherwise ``(fig, ax)``.
    """
    fig, ax = helpers.get_ax_fig(ax, kwargs=kwargs)
    if color is None:
        color = config.formatting['color.single']
    # Pop the style options out of kwargs (falling back to the package
    # defaults) so they are not passed to ax.plot twice.
    marker = kwargs.pop('marker', None)
    markersize = kwargs.pop('markersize', config.formatting['markersize'])
    linestyle = kwargs.pop('linestyle', "-")
    linewidth = kwargs.pop('linewidth', config.formatting["lines.linewidth"])
    alpha = kwargs.pop('alpha', config.formatting['alpha.single'])
    ax.plot(x, y, color=color, linestyle=linestyle, alpha=alpha,
            marker=marker, markersize=markersize, linewidth=linewidth,
            **kwargs)
    return ax if fig is None else (fig, ax)
|
# http://pythonstudy.xyz/python/article/103-PyQt-%EC%9C%84%EC%A0%AF
# http://stackoverflow.com/questions/10082299/qvboxlayout-how-to-vertically-align-widgets-to-the-top-instead-of-the-center
# http://stackoverflow.com/questions/805066/call-a-parent-classs-method-from-child-class-in-python
# QTableWidget
# https://wikidocs.net/5240
# http://stackoverflow.com/questions/26620191/adding-dynamically-a-row-in-a-qtablewidget
import os
import sys
from PyQt5.QtCore import *
from PyQt5.QtGui import *
from qt.DataManager import DataManager
from qt.backup.QPlainTextEditLogger import *
class MyMainWindow(QMainWindow):
    """Top-level window hosting a FormWidget; exposes status-bar updates."""

    def __init__(self, parent=None):
        super(MyMainWindow, self).__init__(parent)
        self.dm = DataManager()  # shared data store handed to the form
        self.form_widget = FormWidget(self, self.dm)
        self.setCentralWidget(self.form_widget)
        self.statusBar().showMessage('Ready')

    def changeStatusBar(self, msg):
        """Show `msg` in the window's status bar."""
        self.statusBar().showMessage(msg)
class FormWidget(QWidget):
    """Main form: buttons to create/read the stock-item CSV file, a
    read-only log pane, and a table of item code/name pairs."""

    def __init__(self, parent, dm):
        super(FormWidget, self).__init__(parent)
        self.parent = parent  # MyMainWindow, used for status-bar updates
        self.dm = dm  # shared DataManager instance
        # Button labels are Korean UI strings; left untouched (runtime text).
        self.btnQueryList = QPushButton("종목파일생성")
        self.btnQueryList.clicked.connect(self.writeFile)
        self.btnReadList = QPushButton("종목파일읽기")
        self.btnReadList.clicked.connect(self.readList)
        self.button1 = QPushButton("Button 1")
        self.button2 = QPushButton("Button 2")
        self.textEdit = QPlainTextEdit()
        self.textEdit.setReadOnly(True)
        self.testButton = QPushButton("테스트")
        self.testButton.clicked.connect(self.test)
        # Two-column table: item code / item name.
        self.itemTable = QTableWidget()
        # self.itemTableModel = QStandardItemModel()
        # self.itemTableModel.setHorizontalHeaderLabels((['코드', '이름']))
        # self.itemTable.setModel(self.itemTableModel)
        self.itemTable.setColumnCount(2)
        self.itemTable.setHorizontalHeaderLabels(['코드', '이름'])
        # table.setHorizontalHeaderLabels(QString("HEADER 1,HEADER 2,HEADER 3").split(","))
        # self.tableWidget.insertRow(self.tableWidget.rowCount())
        # self.itemTable.setRowCount(2)
        # self.itemTable.setColumnCount(2)
        # self.itemTable.setItem(0, 0, QTableWidgetItem("(0,0)"))
        # self.itemTable.setItem(0, 1, QTableWidgetItem("(0,1)"))
        # self.itemTable.setItem(1, 0, QTableWidgetItem("(1,0)"))
        # self.itemTable.setItem(1, 1, QTableWidgetItem("(1,1)"))
        self.gridLayout = QGridLayout(self)
        self.gridLayout.addWidget(self.btnQueryList, 0, 0)
        self.gridLayout.addWidget(self.btnReadList, 1, 0)
        self.gridLayout.addWidget(self.testButton, 2, 0, Qt.AlignVCenter)
        self.gridLayout.addWidget(self.textEdit, 0, 1, 2, 1)
        self.gridLayout.addWidget(self.itemTable, 3, 0)
        # self.setLayout(self.gridLayout)

    def writeFile(self):
        """Write a header-only CSV stub to ../data/testkospi.csv."""
        f = open('../data/testkospi.csv', 'w')
        f.write("%s,%s\n" % ('key', 'value'))
        f.close()

    def printPath(self):
        """Print cwd, this file's path, and its directory (debug aid)."""
        print(os.getcwd())  # current working directory
        print(os.path.realpath(__file__))  # this file
        print(os.path.dirname(os.path.realpath(__file__)))  # directory containing this file

    def readList(self):
        """Read the item file and load it into the table widget."""
        self.printPath()
        #file = '..\\data\\kospi.csv'
        #rel_path = '../data/testkospi.csv'
        rel_path = '../data/kospi.csv'
        script_dir = os.path.dirname(__file__)  # <-- absolute dir the script is in
        abs_file_path = os.path.join(script_dir, rel_path)
        # If the file cannot be opened, check whether another process is
        # holding it.
        itemList = []
        with open(abs_file_path, 'r') as f:
            for line in f:
                datas = line.split(',')
                code = datas[0]
                name = datas[1]
                itemList.append({'code': code.strip(), 'name': name.strip()})
        # Keep the item data in memory via the shared DataManager.
        self.dm.setItemList(itemList)
        # NOTE(review): these four setItem calls target rows that do not
        # exist yet (no insertRow has run), so Qt silently drops them -
        # they look like leftover debug code; confirm before removing.
        self.itemTable.setItem(0, 0, QTableWidgetItem("(0,0)"))
        self.itemTable.setItem(0, 1, QTableWidgetItem("(0,1)"))
        self.itemTable.setItem(1, 0, QTableWidgetItem("(1,0)"))
        self.itemTable.setItem(1, 1, QTableWidgetItem("(1,1)"))
        # Render the items into the table, one inserted row per item.
        for item in itemList:
            row = self.itemTable.rowCount()
            self.itemTable.insertRow(row)
            self.itemTable.setItem(row, 0, QTableWidgetItem(item['code']))
            self.itemTable.setItem(row, 1, QTableWidgetItem(item['name']))
        # Report completion via the status bar.
        sender = self.sender()
        self.parent.changeStatusBar(sender.text() + ' was pressed')

    def test(self):
        """Debug button handler: append a line to the log pane."""
        self.textEdit.appendPlainText('damn, a bug')
        sender = self.sender()
        self.parent.changeStatusBar(sender.text() + ' was pressed')
if __name__ == "__main__":
    # Launch the Qt application and block until the window closes.
    app = QApplication([])
    foo = MyMainWindow()
    foo.show()
    sys.exit(app.exec_())
import datetime
from rest_framework import status, viewsets
from rest_framework.response import Response
from mdm.clients.models import Cliente
# , ClienteInfo
from mdm.orders import serializers
from mdm.orders.models import Compra, Factura, Pedido
# from mdm.utils import call_me
# Create your views here.
class CompraViewSet(viewsets.ModelViewSet):
    '''List, create, retrieve, update, partial_update or destroy compras'''
    queryset = Compra.objects.all()
    serializer_class = serializers.CompraSerializer

    def list(self, request, *args, **kwargs):
        # Listing purchases is intentionally disabled (placeholder payload).
        return Response(
            data={"BOSS ERROR": "XIME NO ESTÁ SATISFECHA"},
            status=status.HTTP_417_EXPECTATION_FAILED
        )

    def create(self, request, *args, **kwargs):
        # Expected payload: {"id": <cliente id>, "compra": {card and
        # shipping-address fields}, "pedido": [{product line}, ...]}.
        # 1) Resolve a non-deleted customer or answer 404.
        try:
            cliente = Cliente.objects.get(
                id=request.data.get('id'),
                is_deleted=False
            )
        except Exception:
            return Response(
                data={"Response": "NOT_FOUND"},
                status=status.HTTP_404_NOT_FOUND
            )
        # 2) Extract the purchase and order sections of the payload.
        # NOTE(review): dict.get() does not raise on missing keys, so this
        # except branch looks unreachable - confirm the intended check.
        try:
            dataCompra = request.data.get('compra')
            dataPedido = request.data.get('pedido')
        except Exception:
            return Response(
                data={"Response": "NOT_ACCEPTABLE"},
                status=status.HTTP_406_NOT_ACCEPTABLE
            )
        # 3) Create the purchase row; any bad/missing field -> 400.
        try:
            compra = Compra.objects.create(
                cliente=cliente,
                noTarjeta=dataCompra["noTarjeta"],
                mesTarjeta=dataCompra["mesTarjeta"],
                anioTarjeta=dataCompra["anioTarjeta"],
                total=dataCompra["total"],
                calle=dataCompra["calle"],
                numero=dataCompra["numero"],
                colonia=dataCompra["colonia"],
                ciudad=dataCompra["ciudad"],
                cp=dataCompra["cp"],
                estado=dataCompra["estado"],
                entreCalles=dataCompra["entreCalles"]
            )
        except Exception:
            return Response(status=status.HTTP_400_BAD_REQUEST)
        # 4) Create one order line per product; a bad line aborts with 406.
        # NOTE(review): already-created lines are not rolled back on abort.
        for producto in dataPedido:
            try:
                Pedido.objects.create(
                    compra=compra,
                    codigoProducto=producto["codigoProducto"],
                    cantidadProducto=producto["cantidadProducto"],
                    precioProducto=producto["precioProducto"]
                )
            except Exception:
                return Response(
                    data={"Response": "ERROR"},
                    status=status.HTTP_406_NOT_ACCEPTABLE
                )
        serializer = self.get_serializer(compra)
        """
        # Call LOG
        url = 'https://logistica-294123.uc.r.appspot.com/generate'
        headers = {
            "Content-Type": "application/json"
        }
        status_code = 200
        data = serializer.data
        clienteInfo = ClienteInfo.objects.get(
            cliente=cliente,
            is_main=True
        )
        data['name'] = cliente.nombrePila
        data['email'] = clienteInfo.correo
        print(data)
        call_LOG = call_me.maybe(
            url,
            headers,
            data,
            status_code
        )
        if not call_LOG:
            compra.delete()
            return Response(
                data={"Response": "LOGISTICS_FAILED"},
                status=status.HTTP_417_EXPECTATION_FAILED
            )
        """
        return Response(
            data=serializer.data,
            status=status.HTTP_201_CREATED
        )

    def retrieve(self, request, *args, **kwargs):
        # NOTE(review): despite living on the Compra viewset, this looks up
        # a Cliente by pk and serializes it with ClienteSerializer -
        # confirm this is the intended behavior.
        try:
            cliente = Cliente.objects.get(id=self.kwargs['pk'])
            if not cliente.is_deleted:
                serializer = serializers.ClienteSerializer(cliente)
                data = serializer.data
            else:
                return Response(
                    data={"Response": "NOT_FOUND"},
                    status=status.HTTP_404_NOT_FOUND
                )
        except Exception:
            return Response(
                data={"Response": "NOT_FOUND"},
                status=status.HTTP_404_NOT_FOUND
            )
        return Response(
            data=data,
            status=status.HTTP_202_ACCEPTED
        )

    def update(self, request, *args, **kwargs):
        # Updating purchases is intentionally disabled.
        return Response(
            data={"BOSS ERROR": "XIME NO ESTÁ SATISFECHA"},
            status=status.HTTP_417_EXPECTATION_FAILED
        )

    def partial_update(self, request, *args, **kwargs):
        # Partial updates are intentionally disabled.
        return Response(
            data={"BOSS ERROR": "XIME NO ESTÁ SATISFECHA"},
            status=status.HTTP_417_EXPECTATION_FAILED
        )

    def destroy(self, request, *args, **kwargs):
        # Deleting purchases is intentionally disabled.
        return Response(
            data={"BOSS ERROR": "XIME NO ESTÁ SATISFECHA"},
            status=status.HTTP_417_EXPECTATION_FAILED
        )
class FacturaViewSet(viewsets.ModelViewSet):
    '''List, create, retrieve, update, partial_update or delete compras'''
    queryset = Factura.objects.all()
    serializer_class = serializers.FacturaSerializer

    def Duplicate(self, compra):
        """Return True when an invoice already exists for `compra`."""
        try:
            Factura.objects.get(compra=compra)
            return True
        except Factura.DoesNotExist:
            return False

    def list(self, request, *args, **kwargs):
        # Listing invoices is intentionally disabled.
        return Response(
            data={"BOSS ERROR": "XIME NO ESTÁ SATISFECHA"},
            status=status.HTTP_417_EXPECTATION_FAILED
        )

    def create(self, request, *args, **kwargs):
        # Expected payload: invoice fields plus "compra_id" linking it to
        # an existing purchase.
        try:
            data = request.data
            compra_id = data["compra_id"]
        except Exception:
            return Response(
                data={"Response": "BAD_REQUEST"},
                status=status.HTTP_400_BAD_REQUEST
            )
        # The purchase must exist and belong to a non-deleted customer.
        try:
            compra = Compra.objects.get(
                id=compra_id
            )
            cliente = compra.cliente
            if cliente.is_deleted:
                return Response(
                    data={"Response": "NOT_FOUND"},
                    status=status.HTTP_404_NOT_FOUND
                )
        except Exception:
            return Response(
                data={"Response": "NOT_FOUND"},
                status=status.HTTP_404_NOT_FOUND
            )
        # Idempotent create: reuse an existing invoice for this purchase,
        # otherwise build a new one from the payload.
        try:
            duplicate = self.Duplicate(compra)
            if duplicate:
                factura = Factura.objects.get(compra=compra)
            else:
                factura = Factura.objects.create(
                    RFC=data["RFC"],
                    razonSocial=data["razonSocial"],
                    correo=data["correo"],
                    telefono=data["telefono"],
                    calle=data["calle"],
                    numero=data["numero"],
                    colonia=data["colonia"],
                    ciudad=data["ciudad"],
                    cp=data["cp"],
                    estado=data["estado"],
                    entreCalles=data["entreCalles"],
                    compra=compra,
                )
        except Exception:
            return Response(status=status.HTTP_400_BAD_REQUEST)
        serializer = self.get_serializer(factura)
        # Call MKT
        # url = 'https://diz-marketing.herokuapp.com/NEW_PURCHASE'
        # headers = {
        #     "Content-Type": "application/json"
        # }
        # status_code = 200
        # data = {
        #     serializer.data
        # }
        # call_me.maybe(
        #     url,
        #     headers,
        #     data,
        #     status_code
        # )
        return Response(
            data=serializer.data,
            status=status.HTTP_201_CREATED
        )

    def retrieve(self, request, *args, **kwargs):
        # Fetch the invoice; hidden (404) when its customer was deleted.
        try:
            factura = self.get_object()
            cliente = factura.compra.cliente
            if cliente.is_deleted:
                return Response(
                    data={"Response": "NOT_FOUND"},
                    status=status.HTTP_404_NOT_FOUND
                )
            serializer = serializers.FacturaSerializer(factura)
        except Exception:
            return Response(
                data={"Response": "NOT_FOUND"},
                status=status.HTTP_404_NOT_FOUND
            )
        return Response(
            data=serializer.data,
            status=status.HTTP_202_ACCEPTED
        )

    def update(self, request, *args, **kwargs):
        # Updating invoices is intentionally disabled.
        return Response(
            data={"BOSS ERROR": "XIME NO ESTÁ SATISFECHA"},
            status=status.HTTP_417_EXPECTATION_FAILED
        )

    def partial_update(self, request, *args, **kwargs):
        # Partial updates are intentionally disabled.
        return Response(
            data={"BOSS ERROR": "XIME NO ESTÁ SATISFECHA"},
            status=status.HTTP_417_EXPECTATION_FAILED
        )

    def destroy(self, request, *args, **kwargs):
        # Deleting invoices is intentionally disabled.
        return Response(
            data={"BOSS ERROR": "XIME NO ESTÁ SATISFECHA"},
            status=status.HTTP_417_EXPECTATION_FAILED
        )
class ValidateCardView(viewsets.ModelViewSet):
    """POST-only endpoint that validates a payment card.

    ``create`` expects ``noTarjeta`` (digit string), ``mesTarjeta`` and
    ``anioTarjeta`` (two-digit year) and answers 202 when the number passes
    the Luhn check and the card is not expired. Every other verb is disabled.
    """
    queryset = Cliente.objects.all()
    serializer_class = serializers.ClienteSerializer

    def evenDigits(self, card, length, end):
        """Luhn step 1: sum the doubled every-second digits, right to left.

        A doubled digit above 9 contributes the sum of its two digits
        (equivalently, the double minus 9).
        """
        total = 0
        for i in range(length - 2, end, -2):
            # Security/idiom fix: int() instead of eval() - the card number
            # is untrusted user input and must never be eval'd.
            number = int(card[i]) * 2
            if number > 9:
                # number is at most 18 here, so digit-sum == tens + ones.
                number = number // 10 + number % 10
            total += number
        return total

    def oddDigits(self, card, length, end):
        """Luhn step 2: sum the undoubled every-second digits, right to left."""
        total = 0
        for i in range(length - 3, end, -2):
            total += int(card[i])  # int() instead of eval() (security fix)
        return total

    def card_luhn(self, card):
        """Return True when `card` is a 15- or 16-digit Luhn-valid number."""
        length = len(card)
        if length == 16:
            checksum = (self.evenDigits(card, length, -1)
                        + self.oddDigits(card, length, 0)
                        + int(card[15]))
        elif length == 15:
            checksum = (self.evenDigits(card, length, 0)
                        + self.oddDigits(card, length, -1)
                        + int(card[14]))
        else:
            # Only 15- and 16-digit cards are supported.
            return False
        return checksum % 10 == 0

    def expired_card(self, monthC, yearC):
        """Return True when the month / two-digit year is still valid.

        NOTE(review): a card whose expiry month equals the current month is
        treated as expired (strict comparisons, as in the original) -
        confirm this is the intended policy.
        """
        today = datetime.datetime.today()
        currentMonth = today.month
        currentYear = int(str(today.year)[2:4])  # two-digit year
        if monthC > currentMonth:
            return yearC >= currentYear
        return yearC > currentYear

    def list(self, request, *args, **kwargs):
        # Listing is intentionally disabled on this endpoint.
        return Response(
            data={"BOSS ERROR": "XIME NO ESTÁ SATISFECHA"},
            status=status.HTTP_417_EXPECTATION_FAILED
        )

    def create(self, request, *args, **kwargs):
        """Validate the posted card; 202 on success, 406 with a reason code
        (CARD_NOT_VALID / EXPIRED_CARD) otherwise."""
        try:
            noTarjeta = request.data.get('noTarjeta')
            mesTarjeta = request.data.get('mesTarjeta')
            anioTarjeta = request.data.get('anioTarjeta')
        except Exception:
            return Response(
                data={"Response": "BAD_REQUEST"},
                status=status.HTTP_400_BAD_REQUEST
            )
        checkTarjeta = self.card_luhn(noTarjeta)
        checkExpired = self.expired_card(int(mesTarjeta), int(anioTarjeta))
        if checkTarjeta and checkExpired:
            # Echo the validated card data back to the caller.
            return Response(
                data={
                    "noTarjeta": noTarjeta,
                    "mesTarjeta": mesTarjeta,
                    "anioTarjeta": anioTarjeta
                },
                status=status.HTTP_202_ACCEPTED
            )
        if not checkTarjeta:
            return Response(
                data={"Response": "CARD_NOT_VALID"},
                status=status.HTTP_406_NOT_ACCEPTABLE
            )
        if not checkExpired:
            return Response(
                data={"Response": "EXPIRED_CARD"},
                status=status.HTTP_406_NOT_ACCEPTABLE
            )
        return Response(
            data={"Response": "ERROR"},
            status=status.HTTP_400_BAD_REQUEST
        )

    def retrieve(self, request, *args, **kwargs):
        # Retrieval is intentionally disabled on this endpoint.
        return Response(
            data={"BOSS ERROR": "XIME NO ESTÁ SATISFECHA"},
            status=status.HTTP_417_EXPECTATION_FAILED
        )

    def update(self, request, *args, **kwargs):
        # Updates are intentionally disabled on this endpoint.
        return Response(
            data={"BOSS ERROR": "XIME NO ESTÁ SATISFECHA"},
            status=status.HTTP_417_EXPECTATION_FAILED
        )

    def partial_update(self, request, *args, **kwargs):
        # Partial updates are intentionally disabled on this endpoint.
        return Response(
            data={"BOSS ERROR": "XIME NO ESTÁ SATISFECHA"},
            status=status.HTTP_417_EXPECTATION_FAILED
        )

    def destroy(self, request, *args, **kwargs):
        # Deletion is intentionally disabled on this endpoint.
        return Response(
            data={"BOSS ERROR": "XIME NO ESTÁ SATISFECHA"},
            status=status.HTTP_417_EXPECTATION_FAILED
        )
|
import random

print("H A N G M A N\n")

choices = ['python', 'java', 'kotlin', 'javascript']
# Pick the secret word for this session.
selected = random.sample(choices, 1)
sele = selected[0]

i = 0          # wrong guesses so far (8 allowed)
se = set()     # letters already tried
s = ""         # most recent input

ip = input('Type "play" to play the game, "exit" to quit:')
op = list(sele)
bs = [None] * len(op)   # revealed letters / "-" placeholders
tr = [None] * len(op)   # 1 once a position has been revealed

# NOTE(review): as in the original, game state (word, board, guess count)
# is not reset between rounds, so replaying continues the same game.
while ip == "play":
    while i < 8:
        # Reveal every position matching the last guess; mask the rest.
        for j in range(len(op)):
            if op[j] == s and tr[j] is None:
                bs[j] = s
                tr[j] = 1
            elif tr[j] is None:
                bs[j] = "-"
        # Bug fix: the original never left the guessing loop on a win,
        # so a fully solved word still asked for more letters.
        if "-" not in bs:
            break
        print()
        print(''.join(bs))
        s = input("Input a letter: ")
        if len(s) != 1:
            print("You should input a single letter\n")
            continue
        if ord(s) not in range(97, 123):
            print("It is not an ASCII lowercase letter\n")
            continue
        if s in se:
            print("You already typed this letter\n")
            continue
        if s not in sele:
            i += 1
            print("No such letter in the word")
        se.add(s)
    if "-" not in bs and None not in bs:
        # Bug fix: the original used "You guessed the word ".join(bs),
        # which inserts the message BETWEEN every letter instead of
        # prefixing the solved word.
        print("You guessed the word " + ''.join(bs) + "!")
        print("You survived!")
    else:
        print("You are hanged!")
    ip = input('Type "play" to play the game, "exit" to quit:')
|
# Input: a string that mixes letters and digits.
string = 'abdwndjwd123434'
# Echo the input string.
# Bug fix: the original printed only string[1] (a single character)
# although its comment said to print the entered string.
print(string)
# Bug fix: the original assigned into string indices
# (string1[i] = string[num]) - Python strings are immutable, so that
# raises TypeError. Build each group with a join instead.
string1 = ''.join(ch for ch in string if 'a' <= ch <= 'z')  # lowercase letters
string2 = ''.join(ch for ch in string if '0' <= ch <= '9')  # digits
print(string1)
print(string2)
# TODO: sort the characters within each group.
# sap xep theo thu tu trong tung chuoi |
import sys
import copy
from decimal import Decimal
import itertools
#-----------------------------------Function and Definitions------------------------------
#Splits the given literal into its variable and the value(eg. LeakIdea = +)
def splitLiteral(literal):
    """Split a literal like "LeakIdea = +" into (variable, bool_value).

    The value is True for '+' and False for anything else ('-').
    """
    variable, _, raw_value = literal.strip().partition(' = ')
    return variable.strip(), raw_value.strip() == '+'
#Sorts the nodes in the topological order and returns the topological sorted list of nodes
def topologicalSort(bayesnet):
    """Return the network's node names in a parents-before-children order.

    Repeatedly sweeps the node set, emitting any node whose parents have
    all been emitted (assumes the network is acyclic).
    """
    ordered = []
    placed = set()
    while len(ordered) < len(bayesnet):
        for name in bayesnet:
            if name in placed:
                continue
            if all(p in placed for p in bayesnet[name]['parents']):
                ordered.append(name)
                placed.add(name)
    return ordered
#Returns only the node that are required for the query.
def nodeSelection(evidence, bayesnet, sortedNodes):
    """Keep only the nodes the query needs: the evidence nodes and all of
    their ancestors, preserving the given topological order."""
    frontier = set(evidence.keys())
    required = set(frontier)
    # Walk parent links upward until no new ancestors appear.
    while frontier:
        current = frontier.pop()
        for parent in bayesnet[current]['parents']:
            frontier.add(parent)
            required.add(parent)
    return [node for node in sortedNodes if node in required]
#Returns the probability using enumeration given the required variables(vars),evidence variables(e) and the bayesian network.
def enumeration(vars, e, bayesnet):
    """Probability of the assignments in `e`, summing out the remaining
    variables in `vars` (standard enumerate-all recursion)."""
    if not vars:
        return 1.0
    first, rest = vars[0], vars[1:]
    if first in e:
        return probability(first, e, bayesnet) * enumeration(rest, e, bayesnet)
    # Hidden variable: sum over both of its truth values.
    total = 0.0
    for truth in (True, False):
        extended = copy.deepcopy(e)
        extended[first] = truth
        total += probability(first, extended, bayesnet) * enumeration(rest, extended, bayesnet)
    return total
#Returns the probability of variable Y given its parents in evidence e.
def probability(Y, e, bayesnet):
    """P(Y = e[Y] | parents(Y) as assigned in e), read from the node's CPT.

    Decision nodes are fixed by the agent and contribute a factor of 1.
    """
    node = bayesnet[Y]
    if node['type'] == 'decision':
        return 1.0
    parents = node['parents']
    if parents:
        # Conditional table is keyed by the tuple of parent truth values.
        key = tuple(e[p] for p in parents)
        p_true = float(node['condprob'][key])
    else:
        p_true = float(node['prob'])
    return p_true if e[Y] else 1.0 - p_true
#-----------------------------------Input & Building Data Structures--------------------------------
#Bayesian Network Dictionary
BayesNet = {}
sortedNodes = []
rawQueryList = []
#Reading the input file
#filename = sys.argv[-1]
filename = 'input02.txt'
inputFile = open(filename)
#Building queries from input
line = inputFile.readline().strip()
while line != '******':
rawQueryList.append(line)
#print line
line = inputFile.readline().strip()
#Building the bayesian network from input
line = ' '
while line != '':
#Declaring the parent list
parents = []
# Input the node names and the parents
line = inputFile.readline().strip()
nodeAndParents = lines = line.split(' | ')
node = nodeAndParents[0].strip()
if len(nodeAndParents) != 1:
parents = nodeAndParents[1].strip().split(' ')
BayesNet[node] = {}
BayesNet[node]['parents'] = parents
BayesNet[node]['children']=[]
#Insert child for all the parents
for parent in parents:
BayesNet[parent]['children'].append(node)
# Input the probabilities
if len(parents) == 0:
line = inputFile.readline().strip()
if line == 'decision':
#Decision Node
BayesNet[node]['type'] = 'decision'
else:
#Node with prior probability
BayesNet[node]['type'] = 'normal'
BayesNet[node]['prob'] = line
else:
#Nodes with conditional probabilies
condprob = {}
for i in range(0,pow(2,len(parents))):
line = inputFile.readline().strip()
lines = line.split(' ')
prob = lines[0]
lines = lines[1:]
truth = tuple(True if x == '+' else False for x in lines)
condprob[truth] = prob
BayesNet[node]['type'] = 'normal'
BayesNet[node]['condprob'] = condprob
line = inputFile.readline().strip()
#print BayesNet['E']
#-------------------------------Declaring Output Files----------------------------------------
outputFile = open('output.txt','w')
#print BayesNet
#--------------------------------Query Inferencing---------------------------------------------
#Sort all the nodes in topological order
sortedNodes = topologicalSort(BayesNet)
#Query Inferencing for all the input queries
# Three operations are supported:
#   P(...)   probability query (2-decimal output)
#   EU(...)  expected utility (rounded int output)
#   MEU(...) maximize expected utility over the free decision variables
# Removed a leftover Python 2 debug statement (`print "maximize", ...`).
for query in rawQueryList:
    fullEvidence = {}
    observedEvidence = {}
    operation = query[:query.index('(')]
    operation = operation.strip()
    if operation == 'P':
        isSeparatorGiven = False
        result = 1.0
        literals = query[query.index('(')+1:query.index(')')]
        orIndex = literals.index('|') if '|' in literals else -1
        #If both query and evidence is given.
        if orIndex != -1:
            isSeparatorGiven = True
            holder = literals[:orIndex]
            xLiterals = holder.strip()
            xLiterals = xLiterals.split(',')
            for xLiteral in xLiterals:
                xLiteral = xLiteral.strip()
                xVar,xVal = splitLiteral(xLiteral)
                fullEvidence[xVar] = xVal
            holder = literals[orIndex+1:]
        #If only evidence is given
        else:
            holder = literals
        literals = holder.strip()
        literals = literals.split(',')
        for literal in literals:
            literal = literal.strip()
            var,val = splitLiteral(literal)
            fullEvidence[var] = val
            observedEvidence[var] = val
        #Final calculations
        if isSeparatorGiven == True:
            # Conditional probability = P(query, evidence) / P(evidence)
            #Calculating the numerator
            sortedNodesForNumerator = nodeSelection(fullEvidence,BayesNet,sortedNodes)
            numerator = enumeration(sortedNodesForNumerator,fullEvidence,BayesNet)
            #Calculating the denominator
            sortedNodesForDenominator = nodeSelection(observedEvidence,BayesNet,sortedNodes)
            denominator = enumeration(sortedNodesForDenominator,observedEvidence,BayesNet)
            result = numerator/denominator
        else:
            sortedNodesForQuery = nodeSelection(observedEvidence,BayesNet,sortedNodes)
            result = enumeration(sortedNodesForQuery,observedEvidence,BayesNet)
        # Small epsilon nudges borderline values (e.g. 0.005) upward before
        # quantizing to two decimal places.
        result = Decimal(str(result+1e-8)).quantize(Decimal('.01'))
        outputFile.write(str(result))
        outputFile.write('\n')
    elif operation == 'EU':
        isSeparatorGiven = False
        result = 1.0
        literals = query[query.index('(')+1:query.index(')')]
        orIndex = literals.index('|') if '|' in literals else -1
        #If both query and evidence is given.
        if orIndex != -1:
            isSeparatorGiven = True
            holder = literals[:literals.index(' | ')]
            xLiterals = holder.strip()
            xLiterals = xLiterals.split(',')
            for xLiteral in xLiterals:
                xLiteral = xLiteral.strip()
                xVar,xVal = splitLiteral(xLiteral)
                fullEvidence[xVar] = xVal
            holder = literals[literals.index(' | ')+3:]
        #If only evidence is given
        else:
            holder = literals
        literals = holder.strip()
        literals = literals.split(',')
        for literal in literals:
            literal = literal.strip()
            var,val = splitLiteral(literal)
            fullEvidence[var] = val
            observedEvidence[var] = val
        # Switching the utility node on makes enumeration return utility.
        fullEvidence['utility'] = True
        #Final calculations
        if isSeparatorGiven == True:
            #Calculating the numerator
            sortedNodesForNumerator = nodeSelection(fullEvidence,BayesNet,sortedNodes)
            numerator = enumeration(sortedNodesForNumerator,fullEvidence,BayesNet)
            #Calculating the denominator
            sortedNodesForDenominator = nodeSelection(observedEvidence,BayesNet,sortedNodes)
            denominator = enumeration(sortedNodesForDenominator,observedEvidence,BayesNet)
            result = numerator/denominator
        else:
            sortedNodesForQuery = nodeSelection(fullEvidence,BayesNet,sortedNodes)
            result = enumeration(sortedNodesForQuery,fullEvidence,BayesNet)
        result = int(round(result))
        outputFile.write(str(result))
        outputFile.write('\n')
    else:
        # MEU: enumerate every +/- assignment of the free decision variables
        # and report the assignment achieving the highest expected utility.
        isSeparatorGiven = False
        result = {}
        maximizationLiterals = []
        literals = query[query.index('(')+1:query.index(')')]
        orIndex = literals.index('|') if '|' in literals else -1
        #If both query and evidence is given.
        if orIndex != -1:
            isSeparatorGiven = True
            holder = literals[:literals.index(' | ')]
            xLiterals = holder.strip()
            xLiterals = xLiterals.split(',')
            for xLiteral in xLiterals:
                equalIndex = xLiteral.index('=') if '=' in xLiteral else -1
                if equalIndex != -1:
                    xLiteral = xLiteral.strip()
                    xVar,xVal = splitLiteral(xLiteral)
                    fullEvidence[xVar] = xVal
                else:
                    # Bare variable (no '=' assignment): maximize over it.
                    maximizationLiterals.append(xLiteral.strip())
            holder = literals[literals.index(' | ')+3:]
        #If only evidence is given
        else:
            holder = literals
        literals = holder.strip()
        literals = literals.split(',')
        for literal in literals:
            equalIndex = literal.index('=') if '=' in literal else -1
            if equalIndex != -1:
                literal = literal.strip()
                var,val = splitLiteral(literal)
                fullEvidence[var] = val
                observedEvidence[var] = val
            else:
                maximizationLiterals.append(literal.strip())
        fullEvidence['utility'] = True
        sizeOfMaxLiterals = len(maximizationLiterals)
        truthTable = list(itertools.product([True, False], repeat=sizeOfMaxLiterals))
        for i in range(0,len(truthTable)):
            completeEvidence = copy.deepcopy(fullEvidence)
            value = ''
            j = 0
            for maxLiteral in maximizationLiterals:
                completeEvidence[maxLiteral] = truthTable[i][j]
                if truthTable[i][j] == True:
                    value = value + '+ '
                else:
                    value = value + '- '
                j = j+1
            #Final calculations
            if isSeparatorGiven == True:
                #Calculating the numerator
                sortedNodesForNumerator = nodeSelection(completeEvidence,BayesNet,sortedNodes)
                numerator = enumeration(sortedNodesForNumerator,completeEvidence,BayesNet)
                #Calculating the denominator
                sortedNodesForDenominator = nodeSelection(observedEvidence,BayesNet,sortedNodes)
                denominator = enumeration(sortedNodesForDenominator,observedEvidence,BayesNet)
                eachResult = numerator/denominator
            else:
                sortedNodesForQuery = nodeSelection(completeEvidence,BayesNet,sortedNodes)
                eachResult = enumeration(sortedNodesForQuery,completeEvidence,BayesNet)
            # NOTE(review): equal utilities collide on this dict key, keeping
            # only the last assignment -- confirm ties are not expected.
            result[eachResult] = value
        maxResult = max(result.keys())
        outputFile.write(result[maxResult]+str(int(round(maxResult))))
        outputFile.write('\n')
#-----------------------------Brushing the final output file--------------------------------
outputFile.close()
# Strip the single trailing newline so the file does not end with a blank line.
with open('output.txt', 'rb+') as finalStripFile:
    finalStripFile.seek(0, 2)        # seek to EOF to learn the file size
    size = finalStripFile.tell()
    if size > 0:                     # guard: truncate(-1) would raise on an empty file
        finalStripFile.truncate(size - 1)
# (the explicit close() is gone: the `with` statement already closes the file)
|
import random
# Fill a 5x5 matrix with random ints in [1, 10], printing each row, and
# accumulate: the sum of row 4 (index 3), column 2 (index 1), the main
# diagonal, and all elements.
SIZE = 5
row4_total = 0
col2_total = 0
diag_total = 0
grand_total = 0
for row in range(SIZE):
    current_row = []
    for col in range(SIZE):
        value = random.randint(1, 10)
        current_row.append(value)
        grand_total += value
        if row == 3:
            row4_total += value
        if col == 1:
            col2_total += value
        if col == row:
            diag_total += value
    print(current_row)
print('Soma da linha 4: ', row4_total)
print('Soma da coluna 2: ', col2_total)
print('Soma da diagonal principal: ', diag_total)
print('Soma de todos os elemnetos: ', grand_total)
|
import random
# Greet the user once at startup.
print("WELCOME TO THE BOARD")
def players():
    """Prompt for the number of human players, store it in the module-level
    `player` global (read by game()), and start a game."""
    print("Select players")
    global player
    player = int(input())
    game()
def game():
    """Play one round of the number-picking game for 1 or 2 human players.

    Reads the module-level `player` count set by players(); re-prompts via
    players() when the count is invalid, and recurses to replay on request.
    """
    if player == 1:
        name_one = input("Enter player name: ")
        print("Player 2 will be computer")
        # Human picks a number; the computer picks randomly from 1-10.
        pick_one = int(input(name_one + " select the number : "))
        pick_two = random.randint(1, 10)
        print("computer's number were " + format(str(pick_two)))
        if pick_one > pick_two:
            print(name_one + " won the match")
        elif pick_one < pick_two:
            print(name_one + " loss the match")
        else:
            print("DRAW")
    elif player == 2:
        name_one = input("Enter player name: ")
        name_two = input("Enter player name: ")
        # Both humans pick; the higher number wins.
        pick_one = int(input(name_one + " select the number : "))
        pick_two = int(input(name_two + " select the number : "))
        if pick_one > pick_two:
            print(name_one + " won the match")
        elif pick_one < pick_two:
            print(name_two + " won the match")
        else:
            print("DRAW")
    else:
        print("Only two players are allowed")
        players()
    if input("wanna play again(yes/no): ") == "yes":
        game()
    else:
        print("game over")
players()  # entry point: prompt for player count and start the first game
|
from flask import url_for, flash, render_template, make_response
from flask import Blueprint
from flask_login import login_user, login_required, logout_user
from werkzeug.utils import redirect
from expenses_app import db, login_manager
from expenses_app.auth.forms import LogInForm, Register
from expenses_app.models import AuthorisedEmail, User
# Blueprint grouping all authentication routes (login/register/logout).
auth_bp = Blueprint(
    'auth_bp', __name__,
    template_folder='templates',
    static_folder='static'
)
@auth_bp.route("/login", methods=["GET", "POST"])
def login():
    """Render the login form; on a valid submission, authenticate and
    redirect to the main group page."""
    form = LogInForm()
    if form.validate_on_submit():
        submitted_password = form.password.data
        auth_email = AuthorisedEmail.query.filter(
            AuthorisedEmail.email == form.email.data).first()
        if auth_email and auth_email.user and auth_email.user.check_password(submitted_password):
            login_user(auth_email.user)
            return redirect(url_for("grp_bp.index"))
        # TODO: Limit number of retries
        flash("Invalid email or password!")
    return render_template("login.html", form=form)
@auth_bp.route("/register", methods=["GET", "POST"])
def register():
    """Handle sign-up: only pre-authorised emails may create an account."""
    form = Register()
    if form.validate_on_submit():
        requested_username = form.username.data
        username_taken = User.query.filter_by(username=requested_username).first()
        auth_email = AuthorisedEmail.query.filter_by(email=form.email.data).first()
        if auth_email and auth_email.is_registered:
            flash("You are already registered! Try logging in instead!")
        elif auth_email and username_taken:
            flash("That username already exists! Try another")
        elif auth_email:
            new_user = User.create_user(auth_email, form.password.data, requested_username)
            db.session.commit()
            if new_user:
                login_user(new_user)
                return redirect(url_for("grp_bp.index"))
            # TODO: Handle these errors more nicely
            return make_response("Something went wrong with registration!", 500)
        else:
            flash("Email is not an authorised email! This is a private service.")
    return render_template("register.html", form=form)
@auth_bp.route("/logout")
@login_required
def logout():
    """Log the current user out and send them back to the login page."""
    logout_user()
    return redirect(url_for("auth_bp.login"))
@login_manager.user_loader
def load_user(user_id):
    """Flask-Login hook: map a session user_id to a User, or None."""
    if user_id is None:
        return None
    return User.query.get(user_id)
@login_manager.unauthorized_handler
def unauthorized():
    """Flask-Login hook: called when an anonymous user hits a protected page."""
    flash('You must be logged in to view that page.')
    return redirect(url_for('auth_bp.login'))
|
#! /usr/bin/env python
###################################
# Davi Ortega 4/21/2011
###################################
import sys
import bitk
# CLI help: -h prints usage and exits.  (Python 2 script.)
if '-h' in sys.argv:
    print 'Exclude sequences above some identity level\nSintax: xclud_ident #Ident_cutoff msa.fa \n IT MUST BE ALIGNED'
    sys.exit()
print 'Reading sequences'
# argv[1]: identity cutoff (fraction); argv[2]: aligned FASTA file.
ID_cut = float(sys.argv[1])
seq_dic, seq_list = bitk.fastareader(sys.argv[2],'r')
ID = 0  # NOTE(review): never read afterwards
xclud = []  # names of sequences excluded as too similar to a kept one
print 'Calculations'
for seq1 in seq_list:
    if seq1 not in xclud:
        print 'Working on ' + seq1
        for seq2 in seq_list:
            if (seq2 not in xclud) and (seq1 != seq2):
                error = 0
                total = 0  # NOTE(review): never read afterwards
                for i in range(len(seq_dic[seq1])):
                    try:
                        if seq_dic[seq1][i] != seq_dic[seq2][i]:
                            error += 1
                            # Identity is measured against the ungapped length of
                            # seq1; stop comparing once it drops below the cutoff
                            # (pair is dissimilar enough -> keep seq2).
                            if 1 - (error/float(len(seq_dic[seq1].replace('-','')) )) < ID_cut:
                                break
                        # Reached the last column without breaking: the pair is
                        # above the identity cutoff, so exclude seq2.
                        if i == len(seq_dic[seq1])-1:
                            xclud.append(seq2)
                            print 'Excluding ' + seq2
                    except IndexError:
                        # Sequences of unequal length (not truly aligned):
                        # dump diagnostics and abort.
                        print seq1
                        print seq_dic[seq1]
                        print len(seq_dic[seq1])
                        print seq2
                        print seq_dic[seq2]
                        print len(seq_dic[seq2])
                        print i
                        sys.exit()
# Write the surviving sequences to "<input minus 2 chars><cutoff>.fa".
output = ''
for seq in seq_list:
    if seq not in xclud:
        output += '>' + seq + '\n' + seq_dic[seq] + '\n'
outfile = open(sys.argv[2][:-2] + sys.argv[1] + '.fa','w')
outfile.write(output)
outfile.close()
|
import subprocess;
# Output directory for generated scene XML and the particle-position input
# file.  NOTE(review): absolute user-specific paths; adjust per machine.
outDir = '/Users/arthur/Desktop/SPHRender/XML/'
filePath = '/Users/arthur/Desktop/SPHRender/locs.txt'
def beginScene():
    """Return the XML prolog for a Mitsuba scene with a depth-3 path tracer."""
    parts = [
        '<scene version="0.5.0">\n',
        '<!-- Instantiate a unidirectional path tracer,\n',
        'which renders paths up to a depth of 3 -->\n',
        '<integrator type="path">\n',
        '<integer name="maxDepth" value="3"/>\n',
        '</integrator>\n',
    ]
    return ''.join(parts)
def addParticle(x, y, z):
    """Return a sphere shape element centered at (x, y, z); coordinates are
    passed as strings taken straight from the input file."""
    center = '<point name="center" x="' + x + '" y="' + y + '" z="' + z + '"/>'
    return ('<shape type="sphere">\n'
            + center + ' <float name="radius" value=".015"/>\n'
            '<bsdf type="diffuse"/>\n'
            '</shape>\n\n')
def addSensor():
    """Return the fixed perspective camera element (60-degree field of view)."""
    fragments = (
        '<sensor type = "perspective">\n',
        '<float name="fov" value="60"/>\n',
        '<transform name ="toWorld">\n',
        '<lookat origin="3, 1, 3" target="5, 1, 5" up ="0, 1, 0"/> \n',
        '</transform>\n',
        '</sensor>\n\n',
    )
    return ''.join(fragments)
def endScene():
    """Return the closing tag of a scene document."""
    closing_tag = '</scene>'
    return closing_tag
def process():
    """Split the particle-location file into per-timestep Mitsuba XML scenes.

    Every line starting with 'Time' flushes the scene accumulated so far to
    outDir as outNNNNNN.xml; any other line is a particle position "x y z".
    The first line of the input file is discarded as a header.
    """
    scene_index = 0
    source = open(filePath, 'r')
    source.readline()  # toss out the first line
    scene_text = beginScene() + addSensor()
    for record in source:
        if record.startswith('Time'):
            # New timestep: close and write the previous scene.
            scene_text += endScene()
            out_name = 'out' + str(scene_index).zfill(6) + '.xml'
            destination = open(outDir + out_name, 'w')
            destination.write(scene_text)
            destination.close()
            # Start accumulating the next scene.
            scene_text = beginScene() + addSensor()
            scene_index += 1
        else:
            fields = record.split()
            scene_text += addParticle(fields[0], fields[1], fields[2])
    # Flush the final timestep.
    scene_text += endScene()
    out_name = 'out' + str(scene_index).zfill(6) + '.xml'
    destination = open(outDir + out_name, 'w')
    destination.write(scene_text)
    destination.close()
def main():
    """Generate all scene files, render them with Mitsuba, then encode a video."""
    process()
    #add in mitsuba subprocess?
    #add in animate subprocess? ffmpeg?
    subprocess.call('mitsuba XML/out*.xml', shell=True)
    #can add exr to png conversion
    subprocess.call('ffmpeg -r 96 -i XML/out%06d.exr -vb 20M -r 96 myvideo.mpg', shell=True)
if __name__ =='__main__':
    main()
|
from django.db import models
from django.contrib.auth.models import User
class Search(models.Model):
    """A single search query submitted by a user."""
    # PROTECT: a User with recorded searches cannot be deleted.
    searcher = models.ForeignKey(User, on_delete=models.PROTECT, blank=True )
    text = models.CharField(max_length=500)
# Create your models here.
|
from serial.tools import list_ports
import util
from pydobot import Dobot
# Connect to the first serial device found -- assumes the Dobot is the only
# (or first) serial port on this host; verify on multi-device machines.
port = list_ports.comports()[0].device
device = Dobot(port=port, verbose=False)
util.pushCubeQueue(device)  # run the project's cube-stacking routine
device.close()
|
# Read three candidate side lengths.
r1 = int(input('Digite a reta 1: '))
r2 = int(input('Digite a reta 2: '))
r3 = int(input('Digite a reta 3: '))
# Triangle inequality: every side must be shorter than the sum of the others.
a = r1 < (r2 + r3)
b = r2 < (r1 + r3)
c = r3 < (r1 + r2)
# Fixed: the original tested `a and b and c is True`, an identity check on c.
if a and b and c:
    print('\nPode Formar um Triangulo')  # typo "Foramr" fixed
    if r1 == r2 == r3:
        print('É Um Triangulo EQUILÁTERO')
    elif r1 == r2 or r1 == r3 or r2 == r3:
        # Fixed: the original classification missed the r2 == r3 case, which
        # printed nothing for inputs like (2, 3, 3).
        print('É Um Triangulo ISÓSCELES')
    else:
        print('É Um Triangulo ESCALENO')
else:
    print('Não Pode Formar um Triangulo')
|
# Small demo printing the type() of a handful of differently-typed variables.
name = "sudawan"
age = 21
weight = 57
can_walk = True
can_swim = False
print(type(name))
print(type(age))
print(type(weight))
print(type(can_walk))
print(type(can_swim))  # fixed NameError: was `can_run`, which is never defined
from __future__ import print_function
import os
import py_compile
import re
import sys
def is_in(value, patterns):
    """Return True if `value` matches any entry in `patterns`.

    Each pattern is either a compiled regex (matched with .search) or a plain
    string (matched by equality).  Returns False explicitly when nothing
    matches (the original fell through and returned None).
    """
    for p in patterns:
        try:
            search = p.search
        except AttributeError:
            # Plain string pattern: exact comparison.
            if p == value:
                return True
        else:
            if search(value):
                return True
    return False
# Runtime DLLs that must ship alongside the embedded distribution, keyed by
# interpreter version.  NOTE(review): raises KeyError when run on a Python
# version outside this table -- presumably intentional gatekeeping.
SYSTEM_FILES = {
    (2, 7): ['python27.dll'],
    (3, 5): ['python35.dll', 'vcruntime140.dll'],
    (3, 6): ['python36.dll', 'vcruntime140.dll'],
}[sys.version_info[:2]]
# Stdlib package names/regexes never copied (matched via is_in).
EXCLUDED_STDLIB = [
    'test',
    re.compile('plat-.+', re.I),
]
# Individual files never copied (Tcl/Tk, test and debug binaries).
EXCLUDED_FILES = [
    re.compile(r'tcl.+\.dll$', re.I),
    re.compile(r'tk.+\.dll$', re.I),
    re.compile(r'\\_test.+\.pyd$', re.I),
    re.compile(r'_test\.pyd$', re.I),
    re.compile(r'.+_d\.(pyd|dll|exe)$', re.I),
]
# Sources copied verbatim instead of being pre-compiled to .pyc.
DO_NOT_COMPILE_FILES = [
    re.compile(r'\\pip\\_vendor\\distlib\\__init__\.py$', re.I),
    re.compile(r'\\wfastcgi\.py$', re.I),
]
# File extensions skipped entirely.
EXCLUDED_SUFFIX = [
    '.pyc',
    '.pyo',
    '.pdb',
]
# Extra stdlib exclusions that only exist on one major version.
if sys.version_info[0] == 3:
    EXCLUDED_STDLIB.extend([
        'ensurepip',
        'idlelib',
        'tkinter',
        'turtledemo',
        'venv',
    ])
if sys.version_info[0] == 2:
    EXCLUDED_STDLIB.extend([
        'lib-tk',
        'pydoc_data',
    ])
LIB_ROOT = os.path.join(sys.prefix, 'Lib')
# Version tag like "365x64" built from major/minor/micro + pointer width.
CURRENT_VERSION = '%s%s%s%s' % (
    sys.version_info[0],
    sys.version_info[1],
    sys.version_info[2],
    'x64' if sys.maxsize > 2**32 else 'x86'
)
if __name__ == '__main__':
    # Usage: script.py <version-tag> <target-dir>
    exit_code = 0
    try:
        VERSION = sys.argv[1]
    except IndexError:  # was a bare `except:`; only a short argv is expected
        print('Expected version as first argument', file=sys.stderr)
        sys.exit(1)
    if VERSION != CURRENT_VERSION:
        print('Current version is', CURRENT_VERSION, file=sys.stderr)
        sys.exit(2)
    try:
        TARGET = sys.argv[2]
    except IndexError:  # was a bare `except:`
        print('Expected target directory as second argument', file=sys.stderr)
        sys.exit(1)
    print("Copying Python install from", sys.prefix)
    for basedir, subdirs, files in os.walk(sys.prefix):
        # Restrict the walk to DLLs/ and Lib/, pruning excluded stdlib packages.
        if basedir == sys.prefix:
            subdirs[:] = ['DLLs', 'Lib']
        elif basedir == LIB_ROOT:
            subdirs[:] = [d for d in subdirs if not is_in(d, EXCLUDED_STDLIB)]
        package_name = os.path.split(basedir)[-1].lower()
        if package_name in ('__pycache__',):
            continue
        for name in files:
            filename = os.path.join(basedir, name)
            if is_in(filename, EXCLUDED_FILES):
                continue
            target = os.path.join(TARGET, os.path.relpath(filename, start=sys.prefix))
            target_dir = os.path.dirname(target)
            if not os.path.isdir(target_dir):
                os.makedirs(target_dir)
            suffix = os.path.splitext(filename)[-1].lower()
            if suffix in EXCLUDED_SUFFIX:
                continue
            # Pre-compile .py sources to .pyc in the target; fall back to
            # copying the source verbatim when compilation fails.
            if suffix in ('.py',) and not is_in(filename, DO_NOT_COMPILE_FILES):
                try:
                    py_compile.compile(filename, os.path.splitext(target)[0] + '.pyc', doraise=True)
                except py_compile.PyCompileError:
                    pass
                else:
                    continue
            with open(filename, 'rb') as f1:
                with open(target, 'wb') as f2:
                    f2.write(f1.read())
    # Pull the runtime DLLs from System32 when the walk did not provide them.
    for name in SYSTEM_FILES:
        target = os.path.join(TARGET, name)
        system_source = os.path.join(os.getenv('SYSTEMROOT'), 'System32', name)
        if not os.path.isfile(target):
            if os.path.isfile(system_source):
                print('Copying', name, 'from', system_source)
                with open(system_source, 'rb') as f1:
                    with open(target, 'wb') as f2:
                        f2.write(f1.read())
            else:
                print('Unable to locate', name, file=sys.stderr)
                exit_code = 1
    sys.exit(exit_code)
import numpy as np
import matplotlib.pyplot as plt
from scipy.integrate import ode
class ZNN:
    """Zeroing neural network (ZNN) solver for the time-varying linear
    system P(t) x(t) + Q(t) = 0, integrated with scipy's dopri5.

    Pt/Qt/dPt/dQt are callables t -> np.matrix giving the coefficient
    matrices and their time derivatives.
    """

    def __init__(self, Pt, Qt, dPt, dQt):
        # Default power-sigmoid activation parameters.
        xi, p = 4, 3
        self.powersigmoid, self.dpowersigmoid = self._powersigmoid(xi, p), self._dpowersigmoid(xi, p)
        self.gamma = 1  # convergence gain
        self.P, self.Q = Pt, Qt
        self.dP, self.dQ = dPt, dQt

    def _powersigmoid(self, xi, p):
        """Build the power-sigmoid activation: sigmoid-shaped for |X| < 1,
        X**p elsewhere.  NOTE(review): the (1-exp(xi)) denominator makes the
        inner branch negative for 0 < X < 1 -- possibly (1-exp(-xi)) was
        intended; confirm against the reference ZNN formulation."""
        def ps(X, xi = xi, p = p):
            r = (1+np.exp(-xi))/(1-np.exp(xi))*(1-np.exp(-xi*X))/(1+np.exp(-xi*X))
            place = np.where(abs(X) >= 1)
            r[place] = np.power(X[place], p)
            return r
        return ps

    def _dpowersigmoid(self, xi, p):
        """Build the derivative of the power-sigmoid activation.

        Fixed two defects in the original closure: it took (xi, p, X)
        positionally, so calling dps(X) like ps(X) raised TypeError, and it
        switched branches on X >= 1 instead of abs(X) >= 1, which did not
        match the activation it differentiates.
        """
        def dps(X, xi = xi, p = p):
            r = (1+np.exp(-xi))/(1-np.exp(xi))*(2*xi*np.exp(-xi*X))/((1+np.exp(-xi*X))**2)
            place = np.where(abs(X) >= 1)
            r[place] = p*np.power(X[place], p-1)
            return r
        return dps

    def _righthand_side(self, t, X, func):
        """ODE right-hand side: dx/dt = P^-1 (-dP x - gamma f(P x + Q) - dQ)."""
        X = np.asmatrix(X).reshape(X.size, 1)
        return self.P(t).I * (-self.dP(t)*X-self.gamma*func(self.P(t)*X+self.Q(t))-self.dQ(t))

    def construct_solver(self, func):
        """Create a dopri5 integrator starting from a random state at t = 0."""
        initial_x = np.random.randn(self.Q(0).size, 1)
        initial_t = 0
        self.solver = ode(self._righthand_side).set_integrator('dopri5', nsteps = 100)
        self.solver.set_initial_value(initial_x, initial_t)
        self.solver.set_f_params(func)

    ###############################################
    #        The parts for setting parameters     #
    ###############################################
    def set_ps(self, xi, p):
        """Rebuild the activation pair with new (xi, p)."""
        self.powersigmoid, self.dpowersigmoid = self._powersigmoid(xi, p), self._dpowersigmoid(xi, p)

    def set_parameter(self, gamma):
        """Set the convergence gain gamma."""
        self.gamma = gamma
def Q(t):
    """Time-varying right-hand-side vector [sin t, cos t]^T."""
    entries = [[np.sin(t)],
               [np.cos(t)]]
    return np.matrix(entries)
def P(t):
    """Time-varying symmetric coefficient matrix."""
    diag_a = 0.5*np.sin(t) + 2
    diag_b = 0.5*np.cos(t) + 2
    off = np.cos(t)
    return np.matrix([[diag_a, off],
                      [off, diag_b]])
def dQ(t):
    """Time derivative of Q: [cos t, -sin t]^T."""
    entries = [[np.cos(t)],
               [-np.sin(t)]]
    return np.matrix(entries)
def dP(t):
    """Time derivative of P, entry by entry."""
    d_diag_a = 0.5*np.cos(t)
    d_diag_b = -0.5*np.sin(t)
    d_off = -np.sin(t)
    return np.matrix([[d_diag_a, d_off],
                      [d_off, d_diag_b]])
# Demo: solve P(t) x(t) = -Q(t) over t in [0, 100] with the power-sigmoid
# activation, then plot both components of x(t).
znn = ZNN(P, Q, dP, dQ)
znn.construct_solver(znn.powersigmoid)
x1 = []
x2 = []
at = []
while znn.solver.successful() and znn.solver.t < 100:
    at.append(znn.solver.t)
    x = znn.solver.integrate(znn.solver.t+0.1)  # advance by 0.1 per step
    x1.append(x[0, 0])
    x2.append(x[1, 0])
a = plt.subplot(211)
a.plot(at, x1)
b = plt.subplot(212)
b.plot(at, x2)
plt.show()
from sample_slottables import *
import haizea.common.stats as stats
import math
# 64 seed values, generated using int(random.uniform(1,2**32-1))
SEEDS = [660756695, 1080106124, 441535308, 1531785557, 3449773776, 2239192905, 1944782933, 377958281,
1698866825, 4281919021, 2985069635, 1929791444, 2054454583, 3428593444, 3259033264, 643731936,
2921350595, 1575932719, 2236362645, 1020609972, 1592461297, 1460695161, 1636954632, 76307538,
862656448, 2450493480, 247968499, 766348682, 3084561872, 1179378301, 1391629128, 1038658793,
3582609773, 392253809, 2732213167, 3688908610, 866221636, 1817396766, 3402959080, 2653694808,
1596091165, 188549655, 1900651916, 1577002145, 3060320535, 1268074655, 1752021485, 2783937267,
3482472935, 1513342535, 1655096731, 2485501475, 3972059090, 822958367, 4172029370, 3057570066,
1599256642, 2858736230, 1414979451, 303997155, 3247160141, 1629523852, 2258358509, 2132879613]
PAIRS = [(93.0, 93.0), (245.0, 178.0), (32.0, 234.0), (9.0, 151.0)]
NUM_VALUES = 10000
def mean(l):
    """Arithmetic mean of a non-empty sequence, as a float."""
    total = 0.0
    for value in l:
        total += value
    return total / len(l)
def stdev(l, m):
    """Population standard deviation of l around the (precomputed) mean m."""
    variance = sum(((value - m) ** 2 for value in l), 0.0) / len(l)
    return math.sqrt(variance)
class TestStats(object):
    """Statistical sanity checks for haizea.common.stats distributions: for
    many seeds, sample means (and stdevs when a closed form exists) must land
    within an error bound of the analytic values."""

    def __init__(self):
        pass

    def do_test(self, distribution, expected_mean, expected_stdev, error):
        """Draw NUM_VALUES samples per seed and compare moments to expectations.

        expected_stdev may be None when no closed form is available.
        """
        for seed in SEEDS:
            distribution.seed(seed)
            l = distribution.get_list(NUM_VALUES)
            m = mean(l)
            diff = abs(expected_mean - mean(l))
            assert(diff < error)
            s = stdev(l, m)
            if expected_stdev is not None:  # was `!= None`
                diff = abs(expected_stdev - stdev(l, m))
                assert(diff < error)

    def test_uniform(self):
        """Uniform(a, b): mean (a+b)/2, stdev sqrt((b-a)^2 / 12)."""
        for a, length in PAIRS:
            b = a + length
            dist = stats.UniformDistribution(a, b)
            expected_mean = (a+b)/2
            expected_stdev = math.sqrt((b-a)**2 / 12)
            error = length * 0.01
            self.do_test(dist, expected_mean, expected_stdev, error)

    def test_normal(self):
        """Normal(mu, sigma): mean mu, stdev sigma."""
        for mu, sigma in PAIRS:
            # (removed a leftover Python 2 debug print of mu, sigma)
            dist = stats.NormalDistribution(mu, sigma)
            # Three standard errors of the mean keeps false failures rare.
            error = (sigma / math.sqrt(NUM_VALUES)) * 3
            self.do_test(dist, mu, sigma, error)

    def test_pareto(self):
        """Bounded Pareto(l, h, a): analytic mean only; stdev untested."""
        for l, length in PAIRS:
            h = l + length
            for a in [2.01, 2.5, 3.0, 3.5]:
                dist = stats.BoundedParetoDistribution(l, h, a)
                expected_mean = (l**a/(1-(l/h)**a)) * (a/(a-1)) * ((1/l**(a-1)) - (1/h**(a-1)))
                expected_stdev = None # No formula to test this
                error = length * 0.01
                self.do_test(dist, expected_mean, expected_stdev, error)
import torch
from torch import nn
import numpy as np
import cv2
import os
import time
class Generator(nn.Module):
    """Conditional image generator: projects a latent vector plus a label
    embedding to a low-resolution feature map, then upsamples it three times
    (x8 overall) through conv blocks to the target image size."""

    def __init__(self, in_features, image_shape, label_emb_size, label_size, device=None):
        super(Generator, self).__init__()
        self.in_features = in_features
        self.channels, self.height, self.width = image_shape
        self.size = self.channels * self.width * self.height
        self.label_emb_size = label_emb_size
        self.label_size = label_size
        self.device = device or torch.device("cpu")
        self.filters = [128, 64, 32, self.channels]
        self.emb = nn.Embedding(label_size, label_emb_size)
        # Dense layer maps latent+embedding to filters[0] channels at 1/8 res.
        self.dense = nn.Linear(self.in_features + label_emb_size, self.size * self.filters[0] // 64)
        stacked = []
        for c_in, c_out in zip(self.filters, self.filters[1:]):
            stacked.extend(self.block(c_in, c_out))
        self.backbone = nn.Sequential(*stacked)
        self.activation = nn.Tanh()

    @staticmethod
    def block(in_channels, out_channels):
        """One upsampling stage: BN -> ReLU -> x2 upsample -> 3x3 conv."""
        return [
            nn.BatchNorm2d(in_channels),
            nn.ReLU(inplace=True),
            nn.Upsample(scale_factor=2),
            nn.Conv2d(in_channels, out_channels, 3, stride=1, padding=1),
        ]

    def forward(self, inputs, labels):
        """Map (latent batch, label batch) -> images of shape (B, C, H, W) in [-1, 1]."""
        embedded = self.emb(labels)                       # (batch, emb_size)
        joined = torch.cat([inputs, embedded], dim=1)     # (batch, latent + emb_size)
        projected = self.dense(joined)
        feature_map = projected.view(-1, self.filters[0], self.height // 8, self.width // 8)
        return self.activation(self.backbone(feature_map))
class Discriminator(nn.Module):
    """Conditional discriminator: flattens the image, appends a label
    embedding, and scores realness with an MLP ending in a sigmoid."""

    def __init__(self, image_shape, label_emb_size, label_size, device=None):
        super(Discriminator, self).__init__()
        self.channels, self.height, self.width = image_shape
        self.size = self.channels * self.width * self.height
        self.label_emb_size = label_emb_size
        self.label_size = label_size
        self.device = device or torch.device("cpu")
        self.label_emb = nn.Embedding(label_size, label_emb_size)
        self.model = nn.Sequential(
            *self.block(self.size + self.label_emb_size, 512),
            *self.block(512, 128),
            nn.Linear(128, 1),
            nn.Sigmoid(),
        )

    @staticmethod
    def block(in_features, out_features):
        """One MLP stage: Linear -> BN -> LeakyReLU -> Dropout."""
        return [
            nn.Linear(in_features, out_features),
            nn.BatchNorm1d(out_features),
            nn.LeakyReLU(0.2, inplace=True),
            nn.Dropout(0.3),
        ]

    def forward(self, inputs, labels):
        """Score a batch of images conditioned on labels; returns (batch, 1) in [0, 1]."""
        flattened = inputs.view(inputs.shape[0], -1)
        conditioned = torch.cat([flattened, self.label_emb(labels)], dim=-1)
        return self.model(conditioned)
class Trainer:
    """Training harness for the conditional GAN (LSGAN-style MSE loss).

    Alternates discriminator/generator updates over `data`, which must yield
    (images, labels) numpy batches, and periodically writes an n x n grid of
    generated samples to `image_save_path`.
    """

    def __init__(
        self,
        generator,
        discriminator,
        data,
        gen_input,
        steps_per_epoch=None,
        epoch=30,
        n_step_per_generator=1,
        n_step_per_discriminator=1,
        generator_lr=1e-4,
        discriminator_lr=1e-4,
        device=torch.device("cpu"),
        n_epoch_per_evaluate=10,
        image_save_path=None
    ):
        self.generator = generator
        self.discriminator = discriminator
        self.generator_lr = generator_lr
        self.discriminator_lr = discriminator_lr
        self.n_step_per_generator = n_step_per_generator
        self.n_step_per_discriminator = n_step_per_discriminator
        self.epoch = epoch
        self.n_epoch_per_evaluate = n_epoch_per_evaluate
        self.device = device
        self.data = data
        self.gen_input = gen_input
        self.steps_per_epoch = steps_per_epoch or len(self.data)
        self.image_save_path = image_save_path
        if self.image_save_path is not None and not os.path.exists(self.image_save_path):
            os.mkdir(self.image_save_path)
        # MSE on the discriminator scores = least-squares GAN objective.
        self.loss = nn.MSELoss().to(self.device)
        self.generator = self.generator.to(self.device)
        self.discriminator = self.discriminator.to(self.device)
        self.optimizer_g = torch.optim.Adam(
            params=self.generator.parameters(),
            lr=self.generator_lr,
            betas=(0.5, 0.999)
        )
        self.optimizer_d = torch.optim.Adam(
            params=self.discriminator.parameters(),
            lr=self.discriminator_lr,
            betas=(0.5, 0.999)
        )

    def train(self):
        """Run the adversarial training loop for self.epoch epochs."""
        for epoch in range(self.epoch):
            start_time = time.time()
            for i, (images, labels) in enumerate(self.data):
                valid = torch.autograd.Variable(torch.ones(images.shape[0], 1), requires_grad=False).to(self.device)
                fake = torch.autograd.Variable(torch.zeros(images.shape[0], 1), requires_grad=False).to(self.device)
                real_images = torch.autograd.Variable(torch.from_numpy(images)).to(self.device)
                labels = torch.autograd.Variable(torch.from_numpy(labels)).to(self.device)
                z = torch.from_numpy(np.random.normal(0, 1, (images.shape[0], self.gen_input))).type(torch.FloatTensor).to(self.device)
                self.discriminator.train()
                self.generator.train()
                gen_images = self.generator(z, labels)
                # -----------------
                # Train discriminator
                # -----------------
                if i % self.n_step_per_discriminator == 0:
                    self.optimizer_d.zero_grad()  # clear previous gradients
                    real_loss = self.loss(self.discriminator(real_images, labels), valid)
                    # detach() so this step does not update the generator
                    fake_loss = self.loss(self.discriminator(gen_images.detach(), labels), fake)
                    d_loss = (real_loss + fake_loss) / 2
                    d_loss.backward()       # backpropagate
                    self.optimizer_d.step() # apply the optimizer update
                # -----------------
                # Train Generator
                # -----------------
                if i % self.n_step_per_generator == 0:
                    self.optimizer_g.zero_grad()
                    g_loss = self.loss(self.discriminator(gen_images, labels), valid)
                    g_loss.backward()
                    self.optimizer_g.step()
                print(
                    f"\r[Epoch {epoch + 1:03}/{self.epoch:03}]",
                    f"Batch {i + 1:05}/{self.steps_per_epoch:05}",
                    f"D loss: {d_loss.item():.5f} G loss: {g_loss.item():.5f}",
                    end=""
                )
                if i >= self.steps_per_epoch - 1:
                    break
            print(f"\r" + " " * 70, end="")
            print(
                f"\r[Epoch {epoch + 1}/{self.epoch}]",
                f"D loss {d_loss.item():5f} G loss {g_loss.item():5f}",
                f"Time {time.time() - start_time:.2f}"
            )
            if (epoch == 0 or (epoch + 1) % self.n_epoch_per_evaluate == 0) and self.image_save_path:
                eval_image = self.generate(n=10)
                cv2.imwrite(f"{self.image_save_path}/epoch_{epoch+1}.png", eval_image)

    def generate(self, n=1):
        """Generate an n x n grid of class-conditioned samples.

        Row-major cell (i, j) holds a sample of label j; assumes the
        generator's label_size >= n.  Returns an (H*n, W*n, C) int array
        with pixel values in [0, 255].
        """
        width = self.generator.width
        height = self.generator.height  # FIX: was self.generator.width, breaking non-square images
        channels = self.generator.channels
        z = torch.from_numpy(np.random.normal(0, 1, (n**2, self.gen_input))).type(torch.FloatTensor).to(self.device)
        labels = torch.from_numpy(np.array([i for _ in range(n) for i in range(n)])).to(self.device)
        self.generator.eval()
        gen_images = self.generator(z, labels).cpu().detach().numpy().transpose((0, 2, 3, 1))
        concat_images = np.zeros((height * n, width * n, channels))
        for i in range(n):
            for j in range(n):
                concat_images[
                    i * height: (i + 1) * height,
                    j * width: (j + 1) * width
                ] = gen_images[i * n + j]
        # Map tanh output [-1, 1] to [0, 255] pixel values.
        concat_images = (concat_images + 1) / 2 * 255
        concat_images = np.round(concat_images, 0).astype(int)
        return concat_images
|
import io
import requests
from castle.test import unittest
from castle.core.process_response import CoreProcessResponse
from castle.errors import BadRequestError, UnauthorizedError, ForbiddenError, NotFoundError, \
UserUnauthorizedError, InvalidParametersError, InternalServerError, InvalidRequestTokenError
def response(status_code=200, body=None):
    """Build a requests.Response carrying `body` bytes and `status_code`."""
    fake = requests.Response()
    fake.status_code = status_code
    fake.raw = io.BytesIO(body)
    return fake
class CoreProcessResponseTestCase(unittest.TestCase):
    """Unit tests for CoreProcessResponse: JSON body parsing (.call) and
    HTTP status -> exception mapping (.verify)."""

    def test_response_none(self):
        # No body at all parses to an empty dict.
        self.assertEqual(CoreProcessResponse(response()).call(), {})

    def test_response_empty(self):
        # An empty byte body also parses to an empty dict.
        self.assertEqual(CoreProcessResponse(response(body=b'')).call(), {})

    def test_response_authenticate_allow(self):
        self.assertEqual(
            CoreProcessResponse(
                response(body=b'{"action":"allow","user_id":"12345"}')).call(),
            {"action": "allow", "user_id": "12345"}
        )

    def test_response_authenticate_allow_with_props(self):
        # Extra keys (e.g. "internal") pass through untouched.
        self.assertEqual(
            CoreProcessResponse(
                response(body=b'{"action":"allow","user_id":"12345","internal":{}}')).call(),
            {"action": "allow", "user_id": "12345", "internal": {}}
        )

    def test_response_authenticate_deny_without_rp(self):
        self.assertEqual(
            CoreProcessResponse(
                response(body=b'{"action":"deny","user_id":"1","device_token":"abc"}')).call(),
            {"action": "deny", "user_id": "1", "device_token": "abc"}
        )

    def test_response_authenticate_deny_with_rp(self):
        # A risk_policy sub-object is preserved verbatim.
        self.assertEqual(
            CoreProcessResponse(
                response(body=b'{"action":"deny","user_id":"1","device_token":"abc","risk_policy":{"id":"123","revision_id":"abc","name":"def","type":"bot"}}')).call(),
            {"action": "deny", "user_id": "1", "device_token": "abc", "risk_policy": {
                "id": "123", "revision_id": "abc", "name": "def", "type": "bot"}}
        )

    def test_verify_200_299(self):
        # All 2xx statuses verify cleanly (return None, no exception).
        for status_code in range(200, 299):
            self.assertEqual(
                CoreProcessResponse(response(status_code=status_code)).verify(), None)

    def test_verify_400(self):
        with self.assertRaises(BadRequestError):
            CoreProcessResponse(response(status_code=400)).verify()

    def test_verify_401(self):
        with self.assertRaises(UnauthorizedError):
            CoreProcessResponse(response(status_code=401)).verify()

    def test_verify_403(self):
        with self.assertRaises(ForbiddenError):
            CoreProcessResponse(response(status_code=403)).verify()

    def test_verify_404(self):
        with self.assertRaises(NotFoundError):
            CoreProcessResponse(response(status_code=404)).verify()

    def test_verify_419(self):
        with self.assertRaises(UserUnauthorizedError):
            CoreProcessResponse(response(status_code=419)).verify()

    def test_verify_422(self):
        with self.assertRaises(InvalidParametersError):
            CoreProcessResponse(response(status_code=422)).verify()

    def test_verify_422_record_invalid(self):
        # Generic 422 body maps to the broad parameters error.
        with self.assertRaises(InvalidParametersError):
            CoreProcessResponse(
                response(status_code=422, body=b'{"type":"record_invalid","message":"validation failed"}')).verify()

    def test_verify_422_invalid_request_token(self):
        # 422 with the token-specific type maps to the narrower error.
        with self.assertRaises(InvalidRequestTokenError):
            CoreProcessResponse(response(
                status_code=422, body=b'{"type":"invalid_request_token","message":"token invalid"}')).verify()

    def test_verify_500(self):
        with self.assertRaises(InternalServerError):
            CoreProcessResponse(response(status_code=500)).verify()
|
from numpy import*
# Insert a '.' after every third character of the input string,
# e.g. "abcdefg" -> "abc.def.g".
n = input("digita um string: ")
total_len = len(n)
grouped = n[0:3]
pos = 3
while pos < total_len:
    grouped = grouped + "." + n[pos:pos + 3]
    pos += 3
print(grouped)
|
import fuzzer
from Crypto.PublicKey import RSA
import random
import shard
import json
# Simulation parameters: AMAX = number of epochs a VDF evaluation takes on
# ASIC hardware; NSHARDS/EPOCH_SLOTS/VALIDATORS size the toy network.
CONFIG = {
    'SIMULATION_EPOCHS':5,
    'AMAX':3,
    'NSHARDS':3,
    'EPOCH_SLOTS':4,
    'VALIDATORS':10
}
# create 2048 bit RSA modulus using a secure ceremony
key = RSA.generate(2048)  # NOTE(review): regenerated on every run; a real deployment would load a fixed ceremony output
N = key.n # N is used for VDF creation
print("The RSA Random N is:\n" + str(key.n) +"\n")
def next_power_of_2(x):
    """Smallest power of two >= x, for positive integer x."""
    if x == 1:
        return 1
    return 2 * next_power_of_2((x + 1) // 2)
def extend_to_power_of_2(bytez):
    """Right-pad a byte sequence with NUL bytes up to the next power-of-two length."""
    padding = next_power_of_2(len(bytez)) - len(bytez)
    return bytez + b'\x00' * padding
def vdf_calc(entropy):
    #The VDF should be calculated as Y = X**(2**T) % N, here we use T=1 to reduce simulation runtime
    #The AMAX constant specifies how many epochs this calculation takes on ASIC chips
    # NOTE(review): the trailing str(..., "ascii") raises UnicodeDecodeError
    # whenever the 32 modular-reduction bytes contain values > 0x7f -- confirm
    # callers only ever feed entropy whose reduction stays within ASCII.
    bytes_entropy=bytes(entropy, "ascii")
    bytes_entropy = (bytes(extend_to_power_of_2(bytearray(bytes_entropy)))) #X**2**T
    bytes_entropy = ((int.from_bytes(bytes_entropy, 'big', signed=False) % N).to_bytes(32, byteorder='big')) #take the modulus by N
    return str(bytes_entropy, "ascii")
# set up beacon and validators
def run_sim(config):
    """Run the randomness-beacon simulation and return the per-timeslot log.

    One record per timeslot is collected (and dumped to ``logData.txt``)
    containing the epoch/slot ids, the active random seed, the VDF
    input/output at epoch boundaries, and the validator assigned to each
    shard.
    """
    logData = []
    beacon = fuzzer.fuzzy_beacon()
    validators = fuzzer.create_validators(config["VALIDATORS"])
    # stake validators
    for validator in validators:
        beacon.stake(validator)
    # set up epoch list - each list contains the most up-to-date information for that epoch
    shards = [None] * config["NSHARDS"]
    for i in range(len(shards)):
        shards[i] = shard.Shard()
    # Seed the first AMAX epochs with fuzz entropy; later entries are filled
    # in by the simulated VDF as the run progresses. (Dropped the unused
    # `a = ...` alias binding from the original.)
    epoch_states = [None] * (config["AMAX"] + config["SIMULATION_EPOCHS"])
    for x in range(config["AMAX"]):
        epoch_states[x] = fuzzer.fuzzy_string()
    for i in range(config["SIMULATION_EPOCHS"] * config["EPOCH_SLOTS"]):
        my_time_block_record = {}
        my_time_block_record["shard_validator"] = [None] * config["NSHARDS"]
        epoch_id = i // config["EPOCH_SLOTS"]
        my_time_block_record["current_epoch_id"] = epoch_id
        time_slot_id = i%config["EPOCH_SLOTS"]
        my_time_block_record["current_timeslot_id"] = time_slot_id
        if(time_slot_id == 0):
            for validator in validators:
                beacon.request_proposal_hash(validator) # submit hashes from all the validators in the beginning of an epoch, and have some validators reveal them one-by-one in the following time slots
        # Deterministic shard assignment: seed from the epoch's state.
        random.seed(hash(epoch_states[epoch_id]))
        if((epoch_id - config["AMAX"]) >= 0):
            my_time_block_record["epoch_when_r_j_generation_started"] = epoch_id - config["AMAX"]
        else:
            my_time_block_record["epoch_when_r_j_generation_started"] = None
        my_time_block_record["current_random_seed_r_j"] = hash(epoch_states[epoch_id]) # hash it to see it
        my_time_block_record = beacon.request_single_proposal(my_time_block_record)
        if(time_slot_id == (config["EPOCH_SLOTS"] - 1)):
            # Last slot of the epoch: run the VDF on the revealed entropy to
            # fix the seed for epoch (epoch_id + AMAX).
            my_time_block_record["vdf_input"] = beacon.revealed_entropy
            epoch_states[epoch_id+config["AMAX"]] = (vdf_calc(beacon.revealed_entropy))
            my_time_block_record["vdf_output_r_i"] = hash(epoch_states[epoch_id+config["AMAX"]]) # hash it to see it
            beacon.end_of_timeslot()
        else:
            my_time_block_record["vdf_input"] = None
            my_time_block_record["vdf_output_r_i"] = None
        for unique_shard in shards:
            validator_of_a_shard_at_time_slot = random.choice(validators) # validator shard assignment
            my_time_block_record["shard_validator"][shards.index(unique_shard)] = validators.index(validator_of_a_shard_at_time_slot)
        my_time_block_record["CONFIG"] = config
        logData.append(my_time_block_record)
    with open('logData.txt', 'w') as outfile:
        json.dump(logData, outfile)
    return logData
if __name__ == '__main__':
    # Run the simulation with the default configuration when executed as a script.
    run_sim(CONFIG)
|
# Generated by Django 3.2.7 on 2021-09-09 17:20
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated: add IntegerField ``active_members`` (default 0) to
    ChatroomRecordManager."""

    dependencies = [
        ('chatroom', '0002_chatroomrecordmanager_created_at'),
    ]

    operations = [
        migrations.AddField(
            model_name='chatroomrecordmanager',
            name='active_members',
            field=models.IntegerField(default=0),
        ),
    ]
|
from tkinter import *
from tkinter import ttk
from tkinter import filedialog
class Theme:
    """Dark color/font scheme applied to the GUI widgets."""
    background = "#000000"         # root window background
    lightbackground = "#111111"    # frame background
    lighterbackground = "#222222"  # title strip background
    font_0 = "#44ff11"             # primary text (big numbers)
    font_1 = "#88aa11"             # secondary text
    font_2 = "#88aa11"             # currently identical to font_1
    # NOTE(review): 'Callibri' is presumably a typo for 'Calibri'; Tk would
    # substitute a default family for an unknown name — confirm intent before
    # changing the string (it is runtime-visible behavior).
    h1 = 'Callibri 72 bold'
    h2 = 'Callibri 16 bold'
    h3 = 'Callibri 10'
class GUI:
    """Main application window: a borderless Tk window with a hand-drawn
    title bar and three stacked stat frames (daily totals, per-post stats,
    and a bottom frame)."""
    theme = Theme()

    def __init__(self):
        # setting up root window
        self.root = Tk()
        self.root.title("CHIFA Stats")
        self.root.geometry("350x431+450+50")
        self.root.resizable(False, False)
        # Remove native window decorations; the custom title bar below
        # re-implements dragging and closing.
        self.root.overrideredirect(1)
        # setting up titlebar
        self.titlebar = Frame(self.root, height=25,
                              bg='#0f0f0f')

        def get_pos(event):
            # On drag start, record the offset between the pointer and the
            # window origin so the window follows the mouse smoothly.
            xwin = self.root.winfo_x()
            ywin = self.root.winfo_y()
            startx = event.x_root
            starty = event.y_root
            ywin = ywin - starty
            xwin = xwin - startx

            def move_window(event):
                # Reposition the window relative to the recorded offset.
                self.root.geometry('+{0}+{1}'.format(event.x_root + xwin, event.y_root + ywin))
            self.titlebar.bind('<B1-Motion>', move_window)
        self.titlebar.bind("<Button-1>", get_pos)
        self.exitbutton = Label(self.titlebar,
                                text='×', bg='#0f0f0f', fg='white',
                                font='Arial 12 bold')
        self.exitbutton.pack(side='right', padx=5)
        # Clicking the × quits the mainloop (window has no native close button).
        self.exitbutton.bind("<Button-1>", lambda x: self.root.quit())
        self.titlebar.pack(side='top', expand=True, fill='x')
        # setting up main frames
        self.frame_top = ttk.Frame(self.root, height='200')
        self.frame_top.pack_propagate(0)
        self.frame_top.pack(side='top', fill='x',
                            padx=2, pady=2)
        self.frame_middle = ttk.Frame(self.root, height='200')
        self.frame_middle.pack_propagate(0)
        self.frame_middle.pack(side='top', fill='x',
                               padx=2, pady=0)
        self.frame_bottom = ttk.Frame(self.root, height='140')
        self.frame_bottom.pack_propagate(0)
        self.frame_bottom.pack(side='top', fill='x',
                               padx=2, pady=2)
        # setting up frame_top widgets
        self.title_top = ttk.Label(self.frame_top, text='Daily Statistics')
        self.title_top.pack(side='top', fill='x')
        self.container_top = ttk.Frame(self.frame_top)
        self.container_top.pack_propagate(0)
        self.container_top.pack(expand=True, fill='both', padx=20)
        # NOTE(review): 'rows=4' is not a standard grid() option — likely
        # rowspan=4 was intended; confirm before changing.
        self.label_todaytotal = ttk.Label(self.container_top, text='42', width=3)
        self.label_todaytotal.grid(row=0, column=0, rows=4)
        self.label_totalcnas = ttk.Label(self.container_top, text='CNAS: 24', justify='left', width=20)
        self.label_totalcnas.grid(row=0, column=1)
        self.label_totalcasnos = ttk.Label(self.container_top, text='CASNOS: 14', justify='left',
                                           width=20, foreground='#ff5500')
        self.label_totalcasnos.grid(row=1, column=1)
        self.label_totalhc = ttk.Label(self.container_top, text='Hors CHIFA: 4', justify='left',
                                       width=20, foreground='#00ff55')
        self.label_totalhc.grid(row=2, column=1)
        self.label_totalmt = ttk.Label(self.container_top, text='\nSales of today\n105420.10 DA',
                                       justify='left', width=20, foreground='#00ffff')
        self.label_totalmt.grid(row=3, column=1)
        # setting up frame_middle widgets
        self.title_middle = ttk.Label(self.frame_middle, text='Stats per post')
        self.title_middle.pack(side='top', fill='x')
        self.container_middle = ttk.Frame(self.frame_middle)
        self.container_middle.pack(side='bottom', expand=True, fill='both', padx=4)
        self.label_post0title = ttk.Label(self.container_middle, text='Post 0', justify='center')
        self.label_post1title = ttk.Label(self.container_middle, text='Post 1', justify='center')
        self.label_post0label = ttk.Label(self.container_middle, text='23', width=3, justify='center')
        self.label_post1label = ttk.Label(self.container_middle, text='12', width=3, justify='center')
        self.label_post0title.grid(row=0, column=0)
        self.label_post1title.grid(row=0, column=1)
        self.label_post0label.grid(row=1, column=0, rowspan=5)
        self.label_post1label.grid(row=1, column=1, rowspan=5)

    def apply_theme(self):
        """Apply the Theme colors/fonts to all frames and labels."""
        self.root.config(bg=self.theme.background)
        # configuring styles
        ttk.Style().configure("H1.TLabel", font=self.theme.h1,
                              foreground=self.theme.font_0, background=self.theme.lightbackground)
        ttk.Style().configure("H2.TLabel", font=self.theme.h2,
                              foreground=self.theme.font_1, background=self.theme.lightbackground)
        ttk.Style().configure("H3.TLabel", font=self.theme.h3,
                              foreground=self.theme.font_1, background=self.theme.lightbackground)
        ttk.Style().configure("Title.TLabel", font=self.theme.h2,
                              foreground=self.theme.font_1, background=self.theme.lighterbackground)
        ttk.Style().configure("T.TFrame", background=self.theme.lightbackground)
        # applying styles
        self.frame_bottom.config(style='T.TFrame')
        self.frame_middle.config(style='T.TFrame')
        self.frame_top.config(style='T.TFrame')
        self.container_middle.config(style='T.TFrame')
        self.container_top.config(style='T.TFrame')
        self.title_middle.config(style='Title.TLabel')
        self.title_top.config(style='Title.TLabel')
        self.label_todaytotal.config(style='H1.TLabel')
        self.label_post0label.config(style='H1.TLabel')
        self.label_post1label.config(style='H1.TLabel')
        self.label_post0title.config(style='H2.TLabel')
        self.label_post1title.config(style='H2.TLabel')
        self.label_totalcasnos.config(style='H3.TLabel')
        self.label_totalcnas.config(style='H3.TLabel')
        self.label_totalhc.config(style='H3.TLabel')
        self.label_totalmt.config(style='H2.TLabel')

    def start(self):
        """Enter the Tk event loop (blocks until the window is closed)."""
        self.root.mainloop()

    def set_todaytotal(self, n):
        """Update the big daily-total counter."""
        self.label_todaytotal.config(text='{}'.format(n))

    def set_cnastotal(self, n):
        """Update the CNAS counter label."""
        self.label_totalcnas.config(text='CNAS: {}'.format(n))

    def set_casnostotal(self, n):
        """Update the CASNOS counter label."""
        self.label_totalcasnos.config(text='CASNOS: {}'.format(n))

    def set_hctotal(self, n):
        """Update the 'Hors CHIFA' counter label."""
        self.label_totalhc.config(text='Hors CHIFA: {}'.format(n))

    def set_earnings(self, n):
        """Update the sales label; n is formatted with two decimals."""
        x = "{:.2f}".format(n)
        self.label_totalmt.config(text='\n\nSALES\n{} DA'.format(x))

    def set_post0(self, n):
        """Update the Post 0 counter."""
        self.label_post0label.config(text='{}'.format(n))

    def set_post1(self, n):
        """Update the Post 1 counter."""
        self.label_post1label.config(text='{}'.format(n))
from torchvision import models
import torch.nn as nn
import torch
class VGG19(torch.nn.Module):
    """VGG-19 feature extractor.

    Splits the pretrained torchvision VGG-19 feature stack into five
    sequential slices and returns the activation after each slice
    (the relu following conv1_2 ... conv5_2) as a dict keyed by layer name.
    """
    # (attribute name, start index, stop index) into the pretrained features
    _SLICES = (("slice1", 0, 4), ("slice2", 4, 9), ("slice3", 9, 14),
               ("slice4", 14, 23), ("slice5", 23, 32))
    _TAGS = ("conv1_2", "conv2_2", "conv3_2", "conv4_2", "conv5_2")

    def __init__(self, requires_grad=False):
        super(VGG19, self).__init__()
        features = models.vgg19(pretrained=True).features
        for attr, start, stop in self._SLICES:
            block = torch.nn.Sequential()
            for idx in range(start, stop):
                block.add_module(str(idx), features[idx])
            setattr(self, attr, block)
        if not requires_grad:
            # Freeze all weights: the network is used as a fixed extractor.
            for param in self.parameters():
                param.requires_grad = False

    def forward(self, X):
        """Run X through the five slices, collecting each intermediate output."""
        out = {}
        h = X
        for (attr, _, _), tag in zip(self._SLICES, self._TAGS):
            h = getattr(self, attr)(h)
            out[tag] = h
        return out
'''
Sequential(
(0): Sequential(
(0): Conv2d(3, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
(1): ReLU(inplace)
(2): Conv2d(64, 64, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
(3): ReLU(inplace) ##con1_2
(4): MaxPool2d(kernel_size=2, stride=2, padding=0, dilation=1, ceil_mode=False)
(5): Conv2d(64, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
(6): ReLU(inplace)
(7): Conv2d(128, 128, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
(8): ReLU(inplace) ##con2_2
(9): MaxPool2d(kernel_size=2, stride=2, padding=0, dilation=1, ceil_mode=False)
(10): Conv2d(128, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
(11): ReLU(inplace)
(12): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
(13): ReLU(inplace) ##con3_2
(14): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
(15): ReLU(inplace)
(16): Conv2d(256, 256, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
(17): ReLU(inplace)
(18): MaxPool2d(kernel_size=2, stride=2, padding=0, dilation=1, ceil_mode=False)
(19): Conv2d(256, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
(20): ReLU(inplace)
(21): Conv2d(512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
(22): ReLU(inplace) ##conv4_2
(23): Conv2d(512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
(24): ReLU(inplace)
(25): Conv2d(512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
(26): ReLU(inplace)
(27): MaxPool2d(kernel_size=2, stride=2, padding=0, dilation=1, ceil_mode=False)
(28): Conv2d(512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
(29): ReLU(inplace)
(30): Conv2d(512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
(31): ReLU(inplace) ##conv5_2
(32): Conv2d(512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
(33): ReLU(inplace)
(34): Conv2d(512, 512, kernel_size=(3, 3), stride=(1, 1), padding=(1, 1))
(35): ReLU(inplace)
(36): MaxPool2d(kernel_size=2, stride=2, padding=0, dilation=1, ceil_mode=False)
)
(1): AdaptiveAvgPool2d(output_size=(7, 7))
(2): Sequential(
(0): Linear(in_features=25088, out_features=4096, bias=True)
(1): ReLU(inplace)
(2): Dropout(p=0.5)
(3): Linear(in_features=4096, out_features=4096, bias=True)
(4): ReLU(inplace)
(5): Dropout(p=0.5)
(6): Linear(in_features=4096, out_features=1000, bias=True)
)
)
'''
|
import unittest
import copy
import numpy as np
import astropy.units as q
from .. import synphot as syn
from .. import spectrum as sp
from bokeh.plotting import figure, output_file, show, save
# Test fixture: [wavelength (um), flux, flux uncertainty] with random positive flux values.
SPEC = [np.linspace(0.8,2.5,200)*q.um, abs(np.random.normal(size=200))*1E-15*q.erg/q.s/q.cm**2/q.AA, abs(np.random.normal(size=200))*1E-16*q.erg/q.s/q.cm**2/q.AA]
def test_spec_norm():
    """Test to see if spectra are being normalized properly
    """
    # Reference spectrum and the 2MASS J bandpass
    vega = sp.Vega()
    jband = syn.Bandpass('2MASS.J')
    # Renormalize Vega to Jmag = -0.177, then recompute the synthetic
    # magnitude to verify it matches the requested value
    normalized = vega.renormalize(-0.177, jband)
    print(normalized.synthetic_magnitude(jband))
    # Overplot the original (blue) and renormalized (red) spectra
    fig = vega.plot(fig=figure(), color='blue')
    normalized.plot(fig=fig, color='red')
    show(fig)
class SpectrumTests(unittest.TestCase):
    """Tests for the Spectrum class"""

    def setUp(self):
        # Fix: overriding __init__ (without the TestCase signature or a
        # super() call) breaks unittest's construction of the test case;
        # per-test fixtures belong in setUp.
        self.spec = sp.Spectrum(*SPEC)

    def test_Spectrum_data(self):
        """Test that Spectrum is initialized properly"""
        s = copy.copy(self.spec)  # fix: bare `spec` was an undefined name
        # assertTrue replaces failUnless, removed in Python 3.12
        self.assertTrue(s.spectrum == SPEC)

    def test_Spectrum_units(self):
        """Test that units are reassigned properly"""
        s = copy.copy(self.spec)
        # Change the wave units
        wu = q.AA
        s.wave_units = wu
        # Change the flux units
        fu = q.W/q.m**2/q.um
        s.flux_units = fu
        # Make sure the units are being updated
        self.assertTrue((s.spectrum[0].unit==wu)&(s.spectrum[1].unit==fu)&(s.spectrum[2].unit==fu))
def main():
    """Entry point: discover and run the tests in this module."""
    unittest.main()

if __name__ == '__main__':
    main()
def compute_pay(hours, rate):
    """Return gross pay: straight time up to 40 hours, time-and-a-half beyond.

    :param hours: hours worked (float)
    :param rate: hourly rate (float)
    """
    if hours > 40.0:
        overtime_pay = (hours - 40.0) * (rate * 1.5)
        base = 40.0 * rate
    else:
        overtime_pay = 0.0
        base = hours * rate
    return base + overtime_pay


if __name__ == "__main__":
    # Modernized from Python 2 (raw_input / print statement) to Python 3.
    h = float(input("Enter Hours:"))
    r = float(input("Enter rate:"))
    print(compute_pay(h, r))
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# The MIT License (MIT)
#
# Copyright (c) 2017 Matthew Pare (paretech@gmail.com)
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from klvdata.element import UnknownElement
from klvdata.elementparser import BytesElementParser
from klvdata.misb0601 import UASLocalMetadataSet
from klvdata.setparser import SetParser
# MISB ST0102: code byte -> country-coding method for the
# "Classifying Country" element.
_classifying_country_coding = {
    b'\x01': 'ISO-3166 Two Letter',
    b'\x02': 'ISO-3166 Three Letter',
    b'\x03': 'FIPS 10-4 Two Letter',
    b'\x04': 'FIPS 10-4 Four Letter',
    b'\x05': 'ISO-3166 Numeric',
    b'\x06': '1059 Two Letter',
    b'\x07': '1059 Three Letter',
    b'\x08': 'Omitted Value',
    b'\x09': 'Omitted Value',
    b'\x0A': 'FIPS 10-4 Mixed',
    b'\x0B': 'ISO 3166 Mixed',
    b'\x0C': 'STANAG 1059 Mixed',
    b'\x0D': 'GENC Two Letter',
    b'\x0E': 'GENC Three Letter',
    b'\x0F': 'GENC Numeric',
    b'\x10': 'GENC Mixed',
}
# MISB ST0102: code byte -> country-coding method for the
# "Object Country" element (note the different code assignments from the
# classifying-country table above, e.g. 0x40 for GENC AdminSub).
_object_country_coding = {
    b'\x01': 'ISO-3166 Two Letter',
    b'\x02': 'ISO-3166 Three Letter',
    b'\x03': 'ISO-3166 Numeric',
    b'\x04': 'FIPS 10-4 Two Letter',
    b'\x05': 'FIPS 10-4 Four Letter',
    b'\x06': '1059 Two Letter',
    b'\x07': '1059 Three Letter',
    b'\x08': 'Omitted Value',
    b'\x09': 'Omitted Value',
    b'\x0A': 'Omitted Value',
    b'\x0B': 'Omitted Value',
    b'\x0C': 'Omitted Value',
    b'\x0D': 'GENC Two Letter',
    b'\x0E': 'GENC Three Letter',
    b'\x0F': 'GENC Numeric',
    b'\x40': 'GENC AdminSub',
}
class UnknownElement(UnknownElement):
    # Module-local subclass deliberately shadowing the imported
    # UnknownElement — presumably so unparsed elements within this set can
    # be distinguished by type; confirm against klvdata conventions.
    pass
@UASLocalMetadataSet.add_parser
class SecurityLocalMetadataSet(SetParser):
    """MISB ST0102 Security Metadata nested local set parser.

    The Security Metadata set comprises information needed to
    comply with CAPCO, DoD Information Security Program and
    other normatively referenced security directives.

    Must be a subclass of Element or duck type Element.
    """
    key, name = b'\x30', "Security Local Metadata Set"
    key_length = 1
    # Populated by the @SecurityLocalMetadataSet.add_parser decorators below.
    parsers = {}

    _unknown_element = UnknownElement
@SecurityLocalMetadataSet.add_parser
class SecurityClassification(BytesElementParser):
    """MISB ST0102 Security Classification value interpretation parser.

    The Security Classification metadata element contains a value
    representing the entire security classification of the file in
    accordance with U.S. and NATO classification guidance.
    """
    key = b'\x01'

    # Code byte -> human-readable classification level.
    _classification = {
        b'\x01': 'UNCLASSIFIED',
        b'\x02': 'RESTRICTED',
        b'\x03': 'CONFIDENTIAL',
        b'\x04': 'SECRET',
        b'\x05': 'TOP SECRET',
    }
|
# importing required libraries
from math import pow
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
#defining figure size
plt.rcParams['figure.figsize'] = (20.0, 10.0)
# loading data: column 2 = head size, column 3 = brain weight (row 0 is the header)
data = np.genfromtxt('headbrain.csv',delimiter=',')
print('The dimensions of the dataset are:',data.shape)
X = data[1:,2]
Y = data[1:,3]
#finding the mean of the x data and the y data
mean_x = np.mean(X)
mean_y = np.mean(Y)
# here m is the total number of values
m = len(X)
# using the formula to calculate b1 and b0
#b1= sum{(xi-xmean)(yi-ymean)}/sum{(xi-xmean)^2}
#b0=ymean-b1.xmean
# vectorized with numpy instead of explicit Python loops
numer = np.sum((X - mean_x) * (Y - mean_y))
denom = np.sum((X - mean_x) ** 2)
b1 = numer / denom
b0 = mean_y - (b1 * mean_x)
# printing coefficients b1 and b0
print('The coefficients (b1,b0) are:','(',b1,',', b0,')')
#our linear model is: brainweight=b0+b1.headsize
# plotting Values and Regression Line
max_x = np.max(X) + 100
min_x = np.min(X) - 100
# calculating the plottable line values of x and y
x = np.linspace(min_x, max_x, 1000)
y = b0 + b1 * x
#plotting regression line
plt.plot(x, y, color='#58b970', label='Regression Line')
#plotting scatter plots
plt.scatter(X, Y, c='#ef5423', label='Scatter Plot')
plt.xlabel('Head Size in cm3')
plt.ylabel('Brain Weight in grams')
plt.legend()
plt.show()
#calculating root mean square error
#rmse=sqrt(sum{(y^i-yi)^2/m})
y_pred = b0 + b1 * X
rmse = np.sqrt(np.sum((Y - y_pred) ** 2) / m)
#displaying mean squared error
print('Mean squared error:',pow(rmse,2))
#calculating sst and ssr values (vectorized)
ss_t = np.sum((Y - mean_y) ** 2)
ss_r = np.sum((Y - y_pred) ** 2)
#calcualting r^2 values=1-(ssr/sst)
r2 = 1 - (ss_r/ss_t)
print('Variance score:',r2)
|
# -*- coding: UTF-8 -*-
import numpy as np
# sigmoid函数
def sigmoid(x):
    """Logistic sigmoid: 1 / (1 + e^(-x)), elementwise for arrays."""
    z = np.exp(-x)
    return 1 / (1 + z)
# sigmoid函数导数
def sigmoid_derivative(x):
    """Derivative of the sigmoid: s(x) * (1 - s(x)).

    Evaluates the sigmoid once (inlined) instead of calling it twice,
    halving the exp() work.
    """
    s = 1 / (1 + np.exp(-x))
    return s * (1 - s)
# tanh函数
def tanh(x):
    """Hyperbolic tangent, elementwise.

    Uses np.tanh instead of the exp-ratio form, which overflows to inf/inf
    (nan) for large |x|; np.tanh saturates correctly at +/-1.
    """
    return np.tanh(x)
# tanh函数导数
def tanh_derivative(x):
    """Derivative of tanh: 1 - tanh(x)^2.

    Computes np.tanh once (the original called tanh twice) and avoids the
    overflow of the exp-ratio form for large |x|.
    """
    t = np.tanh(x)
    return 1 - t * t
# relu函数
def relu(x):
    """Rectified linear unit: x where x >= 0, otherwise 0 (elementwise)."""
    negative = x < 0
    return np.where(negative, 0, x)
# relu函数导数
def relu_derivative(x):
    """Derivative of ReLU: 0 where x < 0, else 1 (x == 0 maps to 1)."""
    negative = x < 0
    return np.where(negative, 0, 1)
# 规范化
def normalize_rows(x):
    """Scale each row of the 2-D array x to unit Euclidean norm."""
    row_norms = np.linalg.norm(x, axis=1, keepdims=True)
    return x / row_norms
# softmax
def softmax(x):
    """Row-wise softmax of a 2-D array.

    Shifts each row by its maximum before exponentiating — mathematically
    identical, but avoids overflow (exp of large values -> inf/inf -> nan)
    in the unshifted form.
    """
    shifted = np.exp(x - np.max(x, axis=1, keepdims=True))
    return shifted / np.sum(shifted, axis=1, keepdims=True)
|
import os
from django.contrib.auth.decorators import login_required
from django.contrib.auth.mixins import LoginRequiredMixin
from django.core.exceptions import ObjectDoesNotExist
from django.http import HttpResponseForbidden, HttpResponse, HttpResponseRedirect
from django.shortcuts import render
from django.urls import reverse
from django.views import generic
import local_settings
from crm import telegram_bot
from .filters import UserFilter
from .models import User, Contact
from .forms import UserRegistrationForm, UserDetailForm, UserContactsForm
class UserListView(LoginRequiredMixin, generic.ListView):
    """Staff-only paginated list of all users with a filter form."""
    model = User
    paginate_by = 25

    def get(self, request, *args, **kwargs):
        """Reject non-staff users with a 403, otherwise render the list."""
        if not self.request.user.is_staff:
            return HttpResponse('<h1>403 Forbidden</h1>', status=403,)
        # Fix: super().get() is already bound — the original passed `self`
        # again, shifting every positional argument by one.
        return super().get(request, *args, **kwargs)

    def get_context_data(self, **kwargs):
        """Attach a UserFilter over all users for the template."""
        context = super(UserListView, self).get_context_data(**kwargs)
        # local renamed so the builtin `filter` is not shadowed
        user_filter = UserFilter(self.request.GET, queryset=User.objects.all())
        context['filter'] = user_filter
        return context
class UserDetailView(LoginRequiredMixin, generic.DetailView):
    """User profile page, visible to staff and to the user themselves."""
    model = User

    def get_context_data(self, **kwargs):
        context = super(UserDetailView, self).get_context_data(**kwargs)
        # Expose the *requesting* user to the template.
        context['user'] = self.request.user
        return context

    def get(self, request, *args, **kwargs):
        """403 unless the requester is staff or is viewing their own page."""
        if not self.request.user.is_staff and self.request.user.id != int(self.kwargs['pk']):
            return HttpResponse('<h1>403 Forbidden</h1>', status=403,)
        # Fix: drop the duplicated `self` argument to the bound super().get().
        return super().get(request, *args, **kwargs)
def register(request):
    """Sign-up view: show the registration form and create the account.

    Authenticated users are redirected to the index; a valid POST creates
    the user (with a hashed password) and shows the done page.
    """
    if request.user.is_authenticated:
        return HttpResponseRedirect(reverse('index'))
    if request.method == 'POST':
        user_form = UserRegistrationForm(request.POST)
        if user_form.is_valid():
            new_user = user_form.save(commit=False)
            # Hash the raw password before persisting the account.
            new_user.set_password(user_form.cleaned_data['password'])
            new_user.save()
            return render(request, 'register_done.html', {'new_user': new_user})
        # invalid POST: fall through and re-render with the bound form
    else:
        user_form = UserRegistrationForm()
    return render(request, 'register.html', {'user_form': user_form})
@login_required
def user_detail_update(request, pk):
    """Edit a user's detail and contact forms (staff or the user themselves).

    GET renders the bound forms; a valid POST saves both and redirects to
    the detail page, while an invalid POST re-renders with errors.
    """
    if not request.user.is_staff and request.user.id != int(pk):
        return HttpResponse('<h1>403 Forbidden</h1>', status=403, )
    lookup_user = User.objects.get(id=pk)
    # One Contact row per user; create it on first visit. Replaces the
    # get/except-ObjectDoesNotExist/create dance with the idiomatic call.
    contacts, _ = Contact.objects.get_or_create(user=lookup_user)
    if request.method == 'POST':
        user_form = UserDetailForm(request.POST, instance=lookup_user)
        contacts_form = UserContactsForm(request.POST, instance=contacts)
        if user_form.is_valid() and contacts_form.is_valid():
            # Fix: the original saved each form twice (commit=False + save,
            # and save() followed by a second .save()); one save suffices.
            user_form.save()
            contacts_form.save()
            return HttpResponseRedirect(reverse('user-detail', kwargs={'pk': pk}))
    else:
        user_form = UserDetailForm(instance=lookup_user)
        contacts_form = UserContactsForm(instance=contacts)
    context = {'user_form': user_form, 'contacts_form': contacts_form, 'pk': pk}
    return render(request, 'user_detail_update.html', context)
@login_required
def subscribe_to_updates(request):
    """Link the user's Telegram handle to the notification bot.

    GET shows a confirmation page; POST polls the bot for the chat id of
    the user's stored Telegram handle and, if found, marks the user as
    subscribed and records the chat id.
    """
    chat_bot_name = os.environ.get('DJANGO_TELEGRAM_BOT_NAME', local_settings.TELEGRAM_BOT_NAME)
    if request.user.is_sub:
        # Already subscribed — nothing to do.
        return HttpResponseRedirect(reverse('user-detail', kwargs={'pk': request.user.id}))
    if request.method == 'POST':
        contacts = Contact.objects.get(user=request.user.id)
        # Refresh the bot's update backlog so a recent /start is visible.
        telegram_bot.get_update()
        username = contacts.telegram
        chat_id = telegram_bot.get_chat_id(username)
        if chat_id:
            request.user.is_sub = True
            request.user.save(update_fields=['is_sub'])
            contacts.chat_bot_id = chat_id
            contacts.save(update_fields=['chat_bot_id'])
            return HttpResponseRedirect(reverse('user-detail', kwargs={'pk': request.user.id}))
        # Chat id not found yet — ask the user to message the bot first.
        return render(request, 'sub_confirm.html', {'chat_bot_name': chat_bot_name})
    else:
        return render(request, 'sub_confirm.html', {'chat_bot_name': chat_bot_name})
|
import argparse
import os
import cv2
import numpy as np
from keras.optimizers import Adam
from keras.preprocessing.image import ImageDataGenerator
from sklearn.preprocessing import LabelBinarizer
from sklearn.model_selection import train_test_split
from sklearn.metrics import classification_report
import matplotlib.pyplot as plt
from utils.model import SudokuNet
# argument parser to get the output folder name as an argument
# argument parser to get the output folder name as an argument
ap = argparse.ArgumentParser()
ap.add_argument('-o', '--output', required=True,
                help="path to save the trained model file")
args = vars(ap.parse_args())
# initializing the hyperparameters like learning rate, number of epochs
# and batch size
LR = 1e-3
EPOCHS = 10
BATCH_SIZE = 50
STEPS_PER_EPOCH = 2000
print("[INFO] loading dataset...")
# dataset path: one sub-directory per digit (0-9)
DATA_PATH = "dataset/"
images = []
labels = []
# each image is grayscaled, Otsu-binarized (inverted), resized to 32x32
# and scaled to [0, 1]
for i in range(0, 10):
    dir_name = DATA_PATH + str(i)
    img_names = os.listdir(dir_name)
    for filename in img_names:
        image = cv2.imread(dir_name + "/" + filename)
        image = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
        image = cv2.threshold(
            image, 0, 255, cv2.THRESH_BINARY_INV | cv2.THRESH_OTSU)[1]
        image = cv2.resize(image, (32, 32))
        image = image.astype('float') / 255.0
        images.append(image)
        labels.append(i)
print("[INFO] {} images loaded".format(len(images)))
# preprocess the data
print("[INFO] preprocessing the data...")
# convert images into numpy array
images = np.array(images)
labels = np.array(labels)
# split the data into training and testing
X_train, X_test, y_train, y_test = train_test_split(images, labels, test_size=0.2)
# add a channel (i.e., grayscale) dimension to the digits
X_train = X_train.reshape(X_train.shape[0], 32, 32, 1)
X_test = X_test.reshape(X_test.shape[0], 32, 32, 1)
# convert the labels from integers to vectors (one-hot encoding)
lb = LabelBinarizer()
y_train = lb.fit_transform(y_train)
y_test = lb.transform(y_test)
# augment the data with small random shifts/rotations/shears/zooms
data_gen = ImageDataGenerator(
    width_shift_range=0.1,
    height_shift_range=0.1,
    rotation_range=10,
    shear_range=0.1,
    zoom_range=0.2
)
data_gen.fit(X_train)
# initialize the optimizer
optimizer = Adam(learning_rate=LR)
# build and compile the SudokuNet model
print("[INFO] compiling the SudokuNet model...")
model = SudokuNet.build(width=32, height=32, depth=1, n_classes=10)
model.compile(
    optimizer=optimizer,
    loss='categorical_crossentropy',
    metrics=['accuracy']
)
model.summary()
# train the SudokuNet model
print("[INFO] training the SudokuNet model...")
# Fix: fit_generator is deprecated (and removed in modern Keras);
# Model.fit accepts generators directly.
history = model.fit(
    data_gen.flow(X_train, y_train, batch_size=BATCH_SIZE),
    steps_per_epoch=STEPS_PER_EPOCH,
    validation_data=(X_test, y_test),
    epochs=EPOCHS,
    shuffle=True
)
# evaluate the model
print("[INFO] evaluating the model...")
y_pred = model.predict(X_test)
print(classification_report(
    y_test.argmax(axis=1),
    y_pred.argmax(axis=1),
    target_names=[str(x) for x in lb.classes_]
))
# accuracy curves
plt.figure(1)
plt.plot(history.history['accuracy'])
plt.plot(history.history['val_accuracy'])
plt.legend(['Training', 'Validation'])
plt.title('Accuracy')
plt.xlabel('Epochs')
# loss curves
plt.figure(2)
plt.plot(history.history['loss'])
plt.plot(history.history['val_loss'])
plt.legend(['Training', 'Validation'])
plt.title('Loss')
plt.xlabel('Epochs')
plt.show()
# save the trained model to disk
print("[INFO] saving the trained model to {}...".format(args['output']))
model.save(args['output'])
|
from sympy import *
A = Matrix([
    [1, -1, 5],
    [3, 9, 7],
    [-2, 1, 0]
])
# Cofactor matrix, then its transpose (the classical adjugate).
# NOTE(review): newer SymPy spells this cofactor_matrix(); the camelCase
# cofactorMatrix may warn or be absent — confirm the installed version.
pprint(A.cofactorMatrix())
pprint(A.cofactorMatrix().T)
# TODO answered: in SymPy, A.adjoint() is the conjugate (Hermitian)
# transpose, not the classical adjoint; the cofactor transpose printed
# above is A.adjugate(). For real matrices adjoint() == transpose().
|
#!/usr/bin/env python3
import os
import sys
import subprocess
# Usage: <script> <module-dir> <target-dir> <module.ko>...
[moddir, tgtdir] = sys.argv[1:3]

# Map module name -> (path relative to moddir, space-separated dependency
# paths), parsed from modules.dep lines of the form
# "path/name.ko: dep1.ko dep2.ko ...".
modules = {}
with open(moddir + "/modules.dep") as deps:
    for dep in deps:
        module, deps = dep.split(':', maxsplit=1)
        dir, fn = os.path.split(module)
        mn, _ = fn.split('.ko', maxsplit=1)
        modules[mn] = (module, deps.strip())

# Map module name -> list of alias patterns, parsed from modules.alias
# lines of the form "alias <pattern> <modname>".
aliases = {}
with open(moddir + "/modules.alias") as alias:
    for al in alias:
        if al[0] == '#':
            # skip comment lines
            continue
        alcmd , alst, modname = al.split()
        if alcmd != 'alias':
            continue
        if modname in aliases:
            aliases[modname].append(alst)
        else:
            aliases[modname] = [alst]
def doit(mfn, script, loaded, ignore=False):
    """Copy module file *mfn* into tgtdir and append an insmod line to *script*.

    Already-handled files (tracked in *loaded*) are skipped; with
    ignore=True, insmod errors are silenced in the generated script.
    """
    if mfn in loaded:
        return
    source = moddir + os.path.sep + mfn
    subprocess.run(['cp', source, tgtdir]).check_returncode()
    loaded.append(mfn)
    _, base = os.path.split(mfn)
    suffix = " 2>/dev/null" if ignore else ""
    script.write(f"insmod {base}{suffix}\n")
def insmod(mod, script, loaded=None):
    """Emit insmod lines for *mod* into *script*, dependencies first.

    Dependencies are loaded in reverse listing order with errors ignored;
    *loaded* accumulates what has been emitted so far.
    """
    loaded = [] if loaded is None else loaded
    if mod in loaded:
        return
    module_path, dep_list = modules[mod]
    for dep_fn in reversed(dep_list.split()):
        doit(dep_fn, script, loaded, ignore=True)
    doit(module_path, script, loaded)
# "modules" file listing every module we generated load scripts for.
modlstfn = tgtdir + os.path.sep + "modules"
modlst = open(modlstfn, "w")
# Do not put in these "drivers" ;)
blacklist = ( 'libata', 'libahci', 'pata_legacy', 'ata_generic', 'pata_isapnp' )
for mod in sys.argv[3:]:
    path, fn = os.path.split(mod)
    mname, _ = fn.split('.ko', maxsplit=1)
    if mname in blacklist:
        continue
    # Write an executable load-<module> script of insmod lines (deps first).
    loadfn = tgtdir + os.path.sep + "load-" + mname
    with open(loadfn, "w") as script:
        insmod(mname, script)
    subprocess.run(['chmod', '+x', loadfn])
    # Write the module's alias patterns to alias-<module>, one per line.
    aliasfn = tgtdir + os.path.sep + "alias-" + mname
    with open(aliasfn, "w") as af:
        af.write('\n'.join(aliases[mname]) + '\n')
    modlst.write(mname + '\n')
print("atamoddir done.")
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import csv
import sys

import MySQLdb #import the library to connect to mysql database
a = 0
#open connection to db
db = MySQLdb.connect(user = "kebench",passwd = "k3b3nch0$")
#prepare a cursor
cursor = db.cursor()
#create database
db_sql = "CREATE DATABASE IF NOT EXISTS sp_data CHARACTER SET utf8"
cursor.execute(db_sql);
cursor.execute("USE sp_data")
#create table for test cases
table_sql = """CREATE TABLE IF NOT EXISTS test_cases(
id int(11) NOT NULL AUTO_INCREMENT,
text TEXT,
coordinates GEOMETRY,
created_at VARCHAR(50),
screen_name VARCHAR(50),
country_code VARCHAR(2),
lang VARCHAR(5),
PRIMARY KEY (id)
)CHARACTER SET=utf8"""
cursor.execute(table_sql)
for num in range(1,9):
#read the csv
i = str(num)
print ("reading sp_num",i)
f = open('sp_'+i+'.csv','rb')
reader = csv.DictReader(f)
#try reading every row of the file
try:
#variables to be used for insertion
lat = lon = 0
text = date = name = ""
counter = 1
#traverse every row and save into database
for row in reader:
#check if lat and lon are set else, use the location coordinates
if row["lat"] != "NA" or row["lon"] != "NA":
#print (row[""],row["lat"],row["lon"])
lat = float(row["lat"])
lon = float(row["lon"])
elif row["place_lat"] != "NA" or row["place_lon"] != "NA":
lat = float(row["place_lat"])
lon = float(row["place_lon"])
text = row["text"]
date = row["created_at"]
name = row["screen_name"]
country = row["country_code"]
lang = row["lang"]
#try saving to the database
try:
#lat = x, long = y, point x,y
if (( row["lang"] == "en" or row["lang"] == "tl" ) and row["country_code"] == "PH" and (lat > 0 or lon > 0)):
a += 1
print (i," inserting row",a)
counter += 1
insert_sql = "INSERT INTO test_cases (text,coordinates,created_at,screen_name,country_code,lang) VALUES ( %s, GeomFromText('POINT(%s %s)'),%s,%s,%s,%s)"
cursor.execute(insert_sql,(text,lat,lon,date,name,country,lang))
if counter == 1000:
print "Committing to DB"
db.commit()
counter = 1
except:
db.rollback()
try:
print "Committing, not 1k though"
db.commit()
counter = 1
except:
db.rollback();
except csv.Error, e:
sys.exit('file %s, line %d: %s' % (filename, reader.line_num, e))
#close database connection
db.close()
|
"""Aggregate the public API of the cputils, mplutils and plots submodules."""
# Fix: bind the submodule names explicitly. The original relied on the
# import system's side effect of setting submodule attributes on the
# package during the star imports, which is implicit and fragile.
from . import cputils, mplutils, plots
from .cputils import *
from .mplutils import *
from .plots import *

# Re-export exactly what the submodules declare public.
__all__ = (cputils.__all__ +
           mplutils.__all__ +
           plots.__all__)
|
"""
Distributed under the terms of the BSD 3-Clause License.
The full license is in the file LICENSE, distributed with this software.
Author: Jun Zhu <jun.zhu@xfel.eu>
Copyright (C) European X-Ray Free-Electron Laser Facility GmbH.
All rights reserved.
"""
import abc
from PyQt5.QtCore import Qt
from PyQt5.QtWidgets import (
QCheckBox, QComboBox, QFrame, QGroupBox, QLineEdit, QAbstractSpinBox
)
from .smart_widgets import SmartBoundaryLineEdit, SmartSliceLineEdit
from ..gui_helpers import parse_slice_inv
from ..mediator import Mediator
from ...database import MetaProxy
from ...logger import logger
class _AbstractCtrlWidgetMixin:
    """Interface shared by all control widgets: UI setup, signal wiring,
    metadata round-tripping and run start/stop hooks."""

    @abc.abstractmethod
    def initUI(self):
        """Initialization of UI."""
        raise NotImplementedError

    @abc.abstractmethod
    def initConnections(self):
        """Initialization of signal-slot connections."""
        raise NotImplementedError

    @abc.abstractmethod
    def updateMetaData(self):
        """Update metadata belong to this control widget.

        :returns bool: True if all metadata successfully parsed
            and emitted, otherwise False.
        """
        raise NotImplementedError

    @abc.abstractmethod
    def loadMetaData(self):
        """Load metadata from Redis and set this control widget."""
        # Intentionally a no-op default (unlike the other abstract methods)
        # so subclasses without persisted state need not override it.
        # raise NotImplementedError
        pass

    @abc.abstractmethod
    def onStart(self):
        # Hook invoked when a run starts.
        raise NotImplementedError

    @abc.abstractmethod
    def onStop(self):
        # Hook invoked when a run stops.
        raise NotImplementedError

    def _updateWidgetValue(self, widget, config, key, *, cast=None):
        """Update widget value from meta data."""
        value = self._getMetaData(config, key)
        if value is None:
            return
        if cast is not None:
            value = cast(value)
        # Dispatch on widget type. Order matters: the Smart*LineEdit classes
        # must be matched before their QLineEdit base class.
        if isinstance(widget, QCheckBox):
            # stored metadata values are strings
            widget.setChecked(value == 'True')
        elif isinstance(widget, SmartBoundaryLineEdit):
            # strip the surrounding brackets of the stored boundary tuple
            widget.setText(value[1:-1])
        elif isinstance(widget, SmartSliceLineEdit):
            widget.setText(parse_slice_inv(value))
        elif isinstance(widget, QLineEdit):
            widget.setText(value)
        elif isinstance(widget, QAbstractSpinBox):
            widget.setValue(value)
        else:
            logger.error(f"Unknown widget type: {type(widget)}")

    @staticmethod
    def _getMetaData(config, key):
        """Convenient function to get metadata and capture key error.

        Returns None (implicitly) when the key is missing.

        :param dict config: config dictionary.
        :param str key: meta data key.
        """
        try:
            return config[key]
        except KeyError:
            # This happens when loading metadata in a new version with
            # a config file in the old version.
            logger.warning(f"Meta data key not found: {key}")
class _AbstractCtrlWidget(QFrame, _AbstractCtrlWidgetMixin):
    """Frame-style base class for control widgets."""

    def __init__(self, *,
                 pulse_resolved=True,
                 require_geometry=True,
                 parent=None):
        """Create the control widget frame.

        :param bool pulse_resolved: whether the related data is
            pulse-resolved or not.
        :param bool require_geometry: whether the detector requires a
            geometry to assemble its modules.
        """
        super().__init__(parent=parent)
        self.setAttribute(Qt.WA_DeleteOnClose, True)

        self._pulse_resolved = pulse_resolved
        self._require_geometry = require_geometry

        self._mediator = Mediator()
        self._meta = MetaProxy()

        # Widgets whose values must stay fixed while a run is in
        # progress; disabled in onStart(), re-enabled in onStop().
        self._non_reconfigurable_widgets = []

        self.setFrameStyle(QFrame.StyledPanel)

    def onStart(self):
        """Lock every non-reconfigurable widget for the run."""
        for w in self._non_reconfigurable_widgets:
            w.setEnabled(False)

    def onStop(self):
        """Unlock the widgets locked by onStart()."""
        for w in self._non_reconfigurable_widgets:
            w.setEnabled(True)
class _AbstractGroupBoxCtrlWidget(QGroupBox, _AbstractCtrlWidgetMixin):
    """Group-box flavoured base class for control widgets."""

    # Styling for the group-box title area.
    GROUP_BOX_STYLE_SHEET = 'QGroupBox:title {'\
                            'color: #8B008B;' \
                            'border: 1px;' \
                            'subcontrol-origin: margin;' \
                            'subcontrol-position: top left;' \
                            'padding-left: 10px;' \
                            'padding-top: 10px;' \
                            'margin-top: 0.0em;}'

    def __init__(self, title, *,
                 pulse_resolved=True, require_geometry=True, parent=None):
        """Create the group-box control widget.

        :param str title: group box title.
        :param bool pulse_resolved: whether the related data is
            pulse-resolved or not.
        :param bool require_geometry: whether the detector requires a
            geometry to assemble its modules.
        """
        super().__init__(title, parent=parent)
        self.setStyleSheet(self.GROUP_BOX_STYLE_SHEET)

        self._pulse_resolved = pulse_resolved
        self._require_geometry = require_geometry

        self._mediator = Mediator()
        self._meta = MetaProxy()

        # Widgets frozen between onStart() and onStop().
        self._non_reconfigurable_widgets = []

    def onStart(self):
        """Disable widgets that must not change during a run."""
        for w in self._non_reconfigurable_widgets:
            w.setEnabled(False)

    def onStop(self):
        """Re-enable the widgets disabled by onStart()."""
        for w in self._non_reconfigurable_widgets:
            w.setEnabled(True)
|
import random
import string
from tkinter import *
from tkinter.ttk import *
# --- main window setup -------------------------------------------------
root = Tk()
root.title("Password Generator App")
root['background']='#856ff8'
root.geometry("450x450")
# Holds the generated password; bound to the Entry widget below.
pass_str = StringVar()
def generate():
    """Build a 10-character password and show it in the entry widget.

    Guarantees at least 2 uppercase letters, 2 digits and 2 punctuation
    characters; the remaining 4 are drawn from the full
    letter/digit/punctuation pool.  (Original Turkish comments said the
    same: "pick 2 each of uppercase, digit and punctuation".)

    Improvements over the original: uses the OS CSPRNG
    (random.SystemRandom) instead of the predictable Mersenne Twister,
    and shuffles the result so the guaranteed characters no longer sit
    at fixed, guessable positions at the front of the password.
    """
    rng = random.SystemRandom()  # cryptographically strong source
    pool = string.ascii_letters + string.digits + string.punctuation
    chars = [rng.choice(string.ascii_uppercase) for _ in range(2)]
    chars += [rng.choice(string.digits) for _ in range(2)]
    chars += [rng.choice(string.punctuation) for _ in range(2)]
    chars += [rng.choice(pool) for _ in range(4)]
    rng.shuffle(chars)  # remove positional structure
    pass_str.set("".join(chars))
# --- widgets -----------------------------------------------------------
Label(root, text="Password Generator Application", font="Lucida 20 bold").pack()
Button(root, text="Generate Password", command=generate).pack(pady=7)
Entry(root, textvariable=pass_str).pack(pady=2)
# Enter the Tk event loop (blocks until the window is closed).
root.mainloop()
|
import os, sys, re
def process_input(input_raw):
    """Split raw input lines into (case count, list of case strings).

    Note: consumes the first element of *input_raw* in place, exactly
    like the original (callers see the header line popped off).
    """
    header = input_raw.pop(0)
    num_cases = int(header.strip("\n"))
    cases = [line.strip('\n') for line in input_raw]
    return num_cases, cases
def get_output(pancakes_stack):
    """Return the minimum number of prefix flips needed to make every
    pancake happy-side up ('+') — GCJ 2016 qualification, problem B.

    Trailing '+' pancakes never need to move, so they are stripped
    first; each remaining maximal run of equal symbols then costs
    exactly one flip.

    Fix: the original counted runs via ``re.split('(0*)', ...)``, whose
    result differs between Python 2 and Python 3 (Python 3 also splits
    on zero-width matches, inflating the count).  Counting runs with
    ``itertools.groupby`` is portable and matches the intended
    Python 2 behaviour (verified against the official sample cases).
    """
    from itertools import groupby  # local import keeps the fix self-contained

    trimmed = pancakes_stack.rstrip('+')
    # Empty after stripping -> already all happy-side up -> 0 flips.
    # Otherwise: one flip per run of identical symbols.
    return sum(1 for _ in groupby(trimmed))
if __name__ == "__main__":
    # Driver: reads input/<name>.in, solves each case, writes output/<name>.out.
    # NOTE(review): Python 2 script (print statements, xrange).
    VERBOSE = False
    case_file_name = "B-large"
    input_file = os.path.join("input", "{}.in".format(case_file_name))
    with open(input_file, 'r') as f:
        input_data_raw = f.readlines()
    num_cases, cases = process_input(input_data_raw)
    output_array = []
    for case_number in xrange(num_cases):
        if VERBOSE:
            print "Running Problem", case_number
            print "\tInput: ", cases[case_number]
        # Cases are 1-indexed in the required output format.
        output_array.append("Case #{}: {}".format(str(case_number+1), str(get_output(cases[case_number]))))
        if VERBOSE:
            print "\tOutput: ", output_array[-1]
    output_txt = "\n".join(output_array)
    output_file = os.path.join("output", "{}.out".format(case_file_name))
    with open(output_file, 'w+') as f:
        f.write(output_txt)
    print "Done"
|
import os
import numpy as np
import tensorflow as tf
import os
import cv2
from NetworkBuilder import NetworkBuilder
from opencv_utils import OpenCVHelper
class Predictor():
    """Face classification with a previously trained TF1 model.

    Restores weights from ``saved model v2/`` when a checkpoint exists,
    rebuilds the prediction graph via NetworkBuilder, and exposes
    classify_image() for single-image inference.
    NOTE(review): Python 2 / TensorFlow 1.x code (print statements,
    tf.placeholder, tf.Session).
    """
    def __init__(self,dg,num_labels=3):
        # :param dg: dataset generator used to load the label list.
        # :param num_labels: number of output classes.
        self.num_labels = num_labels
        # Model and label files live next to this source file.
        path = os.path.dirname(os.path.abspath(__file__))
        model_save_path = os.path.join(path,"saved model v2/")
        print model_save_path
        model_name='model'
        with tf.name_scope("Input") as scope:
            # Input batch: 128x128 RGB images.
            self.input_img = tf.placeholder(dtype='float', shape=[None, 128, 128, 3], name="input")
        self.session = tf.Session()
        # saver = tf.train.Saver()
        # saver.restore(sess, tf.train.latest_checkpoint(model_save_path))
        # saver = tf.train.import_meta_graph('./saved '+model_name+'/model.ckpt.meta')
        # saver = tf.train.import_meta_graph('./saved '+model_name+'/model.ckpt.meta')
        # saver.restore(sess, tf.train.latest_checkpoint(model_save_path))
        # predictor = tf.contrib.predictor.from_saved_model(model_save_path)
        # Restore trained weights only when a checkpoint file exists.
        if os.path.exists(model_save_path+'checkpoint'):
            print "Restoring %s " % model_name
            saver = tf.train.import_meta_graph(model_save_path+model_name+'.meta')
            saver.restore(self.session,tf.train.latest_checkpoint(model_save_path))
        nb = NetworkBuilder(num_labels)
        prediction = nb.get_prediction(self.input_img)
        # Predicted class index = argmax over the network output.
        self.y_pred_cls = tf.argmax(prediction, axis=1)
        # NOTE(review): this initializes ALL variables AFTER the restore
        # above and may clobber the restored weights — confirm intended.
        tf.global_variables_initializer().run(session=self.session)
        label_path = os.path.join(path,"labels.pkl")
        self.labels = dg.load_labels(path=label_path)
        # One-hot vector per known label, fed as y_true in classify_image.
        self._np_labels = list()
        for i in range(len(self.labels)):
            label = np.zeros(len(self.labels),dtype=int)
            label[i] = 1
            self._np_labels.append(label)
        # print("Predicted : {}".format(labels[sample_prediction(test_img)]))
    def classify_image(self,test_im):
        # Classify one image; returns (label_name, label_index).
        h, w = test_im.shape[:2]
        sh, sw = 128 , 128
        # interpolation method
        if h > sh or w > sw: # shrinking image
            interp = cv2.INTER_AREA
        else: # stretching image
            interp = cv2.INTER_CUBIC
        # NOTE(review): `interp` and `x` are computed but never used — the
        # resize call below is commented out; the input is only reshaped.
        x = tf.placeholder(dtype="float", shape=[128, 128, 1], name='x')
        y_true = tf.placeholder(tf.float32, shape=[None, self.num_labels], name='y_true')
        # test_im = cv2.resize(test_im, (128,128), interpolation=interp)
        feed_dict_test = {
            # x: test_im.reshape(3, img_size_flat),
            # x: test_im.reshape(1, img_size_flat),
            self.input_img :test_im.reshape(-1,128,128,3),
            y_true: np.array(self._np_labels)
        }
        test_pred = self.session.run(self.y_pred_cls, feed_dict=feed_dict_test)
        return self.labels[test_pred[0]] , test_pred[0]
if __name__=="__main__":
    # Smoke test: detect the face in 3.jpeg and classify it.
    from DataSetGenerator import DataSetGenerator
    path = os.path.dirname(os.path.abspath(__file__))
    test_img = cv2.imread('3.jpeg')
    dg = DataSetGenerator(os.path.join(path,"training_face_rec"))
    MAX_LABELS = len(dg.data_labels)
    p = Predictor(dg,num_labels=MAX_LABELS)
    helper = OpenCVHelper()
    # Crop the face region before classification.
    img_face, pos_face = helper.convert_img(test_img)
    print p.classify_image(img_face)
|
# Count square laminae (hollow square rings, possibly nested) built from
# at most `lim` tiles: N[t] is the number of distinct laminae arrangements
# using exactly t tiles; the answer is how many tile counts admit between
# 1 and 10 arrangements.
lim = 1000000
N = {}
S = 3  # outer side length of the outermost ring
while True:
    placed_any = False
    side = S
    tiles = 4 * (S - 1)  # tiles in the outermost ring
    # Keep nesting rings (each inner ring has side shorter by 2)
    # while the running tile total stays within the limit.
    while tiles <= lim and side >= 3:
        placed_any = True
        N[tiles] = N.get(tiles, 0) + 1
        side -= 2
        tiles += 4 * (side - 1)
    if not placed_any:
        # Even a single ring of side S exceeds the limit: stop.
        break
    S += 1
print(len([t for t, c in N.items() if 1 <= c <= 10]))
|
#encoding=utf-8
import web
import subprocess
import sys
# URL routing table: path -> handler class name.
urls = (
    '/', 'Index',
    '/commit', 'Commit',
    '/success','success',
)
# Template renderer rooted at ./templates; app built from the map above.
render = web.template.render('templates')
app = web.application(urls, globals())
class Index:
    def GET(self):
        # Serve the login page (presumably templates/login2.html — confirm).
        return render.login2()
class success:
    def GET(self):
        # Plain-text confirmation page reached after a commit redirect.
        return "success"
class Commit:
    # Handler for POST /commit: reads a text field naming a deck and
    # (in this stripped-down version) only logs the derived path before
    # redirecting.  NOTE(review): Python 2 code; the triple-quoted
    # blocks below are dead experimental code kept as string statements.
    def POST(self):
        """x = web.input(myfile={})
        web.debug(x['myfile'].filename)
        #web.debug(x['myfile'].value)
        #web.debug(x['myfile'].file.read())
        raise web.seeother('/')"""
        # Read the 'mytext' form field and derive the .pptx path from it.
        x = web.input(mytext={})
        if 'mytext' in x:
            print x.mytext
            orgpath = x.mytext+"/"+x.mytext+".pptx"
            print orgpath
        """res = oss.get_object_to_file(bucketname, "ppt/1.pptx", "C:\\11.pptx", headers)
        if (res.status / 100) == 2:
            print "get_object_to_file OK"
        else:
            print "get_object_to_file ERROR"
        """
        #x = web.input()
        #filepath = "C:\\"+(str)x
        #print filepath
        """filedir = 'E:\\pythonText\\test' # change this to the directory you want to store the file in.
        if 'myfile' in x: # to check if the file-object is created
            filepath=x.myfile.filename.replace('\\','/') # replaces the windows-style slashes with linux ones.
            filename=filepath.split('/')[-1] # splits the and chooses the last part (the filename with extension)
            fout = open(filedir +'/'+ filename,'wb') # creates the file where the uploaded file should be stored
            wholepath = filedir+'\\'+filename
            fout.write(x.myfile.file.read()) # writes the uploaded file to the newly created file.
            fout.close() # closes the file, upload complete.
            returnCode = subprocess.call("python pptChange.py %s" %wholepath)
            #returnCode = subprocess.call("python pptChange.py C:\\FindLost.pptx")
            print 'returncode:', returnCode
        """
        # Redirect to the success page.
        raise web.seeother('/success')
        #return "save successful"
if __name__ == "__main__":
    # Start web.py's built-in development server.
    app.run()
# -*- coding:UTF-8 -*-
import unittest,time,sys
sys.path.append("../DATA")
from data_info import heaven_info
# -------------------登录-----------------------------
class TestLogin(unittest.TestCase):
    # Selenium flow driving the site's login form.
    # NOTE(review): testlogin() takes extra parameters (driver, An, Bn),
    # so the standard unittest runner cannot discover/invoke it directly;
    # it appears to be called manually from elsewhere — confirm before
    # changing the signature.
    def setUp(self):
        # Short pause before each test step.
        time.sleep(2)
    def testlogin(self,driver,An,Bn):
        # Log in (open the login dialog)
        driver.find_element_by_xpath("html/body/header/div[3]/div/div[4]/div[2]/div[1]/span[2]/em[1]/a[1]").click()
        time.sleep(3)
        # ------------------- enter the login page -------------------------------------------
        # Username
        driver.find_element_by_name("login_name").send_keys(An)
        # inputTest = "$(':text').val('aaaa')"
        # driver.execute_script ( inputTest )
        # Password
        driver.find_element_by_name("password_shadow").send_keys(Bn)
        time.sleep(1)
        # Log in (submit the form)
        driver.find_element_by_id("login_submit").click()
        # inputTest = "$('#login_submit').click()"
        # driver.execute_script ( inputTest )
        time.sleep(1)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# composite_number.py
#
# Copyright 2012 Emil <emil@emil-luftbunt>
#
import Tkinter as tk
from math import pow, floor
from animated_number import AnimatedNumber
class CompositeNumber():
    """A multi-digit counter drawn on a Tk canvas.

    Each digit is an AnimatedNumber; the digits can "roll" like a slot
    machine and are stopped one by one from the least significant digit
    to the most significant.
    """
    # Duration before stopping the first number
    ROLL_DURATION_FIRST = 1
    # Duration for the rest of the numbers
    ROLL_DURATION_CONSECUTIVE = 1
    def __init__(self, canvas, max_value, x, y, images):
        # :param canvas: Tk canvas the digits are drawn on.
        # :param max_value: largest value the counter must display.
        # :param x, y: centre position of the whole number.
        # :param images: digit images, indexed 0-9.
        self.current_stopper = 0
        self.run_animation = False
        update_time = 0.1
        self.value = 0
        self.number_margin = 3
        self.number_images = images
        self.numbers = list()
        # Horizontal space taken by one digit (image width + margin).
        self.number_width = self.number_images[0].width() + self.number_margin
        # Number of digits needed to represent max_value.
        no_num = 1
        while pow(10, no_num) <= max_value:
            no_num += 1
        self.no_numbers = no_num
        centered_x = self.center_x(x)
        centered_y = self.center_y(y)
        for i in range(self.no_numbers):
            offset = self.number_width * i
            min_digit = 0
            max_digit = 9
            if i == self.most_significant_digit_pos():
                # The leading digit never rolls past the leading digit
                # of max_value.
                most_significant_number_divisor = int(pow(10, self.no_numbers - 1))
                max_digit = max_value // most_significant_number_divisor
            self.numbers.append(
                AnimatedNumber(
                    canvas,
                    centered_x + offset,
                    centered_y,
                    update_time,
                    self.value,
                    self.number_images,
                    min_digit,
                    max_digit))
        return  # NOTE(review): redundant bare return
    def center_x(self, x):
        # Left edge such that the digit row is centred on x.
        return x - (self.no_numbers * self.number_width - self.number_margin) / 2
    def center_y(self, y):
        # Top edge such that the digits are centred on y.
        return y - self.number_images[0].height() / 2
    def number_path(self, num):
        # File path of the image for digit *num*.
        return 'bilder/nr/' + str(num) + 's.gif'
    def set_value(self, value):
        # Display *value* by pushing one digit into each widget.
        chars = self.divide_into_singles(value)
        i = 0
        for num in self.numbers:
            num.set_value(chars[i])
            i += 1
    def divide_into_singles(self, value):
        # Split *value* into a list of digits (most significant first),
        # left-padded with zeros to the counter's width.
        no_chars = 1
        while pow(10, no_chars) <= value:
            no_chars += 1
        chars = list()
        last_char = 0  # NOTE(review): unused
        rest = 0
        # Extract digits from most to least significant.
        for i in reversed(range(no_chars)):
            chars.append(int(floor((value - rest) / pow(10, i))))
            rest += (chars[no_chars - i - 1] * pow(10, i))
        if len(chars) < self.no_numbers:
            # Pad on the left so every digit widget gets a value.
            for i in range(self.no_numbers - len(chars)):
                chars.insert(0, 0)
        return chars
    def start_roll(self):
        # Start all digits rolling and arm the stop countdowns.
        self.run_animation = True
        self.stop_countdown_first = CompositeNumber.ROLL_DURATION_FIRST
        self.stop_countdown_consecutive = CompositeNumber.ROLL_DURATION_CONSECUTIVE
        # Digits are stopped right-to-left, starting with the last one.
        self.current_stopper = self.least_significant_digit_pos()
        for n in self.numbers:
            n.animate(True)
    def stop_roll(self):
        # Stop every digit immediately.
        self.run_animation = False
        for n in self.numbers:
            n.animate(False)
    def stop_next(self):
        # Stop the current digit and move the stopper one digit left;
        # the roll is over once the leftmost digit has been stopped.
        self.numbers[self.current_stopper].animate(False)
        self.current_stopper = self.current_stopper - 1
        if self.current_stopper == -1:
            self.run_animation = False
    def update(self, delta):
        # Advance the animation state by *delta* seconds: first count
        # down to the first stop, then stop one digit per consecutive
        # countdown; always tick the digit animations themselves.
        if self.run_animation:
            if self.stop_countdown_first < 0:
                if self.stop_countdown_consecutive < 0:
                    self.stop_next()
                    self.stop_countdown_consecutive = CompositeNumber.ROLL_DURATION_CONSECUTIVE
                else:
                    self.stop_countdown_consecutive -= delta
            else:
                self.stop_countdown_first -= delta
        for n in self.numbers:
            n.update(delta)
    def move(self, x, y):
        # Re-position the whole counter centred on (x, y).
        centered_x = self.center_x(x)
        centered_y = self.center_y(y)
        for i in range(self.no_numbers):
            offset = self.number_width * i
            self.numbers[i].set_position(centered_x + offset, centered_y)
    def least_significant_digit_pos(self):
        # Rightmost digit index.
        return self.no_numbers - 1
    def most_significant_digit_pos(self):
        # Leftmost digit index.
        return 0
    def destroy(self):
        # Remove all digit images from the canvas.
        for n in self.numbers:
            n.canvas.delete(n.canvas_image)
|
import requests
import urllib.parse
from flask import redirect, render_template, request, session, url_for
from functools import wraps
def login_required(f):
    """Decorator: require a logged-in session, else redirect to owner login."""
    @wraps(f)
    def wrapper(*args, **kwargs):
        if session.get("logged"):
            return f(*args, **kwargs)
        return redirect(url_for('login_owner', next=request.path))
    return wrapper
def admin_required(f):
    """Decorator: require an admin session, else redirect to admin login."""
    @wraps(f)
    def wrapper(*args, **kwargs):
        if session.get("admin_id") is not None:
            return f(*args, **kwargs)
        return redirect(url_for('login_admin', next=request.path))
    return wrapper
def staff_required(f):
    """Decorator: require a staff session, else redirect to staff login."""
    @wraps(f)
    def wrapper(*args, **kwargs):
        if session.get("staff_id") is not None:
            return f(*args, **kwargs)
        return redirect(url_for('login_staff', next=request.path))
    return wrapper
def owner_required(f):
    """Decorator: require a house-owner session, else redirect to owner login."""
    @wraps(f)
    def wrapper(*args, **kwargs):
        if session.get("house_no") is not None:
            return f(*args, **kwargs)
        return redirect(url_for('login_owner', next=request.path))
    return wrapper
def apology(message, code=400):
    """Render message as an apology to user.

    The navbar template is chosen from whichever role is present in the
    session (admin wins over staff, staff over house owner); anonymous
    visitors get the login-page navbar.  Returns (rendered page, code).
    """
    role_navbars = (
        ("admin_id", "AdminNavbar.html"),
        ("staff_id", "StaffNavbar.html"),
        ("house_no", "HouseNavbar.html"),
    )
    nav = "LoginPageNavbar.html"
    for session_key, template in role_navbars:
        if session.get(session_key):
            nav = template
            break
    return render_template("apology.html", nav=nav, top=code, bottom=(message)), code
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.